[turbofan] Initial support for keyed access to fast JSArrays.

This adds some initial support for keyed element access to fast,
non-holey JSArray objects.

Also renames PropertyAccessInfoFactory to AccessInfoFactory and
PropertyAccessMode to AccessMode.

R=jarin@chromium.org
BUG=v8:4470
LOG=n

Review URL: https://codereview.chromium.org/1418213010

Cr-Commit-Position: refs/heads/master@{#31717}
This commit is contained in:
bmeurer 2015-11-02 10:29:47 -08:00 committed by Commit bot
parent 0fa11bfb80
commit 1195b0e24d
6 changed files with 413 additions and 74 deletions

View File

@ -692,6 +692,8 @@ source_set("v8_base") {
"src/compilation-statistics.h",
"src/compiler/access-builder.cc",
"src/compiler/access-builder.h",
"src/compiler/access-info.cc",
"src/compiler/access-info.h",
"src/compiler/all-nodes.cc",
"src/compiler/all-nodes.h",
"src/compiler/ast-graph-builder.cc",
@ -823,8 +825,6 @@ source_set("v8_base") {
"src/compiler/pipeline.h",
"src/compiler/pipeline-statistics.cc",
"src/compiler/pipeline-statistics.h",
"src/compiler/property-access-info.cc",
"src/compiler/property-access-info.h",
"src/compiler/raw-machine-assembler.cc",
"src/compiler/raw-machine-assembler.h",
"src/compiler/register-allocator.cc",

View File

@ -6,7 +6,7 @@
#include "src/accessors.h"
#include "src/compilation-dependencies.h"
#include "src/compiler/property-access-info.h"
#include "src/compiler/access-info.h"
#include "src/field-index-inl.h"
#include "src/objects-inl.h" // TODO(mstarzinger): Temporary cycle breaker!
#include "src/type-cache.h"
@ -16,11 +16,35 @@ namespace v8 {
namespace internal {
namespace compiler {
std::ostream& operator<<(std::ostream& os, PropertyAccessMode access_mode) {
namespace {
bool CanInlineElementAccess(Handle<Map> map) {
// TODO(bmeurer): IsJSObjectMap
// TODO(bmeurer): !map->has_dictionary_elements()
// TODO(bmeurer): !map->has_sloppy_arguments_elements()
return map->IsJSArrayMap() && map->has_fast_elements() &&
!map->has_indexed_interceptor() && !map->is_access_check_needed();
}
bool CanInlinePropertyAccess(Handle<Map> map) {
// TODO(bmeurer): Add support for Number primitives.
// if (map->instance_type() == HEAP_NUMBER_TYPE) return false;
if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
return map->IsJSObjectMap() && !map->is_dictionary_map() &&
!map->has_named_interceptor() &&
// TODO(verwaest): Whitelist contexts to which we have access.
!map->is_access_check_needed();
}
} // namespace
std::ostream& operator<<(std::ostream& os, AccessMode access_mode) {
switch (access_mode) {
case PropertyAccessMode::kLoad:
case AccessMode::kLoad:
return os << "Load";
case PropertyAccessMode::kStore:
case AccessMode::kStore:
return os << "Store";
}
UNREACHABLE();
@ -52,6 +76,9 @@ PropertyAccessInfo PropertyAccessInfo::DataField(
}
ElementAccessInfo::ElementAccessInfo() : receiver_type_(Type::None()) {}
PropertyAccessInfo::PropertyAccessInfo()
: kind_(kInvalid), receiver_type_(Type::None()), field_type_(Type::Any()) {}
@ -86,9 +113,8 @@ PropertyAccessInfo::PropertyAccessInfo(MaybeHandle<JSObject> holder,
field_type_(field_type) {}
PropertyAccessInfoFactory::PropertyAccessInfoFactory(
CompilationDependencies* dependencies, Handle<Context> native_context,
Zone* zone)
AccessInfoFactory::AccessInfoFactory(CompilationDependencies* dependencies,
Handle<Context> native_context, Zone* zone)
: dependencies_(dependencies),
native_context_(native_context),
isolate_(native_context->GetIsolate()),
@ -96,23 +122,52 @@ PropertyAccessInfoFactory::PropertyAccessInfoFactory(
zone_(zone) {}
namespace {
bool AccessInfoFactory::ComputeElementAccessInfo(
Handle<Map> map, AccessMode access_mode, ElementAccessInfo* access_info) {
// Check if it is safe to inline element access for the {map}.
if (!CanInlineElementAccess(map)) return false;
bool CanInlinePropertyAccess(Handle<Map> map) {
// TODO(bmeurer): Do something about the number stuff.
if (map->instance_type() == HEAP_NUMBER_TYPE) return false;
if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
return map->IsJSObjectMap() && !map->is_dictionary_map() &&
!map->has_named_interceptor() &&
// TODO(verwaest): Whitelist contexts to which we have access.
!map->is_access_check_needed();
// TODO(bmeurer): Add support for holey elements.
ElementsKind elements_kind = map->elements_kind();
if (IsHoleyElementsKind(elements_kind)) return false;
// Certain (monomorphic) stores need a prototype chain check because shape
// changes could allow callbacks on elements in the chain that are not
// compatible with monomorphic keyed stores.
MaybeHandle<JSObject> holder;
if (access_mode == AccessMode::kStore && map->prototype()->IsJSObject()) {
for (PrototypeIterator i(map); !i.IsAtEnd(); i.Advance()) {
Handle<JSReceiver> prototype =
PrototypeIterator::GetCurrent<JSReceiver>(i);
if (!prototype->IsJSObject()) return false;
holder = Handle<JSObject>::cast(prototype);
}
}
*access_info =
ElementAccessInfo(Type::Class(map, zone()), elements_kind, holder);
return true;
}
} // namespace
bool AccessInfoFactory::ComputeElementAccessInfos(
MapHandleList const& maps, AccessMode access_mode,
ZoneVector<ElementAccessInfo>* access_infos) {
for (Handle<Map> map : maps) {
if (Map::TryUpdate(map).ToHandle(&map)) {
ElementAccessInfo access_info;
if (!ComputeElementAccessInfo(map, access_mode, &access_info)) {
return false;
}
access_infos->push_back(access_info);
}
}
return true;
}
bool PropertyAccessInfoFactory::ComputePropertyAccessInfo(
Handle<Map> map, Handle<Name> name, PropertyAccessMode access_mode,
bool AccessInfoFactory::ComputePropertyAccessInfo(
Handle<Map> map, Handle<Name> name, AccessMode access_mode,
PropertyAccessInfo* access_info) {
// Check if it is safe to inline property access for the {map}.
if (!CanInlinePropertyAccess(map)) return false;
@ -121,7 +176,7 @@ bool PropertyAccessInfoFactory::ComputePropertyAccessInfo(
Handle<Map> receiver_map = map;
// We support fast inline cases for certain JSObject getters.
if (access_mode == PropertyAccessMode::kLoad &&
if (access_mode == AccessMode::kLoad &&
LookupSpecialFieldAccessor(map, name, access_info)) {
return true;
}
@ -133,7 +188,7 @@ bool PropertyAccessInfoFactory::ComputePropertyAccessInfo(
int const number = descriptors->SearchWithCache(*name, *map);
if (number != DescriptorArray::kNotFound) {
PropertyDetails const details = descriptors->GetDetails(number);
if (access_mode == PropertyAccessMode::kStore) {
if (access_mode == AccessMode::kStore) {
// Don't bother optimizing stores to read-only properties.
if (details.IsReadOnly()) {
return false;
@ -170,7 +225,7 @@ bool PropertyAccessInfoFactory::ComputePropertyAccessInfo(
Type::TaggedPointer(), zone());
if (field_type->Is(Type::None())) {
// Store is not safe if the field type was cleared.
if (access_mode == PropertyAccessMode::kStore) return false;
if (access_mode == AccessMode::kStore) return false;
// The field type was cleared by the GC, so we don't know anything
// about the contents now.
@ -216,7 +271,7 @@ bool PropertyAccessInfoFactory::ComputePropertyAccessInfo(
// Store to property not found on the receiver or any prototype, we need
// to transition to a new data property.
// Implemented according to ES6 section 9.1.9 [[Set]] (P, V, Receiver)
if (access_mode == PropertyAccessMode::kStore) {
if (access_mode == AccessMode::kStore) {
return LookupTransition(receiver_map, name, holder, access_info);
}
// The property was not found, return undefined or throw depending
@ -242,9 +297,8 @@ bool PropertyAccessInfoFactory::ComputePropertyAccessInfo(
}
bool PropertyAccessInfoFactory::ComputePropertyAccessInfos(
MapHandleList const& maps, Handle<Name> name,
PropertyAccessMode access_mode,
bool AccessInfoFactory::ComputePropertyAccessInfos(
MapHandleList const& maps, Handle<Name> name, AccessMode access_mode,
ZoneVector<PropertyAccessInfo>* access_infos) {
for (Handle<Map> map : maps) {
if (Map::TryUpdate(map).ToHandle(&map)) {
@ -259,7 +313,7 @@ bool PropertyAccessInfoFactory::ComputePropertyAccessInfos(
}
bool PropertyAccessInfoFactory::LookupSpecialFieldAccessor(
bool AccessInfoFactory::LookupSpecialFieldAccessor(
Handle<Map> map, Handle<Name> name, PropertyAccessInfo* access_info) {
// Check for special JSObject field accessors.
int offset;
@ -294,9 +348,9 @@ bool PropertyAccessInfoFactory::LookupSpecialFieldAccessor(
}
bool PropertyAccessInfoFactory::LookupTransition(
Handle<Map> map, Handle<Name> name, MaybeHandle<JSObject> holder,
PropertyAccessInfo* access_info) {
bool AccessInfoFactory::LookupTransition(Handle<Map> map, Handle<Name> name,
MaybeHandle<JSObject> holder,
PropertyAccessInfo* access_info) {
// Check if the {map} has a data transition with the given {name}.
if (map->unused_property_fields() == 0) return false;
Handle<Map> transition_map;
@ -349,9 +403,7 @@ bool PropertyAccessInfoFactory::LookupTransition(
}
Factory* PropertyAccessInfoFactory::factory() const {
return isolate()->factory();
}
Factory* AccessInfoFactory::factory() const { return isolate()->factory(); }
} // namespace compiler
} // namespace internal

View File

@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_COMPILER_PROPERTY_ACCESS_INFO_H_
#define V8_COMPILER_PROPERTY_ACCESS_INFO_H_
#ifndef V8_COMPILER_ACCESS_INFO_H_
#define V8_COMPILER_ACCESS_INFO_H_
#include <iosfwd>
@ -23,9 +23,30 @@ class TypeCache;
namespace compiler {
// Whether we are loading a property or storing to a property.
enum class PropertyAccessMode { kLoad, kStore };
enum class AccessMode { kLoad, kStore };
std::ostream& operator<<(std::ostream&, PropertyAccessMode);
std::ostream& operator<<(std::ostream&, AccessMode);
// This class encapsulates all information required to access a certain element.
class ElementAccessInfo final {
public:
ElementAccessInfo();
ElementAccessInfo(Type* receiver_type, ElementsKind elements_kind,
MaybeHandle<JSObject> holder)
: elements_kind_(elements_kind),
holder_(holder),
receiver_type_(receiver_type) {}
MaybeHandle<JSObject> holder() const { return holder_; }
ElementsKind elements_kind() const { return elements_kind_; }
Type* receiver_type() const { return receiver_type_; }
private:
ElementsKind elements_kind_;
MaybeHandle<JSObject> holder_;
Type* receiver_type_;
};
// This class encapsulates all information required to access a certain
@ -78,17 +99,22 @@ class PropertyAccessInfo final {
};
// Factory class for {PropertyAccessInfo}s.
class PropertyAccessInfoFactory final {
// Factory class for {ElementAccessInfo}s and {PropertyAccessInfo}s.
class AccessInfoFactory final {
public:
PropertyAccessInfoFactory(CompilationDependencies* dependencies,
Handle<Context> native_context, Zone* zone);
AccessInfoFactory(CompilationDependencies* dependencies,
Handle<Context> native_context, Zone* zone);
bool ComputeElementAccessInfo(Handle<Map> map, AccessMode access_mode,
ElementAccessInfo* access_info);
bool ComputeElementAccessInfos(MapHandleList const& maps,
AccessMode access_mode,
ZoneVector<ElementAccessInfo>* access_infos);
bool ComputePropertyAccessInfo(Handle<Map> map, Handle<Name> name,
PropertyAccessMode access_mode,
AccessMode access_mode,
PropertyAccessInfo* access_info);
bool ComputePropertyAccessInfos(MapHandleList const& maps, Handle<Name> name,
PropertyAccessMode access_mode,
AccessMode access_mode,
ZoneVector<PropertyAccessInfo>* access_infos);
private:
@ -110,11 +136,11 @@ class PropertyAccessInfoFactory final {
TypeCache const& type_cache_;
Zone* const zone_;
DISALLOW_COPY_AND_ASSIGN(PropertyAccessInfoFactory);
DISALLOW_COPY_AND_ASSIGN(AccessInfoFactory);
};
} // namespace compiler
} // namespace internal
} // namespace v8
#endif // V8_COMPILER_PROPERTY_ACCESS_INFO_H_
#endif // V8_COMPILER_ACCESS_INFO_H_

View File

@ -307,8 +307,8 @@ Reduction JSNativeContextSpecialization::ReduceJSStoreGlobal(Node* node) {
Reduction JSNativeContextSpecialization::ReduceNamedAccess(
Node* node, Node* value, MapHandleList const& receiver_maps,
Handle<Name> name, PropertyAccessMode access_mode,
LanguageMode language_mode, Node* index) {
Handle<Name> name, AccessMode access_mode, LanguageMode language_mode,
Node* index) {
DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
node->opcode() == IrOpcode::kJSStoreNamed ||
node->opcode() == IrOpcode::kJSLoadProperty ||
@ -417,7 +417,7 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
// Generate the actual property access.
if (access_info.IsNotFound()) {
DCHECK_EQ(PropertyAccessMode::kLoad, access_mode);
DCHECK_EQ(AccessMode::kLoad, access_mode);
if (is_strong(language_mode)) {
// TODO(bmeurer/mstarzinger): Add support for lowering inside try
// blocks rewiring the IfException edge to a runtime call/throw.
@ -428,7 +428,7 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
}
} else if (access_info.IsDataConstant()) {
this_value = jsgraph()->Constant(access_info.constant());
if (access_mode == PropertyAccessMode::kStore) {
if (access_mode == AccessMode::kStore) {
Node* check = graph()->NewNode(
simplified()->ReferenceEqual(Type::Tagged()), value, this_value);
Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
@ -440,7 +440,7 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
DCHECK(access_info.IsDataField());
FieldIndex const field_index = access_info.field_index();
Type* const field_type = access_info.field_type();
if (access_mode == PropertyAccessMode::kLoad &&
if (access_mode == AccessMode::kLoad &&
access_info.holder().ToHandle(&holder)) {
this_receiver = jsgraph()->Constant(holder);
}
@ -452,7 +452,7 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
}
FieldAccess field_access = {kTaggedBase, field_index.offset(), name,
field_type, kMachAnyTagged};
if (access_mode == PropertyAccessMode::kLoad) {
if (access_mode == AccessMode::kLoad) {
if (field_type->Is(Type::UntaggedFloat64())) {
if (!field_index.is_inobject() || field_index.is_hidden_field() ||
!FLAG_unbox_double_fields) {
@ -468,7 +468,7 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
graph()->NewNode(simplified()->LoadField(field_access),
this_storage, this_effect, this_control);
} else {
DCHECK_EQ(PropertyAccessMode::kStore, access_mode);
DCHECK_EQ(AccessMode::kStore, access_mode);
if (field_type->Is(Type::UntaggedFloat64())) {
Node* check =
graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
@ -581,9 +581,9 @@ Reduction JSNativeContextSpecialization::ReduceNamedAccess(
controls.push_back(this_control);
}
// Collect the fallthru control as final "exit" control.
// Collect the fallthrough control as final "exit" control.
if (fallthrough_control != control) {
// Mark the last fallthru branch as deferred.
// Mark the last fallthrough branch as deferred.
Node* branch = NodeProperties::GetControlInput(fallthrough_control);
DCHECK_EQ(IrOpcode::kBranch, branch->opcode());
if (fallthrough_control->opcode() == IrOpcode::kIfTrue) {
@ -647,7 +647,7 @@ Reduction JSNativeContextSpecialization::ReduceJSLoadNamed(Node* node) {
// Try to lower the named access based on the {receiver_maps}.
return ReduceNamedAccess(node, value, receiver_maps, p.name(),
PropertyAccessMode::kLoad, p.language_mode());
AccessMode::kLoad, p.language_mode());
}
@ -665,13 +665,269 @@ Reduction JSNativeContextSpecialization::ReduceJSStoreNamed(Node* node) {
// Try to lower the named access based on the {receiver_maps}.
return ReduceNamedAccess(node, value, receiver_maps, p.name(),
PropertyAccessMode::kStore, p.language_mode());
AccessMode::kStore, p.language_mode());
}
Reduction JSNativeContextSpecialization::ReduceElementAccess(
Node* node, Node* index, Node* value, MapHandleList const& receiver_maps,
AccessMode access_mode, LanguageMode language_mode) {
DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
node->opcode() == IrOpcode::kJSStoreProperty);
Node* receiver = NodeProperties::GetValueInput(node, 0);
Node* frame_state = NodeProperties::GetFrameStateInput(node, 1);
Node* effect = NodeProperties::GetEffectInput(node);
Node* control = NodeProperties::GetControlInput(node);
// Not much we can do if deoptimization support is disabled.
if (!(flags() & kDeoptimizationEnabled)) return NoChange();
// Compute element access infos for the receiver maps.
ZoneVector<ElementAccessInfo> access_infos(zone());
if (!access_info_factory().ComputeElementAccessInfos(
receiver_maps, access_mode, &access_infos)) {
return NoChange();
}
// Nothing to do if we have no non-deprecated maps.
if (access_infos.empty()) return NoChange();
// The final states for every polymorphic branch. We join them with
// Merge+Phi+EffectPhi at the bottom.
ZoneVector<Node*> values(zone());
ZoneVector<Node*> effects(zone());
ZoneVector<Node*> controls(zone());
// The list of "exiting" controls, which currently go to a single deoptimize.
// TODO(bmeurer): Consider using an IC as fallback.
Node* const exit_effect = effect;
ZoneVector<Node*> exit_controls(zone());
// Ensure that {receiver} is a heap object.
Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
Node* branch =
graph()->NewNode(common()->Branch(BranchHint::kFalse), check, control);
exit_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
control = graph()->NewNode(common()->IfFalse(), branch);
// Load the {receiver} map. The resulting effect is the dominating effect for
// all (polymorphic) branches.
Node* receiver_map = effect =
graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
receiver, effect, control);
// Generate code for the various different element access patterns.
Node* fallthrough_control = control;
for (ElementAccessInfo const& access_info : access_infos) {
Node* this_receiver = receiver;
Node* this_value = value;
Node* this_index = index;
Node* this_effect = effect;
Node* this_control;
// Perform map check on {receiver}.
Type* receiver_type = access_info.receiver_type();
{
ZoneVector<Node*> this_controls(zone());
for (auto i = access_info.receiver_type()->Classes(); !i.Done();
i.Advance()) {
Handle<Map> map = i.Current();
Node* check =
graph()->NewNode(simplified()->ReferenceEqual(Type::Internal()),
receiver_map, jsgraph()->Constant(map));
Node* branch =
graph()->NewNode(common()->Branch(), check, fallthrough_control);
this_controls.push_back(graph()->NewNode(common()->IfTrue(), branch));
fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
}
int const this_control_count = static_cast<int>(this_controls.size());
this_control =
(this_control_count == 1)
? this_controls.front()
: graph()->NewNode(common()->Merge(this_control_count),
this_control_count, &this_controls.front());
}
// Certain stores need a prototype chain check because shape changes
// could allow callbacks on elements in the prototype chain that are
// not compatible with (monomorphic) keyed stores.
Handle<JSObject> holder;
if (access_info.holder().ToHandle(&holder)) {
AssumePrototypesStable(receiver_type, holder);
}
// Check that the {index} is actually a Number.
if (!NumberMatcher(this_index).HasValue()) {
Node* check =
graph()->NewNode(simplified()->ObjectIsNumber(), this_index);
Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
check, this_control);
exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
this_control = graph()->NewNode(common()->IfTrue(), branch);
this_index = graph()->NewNode(common()->Guard(Type::Number()), this_index,
this_control);
}
// Convert the {index} to an unsigned32 value and check if the result is
// equal to the original {index}.
if (!NumberMatcher(this_index).IsInRange(0.0, kMaxUInt32)) {
Node* this_index32 =
graph()->NewNode(simplified()->NumberToUint32(), this_index);
Node* check = graph()->NewNode(simplified()->NumberEqual(), this_index32,
this_index);
Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
check, this_control);
exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
this_control = graph()->NewNode(common()->IfTrue(), branch);
this_index = this_index32;
}
// TODO(bmeurer): We currently specialize based on elements kind. We should
// also be able to properly support strings and other JSObjects here.
ElementsKind elements_kind = access_info.elements_kind();
// Load the elements for the {receiver}.
Node* this_elements = this_effect = graph()->NewNode(
simplified()->LoadField(AccessBuilder::ForJSObjectElements()),
this_receiver, this_effect, this_control);
// Don't try to store to a copy-on-write backing store.
if (access_mode == AccessMode::kStore &&
IsFastSmiOrObjectElementsKind(elements_kind)) {
Node* this_elements_map = this_effect =
graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
this_elements, this_effect, this_control);
check = graph()->NewNode(
simplified()->ReferenceEqual(Type::Any()), this_elements_map,
jsgraph()->HeapConstant(factory()->fixed_array_map()));
branch = graph()->NewNode(common()->Branch(BranchHint::kTrue), check,
this_control);
exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
this_control = graph()->NewNode(common()->IfTrue(), branch);
}
// Load the length of the {receiver}.
FieldAccess length_access = {
kTaggedBase, JSArray::kLengthOffset, factory()->name_string(),
type_cache_.kJSArrayLengthType, kMachAnyTagged};
if (IsFastDoubleElementsKind(elements_kind)) {
length_access.type = type_cache_.kFixedDoubleArrayLengthType;
} else if (IsFastElementsKind(elements_kind)) {
length_access.type = type_cache_.kFixedArrayLengthType;
}
Node* this_length = this_effect =
graph()->NewNode(simplified()->LoadField(length_access), this_receiver,
this_effect, this_control);
// Check that the {index} is in the valid range for the {receiver}.
Node* check = graph()->NewNode(simplified()->NumberLessThan(), this_index,
this_length);
Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue), check,
this_control);
exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
this_control = graph()->NewNode(common()->IfTrue(), branch);
// Compute the element access.
Type* element_type = Type::Any();
MachineType element_machine_type = kMachAnyTagged;
if (IsFastDoubleElementsKind(elements_kind)) {
element_type = type_cache_.kFloat64;
element_machine_type = kMachFloat64;
} else if (IsFastSmiElementsKind(elements_kind)) {
element_type = type_cache_.kSmi;
}
ElementAccess element_access = {kTaggedBase, FixedArray::kHeaderSize,
element_type, element_machine_type};
// Access the actual element.
if (access_mode == AccessMode::kLoad) {
this_value = this_effect = graph()->NewNode(
simplified()->LoadElement(element_access), this_elements, this_index,
this_effect, this_control);
} else {
DCHECK_EQ(AccessMode::kStore, access_mode);
if (IsFastSmiElementsKind(elements_kind)) {
Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), this_value);
Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
check, this_control);
exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
this_control = graph()->NewNode(common()->IfTrue(), branch);
} else if (IsFastDoubleElementsKind(elements_kind)) {
Node* check =
graph()->NewNode(simplified()->ObjectIsNumber(), this_value);
Node* branch = graph()->NewNode(common()->Branch(BranchHint::kTrue),
check, this_control);
exit_controls.push_back(graph()->NewNode(common()->IfFalse(), branch));
this_control = graph()->NewNode(common()->IfTrue(), branch);
this_value = graph()->NewNode(common()->Guard(Type::Number()),
this_value, this_control);
}
this_effect = graph()->NewNode(simplified()->StoreElement(element_access),
this_elements, this_index, this_value,
this_effect, this_control);
}
// Remember the final state for this element access.
values.push_back(this_value);
effects.push_back(this_effect);
controls.push_back(this_control);
}
// Collect the fallthrough control as final "exit" control.
if (fallthrough_control != control) {
// Mark the last fallthrough branch as deferred.
Node* branch = NodeProperties::GetControlInput(fallthrough_control);
DCHECK_EQ(IrOpcode::kBranch, branch->opcode());
if (fallthrough_control->opcode() == IrOpcode::kIfTrue) {
NodeProperties::ChangeOp(branch, common()->Branch(BranchHint::kFalse));
} else {
DCHECK_EQ(IrOpcode::kIfFalse, fallthrough_control->opcode());
NodeProperties::ChangeOp(branch, common()->Branch(BranchHint::kTrue));
}
}
exit_controls.push_back(fallthrough_control);
// Generate the single "exit" point, where we get if either all map/instance
// type checks failed, or one of the assumptions inside one of the cases
fails (i.e. failing prototype chain check).
// TODO(bmeurer): Consider falling back to IC here if deoptimization is
// disabled.
int const exit_control_count = static_cast<int>(exit_controls.size());
Node* exit_control =
(exit_control_count == 1)
? exit_controls.front()
: graph()->NewNode(common()->Merge(exit_control_count),
exit_control_count, &exit_controls.front());
Node* deoptimize = graph()->NewNode(common()->Deoptimize(), frame_state,
exit_effect, exit_control);
// TODO(bmeurer): This should be on the AdvancedReducer somehow.
NodeProperties::MergeControlToEnd(graph(), common(), deoptimize);
// Generate the final merge point for all (polymorphic) branches.
int const control_count = static_cast<int>(controls.size());
if (control_count == 0) {
value = effect = control = jsgraph()->Dead();
} else if (control_count == 1) {
value = values.front();
effect = effects.front();
control = controls.front();
} else {
control = graph()->NewNode(common()->Merge(control_count), control_count,
&controls.front());
values.push_back(control);
value = graph()->NewNode(common()->Phi(kMachAnyTagged, control_count),
control_count + 1, &values.front());
effects.push_back(control);
effect = graph()->NewNode(common()->EffectPhi(control_count),
control_count + 1, &effects.front());
}
return Replace(node, value, effect, control);
}
Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
Node* node, Node* index, Node* value, FeedbackNexus const& nexus,
PropertyAccessMode access_mode, LanguageMode language_mode) {
AccessMode access_mode, LanguageMode language_mode) {
DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
node->opcode() == IrOpcode::kJSStoreProperty);
@ -691,7 +947,8 @@ Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
if (Object::ToName(isolate(), mindex.Value()).ToHandle(&name)) {
uint32_t array_index;
if (name->AsArrayIndex(&array_index)) {
// TODO(bmeurer): Optimize element access with constant {index}.
// Use the constant array index.
index = jsgraph()->Constant(static_cast<double>(array_index));
} else {
name = factory()->InternalizeName(name);
return ReduceNamedAccess(node, value, receiver_maps, name, access_mode,
@ -707,7 +964,9 @@ Reduction JSNativeContextSpecialization::ReduceKeyedAccess(
language_mode, index);
}
return NoChange();
// Try to lower the element access based on the {receiver_maps}.
return ReduceElementAccess(node, index, value, receiver_maps, access_mode,
language_mode);
}
@ -722,7 +981,7 @@ Reduction JSNativeContextSpecialization::ReduceJSLoadProperty(Node* node) {
KeyedLoadICNexus nexus(p.feedback().vector(), p.feedback().slot());
// Try to lower the keyed access based on the {nexus}.
return ReduceKeyedAccess(node, index, value, nexus, PropertyAccessMode::kLoad,
return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kLoad,
p.language_mode());
}
@ -738,8 +997,8 @@ Reduction JSNativeContextSpecialization::ReduceJSStoreProperty(Node* node) {
KeyedStoreICNexus nexus(p.feedback().vector(), p.feedback().slot());
// Try to lower the keyed access based on the {nexus}.
return ReduceKeyedAccess(node, index, value, nexus,
PropertyAccessMode::kStore, p.language_mode());
return ReduceKeyedAccess(node, index, value, nexus, AccessMode::kStore,
p.language_mode());
}
@ -780,7 +1039,7 @@ void JSNativeContextSpecialization::AssumePrototypesStable(
.ToHandle(&constructor)) {
map = handle(constructor->initial_map(), isolate());
}
for (PrototypeIterator j(map);; j.Advance()) {
for (PrototypeIterator j(map); !j.IsAtEnd(); j.Advance()) {
// Check that the {prototype} still has the same map. All prototype
// maps are guaranteed to be stable, so it's sufficient to add a
// stability dependency here.

View File

@ -6,8 +6,8 @@
#define V8_COMPILER_JS_NATIVE_CONTEXT_SPECIALIZATION_H_
#include "src/base/flags.h"
#include "src/compiler/access-info.h"
#include "src/compiler/graph-reducer.h"
#include "src/compiler/property-access-info.h"
#include "src/compiler/simplified-operator.h"
namespace v8 {
@ -65,13 +65,17 @@ class JSNativeContextSpecialization final : public AdvancedReducer {
}
Reduction Replace(Node* node, Handle<Object> value);
Reduction ReduceElementAccess(Node* node, Node* index, Node* value,
MapHandleList const& receiver_maps,
AccessMode access_mode,
LanguageMode language_mode);
Reduction ReduceKeyedAccess(Node* node, Node* index, Node* value,
FeedbackNexus const& nexus,
PropertyAccessMode access_mode,
AccessMode access_mode,
LanguageMode language_mode);
Reduction ReduceNamedAccess(Node* node, Node* value,
MapHandleList const& receiver_maps,
Handle<Name> name, PropertyAccessMode access_mode,
Handle<Name> name, AccessMode access_mode,
LanguageMode language_mode,
Node* index = nullptr);
@ -96,9 +100,7 @@ class JSNativeContextSpecialization final : public AdvancedReducer {
Handle<Context> native_context() const { return native_context_; }
CompilationDependencies* dependencies() const { return dependencies_; }
Zone* zone() const { return zone_; }
PropertyAccessInfoFactory& access_info_factory() {
return access_info_factory_;
}
AccessInfoFactory& access_info_factory() { return access_info_factory_; }
JSGraph* const jsgraph_;
Flags const flags_;
@ -107,7 +109,7 @@ class JSNativeContextSpecialization final : public AdvancedReducer {
CompilationDependencies* const dependencies_;
Zone* const zone_;
TypeCache const& type_cache_;
PropertyAccessInfoFactory access_info_factory_;
AccessInfoFactory access_info_factory_;
DISALLOW_COPY_AND_ASSIGN(JSNativeContextSpecialization);
};

View File

@ -451,6 +451,8 @@
'../../src/compilation-statistics.h',
'../../src/compiler/access-builder.cc',
'../../src/compiler/access-builder.h',
'../../src/compiler/access-info.cc',
'../../src/compiler/access-info.h',
'../../src/compiler/all-nodes.cc',
'../../src/compiler/all-nodes.h',
'../../src/compiler/ast-graph-builder.cc',
@ -583,8 +585,6 @@
'../../src/compiler/pipeline.h',
'../../src/compiler/pipeline-statistics.cc',
'../../src/compiler/pipeline-statistics.h',
'../../src/compiler/property-access-info.cc',
'../../src/compiler/property-access-info.h',
'../../src/compiler/raw-machine-assembler.cc',
'../../src/compiler/raw-machine-assembler.h',
'../../src/compiler/register-allocator.cc',