Generate KeyedLoadGeneric with Hydrogen
R=verwaest@chromium.org

Review URL: https://codereview.chromium.org/57123002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21774 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

commit 3b9039abc3
parent e832badd19
@@ -118,6 +118,16 @@ void RegExpConstructResultStub::InitializeInterfaceDescriptor(
}


void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r1, r0 };
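  // r1: receiver, r0: key (the order matches GetParameter(0) and
  // GetParameter(1) in the Hydrogen builder below).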
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { r0 };
@@ -101,6 +101,16 @@ void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
}


void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { x1, x0 };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
}


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  // x1: receiver
@@ -1390,7 +1390,11 @@ HValue* CodeStubGraphBuilder<KeyedLoadDictionaryElementStub>::BuildCodeStub() {

  Add<HCheckSmi>(key);

  return BuildUncheckedDictionaryElementLoad(receiver, key);
  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  return BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash);
}

@@ -1417,4 +1421,304 @@ Handle<Code> RegExpConstructResultStub::GenerateCode() {
}


template <>
class CodeStubGraphBuilder<KeyedLoadGenericElementStub>
    : public CodeStubGraphBuilderBase {
 public:
  CodeStubGraphBuilder(Isolate* isolate,
                       KeyedLoadGenericElementStub* stub)
      : CodeStubGraphBuilderBase(isolate, stub) {}

 protected:
  virtual HValue* BuildCodeStub();

  void BuildElementsKindLimitCheck(HGraphBuilder::IfBuilder* if_builder,
                                   HValue* bit_field2,
                                   ElementsKind kind);

  void BuildFastElementLoad(HGraphBuilder::IfBuilder* if_builder,
                            HValue* receiver,
                            HValue* key,
                            HValue* instance_type,
                            HValue* bit_field2,
                            ElementsKind kind);

  void BuildExternalElementLoad(HGraphBuilder::IfBuilder* if_builder,
                                HValue* receiver,
                                HValue* key,
                                HValue* instance_type,
                                HValue* bit_field2,
                                ElementsKind kind);

  KeyedLoadGenericElementStub* casted_stub() {
    return static_cast<KeyedLoadGenericElementStub*>(stub());
  }
};


void CodeStubGraphBuilder<
    KeyedLoadGenericElementStub>::BuildElementsKindLimitCheck(
        HGraphBuilder::IfBuilder* if_builder,
        HValue* bit_field2,
        ElementsKind kind) {
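  // The stub tests element kinds in increasing order, so comparing bit_field2
  // against the encoding of the *next* kind with Token::LT is sufficient here:
  // earlier Else branches have already ruled out all smaller kinds.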
  ElementsKind next_kind = static_cast<ElementsKind>(kind + 1);
  HValue* kind_limit = Add<HConstant>(
      static_cast<int>(Map::ElementsKindBits::encode(next_kind)));

  if_builder->If<HCompareNumericAndBranch>(bit_field2, kind_limit, Token::LT);
  if_builder->Then();
}


void CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildFastElementLoad(
    HGraphBuilder::IfBuilder* if_builder,
    HValue* receiver,
    HValue* key,
    HValue* instance_type,
    HValue* bit_field2,
    ElementsKind kind) {
  ASSERT(!IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  IfBuilder js_array_check(this);
  js_array_check.If<HCompareNumericAndBranch>(
      instance_type, Add<HConstant>(JS_ARRAY_TYPE), Token::EQ);
  js_array_check.Then();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              true, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.Else();
  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
  js_array_check.End();
}


void CodeStubGraphBuilder<
    KeyedLoadGenericElementStub>::BuildExternalElementLoad(
        HGraphBuilder::IfBuilder* if_builder,
        HValue* receiver,
        HValue* key,
        HValue* instance_type,
        HValue* bit_field2,
        ElementsKind kind) {
  ASSERT(IsExternalArrayElementsKind(kind));

  BuildElementsKindLimitCheck(if_builder, bit_field2, kind);

  Push(BuildUncheckedMonomorphicElementAccess(receiver, key, NULL,
                                              false, kind,
                                              LOAD, NEVER_RETURN_HOLE,
                                              STANDARD_STORE));
}

HValue* CodeStubGraphBuilder<KeyedLoadGenericElementStub>::BuildCodeStub() {
  HValue* receiver = GetParameter(0);
  HValue* key = GetParameter(1);

  // Split into a smi/integer case and unique string case.
  HIfContinuation index_name_split_continuation(graph()->CreateBasicBlock(),
                                                graph()->CreateBasicBlock());

  BuildKeyedIndexCheck(key, &index_name_split_continuation);

  IfBuilder index_name_split(this, &index_name_split_continuation);
  index_name_split.Then();
  {
    // Key is an index (number)
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasIndexedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                       HObjectAccess::ForMap());

    HValue* instance_type =
        Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                             HObjectAccess::ForMapInstanceType());

    HValue* bit_field2 = Add<HLoadNamedField>(map,
                                              static_cast<HValue*>(NULL),
                                              HObjectAccess::ForMapBitField2());

    IfBuilder kind_if(this);
    BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                         FAST_HOLEY_ELEMENTS);

    kind_if.Else();
    {
      BuildFastElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                           FAST_HOLEY_DOUBLE_ELEMENTS);
    }
    kind_if.Else();

    // The DICTIONARY_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2, DICTIONARY_ELEMENTS);
    {
      HValue* elements = AddLoadElements(receiver);

      HValue* hash = BuildElementIndexHash(key);

      Push(BuildUncheckedDictionaryElementLoad(receiver, elements, key, hash));
    }
    kind_if.Else();

    // The SLOPPY_ARGUMENTS_ELEMENTS check generates a "kind_if.Then"
    BuildElementsKindLimitCheck(&kind_if, bit_field2,
                                SLOPPY_ARGUMENTS_ELEMENTS);
    // Non-strict elements are not handled.
    Add<HDeoptimize>("non-strict elements in KeyedLoadGenericElementStub",
                     Deoptimizer::EAGER);
    Push(graph()->GetConstant0());

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT16_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_INT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT32_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_FLOAT64_ELEMENTS);

    kind_if.Else();
    BuildExternalElementLoad(&kind_if, receiver, key, instance_type, bit_field2,
                             EXTERNAL_UINT8_CLAMPED_ELEMENTS);

    kind_if.ElseDeopt("ElementsKind unhandled in KeyedLoadGenericElementStub");

    kind_if.End();
  }
  index_name_split.Else();
  {
    // Key is a unique string.
    key = Pop();

    int bit_field_mask = (1 << Map::kIsAccessCheckNeeded) |
        (1 << Map::kHasNamedInterceptor);
    BuildJSObjectCheck(receiver, bit_field_mask);

    HIfContinuation continuation;
    BuildTestForDictionaryProperties(receiver, &continuation);
    IfBuilder if_dict_properties(this, &continuation);
    if_dict_properties.Then();
    {
      // Key is string, properties are dictionary mode
      BuildNonGlobalObjectCheck(receiver);

      HValue* properties = Add<HLoadNamedField>(
          receiver, static_cast<HValue*>(NULL),
          HObjectAccess::ForPropertiesPointer());

      HValue* hash =
          Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                               HObjectAccess::ForNameHashField());

      HValue* value = BuildUncheckedDictionaryElementLoad(receiver,
                                                          properties,
                                                          key,
                                                          hash);
      Push(value);
    }
    if_dict_properties.Else();
    {
      // Key is string, properties are fast mode
      HValue* hash = BuildKeyedLookupCacheHash(receiver, key);

      ExternalReference cache_keys_ref =
          ExternalReference::keyed_lookup_cache_keys(isolate());
      HValue* cache_keys = Add<HConstant>(cache_keys_ref);

      HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                         HObjectAccess::ForMap());
      HValue* base_index = AddUncasted<HMul>(hash, Add<HConstant>(2));
      base_index->ClearFlag(HValue::kCanOverflow);

      IfBuilder lookup_if(this);
      for (int probe = 0; probe < KeyedLookupCache::kEntriesPerBucket;
           ++probe) {
        int probe_base = probe * KeyedLookupCache::kEntryLength;
        HValue* map_index = AddUncasted<HAdd>(base_index,
            Add<HConstant>(probe_base + KeyedLookupCache::kMapIndex));
        map_index->ClearFlag(HValue::kCanOverflow);
        HValue* key_index = AddUncasted<HAdd>(base_index,
            Add<HConstant>(probe_base + KeyedLookupCache::kKeyIndex));
        key_index->ClearFlag(HValue::kCanOverflow);
        HValue* map_to_check = Add<HLoadKeyed>(cache_keys,
                                               map_index,
                                               static_cast<HValue*>(NULL),
                                               FAST_ELEMENTS,
                                               NEVER_RETURN_HOLE, 0);
        lookup_if.If<HCompareObjectEqAndBranch>(map_to_check, map);
        lookup_if.And();
        HValue* key_to_check = Add<HLoadKeyed>(cache_keys,
                                               key_index,
                                               static_cast<HValue*>(NULL),
                                               FAST_ELEMENTS,
                                               NEVER_RETURN_HOLE, 0);
        lookup_if.If<HCompareObjectEqAndBranch>(key_to_check, key);
        lookup_if.Then();
        {
          ExternalReference cache_field_offsets_ref =
              ExternalReference::keyed_lookup_cache_field_offsets(isolate());
          HValue* cache_field_offsets = Add<HConstant>(cache_field_offsets_ref);
          HValue* index = AddUncasted<HAdd>(hash,
                                            Add<HConstant>(probe));
          index->ClearFlag(HValue::kCanOverflow);
          HValue* property_index = Add<HLoadKeyed>(cache_field_offsets,
                                                   index,
                                                   static_cast<HValue*>(NULL),
                                                   EXTERNAL_INT32_ELEMENTS,
                                                   NEVER_RETURN_HOLE, 0);
          Push(property_index);
        }
        lookup_if.Else();
      }
      Add<HDeoptimize>("KeyedLoad fall-back", Deoptimizer::EAGER);
      Push(graph()->GetConstant0());
      lookup_if.End();
      Push(Add<HLoadFieldByIndex>(receiver, Pop()));
    }
    if_dict_properties.End();
  }
  index_name_split.End();

  return Pop();
}


Handle<Code> KeyedLoadGenericElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


} } // namespace v8::internal

@@ -764,6 +764,13 @@ void RegExpConstructResultStub::InstallDescriptors(Isolate* isolate) {
}


// static
void KeyedLoadGenericElementStub::InstallDescriptors(Isolate* isolate) {
  KeyedLoadGenericElementStub stub(isolate);
  InstallDescriptor(isolate, &stub);
}


ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate)
    : PlatformCodeStub(isolate), argument_count_(ANY) {
  ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
@@ -53,6 +53,7 @@ namespace internal {
  V(CEntry) \
  V(JSEntry) \
  V(KeyedLoadElement) \
  V(KeyedLoadGeneric) \
  V(ArrayNoArgumentConstructor) \
  V(ArraySingleArgumentConstructor) \
  V(ArrayNArgumentsConstructor) \
@@ -1845,6 +1846,29 @@ class KeyedLoadDictionaryElementPlatformStub : public PlatformCodeStub {
};


class KeyedLoadGenericElementStub : public HydrogenCodeStub {
 public:
  explicit KeyedLoadGenericElementStub(Isolate* isolate)
      : HydrogenCodeStub(isolate) {}

  virtual Handle<Code> GenerateCode() V8_OVERRIDE;

  virtual void InitializeInterfaceDescriptor(
      CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;

  static void InstallDescriptors(Isolate* isolate);

  virtual Code::Kind GetCodeKind() const { return Code::KEYED_LOAD_IC; }
  virtual InlineCacheState GetICState() { return GENERIC; }

 private:
  Major MajorKey() { return KeyedLoadGeneric; }
  int NotMissMinorKey() { return 0; }

  DISALLOW_COPY_AND_ASSIGN(KeyedLoadGenericElementStub);
};


class DoubleToIStub : public PlatformCodeStub {
 public:
  DoubleToIStub(Isolate* isolate,
@@ -53,8 +53,9 @@ int ElementsKindToShiftSize(ElementsKind elements_kind) {


int GetDefaultHeaderSizeForElementsKind(ElementsKind elements_kind) {
  STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
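  // Element offsets are computed relative to a tagged FixedArray pointer, so
  // the header size is adjusted by kHeapObjectTag (numerically the same value
  // as kSmiTagSize, which it replaces here).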
  return IsExternalArrayElementsKind(elements_kind)
      ? 0 : (FixedArray::kHeaderSize - kSmiTagSize);
      ? 0 : (FixedArray::kHeaderSize - kHeapObjectTag);
}

@@ -75,6 +75,24 @@ inline FieldIndex FieldIndex::ForDescriptor(Map* map, int descriptor_index) {
}


inline FieldIndex FieldIndex::ForKeyedLookupCacheIndex(Map* map, int index) {
  if (FLAG_compiled_keyed_generic_loads) {
    return ForLoadByFieldIndex(map, index);
  } else {
    return ForPropertyIndex(map, index);
  }
}


inline int FieldIndex::GetKeyedLookupCacheIndex() const {
  if (FLAG_compiled_keyed_generic_loads) {
    return GetLoadByFieldIndex();
  } else {
    return property_index();
  }
}


} } // namespace v8::internal

#endif
@@ -26,9 +26,7 @@ class FieldIndex V8_FINAL {
  static FieldIndex ForLookupResult(const LookupResult* result);
  static FieldIndex ForDescriptor(Map* map, int descriptor_index);
  static FieldIndex ForLoadByFieldIndex(Map* map, int index);
  static FieldIndex ForKeyedLookupCacheIndex(Map* map, int index) {
    return ForPropertyIndex(map, index);
  }
  static FieldIndex ForKeyedLookupCacheIndex(Map* map, int index);

  bool is_inobject() const {
    return IsInObjectBits::decode(bit_field_);
@@ -75,9 +73,7 @@ class FieldIndex V8_FINAL {
    return is_double() ? (result | 1) : result;
  }

  int GetKeyedLookupCacheIndex() const {
    return property_index();
  }
  int GetKeyedLookupCacheIndex() const;

  int GetLoadFieldStubKey() const {
    return bit_field_ &
@@ -190,6 +190,8 @@ DEFINE_bool(packed_arrays, true, "optimizes arrays that have no holes")
DEFINE_bool(smi_only_arrays, true, "tracks arrays with only smi values")
DEFINE_bool(compiled_keyed_dictionary_loads, true,
            "use optimizing compiler to generate keyed dictionary load stubs")
DEFINE_bool(compiled_keyed_generic_loads, false,
            "use optimizing compiler to generate keyed generic load stubs")
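// The generated stub is off by default; assuming a standard d8 build it can be
// exercised with, for example: d8 --compiled_keyed_generic_loads script.js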
DEFINE_bool(clever_optimizations, true,
            "Optimize object size, Array shift, DOM strings and string +")
// TODO(hpayer): We will remove this flag as soon as we have pretenuring
@@ -2404,6 +2404,9 @@ class KeyedLookupCache {
  static const int kMapHashShift = 5;
  static const int kHashMask = -4;  // Zero the last two bits.
  static const int kEntriesPerBucket = 4;
  static const int kEntryLength = 2;
  static const int kMapIndex = 0;
  static const int kKeyIndex = 1;
  static const int kNotFound = -1;
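  // In the Hydrogen-generated stub, entry "probe" of the bucket for (map, key)
  // is read from cache_keys[hash * kEntryLength + probe * kEntryLength +
  // kMapIndex/kKeyIndex], and on a hit the field offset comes from
  // cache_field_offsets[hash + probe].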

  // kEntriesPerBucket should be a power of 2.
@@ -6049,9 +6049,14 @@ class HObjectAccess V8_FINAL {
    return HObjectAccess(kMaps, JSObject::kMapOffset);
  }

  static HObjectAccess ForMapInstanceSize() {
  static HObjectAccess ForMapAsInteger32() {
    return HObjectAccess(kMaps, JSObject::kMapOffset,
                         Representation::Integer32());
  }

  static HObjectAccess ForMapInObjectProperties() {
    return HObjectAccess(kInobject,
                         Map::kInstanceSizeOffset,
                         Map::kInObjectPropertiesOffset,
                         Representation::UInteger8());
  }

@@ -6061,6 +6066,38 @@ class HObjectAccess V8_FINAL {
                         Representation::UInteger8());
  }

  static HObjectAccess ForMapInstanceSize() {
    return HObjectAccess(kInobject,
                         Map::kInstanceSizeOffset,
                         Representation::UInteger8());
  }

  static HObjectAccess ForMapBitField() {
    return HObjectAccess(kInobject,
                         Map::kBitFieldOffset,
                         Representation::UInteger8());
  }

  static HObjectAccess ForMapBitField2() {
    return HObjectAccess(kInobject,
                         Map::kBitField2Offset,
                         Representation::UInteger8());
  }

  static HObjectAccess ForNameHashField() {
    return HObjectAccess(kInobject,
                         Name::kHashFieldOffset,
                         Representation::Integer32());
  }

  static HObjectAccess ForMapInstanceTypeAndBitField() {
    STATIC_ASSERT((Map::kInstanceTypeOffset & 1) == 0);
    STATIC_ASSERT(Map::kBitFieldOffset == Map::kInstanceTypeOffset + 1);
    return HObjectAccess(kInobject,
                         Map::kInstanceTypeOffset,
                         Representation::UInteger16());
  }

  static HObjectAccess ForPropertyCellValue() {
    return HObjectAccess(kInobject, PropertyCell::kValueOffset);
  }
@@ -6453,6 +6490,10 @@ class HLoadKeyed V8_FINAL
  bool HasDependency() const { return OperandAt(0) != OperandAt(2); }
  uint32_t base_offset() { return BaseOffsetField::decode(bit_field_); }
  void IncreaseBaseOffset(uint32_t base_offset) {
    // The base offset is usually simply the size of the array header, except
    // that dehoisting adds an additional offset due to array index key
    // manipulation, in which case it becomes (array header size +
    // constant-offset-from-key * kPointerSize).
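    // For example, dehoisting the access a[i + 3] on a FAST_ELEMENTS array
    // adds 3 * kPointerSize to the base offset.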
    base_offset += BaseOffsetField::decode(bit_field_);
    bit_field_ = BaseOffsetField::update(bit_field_, base_offset);
  }
@@ -6465,7 +6506,7 @@ class HLoadKeyed V8_FINAL
  void SetDehoisted(bool is_dehoisted) {
    bit_field_ = IsDehoistedField::update(bit_field_, is_dehoisted);
  }
  ElementsKind elements_kind() const {
  virtual ElementsKind elements_kind() const V8_OVERRIDE {
    return ElementsKindField::decode(bit_field_);
  }
  LoadKeyedHoleMode hole_mode() const {
|
||||
ElementsKind elements_kind() const { return elements_kind_; }
|
||||
uint32_t base_offset() { return base_offset_; }
|
||||
void IncreaseBaseOffset(uint32_t base_offset) {
|
||||
// The base offset is usually simply the size of the array header, except
|
||||
// with dehoisting adds an addition offset due to a array index key
|
||||
// manipulation, in which case it becomes (array header size +
|
||||
// constant-offset-from-key * kPointerSize)
|
||||
base_offset_ += base_offset;
|
||||
}
|
||||
virtual int MaxBaseOffsetBits() {
|
||||
|
src/hydrogen.cc (206 changed lines)
@@ -1230,6 +1230,16 @@ HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
}


HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
  HValue* map = Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());

  HValue* bit_field2 = Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                                            HObjectAccess::ForMapBitField2());
  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
}


HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
  if (obj->type().IsHeapObject()) return obj;
  return Add<HCheckHeapObject>(obj);
@@ -1399,6 +1409,194 @@ void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
}


void HGraphBuilder::BuildJSObjectCheck(HValue* receiver,
                                       int bit_field_mask) {
  // Check that the object isn't a smi.
  Add<HCheckHeapObject>(receiver);

  // Get the map of the receiver.
  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());

  // Checking the instance type and whether an access check is needed can be
  // done with a single load, since both bytes are adjacent in the map.
  HObjectAccess access(HObjectAccess::ForMapInstanceTypeAndBitField());
  HValue* instance_type_and_bit_field =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL), access);

  HValue* mask = Add<HConstant>(0x00FF | (bit_field_mask << 8));
  HValue* and_result = AddUncasted<HBitwise>(Token::BIT_AND,
                                             instance_type_and_bit_field,
                                             mask);
  HValue* sub_result = AddUncasted<HSub>(and_result,
                                         Add<HConstant>(JS_OBJECT_TYPE));
  Add<HBoundsCheck>(sub_result, Add<HConstant>(0x100 - JS_OBJECT_TYPE));
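  // The loaded 16-bit word is (bit_field << 8) | instance_type, so the bounds
  // check above passes only when every bit in bit_field_mask is clear and the
  // instance type is at least JS_OBJECT_TYPE.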
}


void HGraphBuilder::BuildKeyedIndexCheck(HValue* key,
                                         HIfContinuation* join_continuation) {
  // The ordering of the ifs below may look backward and convoluted, but it is
  // necessary. All of the paths must guarantee that the if-true path of the
  // continuation returns a smi element index and that the if-false path
  // returns either a symbol or a unique string key. All other object types
  // cause a deopt to fall back to the runtime.

  IfBuilder key_smi_if(this);
  key_smi_if.If<HIsSmiAndBranch>(key);
  key_smi_if.Then();
  {
    Push(key);  // Nothing to do, just continue to the true branch of the
                // continuation.
  }
  key_smi_if.Else();
  {
    HValue* map = Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                                       HObjectAccess::ForMap());
    HValue* instance_type =
        Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                             HObjectAccess::ForMapInstanceType());

    // Non-unique string, check for a string with a hash code that is actually
    // an index.
    STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
    IfBuilder not_string_or_name_if(this);
    not_string_or_name_if.If<HCompareNumericAndBranch>(
        instance_type,
        Add<HConstant>(LAST_UNIQUE_NAME_TYPE),
        Token::GT);

    not_string_or_name_if.Then();
    {
      // Non-smi, non-Name, non-String: Try to convert to smi in case of
      // HeapNumber.
      // TODO(danno): This could call some variant of ToString
      Push(AddUncasted<HForceRepresentation>(key, Representation::Smi()));
    }
    not_string_or_name_if.Else();
    {
      // String or Name: check explicitly for Name; Names can short-circuit
      // directly to the unique non-index key path.
      IfBuilder not_symbol_if(this);
      not_symbol_if.If<HCompareNumericAndBranch>(
          instance_type,
          Add<HConstant>(SYMBOL_TYPE),
          Token::NE);

      not_symbol_if.Then();
      {
        // String: check whether the String is the string form of an index. If
        // it is, extract the index value from the hash.
        HValue* hash =
            Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForNameHashField());
        HValue* not_index_mask = Add<HConstant>(static_cast<int>(
            String::kContainsCachedArrayIndexMask));

        HValue* not_index_test = AddUncasted<HBitwise>(
            Token::BIT_AND, hash, not_index_mask);

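        // A zero result means the hash field marks the key as an array index
        // and caches the index value, so it can be decoded directly below.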
        IfBuilder string_index_if(this);
        string_index_if.If<HCompareNumericAndBranch>(not_index_test,
                                                     graph()->GetConstant0(),
                                                     Token::EQ);
        string_index_if.Then();
        {
          // String with index in hash: extract the index and merge into the
          // index path.
          Push(BuildDecodeField<String::ArrayIndexValueBits>(hash));
        }
        string_index_if.Else();
        {
          // Key is a non-index String; check that it is internalized (unique).
          // If it is not, deopt.
          HValue* not_internalized_bit = AddUncasted<HBitwise>(
              Token::BIT_AND,
              instance_type,
              Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
          DeoptimizeIf<HCompareNumericAndBranch>(
              not_internalized_bit,
              graph()->GetConstant0(),
              Token::NE,
              "BuildKeyedIndexCheck: string isn't internalized");
          // Key is guaranteed to be a unique string.
          Push(key);
        }
        string_index_if.JoinContinuation(join_continuation);
      }
      not_symbol_if.Else();
      {
        Push(key);  // Key is symbol
      }
      not_symbol_if.JoinContinuation(join_continuation);
    }
    not_string_or_name_if.JoinContinuation(join_continuation);
  }
  key_smi_if.JoinContinuation(join_continuation);
}

void HGraphBuilder::BuildNonGlobalObjectCheck(HValue* receiver) {
  // Get the instance type of the receiver, and make sure that it is
  // not one of the global object types.
  HValue* map = Add<HLoadNamedField>(receiver, static_cast<HValue*>(NULL),
                                     HObjectAccess::ForMap());
  HValue* instance_type =
      Add<HLoadNamedField>(map, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapInstanceType());
  STATIC_ASSERT(JS_BUILTINS_OBJECT_TYPE == JS_GLOBAL_OBJECT_TYPE + 1);
  HValue* min_global_type = Add<HConstant>(JS_GLOBAL_OBJECT_TYPE);
  HValue* max_global_type = Add<HConstant>(JS_BUILTINS_OBJECT_TYPE);

  IfBuilder if_global_object(this);
  if_global_object.If<HCompareNumericAndBranch>(instance_type,
                                                max_global_type,
                                                Token::LTE);
  if_global_object.And();
  if_global_object.If<HCompareNumericAndBranch>(instance_type,
                                                min_global_type,
                                                Token::GTE);
  if_global_object.ThenDeopt("receiver was a global object");
  if_global_object.End();
}

void HGraphBuilder::BuildTestForDictionaryProperties(
    HValue* object,
    HIfContinuation* continuation) {
  HValue* properties = Add<HLoadNamedField>(
      object, static_cast<HValue*>(NULL),
      HObjectAccess::ForPropertiesPointer());
  HValue* properties_map =
      Add<HLoadNamedField>(properties, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMap());
  HValue* hash_map = Add<HLoadRoot>(Heap::kHashTableMapRootIndex);
  IfBuilder builder(this);
  builder.If<HCompareObjectEqAndBranch>(properties_map, hash_map);
  builder.CaptureContinuation(continuation);
}

HValue* HGraphBuilder::BuildKeyedLookupCacheHash(HValue* object,
                                                 HValue* key) {
  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  HValue* object_map =
      Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                           HObjectAccess::ForMapAsInteger32());
  HValue* shifted_map = AddUncasted<HShr>(
      object_map, Add<HConstant>(KeyedLookupCache::kMapHashShift));
  HValue* string_hash =
      Add<HLoadNamedField>(key, static_cast<HValue*>(NULL),
                           HObjectAccess::ForStringHashField());
  HValue* shifted_hash = AddUncasted<HShr>(
      string_hash, Add<HConstant>(String::kHashShift));
  HValue* xor_result = AddUncasted<HBitwise>(Token::BIT_XOR, shifted_map,
                                             shifted_hash);
  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
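  // kHashMask zeroes the two low bits, aligning the result to a bucket of
  // kEntriesPerBucket (4) entries, so that the hash-plus-probe indices used by
  // the generic keyed load stub never cross a bucket boundary.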
  return AddUncasted<HBitwise>(Token::BIT_AND, xor_result,
                               Add<HConstant>(mask));
}


HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
    HValue* elements,
    HValue* key,
@@ -1511,11 +1709,9 @@ HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {


HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* key) {
  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

                                                           HValue* elements,
                                                           HValue* key,
                                                           HValue* hash) {
  HValue* capacity = Add<HLoadKeyed>(
      elements,
      Add<HConstant>(NameDictionary::kCapacityIndex),
@@ -1309,6 +1309,16 @@ class HGraphBuilder {
  HBasicBlock* CreateBasicBlock(HEnvironment* env);
  HBasicBlock* CreateLoopHeaderBlock();

  template <class BitFieldClass>
  HValue* BuildDecodeField(HValue* encoded_field) {
    HValue* shifted_field = AddUncasted<HShr>(encoded_field,
        Add<HConstant>(static_cast<int>(BitFieldClass::kShift)));
    HValue* mask_value = Add<HConstant>(static_cast<int>(BitFieldClass::kMask));
    return AddUncasted<HBitwise>(Token::BIT_AND, shifted_field, mask_value);
  }
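  // Used, for example, as BuildDecodeField<Map::ElementsKindBits>(bit_field2)
  // in BuildGetElementsKind() below, and with String::ArrayIndexValueBits in
  // BuildKeyedIndexCheck().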

  HValue* BuildGetElementsKind(HValue* object);

  HValue* BuildCheckHeapObject(HValue* object);
  HValue* BuildCheckString(HValue* string);
  HValue* BuildWrapReceiver(HValue* object, HValue* function);
@@ -1335,9 +1345,33 @@ class HGraphBuilder {

  HValue* BuildNumberToString(HValue* object, Type* type);

  HValue* BuildUncheckedDictionaryElementLoad(HValue* receiver,
  void BuildJSObjectCheck(HValue* receiver,
                          int bit_field_mask);

  // Checks a key value that is being used in a keyed element access context.
  // If the key is an index, i.e. a smi or a unique string with a cached
  // numeric value, the "true" branch of the continuation is joined. Otherwise,
  // if the key is a name or a unique string, the "false" branch of the
  // continuation is joined. Any other key triggers a deoptimization. On both
  // paths of the continuation, the key is pushed on top of the environment.
  void BuildKeyedIndexCheck(HValue* key,
                            HIfContinuation* join_continuation);

  // Checks whether the properties of an object are in dictionary mode, in
  // which case the "true" branch of the continuation is taken; otherwise the
  // "false" branch is taken.
  void BuildTestForDictionaryProperties(HValue* object,
                                        HIfContinuation* continuation);

  void BuildNonGlobalObjectCheck(HValue* receiver);

  HValue* BuildKeyedLookupCacheHash(HValue* object,
                                    HValue* key);

  HValue* BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                              HValue* elements,
                                              HValue* key,
                                              HValue* hash);

  HValue* BuildRegExpConstructResult(HValue* length,
                                     HValue* index,
                                     HValue* input);
@@ -1674,6 +1708,27 @@ class HGraphBuilder {
  bool finished_;
};

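  // Helpers that emit an IfBuilder whose Then branch deoptimizes with the
  // given reason; used, for example, in BuildKeyedIndexCheck() as
  //   DeoptimizeIf<HCompareNumericAndBranch>(value, graph()->GetConstant0(),
  //                                          Token::NE, "reason");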
  template <class A, class P1>
  void DeoptimizeIf(P1 p1, char* const reason) {
    IfBuilder builder(this);
    builder.If<A>(p1);
    builder.ThenDeopt(reason);
  }

  template <class A, class P1, class P2>
  void DeoptimizeIf(P1 p1, P2 p2, const char* reason) {
    IfBuilder builder(this);
    builder.If<A>(p1, p2);
    builder.ThenDeopt(reason);
  }

  template <class A, class P1, class P2, class P3>
  void DeoptimizeIf(P1 p1, P2 p2, P3 p3, const char* reason) {
    IfBuilder builder(this);
    builder.If<A>(p1, p2, p3);
    builder.ThenDeopt(reason);
  }

  HValue* BuildNewElementsCapacity(HValue* old_capacity);

  class JSArrayBuilder V8_FINAL {
@@ -123,6 +123,16 @@ void RegExpConstructResultStub::InitializeInterfaceDescriptor(
}


void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
src/ic.cc (17 changed lines)
@@ -554,6 +554,23 @@ void CompareIC::Clear(Isolate* isolate,
}


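// With --compiled_keyed_generic_loads enabled, both the megamorphic and the
// generic keyed load handlers are backed by the Hydrogen-generated
// KeyedLoadGenericElementStub; otherwise the hand-written builtin is used.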
Handle<Code> KeyedLoadIC::megamorphic_stub() {
  if (FLAG_compiled_keyed_generic_loads) {
    return KeyedLoadGenericElementStub(isolate()).GetCode();
  } else {
    return isolate()->builtins()->KeyedLoadIC_Generic();
  }
}

Handle<Code> KeyedLoadIC::generic_stub() const {
  if (FLAG_compiled_keyed_generic_loads) {
    return KeyedLoadGenericElementStub(isolate()).GetCode();
  } else {
    return isolate()->builtins()->KeyedLoadIC_Generic();
  }
}


static bool MigrateDeprecated(Handle<Object> object) {
  if (!object->IsJSObject()) return false;
  Handle<JSObject> receiver = Handle<JSObject>::cast(object);
src/ic.h (9 changed lines)
@@ -525,12 +525,9 @@ class KeyedLoadIC: public LoadIC {

  Handle<Code> LoadElementStub(Handle<JSObject> receiver);

  virtual Handle<Code> megamorphic_stub() {
    return isolate()->builtins()->KeyedLoadIC_Generic();
  }
  virtual Handle<Code> generic_stub() const {
    return isolate()->builtins()->KeyedLoadIC_Generic();
  }
  virtual Handle<Code> megamorphic_stub();
  virtual Handle<Code> generic_stub() const;

  virtual Handle<Code> slow_stub() const {
    return isolate()->builtins()->KeyedLoadIC_Slow();
  }
@@ -2011,6 +2011,7 @@ bool Isolate::Init(Deserializer* des) {
    NumberToStringStub::InstallDescriptors(this);
    StringAddStub::InstallDescriptors(this);
    RegExpConstructResultStub::InstallDescriptors(this);
    KeyedLoadGenericElementStub::InstallDescriptors(this);
  }

  CallDescriptors::InitializeForIsolate(this);
@@ -6723,10 +6723,18 @@ class Map: public HeapObject {
  static const int kVisitorIdOffset = kInstanceSizesOffset + kVisitorIdByte;

  // Byte offsets within kInstanceAttributesOffset attributes.
#if V8_TARGET_LITTLE_ENDIAN
  // Order instance type and bit field together such that they can be loaded
  // together as a 16-bit word with the instance type in the lower 8 bits
  // regardless of endianness.
  static const int kInstanceTypeOffset = kInstanceAttributesOffset + 0;
  static const int kUnusedPropertyFieldsOffset = kInstanceAttributesOffset + 1;
  static const int kBitFieldOffset = kInstanceAttributesOffset + 2;
  static const int kBitField2Offset = kInstanceAttributesOffset + 3;
  static const int kBitFieldOffset = kInstanceAttributesOffset + 1;
#else
  static const int kBitFieldOffset = kInstanceAttributesOffset + 0;
  static const int kInstanceTypeOffset = kInstanceAttributesOffset + 1;
#endif
  static const int kBitField2Offset = kInstanceAttributesOffset + 2;
  static const int kUnusedPropertyFieldsOffset = kInstanceAttributesOffset + 3;

  STATIC_ASSERT(kInstanceTypeOffset == Internals::kMapInstanceTypeOffset);

@@ -9048,6 +9056,33 @@ class String: public Name {
 public:
  enum Encoding { ONE_BYTE_ENCODING, TWO_BYTE_ENCODING };

  // Array index strings this short can keep their index in the hash field.
  static const int kMaxCachedArrayIndexLength = 7;

  // For strings that are array indexes, the hash value has the string length
  // mixed into it, mainly to avoid a hash value of zero, which would be the
  // case for the string '0'. 24 bits are used for the array index value.
  static const int kArrayIndexValueBits = 24;
  static const int kArrayIndexLengthBits =
      kBitsPerInt - kArrayIndexValueBits - kNofHashBitFields;

  STATIC_ASSERT((kArrayIndexLengthBits > 0));

  class ArrayIndexValueBits : public BitField<unsigned int, kNofHashBitFields,
      kArrayIndexValueBits> {};  // NOLINT
  class ArrayIndexLengthBits : public BitField<unsigned int,
      kNofHashBitFields + kArrayIndexValueBits,
      kArrayIndexLengthBits> {};  // NOLINT

  // Check that kMaxCachedArrayIndexLength + 1 is a power of two so we can use
  // a mask to test whether the length of a string is less than or equal to
  // kMaxCachedArrayIndexLength.
  STATIC_ASSERT(IS_POWER_OF_TWO(kMaxCachedArrayIndexLength + 1));

  static const unsigned int kContainsCachedArrayIndexMask =
      (~kMaxCachedArrayIndexLength << ArrayIndexLengthBits::kShift) |
      kIsNotArrayIndexMask;
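  // Layout of the hash field for a short array-index string (illustrative):
  // the low kNofHashBitFields bits hold the hash flags, the next
  // kArrayIndexValueBits (24) bits cache the index value, and the top
  // kArrayIndexLengthBits bits hold the string length.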

  // Representation of the flat content of a String.
  // A non-flat string doesn't have flat content.
  // A flat string has content that's encoded as a sequence of either
@@ -5560,17 +5560,18 @@ RUNTIME_FUNCTION(Runtime_StoreArrayLiteralElement) {
    HeapNumber* number = HeapNumber::cast(*value);
    double_array->set(store_index, number->Number());
  } else {
    ASSERT(IsFastSmiElementsKind(elements_kind) ||
           IsFastDoubleElementsKind(elements_kind));
    if (!IsFastObjectElementsKind(elements_kind)) {
      ElementsKind transitioned_kind = IsFastHoleyElementsKind(elements_kind)
          ? FAST_HOLEY_ELEMENTS
          : FAST_ELEMENTS;
      JSObject::TransitionElementsKind(object, transitioned_kind);
      if (IsMoreGeneralElementsKindTransition(
              boilerplate_object->GetElementsKind(),
      ElementsKind boilerplate_elements_kind =
          boilerplate_object->GetElementsKind();
      if (IsMoreGeneralElementsKindTransition(boilerplate_elements_kind,
                                              transitioned_kind)) {
        JSObject::TransitionElementsKind(boilerplate_object, transitioned_kind);
      }
    }
    FixedArray* object_array = FixedArray::cast(object->elements());
    object_array->set(store_index, *value);
  }
@@ -34,7 +34,7 @@ const int kReferenceIdBits = 16;
const int kReferenceIdMask = (1 << kReferenceIdBits) - 1;
const int kReferenceTypeShift = kReferenceIdBits;

const int kDeoptTableSerializeEntryCount = 12;
const int kDeoptTableSerializeEntryCount = 64;

// ExternalReferenceTable is a helper class that defines the relationship
// between external references and their encodings. It is used to build
@@ -119,6 +119,16 @@ void RegExpConstructResultStub::InitializeInterfaceDescriptor(
}


void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
test/mjsunit/keyed-load-dictionary-stub.js (new file, 20 lines)
@@ -0,0 +1,20 @@
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --allow-natives-syntax

function generate_dictionary_array() {
  var result = [0, 1, 2, 3, 4];
  result[256 * 1024] = 5;
  return result;
}

function get_accessor(a, i) {
  return a[i];
}

var array1 = generate_dictionary_array();
get_accessor(array1, 1);
get_accessor(array1, 2);
get_accessor(12345, 2);
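
// A usage note (assuming a standard V8 checkout with a d8 build): the test can
// be run directly as
//   d8 --allow-natives-syntax test/mjsunit/keyed-load-dictionary-stub.js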