Add MEGAMORPHIC state support for KeyedLoadIC

BUG=

Review URL: https://codereview.chromium.org/878263002

Cr-Commit-Position: refs/heads/master@{#26345}
dcarney authored 2015-01-30 01:17:01 -08:00; committed by Commit bot
parent 9b60e89482
commit 491eb81780
11 changed files with 60 additions and 387 deletions
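
In short: before this change, a KeyedLoadIC that outgrew its polymorphic cache fell back to a GENERIC state whose per-architecture code probed a dedicated KeyedLookupCache mapping (map, name) pairs to raw field offsets. This commit retires that path and lets keyed loads use the same MEGAMORPHIC state as named loads, probing the shared stub cache instead. The sketch below is a hypothetical, simplified model of such a (map, name)-keyed cache; the class, constants, and hashing are illustrative, not V8's actual StubCache API.

#include <cstddef>
#include <cstdint>

struct Map;      // stand-in for a hidden class (v8::internal::Map)
struct Handler;  // stand-in for a compiled load handler

// One shared, fixed-size hash table keyed on (map, name), returning a
// handler rather than a raw field offset.
class StubCacheSketch {
 public:
  Handler* Probe(const Map* map, const char* name) const {
    const Entry& e = table_[Index(map, name)];
    // Names are assumed interned, so pointer comparison suffices.
    return (e.map == map && e.name == name) ? e.handler : nullptr;
  }
  void Set(const Map* map, const char* name, Handler* handler) {
    table_[Index(map, name)] = {map, name, handler};  // overwrite on collision
  }

 private:
  struct Entry { const Map* map; const char* name; Handler* handler; };
  static const size_t kSize = 128;  // power of two; illustrative
  static size_t Index(const Map* map, const char* name) {
    // Mix map and name bits; V8 mixes the map word with the name's hash field.
    return ((reinterpret_cast<uintptr_t>(map) >> 3) ^
            reinterpret_cast<uintptr_t>(name)) & (kSize - 1);
  }
  Entry table_[kSize] = {};
};

The per-architecture assembly removed below was, in effect, an open-coded probe of the old KeyedLookupCache inlined into every megamorphic keyed-load miss path.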


@@ -1273,8 +1273,8 @@ static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
}
static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
KeyedLoadIC::GenerateGeneric(masm);
static void Generate_KeyedLoadIC_Megamorphic(MacroAssembler* masm) {
KeyedLoadIC::GenerateMegamorphic(masm);
}


@@ -87,7 +87,7 @@ enum BuiltinExtraArguments {
V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, kNoExtraICState) \
V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC, \
kNoExtraICState) \
V(KeyedLoadIC_Generic, KEYED_LOAD_IC, GENERIC, kNoExtraICState) \
V(KeyedLoadIC_Megamorphic, KEYED_LOAD_IC, MEGAMORPHIC, kNoExtraICState) \
\
V(StoreIC_Setter_ForDeopt, STORE_IC, MONOMORPHIC, StoreIC::kStrictModeState) \
\


@@ -1907,6 +1907,9 @@ HValue* CodeStubGraphBuilder<KeyedLoadGenericStub>::BuildCodeStub() {
}
if_dict_properties.Else();
{
// TODO(dcarney): don't use keyed lookup cache, but convert to use
// megamorphic stub cache.
UNREACHABLE();
// Key is string, properties are fast mode
HValue* hash = BuildKeyedLookupCacheHash(receiver, key);
@@ -2176,7 +2179,7 @@ HValue* CodeStubGraphBuilder<VectorKeyedLoadStub>::BuildCodeStub() {
generic_checker.Then();
{
// Tail-call to the generic KeyedLoadIC, treating it like a handler.
Handle<Code> stub = KeyedLoadIC::generic_stub(isolate());
Handle<Code> stub = KeyedLoadIC::megamorphic_stub(isolate());
HValue* constant_stub = Add<HConstant>(stub);
LoadDescriptor descriptor(isolate());
HValue* op_vals[] = {context(), receiver, name};
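
The VectorKeyedLoadStub graph above now embeds the megamorphic stub as its fallback and calls it through the ordinary LoadDescriptor convention (context, receiver, name). A hedged sketch of that "treat the stub like a handler" idea, with purely illustrative names:

struct Context;
struct Object;
struct Name;

// Any code object callable with (context, receiver, name) can serve as a
// load handler; the megamorphic stub is simply one that probes the shared
// stub cache internally.
using LoadHandler = Object* (*)(Context*, Object*, Name*);

Object* DispatchLoad(LoadHandler handler, Context* cx, Object* receiver,
                     Name* name) {
  return handler(cx, receiver, name);  // emitted as a tail call in the stub
}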


@@ -465,7 +465,7 @@ void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
}
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// The return address is in lr.
Label slow, check_name, index_smi, index_name, property_array_property;
Label probe_dictionary, check_number_dictionary;
@@ -519,94 +519,20 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
GenerateKeyedLoadReceiverCheck(masm, receiver, r0, r3,
Map::kHasNamedInterceptor, &slow);
// If the receiver is a fast-case object, check the keyed lookup
// cache. Otherwise probe the dictionary.
// If the receiver is a fast-case object, check the stub cache. Otherwise
// probe the dictionary.
__ ldr(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHashTableMapRootIndex);
__ cmp(r4, ip);
__ b(eq, &probe_dictionary);
// Load the map of the receiver, compute the keyed lookup cache hash
// based on 32 bits of the map pointer and the name hash.
__ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ mov(r3, Operand(r0, ASR, KeyedLookupCache::kMapHashShift));
__ ldr(r4, FieldMemOperand(key, Name::kHashFieldOffset));
__ eor(r3, r3, Operand(r4, ASR, Name::kHashShift));
int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
__ And(r3, r3, Operand(mask));
// Load the key (consisting of map and unique name) from the cache and
// check for match.
Label load_in_object_property;
static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
Label hit_on_nth_entry[kEntriesPerBucket];
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(isolate);
__ mov(r4, Operand(cache_keys));
__ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
for (int i = 0; i < kEntriesPerBucket - 1; i++) {
Label try_next_entry;
// Load map and move r4 to next entry.
__ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
__ cmp(r0, r5);
__ b(ne, &try_next_entry);
__ ldr(r5, MemOperand(r4, -kPointerSize)); // Load name
__ cmp(key, r5);
__ b(eq, &hit_on_nth_entry[i]);
__ bind(&try_next_entry);
}
// Last entry: Load map and move r4 to name.
__ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
__ cmp(r0, r5);
__ b(ne, &slow);
__ ldr(r5, MemOperand(r4));
__ cmp(key, r5);
__ b(ne, &slow);
// Get field offset.
// r0 : receiver's map
// r3 : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(isolate);
// Hit on nth entry.
for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
__ bind(&hit_on_nth_entry[i]);
__ mov(r4, Operand(cache_field_offsets));
if (i != 0) {
__ add(r3, r3, Operand(i));
}
__ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
__ ldrb(r6, FieldMemOperand(r0, Map::kInObjectPropertiesOffset));
__ sub(r5, r5, r6, SetCC);
__ b(ge, &property_array_property);
if (i != 0) {
__ jmp(&load_in_object_property);
}
}
// Load in-object property.
__ bind(&load_in_object_property);
__ ldrb(r6, FieldMemOperand(r0, Map::kInstanceSizeOffset));
__ add(r6, r6, r5); // Index from start of object.
__ sub(receiver, receiver, Operand(kHeapObjectTag)); // Remove the heap tag.
__ ldr(r0, MemOperand(receiver, r6, LSL, kPointerSizeLog2));
__ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
r4, r3);
__ Ret();
// Load property array property.
__ bind(&property_array_property);
__ ldr(receiver, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ add(receiver, receiver, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ ldr(r0, MemOperand(receiver, r5, LSL, kPointerSizeLog2));
__ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
r4, r3);
__ Ret();
Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
Code::ComputeHandlerFlags(Code::LOAD_IC));
masm->isolate()->stub_cache()->GenerateProbe(
masm, Code::LOAD_IC, flags, false, receiver, key, r3, r4, r5, r6);
// Cache miss.
GenerateMiss(masm);
// Do a quick inline probe of the receiver's dictionary, if it
// exists.
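
The same rewrite is repeated for arm64, ia32, and x64 below. For reference, the deleted probe above (the mov/eor/And sequence) computed its bucket index roughly as follows; this is a hedged sketch, and the shift and mask constants are stand-ins rather than KeyedLookupCache's real values.

#include <cstdint>

// Mix 32 bits of the receiver's map word with the key's hash field, then
// mask down to a bucket index.
uint32_t KeyedLookupCacheIndexSketch(uint32_t map_word,
                                     uint32_t name_hash_field) {
  const int kMapHashShift = 5;  // stand-in for KeyedLookupCache::kMapHashShift
  const int kHashShift = 2;     // stand-in for Name::kHashShift
  const uint32_t kMask = 0x3f;  // kCapacityMask & kHashMask, illustrative
  return ((map_word >> kMapHashShift) ^ (name_hash_field >> kHashShift)) &
         kMask;
}

Each bucket held kEntriesPerBucket (map, name) pairs checked in turn against a parallel table of field offsets; the single stub-cache probe replaces all of it.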


@@ -527,94 +527,19 @@ static void GenerateKeyedLoadWithNameKey(MacroAssembler* masm, Register key,
GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2,
Map::kHasNamedInterceptor, slow);
// If the receiver is a fast-case object, check the keyed lookup cache.
// Otherwise probe the dictionary.
// If the receiver is a fast-case object, check the stub cache. Otherwise
// probe the dictionary.
__ Ldr(scratch2, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ Ldr(scratch3, FieldMemOperand(scratch2, HeapObject::kMapOffset));
__ JumpIfRoot(scratch3, Heap::kHashTableMapRootIndex, &probe_dictionary);
// We keep the map of the receiver in scratch1.
Register receiver_map = scratch1;
// Load the map of the receiver, compute the keyed lookup cache hash
// based on 32 bits of the map pointer and the name hash.
__ Ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ Mov(scratch2, Operand(receiver_map, ASR, KeyedLookupCache::kMapHashShift));
__ Ldr(scratch3.W(), FieldMemOperand(key, Name::kHashFieldOffset));
__ Eor(scratch2, scratch2, Operand(scratch3, ASR, Name::kHashShift));
int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
__ And(scratch2, scratch2, mask);
// Load the key (consisting of map and unique name) from the cache and
// check for match.
Label load_in_object_property;
static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
Label hit_on_nth_entry[kEntriesPerBucket];
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(isolate);
__ Mov(scratch3, cache_keys);
__ Add(scratch3, scratch3, Operand(scratch2, LSL, kPointerSizeLog2 + 1));
for (int i = 0; i < kEntriesPerBucket - 1; i++) {
Label try_next_entry;
// Load map and make scratch3 pointing to the next entry.
__ Ldr(scratch4, MemOperand(scratch3, kPointerSize * 2, PostIndex));
__ Cmp(receiver_map, scratch4);
__ B(ne, &try_next_entry);
__ Ldr(scratch4, MemOperand(scratch3, -kPointerSize)); // Load name
__ Cmp(key, scratch4);
__ B(eq, &hit_on_nth_entry[i]);
__ Bind(&try_next_entry);
}
// Last entry.
__ Ldr(scratch4, MemOperand(scratch3, kPointerSize, PostIndex));
__ Cmp(receiver_map, scratch4);
__ B(ne, slow);
__ Ldr(scratch4, MemOperand(scratch3));
__ Cmp(key, scratch4);
__ B(ne, slow);
// Get field offset.
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(isolate);
// Hit on nth entry.
for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
__ Bind(&hit_on_nth_entry[i]);
__ Mov(scratch3, cache_field_offsets);
if (i != 0) {
__ Add(scratch2, scratch2, i);
}
__ Ldr(scratch4.W(), MemOperand(scratch3, scratch2, LSL, 2));
__ Ldrb(scratch5,
FieldMemOperand(receiver_map, Map::kInObjectPropertiesOffset));
__ Subs(scratch4, scratch4, scratch5);
__ B(ge, &property_array_property);
if (i != 0) {
__ B(&load_in_object_property);
}
}
// Load in-object property.
__ Bind(&load_in_object_property);
__ Ldrb(scratch5, FieldMemOperand(receiver_map, Map::kInstanceSizeOffset));
__ Add(scratch5, scratch5, scratch4); // Index from start of object.
__ Sub(receiver, receiver, kHeapObjectTag); // Remove the heap tag.
__ Ldr(result, MemOperand(receiver, scratch5, LSL, kPointerSizeLog2));
__ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
scratch1, scratch2);
__ Ret();
// Load property array property.
__ Bind(&property_array_property);
__ Ldr(scratch1, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ Add(scratch1, scratch1, FixedArray::kHeaderSize - kHeapObjectTag);
__ Ldr(result, MemOperand(scratch1, scratch4, LSL, kPointerSizeLog2));
__ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
scratch1, scratch2);
__ Ret();
Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
Code::ComputeHandlerFlags(Code::LOAD_IC));
masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, flags,
false, receiver, key, scratch1,
scratch2, scratch3, scratch4);
// Cache miss.
KeyedLoadIC::GenerateMiss(masm);
// Do a quick inline probe of the receiver's dictionary, if it exists.
__ Bind(&probe_dictionary);
@@ -629,7 +554,7 @@ static void GenerateKeyedLoadWithNameKey(MacroAssembler* masm, Register key,
}
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// The return address is in lr.
Label slow, check_name, index_smi, index_name;


@@ -330,7 +330,7 @@ static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
}
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// The return address is on the stack.
Label slow, check_name, index_smi, index_name, property_array_property;
Label probe_dictionary, check_number_dictionary;
@@ -392,95 +392,19 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
GenerateKeyedLoadReceiverCheck(masm, receiver, eax, Map::kHasNamedInterceptor,
&slow);
// If the receiver is a fast-case object, check the keyed lookup
// cache. Otherwise probe the dictionary.
// If the receiver is a fast-case object, check the stub cache. Otherwise
// probe the dictionary.
__ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset));
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Immediate(isolate->factory()->hash_table_map()));
__ j(equal, &probe_dictionary);
// The receiver's map is still in eax, compute the keyed lookup cache hash
// based on 32 bits of the map pointer and the string hash.
if (FLAG_debug_code) {
__ cmp(eax, FieldOperand(receiver, HeapObject::kMapOffset));
__ Check(equal, kMapIsNoLongerInEax);
}
__ mov(ebx, eax); // Keep the map around for later.
__ shr(eax, KeyedLookupCache::kMapHashShift);
__ mov(edi, FieldOperand(key, String::kHashFieldOffset));
__ shr(edi, String::kHashShift);
__ xor_(eax, edi);
__ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
// Load the key (consisting of map and internalized string) from the cache and
// check for match.
Label load_in_object_property;
static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
Label hit_on_nth_entry[kEntriesPerBucket];
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(masm->isolate());
for (int i = 0; i < kEntriesPerBucket - 1; i++) {
Label try_next_entry;
__ mov(edi, eax);
__ shl(edi, kPointerSizeLog2 + 1);
if (i != 0) {
__ add(edi, Immediate(kPointerSize * i * 2));
}
__ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &try_next_entry);
__ add(edi, Immediate(kPointerSize));
__ cmp(key, Operand::StaticArray(edi, times_1, cache_keys));
__ j(equal, &hit_on_nth_entry[i]);
__ bind(&try_next_entry);
}
__ lea(edi, Operand(eax, 1));
__ shl(edi, kPointerSizeLog2 + 1);
__ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
__ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &slow);
__ add(edi, Immediate(kPointerSize));
__ cmp(key, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &slow);
// Get field offset.
// ebx : receiver's map
// eax : lookup cache index
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
// Hit on nth entry.
for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
__ bind(&hit_on_nth_entry[i]);
if (i != 0) {
__ add(eax, Immediate(i));
}
__ mov(edi,
Operand::StaticArray(eax, times_pointer_size, cache_field_offsets));
__ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
__ sub(edi, eax);
__ j(above_equal, &property_array_property);
if (i != 0) {
__ jmp(&load_in_object_property);
}
}
// Load in-object property.
__ bind(&load_in_object_property);
__ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset));
__ add(eax, edi);
__ mov(eax, FieldOperand(receiver, eax, times_pointer_size, 0));
__ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
// Load property array property.
__ bind(&property_array_property);
__ mov(eax, FieldOperand(receiver, JSObject::kPropertiesOffset));
__ mov(eax,
FieldOperand(eax, edi, times_pointer_size, FixedArray::kHeaderSize));
__ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
Code::ComputeHandlerFlags(Code::LOAD_IC));
masm->isolate()->stub_cache()->GenerateProbe(
masm, Code::LOAD_IC, flags, false, receiver, key, ebx, no_reg);
// Cache miss.
GenerateMiss(masm);
// Do a quick inline probe of the receiver's dictionary, if it
// exists.


@@ -606,11 +606,11 @@ void CompareIC::Clear(Isolate* isolate, Address address, Code* target,
// static
Handle<Code> KeyedLoadIC::generic_stub(Isolate* isolate) {
Handle<Code> KeyedLoadIC::megamorphic_stub(Isolate* isolate) {
if (FLAG_compiled_keyed_generic_loads) {
return KeyedLoadGenericStub(isolate).GetCode();
} else {
return isolate->builtins()->KeyedLoadIC_Generic();
return isolate->builtins()->KeyedLoadIC_Megamorphic();
}
}
@@ -706,7 +706,7 @@ MaybeHandle<Object> LoadIC::Load(Handle<Object> object, Handle<Name> name) {
if (UseVector()) {
ConfigureVectorState(GENERIC);
} else {
set_target(*KeyedLoadIC::generic_stub(isolate()));
set_target(*KeyedLoadIC::megamorphic_stub(isolate()));
}
TRACE_IC("LoadIC", name);
TRACE_GENERIC_IC(isolate(), "LoadIC", "name as array index");
@@ -951,8 +951,7 @@ void IC::PatchCache(Handle<Name> name, Handle<Code> code) {
CopyICToMegamorphicCache(name);
}
if (UseVector()) {
ConfigureVectorState(kind() == Code::KEYED_LOAD_IC ? GENERIC
: MEGAMORPHIC);
ConfigureVectorState(MEGAMORPHIC);
} else {
set_target(*megamorphic_stub());
}
@@ -969,12 +968,8 @@ void IC::PatchCache(Handle<Name> name, Handle<Code> code) {
case DEBUG_STUB:
break;
case DEFAULT:
UNREACHABLE();
break;
case GENERIC:
// The generic keyed store stub re-uses store handlers, which can miss.
// That's ok, no reason to do anything.
DCHECK(target()->kind() == Code::KEYED_STORE_IC);
UNREACHABLE();
break;
}
}
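
The PatchCache change above is the core of the commit: when the polymorphic cache overflows, a KeyedLoadIC now transitions to MEGAMORPHIC like every other IC instead of special-casing GENERIC. A simplified model of the miss-driven state progression (hedged; the real transitions are spread across IC::PatchCache and its callers):

enum class ICState {
  kUninitialized,
  kPremonomorphic,
  kMonomorphic,
  kPolymorphic,
  kMegamorphic,  // terminal: probe the shared stub cache
};

// After this commit, keyed loads follow the same progression as named loads.
ICState NextStateOnMiss(ICState s) {
  switch (s) {
    case ICState::kUninitialized:  return ICState::kPremonomorphic;
    case ICState::kPremonomorphic: return ICState::kMonomorphic;
    case ICState::kMonomorphic:    return ICState::kPolymorphic;
    case ICState::kPolymorphic:    return ICState::kMegamorphic;
    case ICState::kMegamorphic:    return ICState::kMegamorphic;
  }
  return s;  // unreachable; keeps -Wreturn-type quiet
}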
@@ -1022,7 +1017,7 @@ Handle<Code> LoadIC::megamorphic_stub() {
return stub.GetCode();
} else {
DCHECK_EQ(Code::KEYED_LOAD_IC, kind());
return KeyedLoadIC::generic_stub(isolate());
return KeyedLoadIC::megamorphic_stub(isolate());
}
}
@@ -1091,8 +1086,6 @@ void LoadIC::UpdateCaches(LookupIterator* lookup) {
void IC::UpdateMegamorphicCache(HeapType* type, Name* name, Code* code) {
// Megamorphic state isn't implemented for keyed loads currently.
if (kind() == Code::KEYED_LOAD_IC) return;
Map* map = *TypeToMap(type, isolate());
isolate()->stub_cache()->Set(name, map, code);
}
@@ -1370,14 +1363,14 @@ Handle<Code> KeyedLoadIC::LoadElementStub(Handle<HeapObject> receiver) {
// If the miss wasn't due to an unseen map, a polymorphic stub
// won't help, use the generic stub.
TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "same map added twice");
return generic_stub();
return megamorphic_stub(isolate());
}
// If the maximum number of receiver maps has been exceeded, use the generic
// version of the IC.
if (target_receiver_maps.length() > kMaxKeyedPolymorphism) {
TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "max polymorph exceeded");
return generic_stub();
return megamorphic_stub(isolate());
}
if (FLAG_vector_ics) {
@@ -1407,7 +1400,7 @@ MaybeHandle<Object> KeyedLoadIC::Load(Handle<Object> object,
}
Handle<Object> load_handle;
Handle<Code> stub = generic_stub();
Handle<Code> stub = megamorphic_stub(isolate());
// Check for non-string values that can be converted into an
// internalized string directly or is representable as a smi.
@@ -1428,7 +1421,7 @@ MaybeHandle<Object> KeyedLoadIC::Load(Handle<Object> object,
if (!UseVector()) {
if (!is_target_set()) {
Code* generic = *generic_stub();
Code* generic = *megamorphic_stub(isolate());
if (*stub == generic) {
TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "set generic");
}
@@ -1438,7 +1431,7 @@ MaybeHandle<Object> KeyedLoadIC::Load(Handle<Object> object,
}
} else {
if (!is_vector_set() || stub.is_null()) {
Code* generic = *generic_stub();
Code* generic = *megamorphic_stub(isolate());
if (!stub.is_null() && *stub == generic) {
ConfigureVectorState(GENERIC);
TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "set generic");


@@ -487,7 +487,7 @@ class KeyedLoadIC : public LoadIC {
static void GeneratePreMonomorphic(MacroAssembler* masm) {
GenerateMiss(masm);
}
static void GenerateGeneric(MacroAssembler* masm);
static void GenerateMegamorphic(MacroAssembler* masm);
// Bit mask to be tested against bit field for the cases when
// generic stub should go into slow case.
@@ -498,7 +498,7 @@ class KeyedLoadIC : public LoadIC {
static Handle<Code> initialize_stub(Isolate* isolate);
static Handle<Code> initialize_stub_in_optimized_code(Isolate* isolate);
static Handle<Code> generic_stub(Isolate* isolate);
static Handle<Code> megamorphic_stub(Isolate* isolate);
static Handle<Code> pre_monomorphic_stub(Isolate* isolate);
static void Clear(Isolate* isolate, Code* host, KeyedLoadICNexus* nexus);
@@ -511,8 +511,6 @@ class KeyedLoadIC : public LoadIC {
}
private:
Handle<Code> generic_stub() const { return generic_stub(isolate()); }
static void Clear(Isolate* isolate, Address address, Code* target,
ConstantPoolArray* constant_pool);


@@ -274,7 +274,7 @@ static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
}
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
// The return address is on the stack.
Label slow, check_name, index_smi, index_name, property_array_property;
Label probe_dictionary, check_number_dictionary;
@@ -325,86 +325,19 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor,
&slow);
// If the receiver is a fast-case object, check the keyed lookup
// cache. Otherwise probe the dictionary leaving result in key.
// If the receiver is a fast-case object, check the stub cache. Otherwise
// probe the dictionary.
__ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
__ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
Heap::kHashTableMapRootIndex);
__ j(equal, &probe_dictionary);
// Load the map of the receiver, compute the keyed lookup cache hash
// based on 32 bits of the map pointer and the string hash.
__ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
__ movl(rax, rbx);
__ shrl(rax, Immediate(KeyedLookupCache::kMapHashShift));
__ movl(rdi, FieldOperand(key, String::kHashFieldOffset));
__ shrl(rdi, Immediate(String::kHashShift));
__ xorp(rax, rdi);
int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
__ andp(rax, Immediate(mask));
// Load the key (consisting of map and internalized string) from the cache and
// check for match.
Label load_in_object_property;
static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
Label hit_on_nth_entry[kEntriesPerBucket];
ExternalReference cache_keys =
ExternalReference::keyed_lookup_cache_keys(masm->isolate());
for (int i = 0; i < kEntriesPerBucket - 1; i++) {
Label try_next_entry;
__ movp(rdi, rax);
__ shlp(rdi, Immediate(kPointerSizeLog2 + 1));
__ LoadAddress(kScratchRegister, cache_keys);
int off = kPointerSize * i * 2;
__ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
__ j(not_equal, &try_next_entry);
__ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
__ j(equal, &hit_on_nth_entry[i]);
__ bind(&try_next_entry);
}
int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
__ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
__ j(not_equal, &slow);
__ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
__ j(not_equal, &slow);
// Get field offset, which is a 32-bit integer.
ExternalReference cache_field_offsets =
ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
// Hit on nth entry.
for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
__ bind(&hit_on_nth_entry[i]);
if (i != 0) {
__ addl(rax, Immediate(i));
}
__ LoadAddress(kScratchRegister, cache_field_offsets);
__ movl(rdi, Operand(kScratchRegister, rax, times_4, 0));
__ movzxbp(rax, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
__ subp(rdi, rax);
__ j(above_equal, &property_array_property);
if (i != 0) {
__ jmp(&load_in_object_property);
}
}
// Load in-object property.
__ bind(&load_in_object_property);
__ movzxbp(rax, FieldOperand(rbx, Map::kInstanceSizeOffset));
__ addp(rax, rdi);
__ movp(rax, FieldOperand(receiver, rax, times_pointer_size, 0));
__ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
// Load property array property.
__ bind(&property_array_property);
__ movp(rax, FieldOperand(receiver, JSObject::kPropertiesOffset));
__ movp(rax,
FieldOperand(rax, rdi, times_pointer_size, FixedArray::kHeaderSize));
__ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
Code::ComputeHandlerFlags(Code::LOAD_IC));
masm->isolate()->stub_cache()->GenerateProbe(
masm, Code::LOAD_IC, flags, false, receiver, key, rbx, no_reg);
// Cache miss.
GenerateMiss(masm);
// Do a quick inline probe of the receiver's dictionary, if it
// exists.


@@ -601,33 +601,7 @@ RUNTIME_FUNCTION(Runtime_KeyedGetProperty) {
DisallowHeapAllocation no_allocation;
Handle<JSObject> receiver = Handle<JSObject>::cast(receiver_obj);
Handle<Name> key = Handle<Name>::cast(key_obj);
if (receiver->HasFastProperties()) {
// Attempt to use lookup cache.
Handle<Map> receiver_map(receiver->map(), isolate);
KeyedLookupCache* keyed_lookup_cache = isolate->keyed_lookup_cache();
int index = keyed_lookup_cache->Lookup(receiver_map, key);
if (index != -1) {
// Doubles are not cached, so raw read the value.
return receiver->RawFastPropertyAt(
FieldIndex::ForKeyedLookupCacheIndex(*receiver_map, index));
}
// Lookup cache miss. Perform lookup and update the cache if
// appropriate.
LookupIterator it(receiver, key, LookupIterator::OWN);
if (it.state() == LookupIterator::DATA &&
it.property_details().type() == DATA) {
FieldIndex field_index = it.GetFieldIndex();
// Do not track double fields in the keyed lookup cache. Reading
// double values requires boxing.
if (!it.representation().IsDouble()) {
keyed_lookup_cache->Update(receiver_map, key,
field_index.GetKeyedLookupCacheIndex());
}
AllowHeapAllocation allow_allocation;
return *JSObject::FastPropertyAt(receiver, it.representation(),
field_index);
}
} else {
if (!receiver->HasFastProperties()) {
// Attempt dictionary lookup.
NameDictionary* dictionary = receiver->property_dictionary();
int entry = dictionary->FindEntry(key);
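
With megamorphic fast-properties loads handled by the stub cache, the runtime no longer maintains its own KeyedLookupCache: Runtime_KeyedGetProperty keeps only the dictionary-mode fast path, as the hunk above shows. A hedged sketch of the resulting shape, with illustrative types rather than V8's:

#include <string>
#include <unordered_map>

struct Object;

struct ReceiverSketch {
  bool has_fast_properties;
  // Dictionary-mode receivers keep properties in a name dictionary.
  std::unordered_map<std::string, Object*> dictionary;
};

Object* GenericGetProperty(ReceiverSketch&, const std::string&) {
  return nullptr;  // placeholder for the fully generic lookup path
}

Object* KeyedGetPropertySketch(ReceiverSketch& r, const std::string& key) {
  if (!r.has_fast_properties) {  // only the dictionary fast path survives
    auto it = r.dictionary.find(key);
    if (it != r.dictionary.end()) return it->second;
  }
  return GenericGetProperty(r, key);  // fast-properties receivers go generic
}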


@@ -2395,11 +2395,8 @@ TEST(CheckCodeNames) {
stub_path, arraysize(stub_path));
CHECK_NE(NULL, node);
const char* builtin_path1[] = {
"::(GC roots)",
"::(Builtins)",
"::(KeyedLoadIC_Generic builtin)"
};
const char* builtin_path1[] = {"::(GC roots)", "::(Builtins)",
"::(KeyedLoadIC_Megamorphic builtin)"};
node = GetNodeByPath(snapshot, builtin_path1, arraysize(builtin_path1));
CHECK_NE(NULL, node);