From 491eb81780873aafd4d7f6638d20f26839383c68 Mon Sep 17 00:00:00 2001
From: dcarney
Date: Fri, 30 Jan 2015 01:17:01 -0800
Subject: [PATCH] Add MEGAMORPHIC state support for KeyedLoadIC

BUG=

Review URL: https://codereview.chromium.org/878263002

Cr-Commit-Position: refs/heads/master@{#26345}
---
 src/builtins.cc                   |  4 +-
 src/builtins.h                    |  2 +-
 src/code-stubs-hydrogen.cc        |  5 +-
 src/ic/arm/ic-arm.cc              | 92 +++---------------------------
 src/ic/arm64/ic-arm64.cc          | 95 ++++---------------------------
 src/ic/ia32/ic-ia32.cc            | 94 +++---------------------------
 src/ic/ic.cc                      | 29 ++++------
 src/ic/ic.h                       |  6 +-
 src/ic/x64/ic-x64.cc              | 85 +++------------------------
 src/runtime/runtime-object.cc     | 28 +--------
 test/cctest/test-heap-profiler.cc |  7 +--
 11 files changed, 60 insertions(+), 387 deletions(-)

diff --git a/src/builtins.cc b/src/builtins.cc
index 0f1baa1f0a..3e0e1ba6dc 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -1273,8 +1273,8 @@ static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
 }
 
 
-static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
-  KeyedLoadIC::GenerateGeneric(masm);
+static void Generate_KeyedLoadIC_Megamorphic(MacroAssembler* masm) {
+  KeyedLoadIC::GenerateMegamorphic(masm);
 }
 
 
diff --git a/src/builtins.h b/src/builtins.h
index 8d09d57fd0..8e4149f257 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -87,7 +87,7 @@ enum BuiltinExtraArguments {
   V(KeyedLoadIC_Initialize, KEYED_LOAD_IC, UNINITIALIZED, kNoExtraICState)    \
   V(KeyedLoadIC_PreMonomorphic, KEYED_LOAD_IC, PREMONOMORPHIC,                \
     kNoExtraICState)                                                          \
-  V(KeyedLoadIC_Generic, KEYED_LOAD_IC, GENERIC, kNoExtraICState)             \
+  V(KeyedLoadIC_Megamorphic, KEYED_LOAD_IC, MEGAMORPHIC, kNoExtraICState)     \
                                                                               \
   V(StoreIC_Setter_ForDeopt, STORE_IC, MONOMORPHIC, StoreIC::kStrictModeState)\
                                                                               \
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index 83a994b1c3..b5a73c660c 100644
--- a/src/code-stubs-hydrogen.cc
+++ b/src/code-stubs-hydrogen.cc
@@ -1907,6 +1907,9 @@ HValue* CodeStubGraphBuilder::BuildCodeStub() {
   }
   if_dict_properties.Else();
   {
+    // TODO(dcarney): don't use keyed lookup cache, but convert to use
+    // megamorphic stub cache.
+    UNREACHABLE();
     // Key is string, properties are fast mode
     HValue* hash = BuildKeyedLookupCacheHash(receiver, key);
 
@@ -2176,7 +2179,7 @@ HValue* CodeStubGraphBuilder::BuildCodeStub() {
   generic_checker.Then();
   {
     // Tail-call to the generic KeyedLoadIC, treating it like a handler.
-    Handle<Code> stub = KeyedLoadIC::generic_stub(isolate());
+    Handle<Code> stub = KeyedLoadIC::megamorphic_stub(isolate());
     HValue* constant_stub = Add<HConstant>(stub);
     LoadDescriptor descriptor(isolate());
     HValue* op_vals[] = {context(), receiver, name};
diff --git a/src/ic/arm/ic-arm.cc b/src/ic/arm/ic-arm.cc
index 4ca8aa72e7..29ec47f6b8 100644
--- a/src/ic/arm/ic-arm.cc
+++ b/src/ic/arm/ic-arm.cc
@@ -465,7 +465,7 @@ void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
 }
 
 
-void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   // The return address is in lr.
   Label slow, check_name, index_smi, index_name, property_array_property;
   Label probe_dictionary, check_number_dictionary;
@@ -519,94 +519,20 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   GenerateKeyedLoadReceiverCheck(masm, receiver, r0, r3,
                                  Map::kHasNamedInterceptor, &slow);
 
-  // If the receiver is a fast-case object, check the keyed lookup
-  // cache. Otherwise probe the dictionary.
+  // If the receiver is a fast-case object, check the stub cache. Otherwise
+  // probe the dictionary.
   __ ldr(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
   __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
   __ cmp(r4, ip);
   __ b(eq, &probe_dictionary);
 
-  // Load the map of the receiver, compute the keyed lookup cache hash
-  // based on 32 bits of the map pointer and the name hash.
-  __ ldr(r0, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  __ mov(r3, Operand(r0, ASR, KeyedLookupCache::kMapHashShift));
-  __ ldr(r4, FieldMemOperand(key, Name::kHashFieldOffset));
-  __ eor(r3, r3, Operand(r4, ASR, Name::kHashShift));
-  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
-  __ And(r3, r3, Operand(mask));
-
-  // Load the key (consisting of map and unique name) from the cache and
-  // check for match.
-  Label load_in_object_property;
-  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
-  Label hit_on_nth_entry[kEntriesPerBucket];
-  ExternalReference cache_keys =
-      ExternalReference::keyed_lookup_cache_keys(isolate);
-
-  __ mov(r4, Operand(cache_keys));
-  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
-
-  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
-    Label try_next_entry;
-    // Load map and move r4 to next entry.
-    __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
-    __ cmp(r0, r5);
-    __ b(ne, &try_next_entry);
-    __ ldr(r5, MemOperand(r4, -kPointerSize));  // Load name
-    __ cmp(key, r5);
-    __ b(eq, &hit_on_nth_entry[i]);
-    __ bind(&try_next_entry);
-  }
-
-  // Last entry: Load map and move r4 to name.
-  __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
-  __ cmp(r0, r5);
-  __ b(ne, &slow);
-  __ ldr(r5, MemOperand(r4));
-  __ cmp(key, r5);
-  __ b(ne, &slow);
-
-  // Get field offset.
-  // r0 : receiver's map
-  // r3 : lookup cache index
-  ExternalReference cache_field_offsets =
-      ExternalReference::keyed_lookup_cache_field_offsets(isolate);
-
-  // Hit on nth entry.
-  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
-    __ bind(&hit_on_nth_entry[i]);
-    __ mov(r4, Operand(cache_field_offsets));
-    if (i != 0) {
-      __ add(r3, r3, Operand(i));
-    }
-    __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
-    __ ldrb(r6, FieldMemOperand(r0, Map::kInObjectPropertiesOffset));
-    __ sub(r5, r5, r6, SetCC);
-    __ b(ge, &property_array_property);
-    if (i != 0) {
-      __ jmp(&load_in_object_property);
-    }
-  }
-
-  // Load in-object property.
-  __ bind(&load_in_object_property);
-  __ ldrb(r6, FieldMemOperand(r0, Map::kInstanceSizeOffset));
-  __ add(r6, r6, r5);  // Index from start of object.
-  __ sub(receiver, receiver, Operand(kHeapObjectTag));  // Remove the heap tag.
-  __ ldr(r0, MemOperand(receiver, r6, LSL, kPointerSizeLog2));
-  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
-                      r4, r3);
-  __ Ret();
-
-  // Load property array property.
-  __ bind(&property_array_property);
-  __ ldr(receiver, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
-  __ add(receiver, receiver, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-  __ ldr(r0, MemOperand(receiver, r5, LSL, kPointerSizeLog2));
-  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
-                      r4, r3);
-  __ Ret();
+  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
+      Code::ComputeHandlerFlags(Code::LOAD_IC));
+  masm->isolate()->stub_cache()->GenerateProbe(
+      masm, Code::LOAD_IC, flags, false, receiver, key, r3, r4, r5, r6);
+  // Cache miss.
+  GenerateMiss(masm);
 
   // Do a quick inline probe of the receiver's dictionary, if it
   // exists.
diff --git a/src/ic/arm64/ic-arm64.cc b/src/ic/arm64/ic-arm64.cc
index 2c1642bba7..7c51938699 100644
--- a/src/ic/arm64/ic-arm64.cc
+++ b/src/ic/arm64/ic-arm64.cc
@@ -527,94 +527,19 @@ static void GenerateKeyedLoadWithNameKey(MacroAssembler* masm, Register key,
   GenerateKeyedLoadReceiverCheck(masm, receiver, scratch1, scratch2,
                                  Map::kHasNamedInterceptor, slow);
 
-  // If the receiver is a fast-case object, check the keyed lookup cache.
-  // Otherwise probe the dictionary.
+  // If the receiver is a fast-case object, check the stub cache. Otherwise
+  // probe the dictionary.
   __ Ldr(scratch2, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   __ Ldr(scratch3, FieldMemOperand(scratch2, HeapObject::kMapOffset));
   __ JumpIfRoot(scratch3, Heap::kHashTableMapRootIndex, &probe_dictionary);
 
-  // We keep the map of the receiver in scratch1.
-  Register receiver_map = scratch1;
-
-  // Load the map of the receiver, compute the keyed lookup cache hash
-  // based on 32 bits of the map pointer and the name hash.
-  __ Ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
-  __ Mov(scratch2, Operand(receiver_map, ASR, KeyedLookupCache::kMapHashShift));
-  __ Ldr(scratch3.W(), FieldMemOperand(key, Name::kHashFieldOffset));
-  __ Eor(scratch2, scratch2, Operand(scratch3, ASR, Name::kHashShift));
-  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
-  __ And(scratch2, scratch2, mask);
-
-  // Load the key (consisting of map and unique name) from the cache and
-  // check for match.
-  Label load_in_object_property;
-  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
-  Label hit_on_nth_entry[kEntriesPerBucket];
-  ExternalReference cache_keys =
-      ExternalReference::keyed_lookup_cache_keys(isolate);
-
-  __ Mov(scratch3, cache_keys);
-  __ Add(scratch3, scratch3, Operand(scratch2, LSL, kPointerSizeLog2 + 1));
-
-  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
-    Label try_next_entry;
-    // Load map and make scratch3 pointing to the next entry.
-    __ Ldr(scratch4, MemOperand(scratch3, kPointerSize * 2, PostIndex));
-    __ Cmp(receiver_map, scratch4);
-    __ B(ne, &try_next_entry);
-    __ Ldr(scratch4, MemOperand(scratch3, -kPointerSize));  // Load name
-    __ Cmp(key, scratch4);
-    __ B(eq, &hit_on_nth_entry[i]);
-    __ Bind(&try_next_entry);
-  }
-
-  // Last entry.
-  __ Ldr(scratch4, MemOperand(scratch3, kPointerSize, PostIndex));
-  __ Cmp(receiver_map, scratch4);
-  __ B(ne, slow);
-  __ Ldr(scratch4, MemOperand(scratch3));
-  __ Cmp(key, scratch4);
-  __ B(ne, slow);
-
-  // Get field offset.
-  ExternalReference cache_field_offsets =
-      ExternalReference::keyed_lookup_cache_field_offsets(isolate);
-
-  // Hit on nth entry.
-  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
-    __ Bind(&hit_on_nth_entry[i]);
-    __ Mov(scratch3, cache_field_offsets);
-    if (i != 0) {
-      __ Add(scratch2, scratch2, i);
-    }
-    __ Ldr(scratch4.W(), MemOperand(scratch3, scratch2, LSL, 2));
-    __ Ldrb(scratch5,
-            FieldMemOperand(receiver_map, Map::kInObjectPropertiesOffset));
-    __ Subs(scratch4, scratch4, scratch5);
-    __ B(ge, &property_array_property);
-    if (i != 0) {
-      __ B(&load_in_object_property);
-    }
-  }
-
-  // Load in-object property.
-  __ Bind(&load_in_object_property);
-  __ Ldrb(scratch5, FieldMemOperand(receiver_map, Map::kInstanceSizeOffset));
-  __ Add(scratch5, scratch5, scratch4);  // Index from start of object.
-  __ Sub(receiver, receiver, kHeapObjectTag);  // Remove the heap tag.
-  __ Ldr(result, MemOperand(receiver, scratch5, LSL, kPointerSizeLog2));
-  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
-                      scratch1, scratch2);
-  __ Ret();
-
-  // Load property array property.
-  __ Bind(&property_array_property);
-  __ Ldr(scratch1, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
-  __ Add(scratch1, scratch1, FixedArray::kHeaderSize - kHeapObjectTag);
-  __ Ldr(result, MemOperand(scratch1, scratch4, LSL, kPointerSizeLog2));
-  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(), 1,
-                      scratch1, scratch2);
-  __ Ret();
+  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
+      Code::ComputeHandlerFlags(Code::LOAD_IC));
+  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, flags,
+                                               false, receiver, key, scratch1,
+                                               scratch2, scratch3, scratch4);
+  // Cache miss.
+  KeyedLoadIC::GenerateMiss(masm);
 
   // Do a quick inline probe of the receiver's dictionary, if it exists.
   __ Bind(&probe_dictionary);
@@ -629,7 +554,7 @@ static void GenerateKeyedLoadWithNameKey(MacroAssembler* masm, Register key,
 }
 
 
-void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   // The return address is in lr.
   Label slow, check_name, index_smi, index_name;
 
diff --git a/src/ic/ia32/ic-ia32.cc b/src/ic/ia32/ic-ia32.cc
index bdaa1f4a99..c302e85d17 100644
--- a/src/ic/ia32/ic-ia32.cc
+++ b/src/ic/ia32/ic-ia32.cc
@@ -330,7 +330,7 @@ static Operand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
 }
 
 
-void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   // The return address is on the stack.
   Label slow, check_name, index_smi, index_name, property_array_property;
   Label probe_dictionary, check_number_dictionary;
@@ -392,95 +392,19 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   GenerateKeyedLoadReceiverCheck(masm, receiver, eax,
                                  Map::kHasNamedInterceptor, &slow);
 
-  // If the receiver is a fast-case object, check the keyed lookup
-  // cache. Otherwise probe the dictionary.
+  // If the receiver is a fast-case object, check the stub cache. Otherwise
+  // probe the dictionary.
   __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset));
   __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
          Immediate(isolate->factory()->hash_table_map()));
   __ j(equal, &probe_dictionary);
 
-  // The receiver's map is still in eax, compute the keyed lookup cache hash
-  // based on 32 bits of the map pointer and the string hash.
-  if (FLAG_debug_code) {
-    __ cmp(eax, FieldOperand(receiver, HeapObject::kMapOffset));
-    __ Check(equal, kMapIsNoLongerInEax);
-  }
-  __ mov(ebx, eax);  // Keep the map around for later.
-  __ shr(eax, KeyedLookupCache::kMapHashShift);
-  __ mov(edi, FieldOperand(key, String::kHashFieldOffset));
-  __ shr(edi, String::kHashShift);
-  __ xor_(eax, edi);
-  __ and_(eax, KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
-
-  // Load the key (consisting of map and internalized string) from the cache and
-  // check for match.
-  Label load_in_object_property;
-  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
-  Label hit_on_nth_entry[kEntriesPerBucket];
-  ExternalReference cache_keys =
-      ExternalReference::keyed_lookup_cache_keys(masm->isolate());
-
-  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
-    Label try_next_entry;
-    __ mov(edi, eax);
-    __ shl(edi, kPointerSizeLog2 + 1);
-    if (i != 0) {
-      __ add(edi, Immediate(kPointerSize * i * 2));
-    }
-    __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
-    __ j(not_equal, &try_next_entry);
-    __ add(edi, Immediate(kPointerSize));
-    __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys));
-    __ j(equal, &hit_on_nth_entry[i]);
-    __ bind(&try_next_entry);
-  }
-
-  __ lea(edi, Operand(eax, 1));
-  __ shl(edi, kPointerSizeLog2 + 1);
-  __ add(edi, Immediate(kPointerSize * (kEntriesPerBucket - 1) * 2));
-  __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
-  __ j(not_equal, &slow);
-  __ add(edi, Immediate(kPointerSize));
-  __ cmp(key, Operand::StaticArray(edi, times_1, cache_keys));
-  __ j(not_equal, &slow);
-
-  // Get field offset.
-  // ebx : receiver's map
-  // eax : lookup cache index
-  ExternalReference cache_field_offsets =
-      ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
-
-  // Hit on nth entry.
-  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
-    __ bind(&hit_on_nth_entry[i]);
-    if (i != 0) {
-      __ add(eax, Immediate(i));
-    }
-    __ mov(edi,
-           Operand::StaticArray(eax, times_pointer_size, cache_field_offsets));
-    __ movzx_b(eax, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
-    __ sub(edi, eax);
-    __ j(above_equal, &property_array_property);
-    if (i != 0) {
-      __ jmp(&load_in_object_property);
-    }
-  }
-
-  // Load in-object property.
-  __ bind(&load_in_object_property);
-  __ movzx_b(eax, FieldOperand(ebx, Map::kInstanceSizeOffset));
-  __ add(eax, edi);
-  __ mov(eax, FieldOperand(receiver, eax, times_pointer_size, 0));
-  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
-  __ ret(0);
-
-  // Load property array property.
-  __ bind(&property_array_property);
-  __ mov(eax, FieldOperand(receiver, JSObject::kPropertiesOffset));
-  __ mov(eax,
-         FieldOperand(eax, edi, times_pointer_size, FixedArray::kHeaderSize));
-  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
-  __ ret(0);
+  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
+      Code::ComputeHandlerFlags(Code::LOAD_IC));
+  masm->isolate()->stub_cache()->GenerateProbe(
+      masm, Code::LOAD_IC, flags, false, receiver, key, ebx, no_reg);
+  // Cache miss.
+  GenerateMiss(masm);
 
   // Do a quick inline probe of the receiver's dictionary, if it
   // exists.
diff --git a/src/ic/ic.cc b/src/ic/ic.cc
index 68c7cc2c45..8fd6e180b2 100644
--- a/src/ic/ic.cc
+++ b/src/ic/ic.cc
@@ -606,11 +606,11 @@ void CompareIC::Clear(Isolate* isolate, Address address, Code* target,
 
 
 // static
-Handle<Code> KeyedLoadIC::generic_stub(Isolate* isolate) {
+Handle<Code> KeyedLoadIC::megamorphic_stub(Isolate* isolate) {
   if (FLAG_compiled_keyed_generic_loads) {
     return KeyedLoadGenericStub(isolate).GetCode();
   } else {
-    return isolate->builtins()->KeyedLoadIC_Generic();
+    return isolate->builtins()->KeyedLoadIC_Megamorphic();
   }
 }
 
@@ -706,7 +706,7 @@ MaybeHandle<Object> LoadIC::Load(Handle<Object> object, Handle<Name> name) {
       if (UseVector()) {
         ConfigureVectorState(GENERIC);
       } else {
-        set_target(*KeyedLoadIC::generic_stub(isolate()));
+        set_target(*KeyedLoadIC::megamorphic_stub(isolate()));
       }
       TRACE_IC("LoadIC", name);
       TRACE_GENERIC_IC(isolate(), "LoadIC", "name as array index");
@@ -951,8 +951,7 @@ void IC::PatchCache(Handle<Name> name, Handle<Code> code) {
         CopyICToMegamorphicCache(name);
       }
       if (UseVector()) {
-        ConfigureVectorState(kind() == Code::KEYED_LOAD_IC ? GENERIC
-                                                           : MEGAMORPHIC);
+        ConfigureVectorState(MEGAMORPHIC);
       } else {
         set_target(*megamorphic_stub());
       }
@@ -969,12 +968,8 @@ void IC::PatchCache(Handle<Name> name, Handle<Code> code) {
     case DEBUG_STUB:
      break;
     case DEFAULT:
-      UNREACHABLE();
-      break;
     case GENERIC:
-      // The generic keyed store stub re-uses store handlers, which can miss.
-      // That's ok, no reason to do anything.
-      DCHECK(target()->kind() == Code::KEYED_STORE_IC);
+      UNREACHABLE();
       break;
   }
 }
@@ -1022,7 +1017,7 @@ Handle<Code> LoadIC::megamorphic_stub() {
     return stub.GetCode();
   } else {
     DCHECK_EQ(Code::KEYED_LOAD_IC, kind());
-    return KeyedLoadIC::generic_stub(isolate());
+    return KeyedLoadIC::megamorphic_stub(isolate());
   }
 }
 
@@ -1091,8 +1086,6 @@ void LoadIC::UpdateCaches(LookupIterator* lookup) {
 
 
 void IC::UpdateMegamorphicCache(HeapType* type, Name* name, Code* code) {
-  // Megamorphic state isn't implemented for keyed loads currently.
-  if (kind() == Code::KEYED_LOAD_IC) return;
   Map* map = *TypeToMap(type, isolate());
   isolate()->stub_cache()->Set(name, map, code);
 }
@@ -1370,14 +1363,14 @@ Handle<Code> KeyedLoadIC::LoadElementStub(Handle<HeapObject> receiver) {
     // If the miss wasn't due to an unseen map, a polymorphic stub
     // won't help, use the generic stub.
     TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "same map added twice");
-    return generic_stub();
+    return megamorphic_stub(isolate());
   }
 
   // If the maximum number of receiver maps has been exceeded, use the generic
   // version of the IC.
   if (target_receiver_maps.length() > kMaxKeyedPolymorphism) {
     TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "max polymorph exceeded");
-    return generic_stub();
+    return megamorphic_stub(isolate());
   }
 
   if (FLAG_vector_ics) {
@@ -1407,7 +1400,7 @@ MaybeHandle<Object> KeyedLoadIC::Load(Handle<Object> object,
   }
 
   Handle<Object> load_handle;
-  Handle<Code> stub = generic_stub();
+  Handle<Code> stub = megamorphic_stub(isolate());
 
   // Check for non-string values that can be converted into an
   // internalized string directly or is representable as a smi.
@@ -1428,7 +1421,7 @@ MaybeHandle<Object> KeyedLoadIC::Load(Handle<Object> object,
 
   if (!UseVector()) {
     if (!is_target_set()) {
-      Code* generic = *generic_stub();
+      Code* generic = *megamorphic_stub(isolate());
       if (*stub == generic) {
         TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "set generic");
       }
@@ -1438,7 +1431,7 @@ MaybeHandle<Object> KeyedLoadIC::Load(Handle<Object> object,
     }
   } else {
     if (!is_vector_set() || stub.is_null()) {
-      Code* generic = *generic_stub();
+      Code* generic = *megamorphic_stub(isolate());
       if (!stub.is_null() && *stub == generic) {
         ConfigureVectorState(GENERIC);
         TRACE_GENERIC_IC(isolate(), "KeyedLoadIC", "set generic");
diff --git a/src/ic/ic.h b/src/ic/ic.h
index 2d67a87d22..2afbb0db78 100644
--- a/src/ic/ic.h
+++ b/src/ic/ic.h
@@ -487,7 +487,7 @@ class KeyedLoadIC : public LoadIC {
   static void GeneratePreMonomorphic(MacroAssembler* masm) {
     GenerateMiss(masm);
   }
-  static void GenerateGeneric(MacroAssembler* masm);
+  static void GenerateMegamorphic(MacroAssembler* masm);
 
   // Bit mask to be tested against bit field for the cases when
   // generic stub should go into slow case.
@@ -498,7 +498,7 @@ class KeyedLoadIC : public LoadIC {
 
   static Handle<Code> initialize_stub(Isolate* isolate);
   static Handle<Code> initialize_stub_in_optimized_code(Isolate* isolate);
-  static Handle<Code> generic_stub(Isolate* isolate);
+  static Handle<Code> megamorphic_stub(Isolate* isolate);
   static Handle<Code> pre_monomorphic_stub(Isolate* isolate);
 
   static void Clear(Isolate* isolate, Code* host, KeyedLoadICNexus* nexus);
@@ -511,8 +511,6 @@ class KeyedLoadIC : public LoadIC {
   }
 
  private:
-  Handle<Code> generic_stub() const { return generic_stub(isolate()); }
-
   static void Clear(Isolate* isolate, Address address, Code* target,
                     ConstantPoolArray* constant_pool);
 
diff --git a/src/ic/x64/ic-x64.cc b/src/ic/x64/ic-x64.cc
index b2b0f8d200..0aba18ebe8 100644
--- a/src/ic/x64/ic-x64.cc
+++ b/src/ic/x64/ic-x64.cc
@@ -274,7 +274,7 @@ static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
 }
 
 
-void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
+void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
   // The return address is on the stack.
   Label slow, check_name, index_smi, index_name, property_array_property;
   Label probe_dictionary, check_number_dictionary;
@@ -325,86 +325,19 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   GenerateKeyedLoadReceiverCheck(masm, receiver, rax, Map::kHasNamedInterceptor,
                                  &slow);
 
-  // If the receiver is a fast-case object, check the keyed lookup
-  // cache. Otherwise probe the dictionary leaving result in key.
+  // If the receiver is a fast-case object, check the stub cache. Otherwise
+  // probe the dictionary.
   __ movp(rbx, FieldOperand(receiver, JSObject::kPropertiesOffset));
   __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                  Heap::kHashTableMapRootIndex);
   __ j(equal, &probe_dictionary);
 
-  // Load the map of the receiver, compute the keyed lookup cache hash
-  // based on 32 bits of the map pointer and the string hash.
-  __ movp(rbx, FieldOperand(receiver, HeapObject::kMapOffset));
-  __ movl(rax, rbx);
-  __ shrl(rax, Immediate(KeyedLookupCache::kMapHashShift));
-  __ movl(rdi, FieldOperand(key, String::kHashFieldOffset));
-  __ shrl(rdi, Immediate(String::kHashShift));
-  __ xorp(rax, rdi);
-  int mask = (KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask);
-  __ andp(rax, Immediate(mask));
-
-  // Load the key (consisting of map and internalized string) from the cache and
-  // check for match.
-  Label load_in_object_property;
-  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
-  Label hit_on_nth_entry[kEntriesPerBucket];
-  ExternalReference cache_keys =
-      ExternalReference::keyed_lookup_cache_keys(masm->isolate());
-
-  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
-    Label try_next_entry;
-    __ movp(rdi, rax);
-    __ shlp(rdi, Immediate(kPointerSizeLog2 + 1));
-    __ LoadAddress(kScratchRegister, cache_keys);
-    int off = kPointerSize * i * 2;
-    __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
-    __ j(not_equal, &try_next_entry);
-    __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
-    __ j(equal, &hit_on_nth_entry[i]);
-    __ bind(&try_next_entry);
-  }
-
-  int off = kPointerSize * (kEntriesPerBucket - 1) * 2;
-  __ cmpp(rbx, Operand(kScratchRegister, rdi, times_1, off));
-  __ j(not_equal, &slow);
-  __ cmpp(key, Operand(kScratchRegister, rdi, times_1, off + kPointerSize));
-  __ j(not_equal, &slow);
-
-  // Get field offset, which is a 32-bit integer.
-  ExternalReference cache_field_offsets =
-      ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
-
-  // Hit on nth entry.
-  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
-    __ bind(&hit_on_nth_entry[i]);
-    if (i != 0) {
-      __ addl(rax, Immediate(i));
-    }
-    __ LoadAddress(kScratchRegister, cache_field_offsets);
-    __ movl(rdi, Operand(kScratchRegister, rax, times_4, 0));
-    __ movzxbp(rax, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
-    __ subp(rdi, rax);
-    __ j(above_equal, &property_array_property);
-    if (i != 0) {
-      __ jmp(&load_in_object_property);
-    }
-  }
-
-  // Load in-object property.
-  __ bind(&load_in_object_property);
-  __ movzxbp(rax, FieldOperand(rbx, Map::kInstanceSizeOffset));
-  __ addp(rax, rdi);
-  __ movp(rax, FieldOperand(receiver, rax, times_pointer_size, 0));
-  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
-  __ ret(0);
-
-  // Load property array property.
-  __ bind(&property_array_property);
-  __ movp(rax, FieldOperand(receiver, JSObject::kPropertiesOffset));
-  __ movp(rax,
-          FieldOperand(rax, rdi, times_pointer_size, FixedArray::kHeaderSize));
-  __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
-  __ ret(0);
+  Code::Flags flags = Code::RemoveTypeAndHolderFromFlags(
+      Code::ComputeHandlerFlags(Code::LOAD_IC));
+  masm->isolate()->stub_cache()->GenerateProbe(
+      masm, Code::LOAD_IC, flags, false, receiver, key, rbx, no_reg);
+  // Cache miss.
+  GenerateMiss(masm);
 
   // Do a quick inline probe of the receiver's dictionary, if it
   // exists.
diff --git a/src/runtime/runtime-object.cc b/src/runtime/runtime-object.cc
index 340a6452e8..2d01bd3e15 100644
--- a/src/runtime/runtime-object.cc
+++ b/src/runtime/runtime-object.cc
@@ -601,33 +601,7 @@ RUNTIME_FUNCTION(Runtime_KeyedGetProperty) {
       DisallowHeapAllocation no_allocation;
       Handle<JSObject> receiver = Handle<JSObject>::cast(receiver_obj);
       Handle<Name> key = Handle<Name>::cast(key_obj);
-      if (receiver->HasFastProperties()) {
-        // Attempt to use lookup cache.
-        Handle<Map> receiver_map(receiver->map(), isolate);
-        KeyedLookupCache* keyed_lookup_cache = isolate->keyed_lookup_cache();
-        int index = keyed_lookup_cache->Lookup(receiver_map, key);
-        if (index != -1) {
-          // Doubles are not cached, so raw read the value.
-          return receiver->RawFastPropertyAt(
-              FieldIndex::ForKeyedLookupCacheIndex(*receiver_map, index));
-        }
-        // Lookup cache miss. Perform lookup and update the cache if
-        // appropriate.
-        LookupIterator it(receiver, key, LookupIterator::OWN);
-        if (it.state() == LookupIterator::DATA &&
-            it.property_details().type() == DATA) {
-          FieldIndex field_index = it.GetFieldIndex();
-          // Do not track double fields in the keyed lookup cache. Reading
-          // double values requires boxing.
-          if (!it.representation().IsDouble()) {
-            keyed_lookup_cache->Update(receiver_map, key,
-                                       field_index.GetKeyedLookupCacheIndex());
-          }
-          AllowHeapAllocation allow_allocation;
-          return *JSObject::FastPropertyAt(receiver, it.representation(),
-                                           field_index);
-        }
-      } else {
+      if (!receiver->HasFastProperties()) {
         // Attempt dictionary lookup.
         NameDictionary* dictionary = receiver->property_dictionary();
         int entry = dictionary->FindEntry(key);
diff --git a/test/cctest/test-heap-profiler.cc b/test/cctest/test-heap-profiler.cc
index 13bcebc3c8..ed1d4936bc 100644
--- a/test/cctest/test-heap-profiler.cc
+++ b/test/cctest/test-heap-profiler.cc
@@ -2395,11 +2395,8 @@ TEST(CheckCodeNames) {
                                           stub_path, arraysize(stub_path));
   CHECK_NE(NULL, node);
 
-  const char* builtin_path1[] = {
-    "::(GC roots)",
-    "::(Builtins)",
-    "::(KeyedLoadIC_Generic builtin)"
-  };
+  const char* builtin_path1[] = {"::(GC roots)", "::(Builtins)",
+                                 "::(KeyedLoadIC_Megamorphic builtin)"};
   node = GetNodeByPath(snapshot, builtin_path1, arraysize(builtin_path1));
   CHECK_NE(NULL, node);