Revert of [ic] [stubs] Don't use Code::flags in megamorphic stub cache hash computations. (patchset #2 id:20001 of https://codereview.chromium.org/2147433002/ )

Reason for revert:
Unfortunately, the performance issues are still there on Android.

Original issue's description:
> [ic] [stubs] Don't use Code::flags in megamorphic stub cache hash computations.
>
> This should avoid weird performance issues when changing layout of Code::flags field.
>
> BUG=chromium:618701
>
> Committed: https://crrev.com/aa4140b1ccc114b82700471513c715f68b7c5dac
> Cr-Commit-Position: refs/heads/master@{#37755}

TBR=jkummerow@chromium.org
# Not skipping CQ checks because original CL landed more than 1 days ago.
BUG=chromium:618701

Review-Url: https://codereview.chromium.org/2147213004
Cr-Commit-Position: refs/heads/master@{#37801}
This commit is contained in:
parent a25e9c6f97
commit e6b3002a61
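The change being reverted had dropped Code::flags from the megamorphic stub cache hash; this revert mixes the flags back into both the primary and secondary offset computations (see the StubCache::PrimaryOffset / StubCache::SecondaryOffset hunks below and their CodeStubAssembler and per-architecture assembler counterparts). A minimal standalone C++ sketch of the restored hashing follows; the constants are illustrative stand-ins, not the exact V8 values.

#include <cstdint>
#include <cstdio>

namespace {

// Stand-in constants (illustrative only; see src/ic/stub-cache.h in the diff
// below for the real kCacheIndexShift, table sizes and flag mask).
constexpr int kCacheIndexShift = 2;
constexpr uint32_t kPrimaryTableSize = 1u << 11;
constexpr uint32_t kSecondaryTableSize = 1u << 9;
constexpr uint32_t kFlagsNotUsedInLookup = 0x7;

// Mirrors PrimaryOffset(): combine the name's hash field with the low map
// bits, xor in the lookup-relevant flag bits, mask to a scaled table index.
uint32_t PrimaryOffset(uint32_t name_hash_field, uint32_t map_low32bits,
                       uint32_t flags) {
  uint32_t iflags = flags & ~kFlagsNotUsedInLookup;
  uint32_t key = (map_low32bits + name_hash_field) ^ iflags;
  return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// Mirrors SecondaryOffset(): reuse the primary offset as a seed, subtract the
// name bits, add the flag bits, mask to the secondary table.
uint32_t SecondaryOffset(uint32_t name_low32bits, uint32_t flags,
                         uint32_t seed) {
  uint32_t iflags = flags & ~kFlagsNotUsedInLookup;
  uint32_t key = (seed - name_low32bits) + iflags;
  return key & ((kSecondaryTableSize - 1) << kCacheIndexShift);
}

}  // namespace

int main() {
  uint32_t primary = PrimaryOffset(0xDEADBEEFu, 0x12345678u, 0x90u);
  uint32_t secondary = SecondaryOffset(0x0BADF00Du, 0x90u, primary);
  std::printf("primary=%u secondary=%u\n", primary, secondary);
  return 0;
}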
@@ -2791,6 +2791,7 @@ void CodeStubAssembler::HandlePolymorphicCase(
}

compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name,
Code::Flags flags,
compiler::Node* map) {
// See v8::internal::StubCache::PrimaryOffset().
STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift);
@@ -2804,18 +2805,28 @@ compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name,
// risk of collision even if the heap is spread over an area larger than
// 4Gb (and not at all if it isn't).
Node* hash = Int32Add(hash_field, map);
// Base the offset on a simple combination of name and map.
// We always set the in_loop bit to zero when generating the lookup code
// so do it here too so the hash codes match.
uint32_t iflags =
(static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
// Base the offset on a simple combination of name, flags, and map.
hash = Word32Xor(hash, Int32Constant(iflags));
uint32_t mask = (StubCache::kPrimaryTableSize - 1)
<< StubCache::kCacheIndexShift;
return Word32And(hash, Int32Constant(mask));
}

compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset(
compiler::Node* name, compiler::Node* seed) {
compiler::Node* name, Code::Flags flags, compiler::Node* seed) {
// See v8::internal::StubCache::SecondaryOffset().

// Use the seed from the primary cache in the secondary cache.
Node* hash = Int32Sub(seed, name);
// We always set the in_loop bit to zero when generating the lookup code
// so do it here too so the hash codes match.
uint32_t iflags =
(static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
hash = Int32Add(hash, Int32Constant(iflags));
int32_t mask = (StubCache::kSecondaryTableSize - 1)
<< StubCache::kCacheIndexShift;
return Word32And(hash, Int32Constant(mask));
@@ -2828,8 +2839,9 @@ enum CodeStubAssembler::StubCacheTable : int {

void CodeStubAssembler::TryProbeStubCacheTable(
StubCache* stub_cache, StubCacheTable table_id,
compiler::Node* entry_offset, compiler::Node* name, compiler::Node* map,
Label* if_handler, Variable* var_handler, Label* if_miss) {
compiler::Node* entry_offset, compiler::Node* name, Code::Flags flags,
compiler::Node* map, Label* if_handler, Variable* var_handler,
Label* if_miss) {
StubCache::Table table = static_cast<StubCache::Table>(table_id);
#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
@@ -2859,19 +2871,18 @@ void CodeStubAssembler::TryProbeStubCacheTable(
Int32Add(entry_offset, Int32Constant(kPointerSize * 2)));
GotoIf(WordNotEqual(map, entry_map), if_miss);

// Check that the flags match what we're looking for.
DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() -
stub_cache->key_reference(table).address());
Node* code = Load(MachineType::Pointer(), key_base,
Int32Add(entry_offset, Int32Constant(kPointerSize)));

// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Node* code_flags =
LoadObjectField(code, Code::kFlagsOffset, MachineType::Uint32());
Assert(Word32Equal(
Int32Constant(flags),
Word32And(code_flags, Int32Constant(~Code::kFlagsNotUsedInLookup))));
GotoIf(Word32NotEqual(Int32Constant(flags),
Word32And(code_flags,
Int32Constant(~Code::kFlagsNotUsedInLookup))),
if_miss);

// We found the handler.
var_handler->Bind(code);
@@ -2881,6 +2892,9 @@ void CodeStubAssembler::TryProbeStubCacheTable(
void CodeStubAssembler::TryProbeStubCache(
StubCache* stub_cache, compiler::Node* receiver, compiler::Node* name,
Label* if_handler, Variable* var_handler, Label* if_miss) {
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));

Label try_secondary(this), miss(this);

Counters* counters = isolate()->counters();
@@ -2892,16 +2906,17 @@ void CodeStubAssembler::TryProbeStubCache(
Node* receiver_map = LoadMap(receiver);

// Probe the primary table.
Node* primary_offset = StubCachePrimaryOffset(name, receiver_map);
TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name,
Node* primary_offset = StubCachePrimaryOffset(name, flags, receiver_map);
TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name, flags,
receiver_map, if_handler, var_handler, &try_secondary);

Bind(&try_secondary);
{
// Probe the secondary table.
Node* secondary_offset = StubCacheSecondaryOffset(name, primary_offset);
Node* secondary_offset =
StubCacheSecondaryOffset(name, flags, primary_offset);
TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name,
receiver_map, if_handler, var_handler, &miss);
flags, receiver_map, if_handler, var_handler, &miss);
}

Bind(&miss);
@@ -440,9 +440,11 @@ class CodeStubAssembler : public compiler::CodeAssembler {
int unroll_count);

compiler::Node* StubCachePrimaryOffset(compiler::Node* name,
Code::Flags flags,
compiler::Node* map);

compiler::Node* StubCacheSecondaryOffset(compiler::Node* name,
Code::Flags flags,
compiler::Node* seed);

// This enum is used here as a replacement for StubCache::Table to avoid
@@ -451,9 +453,9 @@ class CodeStubAssembler : public compiler::CodeAssembler {

void TryProbeStubCacheTable(StubCache* stub_cache, StubCacheTable table_id,
compiler::Node* entry_offset,
compiler::Node* name, compiler::Node* map,
Label* if_handler, Variable* var_handler,
Label* if_miss);
compiler::Node* name, Code::Flags flags,
compiler::Node* map, Label* if_handler,
Variable* var_handler, Label* if_miss);

void TryProbeStubCache(StubCache* stub_cache, compiler::Node* receiver,
compiler::Node* name, Label* if_handler,
@@ -1386,7 +1386,6 @@ class FunctionPrototypeStub : public PlatformCodeStub {
: PlatformCodeStub(isolate) {}

Code::Kind GetCodeKind() const override { return Code::HANDLER; }
ExtraICState GetExtraICState() const override { return Code::LOAD_IC; }

// TODO(mvstanton): only the receiver register is accessed. When this is
// translated to a hydrogen code stub, a new CallInterfaceDescriptor
@@ -15,7 +15,8 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
StubCache::Table table, Register receiver, Register name,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {
@@ -68,15 +69,13 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
// It's a nice optimization if this constant is encodable in the bic insn.

#ifdef DEBUG
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
uint32_t mask = Code::kFlagsNotUsedInLookup;
DCHECK(__ ImmediateFitsAddrMode1Instruction(mask));
__ bic(flags_reg, flags_reg, Operand(mask));
__ cmp(flags_reg, Operand(flags));
__ Check(eq, kUnexpectedValue);
__ b(ne, &miss);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -94,6 +93,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Make sure that code is valid. The multiplying code relies on the
@@ -142,21 +144,25 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// We shift out the last two bits because they are not part of the hash and
// they are always 01 for maps.
__ mov(scratch, Operand(scratch, LSR, kCacheIndexShift));
// Mask down the eor argument to the minimum to keep the immediate
// ARM-encodable.
__ eor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
// Prefer and_ to ubfx here because ubfx takes 2 cycles.
__ and_(scratch, scratch, Operand(mask));

// Probe the primary table.
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);

// Primary miss: Compute hash for secondary probe.
__ sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift));
uint32_t mask2 = kSecondaryTableSize - 1;
__ add(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
__ and_(scratch, scratch, Operand(mask2));

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -23,9 +23,9 @@ namespace internal {
//
// 'receiver', 'name' and 'offset' registers are preserved on miss.
static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
StubCache::Table table, Register receiver, Register name,
Register offset, Register scratch, Register scratch2,
Register scratch3) {
Code::Flags flags, StubCache::Table table,
Register receiver, Register name, Register offset,
Register scratch, Register scratch2, Register scratch3) {
// Some code below relies on the fact that the Entry struct contains
// 3 pointers (name, code, map).
STATIC_ASSERT(sizeof(StubCache::Entry) == (3 * kPointerSize));
@@ -64,16 +64,13 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
// Get the code entry from the cache.
__ Ldr(scratch, MemOperand(scratch, value_off_addr - key_off_addr));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ Ldr(scratch2.W(), FieldMemOperand(scratch, Code::kFlagsOffset));
__ Bic(scratch2.W(), scratch2.W(), Code::kFlagsNotUsedInLookup);
__ Cmp(scratch2.W(), flags);
__ B(ne, &miss);
__ Check(eq, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ B(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -92,6 +89,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Make sure that there are no register conflicts.
@@ -131,21 +131,23 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
__ Ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
__ Ldr(extra, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ Add(scratch, scratch, extra);
__ Eor(scratch, scratch, flags);
// We shift out the last two bits because they are not part of the hash.
__ Ubfx(scratch, scratch, kCacheIndexShift,
CountTrailingZeros(kPrimaryTableSize, 64));

// Probe the primary table.
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);

// Primary miss: Compute hash for secondary table.
__ Sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift));
__ Add(scratch, scratch, flags >> kCacheIndexShift);
__ And(scratch, scratch, kSecondaryTableSize - 1);

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,6 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Kind ic_kind, Code::Flags flags,
StubCache::Table table, Register name, Register receiver,
// Number of the cache entry pointer-size scaled.
Register offset, Register extra) {
@@ -25,7 +26,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
ExternalReference::virtual_handler_register(masm->isolate());

Label miss;
Code::Kind ic_kind = stub_cache->ic_kind();
bool is_vector_store =
IC::ICUseVector(ic_kind) &&
(ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC);
@@ -46,15 +46,13 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ j(not_equal, &miss);

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
__ and_(offset, ~Code::kFlagsNotUsedInLookup);
__ cmp(offset, flags);
__ Check(equal, kUnexpectedValue);
__ j(not_equal, &miss);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -103,15 +101,13 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
// Get the code entry from the cache.
__ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
__ and_(offset, ~Code::kFlagsNotUsedInLookup);
__ cmp(offset, flags);
__ Check(equal, kUnexpectedValue);
__ j(not_equal, &miss);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -148,6 +144,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Assert that code is valid. The multiplying code relies on the entry size
@@ -178,6 +177,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// Get the map of the receiver and compute the hash.
__ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
__ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(offset, flags);
// We mask out the last two bits because they are not part of the hash and
// they are always 01 for maps. Also in the two 'and' instructions below.
__ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
@@ -186,17 +186,21 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
DCHECK(kCacheIndexShift == kPointerSizeLog2);

// Probe the primary table.
ProbeTable(this, masm, kPrimary, name, receiver, offset, extra);
ProbeTable(this, masm, ic_kind_, flags, kPrimary, name, receiver, offset,
extra);

// Primary miss: Compute hash for secondary probe.
__ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
__ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(offset, flags);
__ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
__ sub(offset, name);
__ add(offset, Immediate(flags));
__ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift);

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, name, receiver, offset, extra);
ProbeTable(this, masm, ic_kind_, flags, kSecondary, name, receiver, offset,
extra);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -1024,7 +1024,8 @@ Handle<Code> IC::ComputeHandler(LookupIterator* lookup, Handle<Object> value) {
// cache (which just missed) is different from the cached handler.
if (state() == MEGAMORPHIC && lookup->GetReceiver()->IsHeapObject()) {
Map* map = Handle<HeapObject>::cast(lookup->GetReceiver())->map();
Code* megamorphic_cached_code = stub_cache()->Get(*lookup->name(), map);
Code* megamorphic_cached_code =
stub_cache()->Get(*lookup->name(), map, code->flags());
if (megamorphic_cached_code != *code) {
TRACE_HANDLER_STATS(isolate(), IC_HandlerCacheHit);
return code;
@@ -15,7 +15,8 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
StubCache::Table table, Register receiver, Register name,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {
@@ -60,16 +61,14 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
scratch2 = no_reg;
__ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Register flags_reg = base_addr;
base_addr = no_reg;
__ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
__ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
__ Check(eq, kUnexpectedValue, flags_reg, Operand(flags));
__ Branch(&miss, ne, flags_reg, Operand(flags));

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -88,6 +87,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Make sure that code is valid. The multiplying code relies on the
@@ -136,21 +138,23 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// We shift out the last two bits because they are not part of the hash and
// they are always 01 for maps.
__ srl(scratch, scratch, kCacheIndexShift);
__ Xor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
__ And(scratch, scratch, Operand(mask));

// Probe the primary table.
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);

// Primary miss: Compute hash for secondary probe.
__ srl(at, name, kCacheIndexShift);
__ Subu(scratch, scratch, at);
uint32_t mask2 = kSecondaryTableSize - 1;
__ Addu(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
__ And(scratch, scratch, Operand(mask2));

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,7 +15,8 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
StubCache::Table table, Register receiver, Register name,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {
@@ -62,16 +63,14 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ ld(code, MemOperand(base_addr,
static_cast<int32_t>(value_off_addr - key_off_addr)));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Register flags_reg = base_addr;
base_addr = no_reg;
__ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
__ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
__ Check(eq, kUnexpectedValue, flags_reg, Operand(flags));
__ Branch(&miss, ne, flags_reg, Operand(flags));

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -90,6 +89,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Make sure that code is valid. The multiplying code relies on the
@@ -139,21 +141,23 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// We shift out the last two bits because they are not part of the hash and
// they are always 01 for maps.
__ dsrl(scratch, scratch, kCacheIndexShift);
__ Xor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
__ And(scratch, scratch, Operand(mask));

// Probe the primary table.
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);

// Primary miss: Compute hash for secondary probe.
__ dsrl(at, name, kCacheIndexShift);
__ Dsubu(scratch, scratch, at);
uint64_t mask2 = kSecondaryTableSize - 1;
__ Daddu(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
__ And(scratch, scratch, Operand(mask2));

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,7 +15,8 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
StubCache::Table table, Register receiver, Register name,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {
@@ -71,10 +72,7 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
scratch2 = no_reg;
__ LoadP(code, MemOperand(base_addr, value_off_addr - key_off_addr));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Register flags_reg = base_addr;
base_addr = no_reg;
__ lwz(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
@@ -84,8 +82,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ andc(flags_reg, flags_reg, r0);
__ mov(r0, Operand(flags));
__ cmpl(flags_reg, r0);
__ Check(eq, kUnexpectedValue);
__ bne(&miss);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ b(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -105,6 +104,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

#if V8_TARGET_ARCH_PPC64
@@ -155,22 +157,24 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
__ lwz(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
__ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ add(scratch, scratch, ip);
__ xori(scratch, scratch, Operand(flags));
// The mask omits the last two bits because they are not part of the hash.
__ andi(scratch, scratch,
Operand((kPrimaryTableSize - 1) << kCacheIndexShift));

// Probe the primary table.
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);

// Primary miss: Compute hash for secondary probe.
__ sub(scratch, scratch, name);
__ addi(scratch, scratch, Operand(flags));
__ andi(scratch, scratch,
Operand((kSecondaryTableSize - 1) << kCacheIndexShift));

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,7 +15,8 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
StubCache::Table table, Register receiver, Register name,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {
@@ -69,10 +70,7 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
scratch2 = no_reg;
__ LoadP(code, MemOperand(base_addr, value_off_addr - key_off_addr));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Register flags_reg = base_addr;
base_addr = no_reg;
__ LoadlW(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
@@ -80,8 +78,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
DCHECK(!r0.is(flags_reg));
__ AndP(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
__ CmpLogicalP(flags_reg, Operand(flags));
__ Check(eq, kUnexpectedValue);
__ bne(&miss, Label::kNear);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ b(&miss, Label::kNear);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -101,6 +100,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

#if V8_TARGET_ARCH_S390X
@@ -151,22 +153,24 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
__ LoadlW(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
__ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ AddP(scratch, scratch, ip);
__ XorP(scratch, scratch, Operand(flags));
// The mask omits the last two bits because they are not part of the hash.
__ AndP(scratch, scratch,
Operand((kPrimaryTableSize - 1) << kCacheIndexShift));

// Probe the primary table.
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);

// Primary miss: Compute hash for secondary probe.
__ SubP(scratch, scratch, name);
__ AddP(scratch, scratch, Operand(flags));
__ AndP(scratch, scratch,
Operand((kSecondaryTableSize - 1) << kCacheIndexShift));

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -19,23 +19,33 @@ void StubCache::Initialize() {
Clear();
}

static void CommonStubCacheChecks(Name* name, Map* map, Code* code) {

static Code::Flags CommonStubCacheChecks(Name* name, Map* map,
Code::Flags flags) {
flags = Code::RemoveHolderFromFlags(flags);

// Validate that the name does not move on scavenge, and that we
// can use identity checks instead of structural equality checks.
DCHECK(!name->GetHeap()->InNewSpace(name));
DCHECK(name->IsUniqueName());
DCHECK(name->HasHashCode());
if (code) {
DCHECK_EQ(Code::HANDLER, Code::ExtractKindFromFlags(code->flags()));
}

// The state bits are not important to the hash function because the stub
// cache only contains handlers. Make sure that the bits are the least
// significant so they will be the ones masked out.
DCHECK_EQ(Code::HANDLER, Code::ExtractKindFromFlags(flags));

// Make sure that the cache holder are not included in the hash.
DCHECK(Code::ExtractCacheHolderFromFlags(flags) == 0);

return flags;
}


Code* StubCache::Set(Name* name, Map* map, Code* code) {
CommonStubCacheChecks(name, map, code);
Code::Flags flags = CommonStubCacheChecks(name, map, code->flags());

// Compute the primary entry.
int primary_offset = PrimaryOffset(name, map);
int primary_offset = PrimaryOffset(name, flags, map);
Entry* primary = entry(primary_, primary_offset);
Code* old_code = primary->value;

@@ -43,8 +53,9 @@ Code* StubCache::Set(Name* name, Map* map, Code* code) {
// secondary cache before overwriting it.
if (old_code != isolate_->builtins()->builtin(Builtins::kIllegal)) {
Map* old_map = primary->map;
int seed = PrimaryOffset(primary->key, old_map);
int secondary_offset = SecondaryOffset(primary->key, seed);
Code::Flags old_flags = Code::RemoveHolderFromFlags(old_code->flags());
int seed = PrimaryOffset(primary->key, old_flags, old_map);
int secondary_offset = SecondaryOffset(primary->key, old_flags, seed);
Entry* secondary = entry(secondary_, secondary_offset);
*secondary = *primary;
}
@@ -57,16 +68,19 @@ Code* StubCache::Set(Name* name, Map* map, Code* code) {
return code;
}

Code* StubCache::Get(Name* name, Map* map) {
CommonStubCacheChecks(name, map, nullptr);
int primary_offset = PrimaryOffset(name, map);

Code* StubCache::Get(Name* name, Map* map, Code::Flags flags) {
flags = CommonStubCacheChecks(name, map, flags);
int primary_offset = PrimaryOffset(name, flags, map);
Entry* primary = entry(primary_, primary_offset);
if (primary->key == name && primary->map == map) {
if (primary->key == name && primary->map == map &&
flags == Code::RemoveHolderFromFlags(primary->value->flags())) {
return primary->value;
}
int secondary_offset = SecondaryOffset(name, primary_offset);
int secondary_offset = SecondaryOffset(name, flags, primary_offset);
Entry* secondary = entry(secondary_, secondary_offset);
if (secondary->key == name && secondary->map == map) {
if (secondary->key == name && secondary->map == map &&
flags == Code::RemoveHolderFromFlags(secondary->value->flags())) {
return secondary->value;
}
return NULL;
@@ -89,6 +103,7 @@ void StubCache::Clear() {


void StubCache::CollectMatchingMaps(SmallMapList* types, Handle<Name> name,
Code::Flags flags,
Handle<Context> native_context,
Zone* zone) {
for (int i = 0; i < kPrimaryTableSize; i++) {
@@ -98,7 +113,7 @@ void StubCache::CollectMatchingMaps(SmallMapList* types, Handle<Name> name,
// with a primitive receiver.
if (map == NULL) continue;

int offset = PrimaryOffset(*name, map);
int offset = PrimaryOffset(*name, flags, map);
if (entry(primary_, offset) == &primary_[i] &&
TypeFeedbackOracle::IsRelevantFeedback(map, *native_context)) {
types->AddMapIfMissing(Handle<Map>(map), zone);
@@ -114,10 +129,10 @@ void StubCache::CollectMatchingMaps(SmallMapList* types, Handle<Name> name,
if (map == NULL) continue;

// Lookup in primary table and skip duplicates.
int primary_offset = PrimaryOffset(*name, map);
int primary_offset = PrimaryOffset(*name, flags, map);

// Lookup in secondary table and add matches.
int offset = SecondaryOffset(*name, primary_offset);
int offset = SecondaryOffset(*name, flags, primary_offset);
if (entry(secondary_, offset) == &secondary_[i] &&
TypeFeedbackOracle::IsRelevantFeedback(map, *native_context)) {
types->AddMapIfMissing(Handle<Map>(map), zone);
@@ -41,12 +41,13 @@ class StubCache {
void Initialize();
// Access cache for entry hash(name, map).
Code* Set(Name* name, Map* map, Code* code);
Code* Get(Name* name, Map* map);
Code* Get(Name* name, Map* map, Code::Flags flags);
// Clear the lookup table (@ mark compact collection).
void Clear();
// Collect all maps that match the name.
// Collect all maps that match the name and flags.
void CollectMatchingMaps(SmallMapList* types, Handle<Name> name,
Handle<Context> native_context, Zone* zone);
Code::Flags flags, Handle<Context> native_context,
Zone* zone);
// Generate code for probing the stub cache table.
// Arguments extra, extra2 and extra3 may be used to pass additional scratch
// registers. Set to no_reg if not needed.
@@ -96,12 +97,13 @@ class StubCache {
static const int kSecondaryTableBits = 9;
static const int kSecondaryTableSize = (1 << kSecondaryTableBits);

static int PrimaryOffsetForTesting(Name* name, Map* map) {
return PrimaryOffset(name, map);
static int PrimaryOffsetForTesting(Name* name, Code::Flags flags, Map* map) {
return PrimaryOffset(name, flags, map);
}

static int SecondaryOffsetForTesting(Name* name, int seed) {
return SecondaryOffset(name, seed);
static int SecondaryOffsetForTesting(Name* name, Code::Flags flags,
int seed) {
return SecondaryOffset(name, flags, seed);
}

// The constructor is made public only for the purposes of testing.
@@ -118,7 +120,7 @@ class StubCache {
// Hash algorithm for the primary table. This algorithm is replicated in
// assembler for every architecture. Returns an index into the table that
// is scaled by 1 << kCacheIndexShift.
static int PrimaryOffset(Name* name, Map* map) {
static int PrimaryOffset(Name* name, Code::Flags flags, Map* map) {
STATIC_ASSERT(kCacheIndexShift == Name::kHashShift);
// Compute the hash of the name (use entire hash field).
DCHECK(name->HasHashCode());
@@ -128,19 +130,27 @@ class StubCache {
// 4Gb (and not at all if it isn't).
uint32_t map_low32bits =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
// Base the offset on a simple combination of name and map.
uint32_t key = map_low32bits + field;
// We always set the in_loop bit to zero when generating the lookup code
// so do it here too so the hash codes match.
uint32_t iflags =
(static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
// Base the offset on a simple combination of name, flags, and map.
uint32_t key = (map_low32bits + field) ^ iflags;
return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// Hash algorithm for the secondary table. This algorithm is replicated in
// assembler for every architecture. Returns an index into the table that
// is scaled by 1 << kCacheIndexShift.
static int SecondaryOffset(Name* name, int seed) {
static int SecondaryOffset(Name* name, Code::Flags flags, int seed) {
// Use the seed from the primary cache in the secondary cache.
uint32_t name_low32bits =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
uint32_t key = (seed - name_low32bits);
// We always set the in_loop bit to zero when generating the lookup code
// so do it here too so the hash codes match.
uint32_t iflags =
(static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
uint32_t key = (seed - name_low32bits) + iflags;
return key & ((kSecondaryTableSize - 1) << kCacheIndexShift);
}
@@ -15,7 +15,8 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
StubCache::Table table, Register receiver, Register name,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
// The offset is scaled by 4, based on
// kCacheIndexShift, which is two bits
Register offset) {
@@ -56,15 +57,13 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ LoadAddress(kScratchRegister, value_offset);
__ movp(kScratchRegister, Operand(kScratchRegister, offset, scale_factor, 0));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
__ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup));
__ cmpl(offset, Immediate(flags));
__ Check(equal, kUnexpectedValue);
__ j(not_equal, &miss);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -82,6 +81,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;
USE(extra); // The register extra is not used on the X64 platform.
USE(extra2); // The register extra2 is not used on the X64 platform.
@@ -127,22 +129,25 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
__ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
// Use only the low 32 bits of the map pointer.
__ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ xorp(scratch, Immediate(flags));
// We mask out the last two bits because they are not part of the hash and
// they are always 01 for maps. Also in the two 'and' instructions below.
__ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));

// Probe the primary table.
ProbeTable(this, masm, kPrimary, receiver, name, scratch);
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch);

// Primary miss: Compute hash for secondary probe.
__ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
__ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ xorp(scratch, Immediate(flags));
__ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
__ subl(scratch, name);
__ addl(scratch, Immediate(flags));
__ andp(scratch, Immediate((kSecondaryTableSize - 1) << kCacheIndexShift));

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, receiver, name, scratch);
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,6 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Kind ic_kind, Code::Flags flags,
StubCache::Table table, Register name, Register receiver,
// Number of the cache entry pointer-size scaled.
Register offset, Register extra) {
@@ -25,7 +26,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
ExternalReference::virtual_handler_register(masm->isolate());

Label miss;
Code::Kind ic_kind = stub_cache->ic_kind();
bool is_vector_store =
IC::ICUseVector(ic_kind) &&
(ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC);
@@ -46,15 +46,13 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ j(not_equal, &miss);

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
__ and_(offset, ~Code::kFlagsNotUsedInLookup);
__ cmp(offset, flags);
__ Check(equal, kUnexpectedValue);
__ j(not_equal, &miss);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -103,15 +101,13 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
// Get the code entry from the cache.
__ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
__ and_(offset, ~Code::kFlagsNotUsedInLookup);
__ cmp(offset, flags);
__ Check(equal, kUnexpectedValue);
__ j(not_equal, &miss);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
@@ -148,6 +144,9 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Assert that code is valid. The multiplying code relies on the entry size
@@ -178,6 +177,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// Get the map of the receiver and compute the hash.
__ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
__ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(offset, flags);
// We mask out the last two bits because they are not part of the hash and
// they are always 01 for maps. Also in the two 'and' instructions below.
__ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
@@ -186,17 +186,21 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
DCHECK(kCacheIndexShift == kPointerSizeLog2);

// Probe the primary table.
ProbeTable(this, masm, kPrimary, name, receiver, offset, extra);
ProbeTable(this, masm, ic_kind_, flags, kPrimary, name, receiver, offset,
extra);

// Primary miss: Compute hash for secondary probe.
__ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
__ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(offset, flags);
__ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
__ sub(offset, name);
__ add(offset, Immediate(flags));
__ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift);

// Probe the secondary table.
ProbeTable(this, masm, kSecondary, name, receiver, offset, extra);
ProbeTable(this, masm, ic_kind_, flags, kSecondary, name, receiver, offset,
extra);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -280,7 +280,8 @@ void TypeFeedbackOracle::PropertyReceiverTypes(FeedbackVectorSlot slot,
receiver_types->Clear();
if (!slot.IsInvalid()) {
LoadICNexus nexus(feedback_vector_, slot);
CollectReceiverTypes(isolate()->load_stub_cache(), &nexus, name,
Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC);
CollectReceiverTypes(isolate()->load_stub_cache(), &nexus, name, flags,
receiver_types);
}
}
@@ -306,7 +307,8 @@ void TypeFeedbackOracle::AssignmentReceiverTypes(FeedbackVectorSlot slot,
Handle<Name> name,
SmallMapList* receiver_types) {
receiver_types->Clear();
CollectReceiverTypes(isolate()->store_stub_cache(), slot, name,
Code::Flags flags = Code::ComputeHandlerFlags(Code::STORE_IC);
CollectReceiverTypes(isolate()->store_stub_cache(), slot, name, flags,
receiver_types);
}

@@ -329,19 +331,22 @@ void TypeFeedbackOracle::CountReceiverTypes(FeedbackVectorSlot slot,
void TypeFeedbackOracle::CollectReceiverTypes(StubCache* stub_cache,
FeedbackVectorSlot slot,
Handle<Name> name,
Code::Flags flags,
SmallMapList* types) {
StoreICNexus nexus(feedback_vector_, slot);
CollectReceiverTypes(stub_cache, &nexus, name, types);
CollectReceiverTypes(stub_cache, &nexus, name, flags, types);
}

void TypeFeedbackOracle::CollectReceiverTypes(StubCache* stub_cache,
FeedbackNexus* nexus,
Handle<Name> name,
Code::Flags flags,
SmallMapList* types) {
if (FLAG_collect_megamorphic_maps_from_stub_cache &&
nexus->ic_state() == MEGAMORPHIC) {
types->Reserve(4, zone());
stub_cache->CollectMatchingMaps(types, name, native_context_, zone());
stub_cache->CollectMatchingMaps(types, name, flags, native_context_,
zone());
} else {
CollectReceiverTypes(nexus, types);
}
@@ -97,9 +97,11 @@ class TypeFeedbackOracle: public ZoneObject {

private:
void CollectReceiverTypes(StubCache* stub_cache, FeedbackVectorSlot slot,
Handle<Name> name, SmallMapList* types);
Handle<Name> name, Code::Flags flags,
SmallMapList* types);
void CollectReceiverTypes(StubCache* stub_cache, FeedbackNexus* nexus,
Handle<Name> name, SmallMapList* types);
Handle<Name> name, Code::Flags flags,
SmallMapList* types);

// Returns true if there is at least one string map and if
// all maps are string maps.
@@ -1109,16 +1109,18 @@ void TestStubCacheOffsetCalculation(StubCache::Table table,
const int kNumParams = 2;
CodeStubAssemblerTester m(isolate, kNumParams);

Code::Flags code_flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(handler_kind));
{
Node* name = m.Parameter(0);
Node* map = m.Parameter(1);
Node* primary_offset = m.StubCachePrimaryOffset(name, map);
Node* primary_offset = m.StubCachePrimaryOffset(name, code_flags, map);
Node* result;
if (table == StubCache::kPrimary) {
result = primary_offset;
} else {
CHECK_EQ(StubCache::kSecondary, table);
result = m.StubCacheSecondaryOffset(name, primary_offset);
result = m.StubCacheSecondaryOffset(name, code_flags, primary_offset);
}
m.Return(m.SmiFromWord32(result));
}
@@ -1161,12 +1163,13 @@ void TestStubCacheOffsetCalculation(StubCache::Table table,

int expected_result;
{
int primary_offset = StubCache::PrimaryOffsetForTesting(*name, *map);
int primary_offset =
StubCache::PrimaryOffsetForTesting(*name, code_flags, *map);
if (table == StubCache::kPrimary) {
expected_result = primary_offset;
} else {
expected_result =
StubCache::SecondaryOffsetForTesting(*name, primary_offset);
expected_result = StubCache::SecondaryOffsetForTesting(
*name, code_flags, primary_offset);
}
}
Handle<Object> result = ft.Call(name, map).ToHandleChecked();
@@ -1214,6 +1217,9 @@ TEST(TryProbeStubCache) {
CodeStubAssemblerTester m(isolate, kNumParams);

Code::Kind ic_kind = Code::LOAD_IC;
Code::Flags flags_to_query =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind));

StubCache stub_cache(isolate, ic_kind);
stub_cache.Clear();

@@ -1293,8 +1299,25 @@ TEST(TryProbeStubCache) {

// Generate some number of handlers.
for (int i = 0; i < 30; i++) {
Code::Kind code_kind;
switch (rand_gen.NextInt(4)) {
case 0:
code_kind = Code::LOAD_IC;
break;
case 1:
code_kind = Code::KEYED_LOAD_IC;
break;
case 2:
code_kind = Code::STORE_IC;
break;
case 3:
code_kind = Code::KEYED_STORE_IC;
break;
default:
UNREACHABLE();
}
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind));
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(code_kind));
handlers.push_back(CreateCodeWithFlags(flags));
}

@@ -1319,7 +1342,7 @@ TEST(TryProbeStubCache) {
int index = rand_gen.NextInt();
Handle<Name> name = names[index % names.size()];
Handle<JSObject> receiver = receivers[index % receivers.size()];
Code* handler = stub_cache.Get(*name, receiver->map());
Code* handler = stub_cache.Get(*name, receiver->map(), flags_to_query);
if (handler == nullptr) {
queried_non_existing = true;
} else {
@@ -1335,7 +1358,7 @@ TEST(TryProbeStubCache) {
int index2 = rand_gen.NextInt();
Handle<Name> name = names[index1 % names.size()];
Handle<JSObject> receiver = receivers[index2 % receivers.size()];
Code* handler = stub_cache.Get(*name, receiver->map());
Code* handler = stub_cache.Get(*name, receiver->map(), flags_to_query);
if (handler == nullptr) {
queried_non_existing = true;
} else {