[ic] [stubs] Don't use Code::flags in megamorphic stub cache hash computations.

This should avoid weird performance issues when the layout of the Code::flags field changes.

BUG=chromium:618701
Review-Url: https://codereview.chromium.org/2147433002
Cr-Commit-Position: refs/heads/master@{#37755}

parent a91dc7cde2
commit aa4140b1cc
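For orientation before the diff: after this change, the primary and secondary stub cache offsets depend only on the name's hash field and the receiver map; Code::flags no longer enters either hash, so rearranging the flags bit layout cannot perturb how handlers are distributed across the cache. Below is a minimal standalone C++ sketch of the new offset computation. It mirrors StubCache::PrimaryOffset/SecondaryOffset as changed in this diff, but the index shift and table sizes are assumed values, and hash_field/map_low32bits/name_low32bits stand in for the real Name hash field and pointer bits.

#include <cstdint>
#include <cstdio>

// Assumed constants for illustration; the real ones live in StubCache and
// Name (kCacheIndexShift must equal Name::kHashShift, see the STATIC_ASSERTs
// in the diff below).
constexpr uint32_t kCacheIndexShift = 2;
constexpr uint32_t kPrimaryTableSize = 1 << 11;
constexpr uint32_t kSecondaryTableSize = 1 << 9;

// New primary hash: name hash field plus the low 32 bits of the map pointer,
// masked down to a table index scaled by 1 << kCacheIndexShift.
uint32_t PrimaryOffset(uint32_t hash_field, uint32_t map_low32bits) {
  uint32_t key = map_low32bits + hash_field;
  return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// New secondary hash: the primary offset acts as a seed and the low 32 bits
// of the name pointer are subtracted; flags are no longer mixed in.
uint32_t SecondaryOffset(uint32_t name_low32bits, uint32_t seed) {
  uint32_t key = seed - name_low32bits;
  return key & ((kSecondaryTableSize - 1) << kCacheIndexShift);
}

int main() {
  // Hypothetical inputs, only to show the data flow.
  uint32_t hash_field = 0x5a3c9f10;
  uint32_t map_bits = 0x0826b340;
  uint32_t name_bits = 0x0826c1a8;
  uint32_t primary = PrimaryOffset(hash_field, map_bits);
  uint32_t secondary = SecondaryOffset(name_bits, primary);
  std::printf("primary=0x%x secondary=0x%x\n", primary, secondary);
  return 0;
}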
@@ -2730,7 +2730,6 @@ void CodeStubAssembler::HandlePolymorphicCase(
}

compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name,
Code::Flags flags,
compiler::Node* map) {
// See v8::internal::StubCache::PrimaryOffset().
STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift);

@@ -2744,28 +2743,18 @@ compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name,
// risk of collision even if the heap is spread over an area larger than
// 4Gb (and not at all if it isn't).
Node* hash = Int32Add(hash_field, map);
// We always set the in_loop bit to zero when generating the lookup code
// so do it here too so the hash codes match.
uint32_t iflags =
(static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
// Base the offset on a simple combination of name, flags, and map.
hash = Word32Xor(hash, Int32Constant(iflags));
// Base the offset on a simple combination of name and map.
uint32_t mask = (StubCache::kPrimaryTableSize - 1)
<< StubCache::kCacheIndexShift;
return Word32And(hash, Int32Constant(mask));
}

compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset(
compiler::Node* name, Code::Flags flags, compiler::Node* seed) {
compiler::Node* name, compiler::Node* seed) {
// See v8::internal::StubCache::SecondaryOffset().

// Use the seed from the primary cache in the secondary cache.
Node* hash = Int32Sub(seed, name);
// We always set the in_loop bit to zero when generating the lookup code
// so do it here too so the hash codes match.
uint32_t iflags =
(static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
hash = Int32Add(hash, Int32Constant(iflags));
int32_t mask = (StubCache::kSecondaryTableSize - 1)
<< StubCache::kCacheIndexShift;
return Word32And(hash, Int32Constant(mask));

@@ -2778,9 +2767,8 @@ enum CodeStubAssembler::StubCacheTable : int {

void CodeStubAssembler::TryProbeStubCacheTable(
StubCache* stub_cache, StubCacheTable table_id,
compiler::Node* entry_offset, compiler::Node* name, Code::Flags flags,
compiler::Node* map, Label* if_handler, Variable* var_handler,
Label* if_miss) {
compiler::Node* entry_offset, compiler::Node* name, compiler::Node* map,
Label* if_handler, Variable* var_handler, Label* if_miss) {
StubCache::Table table = static_cast<StubCache::Table>(table_id);
#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {

@@ -2810,18 +2798,19 @@ void CodeStubAssembler::TryProbeStubCacheTable(
Int32Add(entry_offset, Int32Constant(kPointerSize * 2)));
GotoIf(WordNotEqual(map, entry_map), if_miss);

// Check that the flags match what we're looking for.
DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() -
stub_cache->key_reference(table).address());
Node* code = Load(MachineType::Pointer(), key_base,
Int32Add(entry_offset, Int32Constant(kPointerSize)));

// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Node* code_flags =
LoadObjectField(code, Code::kFlagsOffset, MachineType::Uint32());
GotoIf(Word32NotEqual(Int32Constant(flags),
Word32And(code_flags,
Int32Constant(~Code::kFlagsNotUsedInLookup))),
if_miss);
Assert(Word32Equal(
Int32Constant(flags),
Word32And(code_flags, Int32Constant(~Code::kFlagsNotUsedInLookup))));

// We found the handler.
var_handler->Bind(code);

@@ -2831,9 +2820,6 @@ void CodeStubAssembler::TryProbeStubCacheTable(
void CodeStubAssembler::TryProbeStubCache(
StubCache* stub_cache, compiler::Node* receiver, compiler::Node* name,
Label* if_handler, Variable* var_handler, Label* if_miss) {
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));

Label try_secondary(this), miss(this);

Counters* counters = isolate()->counters();

@@ -2845,17 +2831,16 @@ void CodeStubAssembler::TryProbeStubCache(
Node* receiver_map = LoadMap(receiver);

// Probe the primary table.
Node* primary_offset = StubCachePrimaryOffset(name, flags, receiver_map);
TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name, flags,
Node* primary_offset = StubCachePrimaryOffset(name, receiver_map);
TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name,
receiver_map, if_handler, var_handler, &try_secondary);

Bind(&try_secondary);
{
// Probe the secondary table.
Node* secondary_offset =
StubCacheSecondaryOffset(name, flags, primary_offset);
Node* secondary_offset = StubCacheSecondaryOffset(name, primary_offset);
TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name,
flags, receiver_map, if_handler, var_handler, &miss);
receiver_map, if_handler, var_handler, &miss);
}

Bind(&miss);
@@ -438,11 +438,9 @@ class CodeStubAssembler : public compiler::CodeAssembler {
int unroll_count);

compiler::Node* StubCachePrimaryOffset(compiler::Node* name,
Code::Flags flags,
compiler::Node* map);

compiler::Node* StubCacheSecondaryOffset(compiler::Node* name,
Code::Flags flags,
compiler::Node* seed);

// This enum is used here as a replacement for StubCache::Table to avoid

@@ -451,9 +449,9 @@ class CodeStubAssembler : public compiler::CodeAssembler {

void TryProbeStubCacheTable(StubCache* stub_cache, StubCacheTable table_id,
compiler::Node* entry_offset,
compiler::Node* name, Code::Flags flags,
compiler::Node* map, Label* if_handler,
Variable* var_handler, Label* if_miss);
compiler::Node* name, compiler::Node* map,
Label* if_handler, Variable* var_handler,
Label* if_miss);

void TryProbeStubCache(StubCache* stub_cache, compiler::Node* receiver,
compiler::Node* name, Label* if_handler,
@@ -1386,6 +1386,7 @@ class FunctionPrototypeStub : public PlatformCodeStub {
: PlatformCodeStub(isolate) {}

Code::Kind GetCodeKind() const override { return Code::HANDLER; }
ExtraICState GetExtraICState() const override { return Code::LOAD_IC; }

// TODO(mvstanton): only the receiver register is accessed. When this is
// translated to a hydrogen code stub, a new CallInterfaceDescriptor
@@ -15,8 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
StubCache::Table table, Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {

@@ -69,13 +68,15 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ ldr(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
// It's a nice optimization if this constant is encodable in the bic insn.

#ifdef DEBUG
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
uint32_t mask = Code::kFlagsNotUsedInLookup;
DCHECK(__ ImmediateFitsAddrMode1Instruction(mask));
__ bic(flags_reg, flags_reg, Operand(mask));
__ cmp(flags_reg, Operand(flags));
__ b(ne, &miss);
__ Check(eq, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -93,9 +94,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Make sure that code is valid. The multiplying code relies on the

@@ -144,25 +142,21 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// We shift out the last two bits because they are not part of the hash and
// they are always 01 for maps.
__ mov(scratch, Operand(scratch, LSR, kCacheIndexShift));
// Mask down the eor argument to the minimum to keep the immediate
// ARM-encodable.
__ eor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
// Prefer and_ to ubfx here because ubfx takes 2 cycles.
__ and_(scratch, scratch, Operand(mask));

// Probe the primary table.
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);

// Primary miss: Compute hash for secondary probe.
__ sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift));
uint32_t mask2 = kSecondaryTableSize - 1;
__ add(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
__ and_(scratch, scratch, Operand(mask2));

// Probe the secondary table.
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -23,9 +23,9 @@ namespace internal {
//
// 'receiver', 'name' and 'offset' registers are preserved on miss.
static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name, Register offset,
Register scratch, Register scratch2, Register scratch3) {
StubCache::Table table, Register receiver, Register name,
Register offset, Register scratch, Register scratch2,
Register scratch3) {
// Some code below relies on the fact that the Entry struct contains
// 3 pointers (name, code, map).
STATIC_ASSERT(sizeof(StubCache::Entry) == (3 * kPointerSize));

@@ -64,13 +64,16 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
// Get the code entry from the cache.
__ Ldr(scratch, MemOperand(scratch, value_off_addr - key_off_addr));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ Ldr(scratch2.W(), FieldMemOperand(scratch, Code::kFlagsOffset));
__ Bic(scratch2.W(), scratch2.W(), Code::kFlagsNotUsedInLookup);
__ Cmp(scratch2.W(), flags);
__ B(ne, &miss);
__ Check(eq, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ B(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -89,9 +92,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Make sure that there are no register conflicts.

@@ -131,23 +131,21 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
__ Ldr(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
__ Ldr(extra, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ Add(scratch, scratch, extra);
__ Eor(scratch, scratch, flags);
// We shift out the last two bits because they are not part of the hash.
__ Ubfx(scratch, scratch, kCacheIndexShift,
CountTrailingZeros(kPrimaryTableSize, 64));

// Probe the primary table.
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);

// Primary miss: Compute hash for secondary table.
__ Sub(scratch, scratch, Operand(name, LSR, kCacheIndexShift));
__ Add(scratch, scratch, flags >> kCacheIndexShift);
__ And(scratch, scratch, kSecondaryTableSize - 1);

// Probe the secondary table.
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,7 +15,6 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Kind ic_kind, Code::Flags flags,
StubCache::Table table, Register name, Register receiver,
// Number of the cache entry pointer-size scaled.
Register offset, Register extra) {

@@ -26,6 +25,7 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
ExternalReference::virtual_handler_register(masm->isolate());

Label miss;
Code::Kind ic_kind = stub_cache->ic_kind();
bool is_vector_store =
IC::ICUseVector(ic_kind) &&
(ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC);

@@ -46,13 +46,15 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ j(not_equal, &miss);

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
__ and_(offset, ~Code::kFlagsNotUsedInLookup);
__ cmp(offset, flags);
__ j(not_equal, &miss);
__ Check(equal, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -101,13 +103,15 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
// Get the code entry from the cache.
__ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
__ and_(offset, ~Code::kFlagsNotUsedInLookup);
__ cmp(offset, flags);
__ j(not_equal, &miss);
__ Check(equal, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -144,9 +148,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Assert that code is valid. The multiplying code relies on the entry size

@@ -177,7 +178,6 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// Get the map of the receiver and compute the hash.
__ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
__ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(offset, flags);
// We mask out the last two bits because they are not part of the hash and
// they are always 01 for maps. Also in the two 'and' instructions below.
__ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);

@@ -186,21 +186,17 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
DCHECK(kCacheIndexShift == kPointerSizeLog2);

// Probe the primary table.
ProbeTable(this, masm, ic_kind_, flags, kPrimary, name, receiver, offset,
extra);
ProbeTable(this, masm, kPrimary, name, receiver, offset, extra);

// Primary miss: Compute hash for secondary probe.
__ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
__ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(offset, flags);
__ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
__ sub(offset, name);
__ add(offset, Immediate(flags));
__ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift);

// Probe the secondary table.
ProbeTable(this, masm, ic_kind_, flags, kSecondary, name, receiver, offset,
extra);
ProbeTable(this, masm, kSecondary, name, receiver, offset, extra);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -1024,8 +1024,7 @@ Handle<Code> IC::ComputeHandler(LookupIterator* lookup, Handle<Object> value) {
// cache (which just missed) is different from the cached handler.
if (state() == MEGAMORPHIC && lookup->GetReceiver()->IsHeapObject()) {
Map* map = Handle<HeapObject>::cast(lookup->GetReceiver())->map();
Code* megamorphic_cached_code =
stub_cache()->Get(*lookup->name(), map, code->flags());
Code* megamorphic_cached_code = stub_cache()->Get(*lookup->name(), map);
if (megamorphic_cached_code != *code) {
TRACE_HANDLER_STATS(isolate(), IC_HandlerCacheHit);
return code;
@@ -15,8 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
StubCache::Table table, Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {

@@ -61,14 +60,16 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
scratch2 = no_reg;
__ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Register flags_reg = base_addr;
base_addr = no_reg;
__ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
__ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
__ Branch(&miss, ne, flags_reg, Operand(flags));
__ Check(eq, kUnexpectedValue, flags_reg, Operand(flags));

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -87,9 +88,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Make sure that code is valid. The multiplying code relies on the

@@ -138,23 +136,21 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// We shift out the last two bits because they are not part of the hash and
// they are always 01 for maps.
__ srl(scratch, scratch, kCacheIndexShift);
__ Xor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
__ And(scratch, scratch, Operand(mask));

// Probe the primary table.
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);

// Primary miss: Compute hash for secondary probe.
__ srl(at, name, kCacheIndexShift);
__ Subu(scratch, scratch, at);
uint32_t mask2 = kSecondaryTableSize - 1;
__ Addu(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
__ And(scratch, scratch, Operand(mask2));

// Probe the secondary table.
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,8 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
StubCache::Table table, Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {

@@ -63,14 +62,16 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ ld(code, MemOperand(base_addr,
static_cast<int32_t>(value_off_addr - key_off_addr)));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Register flags_reg = base_addr;
base_addr = no_reg;
__ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
__ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
__ Branch(&miss, ne, flags_reg, Operand(flags));
__ Check(eq, kUnexpectedValue, flags_reg, Operand(flags));

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -89,9 +90,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Make sure that code is valid. The multiplying code relies on the

@@ -141,23 +139,21 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// We shift out the last two bits because they are not part of the hash and
// they are always 01 for maps.
__ dsrl(scratch, scratch, kCacheIndexShift);
__ Xor(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask));
__ And(scratch, scratch, Operand(mask));

// Probe the primary table.
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);

// Primary miss: Compute hash for secondary probe.
__ dsrl(at, name, kCacheIndexShift);
__ Dsubu(scratch, scratch, at);
uint64_t mask2 = kSecondaryTableSize - 1;
__ Daddu(scratch, scratch, Operand((flags >> kCacheIndexShift) & mask2));
__ And(scratch, scratch, Operand(mask2));

// Probe the secondary table.
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,8 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
StubCache::Table table, Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {

@@ -72,7 +71,10 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
scratch2 = no_reg;
__ LoadP(code, MemOperand(base_addr, value_off_addr - key_off_addr));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Register flags_reg = base_addr;
base_addr = no_reg;
__ lwz(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));

@@ -82,9 +84,8 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ andc(flags_reg, flags_reg, r0);
__ mov(r0, Operand(flags));
__ cmpl(flags_reg, r0);
__ bne(&miss);
__ Check(eq, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ b(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -104,9 +105,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

#if V8_TARGET_ARCH_PPC64

@@ -157,24 +155,22 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
__ lwz(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
__ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ add(scratch, scratch, ip);
__ xori(scratch, scratch, Operand(flags));
// The mask omits the last two bits because they are not part of the hash.
__ andi(scratch, scratch,
Operand((kPrimaryTableSize - 1) << kCacheIndexShift));

// Probe the primary table.
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);

// Primary miss: Compute hash for secondary probe.
__ sub(scratch, scratch, name);
__ addi(scratch, scratch, Operand(flags));
__ andi(scratch, scratch,
Operand((kSecondaryTableSize - 1) << kCacheIndexShift));

// Probe the secondary table.
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,8 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
StubCache::Table table, Register receiver, Register name,
// Number of the cache entry, not scaled.
Register offset, Register scratch, Register scratch2,
Register offset_scratch) {

@@ -70,7 +69,10 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
scratch2 = no_reg;
__ LoadP(code, MemOperand(base_addr, value_off_addr - key_off_addr));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
Register flags_reg = base_addr;
base_addr = no_reg;
__ LoadlW(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));

@@ -78,9 +80,8 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
DCHECK(!r0.is(flags_reg));
__ AndP(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
__ CmpLogicalP(flags_reg, Operand(flags));
__ bne(&miss, Label::kNear);
__ Check(eq, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ b(&miss, Label::kNear);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -100,9 +101,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

#if V8_TARGET_ARCH_S390X

@@ -153,24 +151,22 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
__ LoadlW(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
__ LoadP(ip, FieldMemOperand(receiver, HeapObject::kMapOffset));
__ AddP(scratch, scratch, ip);
__ XorP(scratch, scratch, Operand(flags));
// The mask omits the last two bits because they are not part of the hash.
__ AndP(scratch, scratch,
Operand((kPrimaryTableSize - 1) << kCacheIndexShift));

// Probe the primary table.
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kPrimary, receiver, name, scratch, extra, extra2,
extra3);

// Primary miss: Compute hash for secondary probe.
__ SubP(scratch, scratch, name);
__ AddP(scratch, scratch, Operand(flags));
__ AndP(scratch, scratch,
Operand((kSecondaryTableSize - 1) << kCacheIndexShift));

// Probe the secondary table.
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch, extra,
extra2, extra3);
ProbeTable(this, masm, kSecondary, receiver, name, scratch, extra, extra2,
extra3);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -19,33 +19,23 @@ void StubCache::Initialize() {
Clear();
}

static Code::Flags CommonStubCacheChecks(Name* name, Map* map,
Code::Flags flags) {
flags = Code::RemoveHolderFromFlags(flags);
static void CommonStubCacheChecks(Name* name, Map* map, Code* code) {
// Validate that the name does not move on scavenge, and that we
// can use identity checks instead of structural equality checks.
DCHECK(!name->GetHeap()->InNewSpace(name));
DCHECK(name->IsUniqueName());

// The state bits are not important to the hash function because the stub
// cache only contains handlers. Make sure that the bits are the least
// significant so they will be the ones masked out.
DCHECK_EQ(Code::HANDLER, Code::ExtractKindFromFlags(flags));

// Make sure that the cache holder are not included in the hash.
DCHECK(Code::ExtractCacheHolderFromFlags(flags) == 0);

return flags;
DCHECK(name->HasHashCode());
if (code) {
DCHECK_EQ(Code::HANDLER, Code::ExtractKindFromFlags(code->flags()));
}
}

Code* StubCache::Set(Name* name, Map* map, Code* code) {
Code::Flags flags = CommonStubCacheChecks(name, map, code->flags());
CommonStubCacheChecks(name, map, code);

// Compute the primary entry.
int primary_offset = PrimaryOffset(name, flags, map);
int primary_offset = PrimaryOffset(name, map);
Entry* primary = entry(primary_, primary_offset);
Code* old_code = primary->value;

@@ -53,9 +43,8 @@ Code* StubCache::Set(Name* name, Map* map, Code* code) {
// secondary cache before overwriting it.
if (old_code != isolate_->builtins()->builtin(Builtins::kIllegal)) {
Map* old_map = primary->map;
Code::Flags old_flags = Code::RemoveHolderFromFlags(old_code->flags());
int seed = PrimaryOffset(primary->key, old_flags, old_map);
int secondary_offset = SecondaryOffset(primary->key, old_flags, seed);
int seed = PrimaryOffset(primary->key, old_map);
int secondary_offset = SecondaryOffset(primary->key, seed);
Entry* secondary = entry(secondary_, secondary_offset);
*secondary = *primary;
}

@@ -68,19 +57,16 @@ Code* StubCache::Set(Name* name, Map* map, Code* code) {
return code;
}

Code* StubCache::Get(Name* name, Map* map, Code::Flags flags) {
flags = CommonStubCacheChecks(name, map, flags);
int primary_offset = PrimaryOffset(name, flags, map);
Code* StubCache::Get(Name* name, Map* map) {
CommonStubCacheChecks(name, map, nullptr);
int primary_offset = PrimaryOffset(name, map);
Entry* primary = entry(primary_, primary_offset);
if (primary->key == name && primary->map == map &&
flags == Code::RemoveHolderFromFlags(primary->value->flags())) {
if (primary->key == name && primary->map == map) {
return primary->value;
}
int secondary_offset = SecondaryOffset(name, flags, primary_offset);
int secondary_offset = SecondaryOffset(name, primary_offset);
Entry* secondary = entry(secondary_, secondary_offset);
if (secondary->key == name && secondary->map == map &&
flags == Code::RemoveHolderFromFlags(secondary->value->flags())) {
if (secondary->key == name && secondary->map == map) {
return secondary->value;
}
return NULL;

@@ -103,7 +89,6 @@ void StubCache::Clear() {

void StubCache::CollectMatchingMaps(SmallMapList* types, Handle<Name> name,
Code::Flags flags,
Handle<Context> native_context,
Zone* zone) {
for (int i = 0; i < kPrimaryTableSize; i++) {

@@ -113,7 +98,7 @@ void StubCache::CollectMatchingMaps(SmallMapList* types, Handle<Name> name,
// with a primitive receiver.
if (map == NULL) continue;

int offset = PrimaryOffset(*name, flags, map);
int offset = PrimaryOffset(*name, map);
if (entry(primary_, offset) == &primary_[i] &&
TypeFeedbackOracle::IsRelevantFeedback(map, *native_context)) {
types->AddMapIfMissing(Handle<Map>(map), zone);

@@ -129,10 +114,10 @@ void StubCache::CollectMatchingMaps(SmallMapList* types, Handle<Name> name,
if (map == NULL) continue;

// Lookup in primary table and skip duplicates.
int primary_offset = PrimaryOffset(*name, flags, map);
int primary_offset = PrimaryOffset(*name, map);

// Lookup in secondary table and add matches.
int offset = SecondaryOffset(*name, flags, primary_offset);
int offset = SecondaryOffset(*name, primary_offset);
if (entry(secondary_, offset) == &secondary_[i] &&
TypeFeedbackOracle::IsRelevantFeedback(map, *native_context)) {
types->AddMapIfMissing(Handle<Map>(map), zone);
@@ -41,13 +41,12 @@ class StubCache {
void Initialize();
// Access cache for entry hash(name, map).
Code* Set(Name* name, Map* map, Code* code);
Code* Get(Name* name, Map* map, Code::Flags flags);
Code* Get(Name* name, Map* map);
// Clear the lookup table (@ mark compact collection).
void Clear();
// Collect all maps that match the name and flags.
// Collect all maps that match the name.
void CollectMatchingMaps(SmallMapList* types, Handle<Name> name,
Code::Flags flags, Handle<Context> native_context,
Zone* zone);
Handle<Context> native_context, Zone* zone);
// Generate code for probing the stub cache table.
// Arguments extra, extra2 and extra3 may be used to pass additional scratch
// registers. Set to no_reg if not needed.

@@ -97,13 +96,12 @@ class StubCache {
static const int kSecondaryTableBits = 9;
static const int kSecondaryTableSize = (1 << kSecondaryTableBits);

static int PrimaryOffsetForTesting(Name* name, Code::Flags flags, Map* map) {
return PrimaryOffset(name, flags, map);
static int PrimaryOffsetForTesting(Name* name, Map* map) {
return PrimaryOffset(name, map);
}

static int SecondaryOffsetForTesting(Name* name, Code::Flags flags,
int seed) {
return SecondaryOffset(name, flags, seed);
static int SecondaryOffsetForTesting(Name* name, int seed) {
return SecondaryOffset(name, seed);
}

// The constructor is made public only for the purposes of testing.

@@ -120,7 +118,7 @@ class StubCache {
// Hash algorithm for the primary table. This algorithm is replicated in
// assembler for every architecture. Returns an index into the table that
// is scaled by 1 << kCacheIndexShift.
static int PrimaryOffset(Name* name, Code::Flags flags, Map* map) {
static int PrimaryOffset(Name* name, Map* map) {
STATIC_ASSERT(kCacheIndexShift == Name::kHashShift);
// Compute the hash of the name (use entire hash field).
DCHECK(name->HasHashCode());

@@ -130,27 +128,19 @@ class StubCache {
// 4Gb (and not at all if it isn't).
uint32_t map_low32bits =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map));
// We always set the in_loop bit to zero when generating the lookup code
// so do it here too so the hash codes match.
uint32_t iflags =
(static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
// Base the offset on a simple combination of name, flags, and map.
uint32_t key = (map_low32bits + field) ^ iflags;
// Base the offset on a simple combination of name and map.
uint32_t key = map_low32bits + field;
return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// Hash algorithm for the secondary table. This algorithm is replicated in
// assembler for every architecture. Returns an index into the table that
// is scaled by 1 << kCacheIndexShift.
static int SecondaryOffset(Name* name, Code::Flags flags, int seed) {
static int SecondaryOffset(Name* name, int seed) {
// Use the seed from the primary cache in the secondary cache.
uint32_t name_low32bits =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name));
// We always set the in_loop bit to zero when generating the lookup code
// so do it here too so the hash codes match.
uint32_t iflags =
(static_cast<uint32_t>(flags) & ~Code::kFlagsNotUsedInLookup);
uint32_t key = (seed - name_low32bits) + iflags;
uint32_t key = (seed - name_low32bits);
return key & ((kSecondaryTableSize - 1) << kCacheIndexShift);
}
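As a side note on why this hunk matters: with the old primary hash the bucket chosen for a (name, map) pair shifted whenever the handler's flag bits changed, so a different Code::flags layout could silently change collision patterns in the megamorphic cache; with the new hash it cannot. A small self-contained sketch under assumed constants (same caveats as the sketch near the top of this page) contrasts the two formulas from this hunk:

#include <cstdint>
#include <cstdio>

constexpr uint32_t kCacheIndexShift = 2;         // assumed, equals Name::kHashShift
constexpr uint32_t kPrimaryTableSize = 1 << 11;  // assumed table size

// Old scheme: flags (minus the bits not used in lookup) were XORed in.
uint32_t OldPrimaryOffset(uint32_t field, uint32_t map_low32bits, uint32_t iflags) {
  uint32_t key = (map_low32bits + field) ^ iflags;
  return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

// New scheme: flags do not participate at all.
uint32_t NewPrimaryOffset(uint32_t field, uint32_t map_low32bits) {
  uint32_t key = map_low32bits + field;
  return key & ((kPrimaryTableSize - 1) << kCacheIndexShift);
}

int main() {
  uint32_t field = 0x13572468, map_bits = 0x08a0c040;  // hypothetical values
  // Two hypothetical flag encodings for the same logical handler kind give
  // different buckets under the old scheme, the same bucket under the new one.
  std::printf("old, flags layout A: %u\n", OldPrimaryOffset(field, map_bits, 0x0120));
  std::printf("old, flags layout B: %u\n", OldPrimaryOffset(field, map_bits, 0x0410));
  std::printf("new (flags ignored): %u\n", NewPrimaryOffset(field, map_bits));
  return 0;
}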
@@ -15,8 +15,7 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Flags flags, StubCache::Table table,
Register receiver, Register name,
StubCache::Table table, Register receiver, Register name,
// The offset is scaled by 4, based on
// kCacheIndexShift, which is two bits
Register offset) {

@@ -57,13 +56,15 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ LoadAddress(kScratchRegister, value_offset);
__ movp(kScratchRegister, Operand(kScratchRegister, offset, scale_factor, 0));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
__ andp(offset, Immediate(~Code::kFlagsNotUsedInLookup));
__ cmpl(offset, Immediate(flags));
__ j(not_equal, &miss);
__ Check(equal, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -81,9 +82,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;
USE(extra); // The register extra is not used on the X64 platform.
USE(extra2); // The register extra2 is not used on the X64 platform.

@@ -129,25 +127,22 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
__ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
// Use only the low 32 bits of the map pointer.
__ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ xorp(scratch, Immediate(flags));
// We mask out the last two bits because they are not part of the hash and
// they are always 01 for maps. Also in the two 'and' instructions below.
__ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));

// Probe the primary table.
ProbeTable(this, masm, flags, kPrimary, receiver, name, scratch);
ProbeTable(this, masm, kPrimary, receiver, name, scratch);

// Primary miss: Compute hash for secondary probe.
__ movl(scratch, FieldOperand(name, Name::kHashFieldOffset));
__ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ xorp(scratch, Immediate(flags));
__ andp(scratch, Immediate((kPrimaryTableSize - 1) << kCacheIndexShift));
__ subl(scratch, name);
__ addl(scratch, Immediate(flags));
__ andp(scratch, Immediate((kSecondaryTableSize - 1) << kCacheIndexShift));

// Probe the secondary table.
ProbeTable(this, masm, flags, kSecondary, receiver, name, scratch);
ProbeTable(this, masm, kSecondary, receiver, name, scratch);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -15,7 +15,6 @@ namespace internal {
#define __ ACCESS_MASM(masm)

static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
Code::Kind ic_kind, Code::Flags flags,
StubCache::Table table, Register name, Register receiver,
// Number of the cache entry pointer-size scaled.
Register offset, Register extra) {

@@ -26,6 +25,7 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
ExternalReference::virtual_handler_register(masm->isolate());

Label miss;
Code::Kind ic_kind = stub_cache->ic_kind();
bool is_vector_store =
IC::ICUseVector(ic_kind) &&
(ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC);

@@ -46,13 +46,15 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
__ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ j(not_equal, &miss);

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
__ and_(offset, ~Code::kFlagsNotUsedInLookup);
__ cmp(offset, flags);
__ j(not_equal, &miss);
__ Check(equal, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -101,13 +103,15 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
// Get the code entry from the cache.
__ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

#ifdef DEBUG
// Check that the flags match what we're looking for.
Code::Flags flags = Code::RemoveHolderFromFlags(
Code::ComputeHandlerFlags(stub_cache->ic_kind()));
__ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
__ and_(offset, ~Code::kFlagsNotUsedInLookup);
__ cmp(offset, flags);
__ j(not_equal, &miss);
__ Check(equal, kUnexpectedValue);

#ifdef DEBUG
if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
__ jmp(&miss);
} else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {

@@ -144,9 +148,6 @@ static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm,
void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
Register name, Register scratch, Register extra,
Register extra2, Register extra3) {
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind_));

Label miss;

// Assert that code is valid. The multiplying code relies on the entry size

@@ -177,7 +178,6 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
// Get the map of the receiver and compute the hash.
__ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
__ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(offset, flags);
// We mask out the last two bits because they are not part of the hash and
// they are always 01 for maps. Also in the two 'and' instructions below.
__ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);

@@ -186,21 +186,17 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver,
DCHECK(kCacheIndexShift == kPointerSizeLog2);

// Probe the primary table.
ProbeTable(this, masm, ic_kind_, flags, kPrimary, name, receiver, offset,
extra);
ProbeTable(this, masm, kPrimary, name, receiver, offset, extra);

// Primary miss: Compute hash for secondary probe.
__ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
__ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
__ xor_(offset, flags);
__ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift);
__ sub(offset, name);
__ add(offset, Immediate(flags));
__ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift);

// Probe the secondary table.
ProbeTable(this, masm, ic_kind_, flags, kSecondary, name, receiver, offset,
extra);
ProbeTable(this, masm, kSecondary, name, receiver, offset, extra);

// Cache miss: Fall-through and let caller handle the miss by
// entering the runtime system.
@@ -280,8 +280,7 @@ void TypeFeedbackOracle::PropertyReceiverTypes(FeedbackVectorSlot slot,
receiver_types->Clear();
if (!slot.IsInvalid()) {
LoadICNexus nexus(feedback_vector_, slot);
Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC);
CollectReceiverTypes(isolate()->load_stub_cache(), &nexus, name, flags,
CollectReceiverTypes(isolate()->load_stub_cache(), &nexus, name,
receiver_types);
}
}

@@ -307,8 +306,7 @@ void TypeFeedbackOracle::AssignmentReceiverTypes(FeedbackVectorSlot slot,
Handle<Name> name,
SmallMapList* receiver_types) {
receiver_types->Clear();
Code::Flags flags = Code::ComputeHandlerFlags(Code::STORE_IC);
CollectReceiverTypes(isolate()->store_stub_cache(), slot, name, flags,
CollectReceiverTypes(isolate()->store_stub_cache(), slot, name,
receiver_types);
}

@@ -331,22 +329,19 @@ void TypeFeedbackOracle::CountReceiverTypes(FeedbackVectorSlot slot,
void TypeFeedbackOracle::CollectReceiverTypes(StubCache* stub_cache,
FeedbackVectorSlot slot,
Handle<Name> name,
Code::Flags flags,
SmallMapList* types) {
StoreICNexus nexus(feedback_vector_, slot);
CollectReceiverTypes(stub_cache, &nexus, name, flags, types);
CollectReceiverTypes(stub_cache, &nexus, name, types);
}

void TypeFeedbackOracle::CollectReceiverTypes(StubCache* stub_cache,
FeedbackNexus* nexus,
Handle<Name> name,
Code::Flags flags,
SmallMapList* types) {
if (FLAG_collect_megamorphic_maps_from_stub_cache &&
nexus->ic_state() == MEGAMORPHIC) {
types->Reserve(4, zone());
stub_cache->CollectMatchingMaps(types, name, flags, native_context_,
zone());
stub_cache->CollectMatchingMaps(types, name, native_context_, zone());
} else {
CollectReceiverTypes(nexus, types);
}
@@ -97,11 +97,9 @@ class TypeFeedbackOracle: public ZoneObject {

private:
void CollectReceiverTypes(StubCache* stub_cache, FeedbackVectorSlot slot,
Handle<Name> name, Code::Flags flags,
SmallMapList* types);
Handle<Name> name, SmallMapList* types);
void CollectReceiverTypes(StubCache* stub_cache, FeedbackNexus* nexus,
Handle<Name> name, Code::Flags flags,
SmallMapList* types);
Handle<Name> name, SmallMapList* types);

// Returns true if there is at least one string map and if
// all maps are string maps.
@@ -1109,18 +1109,16 @@ void TestStubCacheOffsetCalculation(StubCache::Table table,
const int kNumParams = 2;
CodeStubAssemblerTester m(isolate, kNumParams);

Code::Flags code_flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(handler_kind));
{
Node* name = m.Parameter(0);
Node* map = m.Parameter(1);
Node* primary_offset = m.StubCachePrimaryOffset(name, code_flags, map);
Node* primary_offset = m.StubCachePrimaryOffset(name, map);
Node* result;
if (table == StubCache::kPrimary) {
result = primary_offset;
} else {
CHECK_EQ(StubCache::kSecondary, table);
result = m.StubCacheSecondaryOffset(name, code_flags, primary_offset);
result = m.StubCacheSecondaryOffset(name, primary_offset);
}
m.Return(m.SmiFromWord32(result));
}

@@ -1163,13 +1161,12 @@ void TestStubCacheOffsetCalculation(StubCache::Table table,

int expected_result;
{
int primary_offset =
StubCache::PrimaryOffsetForTesting(*name, code_flags, *map);
int primary_offset = StubCache::PrimaryOffsetForTesting(*name, *map);
if (table == StubCache::kPrimary) {
expected_result = primary_offset;
} else {
expected_result = StubCache::SecondaryOffsetForTesting(
*name, code_flags, primary_offset);
expected_result =
StubCache::SecondaryOffsetForTesting(*name, primary_offset);
}
}
Handle<Object> result = ft.Call(name, map).ToHandleChecked();

@@ -1217,9 +1214,6 @@ TEST(TryProbeStubCache) {
CodeStubAssemblerTester m(isolate, kNumParams);

Code::Kind ic_kind = Code::LOAD_IC;
Code::Flags flags_to_query =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind));

StubCache stub_cache(isolate, ic_kind);
stub_cache.Clear();

@@ -1299,25 +1293,8 @@ TEST(TryProbeStubCache) {

// Generate some number of handlers.
for (int i = 0; i < 30; i++) {
Code::Kind code_kind;
switch (rand_gen.NextInt(4)) {
case 0:
code_kind = Code::LOAD_IC;
break;
case 1:
code_kind = Code::KEYED_LOAD_IC;
break;
case 2:
code_kind = Code::STORE_IC;
break;
case 3:
code_kind = Code::KEYED_STORE_IC;
break;
default:
UNREACHABLE();
}
Code::Flags flags =
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(code_kind));
Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(ic_kind));
handlers.push_back(CreateCodeWithFlags(flags));
}

@@ -1342,7 +1319,7 @@ TEST(TryProbeStubCache) {
int index = rand_gen.NextInt();
Handle<Name> name = names[index % names.size()];
Handle<JSObject> receiver = receivers[index % receivers.size()];
Code* handler = stub_cache.Get(*name, receiver->map(), flags_to_query);
Code* handler = stub_cache.Get(*name, receiver->map());
if (handler == nullptr) {
queried_non_existing = true;
} else {

@@ -1358,7 +1335,7 @@ TEST(TryProbeStubCache) {
int index2 = rand_gen.NextInt();
Handle<Name> name = names[index1 % names.size()];
Handle<JSObject> receiver = receivers[index2 % receivers.size()];
Code* handler = stub_cache.Get(*name, receiver->map(), flags_to_query);
Code* handler = stub_cache.Get(*name, receiver->map());
if (handler == nullptr) {
queried_non_existing = true;
} else {