On a call to Array(), we used to patch the CallIC to a specialized stub (CallIC_ArrayStub). This CL makes do with a single dispatcher that inlines the special handling for the Array() call case, loading the allocation site found in the feedback vector and calling the array constructor stub appropriately.

BUG=

Review URL: https://codereview.chromium.org/1332563003

Cr-Commit-Position: refs/heads/master@{#30649}
mvstanton authored 2015-09-09 01:05:25 -07:00; committed by Commit bot
parent b37907ff7f
commit ba7b641398
12 changed files with 118 additions and 343 deletions
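For orientation before the per-architecture diffs: where the old code patched the CallIC target to a dedicated CallIC_ArrayStub, the single CallICStub dispatcher now inspects the feedback slot itself and, on finding an AllocationSite, falls into the inlined HandleArrayCase, which verifies that the callee really is the Array function, bumps the call count, and tail-calls ArrayConstructorStub. Below is a minimal, self-contained C++ model of that dispatch, not V8 code; the Feedback enum, CallSite struct, and printf placeholders are invented for illustration, and the real stub does more work in the uninitialized case than shown here.

// Hypothetical model of the post-CL CallIC dispatch; the real logic is the
// generated-assembly CallICStub::Generate/HandleArrayCase shown in the diffs.
#include <cstdio>

enum class Feedback { kUninitialized, kMegamorphic, kAllocationSite, kMonomorphicFunction };

struct CallSite {
  Feedback feedback = Feedback::kUninitialized;
  int call_count = 0;
};

// Stands in for CallICStub::HandleArrayCase: only reached when the slot holds
// an AllocationSite; falls back to the miss path unless the callee is Array().
bool HandleArrayCase(CallSite& site, bool callee_is_array_function) {
  if (!callee_is_array_function) return false;  // -> miss
  site.call_count++;
  std::printf("tail-call ArrayConstructorStub(argc)\n");
  return true;
}

// Stands in for the single dispatcher in CallICStub::Generate after this CL.
void Dispatch(CallSite& site, bool callee_is_array_function) {
  switch (site.feedback) {
    case Feedback::kMonomorphicFunction:  // WeakCell holding the callee
      site.call_count++;
      std::printf("fast monomorphic call\n");
      return;
    case Feedback::kMegamorphic:          // megamorphic sentinel
      std::printf("generic Call builtin\n");
      return;
    case Feedback::kAllocationSite:       // new in this CL: handled inline, no custom stub
      if (HandleArrayCase(site, callee_is_array_function)) return;
      std::printf("Runtime::kCallIC_Miss\n");
      return;
    case Feedback::kUninitialized:
      // The real stub also installs monomorphic feedback inline here.
      std::printf("initialize slot, then call\n");
      return;
  }
}

int main() {
  CallSite site;
  site.feedback = Feedback::kAllocationSite;
  Dispatch(site, /*callee_is_array_function=*/true);
}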

View File

@ -2556,26 +2556,16 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}
void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// r1 - function
// r3 - slot id
// r2 - vector
Label miss;
int argc = arg_count();
ParameterCount actual(argc);
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
__ cmp(r1, r4);
__ b(ne, &miss);
// r4 - allocation site (loaded from vector[slot])
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, miss);
__ mov(r0, Operand(arg_count()));
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
// Verify that r4 contains an AllocationSite
__ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ b(ne, &miss);
// Increment the call count for monomorphic function calls.
__ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
@ -2588,13 +2578,6 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
__ mov(r3, r1);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
__ bind(&miss);
GenerateMiss(masm);
// The slow case, we need this no matter what to complete a call after a miss.
__ mov(r0, Operand(arg_count()));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
@ -2669,11 +2652,21 @@ void CallICStub::Generate(MacroAssembler* masm) {
}
__ bind(&extra_checks_or_miss);
Label uninitialized, miss;
Label uninitialized, miss, not_allocation_site;
__ CompareRoot(r4, Heap::kmegamorphic_symbolRootIndex);
__ b(eq, &slow_start);
// Verify that r4 contains an AllocationSite
__ ldr(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
__ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex);
__ b(ne, &not_allocation_site);
// We have an allocation site.
HandleArrayCase(masm, &miss);
__ bind(&not_allocation_site);
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
@ -2764,10 +2757,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(r1, r2, r3);
// Call the entry.
Runtime::FunctionId id = GetICState() == DEFAULT
? Runtime::kCallIC_Miss
: Runtime::kCallIC_Customization_Miss;
__ CallRuntime(id, 3);
__ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to edi and exit the internal frame.
__ mov(r1, r0);
@ -4276,13 +4266,6 @@ void CallICTrampolineStub::Generate(MacroAssembler* masm) {
}
void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
EmitLoadTypeFeedbackVector(masm, r2);
CallIC_ArrayStub stub(isolate(), state());
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }

View File

@ -2933,31 +2933,23 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}
void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// x1 - function
// x3 - slot id
// x2 - vector
Label miss;
// x4 - allocation site (loaded from vector[slot])
Register function = x1;
Register feedback_vector = x2;
Register index = x3;
Register scratch = x4;
Register allocation_site = x4;
Register scratch = x5;
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch);
__ Cmp(function, scratch);
__ B(ne, &miss);
__ B(ne, miss);
__ Mov(x0, Operand(arg_count()));
__ Add(scratch, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ Ldr(scratch, FieldMemOperand(scratch, FixedArray::kHeaderSize));
// Verify that scratch contains an AllocationSite
Register map = x5;
__ Ldr(map, FieldMemOperand(scratch, HeapObject::kMapOffset));
__ JumpIfNotRoot(map, Heap::kAllocationSiteMapRootIndex, &miss);
// Increment the call count for monomorphic function calls.
__ Add(feedback_vector, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
@ -2967,19 +2959,13 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
__ Add(index, index, Operand(Smi::FromInt(CallICNexus::kCallCountIncrement)));
__ Str(index, FieldMemOperand(feedback_vector, 0));
Register allocation_site = feedback_vector;
Register original_constructor = index;
__ Mov(allocation_site, scratch);
__ Mov(original_constructor, function);
// Set up arguments for the array constructor stub.
Register allocation_site_arg = feedback_vector;
Register original_constructor_arg = index;
__ Mov(allocation_site_arg, allocation_site);
__ Mov(original_constructor_arg, function);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
__ bind(&miss);
GenerateMiss(masm);
// The slow case, we need this no matter what to complete a call after a miss.
__ Mov(x0, arg_count());
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
@ -3067,10 +3053,17 @@ void CallICStub::Generate(MacroAssembler* masm) {
}
__ bind(&extra_checks_or_miss);
Label uninitialized, miss;
Label uninitialized, miss, not_allocation_site;
__ JumpIfRoot(x4, Heap::kmegamorphic_symbolRootIndex, &slow_start);
__ Ldr(x5, FieldMemOperand(x4, HeapObject::kMapOffset));
__ JumpIfNotRoot(x5, Heap::kAllocationSiteMapRootIndex, &not_allocation_site);
HandleArrayCase(masm, &miss);
__ bind(&not_allocation_site);
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
@ -3161,10 +3154,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(x1, x2, x3);
// Call the entry.
Runtime::FunctionId id = GetICState() == DEFAULT
? Runtime::kCallIC_Miss
: Runtime::kCallIC_Customization_Miss;
__ CallRuntime(id, 3);
__ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to edi and exit the internal frame.
__ Mov(x1, x0);
@ -4406,13 +4396,6 @@ void CallICTrampolineStub::Generate(MacroAssembler* masm) {
}
void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
EmitLoadTypeFeedbackVector(masm, x2);
CallIC_ArrayStub stub(isolate(), state());
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }

View File

@ -584,11 +584,6 @@ Type* CompareNilICStub::GetInputType(Zone* zone, Handle<Map> map) {
}
void CallIC_ArrayStub::PrintState(std::ostream& os) const { // NOLINT
os << state() << " (Array)";
}
void CallICStub::PrintState(std::ostream& os) const { // NOLINT
os << state();
}

View File

@ -29,7 +29,6 @@ namespace internal {
V(CallConstruct) \
V(CallFunction) \
V(CallIC) \
V(CallIC_Array) \
V(CEntry) \
V(CompareIC) \
V(DoubleToI) \
@ -40,7 +39,6 @@ namespace internal {
V(KeyedLoadICTrampoline) \
V(LoadICTrampoline) \
V(CallICTrampoline) \
V(CallIC_ArrayTrampoline) \
V(LoadIndexedInterceptor) \
V(LoadIndexedString) \
V(MathPow) \
@ -989,6 +987,7 @@ class CallICStub: public PlatformCodeStub {
// Code generation helpers.
void GenerateMiss(MacroAssembler* masm);
void HandleArrayCase(MacroAssembler* masm, Label* miss);
private:
void PrintState(std::ostream& os) const override; // NOLINT
@ -998,20 +997,6 @@ class CallICStub: public PlatformCodeStub {
};
class CallIC_ArrayStub: public CallICStub {
public:
CallIC_ArrayStub(Isolate* isolate, const CallICState& state_in)
: CallICStub(isolate, state_in) {}
InlineCacheState GetICState() const final { return MONOMORPHIC; }
private:
void PrintState(std::ostream& os) const override; // NOLINT
DEFINE_PLATFORM_CODE_STUB(CallIC_Array, CallICStub);
};
// TODO(verwaest): Translate to hydrogen code stub.
class FunctionPrototypeStub : public PlatformCodeStub {
public:
@ -2339,16 +2324,6 @@ class CallICTrampolineStub : public PlatformCodeStub {
};
class CallIC_ArrayTrampolineStub : public CallICTrampolineStub {
public:
CallIC_ArrayTrampolineStub(Isolate* isolate, const CallICState& state)
: CallICTrampolineStub(isolate, state) {}
private:
DEFINE_PLATFORM_CODE_STUB(CallIC_ArrayTrampoline, CallICTrampolineStub);
};
class LoadICStub : public PlatformCodeStub {
public:
explicit LoadICStub(Isolate* isolate, const LoadICState& state)

View File

@ -2209,28 +2209,19 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}
void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// edi - function
// edx - slot id
// ebx - vector
Label miss;
int argc = arg_count();
ParameterCount actual(argc);
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
__ j(not_equal, &miss);
__ j(not_equal, miss);
__ mov(eax, arg_count());
// Reload ecx.
__ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
// Verify that ecx contains an AllocationSite
Factory* factory = masm->isolate()->factory();
__ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
factory->allocation_site_map());
__ j(not_equal, &miss);
// Increment the call count for monomorphic function calls.
__ add(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize + kPointerSize),
@ -2241,12 +2232,7 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
__ bind(&miss);
GenerateMiss(masm);
// The slow case, we need this no matter what to complete a call after a miss.
__ Set(eax, arg_count());
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
// Unreachable.
}
@ -2321,11 +2307,21 @@ void CallICStub::Generate(MacroAssembler* masm) {
}
__ bind(&extra_checks_or_miss);
Label uninitialized, miss;
Label uninitialized, miss, not_allocation_site;
__ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
__ j(equal, &slow_start);
// Check if we have an allocation site.
__ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &not_allocation_site);
// We have an allocation site.
HandleArrayCase(masm, &miss);
__ bind(&not_allocation_site);
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
@ -2415,10 +2411,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ push(edx);
// Call the entry.
Runtime::FunctionId id = GetICState() == DEFAULT
? Runtime::kCallIC_Miss
: Runtime::kCallIC_Customization_Miss;
__ CallRuntime(id, 3);
__ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to edi and exit the internal frame.
__ mov(edi, eax);
@ -4892,13 +4885,6 @@ void CallICTrampolineStub::Generate(MacroAssembler* masm) {
}
void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
EmitLoadTypeFeedbackVector(masm, ebx);
CallIC_ArrayStub stub(isolate(), state());
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
ProfileEntryHookStub stub(masm->isolate());

View File

@ -60,6 +60,7 @@ void IC::SetTargetAtAddress(Address address, Code* target,
DCHECK(!target->is_inline_cache_stub() ||
(target->kind() != Code::LOAD_IC &&
target->kind() != Code::KEYED_LOAD_IC &&
target->kind() != Code::CALL_IC &&
(!FLAG_vector_stores || (target->kind() != Code::STORE_IC &&
target->kind() != Code::KEYED_STORE_IC))));

View File

@ -2297,73 +2297,7 @@ MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object,
}
bool CallIC::DoCustomHandler(Handle<Object> function,
const CallICState& callic_state) {
DCHECK(FLAG_use_ic && function->IsJSFunction());
// Are we the array function?
Handle<JSFunction> array_function =
Handle<JSFunction>(isolate()->native_context()->array_function());
if (array_function.is_identical_to(Handle<JSFunction>::cast(function))) {
// Alter the slot.
CallICNexus* nexus = casted_nexus<CallICNexus>();
nexus->ConfigureMonomorphicArray();
// Vector-based ICs have a different calling convention in optimized code
// than full code so the correct stub has to be chosen.
if (AddressIsOptimizedCode()) {
CallIC_ArrayStub stub(isolate(), callic_state);
set_target(*stub.GetCode());
} else {
CallIC_ArrayTrampolineStub stub(isolate(), callic_state);
set_target(*stub.GetCode());
}
Handle<String> name;
if (array_function->shared()->name()->IsString()) {
name = Handle<String>(String::cast(array_function->shared()->name()),
isolate());
}
TRACE_IC("CallIC", name);
OnTypeFeedbackChanged(isolate(), get_host(), nexus->vector(), state(),
MONOMORPHIC);
return true;
}
return false;
}
void CallIC::PatchMegamorphic(Handle<Object> function) {
CallICState callic_state(target()->extra_ic_state());
// We are going generic.
CallICNexus* nexus = casted_nexus<CallICNexus>();
nexus->ConfigureMegamorphic();
// Vector-based ICs have a different calling convention in optimized code
// than full code so the correct stub has to be chosen.
if (AddressIsOptimizedCode()) {
CallICStub stub(isolate(), callic_state);
set_target(*stub.GetCode());
} else {
CallICTrampolineStub stub(isolate(), callic_state);
set_target(*stub.GetCode());
}
Handle<Object> name = isolate()->factory()->empty_string();
if (function->IsJSFunction()) {
Handle<JSFunction> js_function = Handle<JSFunction>::cast(function);
name = handle(js_function->shared()->name(), isolate());
}
TRACE_IC("CallIC", name);
OnTypeFeedbackChanged(isolate(), get_host(), nexus->vector(), state(),
GENERIC);
}
void CallIC::HandleMiss(Handle<Object> function) {
CallICState callic_state(target()->extra_ic_state());
Handle<Object> name = isolate()->factory()->empty_string();
CallICNexus* nexus = casted_nexus<CallICNexus>();
Object* feedback = nexus->GetFeedback();
@ -2371,25 +2305,22 @@ void CallIC::HandleMiss(Handle<Object> function) {
// Hand-coded MISS handling is easier if CallIC slots don't contain smis.
DCHECK(!feedback->IsSmi());
if (feedback->IsWeakCell() || !function->IsJSFunction()) {
if (feedback->IsWeakCell() || !function->IsJSFunction() ||
feedback->IsAllocationSite()) {
// We are going generic.
nexus->ConfigureMegamorphic();
} else {
// The feedback is either uninitialized or an allocation site.
// It might be an allocation site because if we re-compile the full code
// to add deoptimization support, we call with the default call-ic, and
// merely need to patch the target to match the feedback.
// TODO(mvstanton): the better approach is to dispense with patching
// altogether, which is in progress.
DCHECK(feedback == *TypeFeedbackVector::UninitializedSentinel(isolate()) ||
feedback->IsAllocationSite());
DCHECK(feedback == *TypeFeedbackVector::UninitializedSentinel(isolate()));
Handle<JSFunction> js_function = Handle<JSFunction>::cast(function);
// Do we want to install a custom handler?
if (FLAG_use_ic && DoCustomHandler(function, callic_state)) {
return;
Handle<JSFunction> array_function =
Handle<JSFunction>(isolate()->native_context()->array_function());
if (array_function.is_identical_to(js_function)) {
// Alter the slot.
nexus->ConfigureMonomorphicArray();
} else {
nexus->ConfigureMonomorphic(js_function);
}
nexus->ConfigureMonomorphic(Handle<JSFunction>::cast(function));
}
if (function->IsJSFunction()) {
@ -2426,22 +2357,6 @@ RUNTIME_FUNCTION(Runtime_CallIC_Miss) {
}
RUNTIME_FUNCTION(Runtime_CallIC_Customization_Miss) {
TimerEventScope<TimerEventIcMiss> timer(isolate);
HandleScope scope(isolate);
DCHECK(args.length() == 3);
Handle<Object> function = args.at<Object>(0);
Handle<TypeFeedbackVector> vector = args.at<TypeFeedbackVector>(1);
Handle<Smi> slot = args.at<Smi>(2);
FeedbackVectorICSlot vector_slot = vector->ToICSlot(slot->value());
CallICNexus nexus(vector, vector_slot);
// A miss on a custom call ic always results in going megamorphic.
CallIC ic(isolate, &nexus);
ic.PatchMegamorphic(function);
return *function;
}
// Used from ic-<arch>.cc.
RUNTIME_FUNCTION(Runtime_LoadIC_Miss) {
TimerEventScope<TimerEventIcMiss> timer(isolate);
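On the runtime side (the ic.cc hunks above), HandleMiss absorbs what DoCustomHandler and PatchMegamorphic used to do: an AllocationSite or WeakCell already in the slot, or a non-JSFunction callee, goes megamorphic, while an uninitialized slot becomes monomorphic, using ConfigureMonomorphicArray when the callee is the native-context array function. A rough, self-contained sketch of that decision, with hypothetical stand-in types rather than the real V8 classes:

#include <cassert>

// Stand-ins for what the feedback slot can hold; in V8 these are a sentinel,
// a WeakCell, an AllocationSite, or the megamorphic symbol.
enum class Slot { kUninitialized, kWeakCellFunction, kAllocationSite, kMegamorphic };

struct Nexus { Slot slot = Slot::kUninitialized; };

// Mirrors the simplified CallIC::HandleMiss after this CL.
void HandleMiss(Nexus& nexus, bool callee_is_js_function, bool callee_is_array_function) {
  if (nexus.slot == Slot::kWeakCellFunction || !callee_is_js_function ||
      nexus.slot == Slot::kAllocationSite) {
    nexus.slot = Slot::kMegamorphic;  // any repeat miss goes generic; no Customization_Miss
  } else {
    // Uninitialized: install monomorphic feedback. The Array() case stores an
    // AllocationSite (ConfigureMonomorphicArray) instead of a WeakCell.
    nexus.slot = callee_is_array_function ? Slot::kAllocationSite : Slot::kWeakCellFunction;
  }
}

int main() {
  Nexus n;
  HandleMiss(n, /*callee_is_js_function=*/true, /*callee_is_array_function=*/true);
  assert(n.slot == Slot::kAllocationSite);  // an Array() call site holds an allocation site
  HandleMiss(n, true, true);
  assert(n.slot == Slot::kMegamorphic);     // a second miss on the same slot goes generic
  return 0;
}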

View File

@ -285,14 +285,8 @@ class CallIC : public IC {
DCHECK(nexus != NULL);
}
void PatchMegamorphic(Handle<Object> function);
void HandleMiss(Handle<Object> function);
// Returns true if a custom handler was installed.
bool DoCustomHandler(Handle<Object> function,
const CallICState& callic_state);
// Code generator routines.
static Handle<Code> initialize_stub(Isolate* isolate, int argc,
CallICState::CallType call_type);

View File

@ -2687,24 +2687,15 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}
void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// a1 - function
// a3 - slot id
// a2 - vector
Label miss;
// t0 - loaded from vector[slot]
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, at);
__ Branch(&miss, ne, a1, Operand(at));
__ Branch(miss, ne, a1, Operand(at));
__ li(a0, Operand(arg_count()));
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(at, a2, Operand(at));
__ lw(t0, FieldMemOperand(at, FixedArray::kHeaderSize));
// Verify that t0 contains an AllocationSite
__ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&miss, ne, t1, Operand(at));
// Increment the call count for monomorphic function calls.
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
@ -2717,13 +2708,6 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
__ mov(a3, a1);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
__ bind(&miss);
GenerateMiss(masm);
// The slow case, we need this no matter what to complete a call after a miss.
__ li(a0, Operand(arg_count()));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
@ -2798,11 +2782,20 @@ void CallICStub::Generate(MacroAssembler* masm) {
}
__ bind(&extra_checks_or_miss);
Label uninitialized, miss;
Label uninitialized, miss, not_allocation_site;
__ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
__ Branch(&slow_start, eq, t0, Operand(at));
// Verify that t0 contains an AllocationSite
__ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&not_allocation_site, ne, t1, Operand(at));
HandleArrayCase(masm, &miss);
__ bind(&not_allocation_site);
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
@ -2894,10 +2887,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(a1, a2, a3);
// Call the entry.
Runtime::FunctionId id = GetICState() == DEFAULT
? Runtime::kCallIC_Miss
: Runtime::kCallIC_Customization_Miss;
__ CallRuntime(id, 3);
__ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to a1 and exit the internal frame.
__ mov(a1, v0);
@ -4499,13 +4489,6 @@ void CallICTrampolineStub::Generate(MacroAssembler* masm) {
}
void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
EmitLoadTypeFeedbackVector(masm, a2);
CallIC_ArrayStub stub(isolate(), state());
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }

View File

@ -2763,24 +2763,13 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}
void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// a1 - function
// a3 - slot id
// a2 - vector
Label miss;
// a4 - allocation site (loaded from vector[slot])
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, at);
__ Branch(&miss, ne, a1, Operand(at));
__ li(a0, Operand(arg_count()));
__ dsrl(at, a3, 32 - kPointerSizeLog2);
__ Daddu(at, a2, Operand(at));
__ ld(a4, FieldMemOperand(at, FixedArray::kHeaderSize));
// Verify that a4 contains an AllocationSite
__ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&miss, ne, a5, Operand(at));
__ Branch(miss, ne, a1, Operand(at));
// Increment the call count for monomorphic function calls.
__ dsrl(t0, a3, 32 - kPointerSizeLog2);
@ -2793,13 +2782,6 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
__ mov(a3, a1);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
__ bind(&miss);
GenerateMiss(masm);
// The slow case, we need this no matter what to complete a call after a miss.
__ li(a0, Operand(arg_count()));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
@ -2874,11 +2856,20 @@ void CallICStub::Generate(MacroAssembler* masm) {
}
__ bind(&extra_checks_or_miss);
Label uninitialized, miss;
Label uninitialized, miss, not_allocation_site;
__ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
__ Branch(&slow_start, eq, a4, Operand(at));
// Verify that a4 contains an AllocationSite
__ ld(a5, FieldMemOperand(a4, HeapObject::kMapOffset));
__ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
__ Branch(&not_allocation_site, ne, a5, Operand(at));
HandleArrayCase(masm, &miss);
__ bind(&not_allocation_site);
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
@ -2970,10 +2961,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(a1, a2, a3);
// Call the entry.
Runtime::FunctionId id = GetICState() == DEFAULT
? Runtime::kCallIC_Miss //
: Runtime::kCallIC_Customization_Miss;
__ CallRuntime(id, 3);
__ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to a1 and exit the internal frame.
__ mov(a1, v0);
@ -4531,13 +4519,6 @@ void CallICTrampolineStub::Generate(MacroAssembler* masm) {
}
void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
EmitLoadTypeFeedbackVector(masm, a2);
CallIC_ArrayStub stub(isolate(), state());
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }

View File

@ -1022,7 +1022,6 @@ namespace internal {
F(LoadIC_Miss, 3, 1) \
F(KeyedLoadIC_Miss, 3, 1) \
F(CallIC_Miss, 3, 1) \
F(CallIC_Customization_Miss, 3, 1) \
F(StoreIC_Miss, 3, 1) \
F(StoreIC_Slow, 3, 1) \
F(KeyedStoreIC_Miss, 3, 1) \

View File

@ -2068,46 +2068,26 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}
void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// rdi - function
// rdx - slot id (as integer)
// rdx - slot id
// rbx - vector
Label miss;
int argc = arg_count();
ParameterCount actual(argc);
__ SmiToInteger32(rdx, rdx);
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
__ cmpp(rdi, rcx);
__ j(not_equal, &miss);
// rcx - allocation site (loaded from vector[slot]).
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8);
__ cmpp(rdi, r8);
__ j(not_equal, miss);
__ movp(rax, Immediate(arg_count()));
__ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize));
// Verify that ecx contains an AllocationSite
__ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &miss, Label::kNear);
// Increment the call count for monomorphic function calls.
{
__ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize + kPointerSize),
Smi::FromInt(CallICNexus::kCallCountIncrement));
__ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize + kPointerSize),
Smi::FromInt(CallICNexus::kCallCountIncrement));
__ movp(rbx, rcx);
__ movp(rdx, rdi);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
}
__ bind(&miss);
GenerateMiss(masm);
// The slow case, we need this no matter what to complete a call after a miss.
__ Set(rax, arg_count());
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
__ movp(rbx, rcx);
__ movp(rdx, rdi);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
}
@ -2184,11 +2164,21 @@ void CallICStub::Generate(MacroAssembler* masm) {
}
__ bind(&extra_checks_or_miss);
Label uninitialized, miss;
Label uninitialized, miss, not_allocation_site;
__ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
__ j(equal, &slow_start);
// Check if we have an allocation site.
__ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
Heap::kAllocationSiteMapRootIndex);
__ j(not_equal, &not_allocation_site);
// We have an allocation site.
HandleArrayCase(masm, &miss);
__ bind(&not_allocation_site);
// The following cases attempt to handle MISS cases without going to the
// runtime.
if (FLAG_trace_ic) {
@ -2278,10 +2268,7 @@ void CallICStub::GenerateMiss(MacroAssembler* masm) {
__ Push(rdx);
// Call the entry.
Runtime::FunctionId id = GetICState() == DEFAULT
? Runtime::kCallIC_Miss
: Runtime::kCallIC_Customization_Miss;
__ CallRuntime(id, 3);
__ CallRuntime(Runtime::kCallIC_Miss, 3);
// Move result to edi and exit the internal frame.
__ movp(rdi, rax);
@ -4649,13 +4636,6 @@ void CallICTrampolineStub::Generate(MacroAssembler* masm) {
}
void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
EmitLoadTypeFeedbackVector(masm, rbx);
CallIC_ArrayStub stub(isolate(), state());
__ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
ProfileEntryHookStub stub(masm->isolate());