Reland "Customized support for feedback on calls to Array." and follow-up fixes.

Comparing one CallIC::State to another was not done correctly, leading to a failure to patch a CallIC when transitioning from monomorphic Array to megamorphic.

BUG=chromium:377198,chromium:377290
LOG=Y
R=jkummerow@chromium.org

Review URL: https://codereview.chromium.org/305493003

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21499 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

Parent: 2dc61205fd
Commit: d755611e93
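The essence of the follow-up fix is visible in the src/ic.h hunk below: `CallIC::State` gained a `stub_type_` field, so `operator==` had to compare it as well; otherwise a MONOMORPHIC_ARRAY state and a DEFAULT (generic) state with the same argument count and call type compared equal and the IC was never re-patched when the call site went megamorphic. The following is a minimal, self-contained sketch of that idea, not the actual V8 class; the names simply mirror the diff.

```cpp
// Minimal sketch (assumed simplification of CallIC::State from src/ic.h).
#include <cassert>

enum CallType { METHOD, FUNCTION };
enum StubType { DEFAULT, MONOMORPHIC_ARRAY };

class State {
 public:
  State(int argc, CallType call_type, StubType stub_type)
      : argc_(argc), call_type_(call_type), stub_type_(stub_type) {}

  // Before the fix, stub_type_ was not part of the comparison, so the
  // monomorphic-Array state looked equal to the generic state and the
  // CallIC target was never patched on the transition to megamorphic.
  bool operator==(const State& other) const {
    return argc_ == other.argc_ &&
           call_type_ == other.call_type_ &&
           stub_type_ == other.stub_type_;  // the follow-up fix
  }
  bool operator!=(const State& other) const { return !(*this == other); }

 private:
  int argc_;
  CallType call_type_;
  StubType stub_type_;
};

int main() {
  State monomorphic_array(1, FUNCTION, MONOMORPHIC_ARRAY);
  State generic(1, FUNCTION, DEFAULT);
  // With the fix the two states differ, so HandleMiss installs the new stub.
  assert(monomorphic_array != generic);
  return 0;
}
```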
@@ -2934,11 +2934,13 @@ static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// r1 : the function to call
Label slow, non_function, wrap, cont;

if (NeedsChecks()) {
if (needs_checks) {
// Check that the function is really a JavaScript function.
// r1: pushed function (to be verified)
__ JumpIfSmi(r1, &non_function);
@@ -2950,18 +2952,17 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {

// Fast-case: Invoke the function now.
// r1: pushed function
int argc = argc_;
ParameterCount actual(argc);

if (CallAsMethod()) {
if (NeedsChecks()) {
if (call_as_method) {
if (needs_checks) {
EmitContinueIfStrictOrNative(masm, &cont);
}

// Compute the receiver in sloppy mode.
__ ldr(r3, MemOperand(sp, argc * kPointerSize));

if (NeedsChecks()) {
if (needs_checks) {
__ JumpIfSmi(r3, &wrap);
__ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
__ b(lt, &wrap);
@@ -2974,19 +2975,24 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {

__ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());

if (NeedsChecks()) {
if (needs_checks) {
// Slow-case: Non-function called.
__ bind(&slow);
EmitSlowCase(masm, argc, &non_function);
}

if (CallAsMethod()) {
if (call_as_method) {
__ bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
}


void CallConstructStub::Generate(MacroAssembler* masm) {
// r0 : number of arguments
// r1 : the function to call
@@ -3046,7 +3052,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ bind(&do_call);
// Set expected number of arguments to zero (not changing r0).
__ mov(r2, Operand::Zero());
__ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}

@@ -3060,6 +3066,51 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}


void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
// r1 - function
// r2 - feedback vector
// r3 - slot id
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r4);
__ cmp(r1, r4);
__ b(ne, miss);

__ mov(r0, Operand(arg_count()));
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ ldr(r2, FieldMemOperand(r4, FixedArray::kHeaderSize));
// Verify that r2 contains an AllocationSite
__ AssertUndefinedOrAllocationSite(r2, r4);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
}


void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
// r1 - function
// r2 - feedback vector
// r3 - slot id
Label miss;

if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
Generate_MonomorphicArray(masm, &miss);
} else {
// So far there is only one customer for our custom feedback scheme.
UNREACHABLE();
}

__ bind(&miss);
GenerateMiss(masm);

// The slow case, we need this no matter what to complete a call after a miss.
CallFunctionNoFeedback(masm,
arg_count(),
true,
CallAsMethod());

// Unreachable.
__ stop("Unexpected code address");
}


void CallICStub::Generate(MacroAssembler* masm) {
// r1 - function
// r3 - slot id (Smi)
@@ -3071,6 +3122,11 @@ void CallICStub::Generate(MacroAssembler* masm) {

EmitLoadTypeFeedbackVector(masm, r2);

if (state_.stub_type() != CallIC::DEFAULT) {
Generate_CustomFeedbackCall(masm);
return;
}

// The checks. First, does r1 match the recorded monomorphic target?
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
@@ -3217,10 +3217,10 @@ static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("CallFunctionStub::Generate");
static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// x1 function the function to call

Register function = x1;
Register type = x4;
Label slow, non_function, wrap, cont;
@@ -3228,7 +3228,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// TODO(jbramley): This function has a lot of unnamed registers. Name them,
// and tidy things up a bit.

if (NeedsChecks()) {
if (needs_checks) {
// Check that the function is really a JavaScript function.
__ JumpIfSmi(function, &non_function);

@@ -3238,18 +3238,17 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {

// Fast-case: Invoke the function now.
// x1 function pushed function
int argc = argc_;
ParameterCount actual(argc);

if (CallAsMethod()) {
if (NeedsChecks()) {
if (call_as_method) {
if (needs_checks) {
EmitContinueIfStrictOrNative(masm, &cont);
}

// Compute the receiver in sloppy mode.
__ Peek(x3, argc * kPointerSize);

if (NeedsChecks()) {
if (needs_checks) {
__ JumpIfSmi(x3, &wrap);
__ JumpIfObjectType(x3, x10, type, FIRST_SPEC_OBJECT_TYPE, &wrap, lt);
} else {
@@ -3263,20 +3262,25 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
actual,
JUMP_FUNCTION,
NullCallWrapper());

if (NeedsChecks()) {
if (needs_checks) {
// Slow-case: Non-function called.
__ Bind(&slow);
EmitSlowCase(masm, argc, function, type, &non_function);
}

if (CallAsMethod()) {
if (call_as_method) {
__ Bind(&wrap);
EmitWrapCase(masm, argc, &cont);
}
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("CallFunctionStub::Generate");
CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
}


void CallConstructStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("CallConstructStub::Generate");
// x0 : number of arguments
@@ -3356,6 +3360,59 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}


void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
// x1 - function
// x2 - feedback vector
// x3 - slot id
Register function = x1;
Register feedback_vector = x2;
Register index = x3;
Register scratch = x4;

__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, scratch);
__ Cmp(function, scratch);
__ B(ne, miss);

Register allocation_site = feedback_vector;
__ Mov(x0, Operand(arg_count()));

__ Add(scratch, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ Ldr(allocation_site, FieldMemOperand(scratch, FixedArray::kHeaderSize));

// Verify that x2 contains an AllocationSite
__ AssertUndefinedOrAllocationSite(allocation_site, scratch);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
}


void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
// x1 - function
// x2 - feedback vector
// x3 - slot id
Label miss;

if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
Generate_MonomorphicArray(masm, &miss);
} else {
// So far there is only one customer for our custom feedback scheme.
UNREACHABLE();
}

__ bind(&miss);
GenerateMiss(masm);

// The slow case, we need this no matter what to complete a call after a miss.
CallFunctionNoFeedback(masm,
arg_count(),
true,
CallAsMethod());

__ Unreachable();
}


void CallICStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("CallICStub");

@@ -3374,6 +3431,11 @@ void CallICStub::Generate(MacroAssembler* masm) {

EmitLoadTypeFeedbackVector(masm, feedback_vector);

if (state_.stub_type() != CallIC::DEFAULT) {
Generate_CustomFeedbackCall(masm);
return;
}

// The checks. First, does x1 match the recorded monomorphic target?
__ Add(x4, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
src/ast.h (15 lines changed)
@@ -1762,11 +1762,25 @@ class Call V8_FINAL : public Expression, public FeedbackSlotInterface {
return !target_.is_null();
}

bool global_call() const {
VariableProxy* proxy = expression_->AsVariableProxy();
return proxy != NULL && proxy->var()->IsUnallocated();
}

bool known_global_function() const {
return global_call() && !target_.is_null();
}

Handle<JSFunction> target() { return target_; }

Handle<Cell> cell() { return cell_; }

Handle<AllocationSite> allocation_site() { return allocation_site_; }

void set_target(Handle<JSFunction> target) { target_ = target; }
void set_allocation_site(Handle<AllocationSite> site) {
allocation_site_ = site;
}
bool ComputeGlobalTarget(Handle<GlobalObject> global, LookupResult* lookup);

BailoutId ReturnId() const { return return_id_; }
@@ -1809,6 +1823,7 @@ class Call V8_FINAL : public Expression, public FeedbackSlotInterface {

Handle<JSFunction> target_;
Handle<Cell> cell_;
Handle<AllocationSite> allocation_site_;
int call_feedback_slot_;

const BailoutId return_id_;
@@ -824,6 +824,8 @@ class CallICStub: public PlatformCodeStub {

// Code generation helpers.
void GenerateMiss(MacroAssembler* masm);
void Generate_CustomFeedbackCall(MacroAssembler* masm);
void Generate_MonomorphicArray(MacroAssembler* masm, Label* miss);

CallIC::State state_;
};
src/hydrogen.cc (147 lines changed)
@@ -2474,14 +2474,14 @@ void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
}

// Special loop unfolding case
static const int kLoopUnfoldLimit = 8;
STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
kElementLoopUnrollThreshold);
int initial_capacity = -1;
if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
int constant_from = from->GetInteger32Constant();
int constant_to = to->GetInteger32Constant();

if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
initial_capacity = constant_to;
}
}
@@ -8234,6 +8234,56 @@ HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
}


void HOptimizedGraphBuilder::BuildArrayCall(Expression* expression,
int arguments_count,
HValue* function,
Handle<AllocationSite> site) {
Add<HCheckValue>(function, array_function());

if (IsCallArrayInlineable(arguments_count, site)) {
BuildInlinedCallArray(expression, arguments_count, site);
return;
}

HInstruction* call = PreProcessCall(New<HCallNewArray>(
function, arguments_count + 1, site->GetElementsKind()));
if (expression->IsCall()) {
Drop(1);
}
ast_context()->ReturnInstruction(call, expression->id());
}


bool HOptimizedGraphBuilder::TryHandleArrayCall(Call* expr, HValue* function) {
if (!array_function().is_identical_to(expr->target())) {
return false;
}

Handle<AllocationSite> site = expr->allocation_site();
if (site.is_null()) return false;

BuildArrayCall(expr,
expr->arguments()->length(),
function,
site);
return true;
}


bool HOptimizedGraphBuilder::TryHandleArrayCallNew(CallNew* expr,
HValue* function) {
if (!array_function().is_identical_to(expr->target())) {
return false;
}

BuildArrayCall(expr,
expr->arguments()->length(),
function,
expr->allocation_site());
return true;
}


void HOptimizedGraphBuilder::VisitCall(Call* expr) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
@@ -8328,8 +8378,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
// evaluation of the arguments.
CHECK_ALIVE(VisitForValue(expr->expression()));
HValue* function = Top();
bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
if (global_call) {
if (expr->global_call()) {
Variable* var = proxy->var();
bool known_global_function = false;
// If there is a global property cell for the name at compile time and
@@ -8363,6 +8412,7 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
return;
}
if (TryInlineApiFunctionCall(expr, receiver)) return;
if (TryHandleArrayCall(expr, function)) return;
if (TryInlineCall(expr)) return;

PushArgumentsFromEnvironment(argument_count);
@@ -8412,20 +8462,21 @@ void HOptimizedGraphBuilder::VisitCall(Call* expr) {
}


void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
void HOptimizedGraphBuilder::BuildInlinedCallArray(
Expression* expression,
int argument_count,
Handle<AllocationSite> site) {
ASSERT(!site.is_null());
ASSERT(argument_count >= 0 && argument_count <= 1);
NoObservableSideEffectsScope no_effects(this);

int argument_count = expr->arguments()->length();
// We should at least have the constructor on the expression stack.
HValue* constructor = environment()->ExpressionStackAt(argument_count);

ElementsKind kind = expr->elements_kind();
Handle<AllocationSite> site = expr->allocation_site();
ASSERT(!site.is_null());

// Register on the site for deoptimization if the transition feedback changes.
AllocationSite::AddDependentCompilationInfo(
site, AllocationSite::TRANSITIONS, top_info());
ElementsKind kind = site->GetElementsKind();
HInstruction* site_instruction = Add<HConstant>(site);

// In the single constant argument case, we may have to adjust elements kind
@@ -8448,32 +8499,12 @@ void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
site_instruction,
constructor,
DISABLE_ALLOCATION_SITES);
HValue* new_object;
if (argument_count == 0) {
new_object = array_builder.AllocateEmptyArray();
} else if (argument_count == 1) {
HValue* argument = environment()->Top();
new_object = BuildAllocateArrayFromLength(&array_builder, argument);
} else {
HValue* length = Add<HConstant>(argument_count);
// Smi arrays need to initialize array elements with the hole because
// bailout could occur if the arguments don't fit in a smi.
//
// TODO(mvstanton): If all the arguments are constants in smi range, then
// we could set fill_with_hole to false and save a few instructions.
JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
? JSArrayBuilder::FILL_WITH_HOLE
: JSArrayBuilder::DONT_FILL_WITH_HOLE;
new_object = array_builder.AllocateArray(length, length, fill_mode);
HValue* elements = array_builder.GetElementsLocation();
for (int i = 0; i < argument_count; i++) {
HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
HValue* constant_i = Add<HConstant>(i);
Add<HStoreKeyed>(elements, constant_i, value, kind);
}
}
HValue* new_object = argument_count == 0
? array_builder.AllocateEmptyArray()
: BuildAllocateArrayFromLength(&array_builder, Top());

Drop(argument_count + 1); // drop constructor and args.
int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
Drop(args_to_drop);
ast_context()->ReturnValue(new_object);
}

@@ -8487,14 +8518,13 @@ static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
}


bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
bool HOptimizedGraphBuilder::IsCallArrayInlineable(
int argument_count,
Handle<AllocationSite> site) {
Handle<JSFunction> caller = current_info()->closure();
Handle<JSFunction> target(isolate()->native_context()->array_function(),
isolate());
int argument_count = expr->arguments()->length();
Handle<JSFunction> target = array_function();
// We should have the function plus array arguments on the environment stack.
ASSERT(environment()->length() >= (argument_count + 1));
Handle<AllocationSite> site = expr->allocation_site();
ASSERT(!site.is_null());

bool inline_ok = false;
@@ -8504,22 +8534,24 @@ bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
HValue* argument = Top();
if (argument->IsConstant()) {
// Do not inline if the constant length argument is not a smi or
// outside the valid range for a fast array.
// outside the valid range for unrolled loop initialization.
HConstant* constant_argument = HConstant::cast(argument);
if (constant_argument->HasSmiValue()) {
int value = constant_argument->Integer32Value();
inline_ok = value >= 0 &&
value < JSObject::kInitialMaxFastElementArray;
inline_ok = value >= 0 && value <= kElementLoopUnrollThreshold;
if (!inline_ok) {
TraceInline(target, caller,
"Length outside of valid array range");
"Constant length outside of valid inlining range.");
}
}
} else {
inline_ok = true;
TraceInline(target, caller,
"Dont inline [new] Array(n) where n isn't constant.");
}
} else {
} else if (argument_count == 0) {
inline_ok = true;
} else {
TraceInline(target, caller, "Too many arguments to inline.");
}
} else {
TraceInline(target, caller, "AllocationSite requested no inlining.");
@@ -8644,25 +8676,10 @@ void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
} else {
// The constructor function is both an operand to the instruction and an
// argument to the construct call.
Handle<JSFunction> array_function(
isolate()->native_context()->array_function(), isolate());
bool use_call_new_array = expr->target().is_identical_to(array_function);
if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
// Verify we are still calling the array function for our native context.
Add<HCheckValue>(function, array_function);
BuildInlinedCallNewArray(expr);
return;
}
if (TryHandleArrayCallNew(expr, function)) return;

HBinaryCall* call;
if (use_call_new_array) {
Add<HCheckValue>(function, array_function);
call = New<HCallNewArray>(function, argument_count,
expr->elements_kind());
} else {
call = New<HCallNew>(function, argument_count);
}
PreProcessCall(call);
HInstruction* call =
PreProcessCall(New<HCallNew>(function, argument_count));
return ast_context()->ReturnInstruction(call, expr->id());
}
}
@@ -1295,6 +1295,10 @@ class HGraphBuilder {

void AddSimulate(BailoutId id, RemovableSimulate removable = FIXED_SIMULATE);

// When initializing arrays, we'll unfold the loop if the number of elements
// is known at compile time and is <= kElementLoopUnrollThreshold.
static const int kElementLoopUnrollThreshold = 8;

protected:
virtual bool BuildGraph() = 0;

@@ -2242,6 +2246,11 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor {
// Try to optimize fun.apply(receiver, arguments) pattern.
bool TryCallApply(Call* expr);

bool TryHandleArrayCall(Call* expr, HValue* function);
bool TryHandleArrayCallNew(CallNew* expr, HValue* function);
void BuildArrayCall(Expression* expr, int arguments_count, HValue* function,
Handle<AllocationSite> cell);

HValue* ImplicitReceiverFor(HValue* function,
Handle<JSFunction> target);

@@ -2325,8 +2334,13 @@ class HOptimizedGraphBuilder : public HGraphBuilder, public AstVisitor {
ElementsKind fixed_elements_kind,
HValue* byte_length, HValue* length);

bool IsCallNewArrayInlineable(CallNew* expr);
void BuildInlinedCallNewArray(CallNew* expr);
Handle<JSFunction> array_function() {
return handle(isolate()->native_context()->array_function());
}

bool IsCallArrayInlineable(int argument_count, Handle<AllocationSite> site);
void BuildInlinedCallArray(Expression* expression, int argument_count,
Handle<AllocationSite> site);

class PropertyAccessInfo {
public:
@@ -2335,11 +2335,13 @@ static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// edi : the function to call
Label slow, non_function, wrap, cont;

if (NeedsChecks()) {
if (needs_checks) {
// Check that the function really is a JavaScript function.
__ JumpIfSmi(edi, &non_function);

@@ -2349,17 +2351,17 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
}

// Fast-case: Just invoke the function.
ParameterCount actual(argc_);
ParameterCount actual(argc);

if (CallAsMethod()) {
if (NeedsChecks()) {
if (call_as_method) {
if (needs_checks) {
EmitContinueIfStrictOrNative(masm, &cont);
}

// Load the receiver from the stack.
__ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
__ mov(eax, Operand(esp, (argc + 1) * kPointerSize));

if (NeedsChecks()) {
if (call_as_method) {
__ JumpIfSmi(eax, &wrap);

__ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
@@ -2373,20 +2375,25 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {

__ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());

if (NeedsChecks()) {
if (needs_checks) {
// Slow-case: Non-function called.
__ bind(&slow);
// (non_function is bound in EmitSlowCase)
EmitSlowCase(isolate(), masm, argc_, &non_function);
EmitSlowCase(masm->isolate(), masm, argc, &non_function);
}

if (CallAsMethod()) {
if (call_as_method) {
__ bind(&wrap);
EmitWrapCase(masm, argc_, &cont);
EmitWrapCase(masm, argc, &cont);
}
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
}


void CallConstructStub::Generate(MacroAssembler* masm) {
// eax : number of arguments
// ebx : feedback vector
@@ -2463,6 +2470,51 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}


void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
// edi - function
// ebx - feedback vector
// edx - slot id
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
__ cmp(edi, ecx);
__ j(not_equal, miss);

__ mov(eax, arg_count());
__ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
// Verify that ecx contains an AllocationSite
__ AssertUndefinedOrAllocationSite(ebx);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
}


void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
// edi - function
// ebx - feedback vector
// edx - slot id
Label miss;

if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
Generate_MonomorphicArray(masm, &miss);
} else {
// So far there is only one customer for our custom feedback scheme.
UNREACHABLE();
}

__ bind(&miss);
GenerateMiss(masm);

// The slow case, we need this no matter what to complete a call after a miss.
CallFunctionNoFeedback(masm,
arg_count(),
true,
CallAsMethod());

// Unreachable.
__ int3();
}


void CallICStub::Generate(MacroAssembler* masm) {
// edi - function
// edx - slot id
@@ -2475,6 +2527,11 @@ void CallICStub::Generate(MacroAssembler* masm) {

EmitLoadTypeFeedbackVector(masm, ebx);

if (state_.stub_type() != CallIC::DEFAULT) {
Generate_CustomFeedbackCall(masm);
return;
}

// The checks. First, does edi match the recorded monomorphic target?
__ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
src/ic.cc (68 lines changed)
@@ -501,7 +501,14 @@ void CallIC::Clear(Isolate* isolate,
Code* target,
ConstantPoolArray* constant_pool) {
// Currently, CallIC doesn't have state changes.
ASSERT(target->ic_state() == v8::internal::GENERIC);
if (target->ic_state() != v8::internal::MONOMORPHIC) return;
CallIC::State existing_state(target->extra_ic_state());

// Monomorphic array stubs don't need to be cleared because
// 1) the stub doesn't store information that should be cleared, and
// 2) the AllocationSite stored in the type feedback vector is immune
// from gc type feedback clearing.
ASSERT(existing_state.stub_type() == MONOMORPHIC_ARRAY);
}


@@ -1818,18 +1825,50 @@ MaybeHandle<Object> KeyedStoreIC::Store(Handle<Object> object,

CallIC::State::State(ExtraICState extra_ic_state)
: argc_(ArgcBits::decode(extra_ic_state)),
call_type_(CallTypeBits::decode(extra_ic_state)) {
call_type_(CallTypeBits::decode(extra_ic_state)),
stub_type_(StubTypeBits::decode(extra_ic_state)) {
}


ExtraICState CallIC::State::GetExtraICState() const {
ExtraICState extra_ic_state =
ArgcBits::encode(argc_) |
CallTypeBits::encode(call_type_);
CallTypeBits::encode(call_type_) |
StubTypeBits::encode(stub_type_);
return extra_ic_state;
}


bool CallIC::DoCustomHandler(Handle<Object> receiver,
Handle<Object> function,
Handle<FixedArray> vector,
Handle<Smi> slot,
const State& state) {
ASSERT(FLAG_use_ic && function->IsJSFunction());

// Are we the array function?
Handle<JSFunction> array_function = Handle<JSFunction>(
isolate()->context()->native_context()->array_function(), isolate());
if (array_function.is_identical_to(Handle<JSFunction>::cast(function))) {
// Alter the slot.
Handle<AllocationSite> new_site = isolate()->factory()->NewAllocationSite();
vector->set(slot->value(), *new_site);
State new_state = state.ToMonomorphicArrayCallState();
CallICStub stub(isolate(), new_state);
set_target(*stub.GetCode());
Handle<String> name;
if (array_function->shared()->name()->IsString()) {
name = Handle<String>(String::cast(array_function->shared()->name()),
isolate());
}

TRACE_IC("CallIC (Array call)", name);
return true;
}
return false;
}


void CallIC::HandleMiss(Handle<Object> receiver,
Handle<Object> function,
Handle<FixedArray> vector,
@@ -1837,18 +1876,35 @@ void CallIC::HandleMiss(Handle<Object> receiver,
State state(target()->extra_ic_state());
Object* feedback = vector->get(slot->value());

if (feedback->IsJSFunction() || !function->IsJSFunction()) {
if (feedback->IsJSFunction() || !function->IsJSFunction() ||
state.stub_type() != DEFAULT) {
// We are going generic.
ASSERT(!function->IsJSFunction() || *function != feedback);

vector->set(slot->value(),
*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
SKIP_WRITE_BARRIER);

State new_state = state.ToGenericState();
if (new_state != state) {
// Only happens when the array ic goes generic.
ASSERT(state.stub_type() == MONOMORPHIC_ARRAY &&
FLAG_use_ic);
CallICStub stub(isolate(), new_state);
Handle<Code> code = stub.GetCode();
set_target(*code);
}

TRACE_GENERIC_IC(isolate(), "CallIC", "megamorphic");
} else {
// If we came here feedback must be the uninitialized sentinel,
// and we are going monomorphic.
ASSERT(feedback == *TypeFeedbackInfo::UninitializedSentinel(isolate()));

// Do we want to install a custom handler?
if (FLAG_use_ic &&
DoCustomHandler(receiver, function, vector, slot, state)) {
return;
}

Handle<JSFunction> js_function = Handle<JSFunction>::cast(function);
Handle<Object> name(js_function->shared()->name(), isolate());
TRACE_IC("CallIC", name);
src/ic.h (42 lines changed)
@@ -333,20 +333,34 @@ class IC_Utility {
class CallIC: public IC {
public:
enum CallType { METHOD, FUNCTION };
enum StubType { DEFAULT, MONOMORPHIC_ARRAY };

class State V8_FINAL BASE_EMBEDDED {
public:
explicit State(ExtraICState extra_ic_state);

static State MonomorphicArrayCallState(int argc, CallType call_type) {
return State(argc, call_type, MONOMORPHIC_ARRAY);
}

static State DefaultCallState(int argc, CallType call_type) {
return State(argc, call_type);
return State(argc, call_type, DEFAULT);
}

static State MegamorphicCallState(int argc, CallType call_type) {
return State(argc, call_type);
// Transition from the current state to another.
State ToGenericState() const {
return DefaultCallState(arg_count(), call_type());
}

InlineCacheState GetICState() const { return ::v8::internal::GENERIC; }
State ToMonomorphicArrayCallState() const {
return MonomorphicArrayCallState(arg_count(), call_type());
}

InlineCacheState GetICState() const {
return stub_type_ == CallIC::DEFAULT
? ::v8::internal::GENERIC
: ::v8::internal::MONOMORPHIC;
}

ExtraICState GetExtraICState() const;

@@ -355,6 +369,7 @@ class CallIC: public IC {

int arg_count() const { return argc_; }
CallType call_type() const { return call_type_; }
StubType stub_type() const { return stub_type_; }

bool CallAsMethod() const { return call_type_ == METHOD; }

@@ -362,7 +377,8 @@ class CallIC: public IC {

bool operator==(const State& other_state) const {
return (argc_ == other_state.argc_ &&
call_type_ == other_state.call_type_);
call_type_ == other_state.call_type_ &&
stub_type_ == other_state.stub_type_);
}

bool operator!=(const State& other_state) const {
@@ -370,17 +386,20 @@ class CallIC: public IC {
}

private:
State(int argc,
CallType call_type)
State(int argc, CallType call_type, StubType stub_type)
: argc_(argc),
call_type_(call_type) {
call_type_(call_type),
stub_type_(stub_type) {
}

class ArgcBits: public BitField<int, 0, Code::kArgumentsBits> {};
class CallTypeBits: public BitField<CallType, Code::kArgumentsBits, 1> {};
class StubTypeBits:
public BitField<StubType, Code::kArgumentsBits + 1, 1> {};  // NOLINT

const int argc_;
const CallType call_type_;
const StubType stub_type_;
};

explicit CallIC(Isolate* isolate)
@@ -392,6 +411,13 @@ class CallIC: public IC {
Handle<FixedArray> vector,
Handle<Smi> slot);

// Returns true if a custom handler was installed.
bool DoCustomHandler(Handle<Object> receiver,
Handle<Object> function,
Handle<FixedArray> vector,
Handle<Smi> slot,
const State& new_state);

// Code generator routines.
static Handle<Code> initialize_stub(Isolate* isolate,
int argc,
@@ -3094,11 +3094,13 @@ static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// a1 : the function to call
Label slow, non_function, wrap, cont;

if (NeedsChecks()) {
if (needs_checks) {
// Check that the function is really a JavaScript function.
// a1: pushed function (to be verified)
__ JumpIfSmi(a1, &non_function);
@@ -3110,18 +3112,17 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {

// Fast-case: Invoke the function now.
// a1: pushed function
int argc = argc_;
ParameterCount actual(argc);

if (CallAsMethod()) {
if (NeedsChecks()) {
if (call_as_method) {
if (needs_checks) {
EmitContinueIfStrictOrNative(masm, &cont);
}

// Compute the receiver in sloppy mode.
__ lw(a3, MemOperand(sp, argc * kPointerSize));

if (NeedsChecks()) {
if (needs_checks) {
__ JumpIfSmi(a3, &wrap);
__ GetObjectType(a3, t0, t0);
__ Branch(&wrap, lt, t0, Operand(FIRST_SPEC_OBJECT_TYPE));
@@ -3134,13 +3135,13 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {

__ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper());

if (NeedsChecks()) {
if (needs_checks) {
// Slow-case: Non-function called.
__ bind(&slow);
EmitSlowCase(masm, argc, &non_function);
}

if (CallAsMethod()) {
if (call_as_method) {
__ bind(&wrap);
// Wrap the receiver and patch it back onto the stack.
EmitWrapCase(masm, argc, &cont);
@@ -3148,6 +3149,11 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
}


void CallConstructStub::Generate(MacroAssembler* masm) {
// a0 : number of arguments
// a1 : the function to call
@@ -3207,8 +3213,8 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ bind(&do_call);
// Set expected number of arguments to zero (not changing r0).
__ li(a2, Operand(0, RelocInfo::NONE32));
__ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}


@@ -3221,6 +3227,51 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}


void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
// a1 - function
// a2 - feedback vector
// a3 - slot id
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, at);
__ Branch(miss, ne, a1, Operand(at));

__ li(a0, Operand(arg_count()));
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(at, a2, Operand(at));
__ lw(a2, FieldMemOperand(at, FixedArray::kHeaderSize));
// Verify that a2 contains an AllocationSite
__ AssertUndefinedOrAllocationSite(a2, at);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
}


void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
// a1 - function
// a2 - feedback vector
// a3 - slot id
Label miss;

if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
Generate_MonomorphicArray(masm, &miss);
} else {
// So far there is only one customer for our custom feedback scheme.
UNREACHABLE();
}

__ bind(&miss);
GenerateMiss(masm);

// The slow case, we need this no matter what to complete a call after a miss.
CallFunctionNoFeedback(masm,
arg_count(),
true,
CallAsMethod());

// Unreachable.
__ stop("Unexpected code address");
}


void CallICStub::Generate(MacroAssembler* masm) {
// r1 - function
// r3 - slot id (Smi)
@@ -3232,6 +3283,11 @@ void CallICStub::Generate(MacroAssembler* masm) {

EmitLoadTypeFeedbackVector(masm, a2);

if (state_.stub_type() != CallIC::DEFAULT) {
Generate_CustomFeedbackCall(masm);
return;
}

// The checks. First, does r1 match the recorded monomorphic target?
__ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t0, a2, Operand(t0));
@@ -11205,13 +11205,30 @@ void Code::ClearInlineCaches(Code::Kind* kind) {
void SharedFunctionInfo::ClearTypeFeedbackInfo() {
FixedArray* vector = feedback_vector();
Heap* heap = GetHeap();
for (int i = 0; i < vector->length(); i++) {
int length = vector->length();

for (int i = 0; i < length; i++) {
Object* obj = vector->get(i);
if (!obj->IsAllocationSite()) {
vector->set(
i,
TypeFeedbackInfo::RawUninitializedSentinel(heap),
SKIP_WRITE_BARRIER);
if (obj->IsHeapObject()) {
InstanceType instance_type =
HeapObject::cast(obj)->map()->instance_type();
switch (instance_type) {
case ALLOCATION_SITE_TYPE:
// AllocationSites are not cleared because they do not store
// information that leaks.
break;
case JS_FUNCTION_TYPE:
// No need to clear the native context array function.
if (obj == JSFunction::cast(obj)->context()->native_context()->
get(Context::ARRAY_FUNCTION_INDEX)) {
break;
}
// Fall through...

default:
vector->set(i, TypeFeedbackInfo::RawUninitializedSentinel(heap),
SKIP_WRITE_BARRIER);
}
}
}
}
@@ -97,9 +97,7 @@ bool TypeFeedbackOracle::StoreIsKeyedPolymorphic(TypeFeedbackId ast_id) {

bool TypeFeedbackOracle::CallIsMonomorphic(int slot) {
Handle<Object> value = GetInfo(slot);
return FLAG_pretenuring_call_new
? value->IsJSFunction()
: value->IsAllocationSite() || value->IsJSFunction();
return value->IsAllocationSite() || value->IsJSFunction();
}


@@ -134,7 +132,10 @@ KeyedAccessStoreMode TypeFeedbackOracle::GetStoreMode(

Handle<JSFunction> TypeFeedbackOracle::GetCallTarget(int slot) {
Handle<Object> info = GetInfo(slot);
if (FLAG_pretenuring_call_new || info->IsJSFunction()) {
if (info->IsAllocationSite()) {
ASSERT(!FLAG_pretenuring_call_new);
return Handle<JSFunction>(isolate()->native_context()->array_function());
} else {
return Handle<JSFunction>::cast(info);
}

@@ -154,6 +155,15 @@ Handle<JSFunction> TypeFeedbackOracle::GetCallNewTarget(int slot) {
}


Handle<AllocationSite> TypeFeedbackOracle::GetCallAllocationSite(int slot) {
Handle<Object> info = GetInfo(slot);
if (info->IsAllocationSite()) {
return Handle<AllocationSite>::cast(info);
}
return Handle<AllocationSite>::null();
}


Handle<AllocationSite> TypeFeedbackOracle::GetCallNewAllocationSite(int slot) {
Handle<Object> info = GetInfo(slot);
if (FLAG_pretenuring_call_new || info->IsAllocationSite()) {
@@ -65,6 +65,7 @@ class TypeFeedbackOracle: public ZoneObject {
Context* native_context);

Handle<JSFunction> GetCallTarget(int slot);
Handle<AllocationSite> GetCallAllocationSite(int slot);
Handle<JSFunction> GetCallNewTarget(int slot);
Handle<AllocationSite> GetCallNewAllocationSite(int slot);
@@ -511,6 +511,9 @@ void AstTyper::VisitCall(Call* expr) {
expr->IsUsingCallFeedbackSlot(isolate()) &&
oracle()->CallIsMonomorphic(expr->CallFeedbackSlot())) {
expr->set_target(oracle()->GetCallTarget(expr->CallFeedbackSlot()));
Handle<AllocationSite> site =
oracle()->GetCallAllocationSite(expr->CallFeedbackSlot());
expr->set_allocation_site(site);
}

ZoneList<Expression*>* args = expr->arguments();
@@ -2228,16 +2228,17 @@ static void EmitWrapCase(MacroAssembler* masm,
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
static void CallFunctionNoFeedback(MacroAssembler* masm,
int argc, bool needs_checks,
bool call_as_method) {
// rdi : the function to call

// wrap_and_call can only be true if we are compiling a monomorphic method.
Isolate* isolate = masm->isolate();
Label slow, non_function, wrap, cont;
int argc = argc_;
StackArgumentsAccessor args(rsp, argc);

if (NeedsChecks()) {
if (needs_checks) {
// Check that the function really is a JavaScript function.
__ JumpIfSmi(rdi, &non_function);

@@ -2249,15 +2250,15 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Fast-case: Just invoke the function.
ParameterCount actual(argc);

if (CallAsMethod()) {
if (NeedsChecks()) {
if (call_as_method) {
if (needs_checks) {
EmitContinueIfStrictOrNative(masm, &cont);
}

// Load the receiver from the stack.
__ movp(rax, args.GetReceiverOperand());

if (NeedsChecks()) {
if (needs_checks) {
__ JumpIfSmi(rax, &wrap);

__ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
@@ -2271,19 +2272,24 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {

__ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());

if (NeedsChecks()) {
if (needs_checks) {
// Slow-case: Non-function called.
__ bind(&slow);
EmitSlowCase(isolate, masm, &args, argc, &non_function);
}

if (CallAsMethod()) {
if (call_as_method) {
__ bind(&wrap);
EmitWrapCase(masm, &args, &cont);
}
}


void CallFunctionStub::Generate(MacroAssembler* masm) {
CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
}


void CallConstructStub::Generate(MacroAssembler* masm) {
// rax : number of arguments
// rbx : feedback vector
@@ -2358,6 +2364,54 @@ static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
}


void CallICStub::Generate_MonomorphicArray(MacroAssembler* masm, Label* miss) {
// rdi - function
// rbx - feedback vector
// rdx - slot id (as integer)
__ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
__ cmpq(rdi, rcx);
__ j(not_equal, miss);

__ movq(rax, Immediate(arg_count()));
__ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize));

// Verify that ecx contains an AllocationSite
__ AssertUndefinedOrAllocationSite(rbx);
ArrayConstructorStub stub(masm->isolate(), arg_count());
__ TailCallStub(&stub);
}


void CallICStub::Generate_CustomFeedbackCall(MacroAssembler* masm) {
// rdi - function
// rbx - feedback vector
// rdx - slot id
Label miss;

__ SmiToInteger32(rdx, rdx);

if (state_.stub_type() == CallIC::MONOMORPHIC_ARRAY) {
Generate_MonomorphicArray(masm, &miss);
} else {
// So far there is only one customer for our custom feedback scheme.
UNREACHABLE();
}

__ bind(&miss);
GenerateMiss(masm);

// The slow case, we need this no matter what to complete a call after a miss.
CallFunctionNoFeedback(masm,
arg_count(),
true,
CallAsMethod());

// Unreachable.
__ int3();
}


void CallICStub::Generate(MacroAssembler* masm) {
// rdi - function
// rbx - vector
@@ -2372,6 +2426,11 @@ void CallICStub::Generate(MacroAssembler* masm) {

EmitLoadTypeFeedbackVector(masm, rbx);

if (state_.stub_type() != CallIC::DEFAULT) {
Generate_CustomFeedbackCall(masm);
return;
}

// The checks. First, does rdi match the recorded monomorphic target?
__ SmiToInteger32(rdx, rdx);
__ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size,
@@ -150,18 +150,11 @@ if (support_smi_only_arrays) {
a = bar(10);
assertKind(elements_kind.fast, a);
assertOptimized(bar);
// bar should deopt because the length is too large.
a = bar(100000);
assertUnoptimized(bar);
assertKind(elements_kind.dictionary, a);
// The allocation site now has feedback that means the array constructor
// will not be inlined.
%OptimizeFunctionOnNextCall(bar);
a = bar(100000);
assertKind(elements_kind.dictionary, a);
assertOptimized(bar);

// If the argument isn't a smi, it bails out as well
// If the argument isn't a smi, things should still work.
a = bar("oops");
assertOptimized(bar);
assertKind(elements_kind.fast, a);
@@ -176,12 +169,6 @@ if (support_smi_only_arrays) {
barn(1, 2, 3);
assertOptimized(barn);
a = barn(1, "oops", 3);
// The method should deopt, but learn from the failure to avoid inlining
// the array.
assertKind(elements_kind.fast, a);
assertUnoptimized(barn);
%OptimizeFunctionOnNextCall(barn);
a = barn(1, "oops", 3);
assertOptimized(barn);
})();

@@ -228,10 +215,8 @@ if (support_smi_only_arrays) {
assertTrue(Realm.eval(contextB, "bar2() instanceof Array"));
})();

// Test: create array with packed feedback, then optimize/inline
// function. Verify that if we ask for a holey array then we deopt.
// Reoptimization will proceed with the correct feedback and we
// won't deopt anymore.
// Test: create array with packed feedback, then optimize function, which
// should deal with arguments that create holey arrays.
(function() {
function bar(len) { return new Array(len); }
bar(0);
@@ -241,15 +226,16 @@ if (support_smi_only_arrays) {
assertOptimized(bar);
assertFalse(isHoley(a));
a = bar(1); // ouch!
assertUnoptimized(bar);
assertTrue(isHoley(a));
// Try again
%OptimizeFunctionOnNextCall(bar);
a = bar(100);
assertOptimized(bar);
assertTrue(isHoley(a));
a = bar(100);
assertTrue(isHoley(a));
a = bar(0);
assertOptimized(bar);
assertTrue(isHoley(a));
// Crankshafted functions don't use mementos, so feedback still
// indicates a packed array is desired. (unless --nocrankshaft is in use).
if (4 != %GetOptimizationStatus(bar)) {
assertFalse(isHoley(a));
}
})();
}
@@ -85,69 +85,86 @@ if (support_smi_only_arrays) {
// Verify that basic elements kind feedback works for non-constructor
// array calls (as long as the call is made through an IC, and not
// a CallStub).
// (function (){
// function create0() {
// return Array();
// }
(function (){
function create0() {
return Array();
}

// // Calls through ICs need warm up through uninitialized, then
// // premonomorphic first.
// create0();
// create0();
// a = create0();
// assertKind(elements_kind.fast_smi_only, a);
// a[0] = 3.5;
// b = create0();
// assertKind(elements_kind.fast_double, b);
// Calls through ICs need warm up through uninitialized, then
// premonomorphic first.
create0();
a = create0();
assertKind(elements_kind.fast_smi_only, a);
a[0] = 3.5;
b = create0();
assertKind(elements_kind.fast_double, b);

// function create1(arg) {
// return Array(arg);
// }
function create1(arg) {
return Array(arg);
}

// create1(0);
// create1(0);
// a = create1(0);
// assertFalse(isHoley(a));
// assertKind(elements_kind.fast_smi_only, a);
// a[0] = "hello";
// b = create1(10);
// assertTrue(isHoley(b));
// assertKind(elements_kind.fast, b);
create1(0);
create1(0);
a = create1(0);
assertFalse(isHoley(a));
assertKind(elements_kind.fast_smi_only, a);
a[0] = "hello";
b = create1(10);
assertTrue(isHoley(b));
assertKind(elements_kind.fast, b);

// a = create1(100000);
// assertKind(elements_kind.dictionary, a);
a = create1(100000);
assertKind(elements_kind.dictionary, a);

// function create3(arg1, arg2, arg3) {
// return Array(arg1, arg2, arg3);
// }
function create3(arg1, arg2, arg3) {
return Array(arg1, arg2, arg3);
}

// create3();
// create3();
// a = create3(1,2,3);
// a[0] = 3.5;
// b = create3(1,2,3);
// assertKind(elements_kind.fast_double, b);
// assertFalse(isHoley(b));
// })();
create3(1,2,3);
create3(1,2,3);
a = create3(1,2,3);
a[0] = 3.035;
assertKind(elements_kind.fast_double, a);
b = create3(1,2,3);
assertKind(elements_kind.fast_double, b);
assertFalse(isHoley(b));
})();


// Verify that keyed calls work
// (function (){
// function create0(name) {
// return this[name]();
// }
(function (){
function create0(name) {
return this[name]();
}

// name = "Array";
// create0(name);
// create0(name);
// a = create0(name);
// a[0] = 3.5;
// b = create0(name);
// assertKind(elements_kind.fast_double, b);
// })();
name = "Array";
create0(name);
create0(name);
a = create0(name);
a[0] = 3.5;
b = create0(name);
assertKind(elements_kind.fast_double, b);
})();


// Verify that the IC can't be spoofed by patching
// Verify that feedback is turned off if the call site goes megamorphic.
(function (){
function foo(arg) { return arg(); }
foo(Array);
foo(function() {});
foo(Array);

gc();

a = foo(Array);
a[0] = 3.5;
b = foo(Array);
// b doesn't benefit from elements kind feedback at a megamorphic site.
assertKind(elements_kind.fast_smi_only, b);
})();


// Verify that crankshaft consumes type feedback.
(function (){
function create0() {
return Array();
@@ -156,42 +173,41 @@ if (support_smi_only_arrays) {
create0();
create0();
a = create0();
assertKind(elements_kind.fast_smi_only, a);
var oldArray = this.Array;
this.Array = function() { return ["hi"]; };
a[0] = 3.5;
%OptimizeFunctionOnNextCall(create0);
create0();
create0();
b = create0();
assertEquals(["hi"], b);
this.Array = oldArray;
assertKind(elements_kind.fast_double, b);
assertOptimized(create0);

function create1(arg) {
return Array(arg);
}

create1(8);
create1(8);
a = create1(8);
a[0] = 3.5;
%OptimizeFunctionOnNextCall(create1);
b = create1(8);
assertKind(elements_kind.fast_double, b);
assertOptimized(create1);

function createN(arg1, arg2, arg3) {
return Array(arg1, arg2, arg3);
}

createN(1, 2, 3);
createN(1, 2, 3);
a = createN(1, 2, 3);
a[0] = 3.5;
%OptimizeFunctionOnNextCall(createN);
b = createN(1, 2, 3);
assertKind(elements_kind.fast_double, b);
assertOptimized(createN);
})();

// Verify that calls are still made through an IC after crankshaft,
// though the type information is reset.
// TODO(mvstanton): instead, consume the type feedback gathered up
// until crankshaft time.
// (function (){
// function create0() {
// return Array();
// }

// create0();
// create0();
// a = create0();
// a[0] = 3.5;
// %OptimizeFunctionOnNextCall(create0);
// create0();
// // This test only makes sense if crankshaft is allowed
// if (4 != %GetOptimizationStatus(create0)) {
// create0();
// b = create0();
// assertKind(elements_kind.fast_smi_only, b);
// b[0] = 3.5;
// c = create0();
// assertKind(elements_kind.fast_double, c);
// assertOptimized(create0);
// }
// })();


// Verify that cross context calls work
(function (){
var realmA = Realm.current();
test/mjsunit/regress/regress-377290.js (new file, 17 lines)
@@ -0,0 +1,17 @@
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --expose-gc

Object.prototype.__defineGetter__('constructor', function() { throw 42; });
__v_7 = [
function() { [].push() },
];
for (var __v_6 = 0; __v_6 < 5; ++__v_6) {
for (var __v_8 in __v_7) {
print(__v_8, " -> ", __v_7[__v_8]);
gc();
try { __v_7[__v_8](); } catch (e) {};
}
}