Fix calls of strict mode functions with an implicit receiver.

Only the IA32 version for now; I'll start porting the rest. Strict mode functions are to get 'undefined' as the receiver when called with an implicit receiver. Modes are bad! They force us to have checks on all function calls. This change attempts to limit the cost by passing information about whether a call has an implicit or explicit receiver in ecx as part of the calling convention. The cost is setting ecx on all calls and checking ecx on entry to strict mode functions. Implicit/explicit receiver state has to be maintained by ICs, various stubs have to avoid clobbering ecx or save and restore it, and the CallFunction stub needs to check whether the receiver is implicit when it cannot tell from the context.

Review URL: http://codereview.chromium.org/7039036

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@8040 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
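For readers skimming the port below, here is a minimal self-contained sketch (illustrative only, not V8 source; the types and helper are invented) of the contract this change sets up: every call site records the call kind in a dedicated register (ecx on IA32, r5 on ARM), and the prologue of a strict mode function uses that flag to swap an implicit receiver for undefined.

    // call_kind_sketch.cc -- illustrative sketch, not V8 source.
    #include <cassert>

    // 0 = explicit receiver (method call), non-zero = implicit receiver.
    enum CallKind { CALL_AS_METHOD = 0, CALL_AS_FUNCTION = 1 };

    struct Receiver { bool is_undefined; };

    // What a strict mode prologue effectively does with the flag: if the call
    // had an implicit receiver, overwrite the receiver slot with undefined
    // instead of letting it default to the global object.
    Receiver FixStrictModeReceiver(CallKind kind, Receiver pushed) {
      if (kind == CALL_AS_FUNCTION) return Receiver{true};
      return pushed;  // Method calls keep whatever receiver was pushed.
    }

    int main() {
      assert(FixStrictModeReceiver(CALL_AS_FUNCTION, Receiver{false}).is_undefined);
      assert(!FixStrictModeReceiver(CALL_AS_METHOD, Receiver{false}).is_undefined);
      return 0;
    }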
parent 583afefab5
commit 6f775f2fb0
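The IC changes in the diff thread a Code::ExtraICState value through the call ICs so a call stub remembers whether it was reached from an implicit-receiver (contextual) call. A rough standalone model of that encoding, with invented names standing in for V8's BitField helpers, looks like this:

    // extra_ic_state_sketch.cc -- rough model, not V8's actual BitField code.
    #include <cassert>

    typedef unsigned ExtraICState;

    // Stand-in for CallICBase::Contextual: one bit of the extra IC state says
    // whether the call site had an implicit (contextual) receiver.
    struct Contextual {
      static ExtraICState encode(bool contextual) { return contextual ? 1u : 0u; }
      static bool decode(ExtraICState state) { return (state & 1u) != 0u; }
    };

    enum CallKind { CALL_AS_METHOD, CALL_AS_FUNCTION };

    // Mirrors the pattern used in GenerateCallMiss/CompileCallGlobal: derive the
    // call kind from the extra IC state before invoking the target function.
    CallKind CallKindFor(ExtraICState state) {
      return Contextual::decode(state) ? CALL_AS_FUNCTION : CALL_AS_METHOD;
    }

    int main() {
      assert(CallKindFor(Contextual::encode(true)) == CALL_AS_FUNCTION);
      assert(CallKindFor(Contextual::encode(false)) == CALL_AS_METHOD);
      return 0;
    }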
@@ -636,6 +636,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
  // Set expected number of arguments to zero (not changing r0).
  __ mov(r2, Operand(0, RelocInfo::NONE));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}
@@ -1077,12 +1078,17 @@ void Builtins::Generate_LazyCompile(MacroAssembler* masm) {

  // Preserve the function.
  __ push(r1);
  // Push call kind information.
  __ push(r5);

  // Push the function on the stack as the argument to the runtime function.
  __ push(r1);
  __ CallRuntime(Runtime::kLazyCompile, 1);
  // Calculate the entry point.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Restore call kind information.
  __ pop(r5);
  // Restore saved function.
  __ pop(r1);
@@ -1100,12 +1106,17 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {

  // Preserve the function.
  __ push(r1);
  // Push call kind information.
  __ push(r5);

  // Push the function on the stack as the argument to the runtime function.
  __ push(r1);
  __ CallRuntime(Runtime::kLazyRecompile, 1);
  // Calculate the entry point.
  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

  // Restore call kind information.
  __ pop(r5);
  // Restore saved function.
  __ pop(r1);
@@ -1345,6 +1356,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Expected number of arguments is 0 for CALL_NON_FUNCTION.
  __ mov(r2, Operand(0, RelocInfo::NONE));
  __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
  __ bind(&function);
@@ -1360,6 +1372,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(r2, Operand(r2, ASR, kSmiTagSize));
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
@@ -1539,6 +1552,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // -- r1 : function (passed through to callee)
  // -- r2 : expected number of arguments
  // -- r3 : code entry to call
  // -- r5 : call kind information
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
@@ -4486,30 +4486,22 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
void CallFunctionStub::Generate(MacroAssembler* masm) {
  Label slow;

  // If the receiver might be a value (string, number or boolean) check for this
  // and box it if it is.
  if (ReceiverMightBeValue()) {
  // The receiver might implicitly be the global object. This is
  // indicated by passing the hole as the receiver to the call
  // function stub.
  if (ReceiverMightBeImplicit()) {
    Label call;
    // Get the receiver from the stack.
    // function, receiver [, arguments]
    Label receiver_is_value, receiver_is_js_object;
    __ ldr(r1, MemOperand(sp, argc_ * kPointerSize));

    // Check if receiver is a smi (which is a number value).
    __ JumpIfSmi(r1, &receiver_is_value);

    // Check if the receiver is a valid JS object.
    __ CompareObjectType(r1, r2, r2, FIRST_JS_OBJECT_TYPE);
    __ b(ge, &receiver_is_js_object);

    // Call the runtime to box the value.
    __ bind(&receiver_is_value);
    __ EnterInternalFrame();
    __ push(r1);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ LeaveInternalFrame();
    __ str(r0, MemOperand(sp, argc_ * kPointerSize));

    __ bind(&receiver_is_js_object);
    __ ldr(r4, MemOperand(sp, argc_ * kPointerSize));
    // Call as function is indicated with the hole.
    __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
    __ b(ne, &call);
    // Patch the receiver on the stack with the global receiver object.
    __ ldr(r1, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
    __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
    __ str(r1, MemOperand(sp, argc_ * kPointerSize));
    __ bind(&call);
  }

  // Get the function to call from the stack.
@@ -4526,7 +4518,19 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
  // Fast-case: Invoke the function now.
  // r1: pushed function
  ParameterCount actual(argc_);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);

  if (ReceiverMightBeImplicit()) {
    Label call_as_function;
    __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
    __ b(eq, &call_as_function);
    __ InvokeFunction(r1, actual, JUMP_FUNCTION);
    __ bind(&call_as_function);
  }
  __ InvokeFunction(r1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    CALL_AS_FUNCTION);

  // Slow-case: Non-function called.
  __ bind(&slow);
@@ -139,6 +139,20 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
  }
#endif

  // Strict mode functions need to replace the receiver with undefined
  // when called as functions (without an explicit receiver
  // object). r5 is zero for method calls and non-zero for function
  // calls.
  if (info->is_strict_mode()) {
    Label ok;
    __ cmp(r5, Operand(0));
    __ b(eq, &ok);
    int receiver_offset = scope()->num_parameters() * kPointerSize;
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ str(r2, MemOperand(sp, receiver_offset));
    __ bind(&ok);
  }

  int locals_count = scope()->num_stack_slots();

  __ Push(lr, fp, cp, r1);
@ -2093,7 +2107,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
|
||||
// Call the IC initialization code.
|
||||
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
|
||||
Handle<Code> ic =
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
|
||||
EmitCallIC(ic, mode, expr->id());
|
||||
RecordJSReturnSite(expr);
|
||||
// Restore context register.
|
||||
@ -2103,8 +2117,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
|
||||
|
||||
|
||||
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
|
||||
Expression* key,
|
||||
RelocInfo::Mode mode) {
|
||||
Expression* key) {
|
||||
// Load the key.
|
||||
VisitForAccumulatorValue(key);
|
||||
|
||||
@ -2129,7 +2142,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
|
||||
Handle<Code> ic =
|
||||
isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
|
||||
__ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
|
||||
EmitCallIC(ic, mode, expr->id());
|
||||
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
|
||||
RecordJSReturnSite(expr);
|
||||
// Restore context register.
|
||||
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
||||
@ -2245,7 +2258,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
||||
// Record source position for debugger.
|
||||
SetSourcePosition(expr->position());
|
||||
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
|
||||
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
|
||||
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
|
||||
__ CallStub(&stub);
|
||||
RecordJSReturnSite(expr);
|
||||
// Restore context register.
|
||||
@ -2295,9 +2308,10 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
||||
__ bind(&call);
|
||||
}
|
||||
|
||||
// The receiver is either the global receiver or a JSObject found by
|
||||
// LoadContextSlot.
|
||||
EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
|
||||
// The receiver is either the global receiver or an object found
|
||||
// by LoadContextSlot. That object could be the hole if the
|
||||
// receiver is implicitly the global object.
|
||||
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
|
||||
} else if (fun->AsProperty() != NULL) {
|
||||
// Call to an object property.
|
||||
Property* prop = fun->AsProperty();
|
||||
@ -2338,7 +2352,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
||||
{ PreservePositionScope scope(masm()->positions_recorder());
|
||||
VisitForStackValue(prop->obj());
|
||||
}
|
||||
EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
|
||||
EmitKeyedCallWithIC(expr, prop->key());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -3647,9 +3661,12 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
|
||||
if (expr->is_jsruntime()) {
|
||||
// Call the JS runtime function.
|
||||
__ mov(r2, Operand(expr->name()));
|
||||
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
|
||||
Handle<Code> ic =
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arg_count, NOT_IN_LOOP);
|
||||
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arg_count,
|
||||
NOT_IN_LOOP,
|
||||
mode);
|
||||
EmitCallIC(ic, mode, expr->id());
|
||||
// Restore context register.
|
||||
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
||||
} else {
|
||||
|
@ -479,7 +479,8 @@ Object* CallIC_Miss(Arguments args);
|
||||
// The generated code falls through if both probes miss.
|
||||
static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::Kind kind) {
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r1 : receiver
|
||||
// -- r2 : name
|
||||
@ -490,7 +491,7 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
NOT_IN_LOOP,
|
||||
MONOMORPHIC,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
NORMAL,
|
||||
argc);
|
||||
Isolate::Current()->stub_cache()->GenerateProbe(
|
||||
@ -582,7 +583,10 @@ static void GenerateCallNormal(MacroAssembler* masm, int argc) {
|
||||
}
|
||||
|
||||
|
||||
static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
|
||||
static void GenerateCallMiss(MacroAssembler* masm,
|
||||
int argc,
|
||||
IC::UtilityId id,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r2 : name
|
||||
// -- lr : return address
|
||||
@@ -634,22 +638,33 @@ static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
  __ InvokeFunction(r1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}

void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
|
||||
void CallIC::GenerateMiss(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r2 : name
|
||||
// -- lr : return address
|
||||
// -----------------------------------
|
||||
|
||||
GenerateCallMiss(masm, argc, IC::kCallIC_Miss);
|
||||
GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state);
|
||||
}
|
||||
|
||||
|
||||
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r2 : name
|
||||
// -- lr : return address
|
||||
@ -657,8 +672,8 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
|
||||
// Get the receiver of the function from the stack into r1.
|
||||
__ ldr(r1, MemOperand(sp, argc * kPointerSize));
|
||||
GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC);
|
||||
GenerateMiss(masm, argc);
|
||||
GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
|
||||
GenerateMiss(masm, argc, extra_ic_state);
|
||||
}
|
||||
|
||||
|
||||
@ -669,7 +684,7 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
|
||||
// -----------------------------------
|
||||
|
||||
GenerateCallNormal(masm, argc);
|
||||
GenerateMiss(masm, argc);
|
||||
GenerateMiss(masm, argc, Code::kNoExtraICState);
|
||||
}
|
||||
|
||||
|
||||
@ -679,7 +694,7 @@ void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
|
||||
// -- lr : return address
|
||||
// -----------------------------------
|
||||
|
||||
GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss);
|
||||
GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState);
|
||||
}
|
||||
|
||||
|
||||
@ -765,7 +780,10 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
|
||||
__ bind(&lookup_monomorphic_cache);
|
||||
__ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
|
||||
GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
|
||||
GenerateMonomorphicCacheProbe(masm,
|
||||
argc,
|
||||
Code::KEYED_CALL_IC,
|
||||
Code::kNoExtraICState);
|
||||
// Fall through on miss.
|
||||
|
||||
__ bind(&slow_call);
|
||||
|
@ -2180,7 +2180,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
|
||||
HEnvironment* inner = outer->CopyForInlining(instr->closure(),
|
||||
instr->function(),
|
||||
HEnvironment::LITHIUM,
|
||||
undefined);
|
||||
undefined,
|
||||
instr->call_kind());
|
||||
current_block_->UpdateEnvironment(inner);
|
||||
chunk_->AddInlinedClosure(instr->closure());
|
||||
return NULL;
|
||||
|
@ -146,6 +146,20 @@ bool LCodeGen::GeneratePrologue() {
|
||||
// fp: Caller's frame pointer.
|
||||
// lr: Caller's pc.
|
||||
|
||||
// Strict mode functions need to replace the receiver with undefined
|
||||
// when called as functions (without an explicit receiver
|
||||
// object). r5 is zero for method calls and non-zero for function
|
||||
// calls.
|
||||
if (info_->is_strict_mode()) {
|
||||
Label ok;
|
||||
__ cmp(r5, Operand(0));
|
||||
__ b(eq, &ok);
|
||||
int receiver_offset = scope()->num_parameters() * kPointerSize;
|
||||
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
|
||||
__ str(r2, MemOperand(sp, receiver_offset));
|
||||
__ bind(&ok);
|
||||
}
|
||||
|
||||
__ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
|
||||
__ add(fp, sp, Operand(2 * kPointerSize)); // Adjust FP to point to saved FP.
|
||||
|
||||
@ -2798,7 +2812,8 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
|
||||
|
||||
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
|
||||
int arity,
|
||||
LInstruction* instr) {
|
||||
LInstruction* instr,
|
||||
CallKind call_kind) {
|
||||
// Change context if needed.
|
||||
bool change_context =
|
||||
(info()->closure()->context() != function->context()) ||
|
||||
@ -2818,6 +2833,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
|
||||
RecordPosition(pointers->position());
|
||||
|
||||
// Invoke function.
|
||||
__ SetCallKind(r5, call_kind);
|
||||
__ ldr(ip, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
|
||||
__ Call(ip);
|
||||
|
||||
@ -2832,7 +2848,10 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
|
||||
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(r0));
|
||||
__ mov(r1, Operand(instr->function()));
|
||||
CallKnownFunction(instr->function(), instr->arity(), instr);
|
||||
CallKnownFunction(instr->function(),
|
||||
instr->arity(),
|
||||
instr,
|
||||
CALL_AS_METHOD);
|
||||
}
|
||||
|
||||
|
||||
@ -3214,10 +3233,11 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(r0));
|
||||
|
||||
int arity = instr->arity();
|
||||
Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
|
||||
arity, NOT_IN_LOOP);
|
||||
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
|
||||
Handle<Code> ic =
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
|
||||
__ mov(r2, Operand(instr->name()));
|
||||
CallCode(ic, RelocInfo::CODE_TARGET, instr);
|
||||
CallCode(ic, mode, instr);
|
||||
// Restore context register.
|
||||
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
||||
}
|
||||
@ -3227,7 +3247,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(r0));
|
||||
|
||||
int arity = instr->arity();
|
||||
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
|
||||
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
|
||||
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
|
||||
__ Drop(1);
|
||||
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
||||
@ -3238,10 +3258,11 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(r0));
|
||||
|
||||
int arity = instr->arity();
|
||||
RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
|
||||
Handle<Code> ic =
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP);
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
|
||||
__ mov(r2, Operand(instr->name()));
|
||||
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
|
||||
CallCode(ic, mode, instr);
|
||||
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
|
||||
}
|
||||
|
||||
@ -3249,7 +3270,7 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
|
||||
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(r0));
|
||||
__ mov(r1, Operand(instr->target()));
|
||||
CallKnownFunction(instr->target(), instr->arity(), instr);
|
||||
CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
|
||||
}
|
||||
|
||||
|
||||
|
@ -209,7 +209,8 @@ class LCodeGen BASE_EMBEDDED {
|
||||
// to be in edi.
|
||||
void CallKnownFunction(Handle<JSFunction> function,
|
||||
int arity,
|
||||
LInstruction* instr);
|
||||
LInstruction* instr,
|
||||
CallKind call_kind);
|
||||
|
||||
void LoadHeapObject(Register result, Handle<HeapObject> object);
|
||||
|
||||
|
@@ -880,13 +880,28 @@ void MacroAssembler::GetCFunctionDoubleResult(const DoubleRegister dst) {
}


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be r5 to
  // follow the calling convention which requires the call type to be
  // in r5.
  ASSERT(dst.is(r5));
  if (call_kind == CALL_AS_FUNCTION) {
    mov(dst, Operand(Smi::FromInt(1)));
  } else {
    mov(dst, Operand(Smi::FromInt(0)));
  }
}


void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_reg,
                                    Label* done,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  Label regular_invoke;

@@ -942,10 +957,12 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
      isolate()->builtins()->ArgumentsAdaptorTrampoline();
  if (flag == CALL_FUNCTION) {
    call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
    SetCallKind(r5, call_kind);
    Call(adaptor, RelocInfo::CODE_TARGET);
    call_wrapper.AfterCall();
    b(done);
  } else {
    SetCallKind(r5, call_kind);
    Jump(adaptor, RelocInfo::CODE_TARGET);
  }
  bind(&regular_invoke);
@ -957,17 +974,20 @@ void MacroAssembler::InvokeCode(Register code,
|
||||
const ParameterCount& expected,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper) {
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
Label done;
|
||||
|
||||
InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag,
|
||||
call_wrapper);
|
||||
call_wrapper, call_kind);
|
||||
if (flag == CALL_FUNCTION) {
|
||||
call_wrapper.BeforeCall(CallSize(code));
|
||||
SetCallKind(r5, call_kind);
|
||||
Call(code);
|
||||
call_wrapper.AfterCall();
|
||||
} else {
|
||||
ASSERT(flag == JUMP_FUNCTION);
|
||||
SetCallKind(r5, call_kind);
|
||||
Jump(code);
|
||||
}
|
||||
|
||||
@ -981,13 +1001,17 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
|
||||
const ParameterCount& expected,
|
||||
const ParameterCount& actual,
|
||||
RelocInfo::Mode rmode,
|
||||
InvokeFlag flag) {
|
||||
InvokeFlag flag,
|
||||
CallKind call_kind) {
|
||||
Label done;
|
||||
|
||||
InvokePrologue(expected, actual, code, no_reg, &done, flag);
|
||||
InvokePrologue(expected, actual, code, no_reg, &done, flag,
|
||||
NullCallWrapper(), call_kind);
|
||||
if (flag == CALL_FUNCTION) {
|
||||
SetCallKind(r5, call_kind);
|
||||
Call(code, rmode);
|
||||
} else {
|
||||
SetCallKind(r5, call_kind);
|
||||
Jump(code, rmode);
|
||||
}
|
||||
|
||||
@ -1000,7 +1024,8 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
|
||||
void MacroAssembler::InvokeFunction(Register fun,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper) {
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
// Contract with called JS functions requires that function is passed in r1.
|
||||
ASSERT(fun.is(r1));
|
||||
|
||||
@ -1017,7 +1042,7 @@ void MacroAssembler::InvokeFunction(Register fun,
|
||||
FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
|
||||
|
||||
ParameterCount expected(expected_reg);
|
||||
InvokeCode(code_reg, expected, actual, flag, call_wrapper);
|
||||
InvokeCode(code_reg, expected, actual, flag, call_wrapper, call_kind);
|
||||
}
|
||||
|
||||
|
||||
|
@ -346,25 +346,33 @@ class MacroAssembler: public Assembler {
|
||||
// ---------------------------------------------------------------------------
|
||||
// JavaScript invokes
|
||||
|
||||
// Setup call kind marking in ecx. The method takes ecx as an
|
||||
// explicit first parameter to make the code more readable at the
|
||||
// call sites.
|
||||
void SetCallKind(Register dst, CallKind kind);
|
||||
|
||||
// Invoke the JavaScript function code by either calling or jumping.
|
||||
void InvokeCode(Register code,
|
||||
const ParameterCount& expected,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper = NullCallWrapper());
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
void InvokeCode(Handle<Code> code,
|
||||
const ParameterCount& expected,
|
||||
const ParameterCount& actual,
|
||||
RelocInfo::Mode rmode,
|
||||
InvokeFlag flag);
|
||||
InvokeFlag flag,
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
// Invoke the JavaScript function in the given register. Changes the
|
||||
// current context to the context in the function before invoking.
|
||||
void InvokeFunction(Register function,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper = NullCallWrapper());
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
void InvokeFunction(JSFunction* function,
|
||||
const ParameterCount& actual,
|
||||
@ -1028,7 +1036,8 @@ class MacroAssembler: public Assembler {
|
||||
Register code_reg,
|
||||
Label* done,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper = NullCallWrapper());
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
// Activation support.
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
|
@ -1457,8 +1457,10 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
|
||||
|
||||
|
||||
MaybeObject* CallStubCompiler::GenerateMissBranch() {
|
||||
MaybeObject* maybe_obj = masm()->isolate()->stub_cache()->ComputeCallMiss(
|
||||
arguments().immediate(), kind_);
|
||||
MaybeObject* maybe_obj =
|
||||
isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
|
||||
kind_,
|
||||
extra_ic_state_);
|
||||
Object* obj;
|
||||
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
|
||||
__ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
|
||||
@ -1778,7 +1780,9 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
|
||||
Label index_out_of_range;
|
||||
Label* index_out_of_range_label = &index_out_of_range;
|
||||
|
||||
if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
|
||||
if (kind_ == Code::CALL_IC &&
|
||||
(CallICBase::StringStubState::decode(extra_ic_state_) ==
|
||||
DEFAULT_STRING_STUB)) {
|
||||
index_out_of_range_label = &miss;
|
||||
}
|
||||
|
||||
@ -1862,7 +1866,9 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
|
||||
Label index_out_of_range;
|
||||
Label* index_out_of_range_label = &index_out_of_range;
|
||||
|
||||
if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
|
||||
if (kind_ == Code::CALL_IC &&
|
||||
(CallICBase::StringStubState::decode(extra_ic_state_) ==
|
||||
DEFAULT_STRING_STUB)) {
|
||||
index_out_of_range_label = &miss;
|
||||
}
|
||||
|
||||
@ -2489,11 +2495,13 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
|
||||
GlobalObject* holder,
|
||||
JSGlobalPropertyCell* cell,
|
||||
JSFunction* function,
|
||||
String* name) {
|
||||
MaybeObject* CallStubCompiler::CompileCallGlobal(
|
||||
JSObject* object,
|
||||
GlobalObject* holder,
|
||||
JSGlobalPropertyCell* cell,
|
||||
JSFunction* function,
|
||||
String* name,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- r2 : name
|
||||
// -- lr : return address
|
||||
@ -2535,15 +2543,19 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
|
||||
ASSERT(function->is_compiled());
|
||||
Handle<Code> code(function->code());
|
||||
ParameterCount expected(function->shared()->formal_parameter_count());
|
||||
CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
|
||||
? CALL_AS_FUNCTION
|
||||
: CALL_AS_METHOD;
|
||||
if (V8::UseCrankshaft()) {
|
||||
// TODO(kasperl): For now, we always call indirectly through the
|
||||
// code field in the function to allow recompilation to take effect
|
||||
// without changing any of the call sites.
|
||||
__ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
|
||||
__ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION);
|
||||
__ InvokeCode(r3, expected, arguments(), JUMP_FUNCTION,
|
||||
NullCallWrapper(), call_kind);
|
||||
} else {
|
||||
__ InvokeCode(code, expected, arguments(),
|
||||
RelocInfo::CODE_TARGET, JUMP_FUNCTION);
|
||||
__ InvokeCode(code, expected, arguments(), RelocInfo::CODE_TARGET,
|
||||
JUMP_FUNCTION, call_kind);
|
||||
}
|
||||
|
||||
// Handle call cache miss.
|
||||
|
@ -712,14 +712,15 @@ bool Call::ComputeGlobalTarget(Handle<GlobalObject> global,
|
||||
}
|
||||
|
||||
|
||||
void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
|
||||
void Call::RecordTypeFeedback(TypeFeedbackOracle* oracle,
|
||||
CallKind call_kind) {
|
||||
Property* property = expression()->AsProperty();
|
||||
ASSERT(property != NULL);
|
||||
// Specialize for the receiver types seen at runtime.
|
||||
Literal* key = property->key()->AsLiteral();
|
||||
ASSERT(key != NULL && key->handle()->IsString());
|
||||
Handle<String> name = Handle<String>::cast(key->handle());
|
||||
receiver_types_ = oracle->CallReceiverTypes(this, name);
|
||||
receiver_types_ = oracle->CallReceiverTypes(this, name, call_kind);
|
||||
#ifdef DEBUG
|
||||
if (FLAG_enable_slow_asserts) {
|
||||
if (receiver_types_ != NULL) {
|
||||
|
@ -1279,7 +1279,8 @@ class Call: public Expression {
|
||||
ZoneList<Expression*>* arguments() const { return arguments_; }
|
||||
virtual int position() const { return pos_; }
|
||||
|
||||
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
|
||||
void RecordTypeFeedback(TypeFeedbackOracle* oracle,
|
||||
CallKind call_kind);
|
||||
virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; }
|
||||
virtual bool IsMonomorphic() { return is_monomorphic_; }
|
||||
CheckType check_type() const { return check_type_; }
|
||||
|
@ -745,8 +745,9 @@ class CallFunctionStub: public CodeStub {
|
||||
}
|
||||
|
||||
InLoopFlag InLoop() { return in_loop_; }
|
||||
bool ReceiverMightBeValue() {
|
||||
return (flags_ & RECEIVER_MIGHT_BE_VALUE) != 0;
|
||||
|
||||
bool ReceiverMightBeImplicit() {
|
||||
return (flags_ & RECEIVER_MIGHT_BE_IMPLICIT) != 0;
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -676,6 +676,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
|
||||
CompilationInfo info(script);
|
||||
info.SetFunction(literal);
|
||||
info.SetScope(literal->scope());
|
||||
if (literal->scope()->is_strict_mode()) info.MarkAsStrictMode();
|
||||
|
||||
LiveEditFunctionTracker live_edit_tracker(info.isolate(), literal);
|
||||
// Determine if the function can be lazily compiled. This is necessary to
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2006-2008 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -92,7 +92,7 @@ static Handle<Code> ComputeCallDebugBreak(int argc, Code::Kind kind) {
|
||||
}
|
||||
|
||||
|
||||
static Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) {
|
||||
static Handle<Code> ComputeCallDebugPrepareStepIn(int argc, Code::Kind kind) {
|
||||
Isolate* isolate = Isolate::Current();
|
||||
CALL_HEAP_FUNCTION(
|
||||
isolate,
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2006-2008 the V8 project authors. All rights reserved.
|
||||
// Copyright 2011 the V8 project authors. All rights reserved.
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
@ -742,24 +742,30 @@ void OptimizedFrame::Summarize(List<FrameSummary>* frames) {
|
||||
// at the first position. Since we are always at a call when we need
|
||||
// to construct a stack trace, the receiver is always in a stack slot.
|
||||
opcode = static_cast<Translation::Opcode>(it.Next());
|
||||
ASSERT(opcode == Translation::STACK_SLOT);
|
||||
int input_slot_index = it.Next();
|
||||
ASSERT(opcode == Translation::STACK_SLOT ||
|
||||
opcode == Translation::LITERAL);
|
||||
int index = it.Next();
|
||||
|
||||
// Get the correct receiver in the optimized frame.
|
||||
Object* receiver = NULL;
|
||||
// Positive index means the value is spilled to the locals area. Negative
|
||||
// means it is stored in the incoming parameter area.
|
||||
if (input_slot_index >= 0) {
|
||||
receiver = GetExpression(input_slot_index);
|
||||
if (opcode == Translation::LITERAL) {
|
||||
receiver = data->LiteralArray()->get(index);
|
||||
} else {
|
||||
// Index -1 overlaps with last parameter, -n with the first parameter,
|
||||
// (-n - 1) with the receiver with n being the number of parameters
|
||||
// of the outermost, optimized frame.
|
||||
int parameter_count = ComputeParametersCount();
|
||||
int parameter_index = input_slot_index + parameter_count;
|
||||
receiver = (parameter_index == -1)
|
||||
? this->receiver()
|
||||
: this->GetParameter(parameter_index);
|
||||
// Positive index means the value is spilled to the locals
|
||||
// area. Negative means it is stored in the incoming parameter
|
||||
// area.
|
||||
if (index >= 0) {
|
||||
receiver = GetExpression(index);
|
||||
} else {
|
||||
// Index -1 overlaps with last parameter, -n with the first parameter,
|
||||
// (-n - 1) with the receiver with n being the number of parameters
|
||||
// of the outermost, optimized frame.
|
||||
int parameter_count = ComputeParametersCount();
|
||||
int parameter_index = index + parameter_count;
|
||||
receiver = (parameter_index == -1)
|
||||
? this->receiver()
|
||||
: this->GetParameter(parameter_index);
|
||||
}
|
||||
}
|
||||
|
||||
Code* code = function->shared()->code();
|
||||
|
@ -407,7 +407,7 @@ class FullCodeGenerator: public AstVisitor {
|
||||
// Platform-specific code sequences for calls
|
||||
void EmitCallWithStub(Call* expr, CallFunctionFlags flags);
|
||||
void EmitCallWithIC(Call* expr, Handle<Object> name, RelocInfo::Mode mode);
|
||||
void EmitKeyedCallWithIC(Call* expr, Expression* key, RelocInfo::Mode mode);
|
||||
void EmitKeyedCallWithIC(Call* expr, Expression* key);
|
||||
|
||||
// Platform-specific code for inline runtime calls.
|
||||
InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);
|
||||
|
@ -1196,14 +1196,19 @@ class HStackCheck: public HTemplateInstruction<0> {
|
||||
|
||||
class HEnterInlined: public HTemplateInstruction<0> {
|
||||
public:
|
||||
HEnterInlined(Handle<JSFunction> closure, FunctionLiteral* function)
|
||||
: closure_(closure), function_(function) {
|
||||
HEnterInlined(Handle<JSFunction> closure,
|
||||
FunctionLiteral* function,
|
||||
CallKind call_kind)
|
||||
: closure_(closure),
|
||||
function_(function),
|
||||
call_kind_(call_kind) {
|
||||
}
|
||||
|
||||
virtual void PrintDataTo(StringStream* stream);
|
||||
|
||||
Handle<JSFunction> closure() const { return closure_; }
|
||||
FunctionLiteral* function() const { return function_; }
|
||||
CallKind call_kind() const { return call_kind_; }
|
||||
|
||||
virtual Representation RequiredInputRepresentation(int index) const {
|
||||
return Representation::None();
|
||||
@ -1214,6 +1219,7 @@ class HEnterInlined: public HTemplateInstruction<0> {
|
||||
private:
|
||||
Handle<JSFunction> closure_;
|
||||
FunctionLiteral* function_;
|
||||
CallKind call_kind_;
|
||||
};
|
||||
|
||||
|
||||
|
@ -4054,6 +4054,12 @@ void HGraphBuilder::TraceInline(Handle<JSFunction> target,
|
||||
bool HGraphBuilder::TryInline(Call* expr) {
|
||||
if (!FLAG_use_inlining) return false;
|
||||
|
||||
// The function call we are inlining is a method call if the call
|
||||
// is a property call.
|
||||
CallKind call_kind = (expr->expression()->AsProperty() == NULL)
|
||||
? CALL_AS_FUNCTION
|
||||
: CALL_AS_METHOD;
|
||||
|
||||
// Precondition: call is monomorphic and we have found a target with the
|
||||
// appropriate arity.
|
||||
Handle<JSFunction> caller = info()->closure();
|
||||
@ -4189,13 +4195,16 @@ bool HGraphBuilder::TryInline(Call* expr) {
|
||||
environment()->CopyForInlining(target,
|
||||
function,
|
||||
HEnvironment::HYDROGEN,
|
||||
undefined);
|
||||
undefined,
|
||||
call_kind);
|
||||
HBasicBlock* body_entry = CreateBasicBlock(inner_env);
|
||||
current_block()->Goto(body_entry);
|
||||
|
||||
body_entry->SetJoinId(expr->ReturnId());
|
||||
set_current_block(body_entry);
|
||||
AddInstruction(new(zone()) HEnterInlined(target, function));
|
||||
AddInstruction(new(zone()) HEnterInlined(target,
|
||||
function,
|
||||
call_kind));
|
||||
VisitStatements(function->body());
|
||||
if (HasStackOverflow()) {
|
||||
// Bail out if the inline function did, as we cannot residualize a call
|
||||
@ -4441,7 +4450,7 @@ void HGraphBuilder::VisitCall(Call* expr) {
|
||||
}
|
||||
|
||||
// Named function call.
|
||||
expr->RecordTypeFeedback(oracle());
|
||||
expr->RecordTypeFeedback(oracle(), CALL_AS_METHOD);
|
||||
|
||||
if (TryCallApply(expr)) return;
|
||||
|
||||
@ -4450,7 +4459,6 @@ void HGraphBuilder::VisitCall(Call* expr) {
|
||||
|
||||
Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
|
||||
|
||||
expr->RecordTypeFeedback(oracle());
|
||||
ZoneMapList* types = expr->GetReceiverTypes();
|
||||
|
||||
HValue* receiver =
|
||||
@ -4544,8 +4552,8 @@ void HGraphBuilder::VisitCall(Call* expr) {
|
||||
CHECK_ALIVE(VisitExpressions(expr->arguments()));
|
||||
|
||||
call = PreProcessCall(new(zone()) HCallGlobal(context,
|
||||
var->name(),
|
||||
argument_count));
|
||||
var->name(),
|
||||
argument_count));
|
||||
}
|
||||
|
||||
} else {
|
||||
@ -5849,10 +5857,12 @@ HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
|
||||
}
|
||||
|
||||
|
||||
HEnvironment* HEnvironment::CopyForInlining(Handle<JSFunction> target,
|
||||
FunctionLiteral* function,
|
||||
CompilationPhase compilation_phase,
|
||||
HConstant* undefined) const {
|
||||
HEnvironment* HEnvironment::CopyForInlining(
|
||||
Handle<JSFunction> target,
|
||||
FunctionLiteral* function,
|
||||
CompilationPhase compilation_phase,
|
||||
HConstant* undefined,
|
||||
CallKind call_kind) const {
|
||||
// Outer environment is a copy of this one without the arguments.
|
||||
int arity = function->scope()->num_parameters();
|
||||
HEnvironment* outer = Copy();
|
||||
@ -5874,6 +5884,12 @@ HEnvironment* HEnvironment::CopyForInlining(Handle<JSFunction> target,
|
||||
inner->SetValueAt(i, push);
|
||||
}
|
||||
}
|
||||
// If the function we are inlining is a strict mode function, pass
|
||||
// undefined as the receiver for function calls (instead of the
|
||||
// global receiver).
|
||||
if (function->strict_mode() && call_kind == CALL_AS_FUNCTION) {
|
||||
inner->SetValueAt(0, undefined);
|
||||
}
|
||||
inner->SetValueAt(arity + 1, outer->LookupContext());
|
||||
for (int i = arity + 2; i < inner->length(); ++i) {
|
||||
inner->SetValueAt(i, undefined);
|
||||
|
@ -404,7 +404,8 @@ class HEnvironment: public ZoneObject {
|
||||
HEnvironment* CopyForInlining(Handle<JSFunction> target,
|
||||
FunctionLiteral* function,
|
||||
CompilationPhase compilation_phase,
|
||||
HConstant* undefined) const;
|
||||
HConstant* undefined,
|
||||
CallKind call_kind) const;
|
||||
|
||||
void AddIncomingEdge(HBasicBlock* block, HEnvironment* other);
|
||||
|
||||
|
@ -102,6 +102,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
|
||||
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
|
||||
Handle<Code> arguments_adaptor =
|
||||
masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
|
||||
__ SetCallKind(ecx, CALL_AS_METHOD);
|
||||
__ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
|
||||
}
|
||||
|
||||
@ -467,19 +468,25 @@ void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
|
||||
// Enter an internal frame.
|
||||
__ EnterInternalFrame();
|
||||
|
||||
// Push a copy of the function onto the stack.
|
||||
// Push a copy of the function.
|
||||
__ push(edi);
|
||||
// Push call kind information.
|
||||
__ push(ecx);
|
||||
|
||||
__ push(edi); // Function is also the parameter to the runtime call.
|
||||
__ CallRuntime(Runtime::kLazyCompile, 1);
|
||||
|
||||
// Restore call kind information.
|
||||
__ pop(ecx);
|
||||
// Restore receiver.
|
||||
__ pop(edi);
|
||||
|
||||
// Tear down temporary frame.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Do a tail-call of the compiled function.
|
||||
__ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
|
||||
__ jmp(Operand(ecx));
|
||||
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
|
||||
__ jmp(Operand(eax));
|
||||
}
|
||||
|
||||
|
||||
@ -489,17 +496,23 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
|
||||
|
||||
// Push a copy of the function onto the stack.
|
||||
__ push(edi);
|
||||
// Push call kind information.
|
||||
__ push(ecx);
|
||||
|
||||
__ push(edi); // Function is also the parameter to the runtime call.
|
||||
__ CallRuntime(Runtime::kLazyRecompile, 1);
|
||||
|
||||
// Restore function and tear down temporary frame.
|
||||
// Restore call kind information.
|
||||
__ pop(ecx);
|
||||
// Restore receiver.
|
||||
__ pop(edi);
|
||||
|
||||
// Tear down temporary frame.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Do a tail-call of the compiled function.
|
||||
__ lea(ecx, FieldOperand(eax, Code::kHeaderSize));
|
||||
__ jmp(Operand(ecx));
|
||||
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
|
||||
__ jmp(Operand(eax));
|
||||
}
|
||||
|
||||
|
||||
@ -682,6 +695,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
|
||||
__ j(not_zero, &function);
|
||||
__ Set(ebx, Immediate(0));
|
||||
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
|
||||
__ SetCallKind(ecx, CALL_AS_METHOD);
|
||||
__ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
|
||||
RelocInfo::CODE_TARGET);
|
||||
__ bind(&function);
|
||||
@ -695,6 +709,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
|
||||
FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
|
||||
__ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
|
||||
__ SmiUntag(ebx);
|
||||
__ SetCallKind(ecx, CALL_AS_METHOD);
|
||||
__ cmp(eax, Operand(ebx));
|
||||
__ j(not_equal,
|
||||
masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());
|
||||
@ -1424,12 +1439,12 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
|
||||
// Push the function on the stack.
|
||||
__ push(edi);
|
||||
|
||||
// Preserve the number of arguments on the stack. Must preserve both
|
||||
// eax and ebx because these registers are used when copying the
|
||||
// Preserve the number of arguments on the stack. Must preserve eax,
|
||||
// ebx and ecx because these registers are used when copying the
|
||||
// arguments and the receiver.
|
||||
ASSERT(kSmiTagSize == 1);
|
||||
__ lea(ecx, Operand(eax, eax, times_1, kSmiTag));
|
||||
__ push(ecx);
|
||||
__ lea(edi, Operand(eax, eax, times_1, kSmiTag));
|
||||
__ push(edi);
|
||||
}
|
||||
|
||||
|
||||
@ -1452,6 +1467,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- eax : actual number of arguments
|
||||
// -- ebx : expected number of arguments
|
||||
// -- ecx : call kind information
|
||||
// -- edx : code entry to call
|
||||
// -----------------------------------
|
||||
|
||||
@@ -1471,14 +1487,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(eax, Operand(ebp, eax, times_4, offset));
    __ mov(ecx, -1);  // account for receiver
    __ mov(edi, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ inc(ecx);
    __ inc(edi);
    __ push(Operand(eax, 0));
    __ sub(Operand(eax), Immediate(kPointerSize));
    __ cmp(ecx, Operand(ebx));
    __ cmp(edi, Operand(ebx));
    __ j(less, &copy);
    __ jmp(&invoke);
  }
@@ -1490,30 +1506,33 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    __ mov(ecx, -1);  // account for receiver
    // ebx = expected - actual.
    __ sub(ebx, Operand(eax));
    // eax = -actual - 1
    __ neg(eax);
    __ sub(Operand(eax), Immediate(1));

    Label copy;
    __ bind(&copy);
    __ inc(ecx);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(Operand(edi), Immediate(kPointerSize));
    __ cmp(ecx, Operand(eax));
    __ j(less, &copy);
    __ test(eax, Operand(eax));
    __ j(not_zero, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ bind(&fill);
    __ inc(ecx);
    __ inc(eax);
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    __ cmp(ecx, Operand(ebx));
    __ cmp(eax, Operand(ebx));
    __ j(less, &fill);

    // Restore function pointer.
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }

  // Call the entry point.
  __ bind(&invoke);
  // Restore function pointer.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ call(Operand(edx));

  // Leave frame and return.
@ -3945,31 +3945,22 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
|
||||
void CallFunctionStub::Generate(MacroAssembler* masm) {
|
||||
Label slow;
|
||||
|
||||
// If the receiver might be a value (string, number or boolean) check for this
|
||||
// and box it if it is.
|
||||
if (ReceiverMightBeValue()) {
|
||||
// The receiver might implicitly be the global object. This is
|
||||
// indicated by passing the hole as the receiver to the call
|
||||
// function stub.
|
||||
if (ReceiverMightBeImplicit()) {
|
||||
Label call;
|
||||
// Get the receiver from the stack.
|
||||
// +1 ~ return address
|
||||
Label receiver_is_value, receiver_is_js_object;
|
||||
__ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
|
||||
|
||||
// Check if receiver is a smi (which is a number value).
|
||||
__ test(eax, Immediate(kSmiTagMask));
|
||||
__ j(zero, &receiver_is_value);
|
||||
|
||||
// Check if the receiver is a valid JS object.
|
||||
__ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edi);
|
||||
__ j(above_equal, &receiver_is_js_object);
|
||||
|
||||
// Call the runtime to box the value.
|
||||
__ bind(&receiver_is_value);
|
||||
__ EnterInternalFrame();
|
||||
__ push(eax);
|
||||
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
|
||||
__ LeaveInternalFrame();
|
||||
__ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
|
||||
|
||||
__ bind(&receiver_is_js_object);
|
||||
// Call as function is indicated with the hole.
|
||||
__ cmp(eax, masm->isolate()->factory()->the_hole_value());
|
||||
__ j(not_equal, &call, Label::kNear);
|
||||
// Patch the receiver on the stack with the global receiver object.
|
||||
__ mov(ebx, GlobalObjectOperand());
|
||||
__ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
|
||||
__ mov(Operand(esp, (argc_ + 1) * kPointerSize), ebx);
|
||||
__ bind(&call);
|
||||
}
|
||||
|
||||
// Get the function to call from the stack.
|
||||
@ -3985,7 +3976,19 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
|
||||
|
||||
// Fast-case: Just invoke the function.
|
||||
ParameterCount actual(argc_);
|
||||
__ InvokeFunction(edi, actual, JUMP_FUNCTION);
|
||||
|
||||
if (ReceiverMightBeImplicit()) {
|
||||
Label call_as_function;
|
||||
__ cmp(eax, masm->isolate()->factory()->the_hole_value());
|
||||
__ j(equal, &call_as_function);
|
||||
__ InvokeFunction(edi, actual, JUMP_FUNCTION);
|
||||
__ bind(&call_as_function);
|
||||
}
|
||||
__ InvokeFunction(edi,
|
||||
actual,
|
||||
JUMP_FUNCTION,
|
||||
NullCallWrapper(),
|
||||
CALL_AS_FUNCTION);
|
||||
|
||||
// Slow-case: Non-function called.
|
||||
__ bind(&slow);
|
||||
|
@@ -131,6 +131,21 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
  }
#endif

  // Strict mode functions need to replace the receiver with undefined
  // when called as functions (without an explicit receiver
  // object). ecx is zero for method calls and non-zero for function
  // calls.
  if (info->is_strict_mode()) {
    Label ok;
    __ test(ecx, Operand(ecx));
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));
    __ bind(&ok);
  }

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
@ -2029,8 +2044,8 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
|
||||
// Record source position of the IC call.
|
||||
SetSourcePosition(expr->position());
|
||||
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
|
||||
Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
|
||||
arg_count, in_loop);
|
||||
Handle<Code> ic =
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
|
||||
EmitCallIC(ic, mode, expr->id());
|
||||
RecordJSReturnSite(expr);
|
||||
// Restore context register.
|
||||
@ -2040,8 +2055,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
|
||||
|
||||
|
||||
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
|
||||
Expression* key,
|
||||
RelocInfo::Mode mode) {
|
||||
Expression* key) {
|
||||
// Load the key.
|
||||
VisitForAccumulatorValue(key);
|
||||
|
||||
@ -2065,7 +2079,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
|
||||
Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
|
||||
arg_count, in_loop);
|
||||
__ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
|
||||
EmitCallIC(ic, mode, expr->id());
|
||||
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
|
||||
RecordJSReturnSite(expr);
|
||||
// Restore context register.
|
||||
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
|
||||
@ -2176,7 +2190,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
||||
// Record source position for debugger.
|
||||
SetSourcePosition(expr->position());
|
||||
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
|
||||
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
|
||||
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
|
||||
__ CallStub(&stub);
|
||||
RecordJSReturnSite(expr);
|
||||
// Restore context register.
|
||||
@ -2224,9 +2238,10 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
||||
__ bind(&call);
|
||||
}
|
||||
|
||||
// The receiver is either the global receiver or a JSObject found by
|
||||
// LoadContextSlot.
|
||||
EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
|
||||
// The receiver is either the global receiver or an object found
|
||||
// by LoadContextSlot. That object could be the hole if the
|
||||
// receiver is implicitly the global object.
|
||||
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
|
||||
} else if (fun->AsProperty() != NULL) {
|
||||
// Call to an object property.
|
||||
Property* prop = fun->AsProperty();
|
||||
@ -2269,7 +2284,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
|
||||
{ PreservePositionScope scope(masm()->positions_recorder());
|
||||
VisitForStackValue(prop->obj());
|
||||
}
|
||||
EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
|
||||
EmitKeyedCallWithIC(expr, prop->key());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -3615,9 +3630,10 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
|
||||
// Call the JS runtime function via a call IC.
|
||||
__ Set(ecx, Immediate(expr->name()));
|
||||
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
|
||||
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
|
||||
Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
|
||||
arg_count, in_loop);
|
||||
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
|
||||
arg_count, in_loop, mode);
|
||||
EmitCallIC(ic, mode, expr->id());
|
||||
// Restore context register.
|
||||
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
|
||||
} else {
|
||||
|
@ -796,7 +796,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
|
||||
// The generated code falls through if both probes miss.
|
||||
static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::Kind kind) {
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- ecx : name
|
||||
// -- edx : receiver
|
||||
@ -807,7 +808,7 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
NOT_IN_LOOP,
|
||||
MONOMORPHIC,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
NORMAL,
|
||||
argc);
|
||||
Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
|
||||
@ -906,7 +907,8 @@ static void GenerateCallNormal(MacroAssembler* masm, int argc) {
|
||||
|
||||
static void GenerateCallMiss(MacroAssembler* masm,
|
||||
int argc,
|
||||
IC::UtilityId id) {
|
||||
IC::UtilityId id,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- ecx : name
|
||||
// -- esp[0] : return address
|
||||
@ -964,12 +966,21 @@ static void GenerateCallMiss(MacroAssembler* masm,
|
||||
}
|
||||
|
||||
// Invoke the function.
|
||||
CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
|
||||
? CALL_AS_FUNCTION
|
||||
: CALL_AS_METHOD;
|
||||
ParameterCount actual(argc);
|
||||
__ InvokeFunction(edi, actual, JUMP_FUNCTION);
|
||||
__ InvokeFunction(edi,
|
||||
actual,
|
||||
JUMP_FUNCTION,
|
||||
NullCallWrapper(),
|
||||
call_kind);
|
||||
}
|
||||
|
||||
|
||||
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- ecx : name
|
||||
// -- esp[0] : return address
|
||||
@ -980,8 +991,9 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
|
||||
// Get the receiver of the function from the stack; 1 ~ return address.
|
||||
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
|
||||
GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC);
|
||||
GenerateMiss(masm, argc);
|
||||
GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
|
||||
|
||||
GenerateMiss(masm, argc, extra_ic_state);
|
||||
}
|
||||
|
||||
|
||||
@ -995,11 +1007,13 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
|
||||
// -----------------------------------
|
||||
|
||||
GenerateCallNormal(masm, argc);
|
||||
GenerateMiss(masm, argc);
|
||||
GenerateMiss(masm, argc, Code::kNoExtraICState);
|
||||
}
|
||||
|
||||
|
||||
void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
|
||||
void CallIC::GenerateMiss(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- ecx : name
|
||||
// -- esp[0] : return address
|
||||
@ -1008,7 +1022,7 @@ void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
|
||||
// -- esp[(argc + 1) * 4] : receiver
|
||||
// -----------------------------------
|
||||
|
||||
GenerateCallMiss(masm, argc, IC::kCallIC_Miss);
|
||||
GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state);
|
||||
}
|
||||
|
||||
|
||||
@ -1107,7 +1121,10 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
|
||||
|
||||
__ bind(&lookup_monomorphic_cache);
|
||||
__ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
|
||||
GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
|
||||
GenerateMonomorphicCacheProbe(masm,
|
||||
argc,
|
||||
Code::KEYED_CALL_IC,
|
||||
Code::kNoExtraICState);
|
||||
// Fall through on miss.
|
||||
|
||||
__ bind(&slow_call);
|
||||
@ -1157,7 +1174,7 @@ void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
|
||||
// -- esp[(argc + 1) * 4] : receiver
|
||||
// -----------------------------------
|
||||
|
||||
GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss);
|
||||
GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState);
|
||||
}
|
||||
|
||||
|
||||
|
@ -128,6 +128,21 @@ bool LCodeGen::GeneratePrologue() {
}
#endif

// Strict mode functions need to replace the receiver with undefined
// when called as functions (without an explicit receiver
// object). ecx is zero for method calls and non-zero for function
// calls.
if (info_->is_strict_mode()) {
Label ok;
__ test(ecx, Operand(ecx));
__ j(zero, &ok, Label::kNear);
// +1 for return address.
int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
__ mov(Operand(esp, receiver_offset),
Immediate(isolate()->factory()->undefined_value()));
__ bind(&ok);
}

__ push(ebp); // Caller's frame pointer.
__ mov(ebp, esp);
__ push(esi); // Callee's context.
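For reference, a minimal standalone sketch of the rule this prologue enforces; the names below are illustrative stand-ins and not part of the V8 sources.

#include <cassert>

// Illustrative stand-ins only.
enum CallKind { CALL_AS_METHOD = 0, CALL_AS_FUNCTION };
struct Value { bool is_undefined; bool is_global; };

// Models the prologue check above: a strict mode function invoked with an
// implicit receiver (ecx non-zero, i.e. CALL_AS_FUNCTION) must observe
// 'undefined' instead of whatever receiver the caller pushed.
Value FixUpReceiver(Value pushed_receiver, CallKind kind, bool is_strict_mode) {
  if (is_strict_mode && kind == CALL_AS_FUNCTION) {
    return Value{true, false};   // replace the receiver with undefined
  }
  return pushed_receiver;        // leave the receiver untouched
}

int main() {
  Value global_receiver{false, true};
  assert(FixUpReceiver(global_receiver, CALL_AS_FUNCTION, true).is_undefined);
  assert(FixUpReceiver(global_receiver, CALL_AS_METHOD, true).is_global);
  assert(FixUpReceiver(global_receiver, CALL_AS_FUNCTION, false).is_global);
  return 0;
}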
|
||||
@ -2685,7 +2700,8 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
|
||||
|
||||
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
|
||||
int arity,
|
||||
LInstruction* instr) {
|
||||
LInstruction* instr,
|
||||
CallKind call_kind) {
|
||||
// Change context if needed.
|
||||
bool change_context =
|
||||
(info()->closure()->context() != function->context()) ||
|
||||
@ -2707,6 +2723,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
|
||||
RecordPosition(pointers->position());
|
||||
|
||||
// Invoke function.
|
||||
__ SetCallKind(ecx, call_kind);
|
||||
if (*function == *info()->closure()) {
|
||||
__ CallSelf();
|
||||
} else {
|
||||
@ -2721,7 +2738,10 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
|
||||
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(eax));
|
||||
__ mov(edi, instr->function());
|
||||
CallKnownFunction(instr->function(), instr->arity(), instr);
|
||||
CallKnownFunction(instr->function(),
|
||||
instr->arity(),
|
||||
instr,
|
||||
CALL_AS_METHOD);
|
||||
}
|
||||
|
||||
|
||||
@ -3084,10 +3104,11 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(eax));
|
||||
|
||||
int arity = instr->arity();
|
||||
Handle<Code> ic = isolate()->stub_cache()->
|
||||
ComputeCallInitialize(arity, NOT_IN_LOOP);
|
||||
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
|
||||
Handle<Code> ic =
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
|
||||
__ mov(ecx, instr->name());
|
||||
CallCode(ic, RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
|
||||
CallCode(ic, mode, instr, CONTEXT_ADJUSTED);
|
||||
}
|
||||
|
||||
|
||||
@ -3096,7 +3117,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(eax));
|
||||
|
||||
int arity = instr->arity();
|
||||
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
|
||||
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
|
||||
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, CONTEXT_ADJUSTED);
|
||||
__ Drop(1);
|
||||
}
|
||||
@ -3107,17 +3128,18 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(eax));
|
||||
|
||||
int arity = instr->arity();
|
||||
Handle<Code> ic = isolate()->stub_cache()->
|
||||
ComputeCallInitialize(arity, NOT_IN_LOOP);
|
||||
RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
|
||||
Handle<Code> ic =
|
||||
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
|
||||
__ mov(ecx, instr->name());
|
||||
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr, CONTEXT_ADJUSTED);
|
||||
CallCode(ic, mode, instr, CONTEXT_ADJUSTED);
|
||||
}
|
||||
|
||||
|
||||
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
|
||||
ASSERT(ToRegister(instr->result()).is(eax));
|
||||
__ mov(edi, instr->target());
|
||||
CallKnownFunction(instr->target(), instr->arity(), instr);
|
||||
CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
|
||||
}
|
||||
|
||||
|
||||
|
@ -208,7 +208,8 @@ class LCodeGen BASE_EMBEDDED {
|
||||
// to be in edi.
|
||||
void CallKnownFunction(Handle<JSFunction> function,
|
||||
int arity,
|
||||
LInstruction* instr);
|
||||
LInstruction* instr,
|
||||
CallKind call_kind);
|
||||
|
||||
void LoadHeapObject(Register result, Handle<HeapObject> object);
|
||||
|
||||
|
@ -2232,7 +2232,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
|
||||
HEnvironment* inner = outer->CopyForInlining(instr->closure(),
|
||||
instr->function(),
|
||||
HEnvironment::LITHIUM,
|
||||
undefined);
|
||||
undefined,
|
||||
instr->call_kind());
|
||||
current_block_->UpdateEnvironment(inner);
|
||||
chunk_->AddInlinedClosure(instr->closure());
|
||||
return NULL;
|
||||
|
@ -1532,6 +1532,23 @@ MaybeObject* MacroAssembler::TryJumpToExternalReference(
}


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
// This macro takes the dst register to make the code more readable
// at the call sites. However, the dst register has to be ecx to
// follow the calling convention which requires the call type to be
// in ecx.
ASSERT(dst.is(ecx));
if (call_kind == CALL_AS_FUNCTION) {
// Set to some non-zero smi by updating the least significant
// byte.
mov_b(Operand(dst), 1 << kSmiTagSize);
} else {
// Set to smi zero by clearing the register.
xor_(dst, Operand(dst));
}
}
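The marker written into ecx is a smi: CALL_AS_METHOD is smi zero and CALL_AS_FUNCTION any non-zero smi. A small sketch of that encoding, assuming kSmiTagSize == 1 as on ia32 (the constant name comes from the code above, the rest is illustrative):

#include <cassert>
#include <cstdint>

enum CallKind { CALL_AS_METHOD = 0, CALL_AS_FUNCTION };
const int kSmiTagSize = 1;  // assumption: ia32 smi tagging with one tag bit

// Mirrors SetCallKind: method calls get smi zero, function calls a non-zero smi.
uint32_t EncodeCallKindMarker(CallKind kind) {
  return kind == CALL_AS_FUNCTION ? (1u << kSmiTagSize) : 0u;
}

// Mirrors the strict-mode prologue test: any non-zero marker means
// "called as a function" (implicit receiver).
CallKind DecodeCallKindMarker(uint32_t marker) {
  return marker != 0 ? CALL_AS_FUNCTION : CALL_AS_METHOD;
}

int main() {
  assert(DecodeCallKindMarker(EncodeCallKindMarker(CALL_AS_METHOD)) == CALL_AS_METHOD);
  assert(DecodeCallKindMarker(EncodeCallKindMarker(CALL_AS_FUNCTION)) == CALL_AS_FUNCTION);
  return 0;
}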
|
||||
|
||||
|
||||
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
|
||||
const ParameterCount& actual,
|
||||
Handle<Code> code_constant,
|
||||
@ -1539,7 +1556,8 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
|
||||
Label* done,
|
||||
InvokeFlag flag,
|
||||
Label::Distance done_near,
|
||||
const CallWrapper& call_wrapper) {
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
bool definitely_matches = false;
|
||||
Label invoke;
|
||||
if (expected.is_immediate()) {
|
||||
@ -1590,10 +1608,12 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
|
||||
|
||||
if (flag == CALL_FUNCTION) {
|
||||
call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
|
||||
SetCallKind(ecx, call_kind);
|
||||
call(adaptor, RelocInfo::CODE_TARGET);
|
||||
call_wrapper.AfterCall();
|
||||
jmp(done, done_near);
|
||||
} else {
|
||||
SetCallKind(ecx, call_kind);
|
||||
jmp(adaptor, RelocInfo::CODE_TARGET);
|
||||
}
|
||||
bind(&invoke);
|
||||
@ -1605,16 +1625,20 @@ void MacroAssembler::InvokeCode(const Operand& code,
|
||||
const ParameterCount& expected,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper) {
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
Label done;
|
||||
InvokePrologue(expected, actual, Handle<Code>::null(), code,
|
||||
&done, flag, Label::kNear, call_wrapper);
|
||||
&done, flag, Label::kNear, call_wrapper,
|
||||
call_kind);
|
||||
if (flag == CALL_FUNCTION) {
|
||||
call_wrapper.BeforeCall(CallSize(code));
|
||||
SetCallKind(ecx, call_kind);
|
||||
call(code);
|
||||
call_wrapper.AfterCall();
|
||||
} else {
|
||||
ASSERT(flag == JUMP_FUNCTION);
|
||||
SetCallKind(ecx, call_kind);
|
||||
jmp(code);
|
||||
}
|
||||
bind(&done);
|
||||
@ -1626,17 +1650,20 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
|
||||
const ParameterCount& actual,
|
||||
RelocInfo::Mode rmode,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper) {
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
Label done;
|
||||
Operand dummy(eax);
|
||||
InvokePrologue(expected, actual, code, dummy, &done, flag, Label::kNear,
|
||||
call_wrapper);
|
||||
call_wrapper, call_kind);
|
||||
if (flag == CALL_FUNCTION) {
|
||||
call_wrapper.BeforeCall(CallSize(code, rmode));
|
||||
SetCallKind(ecx, call_kind);
|
||||
call(code, rmode);
|
||||
call_wrapper.AfterCall();
|
||||
} else {
|
||||
ASSERT(flag == JUMP_FUNCTION);
|
||||
SetCallKind(ecx, call_kind);
|
||||
jmp(code, rmode);
|
||||
}
|
||||
bind(&done);
|
||||
@ -1646,7 +1673,8 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
|
||||
void MacroAssembler::InvokeFunction(Register fun,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper) {
|
||||
const CallWrapper& call_wrapper,
|
||||
CallKind call_kind) {
|
||||
ASSERT(fun.is(edi));
|
||||
mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
|
||||
mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
|
||||
@ -1655,7 +1683,7 @@ void MacroAssembler::InvokeFunction(Register fun,
|
||||
|
||||
ParameterCount expected(ebx);
|
||||
InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
|
||||
expected, actual, flag, call_wrapper);
|
||||
expected, actual, flag, call_wrapper, call_kind);
|
||||
}
|
||||
|
||||
|
||||
|
@ -153,26 +153,34 @@ class MacroAssembler: public Assembler {
|
||||
// ---------------------------------------------------------------------------
|
||||
// JavaScript invokes
|
||||
|
||||
// Setup call kind marking in ecx. The method takes ecx as an
|
||||
// explicit first parameter to make the code more readable at the
|
||||
// call sites.
|
||||
void SetCallKind(Register dst, CallKind kind);
|
||||
|
||||
// Invoke the JavaScript function code by either calling or jumping.
|
||||
void InvokeCode(const Operand& code,
|
||||
const ParameterCount& expected,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper = NullCallWrapper());
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
void InvokeCode(Handle<Code> code,
|
||||
const ParameterCount& expected,
|
||||
const ParameterCount& actual,
|
||||
RelocInfo::Mode rmode,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper = NullCallWrapper());
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
// Invoke the JavaScript function in the given register. Changes the
|
||||
// current context to the context in the function before invoking.
|
||||
void InvokeFunction(Register function,
|
||||
const ParameterCount& actual,
|
||||
InvokeFlag flag,
|
||||
const CallWrapper& call_wrapper = NullCallWrapper());
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
void InvokeFunction(JSFunction* function,
|
||||
const ParameterCount& actual,
|
||||
@ -652,7 +660,8 @@ class MacroAssembler: public Assembler {
|
||||
Label* done,
|
||||
InvokeFlag flag,
|
||||
Label::Distance done_near = Label::kFar,
|
||||
const CallWrapper& call_wrapper = NullCallWrapper());
|
||||
const CallWrapper& call_wrapper = NullCallWrapper(),
|
||||
CallKind call_kind = CALL_AS_METHOD);
|
||||
|
||||
// Activation support.
|
||||
void EnterFrame(StackFrame::Type type);
|
||||
|
@ -1330,7 +1330,8 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
|
||||
MaybeObject* CallStubCompiler::GenerateMissBranch() {
|
||||
MaybeObject* maybe_obj =
|
||||
isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
|
||||
kind_);
|
||||
kind_,
|
||||
extra_ic_state_);
|
||||
Object* obj;
|
||||
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
|
||||
__ jmp(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
|
||||
@ -1657,7 +1658,9 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
|
||||
Label index_out_of_range;
|
||||
Label* index_out_of_range_label = &index_out_of_range;
|
||||
|
||||
if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
|
||||
if (kind_ == Code::CALL_IC &&
|
||||
(CallICBase::StringStubState::decode(extra_ic_state_) ==
|
||||
DEFAULT_STRING_STUB)) {
|
||||
index_out_of_range_label = &miss;
|
||||
}
|
||||
|
||||
@ -1741,7 +1744,9 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
|
||||
Label index_out_of_range;
|
||||
Label* index_out_of_range_label = &index_out_of_range;
|
||||
|
||||
if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
|
||||
if (kind_ == Code::CALL_IC &&
|
||||
(CallICBase::StringStubState::decode(extra_ic_state_) ==
|
||||
DEFAULT_STRING_STUB)) {
|
||||
index_out_of_range_label = &miss;
|
||||
}
|
||||
|
||||
@ -2373,11 +2378,13 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
|
||||
GlobalObject* holder,
|
||||
JSGlobalPropertyCell* cell,
|
||||
JSFunction* function,
|
||||
String* name) {
|
||||
MaybeObject* CallStubCompiler::CompileCallGlobal(
|
||||
JSObject* object,
|
||||
GlobalObject* holder,
|
||||
JSGlobalPropertyCell* cell,
|
||||
JSFunction* function,
|
||||
String* name,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- ecx : name
|
||||
// -- esp[0] : return address
|
||||
@ -2420,16 +2427,21 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
|
||||
__ IncrementCounter(counters->call_global_inline(), 1);
|
||||
ASSERT(function->is_compiled());
|
||||
ParameterCount expected(function->shared()->formal_parameter_count());
|
||||
CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
|
||||
? CALL_AS_FUNCTION
|
||||
: CALL_AS_METHOD;
|
||||
if (V8::UseCrankshaft()) {
|
||||
// TODO(kasperl): For now, we always call indirectly through the
|
||||
// code field in the function to allow recompilation to take effect
|
||||
// without changing any of the call sites.
|
||||
__ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
|
||||
expected, arguments(), JUMP_FUNCTION);
|
||||
expected, arguments(), JUMP_FUNCTION,
|
||||
NullCallWrapper(), call_kind);
|
||||
} else {
|
||||
Handle<Code> code(function->code());
|
||||
__ InvokeCode(code, expected, arguments(),
|
||||
RelocInfo::CODE_TARGET, JUMP_FUNCTION);
|
||||
RelocInfo::CODE_TARGET, JUMP_FUNCTION,
|
||||
NullCallWrapper(), call_kind);
|
||||
}
|
||||
|
||||
// Handle call cache miss.
|
||||
|
src/ic.cc
@ -318,12 +318,14 @@ void IC::Clear(Address address) {
|
||||
|
||||
|
||||
void CallICBase::Clear(Address address, Code* target) {
|
||||
bool contextual = CallICBase::Contextual::decode(target->extra_ic_state());
|
||||
State state = target->ic_state();
|
||||
if (state == UNINITIALIZED) return;
|
||||
Code* code =
|
||||
Isolate::Current()->stub_cache()->FindCallInitialize(
|
||||
target->arguments_count(),
|
||||
target->ic_in_loop(),
|
||||
contextual ? RelocInfo::CODE_TARGET_CONTEXT : RelocInfo::CODE_TARGET,
|
||||
target->kind());
|
||||
SetTargetAtAddress(address, code);
|
||||
}
|
||||
@ -574,7 +576,7 @@ bool CallICBase::TryUpdateExtraICState(LookupResult* lookup,
|
||||
ASSERT(string == args[0] || string == JSValue::cast(args[0])->value());
|
||||
// If we're in the default (fastest) state and the index is
|
||||
// out of bounds, update the state to record this fact.
|
||||
if (*extra_ic_state == DEFAULT_STRING_STUB &&
|
||||
if (StringStubState::decode(*extra_ic_state) == DEFAULT_STRING_STUB &&
|
||||
argc >= 1 && args[1]->IsNumber()) {
|
||||
double index;
|
||||
if (args[1]->IsSmi()) {
|
||||
@ -584,7 +586,9 @@ bool CallICBase::TryUpdateExtraICState(LookupResult* lookup,
|
||||
index = DoubleToInteger(HeapNumber::cast(args[1])->value());
|
||||
}
|
||||
if (index < 0 || index >= string->length()) {
|
||||
*extra_ic_state = STRING_INDEX_OUT_OF_BOUNDS;
|
||||
*extra_ic_state =
|
||||
StringStubState::update(*extra_ic_state,
|
||||
STRING_INDEX_OUT_OF_BOUNDS);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@ -612,6 +616,7 @@ MaybeObject* CallICBase::ComputeMonomorphicStub(
|
||||
maybe_code = isolate()->stub_cache()->ComputeCallField(argc,
|
||||
in_loop,
|
||||
kind_,
|
||||
extra_ic_state,
|
||||
*name,
|
||||
*object,
|
||||
lookup->holder(),
|
||||
@ -647,6 +652,7 @@ MaybeObject* CallICBase::ComputeMonomorphicStub(
|
||||
maybe_code = isolate()->stub_cache()->ComputeCallGlobal(argc,
|
||||
in_loop,
|
||||
kind_,
|
||||
extra_ic_state,
|
||||
*name,
|
||||
*receiver,
|
||||
global,
|
||||
@ -661,6 +667,7 @@ MaybeObject* CallICBase::ComputeMonomorphicStub(
|
||||
maybe_code = isolate()->stub_cache()->ComputeCallNormal(argc,
|
||||
in_loop,
|
||||
kind_,
|
||||
extra_ic_state,
|
||||
*name,
|
||||
*receiver);
|
||||
}
|
||||
@ -671,6 +678,7 @@ MaybeObject* CallICBase::ComputeMonomorphicStub(
|
||||
maybe_code = isolate()->stub_cache()->ComputeCallInterceptor(
|
||||
argc,
|
||||
kind_,
|
||||
extra_ic_state,
|
||||
*name,
|
||||
*object,
|
||||
lookup->holder());
|
||||
@ -709,9 +717,11 @@ void CallICBase::UpdateCaches(LookupResult* lookup,
|
||||
// This is the first time we execute this inline cache.
|
||||
// Set the target to the pre monomorphic stub to delay
|
||||
// setting the monomorphic state.
|
||||
maybe_code = isolate()->stub_cache()->ComputeCallPreMonomorphic(argc,
|
||||
in_loop,
|
||||
kind_);
|
||||
maybe_code =
|
||||
isolate()->stub_cache()->ComputeCallPreMonomorphic(argc,
|
||||
in_loop,
|
||||
kind_,
|
||||
extra_ic_state);
|
||||
} else if (state == MONOMORPHIC) {
|
||||
if (kind_ == Code::CALL_IC &&
|
||||
TryUpdateExtraICState(lookup, object, &extra_ic_state)) {
|
||||
@ -731,9 +741,11 @@ void CallICBase::UpdateCaches(LookupResult* lookup,
|
||||
object,
|
||||
name);
|
||||
} else {
|
||||
maybe_code = isolate()->stub_cache()->ComputeCallMegamorphic(argc,
|
||||
in_loop,
|
||||
kind_);
|
||||
maybe_code =
|
||||
isolate()->stub_cache()->ComputeCallMegamorphic(argc,
|
||||
in_loop,
|
||||
kind_,
|
||||
extra_ic_state);
|
||||
}
|
||||
} else {
|
||||
maybe_code = ComputeMonomorphicStub(lookup,
|
||||
@ -791,7 +803,7 @@ MaybeObject* KeyedCallIC::LoadFunction(State state,
|
||||
int argc = target()->arguments_count();
|
||||
InLoopFlag in_loop = target()->ic_in_loop();
|
||||
MaybeObject* maybe_code = isolate()->stub_cache()->ComputeCallMegamorphic(
|
||||
argc, in_loop, Code::KEYED_CALL_IC);
|
||||
argc, in_loop, Code::KEYED_CALL_IC, Code::kNoExtraICState);
|
||||
Object* code;
|
||||
if (maybe_code->ToObject(&code)) {
|
||||
set_target(Code::cast(code));
|
||||
|
src/ic.h
@ -29,6 +29,7 @@
#define V8_IC_H_

#include "macro-assembler.h"
#include "type-info.h"

namespace v8 {
namespace internal {
@ -194,6 +195,10 @@ class IC_Utility {


class CallICBase: public IC {
public:
class Contextual: public BitField<bool, 0, 1> {};
class StringStubState: public BitField<StringStubFeedback, 1, 1> {};

protected:
CallICBase(Code::Kind kind, Isolate* isolate)
: IC(EXTRA_CALL_FRAME, isolate), kind_(kind) {}
@ -234,6 +239,7 @@ class CallICBase: public IC {
void ReceiverToObjectIfRequired(Handle<Object> callee, Handle<Object> object);

static void Clear(Address address, Code* target);

friend class IC;
};
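The two bit fields share the single extra IC state word on the Code object: bit 0 records whether the call is contextual (implicit receiver) and bit 1 the string stub feedback. A compact sketch of how they pack and unpack, using a trimmed stand-in for the real BitField template rather than the V8 header itself:

#include <cassert>
#include <cstdint>

// Trimmed stand-in for v8::internal::BitField (the real one is in src/utils.h).
template <class T, int shift, int size>
struct BitField {
  static uint32_t mask() { return ((1u << size) - 1) << shift; }
  static uint32_t encode(T value) { return static_cast<uint32_t>(value) << shift; }
  static T decode(uint32_t value) { return static_cast<T>((value & mask()) >> shift); }
};

enum StringStubFeedback { DEFAULT_STRING_STUB = 0, STRING_INDEX_OUT_OF_BOUNDS = 1 };

// Layout matching the declarations above: bit 0 = contextual, bit 1 = string stub state.
typedef BitField<bool, 0, 1> Contextual;
typedef BitField<StringStubFeedback, 1, 1> StringStubState;

int main() {
  // How a call-initialize stub for a contextual (implicit receiver) call
  // composes its extra IC state, in the spirit of FindCallInitialize.
  uint32_t extra_ic_state =
      StringStubState::encode(DEFAULT_STRING_STUB) | Contextual::encode(true);
  assert(Contextual::decode(extra_ic_state));
  assert(StringStubState::decode(extra_ic_state) == DEFAULT_STRING_STUB);
  return 0;
}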
|
||||
|
||||
@ -245,11 +251,17 @@ class CallIC: public CallICBase {
|
||||
}
|
||||
|
||||
// Code generator routines.
|
||||
static void GenerateInitialize(MacroAssembler* masm, int argc) {
|
||||
GenerateMiss(masm, argc);
|
||||
static void GenerateInitialize(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
GenerateMiss(masm, argc, extra_ic_state);
|
||||
}
|
||||
static void GenerateMiss(MacroAssembler* masm, int argc);
|
||||
static void GenerateMegamorphic(MacroAssembler* masm, int argc);
|
||||
static void GenerateMiss(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::ExtraICState extra_ic_state);
|
||||
static void GenerateMegamorphic(MacroAssembler* masm,
|
||||
int argc,
|
||||
Code::ExtraICState extra_ic_state);
|
||||
static void GenerateNormal(MacroAssembler* masm, int argc);
|
||||
};
|
||||
|
||||
|
@ -2798,11 +2798,10 @@ Code::Flags Code::ComputeFlags(Kind kind,
|
||||
PropertyType type,
|
||||
int argc,
|
||||
InlineCacheHolderFlag holder) {
|
||||
// Extra IC state is only allowed for monomorphic call IC stubs
|
||||
// or for store IC stubs.
|
||||
// Extra IC state is only allowed for call IC stubs or for store IC
|
||||
// stubs.
|
||||
ASSERT(extra_ic_state == kNoExtraICState ||
|
||||
(kind == CALL_IC && (ic_state == MONOMORPHIC ||
|
||||
ic_state == MONOMORPHIC_PROTOTYPE_FAILURE)) ||
|
||||
(kind == CALL_IC) ||
|
||||
(kind == STORE_IC) ||
|
||||
(kind == KEYED_STORE_IC));
|
||||
// Compute the bit mask.
|
||||
|
@ -7886,8 +7886,8 @@ static inline MaybeObject* Unhole(Heap* heap,
|
||||
}
|
||||
|
||||
|
||||
static JSObject* ComputeReceiverForNonGlobal(Isolate* isolate,
|
||||
JSObject* holder) {
|
||||
static Object* ComputeReceiverForNonGlobal(Isolate* isolate,
|
||||
JSObject* holder) {
|
||||
ASSERT(!holder->IsGlobalObject());
|
||||
Context* top = isolate->context();
|
||||
// Get the context extension function.
|
||||
@ -7899,10 +7899,11 @@ static JSObject* ComputeReceiverForNonGlobal(Isolate* isolate,
|
||||
// explicitly via a with-statement.
|
||||
Object* constructor = holder->map()->constructor();
|
||||
if (constructor != context_extension_function) return holder;
|
||||
// Fall back to using the global object as the receiver if the
|
||||
// property turns out to be a local variable allocated in a context
|
||||
// extension object - introduced via eval.
|
||||
return top->global()->global_receiver();
|
||||
// Fall back to using the global object as the implicit receiver if
|
||||
// the property turns out to be a local variable allocated in a
|
||||
// context extension object - introduced via eval. Implicit global
|
||||
// receivers are indicated with the hole value.
|
||||
return isolate->heap()->the_hole_value();
|
||||
}
|
||||
|
||||
|
||||
@ -7930,9 +7931,10 @@ static ObjectPair LoadContextSlotHelper(Arguments args,
|
||||
// If the "property" we were looking for is a local variable or an
|
||||
// argument in a context, the receiver is the global object; see
|
||||
// ECMA-262, 3rd., 10.1.6 and 10.2.3.
|
||||
// GetElement below can cause GC.
|
||||
Handle<JSObject> receiver(
|
||||
isolate->context()->global()->global_receiver());
|
||||
//
|
||||
// Use the hole as the receiver to signal that the receiver is
|
||||
// implicit and that the global receiver should be used.
|
||||
Handle<Object> receiver = isolate->factory()->the_hole_value();
|
||||
MaybeObject* value = (holder->IsContext())
|
||||
? Context::cast(*holder)->get(index)
|
||||
: JSObject::cast(*holder)->GetElement(index);
|
||||
@ -7943,17 +7945,19 @@ static ObjectPair LoadContextSlotHelper(Arguments args,
|
||||
if (!holder.is_null() && holder->IsJSObject()) {
|
||||
ASSERT(Handle<JSObject>::cast(holder)->HasProperty(*name));
|
||||
JSObject* object = JSObject::cast(*holder);
|
||||
JSObject* receiver;
|
||||
Object* receiver;
|
||||
if (object->IsGlobalObject()) {
|
||||
receiver = GlobalObject::cast(object)->global_receiver();
|
||||
} else if (context->is_exception_holder(*holder)) {
|
||||
receiver = isolate->context()->global()->global_receiver();
|
||||
// Use the hole as the receiver to signal that the receiver is
|
||||
// implicit and that the global receiver should be used.
|
||||
receiver = isolate->heap()->the_hole_value();
|
||||
} else {
|
||||
receiver = ComputeReceiverForNonGlobal(isolate, object);
|
||||
}
|
||||
|
||||
// GetProperty below can cause GC.
|
||||
Handle<JSObject> receiver_handle(receiver);
|
||||
Handle<Object> receiver_handle(receiver);
|
||||
|
||||
// No need to unhole the value here. This is taken care of by the
|
||||
// GetProperty function.
|
||||
@ -7968,7 +7972,7 @@ static ObjectPair LoadContextSlotHelper(Arguments args,
|
||||
HandleVector(&name, 1));
|
||||
return MakePair(isolate->Throw(*reference_error), NULL);
|
||||
} else {
|
||||
// The property doesn't exist - return undefined
|
||||
// The property doesn't exist - return undefined.
|
||||
return MakePair(isolate->heap()->undefined_value(),
|
||||
isolate->heap()->undefined_value());
|
||||
}
|
||||
@ -8480,8 +8484,7 @@ RUNTIME_FUNCTION(ObjectPair, Runtime_ResolvePossiblyDirectEval) {
|
||||
// 'eval' is not bound in the global context. Just call the function
|
||||
// with the given arguments. This is not necessarily the global eval.
|
||||
if (receiver->IsContext() || receiver->IsJSContextExtensionObject()) {
|
||||
receiver = Handle<JSObject>(
|
||||
isolate->context()->global()->global_receiver(), isolate);
|
||||
receiver = isolate->factory()->the_hole_value();
|
||||
}
|
||||
return MakePair(*callee, *receiver);
|
||||
}
|
||||
@ -8490,8 +8493,7 @@ RUNTIME_FUNCTION(ObjectPair, Runtime_ResolvePossiblyDirectEval) {
|
||||
// Compare it to the builtin 'GlobalEval' function to make sure.
|
||||
if (*callee != isolate->global_context()->global_eval_fun() ||
|
||||
!args[1]->IsString()) {
|
||||
return MakePair(*callee,
|
||||
isolate->context()->global()->global_receiver());
|
||||
return MakePair(*callee, isolate->heap()->the_hole_value());
|
||||
}
|
||||
|
||||
ASSERT(args[3]->IsSmi());
|
||||
@ -8513,8 +8515,7 @@ RUNTIME_FUNCTION(ObjectPair, Runtime_ResolvePossiblyDirectEvalNoLookup) {
|
||||
// Compare it to the builtin 'GlobalEval' function to make sure.
|
||||
if (*callee != isolate->global_context()->global_eval_fun() ||
|
||||
!args[1]->IsString()) {
|
||||
return MakePair(*callee,
|
||||
isolate->context()->global()->global_receiver());
|
||||
return MakePair(*callee, isolate->heap()->the_hole_value());
|
||||
}
|
||||
|
||||
ASSERT(args[3]->IsSmi());
|
||||
|
@ -789,6 +789,7 @@ MaybeObject* StubCache::ComputeCallConstant(int argc,
|
||||
MaybeObject* StubCache::ComputeCallField(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state,
|
||||
String* name,
|
||||
Object* object,
|
||||
JSObject* holder,
|
||||
@ -807,14 +808,14 @@ MaybeObject* StubCache::ComputeCallField(int argc,
|
||||
|
||||
Code::Flags flags = Code::ComputeMonomorphicFlags(kind,
|
||||
FIELD,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
cache_holder,
|
||||
in_loop,
|
||||
argc);
|
||||
Object* code = map_holder->map()->FindInCodeCache(name, flags);
|
||||
if (code->IsUndefined()) {
|
||||
CallStubCompiler compiler(
|
||||
argc, in_loop, kind, Code::kNoExtraICState, cache_holder);
|
||||
argc, in_loop, kind, extra_ic_state, cache_holder);
|
||||
{ MaybeObject* maybe_code =
|
||||
compiler.CompileCallField(JSObject::cast(object),
|
||||
holder,
|
||||
@ -837,11 +838,13 @@ MaybeObject* StubCache::ComputeCallField(int argc,
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* StubCache::ComputeCallInterceptor(int argc,
|
||||
Code::Kind kind,
|
||||
String* name,
|
||||
Object* object,
|
||||
JSObject* holder) {
|
||||
MaybeObject* StubCache::ComputeCallInterceptor(
|
||||
int argc,
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state,
|
||||
String* name,
|
||||
Object* object,
|
||||
JSObject* holder) {
|
||||
// Compute the check type and the map.
|
||||
InlineCacheHolderFlag cache_holder =
|
||||
IC::GetCodeCacheForObject(object, holder);
|
||||
@ -856,14 +859,14 @@ MaybeObject* StubCache::ComputeCallInterceptor(int argc,
|
||||
|
||||
Code::Flags flags = Code::ComputeMonomorphicFlags(kind,
|
||||
INTERCEPTOR,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
cache_holder,
|
||||
NOT_IN_LOOP,
|
||||
argc);
|
||||
Object* code = map_holder->map()->FindInCodeCache(name, flags);
|
||||
if (code->IsUndefined()) {
|
||||
CallStubCompiler compiler(
|
||||
argc, NOT_IN_LOOP, kind, Code::kNoExtraICState, cache_holder);
|
||||
argc, NOT_IN_LOOP, kind, extra_ic_state, cache_holder);
|
||||
{ MaybeObject* maybe_code =
|
||||
compiler.CompileCallInterceptor(JSObject::cast(object), holder, name);
|
||||
if (!maybe_code->ToObject(&code)) return maybe_code;
|
||||
@ -886,10 +889,12 @@ MaybeObject* StubCache::ComputeCallInterceptor(int argc,
|
||||
MaybeObject* StubCache::ComputeCallNormal(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state,
|
||||
String* name,
|
||||
JSObject* receiver) {
|
||||
Object* code;
|
||||
{ MaybeObject* maybe_code = ComputeCallNormal(argc, in_loop, kind);
|
||||
{ MaybeObject* maybe_code =
|
||||
ComputeCallNormal(argc, in_loop, kind, extra_ic_state);
|
||||
if (!maybe_code->ToObject(&code)) return maybe_code;
|
||||
}
|
||||
return code;
|
||||
@ -899,6 +904,7 @@ MaybeObject* StubCache::ComputeCallNormal(int argc,
|
||||
MaybeObject* StubCache::ComputeCallGlobal(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state,
|
||||
String* name,
|
||||
JSObject* receiver,
|
||||
GlobalObject* holder,
|
||||
@ -909,7 +915,7 @@ MaybeObject* StubCache::ComputeCallGlobal(int argc,
|
||||
JSObject* map_holder = IC::GetCodeCacheHolder(receiver, cache_holder);
|
||||
Code::Flags flags = Code::ComputeMonomorphicFlags(kind,
|
||||
NORMAL,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
cache_holder,
|
||||
in_loop,
|
||||
argc);
|
||||
@ -921,9 +927,14 @@ MaybeObject* StubCache::ComputeCallGlobal(int argc,
|
||||
// caches.
|
||||
if (!function->is_compiled()) return Failure::InternalError();
|
||||
CallStubCompiler compiler(
|
||||
argc, in_loop, kind, Code::kNoExtraICState, cache_holder);
|
||||
argc, in_loop, kind, extra_ic_state, cache_holder);
|
||||
{ MaybeObject* maybe_code =
|
||||
compiler.CompileCallGlobal(receiver, holder, cell, function, name);
|
||||
compiler.CompileCallGlobal(receiver,
|
||||
holder,
|
||||
cell,
|
||||
function,
|
||||
name,
|
||||
extra_ic_state);
|
||||
if (!maybe_code->ToObject(&code)) return maybe_code;
|
||||
}
|
||||
ASSERT_EQ(flags, Code::cast(code)->flags());
|
||||
@ -991,11 +1002,15 @@ static MaybeObject* FillCache(Isolate* isolate, MaybeObject* maybe_code) {
|
||||
|
||||
Code* StubCache::FindCallInitialize(int argc,
|
||||
InLoopFlag in_loop,
|
||||
RelocInfo::Mode mode,
|
||||
Code::Kind kind) {
|
||||
Code::ExtraICState extra_state =
|
||||
CallICBase::StringStubState::encode(DEFAULT_STRING_STUB) |
|
||||
CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT);
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
in_loop,
|
||||
UNINITIALIZED,
|
||||
Code::kNoExtraICState,
|
||||
extra_state,
|
||||
NORMAL,
|
||||
argc);
|
||||
Object* result = ProbeCache(isolate(), flags)->ToObjectUnchecked();
|
||||
@ -1008,11 +1023,15 @@ Code* StubCache::FindCallInitialize(int argc,
|
||||
|
||||
MaybeObject* StubCache::ComputeCallInitialize(int argc,
|
||||
InLoopFlag in_loop,
|
||||
RelocInfo::Mode mode,
|
||||
Code::Kind kind) {
|
||||
Code::ExtraICState extra_state =
|
||||
CallICBase::StringStubState::encode(DEFAULT_STRING_STUB) |
|
||||
CallICBase::Contextual::encode(mode == RelocInfo::CODE_TARGET_CONTEXT);
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
in_loop,
|
||||
UNINITIALIZED,
|
||||
Code::kNoExtraICState,
|
||||
extra_state,
|
||||
NORMAL,
|
||||
argc);
|
||||
Object* probe;
|
||||
@ -1025,17 +1044,20 @@ MaybeObject* StubCache::ComputeCallInitialize(int argc,
|
||||
}
|
||||
|
||||
|
||||
Handle<Code> StubCache::ComputeCallInitialize(int argc, InLoopFlag in_loop) {
|
||||
Handle<Code> StubCache::ComputeCallInitialize(int argc,
|
||||
InLoopFlag in_loop,
|
||||
RelocInfo::Mode mode) {
|
||||
if (in_loop == IN_LOOP) {
|
||||
// Force the creation of the corresponding stub outside loops,
|
||||
// because it may be used when clearing the ICs later - it is
|
||||
// possible for a series of IC transitions to lose the in-loop
|
||||
// information, and the IC clearing code can't generate a stub
|
||||
// that it needs so we need to ensure it is generated already.
|
||||
ComputeCallInitialize(argc, NOT_IN_LOOP);
|
||||
ComputeCallInitialize(argc, NOT_IN_LOOP, mode);
|
||||
}
|
||||
CALL_HEAP_FUNCTION(isolate_,
|
||||
ComputeCallInitialize(argc, in_loop, Code::CALL_IC), Code);
|
||||
ComputeCallInitialize(argc, in_loop, mode, Code::CALL_IC),
|
||||
Code);
|
||||
}
|
||||
|
||||
|
||||
@ -1051,17 +1073,23 @@ Handle<Code> StubCache::ComputeKeyedCallInitialize(int argc,
|
||||
}
|
||||
CALL_HEAP_FUNCTION(
|
||||
isolate_,
|
||||
ComputeCallInitialize(argc, in_loop, Code::KEYED_CALL_IC), Code);
|
||||
ComputeCallInitialize(argc,
|
||||
in_loop,
|
||||
RelocInfo::CODE_TARGET,
|
||||
Code::KEYED_CALL_IC),
|
||||
Code);
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* StubCache::ComputeCallPreMonomorphic(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind) {
|
||||
MaybeObject* StubCache::ComputeCallPreMonomorphic(
|
||||
int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
in_loop,
|
||||
PREMONOMORPHIC,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
NORMAL,
|
||||
argc);
|
||||
Object* probe;
|
||||
@ -1076,11 +1104,12 @@ MaybeObject* StubCache::ComputeCallPreMonomorphic(int argc,
|
||||
|
||||
MaybeObject* StubCache::ComputeCallNormal(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind) {
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
in_loop,
|
||||
MONOMORPHIC,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
NORMAL,
|
||||
argc);
|
||||
Object* probe;
|
||||
@ -1093,13 +1122,15 @@ MaybeObject* StubCache::ComputeCallNormal(int argc,
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* StubCache::ComputeCallMegamorphic(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind) {
|
||||
MaybeObject* StubCache::ComputeCallMegamorphic(
|
||||
int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
in_loop,
|
||||
MEGAMORPHIC,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
NORMAL,
|
||||
argc);
|
||||
Object* probe;
|
||||
@ -1112,13 +1143,15 @@ MaybeObject* StubCache::ComputeCallMegamorphic(int argc,
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* StubCache::ComputeCallMiss(int argc, Code::Kind kind) {
|
||||
MaybeObject* StubCache::ComputeCallMiss(int argc,
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state) {
|
||||
// MONOMORPHIC_PROTOTYPE_FAILURE state is used to make sure that miss stubs
|
||||
// and monomorphic stubs are not mixed up together in the stub cache.
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
NOT_IN_LOOP,
|
||||
MONOMORPHIC_PROTOTYPE_FAILURE,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
NORMAL,
|
||||
argc,
|
||||
OWN_MAP);
|
||||
@ -1133,7 +1166,11 @@ MaybeObject* StubCache::ComputeCallMiss(int argc, Code::Kind kind) {
|
||||
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
MaybeObject* StubCache::ComputeCallDebugBreak(int argc, Code::Kind kind) {
|
||||
MaybeObject* StubCache::ComputeCallDebugBreak(
|
||||
int argc,
|
||||
Code::Kind kind) {
|
||||
// Extra IC state is irrelevant for debug break ICs. They jump to
|
||||
// the actual call ic to carry out the work.
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
NOT_IN_LOOP,
|
||||
DEBUG_BREAK,
|
||||
@ -1150,8 +1187,11 @@ MaybeObject* StubCache::ComputeCallDebugBreak(int argc, Code::Kind kind) {
|
||||
}
|
||||
|
||||
|
||||
MaybeObject* StubCache::ComputeCallDebugPrepareStepIn(int argc,
|
||||
Code::Kind kind) {
|
||||
MaybeObject* StubCache::ComputeCallDebugPrepareStepIn(
|
||||
int argc,
|
||||
Code::Kind kind) {
|
||||
// Extra IC state is irrelevant for debug break ICs. They jump to
|
||||
// the actual call ic to carry out the work.
|
||||
Code::Flags flags = Code::ComputeFlags(kind,
|
||||
NOT_IN_LOOP,
|
||||
DEBUG_PREPARE_STEP_IN,
|
||||
@ -1435,8 +1475,9 @@ MaybeObject* StubCompiler::CompileCallInitialize(Code::Flags flags) {
|
||||
HandleScope scope(isolate());
|
||||
int argc = Code::ExtractArgumentsCountFromFlags(flags);
|
||||
Code::Kind kind = Code::ExtractKindFromFlags(flags);
|
||||
Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags);
|
||||
if (kind == Code::CALL_IC) {
|
||||
CallIC::GenerateInitialize(masm(), argc);
|
||||
CallIC::GenerateInitialize(masm(), argc, extra_ic_state);
|
||||
} else {
|
||||
KeyedCallIC::GenerateInitialize(masm(), argc);
|
||||
}
|
||||
@ -1462,8 +1503,9 @@ MaybeObject* StubCompiler::CompileCallPreMonomorphic(Code::Flags flags) {
|
||||
// The code of the PreMonomorphic stub is the same as the code
|
||||
// of the Initialized stub. They just differ on the code object flags.
|
||||
Code::Kind kind = Code::ExtractKindFromFlags(flags);
|
||||
Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags);
|
||||
if (kind == Code::CALL_IC) {
|
||||
CallIC::GenerateInitialize(masm(), argc);
|
||||
CallIC::GenerateInitialize(masm(), argc, extra_ic_state);
|
||||
} else {
|
||||
KeyedCallIC::GenerateInitialize(masm(), argc);
|
||||
}
|
||||
@ -1488,6 +1530,9 @@ MaybeObject* StubCompiler::CompileCallNormal(Code::Flags flags) {
|
||||
int argc = Code::ExtractArgumentsCountFromFlags(flags);
|
||||
Code::Kind kind = Code::ExtractKindFromFlags(flags);
|
||||
if (kind == Code::CALL_IC) {
|
||||
// Call normal is always with a explict receiver.
|
||||
ASSERT(!CallIC::Contextual::decode(
|
||||
Code::ExtractExtraICStateFromFlags(flags)));
|
||||
CallIC::GenerateNormal(masm(), argc);
|
||||
} else {
|
||||
KeyedCallIC::GenerateNormal(masm(), argc);
|
||||
@ -1511,8 +1556,9 @@ MaybeObject* StubCompiler::CompileCallMegamorphic(Code::Flags flags) {
|
||||
HandleScope scope(isolate());
|
||||
int argc = Code::ExtractArgumentsCountFromFlags(flags);
|
||||
Code::Kind kind = Code::ExtractKindFromFlags(flags);
|
||||
Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags);
|
||||
if (kind == Code::CALL_IC) {
|
||||
CallIC::GenerateMegamorphic(masm(), argc);
|
||||
CallIC::GenerateMegamorphic(masm(), argc, extra_ic_state);
|
||||
} else {
|
||||
KeyedCallIC::GenerateMegamorphic(masm(), argc);
|
||||
}
|
||||
@ -1536,8 +1582,9 @@ MaybeObject* StubCompiler::CompileCallMiss(Code::Flags flags) {
|
||||
HandleScope scope(isolate());
|
||||
int argc = Code::ExtractArgumentsCountFromFlags(flags);
|
||||
Code::Kind kind = Code::ExtractKindFromFlags(flags);
|
||||
Code::ExtraICState extra_ic_state = Code::ExtractExtraICStateFromFlags(flags);
|
||||
if (kind == Code::CALL_IC) {
|
||||
CallIC::GenerateMiss(masm(), argc);
|
||||
CallIC::GenerateMiss(masm(), argc, extra_ic_state);
|
||||
} else {
|
||||
KeyedCallIC::GenerateMiss(masm(), argc);
|
||||
}
|
||||
@ -1583,7 +1630,8 @@ MaybeObject* StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) {
|
||||
int argc = Code::ExtractArgumentsCountFromFlags(flags);
|
||||
Code::Kind kind = Code::ExtractKindFromFlags(flags);
|
||||
if (kind == Code::CALL_IC) {
|
||||
CallIC::GenerateMiss(masm(), argc);
|
||||
// For the debugger extra ic state is irrelevant.
|
||||
CallIC::GenerateMiss(masm(), argc, Code::kNoExtraICState);
|
||||
} else {
|
||||
KeyedCallIC::GenerateMiss(masm(), argc);
|
||||
}
|
||||
|
@ -194,13 +194,15 @@ class StubCache {
|
||||
StrictModeFlag strict_mode);
|
||||
// ---
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallField(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind,
|
||||
String* name,
|
||||
Object* object,
|
||||
JSObject* holder,
|
||||
int index);
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallField(
|
||||
int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind,
|
||||
Code::ExtraICState extra_ic_state,
|
||||
String* name,
|
||||
Object* object,
|
||||
JSObject* holder,
|
||||
int index);
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallConstant(
|
||||
int argc,
|
||||
@ -212,22 +214,27 @@ class StubCache {
|
||||
JSObject* holder,
|
||||
JSFunction* function);
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallNormal(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind,
|
||||
String* name,
|
||||
JSObject* receiver);
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallNormal(
|
||||
int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind,
|
||||
Code::ExtraICState extra_ic_state,
|
||||
String* name,
|
||||
JSObject* receiver);
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallInterceptor(int argc,
|
||||
Code::Kind,
|
||||
String* name,
|
||||
Object* object,
|
||||
JSObject* holder);
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallInterceptor(
|
||||
int argc,
|
||||
Code::Kind,
|
||||
Code::ExtraICState extra_ic_state,
|
||||
String* name,
|
||||
Object* object,
|
||||
JSObject* holder);
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallGlobal(
|
||||
int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind,
|
||||
Code::ExtraICState extra_ic_state,
|
||||
String* name,
|
||||
JSObject* receiver,
|
||||
GlobalObject* holder,
|
||||
@ -238,30 +245,39 @@ class StubCache {
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallInitialize(int argc,
|
||||
InLoopFlag in_loop,
|
||||
RelocInfo::Mode mode,
|
||||
Code::Kind kind);
|
||||
|
||||
Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);
|
||||
Handle<Code> ComputeCallInitialize(int argc,
|
||||
InLoopFlag in_loop,
|
||||
RelocInfo::Mode mode);
|
||||
|
||||
Handle<Code> ComputeKeyedCallInitialize(int argc, InLoopFlag in_loop);
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallPreMonomorphic(
|
||||
int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind);
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState extra_ic_state);
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallNormal(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind);
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState state);
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallMegamorphic(int argc,
|
||||
InLoopFlag in_loop,
|
||||
Code::Kind kind);
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState state);
|
||||
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallMiss(int argc, Code::Kind kind);
|
||||
MUST_USE_RESULT MaybeObject* ComputeCallMiss(int argc,
|
||||
Code::Kind kind,
|
||||
Code::ExtraICState state);
|
||||
|
||||
// Finds the Code object stored in the Heap::non_monomorphic_cache().
|
||||
MUST_USE_RESULT Code* FindCallInitialize(int argc,
|
||||
InLoopFlag in_loop,
|
||||
RelocInfo::Mode mode,
|
||||
Code::Kind kind);
|
||||
|
||||
#ifdef ENABLE_DEBUGGER_SUPPORT
|
||||
@ -744,11 +760,13 @@ class CallStubCompiler: public StubCompiler {
|
||||
MUST_USE_RESULT MaybeObject* CompileCallInterceptor(JSObject* object,
|
||||
JSObject* holder,
|
||||
String* name);
|
||||
MUST_USE_RESULT MaybeObject* CompileCallGlobal(JSObject* object,
|
||||
GlobalObject* holder,
|
||||
JSGlobalPropertyCell* cell,
|
||||
JSFunction* function,
|
||||
String* name);
|
||||
MUST_USE_RESULT MaybeObject* CompileCallGlobal(
|
||||
JSObject* object,
|
||||
GlobalObject* holder,
|
||||
JSGlobalPropertyCell* cell,
|
||||
JSFunction* function,
|
||||
String* name,
|
||||
Code::ExtraICState extra_ic_state);
|
||||
|
||||
static bool HasCustomCallGenerator(JSFunction* function);
|
||||
|
||||
|
@ -58,9 +58,6 @@ TypeInfo TypeInfo::TypeFromValue(Handle<Object> value) {
|
||||
}
|
||||
|
||||
|
||||
STATIC_ASSERT(DEFAULT_STRING_STUB == Code::kNoExtraICState);
|
||||
|
||||
|
||||
TypeFeedbackOracle::TypeFeedbackOracle(Handle<Code> code,
|
||||
Handle<Context> global_context) {
|
||||
global_context_ = global_context;
|
||||
@ -147,15 +144,18 @@ ZoneMapList* TypeFeedbackOracle::StoreReceiverTypes(Assignment* expr,
|
||||
|
||||
|
||||
ZoneMapList* TypeFeedbackOracle::CallReceiverTypes(Call* expr,
|
||||
Handle<String> name) {
|
||||
Handle<String> name,
|
||||
CallKind call_kind) {
|
||||
int arity = expr->arguments()->length();
|
||||
// Note: these flags won't let us get maps from stubs with
|
||||
// non-default extra ic state in the megamorphic case. In the more
|
||||
// important monomorphic case the map is obtained directly, so it's
|
||||
// not a problem until we decide to emit more polymorphic code.
|
||||
|
||||
// Note: Currently we do not take string extra ic data into account
|
||||
// here.
|
||||
Code::ExtraICState extra_ic_state =
|
||||
CallIC::Contextual::encode(call_kind == CALL_AS_FUNCTION);
|
||||
|
||||
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::CALL_IC,
|
||||
NORMAL,
|
||||
Code::kNoExtraICState,
|
||||
extra_ic_state,
|
||||
OWN_MAP,
|
||||
NOT_IN_LOOP,
|
||||
arity);
|
||||
|
@ -224,7 +224,9 @@ class TypeFeedbackOracle BASE_EMBEDDED {
|
||||
|
||||
ZoneMapList* LoadReceiverTypes(Property* expr, Handle<String> name);
|
||||
ZoneMapList* StoreReceiverTypes(Assignment* expr, Handle<String> name);
|
||||
ZoneMapList* CallReceiverTypes(Call* expr, Handle<String> name);
|
||||
ZoneMapList* CallReceiverTypes(Call* expr,
|
||||
Handle<String> name,
|
||||
CallKind call_kind);
|
||||
|
||||
ExternalArrayType GetKeyedLoadExternalArrayType(Property* expr);
|
||||
ExternalArrayType GetKeyedStoreExternalArrayType(Expression* expr);
|
||||
|
@ -222,6 +222,11 @@ class BitField {
return static_cast<uint32_t>(value) << shift;
}

// Returns a uint32_t with the bit field value updated.
static uint32_t update(uint32_t previous, T value) {
return (previous & ~mask()) | encode(value);
}

// Extracts the bit field from the value.
static T decode(uint32_t value) {
return static_cast<T>((value & mask()) >> shift);
|
||||
|
@ -310,7 +310,9 @@ enum InLoopFlag {

enum CallFunctionFlags {
NO_CALL_FUNCTION_FLAGS = 0,
RECEIVER_MIGHT_BE_VALUE = 1 << 0 // Receiver might not be a JSObject.
// Receiver might implicitly be the global object. If it is, the
// hole is passed to the call function stub.
RECEIVER_MIGHT_BE_IMPLICIT = 1 << 0
};


@ -497,6 +499,14 @@ enum SmiCheckType {
DO_SMI_CHECK
};


// Used to specify whether a receiver is implicitly or explicitly
// provided to a call.
enum CallKind {
CALL_AS_METHOD = 0,
CALL_AS_FUNCTION
};

} } // namespace v8::internal

#endif // V8_V8GLOBALS_H_
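A small sketch of how a call site maps onto this enum; the helper name is illustrative, the classification follows the way the full code generators and the type feedback oracle use CallKind:

#include <cassert>

enum CallKind { CALL_AS_METHOD = 0, CALL_AS_FUNCTION };

// Property calls (o.f() / o[k]()) carry an explicit receiver; contextual
// calls (f() resolved from the scope chain or the global object) do not.
CallKind CallKindForCallSite(bool has_explicit_receiver) {
  return has_explicit_receiver ? CALL_AS_METHOD : CALL_AS_FUNCTION;
}

int main() {
  assert(CallKindForCallSite(true) == CALL_AS_METHOD);     // o.f()
  assert(CallKindForCallSite(false) == CALL_AS_FUNCTION);  // f()
  return 0;
}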
|
||||
|
@ -98,6 +98,7 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
|
||||
// Set expected number of arguments to zero (not changing rax).
|
||||
__ Set(rbx, 0);
|
||||
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
|
||||
__ SetCallKind(rcx, CALL_AS_METHOD);
|
||||
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
|
||||
RelocInfo::CODE_TARGET);
|
||||
}
|
||||
@ -526,17 +527,23 @@ void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
|
||||
|
||||
// Push a copy of the function onto the stack.
|
||||
__ push(rdi);
|
||||
// Push call kind information.
|
||||
__ push(rcx);
|
||||
|
||||
__ push(rdi); // Function is also the parameter to the runtime call.
|
||||
__ CallRuntime(Runtime::kLazyCompile, 1);
|
||||
|
||||
// Restore call kind information.
|
||||
__ pop(rcx);
|
||||
// Restore receiver.
|
||||
__ pop(rdi);
|
||||
|
||||
// Tear down temporary frame.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Do a tail-call of the compiled function.
|
||||
__ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
|
||||
__ jmp(rcx);
|
||||
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
|
||||
__ jmp(rax);
|
||||
}
|
||||
|
||||
|
||||
@ -546,17 +553,23 @@ void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
|
||||
|
||||
// Push a copy of the function onto the stack.
|
||||
__ push(rdi);
|
||||
// Push call kind information.
|
||||
__ push(rcx);
|
||||
|
||||
__ push(rdi); // Function is also the parameter to the runtime call.
|
||||
__ CallRuntime(Runtime::kLazyRecompile, 1);
|
||||
|
||||
// Restore function and tear down temporary frame.
|
||||
// Restore call kind information.
|
||||
__ pop(rcx);
|
||||
// Restore function.
|
||||
__ pop(rdi);
|
||||
|
||||
// Tear down temporary frame.
|
||||
__ LeaveInternalFrame();
|
||||
|
||||
// Do a tail-call of the compiled function.
|
||||
__ lea(rcx, FieldOperand(rax, Code::kHeaderSize));
|
||||
__ jmp(rcx);
|
||||
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
|
||||
__ jmp(rax);
|
||||
}
|
||||
|
||||
|
||||
@ -740,6 +753,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
|
||||
__ j(not_zero, &function);
|
||||
__ Set(rbx, 0);
|
||||
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
|
||||
__ SetCallKind(rcx, CALL_AS_METHOD);
|
||||
__ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
|
||||
RelocInfo::CODE_TARGET);
|
||||
__ bind(&function);
|
||||
@ -753,6 +767,7 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
|
||||
FieldOperand(rdx,
|
||||
SharedFunctionInfo::kFormalParameterCountOffset));
|
||||
__ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
|
||||
__ SetCallKind(rcx, CALL_AS_METHOD);
|
||||
__ cmpq(rax, rbx);
|
||||
__ j(not_equal,
|
||||
masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
|
||||
@ -1335,11 +1350,11 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
|
||||
// Push the function on the stack.
|
||||
__ push(rdi);
|
||||
|
||||
// Preserve the number of arguments on the stack. Must preserve both
|
||||
// rax and rbx because these registers are used when copying the
|
||||
// Preserve the number of arguments on the stack. Must preserve rax,
|
||||
// rbx and rcx because these registers are used when copying the
|
||||
// arguments and the receiver.
|
||||
__ Integer32ToSmi(rcx, rax);
|
||||
__ push(rcx);
|
||||
__ Integer32ToSmi(r8, rax);
|
||||
__ push(r8);
|
||||
}
|
||||
|
||||
|
||||
@ -1363,6 +1378,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
|
||||
// ----------- S t a t e -------------
|
||||
// -- rax : actual number of arguments
|
||||
// -- rbx : expected number of arguments
|
||||
// -- rcx : call kind information
|
||||
// -- rdx : code entry to call
|
||||
// -----------------------------------
|
||||
|
||||
@ -1383,14 +1399,14 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
|
||||
// Copy receiver and all expected arguments.
|
||||
const int offset = StandardFrameConstants::kCallerSPOffset;
|
||||
__ lea(rax, Operand(rbp, rax, times_pointer_size, offset));
|
||||
__ Set(rcx, -1); // account for receiver
|
||||
__ Set(r8, -1); // account for receiver
|
||||
|
||||
Label copy;
|
||||
__ bind(©);
|
||||
__ incq(rcx);
|
||||
__ incq(r8);
|
||||
__ push(Operand(rax, 0));
|
||||
__ subq(rax, Immediate(kPointerSize));
|
||||
__ cmpq(rcx, rbx);
|
||||
__ cmpq(r8, rbx);
|
||||
__ j(less, ©);
|
||||
__ jmp(&invoke);
|
||||
}
|
||||
@ -1402,23 +1418,23 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
|
||||
// Copy receiver and all actual arguments.
|
||||
const int offset = StandardFrameConstants::kCallerSPOffset;
|
||||
__ lea(rdi, Operand(rbp, rax, times_pointer_size, offset));
|
||||
__ Set(rcx, -1); // account for receiver
|
||||
__ Set(r8, -1); // account for receiver
|
||||
|
||||
Label copy;
|
||||
__ bind(©);
|
||||
__ incq(rcx);
|
||||
__ incq(r8);
|
||||
__ push(Operand(rdi, 0));
|
||||
__ subq(rdi, Immediate(kPointerSize));
|
||||
__ cmpq(rcx, rax);
|
||||
__ cmpq(r8, rax);
|
||||
__ j(less, ©);
|
||||
|
||||
// Fill remaining expected arguments with undefined values.
|
||||
Label fill;
|
||||
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
|
||||
__ bind(&fill);
|
||||
__ incq(rcx);
|
||||
__ incq(r8);
|
||||
__ push(kScratchRegister);
|
||||
__ cmpq(rcx, rbx);
|
||||
__ cmpq(r8, rbx);
|
||||
__ j(less, &fill);
|
||||
|
||||
// Restore function pointer.
|
||||
|
@ -2947,30 +2947,22 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
void CallFunctionStub::Generate(MacroAssembler* masm) {
Label slow;

// If the receiver might be a value (string, number or boolean) check for this
// and box it if it is.
if (ReceiverMightBeValue()) {
// The receiver might implicitly be the global object. This is
// indicated by passing the hole as the receiver to the call
// function stub.
if (ReceiverMightBeImplicit()) {
Label call;
// Get the receiver from the stack.
// +1 ~ return address
Label receiver_is_value, receiver_is_js_object;
__ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize));

// Check if receiver is a smi (which is a number value).
__ JumpIfSmi(rax, &receiver_is_value);

// Check if the receiver is a valid JS object.
__ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdi);
__ j(above_equal, &receiver_is_js_object);

// Call the runtime to box the value.
__ bind(&receiver_is_value);
__ EnterInternalFrame();
__ push(rax);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ LeaveInternalFrame();
__ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rax);

__ bind(&receiver_is_js_object);
// Call as function is indicated with the hole.
__ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ j(not_equal, &call, Label::kNear);
// Patch the receiver on the stack with the global receiver object.
__ movq(rbx, GlobalObjectOperand());
__ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
__ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rbx);
__ bind(&call);
}

// Get the function to call from the stack.
@ -2985,7 +2977,19 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {

// Fast-case: Just invoke the function.
ParameterCount actual(argc_);
__ InvokeFunction(rdi, actual, JUMP_FUNCTION);

if (ReceiverMightBeImplicit()) {
Label call_as_function;
__ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ j(equal, &call_as_function);
__ InvokeFunction(rdi, actual, JUMP_FUNCTION);
__ bind(&call_as_function);
}
__ InvokeFunction(rdi,
actual,
JUMP_FUNCTION,
NullCallWrapper(),
CALL_AS_FUNCTION);

// Slow-case: Non-function called.
__ bind(&slow);
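A compact model of the receiver handling this stub performs when the receiver might be implicit; the names are illustrative, the real code compares the stack slot against the hole root and patches it in place:

#include <cassert>

// Illustrative stand-ins only.
struct Value { bool is_the_hole; bool is_global_receiver; };
enum CallKind { CALL_AS_METHOD = 0, CALL_AS_FUNCTION };

struct ResolvedCall { Value receiver; CallKind kind; };

// The hole marks an implicit receiver: substitute the global receiver and
// invoke as a function; anything else is an ordinary method-style call.
ResolvedCall ResolveReceiver(Value pushed, Value global_receiver) {
  if (pushed.is_the_hole) {
    return ResolvedCall{global_receiver, CALL_AS_FUNCTION};
  }
  return ResolvedCall{pushed, CALL_AS_METHOD};
}

int main() {
  Value hole{true, false};
  Value global{false, true};
  Value object{false, false};
  assert(ResolveReceiver(hole, global).kind == CALL_AS_FUNCTION);
  assert(ResolveReceiver(hole, global).receiver.is_global_receiver);
  assert(ResolveReceiver(object, global).kind == CALL_AS_METHOD);
  return 0;
}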
|
||||
|
@ -130,6 +130,22 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
|
||||
__ int3();
|
||||
}
|
||||
#endif
|
||||
|
||||
// Strict mode functions need to replace the receiver with undefined
|
||||
// when called as functions (without an explicit receiver
|
||||
// object). rcx is zero for method calls and non-zero for function
|
||||
// calls.
|
||||
if (info->is_strict_mode()) {
|
||||
Label ok;
|
||||
__ testq(rcx, rcx);
|
||||
__ j(zero, &ok, Label::kNear);
|
||||
// +1 for return address.
|
||||
int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
|
||||
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
|
||||
__ movq(Operand(rsp, receiver_offset), kScratchRegister);
|
||||
__ bind(&ok);
|
||||
}
|
||||
|
||||
__ push(rbp); // Caller's frame pointer.
|
||||
__ movq(rbp, rsp);
|
||||
__ push(rsi); // Callee's context.
|
||||
@ -2004,7 +2020,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
EmitCallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
@ -2014,8 +2030,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,


void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Expression* key,
RelocInfo::Mode mode) {
Expression* key) {
// Load the key.
VisitForAccumulatorValue(key);

@ -2040,7 +2055,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
__ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
EmitCallIC(ic, mode, expr->id());
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@ -2151,7 +2166,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@ -2200,9 +2215,10 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ bind(&call);
}

// The receiver is either the global receiver or a JSObject found by
// LoadContextSlot.
EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
// The receiver is either the global receiver or an object found
// by LoadContextSlot. That object could be the hole if the
// receiver is implicitly the global object.
EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
} else if (fun->AsProperty() != NULL) {
// Call to an object property.
Property* prop = fun->AsProperty();
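
For the context-slot case above, the hole marker stands for a call where no receiver object was found. A sketch of the two outcomes, assuming the mjsunit assertEquals/assertThrows helpers:

function strict_get_y() { "use strict"; return this.y; }
with ({ y: 7, strict_get_y: strict_get_y }) {
  assertEquals(7, strict_get_y());            // found on the with object: that object is the receiver
}
assertThrows(function() { strict_get_y(); });  // found in an enclosing scope: the receiver is implicit, so this is undefined
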
@ -2245,7 +2261,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
EmitKeyedCallWithIC(expr, prop->key());
}
}
} else {
@ -3592,9 +3608,10 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
// Call the JS runtime function using a call IC.
__ Move(rcx, expr->name());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
EmitCallIC(ic, mode, expr->id());
// Restore context register.
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
} else {

@ -813,7 +813,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
// The generated code falls through if both probes miss.
static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
int argc,
Code::Kind kind) {
Code::Kind kind,
Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// rcx : function name
// rdx : receiver
@ -824,7 +825,7 @@ static void GenerateMonomorphicCacheProbe(MacroAssembler* masm,
Code::Flags flags = Code::ComputeFlags(kind,
NOT_IN_LOOP,
MONOMORPHIC,
Code::kNoExtraICState,
extra_ic_state,
NORMAL,
argc);
Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
@ -923,7 +924,10 @@ static void GenerateCallNormal(MacroAssembler* masm, int argc) {
}


static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
static void GenerateCallMiss(MacroAssembler* masm,
int argc,
IC::UtilityId id,
Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// rcx : function name
// rsp[0] : return address
@ -980,12 +984,21 @@ static void GenerateCallMiss(MacroAssembler* masm, int argc, IC::UtilityId id) {
}

// Invoke the function.
CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
? CALL_AS_FUNCTION
: CALL_AS_METHOD;
ParameterCount actual(argc);
__ InvokeFunction(rdi, actual, JUMP_FUNCTION);
__ InvokeFunction(rdi,
actual,
JUMP_FUNCTION,
NullCallWrapper(),
call_kind);
}


void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
void CallIC::GenerateMegamorphic(MacroAssembler* masm,
int argc,
Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// rcx : function name
// rsp[0] : return address
@ -998,8 +1011,8 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {

// Get the receiver of the function from the stack; 1 ~ return address.
__ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC);
GenerateMiss(masm, argc);
GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
GenerateMiss(masm, argc, extra_ic_state);
}


@ -1015,11 +1028,13 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// -----------------------------------

GenerateCallNormal(masm, argc);
GenerateMiss(masm, argc);
GenerateMiss(masm, argc, Code::kNoExtraICState);
}


void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
void CallIC::GenerateMiss(MacroAssembler* masm,
int argc,
Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// rcx : function name
// rsp[0] : return address
@ -1030,7 +1045,7 @@ void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
// rsp[(argc + 1) * 8] : argument 0 = receiver
// -----------------------------------

GenerateCallMiss(masm, argc, IC::kCallIC_Miss);
GenerateCallMiss(masm, argc, IC::kCallIC_Miss, extra_ic_state);
}


@ -1121,7 +1136,10 @@ void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {

__ bind(&lookup_monomorphic_cache);
__ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
GenerateMonomorphicCacheProbe(masm,
argc,
Code::KEYED_CALL_IC,
Code::kNoExtraICState);
// Fall through on miss.

__ bind(&slow_call);
@ -1174,7 +1192,7 @@ void KeyedCallIC::GenerateMiss(MacroAssembler* masm, int argc) {
// rsp[(argc + 1) * 8] : argument 0 = receiver
// -----------------------------------

GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss);
GenerateCallMiss(masm, argc, IC::kKeyedCallIC_Miss, Code::kNoExtraICState);
}
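
Keyed call sites always load an explicit receiver, which is why the keyed call IC above can pass Code::kNoExtraICState. A short sketch, assuming the mjsunit assertEquals helper:

function strict_this() { "use strict"; return this; }
var o = { f: strict_this };
var key = "f";
assertEquals(o, o[key]());  // keyed call: the receiver is o even for a strict callee
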
@ -140,6 +140,21 @@ bool LCodeGen::GeneratePrologue() {
}
#endif

// Strict mode functions need to replace the receiver with undefined
// when called as functions (without an explicit receiver
// object). rcx is zero for method calls and non-zero for function
// calls.
if (info_->is_strict_mode()) {
Label ok;
__ testq(rcx, rcx);
__ j(zero, &ok, Label::kNear);
// +1 for return address.
int receiver_offset = (scope()->num_parameters() + 1) * kPointerSize;
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
__ movq(Operand(rsp, receiver_offset), kScratchRegister);
__ bind(&ok);
}

__ push(rbp); // Caller's frame pointer.
__ movq(rbp, rsp);
__ push(rsi); // Callee's context.
@ -2688,7 +2703,8 @@ void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {

void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr) {
LInstruction* instr,
CallKind call_kind) {
// Change context if needed.
bool change_context =
(info()->closure()->context() != function->context()) ||
@ -2708,6 +2724,7 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
RecordPosition(pointers->position());

// Invoke function.
__ SetCallKind(rcx, call_kind);
if (*function == *info()->closure()) {
__ CallSelf();
} else {
@ -2725,7 +2742,10 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
ASSERT(ToRegister(instr->result()).is(rax));
__ Move(rdi, instr->function());
CallKnownFunction(instr->function(), instr->arity(), instr);
CallKnownFunction(instr->function(),
instr->arity(),
instr,
CALL_AS_METHOD);
}


@ -3076,10 +3096,11 @@ void LCodeGen::DoCallNamed(LCallNamed* instr) {
ASSERT(ToRegister(instr->result()).is(rax));

int arity = instr->arity();
Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
arity, NOT_IN_LOOP);
RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
__ Move(rcx, instr->name());
CallCode(ic, RelocInfo::CODE_TARGET, instr);
CallCode(ic, mode, instr);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}

@ -3088,7 +3109,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
ASSERT(ToRegister(instr->result()).is(rax));

int arity = instr->arity();
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_IMPLICIT);
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ Drop(1);
@ -3098,10 +3119,11 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(rax));
int arity = instr->arity();
Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
arity, NOT_IN_LOOP);
RelocInfo::Mode mode = RelocInfo::CODE_TARGET_CONTEXT;
Handle<Code> ic =
isolate()->stub_cache()->ComputeCallInitialize(arity, NOT_IN_LOOP, mode);
__ Move(rcx, instr->name());
CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
CallCode(ic, mode, instr);
__ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}

@ -3109,7 +3131,7 @@ void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
ASSERT(ToRegister(instr->result()).is(rax));
__ Move(rdi, instr->target());
CallKnownFunction(instr->target(), instr->arity(), instr);
CallKnownFunction(instr->target(), instr->arity(), instr, CALL_AS_FUNCTION);
}

@ -194,7 +194,8 @@ class LCodeGen BASE_EMBEDDED {
// to be in edi.
void CallKnownFunction(Handle<JSFunction> function,
int arity,
LInstruction* instr);
LInstruction* instr,
CallKind call_kind);

void LoadHeapObject(Register result, Handle<HeapObject> object);

@ -2174,7 +2174,8 @@ LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
HEnvironment* inner = outer->CopyForInlining(instr->closure(),
instr->function(),
HEnvironment::LITHIUM,
undefined);
undefined,
instr->call_kind());
current_block_->UpdateEnvironment(inner);
chunk_->AddInlinedClosure(instr->closure());
return NULL;

@ -2780,11 +2780,26 @@ void MacroAssembler::DebugBreak() {
#endif // ENABLE_DEBUGGER_SUPPORT


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
// This macro takes the dst register to make the code more readable
// at the call sites. However, the dst register has to be rcx to
// follow the calling convention which requires the call type to be
// in rcx.
ASSERT(dst.is(rcx));
if (call_kind == CALL_AS_FUNCTION) {
LoadSmiConstant(dst, Smi::FromInt(1));
} else {
LoadSmiConstant(dst, Smi::FromInt(0));
}
}

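
Conceptually, SetCallKind publishes one bit of information that the strict-mode prologue then consumes. A rough JavaScript model of that decision; the function and parameter names below are invented for illustration and are not part of V8:

// call_kind_is_function models a non-zero rcx (CALL_AS_FUNCTION);
// a zero rcx (CALL_AS_METHOD) leaves the pushed receiver untouched.
function fixup_receiver(receiver, callee_is_strict, call_kind_is_function) {
  if (callee_is_strict && call_kind_is_function) {
    return undefined;  // strict callee entered with an implicit receiver
  }
  return receiver;
}
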
void MacroAssembler::InvokeCode(Register code,
const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag,
const CallWrapper& call_wrapper) {
const CallWrapper& call_wrapper,
CallKind call_kind) {
Label done;
InvokePrologue(expected,
actual,
@ -2792,14 +2807,17 @@ void MacroAssembler::InvokeCode(Register code,
code,
&done,
flag,
Label::kNear,
call_wrapper,
Label::kNear);
call_kind);
if (flag == CALL_FUNCTION) {
call_wrapper.BeforeCall(CallSize(code));
SetCallKind(rcx, call_kind);
call(code);
call_wrapper.AfterCall();
} else {
ASSERT(flag == JUMP_FUNCTION);
SetCallKind(rcx, call_kind);
jmp(code);
}
bind(&done);
@ -2811,7 +2829,8 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
const ParameterCount& actual,
RelocInfo::Mode rmode,
InvokeFlag flag,
const CallWrapper& call_wrapper) {
const CallWrapper& call_wrapper,
CallKind call_kind) {
Label done;
Register dummy = rax;
InvokePrologue(expected,
@ -2820,14 +2839,17 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
dummy,
&done,
flag,
Label::kNear,
call_wrapper,
Label::kNear);
call_kind);
if (flag == CALL_FUNCTION) {
call_wrapper.BeforeCall(CallSize(code));
SetCallKind(rcx, call_kind);
Call(code, rmode);
call_wrapper.AfterCall();
} else {
ASSERT(flag == JUMP_FUNCTION);
SetCallKind(rcx, call_kind);
Jump(code, rmode);
}
bind(&done);
@ -2837,7 +2859,8 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
void MacroAssembler::InvokeFunction(Register function,
const ParameterCount& actual,
InvokeFlag flag,
const CallWrapper& call_wrapper) {
const CallWrapper& call_wrapper,
CallKind call_kind) {
ASSERT(function.is(rdi));
movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
@ -2848,7 +2871,7 @@ void MacroAssembler::InvokeFunction(Register function,
movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

ParameterCount expected(rbx);
InvokeCode(rdx, expected, actual, flag, call_wrapper);
InvokeCode(rdx, expected, actual, flag, call_wrapper, call_kind);
}


@ -2887,8 +2910,9 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,
Register code_register,
Label* done,
InvokeFlag flag,
Label::Distance near_jump,
const CallWrapper& call_wrapper,
Label::Distance near_jump) {
CallKind call_kind) {
bool definitely_matches = false;
Label invoke;
if (expected.is_immediate()) {
@ -2938,10 +2962,12 @@ void MacroAssembler::InvokePrologue(const ParameterCount& expected,

if (flag == CALL_FUNCTION) {
call_wrapper.BeforeCall(CallSize(adaptor));
SetCallKind(rcx, call_kind);
Call(adaptor, RelocInfo::CODE_TARGET);
call_wrapper.AfterCall();
jmp(done, near_jump);
} else {
SetCallKind(rcx, call_kind);
Jump(adaptor, RelocInfo::CODE_TARGET);
}
bind(&invoke);
@ -241,26 +241,34 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// JavaScript invokes

// Setup call kind marking in rcx. The method takes rcx as an
// explicit first parameter to make the code more readable at the
// call sites.
void SetCallKind(Register dst, CallKind kind);

// Invoke the JavaScript function code by either calling or jumping.
void InvokeCode(Register code,
const ParameterCount& expected,
const ParameterCount& actual,
InvokeFlag flag,
const CallWrapper& call_wrapper = NullCallWrapper());
const CallWrapper& call_wrapper = NullCallWrapper(),
CallKind call_kind = CALL_AS_METHOD);

void InvokeCode(Handle<Code> code,
const ParameterCount& expected,
const ParameterCount& actual,
RelocInfo::Mode rmode,
InvokeFlag flag,
const CallWrapper& call_wrapper = NullCallWrapper());
const CallWrapper& call_wrapper = NullCallWrapper(),
CallKind call_kind = CALL_AS_METHOD);

// Invoke the JavaScript function in the given register. Changes the
// current context to the context in the function before invoking.
void InvokeFunction(Register function,
const ParameterCount& actual,
InvokeFlag flag,
const CallWrapper& call_wrapper = NullCallWrapper());
const CallWrapper& call_wrapper = NullCallWrapper(),
CallKind call_kind = CALL_AS_METHOD);

void InvokeFunction(JSFunction* function,
const ParameterCount& actual,
@ -1149,8 +1157,9 @@ class MacroAssembler: public Assembler {
Register code_register,
Label* done,
InvokeFlag flag,
const CallWrapper& call_wrapper,
Label::Distance near_jump = Label::kFar);
Label::Distance near_jump = Label::kFar,
const CallWrapper& call_wrapper = NullCallWrapper(),
CallKind call_kind = CALL_AS_METHOD);

// Activation support.
void EnterFrame(StackFrame::Type type);

@ -1294,8 +1294,10 @@ void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,


MaybeObject* CallStubCompiler::GenerateMissBranch() {
MaybeObject* maybe_obj = isolate()->stub_cache()->ComputeCallMiss(
arguments().immediate(), kind_);
MaybeObject* maybe_obj =
isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
kind_,
extra_ic_state_);
Object* obj;
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
__ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
@ -1626,7 +1628,9 @@ MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
Label index_out_of_range;
Label* index_out_of_range_label = &index_out_of_range;

if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
if (kind_ == Code::CALL_IC &&
(CallICBase::StringStubState::decode(extra_ic_state_) ==
DEFAULT_STRING_STUB)) {
index_out_of_range_label = &miss;
}

@ -1708,7 +1712,9 @@ MaybeObject* CallStubCompiler::CompileStringCharAtCall(
Label index_out_of_range;
Label* index_out_of_range_label = &index_out_of_range;

if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
if (kind_ == Code::CALL_IC &&
(CallICBase::StringStubState::decode(extra_ic_state_) ==
DEFAULT_STRING_STUB)) {
index_out_of_range_label = &miss;
}
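
When the call IC uses the default string stub, an out-of-range index on these charCodeAt/charAt stubs is routed to the miss handler rather than the inline label; the language-level result is the same either way. For reference, assuming the mjsunit helpers:

assertTrue(isNaN("abc".charCodeAt(100)));  // out-of-range index yields NaN
assertEquals("", "abc".charAt(100));       // out-of-range index yields the empty string
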
@ -2211,11 +2217,13 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
}


MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
GlobalObject* holder,
JSGlobalPropertyCell* cell,
JSFunction* function,
String* name) {
MaybeObject* CallStubCompiler::CompileCallGlobal(
JSObject* object,
GlobalObject* holder,
JSGlobalPropertyCell* cell,
JSFunction* function,
String* name,
Code::ExtraICState extra_ic_state) {
// ----------- S t a t e -------------
// rcx : function name
// rsp[0] : return address
@ -2260,16 +2268,21 @@ MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
__ IncrementCounter(counters->call_global_inline(), 1);
ASSERT(function->is_compiled());
ParameterCount expected(function->shared()->formal_parameter_count());
CallKind call_kind = CallICBase::Contextual::decode(extra_ic_state)
? CALL_AS_FUNCTION
: CALL_AS_METHOD;
if (V8::UseCrankshaft()) {
// TODO(kasperl): For now, we always call indirectly through the
// code field in the function to allow recompilation to take effect
// without changing any of the call sites.
__ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
__ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION);
__ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION,
NullCallWrapper(), call_kind);
} else {
Handle<Code> code(function->code());
__ InvokeCode(code, expected, arguments(),
RelocInfo::CODE_TARGET, JUMP_FUNCTION);
RelocInfo::CODE_TARGET, JUMP_FUNCTION,
NullCallWrapper(), call_kind);
}
// Handle call cache miss.
__ bind(&miss);
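
Receivers passed explicitly through Function.prototype.call or apply are unaffected by this change; only implicit receivers are replaced. A small sketch of that distinction (standard semantics, mjsunit helpers assumed), complementing the new test below:

function strict_this() { "use strict"; return this; }
function sloppy_this() { return this; }
assertEquals(void 0, strict_this.call(undefined));  // strict callee: the receiver is not coerced
assertEquals(null, strict_this.call(null));
assertEquals(this, sloppy_this.call(undefined));    // sloppy callee: undefined is replaced by the global receiver
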
176
test/mjsunit/strict-mode-implicit-receiver.js
Normal file
@ -0,0 +1,176 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Flags: --allow-natives-syntax

var y = 3;

function get_y() { return this.y; }
function strict_get_y() { "use strict"; return this.y; }

// Test calls to strict mode function as methods.
for (var i = 0; i < 10; i++) assertEquals(3, strict_get_y.call(this));
var o = { y: 42 };
for (var i = 0; i < 10; i++) assertEquals(42, strict_get_y.call(o));

// Test calls to strict mode function with implicit receiver.
function g() {
var exception = false;
try { strict_get_y(); } catch(e) { exception = true; }
assertTrue(exception);
}
for (var i = 0; i < 3; i++) g();

// Test calls to local strict mode function with implicit receiver.
function local_function_test() {
function get_y() { return this.y; }
function strict_get_y() { "use strict"; return this.y; }
assertEquals(3, get_y());
assertEquals(3, get_y(23));
var exception = false;
try {
strict_get_y();
} catch(e) {
exception = true;
}
assertTrue(exception);
}

for (var i = 0; i < 10; i++) {
local_function_test();
}

// Test call to catch variable strict-mode function. Implicit
// receiver.
var exception = false;
try {
throw strict_get_y;
} catch(f) {
try {
f();
} catch(e) {
exception = true;
}
assertTrue(exception);
}


// Test calls to strict-mode function with the object from a with
// statement as the receiver.
with(this) {
assertEquals(3, strict_get_y());
assertEquals(3, strict_get_y(32));
}

var o = { y: 27 };
o.f = strict_get_y;
with(o) {
assertEquals(27, f());
assertEquals(27, f(23));
}


// Check calls to eval within a function with 'undefined' as receiver.
function implicit_receiver_eval() {
"use strict";
return eval("this");
}

assertEquals(void 0, implicit_receiver_eval());
assertEquals(void 0, implicit_receiver_eval(32));


// Strict mode function to get inlined.
function strict_return_receiver() {
"use strict";
return this;
}

// Inline with implicit receiver.
function g() {
return strict_return_receiver();
}

for (var i = 0; i < 5; i++) {
assertEquals(void 0, g());
assertEquals(void 0, g(42));
}
%OptimizeFunctionOnNextCall(g);
assertEquals(void 0, g(42));
assertEquals(void 0, g());

// Inline with explicit receiver.
function g2() {
var o = {};
o.f = strict_return_receiver;
return o.f();
}

for (var i = 0; i < 5; i++) {
assertTrue(typeof g2() == "object");
assertTrue(typeof g2(42) == "object");
}
%OptimizeFunctionOnNextCall(g2);
assertTrue(typeof g2() == "object");
assertTrue(typeof g2(42) == "object");

// Test calls of aliased eval.
function outer_eval_receiver() {
var eval = function() { return this; }
function inner_strict() {
"use strict";
assertEquals('object', typeof eval());
}
inner_strict();
}
outer_eval_receiver();

function outer_eval_conversion3(eval, expected) {
function inner_strict() {
"use strict";
var x = eval("this");
assertEquals(expected, typeof x);
}
inner_strict();
}

function strict_return_this() { "use strict"; return this; }
function return_this() { return this; }
function strict_eval(s) { "use strict"; return eval(s); }
function non_strict_eval(s) { return eval(s); }

outer_eval_conversion3(strict_return_this, 'undefined');
outer_eval_conversion3(return_this, 'object');
outer_eval_conversion3(strict_eval, 'undefined');
outer_eval_conversion3(non_strict_eval, 'object');

// TODO(ager): I'm not sure this is in accordance with the spec. At
// the moment, any call to eval where eval is not bound in the global
// context is treated as an indirect call to eval which means that the
// global context is used and the global object is passed as the
// receiver.
outer_eval_conversion3(eval, 'object');
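
For comparison with the TODO above: per the spec, a direct eval inherits the strict caller's 'this', while an indirect eval runs in the global scope and sees the global object. A hedged sketch, assuming the mjsunit assertEquals helper:

function direct_vs_indirect() {
  "use strict";
  var indirect_eval = eval;
  assertEquals('undefined', typeof eval("this"));        // direct eval: the caller's this, undefined here
  assertEquals('object', typeof indirect_eval("this"));  // indirect eval: the global object
}
direct_vs_indirect();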