diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index f80998a7f5..1f92ed27e0 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -91,11 +91,15 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
                         &gc,
                         TAG_OBJECT);
 
+  int map_index = strict_mode_ == kStrictMode
+      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+      : Context::FUNCTION_MAP_INDEX;
+
   // Compute the function map in the current global context and set that
   // as the map of the allocated object.
   __ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
-  __ ldr(r2, MemOperand(r2, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+  __ ldr(r2, MemOperand(r2, Context::SlotOffset(map_index)));
   __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
 
   // Initialize the rest of the function. We don't have to update the
diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index e6334abf71..f14662cf0e 100644
--- a/src/arm/codegen-arm.cc
+++ b/src/arm/codegen-arm.cc
@@ -3116,9 +3116,9 @@ void CodeGenerator::InstantiateFunction(
   // space for nested functions that don't need literals cloning.
   if (!pretenure &&
       scope()->is_function_scope() &&
-      function_info->num_literals() == 0 &&
-      !function_info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      function_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        function_info->strict_mode() ? kStrictMode : kNonStrictMode);
     frame_->EmitPush(Operand(function_info));
     frame_->SpillAll();
     frame_->CallStub(&stub, 1);
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index ac97bdc69d..0d79a10f77 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -1086,9 +1086,8 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !FLAG_prepare_always_opt &&
       !pretenure &&
       scope()->is_function_scope() &&
-      info->num_literals() == 0 &&
-      !info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      info->num_literals() == 0) {
+    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ mov(r0, Operand(info));
     __ push(r0);
     __ CallStub(&stub);
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 2b1fe352a2..4bfa048cf0 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -3726,9 +3726,9 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
-  if (!pretenure && shared_info->num_literals() == 0 &&
-      !shared_info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+  if (!pretenure && shared_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ mov(r1, Operand(shared_info));
     __ push(r1);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 6af45d54fa..d094886450 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -277,12 +277,17 @@ class ToNumberStub: public CodeStub {
 
 class FastNewClosureStub : public CodeStub {
  public:
+  explicit FastNewClosureStub(StrictModeFlag strict_mode)
+      : strict_mode_(strict_mode) { }
+
   void Generate(MacroAssembler* masm);
 
  private:
   const char* GetName() { return "FastNewClosureStub"; }
   Major MajorKey() { return FastNewClosure; }
-  int MinorKey() { return 0; }
+  int MinorKey() { return strict_mode_; }
+
+  StrictModeFlag strict_mode_;
 };
 
 
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 01ea60c3a2..4fb5c58ba0 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -69,11 +69,15 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   // Get the function info from the stack.
   __ mov(edx, Operand(esp, 1 * kPointerSize));
 
+  int map_index = strict_mode_ == kStrictMode
+      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+      : Context::FUNCTION_MAP_INDEX;
+
   // Compute the function map in the current global context and set that
   // as the map of the allocated object.
   __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
-  __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+  __ mov(ecx, Operand(ecx, Context::SlotOffset(map_index)));
   __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
 
   // Initialize the rest of the function. We don't have to update the
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 03ca2750bd..517e2de59b 100644
--- a/src/ia32/codegen-ia32.cc
+++ b/src/ia32/codegen-ia32.cc
@@ -4918,9 +4918,9 @@ Result CodeGenerator::InstantiateFunction(
   // space for nested functions that don't need literals cloning.
   if (!pretenure &&
       scope()->is_function_scope() &&
-      function_info->num_literals() == 0 &&
-      !function_info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      function_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        function_info->strict_mode() ? kStrictMode : kNonStrictMode);
     frame()->EmitPush(Immediate(function_info));
     return frame()->CallStub(&stub, 1);
   } else {
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 7371555ef4..94d3cf555b 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -1019,9 +1019,8 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !FLAG_prepare_always_opt &&
       !pretenure &&
       scope()->is_function_scope() &&
-      info->num_literals() == 0 &&
-      !info->strict_mode()) {  // Strict mode functions go through slow path.
-    FastNewClosureStub stub;
+      info->num_literals() == 0) {
+    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ push(Immediate(info));
     __ CallStub(&stub);
   } else {
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 66ca5320d1..5df44ca9b7 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -3739,9 +3739,9 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
-  if (!pretenure && shared_info->num_literals() == 0 &&
-      !shared_info->strict_mode()) {
-    FastNewClosureStub stub;
+  if (!pretenure && shared_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ push(Immediate(shared_info));
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
   } else {
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index ee5237e6c8..c390995601 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -68,11 +68,15 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
   // Get the function info from the stack.
   __ movq(rdx, Operand(rsp, 1 * kPointerSize));
 
+  int map_index = strict_mode_ == kStrictMode
+      ? Context::STRICT_MODE_FUNCTION_MAP_INDEX
+      : Context::FUNCTION_MAP_INDEX;
+
   // Compute the function map in the current global context and set that
   // as the map of the allocated object.
   __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
   __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
-  __ movq(rcx, Operand(rcx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+  __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index)));
   __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx);
 
   // Initialize the rest of the function. We don't have to update the
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index 10aa4519f1..aab8d7f4a4 100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -4262,9 +4262,9 @@ void CodeGenerator::InstantiateFunction(
   // space for nested functions that don't need literals cloning.
   if (!pretenure &&
       scope()->is_function_scope() &&
-      function_info->num_literals() == 0 &&
-      !function_info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      function_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        function_info->strict_mode() ? kStrictMode : kNonStrictMode);
     frame_->Push(function_info);
     Result answer = frame_->CallStub(&stub, 1);
     frame_->Push(&answer);
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index cd3a999073..646ced9259 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -1041,9 +1041,8 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !FLAG_prepare_always_opt &&
       !pretenure &&
       scope()->is_function_scope() &&
-      info->num_literals() == 0 &&
-      !info->strict_mode()) {  // Strict mode functions use slow path.
-    FastNewClosureStub stub;
+      info->num_literals() == 0) {
+    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ Push(info);
     __ CallStub(&stub);
   } else {
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index b15aef658e..aaad7c5fb2 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -3539,9 +3539,9 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   Handle<SharedFunctionInfo> shared_info = instr->shared_info();
   bool pretenure = instr->hydrogen()->pretenure();
-  if (!pretenure && shared_info->num_literals() == 0 &&
-      !shared_info->strict_mode()) {
-    FastNewClosureStub stub;
+  if (!pretenure && shared_info->num_literals() == 0) {
+    FastNewClosureStub stub(
+        shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
     __ Push(shared_info);
     CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
   } else {
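
The subtle correctness point in the src/code-stubs.h hunk is that MinorKey() now returns strict_mode_ instead of 0: V8 caches compiled stubs keyed by their (MajorKey, MinorKey) pair, and the strict and non-strict variants of FastNewClosureStub bake different function-map indices into their generated code, so they must hash to different keys. Below is a minimal standalone sketch of that keying scheme, not V8 source: FastNewClosureStubModel and the std::map used as the cache are hypothetical stand-ins for V8's real CodeStub machinery, with only the enum values and key logic taken from the patch above.

    #include <cstdio>
    #include <map>
    #include <string>
    #include <utility>

    // Stand-in for V8's StrictModeFlag; the enumerator names mirror the patch.
    enum StrictModeFlag { kNonStrictMode = 0, kStrictMode = 1 };

    // Hypothetical model of a code stub, identified by (MajorKey, MinorKey).
    // Because MinorKey() folds in strict_mode_, the two modes can never
    // share one cached code object.
    class FastNewClosureStubModel {
     public:
      explicit FastNewClosureStubModel(StrictModeFlag strict_mode)
          : strict_mode_(strict_mode) { }
      int MajorKey() const { return 1; }  // stand-in for CodeStub::FastNewClosure
      int MinorKey() const { return strict_mode_; }

     private:
      StrictModeFlag strict_mode_;
    };

    int main() {
      // Toy stub cache; the real one lives on the V8 heap, but the keying
      // idea is the same.
      std::map<std::pair<int, int>, std::string> cache;

      FastNewClosureStubModel sloppy(kNonStrictMode);
      FastNewClosureStubModel strict(kStrictMode);
      cache[std::make_pair(sloppy.MajorKey(), sloppy.MinorKey())] =
          "code loading Context::FUNCTION_MAP_INDEX";
      cache[std::make_pair(strict.MajorKey(), strict.MinorKey())] =
          "code loading Context::STRICT_MODE_FUNCTION_MAP_INDEX";

      // With the old MinorKey() { return 0; } both insertions would collide
      // on one key and the first-compiled variant would be reused for both
      // modes, installing the wrong function map on strict-mode closures.
      std::printf("%zu distinct cached stubs\n", cache.size());  // prints 2
      return 0;
    }

This is also why the per-architecture Generate() bodies can read strict_mode_ at stub-compilation time and select map_index with a plain C++ conditional: the choice is frozen into each cached variant, and the codegen call sites simply pick the right variant by constructing the stub with the function's strict-mode flag instead of falling back to the slow path.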