diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index dc315470fb..c2dfde3cc4 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -269,6 +269,9 @@ void LCodeGen::GenerateOsrPrologue() {
 
 
 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+  if (instr->IsCall()) {
+    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  }
   if (!instr->IsLazyBailout() && !instr->IsGap()) {
     safepoints_.BumpLastLazySafepointIndex();
   }
@@ -5533,7 +5536,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
 
 
 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
-  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  last_lazy_deopt_pc_ = masm()->pc_offset();
   ASSERT(instr->HasEnvironment());
   LEnvironment* env = instr->environment();
   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
@@ -5607,10 +5610,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     CallCode(isolate()->builtins()->StackCheck(),
              RelocInfo::CODE_TARGET,
              instr);
-    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
     __ bind(&done);
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   } else {
     ASSERT(instr->hydrogen()->is_backwards_branch());
     // Perform stack overflow check if this goto needs it before jumping.
diff --git a/src/arm64/lithium-codegen-arm64.cc b/src/arm64/lithium-codegen-arm64.cc
index c1ac121a34..ddf1677573 100644
--- a/src/arm64/lithium-codegen-arm64.cc
+++ b/src/arm64/lithium-codegen-arm64.cc
@@ -770,6 +770,9 @@ void LCodeGen::GenerateOsrPrologue() {
 
 
 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+  if (instr->IsCall()) {
+    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  }
   if (!instr->IsLazyBailout() && !instr->IsGap()) {
     safepoints_.BumpLastLazySafepointIndex();
   }
@@ -2549,7 +2552,7 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) {
 
 
 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
-  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  last_lazy_deopt_pc_ = masm()->pc_offset();
   ASSERT(instr->HasEnvironment());
   LEnvironment* env = instr->environment();
   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
@@ -4942,11 +4945,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
            instr);
-    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
-    __ Bind(&done);
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   } else {
     ASSERT(instr->hydrogen()->is_backwards_branch());
     // Perform stack overflow check if this goto needs it before jumping.
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index cd283cf608..7c0df1847a 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -390,6 +390,9 @@ void LCodeGen::GenerateOsrPrologue() {
 
 
 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+  if (instr->IsCall()) {
+    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  }
   if (!instr->IsLazyBailout() && !instr->IsGap()) {
     safepoints_.BumpLastLazySafepointIndex();
   }
@@ -6151,7 +6154,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
 
 
 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
-  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  last_lazy_deopt_pc_ = masm()->pc_offset();
   ASSERT(instr->HasEnvironment());
   LEnvironment* env = instr->environment();
   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
@@ -6227,10 +6230,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     CallCode(isolate()->builtins()->StackCheck(),
              RelocInfo::CODE_TARGET,
              instr);
-    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
     __ bind(&done);
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   } else {
     ASSERT(instr->hydrogen()->is_backwards_branch());
     // Perform stack overflow check if this goto needs it before jumping.
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index 5e47665c0c..b1919ba54e 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -260,6 +260,9 @@ void LCodeGen::GenerateOsrPrologue() {
 
 
 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+  if (instr->IsCall()) {
+    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  }
   if (!instr->IsLazyBailout() && !instr->IsGap()) {
     safepoints_.BumpLastLazySafepointIndex();
   }
@@ -5614,7 +5617,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
 
 
 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
-  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  last_lazy_deopt_pc_ = masm()->pc_offset();
   ASSERT(instr->HasEnvironment());
   LEnvironment* env = instr->environment();
   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
@@ -5686,10 +5689,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     CallCode(isolate()->builtins()->StackCheck(),
              RelocInfo::CODE_TARGET,
              instr);
-    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
     __ bind(&done);
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   } else {
     ASSERT(instr->hydrogen()->is_backwards_branch());
     // Perform stack overflow check if this goto needs it before jumping.
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index bd8a6cad15..5849cf4ef9 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -274,6 +274,9 @@ void LCodeGen::GenerateOsrPrologue() {
 
 
 void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
+  if (instr->IsCall()) {
+    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  }
   if (!instr->IsLazyBailout() && !instr->IsGap()) {
     safepoints_.BumpLastLazySafepointIndex();
   }
@@ -5462,7 +5465,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
 
 
 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
-  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
+  last_lazy_deopt_pc_ = masm()->pc_offset();
   ASSERT(instr->HasEnvironment());
   LEnvironment* env = instr->environment();
   RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
@@ -5534,10 +5537,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     CallCode(isolate()->builtins()->StackCheck(),
              RelocInfo::CODE_TARGET,
              instr);
-    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
     __ bind(&done);
-    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   } else {
     ASSERT(instr->hydrogen()->is_backwards_branch());
     // Perform stack overflow check if this goto needs it before jumping.
diff --git a/test/mjsunit/regress/regress-354433.js b/test/mjsunit/regress/regress-354433.js
new file mode 100644
index 0000000000..80ea286230
--- /dev/null
+++ b/test/mjsunit/regress/regress-354433.js
@@ -0,0 +1,54 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+var __v_0 = {};
+var __v_5 = {};
+function __f_2() {
+  this.__defineGetter__('str', function() { return __f_2(this); });
+  this.str = "1";
+  this.toString = function() {
+    return this.str;
+  };
+};
+
+__v_5 = new __f_2();
+__v_0 = new __f_2();
+
+function __f_5(fun,a,b) {
+  __v_5.str = a;
+  __v_0.str = b;
+  fun(__v_5, __v_0);
+}
+
+function __f_8(a,b) { return a%b };
+
+__f_5(__f_8, 1 << 30, 1);
+__f_5(__f_8, 1, 1 << 30);
+%OptimizeFunctionOnNextCall(__f_8);
+__f_5(__f_8, 1, 1 << 30);