Ensure that lazy deopt sequence does not override calls.

BUG=354433
LOG=N
TEST=mjsunit/regress/regress-354433.js
R=jkummerow@chromium.org

Review URL: https://codereview.chromium.org/198463006

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20155 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Commit f20a9473f3 (parent 309bf937a0)
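Background for the change: roughly speaking, lazy deoptimization patches a call to the deoptimizer over Deoptimizer::patch_size() bytes of already generated code at the last recorded lazy-deopt point; if a call instruction sits inside that window, the patch can overwrite it (the "override calls" in the commit title). The sketch below illustrates only the padding idea; Assembler, CodegenSketch, and the one-byte nop are simplified placeholders, not the actual V8 classes.

// Illustrative sketch (simplified names, not the real V8 sources): pad with
// nops so that at least patch_size bytes separate the last lazy-deopt patch
// point from the code emitted next.
struct Assembler {                // stand-in for the platform macro assembler
  int pc_offset() const { return pc; }
  void nop() { ++pc; }            // assume a one-byte nop for simplicity
  int pc = 0;
};

class CodegenSketch {
 public:
  explicit CodegenSketch(int patch_size) : patch_size_(patch_size) {}

  // If fewer than patch_size_ bytes were emitted since the last potential
  // lazy-deopt patch point, emit nops so patching cannot clobber what follows.
  void EnsureSpaceForLazyDeopt(Assembler* masm) {
    int current_pc = masm->pc_offset();
    if (current_pc < last_lazy_deopt_pc_ + patch_size_) {
      int padding = last_lazy_deopt_pc_ + patch_size_ - current_pc;
      while (padding-- > 0) masm->nop();
    }
    last_lazy_deopt_pc_ = masm->pc_offset();
  }

 private:
  const int patch_size_;
  int last_lazy_deopt_pc_ = 0;
};

In the per-architecture hunks below, GenerateBodyInstructionPre performs this check for every LInstruction that is a call.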
@@ -269,6 +269,9 @@ void LCodeGen::GenerateOsrPrologue() {


void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
  if (instr->IsCall()) {
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  }
  if (!instr->IsLazyBailout() && !instr->IsGap()) {
    safepoints_.BumpLastLazySafepointIndex();
  }

@@ -5533,7 +5536,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  last_lazy_deopt_pc_ = masm()->pc_offset();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);

@@ -5607,10 +5610,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
    CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
             instr);
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.

@@ -770,6 +770,9 @@ void LCodeGen::GenerateOsrPrologue() {


void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
  if (instr->IsCall()) {
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  }
  if (!instr->IsLazyBailout() && !instr->IsGap()) {
    safepoints_.BumpLastLazySafepointIndex();
  }

@@ -2549,7 +2552,7 @@ void LCodeGen::DoCheckValue(LCheckValue* instr) {


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  last_lazy_deopt_pc_ = masm()->pc_offset();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);

@@ -4942,11 +4945,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
    CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
             instr);
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());

    __ Bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.

@@ -390,6 +390,9 @@ void LCodeGen::GenerateOsrPrologue() {


void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
  if (instr->IsCall()) {
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  }
  if (!instr->IsLazyBailout() && !instr->IsGap()) {
    safepoints_.BumpLastLazySafepointIndex();
  }

@@ -6151,7 +6154,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  last_lazy_deopt_pc_ = masm()->pc_offset();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);

@@ -6227,10 +6230,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
    CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
             instr);
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.

@@ -260,6 +260,9 @@ void LCodeGen::GenerateOsrPrologue() {


void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
  if (instr->IsCall()) {
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  }
  if (!instr->IsLazyBailout() && !instr->IsGap()) {
    safepoints_.BumpLastLazySafepointIndex();
  }

@@ -5614,7 +5617,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  last_lazy_deopt_pc_ = masm()->pc_offset();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);

@@ -5686,10 +5689,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
    CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
             instr);
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.

@@ -274,6 +274,9 @@ void LCodeGen::GenerateOsrPrologue() {


void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
  if (instr->IsCall()) {
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  }
  if (!instr->IsLazyBailout() && !instr->IsGap()) {
    safepoints_.BumpLastLazySafepointIndex();
  }

@@ -5462,7 +5465,7 @@ void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {


void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  last_lazy_deopt_pc_ = masm()->pc_offset();
  ASSERT(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);

@@ -5534,10 +5537,7 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
    CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
             instr);
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
    __ bind(&done);
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
  } else {
    ASSERT(instr->hydrogen()->is_backwards_branch());
    // Perform stack overflow check if this goto needs it before jumping.

test/mjsunit/regress/regress-354433.js (new file, 54 lines)

@@ -0,0 +1,54 @@
// Copyright 2014 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Flags: --allow-natives-syntax

var __v_0 = {};
var __v_5 = {};
function __f_2() {
  this.__defineGetter__('str', function() { return __f_2(this); });
  this.str = "1";
  this.toString = function() {
    return this.str;
  };
};

__v_5 = new __f_2();
__v_0 = new __f_2();

function __f_5(fun,a,b) {
  __v_5.str = a;
  __v_0.str = b;
  fun(__v_5, __v_0);
}

function __f_8(a,b) { return a%b };

__f_5(__f_8, 1 << 30, 1);
__f_5(__f_8, 1, 1 << 30);
%OptimizeFunctionOnNextCall(__f_8);
__f_5(__f_8, 1, 1 << 30);