Fix incorrect patching for OSR.
If OSR happens before regular recompilation, the unoptimized function code on the stack may not have deoptimization support. In that case, graph creation compiles the unoptimized code again to include that support, and the result is installed as the shared code. When we patch code for OSR, we therefore want the function code on the stack, not the shared code.

R=titzer@chromium.org
TEST=block-conflicts.js with --always-osr --concurrent-osr

Review URL: https://codereview.chromium.org/99013003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18261 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Parent: 69314e1f47
Commit: 96b58ba4a3
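For orientation before the diff, here is a minimal, self-contained C++ sketch of the situation this commit fixes. The types (Code, SharedFunctionInfo, StackFrame) are hypothetical stand-ins, not the real V8 classes: a recompile that adds deoptimization support replaces the code referenced by the shared function info, while the frame on the stack keeps executing the original unoptimized copy, so OSR patching has to target the code object reached through the frame.

#include <cassert>
#include <memory>

// Hypothetical stand-ins for V8's Code, SharedFunctionInfo and stack frame.
struct Code {
  bool has_deopt_support = false;
  bool osr_patched = false;
};

struct SharedFunctionInfo {
  std::shared_ptr<Code> code;  // may be swapped out by a recompile
};

struct StackFrame {
  std::shared_ptr<Code> running_code;  // the copy actually executing
};

int main() {
  auto on_stack = std::make_shared<Code>();  // no deopt support yet
  SharedFunctionInfo shared{on_stack};
  StackFrame frame{on_stack};

  // Graph creation recompiles the unoptimized code to add deopt support and
  // installs the new copy as the shared code; the frame keeps running the
  // old copy.
  auto recompiled = std::make_shared<Code>();
  recompiled->has_deopt_support = true;
  shared.code = recompiled;

  // Old behaviour: patch whatever the shared info points to -> wrong object.
  shared.code->osr_patched = true;
  assert(!frame.running_code->osr_patched);

  // Fixed behaviour: patch the code object found via the stack frame.
  frame.running_code->osr_patched = true;
  assert(frame.running_code->osr_patched);
  return 0;
}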
@@ -938,18 +938,9 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    // Lookup and calculate pc offset.
-    __ ldr(r1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
-    __ ldr(r2, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
-    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
-    __ sub(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag));
-    __ sub(r1, r1, r2);
-    __ SmiTag(r1);
-
-    // Pass both function and pc offset as arguments.
+    // Pass function as argument.
     __ push(r0);
-    __ push(r1);
-    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
+    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
   }
 
   // If the code object is null, just return to the unoptimized code.
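Note on the block removed above (and from the ia32, MIPS and x64 builtins below): it computed the pc offset inline as the caller's pc minus the start of the unoptimized code's instructions. A rough sketch of that arithmetic follows, with placeholder constants standing in for V8's kHeapObjectTag and Code::kHeaderSize; the runtime now derives the same value from frame->pc() and unoptimized->instruction_start() after resolving the code object actually on the stack.

#include <cstdint>

// Placeholder values; V8 defines kHeapObjectTag and Code::kHeaderSize itself.
constexpr intptr_t kHeapObjectTag = 1;
constexpr intptr_t kCodeHeaderSize = 128;

// The caller pc points into the unoptimized code object. Instructions start
// kCodeHeaderSize bytes past the (tagged) code pointer, so the offset is
// caller_pc - (tagged_code - kHeapObjectTag + kCodeHeaderSize), matching the
// two subtractions the removed assembly performed before smi-tagging.
uint32_t PcOffset(intptr_t caller_pc, intptr_t tagged_code_pointer) {
  intptr_t instruction_start =
      tagged_code_pointer - kHeapObjectTag + kCodeHeaderSize;
  return static_cast<uint32_t>(caller_pc - instruction_start);
}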
@@ -1056,6 +1056,7 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
 
 
 bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
+                                   Handle<Code> unoptimized,
                                    uint32_t osr_pc_offset) {
   bool compiling_for_osr = (osr_pc_offset != 0);
 
@@ -1078,11 +1079,10 @@ bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
   Handle<SharedFunctionInfo> shared = info->shared_info();
 
   if (compiling_for_osr) {
-    BailoutId osr_ast_id =
-        shared->code()->TranslatePcOffsetToAstId(osr_pc_offset);
+    BailoutId osr_ast_id = unoptimized->TranslatePcOffsetToAstId(osr_pc_offset);
     ASSERT(!osr_ast_id.IsNone());
     info->SetOptimizing(osr_ast_id);
-    info->set_osr_pc_offset(osr_pc_offset);
+    info->SetOsrInfo(unoptimized, osr_pc_offset);
 
     if (FLAG_trace_osr) {
       PrintF("[COSR - attempt to queue ");
@@ -1117,7 +1117,7 @@ bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
   RecompileJob::Status status = job->CreateGraph();
   if (status == RecompileJob::SUCCEEDED) {
     info.Detach();
-    shared->code()->set_profiler_ticks(0);
+    unoptimized->set_profiler_ticks(0);
     isolate->optimizing_compiler_thread()->QueueForOptimization(job);
     ASSERT(!isolate->has_pending_exception());
     return true;
@@ -85,6 +85,7 @@ class CompilationInfo {
   Handle<Context> context() const { return context_; }
   BailoutId osr_ast_id() const { return osr_ast_id_; }
   uint32_t osr_pc_offset() const { return osr_pc_offset_; }
+  Handle<Code> osr_patched_code() const { return osr_patched_code_; }
   int opt_count() const { return opt_count_; }
   int num_parameters() const;
   int num_heap_slots() const;
@@ -265,6 +266,7 @@ class CompilationInfo {
     SaveHandle(&shared_info_);
     SaveHandle(&context_);
     SaveHandle(&script_);
+    SaveHandle(&osr_patched_code_);
   }
 
   BailoutReason bailout_reason() const { return bailout_reason_; }
@@ -311,7 +313,8 @@ class CompilationInfo {
     return abort_due_to_dependency_;
   }
 
-  void set_osr_pc_offset(uint32_t pc_offset) {
+  void SetOsrInfo(Handle<Code> code, uint32_t pc_offset) {
+    osr_patched_code_ = code;
     osr_pc_offset_ = pc_offset;
   }
 
@@ -416,6 +419,10 @@ class CompilationInfo {
   // The pc_offset corresponding to osr_ast_id_ in unoptimized code.
   // We can look this up in the back edge table, but cache it for quick access.
   uint32_t osr_pc_offset_;
+  // The unoptimized code we patched for OSR may not be the shared code
+  // afterwards, since we may need to compile it again to include deoptimization
+  // data. Keep track which code we patched.
+  Handle<Code> osr_patched_code_;
 
   // Flag whether compilation needs to be aborted due to dependency change.
   bool abort_due_to_dependency_;
@@ -626,6 +633,7 @@ class Compiler : public AllStatic {
   static bool CompileLazy(CompilationInfo* info);
 
   static bool RecompileConcurrent(Handle<JSFunction> function,
+                                  Handle<Code> unoptimized,
                                   uint32_t osr_pc_offset = 0);
 
   // Compile a shared function info object (the function is possibly lazily
@@ -1697,9 +1697,11 @@ void BackEdgeTable::Revert(Isolate* isolate,
 void BackEdgeTable::AddStackCheck(CompilationInfo* info) {
   DisallowHeapAllocation no_gc;
   Isolate* isolate = info->isolate();
-  Code* code = info->shared_info()->code();
+  Code* code = *info->osr_patched_code();
   Address pc = code->instruction_start() + info->osr_pc_offset();
-  ASSERT_EQ(ON_STACK_REPLACEMENT, GetBackEdgeState(isolate, code, pc));
+  ASSERT_EQ(info->osr_ast_id().ToInt(),
+            code->TranslatePcOffsetToAstId(info->osr_pc_offset()).ToInt());
+  ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate, code, pc));
   Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
   PatchAt(code, pc, OSR_AFTER_STACK_CHECK, patch);
 }
@@ -1708,8 +1710,10 @@ void BackEdgeTable::AddStackCheck(CompilationInfo* info) {
 void BackEdgeTable::RemoveStackCheck(CompilationInfo* info) {
   DisallowHeapAllocation no_gc;
   Isolate* isolate = info->isolate();
-  Code* code = info->shared_info()->code();
+  Code* code = *info->osr_patched_code();
   Address pc = code->instruction_start() + info->osr_pc_offset();
+  ASSERT_EQ(info->osr_ast_id().ToInt(),
+            code->TranslatePcOffsetToAstId(info->osr_pc_offset()).ToInt());
   if (GetBackEdgeState(isolate, code, pc) == OSR_AFTER_STACK_CHECK) {
     Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
     PatchAt(code, pc, ON_STACK_REPLACEMENT, patch);
@@ -1316,17 +1316,9 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    // Lookup and calculate pc offset.
-    __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerPCOffset));
-    __ mov(ebx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
-    __ sub(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
-    __ sub(edx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
-    __ SmiTag(edx);
-
-    // Pass both function and pc offset as arguments.
+    // Pass function as argument.
     __ push(eax);
-    __ push(edx);
-    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
+    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
   }
 
   Label skip;
@@ -969,18 +969,9 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    // Lookup and calculate pc offset.
-    __ lw(a1, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
-    __ lw(a2, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
-    __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
-    __ Subu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
-    __ Subu(a1, a1, a2);
-    __ SmiTag(a1);
-
-    // Pass both function and pc offset as arguments.
+    // Pass function as argument.
     __ push(a0);
-    __ push(a1);
-    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
+    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
   }
 
   // If the code object is null, just return to the unoptimized code.
@@ -8377,10 +8377,11 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ConcurrentRecompile) {
     function->ReplaceCode(function->shared()->code());
     return isolate->heap()->undefined_value();
   }
-  function->shared()->code()->set_profiler_ticks(0);
+  Handle<Code> shared_code(function->shared()->code());
+  shared_code->set_profiler_ticks(0);
   ASSERT(isolate->concurrent_recompilation_enabled());
-  if (!Compiler::RecompileConcurrent(function)) {
-    function->ReplaceCode(function->shared()->code());
+  if (!Compiler::RecompileConcurrent(function, shared_code)) {
+    function->ReplaceCode(*shared_code);
   }
   return isolate->heap()->undefined_value();
 }
@@ -8630,20 +8631,27 @@ static bool IsSuitableForOnStackReplacement(Isolate* isolate,
 
 RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
   HandleScope scope(isolate);
-  ASSERT(args.length() == 2);
+  ASSERT(args.length() == 1);
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
-  CONVERT_NUMBER_CHECKED(uint32_t, pc_offset, Uint32, args[1]);
   Handle<Code> unoptimized(function->shared()->code(), isolate);
 
-#ifdef DEBUG
+  // Passing the PC in the javascript frame from the caller directly is
+  // not GC safe, so we walk the stack to get it.
   JavaScriptFrameIterator it(isolate);
   JavaScriptFrame* frame = it.frame();
+  if (!unoptimized->contains(frame->pc())) {
+    // Code on the stack may not be the code object referenced by the shared
+    // function info. It may have been replaced to include deoptimization data.
+    unoptimized = Handle<Code>(frame->LookupCode());
+  }
+
+  uint32_t pc_offset = static_cast<uint32_t>(frame->pc() -
+                                             unoptimized->instruction_start());
+
+#ifdef DEBUG
   ASSERT_EQ(frame->function(), *function);
   ASSERT_EQ(frame->LookupCode(), *unoptimized);
   ASSERT(unoptimized->contains(frame->pc()));
-
-  ASSERT(pc_offset ==
-      static_cast<uint32_t>(frame->pc() - unoptimized->instruction_start()));
 #endif  // DEBUG
 
   // We're not prepared to handle a function with arguments object.
@@ -8669,12 +8677,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
 
   if (job == NULL) {
     if (IsSuitableForOnStackReplacement(isolate, function, unoptimized) &&
-        Compiler::RecompileConcurrent(function, pc_offset)) {
+        Compiler::RecompileConcurrent(function, unoptimized, pc_offset)) {
       if (function->IsMarkedForLazyRecompilation() ||
          function->IsMarkedForConcurrentRecompilation()) {
        // Prevent regular recompilation if we queue this for OSR.
        // TODO(yangguo): remove this as soon as OSR becomes one-shot.
-        function->ReplaceCode(*unoptimized);
+        function->ReplaceCode(function->shared()->code());
       }
       return NULL;
     }
@@ -101,7 +101,7 @@ namespace internal {
   F(GetOptimizationStatus, -1, 1) \
   F(GetOptimizationCount, 1, 1) \
   F(UnblockConcurrentRecompilation, 0, 1) \
-  F(CompileForOnStackReplacement, 2, 1) \
+  F(CompileForOnStackReplacement, 1, 1) \
   F(SetAllocationTimeout, 2, 1) \
   F(AllocateInNewSpace, 1, 1) \
   F(AllocateInTargetSpace, 2, 1) \
@@ -1393,17 +1393,9 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    // Lookup and calculate pc offset.
-    __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerPCOffset));
-    __ movq(rbx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
-    __ subq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
-    __ subq(rdx, FieldOperand(rbx, SharedFunctionInfo::kCodeOffset));
-    __ Integer32ToSmi(rdx, rdx);
-
-    // Pass both function and pc offset as arguments.
+    // Pass function as argument.
    __ push(rax);
-    __ push(rdx);
-    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 2);
+    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
   }
 
   Label skip;