Simplify installing concurrently recompiled code.

Instead of overwriting the code entry of the function, we trigger
an interrupt to install the code on the main thread.

R=mstarzinger@chromium.org
BUG=

Review URL: https://codereview.chromium.org/23542029

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16681 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
Author: yangguo@chromium.org
Date:   2013-09-12 11:30:56 +00:00
Parent: 599f42f022
Commit: 151e514930
21 changed files with 206 additions and 426 deletions
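The mechanism in one picture: before this change, the compiler thread finished a job by overwriting the function's code entry with the InstallRecompiledCode builtin, so the next call would pull in the optimized code; after it, the function simply stays on InRecompileQueue and the main thread installs the result when the INSTALL_CODE stack-guard interrupt fires. A toy model of the code-entry states (hypothetical enum, not a V8 type):

#include <cassert>

// Which builtin a JSFunction's code entry points at while recompiling
// (hypothetical names, not V8 types).
enum class CodeState {
  kLazyRecompile,        // marked for optimization, not yet picked up
  kConcurrentRecompile,  // handed to the optimizing compiler thread
  kInRecompileQueue,     // compiler thread owns it
  // kInstallRecompiledCode existed before this commit: finishing a compile
  // overwrote the code entry with it. That transition is gone.
  kOptimized             // optimized code installed by the main thread
};

// After this commit the only way out of kInRecompileQueue is on the main
// thread, when it handles the INSTALL_CODE stack-guard interrupt.
CodeState OnInstallCodeInterrupt(CodeState s) {
  assert(s == CodeState::kInRecompileQueue);
  return CodeState::kOptimized;
}

int main() {
  CodeState s = CodeState::kInRecompileQueue;
  s = OnInstallCodeInterrupt(s);
  assert(s == CodeState::kOptimized);
  return 0;
}

The removed state is why Generate_InstallRecompiledCode, MarkForInstallingRecompiledCode and IsMarkedForInstallingRecompiledCode all disappear in the diffs below.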

src/arm/builtins-arm.cc

@@ -291,68 +291,55 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
}
static void CallRuntimePassFunction(MacroAssembler* masm,
Runtime::FunctionId function_id) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(r1);
// Push call kind information.
__ push(r5);
// Function is also the parameter to the runtime call.
__ push(r1);
__ CallRuntime(function_id, 1);
// Restore call kind information.
__ pop(r5);
// Restore receiver.
__ pop(r1);
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
__ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
__ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
__ mov(pc, r2);
}
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve the function.
__ push(r1);
// Push call kind information.
__ push(r5);
// Push the function on the stack as the argument to the runtime function.
__ push(r1);
__ CallRuntime(Runtime::kInstallRecompiledCode, 1);
// Calculate the entry point.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
// Restore call kind information.
__ pop(r5);
// Restore saved function.
__ pop(r1);
// Tear down internal frame.
}
// Do a tail-call of the compiled function.
__ Jump(r2);
}
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
// not checking may delay installing ready functions, and always checking
// would be quite expensive. A good compromise is to first check against
// stack limit as a cue for an interrupt signal.
Label ok;
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
// Tail call to returned code.
__ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r0);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(r1);
// Push call kind information.
__ push(r5);
__ push(r1); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kConcurrentRecompile, 1);
// Restore call kind information.
__ pop(r5);
// Restore receiver.
__ pop(r1);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
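The stack-limit compare in the new Generate_InRecompileQueue does double duty: StackGuard signals pending interrupts by lowering the limit to a value no real stack pointer satisfies, so one compare-and-branch separates "nothing pending, run the unoptimized code" from "call Runtime::kTryInstallRecompiledCode". A minimal sketch of that cue, with illustrative values:

#include <cstdint>
#include <cstdio>

static uintptr_t stack_limit = 0x10000;                // normal limit (toy)
static const uintptr_t kInterruptLimit = UINTPTR_MAX;  // unsatisfiable

// Mirrors `cmp sp, limit; b(hs, &ok)` above: below the limit means either a
// real overflow or a poisoned limit, i.e. some interrupt wants servicing.
bool ShouldTryInstall(uintptr_t sp) { return sp < stack_limit; }

int main() {
  const uintptr_t sp = 0x20000;
  std::printf("%d\n", ShouldTryInstall(sp));  // 0: fast path to shared code
  stack_limit = kInterruptLimit;              // an interrupt was requested
  std::printf("%d\n", ShouldTryInstall(sp));  // 1: try the install
  return 0;
}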
@@ -795,59 +782,17 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve the function.
__ push(r1);
// Push call kind information.
__ push(r5);
// Push the function on the stack as the argument to the runtime function.
__ push(r1);
__ CallRuntime(Runtime::kLazyCompile, 1);
// Calculate the entry point.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
// Restore call kind information.
__ pop(r5);
// Restore saved function.
__ pop(r1);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r2);
}
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve the function.
__ push(r1);
// Push call kind information.
__ push(r5);
// Push the function on the stack as the argument to the runtime function.
__ push(r1);
__ CallRuntime(Runtime::kLazyRecompile, 1);
// Calculate the entry point.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
// Restore call kind information.
__ pop(r5);
// Restore saved function.
__ pop(r1);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
__ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(r2);
}

src/builtins.h

@@ -87,8 +87,6 @@ enum BuiltinExtraArguments {
Code::kNoExtraICState) \
V(InRecompileQueue, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(InstallRecompiledCode, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, \
@@ -382,7 +380,6 @@ class Builtins {
CFunctionId id,
BuiltinExtraArguments extra_args);
static void Generate_InRecompileQueue(MacroAssembler* masm);
static void Generate_InstallRecompiledCode(MacroAssembler* masm);
static void Generate_ConcurrentRecompile(MacroAssembler* masm);
static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm);

src/compiler.cc

@@ -1067,7 +1067,7 @@ Handle<Code> Compiler::InstallOptimizedCode(
info->closure()->PrintName();
PrintF(" as it has been disabled.\n");
}
- ASSERT(!info->closure()->IsMarkedForInstallingRecompiledCode());
+ ASSERT(!info->closure()->IsInRecompileQueue());
return Handle<Code>::null();
}
@@ -1114,7 +1114,7 @@ Handle<Code> Compiler::InstallOptimizedCode(
// Optimized code is finally replacing unoptimized code. Reset the latter's
// profiler ticks to prevent too soon re-opt after a deopt.
info->shared_info()->code()->set_profiler_ticks(0);
- ASSERT(!info->closure()->IsMarkedForInstallingRecompiledCode());
+ ASSERT(!info->closure()->IsInRecompileQueue());
return (status == OptimizingCompiler::SUCCEEDED) ? info->code()
: Handle<Code>::null();
}

src/debug.cc

@@ -2110,8 +2110,7 @@ void Debug::PrepareForBreakPoints() {
function->set_code(*lazy_compile);
function->shared()->set_code(*lazy_compile);
} else if (kind == Code::BUILTIN &&
- (function->IsMarkedForInstallingRecompiledCode() ||
-  function->IsInRecompileQueue() ||
+ (function->IsInRecompileQueue() ||
function->IsMarkedForLazyRecompilation() ||
function->IsMarkedForConcurrentRecompilation())) {
// Abort in-flight compilation.

src/execution.cc

@@ -459,6 +459,22 @@ void StackGuard::RequestGC() {
}
bool StackGuard::IsInstallCodeRequest() {
ExecutionAccess access(isolate_);
return (thread_local_.interrupt_flags_ & INSTALL_CODE) != 0;
}
void StackGuard::RequestInstallCode() {
ExecutionAccess access(isolate_);
thread_local_.interrupt_flags_ |= INSTALL_CODE;
if (thread_local_.postpone_interrupts_nesting_ == 0) {
thread_local_.jslimit_ = thread_local_.climit_ = kInterruptLimit;
isolate_->heap()->SetStackLimits();
}
}
bool StackGuard::IsFullDeopt() {
ExecutionAccess access(isolate_);
return (thread_local_.interrupt_flags_ & FULL_DEOPT) != 0;
@@ -916,7 +932,6 @@ MaybeObject* Execution::HandleStackGuardInterrupt(Isolate* isolate) {
isolate->counters()->stack_interrupts()->Increment();
isolate->counters()->runtime_profiler_ticks()->Increment();
isolate->runtime_profiler()->OptimizeNow();
#ifdef ENABLE_DEBUGGER_SUPPORT
if (stack_guard->IsDebugBreak() || stack_guard->IsDebugCommand()) {
DebugBreakHelper(isolate);
@@ -935,6 +950,12 @@ MaybeObject* Execution::HandleStackGuardInterrupt(Isolate* isolate) {
stack_guard->Continue(FULL_DEOPT);
Deoptimizer::DeoptimizeAll(isolate);
}
if (stack_guard->IsInstallCodeRequest()) {
ASSERT(FLAG_concurrent_recompilation);
stack_guard->Continue(INSTALL_CODE);
isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
}
isolate->runtime_profiler()->OptimizeNow();
return isolate->heap()->undefined_value();
}
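RequestInstallCode above follows the pattern of the existing requests: the request itself is one bit in an interrupt mask, and raising it also clobbers both stack limits so the next stack check in generated code fails and control reaches HandleStackGuardInterrupt. A condensed sketch of that bookkeeping, leaving out the ExecutionAccess lock and the postpone_interrupts nesting check the real code has:

#include <cstdint>

class ToyStackGuard {
 public:
  enum InterruptFlag {
    GC_REQUEST   = 1 << 5,
    FULL_DEOPT   = 1 << 6,
    INSTALL_CODE = 1 << 7   // the flag this commit adds
  };

  void RequestInstallCode() {
    interrupt_flags_ |= INSTALL_CODE;
    jslimit_ = climit_ = kInterruptLimit;  // make the next stack check fail
  }
  bool IsInstallCodeRequest() const {
    return (interrupt_flags_ & INSTALL_CODE) != 0;
  }
  void Continue(InterruptFlag after_what) { interrupt_flags_ &= ~after_what; }

 private:
  static const uintptr_t kInterruptLimit = ~static_cast<uintptr_t>(0);
  int interrupt_flags_ = 0;
  uintptr_t jslimit_ = 0x10000;
  uintptr_t climit_ = 0x10000;
};

int main() {
  ToyStackGuard guard;
  guard.RequestInstallCode();
  if (guard.IsInstallCodeRequest()) {
    guard.Continue(ToyStackGuard::INSTALL_CODE);  // handled; clear the bit
  }
  return 0;
}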

src/execution.h

@@ -42,7 +42,8 @@ enum InterruptFlag {
PREEMPT = 1 << 3,
TERMINATE = 1 << 4,
GC_REQUEST = 1 << 5,
- FULL_DEOPT = 1 << 6
+ FULL_DEOPT = 1 << 6,
+ INSTALL_CODE = 1 << 7
};
@@ -213,6 +214,8 @@ class StackGuard {
#endif
bool IsGCRequest();
void RequestGC();
bool IsInstallCodeRequest();
void RequestInstallCode();
bool IsFullDeopt();
void FullDeopt();
void Continue(InterruptFlag after_what);

src/ia32/builtins-ia32.cc

@@ -74,6 +74,24 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
static void CallRuntimePassFunction(MacroAssembler* masm,
Runtime::FunctionId function_id) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function.
__ push(edi);
// Push call kind information.
__ push(ecx);
// Function is also the parameter to the runtime call.
__ push(edi);
__ CallRuntime(function_id, 1);
// Restore call kind information.
__ pop(ecx);
// Restore receiver.
__ pop(edi);
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
__ mov(eax, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
@@ -83,56 +101,29 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
// not checking may delay installing ready functions, and always checking
// would be quite expensive. A good compromise is to first check against
// stack limit as a cue for an interrupt signal.
Label ok;
ExternalReference stack_limit =
ExternalReference::address_of_stack_limit(masm->isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, Label::kNear);
CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
// Tail call to returned code.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
__ jmp(eax);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function.
__ push(edi);
// Push call kind information.
__ push(ecx);
__ push(edi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kInstallRecompiledCode, 1);
// Restore call kind information.
__ pop(ecx);
// Restore receiver.
__ pop(edi);
// Tear down internal frame.
}
// Do a tail-call of the compiled function.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
__ jmp(eax);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(edi);
// Push call kind information.
__ push(ecx);
__ push(edi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kConcurrentRecompile, 1);
// Restore call kind information.
__ pop(ecx);
// Restore receiver.
__ pop(edi);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
@@ -519,25 +510,7 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function.
__ push(edi);
// Push call kind information.
__ push(ecx);
__ push(edi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kLazyCompile, 1);
// Restore call kind information.
__ pop(ecx);
// Restore receiver.
__ pop(edi);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
__ jmp(eax);
@@ -545,25 +518,7 @@ void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(edi);
// Push call kind information.
__ push(ecx);
__ push(edi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kLazyRecompile, 1);
// Restore call kind information.
__ pop(ecx);
// Restore receiver.
__ pop(edi);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
__ lea(eax, FieldOperand(eax, Code::kHeaderSize));
__ jmp(eax);

src/mips/builtins-mips.cc

@@ -299,6 +299,24 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
}
static void CallRuntimePassFunction(MacroAssembler* masm,
Runtime::FunctionId function_id) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(a1);
// Push call kind information.
__ push(t1);
// Function is also the parameter to the runtime call.
__ push(a1);
__ CallRuntime(function_id, 1);
// Restore call kind information.
__ pop(t1);
// Restore receiver.
__ pop(a1);
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
__ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
@@ -308,59 +326,27 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
// not checking may delay installing ready functions, and always checking
// would be quite expensive. A good compromise is to first check against
// stack limit as a cue for an interrupt signal.
Label ok;
__ LoadRoot(t0, Heap::kStackLimitRootIndex);
__ Branch(&ok, hs, sp, Operand(t0));
CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
// Tail call to returned code.
__ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(at);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve the function.
__ push(a1);
// Push call kind information.
__ push(t1);
// Push the function on the stack as the argument to the runtime function.
__ push(a1);
__ CallRuntime(Runtime::kInstallRecompiledCode, 1);
// Calculate the entry point.
__ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
// Restore call kind information.
__ pop(t1);
// Restore saved function.
__ pop(a1);
// Tear down temporary frame.
}
// Do a tail-call of the compiled function.
__ Jump(t9);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(a1);
// Push call kind information.
__ push(t1);
__ push(a1); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kConcurrentRecompile, 1);
// Restore call kind information.
__ pop(t1);
// Restore receiver.
__ pop(a1);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
@@ -815,60 +801,17 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve the function.
__ push(a1);
// Push call kind information.
__ push(t1);
// Push the function on the stack as the argument to the runtime function.
__ push(a1);
// Call the runtime function.
__ CallRuntime(Runtime::kLazyCompile, 1);
// Calculate the entry point.
__ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag);
// Restore call kind information.
__ pop(t1);
// Restore saved function.
__ pop(a1);
// Tear down temporary frame.
}
CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
__ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(t9);
}
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve the function.
__ push(a1);
// Push call kind information.
__ push(t1);
// Push the function on the stack as the argument to the runtime function.
__ push(a1);
__ CallRuntime(Runtime::kLazyRecompile, 1);
// Calculate the entry point.
__ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
// Restore call kind information.
__ pop(t1);
// Restore saved function.
__ pop(a1);
// Tear down temporary frame.
}
CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
__ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(t9);
}

src/objects-inl.h

@@ -4970,12 +4970,6 @@ bool JSFunction::IsMarkedForLazyRecompilation() {
}
bool JSFunction::IsMarkedForInstallingRecompiledCode() {
return code() == GetIsolate()->builtins()->builtin(
Builtins::kInstallRecompiledCode);
}
bool JSFunction::IsMarkedForConcurrentRecompilation() {
return code() == GetIsolate()->builtins()->builtin(
Builtins::kConcurrentRecompile);

src/objects.cc

@@ -9321,18 +9321,6 @@ void JSFunction::MarkForConcurrentRecompilation() {
}
void JSFunction::MarkForInstallingRecompiledCode() {
// The debugger could have switched the builtin to lazy compile.
// In that case, simply carry on. It will be dealt with later.
ASSERT(!IsOptimized());
ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
ASSERT(FLAG_concurrent_recompilation);
set_code_no_write_barrier(
GetIsolate()->builtins()->builtin(Builtins::kInstallRecompiledCode));
// No write barrier required, since the builtin is part of the root set.
}
void JSFunction::MarkInRecompileQueue() {
// We can only arrive here via the concurrent-recompilation builtin. If
// break points were set, the code would point to the lazy-compile builtin.

src/objects.h

@@ -6989,7 +6989,6 @@ class JSFunction: public JSObject {
// recompiled the next time it is executed.
void MarkForLazyRecompilation();
void MarkForConcurrentRecompilation();
void MarkForInstallingRecompiledCode();
void MarkInRecompileQueue();
// Helpers to compile this function. Returns true on success, false on
@@ -7008,7 +7007,6 @@
// recompilation.
inline bool IsMarkedForLazyRecompilation();
inline bool IsMarkedForConcurrentRecompilation();
inline bool IsMarkedForInstallingRecompiledCode();
// Tells whether or not the function is on the concurrent recompilation queue.
inline bool IsInRecompileQueue();

src/optimizing-compiler-thread.cc

@@ -114,11 +114,8 @@ void OptimizingCompilerThread::CompileNext() {
osr_candidates_.RemoveElement(optimizing_compiler);
ready_for_osr_.Add(optimizing_compiler);
} else {
LockGuard<Mutex> mark_and_queue(&install_mutex_);
Heap::RelocationLock relocation_lock(isolate_->heap());
AllowHandleDereference ahd;
optimizing_compiler->info()->closure()->MarkForInstallingRecompiledCode();
output_queue_.Enqueue(optimizing_compiler);
isolate_->stack_guard()->RequestInstallCode();
}
}
@@ -201,10 +198,7 @@ void OptimizingCompilerThread::InstallOptimizedFunctions() {
HandleScope handle_scope(isolate_);
OptimizingCompiler* compiler;
while (true) {
{ // Memory barrier to ensure marked functions are queued.
LockGuard<Mutex> marked_and_queued(&install_mutex_);
if (!output_queue_.Dequeue(&compiler)) return;
}
if (!output_queue_.Dequeue(&compiler)) return;
Compiler::InstallOptimizedCode(compiler);
}
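The install_mutex_ that disappears here existed to make "mark the function" and "enqueue the job" look atomic to the main thread, whose cue was the function's code pointer. Now the cue is the INSTALL_CODE flag, raised strictly after the enqueue, so the queue's own synchronization already orders everything and the drain loop collapses to a bare Dequeue. A sketch of why flag-after-enqueue suffices, with a toy mutex-guarded queue standing in for V8's:

#include <atomic>
#include <cstdio>
#include <mutex>
#include <queue>
#include <thread>

static std::queue<int> output_queue;  // finished compilation jobs (toy)
static std::mutex queue_mutex;
static std::atomic<bool> install_code{false};

// Compiler thread: enqueue first, then signal. The release store makes the
// enqueue visible to whoever observes the flag.
void CompileNext(int job) {
  {
    std::lock_guard<std::mutex> lock(queue_mutex);
    output_queue.push(job);
  }
  install_code.store(true, std::memory_order_release);
}

// Main thread: the acquire exchange pairs with the release store, so once
// the flag is seen, the queued job is too. No extra install mutex needed.
void InstallOptimizedFunctions() {
  if (!install_code.exchange(false, std::memory_order_acquire)) return;
  std::lock_guard<std::mutex> lock(queue_mutex);
  while (!output_queue.empty()) {
    std::printf("install job %d\n", output_queue.front());
    output_queue.pop();
  }
}

int main() {
  std::thread compiler(CompileNext, 42);
  compiler.join();
  InstallOptimizedFunctions();  // prints: install job 42
  return 0;
}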

src/optimizing-compiler-thread.h

@@ -120,7 +120,6 @@ class OptimizingCompilerThread : public Thread {
// List of recompilation tasks ready for OSR.
List<OptimizingCompiler*> ready_for_osr_;
Mutex install_mutex_;
volatile AtomicWord stop_thread_;
volatile Atomic32 queue_length_;
TimeDelta time_spent_compiling_;

src/runtime-profiler.cc

@@ -149,7 +149,6 @@ void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
// recompilation race. This goes away as soon as OSR becomes one-shot.
return;
}
ASSERT(!function->IsMarkedForInstallingRecompiledCode());
ASSERT(!function->IsInRecompileQueue());
function->MarkForConcurrentRecompilation();
} else {
@@ -227,12 +226,6 @@ void RuntimeProfiler::OptimizeNow() {
if (isolate_->DebuggerHasBreakPoints()) return;
if (FLAG_concurrent_recompilation) {
// Take this as opportunity to process the optimizing compiler thread's
// output queue so that it does not unnecessarily keep objects alive.
isolate_->optimizing_compiler_thread()->InstallOptimizedFunctions();
}
DisallowHeapAllocation no_gc;
// Run through the JavaScript frames and collect them. If we already

src/runtime.cc

@@ -8346,16 +8346,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ConcurrentRecompile) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_InstallRecompiledCode) {
HandleScope handle_scope(isolate);
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
ASSERT(isolate->use_crankshaft() && FLAG_concurrent_recompilation);
isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
return function->code();
}
class ActivationsFinder : public ThreadVisitor {
public:
Code* code_;
@@ -8553,8 +8543,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) {
}
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
if (FLAG_concurrent_recompilation && sync_with_compiler_thread) {
- while (function->IsInRecompileQueue() ||
-        function->IsMarkedForInstallingRecompiledCode()) {
+ while (function->IsInRecompileQueue()) {
isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
OS::Sleep(50);
}
@@ -9382,7 +9371,6 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StackGuard) {
// First check if this is a real stack overflow.
if (isolate->stack_guard()->IsStackOverflow()) {
SealHandleScope shs(isolate);
return isolate->StackOverflow();
}
@@ -9390,6 +9378,23 @@
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_TryInstallRecompiledCode) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
// First check if this is a real stack overflow.
if (isolate->stack_guard()->IsStackOverflow()) {
SealHandleScope shs(isolate);
return isolate->StackOverflow();
}
isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
return (function->IsOptimized()) ? function->code()
: function->shared()->code();
}
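Runtime_TryInstallRecompiledCode is what the rewritten InRecompileQueue builtin calls when the stack check fails, and a failed check means either a genuine overflow or a poisoned limit, hence the overflow test up front. After draining the output queue it returns whichever code object the function ended up with, and the builtin tail-calls that. A compact model of the decision (toy types, names hypothetical):

#include <cstdio>

struct ToyFunction {
  bool optimized = false;
  const char* optimized_code = "optimized code";
  const char* shared_code = "unoptimized shared code";
};

// install_ready: whether the compiler thread's queue held a job for f.
const char* TryInstallRecompiledCode(ToyFunction* f, bool real_overflow,
                                     bool install_ready) {
  if (real_overflow) return "throw RangeError";  // genuine stack overflow
  if (install_ready) f->optimized = true;        // InstallOptimizedFunctions()
  // Tail-call target: optimized code if this function's job was installed,
  // otherwise the shared (unoptimized) code while we keep waiting.
  return f->optimized ? f->optimized_code : f->shared_code;
}

int main() {
  ToyFunction f;
  std::puts(TryInstallRecompiledCode(&f, false, false));  // still unoptimized
  std::puts(TryInstallRecompiledCode(&f, false, true));   // optimized code
  return 0;
}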
RUNTIME_FUNCTION(MaybeObject*, Runtime_Interrupt) {
SealHandleScope shs(isolate);
ASSERT(args.length() == 0);

src/runtime.h

@@ -88,7 +88,7 @@ namespace internal {
F(LazyCompile, 1, 1) \
F(LazyRecompile, 1, 1) \
F(ConcurrentRecompile, 1, 1) \
- F(InstallRecompiledCode, 1, 1) \
+ F(TryInstallRecompiledCode, 1, 1) \
F(NotifyDeoptimized, 1, 1) \
F(NotifyStubFailure, 0, 1) \
F(NotifyOSR, 0, 1) \

src/x64/builtins-x64.cc

@@ -73,6 +73,24 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
static void CallRuntimePassFunction(MacroAssembler* masm,
Runtime::FunctionId function_id) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(rdi);
// Push call kind information.
__ push(rcx);
// Function is also the parameter to the runtime call.
__ push(rdi);
__ CallRuntime(function_id, 1);
// Restore call kind information.
__ pop(rcx);
// Restore receiver.
__ pop(rdi);
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ movq(kScratchRegister,
FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
@@ -84,57 +102,27 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
// not checking may delay installing ready functions, and always checking
// would be quite expensive. A good compromise is to first check against
// stack limit as a cue for an interrupt signal.
Label ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok);
CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
// Tail call to returned code.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_InstallRecompiledCode(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(rdi);
// Push call kind information.
__ push(rcx);
__ push(rdi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kInstallRecompiledCode, 1);
// Restore call kind information.
__ pop(rcx);
// Restore function.
__ pop(rdi);
// Tear down internal frame.
}
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(rdi);
// Push call kind information.
__ push(rcx);
__ push(rdi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kConcurrentRecompile, 1);
// Restore call kind information.
__ pop(rcx);
// Restore receiver.
__ pop(rdi);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
@@ -586,26 +574,7 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(rdi);
// Push call kind information.
__ push(rcx);
__ push(rdi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kLazyCompile, 1);
// Restore call kind information.
__ pop(rcx);
// Restore receiver.
__ pop(rdi);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
@@ -613,26 +582,7 @@ void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
// Enter an internal frame.
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(rdi);
// Push call kind information.
__ push(rcx);
__ push(rdi); // Function is also the parameter to the runtime call.
__ CallRuntime(Runtime::kLazyRecompile, 1);
// Restore call kind information.
__ pop(rcx);
// Restore function.
__ pop(rdi);
// Tear down internal frame.
}
CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);

test/mjsunit/fuzz-natives-part1.js

@@ -151,7 +151,6 @@ var knownProblems = {
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
"InstallRecompiledCode": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,

test/mjsunit/fuzz-natives-part2.js

@@ -151,7 +151,6 @@ var knownProblems = {
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
"InstallRecompiledCode": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,

test/mjsunit/fuzz-natives-part3.js

@@ -151,7 +151,6 @@ var knownProblems = {
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
"InstallRecompiledCode": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,

test/mjsunit/fuzz-natives-part4.js

@@ -151,7 +151,6 @@ var knownProblems = {
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
"InstallRecompiledCode": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,