Reland "Flush parallel recompilation queues on context dispose notification."

BUG=
R=jkummerow@chromium.org

Review URL: https://codereview.chromium.org/19500022

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15883 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
yangguo@chromium.org 2013-07-25 15:01:23 +00:00
parent f06f57f7fb
commit 14e205e9cf
16 changed files with 156 additions and 50 deletions
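
For orientation, the embedder-facing trigger for this change is V8's context-dispose notification: v8::V8::ContextDisposedNotification() forwards to Heap::NotifyContextDisposed(), which (per the heap.cc hunk below) now also flushes the parallel recompilation queues. A minimal embedder-side sketch, assuming the 2013-era v8.h API; the hook name is illustrative:

#include <v8.h>

// Hypothetical embedder hook, called after the last handle to a
// context has been dropped. Only ContextDisposedNotification() is
// real (2013-era) V8 API; the surrounding function is illustrative.
void OnContextGone() {
  // Hints the VM that a context died. The return value feeds the
  // heap's GC heuristics and can usually be ignored. With this patch,
  // and with --parallel-recompilation enabled, the call also flushes
  // the optimizing compiler thread's input and output queues.
  v8::V8::ContextDisposedNotification();
}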


@@ -770,7 +770,6 @@ void Context::Exit() {
   i::Context* last_context =
       isolate->handle_scope_implementer()->RestoreContext();
   isolate->set_context(last_context);
-  isolate->set_context_exit_happened(true);
 }


@@ -969,7 +969,9 @@ void Compiler::RecompileParallel(Handle<JSFunction> closure) {
   if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
     if (FLAG_trace_parallel_recompilation) {
-      PrintF("  ** Compilation queue, will retry opting on next run.\n");
+      PrintF("  ** Compilation queue full, will retry optimizing ");
+      closure->PrintName();
+      PrintF(" on next run.\n");
     }
     return;
   }


@@ -2044,6 +2044,10 @@ void Debug::PrepareForBreakPoints() {
   // If preparing for the first break point make sure to deoptimize all
   // functions as debugging does not work with optimized code.
   if (!has_break_points_) {
+    if (FLAG_parallel_recompilation) {
+      isolate_->optimizing_compiler_thread()->Flush();
+    }
+
     Deoptimizer::DeoptimizeAll(isolate_);
 
     Handle<Code> lazy_compile =


@@ -1221,6 +1221,7 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
   shared->set_num_literals(literals_array_size);
   if (is_generator) {
     shared->set_instance_class_name(isolate()->heap()->Generator_string());
+    shared->DisableOptimization("generator");
   }
   return shared;
 }


@@ -703,6 +703,16 @@ bool Heap::CollectGarbage(AllocationSpace space,
 }
 
 
+int Heap::NotifyContextDisposed() {
+  if (FLAG_parallel_recompilation) {
+    // Flush the queued recompilation tasks.
+    isolate()->optimizing_compiler_thread()->Flush();
+  }
+  flush_monomorphic_ics_ = true;
+  return ++contexts_disposed_;
+}
+
+
 void Heap::PerformScavenge() {
   GCTracer tracer(this, NULL, NULL);
   if (incremental_marking()->IsStopped()) {


@@ -1252,10 +1252,7 @@ class Heap {
   void EnsureHeapIsIterable();
 
   // Notify the heap that a context has been disposed.
-  int NotifyContextDisposed() {
-    flush_monomorphic_ics_ = true;
-    return ++contexts_disposed_;
-  }
+  int NotifyContextDisposed();
 
   // Utility to invoke the scavenger. This is needed in test code to
   // ensure correct callback for weak global handles.


@@ -1777,7 +1777,6 @@ Isolate::Isolate()
       regexp_stack_(NULL),
       date_cache_(NULL),
       code_stub_interface_descriptors_(NULL),
-      context_exit_happened_(false),
       initialized_from_snapshot_(false),
       cpu_profiler_(NULL),
       heap_profiler_(NULL),


@@ -1059,13 +1059,6 @@ class Isolate {
     thread_local_top_.top_lookup_result_ = top;
   }
 
-  bool context_exit_happened() {
-    return context_exit_happened_;
-  }
-  void set_context_exit_happened(bool context_exit_happened) {
-    context_exit_happened_ = context_exit_happened;
-  }
-
   bool initialized_from_snapshot() { return initialized_from_snapshot_; }
 
   double time_millis_since_init() {
@@ -1313,10 +1306,6 @@ class Isolate {
   unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
   CodeStubInterfaceDescriptor* code_stub_interface_descriptors_;
 
-  // The garbage collector should be a little more aggressive when it knows
-  // that a context was recently exited.
-  bool context_exit_happened_;
-
   // True if this isolate was initialized from a snapshot.
   bool initialized_from_snapshot_;


@@ -1290,6 +1290,7 @@ MaybeObject* LiveEdit::ReplaceFunctionCode(
     if (code_scope_info->IsFixedArray()) {
       shared_info->set_scope_info(ScopeInfo::cast(*code_scope_info));
     }
+    shared_info->DisableOptimization("LiveEdit");
   }
 
   if (shared_info->debug_info()->IsDebugInfo()) {


@@ -9234,6 +9234,7 @@ void JSFunction::MarkForLazyRecompilation() {
   ASSERT(!IsOptimized());
   ASSERT(shared()->allows_lazy_compilation() ||
          code()->optimizable());
+  ASSERT(!shared()->is_generator());
   set_code_no_write_barrier(
       GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile));
   // No write barrier required, since the builtin is part of the root set.
@@ -9244,10 +9245,8 @@ void JSFunction::MarkForParallelRecompilation() {
   ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
   ASSERT(!IsOptimized());
   ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
-  if (!FLAG_parallel_recompilation) {
-    JSFunction::MarkForLazyRecompilation();
-    return;
-  }
+  ASSERT(!shared()->is_generator());
+  ASSERT(FLAG_parallel_recompilation);
   if (FLAG_trace_parallel_recompilation) {
     PrintF("  ** Marking ");
     PrintName();


@@ -60,12 +60,23 @@ void OptimizingCompilerThread::Run() {
     OS::Sleep(FLAG_parallel_recompilation_delay);
   }
 
-  if (Acquire_Load(&stop_thread_)) {
-    stop_semaphore_->Signal();
-    if (FLAG_trace_parallel_recompilation) {
-      time_spent_total_ = OS::Ticks() - epoch;
-    }
-    return;
+  switch (static_cast<StopFlag>(Acquire_Load(&stop_thread_))) {
+    case CONTINUE:
+      break;
+    case STOP:
+      if (FLAG_trace_parallel_recompilation) {
+        time_spent_total_ = OS::Ticks() - epoch;
+      }
+      stop_semaphore_->Signal();
+      return;
+    case FLUSH:
+      // Reset input queue semaphore.
+      delete input_queue_semaphore_;
+      input_queue_semaphore_ = OS::CreateSemaphore(0);
+      // Signal for main thread to start flushing.
+      stop_semaphore_->Signal();
+      // Return to start of consumer loop.
+      continue;
   }
 
   int64_t compiling_start = 0;
@@ -102,9 +113,41 @@ void OptimizingCompilerThread::CompileNext() {
 }
 
 
+void OptimizingCompilerThread::FlushQueue(
+    UnboundQueue<OptimizingCompiler*>* queue,
+    bool restore_function_code) {
+  ASSERT(!IsOptimizerThread());
+  OptimizingCompiler* optimizing_compiler;
+  // The optimizing compiler is allocated in the CompilationInfo's zone.
+  while (queue->Dequeue(&optimizing_compiler)) {
+    CompilationInfo* info = optimizing_compiler->info();
+    if (restore_function_code) {
+      Handle<JSFunction> function = info->closure();
+      function->ReplaceCode(function->shared()->code());
+    }
+    delete info;
+  }
+}
+
+
+void OptimizingCompilerThread::Flush() {
+  ASSERT(!IsOptimizerThread());
+  Release_Store(&stop_thread_, static_cast<AtomicWord>(FLUSH));
+  input_queue_semaphore_->Signal();
+  FlushQueue(&input_queue_, true);
+  NoBarrier_Store(&queue_length_, static_cast<AtomicWord>(0));
+  stop_semaphore_->Wait();
+  Release_Store(&stop_thread_, static_cast<AtomicWord>(CONTINUE));
+  FlushQueue(&output_queue_, true);
+}
+
+
 void OptimizingCompilerThread::Stop() {
   ASSERT(!IsOptimizerThread());
-  Release_Store(&stop_thread_, static_cast<AtomicWord>(true));
+  Release_Store(&stop_thread_, static_cast<AtomicWord>(STOP));
   input_queue_semaphore_->Signal();
   stop_semaphore_->Wait();
@@ -114,14 +157,8 @@ void OptimizingCompilerThread::Stop() {
     while (NoBarrier_Load(&queue_length_) > 0) CompileNext();
     InstallOptimizedFunctions();
   } else {
-    OptimizingCompiler* optimizing_compiler;
-    // The optimizing compiler is allocated in the CompilationInfo's zone.
-    while (input_queue_.Dequeue(&optimizing_compiler)) {
-      delete optimizing_compiler->info();
-    }
-    while (output_queue_.Dequeue(&optimizing_compiler)) {
-      delete optimizing_compiler->info();
-    }
+    FlushQueue(&input_queue_, false);
+    FlushQueue(&output_queue_, false);
   }
 
   if (FLAG_trace_parallel_recompilation) {

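As a companion to the Run()/Flush() handshake above, here is a self-contained sketch of the same FLUSH protocol using standard C++11 primitives in place of V8's Semaphore and AtomicWord. It is not V8 code and every name is illustrative; where V8 discards stale semaphore permits by recreating input_queue_semaphore_, this sketch keeps a single semaphore, adds an explicit resume handshake, and lets the consumer tolerate an already-drained queue.

#include <atomic>
#include <condition_variable>
#include <cstdio>
#include <deque>
#include <mutex>

// Minimal counting semaphore (std::counting_semaphore is C++20-only).
class Semaphore {
 public:
  void Signal() {
    std::lock_guard<std::mutex> lock(m_);
    ++count_;
    cv_.notify_one();
  }
  void Wait() {
    std::unique_lock<std::mutex> lock(m_);
    cv_.wait(lock, [this] { return count_ > 0; });
    --count_;
  }
 private:
  std::mutex m_;
  std::condition_variable cv_;
  int count_ = 0;
};

enum StopFlag { CONTINUE, STOP, FLUSH };

class RecompilerThread {
 public:
  void Run() {  // consumer loop; runs on a background thread
    for (;;) {
      input_sem_.Wait();  // one permit per enqueued task or control signal
      switch (stop_flag_.load()) {
        case CONTINUE:
          CompileNext();
          break;
        case STOP:
          ack_sem_.Signal();
          return;
        case FLUSH:
          // V8 instead discards stale permits by recreating
          // input_queue_semaphore_; here CompileNext() simply tolerates
          // an empty queue, so one semaphore suffices.
          ack_sem_.Signal();   // flush acknowledged; main thread may proceed
          resume_sem_.Wait();  // park until the main thread resets the flag
          continue;
      }
    }
  }

  void Enqueue(int task) {  // main thread
    { std::lock_guard<std::mutex> lock(queue_mutex_); queue_.push_back(task); }
    input_sem_.Signal();
  }

  void Flush() {  // main thread; mirrors OptimizingCompilerThread::Flush()
    stop_flag_.store(FLUSH);
    input_sem_.Signal();   // wake the worker even if it is idle
    Drain();               // like FlushQueue(&input_queue_, true)
    ack_sem_.Wait();       // exactly one ack per flush
    stop_flag_.store(CONTINUE);
    resume_sem_.Signal();  // let the worker re-enter the consumer loop
  }

  void Stop() {  // main thread
    stop_flag_.store(STOP);
    input_sem_.Signal();
    ack_sem_.Wait();
  }

 private:
  void CompileNext() {
    int task;
    {
      std::lock_guard<std::mutex> lock(queue_mutex_);
      if (queue_.empty()) return;  // task was flushed out from under us
      task = queue_.front();
      queue_.pop_front();
    }
    std::printf("compiled task %d\n", task);
  }

  void Drain() {
    std::lock_guard<std::mutex> lock(queue_mutex_);
    queue_.clear();  // V8 also restores each function's unoptimized code
  }

  std::atomic<StopFlag> stop_flag_{CONTINUE};
  Semaphore input_sem_;   // counts pending tasks, like input_queue_semaphore_
  Semaphore ack_sem_;     // like stop_semaphore_
  Semaphore resume_sem_;  // extra handshake; the V8 version does not need it
  std::mutex queue_mutex_;
  std::deque<int> queue_;
};

In this sketch, Run() goes on a background std::thread while Enqueue(), Flush(), and Stop() are called from the main thread; as in the patch, Flush() returns only after the worker has acknowledged, which is the property Heap::NotifyContextDisposed() relies on above.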

@@ -54,13 +54,13 @@ class OptimizingCompilerThread : public Thread {
         install_mutex_(OS::CreateMutex()),
         time_spent_compiling_(0),
         time_spent_total_(0) {
-    NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(false));
+    NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(CONTINUE));
     NoBarrier_Store(&queue_length_, static_cast<AtomicWord>(0));
   }
 
   void Run();
   void Stop();
-  void CompileNext();
+  void Flush();
   void QueueForOptimization(OptimizingCompiler* optimizing_compiler);
   void InstallOptimizedFunctions();
@@ -92,6 +92,12 @@ class OptimizingCompilerThread : public Thread {
   }
 
  private:
+  enum StopFlag { CONTINUE, STOP, FLUSH };
+
+  void FlushQueue(UnboundQueue<OptimizingCompiler*>* queue,
+                  bool restore_function_code);
+
+  void CompileNext();
 #ifdef DEBUG
   int thread_id_;
   Mutex* thread_id_mutex_;


@@ -2948,6 +2948,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ResumeJSGeneratorObject) {
   JavaScriptFrame* frame = stack_iterator.frame();
 
   ASSERT_EQ(frame->function(), generator_object->function());
+  ASSERT(frame->function()->is_compiled());
 
   STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting <= 0);
   STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed <= 0);
@@ -8465,8 +8466,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) {
   }
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
   if (FLAG_parallel_recompilation && sync_with_compiler_thread) {
-    while (function->IsMarkedForParallelRecompilation() ||
-           function->IsInRecompileQueue() ||
+    while (function->IsInRecompileQueue() ||
            function->IsMarkedForInstallingRecompiledCode()) {
       isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
       OS::Sleep(50);


@@ -77,23 +77,27 @@ class AlwaysOptimizeAllowNativesSyntaxNoInlining {
 
 // Utility class to set --allow-natives-syntax and --nouse-inlining when
 // constructed and return to their default state when destroyed.
-class AllowNativesSyntaxNoInlining {
+class AllowNativesSyntaxNoInliningNoParallel {
  public:
-  AllowNativesSyntaxNoInlining()
+  AllowNativesSyntaxNoInliningNoParallel()
       : allow_natives_syntax_(i::FLAG_allow_natives_syntax),
-        use_inlining_(i::FLAG_use_inlining) {
+        use_inlining_(i::FLAG_use_inlining),
+        parallel_recompilation_(i::FLAG_parallel_recompilation) {
     i::FLAG_allow_natives_syntax = true;
     i::FLAG_use_inlining = false;
+    i::FLAG_parallel_recompilation = false;
   }
 
-  ~AllowNativesSyntaxNoInlining() {
+  ~AllowNativesSyntaxNoInliningNoParallel() {
     i::FLAG_allow_natives_syntax = allow_natives_syntax_;
     i::FLAG_use_inlining = use_inlining_;
+    i::FLAG_parallel_recompilation = parallel_recompilation_;
   }
 
  private:
   bool allow_natives_syntax_;
   bool use_inlining_;
+  bool parallel_recompilation_;
 };
@@ -343,7 +347,7 @@ TEST(DeoptimizeBinaryOperationADDString) {
   const char* f_source = "function f(x, y) { return x + y; };";
 
   {
-    AllowNativesSyntaxNoInlining options;
+    AllowNativesSyntaxNoInliningNoParallel options;
     // Compile function f and collect to type feedback to insert binary op stub
     // call in the optimized code.
     i::FLAG_prepare_always_opt = true;
@@ -401,7 +405,7 @@ static void TestDeoptimizeBinaryOpHelper(LocalContext* env,
                      binary_op);
   char* f_source = f_source_buffer.start();
 
-  AllowNativesSyntaxNoInlining options;
+  AllowNativesSyntaxNoInliningNoParallel options;
  // Compile function f and collect to type feedback to insert binary op stub
  // call in the optimized code.
  i::FLAG_prepare_always_opt = true;
@@ -493,7 +497,7 @@ TEST(DeoptimizeCompare) {
   const char* f_source = "function f(x, y) { return x < y; };";
 
   {
-    AllowNativesSyntaxNoInlining options;
+    AllowNativesSyntaxNoInliningNoParallel options;
     // Compile function f and collect to type feedback to insert compare ic
     // call in the optimized code.
     i::FLAG_prepare_always_opt = true;
@@ -540,7 +544,7 @@ TEST(DeoptimizeLoadICStoreIC) {
   const char* g2_source = "function g2(x, y) { x[y] = 1; };";
 
   {
-    AllowNativesSyntaxNoInlining options;
+    AllowNativesSyntaxNoInliningNoParallel options;
     // Compile functions and collect to type feedback to insert ic
     // calls in the optimized code.
     i::FLAG_prepare_always_opt = true;
@@ -620,7 +624,7 @@ TEST(DeoptimizeLoadICStoreICNested) {
   const char* g2_source = "function g2(x, y) { x[y] = 1; };";
 
   {
-    AllowNativesSyntaxNoInlining options;
+    AllowNativesSyntaxNoInliningNoParallel options;
     // Compile functions and collect to type feedback to insert ic
     // calls in the optimized code.
     i::FLAG_prepare_always_opt = true;


@@ -2826,6 +2826,7 @@ void ReleaseStackTraceDataTest(const char* source, const char* accessor) {
   // to check whether the data is being released since the external string
   // resource's callback is fired when the external string is GC'ed.
   FLAG_use_ic = false;  // ICs retain objects.
+  FLAG_parallel_recompilation = false;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   SourceResource* resource = new SourceResource(i::StrDup(source));


@@ -0,0 +1,57 @@
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --allow-natives-syntax
+// Flags: --parallel-recompilation-delay=300
+
+Debug = debug.Debug
+
+function foo() {
+  var x = 1;
+  return x;
+}
+
+function bar() {
+  var x = 2;
+  return x;
+}
+
+foo();
+
+// Mark and trigger parallel optimization.
+%OptimizeFunctionOnNextCall(foo, "parallel");
+foo();
+
+// Set break points on an unrelated function. This clears both optimized
+// and (shared) unoptimized code on foo, and sets both to lazy-compile builtin.
+// Clear the break point immediately after to deactivate the debugger.
+Debug.setBreakPoint(bar, 0, 0);
+Debug.clearAllBreakPoints();
+
+// Install optimized code when parallel optimization finishes.
+// This needs to be able to deal with shared code being a builtin.
+assertUnoptimized(foo, "sync");