Refactor the compilation pipeline.

Goals:
 - Easier to read, with more suitable identifiers (the renamed compiler entry points are sketched just below).
 - Better distinction between compiling optimized and unoptimized code.
 - The compiler no longer installs code on the function itself.
 - Easier to add features (e.g. caching optimized code for OSR).
 - Remove unnecessary code.
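
For orientation, a minimal sketch of how a caller drives the reshaped pipeline, pieced together from the new Compiler interface declared in compiler.h further down; the call site itself is illustrative, not code from this change:

  // Sketch only: the compiler now returns code instead of installing it,
  // so the caller decides when to attach it to the function.
  Handle<Code> unoptimized = Compiler::GetUnoptimizedCode(function);
  if (!unoptimized.is_null()) function->ReplaceCode(*unoptimized);
  // Optimized compilation; with CONCURRENT this may hand back the
  // InOptimizationQueue builtin, and an empty handle signals failure.
  Handle<Code> optimized = Compiler::GetOptimizedCode(
      function, unoptimized, Compiler::CONCURRENT);
  if (!optimized.is_null()) function->ReplaceCode(*optimized);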

R=titzer@chromium.org

Review URL: https://codereview.chromium.org/110203002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18409 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
yangguo@chromium.org 2013-12-23 14:30:35 +00:00
parent f7929d2a87
commit 2a4be7067c
39 changed files with 1248 additions and 1397 deletions

View File

@@ -28,6 +28,7 @@
#include "v8.h"
#include "accessors.h"
+ #include "compiler.h"
#include "contexts.h"
#include "deoptimizer.h"
#include "execution.h"
@@ -648,9 +649,9 @@ MaybeObject* Accessors::FunctionGetLength(Isolate* isolate,
// If the function isn't compiled yet, the length is not computed correctly
// yet. Compile it now and return the right length.
HandleScope scope(isolate);
- Handle<JSFunction> handle(function);
- if (JSFunction::CompileLazy(handle, KEEP_EXCEPTION)) {
-   return Smi::FromInt(handle->shared()->length());
+ Handle<JSFunction> function_handle(function);
+ if (Compiler::EnsureCompiled(function_handle, KEEP_EXCEPTION)) {
+   return Smi::FromInt(function_handle->shared()->length());
}
return Failure::Exception();
}

View File

@@ -1720,16 +1720,16 @@ Local<Script> Script::New(v8::Handle<String> source,
pre_data_impl = NULL;
}
i::Handle<i::SharedFunctionInfo> result =
- i::Compiler::Compile(str,
-     name_obj,
-     line_offset,
-     column_offset,
-     is_shared_cross_origin,
-     isolate->global_context(),
-     NULL,
-     pre_data_impl,
-     Utils::OpenHandle(*script_data, true),
-     i::NOT_NATIVES_CODE);
+ i::Compiler::CompileScript(str,
+     name_obj,
+     line_offset,
+     column_offset,
+     is_shared_cross_origin,
+     isolate->global_context(),
+     NULL,
+     pre_data_impl,
+     Utils::OpenHandle(*script_data, true),
+     i::NOT_NATIVES_CODE);
has_pending_exception = result.is_null();
EXCEPTION_BAILOUT_CHECK(isolate, Local<Script>());
raw_result = *result;

View File

@@ -289,8 +289,8 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
}
- static void CallRuntimePassFunction(MacroAssembler* masm,
-                                     Runtime::FunctionId function_id) {
+ static void CallRuntimePassFunction(
+     MacroAssembler* masm, Runtime::FunctionId function_id) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(r1);
@@ -313,7 +313,13 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
}
- void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+ static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
+   __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
+   __ Jump(r0);
+ }
+ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
// not checking may delay installing ready functions, and always checking
@@ -324,22 +330,14 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
- CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
- // Tail call to returned code.
- __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(r0);
+ CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
+ GenerateTailCallToReturnedCode(masm);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
- void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
-   GenerateTailCallToSharedCode(masm);
- }
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool count_constructions) {
@@ -774,19 +772,38 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
}
- void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kLazyCompile);
-   // Do a tail-call of the compiled function.
-   __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-   __ Jump(r2);
+ void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
+   CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
+   GenerateTailCallToReturnedCode(masm);
}
- void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
-   // Do a tail-call of the compiled function.
-   __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-   __ Jump(r2);
+ static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
+   FrameScope scope(masm, StackFrame::INTERNAL);
+   // Push a copy of the function onto the stack.
+   __ push(r1);
+   // Push call kind information and function as parameter to the runtime call.
+   __ Push(r5, r1);
+   // Whether to compile in a background thread.
+   __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
+   __ CallRuntime(Runtime::kCompileOptimized, 2);
+   // Restore call kind information.
+   __ pop(r5);
+   // Restore receiver.
+   __ pop(r1);
}
+ void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
+   CallCompileOptimized(masm, false);
+   GenerateTailCallToReturnedCode(masm);
+ }
+ void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
+   CallCompileOptimized(masm, true);
+   GenerateTailCallToReturnedCode(masm);
+ }

View File

@@ -1499,7 +1499,7 @@ bool Genesis::CompileScriptCached(Isolate* isolate,
if (cache == NULL || !cache->Lookup(name, &function_info)) {
ASSERT(source->IsOneByteRepresentation());
Handle<String> script_name = factory->NewStringFromUtf8(name);
- function_info = Compiler::Compile(
+ function_info = Compiler::CompileScript(
source,
script_name,
0,
@@ -2354,7 +2354,7 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) {
Handle<JSFunction> function
    = Handle<JSFunction>(JSFunction::cast(function_object));
builtins->set_javascript_builtin(id, *function);
- if (!JSFunction::CompileLazy(function, CLEAR_EXCEPTION)) {
+ if (!Compiler::EnsureCompiled(function, CLEAR_EXCEPTION)) {
return false;
}
builtins->set_javascript_builtin_code(id, function->shared()->code());

View File

@@ -88,7 +88,7 @@ enum BuiltinExtraArguments {
#define BUILTIN_LIST_A(V) \
V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
- V(InRecompileQueue, BUILTIN, UNINITIALIZED, \
+ V(InOptimizationQueue, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(JSConstructStubCountdown, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
@@ -100,11 +100,11 @@ enum BuiltinExtraArguments {
kNoExtraICState) \
V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
- V(LazyCompile, BUILTIN, UNINITIALIZED, \
+ V(CompileUnoptimized, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
- V(LazyRecompile, BUILTIN, UNINITIALIZED, \
+ V(CompileOptimized, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
- V(ConcurrentRecompile, BUILTIN, UNINITIALIZED, \
+ V(CompileOptimizedConcurrent, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
V(NotifyDeoptimized, BUILTIN, UNINITIALIZED, \
kNoExtraICState) \
@@ -385,15 +385,15 @@ class Builtins {
static void Generate_Adaptor(MacroAssembler* masm,
CFunctionId id,
BuiltinExtraArguments extra_args);
- static void Generate_InRecompileQueue(MacroAssembler* masm);
- static void Generate_ConcurrentRecompile(MacroAssembler* masm);
+ static void Generate_CompileUnoptimized(MacroAssembler* masm);
+ static void Generate_InOptimizationQueue(MacroAssembler* masm);
+ static void Generate_CompileOptimized(MacroAssembler* masm);
+ static void Generate_CompileOptimizedConcurrent(MacroAssembler* masm);
static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
static void Generate_JSConstructStubApi(MacroAssembler* masm);
static void Generate_JSEntryTrampoline(MacroAssembler* masm);
static void Generate_JSConstructEntryTrampoline(MacroAssembler* masm);
- static void Generate_LazyCompile(MacroAssembler* masm);
- static void Generate_LazyRecompile(MacroAssembler* masm);
static void Generate_NotifyDeoptimized(MacroAssembler* masm);
static void Generate_NotifySoftDeoptimized(MacroAssembler* masm);
static void Generate_NotifyLazyDeoptimized(MacroAssembler* masm);

View File

@@ -421,7 +421,6 @@ Handle<SharedFunctionInfo> CompilationCache::LookupScript(
Handle<SharedFunctionInfo> CompilationCache::LookupEval(
Handle<String> source,
Handle<Context> context,
- bool is_global,
LanguageMode language_mode,
int scope_position) {
if (!IsEnabled()) {
@@ -429,7 +428,7 @@ Handle<SharedFunctionInfo> CompilationCache::LookupEval(
}
Handle<SharedFunctionInfo> result;
- if (is_global) {
+ if (context->IsNativeContext()) {
result = eval_global_.Lookup(
source, context, language_mode, scope_position);
} else {
@@ -454,9 +453,7 @@ Handle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source,
void CompilationCache::PutScript(Handle<String> source,
Handle<Context> context,
Handle<SharedFunctionInfo> function_info) {
- if (!IsEnabled()) {
-   return;
- }
+ if (!IsEnabled()) return;
script_.Put(source, context, function_info);
}
@@ -464,15 +461,12 @@ void CompilationCache::PutScript(Handle<String> source,
void CompilationCache::PutEval(Handle<String> source,
Handle<Context> context,
- bool is_global,
Handle<SharedFunctionInfo> function_info,
int scope_position) {
- if (!IsEnabled()) {
-   return;
- }
+ if (!IsEnabled()) return;
HandleScope scope(isolate());
- if (is_global) {
+ if (context->IsNativeContext()) {
eval_global_.Put(source, context, function_info, scope_position);
} else {
ASSERT(scope_position != RelocInfo::kNoPosition);

View File

@@ -222,7 +222,6 @@ class CompilationCache {
// contain a script for the given source string.
Handle<SharedFunctionInfo> LookupEval(Handle<String> source,
Handle<Context> context,
- bool is_global,
LanguageMode language_mode,
int scope_position);
@@ -241,7 +240,6 @@ class CompilationCache {
// with the shared function info. This may overwrite an existing mapping.
void PutEval(Handle<String> source,
Handle<Context> context,
- bool is_global,
Handle<SharedFunctionInfo> function_info,
int scope_position);
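
The bool is_global parameter is gone from both declarations above: the cache now derives globalness from the context itself. A hedged sketch of the resulting lookup dispatch, assuming the eval_contextual_ sub-cache named in V8 of this era (the else-branch is truncated out of the compilation-cache.cc hunk above):

  // Sketch: choose the eval sub-cache from the context, not a caller flag.
  Handle<SharedFunctionInfo> result;
  if (context->IsNativeContext()) {
    result = eval_global_.Lookup(source, context, language_mode, scope_position);
  } else {
    result = eval_contextual_.Lookup(source, context, language_mode, scope_position);
  }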

File diff suppressed because it is too large.

View File

@@ -84,8 +84,7 @@ class CompilationInfo {
ScriptDataImpl* pre_parse_data() const { return pre_parse_data_; }
Handle<Context> context() const { return context_; }
BailoutId osr_ast_id() const { return osr_ast_id_; }
- uint32_t osr_pc_offset() const { return osr_pc_offset_; }
- Handle<Code> osr_patched_code() const { return osr_patched_code_; }
+ Handle<Code> unoptimized_code() const { return unoptimized_code_; }
int opt_count() const { return opt_count_; }
int num_parameters() const;
int num_heap_slots() const;
@@ -189,19 +188,16 @@ class CompilationInfo {
void SetContext(Handle<Context> context) {
context_ = context;
}
- void MarkCompilingForDebugging(Handle<Code> current_code) {
-   ASSERT(mode_ != OPTIMIZE);
-   ASSERT(current_code->kind() == Code::FUNCTION);
+ void MarkCompilingForDebugging() {
flags_ |= IsCompilingForDebugging::encode(true);
- if (current_code->is_compiled_optimizable()) {
-   EnableDeoptimizationSupport();
- } else {
-   mode_ = CompilationInfo::NONOPT;
- }
}
bool IsCompilingForDebugging() {
return IsCompilingForDebugging::decode(flags_);
}
+ void MarkNonOptimizable() {
+   SetMode(CompilationInfo::NONOPT);
+ }
bool ShouldTrapOnDeopt() const {
return (FLAG_trap_on_deopt && IsOptimizing()) ||
@@ -221,9 +217,11 @@ class CompilationInfo {
bool IsOptimizing() const { return mode_ == OPTIMIZE; }
bool IsOptimizable() const { return mode_ == BASE; }
bool IsStub() const { return mode_ == STUB; }
- void SetOptimizing(BailoutId osr_ast_id) {
+ void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
+   ASSERT(!shared_info_.is_null());
SetMode(OPTIMIZE);
osr_ast_id_ = osr_ast_id;
+   unoptimized_code_ = unoptimized;
}
void DisableOptimization();
@@ -239,11 +237,6 @@ class CompilationInfo {
// Determines whether or not to insert a self-optimization header.
bool ShouldSelfOptimize();
- // Reset code to the unoptimized version when optimization is aborted.
- void AbortOptimization() {
-   SetCode(handle(shared_info()->code()));
- }
void set_deferred_handles(DeferredHandles* deferred_handles) {
ASSERT(deferred_handles_ == NULL);
deferred_handles_ = deferred_handles;
@@ -266,7 +259,7 @@ class CompilationInfo {
SaveHandle(&shared_info_);
SaveHandle(&context_);
SaveHandle(&script_);
- SaveHandle(&osr_patched_code_);
+ SaveHandle(&unoptimized_code_);
}
BailoutReason bailout_reason() const { return bailout_reason_; }
@@ -313,13 +306,8 @@ class CompilationInfo {
return abort_due_to_dependency_;
}
- void SetOsrInfo(Handle<Code> code, uint32_t pc_offset) {
-   osr_patched_code_ = code;
-   osr_pc_offset_ = pc_offset;
- }
- bool HasSameOsrEntry(Handle<JSFunction> function, uint32_t pc_offset) {
-   return osr_pc_offset_ == pc_offset && function.is_identical_to(closure_);
+ bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
+   return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure_);
}
protected:
@@ -416,13 +404,10 @@ class CompilationInfo {
// Compilation mode flag and whether deoptimization is allowed.
Mode mode_;
BailoutId osr_ast_id_;
- // The pc_offset corresponding to osr_ast_id_ in unoptimized code.
- // We can look this up in the back edge table, but cache it for quick access.
- uint32_t osr_pc_offset_;
- // The unoptimized code we patched for OSR may not be the shared code
- // afterwards, since we may need to compile it again to include deoptimization
- // data. Keep track which code we patched.
- Handle<Code> osr_patched_code_;
+ Handle<Code> unoptimized_code_;
// Flag whether compilation needs to be aborted due to dependency change.
bool abort_due_to_dependency_;
@@ -518,9 +503,9 @@ class LChunk;
// fail, bail-out to the full code generator or succeed. Apart from
// their return value, the status of the phase last run can be checked
// using last_status().
- class RecompileJob: public ZoneObject {
+ class OptimizedCompileJob: public ZoneObject {
public:
- explicit RecompileJob(CompilationInfo* info)
+ explicit OptimizedCompileJob(CompilationInfo* info)
: info_(info),
graph_builder_(NULL),
graph_(NULL),
@@ -534,14 +519,21 @@ class RecompileJob: public ZoneObject {
MUST_USE_RESULT Status CreateGraph();
MUST_USE_RESULT Status OptimizeGraph();
- MUST_USE_RESULT Status GenerateAndInstallCode();
+ MUST_USE_RESULT Status GenerateCode();
Status last_status() const { return last_status_; }
CompilationInfo* info() const { return info_; }
Isolate* isolate() const { return info()->isolate(); }
- MUST_USE_RESULT Status AbortOptimization() {
-   info_->AbortOptimization();
+ MUST_USE_RESULT Status AbortOptimization(
+     BailoutReason reason = kNoReason) {
+   if (reason != kNoReason) info_->set_bailout_reason(reason);
return SetLastStatus(BAILED_OUT);
}
+ MUST_USE_RESULT Status AbortAndDisableOptimization(
+     BailoutReason reason = kNoReason) {
+   if (reason != kNoReason) info_->set_bailout_reason(reason);
+   info_->shared_info()->DisableOptimization(info_->bailout_reason());
+   return SetLastStatus(BAILED_OUT);
+ }
@@ -571,7 +563,7 @@ class RecompileJob: public ZoneObject {
void RecordOptimizationStats();
struct Timer {
- Timer(RecompileJob* job, TimeDelta* location)
+ Timer(OptimizedCompileJob* job, TimeDelta* location)
: job_(job), location_(location) {
ASSERT(location_ != NULL);
timer_.Start();
@@ -581,7 +573,7 @@ class RecompileJob: public ZoneObject {
*location_ += timer_.Elapsed();
}
- RecompileJob* job_;
+ OptimizedCompileJob* job_;
ElapsedTimer timer_;
TimeDelta* location_;
};
@@ -601,57 +593,53 @@ class RecompileJob: public ZoneObject {
class Compiler : public AllStatic {
public:
- // Call count before primitive functions trigger their own optimization.
- static const int kCallsUntilPrimitiveOpt = 200;
+ static Handle<Code> GetUnoptimizedCode(Handle<JSFunction> function);
+ static Handle<Code> GetUnoptimizedCode(Handle<SharedFunctionInfo> shared);
+ static bool EnsureCompiled(Handle<JSFunction> function,
+                            ClearExceptionFlag flag);
+ static Handle<Code> GetCodeForDebugging(Handle<JSFunction> function);
- // All routines return a SharedFunctionInfo.
- // If an error occurs an exception is raised and the return handle
- // contains NULL.
+ #ifdef ENABLE_DEBUGGER_SUPPORT
+ static void CompileForLiveEdit(Handle<Script> script);
+ #endif
- // Compile a String source within a context.
- static Handle<SharedFunctionInfo> Compile(Handle<String> source,
-     Handle<Object> script_name,
-     int line_offset,
-     int column_offset,
-     bool is_shared_cross_origin,
-     Handle<Context> context,
-     v8::Extension* extension,
-     ScriptDataImpl* pre_data,
-     Handle<Object> script_data,
-     NativesFlag is_natives_code);
- // Compile a String source within a context for Eval.
- static Handle<SharedFunctionInfo> CompileEval(Handle<String> source,
+ // Compile a String source within a context for eval.
+ static Handle<JSFunction> GetFunctionFromEval(Handle<String> source,
      Handle<Context> context,
-     bool is_global,
      LanguageMode language_mode,
      ParseRestriction restriction,
      int scope_position);
- // Compile from function info (used for lazy compilation). Returns true on
- // success and false if the compilation resulted in a stack overflow.
- static bool CompileLazy(CompilationInfo* info);
+ // Compile a String source within a context.
+ static Handle<SharedFunctionInfo> CompileScript(Handle<String> source,
+     Handle<Object> script_name,
+     int line_offset,
+     int column_offset,
+     bool is_shared_cross_origin,
+     Handle<Context> context,
+     v8::Extension* extension,
+     ScriptDataImpl* pre_data,
+     Handle<Object> script_data,
+     NativesFlag is_natives_code);
- static bool RecompileConcurrent(Handle<JSFunction> function,
-     Handle<Code> unoptimized,
-     uint32_t osr_pc_offset = 0);
- // Compile a shared function info object (the function is possibly lazily
- // compiled).
+ // Create a shared function info object (the code may be lazily compiled).
static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
    Handle<Script> script);
- // Set the function info for a newly compiled function.
- static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
-     FunctionLiteral* lit,
-     bool is_toplevel,
-     Handle<Script> script);
enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };
- static Handle<Code> InstallOptimizedCode(RecompileJob* job);
+ // Generate and return optimized code or start a concurrent optimization job.
+ // In the latter case, return the InOptimizationQueue builtin. On failure,
+ // return the empty handle.
+ static Handle<Code> GetOptimizedCode(
+     Handle<JSFunction> function,
+     Handle<Code> current_code,
+     ConcurrencyMode mode,
+     BailoutId osr_ast_id = BailoutId::None());
- #ifdef ENABLE_DEBUGGER_SUPPORT
- static bool MakeCodeForLiveEdit(CompilationInfo* info);
- #endif
+ // Generate and return code from previously queued optimization job.
+ // On failure, return the empty handle.
+ static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
CompilationInfo* info,
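
The job runs in three phases (CreateGraph, OptimizeGraph, GenerateCode), each reporting through last_status(). A hedged sketch of a synchronous driver, assuming the job's Status enum includes SUCCEEDED alongside the BAILED_OUT used above; the wrapper function itself is hypothetical:

  // Sketch: run the three phases in order; any failure bails out to the
  // full code generator, mirroring the comment on the class above.
  Handle<Code> RunJobSynchronously(OptimizedCompileJob* job) {
    if (job->CreateGraph() != OptimizedCompileJob::SUCCEEDED ||
        job->OptimizeGraph() != OptimizedCompileJob::SUCCEEDED ||
        job->GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
      return Handle<Code>::null();
    }
    return job->info()->code();
  }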

View File

@@ -783,14 +783,13 @@ bool Debug::CompileDebuggerScript(Isolate* isolate, int index) {
// Compile the script.
Handle<SharedFunctionInfo> function_info;
- function_info = Compiler::Compile(source_code,
-     script_name,
-     0, 0,
-     false,
-     context,
-     NULL, NULL,
-     Handle<String>::null(),
-     NATIVES_CODE);
+ function_info = Compiler::CompileScript(source_code,
+     script_name, 0, 0,
+     false,
+     context,
+     NULL, NULL,
+     Handle<String>::null(),
+     NATIVES_CODE);
// Silently ignore stack overflows during compilation.
if (function_info.is_null()) {
@@ -1868,41 +1867,6 @@ void Debug::ClearStepNext() {
}
- // Helper function to compile full code for debugging. This code will
- // have debug break slots and deoptimization information. Deoptimization
- // information is required in case that an optimized version of this
- // function is still activated on the stack. It will also make sure that
- // the full code is compiled with the same flags as the previous version,
- // that is flags which can change the code generated. The current method
- // of mapping from already compiled full code without debug break slots
- // to full code with debug break slots depends on the generated code is
- // otherwise exactly the same.
- static bool CompileFullCodeForDebugging(Handle<JSFunction> function,
-                                         Handle<Code> current_code) {
-   ASSERT(!current_code->has_debug_break_slots());
-   CompilationInfoWithZone info(function);
-   info.MarkCompilingForDebugging(current_code);
-   ASSERT(!info.shared_info()->is_compiled());
-   ASSERT(!info.isolate()->has_pending_exception());
-   // Use compile lazy which will end up compiling the full code in the
-   // configuration configured above.
-   bool result = Compiler::CompileLazy(&info);
-   ASSERT(result != info.isolate()->has_pending_exception());
-   info.isolate()->clear_pending_exception();
- #if DEBUG
-   if (result) {
-     Handle<Code> new_code(function->shared()->code());
-     ASSERT(new_code->has_debug_break_slots());
-     ASSERT(current_code->is_compiled_optimizable() ==
-            new_code->is_compiled_optimizable());
-   }
- #endif
-   return result;
- }
static void CollectActiveFunctionsFromThread(
Isolate* isolate,
ThreadLocalTop* top,
@@ -2059,8 +2023,7 @@ void Debug::PrepareForBreakPoints() {
Deoptimizer::DeoptimizeAll(isolate_);
- Handle<Code> lazy_compile =
-     Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile));
+ Handle<Code> lazy_compile = isolate_->builtins()->CompileUnoptimized();
// There will be at least one break point when we are done.
has_break_points_ = true;
@@ -2112,9 +2075,9 @@ void Debug::PrepareForBreakPoints() {
function->set_code(*lazy_compile);
function->shared()->set_code(*lazy_compile);
} else if (kind == Code::BUILTIN &&
- (function->IsInRecompileQueue() ||
-  function->IsMarkedForLazyRecompilation() ||
-  function->IsMarkedForConcurrentRecompilation())) {
+ (function->IsInOptimizationQueue() ||
+  function->IsMarkedForOptimization() ||
+  function->IsMarkedForConcurrentOptimization())) {
// Abort in-flight compilation.
Code* shared_code = function->shared()->code();
if (shared_code->kind() == Code::FUNCTION &&
@@ -2159,19 +2122,12 @@ void Debug::PrepareForBreakPoints() {
if (!shared->code()->has_debug_break_slots()) {
// Try to compile the full code with debug break slots. If it
// fails just keep the current code.
- Handle<Code> current_code(function->shared()->code());
- shared->set_code(*lazy_compile);
bool prev_force_debugger_active =
isolate_->debugger()->force_debugger_active();
isolate_->debugger()->set_force_debugger_active(true);
- ASSERT(current_code->kind() == Code::FUNCTION);
- CompileFullCodeForDebugging(function, current_code);
+ function->ReplaceCode(*Compiler::GetCodeForDebugging(function));
isolate_->debugger()->set_force_debugger_active(
prev_force_debugger_active);
- if (!shared->is_compiled()) {
-   shared->set_code(*current_code);
-   continue;
- }
}
// Keep function code in sync with shared function info.
@@ -2284,11 +2240,10 @@ Object* Debug::FindSharedFunctionInfoInScript(Handle<Script> script,
// will compile all inner functions that cannot be compiled without a
// context, because Compiler::BuildFunctionInfo checks whether the
// debugger is active.
- if (target_function.is_null()) {
-   SharedFunctionInfo::CompileLazy(target, KEEP_EXCEPTION);
- } else {
-   JSFunction::CompileLazy(target_function, KEEP_EXCEPTION);
- }
+ Handle<Code> result = target_function.is_null()
+     ? Compiler::GetUnoptimizedCode(target)
+     : Compiler::GetUnoptimizedCode(target_function);
+ if (result.is_null()) return isolate_->heap()->undefined_value();
}
} // End while loop.
@@ -2312,7 +2267,7 @@ bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared,
// Ensure function is compiled. Return false if this failed.
if (!function.is_null() &&
- !JSFunction::EnsureCompiled(function, CLEAR_EXCEPTION)) {
+ !Compiler::EnsureCompiled(function, CLEAR_EXCEPTION)) {
return false;
}
@@ -2598,6 +2553,21 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
}
+ void Debug::RecordEvalCaller(Handle<Script> script) {
+   script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
+   // For eval scripts add information on the function from which eval was
+   // called.
+   StackTraceFrameIterator it(script->GetIsolate());
+   if (!it.done()) {
+     script->set_eval_from_shared(it.frame()->function()->shared());
+     Code* code = it.frame()->LookupCode();
+     int offset = static_cast<int>(
+         it.frame()->pc() - code->instruction_start());
+     script->set_eval_from_instructions_offset(Smi::FromInt(offset));
+   }
+ }
void Debug::AfterGarbageCollection() {
// Generate events for collected scripts.
if (script_cache_ != NULL) {

View File

@@ -424,6 +424,9 @@ class Debug {
void AddScriptToScriptCache(Handle<Script> script);
Handle<FixedArray> GetLoadedScripts();
+ // Record function from which eval was called.
+ static void RecordEvalCaller(Handle<Script> script);
// Garbage collection notifications.
void AfterGarbageCollection();

View File

@@ -940,7 +940,10 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
if (index > 0) {
// Caching of optimized code enabled and optimized code found.
- function_info->InstallFromOptimizedCodeMap(*result, index);
+ FixedArray* literals =
+     function_info->GetLiteralsFromOptimizedCodeMap(index);
+ if (literals != NULL) result->set_literals(literals);
+ result->ReplaceCode(function_info->GetCodeFromOptimizedCodeMap(index));
return result;
}
@@ -951,7 +954,7 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
function_info->allows_lazy_compilation() &&
!function_info->optimization_disabled() &&
!isolate()->DebuggerHasBreakPoints()) {
- result->MarkForLazyRecompilation();
+ result->MarkForOptimization();
}
return result;
}

View File

@@ -312,6 +312,10 @@ void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
Isolate* isolate = info->isolate();
+ Logger::TimerEventScope timer(
+     isolate, Logger::TimerEventScope::v8_compile_full_code);
Handle<Script> script = info->script();
if (!script->IsUndefined() && !script->source()->IsUndefined()) {
int len = String::cast(script->source())->length();
@@ -1644,8 +1648,7 @@ bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
}
- void BackEdgeTable::Patch(Isolate* isolate,
-                           Code* unoptimized) {
+ void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
DisallowHeapAllocation no_gc;
Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
@@ -1668,8 +1671,7 @@ void BackEdgeTable::Patch(Isolate* isolate,
}
- void BackEdgeTable::Revert(Isolate* isolate,
-                            Code* unoptimized) {
+ void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
DisallowHeapAllocation no_gc;
Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);
@@ -1694,29 +1696,23 @@ void BackEdgeTable::Revert(Isolate* isolate,
}
- void BackEdgeTable::AddStackCheck(CompilationInfo* info) {
+ void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
DisallowHeapAllocation no_gc;
- Isolate* isolate = info->isolate();
- Code* code = *info->osr_patched_code();
- Address pc = code->instruction_start() + info->osr_pc_offset();
- ASSERT_EQ(info->osr_ast_id().ToInt(),
-           code->TranslatePcOffsetToAstId(info->osr_pc_offset()).ToInt());
- ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate, code, pc));
+ Isolate* isolate = code->GetIsolate();
+ Address pc = code->instruction_start() + pc_offset;
Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
- PatchAt(code, pc, OSR_AFTER_STACK_CHECK, patch);
+ PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
}
- void BackEdgeTable::RemoveStackCheck(CompilationInfo* info) {
+ void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
DisallowHeapAllocation no_gc;
- Isolate* isolate = info->isolate();
- Code* code = *info->osr_patched_code();
- Address pc = code->instruction_start() + info->osr_pc_offset();
- ASSERT_EQ(info->osr_ast_id().ToInt(),
-           code->TranslatePcOffsetToAstId(info->osr_pc_offset()).ToInt());
- if (GetBackEdgeState(isolate, code, pc) == OSR_AFTER_STACK_CHECK) {
+ Isolate* isolate = code->GetIsolate();
+ Address pc = code->instruction_start() + pc_offset;
+ if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
- PatchAt(code, pc, ON_STACK_REPLACEMENT, patch);
+ PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
}
}

View File

@@ -928,10 +928,10 @@ class BackEdgeTable {
// Change a back edge patched for on-stack replacement to perform a
// stack check first.
- static void AddStackCheck(CompilationInfo* info);
+ static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);
- // Remove the stack check, if available, and replace by on-stack replacement.
- static void RemoveStackCheck(CompilationInfo* info);
+ // Revert the patch by AddStackCheck.
+ static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);
// Return the current patch state of the back edge.
static BackEdgeState GetBackEdgeState(Isolate* isolate,

View File

@@ -74,8 +74,8 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
- static void CallRuntimePassFunction(MacroAssembler* masm,
-                                     Runtime::FunctionId function_id) {
+ static void CallRuntimePassFunction(
+     MacroAssembler* masm, Runtime::FunctionId function_id) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function.
__ push(edi);
@@ -100,7 +100,13 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
}
- void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+ static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
+   __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
+   __ jmp(eax);
+ }
+ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
// not checking may delay installing ready functions, and always checking
@@ -112,22 +118,14 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, Label::kNear);
- CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
- // Tail call to returned code.
- __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
- __ jmp(eax);
+ CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
+ GenerateTailCallToReturnedCode(masm);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
- void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
-   GenerateTailCallToSharedCode(masm);
- }
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool count_constructions) {
@@ -509,19 +507,41 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
}
- void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kLazyCompile);
-   // Do a tail-call of the compiled function.
-   __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
-   __ jmp(eax);
+ void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
+   CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
+   GenerateTailCallToReturnedCode(masm);
}
- void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
-   // Do a tail-call of the compiled function.
-   __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
-   __ jmp(eax);
+ static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
+   FrameScope scope(masm, StackFrame::INTERNAL);
+   // Push a copy of the function.
+   __ push(edi);
+   // Push call kind information.
+   __ push(ecx);
+   // Function is also the parameter to the runtime call.
+   __ push(edi);
+   // Whether to compile in a background thread.
+   __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
+   __ CallRuntime(Runtime::kCompileOptimized, 2);
+   // Restore call kind information.
+   __ pop(ecx);
+   // Restore receiver.
+   __ pop(edi);
}
+ void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
+   CallCompileOptimized(masm, false);
+   GenerateTailCallToReturnedCode(masm);
+ }
+ void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
+   CallCompileOptimized(masm, true);
+   GenerateTailCallToReturnedCode(masm);
+ }

View File

@@ -2054,7 +2054,7 @@ RUNTIME_FUNCTION(MaybeObject*, CallIC_Miss) {
if (raw_function->is_compiled()) return raw_function;
Handle<JSFunction> function(raw_function);
- JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+ Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
return *function;
}
@@ -2075,7 +2075,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_Miss) {
if (raw_function->is_compiled()) return raw_function;
Handle<JSFunction> function(raw_function, isolate);
- JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+ Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
return *function;
}
@@ -2155,7 +2155,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_MissFromStubFailure) {
if (raw_function->is_compiled()) return raw_function;
Handle<JSFunction> function(raw_function, isolate);
- JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+ Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
return *function;
}

View File

@@ -602,27 +602,6 @@ Handle<JSArray> LiveEdit::CompareStrings(Handle<String> s1,
}
- static void CompileScriptForTracker(Isolate* isolate, Handle<Script> script) {
-   // TODO(635): support extensions.
-   PostponeInterruptsScope postpone(isolate);
-   // Build AST.
-   CompilationInfoWithZone info(script);
-   info.MarkAsGlobal();
-   // Parse and don't allow skipping lazy functions.
-   if (Parser::Parse(&info)) {
-     // Compile the code.
-     LiveEditFunctionTracker tracker(info.isolate(), info.function());
-     if (Compiler::MakeCodeForLiveEdit(&info)) {
-       ASSERT(!info.code().is_null());
-       tracker.RecordRootFunctionInfo(info.code());
-     } else {
-       info.isolate()->StackOverflow();
-     }
-   }
- }
// Unwraps JSValue object, returning its field "value"
static Handle<Object> UnwrapJSValue(Handle<JSValue> jsValue) {
return Handle<Object>(jsValue->value(), jsValue->GetIsolate());
@@ -951,7 +930,7 @@ JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script,
try_catch.SetVerbose(true);
// A logical 'try' section.
- CompileScriptForTracker(isolate, script);
+ Compiler::CompileForLiveEdit(script);
}
// A logical 'catch' section.

View File

@@ -1432,8 +1432,7 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
CALL_LISTENERS(CodeCreateEvent(tag, code, shared, info, name));
if (!FLAG_log_code || !log_->IsEnabled()) return;
- if (code == isolate_->builtins()->builtin(
-     Builtins::kLazyCompile))
+ if (code == isolate_->builtins()->builtin(Builtins::kCompileUnoptimized))
return;
Log::MessageBuilder msg(log_);
@@ -1967,8 +1966,8 @@ void Logger::LogCompiledFunctions() {
// During iteration, there can be heap allocation due to
// GetScriptLineNumber call.
for (int i = 0; i < compiled_funcs_count; ++i) {
- if (*code_objects[i] == isolate_->builtins()->builtin(
-     Builtins::kLazyCompile))
+ if (code_objects[i].is_identical_to(
+     isolate_->builtins()->CompileUnoptimized()))
continue;
LogExistingFunction(sfis[i], code_objects[i]);
}

View File

@@ -986,7 +986,8 @@ void MarkCompactCollector::Finish() {
// objects have been marked.
void CodeFlusher::ProcessJSFunctionCandidates() {
- Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+ Code* lazy_compile =
+     isolate_->builtins()->builtin(Builtins::kCompileUnoptimized);
Object* undefined = isolate_->heap()->undefined_value();
JSFunction* candidate = jsfunction_candidates_head_;
@@ -1031,7 +1032,8 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
- Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+ Code* lazy_compile =
+     isolate_->builtins()->builtin(Builtins::kCompileUnoptimized);
SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
SharedFunctionInfo* next_candidate;

View File

@@ -297,8 +297,8 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
}
- static void CallRuntimePassFunction(MacroAssembler* masm,
-                                     Runtime::FunctionId function_id) {
+ static void CallRuntimePassFunction(
+     MacroAssembler* masm, Runtime::FunctionId function_id) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
// Push call kind information and function as parameter to the runtime call.
@@ -318,7 +318,13 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
}
- void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+ static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
+   __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
+   __ Jump(at);
+ }
+ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
// not checking may delay installing ready functions, and always checking
@@ -328,22 +334,14 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
__ LoadRoot(t0, Heap::kStackLimitRootIndex);
__ Branch(&ok, hs, sp, Operand(t0));
- CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
- // Tail call to returned code.
- __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(at);
+ CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
+ GenerateTailCallToReturnedCode(masm);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
- void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
-   GenerateTailCallToSharedCode(masm);
- }
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool count_constructions) {
@@ -790,22 +788,40 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
}
- void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kLazyCompile);
-   // Do a tail-call of the compiled function.
-   __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
-   __ Jump(t9);
+ void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
+   CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
+   GenerateTailCallToReturnedCode(masm);
}
- void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
-   CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
-   // Do a tail-call of the compiled function.
-   __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
-   __ Jump(t9);
+ static void CallCompileOptimized(MacroAssembler* masm,
+                                  bool concurrent) {
+   FrameScope scope(masm, StackFrame::INTERNAL);
+   // Push a copy of the function onto the stack.
+   // Push call kind information and function as parameter to the runtime call.
+   __ Push(a1, t1, a1);
+   // Whether to compile in a background thread.
+   __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
+   __ CallRuntime(Runtime::kCompileOptimized, 2);
+   // Restore call kind information and receiver.
+   __ Pop(a1, t1);
}
+ void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
+   CallCompileOptimized(masm, false);
+   GenerateTailCallToReturnedCode(masm);
+ }
+ void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
+   CallCompileOptimized(masm, true);
+   GenerateTailCallToReturnedCode(masm);
+ }
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
// For now, we are relying on the fact that make_code_young doesn't do any
// garbage collection which allows us to save/restore the registers without

View File

@@ -4950,7 +4950,7 @@ void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
bool SharedFunctionInfo::is_compiled() {
return code() !=
- GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
+ GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
}
@@ -5073,20 +5073,21 @@ bool JSFunction::IsOptimizable() {
}
- bool JSFunction::IsMarkedForLazyRecompilation() {
-   return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
+ bool JSFunction::IsMarkedForOptimization() {
+   return code() == GetIsolate()->builtins()->builtin(
+       Builtins::kCompileOptimized);
}
- bool JSFunction::IsMarkedForConcurrentRecompilation() {
+ bool JSFunction::IsMarkedForConcurrentOptimization() {
return code() == GetIsolate()->builtins()->builtin(
-     Builtins::kConcurrentRecompile);
+     Builtins::kCompileOptimizedConcurrent);
}
- bool JSFunction::IsInRecompileQueue() {
+ bool JSFunction::IsInOptimizationQueue() {
return code() == GetIsolate()->builtins()->builtin(
-     Builtins::kInRecompileQueue);
+     Builtins::kInOptimizationQueue);
}
@@ -5196,7 +5197,8 @@ bool JSFunction::should_have_prototype() {
bool JSFunction::is_compiled() {
- return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
+ return code() !=
+     GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
}
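
Note how these predicates encode pipeline state in the code pointer itself: each Mark* method in objects.cc below installs one of the builtins as the function's code, and the matching Is* check is a plain pointer comparison. A hedged sketch of the concurrent path's progression (the driver function is hypothetical):

  // Sketch: state transitions expressed as code-pointer sentinels.
  void AdvanceToOptimizationQueue(JSFunction* function) {
    if (function->IsMarkedForConcurrentOptimization()) {
      // The CompileOptimizedConcurrent builtin kicks off the background
      // job, then parks the function in the queue state.
      function->MarkInOptimizationQueue();
    }
    // IsInOptimizationQueue() now holds until kTryInstallOptimizedCode
    // swaps in the finished optimized code.
  }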

View File

@@ -9475,19 +9475,19 @@ void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
}
- void JSFunction::MarkForLazyRecompilation() {
+ void JSFunction::MarkForOptimization() {
ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
ASSERT(!IsOptimized());
ASSERT(shared()->allows_lazy_compilation() ||
code()->optimizable());
ASSERT(!shared()->is_generator());
set_code_no_write_barrier(
- GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile));
+ GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
// No write barrier required, since the builtin is part of the root set.
}
- void JSFunction::MarkForConcurrentRecompilation() {
+ void JSFunction::MarkForConcurrentOptimization() {
ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
ASSERT(!IsOptimized());
ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
@@ -9499,16 +9499,16 @@ void JSFunction::MarkForConcurrentRecompilation() {
PrintF(" for concurrent recompilation.\n");
}
set_code_no_write_barrier(
- GetIsolate()->builtins()->builtin(Builtins::kConcurrentRecompile));
+ GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
// No write barrier required, since the builtin is part of the root set.
}
- void JSFunction::MarkInRecompileQueue() {
+ void JSFunction::MarkInOptimizationQueue() {
// We can only arrive here via the concurrent-recompilation builtin. If
// break points were set, the code would point to the lazy-compile builtin.
ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
- ASSERT(IsMarkedForConcurrentRecompilation() && !IsOptimized());
+ ASSERT(IsMarkedForConcurrentOptimization() && !IsOptimized());
ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
ASSERT(GetIsolate()->concurrent_recompilation_enabled());
if (FLAG_trace_concurrent_recompilation) {
@@ -9517,33 +9517,11 @@ void JSFunction::MarkInRecompileQueue() {
PrintF(" for concurrent recompilation.\n");
}
set_code_no_write_barrier(
- GetIsolate()->builtins()->builtin(Builtins::kInRecompileQueue));
+ GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
// No write barrier required, since the builtin is part of the root set.
}
- static bool CompileLazyHelper(CompilationInfo* info,
-                               ClearExceptionFlag flag) {
-   // Compile the source information to a code object.
-   ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled());
-   ASSERT(!info->isolate()->has_pending_exception());
-   bool result = Compiler::CompileLazy(info);
-   ASSERT(result != info->isolate()->has_pending_exception());
-   if (!result && flag == CLEAR_EXCEPTION) {
-     info->isolate()->clear_pending_exception();
-   }
-   return result;
- }
- bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared,
-                                      ClearExceptionFlag flag) {
-   ASSERT(shared->allows_lazy_compilation_without_context());
-   CompilationInfoWithZone info(shared);
-   return CompileLazyHelper(&info, flag);
- }
void SharedFunctionInfo::AddToOptimizedCodeMap(
Handle<SharedFunctionInfo> shared,
Handle<Context> native_context,
@@ -9604,19 +9582,25 @@ MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context,
}
- void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function,
-                                                      int index) {
+ FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
ASSERT(index > kEntriesStart);
FixedArray* code_map = FixedArray::cast(optimized_code_map());
if (!bound()) {
FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
- ASSERT(cached_literals != NULL);
- function->set_literals(cached_literals);
+ ASSERT_NE(NULL, cached_literals);
+ return cached_literals;
}
+ return NULL;
+ }
+ Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
+ ASSERT(index > kEntriesStart);
+ FixedArray* code_map = FixedArray::cast(optimized_code_map());
Code* code = Code::cast(code_map->get(index));
- ASSERT(code != NULL);
- ASSERT(function->context()->native_context() == code_map->get(index - 1));
- function->ReplaceCode(code);
+ ASSERT_NE(NULL, code);
+ return code;
}
@@ -9682,50 +9666,6 @@ void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
}
- bool JSFunction::CompileLazy(Handle<JSFunction> function,
-                              ClearExceptionFlag flag) {
-   bool result = true;
-   if (function->shared()->is_compiled()) {
-     function->ReplaceCode(function->shared()->code());
-   } else {
-     ASSERT(function->shared()->allows_lazy_compilation());
-     CompilationInfoWithZone info(function);
-     result = CompileLazyHelper(&info, flag);
-     ASSERT(!result || function->is_compiled());
-   }
-   return result;
- }
- Handle<Code> JSFunction::CompileOsr(Handle<JSFunction> function,
-                                     BailoutId osr_ast_id,
-                                     ClearExceptionFlag flag) {
-   CompilationInfoWithZone info(function);
-   info.SetOptimizing(osr_ast_id);
-   if (CompileLazyHelper(&info, flag)) {
-     // TODO(titzer): don't install the OSR code.
-     // ASSERT(function->code() != *info.code());
-     return info.code();
-   } else {
-     return Handle<Code>::null();
-   }
- }
- bool JSFunction::CompileOptimized(Handle<JSFunction> function,
-                                   ClearExceptionFlag flag) {
-   CompilationInfoWithZone info(function);
-   info.SetOptimizing(BailoutId::None());
-   return CompileLazyHelper(&info, flag);
- }
- bool JSFunction::EnsureCompiled(Handle<JSFunction> function,
-                                 ClearExceptionFlag flag) {
-   return function->is_compiled() || CompileLazy(function, flag);
- }
void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
if (object->IsGlobalObject()) return;
@@ -10698,6 +10638,18 @@ BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
}
+ uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
+   DisallowHeapAllocation no_gc;
+   ASSERT(kind() == FUNCTION);
+   BackEdgeTable back_edges(this, &no_gc);
+   for (uint32_t i = 0; i < back_edges.length(); i++) {
+     if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
+   }
+   UNREACHABLE();  // We expect to find the back edge.
+   return 0;
+ }
void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
}

View File

@@ -1056,47 +1056,47 @@ class MaybeObject BASE_EMBEDDED {
\
V(k32BitValueInRegisterIsNotZeroExtended, \
"32 bit value in register is not zero-extended") \
- V(kAlignmentMarkerExpected, "alignment marker expected") \
+ V(kAlignmentMarkerExpected, "Alignment marker expected") \
V(kAllocationIsNotDoubleAligned, "Allocation is not double aligned") \
V(kAPICallReturnedInvalidObject, "API call returned invalid object") \
V(kArgumentsObjectValueInATestContext, \
- "arguments object value in a test context") \
- V(kArrayBoilerplateCreationFailed, "array boilerplate creation failed") \
- V(kArrayIndexConstantValueTooBig, "array index constant value too big") \
- V(kAssignmentToArguments, "assignment to arguments") \
+ "Arguments object value in a test context") \
+ V(kArrayBoilerplateCreationFailed, "Array boilerplate creation failed") \
+ V(kArrayIndexConstantValueTooBig, "Array index constant value too big") \
+ V(kAssignmentToArguments, "Assignment to arguments") \
V(kAssignmentToLetVariableBeforeInitialization, \
- "assignment to let variable before initialization") \
- V(kAssignmentToLOOKUPVariable, "assignment to LOOKUP variable") \
+ "Assignment to let variable before initialization") \
+ V(kAssignmentToLOOKUPVariable, "Assignment to LOOKUP variable") \
V(kAssignmentToParameterFunctionUsesArgumentsObject, \
- "assignment to parameter, function uses arguments object") \
+ "Assignment to parameter, function uses arguments object") \
V(kAssignmentToParameterInArgumentsObject, \
- "assignment to parameter in arguments object") \
+ "Assignment to parameter in arguments object") \
V(kAttemptToUseUndefinedCache, "Attempt to use undefined cache") \
V(kBadValueContextForArgumentsObjectValue, \
- "bad value context for arguments object value") \
+ "Bad value context for arguments object value") \
V(kBadValueContextForArgumentsValue, \
- "bad value context for arguments value") \
- V(kBailedOutDueToDependencyChange, "bailed out due to dependency change") \
- V(kBailoutWasNotPrepared, "bailout was not prepared") \
+ "Bad value context for arguments value") \
+ V(kBailedOutDueToDependencyChange, "Bailed out due to dependency change") \
+ V(kBailoutWasNotPrepared, "Bailout was not prepared") \
V(kBinaryStubGenerateFloatingPointCode, \
"BinaryStub_GenerateFloatingPointCode") \
V(kBothRegistersWereSmisInSelectNonSmi, \
"Both registers were smis in SelectNonSmi") \
V(kCallToAJavaScriptRuntimeFunction, \
- "call to a JavaScript runtime function") \
+ "Call to a JavaScript runtime function") \
V(kCannotTranslatePositionInChangedArea, \
"Cannot translate position in changed area") \
- V(kCodeGenerationFailed, "code generation failed") \
- V(kCodeObjectNotProperlyPatched, "code object not properly patched") \
- V(kCompoundAssignmentToLookupSlot, "compound assignment to lookup slot") \
- V(kContextAllocatedArguments, "context-allocated arguments") \
- V(kDebuggerIsActive, "debugger is active") \
+ V(kCodeGenerationFailed, "Code generation failed") \
+ V(kCodeObjectNotProperlyPatched, "Code object not properly patched") \
+ V(kCompoundAssignmentToLookupSlot, "Compound assignment to lookup slot") \
+ V(kContextAllocatedArguments, "Context-allocated arguments") \
+ V(kDebuggerIsActive, "Debugger is active") \
V(kDebuggerStatement, "DebuggerStatement") \
V(kDeclarationInCatchContext, "Declaration in catch context") \
V(kDeclarationInWithContext, "Declaration in with context") \
V(kDefaultNaNModeNotSet, "Default NaN mode not set") \
- V(kDeleteWithGlobalVariable, "delete with global variable") \
- V(kDeleteWithNonGlobalVariable, "delete with non-global variable") \
+ V(kDeleteWithGlobalVariable, "Delete with global variable") \
+ V(kDeleteWithNonGlobalVariable, "Delete with non-global variable") \
V(kDestinationOfCopyNotAligned, "Destination of copy not aligned") \
V(kDontDeleteCellsCannotContainTheHole, \
"DontDelete cells can't contain the hole") \
@@ -1104,9 +1104,9 @@ class MaybeObject BASE_EMBEDDED {
"DoPushArgument not implemented for double type") \
V(kEmitLoadRegisterUnsupportedDoubleImmediate, \
"EmitLoadRegister: Unsupported double immediate") \
- V(kEval, "eval") \
+ V(kEval, "Eval") \
V(kExpected0AsASmiSentinel, "Expected 0 as a Smi sentinel") \
- V(kExpectedAlignmentMarker, "expected alignment marker") \
+ V(kExpectedAlignmentMarker, "Expected alignment marker") \
V(kExpectedAllocationSiteInCell, \
"Expected AllocationSite in property cell") \
V(kExpectedPropertyCellInRegisterA2, \
@@ -1119,47 +1119,48 @@ class MaybeObject BASE_EMBEDDED {
"Expecting alignment for CopyBytes") \
V(kExportDeclaration, "Export declaration") \
V(kExternalStringExpectedButNotFound, \
- "external string expected, but not found") \
- V(kFailedBailedOutLastTime, "failed/bailed out last time") \
+ "External string expected, but not found") \
+ V(kFailedBailedOutLastTime, "Failed/bailed out last time") \
V(kForInStatementIsNotFastCase, "ForInStatement is not fast case") \
V(kForInStatementOptimizationIsDisabled, \
"ForInStatement optimization is disabled") \
V(kForInStatementWithNonLocalEachVariable, \
"ForInStatement with non-local each variable") \
V(kForOfStatement, "ForOfStatement") \
- V(kFrameIsExpectedToBeAligned, "frame is expected to be aligned") \
- V(kFunctionCallsEval, "function calls eval") \
- V(kFunctionIsAGenerator, "function is a generator") \
- V(kFunctionWithIllegalRedeclaration, "function with illegal redeclaration") \
+ V(kFrameIsExpectedToBeAligned, "Frame is expected to be aligned") \
+ V(kFunctionCallsEval, "Function calls eval") \
+ V(kFunctionIsAGenerator, "Function is a generator") \
+ V(kFunctionWithIllegalRedeclaration, "Function with illegal redeclaration") \
V(kGeneratedCodeIsTooLarge, "Generated code is too large") \
V(kGeneratorFailedToResume, "Generator failed to resume") \
- V(kGenerator, "generator") \
+ V(kGenerator, "Generator") \
V(kGlobalFunctionsMustHaveInitialMap, \
"Global functions must have initial map") \
V(kHeapNumberMapRegisterClobbered, "HeapNumberMap register clobbered") \
V(kHydrogenFilter, "Optimization disabled by filter") \
V(kImportDeclaration, "Import declaration") \
V(kImproperObjectOnPrototypeChainForStore, \
- "improper object on prototype chain for store") \
+ "Improper object on prototype chain for store") \
V(kIndexIsNegative, "Index is negative") \
V(kIndexIsTooLarge, "Index is too large") \
- V(kInlinedRuntimeFunctionClassOf, "inlined runtime function: ClassOf") \
+ V(kInlinedRuntimeFunctionClassOf, "Inlined runtime function: ClassOf") \
V(kInlinedRuntimeFunctionFastAsciiArrayJoin, \
- "inlined runtime function: FastAsciiArrayJoin") \
+ "Inlined runtime function: FastAsciiArrayJoin") \
V(kInlinedRuntimeFunctionGeneratorNext, \
- "inlined runtime function: GeneratorNext") \
+ "Inlined runtime function: GeneratorNext") \
V(kInlinedRuntimeFunctionGeneratorThrow, \
- "inlined runtime function: GeneratorThrow") \
+ "Inlined runtime function: GeneratorThrow") \
V(kInlinedRuntimeFunctionGetFromCache, \
- "inlined runtime function: GetFromCache") \
+ "Inlined runtime function: GetFromCache") \
V(kInlinedRuntimeFunctionIsNonNegativeSmi, \
- "inlined runtime function: IsNonNegativeSmi") \
+ "Inlined runtime function: IsNonNegativeSmi") \
V(kInlinedRuntimeFunctionIsRegExpEquivalent, \
- "inlined runtime function: IsRegExpEquivalent") \
+ "Inlined runtime function: IsRegExpEquivalent") \
V(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf, \
- "inlined runtime function: IsStringWrapperSafeForDefaultValueOf") \
- V(kInliningBailedOut, "inlining bailed out") \
+ "Inlined runtime function: IsStringWrapperSafeForDefaultValueOf") \
+ V(kInliningBailedOut, "Inlining bailed out") \
V(kInputGPRIsExpectedToHaveUpper32Cleared, \
- "input GPR is expected to have upper32 cleared") \
+ "Input GPR is expected to have upper32 cleared") \
V(kInstanceofStubUnexpectedCallSiteCacheCheck, \
"InstanceofStub unexpected call site cache (check)") \
V(kInstanceofStubUnexpectedCallSiteCacheCmp1, \
@@ -1174,9 +1175,9 @@ class MaybeObject BASE_EMBEDDED {
V(kInvalidElementsKindForInternalArrayOrInternalPackedArray, \
"Invalid ElementsKind for InternalArray or InternalPackedArray") \
V(kInvalidHandleScopeLevel, "Invalid HandleScope level") \
- V(kInvalidLeftHandSideInAssignment, "invalid left-hand side in assignment") \
- V(kInvalidLhsInCompoundAssignment, "invalid lhs in compound assignment") \
- V(kInvalidLhsInCountOperation, "invalid lhs in count operation") \
+ V(kInvalidLeftHandSideInAssignment, "Invalid left-hand side in assignment") \
+ V(kInvalidLhsInCompoundAssignment, "Invalid lhs in compound assignment") \
+ V(kInvalidLhsInCountOperation, "Invalid lhs in count operation") \
V(kInvalidMinLength, "Invalid min_length") \
V(kJSGlobalObjectNativeContextShouldBeANativeContext, \
"JSGlobalObject::native_context should be a native context") \
@@ -1192,7 +1193,7 @@ class MaybeObject BASE_EMBEDDED {
"LiveEdit frame dropping is not supported on mips") \
V(kLiveEdit, "LiveEdit") \
V(kLookupVariableInCountOperation, \
- "lookup variable in count operation") \
+ "Lookup variable in count operation") \
V(kMapIsNoLongerInEax, "Map is no longer in eax") \
V(kModuleDeclaration, "Module declaration") \
V(kModuleLiteral, "Module literal") \
@@ -1201,26 +1202,26 @@ class MaybeObject BASE_EMBEDDED {
V(kModuleVariable, "Module variable") \
V(kModuleUrl, "Module url") \
V(kNativeFunctionLiteral, "Native function literal") \
- V(kNoCasesLeft, "no cases left") \
+ V(kNoCasesLeft, "No cases left") \
V(kNoEmptyArraysHereInEmitFastAsciiArrayJoin, \
"No empty arrays here in EmitFastAsciiArrayJoin") \
V(kNonInitializerAssignmentToConst, \
- "non-initializer assignment to const") \
+ "Non-initializer assignment to const") \
V(kNonSmiIndex, "Non-smi index") \
V(kNonSmiKeyInArrayLiteral, "Non-smi key in array literal") \
V(kNonSmiValue, "Non-smi value") \
V(kNonObject, "Non-object value") \
V(kNotEnoughVirtualRegistersForValues, \
- "not enough virtual registers for values") \
+ "Not enough virtual registers for values") \
V(kNotEnoughSpillSlotsForOsr, \
- "not enough spill slots for OSR") \
+ "Not enough spill slots for OSR") \
V(kNotEnoughVirtualRegistersRegalloc, \
- "not enough virtual registers (regalloc)") \
- V(kObjectFoundInSmiOnlyArray, "object found in smi-only array") \
+ "Not enough virtual registers (regalloc)") \
+ V(kObjectFoundInSmiOnlyArray, "Object found in smi-only array") \
V(kObjectLiteralWithComplexProperty, \
"Object literal with complex property") \
V(kOddballInStringTableIsNotUndefinedOrTheHole, \
- "oddball in string table is not undefined or the hole") \
+ "Oddball in string table is not undefined or the hole") \
V(kOperandIsASmiAndNotAName, "Operand is a smi and not a name") \
V(kOperandIsASmiAndNotAString, "Operand is a smi and not a string") \
V(kOperandIsASmi, "Operand is a smi") \
@@ -1230,24 +1231,25 @@ class MaybeObject BASE_EMBEDDED {
V(kOperandIsNotAString, "Operand is not a string") \
V(kOperandIsNotSmi, "Operand is not smi") \
V(kOperandNotANumber, "Operand not a number") \
- V(kOptimizedTooManyTimes, "optimized too many times") \
+ V(kOptimizationDisabled, "Optimization is disabled") \
+ V(kOptimizedTooManyTimes, "Optimized too many times") \
V(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister, \
"Out of virtual registers while trying to allocate temp register") \
- V(kParseScopeError, "parse/scope error") \
- V(kPossibleDirectCallToEval, "possible direct call to eval") \
+ V(kParseScopeError, "Parse/scope error") \
+ V(kPossibleDirectCallToEval, "Possible direct call to eval") \
V(kPropertyAllocationCountFailed, "Property allocation count failed") \
V(kReceivedInvalidReturnAddress, "Received invalid return address") \
V(kReferenceToAVariableWhichRequiresDynamicLookup, \
- "reference to a variable which requires dynamic lookup") \
+ "Reference to a variable which requires dynamic lookup") \
V(kReferenceToGlobalLexicalVariable, \
- "reference to global lexical variable") \
- V(kReferenceToUninitializedVariable, "reference to uninitialized variable") \
+ "Reference to global lexical variable") \
+ V(kReferenceToUninitializedVariable, "Reference to uninitialized variable") \
V(kRegisterDidNotMatchExpectedRoot, "Register did not match expected root") \
- V(kRegisterWasClobbered, "register was clobbered") \
+ V(kRegisterWasClobbered, "Register was clobbered") \
V(kScopedBlock, "ScopedBlock") \
V(kSmiAdditionOverflow, "Smi addition overflow") \
V(kSmiSubtractionOverflow, "Smi subtraction overflow") \
- V(kStackFrameTypesMustMatch, "stack frame types must match") \
+ V(kStackFrameTypesMustMatch, "Stack frame types must match") \
V(kSwitchStatementMixedOrNonLiteralSwitchLabels, \
"SwitchStatement: mixed or non-literal switch labels") \
V(kSwitchStatementTooManyClauses, "SwitchStatement: too many clauses") \
@ -1259,8 +1261,8 @@ class MaybeObject BASE_EMBEDDED {
"The instruction to patch should be a lui") \
V(kTheInstructionToPatchShouldBeAnOri, \
"The instruction to patch should be an ori") \
V(kTooManyParametersLocals, "too many parameters/locals") \
V(kTooManyParameters, "too many parameters") \
V(kTooManyParametersLocals, "Too many parameters/locals") \
V(kTooManyParameters, "Too many parameters") \
V(kTooManySpillSlotsNeededForOSR, "Too many spill slots needed for OSR") \
V(kToOperandIsDoubleRegisterUnimplemented, \
"ToOperand IsDoubleRegister unimplemented") \
@ -1311,23 +1313,23 @@ class MaybeObject BASE_EMBEDDED {
V(kUnexpectedUnusedPropertiesOfStringWrapper, \
"Unexpected unused properties of string wrapper") \
V(kUninitializedKSmiConstantRegister, "Uninitialized kSmiConstantRegister") \
V(kUnknown, "unknown") \
V(kUnknown, "Unknown") \
V(kUnsupportedConstCompoundAssignment, \
"unsupported const compound assignment") \
"Unsupported const compound assignment") \
V(kUnsupportedCountOperationWithConst, \
"unsupported count operation with const") \
V(kUnsupportedDoubleImmediate, "unsupported double immediate") \
V(kUnsupportedLetCompoundAssignment, "unsupported let compound assignment") \
"Unsupported count operation with const") \
V(kUnsupportedDoubleImmediate, "Unsupported double immediate") \
V(kUnsupportedLetCompoundAssignment, "Unsupported let compound assignment") \
V(kUnsupportedLookupSlotInDeclaration, \
"unsupported lookup slot in declaration") \
"Unsupported lookup slot in declaration") \
V(kUnsupportedNonPrimitiveCompare, "Unsupported non-primitive compare") \
V(kUnsupportedPhiUseOfArguments, "Unsupported phi use of arguments") \
V(kUnsupportedPhiUseOfConstVariable, \
"Unsupported phi use of const variable") \
V(kUnsupportedTaggedImmediate, "unsupported tagged immediate") \
V(kUnsupportedTaggedImmediate, "Unsupported tagged immediate") \
V(kVariableResolvedToWithContext, "Variable resolved to with context") \
V(kWeShouldNotHaveAnEmptyLexicalContext, \
"we should not have an empty lexical context") \
"We should not have an empty lexical context") \
V(kWithStatement, "WithStatement") \
V(kWrongAddressOrValuePassedToRecordWrite, \
"Wrong address or value passed to RecordWrite") \
@ -5358,6 +5360,7 @@ class Code: public HeapObject {
void ClearTypeFeedbackCells(Heap* heap);
BailoutId TranslatePcOffsetToAstId(uint32_t pc_offset);
uint32_t TranslateAstIdToPcOffset(BailoutId ast_id);
#define DECLARE_CODE_AGE_ENUM(X) k##X##CodeAge,
enum Age {
@ -6540,7 +6543,9 @@ class SharedFunctionInfo: public HeapObject {
// Installs optimized code from the code map on the given closure. The
// index has to be consistent with a search result as defined above.
void InstallFromOptimizedCodeMap(JSFunction* function, int index);
FixedArray* GetLiteralsFromOptimizedCodeMap(int index);
Code* GetCodeFromOptimizedCodeMap(int index);
// Clear optimized code map.
void ClearOptimizedCodeMap();
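For illustration, the two new accessors are paired: a successful search index into the optimized code map addresses both the code and its literals for a given native context. A minimal stand-in model in plain C++ (not the V8 API; all names here are illustrative):

// Illustrative model of a per-function optimized-code cache (plain C++,
// not V8 code). Each entry pairs the code with its literals, so one
// search index serves both accessors.
#include <vector>

struct CodeMapEntry {
  int context_id;             // stands in for the native context key
  const void* code;           // stands in for Code*
  std::vector<int> literals;  // stands in for the literals FixedArray*
};

class OptimizedCodeMapModel {
 public:
  // Returns an entry index, or -1 if this context has no cached code.
  int Search(int context_id) const {
    for (size_t i = 0; i < entries_.size(); ++i) {
      if (entries_[i].context_id == context_id) return static_cast<int>(i);
    }
    return -1;
  }
  const void* GetCode(int index) const { return entries_[index].code; }
  const std::vector<int>& GetLiterals(int index) const {
    return entries_[index].literals;
  }

 private:
  std::vector<CodeMapEntry> entries_;
};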
@ -6924,12 +6929,6 @@ class SharedFunctionInfo: public HeapObject {
void ResetForNewContext(int new_ic_age);
// Helper to compile the shared code. Returns true on success, false on
// failure (e.g., stack overflow during compilation). This is only used by
// the debugger, it is not possible to compile without a context otherwise.
static bool CompileLazy(Handle<SharedFunctionInfo> shared,
ClearExceptionFlag flag);
// Casting.
static inline SharedFunctionInfo* cast(Object* obj);
@ -7260,29 +7259,20 @@ class JSFunction: public JSObject {
// Mark this function for lazy recompilation. The function will be
// recompiled the next time it is executed.
void MarkForLazyRecompilation();
void MarkForConcurrentRecompilation();
void MarkInRecompileQueue();
void MarkForOptimization();
void MarkForConcurrentOptimization();
void MarkInOptimizationQueue();
// Helpers to compile this function. Returns true on success, false on
// failure (e.g., stack overflow during compilation).
static bool EnsureCompiled(Handle<JSFunction> function,
ClearExceptionFlag flag);
static bool CompileLazy(Handle<JSFunction> function,
ClearExceptionFlag flag);
static Handle<Code> CompileOsr(Handle<JSFunction> function,
BailoutId osr_ast_id,
ClearExceptionFlag flag);
static bool CompileOptimized(Handle<JSFunction> function,
ClearExceptionFlag flag);
// Tells whether or not the function is already marked for lazy
// recompilation.
inline bool IsMarkedForLazyRecompilation();
inline bool IsMarkedForConcurrentRecompilation();
inline bool IsMarkedForOptimization();
inline bool IsMarkedForConcurrentOptimization();
// Tells whether or not the function is on the concurrent recompilation queue.
inline bool IsInRecompileQueue();
inline bool IsInOptimizationQueue();
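For illustration, the renamed markers and predicates describe a small per-function state machine. A hedged sketch of the states they distinguish (illustrative C++; V8 actually encodes these states by installing dedicated builtins as the function's code):

// Illustrative state model for the marking API (not the V8 encoding).
enum class OptimizationState {
  kNone,                   // running unoptimized code, unmarked
  kMarkedForOptimization,  // set by MarkForOptimization()
  kMarkedForConcurrent,    // set by MarkForConcurrentOptimization()
  kInOptimizationQueue     // set by MarkInOptimizationQueue()
};

struct FunctionState {
  OptimizationState state = OptimizationState::kNone;
  bool IsMarkedForOptimization() const {
    return state == OptimizationState::kMarkedForOptimization;
  }
  bool IsMarkedForConcurrentOptimization() const {
    return state == OptimizationState::kMarkedForConcurrent;
  }
  bool IsInOptimizationQueue() const {
    return state == OptimizationState::kInOptimizationQueue;
  }
};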
// [literals_or_bindings]: Fixed array holding either
// the materialized literals or the bindings of a bound function.


@ -106,10 +106,10 @@ void OptimizingCompilerThread::Run() {
}
RecompileJob* OptimizingCompilerThread::NextInput() {
OptimizedCompileJob* OptimizingCompilerThread::NextInput() {
LockGuard<Mutex> access_input_queue_(&input_queue_mutex_);
if (input_queue_length_ == 0) return NULL;
RecompileJob* job = input_queue_[InputQueueIndex(0)];
OptimizedCompileJob* job = input_queue_[InputQueueIndex(0)];
ASSERT_NE(NULL, job);
input_queue_shift_ = InputQueueIndex(1);
input_queue_length_--;
@ -118,13 +118,13 @@ RecompileJob* OptimizingCompilerThread::NextInput() {
void OptimizingCompilerThread::CompileNext() {
RecompileJob* job = NextInput();
OptimizedCompileJob* job = NextInput();
ASSERT_NE(NULL, job);
// The function may have already been optimized by OSR. Simply continue.
RecompileJob::Status status = job->OptimizeGraph();
OptimizedCompileJob::Status status = job->OptimizeGraph();
USE(status); // Prevent an unused-variable error in release mode.
ASSERT(status != RecompileJob::FAILED);
ASSERT(status != OptimizedCompileJob::FAILED);
// The function may have already been optimized by OSR. Simply continue.
// Use a mutex to make sure that functions marked for install
@ -134,13 +134,18 @@ void OptimizingCompilerThread::CompileNext() {
}
static void DisposeRecompileJob(RecompileJob* job,
bool restore_function_code) {
static void DisposeOptimizedCompileJob(OptimizedCompileJob* job,
bool restore_function_code) {
// The recompile job is allocated in the CompilationInfo's zone.
CompilationInfo* info = job->info();
if (restore_function_code) {
if (info->is_osr()) {
if (!job->IsWaitingForInstall()) BackEdgeTable::RemoveStackCheck(info);
if (!job->IsWaitingForInstall()) {
// Remove stack check that guards OSR entry on original code.
Handle<Code> code = info->unoptimized_code();
uint32_t offset = code->TranslateAstIdToPcOffset(info->osr_ast_id());
BackEdgeTable::RemoveStackCheck(code, offset);
}
} else {
Handle<JSFunction> function = info->closure();
function->ReplaceCode(function->shared()->code());
@ -151,25 +156,25 @@ static void DisposeRecompileJob(RecompileJob* job,
void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) {
RecompileJob* job;
OptimizedCompileJob* job;
while ((job = NextInput())) {
// This should not block, since we have one signal on the input queue
// semaphore corresponding to each element in the input queue.
input_queue_semaphore_.Wait();
// OSR jobs are dealt with separately.
if (!job->info()->is_osr()) {
DisposeRecompileJob(job, restore_function_code);
DisposeOptimizedCompileJob(job, restore_function_code);
}
}
}
void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
RecompileJob* job;
OptimizedCompileJob* job;
while (output_queue_.Dequeue(&job)) {
// OSR jobs are dealt with separately.
if (!job->info()->is_osr()) {
DisposeRecompileJob(job, restore_function_code);
DisposeOptimizedCompileJob(job, restore_function_code);
}
}
}
@ -178,7 +183,7 @@ void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
void OptimizingCompilerThread::FlushOsrBuffer(bool restore_function_code) {
for (int i = 0; i < osr_buffer_capacity_; i++) {
if (osr_buffer_[i] != NULL) {
DisposeRecompileJob(osr_buffer_[i], restore_function_code);
DisposeOptimizedCompileJob(osr_buffer_[i], restore_function_code);
osr_buffer_[i] = NULL;
}
}
@ -236,9 +241,10 @@ void OptimizingCompilerThread::InstallOptimizedFunctions() {
ASSERT(!IsOptimizerThread());
HandleScope handle_scope(isolate_);
RecompileJob* job;
OptimizedCompileJob* job;
while (output_queue_.Dequeue(&job)) {
CompilationInfo* info = job->info();
Handle<JSFunction> function(*info->closure());
if (info->is_osr()) {
if (FLAG_trace_osr) {
PrintF("[COSR - ");
@ -247,26 +253,25 @@ void OptimizingCompilerThread::InstallOptimizedFunctions() {
info->osr_ast_id().ToInt());
}
job->WaitForInstall();
BackEdgeTable::RemoveStackCheck(info);
// Remove stack check that guards OSR entry on original code.
Handle<Code> code = info->unoptimized_code();
uint32_t offset = code->TranslateAstIdToPcOffset(info->osr_ast_id());
BackEdgeTable::RemoveStackCheck(code, offset);
} else {
Compiler::InstallOptimizedCode(job);
Handle<Code> code = Compiler::GetConcurrentlyOptimizedCode(job);
function->ReplaceCode(
code.is_null() ? function->shared()->code() : *code);
}
}
}
void OptimizingCompilerThread::QueueForOptimization(RecompileJob* job) {
void OptimizingCompilerThread::QueueForOptimization(OptimizedCompileJob* job) {
ASSERT(IsQueueAvailable());
ASSERT(!IsOptimizerThread());
CompilationInfo* info = job->info();
if (info->is_osr()) {
if (FLAG_trace_concurrent_recompilation) {
PrintF(" ** Queueing ");
info->closure()->PrintName();
PrintF(" for concurrent on-stack replacement.\n");
}
osr_attempts_++;
BackEdgeTable::AddStackCheck(info);
AddToOsrBuffer(job);
// Add job to the front of the input queue.
LockGuard<Mutex> access_input_queue(&input_queue_mutex_);
@ -276,7 +281,6 @@ void OptimizingCompilerThread::QueueForOptimization(RecompileJob* job) {
input_queue_[InputQueueIndex(0)] = job;
input_queue_length_++;
} else {
info->closure()->MarkInRecompileQueue();
// Add job to the back of the input queue.
LockGuard<Mutex> access_input_queue(&input_queue_mutex_);
ASSERT_LT(input_queue_length_, input_queue_capacity_);
@ -300,14 +304,14 @@ void OptimizingCompilerThread::Unblock() {
}
RecompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
Handle<JSFunction> function, uint32_t osr_pc_offset) {
OptimizedCompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
Handle<JSFunction> function, BailoutId osr_ast_id) {
ASSERT(!IsOptimizerThread());
for (int i = 0; i < osr_buffer_capacity_; i++) {
RecompileJob* current = osr_buffer_[i];
OptimizedCompileJob* current = osr_buffer_[i];
if (current != NULL &&
current->IsWaitingForInstall() &&
current->info()->HasSameOsrEntry(function, osr_pc_offset)) {
current->info()->HasSameOsrEntry(function, osr_ast_id)) {
osr_hits_++;
osr_buffer_[i] = NULL;
return current;
@ -318,12 +322,12 @@ RecompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
bool OptimizingCompilerThread::IsQueuedForOSR(Handle<JSFunction> function,
uint32_t osr_pc_offset) {
BailoutId osr_ast_id) {
ASSERT(!IsOptimizerThread());
for (int i = 0; i < osr_buffer_capacity_; i++) {
RecompileJob* current = osr_buffer_[i];
OptimizedCompileJob* current = osr_buffer_[i];
if (current != NULL &&
current->info()->HasSameOsrEntry(function, osr_pc_offset)) {
current->info()->HasSameOsrEntry(function, osr_ast_id)) {
return !current->IsWaitingForInstall();
}
}
@ -334,7 +338,7 @@ bool OptimizingCompilerThread::IsQueuedForOSR(Handle<JSFunction> function,
bool OptimizingCompilerThread::IsQueuedForOSR(JSFunction* function) {
ASSERT(!IsOptimizerThread());
for (int i = 0; i < osr_buffer_capacity_; i++) {
RecompileJob* current = osr_buffer_[i];
OptimizedCompileJob* current = osr_buffer_[i];
if (current != NULL && *current->info()->closure() == function) {
return !current->IsWaitingForInstall();
}
@ -343,10 +347,10 @@ bool OptimizingCompilerThread::IsQueuedForOSR(JSFunction* function) {
}
void OptimizingCompilerThread::AddToOsrBuffer(RecompileJob* job) {
void OptimizingCompilerThread::AddToOsrBuffer(OptimizedCompileJob* job) {
ASSERT(!IsOptimizerThread());
// Find the next slot that is empty or has a stale job.
RecompileJob* stale = NULL;
OptimizedCompileJob* stale = NULL;
while (true) {
stale = osr_buffer_[osr_buffer_cursor_];
if (stale == NULL || stale->IsWaitingForInstall()) break;
@ -362,7 +366,7 @@ void OptimizingCompilerThread::AddToOsrBuffer(RecompileJob* job) {
info->closure()->PrintName();
PrintF(", AST id %d]\n", info->osr_ast_id().ToInt());
}
DisposeRecompileJob(stale, false);
DisposeOptimizedCompileJob(stale, false);
}
osr_buffer_[osr_buffer_cursor_] = job;
osr_buffer_cursor_ = (osr_buffer_cursor_ + 1) % osr_buffer_capacity_;
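For illustration, AddToOsrBuffer implements cyclic eviction: starting at the cursor, it takes the first slot that is empty or holds a job already waiting for install, evicting the latter. A self-contained model of that loop (illustrative; assumes a free or stale slot exists, which the queue capacity check guarantees):

// Illustrative model of the eviction loop (stand-in types).
#include <vector>

struct Job {
  bool waiting_for_install;  // stands in for IsWaitingForInstall()
};

class OsrBufferModel {
 public:
  explicit OsrBufferModel(int capacity) : slots_(capacity, nullptr) {}

  // Inserts at the first empty-or-stale slot from the cursor onwards and
  // returns the evicted stale job (if any) for the caller to dispose.
  Job* Add(Job* job) {
    Job* stale;
    for (;;) {
      stale = slots_[cursor_];
      if (stale == nullptr || stale->waiting_for_install) break;
      cursor_ = (cursor_ + 1) % slots_.size();
    }
    slots_[cursor_] = job;
    cursor_ = (cursor_ + 1) % slots_.size();
    return stale;
  }

 private:
  std::vector<Job*> slots_;
  size_t cursor_ = 0;
};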


@ -40,7 +40,7 @@ namespace v8 {
namespace internal {
class HOptimizedGraphBuilder;
class RecompileJob;
class OptimizedCompileJob;
class SharedFunctionInfo;
class OptimizingCompilerThread : public Thread {
@ -62,10 +62,10 @@ class OptimizingCompilerThread : public Thread {
osr_attempts_(0),
blocked_jobs_(0) {
NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(CONTINUE));
input_queue_ = NewArray<RecompileJob*>(input_queue_capacity_);
input_queue_ = NewArray<OptimizedCompileJob*>(input_queue_capacity_);
if (FLAG_concurrent_osr) {
// Allocate and mark OSR buffer slots as empty.
osr_buffer_ = NewArray<RecompileJob*>(osr_buffer_capacity_);
osr_buffer_ = NewArray<OptimizedCompileJob*>(osr_buffer_capacity_);
for (int i = 0; i < osr_buffer_capacity_; i++) osr_buffer_[i] = NULL;
}
}
@ -75,12 +75,12 @@ class OptimizingCompilerThread : public Thread {
void Run();
void Stop();
void Flush();
void QueueForOptimization(RecompileJob* optimizing_compiler);
void QueueForOptimization(OptimizedCompileJob* optimizing_compiler);
void Unblock();
void InstallOptimizedFunctions();
RecompileJob* FindReadyOSRCandidate(Handle<JSFunction> function,
uint32_t osr_pc_offset);
bool IsQueuedForOSR(Handle<JSFunction> function, uint32_t osr_pc_offset);
OptimizedCompileJob* FindReadyOSRCandidate(Handle<JSFunction> function,
BailoutId osr_ast_id);
bool IsQueuedForOSR(Handle<JSFunction> function, BailoutId osr_ast_id);
bool IsQueuedForOSR(JSFunction* function);
@ -112,11 +112,11 @@ class OptimizingCompilerThread : public Thread {
void FlushOutputQueue(bool restore_function_code);
void FlushOsrBuffer(bool restore_function_code);
void CompileNext();
RecompileJob* NextInput();
OptimizedCompileJob* NextInput();
// Add a recompilation task for OSR to the cyclic buffer, awaiting OSR entry.
// Tasks evicted from the cyclic buffer are discarded.
void AddToOsrBuffer(RecompileJob* compiler);
void AddToOsrBuffer(OptimizedCompileJob* compiler);
inline int InputQueueIndex(int i) {
int result = (i + input_queue_shift_) % input_queue_capacity_;
@ -135,17 +135,17 @@ class OptimizingCompilerThread : public Thread {
Semaphore input_queue_semaphore_;
// Circular queue of incoming recompilation tasks (including OSR).
RecompileJob** input_queue_;
OptimizedCompileJob** input_queue_;
int input_queue_capacity_;
int input_queue_length_;
int input_queue_shift_;
Mutex input_queue_mutex_;
// Queue of recompilation tasks ready to be installed (excluding OSR).
UnboundQueue<RecompileJob*> output_queue_;
UnboundQueue<OptimizedCompileJob*> output_queue_;
// Cyclic buffer of recompilation tasks for OSR.
RecompileJob** osr_buffer_;
OptimizedCompileJob** osr_buffer_;
int osr_buffer_capacity_;
int osr_buffer_cursor_;
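For illustration, the input queue is a fixed-capacity ring buffer addressed through InputQueueIndex, which is what lets OSR jobs be pushed at the front while regular jobs are appended at the back. A minimal model of that indexing, without the mutex and semaphore discipline of the real thread:

// Illustrative ring-buffer model of the input queue (locking omitted;
// the real thread guards this with input_queue_mutex_).
#include <cassert>
#include <vector>

class InputQueueModel {
 public:
  explicit InputQueueModel(int capacity)
      : buffer_(capacity), capacity_(capacity) {}

  int Index(int i) const {  // mirrors InputQueueIndex()
    int result = (i + shift_) % capacity_;
    return result < 0 ? result + capacity_ : result;
  }
  void PushBack(int job) {  // regular optimization jobs
    assert(length_ < capacity_);
    buffer_[Index(length_++)] = job;
  }
  void PushFront(int job) {  // OSR jobs jump the queue
    assert(length_ < capacity_);
    shift_ = Index(-1);  // slide the window back by one slot
    buffer_[Index(0)] = job;
    length_++;
  }
  int PopFront() {  // mirrors NextInput()
    assert(length_ > 0);
    int job = buffer_[Index(0)];
    shift_ = Index(1);
    length_--;
    return job;
  }

 private:
  std::vector<int> buffer_;
  int capacity_;
  int length_ = 0;
  int shift_ = 0;
};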


@ -418,7 +418,12 @@ class Parser : public ParserBase {
// Parses the source code represented by the compilation info and sets its
// function literal. Returns false (and deallocates any allocated AST
// nodes) if parsing failed.
static bool Parse(CompilationInfo* info) { return Parser(info).Parse(); }
static bool Parse(CompilationInfo* info,
bool allow_lazy = false) {
Parser parser(info);
parser.set_allow_lazy(allow_lazy);
return parser.Parse();
}
bool Parse();
private:
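For illustration, because allow_lazy defaults to false, existing Parser::Parse(info) call sites are unaffected and only the compiler pipeline needs to opt in. Hypothetical call shapes:

// Hypothetical call shapes for the extended entry point (illustrative):
//   Parser::Parse(info);                         // allow_lazy defaults to false
//   Parser::Parse(info, /* allow_lazy */ true);  // pipeline opts in explicitly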


@ -124,11 +124,11 @@ void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
// recompilation race. This goes away as soon as OSR becomes one-shot.
return;
}
ASSERT(!function->IsInRecompileQueue());
function->MarkForConcurrentRecompilation();
ASSERT(!function->IsInOptimizationQueue());
function->MarkForConcurrentOptimization();
} else {
// The next call to the function will trigger optimization.
function->MarkForLazyRecompilation();
function->MarkForOptimization();
}
}
@ -186,7 +186,7 @@ void RuntimeProfiler::OptimizeNow() {
Code* shared_code = shared->code();
if (shared_code->kind() != Code::FUNCTION) continue;
if (function->IsInRecompileQueue()) continue;
if (function->IsInOptimizationQueue()) continue;
if (FLAG_always_osr &&
shared_code->allow_osr_at_loop_nesting_level() == 0) {
@ -198,8 +198,8 @@ void RuntimeProfiler::OptimizeNow() {
}
// Fall through and do a normal optimized compile as well.
} else if (!frame->is_optimized() &&
(function->IsMarkedForLazyRecompilation() ||
function->IsMarkedForConcurrentRecompilation() ||
(function->IsMarkedForOptimization() ||
function->IsMarkedForConcurrentOptimization() ||
function->IsOptimized())) {
// Attempt OSR if we are still running unoptimized code even though
// the function has long been marked or even already been optimized.
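For illustration, with the new names the profiler's choice in Optimize() reduces to a two-way branch on whether concurrent recompilation is available. A compact sketch (illustrative; the real code also handles the OSR recompilation race and asserts the function is not already queued):

// Illustrative control flow for RuntimeProfiler::Optimize() after the
// rename (simplified; OSR special cases omitted).
enum class Mark { kForOptimization, kForConcurrentOptimization };

inline Mark ChooseOptimizationMark(bool concurrent_enabled) {
  // With a background thread available, queue the job concurrently;
  // otherwise the next call to the function triggers compilation.
  return concurrent_enabled ? Mark::kForConcurrentOptimization
                            : Mark::kForOptimization;
}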


@ -2957,7 +2957,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) {
Handle<SharedFunctionInfo> target_shared(target->shared());
Handle<SharedFunctionInfo> source_shared(source->shared());
if (!JSFunction::EnsureCompiled(source, KEEP_EXCEPTION)) {
if (!Compiler::EnsureCompiled(source, KEEP_EXCEPTION)) {
return Failure::Exception();
}
@ -8267,7 +8267,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
// The function should be compiled for the optimization hints to be
// available.
JSFunction::EnsureCompiled(function, CLEAR_EXCEPTION);
Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
Handle<SharedFunctionInfo> shared(function->shared(), isolate);
if (!function->has_initial_map() &&
@ -8299,42 +8299,53 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FinalizeInstanceSize) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyCompile) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileUnoptimized) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
Handle<JSFunction> function = args.at<JSFunction>(0);
#ifdef DEBUG
if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
PrintF("[lazy: ");
PrintF("[unoptimized: ");
function->PrintName();
PrintF("]\n");
}
#endif
// Compile the target function.
ASSERT(!function->is_compiled());
if (!JSFunction::CompileLazy(function, KEEP_EXCEPTION)) {
return Failure::Exception();
}
ASSERT(function->shared()->allows_lazy_compilation());
Handle<Code> code = Compiler::GetUnoptimizedCode(function);
RETURN_IF_EMPTY_HANDLE(isolate, code);
function->ReplaceCode(*code);
// All done. Return the compiled code.
ASSERT(function->is_compiled());
return function->code();
ASSERT(function->code()->kind() == Code::FUNCTION ||
(FLAG_always_opt &&
function->code()->kind() == Code::OPTIMIZED_FUNCTION));
return *code;
}
bool AllowOptimization(Isolate* isolate, Handle<JSFunction> function) {
// If the function is not compiled ignore the lazy
// recompilation. This can happen if the debugger is activated and
// the function is returned to the not compiled state.
if (!function->shared()->is_compiled()) return false;
RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileOptimized) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
Handle<JSFunction> function = args.at<JSFunction>(0);
CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1);
// If the function is not optimizable or debugger is active continue using the
// code from the full compiler.
if (!isolate->use_crankshaft() ||
function->shared()->optimization_disabled() ||
isolate->DebuggerHasBreakPoints()) {
Handle<Code> unoptimized(function->shared()->code());
if (!function->shared()->is_compiled()) {
// If the function is not compiled, do not optimize.
// This can happen if the debugger is activated and
// the function has been returned to the uncompiled state.
// TODO(yangguo): reconsider this.
function->ReplaceCode(function->shared()->code());
} else if (!isolate->use_crankshaft() ||
function->shared()->optimization_disabled() ||
isolate->DebuggerHasBreakPoints()) {
// If the function is not optimizable or the debugger is active, continue
// using the code from the full compiler.
if (FLAG_trace_opt) {
PrintF("[failed to optimize ");
function->PrintName();
@ -8342,53 +8353,21 @@ bool AllowOptimization(Isolate* isolate, Handle<JSFunction> function) {
function->shared()->optimization_disabled() ? "F" : "T",
isolate->DebuggerHasBreakPoints() ? "T" : "F");
}
return false;
function->ReplaceCode(*unoptimized);
} else {
Compiler::ConcurrencyMode mode = concurrent ? Compiler::CONCURRENT
: Compiler::NOT_CONCURRENT;
Handle<Code> code = Compiler::GetOptimizedCode(function, unoptimized, mode);
function->ReplaceCode(code.is_null() ? *unoptimized : *code);
}
return true;
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
Handle<JSFunction> function = args.at<JSFunction>(0);
if (!AllowOptimization(isolate, function)) {
function->ReplaceCode(function->shared()->code());
return function->code();
}
function->shared()->code()->set_profiler_ticks(0);
if (JSFunction::CompileOptimized(function, CLEAR_EXCEPTION)) {
return function->code();
}
if (FLAG_trace_opt) {
PrintF("[failed to optimize ");
function->PrintName();
PrintF(": optimized compilation failed]\n");
}
function->ReplaceCode(function->shared()->code());
ASSERT(function->code()->kind() == Code::FUNCTION ||
function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
function->IsInOptimizationQueue());
return function->code();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_ConcurrentRecompile) {
HandleScope handle_scope(isolate);
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
if (!AllowOptimization(isolate, function)) {
function->ReplaceCode(function->shared()->code());
return isolate->heap()->undefined_value();
}
Handle<Code> shared_code(function->shared()->code());
shared_code->set_profiler_ticks(0);
ASSERT(isolate->concurrent_recompilation_enabled());
if (!Compiler::RecompileConcurrent(function, shared_code)) {
function->ReplaceCode(*shared_code);
}
return isolate->heap()->undefined_value();
}
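For illustration, the merged Runtime_CompileOptimized makes one three-way decision where the two deleted entry points each duplicated part of it: keep the unoptimized code if the function was decompiled, keep it if optimization is unavailable, otherwise ask the compiler and fall back on failure. A hedged sketch with stand-in types (not the V8 signatures):

// Stand-in types; the real code operates on Handle<JSFunction> handles.
struct Code;  // opaque stand-in for a code object

struct CompileDecisionInput {
  bool is_compiled;            // shared function info already has code
  bool use_crankshaft;         // optimizing compiler available
  bool optimization_disabled;  // e.g. a previous bailout disabled it
  bool debugger_active;        // breakpoints force full-codegen code
};

// Returns the code to install: optimized on success, else the fallback.
Code* DecideCompileOptimized(const CompileDecisionInput& in,
                             Code* unoptimized,
                             Code* (*get_optimized_code)()) {
  if (!in.is_compiled) {
    // The debugger may have returned the function to the uncompiled
    // state; do not try to optimize it.
    return unoptimized;
  }
  if (!in.use_crankshaft || in.optimization_disabled || in.debugger_active) {
    return unoptimized;  // keep running the full-codegen code
  }
  Code* optimized = get_optimized_code();  // may fail and return null
  return optimized != nullptr ? optimized : unoptimized;
}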
class ActivationsFinder : public ThreadVisitor {
public:
Code* code_;
@ -8529,7 +8508,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_OptimizeFunctionOnNextCall) {
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
if (!function->IsOptimizable()) return isolate->heap()->undefined_value();
function->MarkForLazyRecompilation();
function->MarkForOptimization();
Code* unoptimized = function->shared()->code();
if (args.length() == 2 &&
@ -8545,7 +8524,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_OptimizeFunctionOnNextCall) {
}
} else if (type->IsOneByteEqualTo(STATIC_ASCII_VECTOR("concurrent")) &&
isolate->concurrent_recompilation_enabled()) {
function->MarkForConcurrentRecompilation();
function->MarkForConcurrentOptimization();
}
}
@ -8579,7 +8558,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) {
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
if (isolate->concurrent_recompilation_enabled() &&
sync_with_compiler_thread) {
while (function->IsInRecompileQueue()) {
while (function->IsInOptimizationQueue()) {
isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
OS::Sleep(50);
}
@ -8615,9 +8594,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationCount) {
static bool IsSuitableForOnStackReplacement(Isolate* isolate,
Handle<JSFunction> function,
Handle<Code> unoptimized) {
Handle<Code> current_code) {
// Keep track of whether we've succeeded in optimizing.
if (!isolate->use_crankshaft() || !unoptimized->optimizable()) return false;
if (!isolate->use_crankshaft() || !current_code->optimizable()) return false;
// If we are trying to do OSR when there are already optimized
// activations of the function, it means (a) the function is directly or
// indirectly recursive and (b) an optimized invocation has been
@ -8636,79 +8615,79 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
Handle<Code> unoptimized(function->shared()->code(), isolate);
Handle<Code> caller_code(function->shared()->code());
// We're not prepared to handle a function with arguments object.
ASSERT(!function->shared()->uses_arguments());
// Passing the PC in the javascript frame from the caller directly is
// not GC safe, so we walk the stack to get it.
JavaScriptFrameIterator it(isolate);
JavaScriptFrame* frame = it.frame();
if (!unoptimized->contains(frame->pc())) {
if (!caller_code->contains(frame->pc())) {
// Code on the stack may not be the code object referenced by the shared
// function info. It may have been replaced to include deoptimization data.
unoptimized = Handle<Code>(frame->LookupCode());
caller_code = Handle<Code>(frame->LookupCode());
}
uint32_t pc_offset = static_cast<uint32_t>(frame->pc() -
unoptimized->instruction_start());
uint32_t pc_offset = static_cast<uint32_t>(
frame->pc() - caller_code->instruction_start());
#ifdef DEBUG
ASSERT_EQ(frame->function(), *function);
ASSERT_EQ(frame->LookupCode(), *unoptimized);
ASSERT(unoptimized->contains(frame->pc()));
ASSERT_EQ(frame->LookupCode(), *caller_code);
ASSERT(caller_code->contains(frame->pc()));
#endif // DEBUG
// We're not prepared to handle a function with arguments object.
ASSERT(!function->shared()->uses_arguments());
BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
ASSERT(!ast_id.IsNone());
Compiler::ConcurrencyMode mode = isolate->concurrent_osr_enabled()
? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT;
Handle<Code> result = Handle<Code>::null();
BailoutId ast_id = BailoutId::None();
if (isolate->concurrent_osr_enabled()) {
if (isolate->optimizing_compiler_thread()->
IsQueuedForOSR(function, pc_offset)) {
// Still waiting for the optimizing compiler thread to finish. Carry on.
OptimizedCompileJob* job = NULL;
if (mode == Compiler::CONCURRENT) {
// Gate the OSR entry with a stack check.
BackEdgeTable::AddStackCheck(caller_code, pc_offset);
// Poll already queued compilation jobs.
OptimizingCompilerThread* thread = isolate->optimizing_compiler_thread();
if (thread->IsQueuedForOSR(function, ast_id)) {
if (FLAG_trace_osr) {
PrintF("[COSR - polling recompile tasks for ");
PrintF("[OSR - Still waiting for queued: ");
function->PrintName();
PrintF("]\n");
PrintF(" at AST id %d]\n", ast_id.ToInt());
}
return NULL;
}
RecompileJob* job = isolate->optimizing_compiler_thread()->
FindReadyOSRCandidate(function, pc_offset);
job = thread->FindReadyOSRCandidate(function, ast_id);
}
if (job == NULL) {
if (IsSuitableForOnStackReplacement(isolate, function, unoptimized) &&
Compiler::RecompileConcurrent(function, unoptimized, pc_offset)) {
if (function->IsMarkedForLazyRecompilation() ||
function->IsMarkedForConcurrentRecompilation()) {
// Prevent regular recompilation if we queue this for OSR.
// TODO(yangguo): remove this as soon as OSR becomes one-shot.
function->ReplaceCode(function->shared()->code());
}
return NULL;
}
// Fall through to the end in case of failure.
} else {
// TODO(titzer): don't install the OSR code into the function.
ast_id = job->info()->osr_ast_id();
result = Compiler::InstallOptimizedCode(job);
}
} else if (IsSuitableForOnStackReplacement(isolate, function, unoptimized)) {
ast_id = unoptimized->TranslatePcOffsetToAstId(pc_offset);
ASSERT(!ast_id.IsNone());
if (job != NULL) {
if (FLAG_trace_osr) {
PrintF("[OSR - replacing at AST id %d in ", ast_id.ToInt());
PrintF("[OSR - Found ready: ");
function->PrintName();
PrintF("]\n");
PrintF(" at AST id %d]\n", ast_id.ToInt());
}
result = Compiler::GetConcurrentlyOptimizedCode(job);
} else if (result.is_null() &&
IsSuitableForOnStackReplacement(isolate, function, caller_code)) {
if (FLAG_trace_osr) {
PrintF("[OSR - Compiling: ");
function->PrintName();
PrintF(" at AST id %d]\n", ast_id.ToInt());
}
result = Compiler::GetOptimizedCode(function, caller_code, mode, ast_id);
if (result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
// Optimization is queued. Return to check later.
return NULL;
}
// Attempt OSR compilation.
result = JSFunction::CompileOsr(function, ast_id, CLEAR_EXCEPTION);
}
// Revert the patched back edge table, regardless of whether OSR succeeds.
BackEdgeTable::Revert(isolate, *unoptimized);
BackEdgeTable::Revert(isolate, *caller_code);
// Check whether we ended up with usable optimized code.
if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
@ -8718,26 +8697,27 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
if (data->OsrPcOffset()->value() >= 0) {
ASSERT(BailoutId(data->OsrAstId()->value()) == ast_id);
if (FLAG_trace_osr) {
PrintF("[OSR - entry at AST id %d, offset %d in optimized code]\n",
PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
ast_id.ToInt(), data->OsrPcOffset()->value());
}
// TODO(titzer): this is a massive hack to make the deopt counts
// match. Fix heuristics for reenabling optimizations!
function->shared()->increment_deopt_count();
// TODO(titzer): Do not install code into the function.
function->ReplaceCode(*result);
return *result;
}
}
// Failed.
if (FLAG_trace_osr) {
PrintF("[OSR - optimization failed for ");
PrintF("[OSR - Failed: ");
function->PrintName();
PrintF("]\n");
PrintF(" at AST id %d]\n", ast_id.ToInt());
}
if (function->IsMarkedForLazyRecompilation() ||
function->IsMarkedForConcurrentRecompilation()) {
function->ReplaceCode(function->shared()->code());
}
function->ReplaceCode(function->shared()->code());
return NULL;
}
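For illustration, the reworked OSR entry point now follows one linear sequence: gate the entry with a stack check, poll for a queued or finished concurrent job, otherwise compile, and finally revert the patched back edges whether or not OSR succeeded. A condensed sketch with hypothetical hooks standing in for the V8 calls:

// Condensed, illustrative flow of Runtime_CompileForOnStackReplacement
// (stand-in hooks; error handling and tracing omitted).
enum class OsrOutcome { kStillQueued, kInstalled, kFailed };

struct OsrPorts {  // hypothetical hooks standing in for the V8 calls
  void (*add_stack_check)();         // BackEdgeTable::AddStackCheck
  bool (*is_queued)();               // thread->IsQueuedForOSR(function, id)
  bool (*find_ready_job)();          // thread->FindReadyOSRCandidate(...)
  bool (*compile)(bool concurrent);  // Compiler::GetOptimizedCode(...)
  void (*revert_back_edges)();       // BackEdgeTable::Revert
};

OsrOutcome OsrEntry(const OsrPorts& p, bool concurrent) {
  bool have_code = false;
  if (concurrent) {
    p.add_stack_check();             // gate the OSR entry in the caller code
    if (p.is_queued()) return OsrOutcome::kStillQueued;  // poll again later
    have_code = p.find_ready_job();  // reuse a finished concurrent job
  }
  if (!have_code) have_code = p.compile(concurrent);
  p.revert_back_edges();             // undo back-edge patching either way
  return have_code ? OsrOutcome::kInstalled : OsrOutcome::kFailed;
}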
@ -9439,7 +9419,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StackGuard) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_TryInstallRecompiledCode) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_TryInstallOptimizedCode) {
HandleScope scope(isolate);
ASSERT(args.length() == 1);
CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
@ -9698,13 +9678,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileString) {
// Compile source string in the native context.
ParseRestriction restriction = function_literal_only
? ONLY_SINGLE_FUNCTION_LITERAL : NO_PARSE_RESTRICTION;
Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
source, context, true, CLASSIC_MODE, restriction, RelocInfo::kNoPosition);
RETURN_IF_EMPTY_HANDLE(isolate, shared);
Handle<JSFunction> fun =
isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
context,
NOT_TENURED);
Handle<JSFunction> fun = Compiler::GetFunctionFromEval(
source, context, CLASSIC_MODE, restriction, RelocInfo::kNoPosition);
RETURN_IF_EMPTY_HANDLE(isolate, fun);
return *fun;
}
@ -9730,18 +9706,11 @@ static ObjectPair CompileGlobalEval(Isolate* isolate,
// Deal with a normal eval call with a string argument. Compile it
// and return the compiled function bound in the local context.
Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
source,
context,
context->IsNativeContext(),
language_mode,
NO_PARSE_RESTRICTION,
scope_position);
RETURN_IF_EMPTY_HANDLE_VALUE(isolate, shared,
static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
Handle<JSFunction> compiled = Compiler::GetFunctionFromEval(
source, context, language_mode, restriction, scope_position);
RETURN_IF_EMPTY_HANDLE_VALUE(isolate, compiled,
MakePair(Failure::Exception(), NULL));
Handle<JSFunction> compiled =
isolate->factory()->NewFunctionFromSharedFunctionInfo(
shared, context, NOT_TENURED);
return MakePair(*compiled, *receiver);
}
@ -12573,7 +12542,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetScriptBreakPoint) {
if (!isolate->debug()->SetBreakPointForScript(script, break_point_object_arg,
&source_position,
alignment)) {
return isolate->heap()->undefined_value();
return isolate->heap()->undefined_value();
}
return Smi::FromInt(source_position);
@ -12733,18 +12702,14 @@ static MaybeObject* DebugEvaluate(Isolate* isolate,
context = isolate->factory()->NewWithContext(closure, context, extension);
}
Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
source,
context,
context->IsNativeContext(),
CLASSIC_MODE,
NO_PARSE_RESTRICTION,
RelocInfo::kNoPosition);
RETURN_IF_EMPTY_HANDLE(isolate, shared);
Handle<JSFunction> eval_fun =
isolate->factory()->NewFunctionFromSharedFunctionInfo(
shared, context, NOT_TENURED);
Compiler::GetFunctionFromEval(source,
context,
CLASSIC_MODE,
NO_PARSE_RESTRICTION,
RelocInfo::kNoPosition);
RETURN_IF_EMPTY_HANDLE(isolate, eval_fun);
bool pending_exception;
Handle<Object> result = Execution::Call(
isolate, eval_fun, receiver, 0, NULL, &pending_exception);
@ -13160,7 +13125,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleFunction) {
ASSERT(args.length() == 1);
// Get the function and make sure it is compiled.
CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
if (!JSFunction::EnsureCompiled(func, KEEP_EXCEPTION)) {
if (!Compiler::EnsureCompiled(func, KEEP_EXCEPTION)) {
return Failure::Exception();
}
func->code()->PrintLn();
@ -13175,7 +13140,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleConstructor) {
ASSERT(args.length() == 1);
// Get the function and make sure it is compiled.
CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
if (!JSFunction::EnsureCompiled(func, KEEP_EXCEPTION)) {
if (!Compiler::EnsureCompiled(func, KEEP_EXCEPTION)) {
return Failure::Exception();
}
func->shared()->construct_stub()->PrintLn();


@ -86,10 +86,9 @@ namespace internal {
F(GetConstructorDelegate, 1, 1) \
F(NewArgumentsFast, 3, 1) \
F(NewStrictArgumentsFast, 3, 1) \
F(LazyCompile, 1, 1) \
F(LazyRecompile, 1, 1) \
F(ConcurrentRecompile, 1, 1) \
F(TryInstallRecompiledCode, 1, 1) \
F(CompileUnoptimized, 1, 1) \
F(CompileOptimized, 2, 1) \
F(TryInstallOptimizedCode, 1, 1) \
F(NotifyDeoptimized, 1, 1) \
F(NotifyStubFailure, 0, 1) \
F(DeoptimizeFunction, 1, 1) \


@ -73,8 +73,8 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
}
static void CallRuntimePassFunction(MacroAssembler* masm,
Runtime::FunctionId function_id) {
static void CallRuntimePassFunction(
MacroAssembler* masm, Runtime::FunctionId function_id) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(rdi);
@ -101,7 +101,13 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
}
void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
}
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However,
// not checking may delay installing ready functions, and always checking
@ -111,22 +117,14 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok);
CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
// Tail call to returned code.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
GenerateTailCallToReturnedCode(masm);
__ bind(&ok);
GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
GenerateTailCallToSharedCode(masm);
}
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool count_constructions) {
@ -573,19 +571,41 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
}
void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kLazyCompile);
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
GenerateTailCallToReturnedCode(masm);
}
void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
// Do a tail-call of the compiled function.
__ lea(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
static void CallCompileOptimized(MacroAssembler* masm,
bool concurrent) {
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the function onto the stack.
__ push(rdi);
// Push call kind information.
__ push(rcx);
// Function is also the parameter to the runtime call.
__ push(rdi);
// Whether to compile in a background thread.
__ Push(masm->isolate()->factory()->ToBoolean(concurrent));
__ CallRuntime(Runtime::kCompileOptimized, 2);
// Restore call kind information.
__ pop(rcx);
// Restore receiver.
__ pop(rdi);
}
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
CallCompileOptimized(masm, false);
GenerateTailCallToReturnedCode(masm);
}
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
CallCompileOptimized(masm, true);
GenerateTailCallToReturnedCode(masm);
}
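For illustration, both new builtins share one trampoline shape: enter an internal frame, pass the function and the concurrency flag to the runtime, then tail-call whatever code object the runtime returned. A C++ model of that shape (illustrative; the real builtins are machine code emitted by CallCompileOptimized and GenerateTailCallToReturnedCode above, and none of these names are V8 API):

// Illustrative C++ model of the shared trampoline shape.
using Code = const void*;

// Stand-in for the Runtime::kCompileOptimized call. In the builtin, the
// function, the call-kind register, and the concurrency flag are pushed
// inside an internal frame before this call.
static Code RuntimeCompileOptimizedStub(void* function, bool concurrent) {
  (void)function;
  (void)concurrent;
  return nullptr;  // the runtime returns the code object to execute
}

static Code CompileOptimizedTrampoline(void* function, bool concurrent) {
  Code code = RuntimeCompileOptimizedStub(function, concurrent);
  // GenerateTailCallToReturnedCode then jumps past the Code header
  // straight into the returned object's instruction stream.
  return code;
}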


@ -104,16 +104,15 @@ static Handle<JSFunction> Compile(const char* source) {
Handle<String> source_code(
isolate->factory()->NewStringFromUtf8(CStrVector(source)));
Handle<SharedFunctionInfo> shared_function =
Compiler::Compile(source_code,
Handle<String>(),
0,
0,
false,
Handle<Context>(isolate->native_context()),
NULL,
NULL,
Handle<String>::null(),
NOT_NATIVES_CODE);
Compiler::CompileScript(source_code,
Handle<String>(),
0,
0,
false,
Handle<Context>(isolate->native_context()),
NULL, NULL,
Handle<String>::null(),
NOT_NATIVES_CODE);
return isolate->factory()->NewFunctionFromSharedFunctionInfo(
shared_function, isolate->native_context());
}


@ -59,3 +59,5 @@ assertUnoptimized(add_field, "no sync");
%UnblockConcurrentRecompilation();
// Sync with background thread to conclude optimization that bailed out.
assertUnoptimized(add_field, "sync");
// Clear type info for stress runs.
%ClearFunctionTypeFeedback(add_field);


@ -54,3 +54,5 @@ assertUnoptimized(f, "no sync");
// Optimization eventually bails out due to map dependency.
assertUnoptimized(f, "sync");
assertEquals(2, f(o));
// Clear type info for stress runs.
%ClearFunctionTypeFeedback(f);


@ -55,3 +55,5 @@ assertUnoptimized(f1, "no sync");
// Sync with background thread to conclude optimization, which bails out
// due to map dependency.
assertUnoptimized(f1, "sync");
// Clear type info for stress runs.
%ClearFunctionTypeFeedback(f1);


@ -149,9 +149,9 @@ var knownProblems = {
"PushCatchContext": true,
"PushBlockContext": true,
"PushModuleContext": true,
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
"CompileUnoptimized": true,
"CompileOptimized": true,
"CompileOptimizedConcurrent": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,


@ -149,9 +149,9 @@ var knownProblems = {
"PushCatchContext": true,
"PushBlockContext": true,
"PushModuleContext": true,
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
"CompileUnoptimized": true,
"CompileOptimized": true,
"CompileOptimizedConcurrent": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,


@ -149,9 +149,9 @@ var knownProblems = {
"PushCatchContext": true,
"PushBlockContext": true,
"PushModuleContext": true,
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
"CompileUnoptimized": true,
"CompileOptimized": true,
"CompileOptimizedConcurrent": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,


@ -149,9 +149,9 @@ var knownProblems = {
"PushCatchContext": true,
"PushBlockContext": true,
"PushModuleContext": true,
"LazyCompile": true,
"LazyRecompile": true,
"ConcurrentRecompile": true,
"CompileUnoptimized": true,
"CompileOptimized": true,
"CompileOptimizedConcurrent": true,
"NotifyDeoptimized": true,
"NotifyStubFailure": true,
"NotifyOSR": true,


@ -25,7 +25,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --use-osr --allow-natives-syntax
// Flags: --use-osr --allow-natives-syntax --no-concurrent-osr
function f() {
do {