Reland "Optimize function across closures." (again).

Review URL: https://codereview.chromium.org/707463002

Cr-Commit-Position: refs/heads/master@{#25367}
This commit is contained in:
yangguo 2014-11-17 00:42:45 -08:00 committed by Commit bot
parent ef41de10db
commit 6714365a30
9 changed files with 112 additions and 71 deletions

View File

@@ -1555,47 +1555,67 @@ HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
AddIncrementCounter(counters->fast_new_closure_total());
// Create a new closure from the given function info in new space
HValue* size = Add<HConstant>(JSFunction::kSize);
HInstruction* js_function = Add<HAllocate>(size, HType::JSObject(),
NOT_TENURED, JS_FUNCTION_TYPE);
int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
casted_stub()->kind());
// Compute the function map in the current native context and set that
// as the map of the allocated object.
HInstruction* native_context = BuildGetNativeContext();
HInstruction* map_slot_value = Add<HLoadNamedField>(
native_context, static_cast<HValue*>(NULL),
HObjectAccess::ForContextSlot(map_index));
Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
// Initialize the rest of the function.
Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
empty_fixed_array);
Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
empty_fixed_array);
Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
empty_fixed_array);
Add<HStoreNamedField>(js_function, HObjectAccess::ForPrototypeOrInitialMap(),
graph()->GetConstantHole());
Add<HStoreNamedField>(js_function,
HObjectAccess::ForSharedFunctionInfoPointer(),
shared_info);
Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
context());
// Initialize the code pointer in the function to be the one
// found in the shared function info object.
// But first check if there is an optimized version for our context.
if (FLAG_cache_optimized_code) {
BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
} else {
BuildInstallCode(js_function, shared_info);
IfBuilder optimize_now(this);
HInstruction* compile_hint = Add<HLoadNamedField>(
shared_info, static_cast<HValue*>(NULL), HObjectAccess::ForCompileHint());
HValue* hint_mask = Add<HConstant>(
static_cast<int32_t>(1 << SharedFunctionInfo::kOptimizeNextClosure));
HInstruction* optimize =
AddUncasted<HBitwise>(Token::BIT_AND, compile_hint, hint_mask);
optimize_now.If<HCompareNumericAndBranch>(optimize, hint_mask, Token::EQ);
optimize_now.Then();
{
Add<HPushArguments>(context(), shared_info, graph()->GetConstantFalse());
Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
Runtime::FunctionForId(Runtime::kNewClosure), 3));
}
optimize_now.Else();
{
// Create a new closure from the given function info in new space
HValue* size = Add<HConstant>(JSFunction::kSize);
HInstruction* js_function =
Add<HAllocate>(size, HType::JSObject(), NOT_TENURED, JS_FUNCTION_TYPE);
return js_function;
int map_index = Context::FunctionMapIndex(casted_stub()->strict_mode(),
casted_stub()->kind());
// Compute the function map in the current native context and set that
// as the map of the allocated object.
HInstruction* native_context = BuildGetNativeContext();
HInstruction* map_slot_value =
Add<HLoadNamedField>(native_context, static_cast<HValue*>(NULL),
HObjectAccess::ForContextSlot(map_index));
Add<HStoreNamedField>(js_function, HObjectAccess::ForMap(), map_slot_value);
// Initialize the rest of the function.
Add<HStoreNamedField>(js_function, HObjectAccess::ForPropertiesPointer(),
empty_fixed_array);
Add<HStoreNamedField>(js_function, HObjectAccess::ForElementsPointer(),
empty_fixed_array);
Add<HStoreNamedField>(js_function, HObjectAccess::ForLiteralsPointer(),
empty_fixed_array);
Add<HStoreNamedField>(js_function,
HObjectAccess::ForPrototypeOrInitialMap(),
graph()->GetConstantHole());
Add<HStoreNamedField>(js_function,
HObjectAccess::ForSharedFunctionInfoPointer(),
shared_info);
Add<HStoreNamedField>(
js_function, HObjectAccess::ForFunctionContextPointer(), context());
// Initialize the code pointer in the function to be the one
// found in the shared function info object.
// But first check if there is an optimized version for our context.
if (FLAG_cache_optimized_code) {
BuildInstallFromOptimizedCodeMap(js_function, shared_info,
native_context);
} else {
BuildInstallCode(js_function, shared_info);
}
Push(js_function);
}
optimize_now.End();
return Pop();
}

View File

@@ -1382,6 +1382,7 @@ MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
PostponeInterruptsScope postpone(isolate);
Handle<SharedFunctionInfo> shared = info->shared_info();
shared->set_optimize_next_closure(false);
if (shared->code()->kind() != Code::FUNCTION ||
ScopeInfo::Empty(isolate) == shared->scope_info()) {
// The function was never compiled. Compile it unoptimized first.

View File

@@ -1372,6 +1372,14 @@ Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
}
// Whether a freshly created closure over |info| is a candidate for
// optimized compilation.  Requires crankshaft, an already-compiled,
// lazily-compilable, non-top-level shared function info with optimization
// enabled, and no active debugger break points.
static bool ShouldOptimizeNewClosure(Isolate* isolate,
                                     Handle<SharedFunctionInfo> info) {
  // Guard clauses mirror the original conjunction's short-circuit order.
  if (!isolate->use_crankshaft()) return false;
  if (info->is_toplevel()) return false;
  if (!info->is_compiled()) return false;
  if (!info->allows_lazy_compilation()) return false;
  if (info->optimization_disabled()) return false;
  return !isolate->DebuggerHasBreakPoints();
}
Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
Handle<SharedFunctionInfo> info,
Handle<Context> context,
@@ -1409,14 +1417,11 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
return result;
}
if (isolate()->use_crankshaft() &&
FLAG_always_opt &&
result->is_compiled() &&
!info->is_toplevel() &&
info->allows_lazy_compilation() &&
!info->optimization_disabled() &&
!isolate()->DebuggerHasBreakPoints()) {
if (FLAG_always_opt && ShouldOptimizeNewClosure(isolate(), info)) {
result->MarkForOptimization();
} else if (info->optimize_next_closure() &&
ShouldOptimizeNewClosure(isolate(), info)) {
result->AttemptConcurrentOptimization();
}
return result;
}

View File

@@ -6171,6 +6171,18 @@ class HObjectAccess FINAL {
SharedFunctionInfo::kOptimizedCodeMapOffset);
}
static HObjectAccess ForCompileHint() {
  // Compile hints are stored in the upper half of a pseudo-smi, which for
  // 64-bit means that the representation is an integer.
#if V8_HOST_ARCH_32_BIT
  return HObjectAccess(kInobject, SharedFunctionInfo::kCompilerHintsOffset,
                       Representation::Smi());
#else
  return HObjectAccess(kInobject, SharedFunctionInfo::kCompilerHintsOffset,
                       Representation::Integer32());
#endif
}
// Access descriptor for the in-object context slot of a JSFunction.
static HObjectAccess ForFunctionContextPointer() {
return HObjectAccess(kInobject, JSFunction::kContextOffset);
}

View File

@@ -5573,9 +5573,9 @@ BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
kIsTopLevelBit)
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,
allows_lazy_compilation,
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, optimize_next_closure,
kOptimizeNextClosure)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
compiler_hints,

View File

@@ -9408,12 +9408,27 @@ void JSFunction::MarkForOptimization() {
}
void JSFunction::MarkForConcurrentOptimization() {
DCHECK(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
void JSFunction::AttemptConcurrentOptimization() {
Isolate* isolate = GetIsolate();
if (!isolate->concurrent_recompilation_enabled() ||
isolate->bootstrapper()->IsActive()) {
MarkForOptimization();
return;
}
if (isolate->concurrent_osr_enabled() &&
isolate->optimizing_compiler_thread()->IsQueuedForOSR(this)) {
// Do not attempt regular recompilation if we already queued this for OSR.
// TODO(yangguo): This is necessary so that we don't install optimized
// code on a function that is already optimized, since OSR and regular
// recompilation race. This goes away as soon as OSR becomes one-shot.
return;
}
DCHECK(!IsInOptimizationQueue());
DCHECK(is_compiled() || isolate->DebuggerHasBreakPoints());
DCHECK(!IsOptimized());
DCHECK(shared()->allows_lazy_compilation() || code()->optimizable());
DCHECK(!shared()->is_generator());
DCHECK(GetIsolate()->concurrent_recompilation_enabled());
DCHECK(isolate->concurrent_recompilation_enabled());
if (FLAG_trace_concurrent_recompilation) {
PrintF(" ** Marking ");
ShortPrint();

View File

@@ -6794,6 +6794,8 @@ class SharedFunctionInfo: public HeapObject {
inline int ic_age();
inline void set_ic_age(int age);
DECL_BOOLEAN_ACCESSORS(optimize_next_closure)
// Indicates if this function can be lazy compiled.
// This is used to determine if we can safely flush code from a function
// when doing GC if we expect that the function will no longer be used.
@@ -7096,6 +7098,7 @@ class SharedFunctionInfo: public HeapObject {
enum CompilerHints {
kAllowLazyCompilation,
kAllowLazyCompilationWithoutContext,
kOptimizeNextClosure,
kOptimizationDisabled,
kStrictModeFunction,
kUsesArguments,
@@ -7323,7 +7326,7 @@ class JSFunction: public JSObject {
// Mark this function for lazy recompilation. The function will be
// recompiled the next time it is executed.
void MarkForOptimization();
void MarkForConcurrentOptimization();
void AttemptConcurrentOptimization();
void MarkInOptimizationQueue();
// Tells whether or not the function is already marked for lazy

View File

@@ -106,23 +106,8 @@ void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
PrintF("]\n");
}
if (isolate_->concurrent_recompilation_enabled() &&
!isolate_->bootstrapper()->IsActive()) {
if (isolate_->concurrent_osr_enabled() &&
isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) {
// Do not attempt regular recompilation if we already queued this for OSR.
// TODO(yangguo): This is necessary so that we don't install optimized
// code on a function that is already optimized, since OSR and regular
// recompilation race. This goes away as soon as OSR becomes one-shot.
return;
}
DCHECK(!function->IsInOptimizationQueue());
function->MarkForConcurrentOptimization();
} else {
// The next call to the function will trigger optimization.
function->MarkForOptimization();
}
function->shared()->set_optimize_next_closure(true);
function->AttemptConcurrentOptimization();
}

View File

@@ -75,7 +75,7 @@ RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) {
*function, Code::kMaxLoopNestingMarker);
} else if (type->IsOneByteEqualTo(STATIC_CHAR_VECTOR("concurrent")) &&
isolate->concurrent_recompilation_enabled()) {
function->MarkForConcurrentOptimization();
function->AttemptConcurrentOptimization();
}
}