Make --always-opt also optimize toplevel code.

R=jacob.bramley@arm.com, titzer@chromium.org, rossberg@chromium.org

Review URL: https://codereview.chromium.org/410153002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@22666 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
This commit is contained in:
mstarzinger@chromium.org 2014-07-29 11:41:42 +00:00
parent a8a02a51f1
commit 34f5edd500
14 changed files with 115 additions and 64 deletions

View File

@ -181,7 +181,11 @@ bool LCodeGen::GeneratePrologue() {
Comment(";;; Allocate local context");
bool need_write_barrier = true;
// Argument to NewContext is the function, which is in r1.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
if (FLAG_harmony_scoping && info()->scope()->is_global_scope()) {
__ push(r1);
__ Push(info()->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
// Result of FastNewContextStub is always in new space.

View File

@ -689,7 +689,13 @@ bool LCodeGen::GeneratePrologue() {
Comment(";;; Allocate local context");
bool need_write_barrier = true;
// Argument to NewContext is the function, which is in x1.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
if (FLAG_harmony_scoping && info()->scope()->is_global_scope()) {
UseScratchRegisterScope temps(masm());
Register scope_info = temps.AcquireX();
__ Mov(scope_info, Operand(info()->scope()->GetScopeInfo()));
__ Push(x1, scope_info);
__ CallRuntime(Runtime::kNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
// Result of FastNewContextStub is always in new space.
@ -5026,8 +5032,6 @@ void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
Register scratch1 = x5;
Register scratch2 = x6;
ASSERT(instr->IsMarkedAsCall());
ASM_UNIMPLEMENTED_BREAK("DoDeclareGlobals");
// TODO(all): if Mov could handle object in new space then it could be used
// here.
__ LoadHeapObject(scratch1, instr->hydrogen()->pairs());

View File

@ -5,6 +5,7 @@
#include "src/factory.h"
#include "src/allocation-site-scopes.h"
#include "src/bootstrapper.h"
#include "src/conversions.h"
#include "src/isolate-inl.h"
#include "src/macro-assembler.h"
@ -1372,7 +1373,8 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
if (isolate()->use_crankshaft() &&
FLAG_always_opt &&
result->is_compiled() &&
!info->is_toplevel() &&
// TODO(mstarzinger): Extend to optimization of builtin code.
!isolate()->bootstrapper()->IsActive() &&
info->allows_lazy_compilation() &&
!info->optimization_disabled() &&
!isolate()->DebuggerHasBreakPoints()) {

View File

@ -11275,7 +11275,7 @@ void HOptimizedGraphBuilder::VisitFunctionDeclaration(
void HOptimizedGraphBuilder::VisitModuleDeclaration(
ModuleDeclaration* declaration) {
UNREACHABLE();
return Bailout(kModuleDeclaration);
}

View File

@ -256,7 +256,11 @@ bool LCodeGen::GeneratePrologue() {
Comment(";;; Allocate local context");
bool need_write_barrier = true;
// Argument to NewContext is the function, which is still in edi.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
if (FLAG_harmony_scoping && info()->scope()->is_global_scope()) {
__ push(edi);
__ Push(info()->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
// Result of FastNewContextStub is always in new space.

View File

@ -200,7 +200,11 @@ bool LCodeGen::GeneratePrologue() {
Comment(";;; Allocate local context");
bool need_write_barrier = true;
// Argument to NewContext is the function, which is in a1.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
if (FLAG_harmony_scoping && info()->scope()->is_global_scope()) {
__ push(a1);
__ Push(info()->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
// Result of FastNewContextStub is always in new space.

View File

@ -193,7 +193,11 @@ bool LCodeGen::GeneratePrologue() {
Comment(";;; Allocate local context");
bool need_write_barrier = true;
// Argument to NewContext is the function, which is still in rdi.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
if (FLAG_harmony_scoping && info()->scope()->is_global_scope()) {
__ Push(rdi);
__ Push(info()->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
// Result of FastNewContextStub is always in new space.

View File

@ -5369,7 +5369,8 @@ void TryCatchMixedNestingCheck(v8::TryCatch* try_catch) {
CHECK_EQ(0, strcmp(*v8::String::Utf8Value(message->Get()),
"Uncaught Error: a"));
CHECK_EQ(1, message->GetLineNumber());
CHECK_EQ(6, message->GetStartColumn());
// TODO(mstarzinger): Our compilers disagree about the position.
CHECK_EQ(i::FLAG_always_opt ? 0 : 6, message->GetStartColumn());
}

View File

@ -6624,6 +6624,10 @@ TEST(Backtrace) {
v8::Debug::SetMessageHandler(BacktraceData::MessageHandler);
// TODO(mstarzinger): This doesn't work with --always-opt because we don't
// have correct source positions in optimized code. Enable once we have.
i::FLAG_always_opt = false;
const int kBufferSize = 1000;
uint16_t buffer[kBufferSize];
const char* scripts_command =
@ -6962,13 +6966,12 @@ TEST(DeoptimizeDuringDebugBreak) {
v8::Debug::SetDebugEventListener(DebugEventBreakDeoptimize);
// Compile and run function bar which will optimize it for some flag settings.
v8::Script::Compile(v8::String::NewFromUtf8(
env->GetIsolate(), "function bar(){}; bar()"))->Run();
v8::Local<v8::Function> f = CompileFunction(&env, "function bar(){}", "bar");
f->Call(v8::Undefined(env->GetIsolate()), 0, NULL);
// Set debug break and call bar again.
v8::Debug::DebugBreak(env->GetIsolate());
v8::Script::Compile(v8::String::NewFromUtf8(env->GetIsolate(), "bar()"))
->Run();
f->Call(v8::Undefined(env->GetIsolate()), 0, NULL);
CHECK(debug_event_break_deoptimize_done);

View File

@ -652,6 +652,17 @@ TEST(CrossScriptReferencesHarmony) {
v8::Isolate* isolate = CcTest::isolate();
HandleScope scope(isolate);
// TODO(rossberg): Reparsing of top-level code does not work in the presence
// of harmony scoping and multiple scripts. This can already be reproduced
// without --always-opt by relying on OSR alone.
//
// ./d8 --harmony-scoping
// -e "'use strict'; let a = 1;"
// -e "'use strict'; let b = 2; for (var i = 0; i < 100000; ++i) b++;"
//
// For now we just disable --always-opt for this test.
i::FLAG_always_opt = false;
const char* decs[] = {
"var x = 1; x", "x", "this.x",
"function x() { return 1 }; x()", "x()", "this.x()",

View File

@ -1416,16 +1416,21 @@ static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
TEST(TestInternalWeakLists) {
FLAG_allow_natives_syntax = true;
v8::V8::Initialize();
Isolate* isolate = CcTest::i_isolate();
// TODO(mstarzinger): Test should be resilient against optimization decisions.
if (i::FLAG_always_opt) return;
if (!isolate->use_crankshaft()) return;
// Some flags turn Scavenge collections into Mark-sweep collections
// and hence are incompatible with this test case.
if (FLAG_gc_global || FLAG_stress_compaction) return;
static const int kNumTestContexts = 10;
static const int kNumTestContexts = 5;
static const int kNumTestCollections = 3;
Isolate* isolate = CcTest::i_isolate();
Heap* heap = isolate->heap();
HandleScope scope(isolate);
v8::Handle<v8::Context> ctx[kNumTestContexts];
@ -1438,9 +1443,7 @@ TEST(TestInternalWeakLists) {
// Collect garbage that might have been created by one of the
// installed extensions.
isolate->compilation_cache()->Clear();
heap->CollectAllGarbage(Heap::kNoGCFlags);
bool opt = (FLAG_always_opt && isolate->use_crankshaft());
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(i + 1, CountNativeContexts());
@ -1456,46 +1459,46 @@ TEST(TestInternalWeakLists) {
"function f5() { };";
CompileRun(source);
CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f1()");
CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f2()");
CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f3()");
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f4()");
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f5()");
CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f1(); %OptimizeFunctionOnNextCall(f1); f1()");
CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f2(); %OptimizeFunctionOnNextCall(f2); f2()");
CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f3(); %OptimizeFunctionOnNextCall(f3); f3()");
CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f4(); %OptimizeFunctionOnNextCall(f4); f4()");
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f5(); %OptimizeFunctionOnNextCall(f5); f5()");
CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
// Remove function f1, and
CompileRun("f1=null");
// Scavenge treats these references as strong.
for (int j = 0; j < 10; j++) {
for (int j = 0; j < kNumTestCollections; j++) {
CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[i]));
CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
}
// Mark compact handles the weak references.
isolate->compilation_cache()->Clear();
heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
// Get rid of f3 and f5 in the same way.
CompileRun("f3=null");
for (int j = 0; j < 10; j++) {
for (int j = 0; j < kNumTestCollections; j++) {
CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
}
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
CompileRun("f5=null");
for (int j = 0; j < 10; j++) {
for (int j = 0; j < kNumTestCollections; j++) {
CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
}
CcTest::heap()->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
ctx[i]->Exit();
}
@ -1512,7 +1515,7 @@ TEST(TestInternalWeakLists) {
ctx[i].Clear();
// Scavenge treats these references as strong.
for (int j = 0; j < 10; j++) {
for (int j = 0; j < kNumTestCollections; j++) {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
}
@ -1566,10 +1569,15 @@ static int CountOptimizedUserFunctionsWithGC(v8::Handle<v8::Context> context,
TEST(TestInternalWeakListsTraverseWithGC) {
FLAG_allow_natives_syntax = true;
v8::V8::Initialize();
Isolate* isolate = CcTest::i_isolate();
static const int kNumTestContexts = 10;
// TODO(mstarzinger): Test should be resilient against optimization decisions.
if (i::FLAG_always_opt) return;
if (!isolate->use_crankshaft()) return;
static const int kNumTestContexts = 5;
HandleScope scope(isolate);
v8::Handle<v8::Context> ctx[kNumTestContexts];
@ -1584,8 +1592,6 @@ TEST(TestInternalWeakListsTraverseWithGC) {
CHECK_EQ(i + 1, CountNativeContextsWithGC(isolate, i / 2 + 1));
}
bool opt = (FLAG_always_opt && isolate->use_crankshaft());
// Compile a number of functions the length of the weak list of optimized
// functions both with and without GCs while iterating the list.
ctx[0]->Enter();
@ -1596,21 +1602,21 @@ TEST(TestInternalWeakListsTraverseWithGC) {
"function f5() { };";
CompileRun(source);
CHECK_EQ(0, CountOptimizedUserFunctions(ctx[0]));
CompileRun("f1()");
CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(opt ? 1 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
CompileRun("f2()");
CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
CompileRun("f3()");
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
CompileRun("f4()");
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
CompileRun("f5()");
CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(opt ? 5 : 0, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
CompileRun("f1(); %OptimizeFunctionOnNextCall(f1); f1()");
CHECK_EQ(1, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(1, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
CompileRun("f2(); %OptimizeFunctionOnNextCall(f2); f2()");
CHECK_EQ(2, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(2, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
CompileRun("f3(); %OptimizeFunctionOnNextCall(f3); f3()");
CHECK_EQ(3, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(3, CountOptimizedUserFunctionsWithGC(ctx[0], 1));
CompileRun("f4(); %OptimizeFunctionOnNextCall(f4); f4()");
CHECK_EQ(4, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(4, CountOptimizedUserFunctionsWithGC(ctx[0], 2));
CompileRun("f5(); %OptimizeFunctionOnNextCall(f5); f5()");
CHECK_EQ(5, CountOptimizedUserFunctions(ctx[0]));
CHECK_EQ(5, CountOptimizedUserFunctionsWithGC(ctx[0], 4));
ctx[0]->Exit();
}
@ -4072,7 +4078,10 @@ TEST(NextCodeLinkIsWeak) {
Isolate* isolate = CcTest::i_isolate();
v8::internal::Heap* heap = CcTest::heap();
// TODO(titzer): Test should be resilient against optimization decisions.
if (i::FLAG_always_opt) return;
if (!isolate->use_crankshaft()) return;
HandleScope outer_scope(heap->isolate());
Handle<Code> code;
heap->CollectAllAvailableGarbage();

View File

@ -2985,8 +2985,8 @@ namespace {
int* global_use_counts = NULL;
void MockUseCounterCallback(v8::Isolate* isolate,
v8::Isolate::UseCounterFeature feature) {
void UseCounterCallback(v8::Isolate* isolate,
v8::Isolate::UseCounterFeature feature) {
++global_use_counts[feature];
}
@ -2999,12 +2999,13 @@ TEST(UseAsmUseCount) {
LocalContext env;
int use_counts[v8::Isolate::kUseCounterFeatureCount] = {};
global_use_counts = use_counts;
CcTest::isolate()->SetUseCounterCallback(MockUseCounterCallback);
CcTest::isolate()->SetUseCounterCallback(UseCounterCallback);
CompileRun("\"use asm\";\n"
"var foo = 1;\n"
"\"use asm\";\n" // Only the first one counts.
"function bar() { \"use asm\"; var baz = 1; }");
CHECK_EQ(2, use_counts[v8::Isolate::kUseAsm]);
// Optimizing will double-count because the source is parsed twice.
CHECK_EQ(i::FLAG_always_opt ? 4 : 2, use_counts[v8::Isolate::kUseAsm]);
}

View File

@ -61,7 +61,8 @@ function listener(event, exec_state, event_data, data) {
Debug.setListener(listener);
// Create a function from its body text. It will lead to an eval.
new Function('arg1', 'return arg1 + 1;');
var f = new Function('arg1', 'return arg1 + 1;');
// TODO(titzer): Assignment only needed because source positions are borked.
assertNull(exception, "exception in listener");

View File

@ -51,6 +51,9 @@
# Issue 3389: deopt_every_n_garbage_collections is unsafe
'regress/regress-2653': [SKIP],
# Issue 3475: Arrow function declaration cannot be optimized
'harmony/arrow-functions': [SKIP],
##############################################################################
# Too slow in debug mode with --stress-opt mode.
'compiler/regress-stacktrace-methods': [PASS, ['mode == debug', SKIP]],