Remove overzealous checking of --cache-optimized-code flag.

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/1206803003

Cr-Commit-Position: refs/heads/master@{#29284}
This commit is contained in:
mstarzinger 2015-06-25 02:44:58 -07:00 committed by Commit bot
parent e21f122865
commit 8f6bca542f
6 changed files with 20 additions and 29 deletions

View File

@@ -1818,14 +1818,10 @@ HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(), Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
context()); context());
// Initialize the code pointer in the function to be the one // Initialize the code pointer in the function to be the one found in the
// found in the shared function info object. // shared function info object. But first check if there is an optimized
// But first check if there is an optimized version for our context. // version for our context.
if (FLAG_cache_optimized_code) {
BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context); BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
} else {
BuildInstallCode(js_function, shared_info);
}
return js_function; return js_function;
} }

View File

@@ -691,7 +691,6 @@ MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap( MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
Handle<JSFunction> function, BailoutId osr_ast_id) { Handle<JSFunction> function, BailoutId osr_ast_id) {
if (FLAG_cache_optimized_code) {
Handle<SharedFunctionInfo> shared(function->shared()); Handle<SharedFunctionInfo> shared(function->shared());
DisallowHeapAllocation no_gc; DisallowHeapAllocation no_gc;
CodeAndLiterals cached = shared->SearchOptimizedCodeMap( CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
@@ -703,7 +702,6 @@ MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
DCHECK(function->shared()->is_compiled()); DCHECK(function->shared()->is_compiled());
return Handle<Code>(cached.code); return Handle<Code>(cached.code);
} }
}
return MaybeHandle<Code>(); return MaybeHandle<Code>();
} }

View File

@@ -899,7 +899,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
PrintF(" - age: %d]\n", code->GetAge()); PrintF(" - age: %d]\n", code->GetAge());
} }
// Always flush the optimized code map if requested by flag. // Always flush the optimized code map if requested by flag.
if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache && if (FLAG_flush_optimized_code_cache &&
!shared->optimized_code_map()->IsSmi()) { !shared->optimized_code_map()->IsSmi()) {
shared->ClearOptimizedCodeMap(); shared->ClearOptimizedCodeMap();
} }
@@ -947,7 +947,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
PrintF(" - age: %d]\n", code->GetAge()); PrintF(" - age: %d]\n", code->GetAge());
} }
// Always flush the optimized code map if requested by flag. // Always flush the optimized code map if requested by flag.
if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache && if (FLAG_flush_optimized_code_cache &&
!candidate->optimized_code_map()->IsSmi()) { !candidate->optimized_code_map()->IsSmi()) {
candidate->ClearOptimizedCodeMap(); candidate->ClearOptimizedCodeMap();
} }

View File

@@ -409,14 +409,14 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
if (FLAG_cleanup_code_caches_at_gc) { if (FLAG_cleanup_code_caches_at_gc) {
shared->ClearTypeFeedbackInfoAtGCTime(); shared->ClearTypeFeedbackInfoAtGCTime();
} }
if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache && if (FLAG_flush_optimized_code_cache &&
!shared->optimized_code_map()->IsSmi()) { !shared->optimized_code_map()->IsSmi()) {
// Always flush the optimized code map if requested by flag. // Always flush the optimized code map if requested by flag.
shared->ClearOptimizedCodeMap(); shared->ClearOptimizedCodeMap();
} }
MarkCompactCollector* collector = heap->mark_compact_collector(); MarkCompactCollector* collector = heap->mark_compact_collector();
if (collector->is_code_flushing_enabled()) { if (collector->is_code_flushing_enabled()) {
if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { if (!shared->optimized_code_map()->IsSmi()) {
// Add the shared function info holding an optimized code map to // Add the shared function info holding an optimized code map to
// the code flusher for processing of code maps after marking. // the code flusher for processing of code maps after marking.
collector->code_flusher()->AddOptimizedCodeMap(shared); collector->code_flusher()->AddOptimizedCodeMap(shared);
@@ -438,7 +438,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
return; return;
} }
} else { } else {
if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) { if (!shared->optimized_code_map()->IsSmi()) {
// Flush optimized code map on major GCs without code flushing, // Flush optimized code map on major GCs without code flushing,
// needed because cached code doesn't contain breakpoints. // needed because cached code doesn't contain breakpoints.
shared->ClearOptimizedCodeMap(); shared->ClearOptimizedCodeMap();

View File

@@ -10647,7 +10647,6 @@ CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
Context* native_context, BailoutId osr_ast_id) { Context* native_context, BailoutId osr_ast_id) {
DisallowHeapAllocation no_gc; DisallowHeapAllocation no_gc;
DCHECK(native_context->IsNativeContext()); DCHECK(native_context->IsNativeContext());
if (!FLAG_cache_optimized_code) return {nullptr, nullptr};
Object* value = optimized_code_map(); Object* value = optimized_code_map();
if (!value->IsSmi()) { if (!value->IsSmi()) {
FixedArray* optimized_code_map = FixedArray::cast(value); FixedArray* optimized_code_map = FixedArray::cast(value);

View File

@@ -371,11 +371,9 @@ TEST(FeedbackVectorUnaffectedByScopeChanges) {
// Test that optimized code for different closures is actually shared // Test that optimized code for different closures is actually shared
// immediately by the FastNewClosureStub when run in the same context. // immediately by the FastNewClosureStub when run in the same context.
TEST(OptimizedCodeSharing) { TEST(OptimizedCodeSharing) {
// Skip test if --cache-optimized-code is not activated by default because
// FastNewClosureStub that is baked into the snapshot is incorrect.
if (!FLAG_cache_optimized_code) return;
FLAG_stress_compaction = false; FLAG_stress_compaction = false;
FLAG_allow_natives_syntax = true; FLAG_allow_natives_syntax = true;
FLAG_cache_optimized_code = true;
CcTest::InitializeVM(); CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate()); v8::HandleScope scope(CcTest::isolate());
for (int i = 0; i < 10; i++) { for (int i = 0; i < 10; i++) {