Remove overzealous checking of --cache-optimized-code flag.

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/1206803003

Cr-Commit-Position: refs/heads/master@{#29284}
Author: mstarzinger (2015-06-25 02:44:58 -07:00), committed by Commit bot
Parent: e21f122865
Commit: 8f6bca542f
6 changed files with 20 additions and 29 deletions
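Every hunk below follows the same pattern: call sites stop guarding optimized-code-map lookups behind --cache-optimized-code, because an empty code map already behaves like a cache miss (SearchOptimizedCodeMap yields {nullptr, nullptr}, and GetCodeFromOptimizedCodeMap returns an empty MaybeHandle). The minimal, self-contained C++ sketch below only illustrates that idea; OptimizedCodeCache, GetCode and the other names are hypothetical stand-ins, not V8 code.

// Hypothetical illustration only -- not V8 source. An empty cache is just a
// cache that always misses, so querying it unconditionally is safe and the
// per-call-site feature-flag guard becomes redundant.
#include <cstdio>
#include <unordered_map>

struct Code { int id; };

class OptimizedCodeCache {
 public:
  // Returns nullptr on a miss; an empty cache simply always misses.
  const Code* Lookup(int context_id) const {
    auto it = map_.find(context_id);
    return it == map_.end() ? nullptr : &it->second;
  }
  void Insert(int context_id, Code code) { map_[context_id] = code; }

 private:
  std::unordered_map<int, Code> map_;
};

// After the change: consult the cache unconditionally and fall back to the
// baseline code on a miss, instead of checking a flag before touching it.
const Code* GetCode(const OptimizedCodeCache& cache, int context_id,
                    const Code* baseline) {
  if (const Code* cached = cache.Lookup(context_id)) return cached;
  return baseline;
}

int main() {
  OptimizedCodeCache cache;
  Code baseline{0};
  std::printf("miss -> %d\n", GetCode(cache, 1, &baseline)->id);  // prints 0
  cache.Insert(1, Code{42});
  std::printf("hit  -> %d\n", GetCode(cache, 1, &baseline)->id);  // prints 42
  return 0;
}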


@@ -1818,14 +1818,10 @@ HValue* CodeStubGraphBuilder<FastNewClosureStub>::BuildCodeStub() {
   Add<HStoreNamedField>(js_function, HObjectAccess::ForFunctionContextPointer(),
                         context());
-  // Initialize the code pointer in the function to be the one
-  // found in the shared function info object.
-  // But first check if there is an optimized version for our context.
-  if (FLAG_cache_optimized_code) {
+  // Initialize the code pointer in the function to be the one found in the
+  // shared function info object. But first check if there is an optimized
+  // version for our context.
   BuildInstallFromOptimizedCodeMap(js_function, shared_info, native_context);
-  } else {
-    BuildInstallCode(js_function, shared_info);
-  }
   return js_function;
 }


@@ -691,7 +691,6 @@ MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
     Handle<JSFunction> function, BailoutId osr_ast_id) {
-  if (FLAG_cache_optimized_code) {
   Handle<SharedFunctionInfo> shared(function->shared());
   DisallowHeapAllocation no_gc;
   CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
@@ -703,7 +702,6 @@ MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
     DCHECK(function->shared()->is_compiled());
     return Handle<Code>(cached.code);
   }
-  }
   return MaybeHandle<Code>();
 }


@@ -899,7 +899,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
         PrintF(" - age: %d]\n", code->GetAge());
       }
       // Always flush the optimized code map if requested by flag.
-      if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+      if (FLAG_flush_optimized_code_cache &&
           !shared->optimized_code_map()->IsSmi()) {
         shared->ClearOptimizedCodeMap();
       }
@@ -947,7 +947,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
         PrintF(" - age: %d]\n", code->GetAge());
       }
       // Always flush the optimized code map if requested by flag.
-      if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+      if (FLAG_flush_optimized_code_cache &&
           !candidate->optimized_code_map()->IsSmi()) {
         candidate->ClearOptimizedCodeMap();
       }


@@ -409,14 +409,14 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
   if (FLAG_cleanup_code_caches_at_gc) {
     shared->ClearTypeFeedbackInfoAtGCTime();
   }
-  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
+  if (FLAG_flush_optimized_code_cache &&
       !shared->optimized_code_map()->IsSmi()) {
     // Always flush the optimized code map if requested by flag.
     shared->ClearOptimizedCodeMap();
   }
   MarkCompactCollector* collector = heap->mark_compact_collector();
   if (collector->is_code_flushing_enabled()) {
-    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
       // Add the shared function info holding an optimized code map to
       // the code flusher for processing of code maps after marking.
       collector->code_flusher()->AddOptimizedCodeMap(shared);
@@ -438,7 +438,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
       return;
     }
   } else {
-    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
       // Flush optimized code map on major GCs without code flushing,
       // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();

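In the two GC-side files above, clearing the optimized code map is now gated only on --flush-optimized-code-cache plus the existing IsSmi() sentinel check, which already distinguishes an unpopulated map from a real one. A rough sketch of that guard, with made-up names (SharedSketch, MaybeFlushCodeMap) rather than V8 source:

// Hypothetical sketch only -- not V8 source.
struct SharedSketch {
  // Stands in for shared->optimized_code_map(): an unpopulated map holds a
  // Smi sentinel, a populated one holds a FixedArray.
  bool map_is_populated = false;
  void ClearOptimizedCodeMap() { map_is_populated = false; }
};

// The cache-enable flag is gone; only the flush-request flag and the
// "map actually populated" check remain, mirroring the hunks above.
void MaybeFlushCodeMap(SharedSketch* shared, bool flush_optimized_code_cache) {
  if (flush_optimized_code_cache && shared->map_is_populated) {
    shared->ClearOptimizedCodeMap();
  }
}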

@@ -10647,7 +10647,6 @@ CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
     Context* native_context, BailoutId osr_ast_id) {
   DisallowHeapAllocation no_gc;
   DCHECK(native_context->IsNativeContext());
-  if (!FLAG_cache_optimized_code) return {nullptr, nullptr};
   Object* value = optimized_code_map();
   if (!value->IsSmi()) {
     FixedArray* optimized_code_map = FixedArray::cast(value);

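The early return removed from SharedFunctionInfo::SearchOptimizedCodeMap was redundant: as the surrounding context suggests, a never-populated code map is stored as a Smi, so the !value->IsSmi() check already lets the search fall through to the same {nullptr, nullptr} result that the flag check used to force. A hedged sketch of that control flow, using hypothetical types rather than V8 source:

// Hypothetical sketch only -- not V8 source. Mirrors the hunk above: a
// Smi-valued map slot means "nothing cached", so the search yields the null
// pair without needing any feature-flag check.
struct CodeAndLiteralsSketch {
  const void* code;
  const void* literals;
};

// `map_or_sentinel` plays the role of optimized_code_map(): nullptr stands in
// for the Smi sentinel, non-null for a populated FixedArray.
CodeAndLiteralsSketch SearchSketch(const void* map_or_sentinel) {
  CodeAndLiteralsSketch result = {nullptr, nullptr};
  if (map_or_sentinel != nullptr) {  // analogous to !value->IsSmi()
    // A real implementation would walk the map here and fill in `result`
    // on a hit; an unpopulated map never reaches this point.
  }
  return result;
}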

@@ -371,11 +371,9 @@ TEST(FeedbackVectorUnaffectedByScopeChanges) {
 // Test that optimized code for different closures is actually shared
 // immediately by the FastNewClosureStub when run in the same context.
 TEST(OptimizedCodeSharing) {
-  // Skip test if --cache-optimized-code is not activated by default because
-  // FastNewClosureStub that is baked into the snapshot is incorrect.
-  if (!FLAG_cache_optimized_code) return;
   FLAG_stress_compaction = false;
   FLAG_allow_natives_syntax = true;
+  FLAG_cache_optimized_code = true;
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
   for (int i = 0; i < 10; i++) {