[heap] Separate out optimized code map processing.
This separates the post-processing step for optimized code maps out of
the CodeFlusher. It uses the complete SharedFunctionInfo::Iterator to
visit all candidates instead of gathering candidates during marking.
Gathering candidates during marking no longer makes sense now that the
majority of SharedFunctionInfo objects will hold such an optimized code
map. This also reduces the complexity of the implementation, and it
avoids conflating this mechanism with "code flushing", which was
confusing.
This reverts commit b6644e8491.
R=ulan@chromium.org
Review URL: https://codereview.chromium.org/1421903012
Cr-Commit-Position: refs/heads/master@{#31836}
commit bb7a5eb2d8
parent fa4bd0b9ab
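
For orientation, a minimal self-contained C++ sketch of the idea behind the change, using stand-in types rather than V8 internals. The old scheme threaded code-map holders onto an intrusive candidate list during marking; the new pass simply walks every SharedFunctionInfo after marking, so no per-object bookkeeping is needed. The struct and its fields below are illustrative assumptions, and the real per-entry pruning is elided.

#include <cstdio>
#include <vector>

// Stand-in for v8::internal::SharedFunctionInfo; illustrative only.
struct SharedFunctionInfo {
  bool has_optimized_code_map = false;
  // Old scheme only: intrusive link used to chain gathered candidates.
  SharedFunctionInfo* next_code_map_holder = nullptr;
};

// Models the new post-marking pass: visit every shared function info via a
// complete iterator instead of a candidate list built during marking.
void ProcessAndClearOptimizedCodeMaps(std::vector<SharedFunctionInfo>& all) {
  int processed = 0;
  for (SharedFunctionInfo& shared : all) {
    if (!shared.has_optimized_code_map) continue;  // analogous to the IsSmi() check
    // ... prune unreachable entries, record slots, trim the map ...
    ++processed;
  }
  std::printf("processed %d optimized code maps\n", processed);
}

int main() {
  std::vector<SharedFunctionInfo> heap_objects(8);
  heap_objects[1].has_optimized_code_map = true;
  heap_objects[5].has_optimized_code_map = true;
  ProcessAndClearOptimizedCodeMaps(heap_objects);
  return 0;
}

The full diff follows.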
@@ -492,6 +492,7 @@ void GCTracer::PrintNVP() const {
          "mark_weakrefs=%.1f "
          "mark_globalhandles=%.1f "
          "mark_codeflush=%.1f "
+         "mark_optimizedcodemaps=%.1f "
          "store_buffer_clear=%.1f "
          "slots_buffer_clear=%.1f "
          "sweep=%.2f "
@@ -558,6 +559,7 @@ void GCTracer::PrintNVP() const {
          current_.scopes[Scope::MC_MARK_WEAK_REFERENCES],
          current_.scopes[Scope::MC_MARK_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_MARK_CODE_FLUSH],
+         current_.scopes[Scope::MC_MARK_OPTIMIZED_CODE_MAPS],
          current_.scopes[Scope::MC_STORE_BUFFER_CLEAR],
          current_.scopes[Scope::MC_SLOTS_BUFFER_CLEAR],
          current_.scopes[Scope::MC_SWEEP],
@@ -109,6 +109,7 @@ class GCTracer {
     MC_MARK_WEAK_REFERENCES,
     MC_MARK_GLOBAL_HANDLES,
     MC_MARK_CODE_FLUSH,
+    MC_MARK_OPTIMIZED_CODE_MAPS,
     MC_STORE_BUFFER_CLEAR,
     MC_SLOTS_BUFFER_CLEAR,
     MC_SWEEP,
@@ -99,14 +99,6 @@ void CodeFlusher::AddCandidate(JSFunction* function) {
 }
 
 
-void CodeFlusher::AddOptimizedCodeMap(SharedFunctionInfo* code_map_holder) {
-  if (GetNextCodeMap(code_map_holder)->IsUndefined()) {
-    SetNextCodeMap(code_map_holder, optimized_code_map_holder_head_);
-    optimized_code_map_holder_head_ = code_map_holder;
-  }
-}
-
-
 JSFunction** CodeFlusher::GetNextCandidateSlot(JSFunction* candidate) {
   return reinterpret_cast<JSFunction**>(
       HeapObject::RawField(candidate, JSFunction::kNextFunctionLinkOffset));
@@ -148,26 +140,6 @@ void CodeFlusher::ClearNextCandidate(SharedFunctionInfo* candidate) {
   candidate->code()->set_gc_metadata(NULL, SKIP_WRITE_BARRIER);
 }
 
 
-SharedFunctionInfo* CodeFlusher::GetNextCodeMap(SharedFunctionInfo* holder) {
-  FixedArray* code_map = FixedArray::cast(holder->optimized_code_map());
-  Object* next_map = code_map->get(SharedFunctionInfo::kNextMapIndex);
-  return reinterpret_cast<SharedFunctionInfo*>(next_map);
-}
-
-
-void CodeFlusher::SetNextCodeMap(SharedFunctionInfo* holder,
-                                 SharedFunctionInfo* next_holder) {
-  FixedArray* code_map = FixedArray::cast(holder->optimized_code_map());
-  code_map->set(SharedFunctionInfo::kNextMapIndex, next_holder);
-}
-
-
-void CodeFlusher::ClearNextCodeMap(SharedFunctionInfo* holder) {
-  FixedArray* code_map = FixedArray::cast(holder->optimized_code_map());
-  code_map->set_undefined(SharedFunctionInfo::kNextMapIndex);
-}
-
-
 }  // namespace internal
 }  // namespace v8
@@ -984,85 +984,6 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
 }
 
 
-void CodeFlusher::ProcessOptimizedCodeMaps() {
-  STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
-
-  SharedFunctionInfo* holder = optimized_code_map_holder_head_;
-  SharedFunctionInfo* next_holder;
-
-  while (holder != NULL) {
-    next_holder = GetNextCodeMap(holder);
-    ClearNextCodeMap(holder);
-
-    // Process context-dependent entries in the optimized code map.
-    FixedArray* code_map = FixedArray::cast(holder->optimized_code_map());
-    int new_length = SharedFunctionInfo::kEntriesStart;
-    int old_length = code_map->length();
-    for (int i = SharedFunctionInfo::kEntriesStart; i < old_length;
-         i += SharedFunctionInfo::kEntryLength) {
-      // Each entry contains [ context, code, literals, ast-id ] as fields.
-      STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
-      Context* context =
-          Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset));
-      HeapObject* code = HeapObject::cast(
-          code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
-      FixedArray* literals = FixedArray::cast(
-          code_map->get(i + SharedFunctionInfo::kLiteralsOffset));
-      Smi* ast_id =
-          Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset));
-      if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue;
-      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context)));
-      if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue;
-      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code)));
-      if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue;
-      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals)));
-      // Move every slot in the entry and record slots when needed.
-      code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code);
-      code_map->set(new_length + SharedFunctionInfo::kContextOffset, context);
-      code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals);
-      code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id);
-      Object** code_slot = code_map->RawFieldOfElementAt(
-          new_length + SharedFunctionInfo::kCachedCodeOffset);
-      isolate_->heap()->mark_compact_collector()->RecordSlot(
-          code_map, code_slot, *code_slot);
-      Object** context_slot = code_map->RawFieldOfElementAt(
-          new_length + SharedFunctionInfo::kContextOffset);
-      isolate_->heap()->mark_compact_collector()->RecordSlot(
-          code_map, context_slot, *context_slot);
-      Object** literals_slot = code_map->RawFieldOfElementAt(
-          new_length + SharedFunctionInfo::kLiteralsOffset);
-      isolate_->heap()->mark_compact_collector()->RecordSlot(
-          code_map, literals_slot, *literals_slot);
-      new_length += SharedFunctionInfo::kEntryLength;
-    }
-
-    // Process context-independent entry in the optimized code map.
-    Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
-    if (shared_object->IsCode()) {
-      Code* shared_code = Code::cast(shared_object);
-      if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) {
-        code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex);
-      } else {
-        DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code)));
-        Object** slot =
-            code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex);
-        isolate_->heap()->mark_compact_collector()->RecordSlot(code_map, slot,
-                                                               *slot);
-      }
-    }
-
-    // Trim the optimized code map if entries have been removed.
-    if (new_length < old_length) {
-      holder->TrimOptimizedCodeMap(old_length - new_length);
-    }
-
-    holder = next_holder;
-  }
-
-  optimized_code_map_holder_head_ = NULL;
-}
-
-
 void CodeFlusher::EvictCandidate(SharedFunctionInfo* shared_info) {
   // Make sure previous flushing decisions are revisited.
   isolate_->heap()->incremental_marking()->RecordWrites(shared_info);
@@ -1133,44 +1054,6 @@ void CodeFlusher::EvictCandidate(JSFunction* function) {
 }
 
 
-void CodeFlusher::EvictOptimizedCodeMap(SharedFunctionInfo* code_map_holder) {
-  FixedArray* code_map =
-      FixedArray::cast(code_map_holder->optimized_code_map());
-  DCHECK(!code_map->get(SharedFunctionInfo::kNextMapIndex)->IsUndefined());
-
-  // Make sure previous flushing decisions are revisited.
-  isolate_->heap()->incremental_marking()->RecordWrites(code_map);
-  isolate_->heap()->incremental_marking()->RecordWrites(code_map_holder);
-
-  if (FLAG_trace_code_flushing) {
-    PrintF("[code-flushing abandons code-map: ");
-    code_map_holder->ShortPrint();
-    PrintF("]\n");
-  }
-
-  SharedFunctionInfo* holder = optimized_code_map_holder_head_;
-  SharedFunctionInfo* next_holder;
-  if (holder == code_map_holder) {
-    next_holder = GetNextCodeMap(code_map_holder);
-    optimized_code_map_holder_head_ = next_holder;
-    ClearNextCodeMap(code_map_holder);
-  } else {
-    while (holder != NULL) {
-      next_holder = GetNextCodeMap(holder);
-
-      if (next_holder == code_map_holder) {
-        next_holder = GetNextCodeMap(code_map_holder);
-        SetNextCodeMap(holder, next_holder);
-        ClearNextCodeMap(code_map_holder);
-        break;
-      }
-
-      holder = next_holder;
-    }
-  }
-}
-
-
 void CodeFlusher::EvictJSFunctionCandidates() {
   JSFunction* candidate = jsfunction_candidates_head_;
   JSFunction* next_candidate;
@@ -1195,18 +1078,6 @@ void CodeFlusher::EvictSharedFunctionInfoCandidates() {
 }
 
 
-void CodeFlusher::EvictOptimizedCodeMaps() {
-  SharedFunctionInfo* holder = optimized_code_map_holder_head_;
-  SharedFunctionInfo* next_holder;
-  while (holder != NULL) {
-    next_holder = GetNextCodeMap(holder);
-    EvictOptimizedCodeMap(holder);
-    holder = next_holder;
-  }
-  DCHECK(optimized_code_map_holder_head_ == NULL);
-}
-
-
 void CodeFlusher::IteratePointersToFromSpace(ObjectVisitor* v) {
   Heap* heap = isolate_->heap();
 
@@ -2234,6 +2105,13 @@ void MarkCompactCollector::AfterMarking() {
     code_flusher_->ProcessCandidates();
   }
 
+  // Process and clear all optimized code maps.
+  if (!FLAG_flush_optimized_code_cache) {
+    GCTracer::Scope gc_scope(heap()->tracer(),
+                             GCTracer::Scope::MC_MARK_OPTIMIZED_CODE_MAPS);
+    ProcessAndClearOptimizedCodeMaps();
+  }
+
   if (FLAG_track_gc_object_stats) {
     if (FLAG_trace_gc_object_stats) {
       heap()->object_stats_->TraceObjectStats();
@@ -2243,6 +2121,72 @@ void MarkCompactCollector::AfterMarking() {
 }
 
 
+void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() {
+  SharedFunctionInfo::Iterator iterator(isolate());
+  while (SharedFunctionInfo* shared = iterator.Next()) {
+    if (shared->optimized_code_map()->IsSmi()) continue;
+
+    // Process context-dependent entries in the optimized code map.
+    FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
+    int new_length = SharedFunctionInfo::kEntriesStart;
+    int old_length = code_map->length();
+    for (int i = SharedFunctionInfo::kEntriesStart; i < old_length;
+         i += SharedFunctionInfo::kEntryLength) {
+      // Each entry contains [ context, code, literals, ast-id ] as fields.
+      STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
+      Context* context =
+          Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset));
+      HeapObject* code = HeapObject::cast(
+          code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
+      FixedArray* literals = FixedArray::cast(
+          code_map->get(i + SharedFunctionInfo::kLiteralsOffset));
+      Smi* ast_id =
+          Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset));
+      if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue;
+      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context)));
+      if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue;
+      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code)));
+      if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue;
+      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals)));
+      // Move every slot in the entry and record slots when needed.
+      code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code);
+      code_map->set(new_length + SharedFunctionInfo::kContextOffset, context);
+      code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals);
+      code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id);
+      Object** code_slot = code_map->RawFieldOfElementAt(
+          new_length + SharedFunctionInfo::kCachedCodeOffset);
+      RecordSlot(code_map, code_slot, *code_slot);
+      Object** context_slot = code_map->RawFieldOfElementAt(
+          new_length + SharedFunctionInfo::kContextOffset);
+      RecordSlot(code_map, context_slot, *context_slot);
+      Object** literals_slot = code_map->RawFieldOfElementAt(
+          new_length + SharedFunctionInfo::kLiteralsOffset);
+      RecordSlot(code_map, literals_slot, *literals_slot);
+      new_length += SharedFunctionInfo::kEntryLength;
+    }
+
+    // Process context-independent entry in the optimized code map.
+    Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
+    if (shared_object->IsCode()) {
+      Code* shared_code = Code::cast(shared_object);
+      if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) {
+        code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex);
+      } else {
+        DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code)));
+        Object** slot =
+            code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex);
+        RecordSlot(code_map, slot, *slot);
+      }
+    }
+
+    // Trim the optimized code map if entries have been removed.
+    if (new_length < old_length) {
+      shared->TrimOptimizedCodeMap(old_length - new_length);
+    }
+  }
+}
+
+
 void MarkCompactCollector::ClearNonLiveReferences() {
   GCTracer::Scope gc_scope(heap()->tracer(),
                            GCTracer::Scope::MC_NONLIVEREFERENCES);
@@ -263,10 +263,9 @@ class MarkingDeque {
 // CodeFlusher collects candidates for code flushing during marking and
 // processes those candidates after marking has completed in order to
 // reset those functions referencing code objects that would otherwise
-// be unreachable. Code objects can be referenced in three ways:
+// be unreachable. Code objects can be referenced in two ways:
 //  - SharedFunctionInfo references unoptimized code.
 //  - JSFunction references either unoptimized or optimized code.
-//  - OptimizedCodeMap references optimized code.
 // We are not allowed to flush unoptimized code for functions that got
 // optimized or inlined into optimized code, because we might bailout
 // into the unoptimized code again during deoptimization.
@@ -274,26 +273,21 @@ class CodeFlusher {
  public:
   explicit CodeFlusher(Isolate* isolate)
       : isolate_(isolate),
-        jsfunction_candidates_head_(NULL),
-        shared_function_info_candidates_head_(NULL),
-        optimized_code_map_holder_head_(NULL) {}
+        jsfunction_candidates_head_(nullptr),
+        shared_function_info_candidates_head_(nullptr) {}
 
   inline void AddCandidate(SharedFunctionInfo* shared_info);
   inline void AddCandidate(JSFunction* function);
-  inline void AddOptimizedCodeMap(SharedFunctionInfo* code_map_holder);
 
-  void EvictOptimizedCodeMap(SharedFunctionInfo* code_map_holder);
   void EvictCandidate(SharedFunctionInfo* shared_info);
   void EvictCandidate(JSFunction* function);
 
   void ProcessCandidates() {
-    ProcessOptimizedCodeMaps();
     ProcessSharedFunctionInfoCandidates();
     ProcessJSFunctionCandidates();
   }
 
   void EvictAllCandidates() {
-    EvictOptimizedCodeMaps();
     EvictJSFunctionCandidates();
     EvictSharedFunctionInfoCandidates();
   }
@@ -301,10 +295,8 @@ class CodeFlusher {
   void IteratePointersToFromSpace(ObjectVisitor* v);
 
  private:
-  void ProcessOptimizedCodeMaps();
   void ProcessJSFunctionCandidates();
   void ProcessSharedFunctionInfoCandidates();
-  void EvictOptimizedCodeMaps();
   void EvictJSFunctionCandidates();
   void EvictSharedFunctionInfoCandidates();
 
@@ -321,15 +313,9 @@ class CodeFlusher {
                                        SharedFunctionInfo* next_candidate);
   static inline void ClearNextCandidate(SharedFunctionInfo* candidate);
 
-  static inline SharedFunctionInfo* GetNextCodeMap(SharedFunctionInfo* holder);
-  static inline void SetNextCodeMap(SharedFunctionInfo* holder,
-                                    SharedFunctionInfo* next_holder);
-  static inline void ClearNextCodeMap(SharedFunctionInfo* holder);
-
   Isolate* isolate_;
   JSFunction* jsfunction_candidates_head_;
   SharedFunctionInfo* shared_function_info_candidates_head_;
-  SharedFunctionInfo* optimized_code_map_holder_head_;
 
   DISALLOW_COPY_AND_ASSIGN(CodeFlusher);
 };
@@ -694,10 +680,14 @@ class MarkCompactCollector {
   // collections when incremental marking is aborted.
   void AbortWeakCollections();
 
 
   void ProcessAndClearWeakCells();
   void AbortWeakCells();
 
+  // After all reachable objects have been marked, those entries within
+  // optimized code maps that became unreachable are removed, potentially
+  // trimming or clearing out the entire optimized code map.
+  void ProcessAndClearOptimizedCodeMaps();
+
   // -----------------------------------------------------------------------
   // Phase 2: Sweeping to clear mark bits and free non-live objects for
   // a non-compacting collection.
@@ -443,23 +443,23 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
   if (FLAG_cleanup_code_caches_at_gc) {
     shared->ClearTypeFeedbackInfoAtGCTime();
   }
-  if ((FLAG_flush_optimized_code_cache ||
-       heap->isolate()->serializer_enabled()) &&
-      !shared->optimized_code_map()->IsSmi()) {
-    // Always flush the optimized code map if requested by flag.
-    shared->ClearOptimizedCodeMap();
-  }
-  MarkCompactCollector* collector = heap->mark_compact_collector();
-  if (collector->is_code_flushing_enabled()) {
-    if (!shared->optimized_code_map()->IsSmi()) {
-      // Add the shared function info holding an optimized code map to
-      // the code flusher for processing of code maps after marking.
-      collector->code_flusher()->AddOptimizedCodeMap(shared);
-      // Treat some references within the code map weakly by marking the
-      // code map itself but not pushing it onto the marking deque.
-      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
-      MarkOptimizedCodeMap(heap, code_map);
-    }
+  if (FLAG_flush_optimized_code_cache ||
+      heap->isolate()->serializer_enabled()) {
+    if (!shared->optimized_code_map()->IsSmi()) {
+      // Always flush the optimized code map if requested by flag.
+      shared->ClearOptimizedCodeMap();
+    }
+  } else {
+    if (!shared->optimized_code_map()->IsSmi()) {
+      // Treat some references within the code map weakly by marking the
+      // code map itself but not pushing it onto the marking deque. The
+      // map will be processed after marking.
+      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
+      MarkOptimizedCodeMap(heap, code_map);
+    }
+  }
+  MarkCompactCollector* collector = heap->mark_compact_collector();
+  if (collector->is_code_flushing_enabled()) {
     if (IsFlushable(heap, shared)) {
       // This function's code looks flushable. But we have to postpone
       // the decision until we see all functions that point to the same
@@ -473,6 +473,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
       return;
     }
   } else {
+    // TODO(mstarzinger): Drop this case, it shouldn't be done here!
     if (!shared->optimized_code_map()->IsSmi()) {
       // Flush optimized code map on major GCs without code flushing,
       // needed because cached code doesn't contain breakpoints.
@@ -11102,21 +11102,28 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
     DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
   }
 #endif
 
+  if (Heap::ShouldZapGarbage()) {
+    // Zap any old optimized code map for heap-verifier.
+    if (!shared->optimized_code_map()->IsSmi()) {
+      FixedArray* old_code_map = FixedArray::cast(shared->optimized_code_map());
+      old_code_map->FillWithHoles(0, old_code_map->length());
+    }
+  }
+
   shared->set_optimized_code_map(*new_code_map);
 }
 
 
 void SharedFunctionInfo::ClearOptimizedCodeMap() {
-  FixedArray* code_map = FixedArray::cast(optimized_code_map());
-
-  // If the next map link slot is already used then the function was
-  // enqueued with code flushing and we remove it now.
-  if (!code_map->get(kNextMapIndex)->IsUndefined()) {
-    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
-    flusher->EvictOptimizedCodeMap(this);
-  }
-
-  DCHECK(code_map->get(kNextMapIndex)->IsUndefined());
+  if (Heap::ShouldZapGarbage()) {
+    // Zap any old optimized code map for heap-verifier.
+    if (!optimized_code_map()->IsSmi()) {
+      FixedArray* old_code_map = FixedArray::cast(optimized_code_map());
+      old_code_map->FillWithHoles(0, old_code_map->length());
+    }
+  }
   set_optimized_code_map(Smi::FromInt(0));
 }
@@ -6555,9 +6555,8 @@ class SharedFunctionInfo: public HeapObject {
                                Handle<Object> script_object);
 
   // Layout description of the optimized code map.
-  static const int kNextMapIndex = 0;
-  static const int kSharedCodeIndex = 1;
-  static const int kEntriesStart = 2;
+  static const int kSharedCodeIndex = 0;
+  static const int kEntriesStart = 1;
   static const int kContextOffset = 0;
   static const int kCachedCodeOffset = 1;
   static const int kLiteralsOffset = 2;
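
As a footnote to the last hunk, a tiny stand-alone sketch of the indexing implied by the new layout constants (kSharedCodeIndex = 0, kEntriesStart = 1, four slots per entry). A plain vector stands in for V8's FixedArray, and the kOsrAstIdOffset value of 3 is an assumption inferred from kEntryLength == 4.

#include <cstdio>
#include <vector>

constexpr int kSharedCodeIndex = 0;   // context-independent code slot
constexpr int kEntriesStart = 1;      // first context-dependent entry
constexpr int kEntryLength = 4;       // [context, code, literals, ast-id]
constexpr int kContextOffset = 0;
constexpr int kCachedCodeOffset = 1;
constexpr int kLiteralsOffset = 2;
constexpr int kOsrAstIdOffset = 3;    // assumption: remaining slot of the entry

int main() {
  // A map holding the shared-code slot plus one four-slot entry.
  std::vector<int> code_map(kEntriesStart + kEntryLength, 0);
  for (int i = kEntriesStart; i < static_cast<int>(code_map.size());
       i += kEntryLength) {
    std::printf("entry@%d: context=%d code=%d literals=%d ast-id=%d\n", i,
                i + kContextOffset, i + kCachedCodeOffset, i + kLiteralsOffset,
                i + kOsrAstIdOffset);
  }
  return 0;
}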