Revert "Use WeakCells in the optimized code map rather than traversing in pause."
Reason for revert:
Probably causes GC stress test failures.

TBR=mvstanton@chromium.org
BUG=
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review URL: https://codereview.chromium.org/1493393002

Cr-Commit-Position: refs/heads/master@{#32574}
parent ef3bee66d1
commit 39b207ddac
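For orientation, a minimal, hypothetical sketch in standard C++ of the two caching strategies this revert switches between: map entries reached through weak references that the collector clears on its own, versus plain entries that an explicit pass after marking must prune. The names Code, Context, WeakEntry, RawEntry and PruneAfterMarking are illustrative stand-ins, not V8 code; only ProcessAndClearOptimizedCodeMaps, referenced in the comment, appears in the diff below.

// Hypothetical sketch, not V8 internals.
#include <algorithm>
#include <memory>
#include <vector>

struct Code {};     // stand-in for a cached optimized-code object
struct Context {};  // stand-in for a native context

// Approach being reverted: each slot is reached through a weak handle, so a
// dead referent simply reads back as empty and no traversal during the GC
// pause is needed to keep the map consistent.
struct WeakEntry {
  std::weak_ptr<Context> context;
  std::weak_ptr<Code> code;
};

// Approach being restored: slots hold the objects directly; a pass analogous
// to ProcessAndClearOptimizedCodeMaps() must walk every entry after marking
// and drop those whose referents were not marked live.
struct RawEntry {
  const Context* context;
  const Code* code;
};

template <typename IsMarked>
void PruneAfterMarking(std::vector<RawEntry>* map, IsMarked is_marked) {
  map->erase(std::remove_if(map->begin(), map->end(),
                            [&](const RawEntry& e) {
                              return !is_marked(e.context) || !is_marked(e.code);
                            }),
             map->end());
}

The diff below removes the WeakCell indirection from the optimized code map entries and restores the explicit post-marking traversal.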
@@ -1867,29 +1867,19 @@ void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  context_slot = Add<HLoadNamedField>(context_slot, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  HValue* code_object = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset);
  code_object = Add<HLoadNamedField>(code_object, nullptr,
                                     HObjectAccess::ForWeakCellValue());
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->And();
  builder->IfNot<HCompareObjectEqAndBranch>(code_object,
                                            graph()->GetConstant0());
                                            graph()->GetConstantUndefined());
  builder->Then();
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);
  literals = Add<HLoadNamedField>(literals, nullptr,
                                  HObjectAccess::ForWeakCellValue());
  IfBuilder maybe_deopt(this);
  maybe_deopt.If<HCompareObjectEqAndBranch>(literals, graph()->GetConstant0());
  maybe_deopt.ThenDeopt(Deoptimizer::kLiteralsWereDisposed);
  maybe_deopt.End();

  BuildInstallOptimizedCode(js_function, native_context, code_object, literals);

@@ -2013,10 +2003,8 @@ void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* shared_code =
        Add<HLoadNamedField>(optimized_map, nullptr,
                             HObjectAccess::ForOptimizedCodeMapSharedCode());
    shared_code = Add<HLoadNamedField>(shared_code, nullptr,
                                       HObjectAccess::ForWeakCellValue());
    shared_code_check.IfNot<HCompareObjectEqAndBranch>(
        shared_code, graph()->GetConstant0());
        shared_code, graph()->GetConstantUndefined());
    shared_code_check.Then();
    {
      // Store the context-independent optimized code.

@@ -224,7 +224,6 @@ enum BindingFlags {
  V(SCRIPT_CONTEXT_TABLE_INDEX, ScriptContextTable, script_context_table) \
  V(SCRIPT_FUNCTION_INDEX, JSFunction, script_function) \
  V(SECURITY_TOKEN_INDEX, Object, security_token) \
  V(SELF_WEAK_CELL_INDEX, WeakCell, self_weak_cell) \
  V(SET_ITERATOR_MAP_INDEX, Map, set_iterator_map) \
  V(SHARED_ARRAY_BUFFER_FUN_INDEX, JSFunction, shared_array_buffer_fun) \
  V(SLOPPY_ARGUMENTS_MAP_INDEX, Map, sloppy_arguments_map) \

@@ -335,7 +335,6 @@ class OptimizedFunctionVisitor BASE_EMBEDDED {
  V(kInsufficientTypeFeedbackForRHSOfBinaryOperation, \
    "Insufficient type feedback for RHS of binary operation") \
  V(kKeyIsNegative, "key is negative") \
  V(kLiteralsWereDisposed, "literals have been disposed") \
  V(kLostPrecision, "lost precision") \
  V(kLostPrecisionOrNaN, "lost precision or NaN") \
  V(kMementoFound, "memento found") \

@@ -728,8 +728,6 @@ Handle<Context> Factory::NewNativeContext() {
  context->set_native_context(*context);
  context->set_js_array_maps(*undefined_value());
  context->set_errors_thrown(Smi::FromInt(0));
  Handle<WeakCell> weak_cell = NewWeakCell(context);
  context->set_self_weak_cell(*weak_cell);
  DCHECK(context->IsNativeContext());
  return context;
}

@@ -505,6 +505,7 @@ void GCTracer::PrintNVP() const {
          "mark_weakrefs=%.1f "
          "mark_globalhandles=%.1f "
          "mark_codeflush=%.1f "
          "mark_optimizedcodemaps=%.1f "
          "store_buffer_clear=%.1f "
          "slots_buffer_clear=%.1f "
          "sweep=%.2f "

@@ -573,6 +574,7 @@ void GCTracer::PrintNVP() const {
          current_.scopes[Scope::MC_MARK_WEAK_REFERENCES],
          current_.scopes[Scope::MC_MARK_GLOBAL_HANDLES],
          current_.scopes[Scope::MC_MARK_CODE_FLUSH],
          current_.scopes[Scope::MC_MARK_OPTIMIZED_CODE_MAPS],
          current_.scopes[Scope::MC_STORE_BUFFER_CLEAR],
          current_.scopes[Scope::MC_SLOTS_BUFFER_CLEAR],
          current_.scopes[Scope::MC_SWEEP],

@@ -109,6 +109,7 @@ class GCTracer {
    MC_MARK_WEAK_REFERENCES,
    MC_MARK_GLOBAL_HANDLES,
    MC_MARK_CODE_FLUSH,
    MC_MARK_OPTIMIZED_CODE_MAPS,
    MC_STORE_BUFFER_CLEAR,
    MC_SLOTS_BUFFER_CLEAR,
    MC_SWEEP,

@@ -2780,14 +2780,8 @@ void Heap::CreateInitialObjects() {
  }

  {
    Handle<WeakCell> cell = factory->NewWeakCell(factory->undefined_value());
    set_empty_weak_cell(*cell);
    cell->clear();

    Handle<FixedArray> cleared_optimized_code_map =
        factory->NewFixedArray(SharedFunctionInfo::kEntriesStart, TENURED);
    cleared_optimized_code_map->set(SharedFunctionInfo::kSharedCodeIndex,
                                    *cell);
    STATIC_ASSERT(SharedFunctionInfo::kEntriesStart == 1 &&
                  SharedFunctionInfo::kSharedCodeIndex == 0);
    set_cleared_optimized_code_map(*cleared_optimized_code_map);

@@ -189,7 +189,6 @@ namespace internal {
  V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos) \
  V(FixedArray, interpreter_table, InterpreterTable) \
  V(Map, bytecode_array_map, BytecodeArrayMap) \
  V(WeakCell, empty_weak_cell, EmptyWeakCell) \
  V(BytecodeArray, empty_bytecode_array, EmptyBytecodeArray)

@@ -448,7 +447,6 @@
  V(JSMessageObjectMap) \
  V(ForeignMap) \
  V(NeanderMap) \
  V(EmptyWeakCell) \
  V(empty_string) \
  PRIVATE_SYMBOL_LIST(V)

@@ -2133,6 +2133,13 @@ void MarkCompactCollector::AfterMarking() {
    code_flusher_->ProcessCandidates();
  }

  // Process and clear all optimized code maps.
  if (!FLAG_flush_optimized_code_cache) {
    GCTracer::Scope gc_scope(heap()->tracer(),
                             GCTracer::Scope::MC_MARK_OPTIMIZED_CODE_MAPS);
    ProcessAndClearOptimizedCodeMaps();
  }

  if (FLAG_track_gc_object_stats) {
    if (FLAG_trace_gc_object_stats) {
      heap()->object_stats_->TraceObjectStats();

@@ -2142,6 +2149,72 @@
}


void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() {
  SharedFunctionInfo::Iterator iterator(isolate());
  while (SharedFunctionInfo* shared = iterator.Next()) {
    if (shared->OptimizedCodeMapIsCleared()) continue;

    // Process context-dependent entries in the optimized code map.
    FixedArray* code_map = shared->optimized_code_map();
    int new_length = SharedFunctionInfo::kEntriesStart;
    int old_length = code_map->length();
    for (int i = SharedFunctionInfo::kEntriesStart; i < old_length;
         i += SharedFunctionInfo::kEntryLength) {
      // Each entry contains [ context, code, literals, ast-id ] as fields.
      STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
      Context* context =
          Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset));
      HeapObject* code = HeapObject::cast(
          code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
      FixedArray* literals = FixedArray::cast(
          code_map->get(i + SharedFunctionInfo::kLiteralsOffset));
      Smi* ast_id =
          Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset));
      if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue;
      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context)));
      if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue;
      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code)));
      if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue;
      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals)));
      // Move every slot in the entry and record slots when needed.
      code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code);
      code_map->set(new_length + SharedFunctionInfo::kContextOffset, context);
      code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals);
      code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id);
      Object** code_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kCachedCodeOffset);
      RecordSlot(code_map, code_slot, *code_slot);
      Object** context_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kContextOffset);
      RecordSlot(code_map, context_slot, *context_slot);
      Object** literals_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kLiteralsOffset);
      RecordSlot(code_map, literals_slot, *literals_slot);
      new_length += SharedFunctionInfo::kEntryLength;
    }

    // Process context-independent entry in the optimized code map.
    Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
    if (shared_object->IsCode()) {
      Code* shared_code = Code::cast(shared_object);
      if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) {
        code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex);
      } else {
        DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code)));
        Object** slot =
            code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex);
        RecordSlot(code_map, slot, *slot);
      }
    }

    // Trim the optimized code map if entries have been removed.
    if (new_length < old_length) {
      shared->TrimOptimizedCodeMap(old_length - new_length);
    }
  }
}


void MarkCompactCollector::ProcessWeakReferences() {
  // This should be done before processing weak cells because it checks
  // mark bits of maps in weak cells.

@@ -671,6 +671,11 @@ class MarkCompactCollector {
  void ProcessAndClearTransitionArrays();
  void AbortTransitionArrays();

  // After all reachable objects have been marked, those entries within
  // optimized code maps that became unreachable are removed, potentially
  // trimming or clearing out the entire optimized code map.
  void ProcessAndClearOptimizedCodeMaps();

  // Process non-live references in maps and optimized code.
  void ProcessWeakReferences();

@@ -442,6 +442,14 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
      // Always flush the optimized code map if requested by flag.
      shared->ClearOptimizedCodeMap();
    }
  } else {
    if (!shared->OptimizedCodeMapIsCleared()) {
      // Treat some references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque. The
      // map will be processed after marking.
      FixedArray* code_map = shared->optimized_code_map();
      MarkOptimizedCodeMap(heap, code_map);
    }
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {

@@ -580,6 +588,23 @@ void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
    Heap* heap, FixedArray* code_map) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, code_map)) return;

  // Mark the context-independent entry in the optimized code map. Depending on
  // the age of the code object, we treat it as a strong or a weak reference.
  Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
  if (FLAG_turbo_preserve_shared_code && shared_object->IsCode() &&
      FLAG_age_code && !Code::cast(shared_object)->IsOld()) {
    StaticVisitor::VisitPointer(
        heap, code_map,
        code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {

@@ -379,6 +379,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Mark pointers in the optimized code map that should act as strong
  // references, possibly treating some entries weak.
  static void MarkOptimizedCodeMap(Heap* heap, FixedArray* code_map);

  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

@@ -2030,10 +2030,7 @@ Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }


void WeakCell::clear() {
  // Either the garbage collector is clearing the cell or we are simply
  // initializing the root empty weak cell.
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
         this == GetHeap()->empty_weak_cell());
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}

src/objects.cc
@@ -11889,8 +11889,7 @@ void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap(
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
  // Empty code maps are unsupported.
  if (shared->OptimizedCodeMapIsCleared()) return;
  Handle<WeakCell> cell = isolate->factory()->NewWeakCell(code);
  shared->optimized_code_map()->set(kSharedCodeIndex, *cell);
  shared->optimized_code_map()->set(kSharedCodeIndex, *code);
}

@@ -11908,74 +11907,45 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
  STATIC_ASSERT(kEntryLength == 4);
  Handle<FixedArray> new_code_map;
  int entry;

  if (shared->OptimizedCodeMapIsCleared()) {
    new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED);
    new_code_map->set(kSharedCodeIndex, *isolate->factory()->empty_weak_cell(),
                      SKIP_WRITE_BARRIER);
    entry = kEntriesStart;
  } else {
    Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate);
    entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id);
    if (entry > kSharedCodeIndex) {
      // Found an existing context-specific entry, it must not contain any code.
      DCHECK(WeakCell::cast(old_code_map->get(entry + kCachedCodeOffset))
                 ->cleared());
      DCHECK_EQ(isolate->heap()->undefined_value(),
                old_code_map->get(entry + kCachedCodeOffset));
      // Just set the code and literals to the entry.
      Handle<WeakCell> code_cell = code->IsUndefined()
                                       ? isolate->factory()->empty_weak_cell()
                                       : isolate->factory()->NewWeakCell(code);
      Handle<WeakCell> literals_cell =
          isolate->factory()->NewWeakCell(literals);
      old_code_map->set(entry + kCachedCodeOffset, *code_cell);
      old_code_map->set(entry + kLiteralsOffset, *literals_cell);
      old_code_map->set(entry + kCachedCodeOffset, *code);
      old_code_map->set(entry + kLiteralsOffset, *literals);
      return;
    }

    // Can we reuse an entry?
    DCHECK(entry < kEntriesStart);
    int length = old_code_map->length();
    for (int i = kEntriesStart; i < length; i += kEntryLength) {
      if (WeakCell::cast(old_code_map->get(i + kContextOffset))->cleared()) {
        entry = i;
        break;
      }
    }

    if (entry < kEntriesStart) {
      // Copy old optimized code map and append one new entry.
      new_code_map = isolate->factory()->CopyFixedArrayAndGrow(
          old_code_map, kEntryLength, TENURED);
      // TODO(mstarzinger): Temporary workaround. The allocation above might
      // have flushed the optimized code map and the copy we created is full of
      // holes. For now we just give up on adding the entry and pretend it got
      // flushed.
      if (shared->OptimizedCodeMapIsCleared()) return;
      entry = old_code_map->length();
    }
    // Copy old optimized code map and append one new entry.
    new_code_map = isolate->factory()->CopyFixedArrayAndGrow(
        old_code_map, kEntryLength, TENURED);
    // TODO(mstarzinger): Temporary workaround. The allocation above might have
    // flushed the optimized code map and the copy we created is full of holes.
    // For now we just give up on adding the entry and pretend it got flushed.
    if (shared->OptimizedCodeMapIsCleared()) return;
    entry = old_code_map->length();
  }

  Handle<WeakCell> code_cell = code->IsUndefined()
                                   ? isolate->factory()->empty_weak_cell()
                                   : isolate->factory()->NewWeakCell(code);
  Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals);
  WeakCell* context_cell = native_context->self_weak_cell();

  new_code_map->set(entry + kContextOffset, context_cell);
  new_code_map->set(entry + kCachedCodeOffset, *code_cell);
  new_code_map->set(entry + kLiteralsOffset, *literals_cell);
  new_code_map->set(entry + kContextOffset, *native_context);
  new_code_map->set(entry + kCachedCodeOffset, *code);
  new_code_map->set(entry + kLiteralsOffset, *literals);
  new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt()));

#ifdef DEBUG
  for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
    WeakCell* cell = WeakCell::cast(new_code_map->get(i + kContextOffset));
    DCHECK(cell->cleared() || cell->value()->IsNativeContext());
    cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset));
    DCHECK(cell->cleared() ||
           (cell->value()->IsCode() &&
            Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION));
    cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset));
    DCHECK(cell->cleared() || cell->value()->IsFixedArray());
    DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext());
    Object* code = new_code_map->get(i + kCachedCodeOffset);
    if (code != isolate->heap()->undefined_value()) {
      DCHECK(code->IsCode());
      DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION);
    }
    DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
    DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
  }
#endif

@@ -12012,10 +11982,8 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
  int dst = kEntriesStart;
  int length = code_map->length();
  for (int src = kEntriesStart; src < length; src += kEntryLength) {
    DCHECK(WeakCell::cast(code_map->get(src))->cleared() ||
           WeakCell::cast(code_map->get(src))->value()->IsNativeContext());
    if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() ==
        optimized_code) {
    DCHECK(code_map->get(src)->IsNativeContext());
    if (code_map->get(src + kCachedCodeOffset) == optimized_code) {
      BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
      if (FLAG_trace_opt) {
        PrintF("[evicting entry from optimizing code map (%s) for ", reason);

@@ -12032,8 +12000,7 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
      }
      // In case of non-OSR entry just clear the code in order to proceed
      // sharing literals.
      code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(),
                    SKIP_WRITE_BARRIER);
      code_map->set_undefined(src + kCachedCodeOffset);
    }

    // Keep the src entry by copying it to the dst entry.

@@ -12048,11 +12015,9 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
    }
    dst += kEntryLength;
  }
  if (WeakCell::cast(code_map->get(kSharedCodeIndex))->value() ==
      optimized_code) {
  if (code_map->get(kSharedCodeIndex) == optimized_code) {
    // Evict context-independent code as well.
    code_map->set(kSharedCodeIndex, heap->empty_weak_cell(),
                  SKIP_WRITE_BARRIER);
    code_map->set_undefined(kSharedCodeIndex);
    if (FLAG_trace_opt) {
      PrintF("[evicting entry from optimizing code map (%s) for ", reason);
      ShortPrint();

@@ -12064,7 +12029,7 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
    heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map,
                                                           length - dst);
    if (code_map->length() == kEntriesStart &&
        WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) {
        code_map->get(kSharedCodeIndex)->IsUndefined()) {
      ClearOptimizedCodeMap();
    }
  }

@@ -12079,7 +12044,7 @@ void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
  GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map,
                                                              shrink_by);
  if (code_map->length() == kEntriesStart &&
      WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) {
      code_map->get(kSharedCodeIndex)->IsUndefined()) {
    ClearOptimizedCodeMap();
  }
}

@@ -13324,14 +13289,12 @@ int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context,
    int length = optimized_code_map->length();
    Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
    for (int i = kEntriesStart; i < length; i += kEntryLength) {
      if (WeakCell::cast(optimized_code_map->get(i + kContextOffset))
              ->value() == native_context &&
      if (optimized_code_map->get(i + kContextOffset) == native_context &&
          optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
        return i;
      }
    }
    Object* shared_code =
        WeakCell::cast(optimized_code_map->get(kSharedCodeIndex))->value();
    Object* shared_code = optimized_code_map->get(kSharedCodeIndex);
    if (shared_code->IsCode() && osr_ast_id.IsNone()) {
      return kSharedCodeIndex;
    }

@@ -13347,22 +13310,13 @@ CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
  if (entry != kNotFound) {
    FixedArray* code_map = optimized_code_map();
    if (entry == kSharedCodeIndex) {
      // We know the weak cell isn't cleared because we made sure of it in
      // SearchOptimizedCodeMapEntry and performed no allocations since that
      // call.
      result = {
          Code::cast(WeakCell::cast(code_map->get(kSharedCodeIndex))->value()),
          nullptr};
      result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr};

    } else {
      DCHECK_LE(entry + kEntryLength, code_map->length());
      WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset));
      WeakCell* literals_cell =
          WeakCell::cast(code_map->get(entry + kLiteralsOffset));

      result = {cell->cleared() ? nullptr : Code::cast(cell->value()),
                literals_cell->cleared()
                    ? nullptr
                    : LiteralsArray::cast(literals_cell->value())};
      Object* code = code_map->get(entry + kCachedCodeOffset);
      result = {code->IsUndefined() ? nullptr : Code::cast(code),
                LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))};
    }
  }
  if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() &&