Use WeakCells in the optimized code map rather than traversing in pause.

It's expensive to walk all shared function infos during the GC atomic pause.
Instead, use WeakCells to implement this structure without manual clearing.

This is a reland; the previous landing was reverted due to a bug when reusing
entries in the optimized code map.

BUG=
Review URL: https://codereview.chromium.org/1508703002
Cr-Commit-Position: refs/heads/master@{#32696}
Commit e56fe8460a (parent 5dffa35350)
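The core idea, sketched as a standalone model (not V8 code): every optimized
code map slot that used to hold a raw Context, Code or LiteralsArray pointer
now holds a WeakCell, so the garbage collector clears dead entries as part of
ordinary weak-reference processing instead of the collector walking every
SharedFunctionInfo during the atomic pause. In the sketch below std::weak_ptr
stands in for WeakCell; all names are illustrative.

#include <memory>
#include <vector>

struct Context {};        // stand-ins for the V8 heap objects
struct Code {};
struct LiteralsArray {};

// One record of the optimized code map; every reference is weak, so a dead
// context, code or literals object simply makes the corresponding cell expire.
struct CodeMapEntry {
  std::weak_ptr<Context> context;         // kContextOffset
  std::weak_ptr<Code> code;               // kCachedCodeOffset
  std::weak_ptr<LiteralsArray> literals;  // kLiteralsOffset
  int osr_ast_id;                         // kOsrAstIdOffset (a Smi in V8)
};

// The map itself: one context-independent slot (kSharedCodeIndex) followed by
// kEntryLength-sized records, modeled here as a vector of entries.
struct OptimizedCodeMap {
  std::weak_ptr<Code> shared_code;
  std::vector<CodeMapEntry> entries;
};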
@@ -1870,19 +1870,29 @@ void CodeStubGraphBuilderBase::BuildCheckAndInstallOptimizedCode(
  HValue* osr_ast_id_none = Add<HConstant>(BailoutId::None().ToInt());
  HValue* context_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kContextOffset);
  context_slot = Add<HLoadNamedField>(context_slot, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  HValue* osr_ast_slot = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kOsrAstIdOffset);
  HValue* code_object = LoadFromOptimizedCodeMap(
      optimized_map, map_index, SharedFunctionInfo::kCachedCodeOffset);
  code_object = Add<HLoadNamedField>(code_object, nullptr,
                                     HObjectAccess::ForWeakCellValue());
  builder->If<HCompareObjectEqAndBranch>(native_context,
                                         context_slot);
  builder->AndIf<HCompareObjectEqAndBranch>(osr_ast_slot, osr_ast_id_none);
  builder->And();
  builder->IfNot<HCompareObjectEqAndBranch>(code_object,
                                            graph()->GetConstantUndefined());
                                            graph()->GetConstant0());
  builder->Then();
  HValue* literals = LoadFromOptimizedCodeMap(optimized_map,
      map_index, SharedFunctionInfo::kLiteralsOffset);
  literals = Add<HLoadNamedField>(literals, nullptr,
                                  HObjectAccess::ForWeakCellValue());
  IfBuilder maybe_deopt(this);
  maybe_deopt.If<HCompareObjectEqAndBranch>(literals, graph()->GetConstant0());
  maybe_deopt.ThenDeopt(Deoptimizer::kLiteralsWereDisposed);
  maybe_deopt.End();

  BuildInstallOptimizedCode(js_function, native_context, code_object, literals);
@@ -2006,8 +2016,10 @@ void CodeStubGraphBuilderBase::BuildInstallFromOptimizedCodeMap(
    HValue* shared_code =
        Add<HLoadNamedField>(optimized_map, nullptr,
                             HObjectAccess::ForOptimizedCodeMapSharedCode());
    shared_code = Add<HLoadNamedField>(shared_code, nullptr,
                                       HObjectAccess::ForWeakCellValue());
    shared_code_check.IfNot<HCompareObjectEqAndBranch>(
        shared_code, graph()->GetConstantUndefined());
        shared_code, graph()->GetConstant0());
    shared_code_check.Then();
    {
      // Store the context-independent optimized code.
@@ -229,6 +229,7 @@ enum BindingFlags {
  V(SCRIPT_CONTEXT_TABLE_INDEX, ScriptContextTable, script_context_table) \
  V(SCRIPT_FUNCTION_INDEX, JSFunction, script_function) \
  V(SECURITY_TOKEN_INDEX, Object, security_token) \
  V(SELF_WEAK_CELL_INDEX, WeakCell, self_weak_cell) \
  V(SET_ITERATOR_MAP_INDEX, Map, set_iterator_map) \
  V(SHARED_ARRAY_BUFFER_FUN_INDEX, JSFunction, shared_array_buffer_fun) \
  V(SLOPPY_ARGUMENTS_MAP_INDEX, Map, sloppy_arguments_map) \
@@ -335,6 +335,7 @@ class OptimizedFunctionVisitor BASE_EMBEDDED {
  V(kInsufficientTypeFeedbackForRHSOfBinaryOperation, \
    "Insufficient type feedback for RHS of binary operation") \
  V(kKeyIsNegative, "key is negative") \
  V(kLiteralsWereDisposed, "literals have been disposed") \
  V(kLostPrecision, "lost precision") \
  V(kLostPrecisionOrNaN, "lost precision or NaN") \
  V(kMementoFound, "memento found") \
@@ -728,6 +728,8 @@ Handle<Context> Factory::NewNativeContext() {
  context->set_native_context(*context);
  context->set_js_array_maps(*undefined_value());
  context->set_errors_thrown(Smi::FromInt(0));
  Handle<WeakCell> weak_cell = NewWeakCell(context);
  context->set_self_weak_cell(*weak_cell);
  DCHECK(context->IsNativeContext());
  return context;
}
@@ -520,7 +520,6 @@ void GCTracer::PrintNVP() const {
        "mark_weakrefs=%.1f "
        "mark_globalhandles=%.1f "
        "mark_codeflush=%.1f "
        "mark_optimizedcodemaps=%.1f "
        "store_buffer_clear=%.1f "
        "slots_buffer_clear=%.1f "
        "sweep=%.2f "
@@ -592,7 +591,6 @@ void GCTracer::PrintNVP() const {
        current_.scopes[Scope::MC_MARK_WEAK_REFERENCES],
        current_.scopes[Scope::MC_MARK_GLOBAL_HANDLES],
        current_.scopes[Scope::MC_MARK_CODE_FLUSH],
        current_.scopes[Scope::MC_MARK_OPTIMIZED_CODE_MAPS],
        current_.scopes[Scope::MC_STORE_BUFFER_CLEAR],
        current_.scopes[Scope::MC_SLOTS_BUFFER_CLEAR],
        current_.scopes[Scope::MC_SWEEP],
@@ -109,7 +109,6 @@ class GCTracer {
    MC_MARK_WEAK_REFERENCES,
    MC_MARK_GLOBAL_HANDLES,
    MC_MARK_CODE_FLUSH,
    MC_MARK_OPTIMIZED_CODE_MAPS,
    MC_STORE_BUFFER_CLEAR,
    MC_SLOTS_BUFFER_CLEAR,
    MC_SWEEP,
@@ -2777,8 +2777,14 @@ void Heap::CreateInitialObjects() {
  }

  {
    Handle<WeakCell> cell = factory->NewWeakCell(factory->undefined_value());
    set_empty_weak_cell(*cell);
    cell->clear();

    Handle<FixedArray> cleared_optimized_code_map =
        factory->NewFixedArray(SharedFunctionInfo::kEntriesStart, TENURED);
    cleared_optimized_code_map->set(SharedFunctionInfo::kSharedCodeIndex,
                                    *cell);
    STATIC_ASSERT(SharedFunctionInfo::kEntriesStart == 1 &&
                  SharedFunctionInfo::kSharedCodeIndex == 0);
    set_cleared_optimized_code_map(*cleared_optimized_code_map);
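The empty_weak_cell root created above acts as a shared, pre-cleared sentinel:
any slot with nothing cached points at it, so emptying a slot never allocates
and a cleared() check uniformly means "no value". A standalone sketch of that
idea (stand-in names, not the V8 API):

#include <cassert>
#include <memory>

struct Code {};

// Model of a weak cell: cleared() is true once the referent has died (or if
// the cell was never given a value at all).
struct WeakCellModel {
  std::weak_ptr<Code> value;
  bool cleared() const { return value.expired(); }
};

// One canonical empty cell, created once at "heap setup".
static const std::shared_ptr<WeakCellModel> kEmptyWeakCell =
    std::make_shared<WeakCellModel>();

int main() {
  // Any slot that should be empty simply aliases the canonical cell.
  std::shared_ptr<WeakCellModel> shared_code_slot = kEmptyWeakCell;
  assert(shared_code_slot->cleared());
  return 0;
}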
@@ -189,6 +189,7 @@ namespace internal {
  V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos) \
  V(FixedArray, interpreter_table, InterpreterTable) \
  V(Map, bytecode_array_map, BytecodeArrayMap) \
  V(WeakCell, empty_weak_cell, EmptyWeakCell) \
  V(BytecodeArray, empty_bytecode_array, EmptyBytecodeArray)
@@ -449,6 +450,7 @@ namespace internal {
  V(JSMessageObjectMap) \
  V(ForeignMap) \
  V(NeanderMap) \
  V(EmptyWeakCell) \
  V(empty_string) \
  PRIVATE_SYMBOL_LIST(V)
@@ -2163,13 +2163,6 @@ void MarkCompactCollector::AfterMarking() {
    code_flusher_->ProcessCandidates();
  }

  // Process and clear all optimized code maps.
  if (!FLAG_flush_optimized_code_cache) {
    GCTracer::Scope gc_scope(heap()->tracer(),
                             GCTracer::Scope::MC_MARK_OPTIMIZED_CODE_MAPS);
    ProcessAndClearOptimizedCodeMaps();
  }

  if (FLAG_track_gc_object_stats) {
    if (FLAG_trace_gc_object_stats) {
      heap()->object_stats_->TraceObjectStats();
@@ -2179,72 +2172,6 @@ void MarkCompactCollector::AfterMarking() {
}


void MarkCompactCollector::ProcessAndClearOptimizedCodeMaps() {
  SharedFunctionInfo::Iterator iterator(isolate());
  while (SharedFunctionInfo* shared = iterator.Next()) {
    if (shared->OptimizedCodeMapIsCleared()) continue;

    // Process context-dependent entries in the optimized code map.
    FixedArray* code_map = shared->optimized_code_map();
    int new_length = SharedFunctionInfo::kEntriesStart;
    int old_length = code_map->length();
    for (int i = SharedFunctionInfo::kEntriesStart; i < old_length;
         i += SharedFunctionInfo::kEntryLength) {
      // Each entry contains [ context, code, literals, ast-id ] as fields.
      STATIC_ASSERT(SharedFunctionInfo::kEntryLength == 4);
      Context* context =
          Context::cast(code_map->get(i + SharedFunctionInfo::kContextOffset));
      HeapObject* code = HeapObject::cast(
          code_map->get(i + SharedFunctionInfo::kCachedCodeOffset));
      FixedArray* literals = FixedArray::cast(
          code_map->get(i + SharedFunctionInfo::kLiteralsOffset));
      Smi* ast_id =
          Smi::cast(code_map->get(i + SharedFunctionInfo::kOsrAstIdOffset));
      if (Marking::IsWhite(Marking::MarkBitFrom(context))) continue;
      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(context)));
      if (Marking::IsWhite(Marking::MarkBitFrom(code))) continue;
      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(code)));
      if (Marking::IsWhite(Marking::MarkBitFrom(literals))) continue;
      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(literals)));
      // Move every slot in the entry and record slots when needed.
      code_map->set(new_length + SharedFunctionInfo::kCachedCodeOffset, code);
      code_map->set(new_length + SharedFunctionInfo::kContextOffset, context);
      code_map->set(new_length + SharedFunctionInfo::kLiteralsOffset, literals);
      code_map->set(new_length + SharedFunctionInfo::kOsrAstIdOffset, ast_id);
      Object** code_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kCachedCodeOffset);
      RecordSlot(code_map, code_slot, *code_slot);
      Object** context_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kContextOffset);
      RecordSlot(code_map, context_slot, *context_slot);
      Object** literals_slot = code_map->RawFieldOfElementAt(
          new_length + SharedFunctionInfo::kLiteralsOffset);
      RecordSlot(code_map, literals_slot, *literals_slot);
      new_length += SharedFunctionInfo::kEntryLength;
    }

    // Process context-independent entry in the optimized code map.
    Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
    if (shared_object->IsCode()) {
      Code* shared_code = Code::cast(shared_object);
      if (Marking::IsWhite(Marking::MarkBitFrom(shared_code))) {
        code_map->set_undefined(SharedFunctionInfo::kSharedCodeIndex);
      } else {
        DCHECK(Marking::IsBlack(Marking::MarkBitFrom(shared_code)));
        Object** slot =
            code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex);
        RecordSlot(code_map, slot, *slot);
      }
    }

    // Trim the optimized code map if entries have been removed.
    if (new_length < old_length) {
      shared->TrimOptimizedCodeMap(old_length - new_length);
    }
  }
}


void MarkCompactCollector::ClearNonLiveReferences() {
  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_CLEAR);
@@ -685,11 +685,6 @@ class MarkCompactCollector {

  void AbortTransitionArrays();

  // After all reachable objects have been marked, those entries within
  // optimized code maps that became unreachable are removed, potentially
  // trimming or clearing out the entire optimized code map.
  void ProcessAndClearOptimizedCodeMaps();

  // -----------------------------------------------------------------------
  // Phase 2: Sweeping to clear mark bits and free non-live objects for
  // a non-compacting collection.
@@ -454,14 +454,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
      // Always flush the optimized code map if requested by flag.
      shared->ClearOptimizedCodeMap();
    }
  } else {
    if (!shared->OptimizedCodeMapIsCleared()) {
      // Treat some references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque. The
      // map will be processed after marking.
      FixedArray* code_map = shared->optimized_code_map();
      MarkOptimizedCodeMap(heap, code_map);
    }
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
@@ -578,23 +570,6 @@ void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
    Heap* heap, FixedArray* code_map) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, code_map)) return;

  // Mark the context-independent entry in the optimized code map. Depending on
  // the age of the code object, we treat it as a strong or a weak reference.
  Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
  if (FLAG_turbo_preserve_shared_code && shared_object->IsCode() &&
      FLAG_age_code && !Code::cast(shared_object)->IsOld()) {
    StaticVisitor::VisitPointer(
        heap, code_map,
        code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex));
  }
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
@@ -375,10 +375,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
  // Mark pointers in a Map treating some elements of the descriptor array weak.
  static void MarkMapContents(Heap* heap, Map* map);

  // Mark pointers in the optimized code map that should act as strong
  // references, possibly treating some entries weak.
  static void MarkOptimizedCodeMap(Heap* heap, FixedArray* code_map);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));
@@ -2029,7 +2029,10 @@ Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }


void WeakCell::clear() {
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT);
  // Either the garbage collector is clearing the cell or we are simply
  // initializing the root empty weak cell.
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
         this == GetHeap()->empty_weak_cell());
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}
src/objects.cc
@@ -11941,7 +11941,8 @@ void SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap(
  DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
  // Empty code maps are unsupported.
  if (shared->OptimizedCodeMapIsCleared()) return;
  shared->optimized_code_map()->set(kSharedCodeIndex, *code);
  Handle<WeakCell> cell = isolate->factory()->NewWeakCell(code);
  shared->optimized_code_map()->set(kSharedCodeIndex, *cell);
}
@@ -11959,66 +11960,87 @@ void SharedFunctionInfo::AddToOptimizedCodeMap(
  STATIC_ASSERT(kEntryLength == 4);
  Handle<FixedArray> new_code_map;
  int entry;

  if (shared->OptimizedCodeMapIsCleared()) {
    new_code_map = isolate->factory()->NewFixedArray(kInitialLength, TENURED);
    new_code_map->set(kSharedCodeIndex, *isolate->factory()->empty_weak_cell(),
                      SKIP_WRITE_BARRIER);
    entry = kEntriesStart;
  } else {
    Handle<FixedArray> old_code_map(shared->optimized_code_map(), isolate);
    entry = shared->SearchOptimizedCodeMapEntry(*native_context, osr_ast_id);
    if (entry > kSharedCodeIndex) {
      // Found an existing context-specific entry, it must not contain any code.
      DCHECK_EQ(isolate->heap()->undefined_value(),
                old_code_map->get(entry + kCachedCodeOffset));
      DCHECK(WeakCell::cast(old_code_map->get(entry + kCachedCodeOffset))
                 ->cleared());
      // Just set the code and literals to the entry.
      old_code_map->set(entry + kCachedCodeOffset, *code);
      old_code_map->set(entry + kLiteralsOffset, *literals);
      Handle<WeakCell> code_cell = code->IsUndefined()
                                       ? isolate->factory()->empty_weak_cell()
                                       : isolate->factory()->NewWeakCell(code);
      Handle<WeakCell> literals_cell =
          isolate->factory()->NewWeakCell(literals);
      old_code_map->set(entry + kCachedCodeOffset, *code_cell);
      old_code_map->set(entry + kLiteralsOffset, *literals_cell);
      return;
    }

    // Can we reuse an entry?
    DCHECK(entry < kEntriesStart);
    int length = old_code_map->length();
    for (int i = kEntriesStart; i < length; i += kEntryLength) {
      if (WeakCell::cast(old_code_map->get(i + kContextOffset))->cleared()) {
        new_code_map = old_code_map;
        entry = i;
        break;
      }
    }

    if (entry < kEntriesStart) {
      // Copy old optimized code map and append one new entry.
      new_code_map = isolate->factory()->CopyFixedArrayAndGrow(
          old_code_map, kEntryLength, TENURED);
      // TODO(mstarzinger): Temporary workaround. The allocation above might have
      // flushed the optimized code map and the copy we created is full of holes.
      // For now we just give up on adding the entry and pretend it got flushed.
      // TODO(mstarzinger): Temporary workaround. The allocation above might
      // have flushed the optimized code map and the copy we created is full of
      // holes. For now we just give up on adding the entry and pretend it got
      // flushed.
      if (shared->OptimizedCodeMapIsCleared()) return;
      entry = old_code_map->length();
    }
    new_code_map->set(entry + kContextOffset, *native_context);
    new_code_map->set(entry + kCachedCodeOffset, *code);
    new_code_map->set(entry + kLiteralsOffset, *literals);
  }

  Handle<WeakCell> code_cell = code->IsUndefined()
                                   ? isolate->factory()->empty_weak_cell()
                                   : isolate->factory()->NewWeakCell(code);
  Handle<WeakCell> literals_cell = isolate->factory()->NewWeakCell(literals);
  WeakCell* context_cell = native_context->self_weak_cell();

  new_code_map->set(entry + kContextOffset, context_cell);
  new_code_map->set(entry + kCachedCodeOffset, *code_cell);
  new_code_map->set(entry + kLiteralsOffset, *literals_cell);
  new_code_map->set(entry + kOsrAstIdOffset, Smi::FromInt(osr_ast_id.ToInt()));

#ifdef DEBUG
  for (int i = kEntriesStart; i < new_code_map->length(); i += kEntryLength) {
    DCHECK(new_code_map->get(i + kContextOffset)->IsNativeContext());
    Object* code = new_code_map->get(i + kCachedCodeOffset);
    if (code != isolate->heap()->undefined_value()) {
      DCHECK(code->IsCode());
      DCHECK(Code::cast(code)->kind() == Code::OPTIMIZED_FUNCTION);
    }
    DCHECK(new_code_map->get(i + kLiteralsOffset)->IsFixedArray());
    WeakCell* cell = WeakCell::cast(new_code_map->get(i + kContextOffset));
    DCHECK(cell->cleared() || cell->value()->IsNativeContext());
    cell = WeakCell::cast(new_code_map->get(i + kCachedCodeOffset));
    DCHECK(cell->cleared() ||
           (cell->value()->IsCode() &&
            Code::cast(cell->value())->kind() == Code::OPTIMIZED_FUNCTION));
    cell = WeakCell::cast(new_code_map->get(i + kLiteralsOffset));
    DCHECK(cell->cleared() || cell->value()->IsFixedArray());
    DCHECK(new_code_map->get(i + kOsrAstIdOffset)->IsSmi());
  }
#endif

  // Zap any old optimized code map.
  if (!shared->OptimizedCodeMapIsCleared()) {
    FixedArray* old_code_map = shared->optimized_code_map();
    old_code_map->FillWithHoles(0, old_code_map->length());
  }

  if (old_code_map != *new_code_map) {
    shared->set_optimized_code_map(*new_code_map);
  }
}


void SharedFunctionInfo::ClearOptimizedCodeMap() {
  // Zap any old optimized code map.
  if (!OptimizedCodeMapIsCleared()) {
    FixedArray* old_code_map = optimized_code_map();
    old_code_map->FillWithHoles(0, old_code_map->length());
  }

  FixedArray* cleared_map = GetHeap()->cleared_optimized_code_map();
  set_optimized_code_map(cleared_map, SKIP_WRITE_BARRIER);
}
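The reuse path added to AddToOptimizedCodeMap above is the part this reland
fixes: before growing the FixedArray, the code scans for an entry whose
context WeakCell the GC has already cleared and overwrites that slot. A rough
standalone model of the same logic (std::weak_ptr standing in for WeakCell,
illustrative names only):

#include <memory>
#include <vector>

struct Context {};
struct Code {};
struct LiteralsArray {};

struct CodeMapEntry {
  std::weak_ptr<Context> context;
  std::weak_ptr<Code> code;
  std::weak_ptr<LiteralsArray> literals;
  int osr_ast_id;
};

void AddEntry(std::vector<CodeMapEntry>* map,
              const std::shared_ptr<Context>& context,
              const std::shared_ptr<Code>& code,
              const std::shared_ptr<LiteralsArray>& literals,
              int osr_ast_id) {
  // Prefer a slot whose context has died: the GC already cleared it for us.
  for (CodeMapEntry& entry : *map) {
    if (entry.context.expired()) {
      entry = {context, code, literals, osr_ast_id};
      return;
    }
  }
  // No reusable slot: append a fresh entry (V8 instead copies and grows the
  // backing FixedArray by kEntryLength).
  map->push_back({context, code, literals, osr_ast_id});
}

This is the behavior the OptimizedCodeMapReuseEntries test further down
checks: after a context dies, a later context fills the cleared slot and the
map's length stays the same.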
@@ -12034,8 +12056,10 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
  int dst = kEntriesStart;
  int length = code_map->length();
  for (int src = kEntriesStart; src < length; src += kEntryLength) {
    DCHECK(code_map->get(src)->IsNativeContext());
    if (code_map->get(src + kCachedCodeOffset) == optimized_code) {
    DCHECK(WeakCell::cast(code_map->get(src))->cleared() ||
           WeakCell::cast(code_map->get(src))->value()->IsNativeContext());
    if (WeakCell::cast(code_map->get(src + kCachedCodeOffset))->value() ==
        optimized_code) {
      BailoutId osr(Smi::cast(code_map->get(src + kOsrAstIdOffset))->value());
      if (FLAG_trace_opt) {
        PrintF("[evicting entry from optimizing code map (%s) for ", reason);
@@ -12052,7 +12076,8 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
      }
      // In case of non-OSR entry just clear the code in order to proceed
      // sharing literals.
      code_map->set_undefined(src + kCachedCodeOffset);
      code_map->set(src + kCachedCodeOffset, heap->empty_weak_cell(),
                    SKIP_WRITE_BARRIER);
    }

    // Keep the src entry by copying it to the dst entry.
@@ -12067,9 +12092,11 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
    }
    dst += kEntryLength;
  }
  if (code_map->get(kSharedCodeIndex) == optimized_code) {
  if (WeakCell::cast(code_map->get(kSharedCodeIndex))->value() ==
      optimized_code) {
    // Evict context-independent code as well.
    code_map->set_undefined(kSharedCodeIndex);
    code_map->set(kSharedCodeIndex, heap->empty_weak_cell(),
                  SKIP_WRITE_BARRIER);
    if (FLAG_trace_opt) {
      PrintF("[evicting entry from optimizing code map (%s) for ", reason);
      ShortPrint();
@@ -12081,7 +12108,7 @@ void SharedFunctionInfo::EvictFromOptimizedCodeMap(Code* optimized_code,
    heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(code_map,
                                                           length - dst);
    if (code_map->length() == kEntriesStart &&
        code_map->get(kSharedCodeIndex)->IsUndefined()) {
        WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) {
      ClearOptimizedCodeMap();
    }
  }
@@ -12096,7 +12123,7 @@ void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
    GetHeap()->RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(code_map,
                                                                shrink_by);
    if (code_map->length() == kEntriesStart &&
        code_map->get(kSharedCodeIndex)->IsUndefined()) {
        WeakCell::cast(code_map->get(kSharedCodeIndex))->cleared()) {
      ClearOptimizedCodeMap();
    }
  }
@@ -13337,12 +13364,14 @@ int SharedFunctionInfo::SearchOptimizedCodeMapEntry(Context* native_context,
    int length = optimized_code_map->length();
    Smi* osr_ast_id_smi = Smi::FromInt(osr_ast_id.ToInt());
    for (int i = kEntriesStart; i < length; i += kEntryLength) {
      if (optimized_code_map->get(i + kContextOffset) == native_context &&
      if (WeakCell::cast(optimized_code_map->get(i + kContextOffset))
              ->value() == native_context &&
          optimized_code_map->get(i + kOsrAstIdOffset) == osr_ast_id_smi) {
        return i;
      }
    }
    Object* shared_code = optimized_code_map->get(kSharedCodeIndex);
    Object* shared_code =
        WeakCell::cast(optimized_code_map->get(kSharedCodeIndex))->value();
    if (shared_code->IsCode() && osr_ast_id.IsNone()) {
      return kSharedCodeIndex;
    }
@@ -13358,13 +13387,22 @@ CodeAndLiterals SharedFunctionInfo::SearchOptimizedCodeMap(
  if (entry != kNotFound) {
    FixedArray* code_map = optimized_code_map();
    if (entry == kSharedCodeIndex) {
      result = {Code::cast(code_map->get(kSharedCodeIndex)), nullptr};

      // We know the weak cell isn't cleared because we made sure of it in
      // SearchOptimizedCodeMapEntry and performed no allocations since that
      // call.
      result = {
          Code::cast(WeakCell::cast(code_map->get(kSharedCodeIndex))->value()),
          nullptr};
    } else {
      DCHECK_LE(entry + kEntryLength, code_map->length());
      Object* code = code_map->get(entry + kCachedCodeOffset);
      result = {code->IsUndefined() ? nullptr : Code::cast(code),
                LiteralsArray::cast(code_map->get(entry + kLiteralsOffset))};
      WeakCell* cell = WeakCell::cast(code_map->get(entry + kCachedCodeOffset));
      WeakCell* literals_cell =
          WeakCell::cast(code_map->get(entry + kLiteralsOffset));

      result = {cell->cleared() ? nullptr : Code::cast(cell->value()),
                literals_cell->cleared()
                    ? nullptr
                    : LiteralsArray::cast(literals_cell->value())};
    }
  }
  if (FLAG_trace_opt && !OptimizedCodeMapIsCleared() &&
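With WeakCells in every slot, SearchOptimizedCodeMap has to dereference the
cells and treat a cleared cell as a cache miss, which is why the result may
now carry a null code or literals pointer. A minimal standalone sketch of that
lookup shape (stand-in types, not the V8 API):

#include <memory>
#include <utility>
#include <vector>

struct Context {};
struct Code {};
struct LiteralsArray {};

struct CodeMapEntry {
  std::weak_ptr<Context> context;
  std::weak_ptr<Code> code;
  std::weak_ptr<LiteralsArray> literals;
  int osr_ast_id;
};

using CodeAndLiterals =
    std::pair<std::shared_ptr<Code>, std::shared_ptr<LiteralsArray>>;

CodeAndLiterals Search(const std::vector<CodeMapEntry>& map,
                       const std::shared_ptr<Context>& native_context,
                       int osr_ast_id) {
  for (const CodeMapEntry& entry : map) {
    // lock() yields null once the GC has cleared the cell, so a dead context
    // or dead code simply misses; no eager map cleanup is required.
    if (entry.context.lock() == native_context &&
        entry.osr_ast_id == osr_ast_id) {
      return {entry.code.lock(), entry.literals.lock()};
    }
  }
  return {nullptr, nullptr};
}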
@@ -4492,6 +4492,115 @@ TEST(Regress514122) {
}


TEST(OptimizedCodeMapReuseEntries) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;
  // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
  // run this test.
  if (i::FLAG_turbo) return;
  CcTest::InitializeVM();
  v8::Isolate* v8_isolate = CcTest::isolate();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = isolate->heap();
  HandleScope scope(isolate);

  // Create 3 contexts, allow the 2nd one to be disposed, and verify that
  // a 4th context will re-use the weak slots in the optimized code map
  // to hold data, rather than expanding the map.
  v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
  const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
  v8::ScriptCompiler::Source script_source(
      v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
          .ToLocalChecked());
  v8::Local<v8::UnboundScript> indep =
      v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
          .ToLocalChecked();
  const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
  // Perform one initial GC to enable code flushing.
  heap->CollectAllGarbage();

  c1->Enter();
  indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
  CompileRun(toplevel);

  Handle<SharedFunctionInfo> shared;
  Handle<JSFunction> foo = Handle<JSFunction>::cast(
      v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
          CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
  CHECK(foo->shared()->is_compiled());
  shared = handle(foo->shared());
  c1->Exit();

  {
    HandleScope scope(isolate);
    v8::Local<v8::Context> c2 = v8::Context::New(v8_isolate);
    c2->Enter();
    indep->BindToCurrentContext()->Run(c2).ToLocalChecked();
    CompileRun(toplevel);
    c2->Exit();
  }

  {
    HandleScope scope(isolate);
    v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
    c3->Enter();
    indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
    CompileRun(toplevel);
    c3->Exit();

    // Now, collect garbage. Context c2 should have no roots to it, and its
    // entry in the optimized code map should be free for a new context.
    for (int i = 0; i < 4; i++) {
      heap->CollectAllGarbage();
    }

    Handle<FixedArray> optimized_code_map =
        handle(shared->optimized_code_map());
    // There should be 3 entries in the map.
    CHECK_EQ(
        3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
            SharedFunctionInfo::kEntryLength));
    // But one of them (formerly for c2) should be cleared.
    int cleared_count = 0;
    for (int i = SharedFunctionInfo::kEntriesStart;
         i < optimized_code_map->length();
         i += SharedFunctionInfo::kEntryLength) {
      cleared_count +=
          WeakCell::cast(
              optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
                  ->cleared()
              ? 1
              : 0;
    }
    CHECK_EQ(1, cleared_count);

    // Verify that a new context uses the cleared entry rather than creating
    // a new optimized code map array.
    v8::Local<v8::Context> c4 = v8::Context::New(v8_isolate);
    c4->Enter();
    indep->BindToCurrentContext()->Run(c4).ToLocalChecked();
    CompileRun(toplevel);
    c4->Exit();
    CHECK_EQ(*optimized_code_map, shared->optimized_code_map());

    // Now each entry is in use.
    cleared_count = 0;
    for (int i = SharedFunctionInfo::kEntriesStart;
         i < optimized_code_map->length();
         i += SharedFunctionInfo::kEntryLength) {
      cleared_count +=
          WeakCell::cast(
              optimized_code_map->get(i + SharedFunctionInfo::kContextOffset))
                  ->cleared()
              ? 1
              : 0;
    }
    CHECK_EQ(0, cleared_count);
  }
}


TEST(Regress513496) {
  i::FLAG_flush_optimized_code_cache = false;
  i::FLAG_allow_natives_syntax = true;