[heap] Re-implement weak cell tracking in the marker.
This replaces the linked list of weak cells with a worklist.

TBR=yangguo@chromium.org
BUG=chromium:694255
Change-Id: Ia877e25010ebbec9c05fbbe48cff460a92d3a132
Reviewed-on: https://chromium-review.googlesource.com/587067
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46929}
Parent: 20f870c729
Commit: 2cc8fdfb47
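Note: the core of this CL is a data-structure swap. The marker used to thread an intrusive next pointer through every encountered WeakCell, forming a singly linked list rooted at heap->encountered_weak_cells(); it now pushes the cells into a segmented Worklist<WeakCell*, 64> owned by the collector. The standalone sketch below (invented names, not V8's actual Worklist implementation) shows the shape of that API and why both draining and aborting become trivial once the cells themselves carry no links:

    #include <cassert>
    #include <deque>
    #include <vector>

    // Old scheme: each cell carries an intrusive link, so the marker builds a
    // singly linked list rooted at the heap. Unlinking requires list surgery.
    struct IntrusiveWeakCell {
      void* value = nullptr;
      IntrusiveWeakCell* next = nullptr;  // the kNextOffset field this CL removes
    };

    // New scheme: cells stay link-free; the marker records them in an external,
    // per-task worklist, loosely modeled on Worklist<WeakCell*, 64>.
    template <typename EntryType, int kSegmentSize>
    class SketchWorklist {
     public:
      explicit SketchWorklist(int num_tasks) : local_(num_tasks) {}

      void Push(int task_id, EntryType entry) {
        // A real segmented worklist publishes full segments of kSegmentSize
        // entries to a shared pool; a deque suffices for this single-task sketch.
        local_[task_id].push_back(entry);
      }

      bool Pop(int task_id, EntryType* entry) {
        auto& q = local_[task_id];
        if (q.empty()) return false;
        *entry = q.back();
        q.pop_back();
        return true;
      }

      // Aborting marking is trivial: drop everything, no list to re-thread.
      void Clear() {
        for (auto& q : local_) q.clear();
      }

     private:
      std::vector<std::deque<EntryType>> local_;
    };

    struct LinkFreeWeakCell {
      void* value = nullptr;  // no "next" field needed any more
    };

    int main() {
      const int kMainThread = 0;
      SketchWorklist<LinkFreeWeakCell*, 64> weak_cells(1);

      LinkFreeWeakCell a, b;
      weak_cells.Push(kMainThread, &a);  // marker encounters a weak cell
      weak_cells.Push(kMainThread, &b);

      LinkFreeWeakCell* cell = nullptr;
      int drained = 0;
      while (weak_cells.Pop(kMainThread, &cell)) ++drained;  // atomic-pause drain
      assert(drained == 2);
      return 0;
    }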
@@ -7065,8 +7065,6 @@ Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
   DCHECK(Heap::RootIsImmortalImmovable(Heap::kWeakCellMapRootIndex));
   StoreMapNoWriteBarrier(cell, Heap::kWeakCellMapRootIndex);
   StoreObjectField(cell, WeakCell::kValueOffset, value);
-  StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
-                       Heap::kTheHoleValueRootIndex);
 
   // Store the WeakCell in the feedback vector.
   StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER, 0,
@@ -176,7 +176,6 @@ Heap::Heap()
   set_native_contexts_list(NULL);
   set_allocation_sites_list(Smi::kZero);
   set_encountered_weak_collections(Smi::kZero);
-  set_encountered_weak_cells(Smi::kZero);
   set_encountered_transition_arrays(Smi::kZero);
   // Put a dummy entry in the remembered pages so we can find the list the
   // minidump even if there are no real unmapped pages.
@@ -2723,7 +2722,6 @@ AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   }
   result->set_map_after_allocation(weak_cell_map(), SKIP_WRITE_BARRIER);
   WeakCell::cast(result)->initialize(value);
-  WeakCell::cast(result)->clear_next(the_hole_value());
   return result;
 }
 
@@ -748,11 +748,6 @@ class Heap {
   }
   void IterateEncounteredWeakCollections(RootVisitor* visitor);
 
-  void set_encountered_weak_cells(Object* weak_cell) {
-    encountered_weak_cells_ = weak_cell;
-  }
-  Object* encountered_weak_cells() const { return encountered_weak_cells_; }
-
   void set_encountered_transition_arrays(Object* transition_array) {
     encountered_transition_arrays_ = transition_array;
   }
@@ -2282,8 +2277,6 @@ class Heap {
   // contains Smi(0) while marking is not active.
   Object* encountered_weak_collections_;
 
-  Object* encountered_weak_cells_;
-
   Object* encountered_transition_arrays_;
 
   List<GCCallbackPair> gc_epilogue_callbacks_;
@@ -631,44 +631,6 @@ void IncrementalMarking::MarkRoots() {
   heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
 }
 
-void IncrementalMarking::ProcessWeakCells() {
-  DCHECK(!finalize_marking_completed_);
-  DCHECK(IsMarking());
-
-  Object* the_hole_value = heap()->the_hole_value();
-  Object* weak_cell_obj = heap()->encountered_weak_cells();
-  Object* weak_cell_head = Smi::kZero;
-  WeakCell* prev_weak_cell_obj = NULL;
-  while (weak_cell_obj != Smi::kZero) {
-    WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
-    // We do not insert cleared weak cells into the list, so the value
-    // cannot be a Smi here.
-    HeapObject* value = HeapObject::cast(weak_cell->value());
-    // Remove weak cells with live objects from the list, they do not need
-    // clearing.
-    if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) {
-      // Record slot, if value is pointing to an evacuation candidate.
-      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
-      heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
-      // Remove entry somewhere after top.
-      if (prev_weak_cell_obj != NULL) {
-        prev_weak_cell_obj->set_next(weak_cell->next());
-      }
-      weak_cell_obj = weak_cell->next();
-      weak_cell->clear_next(the_hole_value);
-    } else {
-      if (weak_cell_head == Smi::kZero) {
-        weak_cell_head = weak_cell;
-      }
-      prev_weak_cell_obj = weak_cell;
-      weak_cell_obj = weak_cell->next();
-    }
-  }
-  // Top may have changed.
-  heap()->set_encountered_weak_cells(weak_cell_head);
-}
-
-
 bool ShouldRetainMap(Map* map, int age) {
   if (age == 0) {
     // The map has aged. Do not retain this map.
@@ -756,7 +718,6 @@ void IncrementalMarking::FinalizeIncrementally() {
     // so we can do it only once at the beginning of the finalization.
     RetainMaps();
   }
-  ProcessWeakCells();
 
   int marking_progress =
       heap_->mark_compact_collector()->marking_worklist()->Size() +
@@ -286,7 +286,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   void FinishBlackAllocation();
 
   void MarkRoots();
-  void ProcessWeakCells();
   // Retain dying maps for <FLAG_retain_maps_for_n_gc> garbage collections to
   // increase chances of reusing of map transition tree in future.
   void RetainMaps();
@@ -422,6 +422,7 @@ void MinorMarkCompactCollector::SetUp() {}
 
 void MarkCompactCollector::TearDown() {
   AbortCompaction();
+  weak_cells_.Clear();
   marking_worklist()->TearDown();
 }
 
@@ -2779,12 +2780,9 @@ void MarkCompactCollector::ClearNonLiveReferences() {
   }
 
   DependentCode* dependent_code_list;
-  Object* non_live_map_list;
-  ClearWeakCells(&non_live_map_list, &dependent_code_list);
-
+  ClearWeakCellsAndSimpleMapTransitions(&dependent_code_list);
   {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_MAPS);
-    ClearSimpleMapTransitions(non_live_map_list);
     ClearFullMapTransitions();
   }
 
@@ -2854,40 +2852,27 @@ void MarkCompactCollector::MarkDependentCodeForDeoptimization(
   }
 }
 
-
-void MarkCompactCollector::ClearSimpleMapTransitions(
-    Object* non_live_map_list) {
-  Object* the_hole_value = heap()->the_hole_value();
-  Object* weak_cell_obj = non_live_map_list;
-  while (weak_cell_obj != Smi::kZero) {
-    WeakCell* weak_cell = WeakCell::cast(weak_cell_obj);
-    Map* map = Map::cast(weak_cell->value());
-    DCHECK(ObjectMarking::IsWhite(map, MarkingState::Internal(map)));
-    Object* potential_parent = map->constructor_or_backpointer();
-    if (potential_parent->IsMap()) {
-      Map* parent = Map::cast(potential_parent);
-      if (ObjectMarking::IsBlackOrGrey(parent,
-                                       MarkingState::Internal(parent)) &&
-          parent->raw_transitions() == weak_cell) {
-        ClearSimpleMapTransition(parent, map);
-      }
+void MarkCompactCollector::ClearSimpleMapTransition(
+    WeakCell* potential_transition, Map* dead_target) {
+  DCHECK(ObjectMarking::IsWhite(dead_target, marking_state(dead_target)));
+  Object* potential_parent = dead_target->constructor_or_backpointer();
+  if (potential_parent->IsMap()) {
+    Map* parent = Map::cast(potential_parent);
+    if (ObjectMarking::IsBlackOrGrey(parent, marking_state(parent)) &&
+        parent->raw_transitions() == potential_transition) {
+      ClearSimpleMapTransition(parent, dead_target);
     }
-    weak_cell->clear();
-    weak_cell_obj = weak_cell->next();
-    weak_cell->clear_next(the_hole_value);
   }
 }
 
-
 void MarkCompactCollector::ClearSimpleMapTransition(Map* map,
-                                                    Map* dead_transition) {
-  // A previously existing simple transition (stored in a WeakCell) is going
-  // to be cleared. Clear the useless cell pointer, and take ownership
-  // of the descriptor array.
+                                                    Map* dead_target) {
+  // Clear the useless weak cell pointer, and take ownership of the descriptor
+  // array.
   map->set_raw_transitions(Smi::kZero);
   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
   DescriptorArray* descriptors = map->instance_descriptors();
-  if (descriptors == dead_transition->instance_descriptors() &&
+  if (descriptors == dead_target->instance_descriptors() &&
       number_of_own_descriptors > 0) {
     TrimDescriptorArray(map, descriptors);
     DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
@@ -3081,25 +3066,18 @@ void MarkCompactCollector::AbortWeakCollections() {
   heap()->set_encountered_weak_collections(Smi::kZero);
 }
 
-
-void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
-                                          DependentCode** dependent_code_list) {
+void MarkCompactCollector::ClearWeakCellsAndSimpleMapTransitions(
+    DependentCode** dependent_code_list) {
   Heap* heap = this->heap();
   TRACE_GC(heap->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_CELLS);
-  Object* weak_cell_obj = heap->encountered_weak_cells();
-  Object* the_hole_value = heap->the_hole_value();
   DependentCode* dependent_code_head =
       DependentCode::cast(heap->empty_fixed_array());
-  Object* non_live_map_head = Smi::kZero;
-  while (weak_cell_obj != Smi::kZero) {
-    WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
-    Object* next_weak_cell = weak_cell->next();
-    bool clear_value = true;
-    bool clear_next = true;
+  WeakCell* weak_cell;
+  while (weak_cells_.Pop(kMainThread, &weak_cell)) {
     // We do not insert cleared weak cells into the list, so the value
     // cannot be a Smi here.
    HeapObject* value = HeapObject::cast(weak_cell->value());
-    if (!ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
+    if (!ObjectMarking::IsBlackOrGrey(value, marking_state(value))) {
      // Cells for new-space objects embedded in optimized code are wrapped in
      // WeakCell and put into Heap::weak_object_to_code_table.
      // Such cells do not have any strong references but we want to keep them
@@ -3117,10 +3095,10 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
           RecordSlot(value, slot, *slot);
           slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
           RecordSlot(weak_cell, slot, *slot);
-          clear_value = false;
+        } else {
+          weak_cell->clear();
         }
-      }
-      if (value->IsMap()) {
+      } else if (value->IsMap()) {
         // The map is non-live.
         Map* map = Map::cast(value);
         // Add dependent code to the dependent_code_list.
@@ -3132,43 +3110,22 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
           candidate->set_next_link(dependent_code_head);
           dependent_code_head = candidate;
         }
-        // Add the weak cell to the non_live_map list.
-        weak_cell->set_next(non_live_map_head);
-        non_live_map_head = weak_cell;
-        clear_value = false;
-        clear_next = false;
+        ClearSimpleMapTransition(weak_cell, map);
+        weak_cell->clear();
+      } else {
+        // All other objects.
+        weak_cell->clear();
       }
     } else {
       // The value of the weak cell is alive.
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
       RecordSlot(weak_cell, slot, *slot);
-      clear_value = false;
     }
-    if (clear_value) {
-      weak_cell->clear();
-    }
-    if (clear_next) {
-      weak_cell->clear_next(the_hole_value);
-    }
-    weak_cell_obj = next_weak_cell;
   }
-  heap->set_encountered_weak_cells(Smi::kZero);
-  *non_live_map_list = non_live_map_head;
   *dependent_code_list = dependent_code_head;
 }
 
-
-void MarkCompactCollector::AbortWeakCells() {
-  Object* the_hole_value = heap()->the_hole_value();
-  Object* weak_cell_obj = heap()->encountered_weak_cells();
-  while (weak_cell_obj != Smi::kZero) {
-    WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
-    weak_cell_obj = weak_cell->next();
-    weak_cell->clear_next(the_hole_value);
-  }
-  heap()->set_encountered_weak_cells(Smi::kZero);
-}
-
+void MarkCompactCollector::AbortWeakCells() { weak_cells_.Clear(); }
 
 void MarkCompactCollector::AbortTransitionArrays() {
   HeapObject* undefined = heap()->undefined_value();
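Note: with the worklist, ClearWeakCellsAndSimpleMapTransitions above visits each popped cell exactly once, which is why the old clear_value/clear_next bookkeeping and the end-of-loop list reset disappear. A schematic restatement of the branch structure (invented types, not V8 code; the code-cell resurrection path shown above is elided):

    #include <vector>

    enum class ValueState { kLive, kDeadMap, kDeadOther };

    struct SketchWeakCell {
      ValueState state = ValueState::kDeadOther;
      bool cleared = false;
      bool transition_cleared = false;
    };

    void ClearWeakCellsSketch(std::vector<SketchWeakCell*>& weak_cells) {
      while (!weak_cells.empty()) {
        SketchWeakCell* cell = weak_cells.back();  // ~ weak_cells_.Pop(kMainThread, &cell)
        weak_cells.pop_back();
        switch (cell->state) {
          case ValueState::kLive:
            // Value survived marking: record the value slot, keep the cell.
            break;
          case ValueState::kDeadMap:
            // ~ ClearSimpleMapTransition(weak_cell, map); then drop the value.
            cell->transition_cleared = true;
            cell->cleared = true;
            break;
          case ValueState::kDeadOther:
            cell->cleared = true;  // all other dead values: just clear
            break;
        }
      }
    }

    int main() {
      SketchWeakCell live{ValueState::kLive};
      SketchWeakCell dead_map{ValueState::kDeadMap};
      SketchWeakCell dead_other{ValueState::kDeadOther};
      std::vector<SketchWeakCell*> worklist = {&live, &dead_map, &dead_other};
      ClearWeakCellsSketch(worklist);
      return (!live.cleared && dead_map.transition_cleared && dead_other.cleared) ? 0 : 1;
    }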
@@ -376,6 +376,7 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
   int NumberOfParallelMarkingTasks(int pages);
 
   MarkingWorklist* worklist_;
+
   YoungGenerationMarkingVisitor* main_marking_visitor_;
   base::Semaphore page_parallel_job_semaphore_;
   std::vector<Page*> new_space_evacuation_pages_;
@@ -388,12 +389,12 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {
 // Collector for young and old generation.
 class MarkCompactCollector final : public MarkCompactCollectorBase {
  public:
+  static const int kMainThread = 0;
   // Wrapper for the shared and bailout worklists.
   class MarkingWorklist {
    public:
     using ConcurrentMarkingWorklist = Worklist<HeapObject*, 64>;
 
-    static const int kMainThread = 0;
     // The heap parameter is not used but needed to match the sequential case.
     explicit MarkingWorklist(Heap* heap) {}
 
@@ -487,6 +488,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
     ConcurrentMarkingWorklist bailout_;
   };
 
+  using WeakCellWorklist = Worklist<WeakCell*, 64 /* segment size */>;
+
   class RootMarkingVisitor;
 
   class Sweeper {
@@ -639,6 +642,10 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
 
   MarkingWorklist* marking_worklist() { return &marking_worklist_; }
 
+  void AddWeakCell(WeakCell* weak_cell) {
+    weak_cells_.Push(kMainThread, weak_cell);
+  }
+
   Sweeper& sweeper() { return sweeper_; }
 
 #ifdef DEBUG
@@ -735,10 +742,12 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // and deoptimize dependent code of non-live maps.
   void ClearNonLiveReferences() override;
   void MarkDependentCodeForDeoptimization(DependentCode* list);
-  // Find non-live targets of simple transitions in the given list. Clear
-  // transitions to non-live targets and if needed trim descriptors arrays.
-  void ClearSimpleMapTransitions(Object* non_live_map_list);
-  void ClearSimpleMapTransition(Map* map, Map* dead_transition);
+  // Checks if the given weak cell is a simple transition from the parent map
+  // of the given dead target. If so it clears the transition and trims
+  // the descriptor array of the parent if needed.
+  void ClearSimpleMapTransition(WeakCell* potential_transition,
+                                Map* dead_target);
+  void ClearSimpleMapTransition(Map* map, Map* dead_target);
   // Compact every array in the global list of transition arrays and
   // trim the corresponding descriptor array if a transition target is non-live.
   void ClearFullMapTransitions();
@@ -761,8 +770,12 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   // collections when incremental marking is aborted.
   void AbortWeakCollections();
 
-  void ClearWeakCells(Object** non_live_map_list,
-                      DependentCode** dependent_code_list);
+  // Goes through the list of encountered weak cells and clears those with
+  // dead values. If the value is a dead map and the parent map transitions to
+  // the dead map via weak cell, then this function also clears the map
+  // transition.
+  void ClearWeakCellsAndSimpleMapTransitions(
+      DependentCode** dependent_code_list);
   void AbortWeakCells();
 
   void AbortTransitionArrays();
@@ -816,6 +829,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
   bool have_code_to_deoptimize_;
 
   MarkingWorklist marking_worklist_;
+  WeakCellWorklist weak_cells_;
 
   // Candidates for pages that should be evacuated.
   std::vector<Page*> evacuation_candidates_;
@@ -219,7 +219,7 @@ int MarkingVisitor<ConcreteVisitor>::VisitWeakCell(Map* map,
   // Enqueue weak cell in linked list of encountered weak collections.
   // We can ignore weak cells with cleared values because they will always
   // contain smi zero.
-  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
+  if (!weak_cell->cleared()) {
     HeapObject* value = HeapObject::cast(weak_cell->value());
     if (ObjectMarking::IsBlackOrGrey<IncrementalMarking::kAtomicity>(
             value, collector_->marking_state(value))) {
@@ -231,9 +231,7 @@ int MarkingVisitor<ConcreteVisitor>::VisitWeakCell(Map* map,
       // If we do not know about liveness of values of weak cells, we have to
       // process them when we know the liveness of the whole transitive
      // closure.
-      weak_cell->set_next(heap_->encountered_weak_cells(),
-                          UPDATE_WEAK_WRITE_BARRIER);
-      heap_->set_encountered_weak_cells(weak_cell);
+      collector_->AddWeakCell(weak_cell);
     }
   }
   return WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
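Note: the visitor side is the mirror image of the pause-time drain above: if the cell's value is already marked, only the slot is recorded; otherwise liveness is unknown until the transitive closure completes, so the cell is deferred via collector_->AddWeakCell(weak_cell) instead of being spliced into a heap-rooted list. A minimal mock of that decision (invented types, not V8 code):

    #include <cassert>
    #include <vector>

    struct MockWeakCell {
      bool value_marked = false;
      bool slot_recorded = false;
    };

    struct MockCollector {
      std::vector<MockWeakCell*> weak_cells_;
      // ~ MarkCompactCollector::AddWeakCell: push, do not link.
      void AddWeakCell(MockWeakCell* cell) { weak_cells_.push_back(cell); }
    };

    // ~ MarkingVisitor::VisitWeakCell, minus object sizes and write barriers.
    void VisitWeakCellSketch(MockCollector* collector, MockWeakCell* cell) {
      if (cell->value_marked) {
        cell->slot_recorded = true;  // ~ RecordSlot(weak_cell, slot, *slot)
      } else {
        // Liveness unknown until marking finishes: defer to the atomic pause.
        collector->AddWeakCell(cell);
      }
    }

    int main() {
      MockCollector collector;
      MockWeakCell marked{true}, unknown{false};
      VisitWeakCellSketch(&collector, &marked);
      VisitWeakCellSketch(&collector, &unknown);
      assert(marked.slot_recorded && collector.weak_cells_.size() == 1);
      return 0;
    }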
@@ -851,7 +851,6 @@ void PropertyCell::PropertyCellVerify() {
 void WeakCell::WeakCellVerify() {
   CHECK(IsWeakCell());
   VerifyObjectField(kValueOffset);
-  VerifyObjectField(kNextOffset);
 }
 
 
@@ -1445,24 +1445,6 @@ void WeakCell::initialize(HeapObject* val) {
 
 bool WeakCell::cleared() const { return value() == Smi::kZero; }
 
-Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
-
-
-void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
-  WRITE_FIELD(this, kNextOffset, val);
-  if (mode == UPDATE_WRITE_BARRIER) {
-    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
-  }
-}
-
-
-void WeakCell::clear_next(Object* the_hole_value) {
-  DCHECK_EQ(GetHeap()->the_hole_value(), the_hole_value);
-  set_next(the_hole_value, SKIP_WRITE_BARRIER);
-}
-
-bool WeakCell::next_cleared() { return next()->IsTheHole(GetIsolate()); }
-
 int JSObject::GetHeaderSize() {
   // Check for the most common kind of JavaScript object before
   // falling into the generic switch. This speeds up the internal
@@ -6209,12 +6209,6 @@ class WeakCell : public HeapObject {
 
   inline bool cleared() const;
 
-  DECL_ACCESSORS(next, Object)
-
-  inline void clear_next(Object* the_hole_value);
-
-  inline bool next_cleared();
-
   DECL_CAST(WeakCell)
 
   DECL_PRINTER(WeakCell)
@@ -6222,8 +6216,7 @@ class WeakCell : public HeapObject {
 
   // Layout description.
   static const int kValueOffset = HeapObject::kHeaderSize;
-  static const int kNextOffset = kValueOffset + kPointerSize;
-  static const int kSize = kNextOffset + kPointerSize;
+  static const int kSize = kValueOffset + kPointerSize;
 
   typedef FixedBodyDescriptor<kValueOffset, kSize, kSize> BodyDescriptor;
 
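Note: the layout hunk above is where the per-object saving comes from: with kNextOffset gone, kSize shrinks by one pointer per WeakCell. A back-of-the-envelope check with hypothetical constants mirroring the header, assuming 8-byte pointers as on x64:

    #include <cstddef>

    constexpr std::size_t kPointerSize = 8;            // x64 assumption
    constexpr std::size_t kHeaderSize = kPointerSize;  // map word
    constexpr std::size_t kValueOffset = kHeaderSize;

    constexpr std::size_t kOldNextOffset = kValueOffset + kPointerSize;
    constexpr std::size_t kOldSize = kOldNextOffset + kPointerSize;  // 24 = 0x18
    constexpr std::size_t kNewSize = kValueOffset + kPointerSize;    // 16 = 0x10

    static_assert(kOldSize == 0x18 && kNewSize == 0x10, "sizes as expected");
    static_assert(kOldSize - kNewSize == kPointerSize,
                  "one pointer saved per weak cell");

    int main() { return 0; }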
@@ -1855,8 +1855,6 @@ bool V8HeapExplorer::IsEssentialHiddenReference(Object* parent,
   if (parent->IsContext() &&
       field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK))
     return false;
-  if (parent->IsWeakCell() && field_offset == WeakCell::kNextOffset)
-    return false;
   return true;
 }
 
@@ -496,11 +496,7 @@ void Serializer::ObjectSerializer::SerializeExternalStringAsSequentialString() {
 class UnlinkWeakNextScope {
  public:
   explicit UnlinkWeakNextScope(HeapObject* object) : object_(nullptr) {
-    if (object->IsWeakCell()) {
-      object_ = object;
-      next_ = WeakCell::cast(object)->next();
-      WeakCell::cast(object)->clear_next(object->GetHeap()->the_hole_value());
-    } else if (object->IsAllocationSite()) {
+    if (object->IsAllocationSite()) {
       object_ = object;
       next_ = AllocationSite::cast(object)->weak_next();
       AllocationSite::cast(object)->set_weak_next(
@@ -510,12 +506,8 @@ class UnlinkWeakNextScope {
 
   ~UnlinkWeakNextScope() {
     if (object_ != nullptr) {
-      if (object_->IsWeakCell()) {
-        WeakCell::cast(object_)->set_next(next_, UPDATE_WEAK_WRITE_BARRIER);
-      } else {
-        AllocationSite::cast(object_)->set_weak_next(next_,
-                                                     UPDATE_WEAK_WRITE_BARRIER);
-      }
+      AllocationSite::cast(object_)->set_weak_next(next_,
+                                                   UPDATE_WEAK_WRITE_BARRIER);
     }
   }
 
@@ -304,16 +304,16 @@ KNOWN_OBJECTS = {
   ("OLD_SPACE", 0x02839): "EmptySlowElementDictionary",
   ("OLD_SPACE", 0x02881): "EmptyPropertyCell",
   ("OLD_SPACE", 0x028a9): "EmptyWeakCell",
-  ("OLD_SPACE", 0x028c1): "ArrayProtector",
-  ("OLD_SPACE", 0x028e9): "IsConcatSpreadableProtector",
-  ("OLD_SPACE", 0x028f9): "SpeciesProtector",
-  ("OLD_SPACE", 0x02921): "StringLengthProtector",
-  ("OLD_SPACE", 0x02949): "FastArrayIterationProtector",
-  ("OLD_SPACE", 0x02959): "ArrayIteratorProtector",
-  ("OLD_SPACE", 0x02981): "ArrayBufferNeuteringProtector",
-  ("OLD_SPACE", 0x029a9): "InfinityValue",
-  ("OLD_SPACE", 0x029b9): "MinusZeroValue",
-  ("OLD_SPACE", 0x029c9): "MinusInfinityValue",
+  ("OLD_SPACE", 0x028b9): "ArrayProtector",
+  ("OLD_SPACE", 0x028e1): "IsConcatSpreadableProtector",
+  ("OLD_SPACE", 0x028f1): "SpeciesProtector",
+  ("OLD_SPACE", 0x02919): "StringLengthProtector",
+  ("OLD_SPACE", 0x02941): "FastArrayIterationProtector",
+  ("OLD_SPACE", 0x02951): "ArrayIteratorProtector",
+  ("OLD_SPACE", 0x02979): "ArrayBufferNeuteringProtector",
+  ("OLD_SPACE", 0x029a1): "InfinityValue",
+  ("OLD_SPACE", 0x029b1): "MinusZeroValue",
+  ("OLD_SPACE", 0x029c1): "MinusInfinityValue",
 }
 
 # List of known V8 Frame Markers.
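Note: the EmptyWeakCell root shrinking by one pointer is also what shifts every later root in the generated v8heapconst.py snapshot above. A quick consistency check over the (old, new) offsets copied from that hunk, using the same 8-byte pointer assumption as the earlier sketch:

    #include <cassert>
    #include <cstdint>
    #include <utility>
    #include <vector>

    int main() {
      // (old, new) OLD_SPACE offsets from the hunk above: everything after
      // EmptyWeakCell moves down by exactly one 8-byte pointer.
      const std::vector<std::pair<uint32_t, uint32_t>> shifted = {
          {0x028c1, 0x028b9}, {0x028e9, 0x028e1}, {0x028f9, 0x028f1},
          {0x02921, 0x02919}, {0x02949, 0x02941}, {0x02959, 0x02951},
          {0x02981, 0x02979}, {0x029a9, 0x029a1}, {0x029b9, 0x029b1},
          {0x029c9, 0x029c1}};
      for (const auto& p : shifted) assert(p.first - p.second == 8);
      return 0;
    }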