[heap] MC: Switch to instance-based visitor

Bug:
Change-Id: Ie365e73656f9807043e801b4fb74d75c64259838
Reviewed-on: https://chromium-review.googlesource.com/552552
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46325}

This commit is contained in:
parent 8b97f512ac
commit 3dffe2e3ad
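
Editor's note: the change replaces the table-dispatched StaticMarkingVisitor with an instance-based visitor built on CRTP (MarkingVisitor<ConcreteVisitor>), so per-collection state such as heap_ and collector_ lives in member fields instead of being threaded through every static call, while dispatch stays free of virtual calls. Below is a minimal sketch of that shape; Collector, VisitorBase, and MarkingVisitorSketch are illustrative stand-ins, not V8 types.

```cpp
#include <cstdio>

// Stand-in for MarkCompactCollector (illustrative only).
struct Collector {
  void MarkObject(int* object) { std::printf("marked %p\n", (void*)object); }
};

// CRTP base: dispatches to the concrete visitor without virtual calls.
template <typename ConcreteVisitor>
class VisitorBase {
 public:
  explicit VisitorBase(Collector* collector) : collector_(collector) {}

  void Visit(int* object) {
    // The concrete type is known at compile time, so this call can inline.
    static_cast<ConcreteVisitor*>(this)->VisitPointer(object);
  }

 protected:
  // Instance state replaces the Heap*/collector parameters that the old
  // static visitor methods had to pass around.
  Collector* collector_;
};

class MarkingVisitorSketch final : public VisitorBase<MarkingVisitorSketch> {
 public:
  explicit MarkingVisitorSketch(Collector* collector)
      : VisitorBase<MarkingVisitorSketch>(collector) {}

  void VisitPointer(int* p) { collector_->MarkObject(p); }
};

int main() {
  Collector collector;
  // Instance-based: construct a visitor per use, as EmptyMarkingWorklist()
  // does with MarkCompactMarkingVisitor in the diff below.
  MarkingVisitorSketch visitor(&collector);
  int heap_object = 42;
  visitor.Visit(&heap_object);
  return 0;
}
```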
@@ -1037,93 +1037,94 @@ void MarkCompactCollector::Finish() {
 // and continue with marking. This process repeats until all reachable
 // objects have been marked.
 
-class MarkCompactMarkingVisitor
-    : public StaticMarkingVisitor<MarkCompactMarkingVisitor> {
+class MarkCompactMarkingVisitor final
+    : public MarkingVisitor<MarkCompactMarkingVisitor> {
  public:
-  static void Initialize();
+  explicit MarkCompactMarkingVisitor(MarkCompactCollector* collector)
+      : MarkingVisitor<MarkCompactMarkingVisitor>(collector->heap(),
+                                                  collector) {}
 
-  INLINE(static void VisitPointer(Heap* heap, HeapObject* object, Object** p)) {
-    MarkObjectByPointer(heap->mark_compact_collector(), object, p);
+  inline void VisitPointer(HeapObject* host, Object** p) final {
+    MarkObjectByPointer(host, p);
   }
 
-  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
-                                   Object** start, Object** end)) {
+  inline void VisitPointers(HeapObject* host, Object** start,
+                            Object** end) final {
     // Mark all objects pointed to in [start, end).
     const int kMinRangeForMarkingRecursion = 64;
     if (end - start >= kMinRangeForMarkingRecursion) {
-      if (VisitUnmarkedObjects(heap, object, start, end)) return;
+      if (VisitUnmarkedObjects(host, start, end)) return;
       // We are close to a stack overflow, so just mark the objects.
     }
-    MarkCompactCollector* collector = heap->mark_compact_collector();
     for (Object** p = start; p < end; p++) {
-      MarkObjectByPointer(collector, object, p);
+      MarkObjectByPointer(host, p);
     }
   }
 
   // Marks the object black and pushes it on the marking stack.
-  INLINE(static void MarkObject(Heap* heap, HeapObject* object)) {
-    heap->mark_compact_collector()->MarkObject(object);
+  V8_INLINE void MarkObject(HeapObject* object) {
+    collector_->MarkObject(object);
   }
 
-  // Marks the object black without pushing it on the marking stack.
-  // Returns true if object needed marking and false otherwise.
-  INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
+  // Marks the object black without pushing it on the marking stack. Returns
+  // true if object needed marking and false otherwise.
+  V8_INLINE bool MarkObjectWithoutPush(HeapObject* object) {
    return ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
   }
 
   // Mark object pointed to by p.
-  INLINE(static void MarkObjectByPointer(MarkCompactCollector* collector,
-                                         HeapObject* object, Object** p)) {
+  V8_INLINE void MarkObjectByPointer(HeapObject* host, Object** p) {
     if (!(*p)->IsHeapObject()) return;
     HeapObject* target_object = HeapObject::cast(*p);
-    collector->RecordSlot(object, p, target_object);
-    collector->MarkObject(target_object);
+    collector_->RecordSlot(host, p, target_object);
+    collector_->MarkObject(target_object);
   }
 
-
-  // Visit an unmarked object.
-  INLINE(static void VisitUnmarkedObject(MarkCompactCollector* collector,
-                                         HeapObject* obj)) {
-#ifdef DEBUG
-    DCHECK(collector->heap()->Contains(obj));
-#endif
-    if (ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj))) {
-      Map* map = obj->map();
-      Heap* heap = obj->GetHeap();
-      ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
-      // Mark the map pointer and the body.
-      heap->mark_compact_collector()->MarkObject(map);
-      IterateBody(map, obj);
+  V8_INLINE int VisitJSRegExp(Map* map, JSRegExp* re) {
+    if (!FLAG_flush_regexp_code) {
+      return VisitJSObject(map, re);
     }
+    // Flush code or set age on both one byte and two byte code.
+    UpdateRegExpCodeAgeAndFlush(re, true);
+    UpdateRegExpCodeAgeAndFlush(re, false);
+    // Visit the fields of the RegExp, including the updated FixedArray.
+    return VisitJSObject(map, re);
   }
 
+ protected:
+  static const int kRegExpCodeThreshold = 5;
+
   // Visit all unmarked objects pointed to by [start, end).
   // Returns false if the operation fails (lack of stack space).
-  INLINE(static bool VisitUnmarkedObjects(Heap* heap, HeapObject* object,
-                                          Object** start, Object** end)) {
+  V8_INLINE bool VisitUnmarkedObjects(HeapObject* host, Object** start,
+                                      Object** end) {
     // Return false is we are close to the stack limit.
-    StackLimitCheck check(heap->isolate());
+    StackLimitCheck check(heap_->isolate());
     if (check.HasOverflowed()) return false;
 
-    MarkCompactCollector* collector = heap->mark_compact_collector();
     // Visit the unmarked objects.
     for (Object** p = start; p < end; p++) {
       Object* o = *p;
       if (!o->IsHeapObject()) continue;
-      collector->RecordSlot(object, p, o);
+      collector_->RecordSlot(host, p, o);
       HeapObject* obj = HeapObject::cast(o);
-      VisitUnmarkedObject(collector, obj);
+      VisitUnmarkedObject(obj);
     }
     return true;
   }
 
  private:
-  // Code flushing support.
+  // Visit an unmarked object.
+  V8_INLINE void VisitUnmarkedObject(HeapObject* obj) {
+    DCHECK(heap_->Contains(obj));
+    if (ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj))) {
+      Map* map = obj->map();
+      ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
+      // Mark the map pointer and the body.
+      collector_->MarkObject(map);
+      Visit(map, obj);
+    }
+  }
 
-  static const int kRegExpCodeThreshold = 5;
-
-  static void UpdateRegExpCodeAgeAndFlush(Heap* heap, JSRegExp* re,
-                                          bool is_one_byte) {
+  V8_INLINE void UpdateRegExpCodeAgeAndFlush(JSRegExp* re, bool is_one_byte) {
     // Make sure that the fixed array is in fact initialized on the RegExp.
     // We could potentially trigger a GC when initializing the RegExp.
     if (HeapObject::cast(re->data())->map()->instance_type() !=
@@ -1147,12 +1148,12 @@ class MarkCompactMarkingVisitor
       if (ObjectMarking::IsBlackOrGrey(data, MarkingState::Internal(data))) {
         Object** slot =
             data->data_start() + JSRegExp::saved_code_index(is_one_byte);
-        heap->mark_compact_collector()->RecordSlot(data, slot, code);
+        collector_->RecordSlot(data, slot, code);
       }
 
       // Set a number in the 0-255 range to guarantee no smi overflow.
       re->SetDataAt(JSRegExp::code_index(is_one_byte),
-                    Smi::FromInt(heap->ms_count() & 0xff));
+                    Smi::FromInt(heap_->ms_count() & 0xff));
     } else if (code->IsSmi()) {
       int value = Smi::cast(code)->value();
       // The regexp has not been compiled yet or there was a compilation error.
@@ -1162,7 +1163,7 @@ class MarkCompactMarkingVisitor
       }
 
       // Check if we should flush now.
-      if (value == ((heap->ms_count() - kRegExpCodeThreshold) & 0xff)) {
+      if (value == ((heap_->ms_count() - kRegExpCodeThreshold) & 0xff)) {
         re->SetDataAt(JSRegExp::code_index(is_one_byte),
                       Smi::FromInt(JSRegExp::kUninitializedValue));
         re->SetDataAt(JSRegExp::saved_code_index(is_one_byte),
@@ -1170,35 +1171,8 @@ class MarkCompactMarkingVisitor
       }
     }
   }
 
-
-  // Works by setting the current sweep_generation (as a smi) in the
-  // code object place in the data array of the RegExp and keeps a copy
-  // around that can be reinstated if we reuse the RegExp before flushing.
-  // If we did not use the code for kRegExpCodeThreshold mark sweep GCs
-  // we flush the code.
-  static void VisitRegExpAndFlushCode(Map* map, HeapObject* object) {
-    Heap* heap = map->GetHeap();
-    if (!FLAG_flush_regexp_code) {
-      JSObjectVisitor::Visit(map, object);
-      return;
-    }
-    JSRegExp* re = reinterpret_cast<JSRegExp*>(object);
-    // Flush code or set age on both one byte and two byte code.
-    UpdateRegExpCodeAgeAndFlush(heap, re, true);
-    UpdateRegExpCodeAgeAndFlush(heap, re, false);
-    // Visit the fields of the RegExp, including the updated FixedArray.
-    JSObjectVisitor::Visit(map, object);
-  }
 };
 
-
-void MarkCompactMarkingVisitor::Initialize() {
-  StaticMarkingVisitor<MarkCompactMarkingVisitor>::Initialize();
-
-  table_.Register(kVisitJSRegExp, &VisitRegExpAndFlushCode);
-}
-
 void MinorMarkCompactCollector::CleanupSweepToIteratePages() {
   for (Page* p : sweep_to_iterate_pages_) {
     if (p->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
@@ -1216,7 +1190,7 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor,
                                                  public RootVisitor {
  public:
   explicit RootMarkingVisitor(Heap* heap)
-      : collector_(heap->mark_compact_collector()) {}
+      : collector_(heap->mark_compact_collector()), visitor_(collector_) {}
 
   void VisitPointer(HeapObject* host, Object** p) override {
     MarkObjectByPointer(p);
@@ -1249,7 +1223,7 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor,
     Map* map = object->map();
     // Mark the map pointer and body, and push them on the marking stack.
     collector_->MarkObject(map);
-    MarkCompactMarkingVisitor::IterateBody(map, object);
+    visitor_.Visit(map, object);
     // Mark all the objects reachable from the map and body. May leave
     // overflowed objects in the heap.
     collector_->EmptyMarkingWorklist();
@@ -1257,6 +1231,7 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor,
   }
 
   MarkCompactCollector* collector_;
+  MarkCompactMarkingVisitor visitor_;
 };
 
 class InternalizedStringTableCleaner : public ObjectVisitor {
@@ -2038,6 +2013,7 @@ void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
 // marking stack have been marked, or are overflowed in the heap.
 void MarkCompactCollector::EmptyMarkingWorklist() {
   HeapObject* object;
+  MarkCompactMarkingVisitor visitor(this);
   while ((object = marking_worklist()->Pop()) != nullptr) {
     DCHECK(!object->IsFiller());
     DCHECK(object->IsHeapObject());
@@ -2047,7 +2023,7 @@ void MarkCompactCollector::EmptyMarkingWorklist() {
 
     Map* map = object->map();
     MarkObject(map);
-    MarkCompactMarkingVisitor::IterateBody(map, object);
+    visitor.Visit(map, object);
   }
   DCHECK(marking_worklist()->IsEmpty());
 }
@@ -3180,6 +3156,7 @@ void MarkCompactCollector::TrimEnumCache(Map* map,
 
 
 void MarkCompactCollector::ProcessWeakCollections() {
+  MarkCompactMarkingVisitor visitor(this);
   Object* weak_collection_obj = heap()->encountered_weak_collections();
   while (weak_collection_obj != Smi::kZero) {
     JSWeakCollection* weak_collection =
@@ -3197,8 +3174,7 @@ void MarkCompactCollector::ProcessWeakCollections() {
           RecordSlot(table, key_slot, *key_slot);
           Object** value_slot =
               table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
-          MarkCompactMarkingVisitor::MarkObjectByPointer(this, table,
-                                                         value_slot);
+          visitor.MarkObjectByPointer(table, value_slot);
         }
       }
     }
@@ -4835,7 +4811,6 @@ void MarkCompactCollector::StartSweepSpaces() {
 }
 
 void MarkCompactCollector::Initialize() {
-  MarkCompactMarkingVisitor::Initialize();
   IncrementalMarking::Initialize();
 }
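
Editor's note: the regexp flushing hunks above keep an 8-bit mark-sweep generation stamp in the regexp's code slot (ms_count() & 0xff always fits in a smi) and flush the compiled code once kRegExpCodeThreshold collections have passed without use. A self-contained sketch of just that arithmetic; AgeStamp and ShouldFlush are hypothetical helper names, the constant and masking come from the diff.

```cpp
#include <cassert>

constexpr int kRegExpCodeThreshold = 5;  // value taken from the diff above

// Hypothetical helper: the stamp written into the regexp's code slot when the
// compiled code is aged (masked to 0-255 so it always fits in a smi).
int AgeStamp(int ms_count) { return ms_count & 0xff; }

// Hypothetical helper: true once kRegExpCodeThreshold mark-sweep collections
// have passed since the stamp was written, i.e. time to flush the code.
bool ShouldFlush(int stamp, int ms_count) {
  return stamp == ((ms_count - kRegExpCodeThreshold) & 0xff);
}

int main() {
  int stamp = AgeStamp(300);         // stamped at GC #300 -> 300 & 0xff == 44
  assert(!ShouldFlush(stamp, 304));  // only 4 GCs later: keep the code
  assert(ShouldFlush(stamp, 305));   // 5 GCs later: flush
  // The 8-bit mask makes the comparison work modulo 256, so counter
  // wrap-around is tolerated at the cost of rare aliasing.
  assert(ShouldFlush(AgeStamp(254), 259));
  return 0;
}
```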
@@ -765,6 +765,275 @@ int NewSpaceVisitor<ConcreteVisitor>::VisitJSApiObject(Map* map,
   return visitor->VisitJSObject(map, object);
 }
 
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitJSFunction(Map* map,
+                                                     JSFunction* object) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
+  JSFunction::BodyDescriptorWeak::IterateBody(object, size, visitor);
+  return size;
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitTransitionArray(
+    Map* map, TransitionArray* array) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  // Visit strong references.
+  if (array->HasPrototypeTransitions()) {
+    visitor->VisitPointer(array, array->GetPrototypeTransitionsSlot());
+  }
+  int num_transitions = TransitionArray::NumberOfTransitions(array);
+  for (int i = 0; i < num_transitions; ++i) {
+    visitor->VisitPointer(array, array->GetKeySlot(i));
+  }
+  // Enqueue the array in linked list of encountered transition arrays if it is
+  // not already in the list.
+  if (array->next_link()->IsUndefined(heap_->isolate())) {
+    array->set_next_link(heap_->encountered_transition_arrays(),
+                         UPDATE_WEAK_WRITE_BARRIER);
+    heap_->set_encountered_transition_arrays(array);
+  }
+  return TransitionArray::BodyDescriptor::SizeOf(map, array);
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitWeakCell(Map* map,
+                                                   WeakCell* weak_cell) {
+  // Enqueue weak cell in linked list of encountered weak collections.
+  // We can ignore weak cells with cleared values because they will always
+  // contain smi zero.
+  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
+    HeapObject* value = HeapObject::cast(weak_cell->value());
+    if (ObjectMarking::IsBlackOrGrey<IncrementalMarking::kAtomicity>(
+            value, collector_->marking_state(value))) {
+      // Weak cells with live values are directly processed here to reduce
+      // the processing time of weak cells during the main GC pause.
+      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
+      collector_->RecordSlot(weak_cell, slot, *slot);
+    } else {
+      // If we do not know about liveness of values of weak cells, we have to
+      // process them when we know the liveness of the whole transitive
+      // closure.
+      weak_cell->set_next(heap_->encountered_weak_cells(),
+                          UPDATE_WEAK_WRITE_BARRIER);
+      heap_->set_encountered_weak_cells(weak_cell);
+    }
+  }
+  return WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitNativeContext(Map* map,
+                                                        Context* context) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  int size = Context::BodyDescriptorWeak::SizeOf(map, context);
+  Context::BodyDescriptorWeak::IterateBody(context, size, visitor);
+  return size;
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitJSWeakCollection(
+    Map* map, JSWeakCollection* weak_collection) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+
+  // Enqueue weak collection in linked list of encountered weak collections.
+  if (weak_collection->next() == heap_->undefined_value()) {
+    weak_collection->set_next(heap_->encountered_weak_collections());
+    heap_->set_encountered_weak_collections(weak_collection);
+  }
+
+  // Skip visiting the backing hash table containing the mappings and the
+  // pointer to the other enqueued weak collections, both are post-processed.
+  int size = JSWeakCollection::BodyDescriptorWeak::SizeOf(map, weak_collection);
+  JSWeakCollection::BodyDescriptorWeak::IterateBody(weak_collection, size,
+                                                    visitor);
+
+  // Partially initialized weak collection is enqueued, but table is ignored.
+  if (!weak_collection->table()->IsHashTable()) return size;
+
+  // Mark the backing hash table without pushing it on the marking stack.
+  Object** slot =
+      HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
+  HeapObject* obj = HeapObject::cast(*slot);
+  collector_->RecordSlot(weak_collection, slot, obj);
+  visitor->MarkObjectWithoutPush(obj);
+  return size;
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitSharedFunctionInfo(
+    Map* map, SharedFunctionInfo* sfi) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  if (sfi->ic_age() != heap_->global_ic_age()) {
+    sfi->ResetForNewContext(heap_->global_ic_age());
+  }
+  int size = SharedFunctionInfo::BodyDescriptor::SizeOf(map, sfi);
+  SharedFunctionInfo::BodyDescriptor::IterateBody(sfi, size, visitor);
+  return size;
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitBytecodeArray(Map* map,
+                                                        BytecodeArray* array) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
+  BytecodeArray::BodyDescriptor::IterateBody(array, size, visitor);
+  array->MakeOlder();
+  return size;
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitCode(Map* map, Code* code) {
+  if (FLAG_age_code && !heap_->isolate()->serializer_enabled()) {
+    code->MakeOlder();
+  }
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  int size = Code::BodyDescriptor::SizeOf(map, code);
+  Code::BodyDescriptor::IterateBody(code, size, visitor);
+  return size;
+}
+
+template <typename ConcreteVisitor>
+void MarkingVisitor<ConcreteVisitor>::MarkMapContents(Map* map) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  // Since descriptor arrays are potentially shared, ensure that only the
+  // descriptors that belong to this map are marked. The first time a non-empty
+  // descriptor array is marked, its header is also visited. The slot holding
+  // the descriptor array will be implicitly recorded when the pointer fields of
+  // this map are visited. Prototype maps don't keep track of transitions, so
+  // just mark the entire descriptor array.
+  if (!map->is_prototype_map()) {
+    DescriptorArray* descriptors = map->instance_descriptors();
+    if (visitor->MarkObjectWithoutPush(descriptors) &&
+        descriptors->length() > 0) {
+      visitor->VisitPointers(descriptors, descriptors->GetFirstElementAddress(),
+                             descriptors->GetDescriptorEndSlot(0));
+    }
+    int start = 0;
+    int end = map->NumberOfOwnDescriptors();
+    if (start < end) {
+      visitor->VisitPointers(descriptors,
+                             descriptors->GetDescriptorStartSlot(start),
+                             descriptors->GetDescriptorEndSlot(end));
+    }
+  }
+
+  // Mark the pointer fields of the Map. Since the transitions array has
+  // been marked already, it is fine that one of these fields contains a
+  // pointer to it.
+  visitor->VisitPointers(
+      map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
+      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitMap(Map* map, Map* object) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+
+  // Clears the cache of ICs related to this map.
+  if (FLAG_cleanup_code_caches_at_gc) {
+    object->ClearCodeCache(heap_);
+  }
+
+  // When map collection is enabled we have to mark through map's transitions
+  // and back pointers in a special way to make these links weak.
+  if (object->CanTransition()) {
+    MarkMapContents(object);
+  } else {
+    visitor->VisitPointers(
+        object, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
+        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
+  }
+  return Map::BodyDescriptor::SizeOf(map, object);
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitJSApiObject(Map* map,
+                                                      JSObject* object) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  if (heap_->local_embedder_heap_tracer()->InUse()) {
+    DCHECK(object->IsJSObject());
+    heap_->TracePossibleWrapper(object);
+  }
+  int size = JSObject::BodyDescriptor::SizeOf(map, object);
+  JSObject::BodyDescriptor::IterateBody(object, size, visitor);
+  return size;
+}
+
+template <typename ConcreteVisitor>
+int MarkingVisitor<ConcreteVisitor>::VisitAllocationSite(
+    Map* map, AllocationSite* object) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  int size = AllocationSite::BodyDescriptorWeak::SizeOf(map, object);
+  AllocationSite::BodyDescriptorWeak::IterateBody(object, size, visitor);
+  return size;
+}
+
+template <typename ConcreteVisitor>
+void MarkingVisitor<ConcreteVisitor>::VisitCodeEntry(JSFunction* host,
+                                                     Address entry_address) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
+  collector_->RecordCodeEntrySlot(host, entry_address, code);
+  visitor->MarkObject(code);
+}
+
+template <typename ConcreteVisitor>
+void MarkingVisitor<ConcreteVisitor>::VisitEmbeddedPointer(Code* host,
+                                                           RelocInfo* rinfo) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
+  HeapObject* object = HeapObject::cast(rinfo->target_object());
+  collector_->RecordRelocSlot(host, rinfo, object);
+  if (!host->IsWeakObject(object)) {
+    visitor->MarkObject(object);
+  }
+}
+
+template <typename ConcreteVisitor>
+void MarkingVisitor<ConcreteVisitor>::VisitCellPointer(Code* host,
+                                                       RelocInfo* rinfo) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  DCHECK(rinfo->rmode() == RelocInfo::CELL);
+  Cell* cell = rinfo->target_cell();
+  collector_->RecordRelocSlot(host, rinfo, cell);
+  if (!host->IsWeakObject(cell)) {
+    visitor->MarkObject(cell);
+  }
+}
+
+template <typename ConcreteVisitor>
+void MarkingVisitor<ConcreteVisitor>::VisitDebugTarget(Code* host,
+                                                       RelocInfo* rinfo) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
+         rinfo->IsPatchedDebugBreakSlotSequence());
+  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
+  collector_->RecordRelocSlot(host, rinfo, target);
+  visitor->MarkObject(target);
+}
+
+template <typename ConcreteVisitor>
+void MarkingVisitor<ConcreteVisitor>::VisitCodeTarget(Code* host,
+                                                      RelocInfo* rinfo) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
+  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
+  collector_->RecordRelocSlot(host, rinfo, target);
+  visitor->MarkObject(target);
+}
+
+template <typename ConcreteVisitor>
+void MarkingVisitor<ConcreteVisitor>::VisitCodeAgeSequence(Code* host,
+                                                           RelocInfo* rinfo) {
+  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
+  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
+  Code* target = rinfo->code_age_stub();
+  DCHECK_NOT_NULL(target);
+  collector_->RecordRelocSlot(host, rinfo, target);
+  visitor->MarkObject(target);
+}
+
 }  // namespace internal
 }  // namespace v8
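
Editor's note: VisitTransitionArray, VisitWeakCell, and VisitJSWeakCollection above all use the same intrusive-list idiom: a visited object is threaded onto a heap-owned "encountered" list through its own next field, so it can be post-processed once the liveness of the full transitive closure is known. A minimal sketch of the idiom; WeakThing and HeapSketch are illustrative stand-ins, not V8 types.

```cpp
#include <cassert>

// Illustrative stand-in for WeakCell / TransitionArray / JSWeakCollection.
struct WeakThing {
  WeakThing* next = nullptr;
  bool enqueued = false;  // stands in for the next_link()->IsUndefined() check
};

// Illustrative stand-in for the Heap's encountered_* list heads.
struct HeapSketch {
  WeakThing* encountered = nullptr;

  void Enqueue(WeakThing* thing) {
    if (thing->enqueued) return;  // already listed: revisiting is idempotent
    thing->next = encountered;    // thread through the object's own field
    encountered = thing;
    thing->enqueued = true;
  }
};

int main() {
  HeapSketch heap;
  WeakThing a, b;
  heap.Enqueue(&a);
  heap.Enqueue(&b);
  heap.Enqueue(&a);                // second visit is a no-op
  assert(heap.encountered == &b);  // LIFO threading
  assert(b.next == &a && a.next == nullptr);
  return 0;
}
```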
@@ -11,7 +11,6 @@
 namespace v8 {
 namespace internal {
 
-
 // We don't record weak slots during marking or scavenges. Instead we do it
 // once when we complete mark-compact cycle. Note that write barrier has no
 // effect if we are already in the middle of compacting mark-sweep cycle and we
@@ -335,6 +335,43 @@ class NewSpaceVisitor : public HeapVisitor<int, ConcreteVisitor> {
   }
 };
 
+template <typename ConcreteVisitor>
+class MarkingVisitor : public HeapVisitor<int, ConcreteVisitor> {
+ public:
+  explicit MarkingVisitor(Heap* heap, MarkCompactCollector* collector)
+      : heap_(heap), collector_(collector) {}
+
+  V8_INLINE bool ShouldVisitMapPointer() { return false; }
+
+  V8_INLINE int VisitJSFunction(Map* map, JSFunction* object);
+  V8_INLINE int VisitWeakCell(Map* map, WeakCell* object);
+  V8_INLINE int VisitTransitionArray(Map* map, TransitionArray* object);
+  V8_INLINE int VisitNativeContext(Map* map, Context* object);
+  V8_INLINE int VisitJSWeakCollection(Map* map, JSWeakCollection* object);
+  V8_INLINE int VisitSharedFunctionInfo(Map* map, SharedFunctionInfo* object);
+  V8_INLINE int VisitBytecodeArray(Map* map, BytecodeArray* object);
+  V8_INLINE int VisitCode(Map* map, Code* object);
+  V8_INLINE int VisitMap(Map* map, Map* object);
+  V8_INLINE int VisitJSApiObject(Map* map, JSObject* object);
+  V8_INLINE int VisitAllocationSite(Map* map, AllocationSite* object);
+
+  // ObjectVisitor implementation.
+  V8_INLINE void VisitCodeEntry(JSFunction* host, Address entry_address) final;
+  V8_INLINE void VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) final;
+  V8_INLINE void VisitCellPointer(Code* host, RelocInfo* rinfo) final;
+  V8_INLINE void VisitDebugTarget(Code* host, RelocInfo* rinfo) final;
+  V8_INLINE void VisitCodeTarget(Code* host, RelocInfo* rinfo) final;
+  V8_INLINE void VisitCodeAgeSequence(Code* host, RelocInfo* rinfo) final;
+  // Skip weak next code link.
+  V8_INLINE void VisitNextCodeLink(Code* host, Object** p) final {}
+
+ protected:
+  V8_INLINE void MarkMapContents(Map* map);
+
+  Heap* heap_;
+  MarkCompactCollector* collector_;
+};
+
 class WeakObjectRetainer;
 
 // A weak list is single linked list where each element has a weak pointer to
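
Editor's note: the new MarkingVisitor derives from HeapVisitor<int, ConcreteVisitor>, whose Visit(map, object) routes each object to the matching typed VisitX method declared above. A hedged sketch of that CRTP dispatch follows; the visitor-id switch, the structs, and LoggingMarkingVisitor are illustrative stand-ins (V8 dispatches on a visitor id recorded in the object's map, not on a bare int).

```cpp
#include <cstdio>

struct Map { int visitor_id; };   // 0 = code-like, anything else = other
struct HeapObject { Map* map; };

// Illustrative CRTP base in the shape of HeapVisitor<ResultType, ConcreteVisitor>.
template <typename ResultType, typename ConcreteVisitor>
class HeapVisitorSketch {
 public:
  ResultType Visit(Map* map, HeapObject* object) {
    // No virtual dispatch: the concrete visitor type is known at compile time.
    ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
    switch (map->visitor_id) {
      case 0:
        return visitor->VisitCode(map, object);
      default:
        return visitor->VisitOther(map, object);
    }
  }
};

class LoggingMarkingVisitor final
    : public HeapVisitorSketch<int, LoggingMarkingVisitor> {
 public:
  int VisitCode(Map*, HeapObject*) { std::puts("visited code"); return 1; }
  int VisitOther(Map*, HeapObject*) { std::puts("visited other"); return 1; }
};

int main() {
  Map code_map{0}, other_map{1};
  HeapObject a{&code_map}, b{&other_map};
  LoggingMarkingVisitor visitor;  // instance-based, as in the diff above
  visitor.Visit(a.map, &a);       // prints "visited code"
  visitor.Visit(b.map, &b);       // prints "visited other"
  return 0;
}
```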