Revert https://codereview.chromium.org/2857713002 and dependencies
Revert "[heap] Make non-atomic markbit operations consistent with atomic ones." This reverts commitdd37366fb5
Revert "[heap] Use atomic marking operations in incremental marking if" This reverts commit1f2c3596e9
Revert "[heap] Prepare IncrementalMarking::VisitObject for concurrent marking." This reverts commit00d1e2cf76
Revert "[heap] Use shared markbits in the concurrent marker." This reverts commitb0db0541ee
https://codereview.chromium.org/2857713002 blocks the current roll:
https://codereview.chromium.org/2857423002/

Doesn't revert cleanly.

NOTRY=true
TBR=ulan@chromium.org

Bug: chromium:694255
Change-Id: Iada35af5c2529cd9e604802700604b16cc30aa2d
Reviewed-on: https://chromium-review.googlesource.com/497387
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#45118}
parent d21326d5c4
commit 0c590f45a8
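All four reverted patches revolve around the shared tri-colour mark bits used by V8's incremental and concurrent markers: each heap object owns two adjacent bits, and marking advances an object white -> grey -> black, with the reverted CLs making those transitions atomic and letting them report whether the calling thread actually performed the transition. The following standalone sketch is illustrative only (it is not V8 code; the MarkBits type and its methods are assumptions made up for this example) and just shows that idea in isolation.

// Minimal sketch of two-bit tri-colour marking with atomic transitions.
#include <atomic>
#include <cstdio>

enum class Color { kWhite, kGrey, kBlack };

struct MarkBits {
  std::atomic<bool> first{false};   // set => grey or black
  std::atomic<bool> second{false};  // set together with first => black

  // Returns true only for the thread that actually flipped white -> grey.
  bool WhiteToGrey() { return !first.exchange(true); }

  // Returns true only for the thread that actually flipped grey -> black.
  bool GreyToBlack() { return first.load() && !second.exchange(true); }

  Color color() const {
    if (!first.load()) return Color::kWhite;
    return second.load() ? Color::kBlack : Color::kGrey;
  }
};

int main() {
  MarkBits bits;
  std::printf("white->grey won: %d\n", bits.WhiteToGrey());  // 1
  std::printf("white->grey won: %d\n", bits.WhiteToGrey());  // 0, already grey
  std::printf("grey->black won: %d\n", bits.GreyToBlack());  // 1
  std::printf("is black: %d\n", bits.color() == Color::kBlack);
  return 0;
}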
@@ -666,7 +666,7 @@ DEFINE_BOOL(age_code, true,
DEFINE_BOOL(incremental_marking, true, "use incremental marking")
DEFINE_BOOL(incremental_marking_wrappers, true,
"use incremental marking for marking wrappers")
DEFINE_BOOL(concurrent_marking, V8_CONCURRENT_MARKING, "use concurrent marking")
DEFINE_BOOL(concurrent_marking, false, "use concurrent marking")
DEFINE_BOOL(trace_concurrent_marking, false, "trace concurrent marking")
DEFINE_INT(min_progress_during_incremental_marking_finalization, 32,
"keep finalizing incremental marking as long as we discover at "
@@ -30,11 +30,6 @@ class ConcurrentMarkingVisitor final
explicit ConcurrentMarkingVisitor(ConcurrentMarkingDeque* deque)
: deque_(deque) {}

bool ShouldVisit(HeapObject* object) override {
return ObjectMarking::GreyToBlack<MarkBit::AccessMode::ATOMIC>(
object, marking_state(object));
}

void VisitPointers(HeapObject* host, Object** start, Object** end) override {
for (Object** p = start; p < end; p++) {
if (!(*p)->IsHeapObject()) continue;
@@ -73,7 +68,7 @@ class ConcurrentMarkingVisitor final
// ===========================================================================

int VisitCode(Map* map, Code* object) override {
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): push the object to the bail-out deque.
return 0;
}

@@ -82,65 +77,58 @@ class ConcurrentMarkingVisitor final
// ===========================================================================

int VisitBytecodeArray(Map* map, BytecodeArray* object) override {
// TODO(ulan): implement iteration of strong fields.
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
return 0;
}

int VisitJSFunction(Map* map, JSFunction* object) override {
// TODO(ulan): implement iteration of strong fields.
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
return 0;
}

int VisitMap(Map* map, Map* object) override {
// TODO(ulan): implement iteration of strong fields.
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
return 0;
}

int VisitNativeContext(Map* map, Context* object) override {
// TODO(ulan): implement iteration of strong fields.
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
return 0;
}

int VisitSharedFunctionInfo(Map* map, SharedFunctionInfo* object) override {
// TODO(ulan): implement iteration of strong fields.
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
return 0;
}

int VisitTransitionArray(Map* map, TransitionArray* object) override {
// TODO(ulan): implement iteration of strong fields.
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
return 0;
}

int VisitWeakCell(Map* map, WeakCell* object) override {
// TODO(ulan): implement iteration of strong fields.
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
return 0;
}

int VisitJSWeakCollection(Map* map, JSWeakCollection* object) override {
// TODO(ulan): implement iteration of strong fields.
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kBailout);
// TODO(ulan): implement iteration of strong fields and push the object to
// the bailout deque.
return 0;
}

void MarkObject(HeapObject* object) {
if (ObjectMarking::WhiteToGrey<MarkBit::AccessMode::ATOMIC>(
object, marking_state(object))) {
deque_->Push(object, MarkingThread::kConcurrent, TargetDeque::kShared);
}
void MarkObject(HeapObject* obj) {
deque_->Push(obj, MarkingThread::kConcurrent, TargetDeque::kShared);
}

private:
MarkingState marking_state(HeapObject* object) const {
return MarkingState::Internal(object);
}

ConcurrentMarkingDeque* deque_;
};

@@ -187,7 +175,7 @@ void ConcurrentMarking::Run() {
TimedScope scope(&time_ms);
HeapObject* object;
while ((object = deque_->Pop(MarkingThread::kConcurrent)) != nullptr) {
bytes_marked += visitor_->Visit(object);
bytes_marked += visitor_->IterateBody(object);
}
}
if (FLAG_trace_concurrent_marking) {
@@ -4270,7 +4270,7 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
void Heap::NotifyObjectLayoutChange(HeapObject* object,
const DisallowHeapAllocation&) {
if (FLAG_incremental_marking && incremental_marking()->IsMarking()) {
incremental_marking()->WhiteToGreyAndPush(object);
incremental_marking()->MarkGrey(object);
}
#ifdef VERIFY_HEAP
DCHECK(pending_layout_change_object_ == nullptr);
@@ -4834,7 +4834,7 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
// promoted objects.
if (heap_->incremental_marking()->black_allocation()) {
Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
heap_->incremental_marking()->WhiteToGreyAndPush(code);
heap_->incremental_marking()->MarkGrey(code);
}
}

@@ -5628,7 +5628,7 @@ void Heap::RegisterExternallyReferencedObject(Object** object) {
HeapObject* heap_object = HeapObject::cast(*object);
DCHECK(Contains(heap_object));
if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
incremental_marking()->WhiteToGreyAndPush(heap_object);
incremental_marking()->MarkGrey(heap_object);
} else {
DCHECK(mark_compact_collector()->in_use());
mark_compact_collector()->MarkObject(heap_object);
@@ -52,13 +52,14 @@ IncrementalMarking::IncrementalMarking(Heap* heap)

bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
HeapObject* value_heap_obj = HeapObject::cast(value);
DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(
value_heap_obj, marking_state(value_heap_obj)));
DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
const bool is_black =
ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj));
DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
marking_state(value_heap_obj)));
DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj));

if (is_black && WhiteToGreyAndPush(value_heap_obj)) {
if (is_black &&
ObjectMarking::IsWhite(value_heap_obj, marking_state(value_heap_obj))) {
WhiteToGreyAndPush(value_heap_obj);
RestartIfNotMarking();
}
return is_compacting_ && is_black;
@@ -129,12 +130,9 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
}
}

bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) {
marking_deque()->Push(obj);
return true;
}
return false;
void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
ObjectMarking::WhiteToGrey(obj, marking_state(obj));
marking_deque()->Push(obj);
}

void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
@@ -154,32 +152,12 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to));
MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));

if (Marking::IsBlack<kAtomicity>(old_mark_bit)) {
if (from->address() + kPointerSize == to->address()) {
// The old and the new markbits overlap. The |to| object has the
// grey color. To make it black, we need to set second bit.
DCHECK(new_mark_bit.Get<kAtomicity>());
new_mark_bit.Next().Set<kAtomicity>();
} else {
bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
DCHECK(success);
USE(success);
}
} else if (Marking::IsGrey<kAtomicity>(old_mark_bit)) {
if (from->address() + kPointerSize == to->address()) {
// The old and the new markbits overlap. The |to| object has the
// white color. To make it black, we need to set both bits.
// Note that Marking::WhiteToGrey does not work here because
// old_mark_bit.Next() can be set by the concurrent marker at any time.
new_mark_bit.Set<kAtomicity>();
new_mark_bit.Next().Set<kAtomicity>();
} else {
bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit);
DCHECK(success);
USE(success);
marking_deque()->Push(to);
RestartIfNotMarking();
}
if (Marking::IsBlack(old_mark_bit)) {
Marking::MarkBlack(new_mark_bit);
} else if (Marking::IsGrey(old_mark_bit)) {
Marking::WhiteToGrey(new_mark_bit);
marking_deque()->Push(to);
RestartIfNotMarking();
}
}

@@ -220,11 +198,11 @@ class IncrementalMarkingMarkingVisitor
} while (scan_until_end && start_offset < object_size);
chunk->set_progress_bar(start_offset);
if (start_offset < object_size) {
if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
if (ObjectMarking::IsGrey(
object, heap->incremental_marking()->marking_state(object))) {
heap->incremental_marking()->marking_deque()->Unshift(object);
} else {
DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
DCHECK(ObjectMarking::IsBlack(
object, heap->incremental_marking()->marking_state(object)));
heap->mark_compact_collector()->UnshiftBlack(object);
}
@@ -249,10 +227,12 @@ class IncrementalMarkingMarkingVisitor
// Mark the object grey if it is white, do not enque it into the marking
// deque.
Heap* heap = map->GetHeap();
bool ignored =
ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>(
heap_obj, heap->incremental_marking()->marking_state(heap_obj));
USE(ignored);
if (ObjectMarking::IsWhite(
heap_obj,
heap->incremental_marking()->marking_state(heap_obj))) {
ObjectMarking::WhiteToGrey(
heap_obj, heap->incremental_marking()->marking_state(heap_obj));
}
}
}
VisitNativeContext(map, context);
@@ -279,28 +259,33 @@ class IncrementalMarkingMarkingVisitor

// Marks the object grey and pushes it on the marking stack.
INLINE(static void MarkObject(Heap* heap, Object* obj)) {
heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
heap->incremental_marking()->MarkGrey(HeapObject::cast(obj));
}

// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>(
heap_object, heap->incremental_marking()->marking_state(heap_object));
if (ObjectMarking::IsWhite(
heap_object,
heap->incremental_marking()->marking_state(heap_object))) {
ObjectMarking::WhiteToBlack(
heap_object, heap->incremental_marking()->marking_state(heap_object));
return true;
}
return false;
}
};

void IncrementalMarking::IterateBlackObject(HeapObject* object) {
if (IsMarking() &&
ObjectMarking::IsBlack<kAtomicity>(object, marking_state(object))) {
if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) {
Page* page = Page::FromAddress(object->address());
if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
// IterateBlackObject requires us to visit the whole object.
page->ResetProgressBar();
}
Map* map = object->map();
WhiteToGreyAndPush(map);
MarkGrey(map);
IncrementalMarkingMarkingVisitor::IterateBody(map, object);
}
}
@@ -324,7 +309,7 @@ class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
Object* obj = *p;
if (!obj->IsHeapObject()) return;

heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
heap_->incremental_marking()->MarkGrey(HeapObject::cast(obj));
}

Heap* heap_;
@@ -650,7 +635,7 @@ void IncrementalMarking::ProcessWeakCells() {
HeapObject* value = HeapObject::cast(weak_cell->value());
// Remove weak cells with live objects from the list, they do not need
// clearing.
if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) {
if (ObjectMarking::IsBlackOrGrey(value, marking_state(value))) {
// Record slot, if value is pointing to an evacuation candidate.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
@@ -681,10 +666,9 @@ bool ShouldRetainMap(Map* map, int age) {
Object* constructor = map->GetConstructor();
Heap* heap = map->GetHeap();
if (!constructor->IsHeapObject() ||
ObjectMarking::IsWhite<IncrementalMarking::kAtomicity>(
HeapObject::cast(constructor),
heap->incremental_marking()->marking_state(
HeapObject::cast(constructor)))) {
ObjectMarking::IsWhite(HeapObject::cast(constructor),
heap->incremental_marking()->marking_state(
HeapObject::cast(constructor)))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
return false;
@@ -714,15 +698,14 @@ void IncrementalMarking::RetainMaps() {
int new_age;
Map* map = Map::cast(cell->value());
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
ObjectMarking::IsWhite<kAtomicity>(map, marking_state(map))) {
ObjectMarking::IsWhite(map, marking_state(map))) {
if (ShouldRetainMap(map, age)) {
WhiteToGreyAndPush(map);
MarkGrey(map);
}
Object* prototype = map->prototype();
if (age > 0 && prototype->IsHeapObject() &&
ObjectMarking::IsWhite<kAtomicity>(
HeapObject::cast(prototype),
marking_state(HeapObject::cast(prototype)))) {
ObjectMarking::IsWhite(HeapObject::cast(prototype),
marking_state(HeapObject::cast(prototype)))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
@@ -813,21 +796,21 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
return nullptr;
}
HeapObject* dest = map_word.ToForwardingAddress();
if (ObjectMarking::IsBlack<kAtomicity>(dest, marking_state(dest))) {
if (ObjectMarking::IsBlack(dest, marking_state(dest))) {
// The object is already processed by the marker.
return nullptr;
}
DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
(obj->IsFiller() &&
ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))));
DCHECK(
ObjectMarking::IsGrey(obj, marking_state(obj)) ||
(obj->IsFiller() && ObjectMarking::IsWhite(obj, marking_state(obj))));
return dest;
} else {
DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) ||
(obj->IsFiller() &&
ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))) ||
ObjectMarking::IsWhite(obj, marking_state(obj))) ||
(MemoryChunk::FromAddress(obj->address())
->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj))));
ObjectMarking::IsBlack(obj, marking_state(obj))));
// Skip one word filler objects that appear on the
// stack when we perform in place array shift.
return (obj->map() == filler_map) ? nullptr : obj;
@@ -835,29 +818,33 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
});
}

bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject* obj) {
if (!obj->IsFixedArray()) return false;
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);
}

void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
MarkGrey(map);

IncrementalMarkingMarkingVisitor::IterateBody(map, obj);

#if ENABLE_SLOW_DCHECKS
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
SLOW_DCHECK(Marking::IsGrey<kAtomicity>(mark_bit) ||
SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
(chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
Marking::IsBlack<kAtomicity>(mark_bit)));
Marking::IsBlack(mark_bit)));
#endif
if (ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj))) {
WhiteToGreyAndPush(map);
IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
} else if (IsFixedArrayWithProgressBar(obj)) {
DCHECK(ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj)));
IncrementalMarkingMarkingVisitor::VisitFixedArrayIncremental(map, obj);
MarkBlack(obj, size);
}

void IncrementalMarking::MarkGrey(HeapObject* object) {
if (ObjectMarking::IsWhite(object, marking_state(object))) {
WhiteToGreyAndPush(object);
}
}

void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
if (ObjectMarking::IsBlack(obj, marking_state(obj))) return;
ObjectMarking::GreyToBlack(obj, marking_state(obj));
}

intptr_t IncrementalMarking::ProcessMarkingDeque(
intptr_t bytes_to_process, ForceCompletionAction completion) {
intptr_t bytes_processed = 0;
@@ -868,7 +855,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque(
// Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects.
if (obj->IsFiller()) {
DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
continue;
}

@@ -923,10 +910,9 @@ void IncrementalMarking::Hurry() {
HeapObject* cache = HeapObject::cast(
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
if (!cache->IsUndefined(heap_->isolate())) {
// Mark the cache black if it is grey.
bool ignored =
ObjectMarking::GreyToBlack<kAtomicity>(cache, marking_state(cache));
USE(ignored);
if (ObjectMarking::IsGrey(cache, marking_state(cache))) {
ObjectMarking::GreyToBlack(cache, marking_state(cache));
}
}
context = Context::cast(context)->next_context_link();
}
@@ -65,6 +65,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
return MarkingState::Internal(chunk);
}

void MarkBlack(HeapObject* object, int size);
void MarkGrey(HeapObject* object);

// Transfers mark bits without requiring proper object headers.
void TransferMark(Heap* heap, HeapObject* from, HeapObject* to);

@@ -79,15 +82,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {

DCHECK(ObjectMarking::IsWhite<access_mode>(to, marking_state(to)));
if (ObjectMarking::IsGrey<access_mode>(from, marking_state(from))) {
bool success =
ObjectMarking::WhiteToGrey<access_mode>(to, marking_state(to));
DCHECK(success);
USE(success);
ObjectMarking::WhiteToGrey<access_mode>(to, marking_state(to));
} else if (ObjectMarking::IsBlack<access_mode>(from, marking_state(from))) {
bool success =
ObjectMarking::WhiteToBlack<access_mode>(to, marking_state(to));
DCHECK(success);
USE(success);
ObjectMarking::WhiteToBlack<access_mode>(to, marking_state(to));
}
}

@@ -182,12 +179,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
static const intptr_t kActivationThreshold = 0;
#endif

#ifdef V8_CONCURRENT_MARKING
static const MarkBit::AccessMode kAtomicity = MarkBit::AccessMode::ATOMIC;
#else
static const MarkBit::AccessMode kAtomicity = MarkBit::AccessMode::NON_ATOMIC;
#endif

void FinalizeSweeping();

size_t Step(size_t bytes_to_process, CompletionAction action,
@@ -219,9 +210,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
void RecordCodeTargetPatch(Address pc, HeapObject* value);

// Returns true if the function succeeds in transitioning the object
// from white to grey.
bool WhiteToGreyAndPush(HeapObject* obj);
void WhiteToGreyAndPush(HeapObject* obj);

inline void SetOldSpacePageFlags(MemoryChunk* chunk) {
SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());
@@ -312,7 +301,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
intptr_t bytes_to_process,
ForceCompletionAction completion = DO_NOT_FORCE_COMPLETION));

INLINE(bool IsFixedArrayWithProgressBar(HeapObject* object));
INLINE(void VisitObject(Map* map, HeapObject* obj, int size));

void IncrementIdleMarkingDelayCounter();
@@ -38,15 +38,19 @@ void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
}

void MarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
PushBlack(obj);
}
}

void MinorMarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj));
PushBlack(obj);
}
}

@@ -1196,10 +1196,12 @@ class StaticYoungGenerationMarkingVisitor
StackLimitCheck check(heap->isolate());
if (check.HasOverflowed()) return false;

if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object))) {
IterateBody(object->map(), object);
}
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object)))
return true;
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object));
IterateBody(object->map(), object);
return true;
}
};
@@ -1235,7 +1237,11 @@ class MarkCompactMarkingVisitor
// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
return ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
return true;
}
return false;
}

// Mark object pointed to by p.
@@ -1253,15 +1259,14 @@ class MarkCompactMarkingVisitor
HeapObject* obj)) {
#ifdef DEBUG
DCHECK(collector->heap()->Contains(obj));
DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
#endif
if (ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj))) {
Map* map = obj->map();
Heap* heap = obj->GetHeap();
ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
// Mark the map pointer and the body.
heap->mark_compact_collector()->MarkObject(map);
IterateBody(map, obj);
}
Map* map = obj->map();
Heap* heap = obj->GetHeap();
ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
// Mark the map pointer and the body.
heap->mark_compact_collector()->MarkObject(map);
IterateBody(map, obj);
}

// Visit all unmarked objects pointed to by [start, end).
@@ -1279,6 +1284,8 @@ class MarkCompactMarkingVisitor
if (!o->IsHeapObject()) continue;
collector->RecordSlot(object, p, o);
HeapObject* obj = HeapObject::cast(o);
if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)))
continue;
VisitUnmarkedObject(collector, obj);
}
return true;
@@ -1475,12 +1482,16 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {

if (!collector_->heap()->InNewSpace(object)) return;

if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object))) {
Map* map = object->map();
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
collector_->EmptyMarkingDeque();
}
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object)))
return;

Map* map = object->map();
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object));
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);

collector_->EmptyMarkingDeque();
}

MinorMarkCompactCollector* collector_;
@@ -1521,16 +1532,22 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor,

HeapObject* object = HeapObject::cast(*p);

if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object))) {
Map* map = object->map();
// Mark the map pointer and body, and push them on the marking stack.
collector_->MarkObject(map);
MarkCompactMarkingVisitor::IterateBody(map, object);
// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
collector_->EmptyMarkingDeque();
}
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object)))
return;

Map* map = object->map();
// Mark the object.
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object));

// Mark the map pointer and body, and push them on the marking stack.
collector_->MarkObject(map);
MarkCompactMarkingVisitor::IterateBody(map, object);

// Mark all the objects reachable from the map and body. May leave
// overflowed objects in the heap.
collector_->EmptyMarkingDeque();
}

MarkCompactCollector* collector_;
@@ -1701,7 +1718,8 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
Map* filler_map = heap()->one_pointer_filler_map();
for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
if ((object->map() != filler_map) &&
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object))) {
ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
PushBlack(object);
if (marking_deque()->IsFull()) return;
}
@@ -1713,10 +1731,8 @@ void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
LiveObjectIterator<kGreyObjects> it(p, MarkingState::Internal(p));
HeapObject* object = NULL;
while ((object = it.Next()) != NULL) {
bool success =
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
DCHECK(success);
USE(success);
DCHECK(ObjectMarking::IsGrey(object, MarkingState::Internal(object)));
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
PushBlack(object);
if (marking_deque()->IsFull()) return;
}
@@ -2279,12 +2295,15 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
StringTable* string_table = heap()->string_table();
// Mark the string table itself.
if (ObjectMarking::WhiteToBlack(string_table,
MarkingState::Internal(string_table))) {
// Explicitly mark the prefix.
string_table->IteratePrefix(visitor);
ProcessMarkingDeque();
if (ObjectMarking::IsWhite(string_table,
MarkingState::Internal(string_table))) {
// String table could have already been marked by visiting the handles list.
ObjectMarking::WhiteToBlack(string_table,
MarkingState::Internal(string_table));
}
// Explicitly mark the prefix.
string_table->IteratePrefix(visitor);
ProcessMarkingDeque();
}

void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {
@@ -82,6 +82,8 @@ class ObjectMarking : public AllStatic {
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool BlackToGrey(HeapObject* obj,
const MarkingState& state) {
DCHECK(
(access_mode == MarkBit::ATOMIC || IsBlack<access_mode>(obj, state)));
MarkBit markbit = MarkBitFrom(obj, state);
if (!Marking::BlackToGrey<access_mode>(markbit)) return false;
state.IncrementLiveBytes<access_mode>(-obj->Size());
@@ -91,19 +93,24 @@ class ObjectMarking : public AllStatic {
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToGrey(HeapObject* obj,
const MarkingState& state) {
DCHECK(
(access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state)));
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state));
}

template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToBlack(HeapObject* obj,
const MarkingState& state) {
return ObjectMarking::WhiteToGrey<access_mode>(obj, state) &&
ObjectMarking::GreyToBlack<access_mode>(obj, state);
DCHECK(
(access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state)));
if (!ObjectMarking::WhiteToGrey<access_mode>(obj, state)) return false;
return ObjectMarking::GreyToBlack<access_mode>(obj, state);
}

template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool GreyToBlack(HeapObject* obj,
const MarkingState& state) {
DCHECK((access_mode == MarkBit::ATOMIC || IsGrey<access_mode>(obj, state)));
MarkBit markbit = MarkBitFrom(obj, state);
if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
state.IncrementLiveBytes<access_mode>(obj->Size());
@@ -38,16 +38,12 @@ class MarkBit {
}
}

// The function returns true if it succeeded to
// transition the bit from 0 to 1.
template <AccessMode mode = NON_ATOMIC>
inline bool Set();

template <AccessMode mode = NON_ATOMIC>
inline bool Get();

// The function returns true if it succeeded to
// transition the bit from 1 to 0.
template <AccessMode mode = NON_ATOMIC>
inline bool Clear();

@@ -61,9 +57,8 @@ class MarkBit {

template <>
inline bool MarkBit::Set<MarkBit::NON_ATOMIC>() {
base::Atomic32 old_value = *cell_;
*cell_ = old_value | mask_;
return (old_value & mask_) == 0;
*cell_ |= mask_;
return true;
}

template <>
@@ -91,9 +86,8 @@ inline bool MarkBit::Get<MarkBit::ATOMIC>() {

template <>
inline bool MarkBit::Clear<MarkBit::NON_ATOMIC>() {
base::Atomic32 old_value = *cell_;
*cell_ = old_value & ~mask_;
return (old_value & mask_) == mask_;
*cell_ &= ~mask_;
return true;
}

template <>
@@ -418,17 +412,24 @@ class Marking : public AllStatic {

template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
INLINE(static bool WhiteToGrey(MarkBit markbit)) {
DCHECK(mode == MarkBit::ATOMIC || IsWhite(markbit));
return markbit.Set<mode>();
}

// Warning: this method is not safe in general in concurrent scenarios.
// If you know that nobody else will change the bits on the given location
// then you may use it.
template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
INLINE(static bool WhiteToBlack(MarkBit markbit)) {
return markbit.Set<mode>() && markbit.Next().Set<mode>();
INLINE(static void WhiteToBlack(MarkBit markbit)) {
DCHECK(mode == MarkBit::ATOMIC || IsWhite(markbit));
markbit.Set<mode>();
markbit.Next().Set<mode>();
}

template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
INLINE(static bool GreyToBlack(MarkBit markbit)) {
return markbit.Get<mode>() && markbit.Next().Set<mode>();
DCHECK(mode == MarkBit::ATOMIC || IsGrey(markbit));
return markbit.Next().Set<mode>();
}

enum ObjectColor {
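For context on the MarkBit hunks above: the removed variant of MarkBit::Set captured the old cell value so the caller could tell whether it performed the 0 -> 1 transition, which matters once several marker threads share one bitmap word; a plain read-modify-write loses that information. The snippet below is an illustrative sketch only (not V8 code; SetBitAtomically and its parameters are assumptions for this example) of how a compare-and-swap loop can both set the bit and report who set it first.

// Hypothetical bitmap cell, analogous in spirit to MarkBit's cell_/mask_.
#include <atomic>
#include <cstdint>

bool SetBitAtomically(std::atomic<uint32_t>* cell, uint32_t mask) {
  uint32_t old_value = cell->load(std::memory_order_relaxed);
  while (!(old_value & mask)) {
    if (cell->compare_exchange_weak(old_value, old_value | mask,
                                    std::memory_order_relaxed)) {
      return true;  // This thread flipped the bit from 0 to 1.
    }
    // CAS failure refreshed old_value; loop re-checks whether the bit is set.
  }
  return false;  // Some other thread had already set the bit.
}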
@@ -637,7 +637,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
}

template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject* object) {
ResultType HeapVisitor<ResultType, ConcreteVisitor>::IterateBody(
HeapObject* object) {
Map* map = object->map();
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
switch (static_cast<VisitorId>(map->visitor_id())) {
@@ -668,28 +669,14 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit(HeapObject* object) {
return ResultType();
}

template <typename ResultType, typename ConcreteVisitor>
void HeapVisitor<ResultType, ConcreteVisitor>::VisitMapPointer(
HeapObject* host, HeapObject** map) {
static_cast<ConcreteVisitor*>(this)->VisitPointer(
host, reinterpret_cast<Object**>(map));
}

template <typename ResultType, typename ConcreteVisitor>
bool HeapVisitor<ResultType, ConcreteVisitor>::ShouldVisit(HeapObject* object) {
return true;
}

#define VISIT(type) \
template <typename ResultType, typename ConcreteVisitor> \
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##type( \
Map* map, type* object) { \
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this); \
if (!visitor->ShouldVisit(object)) return ResultType(); \
int size = type::BodyDescriptor::SizeOf(map, object); \
visitor->VisitMapPointer(object, object->map_slot()); \
type::BodyDescriptor::IterateBody(object, size, visitor); \
return static_cast<ResultType>(size); \
#define VISIT(type) \
template <typename ResultType, typename ConcreteVisitor> \
ResultType HeapVisitor<ResultType, ConcreteVisitor>::Visit##type( \
Map* map, type* object) { \
int size = type::BodyDescriptor::SizeOf(map, object); \
type::BodyDescriptor::IterateBody(object, size, \
static_cast<ConcreteVisitor*>(this)); \
return static_cast<ResultType>(size); \
}
TYPED_VISITOR_ID_LIST(VISIT)
#undef VISIT
@@ -697,10 +684,7 @@ TYPED_VISITOR_ID_LIST(VISIT)
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate(
Map* map, ConsString* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = ConsString::BodyDescriptor::SizeOf(map, object);
visitor->VisitMapPointer(object, object->map_slot());
ConsString::BodyDescriptor::IterateBody(object, size,
static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size);
@@ -709,10 +693,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitShortcutCandidate(
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitNativeContext(
Map* map, Context* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = Context::BodyDescriptor::SizeOf(map, object);
visitor->VisitMapPointer(object, object->map_slot());
Context::BodyDescriptor::IterateBody(object, size,
static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size);
@@ -721,20 +702,14 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitNativeContext(
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitDataObject(
Map* map, HeapObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = map->instance_size();
visitor->VisitMapPointer(object, object->map_slot());
return static_cast<ResultType>(size);
}

template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast(
Map* map, JSObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = JSObject::FastBodyDescriptor::SizeOf(map, object);
visitor->VisitMapPointer(object, object->map_slot());
JSObject::FastBodyDescriptor::IterateBody(
object, size, static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size);
@@ -742,10 +717,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSObjectFast(
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitJSApiObject(
Map* map, JSObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = JSObject::BodyDescriptor::SizeOf(map, object);
visitor->VisitMapPointer(object, object->map_slot());
JSObject::BodyDescriptor::IterateBody(object, size,
static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size);
@@ -753,10 +725,7 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
Map* map, HeapObject* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
int size = map->instance_size();
visitor->VisitMapPointer(object, object->map_slot());
StructBodyDescriptor::IterateBody(object, size,
static_cast<ConcreteVisitor*>(this));
return static_cast<ResultType>(size);
@@ -764,9 +733,6 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitFreeSpace(
Map* map, FreeSpace* object) {
ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
if (!visitor->ShouldVisit(object)) return ResultType();
visitor->VisitMapPointer(object, object->map_slot());
return static_cast<ResultType>(FreeSpace::cast(object)->size());
}

@@ -395,16 +395,9 @@ VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
template <typename ResultType, typename ConcreteVisitor>
class HeapVisitor : public ObjectVisitor {
public:
ResultType Visit(HeapObject* object);
ResultType IterateBody(HeapObject* object);

protected:
// A guard predicate for visiting the object.
// If it returns false then the default implementations of the Visit*
// functions bailout from iterating the object pointers.
virtual bool ShouldVisit(HeapObject* object);
// A callback for visiting the map pointer in the object header.
virtual void VisitMapPointer(HeapObject* host, HeapObject** map);

#define VISIT(type) virtual ResultType Visit##type(Map* map, type* object);
TYPED_VISITOR_ID_LIST(VISIT)
#undef VISIT
@@ -1525,9 +1525,6 @@ void HeapObject::set_map_no_write_barrier(Map* value) {
set_map_word(MapWord::FromMap(value));
}

HeapObject** HeapObject::map_slot() {
return reinterpret_cast<HeapObject**>(FIELD_ADDR(this, kMapOffset));
}

MapWord HeapObject::map_word() const {
return MapWord(
@@ -1669,9 +1669,6 @@ class HeapObject: public Object {
// information.
inline Map* map() const;
inline void set_map(Map* value);

inline HeapObject** map_slot();

// The no-write-barrier version. This is OK if the object is white and in
// new space, or if the value is an immortal immutable object, like the maps
// of primitive (non-JS) objects like strings, heap numbers etc.