Revert of [heap] Provide ObjectMarking with marking transitions (patchset #5 id:80001 of https://codereview.chromium.org/2644523002/ )
Reason for revert:
Breaks the tree: https://build.chromium.org/p/client.v8/builders/V8%20Linux%20-%20gc%20stress/builds/8349/steps/Mjsunit/logs/compiled-module-seria..
Original issue's description:
> [heap] Provide ObjectMarking with marking transitions
>
> BUG=chromium:651354
>
> Review-Url: https://codereview.chromium.org/2644523002
> Cr-Commit-Position: refs/heads/master@{#42531}
> Committed: cbb8929e97
TBR=hpayer@chromium.org,mlippautz@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:651354
Review-Url: https://codereview.chromium.org/2647873002
Cr-Commit-Position: refs/heads/master@{#42533}
This commit is contained in:
parent 70000946eb
commit 3258bef0ad
@@ -19,7 +19,8 @@ void LocalArrayBufferTracker::Free() {
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
     JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
-    if ((free_mode == kFreeAll) || ObjectMarking::IsWhite(buffer)) {
+    if ((free_mode == kFreeAll) ||
+        Marking::IsWhite(ObjectMarking::MarkBitFrom(buffer))) {
      const size_t len = it->second;
      heap_->isolate()->array_buffer_allocator()->Free(buffer->backing_store(),
                                                       len);
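This first hunk shows the shape of nearly every change in the revert: the reverted CL had introduced object-based helpers such as ObjectMarking::IsWhite(obj), while the restored code fetches the MarkBit explicitly and asks Marking for its color. A minimal, self-contained sketch of the two call styles follows; the types are toy stand-ins, not V8's actual declarations.

// Toy model of the two marking-query styles; the bit encoding is illustrative only.
struct MarkBit {
  bool first = false;   // grey or black when set
  bool second = false;  // black when set together with first
};

struct Marking {
  static bool IsWhite(MarkBit m) { return !m.first && !m.second; }
  static bool IsGrey(MarkBit m) { return m.first && !m.second; }
  static bool IsBlack(MarkBit m) { return m.first && m.second; }
};

struct HeapObject { MarkBit mark; };

struct ObjectMarking {
  // Mark-bit style (restored by this revert): the caller fetches the bit.
  static MarkBit MarkBitFrom(HeapObject* obj) { return obj->mark; }
  // Object-based style (introduced by the reverted CL): the lookup is folded in.
  static bool IsWhite(HeapObject* obj) { return Marking::IsWhite(MarkBitFrom(obj)); }
};

The object-based helpers also updated the per-page live-byte counters on transitions, which is why many restored call sites below re-add explicit MemoryChunk::IncrementLiveBytes calls.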
@@ -3183,7 +3183,7 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by) {
     lo_space()->AdjustLiveBytes(by);
   } else if (!in_heap_iterator() &&
              !mark_compact_collector()->sweeping_in_progress() &&
-             ObjectMarking::IsBlack(object)) {
+             Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) {
    DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
    MemoryChunk::IncrementLiveBytes(object, by);
  }
@@ -3193,7 +3193,6 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by) {
 FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
                                          int elements_to_trim) {
   CHECK_NOT_NULL(object);
-  DCHECK(CanMoveObjectStart(object));
   DCHECK(!object->IsFixedTypedArrayBase());
   DCHECK(!object->IsByteArray());
   const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
@@ -3244,6 +3243,7 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
   // Initialize header of the trimmed array. Since left trimming is only
   // performed on pages which are not concurrently swept creating a filler
   // object does not require synchronization.
+  DCHECK(CanMoveObjectStart(object));
   Object** former_start = HeapObject::RawField(object, 0);
   int new_start_index = elements_to_trim * (element_size / kPointerSize);
   former_start[new_start_index] = map;
@@ -3314,7 +3314,7 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
   // Clear the mark bits of the black area that belongs now to the filler.
   // This is an optimization. The sweeper will release black fillers anyway.
   if (incremental_marking()->black_allocation() &&
-      ObjectMarking::IsBlackOrGrey(filler)) {
+      Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(filler))) {
    Page* page = Page::FromAddress(new_end);
    page->markbits()->ClearRange(
        page->AddressToMarkbitIndex(new_end),
@@ -4309,19 +4309,17 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
   // Hence we have to color all objects of the reservation first black to avoid
   // unnecessary marking deque load.
   if (incremental_marking()->black_allocation()) {
-    for (int i = CODE_SPACE; i < Serializer::kNumberOfSpaces; i++) {
+    for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
      const Heap::Reservation& res = reservations[i];
      for (auto& chunk : res) {
        Address addr = chunk.start;
        while (addr < chunk.end) {
          HeapObject* obj = HeapObject::FromAddress(addr);
-          ObjectMarking::WhiteToBlack(obj);
+          Marking::MarkBlack(ObjectMarking::MarkBitFrom(obj));
          addr += obj->Size();
        }
      }
    }
    // Iterate black objects in old space, code space, map space, and large
    // object space for side effects.
    for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
      const Heap::Reservation& res = reservations[i];
      for (auto& chunk : res) {
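When black allocation is active, the restored loop colors every object in the deserializer's reservations black by walking each chunk from its start address to its end, advancing by the object size. A simplified, self-contained sketch of that address walk, assuming a toy layout in which every object begins with a 4-byte size field:

#include <cstdint>
#include <cstring>

struct Chunk { uint8_t* start; uint8_t* end; };

uint32_t ObjectSize(uint8_t* addr) {
  uint32_t size;
  std::memcpy(&size, addr, sizeof(size));  // stand-in for obj->Size()
  return size;
}

// Walk a reservation chunk object by object and mark each one black.
void ColorChunkBlack(Chunk chunk, void (*mark_black)(uint8_t*)) {
  uint8_t* addr = chunk.start;
  while (addr < chunk.end) {
    mark_black(addr);          // Marking::MarkBlack(ObjectMarking::MarkBitFrom(obj))
    addr += ObjectSize(addr);  // addr += obj->Size()
  }
}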
@@ -4900,7 +4898,8 @@ void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
   // it would be a violation of the invariant to record it's slots.
   bool record_slots = false;
   if (incremental_marking()->IsCompacting()) {
-    record_slots = ObjectMarking::IsBlack(target);
+    MarkBit mark_bit = ObjectMarking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
  }

  IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots);
@@ -6138,7 +6137,8 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {

   bool SkipObject(HeapObject* object) {
     if (object->IsFiller()) return true;
-    return ObjectMarking::IsWhite(object);
+    MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
+    return Marking::IsWhite(mark_bit);
  }

 private:
@@ -6150,8 +6150,6 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
     for (Object** p = start; p < end; p++) {
       if (!(*p)->IsHeapObject()) continue;
       HeapObject* obj = HeapObject::cast(*p);
-      // Use Marking instead of ObjectMarking to avoid adjusting live bytes
-      // counter.
      MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
      if (Marking::IsWhite(mark_bit)) {
        Marking::WhiteToBlack(mark_bit);
@@ -39,12 +39,15 @@ IncrementalMarking::IncrementalMarking(Heap* heap)

 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
   HeapObject* value_heap_obj = HeapObject::cast(value);
-  DCHECK(!ObjectMarking::IsImpossible(value_heap_obj));
-  DCHECK(!ObjectMarking::IsImpossible(obj));
-  const bool is_black = ObjectMarking::IsBlack(obj);
+  MarkBit value_bit = ObjectMarking::MarkBitFrom(value_heap_obj);
+  DCHECK(!Marking::IsImpossible(value_bit));

-  if (is_black && ObjectMarking::IsWhite(value_heap_obj)) {
-    WhiteToGreyAndPush(value_heap_obj);
+  MarkBit obj_bit = ObjectMarking::MarkBitFrom(obj);
+  DCHECK(!Marking::IsImpossible(obj_bit));
+  bool is_black = Marking::IsBlack(obj_bit);
+
+  if (is_black && Marking::IsWhite(value_bit)) {
+    WhiteToGreyAndPush(value_heap_obj, value_bit);
    RestartIfNotMarking();
  }
  return is_compacting_ && is_black;
@@ -115,8 +118,9 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
   }
 }

-void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
-  ObjectMarking::WhiteToGrey(obj);

+void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit) {
+  Marking::WhiteToGrey(mark_bit);
  heap_->mark_compact_collector()->marking_deque()->Push(obj);
}

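WhiteToGreyAndPush is the basic discovery step of incremental marking: flip a white object to grey and put it on the marking deque so its fields are scanned later. The signature change above only moves the mark-bit lookup back to the caller; the grey-then-push protocol is unchanged. A rough, self-contained sketch with std::deque standing in for V8's marking deque:

#include <deque>

enum class Color { kWhite, kGrey, kBlack };

struct Obj { Color color = Color::kWhite; };

// Grey the object and queue it for scanning, mirroring WhiteToGreyAndPush.
void WhiteToGreyAndPush(Obj* obj, std::deque<Obj*>* marking_deque) {
  if (obj->color != Color::kWhite) return;  // the real code asserts whiteness instead
  obj->color = Color::kGrey;
  marking_deque->push_back(obj);
}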
@@ -124,13 +128,16 @@ void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
 static void MarkObjectGreyDoNotEnqueue(Object* obj) {
   if (obj->IsHeapObject()) {
     HeapObject* heap_obj = HeapObject::cast(obj);
-    ObjectMarking::AnyToGrey(heap_obj);
+    MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(obj));
+    if (Marking::IsBlack(mark_bit)) {
+      MemoryChunk::IncrementLiveBytes(heap_obj, -heap_obj->Size());
+    }
+    Marking::AnyToGrey(mark_bit);
  }
}

void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
                                      HeapObject* to) {
  DCHECK(MemoryChunk::FromAddress(from->address())->SweepingDone());
  // This is only used when resizing an object.
  DCHECK(MemoryChunk::FromAddress(from->address()) ==
         MemoryChunk::FromAddress(to->address()));
@@ -151,12 +158,11 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,

   if (Marking::IsBlack(old_mark_bit)) {
     Marking::BlackToWhite(old_mark_bit);
-    Marking::WhiteToBlack(new_mark_bit);
+    Marking::MarkBlack(new_mark_bit);
    return;
  } else if (Marking::IsGrey(old_mark_bit)) {
    Marking::GreyToWhite(old_mark_bit);
-    Marking::WhiteToGrey(new_mark_bit);
-    heap->mark_compact_collector()->marking_deque()->Push(to);
+    heap->incremental_marking()->WhiteToGreyAndPush(to, new_mark_bit);
    heap->incremental_marking()->RestartIfNotMarking();
  }

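TransferMark is used when an object is resized in place (for example by left-trimming), so the color recorded for the old start address must be carried over to the new one, and a grey object must be re-queued so the marker still visits it. A condensed sketch of the restored logic, using a toy color enum instead of real mark bits:

#include <deque>

enum class Color { kWhite, kGrey, kBlack };

struct Obj { Color color = Color::kWhite; };

// Carry the mark from the old object start to the new object start.
void TransferMark(Obj* from, Obj* to, std::deque<Obj*>* marking_deque) {
  if (from->color == Color::kBlack) {
    from->color = Color::kWhite;   // Marking::BlackToWhite(old_mark_bit)
    to->color = Color::kBlack;     // Marking::MarkBlack(new_mark_bit)
  } else if (from->color == Color::kGrey) {
    from->color = Color::kWhite;   // Marking::GreyToWhite(old_mark_bit)
    to->color = Color::kGrey;      // WhiteToGreyAndPush(to, new_mark_bit)
    marking_deque->push_back(to);
  }
}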
@ -204,10 +210,10 @@ class IncrementalMarkingMarkingVisitor
|
||||
} while (scan_until_end && start_offset < object_size);
|
||||
chunk->set_progress_bar(start_offset);
|
||||
if (start_offset < object_size) {
|
||||
if (ObjectMarking::IsGrey(object)) {
|
||||
if (Marking::IsGrey(ObjectMarking::MarkBitFrom(object))) {
|
||||
heap->mark_compact_collector()->marking_deque()->Unshift(object);
|
||||
} else {
|
||||
DCHECK(ObjectMarking::IsBlack(object));
|
||||
DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
|
||||
heap->mark_compact_collector()->UnshiftBlack(object);
|
||||
}
|
||||
heap->incremental_marking()->NotifyIncompleteScanOfObject(
|
||||
@ -259,8 +265,10 @@ class IncrementalMarkingMarkingVisitor
|
||||
// Returns true if object needed marking and false otherwise.
|
||||
INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
|
||||
HeapObject* heap_object = HeapObject::cast(obj);
|
||||
if (ObjectMarking::IsWhite(heap_object)) {
|
||||
ObjectMarking::WhiteToBlack(heap_object);
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object);
|
||||
if (Marking::IsWhite(mark_bit)) {
|
||||
Marking::MarkBlack(mark_bit);
|
||||
MemoryChunk::IncrementLiveBytes(heap_object, heap_object->Size());
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@ -268,7 +276,7 @@ class IncrementalMarkingMarkingVisitor
|
||||
};
|
||||
|
||||
void IncrementalMarking::IterateBlackObject(HeapObject* object) {
|
||||
if (IsMarking() && ObjectMarking::IsBlack(object)) {
|
||||
if (IsMarking() && Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) {
|
||||
Page* page = Page::FromAddress(object->address());
|
||||
if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
|
||||
// IterateBlackObject requires us to visit the whole object.
|
||||
@ -653,7 +661,8 @@ bool ShouldRetainMap(Map* map, int age) {
|
||||
}
|
||||
Object* constructor = map->GetConstructor();
|
||||
if (!constructor->IsHeapObject() ||
|
||||
ObjectMarking::IsWhite(HeapObject::cast(constructor))) {
|
||||
Marking::IsWhite(
|
||||
ObjectMarking::MarkBitFrom(HeapObject::cast(constructor)))) {
|
||||
// The constructor is dead, no new objects with this map can
|
||||
// be created. Do not retain this map.
|
||||
return false;
|
||||
@ -682,14 +691,16 @@ void IncrementalMarking::RetainMaps() {
|
||||
int age = Smi::cast(retained_maps->Get(i + 1))->value();
|
||||
int new_age;
|
||||
Map* map = Map::cast(cell->value());
|
||||
MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
|
||||
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
|
||||
ObjectMarking::IsWhite(map)) {
|
||||
Marking::IsWhite(map_mark)) {
|
||||
if (ShouldRetainMap(map, age)) {
|
||||
MarkGrey(heap(), map);
|
||||
}
|
||||
Object* prototype = map->prototype();
|
||||
if (age > 0 && prototype->IsHeapObject() &&
|
||||
ObjectMarking::IsWhite(HeapObject::cast(prototype))) {
|
||||
Marking::IsWhite(
|
||||
ObjectMarking::MarkBitFrom(HeapObject::cast(prototype)))) {
|
||||
// The prototype is not marked, age the map.
|
||||
new_age = age - 1;
|
||||
} else {
|
||||
@ -796,12 +807,15 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
|
||||
// them.
|
||||
if (map_word.IsForwardingAddress()) {
|
||||
HeapObject* dest = map_word.ToForwardingAddress();
|
||||
if (ObjectMarking::IsBlack(dest)) continue;
|
||||
if (Marking::IsBlack(ObjectMarking::MarkBitFrom(dest))) continue;
|
||||
array[new_top] = dest;
|
||||
new_top = ((new_top + 1) & mask);
|
||||
DCHECK(new_top != marking_deque->bottom());
|
||||
DCHECK(ObjectMarking::IsGrey(obj) ||
|
||||
(obj->IsFiller() && ObjectMarking::IsWhite(obj)));
|
||||
#ifdef DEBUG
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
|
||||
DCHECK(Marking::IsGrey(mark_bit) ||
|
||||
(obj->IsFiller() && Marking::IsWhite(mark_bit)));
|
||||
#endif
|
||||
}
|
||||
} else if (obj->map() != filler_map) {
|
||||
// Skip one word filler objects that appear on the
|
||||
@ -809,11 +823,14 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
|
||||
array[new_top] = obj;
|
||||
new_top = ((new_top + 1) & mask);
|
||||
DCHECK(new_top != marking_deque->bottom());
|
||||
DCHECK(ObjectMarking::IsGrey(obj) ||
|
||||
(obj->IsFiller() && ObjectMarking::IsWhite(obj)) ||
|
||||
(MemoryChunk::FromAddress(obj->address())
|
||||
->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
|
||||
ObjectMarking::IsBlack(obj)));
|
||||
#ifdef DEBUG
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
|
||||
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
|
||||
DCHECK(Marking::IsGrey(mark_bit) ||
|
||||
(obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
|
||||
(chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
|
||||
Marking::IsBlack(mark_bit)));
|
||||
#endif
|
||||
}
|
||||
}
|
||||
marking_deque->set_top(new_top);
|
||||
@@ -837,14 +854,17 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
 }

 void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) {
-  if (ObjectMarking::IsWhite(object)) {
-    heap->incremental_marking()->WhiteToGreyAndPush(object);
+  MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
+  if (Marking::IsWhite(mark_bit)) {
+    heap->incremental_marking()->WhiteToGreyAndPush(object, mark_bit);
  }
}

void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
-  if (ObjectMarking::IsBlack(obj)) return;
-  ObjectMarking::GreyToBlack(obj);
+  MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
+  if (Marking::IsBlack(mark_bit)) return;
+  Marking::GreyToBlack(mark_bit);
+  MemoryChunk::IncrementLiveBytes(obj, size);
}

intptr_t IncrementalMarking::ProcessMarkingDeque(
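MarkBlack above illustrates why the revert re-adds explicit MemoryChunk::IncrementLiveBytes calls: the plain Marking transitions only flip bits, so the caller has to account the object's size against the page's live-byte counter itself. A small self-contained sketch of that pairing, with a map of page ids standing in for per-page counters:

#include <cstddef>
#include <map>

enum class Color { kWhite, kGrey, kBlack };

struct Obj { Color color = Color::kGrey; std::size_t size = 0; int page = 0; };

std::map<int, long> live_bytes;  // stand-in for MemoryChunk::IncrementLiveBytes

void GreyToBlackWithAccounting(Obj* obj) {
  if (obj->color == Color::kBlack) return;  // already black, already counted
  obj->color = Color::kBlack;               // Marking::GreyToBlack(mark_bit)
  live_bytes[obj->page] += static_cast<long>(obj->size);
}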
@@ -859,7 +879,8 @@ intptr_t IncrementalMarking::ProcessMarkingDeque(
     // Left trimming may result in white filler objects on the marking deque.
     // Ignore these objects.
     if (obj->IsFiller()) {
-      DCHECK(ObjectMarking::IsImpossible(obj) || ObjectMarking::IsWhite(obj));
+      DCHECK(Marking::IsImpossible(ObjectMarking::MarkBitFrom(obj)) ||
+             Marking::IsWhite(ObjectMarking::MarkBitFrom(obj)));
      continue;
    }

@ -914,8 +935,10 @@ void IncrementalMarking::Hurry() {
|
||||
HeapObject* cache = HeapObject::cast(
|
||||
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
|
||||
if (!cache->IsUndefined(heap_->isolate())) {
|
||||
if (ObjectMarking::IsGrey(cache)) {
|
||||
ObjectMarking::GreyToBlack(cache);
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(cache);
|
||||
if (Marking::IsGrey(mark_bit)) {
|
||||
Marking::GreyToBlack(mark_bit);
|
||||
MemoryChunk::IncrementLiveBytes(cache, cache->Size());
|
||||
}
|
||||
}
|
||||
context = Context::cast(context)->next_context_link();
|
||||
|
@@ -158,7 +158,7 @@ class IncrementalMarking {
   void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
   void RecordCodeTargetPatch(Address pc, HeapObject* value);

-  void WhiteToGreyAndPush(HeapObject* obj);
+  void WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit);

  inline void SetOldSpacePageFlags(MemoryChunk* chunk) {
    SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());
@ -13,38 +13,48 @@ namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
void MarkCompactCollector::PushBlack(HeapObject* obj) {
|
||||
DCHECK(ObjectMarking::IsBlack(obj));
|
||||
if (!marking_deque()->Push(obj)) {
|
||||
ObjectMarking::BlackToGrey(obj);
|
||||
DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
|
||||
if (marking_deque()->Push(obj)) {
|
||||
MemoryChunk::IncrementLiveBytes(obj, obj->Size());
|
||||
} else {
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
|
||||
Marking::BlackToGrey(mark_bit);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
|
||||
DCHECK(ObjectMarking::IsBlack(obj));
|
||||
DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(obj)));
|
||||
if (!marking_deque()->Unshift(obj)) {
|
||||
ObjectMarking::BlackToGrey(obj);
|
||||
MemoryChunk::IncrementLiveBytes(obj, -obj->Size());
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
|
||||
Marking::BlackToGrey(mark_bit);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void MarkCompactCollector::MarkObject(HeapObject* obj, MarkBit mark_bit) {
|
||||
DCHECK(ObjectMarking::MarkBitFrom(obj) == mark_bit);
|
||||
if (ObjectMarking::IsWhite(obj)) {
|
||||
ObjectMarking::WhiteToBlack(obj);
|
||||
if (Marking::IsWhite(mark_bit)) {
|
||||
Marking::WhiteToBlack(mark_bit);
|
||||
DCHECK(obj->GetIsolate()->heap()->Contains(obj));
|
||||
PushBlack(obj);
|
||||
}
|
||||
}
|
||||
|
||||
void MarkCompactCollector::SetMark(HeapObject* obj) {
|
||||
DCHECK(ObjectMarking::IsWhite(obj));
|
||||
ObjectMarking::WhiteToBlack(obj);
|
||||
|
||||
void MarkCompactCollector::SetMark(HeapObject* obj, MarkBit mark_bit) {
|
||||
DCHECK(Marking::IsWhite(mark_bit));
|
||||
DCHECK(ObjectMarking::MarkBitFrom(obj) == mark_bit);
|
||||
Marking::WhiteToBlack(mark_bit);
|
||||
MemoryChunk::IncrementLiveBytes(obj, obj->Size());
|
||||
}
|
||||
|
||||
|
||||
bool MarkCompactCollector::IsMarked(Object* obj) {
|
||||
DCHECK(obj->IsHeapObject());
|
||||
return ObjectMarking::IsBlackOrGrey(HeapObject::cast(obj));
|
||||
HeapObject* heap_object = HeapObject::cast(obj);
|
||||
return Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(heap_object));
|
||||
}
|
||||
|
||||
|
||||
@@ -54,7 +64,7 @@ void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
   Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
   if (target_page->IsEvacuationCandidate() &&
       !ShouldSkipEvacuationSlotRecording(object)) {
-    DCHECK(IsMarked(object));
+    DCHECK(Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(object)));
    RememberedSet<OLD_TO_OLD>::Insert(source_page,
                                      reinterpret_cast<Address>(slot));
  }
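The PushBlack and UnshiftBlack changes earlier in this file show the deque-overflow fallback: an object is marked black and pushed onto the marking deque, but if the deque is full it is demoted back to grey (and its size un-counted) so the overflow scan can rediscover it later. A self-contained sketch of that fallback, assuming a bounded worklist:

#include <cstddef>
#include <vector>

enum class Color { kWhite, kGrey, kBlack };

struct Obj { Color color = Color::kBlack; std::size_t size = 0; };

struct BoundedDeque {
  std::vector<Obj*> slots;
  std::size_t capacity = 16;
  bool Push(Obj* obj) {
    if (slots.size() >= capacity) return false;  // full: caller must recover
    slots.push_back(obj);
    return true;
  }
};

long live_bytes = 0;  // stand-in for the page's live-byte counter

// Account the object only if the push succeeded; otherwise demote it to grey
// so overflow handling finds it again, mirroring the restored PushBlack.
void PushBlack(Obj* obj, BoundedDeque* deque) {
  if (deque->Push(obj)) {
    live_bytes += static_cast<long>(obj->size);
  } else {
    obj->color = Color::kGrey;  // Marking::BlackToGrey(mark_bit)
  }
}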
@@ -107,7 +107,7 @@ static void VerifyMarking(Heap* heap, Address bottom, Address top) {
     // One word fillers at the end of a black area can be grey.
     if (MarkCompactCollector::IsMarked(object) &&
         object->map() != heap->one_pointer_filler_map()) {
-      CHECK(ObjectMarking::IsBlack(object));
+      CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
      CHECK(current >= next_object_must_be_here_or_later);
      object->Iterate(&visitor);
      next_object_must_be_here_or_later = current + object->Size();
@ -348,7 +348,8 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
|
||||
|
||||
LargeObjectIterator it(heap_->lo_space());
|
||||
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
|
||||
CHECK(ObjectMarking::IsWhite(obj));
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
|
||||
CHECK(Marking::IsWhite(mark_bit));
|
||||
CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
|
||||
}
|
||||
}
|
||||
@ -397,7 +398,7 @@ void MarkCompactCollector::ClearMarkbits() {
|
||||
|
||||
LargeObjectIterator it(heap_->lo_space());
|
||||
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
|
||||
ObjectMarking::ClearMarkBit(obj);
|
||||
Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj));
|
||||
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
|
||||
chunk->ResetProgressBar();
|
||||
chunk->ResetLiveBytes();
|
||||
@ -908,7 +909,8 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
|
||||
SharedFunctionInfo* shared = candidate->shared();
|
||||
|
||||
Code* code = shared->code();
|
||||
if (ObjectMarking::IsWhite(code)) {
|
||||
MarkBit code_mark = ObjectMarking::MarkBitFrom(code);
|
||||
if (Marking::IsWhite(code_mark)) {
|
||||
if (FLAG_trace_code_flushing && shared->is_compiled()) {
|
||||
PrintF("[code-flushing clears: ");
|
||||
shared->ShortPrint();
|
||||
@ -926,7 +928,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
|
||||
candidate->set_code(lazy_compile);
|
||||
}
|
||||
} else {
|
||||
DCHECK(ObjectMarking::IsBlack(code));
|
||||
DCHECK(Marking::IsBlack(code_mark));
|
||||
candidate->set_code(code);
|
||||
}
|
||||
|
||||
@ -960,7 +962,8 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
|
||||
ClearNextCandidate(candidate);
|
||||
|
||||
Code* code = candidate->code();
|
||||
if (ObjectMarking::IsWhite(code)) {
|
||||
MarkBit code_mark = ObjectMarking::MarkBitFrom(code);
|
||||
if (Marking::IsWhite(code_mark)) {
|
||||
if (FLAG_trace_code_flushing && candidate->is_compiled()) {
|
||||
PrintF("[code-flushing clears: ");
|
||||
candidate->ShortPrint();
|
||||
@ -1100,8 +1103,9 @@ class StaticYoungGenerationMarkingVisitor
|
||||
StackLimitCheck check(heap->isolate());
|
||||
if (check.HasOverflowed()) return false;
|
||||
|
||||
if (ObjectMarking::IsBlackOrGrey(object)) return true;
|
||||
heap->mark_compact_collector()->SetMark(object);
|
||||
MarkBit mark = ObjectMarking::MarkBitFrom(object);
|
||||
if (Marking::IsBlackOrGrey(mark)) return true;
|
||||
heap->mark_compact_collector()->SetMark(object, mark);
|
||||
IterateBody(object->map(), object);
|
||||
return true;
|
||||
}
|
||||
@ -1139,8 +1143,9 @@ class MarkCompactMarkingVisitor
|
||||
// Marks the object black without pushing it on the marking stack.
|
||||
// Returns true if object needed marking and false otherwise.
|
||||
INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
|
||||
if (ObjectMarking::IsWhite(object)) {
|
||||
heap->mark_compact_collector()->SetMark(object);
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
|
||||
if (Marking::IsWhite(mark_bit)) {
|
||||
heap->mark_compact_collector()->SetMark(object, mark_bit);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@ -1166,7 +1171,8 @@ class MarkCompactMarkingVisitor
|
||||
#endif
|
||||
Map* map = obj->map();
|
||||
Heap* heap = obj->GetHeap();
|
||||
heap->mark_compact_collector()->SetMark(obj);
|
||||
MarkBit mark = ObjectMarking::MarkBitFrom(obj);
|
||||
heap->mark_compact_collector()->SetMark(obj, mark);
|
||||
// Mark the map pointer and the body.
|
||||
MarkBit map_mark = ObjectMarking::MarkBitFrom(map);
|
||||
heap->mark_compact_collector()->MarkObject(map, map_mark);
|
||||
@ -1188,7 +1194,8 @@ class MarkCompactMarkingVisitor
|
||||
if (!o->IsHeapObject()) continue;
|
||||
collector->RecordSlot(object, p, o);
|
||||
HeapObject* obj = HeapObject::cast(o);
|
||||
if (ObjectMarking::IsBlackOrGrey(obj)) continue;
|
||||
MarkBit mark = ObjectMarking::MarkBitFrom(obj);
|
||||
if (Marking::IsBlackOrGrey(mark)) continue;
|
||||
VisitUnmarkedObject(collector, obj);
|
||||
}
|
||||
return true;
|
||||
@ -1221,7 +1228,7 @@ class MarkCompactMarkingVisitor
|
||||
// was marked through the compilation cache before marker reached JSRegExp
|
||||
// object.
|
||||
FixedArray* data = FixedArray::cast(re->data());
|
||||
if (ObjectMarking::IsBlackOrGrey(data)) {
|
||||
if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(data))) {
|
||||
Object** slot =
|
||||
data->data_start() + JSRegExp::saved_code_index(is_one_byte);
|
||||
heap->mark_compact_collector()->RecordSlot(data, slot, code);
|
||||
@ -1387,11 +1394,12 @@ class RootMarkingVisitor : public ObjectVisitor {
|
||||
!collector_->heap()->InNewSpace(object))
|
||||
return;
|
||||
|
||||
if (ObjectMarking::IsBlackOrGrey(object)) return;
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
|
||||
if (Marking::IsBlackOrGrey(mark_bit)) return;
|
||||
|
||||
Map* map = object->map();
|
||||
// Mark the object.
|
||||
collector_->SetMark(object);
|
||||
collector_->SetMark(object, mark_bit);
|
||||
|
||||
switch (mode) {
|
||||
case MarkCompactMode::FULL: {
|
||||
@ -1429,7 +1437,7 @@ class StringTableCleaner : public ObjectVisitor {
|
||||
for (Object** p = start; p < end; p++) {
|
||||
Object* o = *p;
|
||||
if (o->IsHeapObject()) {
|
||||
if (ObjectMarking::IsWhite(HeapObject::cast(o))) {
|
||||
if (Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(o)))) {
|
||||
if (finalize_external_strings) {
|
||||
if (o->IsExternalString()) {
|
||||
heap_->FinalizeExternalString(String::cast(*p));
|
||||
@ -1470,8 +1478,9 @@ typedef StringTableCleaner<true, false> ExternalStringTableCleaner;
|
||||
class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
|
||||
public:
|
||||
virtual Object* RetainAs(Object* object) {
|
||||
DCHECK(!ObjectMarking::IsGrey(HeapObject::cast(object)));
|
||||
if (ObjectMarking::IsBlack(HeapObject::cast(object))) {
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(HeapObject::cast(object));
|
||||
DCHECK(!Marking::IsGrey(mark_bit));
|
||||
if (Marking::IsBlack(mark_bit)) {
|
||||
return object;
|
||||
} else if (object->IsAllocationSite() &&
|
||||
!(AllocationSite::cast(object)->IsZombie())) {
|
||||
@ -1499,8 +1508,9 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
|
||||
|
||||
Map* filler_map = heap()->one_pointer_filler_map();
|
||||
for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
|
||||
if ((object->map() != filler_map) && ObjectMarking::IsGrey(object)) {
|
||||
ObjectMarking::GreyToBlack(object);
|
||||
MarkBit markbit = ObjectMarking::MarkBitFrom(object);
|
||||
if ((object->map() != filler_map) && Marking::IsGrey(markbit)) {
|
||||
Marking::GreyToBlack(markbit);
|
||||
PushBlack(object);
|
||||
if (marking_deque()->IsFull()) return;
|
||||
}
|
||||
@ -1512,8 +1522,9 @@ void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
|
||||
LiveObjectIterator<kGreyObjects> it(p);
|
||||
HeapObject* object = NULL;
|
||||
while ((object = it.Next()) != NULL) {
|
||||
DCHECK(ObjectMarking::IsGrey(object));
|
||||
ObjectMarking::GreyToBlack(object);
|
||||
MarkBit markbit = ObjectMarking::MarkBitFrom(object);
|
||||
DCHECK(Marking::IsGrey(markbit));
|
||||
Marking::GreyToBlack(markbit);
|
||||
PushBlack(object);
|
||||
if (marking_deque()->IsFull()) return;
|
||||
}
|
||||
@ -1963,7 +1974,9 @@ void MarkCompactCollector::DiscoverGreyObjectsInNewSpace() {
|
||||
bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
|
||||
Object* o = *p;
|
||||
if (!o->IsHeapObject()) return false;
|
||||
return ObjectMarking::IsWhite(HeapObject::cast(o));
|
||||
HeapObject* heap_object = HeapObject::cast(o);
|
||||
MarkBit mark = ObjectMarking::MarkBitFrom(heap_object);
|
||||
return Marking::IsWhite(mark);
|
||||
}
|
||||
|
||||
|
||||
@ -1971,16 +1984,19 @@ bool MarkCompactCollector::IsUnmarkedHeapObjectWithHeap(Heap* heap,
|
||||
Object** p) {
|
||||
Object* o = *p;
|
||||
DCHECK(o->IsHeapObject());
|
||||
return ObjectMarking::IsWhite(HeapObject::cast(o));
|
||||
HeapObject* heap_object = HeapObject::cast(o);
|
||||
MarkBit mark = ObjectMarking::MarkBitFrom(heap_object);
|
||||
return Marking::IsWhite(mark);
|
||||
}
|
||||
|
||||
void MarkCompactCollector::MarkStringTable(
|
||||
RootMarkingVisitor<MarkCompactMode::FULL>* visitor) {
|
||||
StringTable* string_table = heap()->string_table();
|
||||
// Mark the string table itself.
|
||||
if (ObjectMarking::IsWhite(string_table)) {
|
||||
MarkBit string_table_mark = ObjectMarking::MarkBitFrom(string_table);
|
||||
if (Marking::IsWhite(string_table_mark)) {
|
||||
// String table could have already been marked by visiting the handles list.
|
||||
SetMark(string_table);
|
||||
SetMark(string_table, string_table_mark);
|
||||
}
|
||||
// Explicitly mark the prefix.
|
||||
string_table->IteratePrefix(visitor);
|
||||
@ -1989,7 +2005,8 @@ void MarkCompactCollector::MarkStringTable(
|
||||
|
||||
|
||||
void MarkCompactCollector::MarkAllocationSite(AllocationSite* site) {
|
||||
SetMark(site);
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(site);
|
||||
SetMark(site, mark_bit);
|
||||
}
|
||||
|
||||
void MarkCompactCollector::MarkRoots(
|
||||
@ -2052,7 +2069,7 @@ void MarkCompactCollector::EmptyMarkingDeque() {
|
||||
DCHECK(!object->IsFiller());
|
||||
DCHECK(object->IsHeapObject());
|
||||
DCHECK(heap()->Contains(object));
|
||||
DCHECK(!ObjectMarking::IsWhite(object));
|
||||
DCHECK(!Marking::IsWhite(ObjectMarking::MarkBitFrom(object)));
|
||||
|
||||
Map* map = object->map();
|
||||
switch (mode) {
|
||||
@ -2062,7 +2079,7 @@ void MarkCompactCollector::EmptyMarkingDeque() {
|
||||
MarkCompactMarkingVisitor::IterateBody(map, object);
|
||||
} break;
|
||||
case MarkCompactMode::YOUNG_GENERATION: {
|
||||
DCHECK(ObjectMarking::IsBlack(object));
|
||||
DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
|
||||
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
|
||||
} break;
|
||||
}
|
||||
@ -2263,10 +2280,10 @@ class MarkCompactCollector::ObjectStatsVisitor
|
||||
}
|
||||
|
||||
bool Visit(HeapObject* obj) override {
|
||||
if (ObjectMarking::IsBlack(obj)) {
|
||||
if (Marking::IsBlack(ObjectMarking::MarkBitFrom(obj))) {
|
||||
live_collector_.CollectStatistics(obj);
|
||||
} else {
|
||||
DCHECK(!ObjectMarking::IsGrey(obj));
|
||||
DCHECK(!Marking::IsGrey(ObjectMarking::MarkBitFrom(obj)));
|
||||
dead_collector_.CollectStatistics(obj);
|
||||
}
|
||||
return true;
|
||||
@ -2322,10 +2339,11 @@ SlotCallbackResult MarkCompactCollector::CheckAndMarkObject(
|
||||
// has to be in ToSpace.
|
||||
DCHECK(heap->InToSpace(object));
|
||||
HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
|
||||
if (ObjectMarking::IsBlackOrGrey(heap_object)) {
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(heap_object);
|
||||
if (Marking::IsBlackOrGrey(mark_bit)) {
|
||||
return KEEP_SLOT;
|
||||
}
|
||||
heap->mark_compact_collector()->SetMark(heap_object);
|
||||
heap->mark_compact_collector()->SetMark(heap_object, mark_bit);
|
||||
StaticYoungGenerationMarkingVisitor::IterateBody(heap_object->map(),
|
||||
heap_object);
|
||||
return KEEP_SLOT;
|
||||
@ -2335,7 +2353,8 @@ SlotCallbackResult MarkCompactCollector::CheckAndMarkObject(
|
||||
|
||||
static bool IsUnmarkedObject(Heap* heap, Object** p) {
|
||||
DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p));
|
||||
return heap->InNewSpace(*p) && !ObjectMarking::IsBlack(HeapObject::cast(*p));
|
||||
return heap->InNewSpace(*p) &&
|
||||
!Marking::IsBlack(ObjectMarking::MarkBitFrom(HeapObject::cast(*p)));
|
||||
}
|
||||
|
||||
void MarkCompactCollector::MarkLiveObjectsInYoungGeneration() {
|
||||
@ -2617,11 +2636,11 @@ void MarkCompactCollector::ClearSimpleMapTransitions(
|
||||
while (weak_cell_obj != Smi::kZero) {
|
||||
WeakCell* weak_cell = WeakCell::cast(weak_cell_obj);
|
||||
Map* map = Map::cast(weak_cell->value());
|
||||
DCHECK(ObjectMarking::IsWhite(map));
|
||||
DCHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(map)));
|
||||
Object* potential_parent = map->constructor_or_backpointer();
|
||||
if (potential_parent->IsMap()) {
|
||||
Map* parent = Map::cast(potential_parent);
|
||||
if (ObjectMarking::IsBlackOrGrey(parent) &&
|
||||
if (Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent)) &&
|
||||
parent->raw_transitions() == weak_cell) {
|
||||
ClearSimpleMapTransition(parent, map);
|
||||
}
|
||||
@ -2660,7 +2679,8 @@ void MarkCompactCollector::ClearFullMapTransitions() {
|
||||
if (num_transitions > 0) {
|
||||
Map* map = array->GetTarget(0);
|
||||
Map* parent = Map::cast(map->constructor_or_backpointer());
|
||||
bool parent_is_alive = ObjectMarking::IsBlackOrGrey(parent);
|
||||
bool parent_is_alive =
|
||||
Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(parent));
|
||||
DescriptorArray* descriptors =
|
||||
parent_is_alive ? parent->instance_descriptors() : nullptr;
|
||||
bool descriptors_owner_died =
|
||||
@ -2685,7 +2705,7 @@ bool MarkCompactCollector::CompactTransitionArray(
|
||||
for (int i = 0; i < num_transitions; ++i) {
|
||||
Map* target = transitions->GetTarget(i);
|
||||
DCHECK_EQ(target->constructor_or_backpointer(), map);
|
||||
if (ObjectMarking::IsWhite(target)) {
|
||||
if (Marking::IsWhite(ObjectMarking::MarkBitFrom(target))) {
|
||||
if (descriptors != nullptr &&
|
||||
target->instance_descriptors() == descriptors) {
|
||||
descriptors_owner_died = true;
|
||||
@ -2860,7 +2880,8 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
|
||||
if (cell_value->IsHeapObject() &&
|
||||
MarkCompactCollector::IsMarked(HeapObject::cast(cell_value))) {
|
||||
// Resurrect the cell.
|
||||
SetMark(value);
|
||||
MarkBit mark = ObjectMarking::MarkBitFrom(value);
|
||||
SetMark(value, mark);
|
||||
Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
|
||||
RecordSlot(value, slot, *slot);
|
||||
slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
|
||||
@ -3402,7 +3423,7 @@ int MarkCompactCollector::Sweeper::RawSweep(
|
||||
HeapObject* object = NULL;
|
||||
|
||||
while ((object = it.Next()) != NULL) {
|
||||
DCHECK(ObjectMarking::IsBlack(object));
|
||||
DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
|
||||
Address free_end = object->address();
|
||||
if (free_end != free_start) {
|
||||
CHECK_GT(free_end, free_start);
|
||||
@ -3492,7 +3513,8 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
|
||||
DCHECK(compacting_);
|
||||
|
||||
// If the object is white than no slots were recorded on it yet.
|
||||
if (ObjectMarking::IsWhite(code)) return;
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(code);
|
||||
if (Marking::IsWhite(mark_bit)) return;
|
||||
|
||||
// Ignore all slots that might have been recorded in the body of the
|
||||
// deoptimized code object. Assumption: no slots will be recorded for
|
||||
@ -3513,7 +3535,7 @@ static void VerifyAllBlackObjects(MemoryChunk* page) {
|
||||
LiveObjectIterator<kAllLiveObjects> it(page);
|
||||
HeapObject* object = NULL;
|
||||
while ((object = it.Next()) != NULL) {
|
||||
CHECK(ObjectMarking::IsBlack(object));
|
||||
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
|
||||
}
|
||||
}
|
||||
#endif // VERIFY_HEAP
|
||||
@ -3528,7 +3550,7 @@ bool MarkCompactCollector::VisitLiveObjects(MemoryChunk* page, Visitor* visitor,
|
||||
LiveObjectIterator<kBlackObjects> it(page);
|
||||
HeapObject* object = nullptr;
|
||||
while ((object = it.Next()) != nullptr) {
|
||||
DCHECK(ObjectMarking::IsBlack(object));
|
||||
DCHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
|
||||
if (!visitor->Visit(object)) {
|
||||
if (mode == kClearMarkbits) {
|
||||
page->markbits()->ClearRange(
|
||||
@ -3725,7 +3747,8 @@ class PointerUpdateJobTraits {
|
||||
// slot has been recorded multiple times in the remembered set. Since
|
||||
// there is no forwarding information present we need to check the
|
||||
// markbits to determine liveness.
|
||||
if (ObjectMarking::IsBlack(reinterpret_cast<HeapObject*>(slot_reference)))
|
||||
if (Marking::IsBlack(ObjectMarking::MarkBitFrom(
|
||||
reinterpret_cast<HeapObject*>(slot_reference))))
|
||||
return KEEP_SLOT;
|
||||
} else {
|
||||
DCHECK(!heap->InNewSpace(slot_reference));
|
||||
@ -4059,7 +4082,8 @@ void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
|
||||
Code* host =
|
||||
isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
|
||||
pc);
|
||||
if (ObjectMarking::IsBlack(host)) {
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(host);
|
||||
if (Marking::IsBlack(mark_bit)) {
|
||||
RelocInfo rinfo(isolate(), pc, RelocInfo::CODE_TARGET, 0, host);
|
||||
// The target is always in old space, we don't have to record the slot in
|
||||
// the old-to-new remembered set.
|
||||
|
@ -46,76 +46,6 @@ class ObjectMarking : public AllStatic {
|
||||
return Marking::Color(ObjectMarking::MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static bool IsImpossible(HeapObject* obj) {
|
||||
return Marking::IsImpossible(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static bool IsBlack(HeapObject* obj) {
|
||||
return Marking::IsBlack(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static bool IsWhite(HeapObject* obj) {
|
||||
return Marking::IsWhite(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static bool IsGrey(HeapObject* obj) {
|
||||
return Marking::IsGrey(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static bool IsBlackOrGrey(HeapObject* obj) {
|
||||
return Marking::IsBlackOrGrey(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static void ClearMarkBit(HeapObject* obj) {
|
||||
Marking::MarkWhite(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static void BlackToWhite(HeapObject* obj) {
|
||||
DCHECK(IsBlack(obj));
|
||||
MarkBit markbit = MarkBitFrom(obj);
|
||||
Marking::BlackToWhite(markbit);
|
||||
MemoryChunk::IncrementLiveBytes(obj, -obj->Size());
|
||||
}
|
||||
|
||||
V8_INLINE static void GreyToWhite(HeapObject* obj) {
|
||||
DCHECK(IsGrey(obj));
|
||||
Marking::GreyToWhite(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static void BlackToGrey(HeapObject* obj) {
|
||||
DCHECK(IsBlack(obj));
|
||||
MarkBit markbit = MarkBitFrom(obj);
|
||||
Marking::BlackToGrey(markbit);
|
||||
MemoryChunk::IncrementLiveBytes(obj, -obj->Size());
|
||||
}
|
||||
|
||||
V8_INLINE static void WhiteToGrey(HeapObject* obj) {
|
||||
DCHECK(IsWhite(obj));
|
||||
Marking::WhiteToGrey(MarkBitFrom(obj));
|
||||
}
|
||||
|
||||
V8_INLINE static void WhiteToBlack(HeapObject* obj) {
|
||||
DCHECK(IsWhite(obj));
|
||||
MarkBit markbit = MarkBitFrom(obj);
|
||||
Marking::WhiteToBlack(markbit);
|
||||
MemoryChunk::IncrementLiveBytes(obj, obj->Size());
|
||||
}
|
||||
|
||||
V8_INLINE static void GreyToBlack(HeapObject* obj) {
|
||||
DCHECK(IsGrey(obj));
|
||||
MarkBit markbit = MarkBitFrom(obj);
|
||||
Marking::GreyToBlack(markbit);
|
||||
MemoryChunk::IncrementLiveBytes(obj, obj->Size());
|
||||
}
|
||||
|
||||
V8_INLINE static void AnyToGrey(HeapObject* obj) {
|
||||
MarkBit markbit = MarkBitFrom(obj);
|
||||
if (Marking::IsBlack(markbit)) {
|
||||
MemoryChunk::IncrementLiveBytes(obj, -obj->Size());
|
||||
}
|
||||
Marking::AnyToGrey(markbit);
|
||||
}
|
||||
|
||||
private:
|
||||
DISALLOW_IMPLICIT_CONSTRUCTORS(ObjectMarking);
|
||||
};
|
||||
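The block removed above is the whole object-based transition API introduced by the reverted CL: each helper asserted the source color, performed the bit transition, and adjusted the page's live-byte counter whenever an object entered or left the black state. A compact, self-contained sketch of that bundling with toy types, not the removed V8 code:

#include <cassert>
#include <cstddef>

enum class Color { kWhite, kGrey, kBlack };

struct Obj { Color color = Color::kWhite; std::size_t size = 0; };

long live_bytes = 0;  // stand-in for MemoryChunk::IncrementLiveBytes

// WhiteToBlack-style transition: check the precondition, flip, account the size.
void WhiteToBlack(Obj* obj) {
  assert(obj->color == Color::kWhite);
  obj->color = Color::kBlack;
  live_bytes += static_cast<long>(obj->size);
}

// BlackToGrey-style transition: the object no longer counts as live-black.
void BlackToGrey(Obj* obj) {
  assert(obj->color == Color::kBlack);
  obj->color = Color::kGrey;
  live_bytes -= static_cast<long>(obj->size);
}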
@ -665,7 +595,7 @@ class MarkCompactCollector {
|
||||
|
||||
// Marks the object black assuming that it is not yet marked.
|
||||
// This is for non-incremental marking only.
|
||||
INLINE(void SetMark(HeapObject* obj));
|
||||
INLINE(void SetMark(HeapObject* obj, MarkBit mark_bit));
|
||||
|
||||
// Mark the heap roots and all objects reachable from them.
|
||||
void MarkRoots(RootMarkingVisitor<MarkCompactMode::FULL>* visitor);
|
||||
|
@ -534,7 +534,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
|
||||
|
||||
// Code is either on stack, in compilation cache or referenced
|
||||
// by optimized version of function.
|
||||
if (ObjectMarking::IsBlackOrGrey(function->code())) {
|
||||
MarkBit code_mark = ObjectMarking::MarkBitFrom(function->code());
|
||||
if (Marking::IsBlackOrGrey(code_mark)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -557,7 +558,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
|
||||
Heap* heap, SharedFunctionInfo* shared_info) {
|
||||
// Code is either on stack, in compilation cache or referenced
|
||||
// by optimized version of function.
|
||||
if (ObjectMarking::IsBlackOrGrey(shared_info->code())) {
|
||||
MarkBit code_mark = ObjectMarking::MarkBitFrom(shared_info->code());
|
||||
if (Marking::IsBlackOrGrey(code_mark)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -200,8 +200,9 @@ class ScavengingVisitor : public StaticVisitorBase {
|
||||
reinterpret_cast<base::AtomicWord>(target));
|
||||
|
||||
if (object_contents == POINTER_OBJECT) {
|
||||
heap->promotion_queue()->insert(target, object_size,
|
||||
ObjectMarking::IsBlack(object));
|
||||
heap->promotion_queue()->insert(
|
||||
target, object_size,
|
||||
Marking::IsBlack(ObjectMarking::MarkBitFrom(object)));
|
||||
}
|
||||
heap->IncrementPromotedObjectsSize(object_size);
|
||||
return true;
|
||||
@ -245,7 +246,8 @@ class ScavengingVisitor : public StaticVisitorBase {
|
||||
DCHECK(map_word.IsForwardingAddress());
|
||||
HeapObject* target = map_word.ToForwardingAddress();
|
||||
|
||||
if (ObjectMarking::IsBlack(target)) {
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(target);
|
||||
if (Marking::IsBlack(mark_bit)) {
|
||||
// This object is black and it might not be rescanned by marker.
|
||||
// We should explicitly record code entry slot for compaction because
|
||||
// promotion queue processing (IteratePromotedObjectPointers) will
|
||||
|
@ -1459,7 +1459,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
|
||||
// All the interior pointers should be contained in the heap.
|
||||
int size = object->Size();
|
||||
object->IterateBody(map->instance_type(), size, visitor);
|
||||
if (ObjectMarking::IsBlack(object)) {
|
||||
if (Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) {
|
||||
black_size += size;
|
||||
}
|
||||
|
||||
@ -2990,8 +2990,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
|
||||
AllocationStep(object->address(), object_size);
|
||||
|
||||
if (heap()->incremental_marking()->black_allocation()) {
|
||||
// We cannot use ObjectMarking here as the object still lacks a size.
|
||||
Marking::WhiteToBlack(ObjectMarking::MarkBitFrom(object));
|
||||
Marking::MarkBlack(ObjectMarking::MarkBitFrom(object));
|
||||
MemoryChunk::IncrementLiveBytes(object, object_size);
|
||||
}
|
||||
return object;
|
||||
@ -3040,8 +3039,9 @@ void LargeObjectSpace::ClearMarkingStateOfLiveObjects() {
|
||||
LargePage* current = first_page_;
|
||||
while (current != NULL) {
|
||||
HeapObject* object = current->GetObject();
|
||||
DCHECK(ObjectMarking::IsBlack(object));
|
||||
ObjectMarking::BlackToWhite(object);
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
|
||||
DCHECK(Marking::IsBlack(mark_bit));
|
||||
Marking::BlackToWhite(mark_bit);
|
||||
Page::FromAddress(object->address())->ResetProgressBar();
|
||||
Page::FromAddress(object->address())->ResetLiveBytes();
|
||||
current = current->next_page();
|
||||
@ -3086,8 +3086,9 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
|
||||
LargePage* current = first_page_;
|
||||
while (current != NULL) {
|
||||
HeapObject* object = current->GetObject();
|
||||
DCHECK(!ObjectMarking::IsGrey(object));
|
||||
if (ObjectMarking::IsBlack(object)) {
|
||||
MarkBit mark_bit = ObjectMarking::MarkBitFrom(object);
|
||||
DCHECK(!Marking::IsGrey(mark_bit));
|
||||
if (Marking::IsBlack(mark_bit)) {
|
||||
Address free_start;
|
||||
if ((free_start = current->GetAddressToShrink()) != 0) {
|
||||
// TODO(hpayer): Perform partial free concurrently.
|
||||
@ -3222,7 +3223,7 @@ void Page::Print() {
|
||||
unsigned mark_size = 0;
|
||||
for (HeapObject* object = objects.Next(); object != NULL;
|
||||
object = objects.Next()) {
|
||||
bool is_marked = ObjectMarking::IsBlackOrGrey(object);
|
||||
bool is_marked = Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(object));
|
||||
PrintF(" %c ", (is_marked ? '!' : ' ')); // Indent a little.
|
||||
if (is_marked) {
|
||||
mark_size += object->Size();
|
||||
|
@@ -2121,7 +2121,7 @@ void WeakCell::initialize(HeapObject* val) {
   // We just have to execute the generational barrier here because we never
   // mark through a weak cell and collect evacuation candidates when we process
   // all weak cells.
-  WriteBarrierMode mode = ObjectMarking::IsBlack(this)
+  WriteBarrierMode mode = Marking::IsBlack(ObjectMarking::MarkBitFrom(this))
                              ? UPDATE_WRITE_BARRIER
                              : UPDATE_WEAK_WRITE_BARRIER;
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
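The final hunk selects the write-barrier mode for WeakCell::initialize from the cell's own color: a black holder will not be rescanned, so it needs the full barrier, while a non-black holder can use the weaker variant. A schematic sketch of that choice, with simplified names:

enum class WriteBarrierMode { kUpdateWriteBarrier, kUpdateWeakWriteBarrier };

// Choose barrier strength from the holder's mark color, as in the hunk above.
WriteBarrierMode BarrierModeFor(bool holder_is_black) {
  return holder_is_black ? WriteBarrierMode::kUpdateWriteBarrier
                         : WriteBarrierMode::kUpdateWeakWriteBarrier;
}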