[heap] Enforce explicit MarkingState

Require the use of MarkingState when going through ObjectMarking
and friends.

BUG=chromium:651354

Review-Url: https://codereview.chromium.org/2770253002
Cr-Commit-Position: refs/heads/master@{#44123}
Author: mlippautz
Date: 2017-03-24 11:54:04 -07:00 (committed by Commit bot)
Parent: c7ec5bf414
Commit: 79ac83e121
19 changed files with 345 additions and 431 deletions
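The change is mechanical but wide: the one-argument ObjectMarking helpers that looked up marking data implicitly are removed, and only the overloads taking an explicit MarkingState remain. A minimal sketch of the call-site migration, using only names that appear in the diff below (the fragment is illustrative and not compilable outside the V8 tree):

    // Old pattern (removed by this commit): the marking state is implicit
    // and always resolves to the full collector's bitmap and live bytes.
    if (ObjectMarking::IsWhite(obj)) {
      ObjectMarking::WhiteToBlack(obj);
    }

    // New pattern: the caller names the state explicitly, either the full
    // collector's (Internal) or the young-generation collector's (External).
    if (ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) {
      ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
    }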

View File

@@ -19,7 +19,10 @@ void LocalArrayBufferTracker::Free() {
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
JSArrayBuffer* buffer = reinterpret_cast<JSArrayBuffer*>(it->first);
if ((free_mode == kFreeAll) || ObjectMarking::IsWhite(buffer)) {
// TODO(mlippautz): Create a dependency on the collector to avoid getting
// the marking state out of thin air.
if ((free_mode == kFreeAll) ||
ObjectMarking::IsWhite(buffer, MarkingState::Internal(buffer))) {
const size_t len = it->second;
heap_->isolate()->array_buffer_allocator()->Free(buffer->backing_store(),
len);
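The TODO above concerns exactly this pattern: MarkingState::Internal(buffer) is conjured from nothing but the object's address. Presumably the factory resolves the chunk and hands out its full-marking data, along these lines (an assumed sketch; the real definition lives in mark-compact.h and may differ):

    // Assumed shape of the factory: find the chunk from the object address
    // and wrap its internal (full-marking) bitmap and live-byte counter.
    static MarkingState Internal(HeapObject* object) {
      return Internal(MemoryChunk::FromAddress(object->address()));
    }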

View File

@@ -3164,9 +3164,9 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by) {
lo_space()->AdjustLiveBytes(by);
} else if (!in_heap_iterator() &&
!mark_compact_collector()->sweeping_in_progress() &&
ObjectMarking::IsBlack(object)) {
ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
MemoryChunk::IncrementLiveBytes(object, by);
MarkingState::Internal(object).IncrementLiveBytes(by);
}
}
@@ -3201,8 +3201,9 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
// Transfer the mark bits to their new location if the object is not within
// a black area.
if (!incremental_marking()->black_allocation() ||
!Marking::IsBlack(
ObjectMarking::MarkBitFrom(HeapObject::FromAddress(new_start)))) {
!Marking::IsBlack(ObjectMarking::MarkBitFrom(
HeapObject::FromAddress(new_start),
MarkingState::Internal(HeapObject::FromAddress(new_start))))) {
IncrementalMarking::TransferMark(this, object,
HeapObject::FromAddress(new_start));
}
@@ -3285,9 +3286,9 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
// Clear the mark bits of the black area that now belongs to the filler.
// This is an optimization. The sweeper will release black fillers anyway.
if (incremental_marking()->black_allocation() &&
ObjectMarking::IsBlackOrGrey(filler)) {
ObjectMarking::IsBlackOrGrey(filler, MarkingState::Internal(filler))) {
Page* page = Page::FromAddress(new_end);
page->markbits()->ClearRange(
MarkingState::Internal(page).bitmap()->ClearRange(
page->AddressToMarkbitIndex(new_end),
page->AddressToMarkbitIndex(new_end + bytes_to_trim));
}
@@ -4274,8 +4275,9 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
HeapObject* obj = HeapObject::FromAddress(addr);
// There might be grey objects due to black to grey transitions in
// incremental marking. E.g. see VisitNativeContextIncremental.
DCHECK(ObjectMarking::IsBlackOrGrey(obj));
if (ObjectMarking::IsBlack(obj)) {
DCHECK(
ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)));
if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) {
incremental_marking()->IterateBlackObject(obj);
}
addr += obj->Size();
@@ -4873,7 +4875,8 @@ void Heap::IterateAndScavengePromotedObject(HeapObject* target, int size,
// it would be a violation of the invariant to record its slots.
bool record_slots = false;
if (incremental_marking()->IsCompacting()) {
record_slots = ObjectMarking::IsBlack(target);
record_slots =
ObjectMarking::IsBlack(target, MarkingState::Internal(target));
}
IterateAndScavengePromotedObjectsVisitor visitor(this, target, record_slots);
@@ -6107,7 +6110,7 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
bool SkipObject(HeapObject* object) {
if (object->IsFiller()) return true;
return ObjectMarking::IsWhite(object);
return ObjectMarking::IsWhite(object, MarkingState::Internal(object));
}
private:
@@ -6121,7 +6124,8 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
HeapObject* obj = HeapObject::cast(*p);
// Use Marking instead of ObjectMarking to avoid adjusting live bytes
// counter.
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
MarkBit mark_bit =
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
if (Marking::IsWhite(mark_bit)) {
Marking::WhiteToBlack(mark_bit);
marking_stack_.Add(obj);

View File

@@ -40,11 +40,14 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
HeapObject* value_heap_obj = HeapObject::cast(value);
DCHECK(!ObjectMarking::IsImpossible(value_heap_obj));
DCHECK(!ObjectMarking::IsImpossible(obj));
const bool is_black = ObjectMarking::IsBlack(obj);
DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
MarkingState::Internal(value_heap_obj)));
DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
const bool is_black =
ObjectMarking::IsBlack(obj, MarkingState::Internal(obj));
if (is_black && ObjectMarking::IsWhite(value_heap_obj)) {
if (is_black && ObjectMarking::IsWhite(
value_heap_obj, MarkingState::Internal(value_heap_obj))) {
WhiteToGreyAndPush(value_heap_obj);
RestartIfNotMarking();
}
@@ -117,7 +120,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
}
void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
ObjectMarking::WhiteToGrey(obj);
ObjectMarking::WhiteToGrey(obj, MarkingState::Internal(obj));
heap_->mark_compact_collector()->marking_deque()->Push(obj);
}
@@ -135,8 +138,10 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
// size, so the adjustment to the live data count will be zero anyway.
if (from == to) return;
MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to);
MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from);
MarkBit new_mark_bit =
ObjectMarking::MarkBitFrom(to, MarkingState::Internal(to));
MarkBit old_mark_bit =
ObjectMarking::MarkBitFrom(from, MarkingState::Internal(from));
if (Marking::IsBlack(old_mark_bit)) {
Marking::MarkBlack(new_mark_bit);
@@ -185,10 +190,11 @@ class IncrementalMarkingMarkingVisitor
} while (scan_until_end && start_offset < object_size);
chunk->set_progress_bar(start_offset);
if (start_offset < object_size) {
if (ObjectMarking::IsGrey(object)) {
if (ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
heap->mark_compact_collector()->marking_deque()->Unshift(object);
} else {
DCHECK(ObjectMarking::IsBlack(object));
DCHECK(
ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
heap->mark_compact_collector()->UnshiftBlack(object);
}
heap->incremental_marking()->NotifyIncompleteScanOfObject(
@@ -211,8 +217,10 @@ class IncrementalMarkingMarkingVisitor
HeapObject* heap_obj = HeapObject::cast(cache);
// Mark the object grey if it is white; do not enqueue it into the marking
// deque.
if (ObjectMarking::IsWhite(heap_obj)) {
ObjectMarking::WhiteToGrey(heap_obj);
if (ObjectMarking::IsWhite(heap_obj,
MarkingState::Internal(heap_obj))) {
ObjectMarking::WhiteToGrey(heap_obj,
MarkingState::Internal(heap_obj));
}
}
}
@@ -247,8 +255,10 @@ class IncrementalMarkingMarkingVisitor
// Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
if (ObjectMarking::IsWhite(heap_object)) {
ObjectMarking::WhiteToBlack(heap_object);
if (ObjectMarking::IsWhite(heap_object,
MarkingState::Internal(heap_object))) {
ObjectMarking::WhiteToBlack(heap_object,
MarkingState::Internal(heap_object));
return true;
}
return false;
@@ -256,7 +266,8 @@ class IncrementalMarkingMarkingVisitor
};
void IncrementalMarking::IterateBlackObject(HeapObject* object) {
if (IsMarking() && ObjectMarking::IsBlack(object)) {
if (IsMarking() &&
ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
Page* page = Page::FromAddress(object->address());
if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
// IterateBlackObject requires us to visit the whole object.
@@ -593,7 +604,7 @@ void IncrementalMarking::ProcessWeakCells() {
HeapObject* value = HeapObject::cast(weak_cell->value());
// Remove weak cells with live objects from the list; they do not need
// clearing.
if (ObjectMarking::IsBlackOrGrey(value)) {
if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
// Record slot, if value is pointing to an evacuation candidate.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
@@ -623,7 +634,9 @@ bool ShouldRetainMap(Map* map, int age) {
}
Object* constructor = map->GetConstructor();
if (!constructor->IsHeapObject() ||
ObjectMarking::IsWhite(HeapObject::cast(constructor))) {
ObjectMarking::IsWhite(
HeapObject::cast(constructor),
MarkingState::Internal(HeapObject::cast(constructor)))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
return false;
@@ -653,13 +666,15 @@ void IncrementalMarking::RetainMaps() {
int new_age;
Map* map = Map::cast(cell->value());
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
ObjectMarking::IsWhite(map)) {
ObjectMarking::IsWhite(map, MarkingState::Internal(map))) {
if (ShouldRetainMap(map, age)) {
MarkGrey(heap(), map);
}
Object* prototype = map->prototype();
if (age > 0 && prototype->IsHeapObject() &&
ObjectMarking::IsWhite(HeapObject::cast(prototype))) {
ObjectMarking::IsWhite(
HeapObject::cast(prototype),
MarkingState::Internal(HeapObject::cast(prototype)))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
@@ -763,12 +778,14 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
// them.
if (map_word.IsForwardingAddress()) {
HeapObject* dest = map_word.ToForwardingAddress();
if (ObjectMarking::IsBlack(dest)) continue;
if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest)))
continue;
array[new_top] = dest;
new_top = ((new_top + 1) & mask);
DCHECK(new_top != marking_deque->bottom());
DCHECK(ObjectMarking::IsGrey(obj) ||
(obj->IsFiller() && ObjectMarking::IsWhite(obj)));
DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
(obj->IsFiller() &&
ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))));
}
} else if (obj->map() != filler_map) {
// Skip one-word filler objects that appear on the
@@ -776,11 +793,12 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
array[new_top] = obj;
new_top = ((new_top + 1) & mask);
DCHECK(new_top != marking_deque->bottom());
DCHECK(ObjectMarking::IsGrey(obj) ||
(obj->IsFiller() && ObjectMarking::IsWhite(obj)) ||
DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
(obj->IsFiller() &&
ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) ||
(MemoryChunk::FromAddress(obj->address())
->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
ObjectMarking::IsBlack(obj)));
ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))));
}
}
marking_deque->set_top(new_top);
@@ -793,7 +811,8 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
#if ENABLE_SLOW_DCHECKS
MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj);
MarkBit mark_bit =
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
(chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
@@ -803,14 +822,14 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
}
void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) {
if (ObjectMarking::IsWhite(object)) {
if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
heap->incremental_marking()->WhiteToGreyAndPush(object);
}
}
void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
if (ObjectMarking::IsBlack(obj)) return;
ObjectMarking::GreyToBlack(obj);
if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) return;
ObjectMarking::GreyToBlack(obj, MarkingState::Internal(obj));
}
intptr_t IncrementalMarking::ProcessMarkingDeque(
@@ -825,7 +844,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque(
// Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects.
if (obj->IsFiller()) {
DCHECK(!ObjectMarking::IsImpossible(obj));
DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
continue;
}
@@ -880,8 +899,8 @@ void IncrementalMarking::Hurry() {
HeapObject* cache = HeapObject::cast(
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
if (!cache->IsUndefined(heap_->isolate())) {
if (ObjectMarking::IsGrey(cache)) {
ObjectMarking::GreyToBlack(cache);
if (ObjectMarking::IsGrey(cache, MarkingState::Internal(cache))) {
ObjectMarking::GreyToBlack(cache, MarkingState::Internal(cache));
}
}
context = Context::cast(context)->next_context_link();

View File

@@ -186,16 +186,16 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
static void TransferMark(Heap* heap, HeapObject* from, HeapObject* to);
V8_INLINE static void TransferColor(HeapObject* from, HeapObject* to) {
if (ObjectMarking::IsBlack(to)) {
if (ObjectMarking::IsBlack(to, MarkingState::Internal(to))) {
DCHECK(to->GetHeap()->incremental_marking()->black_allocation());
return;
}
DCHECK(ObjectMarking::IsWhite(to));
if (ObjectMarking::IsGrey(from)) {
ObjectMarking::WhiteToGrey(to);
} else if (ObjectMarking::IsBlack(from)) {
ObjectMarking::WhiteToBlack(to);
DCHECK(ObjectMarking::IsWhite(to, MarkingState::Internal(to)));
if (ObjectMarking::IsGrey(from, MarkingState::Internal(from))) {
ObjectMarking::WhiteToGrey(to, MarkingState::Internal(to));
} else if (ObjectMarking::IsBlack(from, MarkingState::Internal(from))) {
ObjectMarking::WhiteToBlack(to, MarkingState::Internal(to));
}
}

View File

@@ -13,37 +13,44 @@ namespace v8 {
namespace internal {
void MarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(obj)));
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj))));
if (!marking_deque()->Push(obj)) {
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(obj);
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
}
}
void MinorMarkCompactCollector::PushBlack(HeapObject* obj) {
DCHECK(
(ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(obj, StateForObject(obj))));
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj))));
if (!marking_deque()->Push(obj)) {
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(obj, StateForObject(obj));
ObjectMarking::BlackToGrey<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj));
}
}
void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
DCHECK(ObjectMarking::IsBlack(obj));
DCHECK(ObjectMarking::IsBlack(obj, MarkingState::Internal(obj)));
if (!marking_deque()->Unshift(obj)) {
ObjectMarking::BlackToGrey(obj);
ObjectMarking::BlackToGrey(obj, MarkingState::Internal(obj));
}
}
void MarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(obj)) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(obj);
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::Internal(obj));
PushBlack(obj);
}
}
void MinorMarkCompactCollector::MarkObject(HeapObject* obj) {
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(obj, StateForObject(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(obj, StateForObject(obj));
if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj))) {
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
obj, MarkingState::External(obj));
PushBlack(obj);
}
}
@@ -54,7 +61,8 @@ void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
if (target_page->IsEvacuationCandidate() &&
!ShouldSkipEvacuationSlotRecording(object)) {
DCHECK(ObjectMarking::IsBlackOrGrey(object));
DCHECK(
ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object)));
RememberedSet<OLD_TO_OLD>::Insert(source_page,
reinterpret_cast<Address>(slot));
}

View File

@@ -104,10 +104,10 @@ void MarkingVerifier::VerifyMarkingOnPage(const Page& page,
// The object is either part of a black area of black allocation or a
// regular black object.
CHECK(
state.bitmap->AllBitsSetInRange(
state.bitmap()->AllBitsSetInRange(
page.AddressToMarkbitIndex(current),
page.AddressToMarkbitIndex(next_object_must_be_here_or_later)) ||
state.bitmap->AllBitsClearInRange(
state.bitmap()->AllBitsClearInRange(
page.AddressToMarkbitIndex(current + kPointerSize * 2),
page.AddressToMarkbitIndex(next_object_must_be_here_or_later)));
current = next_object_must_be_here_or_later;
@@ -159,11 +159,11 @@ class FullMarkingVerifier : public MarkingVerifier {
protected:
MarkingState marking_state(MemoryChunk* chunk) override {
return MarkingState::FromPageInternal(chunk);
return MarkingState::Internal(chunk);
}
MarkingState marking_state(HeapObject* object) {
return marking_state(Page::FromAddress(object->address()));
return MarkingState::Internal(object);
}
void VisitPointers(Object** start, Object** end) override {
@@ -197,11 +197,11 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
explicit YoungGenerationMarkingVerifier(Heap* heap) : MarkingVerifier(heap) {}
MarkingState marking_state(MemoryChunk* chunk) override {
return MarkingState::FromPageExternal(chunk);
return MarkingState::External(chunk);
}
MarkingState marking_state(HeapObject* object) {
return marking_state(Page::FromAddress(object->address()));
return MarkingState::External(object);
}
void Run() override {
@@ -379,16 +379,18 @@ void MarkCompactCollector::CollectGarbage() {
#ifdef VERIFY_HEAP
void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
for (Page* p : *space) {
CHECK(p->markbits()->IsClean());
CHECK_EQ(0, p->LiveBytes());
const MarkingState state = MarkingState::Internal(p);
CHECK(state.bitmap()->IsClean());
CHECK_EQ(0, state.live_bytes());
}
}
void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
for (Page* p : PageRange(space->bottom(), space->top())) {
CHECK(p->markbits()->IsClean());
CHECK_EQ(0, p->LiveBytes());
const MarkingState state = MarkingState::Internal(p);
CHECK(state.bitmap()->IsClean());
CHECK_EQ(0, state.live_bytes());
}
}
@@ -401,8 +403,8 @@ void MarkCompactCollector::VerifyMarkbitsAreClean() {
LargeObjectIterator it(heap_->lo_space());
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
CHECK(ObjectMarking::IsWhite(obj));
CHECK_EQ(0, Page::FromAddress(obj->address())->LiveBytes());
CHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
CHECK_EQ(0, MarkingState::Internal(obj).live_bytes());
}
}
@@ -430,14 +432,14 @@ void MarkCompactCollector::VerifyOmittedMapChecks() {
static void ClearMarkbitsInPagedSpace(PagedSpace* space) {
for (Page* p : *space) {
p->ClearLiveness();
MarkingState::Internal(p).ClearLiveness();
}
}
static void ClearMarkbitsInNewSpace(NewSpace* space) {
for (Page* page : *space) {
page->ClearLiveness();
for (Page* p : *space) {
MarkingState::Internal(p).ClearLiveness();
}
}
@@ -491,7 +493,10 @@ void MarkCompactCollector::Sweeper::StartSweeping() {
sweeping_in_progress_ = true;
ForAllSweepingSpaces([this](AllocationSpace space) {
std::sort(sweeping_list_[space].begin(), sweeping_list_[space].end(),
[](Page* a, Page* b) { return a->LiveBytes() < b->LiveBytes(); });
[](Page* a, Page* b) {
return MarkingState::Internal(a).live_bytes() <
MarkingState::Internal(b).live_bytes();
});
});
}
@@ -945,7 +950,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
SharedFunctionInfo* shared = candidate->shared();
Code* code = shared->code();
if (ObjectMarking::IsWhite(code)) {
if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) {
if (FLAG_trace_code_flushing && shared->is_compiled()) {
PrintF("[code-flushing clears: ");
shared->ShortPrint();
@@ -963,7 +968,7 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
candidate->set_code(lazy_compile);
}
} else {
DCHECK(ObjectMarking::IsBlack(code));
DCHECK(ObjectMarking::IsBlack(code, MarkingState::Internal(code)));
candidate->set_code(code);
}
@@ -997,7 +1002,7 @@ void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
ClearNextCandidate(candidate);
Code* code = candidate->code();
if (ObjectMarking::IsWhite(code)) {
if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) {
if (FLAG_trace_code_flushing && candidate->is_compiled()) {
PrintF("[code-flushing clears: ");
candidate->ShortPrint();
@@ -1132,11 +1137,11 @@ class StaticYoungGenerationMarkingVisitor
StackLimitCheck check(heap->isolate());
if (check.HasOverflowed()) return false;
const MarkingState state =
MinorMarkCompactCollector::StateForObject(object);
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(object, state))
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object)))
return true;
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object, state);
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object));
IterateBody(object->map(), object);
return true;
}
@@ -1173,8 +1178,8 @@ class MarkCompactMarkingVisitor
// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
if (ObjectMarking::IsWhite(object)) {
ObjectMarking::WhiteToBlack(object);
if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
return true;
}
return false;
@@ -1195,11 +1200,11 @@ class MarkCompactMarkingVisitor
HeapObject* obj)) {
#ifdef DEBUG
DCHECK(collector->heap()->Contains(obj));
DCHECK(ObjectMarking::IsWhite(obj));
DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
#endif
Map* map = obj->map();
Heap* heap = obj->GetHeap();
ObjectMarking::WhiteToBlack(obj);
ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
// Mark the map pointer and the body.
heap->mark_compact_collector()->MarkObject(map);
IterateBody(map, obj);
@@ -1220,7 +1225,8 @@ class MarkCompactMarkingVisitor
if (!o->IsHeapObject()) continue;
collector->RecordSlot(object, p, o);
HeapObject* obj = HeapObject::cast(o);
if (ObjectMarking::IsBlackOrGrey(obj)) continue;
if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)))
continue;
VisitUnmarkedObject(collector, obj);
}
return true;
@@ -1253,7 +1259,7 @@ class MarkCompactMarkingVisitor
// was marked through the compilation cache before marker reached JSRegExp
// object.
FixedArray* data = FixedArray::cast(re->data());
if (ObjectMarking::IsBlackOrGrey(data)) {
if (ObjectMarking::IsBlackOrGrey(data, MarkingState::Internal(data))) {
Object** slot =
data->data_start() + JSRegExp::saved_code_index(is_one_byte);
heap->mark_compact_collector()->RecordSlot(data, slot, code);
@@ -1411,12 +1417,12 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public ObjectVisitor {
if (!collector_->heap()->InNewSpace(object)) return;
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, StateForObject(object)))
object, MarkingState::External(object)))
return;
Map* map = object->map();
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object,
StateForObject(object));
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::External(object));
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
collector_->EmptyMarkingDeque();
@@ -1447,11 +1453,14 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor {
HeapObject* object = HeapObject::cast(*p);
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(object)) return;
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object)))
return;
Map* map = object->map();
// Mark the object.
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(object);
ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object));
// Mark the map pointer and body, and push them on the marking stack.
collector_->MarkObject(map);
@@ -1481,7 +1490,9 @@ class StringTableCleaner : public ObjectVisitor {
for (Object** p = start; p < end; p++) {
Object* o = *p;
if (o->IsHeapObject()) {
if (ObjectMarking::IsWhite(HeapObject::cast(o))) {
HeapObject* heap_object = HeapObject::cast(o);
if (ObjectMarking::IsWhite(heap_object,
MarkingState::Internal(heap_object))) {
if (finalize_external_strings) {
if (o->IsExternalString()) {
heap_->FinalizeExternalString(String::cast(*p));
@@ -1522,8 +1533,11 @@ typedef StringTableCleaner<true, false> ExternalStringTableCleaner;
class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
public:
virtual Object* RetainAs(Object* object) {
DCHECK(!ObjectMarking::IsGrey(HeapObject::cast(object)));
if (ObjectMarking::IsBlack(HeapObject::cast(object))) {
HeapObject* heap_object = HeapObject::cast(object);
DCHECK(!ObjectMarking::IsGrey(heap_object,
MarkingState::Internal(heap_object)));
if (ObjectMarking::IsBlack(heap_object,
MarkingState::Internal(heap_object))) {
return object;
} else if (object->IsAllocationSite() &&
!(AllocationSite::cast(object)->IsZombie())) {
@@ -1531,7 +1545,7 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
// space. These sites get a one-time reprieve.
AllocationSite* site = AllocationSite::cast(object);
site->MarkZombie();
ObjectMarking::WhiteToBlack(site);
ObjectMarking::WhiteToBlack(site, MarkingState::Internal(site));
return object;
} else {
return NULL;
@@ -1551,8 +1565,9 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
Map* filler_map = heap()->one_pointer_filler_map();
for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
if ((object->map() != filler_map) && ObjectMarking::IsGrey(object)) {
ObjectMarking::GreyToBlack(object);
if ((object->map() != filler_map) &&
ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
PushBlack(object);
if (marking_deque()->IsFull()) return;
}
@@ -1561,11 +1576,11 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
DCHECK(!marking_deque()->IsFull());
LiveObjectIterator<kGreyObjects> it(p, MarkingState::FromPageInternal(p));
LiveObjectIterator<kGreyObjects> it(p, MarkingState::Internal(p));
HeapObject* object = NULL;
while ((object = it.Next()) != NULL) {
DCHECK(ObjectMarking::IsGrey(object));
ObjectMarking::GreyToBlack(object);
DCHECK(ObjectMarking::IsGrey(object, MarkingState::Internal(object)));
ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
PushBlack(object);
if (marking_deque()->IsFull()) return;
}
@@ -2018,15 +2033,18 @@ void MarkCompactCollector::DiscoverGreyObjectsInNewSpace() {
bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
Object* o = *p;
if (!o->IsHeapObject()) return false;
return ObjectMarking::IsWhite(HeapObject::cast(o));
return ObjectMarking::IsWhite(HeapObject::cast(o),
MarkingState::Internal(HeapObject::cast(o)));
}
void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
StringTable* string_table = heap()->string_table();
// Mark the string table itself.
if (ObjectMarking::IsWhite(string_table)) {
if (ObjectMarking::IsWhite(string_table,
MarkingState::Internal(string_table))) {
// String table could have already been marked by visiting the handles list.
ObjectMarking::WhiteToBlack(string_table);
ObjectMarking::WhiteToBlack(string_table,
MarkingState::Internal(string_table));
}
// Explicitly mark the prefix.
string_table->IteratePrefix(visitor);
@@ -2059,7 +2077,8 @@ void MarkCompactCollector::EmptyMarkingDeque() {
DCHECK(!object->IsFiller());
DCHECK(object->IsHeapObject());
DCHECK(heap()->Contains(object));
DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(object)));
DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
object, MarkingState::Internal(object))));
Map* map = object->map();
MarkObject(map);
@@ -2250,10 +2269,10 @@ class ObjectStatsVisitor : public HeapObjectVisitor {
}
bool Visit(HeapObject* obj) override {
if (ObjectMarking::IsBlack(obj)) {
if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) {
live_collector_.CollectStatistics(obj);
} else {
DCHECK(!ObjectMarking::IsGrey(obj));
DCHECK(!ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)));
dead_collector_.CollectStatistics(obj);
}
return true;
@@ -2309,8 +2328,7 @@ SlotCallbackResult MinorMarkCompactCollector::CheckAndMarkObject(
// has to be in ToSpace.
DCHECK(heap->InToSpace(object));
HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
const MarkingState state =
MinorMarkCompactCollector::StateForObject(heap_object);
const MarkingState state = MarkingState::External(heap_object);
if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(heap_object, state)) {
return KEEP_SLOT;
}
@@ -2324,7 +2342,9 @@ SlotCallbackResult MinorMarkCompactCollector::CheckAndMarkObject(
static bool IsUnmarkedObject(Heap* heap, Object** p) {
DCHECK_IMPLIES(heap->InNewSpace(*p), heap->InToSpace(*p));
return heap->InNewSpace(*p) && !ObjectMarking::IsBlack(HeapObject::cast(*p));
return heap->InNewSpace(*p) &&
!ObjectMarking::IsBlack(HeapObject::cast(*p),
MarkingState::Internal(HeapObject::cast(*p)));
}
void MinorMarkCompactCollector::MarkLiveObjects() {
@@ -2398,11 +2418,11 @@ void MinorMarkCompactCollector::EmptyMarkingDeque() {
DCHECK(heap()->Contains(object));
DCHECK(!(ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
object, StateForObject(object))));
object, MarkingState::External(object))));
Map* map = object->map();
DCHECK((ObjectMarking::IsBlack<MarkBit::NON_ATOMIC>(
object, StateForObject(object))));
object, MarkingState::External(object))));
StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
}
}
@@ -2626,11 +2646,12 @@ void MarkCompactCollector::ClearSimpleMapTransitions(
while (weak_cell_obj != Smi::kZero) {
WeakCell* weak_cell = WeakCell::cast(weak_cell_obj);
Map* map = Map::cast(weak_cell->value());
DCHECK(ObjectMarking::IsWhite(map));
DCHECK(ObjectMarking::IsWhite(map, MarkingState::Internal(map)));
Object* potential_parent = map->constructor_or_backpointer();
if (potential_parent->IsMap()) {
Map* parent = Map::cast(potential_parent);
if (ObjectMarking::IsBlackOrGrey(parent) &&
if (ObjectMarking::IsBlackOrGrey(parent,
MarkingState::Internal(parent)) &&
parent->raw_transitions() == weak_cell) {
ClearSimpleMapTransition(parent, map);
}
@@ -2669,7 +2690,8 @@ void MarkCompactCollector::ClearFullMapTransitions() {
if (num_transitions > 0) {
Map* map = array->GetTarget(0);
Map* parent = Map::cast(map->constructor_or_backpointer());
bool parent_is_alive = ObjectMarking::IsBlackOrGrey(parent);
bool parent_is_alive =
ObjectMarking::IsBlackOrGrey(parent, MarkingState::Internal(parent));
DescriptorArray* descriptors =
parent_is_alive ? parent->instance_descriptors() : nullptr;
bool descriptors_owner_died =
@@ -2694,7 +2716,7 @@ bool MarkCompactCollector::CompactTransitionArray(
for (int i = 0; i < num_transitions; ++i) {
Map* target = transitions->GetTarget(i);
DCHECK_EQ(target->constructor_or_backpointer(), map);
if (ObjectMarking::IsWhite(target)) {
if (ObjectMarking::IsWhite(target, MarkingState::Internal(target))) {
if (descriptors != nullptr &&
target->instance_descriptors() == descriptors) {
descriptors_owner_died = true;
@@ -2786,11 +2808,14 @@ void MarkCompactCollector::ProcessWeakCollections() {
while (weak_collection_obj != Smi::kZero) {
JSWeakCollection* weak_collection =
reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
DCHECK(ObjectMarking::IsBlackOrGrey(weak_collection));
DCHECK(ObjectMarking::IsBlackOrGrey(
weak_collection, MarkingState::Internal(weak_collection)));
if (weak_collection->table()->IsHashTable()) {
ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
for (int i = 0; i < table->Capacity(); i++) {
if (ObjectMarking::IsBlackOrGrey(HeapObject::cast(table->KeyAt(i)))) {
HeapObject* heap_object = HeapObject::cast(table->KeyAt(i));
if (ObjectMarking::IsBlackOrGrey(heap_object,
MarkingState::Internal(heap_object))) {
Object** key_slot =
table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
RecordSlot(table, key_slot, *key_slot);
@@ -2812,12 +2837,13 @@ void MarkCompactCollector::ClearWeakCollections() {
while (weak_collection_obj != Smi::kZero) {
JSWeakCollection* weak_collection =
reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
DCHECK(ObjectMarking::IsBlackOrGrey(weak_collection));
DCHECK(ObjectMarking::IsBlackOrGrey(
weak_collection, MarkingState::Internal(weak_collection)));
if (weak_collection->table()->IsHashTable()) {
ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
for (int i = 0; i < table->Capacity(); i++) {
HeapObject* key = HeapObject::cast(table->KeyAt(i));
if (!ObjectMarking::IsBlackOrGrey(key)) {
if (!ObjectMarking::IsBlackOrGrey(key, MarkingState::Internal(key))) {
table->RemoveEntry(i);
}
}
@@ -2858,7 +2884,7 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
// We do not insert cleared weak cells into the list, so the value
// cannot be a Smi here.
HeapObject* value = HeapObject::cast(weak_cell->value());
if (!ObjectMarking::IsBlackOrGrey(value)) {
if (!ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
// Cells for new-space objects embedded in optimized code are wrapped in
// WeakCell and put into Heap::weak_object_to_code_table.
// Such cells do not have any strong references but we want to keep them
@@ -2867,9 +2893,11 @@ void MarkCompactCollector::ClearWeakCells(Object** non_live_map_list,
if (value->IsCell()) {
Object* cell_value = Cell::cast(value)->value();
if (cell_value->IsHeapObject() &&
ObjectMarking::IsBlackOrGrey(HeapObject::cast(cell_value))) {
ObjectMarking::IsBlackOrGrey(
HeapObject::cast(cell_value),
MarkingState::Internal(HeapObject::cast(cell_value)))) {
// Resurrect the cell.
ObjectMarking::WhiteToBlack(value);
ObjectMarking::WhiteToBlack(value, MarkingState::Internal(value));
Object** slot = HeapObject::RawField(value, Cell::kValueOffset);
RecordSlot(value, slot, *slot);
slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
@@ -3163,7 +3191,7 @@ class FullEvacuator : public Evacuator {
bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) {
bool success = false;
DCHECK(page->SweepingDone());
intptr_t saved_live_bytes = *state.live_bytes;
intptr_t saved_live_bytes = state.live_bytes();
double evacuation_time = 0.0;
{
AlwaysAllocateScope always_allocate(heap()->isolate());
@@ -3183,7 +3211,8 @@ bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) {
page, state, &new_to_old_page_visitor_,
LiveObjectVisitor::kKeepMarking);
DCHECK(success);
new_to_old_page_visitor_.account_moved_bytes(page->LiveBytes());
new_to_old_page_visitor_.account_moved_bytes(
MarkingState::Internal(page).live_bytes());
// ArrayBufferTracker will be updated during sweeping.
break;
case kPageNewToNew:
@@ -3191,7 +3220,8 @@ bool FullEvacuator::EvacuatePage(Page* page, const MarkingState& state) {
page, state, &new_to_new_page_visitor_,
LiveObjectVisitor::kKeepMarking);
DCHECK(success);
new_to_new_page_visitor_.account_moved_bytes(page->LiveBytes());
new_to_new_page_visitor_.account_moved_bytes(
MarkingState::Internal(page).live_bytes());
// ArrayBufferTracker will be updated during sweeping.
break;
case kObjectsOldToOld:
@@ -3275,7 +3305,7 @@ class EvacuationJobTraits {
static bool ProcessPageInParallel(Heap* heap, PerTaskData evacuator,
MemoryChunk* chunk, PerPageData) {
return evacuator->EvacuatePage(reinterpret_cast<Page*>(chunk),
MarkingState::FromPageInternal(chunk));
MarkingState::Internal(chunk));
}
static void FinalizePageSequentially(Heap* heap, MemoryChunk* chunk,
@@ -3318,18 +3348,19 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
int abandoned_pages = 0;
intptr_t live_bytes = 0;
for (Page* page : old_space_evacuation_pages_) {
live_bytes += page->LiveBytes();
live_bytes += MarkingState::Internal(page).live_bytes();
job.AddPage(page, &abandoned_pages);
}
const bool reduce_memory = heap()->ShouldReduceMemory();
const Address age_mark = heap()->new_space()->age_mark();
for (Page* page : new_space_evacuation_pages_) {
live_bytes += page->LiveBytes();
intptr_t live_bytes_on_page = MarkingState::Internal(page).live_bytes();
live_bytes += live_bytes_on_page;
if (!reduce_memory && !page->NeverEvacuate() &&
(page->LiveBytes() > Evacuator::PageEvacuationThreshold()) &&
(live_bytes_on_page > Evacuator::PageEvacuationThreshold()) &&
!page->Contains(age_mark) &&
heap()->CanExpandOldGeneration(page->LiveBytes())) {
heap()->CanExpandOldGeneration(live_bytes_on_page)) {
if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page);
} else {
@@ -3445,11 +3476,11 @@ int MarkCompactCollector::Sweeper::RawSweep(
intptr_t max_freed_bytes = 0;
int curr_region = -1;
LiveObjectIterator<kBlackObjects> it(p, MarkingState::FromPageInternal(p));
LiveObjectIterator<kBlackObjects> it(p, MarkingState::Internal(p));
HeapObject* object = NULL;
while ((object = it.Next()) != NULL) {
DCHECK(ObjectMarking::IsBlack(object));
DCHECK(ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
Address free_end = object->address();
if (free_end != free_start) {
CHECK_GT(free_end, free_start);
@@ -3520,7 +3551,7 @@ int MarkCompactCollector::Sweeper::RawSweep(
}
// Clear the mark bits of that page and reset live bytes count.
p->ClearLiveness();
MarkingState::Internal(p).ClearLiveness();
p->concurrent_sweeping_state().SetValue(Page::kSweepingDone);
if (free_list_mode == IGNORE_FREE_LIST) return 0;
@@ -3539,7 +3570,7 @@ void MarkCompactCollector::InvalidateCode(Code* code) {
DCHECK(compacting_);
// If the object is white, then no slots were recorded on it yet.
if (ObjectMarking::IsWhite(code)) return;
if (ObjectMarking::IsWhite(code, MarkingState::Internal(code))) return;
// Ignore all slots that might have been recorded in the body of the
// deoptimized code object. Assumption: no slots will be recorded for
@@ -3557,8 +3588,8 @@ bool MarkCompactCollector::WillBeDeoptimized(Code* code) {
void MarkCompactCollector::RecordLiveSlotsOnPage(Page* page) {
EvacuateRecordOnlyVisitor visitor(heap());
LiveObjectVisitor object_visitor;
object_visitor.VisitBlackObjects(page, MarkingState::FromPageInternal(page),
&visitor, LiveObjectVisitor::kKeepMarking);
object_visitor.VisitBlackObjects(page, MarkingState::Internal(page), &visitor,
LiveObjectVisitor::kKeepMarking);
}
template <class Visitor>
@@ -3572,7 +3603,7 @@ bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk,
DCHECK(ObjectMarking::IsBlack(object, state));
if (!visitor->Visit(object)) {
if (iteration_mode == kClearMarkbits) {
state.bitmap->ClearRange(
state.bitmap()->ClearRange(
chunk->AddressToMarkbitIndex(chunk->area_start()),
chunk->AddressToMarkbitIndex(object->address()));
if (chunk->old_to_new_slots() != nullptr) {
@@ -3773,7 +3804,9 @@ class PointerUpdateJobTraits {
// slot has been recorded multiple times in the remembered set. Since
// there is no forwarding information present we need to check the
// markbits to determine liveness.
if (ObjectMarking::IsBlack(reinterpret_cast<HeapObject*>(slot_reference)))
HeapObject* heap_object = reinterpret_cast<HeapObject*>(slot_reference);
if (ObjectMarking::IsBlack(heap_object,
MarkingState::Internal(heap_object)))
return KEEP_SLOT;
} else {
DCHECK(!heap->InNewSpace(slot_reference));
@@ -3839,8 +3872,7 @@ class ToSpacePointerUpdateJobTraits {
static void ProcessPageInParallelVisitLive(Heap* heap, PerTaskData visitor,
MemoryChunk* chunk,
PerPageData limits) {
LiveObjectIterator<kBlackObjects> it(chunk,
MarkingState::FromPageInternal(chunk));
LiveObjectIterator<kBlackObjects> it(chunk, MarkingState::Internal(chunk));
HeapObject* object = NULL;
while ((object = it.Next()) != NULL) {
Map* map = object->map();
@@ -3904,7 +3936,7 @@ void MarkCompactCollector::ReleaseEvacuationCandidates() {
for (Page* p : old_space_evacuation_pages_) {
if (!p->IsEvacuationCandidate()) continue;
PagedSpace* space = static_cast<PagedSpace*>(p->owner());
p->ResetLiveBytes();
MarkingState::Internal(p).SetLiveBytes(0);
CHECK(p->SweepingDone());
space->ReleasePage(p);
}
@@ -3975,8 +4007,10 @@ void MarkCompactCollector::Sweeper::AddPage(AllocationSpace space, Page* page) {
void MarkCompactCollector::Sweeper::PrepareToBeSweptPage(AllocationSpace space,
Page* page) {
page->concurrent_sweeping_state().SetValue(Page::kSweepingPending);
DCHECK_GE(page->area_size(), static_cast<size_t>(page->LiveBytes()));
size_t to_sweep = page->area_size() - page->LiveBytes();
DCHECK_GE(page->area_size(),
static_cast<size_t>(MarkingState::Internal(page).live_bytes()));
size_t to_sweep =
page->area_size() - MarkingState::Internal(page).live_bytes();
if (space != NEW_SPACE)
heap_->paged_space(space)->accounting_stats_.ShrinkSpace(to_sweep);
}
@@ -4028,7 +4062,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
}
// One unused page is kept; all further ones are released before sweeping them.
if (p->LiveBytes() == 0) {
if (MarkingState::Internal(p).live_bytes() == 0) {
if (unused_page_present) {
if (FLAG_gc_verbose) {
PrintIsolate(isolate(), "sweeping: released page: %p",
@@ -4108,7 +4142,7 @@ void MarkCompactCollector::RecordCodeTargetPatch(Address pc, Code* target) {
Code* host =
isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
pc);
if (ObjectMarking::IsBlack(host)) {
if (ObjectMarking::IsBlack(host, MarkingState::Internal(host))) {
RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
// The target is always in old space, we don't have to record the slot in
// the old-to-new remembered set.

View File

@@ -32,57 +32,13 @@ class MarkCompactCollector;
class MinorMarkCompactCollector;
class MarkingVisitor;
class MarkingState {
public:
static MarkingState FromPageInternal(MemoryChunk* chunk) {
return MarkingState(chunk->markbits<MarkingMode::FULL>(),
chunk->live_bytes_address<MarkingMode::FULL>());
}
static MarkingState FromPageExternal(MemoryChunk* chunk) {
return MarkingState(
chunk->markbits<MarkingMode::YOUNG_GENERATION>(),
chunk->live_bytes_address<MarkingMode::YOUNG_GENERATION>());
}
MarkingState(Bitmap* bitmap, intptr_t* live_bytes)
: bitmap(bitmap), live_bytes(live_bytes) {}
void IncrementLiveBytes(intptr_t by) const {
*live_bytes += static_cast<int>(by);
}
void SetLiveBytes(intptr_t value) const {
*live_bytes = static_cast<int>(value);
}
void ClearLiveness() const {
bitmap->Clear();
*live_bytes = 0;
}
Bitmap* bitmap;
intptr_t* live_bytes;
};
// TODO(mlippautz): Remove duplicate accessors once the architecture for
// different markers is fixed.
class ObjectMarking : public AllStatic {
public:
V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj) {
const Address address = obj->address();
const MemoryChunk* p = MemoryChunk::FromAddress(address);
return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(address));
}
V8_INLINE static MarkBit MarkBitFrom(HeapObject* obj,
const MarkingState& state) {
const Address address = obj->address();
const MemoryChunk* p = MemoryChunk::FromAddress(address);
return state.bitmap->MarkBitFromIndex(p->AddressToMarkbitIndex(address));
}
static Marking::ObjectColor Color(HeapObject* obj) {
return Marking::Color(ObjectMarking::MarkBitFrom(obj));
return state.bitmap()->MarkBitFromIndex(p->AddressToMarkbitIndex(address));
}
static Marking::ObjectColor Color(HeapObject* obj,
@@ -90,67 +46,33 @@ class ObjectMarking : public AllStatic {
return Marking::Color(ObjectMarking::MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsImpossible(HeapObject* obj) {
return Marking::IsImpossible<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsImpossible(HeapObject* obj,
const MarkingState& state) {
return Marking::IsImpossible<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlack(HeapObject* obj) {
return Marking::IsBlack<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlack(HeapObject* obj, const MarkingState& state) {
return Marking::IsBlack<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsWhite(HeapObject* obj) {
return Marking::IsWhite<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsWhite(HeapObject* obj, const MarkingState& state) {
return Marking::IsWhite<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsGrey(HeapObject* obj) {
return Marking::IsGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsGrey(HeapObject* obj, const MarkingState& state) {
return Marking::IsGrey<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlackOrGrey(HeapObject* obj) {
return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool IsBlackOrGrey(HeapObject* obj,
const MarkingState& state) {
return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool BlackToGrey(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsBlack<access_mode>(obj)));
MarkBit markbit = MarkBitFrom(obj);
if (!Marking::BlackToGrey<access_mode>(markbit)) return false;
MemoryChunk::IncrementLiveBytes(obj, -obj->Size());
return true;
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool BlackToGrey(HeapObject* obj,
const MarkingState& state) {
@@ -162,12 +84,6 @@ class ObjectMarking : public AllStatic {
return true;
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToGrey(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj)));
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToGrey(HeapObject* obj,
const MarkingState& state) {
@@ -176,13 +92,6 @@ class ObjectMarking : public AllStatic {
return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state));
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToBlack(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj)));
if (!ObjectMarking::WhiteToGrey<access_mode>(obj)) return false;
return ObjectMarking::GreyToBlack<access_mode>(obj);
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool WhiteToBlack(HeapObject* obj,
const MarkingState& state) {
@@ -192,15 +101,6 @@ class ObjectMarking : public AllStatic {
return ObjectMarking::GreyToBlack<access_mode>(obj, state);
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool GreyToBlack(HeapObject* obj) {
DCHECK((access_mode == MarkBit::ATOMIC || IsGrey<access_mode>(obj)));
MarkBit markbit = MarkBitFrom(obj);
if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
MemoryChunk::IncrementLiveBytes(obj, obj->Size());
return true;
}
template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
V8_INLINE static bool GreyToBlack(HeapObject* obj,
const MarkingState& state) {
@@ -413,7 +313,7 @@ class MarkBitCellIterator BASE_EMBEDDED {
cell_base_ = chunk_->area_start();
cell_index_ = Bitmap::IndexToCell(
Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(cell_base_)));
cells_ = state.bitmap->cells();
cells_ = state.bitmap()->cells();
}
inline bool Done() { return cell_index_ == last_cell_index_; }
@@ -526,10 +426,6 @@ class MinorMarkCompactCollector {
private:
class RootMarkingVisitor;
static MarkingState StateForObject(HeapObject* object) {
return MarkingState::FromPageExternal(Page::FromAddress(object->address()));
}
inline Heap* heap() { return heap_; }
inline Isolate* isolate() { return heap()->isolate(); }
inline MarkingDeque* marking_deque() { return &marking_deque_; }
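The hunks above also delete the old field-based MarkingState (public bitmap and live_bytes members, FromPageInternal/FromPageExternal factories) together with the MinorMarkCompactCollector::StateForObject shortcut. The replacement interface is not itself visible in this diff; reconstructed from the call sites throughout the commit, its shape is roughly the following (a hedged sketch, the actual declarations in the tree may differ):

    class MarkingState {
     public:
      // Factories replacing FromPageInternal/FromPageExternal; per the
      // call sites they accept HeapObject* and MemoryChunk*/Page* alike.
      static MarkingState Internal(HeapObject* object);
      static MarkingState Internal(MemoryChunk* chunk);
      static MarkingState External(HeapObject* object);
      static MarkingState External(MemoryChunk* chunk);

      // The former public fields become accessors, and the live-byte
      // helpers move here from MemoryChunk (see the spaces hunks below).
      Bitmap* bitmap() const;
      intptr_t live_bytes() const;
      void IncrementLiveBytes(intptr_t by) const;
      void SetLiveBytes(intptr_t value) const;
      void ClearLiveness() const;
    };

The full mark-compactor always selects MarkingState::Internal, while the minor (young-generation) collector selects MarkingState::External, which is what the deleted StateForObject used to return.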

View File

@@ -343,7 +343,8 @@ static bool IsCowArray(Heap* heap, FixedArrayBase* array) {
static bool SameLiveness(HeapObject* obj1, HeapObject* obj2) {
return obj1 == nullptr || obj2 == nullptr ||
ObjectMarking::Color(obj1) == ObjectMarking::Color(obj2);
ObjectMarking::Color(obj1, MarkingState::Internal(obj1)) ==
ObjectMarking::Color(obj2, MarkingState::Internal(obj2));
}
bool ObjectStatsCollector::RecordFixedArrayHelper(HeapObject* parent,

View File

@@ -333,7 +333,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
// contain smi zero.
if (weak_cell->next_cleared() && !weak_cell->cleared()) {
HeapObject* value = HeapObject::cast(weak_cell->value());
if (ObjectMarking::IsBlackOrGrey(value)) {
if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
// Weak cells with live values are directly processed here to reduce
// the processing time of weak cells during the main GC pause.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
@@ -522,7 +522,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
// Code is either on stack, in compilation cache or referenced
// by optimized version of function.
if (ObjectMarking::IsBlackOrGrey(function->code())) {
if (ObjectMarking::IsBlackOrGrey(function->code(),
MarkingState::Internal(function->code()))) {
return false;
}
@@ -545,7 +546,8 @@ bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
Heap* heap, SharedFunctionInfo* shared_info) {
// Code is either on stack, in compilation cache or referenced
// by optimized version of function.
if (ObjectMarking::IsBlackOrGrey(shared_info->code())) {
if (ObjectMarking::IsBlackOrGrey(
shared_info->code(), MarkingState::Internal(shared_info->code()))) {
return false;
}

View File

@@ -200,8 +200,10 @@ class ScavengingVisitor : public StaticVisitorBase {
reinterpret_cast<base::AtomicWord>(target));
if (object_contents == POINTER_OBJECT) {
heap->promotion_queue()->insert(target, object_size,
ObjectMarking::IsBlack(object));
// TODO(mlippautz): Query collector for marking state.
heap->promotion_queue()->insert(
target, object_size,
ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
}
heap->IncrementPromotedObjectsSize(object_size);
return true;
@@ -245,7 +247,9 @@ class ScavengingVisitor : public StaticVisitorBase {
DCHECK(map_word.IsForwardingAddress());
HeapObject* target = map_word.ToForwardingAddress();
if (ObjectMarking::IsBlack(target)) {
// TODO(mlippautz): Notify collector of this object so we don't have to
// retrieve the state out of thin air.
if (ObjectMarking::IsBlack(target, MarkingState::Internal(target))) {
// This object is black and it might not be rescanned by marker.
// We should explicitly record code entry slot for compaction because
// promotion queue processing (IteratePromotedObjectPointers) will

View File

@@ -182,7 +182,7 @@ Page* Page::Initialize(Heap* heap, MemoryChunk* chunk, Executability executable,
page->AllocateLocalTracker();
if (FLAG_minor_mc) {
page->AllocateYoungGenerationBitmap();
page->ClearLiveness<MarkingMode::YOUNG_GENERATION>();
MarkingState::External(page).ClearLiveness();
}
return page;
}
@@ -230,54 +230,6 @@ void Page::InitializeFreeListCategories() {
}
}
template <MarkingMode mode>
void MemoryChunk::IncrementLiveBytes(HeapObject* object, int by) {
MemoryChunk::FromAddress(object->address())->IncrementLiveBytes<mode>(by);
}
template <MarkingMode mode>
void MemoryChunk::TraceLiveBytes(intptr_t old_value, intptr_t new_value) {
if (!FLAG_trace_live_bytes) return;
PrintIsolate(heap()->isolate(),
"live-bytes[%p:%s]: %" V8PRIdPTR "-> %" V8PRIdPTR "\n",
static_cast<void*>(this),
mode == MarkingMode::FULL ? "internal" : "external", old_value,
new_value);
}
template <MarkingMode mode>
void MemoryChunk::ResetLiveBytes() {
switch (mode) {
case MarkingMode::FULL:
TraceLiveBytes(live_byte_count_, 0);
live_byte_count_ = 0;
break;
case MarkingMode::YOUNG_GENERATION:
TraceLiveBytes(young_generation_live_byte_count_, 0);
young_generation_live_byte_count_ = 0;
break;
}
}
template <MarkingMode mode>
void MemoryChunk::IncrementLiveBytes(int by) {
switch (mode) {
case MarkingMode::FULL:
TraceLiveBytes(live_byte_count_, live_byte_count_ + by);
live_byte_count_ += by;
DCHECK_GE(live_byte_count_, 0);
DCHECK_LE(static_cast<size_t>(live_byte_count_), size_);
break;
case MarkingMode::YOUNG_GENERATION:
TraceLiveBytes(young_generation_live_byte_count_,
young_generation_live_byte_count_ + by);
young_generation_live_byte_count_ += by;
DCHECK_GE(young_generation_live_byte_count_, 0);
DCHECK_LE(static_cast<size_t>(young_generation_live_byte_count_), size_);
break;
}
}
bool PagedSpace::Contains(Address addr) {
return MemoryChunk::FromAnyPointerAddress(heap(), addr)->owner() == this;
}
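The templates deleted above were the per-mode live-byte plumbing on MemoryChunk; after this commit the same bookkeeping is reached through the state object. A before/after sketch using only names from this diff:

    // Before: the marking mode is a template parameter on MemoryChunk.
    MemoryChunk::IncrementLiveBytes<MarkingMode::FULL>(object, by);

    // After: the state is named at the call site and owns the counter.
    MarkingState::Internal(object).IncrementLiveBytes(by);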

View File

@@ -538,12 +538,13 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
chunk->mutex_ = new base::Mutex();
chunk->available_in_free_list_ = 0;
chunk->wasted_memory_ = 0;
chunk->ClearLiveness();
chunk->young_generation_bitmap_ = nullptr;
chunk->set_next_chunk(nullptr);
chunk->set_prev_chunk(nullptr);
chunk->local_tracker_ = nullptr;
MarkingState::Internal(chunk).ClearLiveness();
DCHECK(OFFSET_OF(MemoryChunk, flags_) == kFlagsOffset);
if (executable == EXECUTABLE) {
@@ -854,9 +855,10 @@ void Page::CreateBlackArea(Address start, Address end) {
DCHECK_EQ(Page::FromAddress(start), this);
DCHECK_NE(start, end);
DCHECK_EQ(Page::FromAddress(end - 1), this);
markbits()->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end));
IncrementLiveBytes(static_cast<int>(end - start));
MarkingState::Internal(this).bitmap()->SetRange(AddressToMarkbitIndex(start),
AddressToMarkbitIndex(end));
MarkingState::Internal(this).IncrementLiveBytes(
static_cast<int>(end - start));
}
void MemoryAllocator::PartialFreeMemory(MemoryChunk* chunk,
@@ -1196,15 +1198,6 @@ void MemoryChunk::ReleaseYoungGenerationBitmap() {
young_generation_bitmap_ = nullptr;
}
template <MarkingMode mode>
void MemoryChunk::ClearLiveness() {
markbits<mode>()->Clear();
ResetLiveBytes<mode>();
}
template void MemoryChunk::ClearLiveness<MarkingMode::FULL>();
template void MemoryChunk::ClearLiveness<MarkingMode::YOUNG_GENERATION>();
// -----------------------------------------------------------------------------
// PagedSpace implementation
@@ -1420,9 +1413,11 @@ void PagedSpace::EmptyAllocationInfo() {
// Clear the bits in the unused black area.
if (current_top != current_limit) {
page->markbits()->ClearRange(page->AddressToMarkbitIndex(current_top),
page->AddressToMarkbitIndex(current_limit));
page->IncrementLiveBytes(-static_cast<int>(current_limit - current_top));
MarkingState::Internal(page).bitmap()->ClearRange(
page->AddressToMarkbitIndex(current_top),
page->AddressToMarkbitIndex(current_limit));
MarkingState::Internal(page).IncrementLiveBytes(
-static_cast<int>(current_limit - current_top));
}
}
@@ -1436,7 +1431,7 @@ void PagedSpace::IncreaseCapacity(size_t bytes) {
}
void PagedSpace::ReleasePage(Page* page) {
DCHECK_EQ(page->LiveBytes(), 0);
DCHECK_EQ(0, MarkingState::Internal(page).live_bytes());
DCHECK_EQ(page->owner(), this);
free_list_.EvictFreeListItems(page);
@@ -1497,14 +1492,14 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
// All the interior pointers should be contained in the heap.
int size = object->Size();
object->IterateBody(map->instance_type(), size, visitor);
if (ObjectMarking::IsBlack(object)) {
if (ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
black_size += size;
}
CHECK(object->address() + size <= top);
end_of_previous_object = object->address() + size;
}
CHECK_LE(black_size, page->LiveBytes());
CHECK_LE(black_size, MarkingState::Internal(page).live_bytes());
}
CHECK(allocation_pointer_found_in_space);
}
@@ -1637,7 +1632,7 @@ bool SemiSpace::EnsureCurrentCapacity() {
if (current_page == nullptr) return false;
DCHECK_NOT_NULL(current_page);
current_page->InsertAfter(anchor());
current_page->ClearLiveness();
MarkingState::Internal(current_page).ClearLiveness();
current_page->SetFlags(anchor()->prev_page()->GetFlags(),
static_cast<uintptr_t>(Page::kCopyAllFlags));
heap()->CreateFillerObjectAt(current_page->area_start(),
@@ -1709,7 +1704,7 @@ void NewSpace::ResetAllocationInfo() {
UpdateAllocationInfo();
// Clear all mark-bits in the to-space.
for (Page* p : to_space_) {
p->ClearLiveness();
MarkingState::Internal(p).ClearLiveness();
}
InlineAllocationStep(old_top, allocation_info_.top(), nullptr, 0);
}
@@ -2010,7 +2005,7 @@ bool SemiSpace::GrowTo(size_t new_capacity) {
return false;
}
new_page->InsertAfter(last_page);
new_page->ClearLiveness();
MarkingState::Internal(new_page).ClearLiveness();
// Duplicate the flags that were set on the old page.
new_page->SetFlags(last_page->GetFlags(), Page::kCopyOnFlipFlagsMask);
last_page = new_page;
@@ -2071,7 +2066,7 @@ void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) {
page->ClearFlag(MemoryChunk::IN_FROM_SPACE);
page->SetFlag(MemoryChunk::IN_TO_SPACE);
page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
page->ResetLiveBytes();
MarkingState::Internal(page).SetLiveBytes(0);
} else {
page->SetFlag(MemoryChunk::IN_FROM_SPACE);
page->ClearFlag(MemoryChunk::IN_TO_SPACE);
@@ -3044,7 +3039,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
ClearRecordedSlots::kNo);
if (heap()->incremental_marking()->black_allocation()) {
ObjectMarking::WhiteToBlack(object);
ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
}
return object;
}
@@ -3091,13 +3086,14 @@ LargePage* LargeObjectSpace::FindPage(Address a) {
void LargeObjectSpace::ClearMarkingStateOfLiveObjects() {
LargeObjectIterator it(this);
for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
if (ObjectMarking::IsBlackOrGrey(obj)) {
Marking::MarkWhite(ObjectMarking::MarkBitFrom(obj));
if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj))) {
Marking::MarkWhite(
ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj)));
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
chunk->ResetProgressBar();
chunk->ResetLiveBytes();
MarkingState::Internal(chunk).SetLiveBytes(0);
}
DCHECK(ObjectMarking::IsWhite(obj));
DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
}
}
@@ -3139,8 +3135,8 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
LargePage* current = first_page_;
while (current != NULL) {
HeapObject* object = current->GetObject();
DCHECK(!ObjectMarking::IsGrey(object));
if (ObjectMarking::IsBlack(object)) {
DCHECK(!ObjectMarking::IsGrey(object, MarkingState::Internal(object)));
if (ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
Address free_start;
if ((free_start = current->GetAddressToShrink()) != 0) {
// TODO(hpayer): Perform partial free concurrently.
@@ -3276,7 +3272,8 @@ void Page::Print() {
unsigned mark_size = 0;
for (HeapObject* object = objects.Next(); object != NULL;
object = objects.Next()) {
bool is_marked = ObjectMarking::IsBlackOrGrey(object);
bool is_marked =
ObjectMarking::IsBlackOrGrey(object, MarkingState::Internal(object));
PrintF(" %c ", (is_marked ? '!' : ' ')); // Indent a little.
if (is_marked) {
mark_size += object->Size();
@@ -3285,7 +3282,8 @@ void Page::Print() {
PrintF("\n");
}
printf(" --------------------------------------\n");
printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes());
printf(" Marked: %x, LiveCount: %" V8PRIdPTR "\n", mark_size,
MarkingState::Internal(this).live_bytes());
}
#endif // DEBUG


@@ -224,10 +224,6 @@ class FreeListCategory {
friend class PagedSpace;
};
// MarkingMode determines which bitmaps and counters should be used when
// accessing marking information on MemoryChunk.
enum class MarkingMode { FULL, YOUNG_GENERATION };
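(Illustration, not part of the commit: the two removed MarkingMode values map onto the two MarkingState factories added further down in this file, turning the mode from a template argument into an explicit value.)

// Before: chunk->LiveBytes<MarkingMode::FULL>();
//         chunk->LiveBytes<MarkingMode::YOUNG_GENERATION>();
// After:
MarkingState::Internal(chunk).live_bytes();  // full marking bitmap/counter
MarkingState::External(chunk).live_bytes();  // young generation bitmap/counter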
// MemoryChunk represents a memory region owned by a specific space.
// It is divided into the header and the body. Chunk start is always
// 1MB aligned. Start of the body is aligned so it can accommodate
@@ -377,9 +373,6 @@ class MemoryChunk {
static const int kAllocatableMemory = kPageSize - kObjectStartOffset;
template <MarkingMode mode = MarkingMode::FULL>
static inline void IncrementLiveBytes(HeapObject* object, int by);
// Only works if the pointer is in the first kPageSize of the MemoryChunk.
static MemoryChunk* FromAddress(Address a) {
return reinterpret_cast<MemoryChunk*>(OffsetFrom(a) & ~kAlignmentMask);
@@ -427,33 +420,6 @@ class MemoryChunk {
return concurrent_sweeping_state().Value() == kSweepingDone;
}
// Manage live byte count, i.e., count of bytes in black objects.
template <MarkingMode mode = MarkingMode::FULL>
inline void ResetLiveBytes();
template <MarkingMode mode = MarkingMode::FULL>
inline void IncrementLiveBytes(int by);
template <MarkingMode mode = MarkingMode::FULL>
int LiveBytes() {
switch (mode) {
case MarkingMode::FULL:
DCHECK_LE(static_cast<unsigned>(live_byte_count_), size_);
return static_cast<int>(live_byte_count_);
case MarkingMode::YOUNG_GENERATION:
DCHECK_LE(static_cast<unsigned>(young_generation_live_byte_count_),
size_);
return static_cast<int>(young_generation_live_byte_count_);
}
UNREACHABLE();
return 0;
}
void SetLiveBytes(int live_bytes) {
DCHECK_GE(live_bytes, 0);
DCHECK_LE(static_cast<size_t>(live_bytes), size_);
live_byte_count_ = live_bytes;
}
size_t size() const { return size_; }
void set_size(size_t size) { size_ = size; }
@@ -513,19 +479,6 @@ class MemoryChunk {
}
}
template <MarkingMode mode = MarkingMode::FULL>
inline Bitmap* markbits() const {
return mode == MarkingMode::FULL
? Bitmap::FromAddress(address() + kHeaderSize)
: young_generation_bitmap_;
}
template <MarkingMode mode = MarkingMode::FULL>
inline intptr_t* live_bytes_address() {
return mode == MarkingMode::FULL ? &live_byte_count_
: &young_generation_live_byte_count_;
}
inline uint32_t AddressToMarkbitIndex(Address addr) const {
return static_cast<uint32_t>(addr - this->address()) >> kPointerSizeLog2;
}
@@ -534,11 +487,6 @@ class MemoryChunk {
return this->address() + (index << kPointerSizeLog2);
}
template <MarkingMode mode = MarkingMode::FULL>
void ClearLiveness();
void PrintMarkbits() { markbits()->Print(); }
void SetFlag(Flag flag) { flags_ |= flag; }
void ClearFlag(Flag flag) { flags_ &= ~Flags(flag); }
bool IsFlagSet(Flag flag) { return (flags_ & flag) != 0; }
@@ -622,9 +570,6 @@ class MemoryChunk {
base::VirtualMemory* reserved_memory() { return &reservation_; }
template <MarkingMode mode = MarkingMode::FULL>
inline void TraceLiveBytes(intptr_t old_value, intptr_t new_value);
size_t size_;
Flags flags_;
@@ -686,6 +631,7 @@ class MemoryChunk {
private:
void InitializeReservedMemory() { reservation_.Reset(); }
friend class MarkingState;
friend class MemoryAllocator;
friend class MemoryChunkValidator;
};
@@ -695,6 +641,50 @@ DEFINE_OPERATORS_FOR_FLAGS(MemoryChunk::Flags)
static_assert(kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory,
"kMaxRegularHeapObjectSize <= MemoryChunk::kAllocatableMemory");
class MarkingState {
public:
static MarkingState External(HeapObject* object) {
return External(MemoryChunk::FromAddress(object->address()));
}
static MarkingState External(MemoryChunk* chunk) {
return MarkingState(chunk->young_generation_bitmap_,
&chunk->young_generation_live_byte_count_);
}
static MarkingState Internal(HeapObject* object) {
return Internal(MemoryChunk::FromAddress(object->address()));
}
static MarkingState Internal(MemoryChunk* chunk) {
return MarkingState(
Bitmap::FromAddress(chunk->address() + MemoryChunk::kHeaderSize),
&chunk->live_byte_count_);
}
MarkingState(Bitmap* bitmap, intptr_t* live_bytes)
: bitmap_(bitmap), live_bytes_(live_bytes) {}
void IncrementLiveBytes(intptr_t by) const {
*live_bytes_ += static_cast<int>(by);
}
void SetLiveBytes(intptr_t value) const {
*live_bytes_ = static_cast<int>(value);
}
void ClearLiveness() const {
bitmap_->Clear();
*live_bytes_ = 0;
}
Bitmap* bitmap() const { return bitmap_; }
intptr_t live_bytes() const { return *live_bytes_; }
private:
Bitmap* bitmap_;
intptr_t* live_bytes_;
};
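(Illustration, not part of the commit: a minimal standalone model of the class above, with toy stand-ins for Bitmap and MemoryChunk, showing that a MarkingState aliases the chunk's bitmap and counter rather than owning them. All Toy* names are invented for this sketch.)

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Toy stand-in for V8's Bitmap; layout is a simplified assumption.
struct ToyBitmap {
  std::vector<bool> bits;
  explicit ToyBitmap(std::size_t cells) : bits(cells, false) {}
  void Clear() { bits.assign(bits.size(), false); }
};

class ToyMarkingState {
 public:
  ToyMarkingState(ToyBitmap* bitmap, std::intptr_t* live_bytes)
      : bitmap_(bitmap), live_bytes_(live_bytes) {}
  void IncrementLiveBytes(std::intptr_t by) const { *live_bytes_ += by; }
  void ClearLiveness() const {
    bitmap_->Clear();
    *live_bytes_ = 0;
  }
  std::intptr_t live_bytes() const { return *live_bytes_; }

 private:
  ToyBitmap* bitmap_;          // not owned; in V8 it lives in the chunk header
  std::intptr_t* live_bytes_;  // not owned; aliases the chunk's counter
};

int main() {
  ToyBitmap bitmap(1024);
  std::intptr_t live_byte_count = 0;  // plays the role of live_byte_count_
  // Constructing the state is analogous to MarkingState::Internal(chunk).
  ToyMarkingState state(&bitmap, &live_byte_count);
  state.IncrementLiveBytes(64);
  std::printf("live bytes: %ld\n", static_cast<long>(state.live_bytes()));
  state.ClearLiveness();  // clears the bitmap and zeroes the shared counter
  std::printf("after ClearLiveness: %ld\n",
              static_cast<long>(state.live_bytes()));
  return 0;
}

The real class additionally offers SetLiveBytes() and the Internal/External factories shown above.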
// -----------------------------------------------------------------------------
// A page is a memory chunk of size 1MB. Large object pages may be larger.
//


@@ -2005,9 +2005,10 @@ void WeakCell::initialize(HeapObject* val) {
// We just have to execute the generational barrier here because we never
// mark through a weak cell and collect evacuation candidates when we process
// all weak cells.
WriteBarrierMode mode = ObjectMarking::IsBlack(this)
? UPDATE_WRITE_BARRIER
: UPDATE_WEAK_WRITE_BARRIER;
WriteBarrierMode mode =
ObjectMarking::IsBlack(this, MarkingState::Internal(this))
? UPDATE_WRITE_BARRIER
: UPDATE_WEAK_WRITE_BARRIER;
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
}


@@ -26,7 +26,7 @@ void CheckInvariantsOfAbortedPage(Page* page) {
// 1) Markbits are cleared
// 2) The page is not marked as evacuation candidate anymore
// 3) The page is not marked as aborted compaction anymore.
CHECK(page->markbits()->IsClean());
CHECK(MarkingState::Internal(page).bitmap()->IsClean());
CHECK(!page->IsEvacuationCandidate());
CHECK(!page->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
}


@@ -2416,8 +2416,9 @@ TEST(InstanceOfStubWriteBarrier) {
CHECK(f->IsOptimized());
while (!Marking::IsBlack(ObjectMarking::MarkBitFrom(f->code())) &&
!marking->IsStopped()) {
while (
!ObjectMarking::IsBlack(f->code(), MarkingState::Internal(f->code())) &&
!marking->IsStopped()) {
// Discard any pending GC requests; otherwise we will get a GC when we enter
// the code below.
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
@@ -5363,8 +5364,9 @@ TEST(Regress3631) {
Handle<JSReceiver> obj =
v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
while (!Marking::IsBlack(
ObjectMarking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
HeapObject* weak_map_table = HeapObject::cast(weak_map->table());
while (!ObjectMarking::IsBlack(weak_map_table,
MarkingState::Internal(weak_map_table)) &&
!marking->IsStopped()) {
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8);
@@ -6114,10 +6116,10 @@ TEST(Regress598319) {
}
CHECK(heap->lo_space()->Contains(arr.get()));
CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(arr.get())));
CHECK(ObjectMarking::IsWhite(arr.get(), MarkingState::Internal(arr.get())));
for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsWhite(
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
CHECK(ObjectMarking::IsWhite(arr_value, MarkingState::Internal(arr_value)));
}
// Start incremental marking.
@@ -6131,8 +6133,8 @@ TEST(Regress598319) {
// Check that we have not marked the interesting array during root scanning.
for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsWhite(
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
CHECK(ObjectMarking::IsWhite(arr_value, MarkingState::Internal(arr_value)));
}
// Now we search for a state where we are in incremental marking and have
@@ -6167,8 +6169,8 @@ TEST(Regress598319) {
// All objects need to be black after marking. If a white object crossed the
// progress bar, we would fail here.
for (int i = 0; i < arr.get()->length(); i++) {
CHECK(Marking::IsBlack(
ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
HeapObject* arr_value = HeapObject::cast(arr.get()->get(i));
CHECK(ObjectMarking::IsBlack(arr_value, MarkingState::Internal(arr_value)));
}
}
@@ -6314,13 +6316,13 @@ TEST(LeftTrimFixedArrayInBlackArea) {
isolate->factory()->NewFixedArray(4, TENURED);
Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
CHECK(heap->old_space()->Contains(*array));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
// Now left trim the allocated black area. A filler has to be installed
// for the trimmed area and all mark bits of the trimmed area have to be
// cleared.
FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
heap::GcAndSweep(heap, OLD_SPACE);
}
@@ -6357,8 +6359,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
Address start_address = array->address();
Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
CHECK(page->markbits()->AllBitsSetInRange(
CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
CHECK(MarkingState::Internal(page).bitmap()->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address),
page->AddressToMarkbitIndex(end_address)));
CHECK(heap->old_space()->Contains(*array));
@@ -6371,8 +6373,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
trimmed = heap->LeftTrimFixedArray(previous, 1);
HeapObject* filler = HeapObject::FromAddress(previous->address());
CHECK(filler->IsFiller());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(previous)));
CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
CHECK(ObjectMarking::IsBlack(previous, MarkingState::Internal(previous)));
previous = trimmed;
}
@@ -6382,8 +6384,8 @@ TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
trimmed = heap->LeftTrimFixedArray(previous, i);
HeapObject* filler = HeapObject::FromAddress(previous->address());
CHECK(filler->IsFiller());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(previous)));
CHECK(ObjectMarking::IsBlack(trimmed, MarkingState::Internal(trimmed)));
CHECK(ObjectMarking::IsBlack(previous, MarkingState::Internal(previous)));
previous = trimmed;
}
}
@@ -6423,8 +6425,9 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
Address start_address = array->address();
Address end_address = start_address + array->Size();
Page* page = Page::FromAddress(start_address);
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
CHECK(page->markbits()->AllBitsSetInRange(
CHECK(ObjectMarking::IsBlack(*array, MarkingState::Internal(*array)));
CHECK(MarkingState::Internal(page).bitmap()->AllBitsSetInRange(
page->AddressToMarkbitIndex(start_address),
page->AddressToMarkbitIndex(end_address)));
CHECK(heap->old_space()->Contains(*array));
@@ -6434,7 +6437,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
heap->RightTrimFixedArray(*array, 1);
HeapObject* filler = HeapObject::FromAddress(previous);
CHECK(filler->IsFiller());
CHECK(Marking::IsImpossible(ObjectMarking::MarkBitFrom(filler)));
CHECK(ObjectMarking::IsImpossible(filler, MarkingState::Internal(filler)));
// Trim 10 times by one, two, and three words.
for (int i = 1; i <= 3; i++) {
@@ -6443,7 +6446,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
heap->RightTrimFixedArray(*array, i);
HeapObject* filler = HeapObject::FromAddress(previous);
CHECK(filler->IsFiller());
CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(filler)));
CHECK(ObjectMarking::IsWhite(filler, MarkingState::Internal(filler)));
}
}


@@ -355,8 +355,7 @@ TEST(Regress5829) {
ClearRecordedSlots::kNo);
heap->old_space()->EmptyAllocationInfo();
Page* page = Page::FromAddress(array->address());
LiveObjectIterator<kGreyObjects> it(page,
MarkingState::FromPageInternal(page));
LiveObjectIterator<kGreyObjects> it(page, MarkingState::Internal(page));
HeapObject* object = nullptr;
while ((object = it.Next()) != nullptr) {
CHECK(!object->IsFiller());


@@ -65,7 +65,7 @@ UNINITIALIZED_TEST(PagePromotion_NewToOld) {
// Sanity check that the page meets the requirements for promotion.
const int threshold_bytes =
FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100;
CHECK_GE(first_page->LiveBytes(), threshold_bytes);
CHECK_GE(MarkingState::Internal(first_page).live_bytes(), threshold_bytes);
// Actual checks: The page is in new space first, but is moved to old space
// during a full GC.


@@ -1172,7 +1172,7 @@ TEST(DoScavengeWithIncrementalWriteBarrier) {
// in compacting mode and |obj_value|'s page is an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsCompacting());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj)));
CHECK(ObjectMarking::IsBlack(*obj, MarkingState::Internal(*obj)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger GCs so that |obj| moves to old gen.
@@ -1492,8 +1492,8 @@ static void TestIncrementalWriteBarrier(Handle<Map> map, Handle<Map> new_map,
// still active and |obj_value|'s page is indeed an evacuation candidate).
IncrementalMarking* marking = heap->incremental_marking();
CHECK(marking->IsMarking());
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj)));
CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*obj_value)));
CHECK(ObjectMarking::IsBlack(*obj, MarkingState::Internal(*obj)));
CHECK(ObjectMarking::IsBlack(*obj_value, MarkingState::Internal(*obj_value)));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
// Trigger incremental write barrier, which should add a slot to remembered