[heap] Reland Use atomic marking operations in incremental marking if
concurrent marking is enabled.

This patch adds a kAtomicity flag to IncrementalMarking that is set
depending on the V8_CONCURRENT_MARKING compile-time flag.

BUG=chromium:694255
CQ_INCLUDE_TRYBOTS=master.tryserver.chromium.linux:linux_chromium_rel_ng

Review-Url: https://codereview.chromium.org/2858343003
Cr-Commit-Position: refs/heads/master@{#45138}
Author: ulan, 2017-05-05 06:59:32 -07:00, committed by Commit bot
parent b9cebf8c9e
commit b1d96fa1d4
2 changed files with 48 additions and 36 deletions
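
For context before the diff: the sketch below illustrates, in simplified form, how a compile-time access-mode constant like the kAtomicity member added here can switch mark-bit reads and writes between atomic and plain operations. This is a minimal sketch with made-up names (AccessMode, MarkBitSketch), not V8's actual MarkBit API.

// Minimal sketch (hypothetical names, not V8's MarkBit API) of selecting
// atomic vs. non-atomic bit operations with a compile-time template flag.
#include <atomic>
#include <cstdint>

enum class AccessMode { ATOMIC, NON_ATOMIC };

#if defined(V8_CONCURRENT_MARKING)
constexpr AccessMode kAtomicity = AccessMode::ATOMIC;
#else
constexpr AccessMode kAtomicity = AccessMode::NON_ATOMIC;
#endif

class MarkBitSketch {
 public:
  MarkBitSketch(std::atomic<uint32_t>* cell, uint32_t mask)
      : cell_(cell), mask_(mask) {}

  template <AccessMode mode>
  bool Get() const {
    return (cell_->load(std::memory_order_relaxed) & mask_) != 0;
  }

  // Returns true only for the caller that actually flipped the bit, so that
  // racing markers agree on who performed a color transition.
  template <AccessMode mode>
  bool Set() {
    if (mode == AccessMode::ATOMIC) {
      // fetch_or resolves races between concurrent markers.
      uint32_t old_bits = cell_->fetch_or(mask_, std::memory_order_relaxed);
      return (old_bits & mask_) == 0;
    }
    // Single-threaded marking: a plain read-modify-write suffices.
    uint32_t old_bits = cell_->load(std::memory_order_relaxed);
    cell_->store(old_bits | mask_, std::memory_order_relaxed);
    return (old_bits & mask_) == 0;
  }

 private:
  std::atomic<uint32_t>* cell_;
  uint32_t mask_;
};

// Call sites pick the mode once, e.g. mark_bit.Set<kAtomicity>(), which is
// exactly the shape of the call-site changes in the diff below.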

src/heap/incremental-marking.cc

@@ -52,10 +52,11 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
 bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
   HeapObject* value_heap_obj = HeapObject::cast(value);
-  DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
-                                      marking_state(value_heap_obj)));
-  DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
-  const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj));
+  DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(
+      value_heap_obj, marking_state(value_heap_obj)));
+  DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
+  const bool is_black =
+      ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj));
   if (is_black && WhiteToGreyAndPush(value_heap_obj)) {
     RestartIfNotMarking();
@@ -129,7 +130,7 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
 }
 bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
-  if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) {
+  if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) {
     marking_deque()->Push(obj);
     return true;
   }
@@ -153,18 +154,18 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
   MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to));
   MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));
-  if (Marking::IsBlack(old_mark_bit)) {
+  if (Marking::IsBlack<kAtomicity>(old_mark_bit)) {
     if (from->address() + kPointerSize == to->address()) {
       // The old and the new markbits overlap. The |to| object has the
       // grey color. To make it black, we need to set the second bit.
-      DCHECK(new_mark_bit.Get());
-      new_mark_bit.Next().Set();
+      DCHECK(new_mark_bit.Get<kAtomicity>());
+      new_mark_bit.Next().Set<kAtomicity>();
     } else {
-      bool success = Marking::WhiteToBlack(new_mark_bit);
+      bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
       DCHECK(success);
       USE(success);
     }
-  } else if (Marking::IsGrey(old_mark_bit)) {
+  } else if (Marking::IsGrey<kAtomicity>(old_mark_bit)) {
     if (from->address() + kPointerSize == to->address()) {
       // The old and the new markbits overlap. The |to| object has the
       // white color. To make it grey, we need to set the first bit.
@@ -173,7 +174,7 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
       new_mark_bit.Set();
       DCHECK(!new_mark_bit.Next().Get());
     } else {
-      bool success = Marking::WhiteToGrey(new_mark_bit);
+      bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit);
       DCHECK(success);
       USE(success);
     }
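
The overlapping-markbit comments in the TransferMark hunks above rely on the two-consecutive-bit color encoding that the Marking helpers implement. A small illustrative decoder (hypothetical helper names, not V8's API) makes those transitions explicit:

// Hypothetical decoder for the two-bit mark encoding implied by the comments
// above: white = 00, grey = 10, black = 11 (01 is the "impossible" state
// checked by ObjectMarking::IsImpossible). Grey -> black only needs the
// second bit set; white -> grey only needs the first bit set.
enum class Color { kWhite, kGrey, kBlack, kImpossible };

Color ColorFromMarkBits(bool first_bit, bool second_bit) {
  if (first_bit) return second_bit ? Color::kBlack : Color::kGrey;
  return second_bit ? Color::kImpossible : Color::kWhite;
}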
@@ -219,11 +220,11 @@ class IncrementalMarkingMarkingVisitor
       } while (scan_until_end && start_offset < object_size);
       chunk->set_progress_bar(start_offset);
       if (start_offset < object_size) {
-        if (ObjectMarking::IsGrey(
+        if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
                object, heap->incremental_marking()->marking_state(object))) {
           heap->incremental_marking()->marking_deque()->Unshift(object);
         } else {
-          DCHECK(ObjectMarking::IsBlack(
+          DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
               object, heap->incremental_marking()->marking_state(object)));
           heap->mark_compact_collector()->UnshiftBlack(object);
         }
@@ -248,8 +249,9 @@ class IncrementalMarkingMarkingVisitor
       // Mark the object grey if it is white, do not enque it into the marking
       // deque.
       Heap* heap = map->GetHeap();
-      bool ignored = ObjectMarking::WhiteToGrey(
-          heap_obj, heap->incremental_marking()->marking_state(heap_obj));
+      bool ignored =
+          ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>(
+              heap_obj, heap->incremental_marking()->marking_state(heap_obj));
       USE(ignored);
     }
   }
@@ -284,13 +286,14 @@ class IncrementalMarkingMarkingVisitor
   // Returns true if object needed marking and false otherwise.
   INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
-    return ObjectMarking::WhiteToBlack(
+    return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>(
         heap_object, heap->incremental_marking()->marking_state(heap_object));
   }
 };
 void IncrementalMarking::IterateBlackObject(HeapObject* object) {
-  if (IsMarking() && ObjectMarking::IsBlack(object, marking_state(object))) {
+  if (IsMarking() &&
+      ObjectMarking::IsBlack<kAtomicity>(object, marking_state(object))) {
     Page* page = Page::FromAddress(object->address());
     if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
       // IterateBlackObject requires us to visit the whole object.
@@ -647,7 +650,7 @@ void IncrementalMarking::ProcessWeakCells() {
     HeapObject* value = HeapObject::cast(weak_cell->value());
     // Remove weak cells with live objects from the list, they do not need
     // clearing.
-    if (ObjectMarking::IsBlackOrGrey(value, marking_state(value))) {
+    if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) {
       // Record slot, if value is pointing to an evacuation candidate.
       Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
       heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
@@ -678,9 +681,10 @@ bool ShouldRetainMap(Map* map, int age) {
   Object* constructor = map->GetConstructor();
   Heap* heap = map->GetHeap();
   if (!constructor->IsHeapObject() ||
-      ObjectMarking::IsWhite(HeapObject::cast(constructor),
-                             heap->incremental_marking()->marking_state(
-                                 HeapObject::cast(constructor)))) {
+      ObjectMarking::IsWhite<IncrementalMarking::kAtomicity>(
+          HeapObject::cast(constructor),
+          heap->incremental_marking()->marking_state(
+              HeapObject::cast(constructor)))) {
     // The constructor is dead, no new objects with this map can
     // be created. Do not retain this map.
     return false;
@@ -710,14 +714,15 @@ void IncrementalMarking::RetainMaps() {
     int new_age;
     Map* map = Map::cast(cell->value());
     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
-        ObjectMarking::IsWhite(map, marking_state(map))) {
+        ObjectMarking::IsWhite<kAtomicity>(map, marking_state(map))) {
       if (ShouldRetainMap(map, age)) {
        WhiteToGreyAndPush(map);
      }
      Object* prototype = map->prototype();
      if (age > 0 && prototype->IsHeapObject() &&
-          ObjectMarking::IsWhite(HeapObject::cast(prototype),
-                                 marking_state(HeapObject::cast(prototype)))) {
+          ObjectMarking::IsWhite<kAtomicity>(
+              HeapObject::cast(prototype),
+              marking_state(HeapObject::cast(prototype)))) {
        // The prototype is not marked, age the map.
        new_age = age - 1;
      } else {
@@ -808,21 +813,21 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
       return nullptr;
     }
     HeapObject* dest = map_word.ToForwardingAddress();
-    if (ObjectMarking::IsBlack(dest, marking_state(dest))) {
+    if (ObjectMarking::IsBlack<kAtomicity>(dest, marking_state(dest))) {
       // The object is already processed by the marker.
       return nullptr;
     }
-    DCHECK(
-        ObjectMarking::IsGrey(obj, marking_state(obj)) ||
-        (obj->IsFiller() && ObjectMarking::IsWhite(obj, marking_state(obj))));
+    DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
+           (obj->IsFiller() &&
+            ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))));
     return dest;
   } else {
-    DCHECK(ObjectMarking::IsGrey(obj, marking_state(obj)) ||
+    DCHECK(ObjectMarking::IsGrey<kAtomicity>(obj, marking_state(obj)) ||
            (obj->IsFiller() &&
-            ObjectMarking::IsWhite(obj, marking_state(obj))) ||
+            ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj))) ||
           (MemoryChunk::FromAddress(obj->address())
                ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
-            ObjectMarking::IsBlack(obj, marking_state(obj))));
+            ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj))));
     // Skip one word filler objects that appear on the
     // stack when we perform in place array shift.
     return (obj->map() == filler_map) ? nullptr : obj;
@@ -839,11 +844,11 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
 #if ENABLE_SLOW_DCHECKS
   MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj));
   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
-  SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
+  SLOW_DCHECK(Marking::IsGrey<kAtomicity>(mark_bit) ||
               (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
-               Marking::IsBlack(mark_bit)));
+               Marking::IsBlack<kAtomicity>(mark_bit)));
 #endif
-  ObjectMarking::GreyToBlack(obj, marking_state(obj));
+  ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj));
 }
 intptr_t IncrementalMarking::ProcessMarkingDeque(
@@ -856,7 +861,7 @@ intptr_t IncrementalMarking::ProcessMarkingDeque(
     // Left trimming may result in white, grey, or black filler objects on the
     // marking deque. Ignore these objects.
     if (obj->IsFiller()) {
-      DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
+      DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
       continue;
     }
@@ -912,7 +917,8 @@ void IncrementalMarking::Hurry() {
           Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
       if (!cache->IsUndefined(heap_->isolate())) {
         // Mark the cache black if it is grey.
-        bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache));
+        bool ignored =
+            ObjectMarking::GreyToBlack<kAtomicity>(cache, marking_state(cache));
         USE(ignored);
       }
       context = Context::cast(context)->next_context_link();

src/heap/incremental-marking.h

@@ -182,6 +182,12 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   static const intptr_t kActivationThreshold = 0;
 #endif
+#if V8_CONCURRENT_MARKING
+  static const MarkBit::AccessMode kAtomicity = MarkBit::AccessMode::ATOMIC;
+#else
+  static const MarkBit::AccessMode kAtomicity =
+      MarkBit::AccessMode::NON_ATOMIC;
+#endif
   void FinalizeSweeping();
   size_t Step(size_t bytes_to_process, CompletionAction action,