[heap] Reland "Make non-atomic markbit operations consistent with atomic ones."

Now non-atomic color transition operations return a boolean indicating
whether the transition succeeded or not.

This allows replacing a color check followed by a transition with a
single transition operation. For example:

if (IsWhite(object)) {
  WhiteToBlack(object);
  Foo();
}

becomes

if (WhiteToBlack(object)) {
  Foo();
}
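
The boolean comes from the underlying mark-bit accessors: the non-atomic
MarkBit::Set()/Clear() in this CL report whether the bit actually changed.
A minimal standalone sketch of that test-and-set contract (MarkBitModel is
an illustrative name used only for this example, not the V8 class):

#include <cassert>
#include <cstdint>

// Illustrative model of a mark bit backed by a 32-bit cell.
struct MarkBitModel {
  uint32_t* cell;
  uint32_t mask;

  // Returns true only if this call transitioned the bit from 0 to 1,
  // mirroring the new non-atomic Set() semantics in this CL.
  bool Set() {
    uint32_t old_value = *cell;
    *cell = old_value | mask;
    return (old_value & mask) == 0;
  }
};

int main() {
  uint32_t cell = 0;
  MarkBitModel bit{&cell, 1u << 3};
  assert(bit.Set());   // first transition succeeds
  assert(!bit.Set());  // bit already set: no transition happened
  return 0;
}

A caller such as WhiteToGreyAndPush() can then push the object onto the
marking deque only when Set() reports a real white-to-grey transition.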

BUG=chromium:694255

CQ_INCLUDE_TRYBOTS=master.tryserver.chromium.linux:linux_chromium_rel_ng

Review-Url: https://codereview.chromium.org/2860323003
Cr-Commit-Position: refs/heads/master@{#45123}
Author: ulan, 2017-05-05 03:16:01 -07:00 (committed by Commit bot)
Parent: 56352067f2
Commit: c0a65cd295
7 changed files with 112 additions and 137 deletions

@ -4270,7 +4270,7 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
 void Heap::NotifyObjectLayoutChange(HeapObject* object,
                                     const DisallowHeapAllocation&) {
   if (FLAG_incremental_marking && incremental_marking()->IsMarking()) {
-    incremental_marking()->MarkGrey(object);
+    incremental_marking()->WhiteToGreyAndPush(object);
   }
 #ifdef VERIFY_HEAP
   DCHECK(pending_layout_change_object_ == nullptr);
@ -4834,7 +4834,7 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
     // promoted objects.
     if (heap_->incremental_marking()->black_allocation()) {
       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
-      heap_->incremental_marking()->MarkGrey(code);
+      heap_->incremental_marking()->WhiteToGreyAndPush(code);
     }
   }
@ -5628,7 +5628,7 @@ void Heap::RegisterExternallyReferencedObject(Object** object) {
   HeapObject* heap_object = HeapObject::cast(*object);
   DCHECK(Contains(heap_object));
   if (FLAG_incremental_marking_wrappers && incremental_marking()->IsMarking()) {
-    incremental_marking()->MarkGrey(heap_object);
+    incremental_marking()->WhiteToGreyAndPush(heap_object);
   } else {
     DCHECK(mark_compact_collector()->in_use());
     mark_compact_collector()->MarkObject(heap_object);

@ -57,9 +57,7 @@ bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
   DCHECK(!ObjectMarking::IsImpossible(obj, marking_state(obj)));
   const bool is_black = ObjectMarking::IsBlack(obj, marking_state(obj));

-  if (is_black &&
-      ObjectMarking::IsWhite(value_heap_obj, marking_state(value_heap_obj))) {
-    WhiteToGreyAndPush(value_heap_obj);
+  if (is_black && WhiteToGreyAndPush(value_heap_obj)) {
     RestartIfNotMarking();
   }
   return is_compacting_ && is_black;
@ -130,9 +128,12 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
   }
 }

-void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
-  ObjectMarking::WhiteToGrey(obj, marking_state(obj));
-  marking_deque()->Push(obj);
+bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
+  if (ObjectMarking::WhiteToGrey(obj, marking_state(obj))) {
+    marking_deque()->Push(obj);
+    return true;
+  }
+  return false;
 }

 void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
@ -153,9 +154,29 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
   MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));

   if (Marking::IsBlack(old_mark_bit)) {
-    Marking::MarkBlack(new_mark_bit);
+    if (from->address() + kPointerSize == to->address()) {
+      // The old and the new markbits overlap. The |to| object has the
+      // grey color. To make it black, we need to set second bit.
+      DCHECK(new_mark_bit.Get());
+      new_mark_bit.Next().Set();
+    } else {
+      bool success = Marking::WhiteToBlack(new_mark_bit);
+      DCHECK(success);
+      USE(success);
+    }
   } else if (Marking::IsGrey(old_mark_bit)) {
-    Marking::WhiteToGrey(new_mark_bit);
+    if (from->address() + kPointerSize == to->address()) {
+      // The old and the new markbits overlap. The |to| object has the
+      // white color. To make it black, we need to set both bits.
+      // Note that Marking::WhiteToGrey does not work here because
+      // old_mark_bit.Next() can be set by the concurrent marker at any time.
+      new_mark_bit.Set();
+      new_mark_bit.Next().Set();
+    } else {
+      bool success = Marking::WhiteToGrey(new_mark_bit);
+      DCHECK(success);
+      USE(success);
+    }
     marking_deque()->Push(to);
     RestartIfNotMarking();
   }
@ -227,12 +248,9 @@ class IncrementalMarkingMarkingVisitor
         // Mark the object grey if it is white, do not enque it into the marking
         // deque.
         Heap* heap = map->GetHeap();
-        if (ObjectMarking::IsWhite(
-                heap_obj,
-                heap->incremental_marking()->marking_state(heap_obj))) {
-          ObjectMarking::WhiteToGrey(
-              heap_obj, heap->incremental_marking()->marking_state(heap_obj));
-        }
+        bool ignored = ObjectMarking::WhiteToGrey(
+            heap_obj, heap->incremental_marking()->marking_state(heap_obj));
+        USE(ignored);
       }
     }
     VisitNativeContext(map, context);
@ -259,21 +277,15 @@ class IncrementalMarkingMarkingVisitor
   // Marks the object grey and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
-    heap->incremental_marking()->MarkGrey(HeapObject::cast(obj));
+    heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
   }

   // Marks the object black without pushing it on the marking stack.
   // Returns true if object needed marking and false otherwise.
   INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
-    if (ObjectMarking::IsWhite(
-            heap_object,
-            heap->incremental_marking()->marking_state(heap_object))) {
-      ObjectMarking::WhiteToBlack(
-          heap_object, heap->incremental_marking()->marking_state(heap_object));
-      return true;
-    }
-    return false;
+    return ObjectMarking::WhiteToBlack(
+        heap_object, heap->incremental_marking()->marking_state(heap_object));
   }
 };
@ -285,7 +297,7 @@ void IncrementalMarking::IterateBlackObject(HeapObject* object) {
       page->ResetProgressBar();
     }
     Map* map = object->map();
-    MarkGrey(map);
+    WhiteToGreyAndPush(map);
     IncrementalMarkingMarkingVisitor::IterateBody(map, object);
   }
 }
@ -309,7 +321,7 @@ class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
     Object* obj = *p;
     if (!obj->IsHeapObject()) return;
-    heap_->incremental_marking()->MarkGrey(HeapObject::cast(obj));
+    heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
   }

   Heap* heap_;
@ -700,7 +712,7 @@ void IncrementalMarking::RetainMaps() {
     if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
         ObjectMarking::IsWhite(map, marking_state(map))) {
       if (ShouldRetainMap(map, age)) {
-        MarkGrey(map);
+        WhiteToGreyAndPush(map);
       }
       Object* prototype = map->prototype();
       if (age > 0 && prototype->IsHeapObject() &&
@ -820,7 +832,7 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
 void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
-  MarkGrey(map);
+  WhiteToGreyAndPush(map);

   IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
@ -831,17 +843,6 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
               (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
                Marking::IsBlack(mark_bit)));
 #endif
-  MarkBlack(obj, size);
-}
-
-void IncrementalMarking::MarkGrey(HeapObject* object) {
-  if (ObjectMarking::IsWhite(object, marking_state(object))) {
-    WhiteToGreyAndPush(object);
-  }
-}
-
-void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
-  if (ObjectMarking::IsBlack(obj, marking_state(obj))) return;
   ObjectMarking::GreyToBlack(obj, marking_state(obj));
 }
@ -910,9 +911,9 @@ void IncrementalMarking::Hurry() {
     HeapObject* cache = HeapObject::cast(
         Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
     if (!cache->IsUndefined(heap_->isolate())) {
-      if (ObjectMarking::IsGrey(cache, marking_state(cache))) {
-        ObjectMarking::GreyToBlack(cache, marking_state(cache));
-      }
+      // Mark the cache black if it is grey.
+      bool ignored = ObjectMarking::GreyToBlack(cache, marking_state(cache));
+      USE(ignored);
     }
     context = Context::cast(context)->next_context_link();
   }

@ -65,9 +65,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
     return MarkingState::Internal(chunk);
   }

-  void MarkBlack(HeapObject* object, int size);
-  void MarkGrey(HeapObject* object);
-
   // Transfers mark bits without requiring proper object headers.
   void TransferMark(Heap* heap, HeapObject* from, HeapObject* to);
@ -82,9 +79,15 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
     DCHECK(ObjectMarking::IsWhite<access_mode>(to, marking_state(to)));
     if (ObjectMarking::IsGrey<access_mode>(from, marking_state(from))) {
-      ObjectMarking::WhiteToGrey<access_mode>(to, marking_state(to));
+      bool success =
+          ObjectMarking::WhiteToGrey<access_mode>(to, marking_state(to));
+      DCHECK(success);
+      USE(success);
     } else if (ObjectMarking::IsBlack<access_mode>(from, marking_state(from))) {
-      ObjectMarking::WhiteToBlack<access_mode>(to, marking_state(to));
+      bool success =
+          ObjectMarking::WhiteToBlack<access_mode>(to, marking_state(to));
+      DCHECK(success);
+      USE(success);
     }
   }
@ -210,7 +213,9 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   void RecordCodeTargetPatch(Code* host, Address pc, HeapObject* value);
   void RecordCodeTargetPatch(Address pc, HeapObject* value);

-  void WhiteToGreyAndPush(HeapObject* obj);
+  // Returns true if the function succeeds in transitioning the object
+  // from white to grey.
+  bool WhiteToGreyAndPush(HeapObject* obj);

   inline void SetOldSpacePageFlags(MemoryChunk* chunk) {
     SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());

@ -38,19 +38,15 @@ void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
 }

 void MarkCompactCollector::MarkObject(HeapObject* obj) {
-  if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
+  if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
           obj, MarkingState::Internal(obj))) {
-    ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
-        obj, MarkingState::Internal(obj));
     PushBlack(obj);
   }
 }

 void MinorMarkCompactCollector::MarkObject(HeapObject* obj) {
-  if (ObjectMarking::IsWhite<MarkBit::NON_ATOMIC>(
+  if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
           obj, MarkingState::External(obj))) {
     PushBlack(obj);
   }
 }

@ -1196,12 +1196,10 @@ class StaticYoungGenerationMarkingVisitor
     StackLimitCheck check(heap->isolate());
     if (check.HasOverflowed()) return false;

-    if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
-            object, MarkingState::External(object)))
-      return true;
-    ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
-        object, MarkingState::External(object));
-    IterateBody(object->map(), object);
+    if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
+            object, MarkingState::External(object))) {
+      IterateBody(object->map(), object);
+    }
     return true;
   }
 };
@ -1237,11 +1235,7 @@ class MarkCompactMarkingVisitor
   // Marks the object black without pushing it on the marking stack.
   // Returns true if object needed marking and false otherwise.
   INLINE(static bool MarkObjectWithoutPush(Heap* heap, HeapObject* object)) {
-    if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
-      ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
-      return true;
-    }
-    return false;
+    return ObjectMarking::WhiteToBlack(object, MarkingState::Internal(object));
   }

   // Mark object pointed to by p.
@ -1259,14 +1253,15 @@ class MarkCompactMarkingVisitor
                                          HeapObject* obj)) {
 #ifdef DEBUG
     DCHECK(collector->heap()->Contains(obj));
-    DCHECK(ObjectMarking::IsWhite(obj, MarkingState::Internal(obj)));
 #endif
-    Map* map = obj->map();
-    Heap* heap = obj->GetHeap();
-    ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
-    // Mark the map pointer and the body.
-    heap->mark_compact_collector()->MarkObject(map);
-    IterateBody(map, obj);
+    if (ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj))) {
+      Map* map = obj->map();
+      Heap* heap = obj->GetHeap();
+      ObjectMarking::WhiteToBlack(obj, MarkingState::Internal(obj));
+      // Mark the map pointer and the body.
+      heap->mark_compact_collector()->MarkObject(map);
+      IterateBody(map, obj);
+    }
   }

   // Visit all unmarked objects pointed to by [start, end).
@ -1284,8 +1279,6 @@ class MarkCompactMarkingVisitor
       if (!o->IsHeapObject()) continue;
       collector->RecordSlot(object, p, o);
       HeapObject* obj = HeapObject::cast(o);
-      if (ObjectMarking::IsBlackOrGrey(obj, MarkingState::Internal(obj)))
-        continue;
       VisitUnmarkedObject(collector, obj);
     }
     return true;
@ -1482,16 +1475,12 @@ class MinorMarkCompactCollector::RootMarkingVisitor : public RootVisitor {
     if (!collector_->heap()->InNewSpace(object)) return;

-    if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
-            object, MarkingState::External(object)))
-      return;
-
-    Map* map = object->map();
-    ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
-        object, MarkingState::External(object));
-    StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
-    collector_->EmptyMarkingDeque();
+    if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
+            object, MarkingState::External(object))) {
+      Map* map = object->map();
+      StaticYoungGenerationMarkingVisitor::IterateBody(map, object);
+      collector_->EmptyMarkingDeque();
+    }
   }

   MinorMarkCompactCollector* collector_;
@ -1532,22 +1521,16 @@ class MarkCompactCollector::RootMarkingVisitor : public ObjectVisitor,
     HeapObject* object = HeapObject::cast(*p);

-    if (ObjectMarking::IsBlackOrGrey<MarkBit::NON_ATOMIC>(
-            object, MarkingState::Internal(object)))
-      return;
-
-    Map* map = object->map();
-    // Mark the object.
-    ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
-        object, MarkingState::Internal(object));
-
-    // Mark the map pointer and body, and push them on the marking stack.
-    collector_->MarkObject(map);
-    MarkCompactMarkingVisitor::IterateBody(map, object);
-
-    // Mark all the objects reachable from the map and body. May leave
-    // overflowed objects in the heap.
-    collector_->EmptyMarkingDeque();
+    if (ObjectMarking::WhiteToBlack<MarkBit::NON_ATOMIC>(
+            object, MarkingState::Internal(object))) {
+      Map* map = object->map();
+      // Mark the map pointer and body, and push them on the marking stack.
+      collector_->MarkObject(map);
+      MarkCompactMarkingVisitor::IterateBody(map, object);
+      // Mark all the objects reachable from the map and body. May leave
+      // overflowed objects in the heap.
+      collector_->EmptyMarkingDeque();
+    }
   }

   MarkCompactCollector* collector_;
@ -1718,8 +1701,7 @@ void MarkCompactCollector::DiscoverGreyObjectsWithIterator(T* it) {
   Map* filler_map = heap()->one_pointer_filler_map();
   for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
     if ((object->map() != filler_map) &&
-        ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
-      ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
+        ObjectMarking::GreyToBlack(object, MarkingState::Internal(object))) {
       PushBlack(object);
       if (marking_deque()->IsFull()) return;
     }
@ -1731,8 +1713,10 @@ void MarkCompactCollector::DiscoverGreyObjectsOnPage(MemoryChunk* p) {
   LiveObjectIterator<kGreyObjects> it(p, MarkingState::Internal(p));
   HeapObject* object = NULL;
   while ((object = it.Next()) != NULL) {
-    DCHECK(ObjectMarking::IsGrey(object, MarkingState::Internal(object)));
-    ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
+    bool success =
+        ObjectMarking::GreyToBlack(object, MarkingState::Internal(object));
+    DCHECK(success);
+    USE(success);
     PushBlack(object);
     if (marking_deque()->IsFull()) return;
   }
@ -2295,15 +2279,12 @@ bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) {
 void MarkCompactCollector::MarkStringTable(RootMarkingVisitor* visitor) {
   StringTable* string_table = heap()->string_table();
   // Mark the string table itself.
-  if (ObjectMarking::IsWhite(string_table,
-                             MarkingState::Internal(string_table))) {
-    // String table could have already been marked by visiting the handles list.
-    ObjectMarking::WhiteToBlack(string_table,
-                                MarkingState::Internal(string_table));
+  if (ObjectMarking::WhiteToBlack(string_table,
+                                  MarkingState::Internal(string_table))) {
+    // Explicitly mark the prefix.
+    string_table->IteratePrefix(visitor);
+    ProcessMarkingDeque();
   }
-  // Explicitly mark the prefix.
-  string_table->IteratePrefix(visitor);
-  ProcessMarkingDeque();
 }

 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) {

@ -82,8 +82,6 @@ class ObjectMarking : public AllStatic {
   template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
   V8_INLINE static bool BlackToGrey(HeapObject* obj,
                                     const MarkingState& state) {
-    DCHECK(
-        (access_mode == MarkBit::ATOMIC || IsBlack<access_mode>(obj, state)));
     MarkBit markbit = MarkBitFrom(obj, state);
     if (!Marking::BlackToGrey<access_mode>(markbit)) return false;
     state.IncrementLiveBytes<access_mode>(-obj->Size());
@ -93,24 +91,19 @@ class ObjectMarking : public AllStatic {
   template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
   V8_INLINE static bool WhiteToGrey(HeapObject* obj,
                                     const MarkingState& state) {
-    DCHECK(
-        (access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state)));
     return Marking::WhiteToGrey<access_mode>(MarkBitFrom(obj, state));
   }

   template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
   V8_INLINE static bool WhiteToBlack(HeapObject* obj,
                                      const MarkingState& state) {
-    DCHECK(
-        (access_mode == MarkBit::ATOMIC || IsWhite<access_mode>(obj, state)));
-    if (!ObjectMarking::WhiteToGrey<access_mode>(obj, state)) return false;
-    return ObjectMarking::GreyToBlack<access_mode>(obj, state);
+    return ObjectMarking::WhiteToGrey<access_mode>(obj, state) &&
+           ObjectMarking::GreyToBlack<access_mode>(obj, state);
   }

   template <MarkBit::AccessMode access_mode = MarkBit::NON_ATOMIC>
   V8_INLINE static bool GreyToBlack(HeapObject* obj,
                                     const MarkingState& state) {
-    DCHECK((access_mode == MarkBit::ATOMIC || IsGrey<access_mode>(obj, state)));
     MarkBit markbit = MarkBitFrom(obj, state);
     if (!Marking::GreyToBlack<access_mode>(markbit)) return false;
     state.IncrementLiveBytes<access_mode>(obj->Size());

@ -38,12 +38,16 @@ class MarkBit {
     }
   }

+  // The function returns true if it succeeded to
+  // transition the bit from 0 to 1.
   template <AccessMode mode = NON_ATOMIC>
   inline bool Set();

   template <AccessMode mode = NON_ATOMIC>
   inline bool Get();

+  // The function returns true if it succeeded to
+  // transition the bit from 1 to 0.
   template <AccessMode mode = NON_ATOMIC>
   inline bool Clear();
@ -57,8 +61,9 @@ class MarkBit {
 template <>
 inline bool MarkBit::Set<MarkBit::NON_ATOMIC>() {
-  *cell_ |= mask_;
-  return true;
+  base::Atomic32 old_value = *cell_;
+  *cell_ = old_value | mask_;
+  return (old_value & mask_) == 0;
 }

 template <>
@ -86,8 +91,9 @@ inline bool MarkBit::Get<MarkBit::ATOMIC>() {
 template <>
 inline bool MarkBit::Clear<MarkBit::NON_ATOMIC>() {
-  *cell_ &= ~mask_;
-  return true;
+  base::Atomic32 old_value = *cell_;
+  *cell_ = old_value & ~mask_;
+  return (old_value & mask_) == mask_;
 }

 template <>
@ -412,24 +418,17 @@ class Marking : public AllStatic {
   template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
   INLINE(static bool WhiteToGrey(MarkBit markbit)) {
-    DCHECK(mode == MarkBit::ATOMIC || IsWhite(markbit));
     return markbit.Set<mode>();
   }

-  // Warning: this method is not safe in general in concurrent scenarios.
-  // If you know that nobody else will change the bits on the given location
-  // then you may use it.
   template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
-  INLINE(static void WhiteToBlack(MarkBit markbit)) {
-    DCHECK(mode == MarkBit::ATOMIC || IsWhite(markbit));
-    markbit.Set<mode>();
-    markbit.Next().Set<mode>();
+  INLINE(static bool WhiteToBlack(MarkBit markbit)) {
+    return markbit.Set<mode>() && markbit.Next().Set<mode>();
   }

   template <MarkBit::AccessMode mode = MarkBit::NON_ATOMIC>
   INLINE(static bool GreyToBlack(MarkBit markbit)) {
-    DCHECK(mode == MarkBit::ATOMIC || IsGrey(markbit));
-    return markbit.Next().Set<mode>();
+    return markbit.Get<mode>() && markbit.Next().Set<mode>();
   }

   enum ObjectColor {