diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 694dbea562..0863f7de95 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -438,6 +438,9 @@ DEFINE_bool(incremental_code_compaction, true,
 DEFINE_bool(cleanup_code_caches_at_gc, true,
             "Flush inline caches prior to mark compact collection and "
             "flush code caches in maps during mark compact cycle.")
+DEFINE_bool(use_marking_progress_bar, false,
+            "Use a progress bar to scan large objects in increments when "
+            "incremental marking is active.")
 DEFINE_int(random_seed, 0,
            "Default seed for initializing random generator "
            "(0, the default, means to use system random).")
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index b34d6d902c..e9fd74c0d1 100644
--- a/src/incremental-marking.cc
+++ b/src/incremental-marking.cc
@@ -188,16 +188,78 @@ static void MarkObjectGreyDoNotEnqueue(Object* obj) {
 }
 
 
+static inline void MarkBlackOrKeepGrey(HeapObject* heap_object,
+                                       MarkBit mark_bit,
+                                       int size) {
+  ASSERT(!Marking::IsImpossible(mark_bit));
+  if (mark_bit.Get()) return;
+  mark_bit.Set();
+  MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
+  ASSERT(Marking::IsBlack(mark_bit));
+}
+
+
+static inline void MarkBlackOrKeepBlack(HeapObject* heap_object,
+                                        MarkBit mark_bit,
+                                        int size) {
+  ASSERT(!Marking::IsImpossible(mark_bit));
+  if (Marking::IsBlack(mark_bit)) return;
+  Marking::MarkBlack(mark_bit);
+  MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
+  ASSERT(Marking::IsBlack(mark_bit));
+}
+
+
 class IncrementalMarkingMarkingVisitor
     : public StaticMarkingVisitor<IncrementalMarkingMarkingVisitor> {
  public:
   static void Initialize() {
     StaticMarkingVisitor<IncrementalMarkingMarkingVisitor>::Initialize();
-
+    table_.Register(kVisitFixedArray, &VisitFixedArrayIncremental);
     table_.Register(kVisitNativeContext, &VisitNativeContextIncremental);
     table_.Register(kVisitJSRegExp, &VisitJSRegExp);
   }
 
+  static const int kProgressBarScanningChunk = 32 * 1024;
+
+  static void VisitFixedArrayIncremental(Map* map, HeapObject* object) {
+    MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+    // TODO(mstarzinger): Move setting of the flag to the allocation site of
+    // the array. The visitor should just check the flag.
+    if (FLAG_use_marking_progress_bar &&
+        chunk->owner()->identity() == LO_SPACE) {
+      chunk->SetFlag(MemoryChunk::HAS_PROGRESS_BAR);
+    }
+    if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
+      Heap* heap = map->GetHeap();
+      // When using a progress bar for large fixed arrays, scan only a chunk
+      // of the array and try to push it onto the marking deque again until
+      // it is fully scanned. Fall back to scanning it through to the end in
+      // case this fails because of a full deque.
+      int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
+      int start_offset = Max(FixedArray::BodyDescriptor::kStartOffset,
+                             chunk->progress_bar());
+      int end_offset = Min(object_size,
+                           start_offset + kProgressBarScanningChunk);
+      bool scan_until_end = false;
+      do {
+        VisitPointersWithAnchor(heap,
+                                HeapObject::RawField(object, 0),
+                                HeapObject::RawField(object, start_offset),
+                                HeapObject::RawField(object, end_offset));
+        start_offset = end_offset;
+        end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
+        scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
+      } while (scan_until_end && start_offset < object_size);
+      chunk->set_progress_bar(start_offset);
+      if (start_offset < object_size) {
+        heap->incremental_marking()->marking_deque()->UnshiftGrey(object);
+      }
+    } else {
+      FixedArrayVisitor::Visit(map, object);
+    }
+  }
+
   static void VisitNativeContextIncremental(Map* map, HeapObject* object) {
     Context* context = Context::cast(object);
 
@@ -234,15 +296,25 @@ class IncrementalMarkingMarkingVisitor
     }
   }
 
+  INLINE(static void VisitPointersWithAnchor(Heap* heap,
+                                             Object** anchor,
+                                             Object** start,
+                                             Object** end)) {
+    for (Object** p = start; p < end; p++) {
+      Object* obj = *p;
+      if (obj->NonFailureIsHeapObject()) {
+        heap->mark_compact_collector()->RecordSlot(anchor, p, obj);
+        MarkObject(heap, obj);
+      }
+    }
+  }
+
   // Marks the object grey and pushes it on the marking stack.
   INLINE(static void MarkObject(Heap* heap, Object* obj)) {
     HeapObject* heap_object = HeapObject::cast(obj);
     MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
     if (mark_bit.data_only()) {
-      if (heap->incremental_marking()->MarkBlackOrKeepGrey(mark_bit)) {
-        MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
-                                              heap_object->Size());
-      }
+      MarkBlackOrKeepGrey(heap_object, mark_bit, heap_object->Size());
     } else if (Marking::IsWhite(mark_bit)) {
       heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
     }
@@ -288,10 +360,7 @@ class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
     HeapObject* heap_object = HeapObject::cast(obj);
     MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
     if (mark_bit.data_only()) {
-      if (incremental_marking_->MarkBlackOrKeepGrey(mark_bit)) {
-        MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(),
-                                              heap_object->Size());
-      }
+      MarkBlackOrKeepGrey(heap_object, mark_bit, heap_object->Size());
     } else {
       if (Marking::IsWhite(mark_bit)) {
         incremental_marking_->WhiteToGreyAndPush(heap_object, mark_bit);
@@ -616,8 +685,11 @@ void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
       ASSERT(new_top != marking_deque_.bottom());
 #ifdef DEBUG
       MarkBit mark_bit = Marking::MarkBitFrom(obj);
+      MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
       ASSERT(Marking::IsGrey(mark_bit) ||
-             (obj->IsFiller() && Marking::IsWhite(mark_bit)));
+             (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
+             (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
+              Marking::IsBlack(mark_bit)));
 #endif
     }
   }
@@ -637,11 +709,15 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
 
   IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
 
-  MarkBit obj_mark_bit = Marking::MarkBitFrom(obj);
-  SLOW_ASSERT(Marking::IsGrey(obj_mark_bit) ||
-              (obj->IsFiller() && Marking::IsWhite(obj_mark_bit)));
-  Marking::MarkBlack(obj_mark_bit);
-  MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
+  MarkBit mark_bit = Marking::MarkBitFrom(obj);
+#ifdef DEBUG
+  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
+  SLOW_ASSERT(Marking::IsGrey(mark_bit) ||
+              (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
+              (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
+               Marking::IsBlack(mark_bit)));
+#endif
+  MarkBlackOrKeepBlack(obj, mark_bit, size);
 }
 
 
diff --git a/src/incremental-marking.h b/src/incremental-marking.h
index 6ae0f595f6..77d54bed4b 100644
--- a/src/incremental-marking.h
+++ b/src/incremental-marking.h
@@ -164,19 +164,6 @@ class IncrementalMarking {
 
   inline void WhiteToGreyAndPush(HeapObject* obj, MarkBit mark_bit);
 
-  // Does white->black or keeps gray or black color. Returns true if converting
-  // white to black.
-  inline bool MarkBlackOrKeepGrey(MarkBit mark_bit) {
-    ASSERT(!Marking::IsImpossible(mark_bit));
-    if (mark_bit.Get()) {
-      // Grey or black: Keep the color.
-      return false;
-    }
-    mark_bit.Set();
-    ASSERT(Marking::IsBlack(mark_bit));
-    return true;
-  }
-
   inline int steps_count() {
     return steps_count_;
   }
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 1af3074f36..30abe6d54c 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -488,6 +488,7 @@ void MarkCompactCollector::ClearMarkbits() {
     MarkBit mark_bit = Marking::MarkBitFrom(obj);
     mark_bit.Clear();
     mark_bit.Next().Clear();
+    Page::FromAddress(obj->address())->ResetProgressBar();
     Page::FromAddress(obj->address())->ResetLiveBytes();
   }
 }
diff --git a/src/objects-visiting-inl.h b/src/objects-visiting-inl.h
index 2eefde1cfe..4a9dab5caa 100644
--- a/src/objects-visiting-inl.h
+++ b/src/objects-visiting-inl.h
@@ -110,10 +110,7 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
                   SlicedString::BodyDescriptor,
                   void>::Visit);
 
-  table_.Register(kVisitFixedArray,
-                  &FlexibleBodyVisitor<StaticVisitor,
-                  FixedArray::BodyDescriptor,
-                  void>::Visit);
+  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
 
   table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index 56380de252..29f3cbc598 100644
--- a/src/objects-visiting.h
+++ b/src/objects-visiting.h
@@ -434,6 +434,10 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     }
   };
 
+  typedef FlexibleBodyVisitor<StaticVisitor,
+                              FixedArray::BodyDescriptor,
+                              void> FixedArrayVisitor;
+
   typedef FlexibleBodyVisitor<StaticVisitor,
                               JSObject::BodyDescriptor,
                               void> JSObjectVisitor;
diff --git a/src/spaces.cc b/src/spaces.cc
index 583b2ca512..0ac23d279d 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -448,6 +448,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap,
   chunk->slots_buffer_ = NULL;
   chunk->skip_list_ = NULL;
   chunk->write_barrier_counter_ = kWriteBarrierCounterGranularity;
+  chunk->progress_bar_ = 0;
   chunk->high_water_mark_ = static_cast<int>(area_start - base);
   chunk->ResetLiveBytes();
   Bitmap::Clear(chunk);
@@ -2784,7 +2785,8 @@ void LargeObjectSpace::FreeUnmarkedObjects() {
     MarkBit mark_bit = Marking::MarkBitFrom(object);
     if (mark_bit.Get()) {
       mark_bit.Clear();
-      MemoryChunk::IncrementLiveBytesFromGC(object->address(), -object->Size());
+      Page::FromAddress(object->address())->ResetProgressBar();
+      Page::FromAddress(object->address())->ResetLiveBytes();
       previous = current;
       current = current->next_page();
     } else {
diff --git a/src/spaces.h b/src/spaces.h
index 9121e9cea3..4fbabd6349 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -397,6 +397,12 @@ class MemoryChunk {
     WAS_SWEPT_PRECISELY,
     WAS_SWEPT_CONSERVATIVELY,
 
+    // Large objects can have a progress bar in their page header. These
+    // objects are scanned in increments and are kept black while being
+    // scanned. Even if the mutator writes to them they stay black, and a
+    // white-to-grey transition is performed on the written value instead.
+    HAS_PROGRESS_BAR,
+
     // Last flag, keep at bottom.
     NUM_MEMORY_CHUNK_FLAGS
   };
@@ -480,6 +486,23 @@ class MemoryChunk {
     write_barrier_counter_ = counter;
   }
 
+  int progress_bar() {
+    ASSERT(IsFlagSet(HAS_PROGRESS_BAR));
+    return progress_bar_;
+  }
+
+  void set_progress_bar(int progress_bar) {
+    ASSERT(IsFlagSet(HAS_PROGRESS_BAR));
+    progress_bar_ = progress_bar;
+  }
+
+  void ResetProgressBar() {
+    if (IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
+      set_progress_bar(0);
+      ClearFlag(MemoryChunk::HAS_PROGRESS_BAR);
+    }
+  }
+
   static void IncrementLiveBytesFromGC(Address address, int by) {
     MemoryChunk::FromAddress(address)->IncrementLiveBytes(by);
   }
@@ -505,7 +528,7 @@ class MemoryChunk {
       kSlotsBufferOffset + kPointerSize + kPointerSize;
 
   static const size_t kHeaderSize =
-      kWriteBarrierCounterOffset + kPointerSize + kPointerSize;
+      kWriteBarrierCounterOffset + kPointerSize + kIntSize + kIntSize;
 
   static const int kBodyOffset =
       CODE_POINTER_ALIGN(kHeaderSize + Bitmap::kSize);
@@ -649,6 +672,9 @@ class MemoryChunk {
   SlotsBuffer* slots_buffer_;
   SkipList* skip_list_;
   intptr_t write_barrier_counter_;
+  // Used by the incremental marker to keep track of the scanning progress in
+  // large objects that have a progress bar and are scanned in increments.
+  int progress_bar_;
   // Assuming the initial allocation on a page is sequential,
   // count highest number of bytes ever allocated on the page.
   int high_water_mark_;
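
Note: stripped of the V8 marking machinery, the core idea of the patch is a resumable
scan. A large array is visited in fixed-size chunks, the page remembers how far the
scan has progressed, and the object is pushed back onto the work queue so that the
next incremental step can continue where the previous one stopped; if the queue is
full and the object cannot be re-queued, the scan falls through to the end. The
sketch below is a minimal standalone model of that control flow only. BigObject,
WorkQueue, ScanIncrementally and kScanningChunk are illustrative stand-ins, not the
V8 MemoryChunk, marking deque, or kProgressBarScanningChunk APIs from the patch.

// Minimal, self-contained model of the progress-bar scanning loop (illustrative only).
#include <algorithm>
#include <cstdio>
#include <deque>
#include <vector>

static const int kScanningChunk = 4;  // The patch uses 32 * 1024; tiny here for the demo.

struct BigObject {
  std::vector<int> slots;  // Stands in for the pointer slots of a large FixedArray.
  int progress_bar = 0;    // In the patch this counter lives in the MemoryChunk header.
};

struct WorkQueue {
  std::deque<BigObject*> items;
  size_t capacity;
  bool IsFull() const { return items.size() >= capacity; }
  void UnshiftGrey(BigObject* obj) { items.push_front(obj); }  // Re-queue for a later step.
};

// Scans one increment of |obj|; returns true if the object still needs more work.
bool ScanIncrementally(BigObject* obj, WorkQueue* queue) {
  const int size = static_cast<int>(obj->slots.size());
  int start = obj->progress_bar;
  int end = std::min(size, start + kScanningChunk);
  bool scan_until_end = false;
  do {
    for (int i = start; i < end; i++) {
      std::printf("visit slot %d\n", obj->slots[i]);  // "Mark" whatever the slot holds.
    }
    start = end;
    end = std::min(size, end + kScanningChunk);
    // If the queue is full the object cannot be re-queued, so fall back to
    // scanning it through to the end, mirroring the fallback in the patch.
    scan_until_end = queue->IsFull();
  } while (scan_until_end && start < size);
  obj->progress_bar = start;  // Remember how far this scan got.
  if (start < size) {
    queue->UnshiftGrey(obj);  // Not done yet: a later marking step resumes here.
    return true;
  }
  return false;
}

int main() {
  BigObject obj;
  for (int i = 0; i < 10; i++) obj.slots.push_back(i);
  WorkQueue queue{std::deque<BigObject*>(), 16};
  // Drive the scan the way repeated incremental-marking steps would.
  while (ScanIncrementally(&obj, &queue)) queue.items.pop_front();
  return 0;
}

Re-queueing the partially scanned object is what keeps marking correct here: the
object stays on the work list until its progress bar reaches the end, so marking can
only terminate once every slot has been visited, while each individual step stays
bounded by the chunk size.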