[heap] Buffer counter updates for new space evacuation.

* Buffer counter updates in the corresponding visitor to allow parallel
  processing of pages.
* Fix a bug where we don't keep live bytes in sync when marking is already
  finished.

LOG=N
BUG=524425

Review URL: https://codereview.chromium.org/1559873004

Cr-Commit-Position: refs/heads/master@{#33170}
mlippautz 2016-01-08 04:25:02 -08:00 committed by Commit bot
parent 0a808704c9
commit bfefce1e50
4 changed files with 44 additions and 12 deletions
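As a minimal, standalone illustration of the first bullet in the CL description: the evacuation visitor accumulates its counts in private fields, and the heap-global counters are bumped exactly once after all pages have been visited. The names below (GlobalHeapStats, EvacuationCounters) are invented for this sketch and are not the V8 classes touched by the diff.

#include <cstdint>
#include <iostream>

// Heap-global statistics (stand-ins for promoted_objects_size_ and friends).
// Updating them from inside the per-object visit would be a data race once
// pages are evacuated on several threads, hence the buffering.
struct GlobalHeapStats {
  intptr_t promoted_objects_size = 0;
  intptr_t semi_space_copied_object_size = 0;
  intptr_t young_survivors = 0;
};

// Per-visitor buffer: only the thread owning the visitor writes to it.
class EvacuationCounters {
 public:
  void OnPromoted(int size) { promoted_size_ += size; }
  void OnSemiSpaceCopied(int size) { semispace_copied_size_ += size; }
  intptr_t promoted_size() const { return promoted_size_; }
  intptr_t semispace_copied_size() const { return semispace_copied_size_; }

 private:
  intptr_t promoted_size_ = 0;
  intptr_t semispace_copied_size_ = 0;
};

int main() {
  GlobalHeapStats stats;
  EvacuationCounters visitor;

  // Pretend three new-space objects were evacuated.
  visitor.OnPromoted(128);        // Moved to old space.
  visitor.OnSemiSpaceCopied(64);  // Copied within the semispaces.
  visitor.OnPromoted(32);

  // One flush after the page loop, mirroring the end of EvacuateNewSpace():
  // young survivors are simply promoted bytes plus copied bytes.
  stats.promoted_objects_size += visitor.promoted_size();
  stats.semi_space_copied_object_size += visitor.semispace_copied_size();
  stats.young_survivors +=
      visitor.promoted_size() + visitor.semispace_copied_size();

  std::cout << "promoted=" << stats.promoted_objects_size
            << " copied=" << stats.semi_space_copied_object_size
            << " survivors=" << stats.young_survivors << "\n";
}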


@@ -165,6 +165,7 @@ Heap::Heap()
deserialization_complete_(false),
strong_roots_list_(NULL),
array_buffer_tracker_(NULL),
heap_iterator_depth_(0),
force_oom_(false) {
// Allow build-time customization of the max semispace size. Building
// V8 with snapshots and a non-default max semispace size is much
@@ -3102,7 +3103,11 @@ bool Heap::CanMoveObjectStart(HeapObject* object) {
void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
if (incremental_marking()->IsMarking() &&
// As long as the inspected object is black and we are currently not iterating
// the heap using HeapIterator, we can update the live byte count. We cannot
// update while using HeapIterator because the iterator is temporarily
// marking the whole object graph, without updating live bytes.
if (!in_heap_iterator() &&
Marking::IsBlack(Marking::MarkBitFrom(object->address()))) {
if (mode == SEQUENTIAL_TO_SWEEPER) {
MemoryChunk::IncrementLiveBytesFromGC(object, by);
@@ -5659,6 +5664,7 @@ HeapIterator::HeapIterator(Heap* heap,
filter_(nullptr),
space_iterator_(nullptr),
object_iterator_(nullptr) {
heap_->heap_iterator_start();
// Start the iteration.
space_iterator_ = new SpaceIterator(heap_);
switch (filtering_) {
@@ -5673,6 +5679,7 @@ HeapIterator::HeapIterator(Heap* heap,
HeapIterator::~HeapIterator() {
heap_->heap_iterator_end();
#ifdef DEBUG
// Assert that in filtering mode we have iterated through all
// objects. Otherwise, heap will be left in an inconsistent state.


@@ -1415,13 +1415,13 @@ class Heap {
void UpdateSurvivalStatistics(int start_new_space_size);
inline void IncrementPromotedObjectsSize(int object_size) {
DCHECK(object_size > 0);
DCHECK_GE(object_size, 0);
promoted_objects_size_ += object_size;
}
inline intptr_t promoted_objects_size() { return promoted_objects_size_; }
inline void IncrementSemiSpaceCopiedObjectSize(int object_size) {
DCHECK(object_size > 0);
DCHECK_GE(object_size, 0);
semi_space_copied_object_size_ += object_size;
}
inline intptr_t semi_space_copied_object_size() {
@@ -1930,6 +1930,16 @@ class Heap {
bool RecentIdleNotificationHappened();
void ScheduleIdleScavengeIfNeeded(int bytes_allocated);
// ===========================================================================
// HeapIterator helpers. =====================================================
// ===========================================================================
void heap_iterator_start() { heap_iterator_depth_++; }
void heap_iterator_end() { heap_iterator_depth_--; }
bool in_heap_iterator() { return heap_iterator_depth_ > 0; }
// ===========================================================================
// Allocation methods. =======================================================
// ===========================================================================
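The heap_iterator_start()/heap_iterator_end()/in_heap_iterator() helpers added in the hunk above are bumped by the HeapIterator constructor and destructor (see the heap.cc hunks earlier) and queried by Heap::AdjustLiveBytes. A minimal sketch of the same depth-guard idea, using simplified stand-in types rather than the real Heap/HeapIterator interfaces:

#include <cassert>

class Heap {
 public:
  void heap_iterator_start() { heap_iterator_depth_++; }
  void heap_iterator_end() { heap_iterator_depth_--; }
  bool in_heap_iterator() const { return heap_iterator_depth_ > 0; }

  // Stand-in for Heap::AdjustLiveBytes(): while a HeapIterator is live, the
  // iterator temporarily marks the whole object graph without updating live
  // bytes, so live-byte bookkeeping is skipped to keep counters consistent.
  void AdjustLiveBytes(int by) {
    if (in_heap_iterator()) return;
    live_bytes_ += by;
  }

  int live_bytes() const { return live_bytes_; }

 private:
  int heap_iterator_depth_ = 0;  // Depth counter, not a bool, so that nested
                                 // iterators stay guarded.
  int live_bytes_ = 0;
};

// Brackets its lifetime with start/end, like the HeapIterator constructor and
// destructor in the hunks above.
class HeapIterator {
 public:
  explicit HeapIterator(Heap* heap) : heap_(heap) { heap_->heap_iterator_start(); }
  ~HeapIterator() { heap_->heap_iterator_end(); }

 private:
  Heap* heap_;
};

int main() {
  Heap heap;
  heap.AdjustLiveBytes(16);      // Counted: no iterator is active.
  {
    HeapIterator outer(&heap);
    HeapIterator inner(&heap);   // Nesting is fine with a depth counter.
    heap.AdjustLiveBytes(16);    // Skipped while iterating.
  }
  heap.AdjustLiveBytes(16);      // Counted again once both iterators are gone.
  assert(heap.live_bytes() == 32);
}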
@@ -2383,6 +2393,9 @@ class Heap {
ArrayBufferTracker* array_buffer_tracker_;
// The depth of HeapIterator nestings.
int heap_iterator_depth_;
// Used for testing purposes.
bool force_oom_;


@@ -1617,7 +1617,9 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
SlotsBuffer** evacuation_slots_buffer)
: EvacuateVisitorBase(heap, evacuation_slots_buffer),
buffer_(LocalAllocationBuffer::InvalidBuffer()),
space_to_allocate_(NEW_SPACE) {}
space_to_allocate_(NEW_SPACE),
promoted_size_(0),
semispace_copied_size_(0) {}
bool Visit(HeapObject* object) override {
Heap::UpdateAllocationSiteFeedback(object, Heap::RECORD_SCRATCHPAD_SLOT);
@@ -1630,7 +1632,7 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
heap_->array_buffer_tracker()->Promote(
JSArrayBuffer::cast(target_object));
}
heap_->IncrementPromotedObjectsSize(size);
promoted_size_ += size;
return true;
}
HeapObject* target = nullptr;
@@ -1641,10 +1643,13 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
}
heap_->IncrementSemiSpaceCopiedObjectSize(size);
semispace_copied_size_ += size;
return true;
}
intptr_t promoted_size() { return promoted_size_; }
intptr_t semispace_copied_size() { return semispace_copied_size_; }
private:
enum NewSpaceAllocationMode {
kNonstickyBailoutOldSpace,
@@ -1742,6 +1747,8 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
LocalAllocationBuffer buffer_;
AllocationSpace space_to_allocate_;
intptr_t promoted_size_;
intptr_t semispace_copied_size_;
};
@@ -3096,8 +3103,6 @@ void MarkCompactCollector::EvacuateNewSpace() {
new_space->Flip();
new_space->ResetAllocationInfo();
int survivors_size = 0;
// First pass: traverse all objects in inactive semispace, remove marks,
// migrate live objects and write forwarding addresses. This stage puts
// new entries in the store buffer and may cause some pages to be marked
@@ -3106,13 +3111,17 @@ void MarkCompactCollector::EvacuateNewSpace() {
EvacuateNewSpaceVisitor new_space_visitor(heap(), &migration_slots_buffer_);
while (it.has_next()) {
NewSpacePage* p = it.next();
survivors_size += p->LiveBytes();
bool ok = VisitLiveObjects(p, &new_space_visitor, kClearMarkbits);
USE(ok);
DCHECK(ok);
}
heap_->IncrementYoungSurvivorsCounter(survivors_size);
heap_->IncrementPromotedObjectsSize(
static_cast<int>(new_space_visitor.promoted_size()));
heap_->IncrementSemiSpaceCopiedObjectSize(
static_cast<int>(new_space_visitor.semispace_copied_size()));
heap_->IncrementYoungSurvivorsCounter(
static_cast<int>(new_space_visitor.promoted_size()) +
static_cast<int>(new_space_visitor.semispace_copied_size()));
new_space->set_age_mark(new_space->top());
}
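Because each visitor owns its counters, the merge step above extends naturally to parallel page evacuation, which is what the CL description says this change is preparing for. Below is a hedged sketch of that end state with invented task and page types; this CL itself only buffers the counters and still runs a single visitor over all pages.

#include <cstddef>
#include <cstdint>
#include <iostream>
#include <thread>
#include <vector>

// Private counters for one evacuation task; no locking needed because only
// the owning worker writes to them.
struct PageTaskCounters {
  intptr_t promoted_size = 0;
  intptr_t semispace_copied_size = 0;
};

// Each worker "evacuates" its own pages (here just lists of object sizes)
// and records the outcome in its private counters.
void EvacuatePages(std::vector<int> object_sizes, PageTaskCounters* counters) {
  for (int size : object_sizes) {
    if (size >= 256) {
      counters->promoted_size += size;          // Promoted to old space.
    } else {
      counters->semispace_copied_size += size;  // Copied within new space.
    }
  }
}

int main() {
  std::vector<std::vector<int>> pages = {{512, 64, 128}, {300, 16}, {1024}};
  std::vector<PageTaskCounters> per_task(pages.size());

  std::vector<std::thread> workers;
  for (std::size_t i = 0; i < pages.size(); i++) {
    workers.emplace_back(EvacuatePages, pages[i], &per_task[i]);
  }
  for (std::thread& worker : workers) worker.join();

  // Single-threaded merge, analogous to the Increment* calls at the end of
  // EvacuateNewSpace() in the hunk above.
  intptr_t promoted = 0, copied = 0;
  for (const PageTaskCounters& c : per_task) {
    promoted += c.promoted_size;
    copied += c.semispace_copied_size;
  }
  std::cout << "promoted=" << promoted << " copied=" << copied
            << " survivors=" << (promoted + copied) << "\n";
}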


@@ -595,6 +595,7 @@ class MemoryChunk {
}
live_byte_count_ = 0;
}
void IncrementLiveBytes(int by) {
if (FLAG_gc_verbose) {
printf("UpdateLiveBytes:%p:%x%c=%x->%x\n", static_cast<void*>(this),
@@ -602,10 +603,12 @@
live_byte_count_ + by);
}
live_byte_count_ += by;
DCHECK_GE(live_byte_count_, 0);
DCHECK_LE(static_cast<unsigned>(live_byte_count_), size_);
}
int LiveBytes() {
DCHECK(static_cast<unsigned>(live_byte_count_) <= size_);
DCHECK_LE(static_cast<unsigned>(live_byte_count_), size_);
return live_byte_count_;
}