diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 8b92b3f336..473db2291f 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -399,9 +399,11 @@ void MarkCompactCollector::ClearMarkbits() {
 class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task {
  public:
   SweeperTask(Sweeper* sweeper, base::Semaphore* pending_sweeper_tasks,
+              base::AtomicNumber<intptr_t>* num_sweeping_tasks,
               AllocationSpace space_to_start)
       : sweeper_(sweeper),
         pending_sweeper_tasks_(pending_sweeper_tasks),
+        num_sweeping_tasks_(num_sweeping_tasks),
         space_to_start_(space_to_start) {}

   virtual ~SweeperTask() {}
@@ -419,11 +421,13 @@ class MarkCompactCollector::Sweeper::SweeperTask : public v8::Task {
       DCHECK_LE(space_id, LAST_PAGED_SPACE);
       sweeper_->ParallelSweepSpace(static_cast<AllocationSpace>(space_id), 0);
     }
+    num_sweeping_tasks_->Decrement(1);
     pending_sweeper_tasks_->Signal();
   }

   Sweeper* sweeper_;
   base::Semaphore* pending_sweeper_tasks_;
+  base::AtomicNumber<intptr_t>* num_sweeping_tasks_;
   AllocationSpace space_to_start_;

   DISALLOW_COPY_AND_ASSIGN(SweeperTask);
@@ -442,8 +446,10 @@ void MarkCompactCollector::Sweeper::StartSweeperTasks() {
   ForAllSweepingSpaces([this](AllocationSpace space) {
     if (space == NEW_SPACE) return;
     num_sweeping_tasks_.Increment(1);
+    semaphore_counter_++;
     V8::GetCurrentPlatform()->CallOnBackgroundThread(
-        new SweeperTask(this, &pending_sweeper_tasks_semaphore_, space),
+        new SweeperTask(this, &pending_sweeper_tasks_semaphore_,
+                        &num_sweeping_tasks_, space),
         v8::Platform::kShortRunningTask);
   });
 }
@@ -463,8 +469,7 @@ void MarkCompactCollector::Sweeper::SweepOrWaitUntilSweepingCompleted(
 }

 void MarkCompactCollector::SweepAndRefill(CompactionSpace* space) {
-  if (FLAG_concurrent_sweeping &&
-      !sweeper().IsSweepingCompleted(space->identity())) {
+  if (FLAG_concurrent_sweeping && sweeper().sweeping_in_progress()) {
     sweeper().ParallelSweepSpace(space->identity(), 0);
     space->RefillFreeList();
   }
@@ -484,16 +489,13 @@ void MarkCompactCollector::Sweeper::EnsureCompleted() {

   // If sweeping is not completed or not running at all, we try to complete it
   // here.
-  ForAllSweepingSpaces([this](AllocationSpace space) {
-    if (!FLAG_concurrent_sweeping || !this->IsSweepingCompleted(space)) {
-      ParallelSweepSpace(space, 0);
-    }
-  });
+  ForAllSweepingSpaces(
+      [this](AllocationSpace space) { ParallelSweepSpace(space, 0); });

   if (FLAG_concurrent_sweeping) {
-    while (num_sweeping_tasks_.Value() > 0) {
+    while (semaphore_counter_ > 0) {
       pending_sweeper_tasks_semaphore_.Wait();
-      num_sweeping_tasks_.Increment(-1);
+      semaphore_counter_--;
     }
   }

@@ -508,7 +510,7 @@ void MarkCompactCollector::Sweeper::EnsureCompleted() {

 void MarkCompactCollector::Sweeper::EnsureNewSpaceCompleted() {
   if (!sweeping_in_progress_) return;
-  if (!FLAG_concurrent_sweeping || !IsSweepingCompleted(NEW_SPACE)) {
+  if (!FLAG_concurrent_sweeping || sweeping_in_progress()) {
     for (Page* p : *heap_->new_space()) {
       SweepOrWaitUntilSweepingCompleted(p);
     }
@@ -528,24 +530,15 @@ void MarkCompactCollector::EnsureSweepingCompleted() {
     VerifyEvacuation(heap_);
   }
 #endif
+
+  if (heap()->memory_allocator()->unmapper()->has_delayed_chunks())
+    heap()->memory_allocator()->unmapper()->FreeQueuedChunks();
 }

 bool MarkCompactCollector::Sweeper::AreSweeperTasksRunning() {
-  DCHECK(FLAG_concurrent_sweeping);
-  while (pending_sweeper_tasks_semaphore_.WaitFor(
-      base::TimeDelta::FromSeconds(0))) {
-    num_sweeping_tasks_.Increment(-1);
-  }
   return num_sweeping_tasks_.Value() != 0;
 }

-bool MarkCompactCollector::Sweeper::IsSweepingCompleted(AllocationSpace space) {
-  DCHECK(FLAG_concurrent_sweeping);
-  if (AreSweeperTasksRunning()) return false;
-  base::LockGuard<base::Mutex> guard(&mutex_);
-  return sweeping_list_[space].empty();
-}
-
 const char* AllocationSpaceName(AllocationSpace space) {
   switch (space) {
     case NEW_SPACE:
@@ -626,8 +619,6 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
   pages.reserve(number_of_pages);

   DCHECK(!sweeping_in_progress());
-  DCHECK(!FLAG_concurrent_sweeping ||
-         sweeper().IsSweepingCompleted(space->identity()));
   Page* owner_of_linear_allocation_area =
       space->top() == space->limit()
           ? nullptr
diff --git a/src/heap/mark-compact.h b/src/heap/mark-compact.h
index d9277b02ae..436961acc1 100644
--- a/src/heap/mark-compact.h
+++ b/src/heap/mark-compact.h
@@ -424,6 +424,7 @@ class MarkCompactCollector {
     explicit Sweeper(Heap* heap)
         : heap_(heap),
          pending_sweeper_tasks_semaphore_(0),
+          semaphore_counter_(0),
           sweeping_in_progress_(false),
           num_sweeping_tasks_(0) {}

@@ -443,7 +444,6 @@ class MarkCompactCollector {
     void EnsureCompleted();
     void EnsureNewSpaceCompleted();
     bool AreSweeperTasksRunning();
-    bool IsSweepingCompleted(AllocationSpace space);
     void SweepOrWaitUntilSweepingCompleted(Page* page);

     void AddSweptPageSafe(PagedSpace* space, Page* page);
@@ -468,10 +468,14 @@ class MarkCompactCollector {

     Heap* heap_;
     base::Semaphore pending_sweeper_tasks_semaphore_;
+    // Counter is only used for waiting on the semaphore.
+    intptr_t semaphore_counter_;
     base::Mutex mutex_;
     SweptList swept_list_[kAllocationSpaces];
     SweepingList sweeping_list_[kAllocationSpaces];
     bool sweeping_in_progress_;
+    // Counter is actively maintained by the concurrent tasks to avoid querying
+    // the semaphore for maintaining a task counter on the main thread.
     base::AtomicNumber<intptr_t> num_sweeping_tasks_;
   };

diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 98bc128511..f94789afc4 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -417,7 +417,7 @@ bool MemoryAllocator::CanFreeMemoryChunk(MemoryChunk* chunk) {
   // Chunks in old generation are unmapped if they are empty.
   DCHECK(chunk->InNewSpace() || chunk->SweepingDone());
   return !chunk->InNewSpace() || mc == nullptr || !FLAG_concurrent_sweeping ||
-         mc->sweeper().IsSweepingCompleted(NEW_SPACE);
+         !mc->sweeper().sweeping_in_progress();
 }

 bool MemoryAllocator::CommitMemory(Address base, size_t size,
@@ -2866,6 +2866,11 @@ HeapObject* PagedSpace::SlowAllocateRaw(int size_in_bytes) {
   MarkCompactCollector* collector = heap()->mark_compact_collector();
   // Sweeping is still in progress.
   if (collector->sweeping_in_progress()) {
+    if (FLAG_concurrent_sweeping && !is_local() &&
+        !collector->sweeper().AreSweeperTasksRunning()) {
+      collector->EnsureSweepingCompleted();
+    }
+
     // First try to refill the free-list, concurrent sweeper threads
     // may have freed some objects in the meantime.
     RefillFreeList();
diff --git a/src/heap/spaces.h b/src/heap/spaces.h
index 77deaeaa21..95a24f008b 100644
--- a/src/heap/spaces.h
+++ b/src/heap/spaces.h
@@ -1176,6 +1176,8 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
     bool WaitUntilCompleted();
     void TearDown();

+    bool has_delayed_chunks() { return delayed_regular_chunks_.size() > 0; }
+
    private:
     static const int kReservedQueueingSlots = 64;
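
Not part of the patch: below is a minimal, self-contained C++20 sketch of the signaling scheme the diff moves to (all class and member names are invented for illustration; none of them are V8 APIs). Each background task decrements an atomic counter before signaling a semaphore, so the main thread can answer "are tasks still running?" by reading the atomic instead of draining semaphore signals, and a separate main-thread-only counter records how many signals an EnsureCompleted-style wait still has to consume.

// sweeper_counters_sketch.cc -- illustrative only, not V8 code.
// Build with: g++ -std=c++20 -pthread sweeper_counters_sketch.cc
#include <atomic>
#include <cstdio>
#include <semaphore>
#include <thread>
#include <vector>

class TaskTracker {
 public:
  void StartTasks(int n) {
    for (int i = 0; i < n; ++i) {
      num_running_.fetch_add(1, std::memory_order_relaxed);
      ++semaphore_counter_;  // Main-thread-only bookkeeping for EnsureCompleted().
      workers_.emplace_back([this] {
        // ... background work would happen here ...
        num_running_.fetch_sub(1, std::memory_order_release);
        pending_tasks_.release();  // Signal completion.
      });
    }
  }

  // Cheap main-thread query; does not consume any semaphore signals.
  bool AreTasksRunning() const {
    return num_running_.load(std::memory_order_acquire) != 0;
  }

  // Block until every started task has signaled completion.
  void EnsureCompleted() {
    while (semaphore_counter_ > 0) {
      pending_tasks_.acquire();
      --semaphore_counter_;
    }
    for (auto& t : workers_) t.join();
    workers_.clear();
  }

 private:
  std::counting_semaphore<> pending_tasks_{0};
  intptr_t semaphore_counter_ = 0;         // Touched only on the main thread.
  std::atomic<intptr_t> num_running_{0};   // Maintained by the tasks themselves.
  std::vector<std::thread> workers_;
};

int main() {
  TaskTracker tracker;
  tracker.StartTasks(4);
  while (tracker.AreTasksRunning()) std::this_thread::yield();
  tracker.EnsureCompleted();
  std::printf("all tasks completed\n");
}

The split mirrors the patch's intent: the old IsSweepingCompleted() had to poll the semaphore (consuming signals) to learn whether tasks were still running, whereas keeping a task-maintained atomic alongside a plain counter for the semaphore lets status checks and blocking waits use independent state.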