Concurrent sweeping of code space.
BUG=
Review URL: https://codereview.chromium.org/1222013002
Cr-Commit-Position: refs/heads/master@{#29456}
parent 9d0cd81da9
commit 3050b52f57
@@ -1431,7 +1431,9 @@ Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
  // after the inner pointer.
  Page* page = Page::FromAddress(inner_pointer);

  page->skip_list()->Lock();
  Address addr = page->skip_list()->StartFor(inner_pointer);
  page->skip_list()->Unlock();

  Address top = heap->code_space()->top();
  Address limit = heap->code_space()->limit();
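The hunk above takes the skip list's new lock around the inner-pointer lookup, because a background sweeper may be rebuilding the skip list of the same code page at the same time. Below is a minimal, standalone sketch of that reader-side pattern; SimpleSkipList, GcSafeStartFor, std::mutex and the assumed 1 MB page size are all stand-ins for illustration, not V8's actual classes.

    #include <cstddef>
    #include <cstdint>
    #include <mutex>

    // Hypothetical, simplified stand-in for V8's SkipList: one object-start
    // address per fixed-size region of a page, guarded by a mutex so lookups
    // stay safe while a concurrent sweeper rebuilds the table.
    class SimpleSkipList {
     public:
      uintptr_t StartFor(uintptr_t addr) { return starts_[RegionIndex(addr)]; }
      void Lock() { mutex_.lock(); }
      void Unlock() { mutex_.unlock(); }

     private:
      static const int kPageSizeLog2 = 20;    // assumed 1 MB pages
      static const int kRegionSizeLog2 = 13;  // 8 KB regions, as in the header change below
      static const size_t kSize = 1 << (kPageSizeLog2 - kRegionSizeLog2);
      static size_t RegionIndex(uintptr_t addr) {
        return (addr & ((uintptr_t{1} << kPageSizeLog2) - 1)) >> kRegionSizeLog2;
      }
      uintptr_t starts_[kSize] = {};
      std::mutex mutex_;
    };

    // The pattern from the hunk: hold the lock only for the table lookup.
    uintptr_t GcSafeStartFor(SimpleSkipList* skip_list, uintptr_t inner_pointer) {
      skip_list->Lock();
      uintptr_t addr = skip_list->StartFor(inner_pointer);
      skip_list->Unlock();
      return addr;
    }

In the actual patch the same lock also guards the sweeper's Clear() and rebuild, shown in the Sweep() hunks further down.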
@@ -5145,6 +5145,11 @@ void Heap::Verify() {
  code_space_->Verify(&no_dirty_regions_visitor);

  lo_space_->Verify();

  mark_compact_collector_.VerifyWeakEmbeddedObjectsInCode();
  if (FLAG_omit_map_checks_for_leaf_maps) {
    mark_compact_collector_.VerifyOmittedMapChecks();
  }
}
#endif
@@ -226,6 +226,7 @@ static void VerifyEvacuation(Heap* heap) {

void MarkCompactCollector::SetUp() {
  free_list_old_space_.Reset(new FreeList(heap_->old_space()));
  free_list_code_space_.Reset(new FreeList(heap_->code_space()));
  EnsureMarkingDequeIsReserved();
  EnsureMarkingDequeIsCommitted(kMinMarkingDequeSize);
}
@@ -366,13 +367,6 @@ void MarkCompactCollector::CollectGarbage() {
  SweepSpaces();

#ifdef VERIFY_HEAP
  VerifyWeakEmbeddedObjectsInCode();
  if (FLAG_omit_map_checks_for_leaf_maps) {
    VerifyOmittedMapChecks();
  }
#endif

  Finish();

  if (marking_parity_ == EVEN_MARKING_PARITY) {
@@ -499,9 +493,13 @@ class MarkCompactCollector::SweeperTask : public v8::Task {

void MarkCompactCollector::StartSweeperThreads() {
  DCHECK(free_list_old_space_.get()->IsEmpty());
  DCHECK(free_list_code_space_.get()->IsEmpty());
  V8::GetCurrentPlatform()->CallOnBackgroundThread(
      new SweeperTask(heap(), heap()->old_space()),
      v8::Platform::kShortRunningTask);
  V8::GetCurrentPlatform()->CallOnBackgroundThread(
      new SweeperTask(heap(), heap()->code_space()),
      v8::Platform::kShortRunningTask);
}
@@ -512,15 +510,19 @@ void MarkCompactCollector::EnsureSweepingCompleted() {
  // here.
  if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) {
    SweepInParallel(heap()->paged_space(OLD_SPACE), 0);
    SweepInParallel(heap()->paged_space(CODE_SPACE), 0);
  }
  // Wait twice for both jobs.
  if (heap()->concurrent_sweeping_enabled()) {
    pending_sweeper_jobs_semaphore_.Wait();
    pending_sweeper_jobs_semaphore_.Wait();
  }
  ParallelSweepSpacesComplete();
  sweeping_in_progress_ = false;
  RefillFreeList(heap()->paged_space(OLD_SPACE));
  RefillFreeList(heap()->paged_space(CODE_SPACE));
  heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes();
  heap()->paged_space(CODE_SPACE)->ResetUnsweptFreeBytes();

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && !evacuation()) {
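StartSweeperThreads() above now posts one background sweeper task per space, and both tasks signal the same pending_sweeper_jobs_semaphore_, which is why EnsureSweepingCompleted() waits on it twice. A minimal standalone sketch of that shape follows; std::thread, a C++20 counting semaphore, and the names SweepSpaceConcurrently and pending_sweeps are stand-ins for V8's platform tasks and base::Semaphore, not its real API.

    #include <semaphore>
    #include <thread>

    // One shared semaphore; each sweep job releases it exactly once.
    std::counting_semaphore<2> pending_sweeps(0);

    void SweepSpaceConcurrently(int space_id) {
      (void)space_id;            // the real task would sweep this space's pages
      pending_sweeps.release();  // signal that this job has finished
    }

    int main() {
      // Start one sweeper per space, mirroring StartSweeperThreads().
      std::thread old_space_sweeper(SweepSpaceConcurrently, /*OLD_SPACE=*/0);
      std::thread code_space_sweeper(SweepSpaceConcurrently, /*CODE_SPACE=*/1);

      // ... the mutator keeps running and may help via SweepInParallel() ...

      // "Wait twice for both jobs", as in EnsureSweepingCompleted().
      pending_sweeps.acquire();
      pending_sweeps.acquire();
      old_space_sweeper.join();
      code_space_sweeper.join();
    }

Counting completions on a single semaphore keeps the main thread's wait independent of which space finishes sweeping first.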
@@ -545,6 +547,8 @@ void MarkCompactCollector::RefillFreeList(PagedSpace* space) {

  if (space == heap()->old_space()) {
    free_list = free_list_old_space_.get();
  } else if (space == heap()->code_space()) {
    free_list = free_list_code_space_.get();
  } else {
    // Any PagedSpace might invoke RefillFreeLists, so we need to make sure
    // to only refill them for the old space.
@@ -3497,14 +3501,20 @@ static int Sweep(PagedSpace* space, FreeList* free_list, Page* p,
  DCHECK(reinterpret_cast<intptr_t>(free_start) % (32 * kPointerSize) == 0);
  int offsets[16];

  // If we use the skip list for code space pages, we have to lock the skip
  // list because it could be accessed concurrently by the runtime or the
  // deoptimizer.
  bool skip_list_locked = false;
  SkipList* skip_list = p->skip_list();
  int curr_region = -1;
  if ((skip_list_mode == REBUILD_SKIP_LIST) && skip_list) {
    skip_list->Lock();
    skip_list_locked = true;
    skip_list->Clear();
  }

  intptr_t freed_bytes = 0;
  intptr_t max_freed_bytes = 0;
  int curr_region = -1;

  for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
    Address cell_base = it.CurrentCellBase();
@@ -3559,6 +3569,10 @@ static int Sweep(PagedSpace* space, FreeList* free_list, Page* p,
  } else {
    p->SetWasSwept();
  }
  if (skip_list_locked) {
    DCHECK(skip_list && skip_list_mode == REBUILD_SKIP_LIST);
    skip_list->Unlock();
  }
  return FreeList::GuaranteedAllocatable(static_cast<int>(max_freed_bytes));
}
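The two Sweep() hunks above add the writer side of the locking: the skip list is locked only when it is actually being rebuilt, a flag records that the lock was taken, and it is released after the sweep loop. A standalone sketch of that control flow, with a hypothetical RegionIndex type and SweepPage function that are not part of V8:

    #include <cstddef>
    #include <mutex>
    #include <vector>

    enum SkipListRebuildingMode { REBUILD_SKIP_LIST, IGNORE_SKIP_LIST };

    // Hypothetical stand-in for the per-page skip list being rebuilt.
    struct RegionIndex {
      std::mutex mutex;
      std::vector<std::size_t> starts;
      void Lock() { mutex.lock(); }
      void Unlock() { mutex.unlock(); }
      void Clear() { starts.clear(); }
    };

    // Writer side of the pattern: lock only when rebuilding, remember that we
    // locked, rebuild while sweeping, unlock at the end.
    int SweepPage(SkipListRebuildingMode mode, RegionIndex* index) {
      bool locked = false;
      if (mode == REBUILD_SKIP_LIST && index != nullptr) {
        index->Lock();
        locked = true;
        index->Clear();
      }
      int max_freed = 0;
      // ... walk the page's mark bits, free dead ranges, and repopulate
      //     index->starts for each region that contains a live object ...
      if (locked) index->Unlock();
      return max_freed;
    }

Because the runtime and the deoptimizer take the same lock for their StartFor() lookups (first hunk), a reader sees either the old table or the fully rebuilt one, never a half-cleared table.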
@@ -4168,10 +4182,19 @@ int MarkCompactCollector::SweepInParallel(PagedSpace* space,

int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) {
  int max_freed = 0;
  if (page->TryParallelSweeping()) {
    FreeList* free_list = free_list_old_space_.get();
    FreeList* free_list;
    FreeList private_free_list(space);
    max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
                      IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
    if (space->identity() == CODE_SPACE) {
      free_list = free_list_code_space_.get();
      max_freed =
          Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, REBUILD_SKIP_LIST,
                IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
    } else {
      free_list = free_list_old_space_.get();
      max_freed =
          Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
                IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
    }
    free_list->Concatenate(&private_free_list);
  }
  return max_freed;
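Each parallel page sweep accumulates freed ranges in a task-local free list and touches shared state only once, in Concatenate(), now choosing the shared list that matches the space being swept. A standalone sketch of that pattern with a hypothetical SimpleFreeList and SweepPageInParallel (std::list plus a mutex, not V8's FreeList):

    #include <cstddef>
    #include <list>
    #include <mutex>
    #include <utility>

    // Hypothetical simplified free list: (start, size) ranges plus a mutex so
    // concurrent sweeper tasks can merge their results into it.
    struct SimpleFreeList {
      std::list<std::pair<char*, std::size_t>> ranges;
      std::mutex mutex;

      // Splice another (task-local) list's entries into this one under the lock.
      void Concatenate(SimpleFreeList* other) {
        std::lock_guard<std::mutex> guard(mutex);
        ranges.splice(ranges.end(), other->ranges);
      }
    };

    SimpleFreeList shared_old_space_list;
    SimpleFreeList shared_code_space_list;

    // Mirrors the hunk: sweep into a private list, then merge it once into the
    // shared list that matches the space's identity.
    std::size_t SweepPageInParallel(bool is_code_space /*, Page* page, ... */) {
      SimpleFreeList private_free_list;
      std::size_t max_freed = 0;
      // ... sweep the page, appending freed ranges to private_free_list and
      //     rebuilding the skip list when is_code_space is true ...
      SimpleFreeList* shared =
          is_code_space ? &shared_code_space_list : &shared_old_space_list;
      shared->Concatenate(&private_free_list);
      return max_freed;
    }

Keeping one shared free list per space means the background code-space sweeper does not contend with old-space allocation, and vice versa.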
@@ -4228,8 +4251,19 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
            PrintF("Sweeping 0x%" V8PRIxPTR ".\n",
                   reinterpret_cast<intptr_t>(p));
          }
          Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
                IGNORE_FREE_SPACE>(space, NULL, p, NULL);
          if (space->identity() == CODE_SPACE) {
            if (FLAG_zap_code_space) {
              Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
                    ZAP_FREE_SPACE>(space, NULL, p, NULL);
            } else {
              Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
                    IGNORE_FREE_SPACE>(space, NULL, p, NULL);
            }
          } else {
            DCHECK(space->identity() == OLD_SPACE);
            Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
                  IGNORE_FREE_SPACE>(space, NULL, p, NULL);
          }
          pages_swept++;
          parallel_sweeping_active = true;
        } else {
@@ -4246,13 +4280,17 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
        if (FLAG_gc_verbose) {
          PrintF("Sweeping 0x%" V8PRIxPTR ".\n", reinterpret_cast<intptr_t>(p));
        }
        if (space->identity() == CODE_SPACE && FLAG_zap_code_space) {
          Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
                ZAP_FREE_SPACE>(space, NULL, p, NULL);
        } else if (space->identity() == CODE_SPACE) {
          Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
                IGNORE_FREE_SPACE>(space, NULL, p, NULL);
        if (space->identity() == CODE_SPACE) {
          if (FLAG_zap_code_space) {
            Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
                  ZAP_FREE_SPACE>(space, NULL, p, NULL);
          } else {
            Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
                  IGNORE_FREE_SPACE>(space, NULL, p, NULL);
          }
        } else {
          DCHECK(space->identity() == OLD_SPACE ||
                 space->identity() == MAP_SPACE);
          Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
                IGNORE_FREE_SPACE>(space, NULL, p, NULL);
        }
@@ -4292,19 +4330,22 @@ void MarkCompactCollector::SweepSpaces() {
  // the other spaces rely on possibly non-live maps to get the sizes for
  // non-live objects.
  {
    GCTracer::Scope sweep_scope(heap()->tracer(),
                                GCTracer::Scope::MC_SWEEP_OLDSPACE);
    { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); }
    {
      GCTracer::Scope sweep_scope(heap()->tracer(),
                                  GCTracer::Scope::MC_SWEEP_OLDSPACE);
      SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING);
    }
    {
      GCTracer::Scope sweep_scope(heap()->tracer(),
                                  GCTracer::Scope::MC_SWEEP_CODE);
      SweepSpace(heap()->code_space(), CONCURRENT_SWEEPING);
    }

    sweeping_in_progress_ = true;
    if (heap()->concurrent_sweeping_enabled()) {
      StartSweeperThreads();
    }
  }
  {
    GCTracer::Scope sweep_scope(heap()->tracer(),
                                GCTracer::Scope::MC_SWEEP_CODE);
    SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING);
  }

  EvacuateNewSpaceAndCandidates();
@@ -4357,6 +4398,7 @@ void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) {

void MarkCompactCollector::ParallelSweepSpacesComplete() {
  ParallelSweepSpaceComplete(heap()->old_space());
  ParallelSweepSpaceComplete(heap()->code_space());
}
@@ -970,6 +970,7 @@ class MarkCompactCollector {
  List<Page*> evacuation_candidates_;

  SmartPointer<FreeList> free_list_old_space_;
  SmartPointer<FreeList> free_list_code_space_;

  friend class Heap;
};
@@ -1014,6 +1014,10 @@ class SkipList {
    list->AddObject(addr, size);
  }

  void Lock() { mutex_.Lock(); }

  void Unlock() { mutex_.Unlock(); }

 private:
  static const int kRegionSizeLog2 = 13;
  static const int kRegionSize = 1 << kRegionSizeLog2;
@@ -1022,6 +1026,7 @@ class SkipList {
  STATIC_ASSERT(Page::kPageSize % kRegionSize == 0);

  Address starts_[kSize];
  base::Mutex mutex_;
};