[heap] Cleanup RawSweep for better readability.

Change-Id: Ia316db16fb338e3f26b0666de88d5e53f375f8be
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2181263
Commit-Queue: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67565}
This commit is contained in:
Hannes Payer 2020-05-05 15:36:47 +02:00 committed by Commit Bot
parent d4ddf645c3
commit db8f64b85a
2 changed files with 151 additions and 97 deletions

View File

@ -247,6 +247,78 @@ void Sweeper::EnsureCompleted() {
bool Sweeper::AreSweeperTasksRunning() { return num_sweeping_tasks_ != 0; } bool Sweeper::AreSweeperTasksRunning() { return num_sweeping_tasks_ != 0; }
// Frees the dead memory range [free_start, free_end) on |page|.
// Depending on |free_list_mode| the range is either returned to |space|'s
// free list (REBUILD_FREE_LIST) or overwritten with a filler object so the
// page stays iterable (the else branch). Returns the number of bytes added
// to the free list (0 when no free list is rebuilt).
// NOTE(review): |non_empty_typed_slots| is not read in this body —
// presumably kept for signature symmetry with
// CleanupRememberedSetEntriesForFreedMemory; confirm before removing.
V8_INLINE size_t Sweeper::FreeAndProcessFreedMemory(
Address free_start, Address free_end, Page* page, Space* space,
bool non_empty_typed_slots, FreeListRebuildingMode free_list_mode,
FreeSpaceTreatmentMode free_space_mode) {
CHECK_GT(free_end, free_start);
size_t freed_bytes = 0;
size_t size = static_cast<size_t>(free_end - free_start);
// Optionally overwrite the freed range with a zap pattern to catch stale
// reads into dead memory.
if (free_space_mode == ZAP_FREE_SPACE) {
ZapCode(free_start, size);
}
if (free_list_mode == REBUILD_FREE_LIST) {
freed_bytes = reinterpret_cast<PagedSpace*>(space)->Free(
free_start, size, SpaceAccountingMode::kSpaceUnaccounted);
} else {
// Not rebuilding a free list: install a (cleared) filler so heap
// iteration over the page remains valid.
Heap::CreateFillerObjectAt(ReadOnlyRoots(page->heap()), free_start,
static_cast<int>(size),
ClearFreedMemoryMode::kClearFreedMemory);
}
// When reducing memory, hand unused pages of the range back to the OS.
if (should_reduce_memory_) page->DiscardUnusedMemory(free_start, size);
return freed_bytes;
}
// Drops remembered-set entries that point into the freed range
// [free_start, free_end) on |page|. Untyped slot sets (sweeping and
// OLD_TO_OLD) are cleared directly; typed slots are only *recorded* in
// |free_ranges_map| here and cleared later in one batch by
// CleanupInvalidTypedSlotsOfFreeRanges. |old_to_new_cleanup| additionally
// invalidates OLD_TO_NEW entries for the range (a no-op cleanup object when
// the free space cannot contain invalidated slots).
V8_INLINE void Sweeper::CleanupRememberedSetEntriesForFreedMemory(
Address free_start, Address free_end, Page* page,
bool non_empty_typed_slots, FreeRangesMap* free_ranges_map,
InvalidatedSlotsCleanup* old_to_new_cleanup) {
DCHECK_LE(free_start, free_end);
RememberedSetSweeping::RemoveRange(page, free_start, free_end,
SlotSet::KEEP_EMPTY_BUCKETS);
RememberedSet<OLD_TO_OLD>::RemoveRange(page, free_start, free_end,
SlotSet::KEEP_EMPTY_BUCKETS);
// Only bother recording the range when the page actually has typed slot
// sets; the map keys/values are offsets relative to the page start.
if (non_empty_typed_slots) {
free_ranges_map->insert(std::pair<uint32_t, uint32_t>(
static_cast<uint32_t>(free_start - page->address()),
static_cast<uint32_t>(free_end - page->address())));
}
old_to_new_cleanup->Free(free_start, free_end);
}
// Clears typed OLD_TO_NEW and OLD_TO_OLD slots that fall inside any of the
// recorded free ranges of |page|. Called once per page after sweeping has
// collected all ranges, so the (potentially expensive) typed-slot filtering
// happens in a single pass.
void Sweeper::CleanupInvalidTypedSlotsOfFreeRanges(
    Page* page, const FreeRangesMap& free_ranges_map) {
  // Nothing was freed in front of a typed slot set: nothing to filter.
  if (free_ranges_map.empty()) return;
  if (TypedSlotSet* old_to_new = page->typed_slot_set<OLD_TO_NEW>()) {
    old_to_new->ClearInvalidSlots(free_ranges_map);
  }
  if (TypedSlotSet* old_to_old = page->typed_slot_set<OLD_TO_OLD>()) {
    old_to_old->ClearInvalidSlots(free_ranges_map);
  }
}
// Clears the marking bitmap of |page| and reconciles its liveness counters.
// With a free list (REBUILD_FREE_LIST) the live-bytes counter is left
// untouched until RefillFreeList refines the space size; without one
// (IGNORE_FREE_LIST) live bytes are zeroed and the allocated-bytes counter
// is reduced by the memory that was *not* freed through a free list.
void Sweeper::ClearMarkBitsAndHandleLivenessStatistics(
    Page* page, size_t live_bytes, FreeListRebuildingMode free_list_mode) {
  marking_state_->bitmap(page)->Clear();
  if (free_list_mode != IGNORE_FREE_LIST) {
    // Keep the old live bytes counter of the page until RefillFreeList,
    // where the space size is refined.
    // The allocated_bytes() counter is precisely the total size of objects.
    DCHECK_EQ(live_bytes, page->allocated_bytes());
    return;
  }
  marking_state_->SetLiveBytes(page, 0);
  // No memory went through a free list, so adjust allocated bytes here.
  intptr_t dead_bytes = page->area_size() - live_bytes;
  page->DecreaseAllocatedBytes(dead_bytes);
}
int Sweeper::RawSweep( int Sweeper::RawSweep(
Page* p, FreeListRebuildingMode free_list_mode, Page* p, FreeListRebuildingMode free_list_mode,
FreeSpaceTreatmentMode free_space_mode, FreeSpaceTreatmentMode free_space_mode,
@ -258,7 +330,26 @@ int Sweeper::RawSweep(
space->identity() == CODE_SPACE || space->identity() == MAP_SPACE); space->identity() == CODE_SPACE || space->identity() == MAP_SPACE);
DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone()); DCHECK(!p->IsEvacuationCandidate() && !p->SweepingDone());
// Phase 1: Prepare the page for sweeping.
// Before we sweep objects on the page, we free dead array buffers which
// requires valid mark bits.
ArrayBufferTracker::FreeDead(p, marking_state_);
// Set the allocated_bytes_ counter to area_size and clear the wasted_memory_
// counter. The free operations below will decrease allocated_bytes_ to actual
// live bytes and keep track of wasted_memory_.
p->ResetAllocationStatistics();
CodeObjectRegistry* code_object_registry = p->GetCodeObjectRegistry(); CodeObjectRegistry* code_object_registry = p->GetCodeObjectRegistry();
if (code_object_registry) code_object_registry->Clear();
// Phase 2: Free the non-live memory and clean up the regular remembered set
// entries.
// Liveness and freeing statistics.
size_t live_bytes = 0;
size_t max_freed_bytes = 0;
// TODO(ulan): we don't have to clear type old-to-old slots in code space // TODO(ulan): we don't have to clear type old-to-old slots in code space
// because the concurrent marker doesn't mark code objects. This requires // because the concurrent marker doesn't mark code objects. This requires
@ -266,35 +357,21 @@ int Sweeper::RawSweep(
bool non_empty_typed_slots = p->typed_slot_set<OLD_TO_NEW>() != nullptr || bool non_empty_typed_slots = p->typed_slot_set<OLD_TO_NEW>() != nullptr ||
p->typed_slot_set<OLD_TO_OLD>() != nullptr; p->typed_slot_set<OLD_TO_OLD>() != nullptr;
// The free ranges map is used for filtering typed slots.
std::map<uint32_t, uint32_t> free_ranges;
// Before we sweep objects on the page, we free dead array buffers which
// requires valid mark bits.
ArrayBufferTracker::FreeDead(p, marking_state_);
Address free_start = p->area_start();
InvalidatedSlotsCleanup old_to_new_cleanup =
InvalidatedSlotsCleanup::NoCleanup(p);
// Clean invalidated slots during the final atomic pause. After resuming // Clean invalidated slots during the final atomic pause. After resuming
// execution this isn't necessary, invalid old-to-new refs were already // execution this isn't necessary, invalid old-to-new refs were already
// removed by mark compact's update pointers phase. // removed by mark compact's update pointers phase.
InvalidatedSlotsCleanup old_to_new_cleanup =
InvalidatedSlotsCleanup::NoCleanup(p);
if (invalidated_slots_in_free_space == if (invalidated_slots_in_free_space ==
FreeSpaceMayContainInvalidatedSlots::kYes) FreeSpaceMayContainInvalidatedSlots::kYes)
old_to_new_cleanup = InvalidatedSlotsCleanup::OldToNew(p); old_to_new_cleanup = InvalidatedSlotsCleanup::OldToNew(p);
intptr_t live_bytes = 0; // The free ranges map is used for filtering typed slots.
intptr_t freed_bytes = 0; FreeRangesMap free_ranges_map;
intptr_t max_freed_bytes = 0;
// Set the allocated_bytes_ counter to area_size and clear the wasted_memory_
// counter. The free operations below will decrease allocated_bytes_ to actual
// live bytes and keep track of wasted_memory_.
p->ResetAllocationStatistics();
if (code_object_registry) code_object_registry->Clear();
// Iterate over the page using the live objects and free the memory before
// the given live object.
Address free_start = p->area_start();
for (auto object_and_size : for (auto object_and_size :
LiveObjectRange<kBlackObjects>(p, marking_state_->bitmap(p))) { LiveObjectRange<kBlackObjects>(p, marking_state_->bitmap(p))) {
HeapObject const object = object_and_size.first; HeapObject const object = object_and_size.first;
@ -303,32 +380,14 @@ int Sweeper::RawSweep(
DCHECK(marking_state_->IsBlack(object)); DCHECK(marking_state_->IsBlack(object));
Address free_end = object.address(); Address free_end = object.address();
if (free_end != free_start) { if (free_end != free_start) {
CHECK_GT(free_end, free_start); max_freed_bytes =
size_t size = static_cast<size_t>(free_end - free_start); Max(max_freed_bytes,
if (free_space_mode == ZAP_FREE_SPACE) { FreeAndProcessFreedMemory(free_start, free_end, p, space,
ZapCode(free_start, size); non_empty_typed_slots, free_list_mode,
} free_space_mode));
if (free_list_mode == REBUILD_FREE_LIST) { CleanupRememberedSetEntriesForFreedMemory(
freed_bytes = reinterpret_cast<PagedSpace*>(space)->Free( free_start, free_end, p, non_empty_typed_slots, &free_ranges_map,
free_start, size, SpaceAccountingMode::kSpaceUnaccounted); &old_to_new_cleanup);
max_freed_bytes = Max(freed_bytes, max_freed_bytes);
} else {
Heap::CreateFillerObjectAt(ReadOnlyRoots(p->heap()), free_start,
static_cast<int>(size),
ClearFreedMemoryMode::kClearFreedMemory);
}
if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size);
RememberedSetSweeping::RemoveRange(p, free_start, free_end,
SlotSet::KEEP_EMPTY_BUCKETS);
RememberedSet<OLD_TO_OLD>::RemoveRange(p, free_start, free_end,
SlotSet::KEEP_EMPTY_BUCKETS);
if (non_empty_typed_slots) {
free_ranges.insert(std::pair<uint32_t, uint32_t>(
static_cast<uint32_t>(free_start - p->address()),
static_cast<uint32_t>(free_end - p->address())));
}
old_to_new_cleanup.Free(free_start, free_end);
} }
Map map = object.synchronized_map(); Map map = object.synchronized_map();
int size = object.SizeFromMap(map); int size = object.SizeFromMap(map);
@ -336,59 +395,23 @@ int Sweeper::RawSweep(
free_start = free_end + size; free_start = free_end + size;
} }
if (free_start != p->area_end()) { // If there is free memory after the last live object also free that.
CHECK_GT(p->area_end(), free_start); Address free_end = p->area_end();
size_t size = static_cast<size_t>(p->area_end() - free_start); if (free_end != free_start) {
if (free_space_mode == ZAP_FREE_SPACE) { max_freed_bytes =
ZapCode(free_start, size); Max(max_freed_bytes,
} FreeAndProcessFreedMemory(free_start, free_end, p, space,
if (free_list_mode == REBUILD_FREE_LIST) { non_empty_typed_slots, free_list_mode,
freed_bytes = reinterpret_cast<PagedSpace*>(space)->Free( free_space_mode));
free_start, size, SpaceAccountingMode::kSpaceUnaccounted); CleanupRememberedSetEntriesForFreedMemory(
max_freed_bytes = Max(freed_bytes, max_freed_bytes); free_start, free_end, p, non_empty_typed_slots, &free_ranges_map,
} else { &old_to_new_cleanup);
Heap::CreateFillerObjectAt(ReadOnlyRoots(p->heap()), free_start,
static_cast<int>(size),
ClearFreedMemoryMode::kClearFreedMemory);
}
if (should_reduce_memory_) p->DiscardUnusedMemory(free_start, size);
RememberedSetSweeping::RemoveRange(p, free_start, p->area_end(),
SlotSet::KEEP_EMPTY_BUCKETS);
RememberedSet<OLD_TO_OLD>::RemoveRange(p, free_start, p->area_end(),
SlotSet::KEEP_EMPTY_BUCKETS);
if (non_empty_typed_slots) {
free_ranges.insert(std::pair<uint32_t, uint32_t>(
static_cast<uint32_t>(free_start - p->address()),
static_cast<uint32_t>(p->area_end() - p->address())));
}
old_to_new_cleanup.Free(free_start, p->area_end());
} }
// Clear invalid typed slots after collection all free ranges. // Phase 3: Post process the page.
if (!free_ranges.empty()) { CleanupInvalidTypedSlotsOfFreeRanges(p, free_ranges_map);
TypedSlotSet* old_to_new = p->typed_slot_set<OLD_TO_NEW>(); ClearMarkBitsAndHandleLivenessStatistics(p, live_bytes, free_list_mode);
if (old_to_new != nullptr) {
old_to_new->ClearInvalidSlots(free_ranges);
}
TypedSlotSet* old_to_old = p->typed_slot_set<OLD_TO_OLD>();
if (old_to_old != nullptr) {
old_to_old->ClearInvalidSlots(free_ranges);
}
}
marking_state_->bitmap(p)->Clear();
if (free_list_mode == IGNORE_FREE_LIST) {
marking_state_->SetLiveBytes(p, 0);
// We did not free memory, so have to adjust allocated bytes here.
intptr_t freed_bytes = p->area_size() - live_bytes;
p->DecreaseAllocatedBytes(freed_bytes);
} else {
// Keep the old live bytes counter of the page until RefillFreeList, where
// the space size is refined.
// The allocated_bytes() counter is precisely the total size of objects.
DCHECK_EQ(live_bytes, p->allocated_bytes());
}
p->set_concurrent_sweeping_state(Page::ConcurrentSweepingState::kDone); p->set_concurrent_sweeping_state(Page::ConcurrentSweepingState::kDone);
if (code_object_registry) code_object_registry->Finalize(); if (code_object_registry) code_object_registry->Finalize();
if (free_list_mode == IGNORE_FREE_LIST) return 0; if (free_list_mode == IGNORE_FREE_LIST) return 0;

View File

@ -6,6 +6,7 @@
#define V8_HEAP_SWEEPER_H_ #define V8_HEAP_SWEEPER_H_
#include <deque> #include <deque>
#include <map>
#include <vector> #include <vector>
#include "src/base/platform/semaphore.h" #include "src/base/platform/semaphore.h"
@ -15,9 +16,11 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
class InvalidatedSlotsCleanup;
class MajorNonAtomicMarkingState; class MajorNonAtomicMarkingState;
class Page; class Page;
class PagedSpace; class PagedSpace;
class Space;
enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE }; enum FreeSpaceTreatmentMode { IGNORE_FREE_SPACE, ZAP_FREE_SPACE };
@ -26,6 +29,7 @@ class Sweeper {
using IterabilityList = std::vector<Page*>; using IterabilityList = std::vector<Page*>;
using SweepingList = std::vector<Page*>; using SweepingList = std::vector<Page*>;
using SweptList = std::vector<Page*>; using SweptList = std::vector<Page*>;
using FreeRangesMap = std::map<uint32_t, uint32_t>;
// Pauses the sweeper tasks or completes sweeping. // Pauses the sweeper tasks or completes sweeping.
class PauseOrCompleteScope final { class PauseOrCompleteScope final {
@ -127,6 +131,33 @@ class Sweeper {
callback(MAP_SPACE); callback(MAP_SPACE);
} }
// Helper function for RawSweep. Depending on the FreeListRebuildingMode and
// FreeSpaceTreatmentMode this function may add the free memory to a free
// list, make the memory iterable, clear it, and return the free memory to
// the operating system.
size_t FreeAndProcessFreedMemory(Address free_start, Address free_end,
Page* page, Space* space,
bool non_empty_typed_slots,
FreeListRebuildingMode free_list_mode,
FreeSpaceTreatmentMode free_space_mode);
// Helper function for RawSweep. Handle remembered set entries in the freed
// memory which require clearing.
void CleanupRememberedSetEntriesForFreedMemory(
Address free_start, Address free_end, Page* page,
bool non_empty_typed_slots, FreeRangesMap* free_ranges_map,
InvalidatedSlotsCleanup* old_to_new_cleanup);
// Helper function for RawSweep. Clears invalid typed slots in the given free
// ranges.
void CleanupInvalidTypedSlotsOfFreeRanges(
Page* page, const FreeRangesMap& free_ranges_map);
// Helper function for RawSweep. Clears the mark bits and ensures consistency
// of live bytes.
void ClearMarkBitsAndHandleLivenessStatistics(
Page* page, size_t live_bytes, FreeListRebuildingMode free_list_mode);
// Can only be called on the main thread when no tasks are running. // Can only be called on the main thread when no tasks are running.
bool IsDoneSweeping() const { bool IsDoneSweeping() const {
bool is_done = true; bool is_done = true;