[heap] Reland: Make SlotSet allocation thread-safe and refactor code.
BUG=chromium:694255
Review-Url: https://codereview.chromium.org/2783873002
Cr-Commit-Position: refs/heads/master@{#44236}

commit d0c06a7b95
parent a82418780d
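The diff below collapses MemoryChunk's four direction-specific slot-set fields into arrays indexed by the new RememberedSetType enum, and makes their lazy allocation safe for concurrent callers: a freshly built set is installed with a compare-and-swap (base::AtomicValue::TrySetValue in the diff), and a thread that loses the race deletes its copy and adopts the winner's. The following is a minimal standalone sketch of that allocate-or-adopt pattern, not the real implementation: std::atomic and compare_exchange_strong stand in for V8's AtomicValue/TrySetValue, and SlotSet is reduced to a placeholder.

// Sketch of the thread-safe lazy allocation used by MemoryChunk::AllocateSlotSet.
// std::atomic replaces base::AtomicValue; SlotSet is a dummy placeholder type.
#include <atomic>

enum RememberedSetType {
  OLD_TO_NEW,
  OLD_TO_OLD,
  NUMBER_OF_REMEMBERED_SET_TYPES = OLD_TO_OLD + 1
};

struct SlotSet {};  // placeholder for the real per-page slot bitmap

class Chunk {
 public:
  template <RememberedSetType type>
  SlotSet* slot_set() {
    return slot_set_[type].load(std::memory_order_acquire);
  }

  // Allocate-or-adopt: whichever thread wins the compare-and-swap installs its
  // set; every other thread frees the set it just built and uses the winner's.
  template <RememberedSetType type>
  SlotSet* AllocateSlotSet() {
    SlotSet* fresh = new SlotSet[1];
    SlotSet* expected = nullptr;
    if (!slot_set_[type].compare_exchange_strong(expected, fresh,
                                                 std::memory_order_acq_rel)) {
      delete[] fresh;   // lost the race
      return expected;  // compare_exchange_strong stored the installed set here
    }
    return fresh;
  }

 private:
  std::atomic<SlotSet*> slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES] = {};
};

The Release paths in the diff null-check internally, which is why ReleaseAllocatedMemory now calls them unconditionally instead of guarding each field at the call site.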
@@ -689,8 +689,8 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
     // of a GC all evacuation candidates are cleared and their slot buffers are
     // released.
     CHECK(!p->IsEvacuationCandidate());
-    CHECK_NULL(p->old_to_old_slots());
-    CHECK_NULL(p->typed_old_to_old_slots());
+    CHECK_NULL(p->slot_set<OLD_TO_OLD>());
+    CHECK_NULL(p->typed_slot_set<OLD_TO_OLD>());
     CHECK(p->SweepingDone());
     DCHECK(p->area_size() == area_size);
     pages.push_back(std::make_pair(p->LiveBytesFromFreeList(), p));
@@ -3430,10 +3430,10 @@ class EvacuationWeakObjectRetainer : public WeakObjectRetainer {
 MarkCompactCollector::Sweeper::ClearOldToNewSlotsMode
 MarkCompactCollector::Sweeper::GetClearOldToNewSlotsMode(Page* p) {
   AllocationSpace identity = p->owner()->identity();
-  if (p->old_to_new_slots() &&
+  if (p->slot_set<OLD_TO_NEW>() &&
       (identity == OLD_SPACE || identity == MAP_SPACE)) {
     return MarkCompactCollector::Sweeper::CLEAR_REGULAR_SLOTS;
-  } else if (p->typed_old_to_new_slots() && identity == CODE_SPACE) {
+  } else if (p->typed_slot_set<OLD_TO_NEW>() && identity == CODE_SPACE) {
     return MarkCompactCollector::Sweeper::CLEAR_TYPED_SLOTS;
   }
   return MarkCompactCollector::Sweeper::DO_NOT_CLEAR;
@@ -3547,7 +3547,10 @@ int MarkCompactCollector::Sweeper::RawSweep(
 
   // Clear invalid typed slots after collection all free ranges.
   if (slots_clearing_mode == CLEAR_TYPED_SLOTS) {
-    p->typed_old_to_new_slots()->RemoveInvaldSlots(free_ranges);
+    TypedSlotSet* typed_slot_set = p->typed_slot_set<OLD_TO_NEW>();
+    if (typed_slot_set != nullptr) {
+      typed_slot_set->RemoveInvaldSlots(free_ranges);
+    }
   }
 
   // Clear the mark bits of that page and reset live bytes count.
@@ -3606,15 +3609,14 @@ bool LiveObjectVisitor::VisitBlackObjects(MemoryChunk* chunk,
         state.bitmap()->ClearRange(
             chunk->AddressToMarkbitIndex(chunk->area_start()),
             chunk->AddressToMarkbitIndex(object->address()));
-        if (chunk->old_to_new_slots() != nullptr) {
-          chunk->old_to_new_slots()->RemoveRange(
+        SlotSet* slot_set = chunk->slot_set<OLD_TO_NEW>();
+        if (slot_set != nullptr) {
+          slot_set->RemoveRange(
               0, static_cast<int>(object->address() - chunk->address()),
               SlotSet::PREFREE_EMPTY_BUCKETS);
         }
-        if (chunk->typed_old_to_new_slots() != nullptr) {
-          RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(chunk, chunk->address(),
-                                                      object->address());
-        }
+        RememberedSet<OLD_TO_NEW>::RemoveRangeTyped(chunk, chunk->address(),
+                                                    object->address());
         RecomputeLiveBytes(chunk, state);
       }
       return false;
@@ -3714,7 +3716,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
 #endif
 }
 
-template <PointerDirection direction>
+template <RememberedSetType type>
 class PointerUpdateJobTraits {
  public:
   typedef int PerPageData;  // Per page data is not used in this job.
@@ -3732,7 +3734,7 @@ class PointerUpdateJobTraits {
 
  private:
   static void UpdateUntypedPointers(Heap* heap, MemoryChunk* chunk) {
-    if (direction == OLD_TO_NEW) {
+    if (type == OLD_TO_NEW) {
       RememberedSet<OLD_TO_NEW>::Iterate(chunk, [heap](Address slot) {
         return CheckAndUpdateOldToNewSlot(heap, slot);
       });
@@ -3744,20 +3746,21 @@ class PointerUpdateJobTraits {
   }
 
   static void UpdateTypedPointers(Heap* heap, MemoryChunk* chunk) {
-    if (direction == OLD_TO_OLD) {
+    if (type == OLD_TO_OLD) {
       Isolate* isolate = heap->isolate();
       RememberedSet<OLD_TO_OLD>::IterateTyped(
-          chunk, [isolate](SlotType type, Address host_addr, Address slot) {
-            return UpdateTypedSlotHelper::UpdateTypedSlot(isolate, type, slot,
-                                                          UpdateSlot);
+          chunk,
+          [isolate](SlotType slot_type, Address host_addr, Address slot) {
+            return UpdateTypedSlotHelper::UpdateTypedSlot(isolate, slot_type,
+                                                          slot, UpdateSlot);
           });
     } else {
       Isolate* isolate = heap->isolate();
       RememberedSet<OLD_TO_NEW>::IterateTyped(
           chunk,
-          [isolate, heap](SlotType type, Address host_addr, Address slot) {
+          [isolate, heap](SlotType slot_type, Address host_addr, Address slot) {
            return UpdateTypedSlotHelper::UpdateTypedSlot(
-                isolate, type, slot, [heap](Object** slot) {
+                isolate, slot_type, slot, [heap](Object** slot) {
                  return CheckAndUpdateOldToNewSlot(
                      heap, reinterpret_cast<Address>(slot));
                });
@@ -3824,11 +3827,11 @@ int NumberOfPointerUpdateTasks(int pages) {
   return Min(available_cores, (pages + kPagesPerTask - 1) / kPagesPerTask);
 }
 
-template <PointerDirection direction>
+template <RememberedSetType type>
 void UpdatePointersInParallel(Heap* heap, base::Semaphore* semaphore) {
-  PageParallelJob<PointerUpdateJobTraits<direction> > job(
+  PageParallelJob<PointerUpdateJobTraits<type> > job(
       heap, heap->isolate()->cancelable_task_manager(), semaphore);
-  RememberedSet<direction>::IterateMemoryChunks(
+  RememberedSet<type>::IterateMemoryChunks(
       heap, [&job](MemoryChunk* chunk) { job.AddPage(chunk, 0); });
   int num_pages = job.NumberOfPages();
   int num_tasks = NumberOfPointerUpdateTasks(num_pages);
@@ -3983,11 +3986,13 @@ int MarkCompactCollector::Sweeper::ParallelSweepPage(Page* page,
     DCHECK(page->SweepingDone());
 
     // After finishing sweeping of a page we clean up its remembered set.
-    if (page->typed_old_to_new_slots()) {
-      page->typed_old_to_new_slots()->FreeToBeFreedChunks();
+    TypedSlotSet* typed_slot_set = page->typed_slot_set<OLD_TO_NEW>();
+    if (typed_slot_set) {
+      typed_slot_set->FreeToBeFreedChunks();
     }
-    if (page->old_to_new_slots()) {
-      page->old_to_new_slots()->FreeToBeFreedBuckets();
+    SlotSet* slot_set = page->slot_set<OLD_TO_NEW>();
+    if (slot_set) {
+      slot_set->FreeToBeFreedBuckets();
     }
   }
 
@@ -13,19 +13,17 @@
 namespace v8 {
 namespace internal {
 
-enum PointerDirection { OLD_TO_OLD, OLD_TO_NEW };
-
 // TODO(ulan): Investigate performance of de-templatizing this class.
-template <PointerDirection direction>
+template <RememberedSetType type>
 class RememberedSet : public AllStatic {
  public:
   // Given a page and a slot in that page, this function adds the slot to the
   // remembered set.
   static void Insert(MemoryChunk* chunk, Address slot_addr) {
     DCHECK(chunk->Contains(slot_addr));
-    SlotSet* slot_set = GetSlotSet(chunk);
+    SlotSet* slot_set = chunk->slot_set<type>();
     if (slot_set == nullptr) {
-      slot_set = AllocateSlotSet(chunk);
+      slot_set = chunk->AllocateSlotSet<type>();
     }
     uintptr_t offset = slot_addr - chunk->address();
     slot_set[offset / Page::kPageSize].Insert(offset % Page::kPageSize);
@@ -35,7 +33,7 @@ class RememberedSet : public AllStatic {
   // the remembered set contains the slot.
   static bool Contains(MemoryChunk* chunk, Address slot_addr) {
     DCHECK(chunk->Contains(slot_addr));
-    SlotSet* slot_set = GetSlotSet(chunk);
+    SlotSet* slot_set = chunk->slot_set<type>();
     if (slot_set == nullptr) {
       return false;
     }
@@ -49,7 +47,7 @@ class RememberedSet : public AllStatic {
   // If the slot was never added, then the function does nothing.
   static void Remove(MemoryChunk* chunk, Address slot_addr) {
     DCHECK(chunk->Contains(slot_addr));
-    SlotSet* slot_set = GetSlotSet(chunk);
+    SlotSet* slot_set = chunk->slot_set<type>();
     if (slot_set != nullptr) {
       uintptr_t offset = slot_addr - chunk->address();
       slot_set[offset / Page::kPageSize].Remove(offset % Page::kPageSize);
@@ -60,7 +58,7 @@ class RememberedSet : public AllStatic {
   // slots from the remembered set.
   static void RemoveRange(MemoryChunk* chunk, Address start, Address end,
                           SlotSet::EmptyBucketMode mode) {
-    SlotSet* slot_set = GetSlotSet(chunk);
+    SlotSet* slot_set = chunk->slot_set<type>();
     if (slot_set != nullptr) {
       uintptr_t start_offset = start - chunk->address();
       uintptr_t end_offset = end - chunk->address();
@@ -112,8 +110,8 @@ class RememberedSet : public AllStatic {
     MemoryChunkIterator it(heap);
     MemoryChunk* chunk;
     while ((chunk = it.next()) != nullptr) {
-      SlotSet* slots = GetSlotSet(chunk);
-      TypedSlotSet* typed_slots = GetTypedSlotSet(chunk);
+      SlotSet* slots = chunk->slot_set<type>();
+      TypedSlotSet* typed_slots = chunk->typed_slot_set<type>();
       if (slots != nullptr || typed_slots != nullptr) {
         callback(chunk);
       }
@@ -125,7 +123,7 @@ class RememberedSet : public AllStatic {
   // SlotCallbackResult.
   template <typename Callback>
   static void Iterate(MemoryChunk* chunk, Callback callback) {
-    SlotSet* slots = GetSlotSet(chunk);
+    SlotSet* slots = chunk->slot_set<type>();
     if (slots != nullptr) {
       size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize;
       int new_count = 0;
@@ -135,8 +133,8 @@ class RememberedSet : public AllStatic {
       }
       // Only old-to-old slot sets are released eagerly. Old-new-slot sets are
       // released by the sweeper threads.
-      if (direction == OLD_TO_OLD && new_count == 0) {
-        chunk->ReleaseOldToOldSlots();
+      if (type == OLD_TO_OLD && new_count == 0) {
+        chunk->ReleaseSlotSet<OLD_TO_OLD>();
       }
     }
   }
@@ -145,10 +143,9 @@ class RememberedSet : public AllStatic {
   // to the remembered set.
   static void InsertTyped(Page* page, Address host_addr, SlotType slot_type,
                           Address slot_addr) {
-    TypedSlotSet* slot_set = GetTypedSlotSet(page);
+    TypedSlotSet* slot_set = page->typed_slot_set<type>();
     if (slot_set == nullptr) {
-      AllocateTypedSlotSet(page);
-      slot_set = GetTypedSlotSet(page);
+      slot_set = page->AllocateTypedSlotSet<type>();
     }
     if (host_addr == nullptr) {
       host_addr = page->address();
@@ -164,7 +161,7 @@ class RememberedSet : public AllStatic {
   // Given a page and a range of typed slots in that page, this function removes
   // the slots from the remembered set.
   static void RemoveRangeTyped(MemoryChunk* page, Address start, Address end) {
-    TypedSlotSet* slots = GetTypedSlotSet(page);
+    TypedSlotSet* slots = page->typed_slot_set<type>();
     if (slots != nullptr) {
       slots->Iterate(
           [start, end](SlotType slot_type, Address host_addr,
@@ -191,23 +188,23 @@ class RememberedSet : public AllStatic {
   // Address slot_addr) and return SlotCallbackResult.
   template <typename Callback>
   static void IterateTyped(MemoryChunk* chunk, Callback callback) {
-    TypedSlotSet* slots = GetTypedSlotSet(chunk);
+    TypedSlotSet* slots = chunk->typed_slot_set<type>();
     if (slots != nullptr) {
       int new_count = slots->Iterate(callback, TypedSlotSet::KEEP_EMPTY_CHUNKS);
       if (new_count == 0) {
-        ReleaseTypedSlotSet(chunk);
+        chunk->ReleaseTypedSlotSet<type>();
       }
     }
   }
 
   // Clear all old to old slots from the remembered set.
   static void ClearAll(Heap* heap) {
-    STATIC_ASSERT(direction == OLD_TO_OLD);
+    STATIC_ASSERT(type == OLD_TO_OLD);
     MemoryChunkIterator it(heap);
     MemoryChunk* chunk;
     while ((chunk = it.next()) != nullptr) {
-      chunk->ReleaseOldToOldSlots();
-      chunk->ReleaseTypedOldToOldSlots();
+      chunk->ReleaseSlotSet<OLD_TO_OLD>();
+      chunk->ReleaseTypedSlotSet<OLD_TO_OLD>();
     }
   }
 
@@ -218,48 +215,6 @@ class RememberedSet : public AllStatic {
   static void ClearInvalidTypedSlots(Heap* heap, MemoryChunk* chunk);
 
  private:
-  static SlotSet* GetSlotSet(MemoryChunk* chunk) {
-    if (direction == OLD_TO_OLD) {
-      return chunk->old_to_old_slots();
-    } else {
-      return chunk->old_to_new_slots();
-    }
-  }
-
-  static TypedSlotSet* GetTypedSlotSet(MemoryChunk* chunk) {
-    if (direction == OLD_TO_OLD) {
-      return chunk->typed_old_to_old_slots();
-    } else {
-      return chunk->typed_old_to_new_slots();
-    }
-  }
-
-  static void ReleaseTypedSlotSet(MemoryChunk* chunk) {
-    if (direction == OLD_TO_OLD) {
-      chunk->ReleaseTypedOldToOldSlots();
-    }
-  }
-
-  static SlotSet* AllocateSlotSet(MemoryChunk* chunk) {
-    if (direction == OLD_TO_OLD) {
-      chunk->AllocateOldToOldSlots();
-      return chunk->old_to_old_slots();
-    } else {
-      chunk->AllocateOldToNewSlots();
-      return chunk->old_to_new_slots();
-    }
-  }
-
-  static TypedSlotSet* AllocateTypedSlotSet(MemoryChunk* chunk) {
-    if (direction == OLD_TO_OLD) {
-      chunk->AllocateTypedOldToOldSlots();
-      return chunk->typed_old_to_old_slots();
-    } else {
-      chunk->AllocateTypedOldToNewSlots();
-      return chunk->typed_old_to_new_slots();
-    }
-  }
-
   static bool IsValidSlot(Heap* heap, MemoryChunk* chunk, Object** slot);
 };
 
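For orientation, the untyped path above still maps a slot to its bucket with plain offset arithmetic: Insert subtracts the chunk base from the slot address, divides by the page size to pick the SlotSet in the chunk's array, and records the remainder inside it (large chunks carry ceil(size / kPageSize) SlotSets, as the spaces.h comment later in this diff notes). A tiny self-contained sketch of just that index math; the 512 KB page size and the SlotIndex helper are assumptions for illustration, not values or types defined by this diff:

// Standalone sketch of the offset -> (slot set index, in-page offset) mapping
// used by RememberedSet::Insert above. kPageSize is assumed here; SlotIndex is
// a local helper struct, not a V8 type.
#include <cassert>
#include <cstdint>
#include <cstdio>

constexpr uintptr_t kPageSize = 512 * 1024;

struct SlotIndex {
  uintptr_t set_index;    // which SlotSet in the chunk's array
  uintptr_t page_offset;  // offset handed to SlotSet::Insert
};

SlotIndex IndexSlot(uintptr_t chunk_base, uintptr_t slot_addr) {
  uintptr_t offset = slot_addr - chunk_base;
  return {offset / kPageSize, offset % kPageSize};
}

int main() {
  uintptr_t base = 0x40000000;
  // A slot 64 bytes into the second page lands in slot set 1 at offset 64.
  SlotIndex idx = IndexSlot(base, base + kPageSize + 64);
  assert(idx.set_index == 1 && idx.page_offset == 64);
  std::printf("set %ju, offset %ju\n", (uintmax_t)idx.set_index,
              (uintmax_t)idx.page_offset);
  return 0;
}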
@@ -282,16 +282,16 @@ void Page::MarkNeverAllocateForTesting() {
 
 void Page::MarkEvacuationCandidate() {
   DCHECK(!IsFlagSet(NEVER_EVACUATE));
-  DCHECK_NULL(old_to_old_slots_);
-  DCHECK_NULL(typed_old_to_old_slots_);
+  DCHECK_NULL(slot_set<OLD_TO_OLD>());
+  DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
   SetFlag(EVACUATION_CANDIDATE);
   reinterpret_cast<PagedSpace*>(owner())->free_list()->EvictFreeListItems(this);
 }
 
 void Page::ClearEvacuationCandidate() {
   if (!IsFlagSet(COMPACTION_WAS_ABORTED)) {
-    DCHECK_NULL(old_to_old_slots_);
-    DCHECK_NULL(typed_old_to_old_slots_);
+    DCHECK_NULL(slot_set<OLD_TO_OLD>());
+    DCHECK_NULL(typed_slot_set<OLD_TO_OLD>());
   }
   ClearFlag(EVACUATION_CANDIDATE);
   InitializeFreeListCategories();
@@ -527,10 +527,10 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
   chunk->flags_ = Flags(NO_FLAGS);
   chunk->set_owner(owner);
   chunk->InitializeReservedMemory();
-  chunk->old_to_new_slots_.SetValue(nullptr);
-  chunk->old_to_old_slots_ = nullptr;
-  chunk->typed_old_to_new_slots_.SetValue(nullptr);
-  chunk->typed_old_to_old_slots_ = nullptr;
+  chunk->slot_set_[OLD_TO_NEW].SetValue(nullptr);
+  chunk->slot_set_[OLD_TO_OLD].SetValue(nullptr);
+  chunk->typed_slot_set_[OLD_TO_NEW].SetValue(nullptr);
+  chunk->typed_slot_set_[OLD_TO_OLD].SetValue(nullptr);
   chunk->skip_list_ = nullptr;
   chunk->progress_bar_ = 0;
   chunk->high_water_mark_.SetValue(static_cast<intptr_t>(area_start - base));
@@ -1116,15 +1116,15 @@ void MemoryChunk::ReleaseAllocatedMemory() {
     delete mutex_;
     mutex_ = nullptr;
   }
-  if (old_to_new_slots_.Value() != nullptr) ReleaseOldToNewSlots();
-  if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots();
-  if (typed_old_to_new_slots_.Value() != nullptr) ReleaseTypedOldToNewSlots();
-  if (typed_old_to_old_slots_ != nullptr) ReleaseTypedOldToOldSlots();
+  ReleaseSlotSet<OLD_TO_NEW>();
+  ReleaseSlotSet<OLD_TO_OLD>();
+  ReleaseTypedSlotSet<OLD_TO_NEW>();
+  ReleaseTypedSlotSet<OLD_TO_OLD>();
   if (local_tracker_ != nullptr) ReleaseLocalTracker();
   if (young_generation_bitmap_ != nullptr) ReleaseYoungGenerationBitmap();
 }
 
-static SlotSet* AllocateSlotSet(size_t size, Address page_start) {
+static SlotSet* AllocateAndInitializeSlotSet(size_t size, Address page_start) {
   size_t pages = (size + Page::kPageSize - 1) / Page::kPageSize;
   DCHECK(pages > 0);
   SlotSet* slot_set = new SlotSet[pages];
@@ -1134,46 +1134,58 @@ static SlotSet* AllocateSlotSet(size_t size, Address page_start) {
   return slot_set;
 }
 
-void MemoryChunk::AllocateOldToNewSlots() {
-  DCHECK(nullptr == old_to_new_slots_.Value());
-  old_to_new_slots_.SetValue(AllocateSlotSet(size_, address()));
+template SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_NEW>();
+template SlotSet* MemoryChunk::AllocateSlotSet<OLD_TO_OLD>();
+
+template <RememberedSetType type>
+SlotSet* MemoryChunk::AllocateSlotSet() {
+  SlotSet* slot_set = AllocateAndInitializeSlotSet(size_, address());
+  if (!slot_set_[type].TrySetValue(nullptr, slot_set)) {
+    delete[] slot_set;
+    slot_set = slot_set_[type].Value();
+    DCHECK(slot_set);
+    return slot_set;
+  }
+  return slot_set;
 }
 
-void MemoryChunk::ReleaseOldToNewSlots() {
-  SlotSet* old_to_new_slots = old_to_new_slots_.Value();
-  delete[] old_to_new_slots;
-  old_to_new_slots_.SetValue(nullptr);
+template void MemoryChunk::ReleaseSlotSet<OLD_TO_NEW>();
+template void MemoryChunk::ReleaseSlotSet<OLD_TO_OLD>();
+
+template <RememberedSetType type>
+void MemoryChunk::ReleaseSlotSet() {
+  SlotSet* slot_set = slot_set_[type].Value();
+  if (slot_set) {
+    delete[] slot_set;
+    slot_set_[type].SetValue(nullptr);
+  }
 }
 
-void MemoryChunk::AllocateOldToOldSlots() {
-  DCHECK(nullptr == old_to_old_slots_);
-  old_to_old_slots_ = AllocateSlotSet(size_, address());
+template TypedSlotSet* MemoryChunk::AllocateTypedSlotSet<OLD_TO_NEW>();
+template TypedSlotSet* MemoryChunk::AllocateTypedSlotSet<OLD_TO_OLD>();
+
+template <RememberedSetType type>
+TypedSlotSet* MemoryChunk::AllocateTypedSlotSet() {
+  TypedSlotSet* slot_set = new TypedSlotSet(address());
+  if (!typed_slot_set_[type].TrySetValue(nullptr, slot_set)) {
+    delete slot_set;
+    slot_set = typed_slot_set_[type].Value();
+    DCHECK(slot_set);
+    return slot_set;
+  }
+  return slot_set;
 }
 
-void MemoryChunk::ReleaseOldToOldSlots() {
-  delete[] old_to_old_slots_;
-  old_to_old_slots_ = nullptr;
-}
+template void MemoryChunk::ReleaseTypedSlotSet<OLD_TO_NEW>();
+template void MemoryChunk::ReleaseTypedSlotSet<OLD_TO_OLD>();
 
-void MemoryChunk::AllocateTypedOldToNewSlots() {
-  DCHECK(nullptr == typed_old_to_new_slots_.Value());
-  typed_old_to_new_slots_.SetValue(new TypedSlotSet(address()));
-}
-
-void MemoryChunk::ReleaseTypedOldToNewSlots() {
-  TypedSlotSet* typed_old_to_new_slots = typed_old_to_new_slots_.Value();
-  delete typed_old_to_new_slots;
-  typed_old_to_new_slots_.SetValue(nullptr);
-}
-
-void MemoryChunk::AllocateTypedOldToOldSlots() {
-  DCHECK(nullptr == typed_old_to_old_slots_);
-  typed_old_to_old_slots_ = new TypedSlotSet(address());
-}
-
-void MemoryChunk::ReleaseTypedOldToOldSlots() {
-  delete typed_old_to_old_slots_;
-  typed_old_to_old_slots_ = nullptr;
+template <RememberedSetType type>
+void MemoryChunk::ReleaseTypedSlotSet() {
+  TypedSlotSet* typed_slot_set = typed_slot_set_[type].Value();
+  if (typed_slot_set) {
+    delete typed_slot_set;
+    typed_slot_set_[type].SetValue(nullptr);
+  }
 }
 
 void MemoryChunk::AllocateLocalTracker() {
@@ -131,6 +131,12 @@ enum FreeListCategoryType {
 
 enum FreeMode { kLinkCategory, kDoNotLinkCategory };
 
+enum RememberedSetType {
+  OLD_TO_NEW,
+  OLD_TO_OLD,
+  NUMBER_OF_REMEMBERED_SET_TYPES = OLD_TO_OLD + 1
+};
+
 // A free list category maintains a linked list of free memory blocks.
 class FreeListCategory {
  public:
@@ -334,17 +340,15 @@ class MemoryChunk {
       + kPointerSize      // Heap* heap_
       + kIntptrSize       // intptr_t progress_bar_
       + kIntptrSize       // intptr_t live_byte_count_
-      + kPointerSize      // SlotSet* old_to_new_slots_
-      + kPointerSize      // SlotSet* old_to_old_slots_
-      + kPointerSize      // TypedSlotSet* typed_old_to_new_slots_
-      + kPointerSize      // TypedSlotSet* typed_old_to_old_slots_
-      + kPointerSize      // SkipList* skip_list_
-      + kPointerSize      // AtomicValue high_water_mark_
-      + kPointerSize      // base::Mutex* mutex_
-      + kPointerSize      // base::AtomicWord concurrent_sweeping_
-      + 2 * kSizetSize    // AtomicNumber free-list statistics
-      + kPointerSize      // AtomicValue next_chunk_
-      + kPointerSize      // AtomicValue prev_chunk_
+      + kPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES  // SlotSet* array
+      + kPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES  // TypedSlotSet* array
+      + kPointerSize  // SkipList* skip_list_
+      + kPointerSize  // AtomicValue high_water_mark_
+      + kPointerSize  // base::Mutex* mutex_
+      + kPointerSize  // base::AtomicWord concurrent_sweeping_
+      + 2 * kSizetSize  // AtomicNumber free-list statistics
+      + kPointerSize  // AtomicValue next_chunk_
+      + kPointerSize  // AtomicValue prev_chunk_
       + FreeListCategory::kSize * kNumberOfCategories
       // FreeListCategory categories_[kNumberOfCategories]
       + kPointerSize  // LocalArrayBufferTracker* local_tracker_
@@ -429,24 +433,26 @@ class MemoryChunk {
 
   inline void set_skip_list(SkipList* skip_list) { skip_list_ = skip_list; }
 
-  inline SlotSet* old_to_new_slots() { return old_to_new_slots_.Value(); }
-  inline SlotSet* old_to_old_slots() { return old_to_old_slots_; }
-  inline TypedSlotSet* typed_old_to_new_slots() {
-    return typed_old_to_new_slots_.Value();
+  template <RememberedSetType type>
+  SlotSet* slot_set() {
+    return slot_set_[type].Value();
   }
-  inline TypedSlotSet* typed_old_to_old_slots() {
-    return typed_old_to_old_slots_;
+
+  template <RememberedSetType type>
+  TypedSlotSet* typed_slot_set() {
+    return typed_slot_set_[type].Value();
   }
+
   inline LocalArrayBufferTracker* local_tracker() { return local_tracker_; }
 
-  V8_EXPORT_PRIVATE void AllocateOldToNewSlots();
-  void ReleaseOldToNewSlots();
-  V8_EXPORT_PRIVATE void AllocateOldToOldSlots();
-  void ReleaseOldToOldSlots();
-  void AllocateTypedOldToNewSlots();
-  void ReleaseTypedOldToNewSlots();
-  void AllocateTypedOldToOldSlots();
-  void ReleaseTypedOldToOldSlots();
+  template <RememberedSetType type>
+  SlotSet* AllocateSlotSet();
+  template <RememberedSetType type>
+  void ReleaseSlotSet();
+  template <RememberedSetType type>
+  TypedSlotSet* AllocateTypedSlotSet();
+  template <RememberedSetType type>
+  void ReleaseTypedSlotSet();
   void AllocateLocalTracker();
   void ReleaseLocalTracker();
   void AllocateYoungGenerationBitmap();
@@ -597,10 +603,9 @@ class MemoryChunk {
   // A single slot set for small pages (of size kPageSize) or an array of slot
   // set for large pages. In the latter case the number of entries in the array
   // is ceil(size() / kPageSize).
-  base::AtomicValue<SlotSet*> old_to_new_slots_;
-  SlotSet* old_to_old_slots_;
-  base::AtomicValue<TypedSlotSet*> typed_old_to_new_slots_;
-  TypedSlotSet* typed_old_to_old_slots_;
+  base::AtomicValue<SlotSet*> slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
+  base::AtomicValue<TypedSlotSet*>
+      typed_slot_set_[NUMBER_OF_REMEMBERED_SET_TYPES];
 
   SkipList* skip_list_;
 