[heap] Reland: Take page lock when scavenging old to new references in Scavenger.
BUG=v8:5807
Review-Url: https://codereview.chromium.org/2826593004
Cr-Commit-Position: refs/heads/master@{#44697}
parent 2e4b86b0de
commit 26bc590629
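
Note on the change (not part of the CL): the diff below switches the page mutex from base::Mutex to base::RecursiveMutex and adds a SYNCHRONIZED iteration mode to RememberedSet. The standalone C++ sketch below illustrates why a recursive mutex is needed in this setup; the re-entrancy scenario is an assumption inferred from the diff (the scavenger holds a page's mutex around the per-slot callback, and code reached from that callback, such as the ArrayBufferTracker, locks the same page mutex again). All names in the sketch (page_mutex, TrackerRegister, IterateSynchronized) are illustrative stand-ins, not V8 identifiers.

// Standalone sketch, not V8 code: nested locking of the same page mutex.
#include <iostream>
#include <mutex>

std::recursive_mutex page_mutex;  // stand-in for MemoryChunk::mutex_

// Stand-in for ArrayBufferTracker::RegisterNew, which takes the page lock.
void TrackerRegister() {
  std::lock_guard<std::recursive_mutex> guard(page_mutex);
  std::cout << "tracker updated under page lock\n";
}

// Stand-in for SYNCHRONIZED remembered-set iteration: the page mutex is
// held around the per-slot callback.
template <typename Callback>
void IterateSynchronized(Callback callback) {
  std::lock_guard<std::recursive_mutex> guard(page_mutex);
  callback();  // with a plain std::mutex this nested lock would deadlock
}

int main() {
  IterateSynchronized([] { TrackerRegister(); });
  return 0;
}
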
@@ -17,7 +17,7 @@ void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
   size_t length = NumberToSize(buffer->byte_length());
   Page* page = Page::FromAddress(buffer->address());
   {
-    base::LockGuard<base::Mutex> guard(page->mutex());
+    base::LockGuard<base::RecursiveMutex> guard(page->mutex());
     LocalArrayBufferTracker* tracker = page->local_tracker();
     if (tracker == nullptr) {
       page->AllocateLocalTracker();
@@ -39,7 +39,7 @@ void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
   Page* page = Page::FromAddress(buffer->address());
   size_t length = 0;
   {
-    base::LockGuard<base::Mutex> guard(page->mutex());
+    base::LockGuard<base::RecursiveMutex> guard(page->mutex());
     LocalArrayBufferTracker* tracker = page->local_tracker();
     DCHECK_NOT_NULL(tracker);
     length = tracker->Remove(buffer);
@@ -130,7 +130,7 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
 bool ArrayBufferTracker::IsTracked(JSArrayBuffer* buffer) {
   Page* page = Page::FromAddress(buffer->address());
   {
-    base::LockGuard<base::Mutex> guard(page->mutex());
+    base::LockGuard<base::RecursiveMutex> guard(page->mutex());
     LocalArrayBufferTracker* tracker = page->local_tracker();
     if (tracker == nullptr) return false;
     return tracker->IsTracked(buffer);
@@ -1729,12 +1729,14 @@ void Heap::Scavenge() {
   {
     // Copy objects reachable from the old generation.
     TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS);
-    RememberedSet<OLD_TO_NEW>::Iterate(this, [this](Address addr) {
-      return Scavenger::CheckAndScavengeObject(this, addr);
-    });
+    RememberedSet<OLD_TO_NEW>::Iterate(
+        this, SYNCHRONIZED, [this](Address addr) {
+          return Scavenger::CheckAndScavengeObject(this, addr);
+        });
 
     RememberedSet<OLD_TO_NEW>::IterateTyped(
-        this, [this](SlotType type, Address host_addr, Address addr) {
+        this, SYNCHRONIZED,
+        [this](SlotType type, Address host_addr, Address addr) {
           return UpdateTypedSlotHelper::UpdateTypedSlot(
               isolate(), type, addr, [this](Object** addr) {
                 // We expect that objects referenced by code are long living.
@@ -2384,11 +2384,12 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
   {
     TRACE_GC(heap()->tracer(),
              GCTracer::Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS);
-    RememberedSet<OLD_TO_NEW>::Iterate(heap(), [this](Address addr) {
-      return CheckAndMarkObject(heap(), addr);
-    });
+    RememberedSet<OLD_TO_NEW>::Iterate(
+        heap(), NON_SYNCHRONIZED,
+        [this](Address addr) { return CheckAndMarkObject(heap(), addr); });
     RememberedSet<OLD_TO_NEW>::IterateTyped(
-        heap(), [this](SlotType type, Address host_addr, Address addr) {
+        heap(), NON_SYNCHRONIZED,
+        [this](SlotType type, Address host_addr, Address addr) {
           return UpdateTypedSlotHelper::UpdateTypedSlot(
               isolate(), type, addr, [this](Object** addr) {
                 return CheckAndMarkObject(heap(),
@@ -3986,7 +3987,7 @@ int MarkCompactCollector::Sweeper::ParallelSweepPage(Page* page,
                                                      AllocationSpace identity) {
   int max_freed = 0;
   {
-    base::LockGuard<base::Mutex> guard(page->mutex());
+    base::LockGuard<base::RecursiveMutex> guard(page->mutex());
     // If this page was already swept in the meantime, we can return here.
     if (page->SweepingDone()) return 0;
     DCHECK_EQ(Page::kSweepingPending,
@@ -13,6 +13,8 @@
 namespace v8 {
 namespace internal {
 
+enum RememberedSetIterationMode { SYNCHRONIZED, NON_SYNCHRONIZED };
+
 // TODO(ulan): Investigate performance of de-templatizing this class.
 template <RememberedSetType type>
 class RememberedSet : public AllStatic {
@@ -98,9 +100,13 @@ class RememberedSet : public AllStatic {
   // Iterates and filters the remembered set with the given callback.
   // The callback should take (Address slot) and return SlotCallbackResult.
   template <typename Callback>
-  static void Iterate(Heap* heap, Callback callback) {
-    IterateMemoryChunks(
-        heap, [callback](MemoryChunk* chunk) { Iterate(chunk, callback); });
+  static void Iterate(Heap* heap, RememberedSetIterationMode mode,
+                      Callback callback) {
+    IterateMemoryChunks(heap, [mode, callback](MemoryChunk* chunk) {
+      if (mode == SYNCHRONIZED) chunk->mutex()->Lock();
+      Iterate(chunk, callback);
+      if (mode == SYNCHRONIZED) chunk->mutex()->Unlock();
+    });
   }
 
   // Iterates over all memory chunks that contains non-empty slot sets.
@@ -177,9 +183,12 @@ class RememberedSet : public AllStatic {
   // The callback should take (SlotType slot_type, SlotAddress slot) and return
   // SlotCallbackResult.
   template <typename Callback>
-  static void IterateTyped(Heap* heap, Callback callback) {
-    IterateMemoryChunks(heap, [callback](MemoryChunk* chunk) {
+  static void IterateTyped(Heap* heap, RememberedSetIterationMode mode,
+                           Callback callback) {
+    IterateMemoryChunks(heap, [mode, callback](MemoryChunk* chunk) {
+      if (mode == SYNCHRONIZED) chunk->mutex()->Lock();
       IterateTyped(chunk, callback);
+      if (mode == SYNCHRONIZED) chunk->mutex()->Unlock();
     });
   }
 
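
Note on the remembered-set API change (not part of the CL): callers now choose per call site whether each chunk's mutex is held around the callback, as seen above in Heap::Scavenge (SYNCHRONIZED) versus minor mark-compact marking (NON_SYNCHRONIZED). The following is a minimal simplified sketch of that mode-parameterized iteration pattern using standard library types; SimpleChunk and IterateChunks are illustrative stand-ins, not the V8 API.

// Standalone sketch, not V8 code: mode-conditional locking per chunk.
#include <mutex>
#include <vector>

enum IterationMode { SYNCHRONIZED, NON_SYNCHRONIZED };

struct SimpleChunk {
  std::recursive_mutex mutex;  // mirrors the (now recursive) page mutex
  std::vector<int> slots;      // stand-in for the chunk's slot set
};

template <typename Callback>
void IterateChunks(std::vector<SimpleChunk*>& chunks, IterationMode mode,
                   Callback callback) {
  for (SimpleChunk* chunk : chunks) {
    // Only callers that can race with other holders of the page lock
    // (e.g. concurrent sweepers) pay for the lock; single-threaded
    // callers pass NON_SYNCHRONIZED and skip it.
    if (mode == SYNCHRONIZED) chunk->mutex.lock();
    for (int slot : chunk->slots) callback(slot);
    if (mode == SYNCHRONIZED) chunk->mutex.unlock();
  }
}

int main() {
  SimpleChunk chunk;
  chunk.slots = {1, 2, 3};
  std::vector<SimpleChunk*> chunks = {&chunk};
  int sum = 0;
  IterateChunks(chunks, SYNCHRONIZED, [&sum](int slot) { sum += slot; });
  return sum == 6 ? 0 : 1;
}
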
@@ -535,7 +535,7 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
   chunk->progress_bar_ = 0;
   chunk->high_water_mark_.SetValue(static_cast<intptr_t>(area_start - base));
   chunk->concurrent_sweeping_state().SetValue(kSweepingDone);
-  chunk->mutex_ = new base::Mutex();
+  chunk->mutex_ = new base::RecursiveMutex();
   chunk->available_in_free_list_ = 0;
   chunk->wasted_memory_ = 0;
   chunk->young_generation_bitmap_ = nullptr;
@@ -344,7 +344,7 @@ class MemoryChunk {
       + kPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES  // TypedSlotSet* array
       + kPointerSize  // SkipList* skip_list_
       + kPointerSize  // AtomicValue high_water_mark_
-      + kPointerSize  // base::Mutex* mutex_
+      + kPointerSize  // base::RecursiveMutex* mutex_
       + kPointerSize  // base::AtomicWord concurrent_sweeping_
       + 2 * kSizetSize  // AtomicNumber free-list statistics
       + kPointerSize  // AtomicValue next_chunk_
@@ -404,7 +404,7 @@ class MemoryChunk {
     return reinterpret_cast<Address>(const_cast<MemoryChunk*>(this));
   }
 
-  base::Mutex* mutex() { return mutex_; }
+  base::RecursiveMutex* mutex() { return mutex_; }
 
   bool Contains(Address addr) {
     return addr >= area_start() && addr < area_end();
@@ -613,7 +613,7 @@ class MemoryChunk {
   // count highest number of bytes ever allocated on the page.
   base::AtomicValue<intptr_t> high_water_mark_;
 
-  base::Mutex* mutex_;
+  base::RecursiveMutex* mutex_;
 
   base::AtomicValue<ConcurrentSweepingState> concurrent_sweeping_;
 