[heap] Make creation of NewSpace and NewLargeObjectSpace optional
Neither NewSpace nor NewLargeObjectSpace is used when FLAG_single_generation is enabled. Until now both spaces still existed in this mode even though they were never used. This CL makes both spaces optional, which ensures that we do not inadvertently create objects in them or use them in any other way.

Bug: v8:11644
Change-Id: I52a449c62e9d3df126c95419433d2abbd75539a5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2862768
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#74345}
parent 3fa681db7a
commit 9b78e758af
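The change described above follows one pattern throughout the heap: the young-generation spaces are only created when the heap actually runs with a young generation, and every access either null-checks the space or goes through a helper that tolerates its absence. The following sketch is a simplified, hypothetical illustration of that pattern; the class names, the flag constant, and the helper signatures below are stand-ins, not the real V8 declarations.

// Minimal, self-contained sketch (not actual V8 code) of the pattern this CL
// applies: young-generation spaces are only created when more than one
// generation exists, and every accessor tolerates their absence.
#include <cstddef>
#include <memory>

namespace sketch {

constexpr bool kSingleGeneration = true;  // stand-in for FLAG_single_generation

class NewSpace {
 public:
  std::size_t Capacity() const { return 16 * 1024 * 1024; }
  std::size_t Size() const { return 0; }
};

class Heap {
 public:
  void SetUpSpaces() {
    // The young-generation space is simply never created in single-generation
    // mode, instead of being created and left unused.
    if (!kSingleGeneration) new_space_ = std::make_unique<NewSpace>();
  }

  NewSpace* new_space() const { return new_space_.get(); }

  // Call sites null-check the space, mirroring the NewSpaceCapacity() and
  // NewSpaceSize() helpers this CL adds to Heap.
  std::size_t NewSpaceCapacity() const {
    return new_space_ ? new_space_->Capacity() : 0;
  }
  std::size_t NewSpaceSize() const {
    return new_space_ ? new_space_->Size() : 0;
  }

 private:
  std::unique_ptr<NewSpace> new_space_;
};

}  // namespace sketch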
@@ -8560,10 +8560,11 @@ bool Isolate::GetHeapSpaceStatistics(HeapSpaceStatistics* space_statistics,
     }
   } else {
     i::Space* space = heap->space(static_cast<int>(index));
-    space_statistics->space_size_ = space->CommittedMemory();
-    space_statistics->space_used_size_ = space->SizeOfObjects();
-    space_statistics->space_available_size_ = space->Available();
-    space_statistics->physical_space_size_ = space->CommittedPhysicalMemory();
+    space_statistics->space_size_ = space ? space->CommittedMemory() : 0;
+    space_statistics->space_used_size_ = space ? space->SizeOfObjects() : 0;
+    space_statistics->space_available_size_ = space ? space->Available() : 0;
+    space_statistics->physical_space_size_ =
+        space ? space->CommittedPhysicalMemory() : 0;
   }
   return true;
 }
@@ -416,13 +416,13 @@ DEFINE_BOOL_READONLY(enable_unconditional_write_barriers,
                      "always use full write barriers")
 
 #ifdef V8_ENABLE_SINGLE_GENERATION
-#define V8_GENERATION_BOOL true
+#define V8_SINGLE_GENERATION_BOOL true
 #else
-#define V8_GENERATION_BOOL false
+#define V8_SINGLE_GENERATION_BOOL false
 #endif
 
 DEFINE_BOOL_READONLY(
-    single_generation, V8_GENERATION_BOOL,
+    single_generation, V8_SINGLE_GENERATION_BOOL,
     "allocate all objects from young generation to old generation")
 
 #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
@@ -460,11 +460,23 @@ void ConcurrentMarking::Run(JobDelegate* delegate,
           break;
         }
         objects_processed++;
-        // The order of the two loads is important.
-        Address new_space_top = heap_->new_space()->original_top_acquire();
-        Address new_space_limit = heap_->new_space()->original_limit_relaxed();
-        Address new_large_object = heap_->new_lo_space()->pending_object();
+
+        Address new_space_top = kNullAddress;
+        Address new_space_limit = kNullAddress;
+        Address new_large_object = kNullAddress;
+
+        if (heap_->new_space()) {
+          // The order of the two loads is important.
+          new_space_top = heap_->new_space()->original_top_acquire();
+          new_space_limit = heap_->new_space()->original_limit_relaxed();
+        }
+
+        if (heap_->new_lo_space()) {
+          new_large_object = heap_->new_lo_space()->pending_object();
+        }
+
         Address addr = object.address();
+
         if ((new_space_top <= addr && addr < new_space_limit) ||
             addr == new_large_object) {
           local_marking_worklists.PushOnHold(object);
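The concurrent marker keeps its per-object check cheap by defaulting the new-space bounds and the pending large object to kNullAddress when the spaces do not exist: an empty half-open range can never contain an object address, so no extra branch is needed inside the hot loop. A small standalone sketch of that reasoning (not V8 code; the names below are illustrative):

// With top == limit == 0 the range check can never match, so a missing new
// space needs no per-object "does the space exist?" branch.
#include <cassert>
#include <cstdint>

int main() {
  using Address = std::uintptr_t;
  constexpr Address kNullAddress = 0;

  Address new_space_top = kNullAddress;    // defaults used when the space is absent
  Address new_space_limit = kNullAddress;
  Address new_large_object = kNullAddress;

  Address addr = 0x1000;  // some nonzero object address
  bool in_young = (new_space_top <= addr && addr < new_space_limit) ||
                  addr == new_large_object;
  assert(!in_young);  // the empty range [0, 0) matches nothing
  return 0;
}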
@@ -308,8 +308,10 @@ void GCTracer::StartInSafepoint() {
   current_.start_object_size = heap_->SizeOfObjects();
   current_.start_memory_size = heap_->memory_allocator()->Size();
   current_.start_holes_size = CountTotalHolesSize(heap_);
-  current_.young_object_size =
-      heap_->new_space()->Size() + heap_->new_lo_space()->SizeOfObjects();
+  size_t new_space_size = (heap_->new_space() ? heap_->new_space()->Size() : 0);
+  size_t new_lo_space_size =
+      (heap_->new_lo_space() ? heap_->new_lo_space()->SizeOfObjects() : 0);
+  current_.young_object_size = new_space_size + new_lo_space_size;
 }
 
 void GCTracer::ResetIncrementalMarkingCounters() {
@@ -158,14 +158,6 @@ Address* Heap::OldSpaceAllocationLimitAddress() {
   return old_space_->allocation_limit_address();
 }
 
-void Heap::UpdateNewSpaceAllocationCounter() {
-  new_space_allocation_counter_ = NewSpaceAllocationCounter();
-}
-
-size_t Heap::NewSpaceAllocationCounter() {
-  return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
-}
-
 inline const base::AddressRegion& Heap::code_region() {
 #ifdef V8_ENABLE_THIRD_PARTY_HEAP
   return tp_heap_->GetCodeRange();
@@ -186,7 +178,8 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
 #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
   if (FLAG_random_gc_interval > 0 || FLAG_gc_interval >= 0) {
     if (!always_allocate() && Heap::allocation_timeout_-- <= 0) {
-      return AllocationResult::Retry();
+      AllocationSpace space = FLAG_single_generation ? OLD_SPACE : NEW_SPACE;
+      return AllocationResult::Retry(space);
     }
   }
 #endif
@@ -292,10 +285,9 @@ HeapObject Heap::AllocateRawWith(int size, AllocationType allocation,
   DCHECK(AllowHeapAllocation::IsAllowed());
   DCHECK_EQ(gc_state(), NOT_IN_GC);
   Heap* heap = isolate()->heap();
-  if (!V8_ENABLE_THIRD_PARTY_HEAP_BOOL &&
-      allocation == AllocationType::kYoung &&
+  if (allocation == AllocationType::kYoung &&
       alignment == AllocationAlignment::kWordAligned &&
-      size <= MaxRegularHeapObjectSize(allocation)) {
+      size <= MaxRegularHeapObjectSize(allocation) && !FLAG_single_generation) {
     Address* top = heap->NewSpaceAllocationTopAddress();
     Address* limit = heap->NewSpaceAllocationLimitAddress();
     if ((*limit - *top >= static_cast<unsigned>(size)) &&
@@ -400,7 +392,9 @@ void Heap::FinalizeExternalString(String string) {
   ext_string.DisposeResource(isolate());
 }
 
-Address Heap::NewSpaceTop() { return new_space_->top(); }
+Address Heap::NewSpaceTop() {
+  return new_space_ ? new_space_->top() : kNullAddress;
+}
 
 bool Heap::InYoungGeneration(Object object) {
   DCHECK(!HasWeakHeapObjectTag(object));
@@ -589,9 +583,14 @@ void Heap::UpdateAllocationSite(Map map, HeapObject object,
 bool Heap::IsPendingAllocation(HeapObject object) {
   // TODO(ulan): Optimize this function to perform 3 loads at most.
   Address addr = object.address();
-  Address top = new_space_->original_top_acquire();
-  Address limit = new_space_->original_limit_relaxed();
-  if (top <= addr && addr < limit) return true;
+  Address top, limit;
+
+  if (new_space_) {
+    top = new_space_->original_top_acquire();
+    limit = new_space_->original_limit_relaxed();
+    if (top <= addr && addr < limit) return true;
+  }
+
   PagedSpaceIterator spaces(this);
   for (PagedSpace* space = spaces.Next(); space != nullptr;
        space = spaces.Next()) {
@@ -600,7 +599,7 @@ bool Heap::IsPendingAllocation(HeapObject object) {
     if (top <= addr && addr < limit) return true;
   }
   if (addr == lo_space_->pending_object()) return true;
-  if (addr == new_lo_space_->pending_object()) return true;
+  if (new_lo_space_ && addr == new_lo_space_->pending_object()) return true;
  if (addr == code_lo_space_->pending_object()) return true;
   return false;
 }
src/heap/heap.cc
@@ -324,7 +324,7 @@ size_t Heap::SemiSpaceSizeFromYoungGenerationSize(
 size_t Heap::Capacity() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_->Capacity() + OldGenerationCapacity();
+  return NewSpaceCapacity() + OldGenerationCapacity();
 }
 
 size_t Heap::OldGenerationCapacity() {
@@ -359,7 +359,10 @@ size_t Heap::CommittedMemoryOfUnmapper() {
 size_t Heap::CommittedMemory() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_->CommittedMemory() + new_lo_space_->Size() +
+  size_t new_space_committed = new_space_ ? new_space_->CommittedMemory() : 0;
+  size_t new_lo_space_committed = new_lo_space_ ? new_lo_space_->Size() : 0;
+
+  return new_space_committed + new_lo_space_committed +
          CommittedOldGenerationMemory();
 }
 
@@ -422,14 +425,17 @@ bool Heap::CanExpandOldGenerationBackground(LocalHeap* local_heap,
 }
 
 bool Heap::CanPromoteYoungAndExpandOldGeneration(size_t size) {
+  size_t new_space_capacity = NewSpaceCapacity();
+  size_t new_lo_space_capacity = new_lo_space_ ? new_lo_space_->Size() : 0;
+
   // Over-estimate the new space size using capacity to allow some slack.
-  return CanExpandOldGeneration(size + new_space_->Capacity() +
-                                new_lo_space_->Size());
+  return CanExpandOldGeneration(size + new_space_capacity +
+                                new_lo_space_capacity);
 }
 
 bool Heap::HasBeenSetUp() const {
-  // We will always have a new space when the heap is set up.
-  return new_space_ != nullptr;
+  // We will always have an old space when the heap is set up.
+  return old_space_ != nullptr;
 }
 
 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
@@ -441,7 +447,7 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
     return MARK_COMPACTOR;
   }
 
-  if (FLAG_gc_global || ShouldStressCompaction()) {
+  if (FLAG_gc_global || ShouldStressCompaction() || FLAG_single_generation) {
     *reason = "GC in old space forced by flags";
     return MARK_COMPACTOR;
   }
@@ -486,7 +492,7 @@ void Heap::PrintShortHeapStatistics() {
                "New space, used: %6zu KB"
                ", available: %6zu KB"
                ", committed: %6zu KB\n",
-               new_space_->Size() / KB, new_space_->Available() / KB,
+               NewSpaceSize() / KB, new_space_->Available() / KB,
               new_space_->CommittedMemory() / KB);
   PrintIsolate(isolate_,
                "New large object space, used: %6zu KB"
@@ -903,7 +909,7 @@ void Heap::GarbageCollectionPrologue() {
   if (FLAG_gc_verbose) Print();
 #endif  // DEBUG
 
-  if (new_space_->IsAtMaximumCapacity()) {
+  if (new_space_ && new_space_->IsAtMaximumCapacity()) {
     maximum_size_scavenges_++;
   } else {
     maximum_size_scavenges_ = 0;
@@ -920,9 +926,20 @@ void Heap::GarbageCollectionPrologueInSafepoint() {
   TRACE_GC(tracer(), GCTracer::Scope::HEAP_PROLOGUE_SAFEPOINT);
   gc_count_++;
 
-  UpdateNewSpaceAllocationCounter();
-  CheckNewSpaceExpansionCriteria();
-  new_space_->ResetParkedAllocationBuffers();
+  if (new_space_) {
+    UpdateNewSpaceAllocationCounter();
+    CheckNewSpaceExpansionCriteria();
+    new_space_->ResetParkedAllocationBuffers();
+  }
+}
+
+void Heap::UpdateNewSpaceAllocationCounter() {
+  new_space_allocation_counter_ = NewSpaceAllocationCounter();
+}
+
+size_t Heap::NewSpaceAllocationCounter() {
+  return new_space_allocation_counter_ +
+         (new_space_ ? new_space()->AllocatedSinceLastGC() : 0);
 }
 
 size_t Heap::SizeOfObjects() {
@@ -998,14 +1015,14 @@ void Heap::RemoveAllocationObserversFromAllSpaces(
 
 void Heap::PublishPendingAllocations() {
   if (FLAG_enable_third_party_heap) return;
-  new_space_->MarkLabStartInitialized();
+  if (new_space_) new_space_->MarkLabStartInitialized();
   PagedSpaceIterator spaces(this);
   for (PagedSpace* space = spaces.Next(); space != nullptr;
        space = spaces.Next()) {
     space->MoveOriginalTopForward();
   }
   lo_space_->ResetPendingObject();
-  new_lo_space_->ResetPendingObject();
+  if (new_lo_space_) new_lo_space_->ResetPendingObject();
   code_lo_space_->ResetPendingObject();
 }
 
@@ -1103,7 +1120,8 @@ void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite site) {
 }
 
 bool Heap::DeoptMaybeTenuredAllocationSites() {
-  return new_space_->IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
+  return new_space_ && new_space_->IsAtMaximumCapacity() &&
+         maximum_size_scavenges_ == 0;
 }
 
 void Heap::ProcessPretenuringFeedback() {
@@ -1247,7 +1265,10 @@ void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) {
   UPDATE_COUNTERS_FOR_SPACE(space) \
   UPDATE_FRAGMENTATION_FOR_SPACE(space)
 
-  UPDATE_COUNTERS_FOR_SPACE(new_space)
+  if (new_space()) {
+    UPDATE_COUNTERS_FOR_SPACE(new_space)
+  }
+
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
@@ -1277,7 +1298,7 @@ void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) {
     ZapFromSpace();
   }
 
-  {
+  if (new_space()) {
     TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE_REDUCE_NEW_SPACE);
     ReduceNewSpaceSize();
   }
@@ -1611,6 +1632,7 @@ void Heap::EnsureFillerObjectAtTop() {
   // evacuation of a non-full new space (or if we are on the last page) there
   // may be uninitialized memory behind top. We fill the remainder of the page
   // with a filler.
+  if (!new_space_) return;
   Address to_top = new_space_->top();
   Page* page = Page::FromAddress(to_top - kTaggedSize);
   if (page->Contains(to_top)) {
@@ -1912,7 +1934,7 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReached(
   } else if (reached_limit == IncrementalMarkingLimit::kHardLimit) {
     StartIncrementalMarking(
         gc_flags,
-        OldGenerationSpaceAvailable() <= new_space_->Capacity()
+        OldGenerationSpaceAvailable() <= NewSpaceCapacity()
            ? GarbageCollectionReason::kAllocationLimit
            : GarbageCollectionReason::kGlobalAllocationLimit,
        gc_callback_flags);
@@ -1928,7 +1950,7 @@ void Heap::StartIncrementalMarkingIfAllocationLimitIsReachedBackground() {
 
   const size_t old_generation_space_available = OldGenerationSpaceAvailable();
 
-  if (old_generation_space_available < new_space_->Capacity()) {
+  if (old_generation_space_available < NewSpaceCapacity()) {
     incremental_marking()->incremental_marking_job()->ScheduleTask(this);
   }
 }
@@ -2055,6 +2077,7 @@ static void VerifyStringTable(Isolate* isolate) {
 #endif  // VERIFY_HEAP
 
 void Heap::EnsureFromSpaceIsCommitted() {
+  if (!new_space_) return;
   if (new_space_->CommitFromSpaceIfNeeded()) return;
 
   // Committing memory to from space failed.
@@ -2151,7 +2174,7 @@ size_t Heap::PerformGarbageCollection(
   EnsureFromSpaceIsCommitted();
 
   size_t start_young_generation_size =
-      Heap::new_space()->Size() + new_lo_space()->SizeOfObjects();
+      NewSpaceSize() + (new_lo_space() ? new_lo_space()->SizeOfObjects() : 0);
 
   switch (collector) {
     case MARK_COMPACTOR:
@@ -2281,7 +2304,7 @@ void Heap::RecomputeLimits(GarbageCollector collector) {
   }
 
   size_t old_gen_size = OldGenerationSizeOfObjects();
-  size_t new_space_capacity = new_space()->Capacity();
+  size_t new_space_capacity = NewSpaceCapacity();
   HeapGrowingMode mode = CurrentHeapGrowingMode();
 
   if (collector == MARK_COMPACTOR) {
@@ -2386,7 +2409,7 @@ void Heap::MarkCompact() {
 
 void Heap::MinorMarkCompact() {
 #ifdef ENABLE_MINOR_MC
-  DCHECK(FLAG_minor_mc);
+  DCHECK(FLAG_minor_mc && !FLAG_single_generation);
 
   PauseAllocationObserversScope pause_observers(this);
   SetGCState(MINOR_MARK_COMPACT);
@@ -2493,6 +2516,8 @@ void Heap::EvacuateYoungGeneration() {
 }
 
 void Heap::Scavenge() {
+  DCHECK(!FLAG_single_generation);
+
   if (fast_promotion_mode_ && CanPromoteYoungAndExpandOldGeneration(0)) {
     tracer()->NotifyYoungGenerationHandling(
         YoungGenerationHandling::kFastPromotionDuringScavenge);
@@ -2541,12 +2566,15 @@ void Heap::Scavenge() {
 }
 
 void Heap::ComputeFastPromotionMode() {
+  if (!new_space_) return;
+
   const size_t survived_in_new_space =
-      survived_last_scavenge_ * 100 / new_space_->Capacity();
+      survived_last_scavenge_ * 100 / NewSpaceCapacity();
   fast_promotion_mode_ =
       !FLAG_optimize_for_size && FLAG_fast_promotion_new_space &&
       !ShouldReduceMemory() && new_space_->IsAtMaximumCapacity() &&
       survived_in_new_space >= kMinPromotedPercentForFastPromotionMode;
 
   if (FLAG_trace_gc_verbose && !FLAG_trace_gc_ignore_scavenger) {
     PrintIsolate(isolate(), "Fast promotion mode: %s survival rate: %zu%%\n",
                  fast_promotion_mode_ ? "true" : "false",
@@ -2969,7 +2997,7 @@ HeapObject Heap::AlignWithFiller(ReadOnlyRoots roots, HeapObject object,
 
 void* Heap::AllocateExternalBackingStore(
     const std::function<void*(size_t)>& allocate, size_t byte_length) {
-  if (!always_allocate()) {
+  if (!always_allocate() && new_space()) {
     size_t new_space_backing_store_bytes =
         new_space()->ExternalBackingStoreBytes();
     if (new_space_backing_store_bytes >= 2 * kMaxSemiSpaceSize &&
@@ -3570,6 +3598,12 @@ void Heap::ReduceNewSpaceSize() {
   }
 }
 
+size_t Heap::NewSpaceSize() { return new_space() ? new_space()->Size() : 0; }
+
+size_t Heap::NewSpaceCapacity() {
+  return new_space() ? new_space()->Capacity() : 0;
+}
+
 void Heap::FinalizeIncrementalMarkingIfComplete(
     GarbageCollectionReason gc_reason) {
   if (incremental_marking()->IsMarking() &&
@@ -3801,7 +3835,10 @@ double Heap::MonotonicallyIncreasingTimeInMs() const {
          static_cast<double>(base::Time::kMillisecondsPerSecond);
 }
 
-void Heap::VerifyNewSpaceTop() { new_space()->VerifyTop(); }
+void Heap::VerifyNewSpaceTop() {
+  if (!new_space()) return;
+  new_space()->VerifyTop();
+}
 
 bool Heap::IdleNotification(int idle_time_in_ms) {
   return IdleNotification(
@@ -4113,10 +4150,11 @@ bool Heap::Contains(HeapObject value) const {
     return false;
   }
   return HasBeenSetUp() &&
-         (new_space_->ToSpaceContains(value) || old_space_->Contains(value) ||
-          code_space_->Contains(value) || map_space_->Contains(value) ||
-          lo_space_->Contains(value) || code_lo_space_->Contains(value) ||
-          new_lo_space_->Contains(value));
+         ((new_space_ && new_space_->ToSpaceContains(value)) ||
+          old_space_->Contains(value) || code_space_->Contains(value) ||
+          map_space_->Contains(value) || lo_space_->Contains(value) ||
+          code_lo_space_->Contains(value) ||
+          (new_lo_space_ && new_lo_space_->Contains(value)));
 }
 
 bool Heap::SharedHeapContains(HeapObject value) const {
@@ -4231,7 +4269,7 @@ void Heap::Verify() {
   VerifySmisVisitor smis_visitor;
   IterateSmiRoots(&smis_visitor);
 
-  new_space_->Verify(isolate());
+  if (new_space_) new_space_->Verify(isolate());
 
   old_space_->Verify(isolate(), &visitor);
   map_space_->Verify(isolate(), &visitor);
@@ -4241,7 +4279,7 @@ void Heap::Verify() {
 
   lo_space_->Verify(isolate());
   code_lo_space_->Verify(isolate());
-  new_lo_space_->Verify(isolate());
+  if (new_lo_space_) new_lo_space_->Verify(isolate());
   VerifyStringTable(isolate());
 }
 
@@ -4430,7 +4468,7 @@ void Heap::VerifyCountersBeforeConcurrentSweeping() {
 #endif
 
 void Heap::ZapFromSpace() {
-  if (!new_space_->IsFromSpaceCommitted()) return;
+  if (!new_space_ || !new_space_->IsFromSpaceCommitted()) return;
   for (Page* page : PageRange(new_space_->from_space().first_page(), nullptr)) {
     memory_allocator()->ZapBlock(page->area_start(),
                                  page->HighWaterMark() - page->area_start(),
@@ -4891,8 +4929,8 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->end_marker = HeapStats::kEndMarker;
   *stats->ro_space_size = read_only_space_->Size();
   *stats->ro_space_capacity = read_only_space_->Capacity();
-  *stats->new_space_size = new_space_->Size();
-  *stats->new_space_capacity = new_space_->Capacity();
+  *stats->new_space_size = NewSpaceSize();
+  *stats->new_space_capacity = NewSpaceCapacity();
   *stats->old_space_size = old_space_->SizeOfObjects();
   *stats->old_space_capacity = old_space_->Capacity();
   *stats->code_space_size = code_space_->SizeOfObjects();
@@ -5155,9 +5193,9 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
   const base::Optional<size_t> global_memory_available =
       GlobalMemoryAvailable();
 
-  if (old_generation_space_available > new_space_->Capacity() &&
+  if (old_generation_space_available > NewSpaceCapacity() &&
      (!global_memory_available ||
-       global_memory_available > new_space_->Capacity())) {
+       global_memory_available > NewSpaceCapacity())) {
     return IncrementalMarkingLimit::kNoLimit;
   }
   if (ShouldOptimizeForMemoryUsage()) {
@@ -5184,8 +5222,10 @@ void Heap::EnableInlineAllocation() {
   inline_allocation_disabled_ = false;
 
   // Update inline allocation limit for new space.
-  new_space()->AdvanceAllocationObservers();
-  new_space()->UpdateInlineAllocationLimit(0);
+  if (new_space()) {
+    new_space()->AdvanceAllocationObservers();
+    new_space()->UpdateInlineAllocationLimit(0);
+  }
 }
 
 void Heap::DisableInlineAllocation() {
@@ -5193,7 +5233,9 @@ void Heap::DisableInlineAllocation() {
   inline_allocation_disabled_ = true;
 
   // Update inline allocation limit for new space.
-  new_space()->UpdateInlineAllocationLimit(0);
+  if (new_space()) {
+    new_space()->UpdateInlineAllocationLimit(0);
+  }
 
   // Update inline allocation limit for old spaces.
   PagedSpaceIterator spaces(this);
@@ -5385,15 +5427,19 @@ class StressConcurrentAllocationObserver : public AllocationObserver {
 void Heap::SetUpSpaces() {
   // Ensure SetUpFromReadOnlySpace has been ran.
   DCHECK_NOT_NULL(read_only_space_);
-  space_[NEW_SPACE] = new_space_ =
-      new NewSpace(this, memory_allocator_->data_page_allocator(),
-                   initial_semispace_size_, max_semi_space_size_);
+  if (!FLAG_single_generation) {
+    space_[NEW_SPACE] = new_space_ =
+        new NewSpace(this, memory_allocator_->data_page_allocator(),
+                     initial_semispace_size_, max_semi_space_size_);
+  }
   space_[OLD_SPACE] = old_space_ = new OldSpace(this);
   space_[CODE_SPACE] = code_space_ = new CodeSpace(this);
   space_[MAP_SPACE] = map_space_ = new MapSpace(this);
   space_[LO_SPACE] = lo_space_ = new OldLargeObjectSpace(this);
-  space_[NEW_LO_SPACE] = new_lo_space_ =
-      new NewLargeObjectSpace(this, new_space_->Capacity());
+  if (!FLAG_single_generation) {
+    space_[NEW_LO_SPACE] = new_lo_space_ =
+        new NewLargeObjectSpace(this, NewSpaceCapacity());
+  }
   space_[CODE_LO_SPACE] = code_lo_space_ = new CodeLargeObjectSpace(this);
 
   for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
@@ -5429,10 +5475,12 @@ void Heap::SetUpSpaces() {
   }
 #endif  // ENABLE_MINOR_MC
 
-  scavenge_job_.reset(new ScavengeJob());
-  scavenge_task_observer_.reset(new ScavengeTaskObserver(
-      this, ScavengeJob::YoungGenerationTaskTriggerSize(this)));
-  new_space()->AddAllocationObserver(scavenge_task_observer_.get());
+  if (!FLAG_single_generation) {
+    scavenge_job_.reset(new ScavengeJob());
+    scavenge_task_observer_.reset(new ScavengeTaskObserver(
+        this, ScavengeJob::YoungGenerationTaskTriggerSize(this)));
+    new_space()->AddAllocationObserver(scavenge_task_observer_.get());
+  }
 
   SetGetExternallyAllocatedMemoryInBytesCallback(
       DefaultGetExternallyAllocatedMemoryInBytesCallback);
@@ -5443,7 +5491,7 @@ void Heap::SetUpSpaces() {
     AddAllocationObserversToAllSpaces(stress_marking_observer_,
                                       stress_marking_observer_);
   }
-  if (FLAG_stress_scavenge > 0) {
+  if (FLAG_stress_scavenge > 0 && new_space()) {
     stress_scavenge_observer_ = new StressScavengeObserver(this);
     new_space()->AddAllocationObserver(stress_scavenge_observer_);
   }
@@ -5655,7 +5703,10 @@ void Heap::TearDown() {
     }
   }
 
-  new_space()->RemoveAllocationObserver(scavenge_task_observer_.get());
+  if (new_space()) {
+    new_space()->RemoveAllocationObserver(scavenge_task_observer_.get());
+  }
+
   scavenge_task_observer_.reset();
   scavenge_job_.reset();
 
@@ -5672,7 +5723,7 @@ void Heap::TearDown() {
     delete stress_marking_observer_;
     stress_marking_observer_ = nullptr;
   }
-  if (FLAG_stress_scavenge > 0) {
+  if (FLAG_stress_scavenge > 0 && new_space()) {
     new_space()->RemoveAllocationObserver(stress_scavenge_observer_);
     delete stress_scavenge_observer_;
     stress_scavenge_observer_ = nullptr;
@@ -6037,18 +6088,26 @@ PagedSpace* PagedSpaceIterator::Next() {
 }
 
 SpaceIterator::SpaceIterator(Heap* heap)
-    : heap_(heap), current_space_(FIRST_MUTABLE_SPACE - 1) {}
+    : heap_(heap), current_space_(FIRST_MUTABLE_SPACE) {}
 
 SpaceIterator::~SpaceIterator() = default;
 
 bool SpaceIterator::HasNext() {
-  // Iterate until no more spaces.
-  return current_space_ != LAST_SPACE;
+  while (current_space_ <= LAST_MUTABLE_SPACE) {
+    Space* space = heap_->space(current_space_);
+    if (space) return true;
+    ++current_space_;
+  }
+
+  // No more spaces left.
+  return false;
 }
 
 Space* SpaceIterator::Next() {
   DCHECK(HasNext());
-  return heap_->space(++current_space_);
+  Space* space = heap_->space(current_space_++);
+  DCHECK_NOT_NULL(space);
+  return space;
 }
 
 class HeapObjectsFilter {
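The SpaceIterator change above is what keeps heap iteration working when some slots of the space table are never populated: HasNext() now advances past null entries instead of assuming every space between FIRST_MUTABLE_SPACE and LAST_MUTABLE_SPACE exists. A simplified, self-contained sketch of the same skip-null pattern (the types below are illustrative stand-ins, not the real V8 classes):

// Callers keep writing the same HasNext()/Next() loop; absent spaces are
// skipped transparently.
#include <array>
#include <cstddef>
#include <cstdio>

struct Space { const char* name; };

class SpaceIterator {
 public:
  explicit SpaceIterator(const std::array<Space*, 4>& spaces)
      : spaces_(spaces) {}

  bool HasNext() {
    while (current_ < spaces_.size()) {
      if (spaces_[current_] != nullptr) return true;  // skip absent spaces
      ++current_;
    }
    return false;
  }

  Space* Next() { return spaces_[current_++]; }

 private:
  const std::array<Space*, 4>& spaces_;
  std::size_t current_ = 0;
};

int main() {
  Space old_space{"old_space"}, code_space{"code_space"};
  // The NEW_SPACE and NEW_LO_SPACE slots stay null in single-generation mode.
  std::array<Space*, 4> spaces{nullptr, &old_space, &code_space, nullptr};
  for (SpaceIterator it(spaces); it.HasNext();) {
    std::printf("%s\n", it.Next()->name);
  }
  return 0;
}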
@@ -199,7 +199,7 @@ class StrongRootsEntry {
 
 class AllocationResult {
  public:
-  static inline AllocationResult Retry(AllocationSpace space = NEW_SPACE) {
+  static inline AllocationResult Retry(AllocationSpace space) {
     return AllocationResult(space);
   }
 
@@ -518,6 +518,9 @@ class Heap {
   inline Address* OldSpaceAllocationTopAddress();
   inline Address* OldSpaceAllocationLimitAddress();
 
+  size_t NewSpaceSize();
+  size_t NewSpaceCapacity();
+
   // Move len non-weak tagged elements from src_slot to dst_slot of dst_object.
   // The source and destination memory ranges can overlap.
   V8_EXPORT_PRIVATE void MoveRange(HeapObject dst_object, ObjectSlot dst_slot,
@@ -1370,9 +1373,9 @@ class Heap {
     survived_since_last_expansion_ += survived;
   }
 
-  inline void UpdateNewSpaceAllocationCounter();
+  void UpdateNewSpaceAllocationCounter();
 
-  inline size_t NewSpaceAllocationCounter();
+  V8_EXPORT_PRIVATE size_t NewSpaceAllocationCounter();
 
   // This should be used only for testing.
   void set_new_space_allocation_counter(size_t new_value) {
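Dropping the default argument from AllocationResult::Retry forces every caller to state which space a failed allocation should be retried in, which is what lets single-generation mode retry in OLD_SPACE rather than silently defaulting to the possibly absent new space. A minimal sketch of the idea, using simplified stand-in types rather than the real declarations:

// Sketch (not the real AllocationResult): the default argument is gone, so a
// retry destination must always be spelled out by the caller.
enum AllocationSpace { NEW_SPACE, OLD_SPACE };

class AllocationResult {
 public:
  static AllocationResult Retry(AllocationSpace space) {  // no default now
    return AllocationResult(space);
  }
  AllocationSpace space() const { return space_; }

 private:
  explicit AllocationResult(AllocationSpace space) : space_(space) {}
  AllocationSpace space_;
};

// Caller pattern, mirroring the Heap::AllocateRaw change in this CL:
inline AllocationResult RetryFor(bool single_generation) {
  AllocationSpace space = single_generation ? OLD_SPACE : NEW_SPACE;
  return AllocationResult::Retry(space);
}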
@@ -36,7 +36,7 @@ class EvacuationAllocator {
     // Give back remaining LAB space if this EvacuationAllocator's new space LAB
     // sits right next to new space allocation top.
     const LinearAllocationArea info = new_space_lab_.CloseAndMakeIterable();
-    new_space_->MaybeFreeUnusedLab(info);
+    if (new_space_) new_space_->MaybeFreeUnusedLab(info);
   }
 
   inline AllocationResult Allocate(AllocationSpace space, int object_size,
@@ -151,6 +151,7 @@ void MarkingVerifier::VerifyMarkingOnPage(const Page* page, Address start,
 }
 
 void MarkingVerifier::VerifyMarking(NewSpace* space) {
+  if (!space) return;
   Address end = space->top();
   // The bottom position is at the start of its page. Allows us to use
   // page->area_start() as start of range on all pages.
@@ -173,6 +174,7 @@ void MarkingVerifier::VerifyMarking(PagedSpace* space) {
 }
 
 void MarkingVerifier::VerifyMarking(LargeObjectSpace* lo_space) {
+  if (!lo_space) return;
   LargeObjectSpaceObjectIterator it(lo_space);
   for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
     if (IsBlackOrGrey(obj)) {
@@ -313,6 +315,7 @@ void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) {
 }
 
 void EvacuationVerifier::VerifyEvacuation(NewSpace* space) {
+  if (!space) return;
   PageRange range(space->first_allocatable_address(), space->top());
   for (auto it = range.begin(); it != range.end();) {
     Page* page = *(it++);
@@ -559,6 +562,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(PagedSpace* space) {
 }
 
 void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
+  if (!space) return;
   for (Page* p : PageRange(space->first_allocatable_address(), space->top())) {
     CHECK(non_atomic_marking_state()->bitmap(p)->IsClean());
     CHECK_EQ(0, non_atomic_marking_state()->live_bytes(p));
@@ -566,6 +570,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
 }
 
 void MarkCompactCollector::VerifyMarkbitsAreClean(LargeObjectSpace* space) {
+  if (!space) return;
   LargeObjectSpaceObjectIterator it(space);
   for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
     CHECK(non_atomic_marking_state()->IsWhite(obj));
@@ -874,9 +879,14 @@ void MarkCompactCollector::Prepare() {
       [](LocalHeap* local_heap) { local_heap->FreeLinearAllocationArea(); });
 
   // All objects are guaranteed to be initialized in atomic pause
-  heap()->new_lo_space()->ResetPendingObject();
-  DCHECK_EQ(heap()->new_space()->top(),
-            heap()->new_space()->original_top_acquire());
+  if (heap()->new_lo_space()) {
+    heap()->new_lo_space()->ResetPendingObject();
+  }
+
+  if (heap()->new_space()) {
+    DCHECK_EQ(heap()->new_space()->top(),
+              heap()->new_space()->original_top_acquire());
+  }
 }
 
 void MarkCompactCollector::FinishConcurrentMarking() {
@@ -2862,18 +2872,23 @@ static String UpdateReferenceInExternalStringTableEntry(Heap* heap,
 void MarkCompactCollector::EvacuatePrologue() {
   // New space.
   NewSpace* new_space = heap()->new_space();
-  // Append the list of new space pages to be processed.
-  for (Page* p :
-       PageRange(new_space->first_allocatable_address(), new_space->top())) {
-    new_space_evacuation_pages_.push_back(p);
+
+  if (new_space) {
+    // Append the list of new space pages to be processed.
+    for (Page* p :
+         PageRange(new_space->first_allocatable_address(), new_space->top())) {
+      new_space_evacuation_pages_.push_back(p);
+    }
+    new_space->Flip();
+    new_space->ResetLinearAllocationArea();
+
+    DCHECK_EQ(new_space->Size(), 0);
   }
-  new_space->Flip();
-  new_space->ResetLinearAllocationArea();
-
-  DCHECK_EQ(new_space->Size(), 0);
 
-  heap()->new_lo_space()->Flip();
-  heap()->new_lo_space()->ResetPendingObject();
+  if (heap()->new_lo_space()) {
+    heap()->new_lo_space()->Flip();
+    heap()->new_lo_space()->ResetPendingObject();
+  }
 
   // Old space.
   DCHECK(old_space_evacuation_pages_.empty());
@@ -2884,18 +2899,27 @@ void MarkCompactCollector::EvacuatePrologue() {
 
 void MarkCompactCollector::EvacuateEpilogue() {
   aborted_evacuation_candidates_.clear();
+
   // New space.
-  heap()->new_space()->set_age_mark(heap()->new_space()->top());
-  DCHECK_IMPLIES(FLAG_always_promote_young_mc,
-                 heap()->new_space()->Size() == 0);
+  if (heap()->new_space()) {
+    heap()->new_space()->set_age_mark(heap()->new_space()->top());
+    DCHECK_IMPLIES(FLAG_always_promote_young_mc,
+                   heap()->new_space()->Size() == 0);
+  }
+
   // Deallocate unmarked large objects.
   heap()->lo_space()->FreeUnmarkedObjects();
   heap()->code_lo_space()->FreeUnmarkedObjects();
-  heap()->new_lo_space()->FreeUnmarkedObjects();
+  if (heap()->new_lo_space()) {
+    heap()->new_lo_space()->FreeUnmarkedObjects();
+  }
+
   // Old space. Deallocate evacuated candidate pages.
   ReleaseEvacuationCandidates();
+
   // Give pages that are queued to be freed back to the OS.
   heap()->memory_allocator()->unmapper()->FreeQueuedChunks();
+
 #ifdef DEBUG
   // Old-to-old slot sets must be empty after evacuation.
   for (Page* p : *heap()->old_space()) {
@@ -3295,19 +3319,21 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
   }
 
   // Promote young generation large objects.
-  IncrementalMarking::NonAtomicMarkingState* marking_state =
-      heap()->incremental_marking()->non_atomic_marking_state();
-
-  for (auto it = heap()->new_lo_space()->begin();
-       it != heap()->new_lo_space()->end();) {
-    LargePage* current = *it;
-    it++;
-    HeapObject object = current->GetObject();
-    DCHECK(!marking_state->IsGrey(object));
-    if (marking_state->IsBlack(object)) {
-      heap_->lo_space()->PromoteNewLargeObject(current);
-      current->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
-      evacuation_items.emplace_back(ParallelWorkItem{}, current);
-    }
-  }
+  if (heap()->new_lo_space()) {
+    IncrementalMarking::NonAtomicMarkingState* marking_state =
+        heap()->incremental_marking()->non_atomic_marking_state();
+
+    for (auto it = heap()->new_lo_space()->begin();
+         it != heap()->new_lo_space()->end();) {
+      LargePage* current = *it;
+      it++;
+      HeapObject object = current->GetObject();
+      DCHECK(!marking_state->IsGrey(object));
+      if (marking_state->IsBlack(object)) {
+        heap_->lo_space()->PromoteNewLargeObject(current);
+        current->SetFlag(Page::PAGE_NEW_OLD_PROMOTION);
+        evacuation_items.emplace_back(ParallelWorkItem{}, current);
+      }
+    }
+  }
 
@@ -3464,7 +3490,7 @@ void MarkCompactCollector::Evacuate() {
 
   UpdatePointersAfterEvacuation();
 
-  {
+  if (heap()->new_space()) {
     TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_REBALANCE);
     if (!heap()->new_space()->Rebalance()) {
       heap()->FatalProcessOutOfMemory("NewSpace::Rebalance");
@@ -3836,6 +3862,8 @@ MarkCompactCollector::CreateRememberedSetUpdatingItem(
 
 int MarkCompactCollectorBase::CollectToSpaceUpdatingItems(
     std::vector<std::unique_ptr<UpdatingItem>>* items) {
+  if (!heap()->new_space()) return 0;
+
   // Seed to space pages.
   const Address space_start = heap()->new_space()->first_allocatable_address();
   const Address space_end = heap()->new_space()->top();
@@ -112,7 +112,7 @@ AllocationResult NewSpace::AllocateFastUnaligned(int size_in_bytes,
                                                  AllocationOrigin origin) {
   Address top = allocation_info_.top();
   if (allocation_info_.limit() < top + size_in_bytes) {
-    return AllocationResult::Retry();
+    return AllocationResult::Retry(NEW_SPACE);
   }
 
   HeapObject obj = HeapObject::FromAddress(top);
@@ -137,7 +137,7 @@ AllocationResult NewSpace::AllocateFastAligned(
 
   if (allocation_info_.limit() - top <
       static_cast<uintptr_t>(aligned_size_in_bytes)) {
-    return AllocationResult::Retry();
+    return AllocationResult::Retry(NEW_SPACE);
   }
 
   HeapObject obj = HeapObject::FromAddress(top);
@@ -4,6 +4,7 @@
 
 #include "src/heap/new-spaces.h"
 
+#include "src/common/globals.h"
 #include "src/heap/array-buffer-sweeper.h"
 #include "src/heap/heap-inl.h"
 #include "src/heap/incremental-marking.h"
@@ -630,7 +631,7 @@ AllocationResult NewSpace::AllocateRawUnaligned(int size_in_bytes,
                                                 AllocationOrigin origin) {
   DCHECK(!FLAG_enable_third_party_heap);
   if (!EnsureAllocation(size_in_bytes, kWordAligned)) {
-    return AllocationResult::Retry();
+    return AllocationResult::Retry(NEW_SPACE);
   }
 
   DCHECK_EQ(allocation_info_.start(), allocation_info_.top());
@@ -649,7 +650,7 @@ AllocationResult NewSpace::AllocateRawAligned(int size_in_bytes,
                                               AllocationOrigin origin) {
   DCHECK(!FLAG_enable_third_party_heap);
   if (!EnsureAllocation(size_in_bytes, alignment)) {
-    return AllocationResult::Retry();
+    return AllocationResult::Retry(NEW_SPACE);
   }
 
   DCHECK_EQ(allocation_info_.start(), allocation_info_.top());
@@ -141,7 +141,8 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned(
   int filler_size = Heap::GetFillToAlign(current_top, alignment);
 
   Address new_top = current_top + filler_size + size_in_bytes;
-  if (new_top > allocation_info_.limit()) return AllocationResult::Retry();
+  if (new_top > allocation_info_.limit())
+    return AllocationResult::Retry(NEW_SPACE);
 
   allocation_info_.set_top(new_top);
   if (filler_size > 0) {
@@ -392,6 +392,7 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
 }
 
 TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
+  if (FLAG_single_generation) return;
   CcTest::InitializeVM();
   LocalContext env;
   v8::Isolate* isolate = env->GetIsolate();
@@ -413,6 +414,7 @@ TEST(ArrayBuffer_ExternalBackingStoreSizeIncreases) {
 }
 
 TEST(ArrayBuffer_ExternalBackingStoreSizeDecreases) {
+  if (FLAG_single_generation) return;
   FLAG_concurrent_array_buffer_sweeping = false;
   CcTest::InitializeVM();
   LocalContext env;
@@ -130,6 +130,7 @@ TEST(ExternalString_ExternalBackingStoreSizeIncreasesMarkCompact) {
 }
 
 TEST(ExternalString_ExternalBackingStoreSizeIncreasesAfterExternalization) {
+  if (FLAG_single_generation) return;
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   LocalContext env;
@@ -365,6 +365,8 @@ TEST(Tagging) {
 
 
 TEST(GarbageCollection) {
+  if (FLAG_single_generation) return;
+
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
   Factory* factory = isolate->factory();
@@ -1557,7 +1559,7 @@ TEST(TestInternalWeakLists) {
   // Some flags turn Scavenge collections into Mark-sweep collections
   // and hence are incompatible with this test case.
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking)
+      FLAG_stress_incremental_marking || FLAG_single_generation)
     return;
   FLAG_retain_maps_for_n_gc = 0;
 
@@ -2501,7 +2503,7 @@ TEST(OptimizedPretenuringAllocationFolding) {
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking)
+      FLAG_stress_incremental_marking || FLAG_single_generation)
     return;
   v8::HandleScope scope(CcTest::isolate());
   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
@@ -2551,7 +2553,7 @@ TEST(OptimizedPretenuringObjectArrayLiterals) {
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking) {
+      FLAG_stress_incremental_marking || FLAG_single_generation) {
     return;
   }
   v8::HandleScope scope(CcTest::isolate());
@@ -2632,7 +2634,7 @@ TEST(OptimizedPretenuringMixedInObjectProperties) {
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking)
+      FLAG_stress_incremental_marking || FLAG_single_generation)
     return;
   v8::HandleScope scope(CcTest::isolate());
 
@@ -2679,7 +2681,7 @@ TEST(OptimizedPretenuringDoubleArrayProperties) {
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking)
+      FLAG_stress_incremental_marking || FLAG_single_generation)
     return;
   v8::HandleScope scope(CcTest::isolate());
 
@@ -2718,7 +2720,7 @@ TEST(OptimizedPretenuringDoubleArrayLiterals) {
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking)
+      FLAG_stress_incremental_marking || FLAG_single_generation)
    return;
   v8::HandleScope scope(CcTest::isolate());
 
@@ -2756,7 +2758,7 @@ TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking)
+      FLAG_stress_incremental_marking || FLAG_single_generation)
     return;
   v8::HandleScope scope(CcTest::isolate());
   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
@@ -2806,7 +2808,7 @@ TEST(OptimizedPretenuringNestedObjectLiterals) {
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking)
+      FLAG_stress_incremental_marking || FLAG_single_generation)
     return;
   v8::HandleScope scope(CcTest::isolate());
   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
@@ -2856,7 +2858,7 @@ TEST(OptimizedPretenuringNestedDoubleLiterals) {
   CcTest::InitializeVM();
   if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
   if (FLAG_gc_global || FLAG_stress_compaction ||
-      FLAG_stress_incremental_marking)
+      FLAG_stress_incremental_marking || FLAG_single_generation)
     return;
   v8::HandleScope scope(CcTest::isolate());
   v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
@@ -6635,7 +6637,7 @@ Isolate* oom_isolate = nullptr;
 
 void OOMCallback(const char* location, bool is_heap_oom) {
   Heap* heap = oom_isolate->heap();
-  size_t kSlack = heap->new_space()->Capacity();
+  size_t kSlack = heap->new_space() ? heap->new_space()->Capacity() : 0;
   CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
   CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack);
   base::OS::ExitProcess(0);
@@ -6798,8 +6800,10 @@ size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
   state->oom_triggered = true;
   state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
   state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
-  state->new_space_capacity_at_oom = heap->new_space()->Capacity();
-  state->new_lo_space_size_at_oom = heap->new_lo_space()->Size();
+  state->new_space_capacity_at_oom =
+      heap->new_space() ? heap->new_space()->Capacity() : 0;
+  state->new_lo_space_size_at_oom =
+      heap->new_lo_space() ? heap->new_lo_space()->Size() : 0;
   state->current_heap_limit = current_heap_limit;
   state->initial_heap_limit = initial_heap_limit;
   return initial_heap_limit + 100 * MB;
@@ -7442,6 +7446,7 @@ TEST(LongTaskStatsFullIncremental) {
 }
 
 TEST(LongTaskStatsYoung) {
+  if (FLAG_single_generation) return;
   CcTest::InitializeVM();
   v8::Isolate* isolate = CcTest::isolate();
   v8::HandleScope scope(CcTest::isolate());
@@ -350,6 +350,8 @@ TEST(EmptyWeakArray) {
 }
 
 TEST(WeakArraysBasic) {
+  if (FLAG_single_generation) return;
+
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
@@ -363,7 +365,7 @@ TEST(WeakArraysBasic) {
   CHECK(!array->IsFixedArray());
   CHECK_EQ(array->length(), length);
 
-  if (!FLAG_single_generation) CHECK(Heap::InYoungGeneration(*array));
+  CHECK(Heap::InYoungGeneration(*array));
 
   for (int i = 0; i < length; ++i) {
     HeapObject heap_object;
@@ -419,6 +421,8 @@ TEST(WeakArraysBasic) {
 }
 
 TEST(WeakArrayListBasic) {
+  if (FLAG_single_generation) return;
+
   ManualGCScope manual_gc_scope;
   CcTest::InitializeVM();
   Isolate* isolate = CcTest::i_isolate();
@@ -388,6 +388,8 @@ TEST(TracedGlobalToUnmodifiedJSApiObjectDiesOnScavenge) {
 }
 
 TEST(TracedGlobalToJSApiObjectWithIdentityHashSurvivesScavenge) {
+  if (FLAG_single_generation) return;
+
   ManualGCScope manual_gc;
   CcTest::InitializeVM();
   Isolate* i_isolate = CcTest::i_isolate();
@@ -441,6 +443,7 @@ TEST(WeakHandleToUnmodifiedJSApiObjectSurvivesMarkCompactWhenInHandle) {
 }
 
 TEST(TracedGlobalToJSApiObjectWithModifiedMapSurvivesScavenge) {
+  if (FLAG_single_generation) return;
   CcTest::InitializeVM();
   v8::Isolate* isolate = CcTest::isolate();
   LocalContext context;
@@ -462,6 +465,7 @@ TEST(TracedGlobalToJSApiObjectWithModifiedMapSurvivesScavenge) {
 }
 
 TEST(TracedGlobalTOJsApiObjectWithElementsSurvivesScavenge) {
+  if (FLAG_single_generation) return;
   CcTest::InitializeVM();
   v8::Isolate* isolate = CcTest::isolate();
   LocalContext context;
@@ -3919,7 +3919,7 @@ TEST(SamplingHeapProfilerPretenuredInlineAllocations) {
   if (!CcTest::i_isolate()->use_optimizer() || i::FLAG_always_opt) return;
   if (i::FLAG_gc_global || i::FLAG_stress_compaction ||
       i::FLAG_stress_incremental_marking ||
-      i::FLAG_stress_concurrent_allocation) {
+      i::FLAG_stress_concurrent_allocation || i::FLAG_single_generation) {
     return;
   }
 
@@ -61,7 +61,7 @@ static void SetUpNewSpaceWithPoisonedMementoAtTop() {
 
 TEST(Regress340063) {
   CcTest::InitializeVM();
-  if (!i::FLAG_allocation_site_pretenuring) return;
+  if (!i::FLAG_allocation_site_pretenuring || FLAG_single_generation) return;
   v8::HandleScope scope(CcTest::isolate());
 
   SetUpNewSpaceWithPoisonedMementoAtTop();
@@ -74,7 +74,7 @@ TEST(Regress340063) {
 
 TEST(Regress470390) {
   CcTest::InitializeVM();
-  if (!i::FLAG_allocation_site_pretenuring) return;
+  if (!i::FLAG_allocation_site_pretenuring || FLAG_single_generation) return;
   v8::HandleScope scope(CcTest::isolate());
 
   SetUpNewSpaceWithPoisonedMementoAtTop();
@@ -91,7 +91,7 @@ TEST(Regress470390) {
 
 TEST(BadMementoAfterTopForceScavenge) {
   CcTest::InitializeVM();
-  if (!i::FLAG_allocation_site_pretenuring) return;
+  if (!i::FLAG_allocation_site_pretenuring || FLAG_single_generation) return;
   v8::HandleScope scope(CcTest::isolate());
 
   SetUpNewSpaceWithPoisonedMementoAtTop();