diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 96e3692132..211982cbc1 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -785,6 +785,10 @@ DEFINE_BOOL(optimize_ephemerons, true,
 DEFINE_NEG_NEG_IMPLICATION(optimize_ephemerons, parallel_ephemeron_marking)
 DEFINE_NEG_NEG_IMPLICATION(optimize_ephemerons, parallel_ephemeron_visiting)
 
+DEFINE_BOOL(young_generation_large_objects, false,
+            "allocates large objects by default in the young generation large "
+            "object space")
+
 // assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
 DEFINE_BOOL(debug_code, DEBUG_BOOL,
             "generate extra code (assertions) for debugging")
diff --git a/src/globals.h b/src/globals.h
index 0a2a5ee27a..28eebe0ddc 100644
--- a/src/globals.h
+++ b/src/globals.h
@@ -536,6 +536,7 @@ class MapSpace;
 class MarkCompactCollector;
 class MaybeObject;
 class NewSpace;
+class NewLargeObjectSpace;
 class Object;
 class OldSpace;
 class ParameterCount;
@@ -570,14 +571,16 @@ typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
 enum AllocationSpace {
   // TODO(v8:7464): Actually map this space's memory as read-only.
   RO_SPACE,    // Immortal, immovable and immutable objects,
-  NEW_SPACE,   // Semispaces collected with copying collector.
-  OLD_SPACE,   // May contain pointers to new space.
-  CODE_SPACE,  // No pointers to new space, marked executable.
-  MAP_SPACE,   // Only and all map objects.
-  LO_SPACE,    // Promoted large objects.
+  NEW_SPACE,   // Young generation semispaces for regular objects collected with
+               // Scavenger.
+  OLD_SPACE,   // Old generation regular object space.
+  CODE_SPACE,  // Old generation code object space, marked executable.
+  MAP_SPACE,   // Old generation map object space, non-movable.
+  LO_SPACE,    // Old generation large object space.
+  NEW_LO_SPACE,  // Young generation large object space.
 
   FIRST_SPACE = RO_SPACE,
-  LAST_SPACE = LO_SPACE,
+  LAST_SPACE = NEW_LO_SPACE,
   FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
   LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
 };
diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index ca3056640e..9f375db064 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -183,7 +183,11 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     }
   } else if (LO_SPACE == space) {
     DCHECK(large_object);
-    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
+    if (FLAG_young_generation_large_objects) {
+      allocation = new_lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
+    } else {
+      allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
+    }
   } else if (MAP_SPACE == space) {
     allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
   } else if (RO_SPACE == space) {
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 05ae12b71f..a0910c39e3 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -165,6 +165,7 @@ Heap::Heap()
       code_space_(nullptr),
       map_space_(nullptr),
       lo_space_(nullptr),
+      new_lo_space_(nullptr),
       read_only_space_(nullptr),
       write_protect_code_memory_(false),
       code_space_memory_modification_scope_depth_(0),
@@ -673,6 +674,8 @@ const char* Heap::GetSpaceName(int idx) {
       return "code_space";
     case LO_SPACE:
       return "large_object_space";
+    case NEW_LO_SPACE:
+      return "new_large_object_space";
     case RO_SPACE:
       return "read_only_space";
     default:
@@ -3647,6 +3650,8 @@ bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
       return map_space_->Contains(value);
     case LO_SPACE:
       return lo_space_->Contains(value);
+    case NEW_LO_SPACE:
+      return new_lo_space_->Contains(value);
     case RO_SPACE:
       return read_only_space_->Contains(value);
   }
@@ -3670,13 +3675,14 @@ bool Heap::InSpaceSlow(Address addr, AllocationSpace space) {
       return map_space_->ContainsSlow(addr);
     case LO_SPACE:
       return lo_space_->ContainsSlow(addr);
+    case NEW_LO_SPACE:
+      return new_lo_space_->ContainsSlow(addr);
     case RO_SPACE:
       return read_only_space_->ContainsSlow(addr);
   }
   UNREACHABLE();
 }
 
-
 bool Heap::IsValidAllocationSpace(AllocationSpace space) {
   switch (space) {
     case NEW_SPACE:
@@ -3684,6 +3690,7 @@ bool Heap::IsValidAllocationSpace(AllocationSpace space) {
     case CODE_SPACE:
     case MAP_SPACE:
     case LO_SPACE:
+    case NEW_LO_SPACE:
     case RO_SPACE:
       return true;
     default:
@@ -4593,6 +4600,7 @@ void Heap::SetUp() {
   space_[CODE_SPACE] = code_space_ = new CodeSpace(this);
   space_[MAP_SPACE] = map_space_ = new MapSpace(this);
   space_[LO_SPACE] = lo_space_ = new LargeObjectSpace(this);
+  space_[NEW_LO_SPACE] = new_lo_space_ = new NewLargeObjectSpace(this);
 
   // Set up the seed that is used to randomize the string hash function.
   DCHECK_EQ(Smi::kZero, hash_seed());
@@ -5529,6 +5537,8 @@ const char* AllocationSpaceName(AllocationSpace space) {
       return "MAP_SPACE";
     case LO_SPACE:
       return "LO_SPACE";
+    case NEW_LO_SPACE:
+      return "NEW_LO_SPACE";
     case RO_SPACE:
       return "RO_SPACE";
     default:
@@ -5602,6 +5612,7 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
       return dst == CODE_SPACE && type == CODE_TYPE;
     case MAP_SPACE:
     case LO_SPACE:
+    case NEW_LO_SPACE:
    case RO_SPACE:
       return false;
   }
diff --git a/src/heap/heap.h b/src/heap/heap.h
index 4521d5dd9b..c30b791591 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -1012,6 +1012,7 @@ class Heap {
   CodeSpace* code_space() { return code_space_; }
   MapSpace* map_space() { return map_space_; }
   LargeObjectSpace* lo_space() { return lo_space_; }
+  NewLargeObjectSpace* new_lo_space() { return new_lo_space_; }
   ReadOnlySpace* read_only_space() { return read_only_space_; }
 
   inline PagedSpace* paged_space(int idx);
@@ -2290,6 +2291,7 @@ class Heap {
   CodeSpace* code_space_;
   MapSpace* map_space_;
   LargeObjectSpace* lo_space_;
+  NewLargeObjectSpace* new_lo_space_;
   ReadOnlySpace* read_only_space_;
   // Map from the space id to the space.
   Space* space_[LAST_SPACE + 1];
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index 3300894738..5b5e14cac2 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -3250,7 +3250,10 @@ HeapObject* LargeObjectIterator::Next() {
 // LargeObjectSpace
 
 LargeObjectSpace::LargeObjectSpace(Heap* heap)
-    : Space(heap, LO_SPACE),  // Managed on a per-allocation basis
+    : LargeObjectSpace(heap, LO_SPACE) {}
+
+LargeObjectSpace::LargeObjectSpace(Heap* heap, AllocationSpace id)
+    : Space(heap, id),
       size_(0),
       page_count_(0),
       objects_size_(0),
@@ -3547,5 +3550,13 @@ void Page::Print() {
 }
 #endif  // DEBUG
 
+
+NewLargeObjectSpace::NewLargeObjectSpace(Heap* heap)
+    : LargeObjectSpace(heap, NEW_LO_SPACE) {}
+
+size_t NewLargeObjectSpace::Available() {
+  // TODO(hpayer): Update as soon as we have a growing strategy.
+  return 0;
+}
 }  // namespace internal
 }  // namespace v8
diff --git a/src/heap/spaces.h b/src/heap/spaces.h
index 37c84be3ce..d03075713f 100644
--- a/src/heap/spaces.h
+++ b/src/heap/spaces.h
@@ -2946,6 +2946,8 @@ class LargeObjectSpace : public Space {
   typedef LargePageIterator iterator;
 
   explicit LargeObjectSpace(Heap* heap);
+  LargeObjectSpace(Heap* heap, AllocationSpace id);
+
   ~LargeObjectSpace() override { TearDown(); }
 
   // Releases internal resources, frees objects in this space.
@@ -3039,6 +3041,13 @@ class LargeObjectSpace : public Space {
   friend class LargeObjectIterator;
 };
 
+class NewLargeObjectSpace : public LargeObjectSpace {
+ public:
+  explicit NewLargeObjectSpace(Heap* heap);
+
+  // Available bytes for objects in this space.
+  size_t Available() override;
+};
 
 class LargeObjectIterator : public ObjectIterator {
  public:
diff --git a/src/snapshot/serializer-common.h b/src/snapshot/serializer-common.h
index 566046abd2..9c18e5496c 100644
--- a/src/snapshot/serializer-common.h
+++ b/src/snapshot/serializer-common.h
@@ -105,8 +105,12 @@ class SerializerDeserializer : public RootVisitor {
   // No reservation for large object space necessary.
   // We also handle map space differenly.
   STATIC_ASSERT(MAP_SPACE == CODE_SPACE + 1);
+
+  // We do not support young generation large objects.
+  STATIC_ASSERT(LAST_SPACE == NEW_LO_SPACE);
+  STATIC_ASSERT(LAST_SPACE - 1 == LO_SPACE);
   static const int kNumberOfPreallocatedSpaces = CODE_SPACE + 1;
-  static const int kNumberOfSpaces = LAST_SPACE + 1;
+  static const int kNumberOfSpaces = LO_SPACE + 1;
 
  protected:
   static bool CanBeDeferred(HeapObject* o);
diff --git a/src/snapshot/serializer.cc b/src/snapshot/serializer.cc
index c51f9bc179..2591b94a29 100644
--- a/src/snapshot/serializer.cc
+++ b/src/snapshot/serializer.cc
@@ -647,6 +647,7 @@ void Serializer::ObjectSerializer::SerializeObject() {
   Map* map = object_->map();
   AllocationSpace space =
       MemoryChunk::FromAddress(object_->address())->owner()->identity();
+  DCHECK(space != NEW_LO_SPACE);
   SerializePrologue(space, size, map);
 
   // Serialize the rest of the object.
diff --git a/test/cctest/heap/test-heap.cc b/test/cctest/heap/test-heap.cc
index a2480253a8..62a4fb6103 100644
--- a/test/cctest/heap/test-heap.cc
+++ b/test/cctest/heap/test-heap.cc
@@ -5656,6 +5656,18 @@ TEST(Regress618958) {
                     !heap->incremental_marking()->IsStopped()));
 }
 
+TEST(YoungGenerationLargeObjectAllocation) {
+  FLAG_young_generation_large_objects = true;
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  Heap* heap = CcTest::heap();
+  Isolate* isolate = heap->isolate();
+
+  Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
+  MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
+  CHECK(chunk->owner()->identity() == NEW_LO_SPACE);
+}
+
 TEST(UncommitUnusedLargeObjectMemory) {
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index e6c7b3176f..256b755410 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -18922,6 +18922,9 @@ TEST(GetHeapSpaceStatistics) {
     v8::HeapSpaceStatistics space_statistics;
     isolate->GetHeapSpaceStatistics(&space_statistics, i);
     CHECK_NOT_NULL(space_statistics.space_name());
+    if (strcmp(space_statistics.space_name(), "new_large_object_space") == 0) {
+      continue;
+    }
     CHECK_GT(space_statistics.space_size(), 0u);
     total_size += space_statistics.space_size();
     CHECK_GT(space_statistics.space_used_size(), 0u);
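For readers trying the patch locally, a minimal sketch of how an embedder could observe the new space through V8's public heap-statistics API. This snippet is not part of the patch; it only illustrates the "new_large_object_space" name added above and assumes an already-initialized v8::Isolate* (the include path depends on the embedding setup).

// Illustrative sketch only, not part of the patch above. Assumes an embedder
// that already holds an initialized v8::Isolate* named `isolate`.
#include <cstdio>
#include "include/v8.h"

void PrintHeapSpaces(v8::Isolate* isolate) {
  for (size_t i = 0; i < isolate->NumberOfHeapSpaces(); i++) {
    v8::HeapSpaceStatistics stats;
    isolate->GetHeapSpaceStatistics(&stats, i);
    // With this patch applied, one of the reported names is
    // "new_large_object_space"; its sizes stay at zero unless
    // --young_generation_large_objects is enabled.
    std::printf("%s: size=%zu used=%zu\n", stats.space_name(),
                stats.space_size(), stats.space_used_size());
  }
}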