[heap] Move RO_SPACE to beginning of AllocationSpace

Moves RO_SPACE to the front of the AllocationSpace enum so that
iterations over the spaces for pre-allocation no longer miss it. Being
at the start of the enum, it is still never visited by the sweeper
code, which iterates from FIRST_GROWABLE_PAGED_SPACE to
LAST_GROWABLE_PAGED_SPACE (renamed from FIRST_PAGED_SPACE and
LAST_PAGED_SPACE).

Bug: v8:7464
Change-Id: I480ba784afbd878552d1cb7f9f5fa57c3b55e004
Reviewed-on: https://chromium-review.googlesource.com/973604
Commit-Queue: Dan Elphick <delphick@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Yang Guo <yangguo@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52177}
Dan Elphick, 2018-03-23 11:42:41 +00:00, committed by Commit Bot
commit 32d0e02639 (parent 68b4026c20)
8 changed files with 41 additions and 48 deletions
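
To make the reasoning in the message concrete, here is a minimal sketch
(not code from this change; Visit and Sweep are placeholder functions)
of the two iteration patterns it refers to, in the spirit of
Heap::ReserveSpace and Sweeper::SweeperTask below:

    // Full-space iteration, e.g. when pre-allocating reservations, walks
    // the enum from FIRST_SPACE; a space sorted before FIRST_SPACE would
    // simply be missed.
    for (int space = FIRST_SPACE; space <= LAST_SPACE; space++) {
      Visit(static_cast<AllocationSpace>(space));
    }

    // Sweeping only covers the growable paged spaces; RO_SPACE, now placed
    // before OLD_SPACE, stays outside this range and is never swept.
    for (int space = FIRST_GROWABLE_PAGED_SPACE;
         space <= LAST_GROWABLE_PAGED_SPACE; space++) {
      Sweep(static_cast<AllocationSpace>(space));
    }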

@@ -523,20 +523,21 @@ typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
 // NOTE: SpaceIterator depends on AllocationSpace enumeration values being
 // consecutive.
 enum AllocationSpace {
+  // TODO(v8:7464): Actually map this space's memory as read-only.
+  RO_SPACE,    // Immortal, immovable and immutable objects.
   NEW_SPACE,   // Semispaces collected with copying collector.
   OLD_SPACE,   // May contain pointers to new space.
   CODE_SPACE,  // No pointers to new space, marked executable.
   MAP_SPACE,   // Only and all map objects.
   LO_SPACE,    // Promoted large objects.
-  // TODO(v8:7464): Actually map this space's memory as read-only.
-  RO_SPACE,    // Immortal, immovable and immutable objects.

-  FIRST_SPACE = NEW_SPACE,
-  LAST_SPACE = RO_SPACE,
-  FIRST_PAGED_SPACE = OLD_SPACE,
-  LAST_PAGED_SPACE = MAP_SPACE
+  FIRST_SPACE = RO_SPACE,
+  LAST_SPACE = LO_SPACE,
+  FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
+  LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
 };

 constexpr int kSpaceTagSize = 4;
+STATIC_ASSERT(FIRST_SPACE == 0);

 enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };
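
For reference, the numeric values implied by the reordered enum (read
directly off the diff above):

    // RO_SPACE   = 0  (FIRST_SPACE)
    // NEW_SPACE  = 1
    // OLD_SPACE  = 2  (FIRST_GROWABLE_PAGED_SPACE)
    // CODE_SPACE = 3
    // MAP_SPACE  = 4  (LAST_GROWABLE_PAGED_SPACE)
    // LO_SPACE   = 5  (LAST_SPACE)

The new STATIC_ASSERT(FIRST_SPACE == 0) centralizes the assumption that
the first space has value 0, replacing the scattered
STATIC_ASSERT(NEW_SPACE == 0) checks deleted from the serializer files
below.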

@@ -1479,8 +1479,8 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
   static const int kThreshold = 20;
   while (gc_performed && counter++ < kThreshold) {
     gc_performed = false;
-    for (int space = NEW_SPACE; space < SerializerDeserializer::kNumberOfSpaces;
-         space++) {
+    for (int space = FIRST_SPACE;
+         space < SerializerDeserializer::kNumberOfSpaces; space++) {
       Reservation* reservation = &reservations[space];
       DCHECK_LE(1, reservation->size());
       if (reservation->at(0).size == 0) continue;
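
This loop is the pre-allocation iteration the commit message refers to:
starting it at FIRST_SPACE instead of NEW_SPACE means the read-only
space's reservation is no longer skipped when the heap reserves space
for deserialization. Worked through with the enum values above:

    // Visit order after the change: RO_SPACE(0), NEW_SPACE(1), OLD_SPACE(2),
    // CODE_SPACE(3), MAP_SPACE(4), ... up to kNumberOfSpaces.
    // Starting at NEW_SPACE(1) would have skipped RO_SPACE(0) entirely.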
@@ -5308,7 +5308,8 @@ bool Heap::ConfigureHeap(size_t max_semi_space_size_in_kb,
   }

   // The old generation is paged and needs at least one page for each space.
-  int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
+  int paged_space_count =
+      LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
   initial_max_old_generation_size_ = max_old_generation_size_ =
       Max(static_cast<size_t>(paged_space_count * Page::kPageSize),
           max_old_generation_size_);
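
The renamed bounds leave the arithmetic unchanged:
LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1 =
MAP_SPACE - OLD_SPACE + 1 = 4 - 2 + 1 = 3, i.e. one page each for
OLD_SPACE, CODE_SPACE and MAP_SPACE, exactly the spaces the old
FIRST_PAGED_SPACE/LAST_PAGED_SPACE range covered.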

@@ -78,16 +78,18 @@ class Sweeper::SweeperTask final : public CancelableTask {
   void RunInternal() final {
     TRACE_BACKGROUND_GC(tracer_,
                         GCTracer::BackgroundScope::MC_BACKGROUND_SWEEPING);
-    DCHECK_GE(space_to_start_, FIRST_PAGED_SPACE);
-    DCHECK_LE(space_to_start_, LAST_PAGED_SPACE);
-    const int offset = space_to_start_ - FIRST_PAGED_SPACE;
-    const int num_spaces = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
+    DCHECK_GE(space_to_start_, FIRST_GROWABLE_PAGED_SPACE);
+    DCHECK_LE(space_to_start_, LAST_GROWABLE_PAGED_SPACE);
+    const int offset = space_to_start_ - FIRST_GROWABLE_PAGED_SPACE;
+    const int num_spaces =
+        LAST_GROWABLE_PAGED_SPACE - FIRST_GROWABLE_PAGED_SPACE + 1;
     for (int i = 0; i < num_spaces; i++) {
-      const int space_id = FIRST_PAGED_SPACE + ((i + offset) % num_spaces);
+      const int space_id =
+          FIRST_GROWABLE_PAGED_SPACE + ((i + offset) % num_spaces);
       // Do not sweep code space concurrently.
       if (static_cast<AllocationSpace>(space_id) == CODE_SPACE) continue;
-      DCHECK_GE(space_id, FIRST_PAGED_SPACE);
-      DCHECK_LE(space_id, LAST_PAGED_SPACE);
+      DCHECK_GE(space_id, FIRST_GROWABLE_PAGED_SPACE);
+      DCHECK_LE(space_id, LAST_GROWABLE_PAGED_SPACE);
       sweeper_->SweepSpaceFromTask(static_cast<AllocationSpace>(space_id));
     }
     num_sweeping_tasks_->Decrement(1);
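
The offset/modulo arithmetic staggers each task's starting space so
that concurrent sweeper tasks begin on different spaces. Worked through
with the enum values above:

    // num_spaces = 4 - 2 + 1 = 3
    // space_to_start_ = OLD_SPACE  (offset 0): OLD_SPACE, CODE_SPACE*, MAP_SPACE
    // space_to_start_ = CODE_SPACE (offset 1): CODE_SPACE*, MAP_SPACE, OLD_SPACE
    // space_to_start_ = MAP_SPACE  (offset 2): MAP_SPACE, OLD_SPACE, CODE_SPACE*
    // (* skipped by the "Do not sweep code space concurrently" check)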

@@ -123,7 +123,7 @@ class Sweeper {
   class IterabilityTask;
   class SweeperTask;

-  static const int kNumberOfSweepingSpaces = LAST_PAGED_SPACE + 1;
+  static const int kNumberOfSweepingSpaces = LAST_GROWABLE_PAGED_SPACE + 1;
   static const int kMaxSweeperTasks = 3;

   template <typename Callback>
@@ -159,11 +159,12 @@ class Sweeper {
   void MakeIterable(Page* page);

   bool IsValidIterabilitySpace(AllocationSpace space) {
-    return space == NEW_SPACE;
+    return space == NEW_SPACE || space == RO_SPACE;
   }

   bool IsValidSweepingSpace(AllocationSpace space) {
-    return space >= FIRST_PAGED_SPACE && space <= LAST_PAGED_SPACE;
+    return space >= FIRST_GROWABLE_PAGED_SPACE &&
+           space <= LAST_GROWABLE_PAGED_SPACE;
   }

   Heap* const heap_;
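
Two things fall out of the new ordering here. RO_SPACE becomes a valid
iterability space alongside NEW_SPACE, while IsValidSweepingSpace still
rejects it, since RO_SPACE (0) lies below FIRST_GROWABLE_PAGED_SPACE
(2). And kNumberOfSweepingSpaces grows from 4 to 5 because MAP_SPACE's
numeric value rose by one; the extra low-index slots are simply never
used by sweeping.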

@@ -49,14 +49,12 @@ void BuiltinSerializerAllocator::OutputStatistics() {

   PrintF("  Spaces (bytes):\n");

-  STATIC_ASSERT(NEW_SPACE == 0);
-  for (int space = 0; space < kNumberOfSpaces; space++) {
+  for (int space = FIRST_SPACE; space < kNumberOfSpaces; space++) {
     PrintF("%16s", AllocationSpaceName(static_cast<AllocationSpace>(space)));
   }
   PrintF("\n");

-  STATIC_ASSERT(NEW_SPACE == 0);
-  for (int space = 0; space < kNumberOfSpaces; space++) {
+  for (int space = FIRST_SPACE; space < kNumberOfSpaces; space++) {
     uint32_t space_size = (space == CODE_SPACE) ? virtual_chunk_size_ : 0;
     PrintF("%16d", space_size);
   }
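
The deleted STATIC_ASSERT(NEW_SPACE == 0) lines guarded loops that
started at the literal 0; now that RO_SPACE is 0, the loops spell out
FIRST_SPACE instead, and the single STATIC_ASSERT(FIRST_SPACE == 0)
added next to the enum carries the remaining assumption.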

@@ -122,9 +122,8 @@ HeapObject* DefaultDeserializerAllocator::GetObject(AllocationSpace space,

 void DefaultDeserializerAllocator::DecodeReservation(
     std::vector<SerializedData::Reservation> res) {
-  DCHECK_EQ(0, reservations_[NEW_SPACE].size());
-  STATIC_ASSERT(NEW_SPACE == 0);
-  int current_space = NEW_SPACE;
+  DCHECK_EQ(0, reservations_[FIRST_SPACE].size());
+  int current_space = FIRST_SPACE;
   for (auto& r : res) {
     reservations_[current_space].push_back({r.chunk_size(), NULL, NULL});
     if (r.is_last()) current_space++;
@@ -135,7 +134,7 @@ void DefaultDeserializerAllocator::DecodeReservation(

 bool DefaultDeserializerAllocator::ReserveSpace() {
 #ifdef DEBUG
-  for (int i = NEW_SPACE; i < kNumberOfSpaces; ++i) {
+  for (int i = FIRST_SPACE; i < kNumberOfSpaces; ++i) {
     DCHECK_GT(reservations_[i].size(), 0);
   }
 #endif  // DEBUG
@@ -153,8 +152,6 @@ bool DefaultDeserializerAllocator::ReserveSpace() {
 bool DefaultDeserializerAllocator::ReserveSpace(
     StartupDeserializer* startup_deserializer,
     BuiltinDeserializer* builtin_deserializer) {
-  const int first_space = NEW_SPACE;
-  const int last_space = SerializerDeserializer::kNumberOfSpaces;
   Isolate* isolate = startup_deserializer->isolate();

   // Create a set of merged reservations to reserve space in one go.
@@ -163,7 +160,7 @@ bool DefaultDeserializerAllocator::ReserveSpace(
   // Instead, we manually determine the required code-space.

   Heap::Reservation merged_reservations[kNumberOfSpaces];
-  for (int i = first_space; i < last_space; i++) {
+  for (int i = FIRST_SPACE; i < kNumberOfSpaces; i++) {
     merged_reservations[i] =
         startup_deserializer->allocator()->reservations_[i];
   }
@@ -206,12 +203,12 @@ bool DefaultDeserializerAllocator::ReserveSpace(

   // Write back startup reservations.
-  for (int i = first_space; i < last_space; i++) {
+  for (int i = FIRST_SPACE; i < kNumberOfSpaces; i++) {
     startup_deserializer->allocator()->reservations_[i].swap(
         merged_reservations[i]);
   }
-  for (int i = first_space; i < kNumberOfPreallocatedSpaces; i++) {
+  for (int i = FIRST_SPACE; i < kNumberOfPreallocatedSpaces; i++) {
     startup_deserializer->allocator()->high_water_[i] =
         startup_deserializer->allocator()->reservations_[i][0].start;
   }
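
For orientation, a sketch of the reservation stream DecodeReservation
consumes (illustrative sizes, not real snapshot data): chunks arrive in
space order starting at FIRST_SPACE, and a chunk flagged is_last
finishes the current space and advances current_space.

    // {chunk_size: 16384}                -> reservations_[RO_SPACE]
    // {chunk_size: 32768, is_last: true} -> reservations_[RO_SPACE], then
    //                                       current_space becomes NEW_SPACE
    // {chunk_size: 8192,  is_last: true} -> reservations_[NEW_SPACE], ...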

@@ -86,8 +86,7 @@ std::vector<SerializedData::Reservation>
 DefaultSerializerAllocator::EncodeReservations() const {
   std::vector<SerializedData::Reservation> out;

-  STATIC_ASSERT(NEW_SPACE == 0);
-  for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
+  for (int i = FIRST_SPACE; i < kNumberOfPreallocatedSpaces; i++) {
     for (size_t j = 0; j < completed_chunks_[i].size(); j++) {
       out.emplace_back(completed_chunks_[i][j]);
     }
@@ -106,9 +105,6 @@ DefaultSerializerAllocator::EncodeReservations() const {
   out.emplace_back(large_objects_total_size_);
   out.back().mark_as_last();

-  STATIC_ASSERT(RO_SPACE == LO_SPACE + 1);
-  out.emplace_back(0);
-  out.back().mark_as_last();
   return out;
 }
@@ -117,14 +113,12 @@ void DefaultSerializerAllocator::OutputStatistics() {

   PrintF("  Spaces (bytes):\n");

-  STATIC_ASSERT(NEW_SPACE == 0);
-  for (int space = 0; space < kNumberOfSpaces; space++) {
+  for (int space = FIRST_SPACE; space < kNumberOfSpaces; space++) {
     PrintF("%16s", AllocationSpaceName(static_cast<AllocationSpace>(space)));
   }
   PrintF("\n");

-  STATIC_ASSERT(NEW_SPACE == 0);
-  for (int space = 0; space < kNumberOfPreallocatedSpaces; space++) {
+  for (int space = FIRST_SPACE; space < kNumberOfPreallocatedSpaces; space++) {
     size_t s = pending_chunk_[space];
     for (uint32_t chunk_size : completed_chunks_[space]) s += chunk_size;
     PrintF("%16" PRIuS, s);
@@ -134,10 +128,7 @@ void DefaultSerializerAllocator::OutputStatistics() {
   PrintF("%16d", num_maps_ * Map::kSize);

   STATIC_ASSERT(LO_SPACE == MAP_SPACE + 1);
-  PrintF("%16d", large_objects_total_size_);
-
-  STATIC_ASSERT(RO_SPACE == LO_SPACE + 1);
-  PrintF("%16d\n", 0);
+  PrintF("%16d\n", large_objects_total_size_);
 }

 // static
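
These deletions are the counterpart of the enum move: EncodeReservations
no longer appends a zero-sized, mark_as_last placeholder reservation for
RO_SPACE after the large-object entry, because RO_SPACE, at position 0,
is now covered by the FIRST_SPACE..kNumberOfPreallocatedSpaces loop at
the top of the function. Likewise OutputStatistics stops printing a
hard-coded 0 column for it.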

@@ -365,8 +365,9 @@ TEST(SizeOfInitialHeap) {
   // Freshly initialized VM gets by with the snapshot size (which is below
   // kMaxInitialSizePerSpace per space).
   Heap* heap = isolate->heap();
-  int page_count[LAST_PAGED_SPACE + 1] = {0, 0, 0, 0};
-  for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) {
+  int page_count[LAST_GROWABLE_PAGED_SPACE + 1] = {0, 0, 0, 0};
+  for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
+       i++) {
     // Debug code can be very large, so skip CODE_SPACE if we are generating it.
     if (i == CODE_SPACE && i::FLAG_debug_code) continue;
@@ -378,7 +379,8 @@ TEST(SizeOfInitialHeap) {
   // Executing the empty script gets by with the same number of pages, i.e.,
   // requires no extra space.
   CompileRun("/*empty*/");
-  for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) {
+  for (int i = FIRST_GROWABLE_PAGED_SPACE; i <= LAST_GROWABLE_PAGED_SPACE;
+       i++) {
     // Skip CODE_SPACE, since we had to generate code even for an empty script.
     if (i == CODE_SPACE) continue;
     CHECK_EQ(page_count[i], isolate->heap()->paged_space(i)->CountTotalPages());
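
One subtlety in the test: LAST_GROWABLE_PAGED_SPACE + 1 is now 5
(MAP_SPACE moved from 3 to 4), so page_count has five elements while
its initializer still lists four zeros. That is fine in C++: aggregate
initialization value-initializes the remaining element to zero, and the
loops only ever touch indices FIRST_GROWABLE_PAGED_SPACE (2) through
LAST_GROWABLE_PAGED_SPACE (4).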