[heap] Remove Heap::map_space_ field and MapSpace class

Bug: v8:12578
Change-Id: I724164405cf3ba6d433655fa0fde5b9986fe04a0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3959661
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Auto-Submit: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83786}
Authored by Dominik Inführ on 2022-10-18 09:15:33 +02:00, committed by V8 LUCI CQ
parent 6770f44ce7
commit d4c1da341a
24 changed files with 13 additions and 226 deletions

View File

@@ -882,7 +882,6 @@ class JSObject;
class LocalIsolate;
class MacroAssembler;
class Map;
class MapSpace;
class MarkCompactCollector;
template <typename T>
class MaybeHandle;

View File

@@ -4062,13 +4062,6 @@ void Isolate::AddCrashKeysForIsolateAndHeapPointers() {
add_crash_key_callback_(v8::CrashKeyId::kReadonlySpaceFirstPageAddress,
ToHexString(ro_space_firstpage_address));
if (heap()->map_space()) {
const uintptr_t map_space_firstpage_address =
heap()->map_space()->FirstPageAddress();
add_crash_key_callback_(v8::CrashKeyId::kMapSpaceFirstPageAddress,
ToHexString(map_space_firstpage_address));
}
if (heap()->code_range_base()) {
const uintptr_t code_range_base_address = heap()->code_range_base();
add_crash_key_callback_(v8::CrashKeyId::kCodeRangeBaseAddress,

View File

@@ -35,10 +35,6 @@ class EvacuationAllocator {
heap_->old_space()->MergeCompactionSpace(compaction_spaces_.Get(OLD_SPACE));
heap_->code_space()->MergeCompactionSpace(
compaction_spaces_.Get(CODE_SPACE));
if (heap_->map_space()) {
heap_->map_space()->MergeCompactionSpace(
compaction_spaces_.Get(MAP_SPACE));
}
if (heap_->shared_space()) {
heap_->shared_space()->MergeCompactionSpace(
compaction_spaces_.Get(SHARED_SPACE));

View File

@@ -94,7 +94,6 @@ void FullEvacuationVerifier::Run() {
VerifyEvacuation(heap_->old_space());
VerifyEvacuation(heap_->code_space());
if (heap_->shared_space()) VerifyEvacuation(heap_->shared_space());
if (heap_->map_space()) VerifyEvacuation(heap_->map_space());
}
void FullEvacuationVerifier::VerifyMap(Map map) { VerifyHeapObjectImpl(map); }
@@ -136,7 +135,6 @@ void YoungGenerationEvacuationVerifier::YoungGenerationEvacuationVerifier::
VerifyEvacuation(heap_->new_space());
VerifyEvacuation(heap_->old_space());
VerifyEvacuation(heap_->code_space());
if (heap_->map_space()) VerifyEvacuation(heap_->map_space());
}
void YoungGenerationEvacuationVerifier::VerifyMap(Map map) {

View File

@@ -541,7 +541,6 @@ void GCTracer::NotifyFullSweepingCompleted() {
heap_->new_space()->PrintAllocationsOrigins();
heap_->old_space()->PrintAllocationsOrigins();
heap_->code_space()->PrintAllocationsOrigins();
heap_->map_space()->PrintAllocationsOrigins();
}
DCHECK(!notified_full_sweeping_completed_);
notified_full_sweeping_completed_ = true;

View File

@@ -38,8 +38,6 @@ OldLargeObjectSpace* HeapAllocator::shared_lo_space() const {
return shared_lo_space_;
}
PagedSpace* HeapAllocator::space_for_maps() const { return space_for_maps_; }
NewSpace* HeapAllocator::new_space() const {
return static_cast<NewSpace*>(spaces_[NEW_SPACE]);
}
@@ -106,6 +104,7 @@ V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult HeapAllocator::AllocateRaw(
allocation =
new_space()->AllocateRaw(size_in_bytes, alignment, origin);
break;
case AllocationType::kMap:
case AllocationType::kOld:
allocation =
old_space()->AllocateRaw(size_in_bytes, alignment, origin);
@@ -116,20 +115,12 @@ V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult HeapAllocator::AllocateRaw(
allocation = code_space()->AllocateRaw(
size_in_bytes, AllocationAlignment::kTaggedAligned);
break;
case AllocationType::kMap:
DCHECK_EQ(alignment, AllocationAlignment::kTaggedAligned);
allocation = space_for_maps()->AllocateRaw(
size_in_bytes, AllocationAlignment::kTaggedAligned);
break;
case AllocationType::kReadOnly:
DCHECK(read_only_space()->writable());
DCHECK_EQ(AllocationOrigin::kRuntime, origin);
allocation = read_only_space()->AllocateRaw(size_in_bytes, alignment);
break;
case AllocationType::kSharedMap:
allocation = shared_map_allocator_->AllocateRaw(size_in_bytes,
alignment, origin);
break;
case AllocationType::kSharedOld:
allocation = shared_old_allocator_->AllocateRaw(size_in_bytes,
alignment, origin);

View File

@@ -23,14 +23,7 @@ void HeapAllocator::Setup() {
spaces_[i] = heap_->space(i);
}
space_for_maps_ = spaces_[MAP_SPACE]
? static_cast<PagedSpace*>(spaces_[MAP_SPACE])
: static_cast<PagedSpace*>(spaces_[OLD_SPACE]);
shared_old_allocator_ = heap_->shared_space_allocator_.get();
shared_map_allocator_ = heap_->shared_map_allocator_
? heap_->shared_map_allocator_.get()
: shared_old_allocator_;
shared_lo_space_ = heap_->shared_lo_allocation_space();
}

View File

@@ -79,7 +79,6 @@ class V8_EXPORT_PRIVATE HeapAllocator final {
private:
V8_INLINE PagedSpace* code_space() const;
V8_INLINE CodeLargeObjectSpace* code_lo_space() const;
V8_INLINE PagedSpace* space_for_maps() const;
V8_INLINE NewSpace* new_space() const;
V8_INLINE NewLargeObjectSpace* new_lo_space() const;
V8_INLINE OldLargeObjectSpace* lo_space() const;
@@ -105,11 +104,9 @@ class V8_EXPORT_PRIVATE HeapAllocator final {
Heap* const heap_;
Space* spaces_[LAST_SPACE + 1];
PagedSpace* space_for_maps_;
ReadOnlySpace* read_only_space_;
ConcurrentAllocator* shared_old_allocator_;
ConcurrentAllocator* shared_map_allocator_;
OldLargeObjectSpace* shared_lo_space_;
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT

View File

@@ -115,16 +115,6 @@ int64_t Heap::update_external_memory(int64_t delta) {
return external_memory_.Update(delta);
}
PagedSpace* Heap::space_for_maps() {
return V8_LIKELY(map_space_) ? static_cast<PagedSpace*>(map_space_)
: static_cast<PagedSpace*>(old_space_);
}
ConcurrentAllocator* Heap::concurrent_allocator_for_maps() {
return V8_LIKELY(shared_map_allocator_) ? shared_map_allocator_.get()
: shared_space_allocator_.get();
}
RootsTable& Heap::roots_table() { return isolate()->roots_table(); }
#define ROOT_ACCESSOR(Type, name, CamelName) \

View File

@@ -58,7 +58,6 @@ class HeapVerification final {
ReadOnlySpace* read_only_space() const { return heap_->read_only_space(); }
NewSpace* new_space() const { return heap_->new_space(); }
OldSpace* old_space() const { return heap_->old_space(); }
MapSpace* map_space() const { return heap_->map_space(); }
CodeSpace* code_space() const { return heap_->code_space(); }
LargeObjectSpace* lo_space() const { return heap_->lo_space(); }
CodeLargeObjectSpace* code_lo_space() const { return heap_->code_lo_space(); }
@@ -107,9 +106,6 @@ void HeapVerification::Verify() {
if (new_space()) new_space()->Verify(isolate());
old_space()->Verify(isolate(), &visitor);
if (map_space()) {
map_space()->Verify(isolate(), &visitor);
}
VerifyPointersVisitor no_dirty_regions_visitor(heap());
code_space()->Verify(isolate(), &no_dirty_regions_visitor);

View File

@@ -569,14 +569,6 @@ void Heap::PrintShortHeapStatistics() {
", committed: %6zu KB\n",
code_space_->SizeOfObjects() / KB, code_space_->Available() / KB,
code_space_->CommittedMemory() / KB);
if (map_space()) {
PrintIsolate(isolate_,
"Map space, used: %6zu KB"
", available: %6zu KB"
", committed: %6zu KB\n",
map_space_->SizeOfObjects() / KB, map_space_->Available() / KB,
map_space_->CommittedMemory() / KB);
}
PrintIsolate(isolate_,
"Large object space, used: %6zu KB"
", available: %6zu KB"
@@ -1246,10 +1238,6 @@ void Heap::GarbageCollectionEpilogueInSafepoint(GarbageCollector collector) {
UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
if (map_space()) {
UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
}
UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(lo_space)
#undef UPDATE_COUNTERS_FOR_SPACE
#undef UPDATE_FRAGMENTATION_FOR_SPACE
@@ -1331,10 +1319,6 @@ void Heap::GarbageCollectionEpilogue(GarbageCollector collector) {
static_cast<int>(CommittedMemory() / KB));
isolate_->counters()->heap_sample_total_used()->AddSample(
static_cast<int>(SizeOfObjects() / KB));
if (map_space()) {
isolate_->counters()->heap_sample_map_space_committed()->AddSample(
static_cast<int>(map_space()->CommittedMemory() / KB));
}
isolate_->counters()->heap_sample_code_space_committed()->AddSample(
static_cast<int>(code_space()->CommittedMemory() / KB));
@@ -3563,7 +3547,6 @@ void Heap::FreeSharedLinearAllocationAreas() {
void Heap::FreeMainThreadSharedLinearAllocationAreas() {
if (!isolate()->has_shared_heap()) return;
shared_space_allocator_->FreeLinearAllocationArea();
if (shared_map_allocator_) shared_map_allocator_->FreeLinearAllocationArea();
main_thread_local_heap()->FreeSharedLinearAllocationArea();
}
@@ -4242,7 +4225,6 @@ bool Heap::Contains(HeapObject value) const {
return (new_space_ && new_space_->Contains(value)) ||
old_space_->Contains(value) || code_space_->Contains(value) ||
(map_space_ && map_space_->Contains(value)) ||
(shared_space_ && shared_space_->Contains(value)) ||
lo_space_->Contains(value) || code_lo_space_->Contains(value) ||
(new_lo_space_ && new_lo_space_->Contains(value)) ||
@@ -4265,9 +4247,6 @@ bool Heap::SharedHeapContains(HeapObject value) const {
if (shared_allocation_space_) {
if (shared_allocation_space_->Contains(value)) return true;
if (shared_lo_allocation_space_->Contains(value)) return true;
if (shared_map_allocation_space_ &&
shared_map_allocation_space_->Contains(value))
return true;
}
return false;
@@ -4298,8 +4277,7 @@ bool Heap::InSpace(HeapObject value, AllocationSpace space) const {
case CODE_SPACE:
return code_space_->Contains(value);
case MAP_SPACE:
DCHECK(map_space_);
return map_space_->Contains(value);
UNREACHABLE();
case SHARED_SPACE:
return shared_space_->Contains(value);
case LO_SPACE:
@@ -4336,8 +4314,7 @@ bool Heap::InSpaceSlow(Address addr, AllocationSpace space) const {
case CODE_SPACE:
return code_space_->ContainsSlow(addr);
case MAP_SPACE:
DCHECK(map_space_);
return map_space_->ContainsSlow(addr);
UNREACHABLE();
case SHARED_SPACE:
return shared_space_->ContainsSlow(addr);
case LO_SPACE:
@@ -4976,8 +4953,8 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
*stats->old_space_capacity = old_space_->Capacity();
*stats->code_space_size = code_space_->SizeOfObjects();
*stats->code_space_capacity = code_space_->Capacity();
*stats->map_space_size = map_space_ ? map_space_->SizeOfObjects() : 0;
*stats->map_space_capacity = map_space_ ? map_space_->Capacity() : 0;
*stats->map_space_size = 0;
*stats->map_space_capacity = 0;
*stats->lo_space_size = lo_space_->Size();
*stats->code_lo_space_size = code_lo_space_->Size();
isolate_->global_handles()->RecordStats(stats);
@@ -5557,11 +5534,8 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
shared_space_allocator_ = std::make_unique<ConcurrentAllocator>(
main_thread_local_heap(), heap->shared_space_);
DCHECK_NULL(shared_map_allocator_.get());
shared_allocation_space_ = heap->shared_space_;
shared_lo_allocation_space_ = heap->shared_lo_space_;
DCHECK_NULL(shared_map_allocation_space_);
} else if (isolate()->shared_isolate()) {
Heap* shared_heap = isolate()->shared_isolate()->heap();
@@ -5569,14 +5543,8 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
shared_space_allocator_ = std::make_unique<ConcurrentAllocator>(
main_thread_local_heap(), shared_heap->old_space());
if (shared_heap->map_space()) {
shared_map_allocator_ = std::make_unique<ConcurrentAllocator>(
main_thread_local_heap(), shared_heap->map_space());
}
shared_allocation_space_ = shared_heap->old_space();
shared_lo_allocation_space_ = shared_heap->lo_space();
shared_map_allocation_space_ = shared_heap->map_space();
}
main_thread_local_heap()->SetUpMainThread();
@@ -5899,7 +5867,6 @@ void Heap::TearDown() {
pretenuring_handler_.reset();
shared_space_allocator_.reset();
shared_map_allocator_.reset();
{
CodePageHeaderModificationScope rwx_write_scope(
@@ -7314,10 +7281,6 @@ void Heap::EnsureSweepingCompleted(SweepingForcedFinalizationMode mode) {
if (shared_space()) {
shared_space()->RefillFreeList();
}
if (map_space()) {
map_space()->RefillFreeList();
map_space()->SortFreeList();
}
if (v8_flags.minor_mc && new_space()) {
paged_new_space()->paged_space()->RefillFreeList();

View File

@@ -835,8 +835,6 @@ class Heap {
OldSpace* old_space() const { return old_space_; }
CodeSpace* code_space() const { return code_space_; }
SharedSpace* shared_space() const { return shared_space_; }
MapSpace* map_space() const { return map_space_; }
inline PagedSpace* space_for_maps();
OldLargeObjectSpace* lo_space() const { return lo_space_; }
CodeLargeObjectSpace* code_lo_space() const { return code_lo_space_; }
SharedLargeObjectSpace* shared_lo_space() const { return shared_lo_space_; }
@@ -864,8 +862,6 @@ class Heap {
return memory_allocator_.get();
}
inline ConcurrentAllocator* concurrent_allocator_for_maps();
inline Isolate* isolate() const;
// Check if we run on isolate's main thread.
@@ -2161,7 +2157,6 @@ class Heap {
NewSpace* new_space_ = nullptr;
OldSpace* old_space_ = nullptr;
CodeSpace* code_space_ = nullptr;
MapSpace* map_space_ = nullptr;
SharedSpace* shared_space_ = nullptr;
OldLargeObjectSpace* lo_space_ = nullptr;
CodeLargeObjectSpace* code_lo_space_ = nullptr;
@@ -2173,11 +2168,9 @@ class Heap {
// in another isolate.
PagedSpace* shared_allocation_space_ = nullptr;
OldLargeObjectSpace* shared_lo_allocation_space_ = nullptr;
PagedSpace* shared_map_allocation_space_ = nullptr;
// Allocators for the shared spaces.
std::unique_ptr<ConcurrentAllocator> shared_space_allocator_;
std::unique_ptr<ConcurrentAllocator> shared_map_allocator_;
// Map from the space id to the space.
std::unique_ptr<Space> space_[LAST_SPACE + 1];

View File

@@ -402,7 +402,6 @@ void IncrementalMarking::StartBlackAllocation() {
DCHECK(IsMarking());
black_allocation_ = true;
heap()->old_space()->MarkLinearAllocationAreaBlack();
if (heap()->map_space()) heap()->map_space()->MarkLinearAllocationAreaBlack();
{
CodePageHeaderModificationScope rwx_write_scope(
"Marking Code objects requires write access to the Code page header");
@@ -427,7 +426,6 @@ void IncrementalMarking::StartBlackAllocation() {
void IncrementalMarking::PauseBlackAllocation() {
DCHECK(IsMarking());
heap()->old_space()->UnmarkLinearAllocationArea();
if (heap()->map_space()) heap()->map_space()->UnmarkLinearAllocationArea();
{
CodePageHeaderModificationScope rwx_write_scope(
"Marking Code objects requires write access to the Code page header");

View File

@@ -373,7 +373,7 @@ void LargeObjectSpace::Verify(Isolate* isolate) {
Map map = object.map(cage_base);
CHECK(map.IsMap(cage_base));
CHECK(ReadOnlyHeap::Contains(map) ||
isolate->heap()->space_for_maps()->Contains(map));
isolate->heap()->old_space()->Contains(map));
// We have only the following types in the large object space:
const bool is_valid_lo_space_object = //

View File

@@ -232,7 +232,6 @@ class FullMarkingVerifier : public MarkingVerifier {
VerifyMarking(heap_->old_space());
VerifyMarking(heap_->code_space());
if (heap_->shared_space()) VerifyMarking(heap_->shared_space());
if (heap_->map_space()) VerifyMarking(heap_->map_space());
VerifyMarking(heap_->lo_space());
VerifyMarking(heap_->code_lo_space());
if (heap_->shared_lo_space()) VerifyMarking(heap_->shared_lo_space());
@@ -539,10 +538,6 @@ bool MarkCompactCollector::StartCompaction(StartCompactionMode mode) {
CollectEvacuationCandidates(heap()->old_space());
if (heap()->map_space()) {
CollectEvacuationCandidates(heap()->map_space());
}
if (heap()->shared_space()) {
CollectEvacuationCandidates(heap()->shared_space());
}
@@ -554,10 +549,6 @@ bool MarkCompactCollector::StartCompaction(StartCompactionMode mode) {
TraceFragmentation(heap()->code_space());
}
if (v8_flags.trace_fragmentation && heap()->map_space()) {
TraceFragmentation(heap()->map_space());
}
compacting_ = !evacuation_candidates_.empty();
return compacting_;
}
@@ -650,9 +641,6 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(LargeObjectSpace* space) {
void MarkCompactCollector::VerifyMarkbitsAreClean() {
VerifyMarkbitsAreClean(heap_->old_space());
VerifyMarkbitsAreClean(heap_->code_space());
if (heap_->map_space()) {
VerifyMarkbitsAreClean(heap_->map_space());
}
VerifyMarkbitsAreClean(heap_->new_space());
// Read-only space should always be black since we never collect any objects
// in it or linked from it.
@@ -714,7 +702,7 @@ void MarkCompactCollector::ComputeEvacuationHeuristics(
void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE ||
space->identity() == MAP_SPACE || space->identity() == SHARED_SPACE);
space->identity() == SHARED_SPACE);
int number_of_pages = space->CountTotalPages();
size_t area_size = space->AreaSize();
@@ -959,7 +947,6 @@ void MarkCompactCollector::VerifyMarking() {
#ifdef VERIFY_HEAP
if (v8_flags.verify_heap) {
heap()->old_space()->VerifyLiveBytes();
if (heap()->map_space()) heap()->map_space()->VerifyLiveBytes();
heap()->code_space()->VerifyLiveBytes();
if (heap()->shared_space()) heap()->shared_space()->VerifyLiveBytes();
if (v8_flags.minor_mc && heap()->paged_new_space())
@@ -1639,7 +1626,7 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
if (V8_UNLIKELY(v8_flags.minor_mc)) {
base->record_visitor_->MarkArrayBufferExtensionPromoted(dst);
}
} else if (dest == MAP_SPACE || dest == SHARED_SPACE) {
} else if (dest == SHARED_SPACE) {
DCHECK_OBJECT_SIZE(size);
DCHECK(IsAligned(size, kTaggedSize));
base->heap_->CopyBlock(dst_addr, src_addr, size);
@@ -4997,7 +4984,6 @@ class RememberedSetUpdatingItem : public UpdatingItem {
void UpdateTypedPointers() {
if (chunk_->typed_slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() !=
nullptr) {
CHECK_NE(chunk_->owner(), heap_->map_space());
const auto check_and_update_old_to_new_slot_fn =
[this](FullMaybeObjectSlot slot) {
return CheckAndUpdateOldToNewSlot(slot);
@@ -5018,7 +5004,6 @@ class RememberedSetUpdatingItem : public UpdatingItem {
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
(chunk_->typed_slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() !=
nullptr)) {
CHECK_NE(chunk_->owner(), heap_->map_space());
RememberedSet<OLD_TO_OLD>::IterateTyped(
chunk_, [this](SlotType slot_type, Address slot) {
// Using UpdateStrongSlot is OK here, because there are no weak
@@ -5175,11 +5160,6 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
heap()->shared_lo_space(),
RememberedSetUpdatingMode::ALL);
}
if (heap()->map_space()) {
CollectRememberedSetUpdatingItems(this, &updating_items,
heap()->map_space(),
RememberedSetUpdatingMode::ALL);
}
// Iterating to space may require a valid body descriptor for e.g.
// WasmStruct which races with updating a slot in Map. Since to space is
@@ -5390,11 +5370,6 @@ void MarkCompactCollector::Sweep() {
heap()->tracer(), GCTracer::Scope::MC_SWEEP_CODE, ThreadKind::kMain);
StartSweepSpace(heap()->code_space());
}
if (heap()->map_space()) {
GCTracer::Scope sweep_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP,
ThreadKind::kMain);
StartSweepSpace(heap()->map_space());
}
if (heap()->shared_space()) {
GCTracer::Scope sweep_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP_MAP,
ThreadKind::kMain);
@@ -5664,11 +5639,6 @@ void MinorMarkCompactCollector::UpdatePointersAfterEvacuation() {
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->code_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
if (heap()->map_space()) {
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->map_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);
}
CollectRememberedSetUpdatingItems(
this, &updating_items, heap()->lo_space(),
RememberedSetUpdatingMode::OLD_TO_NEW_ONLY);

View File

@@ -210,7 +210,6 @@ void MarkingBarrier::Deactivate() {
is_compacting_ = false;
if (is_main_thread_barrier_) {
DeactivateSpace(heap_->old_space());
if (heap_->map_space()) DeactivateSpace(heap_->map_space());
DeactivateSpace(heap_->code_space());
DeactivateSpace(heap_->new_space());
if (heap_->shared_space()) {
@@ -261,7 +260,6 @@ void MarkingBarrier::Activate(bool is_compacting,
is_activated_ = true;
if (is_main_thread_barrier_) {
ActivateSpace(heap_->old_space());
if (heap_->map_space()) ActivateSpace(heap_->map_space());
{
CodePageHeaderModificationScope rwx_write_scope(
"Modification of Code page header flags requires write access");

View File

@@ -492,7 +492,7 @@ void NewSpace::VerifyImpl(Isolate* isolate, const Page* current_page,
Map map = object.map(cage_base);
CHECK(map.IsMap(cage_base));
CHECK(ReadOnlyHeap::Contains(map) ||
isolate->heap()->space_for_maps()->Contains(map));
isolate->heap()->old_space()->Contains(map));
// The object should not be code or a map.
CHECK(!object.IsMap(cage_base));

View File

@@ -742,7 +742,7 @@ void PagedSpaceBase::Verify(Isolate* isolate, ObjectVisitor* visitor) const {
Map map = object.map(cage_base);
CHECK(map.IsMap(cage_base));
CHECK(ReadOnlyHeap::Contains(map) ||
isolate->heap()->space_for_maps()->Contains(map));
isolate->heap()->old_space()->Contains(map));
// Perform space-specific object verification.
VerifyObject(object);
@@ -1038,44 +1038,5 @@ size_t PagedSpaceBase::RelinkFreeListCategories(Page* page) {
return added;
}
// -----------------------------------------------------------------------------
// MapSpace implementation
// TODO(dmercadier): use a heap instead of sorting like that.
// Using a heap will have multiple benefits:
// - for now, SortFreeList is only called after sweeping, which is somewhat
// late. Using a heap, sorting could be done online: FreeListCategories would
// be inserted in a heap (ie, in a sorted manner).
// - SortFreeList is a bit fragile: any change to FreeListMap (or to
// MapSpace::free_list_) could break it.
void MapSpace::SortFreeList() {
using LiveBytesPagePair = std::pair<size_t, Page*>;
std::vector<LiveBytesPagePair> pages;
pages.reserve(CountTotalPages());
for (Page* p : *this) {
free_list()->RemoveCategory(p->free_list_category(kFirstCategory));
pages.push_back(std::make_pair(p->allocated_bytes(), p));
}
// Sorting by least-allocated-bytes first.
std::sort(pages.begin(), pages.end(),
[](const LiveBytesPagePair& a, const LiveBytesPagePair& b) {
return a.first < b.first;
});
for (LiveBytesPagePair const& p : pages) {
// Since AddCategory inserts in head position, it reverts the order produced
// by the sort above: least-allocated-bytes will be Added first, and will
// therefore be the last element (and the first one will be
// most-allocated-bytes).
free_list()->AddCategory(p.second->free_list_category(kFirstCategory));
}
}
#ifdef VERIFY_HEAP
void MapSpace::VerifyObject(HeapObject object) const { CHECK(object.IsMap()); }
#endif
} // namespace internal
} // namespace v8

View File

@@ -492,36 +492,6 @@ class CodeSpace final : public PagedSpace {
LinearAllocationArea paged_allocation_info_;
};
// -----------------------------------------------------------------------------
// Old space for all map objects
class MapSpace final : public PagedSpace {
public:
// Creates a map space object.
explicit MapSpace(Heap* heap)
: PagedSpace(heap, MAP_SPACE, NOT_EXECUTABLE, FreeList::CreateFreeList(),
paged_allocation_info_) {}
int RoundSizeDownToObjectAlignment(int size) const override {
if (V8_COMPRESS_POINTERS_8GB_BOOL) {
return RoundDown(size, kObjectAlignment8GbHeap);
} else if (base::bits::IsPowerOfTwo(Map::kSize)) {
return RoundDown(size, Map::kSize);
} else {
return (size / Map::kSize) * Map::kSize;
}
}
void SortFreeList();
#ifdef VERIFY_HEAP
void VerifyObject(HeapObject obj) const override;
#endif
private:
LinearAllocationArea paged_allocation_info_;
};
// -----------------------------------------------------------------------------
// Shared space regular object space.
@@ -560,7 +530,6 @@ class OldGenerationMemoryChunkIterator {
private:
enum State {
kOldSpaceState,
kMapState,
kCodeState,
kLargeObjectState,
kCodeLargeObjectState,
@@ -570,8 +539,6 @@ class OldGenerationMemoryChunkIterator {
State state_;
PageIterator old_iterator_;
PageIterator code_iterator_;
PageIterator map_iterator_;
const PageIterator map_iterator_end_;
LargePageIterator lo_iterator_;
LargePageIterator code_lo_iterator_;
};

View File

@@ -103,10 +103,6 @@ OldGenerationMemoryChunkIterator::OldGenerationMemoryChunkIterator(Heap* heap)
state_(kOldSpaceState),
old_iterator_(heap->old_space()->begin()),
code_iterator_(heap->code_space()->begin()),
map_iterator_(heap->map_space() ? heap->map_space()->begin()
: PageRange::iterator(nullptr)),
map_iterator_end_(heap->map_space() ? heap->map_space()->end()
: PageRange::iterator(nullptr)),
lo_iterator_(heap->lo_space()->begin()),
code_lo_iterator_(heap->code_lo_space()->begin()) {}
@@ -114,11 +110,6 @@ MemoryChunk* OldGenerationMemoryChunkIterator::next() {
switch (state_) {
case kOldSpaceState: {
if (old_iterator_ != heap_->old_space()->end()) return *(old_iterator_++);
state_ = kMapState;
V8_FALLTHROUGH;
}
case kMapState: {
if (map_iterator_ != map_iterator_end_) return *(map_iterator_++);
state_ = kCodeState;
V8_FALLTHROUGH;
}

View File

@@ -78,7 +78,7 @@ Handle<Object> HeapTester::TestAllocateAfterFailures() {
heap->CreateFillerObjectAt(obj.address(), size);
// Map space.
heap::SimulateFullSpace(heap->space_for_maps());
heap::SimulateFullSpace(heap->old_space());
obj = heap->AllocateRaw(Map::kSize, AllocationType::kMap).ToObjectChecked();
heap->CreateFillerObjectAt(obj.address(), Map::kSize);

View File

@@ -103,7 +103,6 @@ TEST(PagedSpaceIterator) {
PagedSpaceIterator iterator(heap);
CHECK_EQ(iterator.Next(), reinterpret_cast<PagedSpace*>(heap->old_space()));
CHECK_EQ(iterator.Next(), reinterpret_cast<PagedSpace*>(heap->code_space()));
CHECK_EQ(iterator.Next(), reinterpret_cast<PagedSpace*>(heap->map_space()));
for (int i = 0; i < 20; i++) {
CHECK_NULL(iterator.Next());
}

View File

@@ -149,12 +149,11 @@ static int DumpHeapConstants(FILE* out, const char* argv0) {
object);
}
i::PagedSpace* space_for_maps = heap->space_for_maps();
i::PagedSpaceObjectIterator iterator(heap, space_for_maps);
i::PagedSpaceObjectIterator iterator(heap, heap->old_space());
for (i::HeapObject object = iterator.Next(); !object.is_null();
object = iterator.Next()) {
if (!object.IsMap()) continue;
DumpKnownMap(out, heap, space_for_maps->name(), object);
DumpKnownMap(out, heap, heap->old_space()->name(), object);
}
i::PrintF(out, "}\n");
}

View File

@@ -152,10 +152,6 @@ TEST_F(IsolateTest, SetAddCrashKeyCallback) {
EXPECT_EQ(crash_keys.count(v8::CrashKeyId::kSnapshotChecksumCalculated), 1u);
EXPECT_EQ(crash_keys.count(v8::CrashKeyId::kSnapshotChecksumExpected), 1u);
if (heap->map_space()) {
++expected_keys_count;
EXPECT_EQ(crash_keys.count(v8::CrashKeyId::kMapSpaceFirstPageAddress), 1u);
}
if (heap->code_range_base()) {
++expected_keys_count;
EXPECT_EQ(crash_keys.count(v8::CrashKeyId::kCodeRangeBaseAddress), 1u);