Reland "[heap] Skip ro-space from heap iterators, add CombinedHeapIterator."

Code relocation info is now always allocated in old-space. Previously,
relocation info allocated for placeholders and builtins (which get
replaced with trampolines in nosnap builds) would become unreachable.
Since read-only space is not GCed and ReadOnlyHeapIterator does not
check for reachability, ValidateSnapshot would fail when
ReadOnlyHeapIterator returned these unreachable objects.

Because trampoline relocation info gets replaced with the canonical
one, this only affects no-embedded-builtins nosnap builds, which gain
little from read-only relocation info anyway.

A new check has been added to the read-only serializer to verify that
every read-only object is reachable at mksnapshot time.

The CombinedHeapIterator iteration order was changed to iterate over
read-only space first, because that's how HeapIterator worked.

This is a reland of 3d1d8eae77

Original change's description:
> [heap] Skip ro-space from heap iterators, add CombinedHeapIterator.
>
> Read-only space sharing requires an iterator independent of heap. This
> also enables future removal of read-only space from heap.
>
> Bug: v8:7464
> Change-Id: Ia07a9369494ea2c547d12c01ffa1d7b8b6bbeabc
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1552795
> Commit-Queue: Maciej Goszczycki <goszczycki@google.com>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Dan Elphick <delphick@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#60819}

Bug: v8:7464
Change-Id: I49ae070955b77956962334a84f762ab29052d5ff
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1566513
Reviewed-by: Dan Elphick <delphick@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Maciej Goszczycki <goszczycki@google.com>
Cr-Commit-Position: refs/heads/master@{#61185}
Maciej Goszczycki, 2019-05-02 16:35:51 +01:00 (committed by Commit Bot)
parent e09fbbd1b8
commit 9c06209306
21 changed files with 335 additions and 78 deletions


@@ -2221,6 +2221,8 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/barrier.h",
"src/heap/code-stats.cc",
"src/heap/code-stats.h",
"src/heap/combined-heap.cc",
"src/heap/combined-heap.h",
"src/heap/concurrent-marking.cc",
"src/heap/concurrent-marking.h",
"src/heap/embedder-tracing.cc",


@@ -8,6 +8,7 @@ include_rules = [
"+src/compiler/code-assembler.h",
"+src/compiler/wasm-compiler.h",
"-src/heap",
"+src/heap/combined-heap.h",
"+src/heap/embedder-tracing.h",
"+src/heap/factory.h",
"+src/heap/factory-inl.h",


@@ -8342,12 +8342,18 @@ i::Address* Isolate::GetDataFromSnapshotOnce(size_t index) {
void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
i::Heap* heap = isolate->heap();
heap_statistics->total_heap_size_ = heap->CommittedMemory();
i::ReadOnlySpace* ro_space = heap->read_only_space();
heap_statistics->total_heap_size_ =
heap->CommittedMemory() + ro_space->CommittedMemory();
heap_statistics->total_heap_size_executable_ =
heap->CommittedMemoryExecutable();
heap_statistics->total_physical_size_ = heap->CommittedPhysicalMemory();
heap_statistics->total_available_size_ = heap->Available();
heap_statistics->used_heap_size_ = heap->SizeOfObjects();
heap_statistics->total_physical_size_ =
heap->CommittedPhysicalMemory() + ro_space->CommittedPhysicalMemory();
heap_statistics->total_available_size_ =
heap->Available() + ro_space->Available();
heap_statistics->used_heap_size_ =
heap->SizeOfObjects() + ro_space->SizeOfObjects();
heap_statistics->heap_size_limit_ = heap->MaxReserved();
// TODO(7424): There is no public API for the {WasmEngine} yet. Once such an
// API becomes available we should report the malloced memory separately. For

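For context, the embedder-visible effect of the hunk above, as a
minimal sketch (not part of the diff; GetHeapStatistics and
HeapStatistics are the public include/v8.h API, and isolate is an
assumed live v8::Isolate*):

// Read-only space no longer counts toward heap->CommittedMemory(), so
// GetHeapStatistics adds it back; embedders still see a combined total.
v8::HeapStatistics stats;
isolate->GetHeapStatistics(&stats);
size_t total = stats.total_heap_size();  // includes read-only space
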
src/heap/combined-heap.cc (new file)

@@ -0,0 +1,19 @@
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/combined-heap.h"
namespace v8 {
namespace internal {
HeapObject CombinedHeapIterator::next() {
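// The read-only heap is iterated to exhaustion first; afterwards every
// call falls through to the regular heap iterator.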
HeapObject object = ro_heap_iterator_.next();
if (!object.is_null()) {
return object;
}
return heap_iterator_.next();
}
} // namespace internal
} // namespace v8

src/heap/combined-heap.h (new file)

@@ -0,0 +1,36 @@
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_COMBINED_HEAP_H_
#define V8_HEAP_COMBINED_HEAP_H_
#include "src/heap/heap.h"
#include "src/heap/read-only-heap.h"
#include "src/objects.h"
namespace v8 {
namespace internal {
// This class allows iteration over the entire heap (Heap and ReadOnlyHeap). It
// uses the HeapIterator to iterate over non-read-only objects and accepts the
// same filtering option. (Interrupting iteration while filtering unreachable
// objects is still forbidden.)
class V8_EXPORT_PRIVATE CombinedHeapIterator final {
public:
CombinedHeapIterator(Heap* heap,
HeapIterator::HeapObjectsFiltering filtering =
HeapIterator::HeapObjectsFiltering::kNoFiltering)
: heap_iterator_(heap, filtering),
ro_heap_iterator_(heap->read_only_heap()) {}
HeapObject next();
private:
HeapIterator heap_iterator_;
ReadOnlyHeapIterator ro_heap_iterator_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_COMBINED_HEAP_H_

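Typical use, as a minimal sketch (not part of the diff; it mirrors the
loop shape used by the new cctest added later in this commit, and
Visit() stands in for whatever per-object work the caller does):

// Visits every object on both heaps. Read-only objects come first,
// matching the order the old HeapIterator produced.
CombinedHeapIterator iterator(heap);
for (HeapObject obj = iterator.next(); !obj.is_null();
     obj = iterator.next()) {
  Visit(obj);  // hypothetical callback
}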

@@ -83,10 +83,8 @@ Factory::CodeBuilder::CodeBuilder(Isolate* isolate, const CodeDesc& desc,
MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(bool failing_allocation) {
const auto factory = isolate_->factory();
// Allocate objects needed for code initialization.
Handle<ByteArray> reloc_info = factory->NewByteArray(
code_desc_.reloc_size, Builtins::IsBuiltinId(builtin_index_)
? AllocationType::kReadOnly
: AllocationType::kOld);
Handle<ByteArray> reloc_info =
factory->NewByteArray(code_desc_.reloc_size, AllocationType::kOld);
Handle<CodeDataContainer> data_container = factory->NewCodeDataContainer(0);
Handle<Code> code;
{


@@ -228,7 +228,7 @@ size_t Heap::Capacity() {
size_t Heap::OldGenerationCapacity() {
if (!HasBeenSetUp()) return 0;
PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
PagedSpaces spaces(this);
size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) {
@@ -240,7 +240,7 @@ size_t Heap::OldGenerationCapacity() {
size_t Heap::CommittedOldGenerationMemory() {
if (!HasBeenSetUp()) return 0;
PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
PagedSpaces spaces(this);
size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) {
@@ -432,14 +432,16 @@ void Heap::PrintShortHeapStatistics() {
code_lo_space_->SizeOfObjects() / KB,
code_lo_space_->Available() / KB,
code_lo_space_->CommittedMemory() / KB);
ReadOnlySpace* const ro_space = read_only_space_;
PrintIsolate(isolate_,
"All spaces, used: %6" PRIuS
" KB"
", available: %6" PRIuS
" KB"
", committed: %6" PRIuS "KB\n",
this->SizeOfObjects() / KB, this->Available() / KB,
this->CommittedMemory() / KB);
(this->SizeOfObjects() + ro_space->SizeOfObjects()) / KB,
(this->Available() + ro_space->Available()) / KB,
(this->CommittedMemory() + ro_space->CommittedMemory()) / KB);
PrintIsolate(isolate_,
"Unmapper buffering %zu chunks of committed: %6" PRIuS " KB\n",
memory_allocator()->unmapper()->NumberOfCommittedChunks(),
@@ -4309,7 +4311,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
}
size_t Heap::OldGenerationSizeOfObjects() {
PagedSpaces spaces(this, PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
PagedSpaces spaces(this);
size_t total = 0;
for (PagedSpace* space = spaces.next(); space != nullptr;
space = spaces.next()) {
@@ -5220,7 +5222,7 @@ PagedSpace* PagedSpaces::next() {
}
SpaceIterator::SpaceIterator(Heap* heap)
: heap_(heap), current_space_(FIRST_SPACE - 1) {}
: heap_(heap), current_space_(FIRST_MUTABLE_SPACE - 1) {}
SpaceIterator::~SpaceIterator() = default;


@@ -2207,13 +2207,7 @@ class VerifySmisVisitor : public RootVisitor {
// space in turn, and null when it is done.
class V8_EXPORT_PRIVATE PagedSpaces {
public:
enum class SpacesSpecifier { kSweepablePagedSpaces, kAllPagedSpaces };
explicit PagedSpaces(Heap* heap, SpacesSpecifier specifier =
SpacesSpecifier::kSweepablePagedSpaces)
: heap_(heap),
counter_(specifier == SpacesSpecifier::kAllPagedSpaces ? RO_SPACE
: OLD_SPACE) {}
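// RO_SPACE is no longer part of the iterated set, so iteration always
// starts at OLD_SPACE.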
explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
PagedSpace* next();
private:
@@ -2235,19 +2229,21 @@ class SpaceIterator : public Malloced {
int current_space_; // from enum AllocationSpace.
};
// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// these can only iterate over one space only.
// A HeapIterator provides iteration over the entire non-read-only heap. It
// aggregates the specific iterators for the different spaces as these can
// only iterate over one space.
//
// HeapIterator ensures there is no allocation during its lifetime
// (using an embedded DisallowHeapAllocation instance).
// HeapIterator ensures there is no allocation during its lifetime (using an
// embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As implementation of free
// nodes filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
// HeapIterator can skip free list nodes (that is, de-allocated heap objects
// that still remain in the heap). As implementation of free nodes filtering
// uses GC marks, it can't be used during MS/MC GC phases. Also, it is forbidden
// to interrupt iteration in this mode, as this will leave heap objects marked
// (and thus, unusable).
//
// See ReadOnlyHeapIterator if you need to iterate over read-only space objects,
// or CombinedHeapIterator if you need to iterate over both heaps.
class V8_EXPORT_PRIVATE HeapIterator {
public:
enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };

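A consequence of the comment above, sketched for illustration (the
pattern matches the profiler changes later in this commit; Visit() is a
hypothetical callback):

// With kFilterUnreachable, iteration must run to completion: filtering
// uses GC marks, and breaking out early would leave objects marked.
HeapIterator iterator(heap, HeapIterator::kFilterUnreachable);
for (HeapObject obj = iterator.next(); !obj.is_null();
     obj = iterator.next()) {
  Visit(obj);  // do not break out of this loop
}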

@@ -12,6 +12,7 @@
#include "src/compilation-cache.h"
#include "src/counters.h"
#include "src/globals.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/heap/mark-compact.h"
#include "src/isolate.h"
@@ -1079,14 +1080,10 @@ class ObjectStatsVisitor {
namespace {
void IterateHeap(Heap* heap, ObjectStatsVisitor* visitor) {
SpaceIterator space_it(heap);
HeapObject obj;
while (space_it.has_next()) {
std::unique_ptr<ObjectIterator> it(space_it.next()->GetObjectIterator());
ObjectIterator* obj_it = it.get();
for (obj = obj_it->Next(); !obj.is_null(); obj = obj_it->Next()) {
visitor->Visit(obj, obj->Size());
}
CombinedHeapIterator iterator(heap);
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
visitor->Visit(obj, obj->Size());
}
}


@@ -9,6 +9,8 @@
#include "src/base/once.h"
#include "src/heap/heap-inl.h"
#include "src/heap/spaces.h"
#include "src/objects-inl.h"
#include "src/objects/heap-object-inl.h"
#include "src/snapshot/read-only-deserializer.h"
namespace v8 {
@@ -77,5 +79,47 @@ bool ReadOnlyHeap::Contains(HeapObject object) {
return Page::FromAddress(object.ptr())->owner()->identity() == RO_SPACE;
}
ReadOnlyHeapIterator::ReadOnlyHeapIterator(ReadOnlyHeap* ro_heap)
: ReadOnlyHeapIterator(ro_heap->read_only_space()) {}
ReadOnlyHeapIterator::ReadOnlyHeapIterator(ReadOnlySpace* ro_space)
: ro_space_(ro_space),
current_page_(ro_space->first_page()),
current_addr_(current_page_->area_start()) {}
HeapObject ReadOnlyHeapIterator::next() {
if (current_page_ == nullptr) {
return HeapObject();
}
for (;;) {
DCHECK_LE(current_addr_, current_page_->area_end());
if (current_addr_ == current_page_->area_end()) {
// Progress to the next page.
current_page_ = current_page_->next_page();
if (current_page_ == nullptr) {
return HeapObject();
}
current_addr_ = current_page_->area_start();
}
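// current_addr_ has reached the space's top: skip the linear
// allocation area up to limit, as it contains no allocated objects.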
if (current_addr_ == ro_space_->top() &&
current_addr_ != ro_space_->limit()) {
current_addr_ = ro_space_->limit();
continue;
}
HeapObject object = HeapObject::FromAddress(current_addr_);
const int object_size = object->Size();
current_addr_ += object_size;
if (object->IsFiller()) {
continue;
}
DCHECK_OBJECT_SIZE(object_size);
return object;
}
}
} // namespace internal
} // namespace v8


@@ -13,8 +13,9 @@
namespace v8 {
namespace internal {
class ReadOnlySpace;
class Page;
class ReadOnlyDeserializer;
class ReadOnlySpace;
// This class transparently manages read-only space, roots and cache creation
// and destruction.
@@ -58,6 +59,20 @@ class ReadOnlyHeap final {
DISALLOW_COPY_AND_ASSIGN(ReadOnlyHeap);
};
// This class enables iterating over all read-only heap objects.
class V8_EXPORT_PRIVATE ReadOnlyHeapIterator {
public:
explicit ReadOnlyHeapIterator(ReadOnlyHeap* ro_heap);
explicit ReadOnlyHeapIterator(ReadOnlySpace* ro_space);
HeapObject next();
private:
ReadOnlySpace* const ro_space_;
Page* current_page_;
Address current_addr_;
};
} // namespace internal
} // namespace v8


@@ -17,6 +17,7 @@
#include "src/heap/heap-controller.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/remembered-set.h"
#include "src/heap/slot-set.h"
#include "src/heap/sweeper.h"
@@ -3397,14 +3398,12 @@ void ReadOnlySpace::ClearStringPaddingIfNeeded() {
if (is_string_padding_cleared_) return;
WritableScope writable_scope(this);
for (Page* page : *this) {
HeapObjectIterator iterator(page);
for (HeapObject o = iterator.Next(); !o.is_null(); o = iterator.Next()) {
if (o->IsSeqOneByteString()) {
SeqOneByteString::cast(o)->clear_padding();
} else if (o->IsSeqTwoByteString()) {
SeqTwoByteString::cast(o)->clear_padding();
}
ReadOnlyHeapIterator iterator(this);
for (HeapObject o = iterator.next(); !o.is_null(); o = iterator.next()) {
if (o->IsSeqOneByteString()) {
SeqOneByteString::cast(o)->clear_padding();
} else if (o->IsSeqTwoByteString()) {
SeqTwoByteString::cast(o)->clear_padding();
}
}
is_string_padding_cleared_ = true;


@@ -6,6 +6,7 @@
#include "src/api-inl.h"
#include "src/debug/debug.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap-inl.h"
#include "src/profiler/allocation-tracker.h"
#include "src/profiler/heap-snapshot-generator-inl.h"
@@ -172,7 +173,7 @@ void HeapProfiler::UpdateObjectSizeEvent(Address addr, int size) {
Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
HeapObject object;
HeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
CombinedHeapIterator iterator(heap(), HeapIterator::kFilterUnreachable);
// Make sure that object with the given id is still reachable.
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
@@ -182,6 +183,7 @@ Handle<HeapObject> HeapProfiler::FindHeapObjectById(SnapshotObjectId id) {
// Can't break -- kFilterUnreachable requires full heap traversal.
}
}
return !object.is_null() ? Handle<HeapObject>(object, isolate())
: Handle<HeapObject>();
}
@@ -203,7 +205,7 @@ void HeapProfiler::QueryObjects(Handle<Context> context,
// We should return accurate information about live objects, so we need to
// collect all garbage first.
heap()->CollectAllAvailableGarbage(GarbageCollectionReason::kHeapProfiler);
HeapIterator heap_iterator(heap());
CombinedHeapIterator heap_iterator(heap());
for (HeapObject heap_obj = heap_iterator.next(); !heap_obj.is_null();
heap_obj = heap_iterator.next()) {
if (!heap_obj->IsJSObject() || heap_obj->IsExternal(isolate())) continue;


@@ -11,6 +11,7 @@
#include "src/conversions.h"
#include "src/debug/debug.h"
#include "src/global-handles.h"
#include "src/heap/combined-heap.h"
#include "src/layout-descriptor.h"
#include "src/objects-body-descriptors.h"
#include "src/objects-inl.h"
@@ -394,7 +395,7 @@ void HeapObjectsMap::UpdateHeapObjectsMap() {
}
heap_->PreciseCollectAllGarbage(Heap::kNoGCFlags,
GarbageCollectionReason::kHeapProfiler);
HeapIterator iterator(heap_);
CombinedHeapIterator iterator(heap_);
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
FindOrAddEntry(obj->address(), obj->Size());
@@ -645,7 +646,7 @@ const char* V8HeapExplorer::GetSystemEntryName(HeapObject object) {
}
int V8HeapExplorer::EstimateObjectsCount() {
HeapIterator it(heap_, HeapIterator::kFilterUnreachable);
CombinedHeapIterator it(heap_, HeapIterator::kFilterUnreachable);
int objects_count = 0;
while (!it.next().is_null()) ++objects_count;
return objects_count;
@@ -1456,7 +1457,7 @@ bool V8HeapExplorer::IterateAndExtractReferences(
bool interrupted = false;
HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
CombinedHeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
// Heap iteration with filtering must be finished in any case.
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next(), progress_->ProgressStep()) {


@@ -40,6 +40,9 @@ void ReadOnlySerializer::SerializeObject(HeapObject obj) {
// Object has not yet been serialized. Serialize it here.
ObjectSerializer object_serializer(this, obj, &sink_);
object_serializer.Serialize();
#ifdef DEBUG
serialized_objects_.insert(obj);
#endif
}
void ReadOnlySerializer::SerializeReadOnlyRoots() {
@@ -60,6 +63,16 @@ void ReadOnlySerializer::FinalizeSerialization() {
FullObjectSlot(&undefined));
SerializeDeferredObjects();
Pad();
#ifdef DEBUG
// Check that every object on read-only heap is reachable (and was
// serialized).
ReadOnlyHeapIterator iterator(isolate()->heap()->read_only_heap());
for (HeapObject object = iterator.next(); !object.is_null();
object = iterator.next()) {
CHECK(serialized_objects_.count(object));
}
#endif
}
bool ReadOnlySerializer::MustBeDeferred(HeapObject object) {


@@ -5,6 +5,8 @@
#ifndef V8_SNAPSHOT_READ_ONLY_SERIALIZER_H_
#define V8_SNAPSHOT_READ_ONLY_SERIALIZER_H_
#include <unordered_set>
#include "src/snapshot/roots-serializer.h"
namespace v8 {
@@ -35,6 +37,9 @@ class V8_EXPORT_PRIVATE ReadOnlySerializer : public RootsSerializer {
void SerializeObject(HeapObject o) override;
bool MustBeDeferred(HeapObject object) override;
#ifdef DEBUG
std::unordered_set<HeapObject, Object::Hasher> serialized_objects_;
#endif
DISALLOW_COPY_AND_ASSIGN(ReadOnlySerializer);
};


@@ -125,6 +125,7 @@ v8_source_set("cctest_sources") {
"heap/test-heap.cc",
"heap/test-incremental-marking.cc",
"heap/test-invalidated-slots.cc",
"heap/test-iterators.cc",
"heap/test-lab.cc",
"heap/test-mark-compact.cc",
"heap/test-page-promotion.cc",


@@ -121,6 +121,7 @@ void CcTest::Run() {
}
i::Heap* CcTest::heap() { return i_isolate()->heap(); }
i::ReadOnlyHeap* CcTest::read_only_heap() { return heap()->read_only_heap(); }
void CcTest::CollectGarbage(i::AllocationSpace space) {
heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);


@@ -133,6 +133,7 @@ class CcTest {
}
static i::Heap* heap();
static i::ReadOnlyHeap* read_only_heap();
static void CollectGarbage(i::AllocationSpace space);
static void CollectAllGarbage(i::Isolate* isolate = nullptr);

test/cctest/heap/test-iterators.cc (new file)

@@ -0,0 +1,100 @@
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "include/v8.h"
#include "src/api-inl.h"
#include "src/heap/combined-heap.h"
#include "src/heap/heap.h"
#include "src/heap/read-only-heap.h"
#include "src/isolate.h"
#include "src/objects.h"
#include "src/objects/heap-object.h"
#include "src/roots-inl.h"
#include "test/cctest/cctest.h"
namespace v8 {
namespace internal {
namespace heap {
TEST(HeapIteratorNullPastEnd) {
HeapIterator iterator(CcTest::heap());
while (!iterator.next().is_null()) {
}
for (int i = 0; i < 20; i++) {
CHECK(iterator.next().is_null());
}
}
TEST(ReadOnlyHeapIteratorNullPastEnd) {
ReadOnlyHeapIterator iterator(CcTest::heap()->read_only_heap());
while (!iterator.next().is_null()) {
}
for (int i = 0; i < 20; i++) {
CHECK(iterator.next().is_null());
}
}
TEST(CombinedHeapIteratorNullPastEnd) {
CombinedHeapIterator iterator(CcTest::heap());
while (!iterator.next().is_null()) {
}
for (int i = 0; i < 20; i++) {
CHECK(iterator.next().is_null());
}
}
namespace {
// An arbitrary object guaranteed to live on the non-read-only heap.
Object CreateWritableObject() {
return *v8::Utils::OpenHandle(*v8::Object::New(CcTest::isolate()));
}
} // namespace
// TODO(v8:7464): Add more CHECKs once Contains doesn't include read-only space.
TEST(ReadOnlyHeapIterator) {
CcTest::InitializeVM();
HandleScope handle_scope(CcTest::i_isolate());
const Object sample_object = CreateWritableObject();
ReadOnlyHeapIterator iterator(CcTest::read_only_heap());
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
CHECK(ReadOnlyHeap::Contains(obj));
CHECK_NE(sample_object, obj);
}
}
TEST(HeapIterator) {
CcTest::InitializeVM();
HandleScope handle_scope(CcTest::i_isolate());
const Object sample_object = CreateWritableObject();
HeapIterator iterator(CcTest::heap());
bool seen_sample_object = false;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
CHECK(!ReadOnlyHeap::Contains(obj));
if (sample_object == obj) seen_sample_object = true;
}
CHECK(seen_sample_object);
}
TEST(CombinedHeapIterator) {
CcTest::InitializeVM();
HandleScope handle_scope(CcTest::i_isolate());
const Object sample_object = CreateWritableObject();
CombinedHeapIterator iterator(CcTest::heap());
bool seen_sample_object = false;
for (HeapObject obj = iterator.next(); !obj.is_null();
obj = iterator.next()) {
CHECK(CcTest::heap()->Contains(obj));
if (sample_object == obj) seen_sample_object = true;
}
CHECK(seen_sample_object);
}
} // namespace heap
} // namespace internal
} // namespace v8


@@ -9,6 +9,7 @@
#include "src/frames.h"
#include "src/heap/heap-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/spaces.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
@@ -65,6 +66,37 @@ static void DumpMaps(i::PagedSpace* space) {
#undef MUTABLE_ROOT_LIST_CASE
#undef RO_ROOT_LIST_CASE
static void DumpKnownObject(i::Heap* heap, const char* space_name,
i::HeapObject object) {
#define RO_ROOT_LIST_CASE(type, name, CamelName) \
if (root_name == NULL && object == roots.name()) { \
root_name = #CamelName; \
root_index = i::RootIndex::k##CamelName; \
}
#define ROOT_LIST_CASE(type, name, CamelName) \
if (root_name == NULL && object == heap->name()) { \
root_name = #CamelName; \
root_index = i::RootIndex::k##CamelName; \
}
i::ReadOnlyRoots roots(heap);
const char* root_name = nullptr;
i::RootIndex root_index = i::RootIndex::kFirstSmiRoot;
intptr_t root_ptr = object.ptr() & (i::Page::kPageSize - 1);
STRONG_READ_ONLY_ROOT_LIST(RO_ROOT_LIST_CASE)
MUTABLE_ROOT_LIST(ROOT_LIST_CASE)
if (root_name == nullptr) return;
if (!i::RootsTable::IsImmortalImmovable(root_index)) return;
i::PrintF(" (\"%s\", 0x%05" V8PRIxPTR "): \"%s\",\n", space_name, root_ptr,
root_name);
#undef ROOT_LIST_CASE
#undef RO_ROOT_LIST_CASE
}
static int DumpHeapConstants(const char* argv0) {
// Start up V8.
std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
@@ -78,7 +110,6 @@ static int DumpHeapConstants(const char* argv0) {
{
Isolate::Scope scope(isolate);
i::Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap();
i::ReadOnlyRoots roots(heap);
i::PrintF("%s", kHeader);
#define DUMP_TYPE(T) i::PrintF(" %d: \"%s\",\n", i::T, #T);
i::PrintF("INSTANCE_TYPES = {\n");
@@ -95,18 +126,16 @@ static int DumpHeapConstants(const char* argv0) {
// Dump the KNOWN_OBJECTS table to the console.
i::PrintF("\n# List of known V8 objects.\n");
#define RO_ROOT_LIST_CASE(type, name, CamelName) \
if (n == NULL && o == roots.name()) { \
n = #CamelName; \
i = i::RootIndex::k##CamelName; \
}
#define ROOT_LIST_CASE(type, name, CamelName) \
if (n == NULL && o == heap->name()) { \
n = #CamelName; \
i = i::RootIndex::k##CamelName; \
}
i::PagedSpaces spit(heap, i::PagedSpaces::SpacesSpecifier::kAllPagedSpaces);
i::PrintF("KNOWN_OBJECTS = {\n");
i::ReadOnlyHeapIterator ro_iterator(heap->read_only_heap());
for (i::HeapObject object = ro_iterator.next(); !object.is_null();
object = ro_iterator.next()) {
// Skip read-only heap maps, they will be reported elsewhere.
if (object->IsMap()) continue;
DumpKnownObject(heap, i::Heap::GetSpaceName(i::RO_SPACE), object);
}
i::PagedSpaces spit(heap);
for (i::PagedSpace* s = spit.next(); s != nullptr; s = spit.next()) {
i::HeapObjectIterator it(s);
// Code objects are generally platform-dependent.
@@ -114,21 +143,10 @@ static int DumpHeapConstants(const char* argv0) {
continue;
const char* sname = s->name();
for (i::HeapObject o = it.Next(); !o.is_null(); o = it.Next()) {
// Skip maps in RO_SPACE since they will be reported elsewhere.
if (o->IsMap()) continue;
const char* n = nullptr;
i::RootIndex i = i::RootIndex::kFirstSmiRoot;
intptr_t p = o.ptr() & (i::Page::kPageSize - 1);
STRONG_READ_ONLY_ROOT_LIST(RO_ROOT_LIST_CASE)
MUTABLE_ROOT_LIST(ROOT_LIST_CASE)
if (n == nullptr) continue;
if (!i::RootsTable::IsImmortalImmovable(i)) continue;
i::PrintF(" (\"%s\", 0x%05" V8PRIxPTR "): \"%s\",\n", sname, p, n);
DumpKnownObject(heap, sname, o);
}
}
i::PrintF("}\n");
#undef ROOT_LIST_CASE
#undef RO_ROOT_LIST_CASE
// Dump frame markers
i::PrintF("\n# List of known V8 Frame Markers.\n");