Revert recent de/serializer related changes

They are suspected of causing Canary crashes, a suspicion confirmed
through local reverts and repro attempts.

This reverts:
- "Reland "[serializer] Change deferring to use forward refs""
  commit 76d684cc82.
- "Reland "[serializer] Remove new space""
  commit 81231c23a9.
- "[serializer] Clean-up and de-macro ReadDataCase"
  commit c06d24b915.
- "[serializer] DCHECK deserializer allocations are initialized"
  commit fbc1f32d8e.

Bug: chromium:1128872
Change-Id: Id2bb3b8fac526fdf9ffb033222ae08cd423f8238
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2414220
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Adam Klein <adamk@chromium.org>
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69955}
Author: Jakob Kummerow, 2020-09-16 18:39:16 +02:00 (committed by Commit Bot)
Parent: 2000aea58a
Commit: 1aa9ab7384
19 changed files with 649 additions and 595 deletions

@@ -746,20 +746,20 @@ using WeakSlotCallbackWithHeap = bool (*)(Heap* heap, FullObjectSlot pointer);
// NOTE: SpaceIterator depends on AllocationSpace enumeration values being
// consecutive.
enum AllocationSpace {
RO_SPACE, // Immortal, immovable and immutable objects,
OLD_SPACE, // Old generation regular object space.
CODE_SPACE, // Old generation code object space, marked executable.
MAP_SPACE, // Old generation map object space, non-movable.
LO_SPACE, // Old generation large object space.
RO_SPACE, // Immortal, immovable and immutable objects,
NEW_SPACE, // Young generation semispaces for regular objects collected with
// Scavenger.
OLD_SPACE, // Old generation regular object space.
CODE_SPACE, // Old generation code object space, marked executable.
MAP_SPACE, // Old generation map object space, non-movable.
LO_SPACE, // Old generation large object space.
CODE_LO_SPACE, // Old generation large code object space.
NEW_LO_SPACE, // Young generation large object space.
NEW_SPACE, // Young generation semispaces for regular objects collected with
// Scavenger.
FIRST_SPACE = RO_SPACE,
LAST_SPACE = NEW_SPACE,
FIRST_MUTABLE_SPACE = OLD_SPACE,
LAST_MUTABLE_SPACE = NEW_SPACE,
LAST_SPACE = NEW_LO_SPACE,
FIRST_MUTABLE_SPACE = NEW_SPACE,
LAST_MUTABLE_SPACE = NEW_LO_SPACE,
FIRST_GROWABLE_PAGED_SPACE = OLD_SPACE,
LAST_GROWABLE_PAGED_SPACE = MAP_SPACE
};
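
For context, a minimal self-contained sketch (not V8 code; the space list is reduced) of why the NOTE above requires the enumerator values to stay consecutive: SpaceIterator-style loops walk the spaces by integer increment between the FIRST_/LAST_ bounds.

#include <cstdio>

// Reduced mirror of the restored ordering above.
enum AllocationSpace {
  RO_SPACE,
  NEW_SPACE,
  OLD_SPACE,
  CODE_SPACE,
  MAP_SPACE,
  LO_SPACE,
  CODE_LO_SPACE,
  NEW_LO_SPACE,
  FIRST_SPACE = RO_SPACE,
  LAST_SPACE = NEW_LO_SPACE,
  FIRST_MUTABLE_SPACE = NEW_SPACE,
  LAST_MUTABLE_SPACE = NEW_LO_SPACE
};

int main() {
  // Stepping by integer increment only visits every mutable space because
  // the enumerator values are consecutive.
  for (int s = FIRST_MUTABLE_SPACE; s <= LAST_MUTABLE_SPACE; ++s) {
    std::printf("visiting space %d\n", s);
  }
}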

@@ -2794,7 +2794,7 @@ Handle<JSGlobalProxy> Factory::NewUninitializedJSGlobalProxy(int size) {
map->set_may_have_interesting_symbols(true);
LOG(isolate(), MapDetails(*map));
Handle<JSGlobalProxy> proxy = Handle<JSGlobalProxy>::cast(
NewJSObjectFromMap(map, AllocationType::kOld));
NewJSObjectFromMap(map, AllocationType::kYoung));
// Create identity hash early in case there is any JS collection containing
// a global proxy key and needs to be rehashed after deserialization.
proxy->GetOrCreateIdentityHash(isolate());

@@ -1877,8 +1877,6 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
for (int space = FIRST_SPACE;
space < static_cast<int>(SnapshotSpace::kNumberOfHeapSpaces);
space++) {
DCHECK_NE(space, NEW_SPACE);
DCHECK_NE(space, NEW_LO_SPACE);
Reservation* reservation = &reservations[space];
DCHECK_LE(1, reservation->size());
if (reservation->at(0).size == 0) {
@@ -1939,7 +1937,10 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
allocation =
AllocateRaw(size, type, AllocationOrigin::kRuntime, align);
#else
if (space == RO_SPACE) {
if (space == NEW_SPACE) {
allocation = new_space()->AllocateRaw(
size, AllocationAlignment::kWordAligned);
} else if (space == RO_SPACE) {
allocation = read_only_space()->AllocateRaw(
size, AllocationAlignment::kWordAligned);
} else {
@@ -1971,11 +1972,16 @@ bool Heap::ReserveSpace(Reservation* reservations, std::vector<Address>* maps) {
V8::FatalProcessOutOfMemory(
isolate(), "insufficient memory to create an Isolate");
}
if (counter > 1) {
CollectAllGarbage(kReduceMemoryFootprintMask,
GarbageCollectionReason::kDeserializer);
if (space == NEW_SPACE) {
CollectGarbage(NEW_SPACE, GarbageCollectionReason::kDeserializer);
} else {
CollectAllGarbage(kNoGCFlags, GarbageCollectionReason::kDeserializer);
if (counter > 1) {
CollectAllGarbage(kReduceMemoryFootprintMask,
GarbageCollectionReason::kDeserializer);
} else {
CollectAllGarbage(kNoGCFlags,
GarbageCollectionReason::kDeserializer);
}
}
gc_performed = true;
break; // Abort for-loop over spaces and retry.
@@ -5881,9 +5887,9 @@ void Heap::ClearRecordedSlotRange(Address start, Address end) {
}
PagedSpace* PagedSpaceIterator::Next() {
int space = counter_++;
switch (space) {
switch (counter_++) {
case RO_SPACE:
case NEW_SPACE:
UNREACHABLE();
case OLD_SPACE:
return heap_->old_space();
@@ -5892,7 +5898,6 @@ PagedSpace* PagedSpaceIterator::Next() {
case MAP_SPACE:
return heap_->map_space();
default:
DCHECK_GT(space, LAST_GROWABLE_PAGED_SPACE);
return nullptr;
}
}

@@ -2504,8 +2504,7 @@ class VerifySmisVisitor : public RootVisitor {
// is done.
class V8_EXPORT_PRIVATE PagedSpaceIterator {
public:
explicit PagedSpaceIterator(Heap* heap)
: heap_(heap), counter_(FIRST_GROWABLE_PAGED_SPACE) {}
explicit PagedSpaceIterator(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
PagedSpace* Next();
private:

@@ -55,19 +55,6 @@ HeapObjectReference HeapObjectReference::Weak(Object object) {
return HeapObjectReference(object.ptr() | kWeakHeapObjectMask);
}
// static
HeapObjectReference HeapObjectReference::From(Object object,
HeapObjectReferenceType type) {
DCHECK(!object.IsSmi());
DCHECK(!HasWeakHeapObjectTag(object));
switch (type) {
case HeapObjectReferenceType::STRONG:
return HeapObjectReference::Strong(object);
case HeapObjectReferenceType::WEAK:
return HeapObjectReference::Weak(object);
}
}
// static
HeapObjectReference HeapObjectReference::ClearedValue(const Isolate* isolate) {
// Construct cleared weak ref value.

@@ -47,9 +47,6 @@ class HeapObjectReference : public MaybeObject {
V8_INLINE static HeapObjectReference Weak(Object object);
V8_INLINE static HeapObjectReference From(Object object,
HeapObjectReferenceType type);
V8_INLINE static HeapObjectReference ClearedValue(const Isolate* isolate);
template <typename THeapObjectSlot>

@@ -64,22 +64,6 @@ Address DeserializerAllocator::AllocateRaw(SnapshotSpace space, int size) {
}
Address DeserializerAllocator::Allocate(SnapshotSpace space, int size) {
#ifdef DEBUG
if (previous_allocation_start_ != kNullAddress) {
// Make sure that the previous allocation is initialized sufficiently to
// be iterated over by the GC.
Address object_address = previous_allocation_start_;
Address previous_allocation_end =
previous_allocation_start_ + previous_allocation_size_;
while (object_address != previous_allocation_end) {
int object_size = HeapObject::FromAddress(object_address).Size();
DCHECK_GT(object_size, 0);
DCHECK_LE(object_address + object_size, previous_allocation_end);
object_address += object_size;
}
}
#endif
Address address;
HeapObject obj;
// TODO(steveblackburn) Note that the third party heap allocates objects
@@ -96,9 +80,9 @@ Address DeserializerAllocator::Allocate(SnapshotSpace space, int size) {
// abstracting away the details of the memory allocator from this code.
// At each allocation, the regular allocator performs allocation,
// and a fixed-sized table is used to track and fix all back references.
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
address = AllocateRaw(space, size);
} else if (next_alignment_ != kWordAligned) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return AllocateRaw(space, size);
if (next_alignment_ != kWordAligned) {
const int reserved = size + Heap::GetMaximumFillToAlign(next_alignment_);
address = AllocateRaw(space, reserved);
obj = HeapObject::FromAddress(address);
@@ -111,16 +95,10 @@ Address DeserializerAllocator::Allocate(SnapshotSpace space, int size) {
obj = Heap::AlignWithFiller(roots_, obj, size, reserved, next_alignment_);
address = obj.address();
next_alignment_ = kWordAligned;
return address;
} else {
address = AllocateRaw(space, size);
return AllocateRaw(space, size);
}
#ifdef DEBUG
previous_allocation_start_ = address;
previous_allocation_size_ = size;
#endif
return address;
}
void DeserializerAllocator::MoveToNextChunk(SnapshotSpace space) {
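
For context, a minimal standalone sketch (hypothetical constants, not V8 code) of the over-reserve-then-align pattern in the Allocate() hunk above: reserve the object size plus the maximum fill, bump the start address up to the required alignment, and the object is guaranteed to still fit.

#include <cassert>
#include <cstdint>

constexpr uintptr_t kAlignment = 16;  // assumed alignment for the sketch

uintptr_t AlignUp(uintptr_t addr) {
  return (addr + kAlignment - 1) & ~(kAlignment - 1);
}

int main() {
  const uintptr_t size = 64;                           // object size
  const uintptr_t raw = 0x1008;                        // unaligned start
  const uintptr_t reserved = size + (kAlignment - 1);  // size + max fill
  const uintptr_t aligned = AlignUp(raw);
  assert(aligned % kAlignment == 0);
  assert(aligned + size <= raw + reserved);  // object fits after filling
  return 0;
}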

@@ -37,6 +37,20 @@ class DeserializerAllocator final {
next_alignment_ = static_cast<AllocationAlignment>(alignment);
}
void set_next_reference_is_weak(bool next_reference_is_weak) {
next_reference_is_weak_ = next_reference_is_weak;
}
bool GetAndClearNextReferenceIsWeak() {
bool saved = next_reference_is_weak_;
next_reference_is_weak_ = false;
return saved;
}
#ifdef DEBUG
bool next_reference_is_weak() const { return next_reference_is_weak_; }
#endif
HeapObject GetMap(uint32_t index);
HeapObject GetLargeObject(uint32_t index);
HeapObject GetObject(SnapshotSpace space, uint32_t chunk_index,
@@ -73,14 +87,9 @@ class DeserializerAllocator final {
uint32_t current_chunk_[kNumberOfPreallocatedSpaces];
Address high_water_[kNumberOfPreallocatedSpaces];
#ifdef DEBUG
// Record the previous object allocated for DCHECKs.
Address previous_allocation_start_ = kNullAddress;
int previous_allocation_size_ = 0;
#endif
// The alignment of the next allocation.
AllocationAlignment next_alignment_ = kWordAligned;
bool next_reference_is_weak_ = false;
// All required maps are pre-allocated during reservation. {next_map_index_}
// stores the index of the next map to return from allocation.

@@ -7,7 +7,6 @@
#include "src/base/logging.h"
#include "src/codegen/assembler-inl.h"
#include "src/common/external-pointer.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
@@ -36,14 +35,14 @@ namespace internal {
template <typename TSlot>
TSlot Deserializer::Write(TSlot dest, MaybeObject value) {
DCHECK(!next_reference_is_weak_);
DCHECK(!allocator()->next_reference_is_weak());
dest.store(value);
return dest + 1;
}
template <typename TSlot>
TSlot Deserializer::WriteAddress(TSlot dest, Address value) {
DCHECK(!next_reference_is_weak_);
DCHECK(!allocator()->next_reference_is_weak());
memcpy(dest.ToVoidPtr(), &value, kSystemPointerSize);
STATIC_ASSERT(IsAligned(kSystemPointerSize, TSlot::kSlotDataSize));
return dest + (kSystemPointerSize / TSlot::kSlotDataSize);
@@ -52,7 +51,7 @@ TSlot Deserializer::WriteAddress(TSlot dest, Address value) {
template <typename TSlot>
TSlot Deserializer::WriteExternalPointer(TSlot dest, Address value) {
value = EncodeExternalPointer(isolate(), value);
DCHECK(!next_reference_is_weak_);
DCHECK(!allocator()->next_reference_is_weak());
memcpy(dest.ToVoidPtr(), &value, kExternalPointerSize);
STATIC_ASSERT(IsAligned(kExternalPointerSize, TSlot::kSlotDataSize));
return dest + (kExternalPointerSize / TSlot::kSlotDataSize);
@@ -103,7 +102,10 @@ Deserializer::~Deserializer() {
// process. It is also called on the body of each function.
void Deserializer::VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) {
ReadData(FullMaybeObjectSlot(start), FullMaybeObjectSlot(end), kNullAddress);
// We are reading to a location outside of JS heap, so pass kNew to avoid
// triggering write barriers.
ReadData(FullMaybeObjectSlot(start), FullMaybeObjectSlot(end),
SnapshotSpace::kNew, kNullAddress);
}
void Deserializer::Synchronize(VisitorSynchronization::SyncTag tag) {
@@ -115,8 +117,36 @@ void Deserializer::DeserializeDeferredObjects() {
DisallowHeapAllocation no_gc;
for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
SnapshotSpace space = NewObject::Decode(code);
ReadObject(space);
switch (code) {
case kAlignmentPrefix:
case kAlignmentPrefix + 1:
case kAlignmentPrefix + 2: {
int alignment = code - (SerializerDeserializer::kAlignmentPrefix - 1);
allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
break;
}
default: {
SnapshotSpace space = NewObject::Decode(code);
HeapObject object = GetBackReferencedObject(space);
int size = source_.GetInt() << kTaggedSizeLog2;
Address obj_address = object.address();
// Object's map is already initialized, now read the rest.
MaybeObjectSlot start(obj_address + kTaggedSize);
MaybeObjectSlot end(obj_address + size);
bool filled = ReadData(start, end, space, obj_address);
CHECK(filled);
DCHECK(CanBeDeferred(object));
PostProcessNewObject(object, space);
}
}
}
// When the deserialization of maps are deferred, they will be created
// as filler maps, and we postpone the post processing until the maps
// are also deserialized.
for (const auto& pair : fillers_to_post_process_) {
DCHECK(!pair.first.IsFiller());
PostProcessNewObject(pair.first, pair.second);
}
}
@@ -162,7 +192,11 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
DisallowHeapAllocation no_gc;
if ((FLAG_rehash_snapshot && can_rehash_) || deserializing_user_code()) {
if (obj.IsString()) {
if (obj.IsFiller()) {
DCHECK_EQ(fillers_to_post_process_.find(obj),
fillers_to_post_process_.end());
fillers_to_post_process_.insert({obj, space});
} else if (obj.IsString()) {
// Uninitialize hash field as we need to recompute the hash.
String string = String::cast(obj);
string.set_hash_field(String::kEmptyHashField);
@@ -294,14 +328,6 @@ HeapObject Deserializer::PostProcessNewObject(HeapObject obj,
return obj;
}
HeapObjectReferenceType Deserializer::GetAndResetNextReferenceType() {
HeapObjectReferenceType type = next_reference_is_weak_
? HeapObjectReferenceType::WEAK
: HeapObjectReferenceType::STRONG;
next_reference_is_weak_ = false;
return type;
}
HeapObject Deserializer::GetBackReferencedObject(SnapshotSpace space) {
HeapObject obj;
switch (space) {
@@ -343,8 +369,12 @@ HeapObject Deserializer::GetBackReferencedObject(SnapshotSpace space) {
HeapObject Deserializer::ReadObject() {
MaybeObject object;
ReadData(FullMaybeObjectSlot(&object), FullMaybeObjectSlot(&object + 1),
kNullAddress);
// We are reading to a location outside of JS heap, so pass kNew to avoid
// triggering write barriers.
bool filled =
ReadData(FullMaybeObjectSlot(&object), FullMaybeObjectSlot(&object + 1),
SnapshotSpace::kNew, kNullAddress);
CHECK(filled);
return object.GetHeapObjectAssumeStrong();
}
@@ -384,8 +414,10 @@ HeapObject Deserializer::ReadObject(SnapshotSpace space) {
MaybeObjectSlot limit(address + size);
current.store(MaybeObject::FromObject(map));
ReadData(current + 1, limit, address);
obj = PostProcessNewObject(obj, space);
if (ReadData(current + 1, limit, space, address)) {
// Only post process if object content has not been deferred.
obj = PostProcessNewObject(obj, space);
}
#ifdef DEBUG
if (obj.IsCode()) {
@@ -414,18 +446,21 @@ HeapObject Deserializer::ReadMetaMap() {
current.store(MaybeObject(current.address() + kHeapObjectTag));
// Set the instance-type manually, to allow backrefs to read it.
Map::unchecked_cast(obj).set_instance_type(MAP_TYPE);
ReadData(current + 1, limit, address);
// The meta map's contents cannot be deferred.
CHECK(ReadData(current + 1, limit, space, address));
return obj;
}
void Deserializer::ReadCodeObjectBody(Address code_object_address) {
void Deserializer::ReadCodeObjectBody(SnapshotSpace space,
Address code_object_address) {
// At this point the code object is already allocated, its map field is
// initialized and its raw data fields and code stream are also read.
// Now we read the rest of code header's fields.
MaybeObjectSlot current(code_object_address + HeapObject::kHeaderSize);
MaybeObjectSlot limit(code_object_address + Code::kDataStart);
ReadData(current, limit, code_object_address);
bool filled = ReadData(current, limit, space, code_object_address);
CHECK(filled);
// Now iterate RelocInfos the same way it was done by the serializer and
// deserialize respective data into RelocInfos.
@@ -537,6 +572,69 @@ constexpr byte VerifyBytecodeCount(byte bytecode) {
} // namespace
template <typename TSlot>
bool Deserializer::ReadData(TSlot current, TSlot limit,
SnapshotSpace source_space,
Address current_object_address) {
// Write barrier support costs around 1% in startup time. In fact there
// are no new space objects in current boot snapshots, so it's not needed,
// but that may change.
bool write_barrier_needed =
(current_object_address != kNullAddress &&
source_space != SnapshotSpace::kNew &&
source_space != SnapshotSpace::kCode && !FLAG_disable_write_barriers);
while (current < limit) {
byte data = source_.Get();
switch (data) {
#define READ_DATA_CASE_BODY(bytecode) \
current = ReadDataCase<TSlot, bytecode>(current, current_object_address, \
data, write_barrier_needed); \
break;
// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with fall-through cases
// and one body.
#define ALL_SPACES(bytecode) \
case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kNew): \
case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kOld): \
case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kCode): \
case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kMap): \
case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kLargeObject): \
case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kReadOnlyHeap): \
READ_DATA_CASE_BODY(bytecode)
// Deserialize a new object and write a pointer to it to the current
// object.
ALL_SPACES(kNewObject)
// Find a recently deserialized object using its offset from the current
// allocation point and write a pointer to it to the current object.
ALL_SPACES(kBackref)
#undef ALL_SPACES
// Find an object in the roots array and write a pointer to it to the
// current object.
case kRootArray:
READ_DATA_CASE_BODY(kRootArray)
// Find an object in the startup object cache and write a pointer to it to
// the current object.
case kStartupObjectCache:
READ_DATA_CASE_BODY(kStartupObjectCache)
// Find an object in the read-only object cache and write a pointer to it
// to the current object.
case kReadOnlyObjectCache:
READ_DATA_CASE_BODY(kReadOnlyObjectCache)
// Find an object in the attached references and write a pointer to it to
// the current object.
case kAttachedReference:
READ_DATA_CASE_BODY(kAttachedReference)
// Deserialize a new meta-map and write a pointer to it to the current
// object.
case kNewMetaMap:
READ_DATA_CASE_BODY(kNewMetaMap)
#undef READ_DATA_CASE_BODY
// Helper macro (and its implementation detail) for specifying a range of cases.
// Use as "case CASE_RANGE(byte_code, num_bytecodes):"
#define CASE_RANGE(byte_code, num_bytecodes) \
@@ -550,304 +648,228 @@ constexpr byte VerifyBytecodeCount(byte bytecode) {
#define CASE_R16(byte_code) CASE_R8(byte_code) : case CASE_R8(byte_code + 8)
#define CASE_R32(byte_code) CASE_R16(byte_code) : case CASE_R16(byte_code + 16)
// This generates a case range for all the spaces.
#define CASE_RANGE_ALL_SPACES(bytecode) \
SpaceEncoder<bytecode>::Encode(SnapshotSpace::kOld) \
: case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kCode) \
: case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kMap) \
: case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kLargeObject) \
: case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kReadOnlyHeap)
template <typename TSlot>
void Deserializer::ReadData(TSlot current, TSlot limit,
Address current_object_address) {
while (current < limit) {
byte data = source_.Get();
current = ReadSingleBytecodeData(data, current, current_object_address);
}
CHECK_EQ(limit, current);
}
template <typename TSlot>
TSlot Deserializer::ReadSingleBytecodeData(byte data, TSlot current,
Address current_object_address) {
switch (data) {
// Deserialize a new object and write a pointer to it to the current
// object.
case CASE_RANGE_ALL_SPACES(kNewObject): {
SnapshotSpace space = NewObject::Decode(data);
// Save the reference type before recursing down into reading the object.
HeapObjectReferenceType ref_type = GetAndResetNextReferenceType();
HeapObject heap_object = ReadObject(space);
DCHECK(!Heap::InYoungGeneration(heap_object));
return Write(current, HeapObjectReference::From(heap_object, ref_type));
}
// Find a recently deserialized object using its offset from the current
// allocation point and write a pointer to it to the current object.
case CASE_RANGE_ALL_SPACES(kBackref): {
SnapshotSpace space = BackRef::Decode(data);
HeapObject heap_object = GetBackReferencedObject(space);
DCHECK(!Heap::InYoungGeneration(heap_object));
return Write(current, HeapObjectReference::From(
heap_object, GetAndResetNextReferenceType()));
}
// Find an object in the roots array and write a pointer to it to the
// current object.
case kRootArray: {
int id = source_.GetInt();
RootIndex root_index = static_cast<RootIndex>(id);
HeapObject heap_object = HeapObject::cast(isolate()->root(root_index));
DCHECK(!Heap::InYoungGeneration(heap_object));
hot_objects_.Add(heap_object);
return Write(current, HeapObjectReference::From(
heap_object, GetAndResetNextReferenceType()));
}
// Find an object in the startup object cache and write a pointer to it to
// the current object.
case kStartupObjectCache: {
int cache_index = source_.GetInt();
HeapObject heap_object =
HeapObject::cast(isolate()->startup_object_cache()->at(cache_index));
DCHECK(!Heap::InYoungGeneration(heap_object));
return Write(current, HeapObjectReference::From(
heap_object, GetAndResetNextReferenceType()));
}
// Find an object in the read-only object cache and write a pointer to it
// to the current object.
case kReadOnlyObjectCache: {
int cache_index = source_.GetInt();
HeapObject heap_object = HeapObject::cast(
isolate()->read_only_heap()->cached_read_only_object(cache_index));
DCHECK(!Heap::InYoungGeneration(heap_object));
return Write(current, HeapObjectReference::From(
heap_object, GetAndResetNextReferenceType()));
}
// Deserialize a new meta-map and write a pointer to it to the current
// object.
case kNewMetaMap: {
HeapObject heap_object = ReadMetaMap();
DCHECK(!Heap::InYoungGeneration(heap_object));
return Write(current, HeapObjectReference::Strong(heap_object));
}
// Find an external reference and write a pointer to it to the current
// object.
case kSandboxedExternalReference:
case kExternalReference: {
Address address = ReadExternalReferenceCase();
if (V8_HEAP_SANDBOX_BOOL && data == kSandboxedExternalReference) {
return WriteExternalPointer(current, address);
} else {
DCHECK(!V8_HEAP_SANDBOX_BOOL);
return WriteAddress(current, address);
// Find an external reference and write a pointer to it to the current
// object.
case kSandboxedExternalReference:
case kExternalReference: {
Address address = ReadExternalReferenceCase();
if (V8_HEAP_SANDBOX_BOOL && data == kSandboxedExternalReference) {
current = WriteExternalPointer(current, address);
} else {
DCHECK(!V8_HEAP_SANDBOX_BOOL);
current = WriteAddress(current, address);
}
break;
}
}
case kInternalReference:
case kOffHeapTarget:
// These bytecodes are expected only during RelocInfo iteration.
UNREACHABLE();
// Find an object in the attached references and write a pointer to it to
// the current object.
case kAttachedReference: {
int index = source_.GetInt();
HeapObjectReference ref = HeapObjectReference::From(
*attached_objects_[index], GetAndResetNextReferenceType());
// This is the only case where we might encounter new space objects, so
// maybe emit a write barrier before returning the updated slot.
TSlot ret = Write(current, ref);
if (Heap::InYoungGeneration(ref)) {
HeapObject current_object =
HeapObject::FromAddress(current_object_address);
GenerationalBarrier(current_object, MaybeObjectSlot(current.address()),
ref);
case kInternalReference:
case kOffHeapTarget: {
// These bytecodes are expected only during RelocInfo iteration.
UNREACHABLE();
break;
}
return ret;
}
case kNop:
return current;
case kNop:
break;
// NextChunk should only be seen during object allocation.
case kNextChunk:
UNREACHABLE();
case kRegisterPendingForwardRef: {
DCHECK_NE(current_object_address, kNullAddress);
HeapObject obj = HeapObject::FromAddress(current_object_address);
unresolved_forward_refs_.emplace_back(
obj, current.address() - current_object_address);
num_unresolved_forward_refs_++;
return current + 1;
}
case kResolvePendingForwardRef: {
// Pending forward refs can only be resolved after the heap object's map
// field is deserialized; currently they only appear immediately after
// the map field.
DCHECK_EQ(current.address(), current_object_address + kTaggedSize);
HeapObject obj = HeapObject::FromAddress(current_object_address);
int index = source_.GetInt();
auto& forward_ref = unresolved_forward_refs_[index];
TaggedField<HeapObject>::store(forward_ref.first, forward_ref.second,
obj);
num_unresolved_forward_refs_--;
if (num_unresolved_forward_refs_ == 0) {
// If there are no more pending fields, clear the entire pending field
// vector.
unresolved_forward_refs_.clear();
} else {
// Otherwise, at least clear the pending field.
forward_ref.first = HeapObject();
// NextChunk should only be seen during object allocation.
case kNextChunk: {
UNREACHABLE();
break;
}
return current;
}
case kSynchronize:
// If we get here then that indicates that you have a mismatch between
// the number of GC roots when serializing and deserializing.
UNREACHABLE();
// Deserialize raw data of variable length.
case kVariableRawData: {
int size_in_bytes = source_.GetInt();
DCHECK(IsAligned(size_in_bytes, kTaggedSize));
source_.CopyRaw(current.ToVoidPtr(), size_in_bytes);
return TSlot(current.address() + size_in_bytes);
}
// Deserialize raw code directly into the body of the code object.
case kVariableRawCode: {
// VariableRawCode can only occur right after the heap object header.
DCHECK_EQ(current.address(), current_object_address + kTaggedSize);
int size_in_bytes = source_.GetInt();
DCHECK(IsAligned(size_in_bytes, kTaggedSize));
source_.CopyRaw(
reinterpret_cast<void*>(current_object_address + Code::kDataStart),
size_in_bytes);
// Deserialize tagged fields in the code object header and reloc infos.
ReadCodeObjectBody(current_object_address);
// Set current to the code object end.
return TSlot(current.address() + Code::kDataStart -
HeapObject::kHeaderSize + size_in_bytes);
}
case kVariableRepeat: {
int repeats = VariableRepeatCount::Decode(source_.GetInt());
return ReadRepeatedObject(current, repeats);
}
case kOffHeapBackingStore: {
AlwaysAllocateScope scope(isolate()->heap());
int byte_length = source_.GetInt();
std::unique_ptr<BackingStore> backing_store =
BackingStore::Allocate(isolate(), byte_length, SharedFlag::kNotShared,
InitializedFlag::kUninitialized);
CHECK_NOT_NULL(backing_store);
source_.CopyRaw(backing_store->buffer_start(), byte_length);
backing_stores_.push_back(std::move(backing_store));
return current;
}
case kSandboxedApiReference:
case kApiReference: {
uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
Address address;
if (isolate()->api_external_references()) {
DCHECK_WITH_MSG(reference_id < num_api_references_,
"too few external references provided through the API");
address = static_cast<Address>(
isolate()->api_external_references()[reference_id]);
} else {
address = reinterpret_cast<Address>(NoExternalReferencesCallback);
case kDeferred: {
// Deferred can only occur right after the heap object's map field.
DCHECK_EQ(current.address(), current_object_address + kTaggedSize);
HeapObject obj = HeapObject::FromAddress(current_object_address);
// If the deferred object is a map, its instance type may be used
// during deserialization. Initialize it with a temporary value.
if (obj.IsMap()) Map::cast(obj).set_instance_type(FILLER_TYPE);
current = limit;
return false;
}
if (V8_HEAP_SANDBOX_BOOL && data == kSandboxedApiReference) {
return WriteExternalPointer(current, address);
} else {
DCHECK(!V8_HEAP_SANDBOX_BOOL);
return WriteAddress(current, address);
case kRegisterPendingForwardRef: {
HeapObject obj = HeapObject::FromAddress(current_object_address);
unresolved_forward_refs_.emplace_back(
obj, current.address() - current_object_address);
num_unresolved_forward_refs_++;
current++;
break;
}
}
case kClearedWeakReference:
return Write(current, HeapObjectReference::ClearedValue(isolate()));
case kResolvePendingForwardRef: {
// Pending forward refs can only be resolved after the heap object's map
// field is deserialized; currently they only appear immediately after
// the map field.
DCHECK_EQ(current.address(), current_object_address + kTaggedSize);
HeapObject obj = HeapObject::FromAddress(current_object_address);
int index = source_.GetInt();
auto& forward_ref = unresolved_forward_refs_[index];
TaggedField<HeapObject>::store(forward_ref.first, forward_ref.second,
obj);
num_unresolved_forward_refs_--;
if (num_unresolved_forward_refs_ == 0) {
// If there are no more pending fields, clear the entire pending field
// vector.
unresolved_forward_refs_.clear();
} else {
// Otherwise, at least clear the pending field.
forward_ref.first = HeapObject();
}
break;
}
case kWeakPrefix: {
// We shouldn't have two weak prefixes in a row.
DCHECK(!next_reference_is_weak_);
// We shouldn't have weak refs without a current object.
DCHECK_NE(current_object_address, kNullAddress);
next_reference_is_weak_ = true;
return current;
}
case kSynchronize:
// If we get here then that indicates that you have a mismatch between
// the number of GC roots when serializing and deserializing.
UNREACHABLE();
case CASE_RANGE(kAlignmentPrefix, 3): {
int alignment = data - (SerializerDeserializer::kAlignmentPrefix - 1);
allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
return current;
}
// Deserialize raw data of variable length.
case kVariableRawData: {
int size_in_bytes = source_.GetInt();
DCHECK(IsAligned(size_in_bytes, kTaggedSize));
source_.CopyRaw(current.ToVoidPtr(), size_in_bytes);
current = TSlot(current.address() + size_in_bytes);
break;
}
case CASE_RANGE(kRootArrayConstants, 32): {
// First kRootArrayConstantsCount roots are guaranteed to be in
// the old space.
STATIC_ASSERT(static_cast<int>(RootIndex::kFirstImmortalImmovableRoot) ==
0);
STATIC_ASSERT(kRootArrayConstantsCount <=
static_cast<int>(RootIndex::kLastImmortalImmovableRoot));
// Deserialize raw code directly into the body of the code object.
case kVariableRawCode: {
// VariableRawCode can only occur right after the heap object header.
DCHECK_EQ(current.address(), current_object_address + kTaggedSize);
int size_in_bytes = source_.GetInt();
DCHECK(IsAligned(size_in_bytes, kTaggedSize));
source_.CopyRaw(
reinterpret_cast<void*>(current_object_address + Code::kDataStart),
size_in_bytes);
// Deserialize tagged fields in the code object header and reloc infos.
ReadCodeObjectBody(source_space, current_object_address);
// Set current to the code object end.
current = TSlot(current.address() + Code::kDataStart -
HeapObject::kHeaderSize + size_in_bytes);
CHECK_EQ(current, limit);
break;
}
RootIndex root_index = RootArrayConstant::Decode(data);
MaybeObject object = MaybeObject(ReadOnlyRoots(isolate()).at(root_index));
DCHECK(!Heap::InYoungGeneration(object));
return Write(current, object);
}
case kVariableRepeat: {
int repeats = VariableRepeatCount::Decode(source_.GetInt());
current = ReadRepeatedObject(current, repeats);
break;
}
case CASE_RANGE(kHotObject, 8): {
int index = HotObject::Decode(data);
HeapObject hot_object = hot_objects_.Get(index);
DCHECK(!Heap::InYoungGeneration(hot_object));
return Write(current, HeapObjectReference::From(
hot_object, GetAndResetNextReferenceType()));
}
case kOffHeapBackingStore: {
AlwaysAllocateScope scope(isolate()->heap());
int byte_length = source_.GetInt();
std::unique_ptr<BackingStore> backing_store = BackingStore::Allocate(
isolate(), byte_length, SharedFlag::kNotShared,
InitializedFlag::kUninitialized);
CHECK_NOT_NULL(backing_store);
source_.CopyRaw(backing_store->buffer_start(), byte_length);
backing_stores_.push_back(std::move(backing_store));
break;
}
case CASE_RANGE(kFixedRawData, 32): {
// Deserialize raw data of fixed length from 1 to 32 times kTaggedSize.
int size_in_tagged = FixedRawDataWithSize::Decode(data);
source_.CopyRaw(current.ToVoidPtr(), size_in_tagged * kTaggedSize);
case kSandboxedApiReference:
case kApiReference: {
uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
Address address;
if (isolate()->api_external_references()) {
DCHECK_WITH_MSG(
reference_id < num_api_references_,
"too few external references provided through the API");
address = static_cast<Address>(
isolate()->api_external_references()[reference_id]);
} else {
address = reinterpret_cast<Address>(NoExternalReferencesCallback);
}
if (V8_HEAP_SANDBOX_BOOL && data == kSandboxedApiReference) {
current = WriteExternalPointer(current, address);
} else {
DCHECK(!V8_HEAP_SANDBOX_BOOL);
current = WriteAddress(current, address);
}
break;
}
int size_in_bytes = size_in_tagged * kTaggedSize;
int size_in_slots = size_in_bytes / TSlot::kSlotDataSize;
DCHECK(IsAligned(size_in_bytes, TSlot::kSlotDataSize));
return current + size_in_slots;
}
case kClearedWeakReference:
current = Write(current, HeapObjectReference::ClearedValue(isolate()));
break;
case CASE_RANGE(kFixedRepeat, 16): {
int repeats = FixedRepeatWithCount::Decode(data);
return ReadRepeatedObject(current, repeats);
}
case kWeakPrefix:
DCHECK(!allocator()->next_reference_is_weak());
allocator()->set_next_reference_is_weak(true);
break;
case CASE_RANGE(kAlignmentPrefix, 3): {
int alignment = data - (SerializerDeserializer::kAlignmentPrefix - 1);
allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
break;
}
case CASE_RANGE(kRootArrayConstants, 32): {
// First kRootArrayConstantsCount roots are guaranteed to be in
// the old space.
STATIC_ASSERT(
static_cast<int>(RootIndex::kFirstImmortalImmovableRoot) == 0);
STATIC_ASSERT(kRootArrayConstantsCount <=
static_cast<int>(RootIndex::kLastImmortalImmovableRoot));
RootIndex root_index = RootArrayConstant::Decode(data);
MaybeObject object =
MaybeObject(ReadOnlyRoots(isolate()).at(root_index));
DCHECK(!Heap::InYoungGeneration(object));
current = Write(current, object);
break;
}
case CASE_RANGE(kHotObject, 8): {
int index = HotObject::Decode(data);
Object hot_object = hot_objects_.Get(index);
MaybeObject hot_maybe_object = MaybeObject::FromObject(hot_object);
if (allocator()->GetAndClearNextReferenceIsWeak()) {
hot_maybe_object = MaybeObject::MakeWeak(hot_maybe_object);
}
// Don't update current pointer here as it may be needed for write
// barrier.
Write(current, hot_maybe_object);
if (write_barrier_needed && Heap::InYoungGeneration(hot_object)) {
HeapObject current_object =
HeapObject::FromAddress(current_object_address);
GenerationalBarrier(current_object,
MaybeObjectSlot(current.address()),
hot_maybe_object);
}
++current;
break;
}
case CASE_RANGE(kFixedRawData, 32): {
// Deserialize raw data of fixed length from 1 to 32 times kTaggedSize.
int size_in_tagged = FixedRawDataWithSize::Decode(data);
source_.CopyRaw(current.ToVoidPtr(), size_in_tagged * kTaggedSize);
int size_in_bytes = size_in_tagged * kTaggedSize;
int size_in_slots = size_in_bytes / TSlot::kSlotDataSize;
DCHECK(IsAligned(size_in_bytes, TSlot::kSlotDataSize));
current += size_in_slots;
break;
}
case CASE_RANGE(kFixedRepeat, 16): {
int repeats = FixedRepeatWithCount::Decode(data);
current = ReadRepeatedObject(current, repeats);
break;
}
#ifdef DEBUG
#define UNUSED_CASE(byte_code) \
case byte_code: \
UNREACHABLE();
UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
#endif
#undef UNUSED_CASE
}
// The above switch, including UNUSED_SERIALIZER_BYTE_CODES, covers all
// possible bytecodes; but, clang doesn't realize this, so we have an explicit
// UNREACHABLE here too.
UNREACHABLE();
}
#undef CASE_RANGE_ALL_SPACES
#undef CASE_RANGE
#undef CASE_R32
#undef CASE_R16
@@ -856,11 +878,76 @@ TSlot Deserializer::ReadSingleBytecodeData(byte data, TSlot current,
#undef CASE_R3
#undef CASE_R2
#undef CASE_R1
}
}
CHECK_EQ(limit, current);
return true;
}
Address Deserializer::ReadExternalReferenceCase() {
uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
return isolate()->external_reference_table()->address(reference_id);
}
template <typename TSlot, SerializerDeserializer::Bytecode bytecode>
TSlot Deserializer::ReadDataCase(TSlot current, Address current_object_address,
byte data, bool write_barrier_needed) {
bool emit_write_barrier = false;
HeapObject heap_object;
HeapObjectReferenceType reference_type =
allocator()->GetAndClearNextReferenceIsWeak()
? HeapObjectReferenceType::WEAK
: HeapObjectReferenceType::STRONG;
if (bytecode == kNewObject) {
SnapshotSpace space = SpaceEncoder<bytecode>::Decode(data);
heap_object = ReadObject(space);
emit_write_barrier = (space == SnapshotSpace::kNew);
} else if (bytecode == kBackref) {
SnapshotSpace space = SpaceEncoder<bytecode>::Decode(data);
heap_object = GetBackReferencedObject(space);
emit_write_barrier = (space == SnapshotSpace::kNew);
} else if (bytecode == kNewMetaMap) {
heap_object = ReadMetaMap();
emit_write_barrier = false;
} else if (bytecode == kRootArray) {
int id = source_.GetInt();
RootIndex root_index = static_cast<RootIndex>(id);
heap_object = HeapObject::cast(isolate()->root(root_index));
emit_write_barrier = Heap::InYoungGeneration(heap_object);
hot_objects_.Add(heap_object);
} else if (bytecode == kReadOnlyObjectCache) {
int cache_index = source_.GetInt();
heap_object = HeapObject::cast(
isolate()->read_only_heap()->cached_read_only_object(cache_index));
DCHECK(!Heap::InYoungGeneration(heap_object));
emit_write_barrier = false;
} else if (bytecode == kStartupObjectCache) {
int cache_index = source_.GetInt();
heap_object =
HeapObject::cast(isolate()->startup_object_cache()->at(cache_index));
emit_write_barrier = Heap::InYoungGeneration(heap_object);
} else {
DCHECK_EQ(bytecode, kAttachedReference);
int index = source_.GetInt();
heap_object = *attached_objects_[index];
emit_write_barrier = Heap::InYoungGeneration(heap_object);
}
HeapObjectReference heap_object_ref =
reference_type == HeapObjectReferenceType::STRONG
? HeapObjectReference::Strong(heap_object)
: HeapObjectReference::Weak(heap_object);
// Don't update current pointer here as it may be needed for write barrier.
Write(current, heap_object_ref);
if (emit_write_barrier && write_barrier_needed) {
DCHECK_IMPLIES(FLAG_disable_write_barriers, !write_barrier_needed);
HeapObject host_object = HeapObject::FromAddress(current_object_address);
SLOW_DCHECK(isolate()->heap()->Contains(host_object));
GenerationalBarrier(host_object, MaybeObjectSlot(current.address()),
heap_object_ref);
}
return current + 1;
}
} // namespace internal
} // namespace v8
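
For context, a minimal toy model (hypothetical names, not the V8 API) of the pending-forward-reference mechanism the kRegisterPendingForwardRef / kResolvePendingForwardRef bytecodes above implement: a slot that should point at a not-yet-deserialized object is registered now, and patched by index once the target exists.

#include <cstdint>
#include <vector>

struct Slot { uint64_t value = 0; };

std::vector<Slot*> unresolved_forward_refs;

int RegisterPendingForwardRef(Slot* slot) {
  unresolved_forward_refs.push_back(slot);
  return static_cast<int>(unresolved_forward_refs.size()) - 1;
}

void ResolvePendingForwardRef(int id, uint64_t target) {
  unresolved_forward_refs[id]->value = target;  // patch the registered slot
}

int main() {
  Slot field_in_a;                 // A's field references B, not built yet
  int id = RegisterPendingForwardRef(&field_in_a);
  uint64_t address_of_b = 0x1000;  // B becomes available later
  ResolvePendingForwardRef(id, address_of_b);
  return field_in_a.value == 0x1000 ? 0 : 1;
}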

@@ -8,7 +8,6 @@
#include <utility>
#include <vector>
#include "src/common/globals.h"
#include "src/objects/allocation-site.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/backing-store.h"
@@ -126,38 +125,34 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
template <typename TSlot>
inline TSlot Write(TSlot dest, MaybeObject value);
template <typename TSlot>
inline TSlot Write(TSlot dest, HeapObject value,
HeapObjectReferenceType type);
template <typename TSlot>
inline TSlot WriteAddress(TSlot dest, Address value);
template <typename TSlot>
inline TSlot WriteExternalPointer(TSlot dest, Address value);
// Fills in some heap data in an area from start to end (non-inclusive). The
// object_address is the address of the object we are writing into, or nullptr
// if we are not writing into an object, i.e. if we are writing a series of
// tagged values that are not on the heap.
// Fills in some heap data in an area from start to end (non-inclusive). The
// space id is used for the write barrier. The object_address is the address
// of the object we are writing into, or nullptr if we are not writing into an
// object, i.e. if we are writing a series of tagged values that are not on
// the heap. Return false if the object content has been deferred.
template <typename TSlot>
void ReadData(TSlot start, TSlot end, Address object_address);
bool ReadData(TSlot start, TSlot end, SnapshotSpace space,
Address object_address);
// Helper for ReadData which reads the given bytecode and fills in some heap
// data into the given slot. May fill in zero or multiple slots, so it returns
// the next unfilled slot.
template <typename TSlot>
TSlot ReadSingleBytecodeData(byte data, TSlot current,
Address object_address);
// A helper function for ReadData, templatized on the bytecode for efficiency.
// Returns the new value of {current}.
template <typename TSlot, Bytecode bytecode>
inline TSlot ReadDataCase(TSlot current, Address current_object_address,
byte data, bool write_barrier_needed);
// A helper function for ReadData for reading external references.
inline Address ReadExternalReferenceCase();
HeapObject ReadObject(SnapshotSpace space_number);
HeapObject ReadMetaMap();
void ReadCodeObjectBody(Address code_object_address);
HeapObjectReferenceType GetAndResetNextReferenceType();
void ReadCodeObjectBody(SnapshotSpace space_number,
Address code_object_address);
protected:
HeapObject ReadObject();
@@ -207,11 +202,14 @@ class V8_EXPORT_PRIVATE Deserializer : public SerializerDeserializer {
DeserializerAllocator allocator_;
const bool deserializing_user_code_;
bool next_reference_is_weak_ = false;
// TODO(6593): generalize rehashing, and remove this flag.
bool can_rehash_;
std::vector<HeapObject> to_rehash_;
// Store the objects whose maps are deferred and thus initialized as filler
// maps during deserialization, so that they can be processed later when the
// maps become available.
std::unordered_map<HeapObject, SnapshotSpace, Object::Hasher>
fillers_to_post_process_;
#ifdef DEBUG
uint32_t num_api_references_;

@@ -86,9 +86,12 @@ bool ReadOnlySerializer::MustBeDeferred(HeapObject object) {
// be saved without problems.
return false;
}
// Defer objects with special alignment requirements until the filler roots
// are serialized.
return HeapObject::RequiredAlignment(object.map()) != kWordAligned;
// Just defer everything except for Map objects until all required roots are
// serialized. Some objects may have special alignment requirements that may
// not be fulfilled during deserialization until the first few root objects
// are serialized. But we must serialize Map objects, since the deserializer
// checks that these root objects are indeed Maps.
return !object.IsMap();
}
bool ReadOnlySerializer::SerializeUsingReadOnlyObjectCache(

@@ -17,6 +17,7 @@ namespace internal {
// and CODE_LO_SPACE) are not supported.
enum class SnapshotSpace : byte {
kReadOnlyHeap = RO_SPACE,
kNew = NEW_SPACE,
kOld = OLD_SPACE,
kCode = CODE_SPACE,
kMap = MAP_SPACE,

@@ -30,9 +30,14 @@ void SerializerDeserializer::Iterate(Isolate* isolate, RootVisitor* visitor) {
}
bool SerializerDeserializer::CanBeDeferred(HeapObject o) {
// Maps cannot be deferred as objects are expected to have a valid map
// immediately.
return !o.IsMap();
// ArrayBuffer instances are serialized by first re-assigning an index
// to the backing store field, then serializing the object, and then
// storing the actual backing store address again (and the same for the
// ArrayBufferExtension). If serialization of the object itself is deferred,
// the real backing store address is written into the snapshot, which cannot
// be processed when deserializing.
return !o.IsString() && !o.IsScript() && !o.IsJSTypedArray() &&
!o.IsJSArrayBuffer();
}
void SerializerDeserializer::RestoreExternalReferenceRedirectors(
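
For context, a minimal toy illustration (hypothetical names, not the V8 API) of the swap-serialize-restore pattern the CanBeDeferred() comment above describes: the raw backing-store pointer is replaced by a registry index while the object body is written, then restored. Serializing the object later (deferred) would run after the restore and emit the raw address instead.

#include <cassert>
#include <cstdint>
#include <vector>

struct Buffer {
  uintptr_t backing_store;  // raw pointer bits; must not reach the snapshot
};

std::vector<uintptr_t> registry;  // indices stand in for pointers
std::vector<uintptr_t> snapshot;  // what gets written out

uintptr_t Register(uintptr_t p) {
  registry.push_back(p);
  return registry.size() - 1;
}

void Emit(const Buffer& b) { snapshot.push_back(b.backing_store); }

void SerializeBuffer(Buffer* b) {
  uintptr_t real = b->backing_store;
  b->backing_store = Register(real);  // swap in the index
  Emit(*b);                           // snapshot sees only the index
  b->backing_store = real;            // restore the live object
}

int main() {
  Buffer b{0xdeadbeef};
  SerializeBuffer(&b);
  assert(snapshot[0] == 0);  // the index, not the raw address
  assert(b.backing_store == 0xdeadbeef);
  return 0;
}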

@@ -71,9 +71,9 @@ class SerializerDeserializer : public RootVisitor {
// clang-format off
#define UNUSED_SERIALIZER_BYTE_CODES(V) \
V(0x05) V(0x06) V(0x07) V(0x0d) V(0x0e) V(0x0f) \
/* Free range 0x2a..0x2f */ \
V(0x2a) V(0x2b) V(0x2c) V(0x2d) V(0x2e) V(0x2f) \
V(0x06) V(0x07) V(0x0e) V(0x0f) \
/* Free range 0x2b..0x2f */ \
V(0x2b) V(0x2c) V(0x2d) V(0x2e) V(0x2f) \
/* Free range 0x30..0x3f */ \
V(0x30) V(0x31) V(0x32) V(0x33) V(0x34) V(0x35) V(0x36) V(0x37) \
V(0x38) V(0x39) V(0x3a) V(0x3b) V(0x3c) V(0x3d) V(0x3e) V(0x3f) \
@@ -102,7 +102,7 @@ class SerializerDeserializer : public RootVisitor {
// The static assert below will trigger when the number of preallocated spaces
// changed. If that happens, update the kNewObject and kBackref bytecode
// ranges in the comments below.
STATIC_ASSERT(5 == kNumberOfSpaces);
STATIC_ASSERT(6 == kNumberOfSpaces);
// First 32 root array items.
static const int kRootArrayConstantsCount = 0x20;
@@ -124,9 +124,9 @@ class SerializerDeserializer : public RootVisitor {
// ---------- byte code range 0x00..0x0f ----------
//
// 0x00..0x04 Allocate new object, in specified space.
// 0x00..0x05 Allocate new object, in specified space.
kNewObject = 0x00,
// 0x08..0x0c Reference to previous object from specified space.
// 0x08..0x0d Reference to previous object from specified space.
kBackref = 0x08,
//
@@ -145,14 +145,16 @@ class SerializerDeserializer : public RootVisitor {
kNop,
// Move to next reserved chunk.
kNextChunk,
// 3 alignment prefixes 0x16..0x18
kAlignmentPrefix = 0x16,
// Deferring object content.
kDeferred,
// 3 alignment prefixes 0x17..0x19
kAlignmentPrefix = 0x17,
// A tag emitted at strategic points in the snapshot to delineate sections.
// If the deserializer does not find these at the expected moments then it
// is an indication that the snapshot and the VM do not fit together.
// Examine the build process for architecture, version or configuration
// mismatches.
kSynchronize = 0x19,
kSynchronize = 0x1a,
// Repeats of variable length.
kVariableRepeat,
// Used for embedder-allocated backing stores for TypedArrays.

@@ -71,9 +71,6 @@ void Serializer::OutputStatistics(const char* name) {
}
void Serializer::SerializeDeferredObjects() {
if (FLAG_trace_serializer) {
PrintF("Serializing deferred objects\n");
}
while (!deferred_objects_.empty()) {
HeapObject obj = deferred_objects_.back();
deferred_objects_.pop_back();
@@ -168,13 +165,13 @@ bool Serializer::SerializeBackReference(HeapObject obj) {
}
bool Serializer::SerializePendingObject(HeapObject obj) {
PendingObjectReference pending_obj =
forward_refs_per_pending_object_.find(obj);
if (pending_obj == forward_refs_per_pending_object_.end()) {
auto it = forward_refs_per_pending_object_.find(obj);
if (it == forward_refs_per_pending_object_.end()) {
return false;
}
PutPendingForwardReferenceTo(pending_obj);
int forward_ref_id = PutPendingForwardReference();
it->second.push_back(forward_ref_id);
return true;
}
@@ -274,13 +271,10 @@ void Serializer::PutRepeat(int repeat_count) {
}
}
void Serializer::PutPendingForwardReferenceTo(
PendingObjectReference reference) {
int Serializer::PutPendingForwardReference() {
sink_.Put(kRegisterPendingForwardRef, "RegisterPendingForwardRef");
unresolved_forward_refs_++;
// Register the current slot with the pending object.
int forward_ref_id = next_forward_ref_id_++;
reference->second.push_back(forward_ref_id);
return next_forward_ref_id_++;
}
void Serializer::ResolvePendingForwardReference(int forward_reference_id) {
@@ -301,11 +295,9 @@ Serializer::PendingObjectReference Serializer::RegisterObjectIsPending(
auto forward_refs_entry_insertion =
forward_refs_per_pending_object_.emplace(obj, std::vector<int>());
// If the above emplace didn't actually add the object, then the object must
// already have been registered pending by deferring. It might not be in the
// deferred objects queue though, since it may be the very object we just
// popped off that queue, so just check that it can be deferred.
DCHECK_IMPLIES(!forward_refs_entry_insertion.second, CanBeDeferred(obj));
// Make sure the above emplace actually added the object, rather than
// overwriting an existing entry.
DCHECK(forward_refs_entry_insertion.second);
// return the iterator into the map as the reference.
return forward_refs_entry_insertion.first;
@@ -589,26 +581,6 @@ class UnlinkWeakNextScope {
};
void Serializer::ObjectSerializer::Serialize() {
RecursionScope recursion(serializer_);
// Defer objects as "pending" if they cannot be serialized now, or if we
// exceed a certain recursion depth. Some objects cannot be deferred
if ((recursion.ExceedsMaximum() && CanBeDeferred(object_)) ||
serializer_->MustBeDeferred(object_)) {
DCHECK(CanBeDeferred(object_));
if (FLAG_trace_serializer) {
PrintF(" Deferring heap object: ");
object_.ShortPrint();
PrintF("\n");
}
// Deferred objects are considered "pending".
PendingObjectReference pending_obj =
serializer_->RegisterObjectIsPending(object_);
serializer_->PutPendingForwardReferenceTo(pending_obj);
serializer_->QueueDeferredObject(object_);
return;
}
if (FLAG_trace_serializer) {
PrintF(" Encoding heap object: ");
object_.ShortPrint();
@@ -662,7 +634,7 @@ SnapshotSpace GetSnapshotSpace(HeapObject object) {
} else if (object.IsMap()) {
return SnapshotSpace::kMap;
} else {
return SnapshotSpace::kOld; // avoid new/young distinction in TPH
return SnapshotSpace::kNew; // avoid new/young distinction in TPH
}
} else if (ReadOnlyHeap::Contains(object)) {
return SnapshotSpace::kReadOnlyHeap;
@@ -697,27 +669,43 @@ void Serializer::ObjectSerializer::SerializeObject() {
CHECK_EQ(0, bytes_processed_so_far_);
bytes_processed_so_far_ = kTaggedSize;
RecursionScope recursion(serializer_);
// Objects that are immediately post processed during deserialization
// cannot be deferred, since post processing requires the object content.
if ((recursion.ExceedsMaximum() && CanBeDeferred(object_)) ||
serializer_->MustBeDeferred(object_)) {
serializer_->QueueDeferredObject(object_);
sink_->Put(kDeferred, "Deferring object content");
return;
}
SerializeContent(map, size);
}
void Serializer::ObjectSerializer::SerializeDeferred() {
if (FLAG_trace_serializer) {
PrintF(" Encoding deferred heap object: ");
object_.ShortPrint();
PrintF("\n");
}
int size = object_.Size();
Map map = object_.map();
SerializerReference back_reference =
serializer_->reference_map()->LookupReference(
reinterpret_cast<void*>(object_.ptr()));
DCHECK(back_reference.is_back_reference());
if (back_reference.is_valid()) {
if (FLAG_trace_serializer) {
PrintF(" Deferred heap object ");
object_.ShortPrint();
PrintF(" was already serialized\n");
}
return;
}
// Serialize the rest of the object.
CHECK_EQ(0, bytes_processed_so_far_);
bytes_processed_so_far_ = kTaggedSize;
if (FLAG_trace_serializer) {
PrintF(" Encoding deferred heap object\n");
}
Serialize();
serializer_->PutAlignmentPrefix(object_);
sink_->Put(NewObject::Encode(back_reference.space()), "deferred object");
serializer_->PutBackReference(object_, back_reference);
sink_->PutInt(size >> kTaggedSizeLog2, "deferred object size");
SerializeContent(map, size);
}
void Serializer::ObjectSerializer::SerializeContent(Map map, int size) {

@@ -176,9 +176,6 @@ class Serializer : public SerializerDeserializer {
Isolate* isolate() const { return isolate_; }
protected:
using PendingObjectReference =
std::map<HeapObject, std::vector<int>>::iterator;
class ObjectSerializer;
class RecursionScope {
public:
@@ -215,7 +212,7 @@ class Serializer : public SerializerDeserializer {
// Emit a marker noting that this slot is a forward reference to an
// object which has not yet been serialized.
void PutPendingForwardReferenceTo(PendingObjectReference reference);
int PutPendingForwardReference();
// Resolve the given previously registered forward reference to the current
// object.
void ResolvePendingForwardReference(int obj);
@@ -254,11 +251,14 @@ class Serializer : public SerializerDeserializer {
Code CopyCode(Code code);
void QueueDeferredObject(HeapObject obj) {
DCHECK(!reference_map_.LookupReference(reinterpret_cast<void*>(obj.ptr()))
.is_valid());
DCHECK(reference_map_.LookupReference(reinterpret_cast<void*>(obj.ptr()))
.is_back_reference());
deferred_objects_.push_back(obj);
}
using PendingObjectReference =
std::map<HeapObject, std::vector<int>>::iterator;
// Register that the given object shouldn't be immediately serialized, but
// will be serialized later and any references to it should be pending forward
// references.

@@ -5370,7 +5370,6 @@ TEST(NewSpaceAllocationCounter) {
TEST(OldSpaceAllocationCounter) {
ManualGCScope manual_gc_scope;
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Isolate* isolate = CcTest::i_isolate();

@@ -28312,9 +28312,7 @@ TEST(TriggerDelayedMainThreadMetricsEvent) {
CHECK_EQ(recorder->count_, 0); // Unchanged.
CHECK_EQ(recorder->time_in_us_, -1); // Unchanged.
v8::base::OS::Sleep(v8::base::TimeDelta::FromMilliseconds(1100));
while (v8::platform::PumpMessageLoop(v8::internal::V8::GetCurrentPlatform(),
iso)) {
}
v8::platform::PumpMessageLoop(v8::internal::V8::GetCurrentPlatform(), iso);
CHECK_EQ(recorder->count_, 1); // Increased.
CHECK_GT(recorder->time_in_us_, 100);
}
@@ -28325,9 +28323,7 @@ TEST(TriggerDelayedMainThreadMetricsEvent) {
// invalid.
i_iso->metrics_recorder()->DelayMainThreadEvent(event, context_id);
v8::base::OS::Sleep(v8::base::TimeDelta::FromMilliseconds(1100));
while (v8::platform::PumpMessageLoop(v8::internal::V8::GetCurrentPlatform(),
iso)) {
}
v8::platform::PumpMessageLoop(v8::internal::V8::GetCurrentPlatform(), iso);
CHECK_EQ(recorder->count_, 1); // Unchanged.
}

@@ -209,93 +209,93 @@ KNOWN_MAPS = {
("read_only_space", 0x0213d): (67, "NullMap"),
("read_only_space", 0x02165): (162, "DescriptorArrayMap"),
("read_only_space", 0x0218d): (156, "WeakFixedArrayMap"),
("read_only_space", 0x021cd): (96, "EnumCacheMap"),
("read_only_space", 0x02201): (117, "FixedArrayMap"),
("read_only_space", 0x0224d): (8, "OneByteInternalizedStringMap"),
("read_only_space", 0x02299): (167, "FreeSpaceMap"),
("read_only_space", 0x022c1): (166, "OnePointerFillerMap"),
("read_only_space", 0x022e9): (166, "TwoPointerFillerMap"),
("read_only_space", 0x02311): (67, "UninitializedMap"),
("read_only_space", 0x02389): (67, "UndefinedMap"),
("read_only_space", 0x023cd): (66, "HeapNumberMap"),
("read_only_space", 0x02401): (67, "TheHoleMap"),
("read_only_space", 0x02461): (67, "BooleanMap"),
("read_only_space", 0x02505): (131, "ByteArrayMap"),
("read_only_space", 0x0252d): (117, "FixedCOWArrayMap"),
("read_only_space", 0x02555): (118, "HashTableMap"),
("read_only_space", 0x0257d): (64, "SymbolMap"),
("read_only_space", 0x025a5): (40, "OneByteStringMap"),
("read_only_space", 0x025cd): (129, "ScopeInfoMap"),
("read_only_space", 0x025f5): (175, "SharedFunctionInfoMap"),
("read_only_space", 0x0261d): (159, "CodeMap"),
("read_only_space", 0x02645): (158, "CellMap"),
("read_only_space", 0x0266d): (174, "GlobalPropertyCellMap"),
("read_only_space", 0x02695): (70, "ForeignMap"),
("read_only_space", 0x026bd): (157, "TransitionArrayMap"),
("read_only_space", 0x026e5): (45, "ThinOneByteStringMap"),
("read_only_space", 0x0270d): (165, "FeedbackVectorMap"),
("read_only_space", 0x0273d): (67, "ArgumentsMarkerMap"),
("read_only_space", 0x0279d): (67, "ExceptionMap"),
("read_only_space", 0x027f9): (67, "TerminationExceptionMap"),
("read_only_space", 0x02861): (67, "OptimizedOutMap"),
("read_only_space", 0x028c1): (67, "StaleRegisterMap"),
("read_only_space", 0x02921): (130, "ScriptContextTableMap"),
("read_only_space", 0x02949): (127, "ClosureFeedbackCellArrayMap"),
("read_only_space", 0x02971): (164, "FeedbackMetadataArrayMap"),
("read_only_space", 0x02999): (117, "ArrayListMap"),
("read_only_space", 0x029c1): (65, "BigIntMap"),
("read_only_space", 0x029e9): (128, "ObjectBoilerplateDescriptionMap"),
("read_only_space", 0x02a11): (132, "BytecodeArrayMap"),
("read_only_space", 0x02a39): (160, "CodeDataContainerMap"),
("read_only_space", 0x02a61): (161, "CoverageInfoMap"),
("read_only_space", 0x02a89): (133, "FixedDoubleArrayMap"),
("read_only_space", 0x02ab1): (120, "GlobalDictionaryMap"),
("read_only_space", 0x02ad9): (97, "ManyClosuresCellMap"),
("read_only_space", 0x02b01): (117, "ModuleInfoMap"),
("read_only_space", 0x02b29): (121, "NameDictionaryMap"),
("read_only_space", 0x02b51): (97, "NoClosuresCellMap"),
("read_only_space", 0x02b79): (122, "NumberDictionaryMap"),
("read_only_space", 0x02ba1): (97, "OneClosureCellMap"),
("read_only_space", 0x02bc9): (123, "OrderedHashMapMap"),
("read_only_space", 0x02bf1): (124, "OrderedHashSetMap"),
("read_only_space", 0x02c19): (125, "OrderedNameDictionaryMap"),
("read_only_space", 0x02c41): (172, "PreparseDataMap"),
("read_only_space", 0x02c69): (173, "PropertyArrayMap"),
("read_only_space", 0x02c91): (93, "SideEffectCallHandlerInfoMap"),
("read_only_space", 0x02cb9): (93, "SideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x02ce1): (93, "NextCallSideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x02d09): (126, "SimpleNumberDictionaryMap"),
("read_only_space", 0x02d31): (149, "SmallOrderedHashMapMap"),
("read_only_space", 0x02d59): (150, "SmallOrderedHashSetMap"),
("read_only_space", 0x02d81): (151, "SmallOrderedNameDictionaryMap"),
("read_only_space", 0x02da9): (152, "SourceTextModuleMap"),
("read_only_space", 0x02dd1): (153, "SyntheticModuleMap"),
("read_only_space", 0x02df9): (155, "UncompiledDataWithoutPreparseDataMap"),
("read_only_space", 0x02e21): (154, "UncompiledDataWithPreparseDataMap"),
("read_only_space", 0x02e49): (71, "WasmTypeInfoMap"),
("read_only_space", 0x02e71): (181, "WeakArrayListMap"),
("read_only_space", 0x02e99): (119, "EphemeronHashTableMap"),
("read_only_space", 0x02ec1): (163, "EmbedderDataArrayMap"),
("read_only_space", 0x02ee9): (182, "WeakCellMap"),
("read_only_space", 0x02f11): (32, "StringMap"),
("read_only_space", 0x02f39): (41, "ConsOneByteStringMap"),
("read_only_space", 0x02f61): (33, "ConsStringMap"),
("read_only_space", 0x02f89): (37, "ThinStringMap"),
("read_only_space", 0x02fb1): (35, "SlicedStringMap"),
("read_only_space", 0x02fd9): (43, "SlicedOneByteStringMap"),
("read_only_space", 0x03001): (34, "ExternalStringMap"),
("read_only_space", 0x03029): (42, "ExternalOneByteStringMap"),
("read_only_space", 0x03051): (50, "UncachedExternalStringMap"),
("read_only_space", 0x03079): (0, "InternalizedStringMap"),
("read_only_space", 0x030a1): (2, "ExternalInternalizedStringMap"),
("read_only_space", 0x030c9): (10, "ExternalOneByteInternalizedStringMap"),
("read_only_space", 0x030f1): (18, "UncachedExternalInternalizedStringMap"),
("read_only_space", 0x03119): (26, "UncachedExternalOneByteInternalizedStringMap"),
("read_only_space", 0x03141): (58, "UncachedExternalOneByteStringMap"),
("read_only_space", 0x03169): (67, "SelfReferenceMarkerMap"),
("read_only_space", 0x03191): (67, "BasicBlockCountersMarkerMap"),
("read_only_space", 0x031d5): (87, "ArrayBoilerplateDescriptionMap"),
("read_only_space", 0x032a5): (99, "InterceptorInfoMap"),
("read_only_space", 0x021e9): (167, "FreeSpaceMap"),
("read_only_space", 0x02211): (166, "OnePointerFillerMap"),
("read_only_space", 0x02239): (166, "TwoPointerFillerMap"),
("read_only_space", 0x02261): (67, "UninitializedMap"),
("read_only_space", 0x022a5): (8, "OneByteInternalizedStringMap"),
("read_only_space", 0x02301): (67, "UndefinedMap"),
("read_only_space", 0x02345): (66, "HeapNumberMap"),
("read_only_space", 0x02379): (67, "TheHoleMap"),
("read_only_space", 0x023d9): (67, "BooleanMap"),
("read_only_space", 0x0247d): (131, "ByteArrayMap"),
("read_only_space", 0x024a5): (117, "FixedArrayMap"),
("read_only_space", 0x024cd): (117, "FixedCOWArrayMap"),
("read_only_space", 0x024f5): (118, "HashTableMap"),
("read_only_space", 0x0251d): (64, "SymbolMap"),
("read_only_space", 0x02545): (40, "OneByteStringMap"),
("read_only_space", 0x0256d): (129, "ScopeInfoMap"),
("read_only_space", 0x02595): (175, "SharedFunctionInfoMap"),
("read_only_space", 0x025bd): (159, "CodeMap"),
("read_only_space", 0x025e5): (158, "CellMap"),
("read_only_space", 0x0260d): (174, "GlobalPropertyCellMap"),
("read_only_space", 0x02635): (70, "ForeignMap"),
("read_only_space", 0x0265d): (157, "TransitionArrayMap"),
("read_only_space", 0x02685): (45, "ThinOneByteStringMap"),
("read_only_space", 0x026ad): (165, "FeedbackVectorMap"),
("read_only_space", 0x026e5): (67, "ArgumentsMarkerMap"),
("read_only_space", 0x02745): (67, "ExceptionMap"),
("read_only_space", 0x027a1): (67, "TerminationExceptionMap"),
("read_only_space", 0x02809): (67, "OptimizedOutMap"),
("read_only_space", 0x02869): (67, "StaleRegisterMap"),
("read_only_space", 0x028c9): (130, "ScriptContextTableMap"),
("read_only_space", 0x028f1): (127, "ClosureFeedbackCellArrayMap"),
("read_only_space", 0x02919): (164, "FeedbackMetadataArrayMap"),
("read_only_space", 0x02941): (117, "ArrayListMap"),
("read_only_space", 0x02969): (65, "BigIntMap"),
("read_only_space", 0x02991): (128, "ObjectBoilerplateDescriptionMap"),
("read_only_space", 0x029b9): (132, "BytecodeArrayMap"),
("read_only_space", 0x029e1): (160, "CodeDataContainerMap"),
("read_only_space", 0x02a09): (161, "CoverageInfoMap"),
("read_only_space", 0x02a31): (133, "FixedDoubleArrayMap"),
("read_only_space", 0x02a59): (120, "GlobalDictionaryMap"),
("read_only_space", 0x02a81): (97, "ManyClosuresCellMap"),
("read_only_space", 0x02aa9): (117, "ModuleInfoMap"),
("read_only_space", 0x02ad1): (121, "NameDictionaryMap"),
("read_only_space", 0x02af9): (97, "NoClosuresCellMap"),
("read_only_space", 0x02b21): (122, "NumberDictionaryMap"),
("read_only_space", 0x02b49): (97, "OneClosureCellMap"),
("read_only_space", 0x02b71): (123, "OrderedHashMapMap"),
("read_only_space", 0x02b99): (124, "OrderedHashSetMap"),
("read_only_space", 0x02bc1): (125, "OrderedNameDictionaryMap"),
("read_only_space", 0x02be9): (172, "PreparseDataMap"),
("read_only_space", 0x02c11): (173, "PropertyArrayMap"),
("read_only_space", 0x02c39): (93, "SideEffectCallHandlerInfoMap"),
("read_only_space", 0x02c61): (93, "SideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x02c89): (93, "NextCallSideEffectFreeCallHandlerInfoMap"),
("read_only_space", 0x02cb1): (126, "SimpleNumberDictionaryMap"),
("read_only_space", 0x02cd9): (149, "SmallOrderedHashMapMap"),
("read_only_space", 0x02d01): (150, "SmallOrderedHashSetMap"),
("read_only_space", 0x02d29): (151, "SmallOrderedNameDictionaryMap"),
("read_only_space", 0x02d51): (152, "SourceTextModuleMap"),
("read_only_space", 0x02d79): (153, "SyntheticModuleMap"),
("read_only_space", 0x02da1): (155, "UncompiledDataWithoutPreparseDataMap"),
("read_only_space", 0x02dc9): (154, "UncompiledDataWithPreparseDataMap"),
("read_only_space", 0x02df1): (71, "WasmTypeInfoMap"),
("read_only_space", 0x02e19): (181, "WeakArrayListMap"),
("read_only_space", 0x02e41): (119, "EphemeronHashTableMap"),
("read_only_space", 0x02e69): (163, "EmbedderDataArrayMap"),
("read_only_space", 0x02e91): (182, "WeakCellMap"),
("read_only_space", 0x02eb9): (32, "StringMap"),
("read_only_space", 0x02ee1): (41, "ConsOneByteStringMap"),
("read_only_space", 0x02f09): (33, "ConsStringMap"),
("read_only_space", 0x02f31): (37, "ThinStringMap"),
("read_only_space", 0x02f59): (35, "SlicedStringMap"),
("read_only_space", 0x02f81): (43, "SlicedOneByteStringMap"),
("read_only_space", 0x02fa9): (34, "ExternalStringMap"),
("read_only_space", 0x02fd1): (42, "ExternalOneByteStringMap"),
("read_only_space", 0x02ff9): (50, "UncachedExternalStringMap"),
("read_only_space", 0x03021): (0, "InternalizedStringMap"),
("read_only_space", 0x03049): (2, "ExternalInternalizedStringMap"),
("read_only_space", 0x03071): (10, "ExternalOneByteInternalizedStringMap"),
("read_only_space", 0x03099): (18, "UncachedExternalInternalizedStringMap"),
("read_only_space", 0x030c1): (26, "UncachedExternalOneByteInternalizedStringMap"),
("read_only_space", 0x030e9): (58, "UncachedExternalOneByteStringMap"),
("read_only_space", 0x03111): (67, "SelfReferenceMarkerMap"),
("read_only_space", 0x03139): (67, "BasicBlockCountersMarkerMap"),
("read_only_space", 0x03161): (96, "EnumCacheMap"),
("read_only_space", 0x031b1): (87, "ArrayBoilerplateDescriptionMap"),
("read_only_space", 0x03281): (99, "InterceptorInfoMap"),
("read_only_space", 0x05399): (72, "PromiseFulfillReactionJobTaskMap"),
("read_only_space", 0x053c1): (73, "PromiseRejectReactionJobTaskMap"),
("read_only_space", 0x053e9): (74, "CallableTaskMap"),
@@ -367,48 +367,48 @@ KNOWN_MAPS = {
KNOWN_OBJECTS = {
("read_only_space", 0x021b5): "EmptyWeakFixedArray",
("read_only_space", 0x021bd): "EmptyDescriptorArray",
("read_only_space", 0x021f5): "EmptyEnumCache",
("read_only_space", 0x02229): "EmptyFixedArray",
("read_only_space", 0x02231): "NullValue",
("read_only_space", 0x02339): "UninitializedValue",
("read_only_space", 0x023b1): "UndefinedValue",
("read_only_space", 0x023f5): "NanValue",
("read_only_space", 0x02429): "TheHoleValue",
("read_only_space", 0x02455): "HoleNanValue",
("read_only_space", 0x02489): "TrueValue",
("read_only_space", 0x024c9): "FalseValue",
("read_only_space", 0x024f9): "empty_string",
("read_only_space", 0x02735): "EmptyScopeInfo",
("read_only_space", 0x02765): "ArgumentsMarker",
("read_only_space", 0x027c5): "Exception",
("read_only_space", 0x02821): "TerminationException",
("read_only_space", 0x02889): "OptimizedOut",
("read_only_space", 0x028e9): "StaleRegister",
("read_only_space", 0x031b9): "EmptyPropertyArray",
("read_only_space", 0x031c1): "EmptyByteArray",
("read_only_space", 0x031c9): "EmptyObjectBoilerplateDescription",
("read_only_space", 0x031fd): "EmptyArrayBoilerplateDescription",
("read_only_space", 0x03209): "EmptyClosureFeedbackCellArray",
("read_only_space", 0x03211): "EmptySlowElementDictionary",
("read_only_space", 0x03235): "EmptyOrderedHashMap",
("read_only_space", 0x03249): "EmptyOrderedHashSet",
("read_only_space", 0x0325d): "EmptyFeedbackMetadata",
("read_only_space", 0x03269): "EmptyPropertyCell",
("read_only_space", 0x0327d): "EmptyPropertyDictionary",
("read_only_space", 0x032cd): "NoOpInterceptorInfo",
("read_only_space", 0x032f5): "EmptyWeakArrayList",
("read_only_space", 0x03301): "InfinityValue",
("read_only_space", 0x0330d): "MinusZeroValue",
("read_only_space", 0x03319): "MinusInfinityValue",
("read_only_space", 0x03325): "SelfReferenceMarker",
("read_only_space", 0x03365): "BasicBlockCountersMarker",
("read_only_space", 0x033a9): "OffHeapTrampolineRelocationInfo",
("read_only_space", 0x033b5): "TrampolineTrivialCodeDataContainer",
("read_only_space", 0x033c1): "TrampolinePromiseRejectionCodeDataContainer",
("read_only_space", 0x033cd): "GlobalThisBindingScopeInfo",
("read_only_space", 0x03405): "EmptyFunctionScopeInfo",
("read_only_space", 0x0342d): "NativeScopeInfo",
("read_only_space", 0x03449): "HashSeed",
("read_only_space", 0x021cd): "NullValue",
("read_only_space", 0x02289): "UninitializedValue",
("read_only_space", 0x02329): "UndefinedValue",
("read_only_space", 0x0236d): "NanValue",
("read_only_space", 0x023a1): "TheHoleValue",
("read_only_space", 0x023cd): "HoleNanValue",
("read_only_space", 0x02401): "TrueValue",
("read_only_space", 0x02441): "FalseValue",
("read_only_space", 0x02471): "empty_string",
("read_only_space", 0x026d5): "EmptyScopeInfo",
("read_only_space", 0x026dd): "EmptyFixedArray",
("read_only_space", 0x0270d): "ArgumentsMarker",
("read_only_space", 0x0276d): "Exception",
("read_only_space", 0x027c9): "TerminationException",
("read_only_space", 0x02831): "OptimizedOut",
("read_only_space", 0x02891): "StaleRegister",
("read_only_space", 0x03189): "EmptyEnumCache",
("read_only_space", 0x03195): "EmptyPropertyArray",
("read_only_space", 0x0319d): "EmptyByteArray",
("read_only_space", 0x031a5): "EmptyObjectBoilerplateDescription",
("read_only_space", 0x031d9): "EmptyArrayBoilerplateDescription",
("read_only_space", 0x031e5): "EmptyClosureFeedbackCellArray",
("read_only_space", 0x031ed): "EmptySlowElementDictionary",
("read_only_space", 0x03211): "EmptyOrderedHashMap",
("read_only_space", 0x03225): "EmptyOrderedHashSet",
("read_only_space", 0x03239): "EmptyFeedbackMetadata",
("read_only_space", 0x03245): "EmptyPropertyCell",
("read_only_space", 0x03259): "EmptyPropertyDictionary",
("read_only_space", 0x032a9): "NoOpInterceptorInfo",
("read_only_space", 0x032d1): "EmptyWeakArrayList",
("read_only_space", 0x032dd): "InfinityValue",
("read_only_space", 0x032e9): "MinusZeroValue",
("read_only_space", 0x032f5): "MinusInfinityValue",
("read_only_space", 0x03301): "SelfReferenceMarker",
("read_only_space", 0x03341): "BasicBlockCountersMarker",
("read_only_space", 0x03385): "OffHeapTrampolineRelocationInfo",
("read_only_space", 0x03391): "TrampolineTrivialCodeDataContainer",
("read_only_space", 0x0339d): "TrampolinePromiseRejectionCodeDataContainer",
("read_only_space", 0x033a9): "GlobalThisBindingScopeInfo",
("read_only_space", 0x033e1): "EmptyFunctionScopeInfo",
("read_only_space", 0x03409): "NativeScopeInfo",
("read_only_space", 0x03425): "HashSeed",
("old_space", 0x02115): "ArgumentsIteratorAccessor",
("old_space", 0x02159): "ArrayLengthAccessor",
("old_space", 0x0219d): "BoundFunctionLengthAccessor",