[cleanup][heap] Fix kPointerSize usages in src/heap/

Bug: v8:8477, v8:8562
Change-Id: Iaa995c8fbb9f309dadac4e308d727f628fdb8b3c
Reviewed-on: https://chromium-review.googlesource.com/c/1384314
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58379}
Author: Igor Sheludko
Date: 2018-12-19 20:10:21 +01:00 (committed by Commit Bot)
Parent: b6dfeb0d8a
Commit: 4ba29d0503
33 changed files with 491 additions and 486 deletions

@ -249,7 +249,7 @@ class RecordWriteCodeStubAssembler : public CodeStubAssembler {
{
// Temp variable to calculate cell offset in bitmap.
Node* r0;
int shift = Bitmap::kBitsPerCellLog2 + kSystemPointerSizeLog2 -
int shift = Bitmap::kBitsPerCellLog2 + kTaggedSizeLog2 -
Bitmap::kBytesPerCellLog2;
r0 = WordShr(object, IntPtrConstant(shift));
r0 = WordAnd(r0, IntPtrConstant((kPageAlignmentMask >> shift) &
@ -259,7 +259,7 @@ class RecordWriteCodeStubAssembler : public CodeStubAssembler {
{
// Temp variable to calculate bit offset in cell.
Node* r1;
r1 = WordShr(object, IntPtrConstant(kSystemPointerSizeLog2));
r1 = WordShr(object, IntPtrConstant(kTaggedSizeLog2));
r1 = WordAnd(r1, IntPtrConstant((1 << Bitmap::kBitsPerCellLog2) - 1));
// It seems that the LSB (e.g. cl) is automatically used, so no manual masking
// is needed. Uncomment the following line otherwise.
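As a cross-check of the shift arithmetic above, here is the same mark-bitmap index math as a plain-C++ sketch, assuming 512 KB pages, 32-bit bitmap cells, and kTaggedSizeLog2 == 2 (32-bit or pointer-compressed builds); the constants and helper names are illustrative, not V8's.

#include <cstdint>

constexpr int kBitsPerCellLog2 = 5;   // 32 mark bits per cell
constexpr int kBytesPerCellLog2 = 2;  // 4 bytes per cell
constexpr int kBytesPerCell = 1 << kBytesPerCellLog2;
constexpr int kTaggedSizeLog2 = 2;    // one mark bit per tagged slot
constexpr uintptr_t kPageAlignmentMask = (uintptr_t{1} << 19) - 1;  // 512 KB

// Page-relative byte offset of the bitmap cell covering |addr| (the r0 above).
uintptr_t CellByteOffset(uintptr_t addr) {
  const int shift = kBitsPerCellLog2 + kTaggedSizeLog2 - kBytesPerCellLog2;
  return (addr >> shift) &
         ((kPageAlignmentMask >> shift) &
          ~static_cast<uintptr_t>(kBytesPerCell - 1));
}

// Bit index of |addr| within that cell (the r1 above).
int BitIndexInCell(uintptr_t addr) {
  return static_cast<int>((addr >> kTaggedSizeLog2) &
                          ((uintptr_t{1} << kBitsPerCellLog2) - 1));
}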

@ -673,6 +673,7 @@ enum AllocationSpace {
constexpr int kSpaceTagSize = 4;
STATIC_ASSERT(FIRST_SPACE == 0);
// TODO(ishell): review and rename kWordAligned to kTaggedAligned.
enum AllocationAlignment { kWordAligned, kDoubleAligned, kDoubleUnaligned };
enum class AccessMode { ATOMIC, NON_ATOMIC };

@ -65,7 +65,7 @@ class SlotSnapshot {
}
private:
static const int kMaxSnapshotSize = JSObject::kMaxInstanceSize / kPointerSize;
static const int kMaxSnapshotSize = JSObject::kMaxInstanceSize / kTaggedSize;
int number_of_slots_;
std::pair<ObjectSlot, Object*> snapshot_[kMaxSnapshotSize];
DISALLOW_COPY_AND_ASSIGN(SlotSnapshot);

@ -1975,7 +1975,7 @@ Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
map->set_instance_size(instance_size);
if (map->IsJSObjectMap()) {
DCHECK(!isolate()->heap()->InReadOnlySpace(map));
map->SetInObjectPropertiesStartInWords(instance_size / kPointerSize -
map->SetInObjectPropertiesStartInWords(instance_size / kTaggedSize -
inobject_properties);
DCHECK_EQ(map->GetInObjectProperties(), inobject_properties);
map->set_prototype_validity_cell(*invalid_prototype_validity_cell());
@ -2109,9 +2109,8 @@ Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
if (mode == SKIP_WRITE_BARRIER) {
// Eliminate the write barrier if possible.
Heap::CopyBlock(obj->address() + kPointerSize,
src->address() + kPointerSize,
T::SizeFor(len) - kPointerSize);
Heap::CopyBlock(obj->address() + kTaggedSize, src->address() + kTaggedSize,
T::SizeFor(len) - kTaggedSize);
} else {
// Slow case: Just copy the content one-by-one.
initialize_length(result, len);
@ -2263,9 +2262,9 @@ Handle<FeedbackVector> Factory::CopyFeedbackVector(
// Eliminate the write barrier if possible.
if (mode == SKIP_WRITE_BARRIER) {
Heap::CopyBlock(result->address() + kPointerSize,
result->address() + kPointerSize,
FeedbackVector::SizeFor(len) - kPointerSize);
Heap::CopyBlock(result->address() + kTaggedSize,
result->address() + kTaggedSize,
FeedbackVector::SizeFor(len) - kTaggedSize);
} else {
// Slow case: Just copy the content one-by-one.
result->set_shared_function_info(array->shared_function_info());
@ -4045,7 +4044,7 @@ Handle<Map> Factory::CreateSloppyFunctionMap(
if (IsFunctionModeWithName(function_mode)) ++inobject_properties_count;
Handle<Map> map = NewMap(
JS_FUNCTION_TYPE, header_size + inobject_properties_count * kPointerSize,
JS_FUNCTION_TYPE, header_size + inobject_properties_count * kTaggedSize,
TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
map->set_has_prototype_slot(has_prototype);
map->set_is_constructor(has_prototype);
@ -4125,7 +4124,7 @@ Handle<Map> Factory::CreateStrictFunctionMap(
inobject_properties_count;
Handle<Map> map = NewMap(
JS_FUNCTION_TYPE, header_size + inobject_properties_count * kPointerSize,
JS_FUNCTION_TYPE, header_size + inobject_properties_count * kTaggedSize,
TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
map->set_has_prototype_slot(has_prototype);
map->set_is_constructor(has_prototype);

@ -495,14 +495,16 @@ bool Heap::ShouldBePromoted(Address old_address) {
}
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
CopyWords(dst, src, static_cast<size_t>(byte_size / kPointerSize));
DCHECK(IsAligned(byte_size, kTaggedSize));
STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
CopyWords(dst, src, static_cast<size_t>(byte_size / kTaggedSize));
}
template <Heap::FindMementoMode mode>
AllocationMemento Heap::FindAllocationMemento(Map map, HeapObject* object) {
Address object_address = object->address();
Address memento_address = object_address + object->SizeFromMap(map);
Address last_memento_word_address = memento_address + kPointerSize;
Address last_memento_word_address = memento_address + kTaggedSize;
// If the memento would be on another page, bail out immediately.
if (!Page::OnSamePage(object_address, last_memento_word_address)) {
return AllocationMemento();
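The STATIC_ASSERT added to CopyBlock above pins down the current state of the migration: kTaggedSize still equals kSystemPointerSize, so dividing by either gives the same word count, but the two constants diverge once pointer compression shrinks tagged slots. A rough illustration of how the two constants relate, assuming a hypothetical V8_COMPRESS_POINTERS configuration macro:

constexpr int kSystemPointerSize = sizeof(void*);  // 8 on x64, 4 on ia32
#ifdef V8_COMPRESS_POINTERS
constexpr int kTaggedSize = 4;                     // compressed tagged slots
#else
constexpr int kTaggedSize = kSystemPointerSize;    // full-pointer tagged slots
#endif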

@ -1071,12 +1071,12 @@ void Heap::CollectAllGarbage(int flags, GarbageCollectionReason gc_reason,
namespace {
intptr_t CompareWords(int size, HeapObject* a, HeapObject* b) {
int words = size / kPointerSize;
int slots = size / kTaggedSize;
DCHECK_EQ(a->Size(), size);
DCHECK_EQ(b->Size(), size);
intptr_t* slot_a = reinterpret_cast<intptr_t*>(a->address());
intptr_t* slot_b = reinterpret_cast<intptr_t*>(b->address());
for (int i = 0; i < words; i++) {
Tagged_t* slot_a = reinterpret_cast<Tagged_t*>(a->address());
Tagged_t* slot_b = reinterpret_cast<Tagged_t*>(b->address());
for (int i = 0; i < slots; i++) {
if (*slot_a != *slot_b) {
return *slot_a - *slot_b;
}
@ -1251,7 +1251,7 @@ void Heap::EnsureFillerObjectAtTop() {
// may be uninitialized memory behind top. We fill the remainder of the page
// with a filler.
Address to_top = new_space_->top();
Page* page = Page::FromAddress(to_top - kPointerSize);
Page* page = Page::FromAddress(to_top - kTaggedSize);
if (page->Contains(to_top)) {
int remaining_in_page = static_cast<int>(page->area_end() - to_top);
CreateFillerObjectAt(to_top, remaining_in_page, ClearRecordedSlots::kNo);
@ -1446,7 +1446,7 @@ void Heap::MoveElements(FixedArray array, int dst_index, int src_index, int len,
}
}
} else {
MemMove(dst.ToVoidPtr(), src.ToVoidPtr(), len * kPointerSize);
MemMove(dst.ToVoidPtr(), src.ToVoidPtr(), len * kTaggedSize);
}
if (mode == SKIP_WRITE_BARRIER) return;
FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(this, array, dst_index, len);
@ -2351,7 +2351,7 @@ int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) {
return 0;
case kDoubleAligned:
case kDoubleUnaligned:
return kDoubleSize - kPointerSize;
return kDoubleSize - kTaggedSize;
default:
UNREACHABLE();
}
@ -2361,9 +2361,9 @@ int Heap::GetMaximumFillToAlign(AllocationAlignment alignment) {
int Heap::GetFillToAlign(Address address, AllocationAlignment alignment) {
if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0)
return kPointerSize;
return kTaggedSize;
if (alignment == kDoubleUnaligned && (address & kDoubleAlignmentMask) == 0)
return kDoubleSize - kPointerSize; // No fill if double is always aligned.
return kDoubleSize - kTaggedSize; // No fill if double is always aligned.
return 0;
}
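A worked example of the two functions above, assuming kTaggedSize == 4 and kDoubleSize == 8 (32-bit or pointer-compressed builds):

// GetFillToAlign(addr, alignment) for the four interesting cases:
//   addr % 8 == 0, kDoubleAligned   -> 0 (already double-aligned)
//   addr % 8 == 4, kDoubleAligned   -> 4 (one tagged-size filler word)
//   addr % 8 == 0, kDoubleUnaligned -> 4 (push the payload off alignment)
//   addr % 8 == 4, kDoubleUnaligned -> 0
// On 64-bit builds without pointer compression, kTaggedSize == kDoubleSize
// and allocation addresses are already double-aligned, so no fill is needed.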
@ -2423,27 +2423,27 @@ HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
ClearFreedMemoryMode clear_memory_mode) {
if (size == 0) return nullptr;
HeapObject* filler = HeapObject::FromAddress(addr);
if (size == kPointerSize) {
if (size == kTaggedSize) {
filler->set_map_after_allocation(
Map::unchecked_cast(isolate()->root(RootIndex::kOnePointerFillerMap)),
SKIP_WRITE_BARRIER);
} else if (size == 2 * kPointerSize) {
} else if (size == 2 * kTaggedSize) {
filler->set_map_after_allocation(
Map::unchecked_cast(isolate()->root(RootIndex::kTwoPointerFillerMap)),
SKIP_WRITE_BARRIER);
if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
Memory<Address>(addr + kPointerSize) =
static_cast<Address>(kClearedFreeMemoryValue);
Memory<Tagged_t>(addr + kTaggedSize) =
static_cast<Tagged_t>(kClearedFreeMemoryValue);
}
} else {
DCHECK_GT(size, 2 * kPointerSize);
DCHECK_GT(size, 2 * kTaggedSize);
filler->set_map_after_allocation(
Map::unchecked_cast(isolate()->root(RootIndex::kFreeSpaceMap)),
SKIP_WRITE_BARRIER);
FreeSpace::cast(filler)->relaxed_write_size(size);
if (clear_memory_mode == ClearFreedMemoryMode::kClearFreedMemory) {
memset(reinterpret_cast<void*>(addr + 2 * kPointerSize),
kClearedFreeMemoryValue, size - 2 * kPointerSize);
MemsetTagged(ObjectSlot(addr) + 2, ObjectPtr(kClearedFreeMemoryValue),
(size / kTaggedSize) - 2);
}
}
if (clear_slots_mode == ClearRecordedSlots::kYes) {
@ -2540,7 +2540,7 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
// Add custom visitor to concurrent marker if new left-trimmable type
// is added.
DCHECK(object->IsFixedArray() || object->IsFixedDoubleArray());
const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
const int element_size = object->IsFixedArray() ? kTaggedSize : kDoubleSize;
const int bytes_to_trim = elements_to_trim * element_size;
Map map = object->map();
@ -2551,8 +2551,8 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
DCHECK(object->map() != ReadOnlyRoots(this).fixed_cow_array_map());
STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
STATIC_ASSERT(FixedArrayBase::kLengthOffset == kTaggedSize);
STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kTaggedSize);
const int len = object->length();
DCHECK(elements_to_trim <= len);
@ -2576,7 +2576,7 @@ FixedArrayBase Heap::LeftTrimFixedArray(FixedArrayBase object,
// performed on pages which are not concurrently swept creating a filler
// object does not require synchronization.
RELAXED_WRITE_FIELD(object, bytes_to_trim, map);
RELAXED_WRITE_FIELD(object, bytes_to_trim + kPointerSize,
RELAXED_WRITE_FIELD(object, bytes_to_trim + kTaggedSize,
Smi::FromInt(len - elements_to_trim));
FixedArrayBase new_object =
@ -2633,7 +2633,7 @@ void Heap::RightTrimFixedArray(FixedArrayBase object, int elements_to_trim) {
DCHECK_GE(bytes_to_trim, 0);
} else if (object->IsFixedArray()) {
CHECK_NE(elements_to_trim, len);
bytes_to_trim = elements_to_trim * kPointerSize;
bytes_to_trim = elements_to_trim * kTaggedSize;
} else {
DCHECK(object->IsFixedDoubleArray());
CHECK_NE(elements_to_trim, len);
@ -2650,7 +2650,7 @@ void Heap::RightTrimWeakFixedArray(WeakFixedArray object,
// invalidates them.
DCHECK_EQ(gc_state(), MARK_COMPACT);
CreateFillerForArray<WeakFixedArray>(object, elements_to_trim,
elements_to_trim * kPointerSize);
elements_to_trim * kTaggedSize);
}
template <typename T>
@ -3767,10 +3767,10 @@ class FixStaleLeftTrimmedHandlesVisitor : public RootVisitor {
while (current->IsFiller()) {
Address next = reinterpret_cast<Address>(current);
if (current->map() == ReadOnlyRoots(heap_).one_pointer_filler_map()) {
next += kPointerSize;
next += kTaggedSize;
} else if (current->map() ==
ReadOnlyRoots(heap_).two_pointer_filler_map()) {
next += 2 * kPointerSize;
next += 2 * kTaggedSize;
} else {
next += current->Size();
}

@ -197,7 +197,7 @@ class AllocationResult {
ObjectPtr object_;
};
STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize);
STATIC_ASSERT(sizeof(AllocationResult) == kSystemPointerSize);
#ifdef DEBUG
struct CommentStatistic {
@ -249,7 +249,8 @@ class Heap {
// should instead adapt its heap size based on available physical memory.
static const int kPointerMultiplier = 1;
#else
static const int kPointerMultiplier = i::kPointerSize / 4;
// TODO(ishell): kSystemPointerMultiplier?
static const int kPointerMultiplier = i::kSystemPointerSize / 4;
#endif
// Semi-space size needs to be a multiple of page size.
@ -1770,9 +1771,9 @@ class Heap {
Isolate* isolate_ = nullptr;
size_t code_range_size_ = 0;
size_t max_semi_space_size_ = 8 * (kPointerSize / 4) * MB;
size_t max_semi_space_size_ = 8 * (kSystemPointerSize / 4) * MB;
size_t initial_semispace_size_ = kMinSemiSpaceSizeInKB * KB;
size_t max_old_generation_size_ = 700ul * (kPointerSize / 4) * MB;
size_t max_old_generation_size_ = 700ul * (kSystemPointerSize / 4) * MB;
size_t initial_max_old_generation_size_;
size_t initial_old_generation_size_;
bool old_generation_size_configured_ = false;
@ -2277,7 +2278,7 @@ class AllocationObserver {
public:
explicit AllocationObserver(intptr_t step_size)
: step_size_(step_size), bytes_to_next_step_(step_size) {
DCHECK_LE(kPointerSize, step_size);
DCHECK_LE(kTaggedSize, step_size);
}
virtual ~AllocationObserver() = default;

@ -180,7 +180,7 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
!marked_black_due_to_left_trimming) {
// The array was black before left trimming or was marked black by the
// concurrent marker. Simply transfer the color.
if (from->address() + kPointerSize == to->address()) {
if (from->address() + kTaggedSize == to->address()) {
// The old and the new markbits overlap. The |to| object has the
// grey color. To make it black, we need to set the second bit.
DCHECK(new_mark_bit.Get<kAtomicity>());
@ -194,7 +194,7 @@ void IncrementalMarking::NotifyLeftTrimming(HeapObject* from, HeapObject* to) {
marked_black_due_to_left_trimming) {
// The array was already grey or was marked black by this function.
// Mark the new array grey and push it to marking deque.
if (from->address() + kPointerSize == to->address()) {
if (from->address() + kTaggedSize == to->address()) {
// The old and the new markbits overlap. The |to| object is either white
// or grey. Set the first bit to make sure that it is grey.
new_mark_bit.Set<kAtomicity>();

@ -548,7 +548,7 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
int size = 0;
while (current_cell_ != 0) {
uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
Address addr = cell_base_ + trailing_zeros * kPointerSize;
Address addr = cell_base_ + trailing_zeros * kTaggedSize;
// Clear the first bit of the found object.
current_cell_ &= ~(1u << trailing_zeros);
@ -580,11 +580,11 @@ void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
HeapObject* black_object = HeapObject::FromAddress(addr);
map = Map::cast(ObjectSlot(addr).Acquire_Load());
size = black_object->SizeFromMap(map);
Address end = addr + size - kPointerSize;
Address end = addr + size - kTaggedSize;
// One word filler objects do not borrow the second mark bit. We have
// to jump over the advancing and clearing part.
// Note that we know that we are at a one word filler when
// object_start + object_size - kPointerSize == object_start.
// object_start + object_size - kTaggedSize == object_start.
if (addr != end) {
DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);

@ -121,11 +121,11 @@ void MarkingVerifier::VerifyMarkingOnPage(const Page* page, Address start,
page->AddressToMarkbitIndex(current),
page->AddressToMarkbitIndex(next_object_must_be_here_or_later)) ||
bitmap(page)->AllBitsClearInRange(
page->AddressToMarkbitIndex(current + kPointerSize * 2),
page->AddressToMarkbitIndex(current + kTaggedSize * 2),
page->AddressToMarkbitIndex(next_object_must_be_here_or_later)));
current = next_object_must_be_here_or_later;
} else {
current += kPointerSize;
current += kTaggedSize;
}
}
}
@ -1211,7 +1211,7 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
DCHECK_NE(dest, CODE_LO_SPACE);
if (dest == OLD_SPACE) {
DCHECK_OBJECT_SIZE(size);
DCHECK(IsAligned(size, kPointerSize));
DCHECK(IsAligned(size, kTaggedSize));
base->heap_->CopyBlock(dst_addr, src_addr, size);
if (mode != MigrationMode::kFast)
base->ExecuteMigrationObservers(dest, src, dst, size);
@ -2639,7 +2639,7 @@ class Evacuator : public Malloced {
if (FLAG_page_promotion)
return FLAG_page_promotion_threshold *
MemoryChunkLayout::AllocatableMemoryInDataPage() / 100;
return MemoryChunkLayout::AllocatableMemoryInDataPage() + kPointerSize;
return MemoryChunkLayout::AllocatableMemoryInDataPage() + kTaggedSize;
}
Evacuator(Heap* heap, RecordMigratedSlotVisitor* record_visitor)

@ -102,7 +102,7 @@ class MarkBitCellIterator {
}
V8_WARN_UNUSED_RESULT inline bool Advance() {
cell_base_ += Bitmap::kBitsPerCell * kPointerSize;
cell_base_ += Bitmap::kBitsPerCell * kTaggedSize;
return ++cell_index_ != last_cell_index_;
}
@ -112,7 +112,7 @@ class MarkBitCellIterator {
DCHECK_LE(new_cell_index, last_cell_index_);
unsigned int diff = new_cell_index - cell_index_;
cell_index_ = new_cell_index;
cell_base_ += diff * (Bitmap::kBitsPerCell * kPointerSize);
cell_base_ += diff * (Bitmap::kBitsPerCell * kTaggedSize);
return true;
}
return false;

@ -98,10 +98,10 @@ class V8_EXPORT_PRIVATE Bitmap {
static const uint32_t kBytesPerCell = kBitsPerCell / kBitsPerByte;
static const uint32_t kBytesPerCellLog2 = kBitsPerCellLog2 - kBitsPerByteLog2;
static const size_t kLength = (1 << kPageSizeBits) >> (kPointerSizeLog2);
static const size_t kLength = (1 << kPageSizeBits) >> (kTaggedSizeLog2);
static const size_t kSize = (1 << kPageSizeBits) >>
(kPointerSizeLog2 + kBitsPerByteLog2);
(kTaggedSizeLog2 + kBitsPerByteLog2);
static int CellsForLength(int length) {
return (length + kBitsPerCell - 1) >> kBitsPerCellLog2;
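Worked numbers for kLength and kSize above, assuming 512 KB pages (kPageSizeBits == 19):

//   kTaggedSizeLog2 == 2 (32-bit / compressed):  kLength = 2^17 mark bits,
//                                                kSize   = 2^14 bytes = 16 KB
//   kTaggedSizeLog2 == 3 (64-bit, uncompressed): kLength = 2^16 mark bits,
//                                                kSize   = 2^13 bytes =  8 KB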

@ -45,7 +45,7 @@ class FieldStatsCollector : public ObjectVisitor {
size_t tagged_fields_count_in_object =
*tagged_fields_count_ - old_pointer_fields_count;
int object_size_in_words = host->Size() / kPointerSize;
int object_size_in_words = host->Size() / kTaggedSize;
DCHECK_LE(tagged_fields_count_in_object, object_size_in_words);
size_t raw_fields_count_in_object =
object_size_in_words - tagged_fields_count_in_object;
@ -202,11 +202,12 @@ void ObjectStats::PrintJSON(const char* key) {
PrintF("{ ");
PrintKeyAndId(key, gc_count);
PrintF("\"type\": \"field_data\"");
PrintF(", \"tagged_fields\": %zu", tagged_fields_count_ * kPointerSize);
PrintF(", \"embedder_fields\": %zu", embedder_fields_count_ * kPointerSize);
PrintF(", \"tagged_fields\": %zu", tagged_fields_count_ * kTaggedSize);
PrintF(", \"embedder_fields\": %zu",
embedder_fields_count_ * kEmbedderDataSlotSize);
PrintF(", \"unboxed_double_fields\": %zu",
unboxed_double_fields_count_ * kDoubleSize);
PrintF(", \"other_raw_fields\": %zu", raw_fields_count_ * kPointerSize);
PrintF(", \"other_raw_fields\": %zu", raw_fields_count_ * kSystemPointerSize);
PrintF(" }\n");
// bucket_sizes
PrintF("{ ");
@ -256,11 +257,13 @@ void ObjectStats::Dump(std::stringstream& stream) {
// field_data
stream << "\"field_data\":{";
stream << "\"tagged_fields\":" << (tagged_fields_count_ * kPointerSize);
stream << ",\"embedder_fields\":" << (embedder_fields_count_ * kPointerSize);
stream << "\"tagged_fields\":" << (tagged_fields_count_ * kTaggedSize);
stream << ",\"embedder_fields\":"
<< (embedder_fields_count_ * kEmbedderDataSlotSize);
stream << ",\"unboxed_double_fields\": "
<< (unboxed_double_fields_count_ * kDoubleSize);
stream << ",\"other_raw_fields\":" << (raw_fields_count_ * kPointerSize);
stream << ",\"other_raw_fields\":"
<< (raw_fields_count_ * kSystemPointerSize);
stream << "}, ";
stream << "\"bucket_sizes\":[";
@ -626,7 +629,7 @@ void ObjectStatsCollectorImpl::RecordVirtualFeedbackVectorDetails(
while (it.HasNext()) {
FeedbackSlot slot = it.Next();
// Log the entry (or entries) taken up by this slot.
size_t slot_size = it.entry_size() * kPointerSize;
size_t slot_size = it.entry_size() * kTaggedSize;
stats_->RecordVirtualObjectStats(
GetFeedbackSlotType(vector->Get(slot), it.kind(), heap_->isolate()),
slot_size, ObjectStats::kNoOverAllocation);

@ -126,8 +126,8 @@ bool Scavenger::MigrateObject(Map map, HeapObject* source, HeapObject* target,
int size) {
// Copy the content of source to target.
target->set_map_word(MapWord::FromMap(map));
heap()->CopyBlock(target->address() + kPointerSize,
source->address() + kPointerSize, size - kPointerSize);
heap()->CopyBlock(target->address() + kTaggedSize,
source->address() + kTaggedSize, size - kTaggedSize);
ObjectPtr old = source->map_slot().Release_CompareAndSwap(
map, MapWord::FromForwardingAddress(target).ToMap());

@ -435,8 +435,8 @@ bool Heap::CreateInitialMaps() {
}
ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
ALLOCATE_MAP(FILLER_TYPE, kTaggedSize, one_pointer_filler)
ALLOCATE_MAP(FILLER_TYPE, 2 * kTaggedSize, two_pointer_filler)
// The "no closures" and "one closure" FeedbackCell maps need
// to be marked unstable because their objects can change maps.

@ -207,7 +207,7 @@ class SlotSet : public Malloced {
while (cell) {
int bit_offset = base::bits::CountTrailingZeros(cell);
uint32_t bit_mask = 1u << bit_offset;
uint32_t slot = (cell_offset + bit_offset) << kPointerSizeLog2;
uint32_t slot = (cell_offset + bit_offset) << kTaggedSizeLog2;
if (callback(MaybeObjectSlot(page_start_ + slot)) == KEEP_SLOT) {
++in_bucket_count;
} else {
@ -269,7 +269,7 @@ class SlotSet : public Malloced {
private:
typedef uint32_t* Bucket;
static const int kMaxSlots = (1 << kPageSizeBits) / kPointerSize;
static const int kMaxSlots = (1 << kPageSizeBits) / kTaggedSize;
static const int kCellsPerBucket = 32;
static const int kCellsPerBucketLog2 = 5;
static const int kBitsPerCell = 32;
@ -375,8 +375,8 @@ class SlotSet : public Malloced {
// Converts the slot offset into bucket/cell/bit index.
void SlotToIndices(int slot_offset, int* bucket_index, int* cell_index,
int* bit_index) {
DCHECK_EQ(slot_offset % kPointerSize, 0);
int slot = slot_offset >> kPointerSizeLog2;
DCHECK(IsAligned(slot_offset, kTaggedSize));
int slot = slot_offset >> kTaggedSizeLog2;
DCHECK(slot >= 0 && slot <= kMaxSlots);
*bucket_index = slot >> kBitsPerBucketLog2;
*cell_index = (slot >> kBitsPerCellLog2) & (kCellsPerBucket - 1);
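A worked example of SlotToIndices, assuming kTaggedSizeLog2 == 2, kBitsPerCellLog2 == 5, and kBitsPerBucketLog2 == 10 (1024 slots per bucket):

//   slot_offset = 0x1234  ->  slot = 0x1234 >> 2 = 1165
//   bucket_index = 1165 >> 10       = 1
//   cell_index   = (1165 >> 5) & 31 = 4
//   bit_index    = 1165 & 31        = 13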

@ -502,8 +502,7 @@ size_t MemoryChunkLayout::AllocatableMemoryInCodePage() {
}
intptr_t MemoryChunkLayout::ObjectStartOffsetInDataPage() {
return MemoryChunk::kHeaderSize +
(kPointerSize - MemoryChunk::kHeaderSize % kPointerSize);
return RoundUp(MemoryChunk::kHeaderSize, kTaggedSize);
}
size_t MemoryChunkLayout::ObjectStartOffsetInMemoryChunk(
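One subtlety in the RoundUp rewrite above: the old expression added padding even when kHeaderSize was already pointer-aligned, whereas RoundUp adds none in that case. A minimal sketch of the difference, with a hypothetical header size that is already aligned:

constexpr int kTaggedSize = 8;
constexpr int kHeaderSize = 248;  // hypothetical, already 8-byte aligned
constexpr int old_offset =
    kHeaderSize + (kTaggedSize - kHeaderSize % kTaggedSize);      // 256
constexpr int new_offset =
    (kHeaderSize + kTaggedSize - 1) / kTaggedSize * kTaggedSize;  // 248
static_assert(old_offset == 256 && new_offset == 248,
              "the old form over-pads an already-aligned header");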
@ -1187,11 +1186,10 @@ MemoryChunk* MemoryAllocator::AllocatePagePooled(SpaceType* owner) {
void MemoryAllocator::ZapBlock(Address start, size_t size,
uintptr_t zap_value) {
DCHECK_EQ(start % kPointerSize, 0);
DCHECK_EQ(size % kPointerSize, 0);
for (size_t s = 0; s + kPointerSize <= size; s += kPointerSize) {
Memory<Address>(start + s) = static_cast<Address>(zap_value);
}
DCHECK(IsAligned(start, kTaggedSize));
DCHECK(IsAligned(size, kTaggedSize));
MemsetTagged(ObjectSlot(start), ObjectPtr(static_cast<Address>(zap_value)),
size >> kTaggedSizeLog2);
}
intptr_t MemoryAllocator::GetCommitPageSize() {
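A rough plain-C++ equivalent of the MemsetTagged-based ZapBlock above, assuming Tagged_t is a 4-byte slot type under pointer compression; the names are illustrative, not V8's:

#include <cstddef>
#include <cstdint>

using Tagged_t = uint32_t;  // assumption: compressed tagged slot

void ZapBlockSketch(uintptr_t start, size_t size, Tagged_t zap_value) {
  // |start| and |size| are expected to be tagged-size aligned (see the
  // DCHECKs above).
  Tagged_t* p = reinterpret_cast<Tagged_t*>(start);
  for (size_t i = 0; i < size / sizeof(Tagged_t); ++i) p[i] = zap_value;
}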
@ -1855,7 +1853,7 @@ std::unique_ptr<ObjectIterator> PagedSpace::GetObjectIterator() {
}
bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
DCHECK(IsAligned(size_in_bytes, kPointerSize));
DCHECK(IsAligned(size_in_bytes, kTaggedSize));
DCHECK_LE(top(), limit());
#ifdef DEBUG
if (top() != limit()) {

@ -358,41 +358,43 @@ class MemoryChunk {
static const intptr_t kSizeOffset = 0;
static const intptr_t kFlagsOffset = kSizeOffset + kSizetSize;
static const intptr_t kMarkBitmapOffset = kFlagsOffset + kPointerSize;
static const intptr_t kReservationOffset = kMarkBitmapOffset + kPointerSize;
static const intptr_t kMarkBitmapOffset = kFlagsOffset + kSystemPointerSize;
static const intptr_t kReservationOffset =
kMarkBitmapOffset + kSystemPointerSize;
static const size_t kHeaderSize =
kSizeOffset // NOLINT
+ kSizetSize // size_t size
+ kUIntptrSize // uintptr_t flags_
+ kPointerSize // Bitmap* marking_bitmap_
+ 3 * kPointerSize // VirtualMemory reservation_
+ kPointerSize // Address area_start_
+ kPointerSize // Address area_end_
+ kPointerSize // Address owner_
+ kPointerSize // Heap* heap_
+ kIntptrSize // intptr_t progress_bar_
+ kIntptrSize // std::atomic<intptr_t> live_byte_count_
+ kPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES // SlotSet* array
+ kPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES // TypedSlotSet* array
+ kPointerSize // InvalidatedSlots* invalidated_slots_
+ kPointerSize // SkipList* skip_list_
+ kPointerSize // std::atomic<intptr_t> high_water_mark_
+ kPointerSize // base::Mutex* mutex_
+
kPointerSize // std::atomic<ConcurrentSweepingState> concurrent_sweeping_
+ kPointerSize // base::Mutex* page_protection_change_mutex_
+ kPointerSize // uintptr_t write_unprotect_counter_
kSizeOffset // NOLINT
+ kSizetSize // size_t size
+ kUIntptrSize // uintptr_t flags_
+ kSystemPointerSize // Bitmap* marking_bitmap_
+ 3 * kSystemPointerSize // VirtualMemory reservation_
+ kSystemPointerSize // Address area_start_
+ kSystemPointerSize // Address area_end_
+ kSystemPointerSize // Address owner_
+ kSystemPointerSize // Heap* heap_
+ kIntptrSize // intptr_t progress_bar_
+ kIntptrSize // std::atomic<intptr_t> live_byte_count_
+ kSystemPointerSize * NUMBER_OF_REMEMBERED_SET_TYPES // SlotSet* array
+ kSystemPointerSize *
NUMBER_OF_REMEMBERED_SET_TYPES // TypedSlotSet* array
+ kSystemPointerSize // InvalidatedSlots* invalidated_slots_
+ kSystemPointerSize // SkipList* skip_list_
+ kSystemPointerSize // std::atomic<intptr_t> high_water_mark_
+ kSystemPointerSize // base::Mutex* mutex_
+ kSystemPointerSize // std::atomic<ConcurrentSweepingState>
// concurrent_sweeping_
+ kSystemPointerSize // base::Mutex* page_protection_change_mutex_
+ kSystemPointerSize // uintptr_t write_unprotect_counter_
+ kSizetSize * ExternalBackingStoreType::kNumTypes
// std::atomic<size_t> external_backing_store_bytes_
+ kSizetSize // size_t allocated_bytes_
+ kSizetSize // size_t wasted_memory_
+ kPointerSize * 2 // base::ListNode
+ kPointerSize * kNumberOfCategories
+ kSizetSize // size_t allocated_bytes_
+ kSizetSize // size_t wasted_memory_
+ kSystemPointerSize * 2 // base::ListNode
+ kSystemPointerSize * kNumberOfCategories
// FreeListCategory categories_[kNumberOfCategories]
+ kPointerSize // LocalArrayBufferTracker* local_tracker_
+ kIntptrSize // std::atomic<intptr_t> young_generation_live_byte_count_
+ kPointerSize; // Bitmap* young_generation_bitmap_
+ kSystemPointerSize // LocalArrayBufferTracker* local_tracker_
+ kIntptrSize // std::atomic<intptr_t> young_generation_live_byte_count_
+ kSystemPointerSize; // Bitmap* young_generation_bitmap_
// Page size in bytes. This must be a multiple of the OS page size.
static const int kPageSize = 1 << kPageSizeBits;
@ -562,11 +564,12 @@ class MemoryChunk {
}
inline uint32_t AddressToMarkbitIndex(Address addr) const {
return static_cast<uint32_t>(addr - this->address()) >> kPointerSizeLog2;
return static_cast<uint32_t>(addr - this->address()) >>
kSystemPointerSizeLog2;
}
inline Address MarkbitIndexToAddress(uint32_t index) const {
return this->address() + (index << kPointerSizeLog2);
return this->address() + (index << kSystemPointerSizeLog2);
}
template <AccessMode access_mode = AccessMode::NON_ATOMIC>
@ -753,8 +756,8 @@ class MemoryChunk {
friend class PagedSpace;
};
static_assert(sizeof(std::atomic<intptr_t>) == kPointerSize,
"sizeof(std::atomic<intptr_t>) == kPointerSize");
static_assert(sizeof(std::atomic<intptr_t>) == kSystemPointerSize,
"sizeof(std::atomic<intptr_t>) == kSystemPointerSize");
// -----------------------------------------------------------------------------
// A page is a memory chunk of a size 512K. Large object pages may be larger.
@ -786,9 +789,9 @@ class Page : public MemoryChunk {
// Returns the page containing the address provided. The address can
// potentially point right after the page. To also be safe for tagged values
// we subtract one word. The valid address ranges from
// [page_addr + area_start_ .. page_addr + kPageSize + kPointerSize].
// [page_addr + area_start_ .. page_addr + kPageSize + kTaggedSize].
static Page* FromAllocationAreaAddress(Address address) {
return Page::FromAddress(address - kPointerSize);
return Page::FromAddress(address - kTaggedSize);
}
// Checks if address1 and address2 are on the same new space page.
@ -999,7 +1002,7 @@ class Space : public Malloced {
if (id_ == CODE_SPACE) {
return RoundDown(size, kCodeAlignment);
} else {
return RoundDown(size, kPointerSize);
return RoundDown(size, kTaggedSize);
}
}
@ -1114,7 +1117,7 @@ class SkipList {
void AddObject(Address addr, int size) {
int start_region = RegionNumber(addr);
int end_region = RegionNumber(addr + size - kPointerSize);
int end_region = RegionNumber(addr + size - kTaggedSize);
for (int idx = start_region; idx <= end_region; idx++) {
if (starts_[idx] > addr) {
starts_[idx] = addr;
@ -1892,17 +1895,17 @@ class V8_EXPORT_PRIVATE FreeList {
};
// The size range of blocks, in bytes.
static const size_t kMinBlockSize = 3 * kPointerSize;
static const size_t kMinBlockSize = 3 * kTaggedSize;
// This is a conservative upper bound. The actual maximum block size takes
// padding and alignment of data and code pages into account.
static const size_t kMaxBlockSize = Page::kPageSize;
static const size_t kTiniestListMax = 0xa * kPointerSize;
static const size_t kTinyListMax = 0x1f * kPointerSize;
static const size_t kSmallListMax = 0xff * kPointerSize;
static const size_t kMediumListMax = 0x7ff * kPointerSize;
static const size_t kLargeListMax = 0x3fff * kPointerSize;
static const size_t kTiniestListMax = 0xa * kTaggedSize;
static const size_t kTinyListMax = 0x1f * kTaggedSize;
static const size_t kSmallListMax = 0xff * kTaggedSize;
static const size_t kMediumListMax = 0x7ff * kTaggedSize;
static const size_t kLargeListMax = 0x3fff * kTaggedSize;
static const size_t kTinyAllocationMax = kTiniestListMax;
static const size_t kSmallAllocationMax = kTinyListMax;
static const size_t kMediumAllocationMax = kSmallListMax;

@ -55,9 +55,9 @@ void StoreBuffer::SetUp() {
const size_t allocated_size = reservation.size();
start_[0] = reinterpret_cast<Address*>(start);
limit_[0] = start_[0] + (kStoreBufferSize / kPointerSize);
limit_[0] = start_[0] + (kStoreBufferSize / kSystemPointerSize);
start_[1] = limit_[0];
limit_[1] = start_[1] + (kStoreBufferSize / kPointerSize);
limit_[1] = start_[1] + (kStoreBufferSize / kSystemPointerSize);
// Sanity check the buffers.
Address* vm_limit = reinterpret_cast<Address*>(start + allocated_size);

@ -23,6 +23,7 @@ namespace internal {
// one is the end address of the invalid range or null if there is just one slot
// that needs to be removed from the remembered set. On buffer overflow the
// slots are moved to the remembered set.
// Store buffer entries are always full pointers.
class StoreBuffer {
public:
enum StoreBufferMode { IN_GC, NOT_IN_GC };
@ -30,7 +31,7 @@ class StoreBuffer {
static const int kStoreBuffers = 2;
static const int kStoreBufferSize =
Max(static_cast<int>(kMinExpectedOSPageSize / kStoreBuffers),
1 << (11 + kPointerSizeLog2));
1 << (11 + kSystemPointerSizeLog2));
static const int kStoreBufferMask = kStoreBufferSize - 1;
static const intptr_t kDeletionTag = 1;

@ -321,7 +321,7 @@ int Sweeper::RawSweep(Page* p, FreeListRebuildingMode free_list_mode,
if (rebuild_skip_list) {
int new_region_start = SkipList::RegionNumber(free_end);
int new_region_end =
SkipList::RegionNumber(free_end + size - kPointerSize);
SkipList::RegionNumber(free_end + size - kTaggedSize);
if (new_region_start != curr_region || new_region_end != curr_region) {
skip_list->AddObject(free_end, size);
curr_region = new_region_end;

@ -181,10 +181,10 @@ const char* HeapEntry::TypeAsString() {
HeapSnapshot::HeapSnapshot(HeapProfiler* profiler) : profiler_(profiler) {
// It is very important to keep objects that form a heap snapshot
// as small as possible. Check assumptions about data structure sizes.
STATIC_ASSERT((kPointerSize == 4 && sizeof(HeapGraphEdge) == 12) ||
(kPointerSize == 8 && sizeof(HeapGraphEdge) == 24));
STATIC_ASSERT((kPointerSize == 4 && sizeof(HeapEntry) == 28) ||
(kPointerSize == 8 && sizeof(HeapEntry) == 40));
STATIC_ASSERT((kTaggedSize == 4 && sizeof(HeapGraphEdge) == 12) ||
(kTaggedSize == 8 && sizeof(HeapGraphEdge) == 24));
STATIC_ASSERT((kTaggedSize == 4 && sizeof(HeapEntry) == 28) ||
(kTaggedSize == 8 && sizeof(HeapEntry) == 40));
memset(&gc_subroot_entries_, 0, sizeof(gc_subroot_entries_));
}
@ -711,10 +711,10 @@ class IndexedReferencesExtractor : public ObjectVisitor {
V8_INLINE void VisitHeapObjectImpl(HeapObject* heap_object, int field_index) {
DCHECK_LE(-1, field_index);
// The last parameter {field_offset} is only used to check some well-known
// skipped references, so passing -1 * kPointerSize for objects embedded
// skipped references, so passing -1 * kTaggedSize for objects embedded
// into code is fine.
generator_->SetHiddenReference(parent_obj_, parent_, next_index_++,
heap_object, field_index * kPointerSize);
heap_object, field_index * kTaggedSize);
}
V8HeapExplorer* generator_;
@ -857,7 +857,7 @@ void V8HeapExplorer::ExtractJSObjectReferences(HeapEntry* entry,
SetInternalReference(entry, "global_proxy", global_obj->global_proxy(),
JSGlobalObject::kGlobalProxyOffset);
STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize ==
2 * kPointerSize);
2 * kTaggedSize);
} else if (obj->IsJSArrayBufferView()) {
JSArrayBufferView view = JSArrayBufferView::cast(obj);
SetInternalReference(entry, "buffer", view->buffer(),
@ -1276,10 +1276,10 @@ void V8HeapExplorer::ExtractWeakArrayReferences(int header_size,
MaybeObject object = array->Get(i);
HeapObject* heap_object;
if (object->GetHeapObjectIfWeak(&heap_object)) {
SetWeakReference(entry, i, heap_object, header_size + i * kPointerSize);
SetWeakReference(entry, i, heap_object, header_size + i * kTaggedSize);
} else if (object->GetHeapObjectIfStrong(&heap_object)) {
SetInternalReference(entry, i, heap_object,
header_size + i * kPointerSize);
header_size + i * kTaggedSize);
}
}
}
@ -1473,7 +1473,7 @@ bool V8HeapExplorer::IterateAndExtractReferences(
obj = iterator.next(), progress_->ProgressStep()) {
if (interrupted) continue;
size_t max_pointer = obj->Size() / kPointerSize;
size_t max_pointer = obj->Size() / kTaggedSize;
if (max_pointer > visited_fields_.size()) {
// Clear the current bits.
std::vector<bool>().swap(visited_fields_);
@ -1546,7 +1546,7 @@ void V8HeapExplorer::SetContextReference(HeapEntry* parent_entry,
void V8HeapExplorer::MarkVisitedField(int offset) {
if (offset < 0) return;
int index = offset / kPointerSize;
int index = offset / kTaggedSize;
DCHECK(!visited_fields_[index]);
visited_fields_[index] = true;
}

@ -31,8 +31,8 @@ intptr_t SamplingAllocationObserver::GetNextSampleInterval(uint64_t rate) {
}
double u = random_->NextDouble();
double next = (-base::ieee754::log(u)) * rate;
return next < kPointerSize
? kPointerSize
return next < kTaggedSize
? kTaggedSize
: (next > INT_MAX ? INT_MAX : static_cast<intptr_t>(next));
}
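The formula above is inverse-transform sampling: -log(u) for uniform u in (0, 1] is exponentially distributed with mean 1, so the next sample interval averages |rate| bytes, clamped below by the smallest possible allocation step. A self-contained sketch, assuming <random> for the uniform draw; not the V8 implementation:

#include <climits>
#include <cmath>
#include <cstdint>
#include <random>

intptr_t NextSampleInterval(uint64_t rate, std::mt19937_64& rng) {
  std::uniform_real_distribution<double> dist(0.0, 1.0);
  double u = dist(rng);
  double next = -std::log(u) * static_cast<double>(rate);
  constexpr intptr_t kMinInterval = 4;  // stand-in for kTaggedSize
  if (next < kMinInterval) return kMinInterval;
  if (next > INT_MAX) return INT_MAX;
  return static_cast<intptr_t>(next);
}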

@ -26,7 +26,7 @@ void SealCurrentObjects(Heap* heap) {
}
int FixedArrayLenFromSize(int size) {
return (size - FixedArray::kHeaderSize) / kPointerSize;
return (size - FixedArray::kHeaderSize) / kTaggedSize;
}
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
@ -93,7 +93,7 @@ std::vector<Handle<FixedArray>> CreatePadding(Heap* heap, int padding_size,
length = FixedArrayLenFromSize(allocate_memory);
if (length <= 0) {
// Not enough room to create another fixed array. Let's create a filler.
if (free_memory > (2 * kPointerSize)) {
if (free_memory > (2 * kTaggedSize)) {
heap->CreateFillerObjectAt(
*heap->old_space()->allocation_top_address(), free_memory,
ClearRecordedSlots::kNo);

@ -353,10 +353,10 @@ HEAP_TEST(CompactionPartiallyAbortedPageWithStoreBufferEntries) {
isolate->factory()->NewFixedArray(10, NOT_TENURED);
// Create a broken address that looks like a tagged pointer to a new space
// object.
Address broken_address = holder->address() + 2 * kPointerSize + 1;
Address broken_address = holder->address() + 2 * kTaggedSize + 1;
// Convert it to a vector to create a string from it.
Vector<const uint8_t> string_to_broken_addresss(
reinterpret_cast<const uint8_t*>(&broken_address), kPointerSize);
reinterpret_cast<const uint8_t*>(&broken_address), kTaggedSize);
Handle<String> string;
do {

@ -229,7 +229,7 @@ static void CheckFindCodeObject(Isolate* isolate) {
HeapObject* obj = HeapObject::cast(*code);
Address obj_addr = obj->address();
for (int i = 0; i < obj->Size(); i += kPointerSize) {
for (int i = 0; i < obj->Size(); i += kTaggedSize) {
Object* found = isolate->FindCodeObject(obj_addr + i);
CHECK_EQ(*code, found);
}
@ -1755,7 +1755,7 @@ HEAP_TEST(TestSizeOfObjects) {
TEST(TestAlignmentCalculations) {
// Maximum fill amounts are consistent.
int maximum_double_misalignment = kDoubleSize - kPointerSize;
int maximum_double_misalignment = kDoubleSize - kTaggedSize;
int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
CHECK_EQ(0, max_word_fill);
int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
@ -1769,19 +1769,19 @@ TEST(TestAlignmentCalculations) {
// Word alignment never requires fill.
fill = Heap::GetFillToAlign(base, kWordAligned);
CHECK_EQ(0, fill);
fill = Heap::GetFillToAlign(base + kPointerSize, kWordAligned);
fill = Heap::GetFillToAlign(base + kTaggedSize, kWordAligned);
CHECK_EQ(0, fill);
// No fill is required when address is double aligned.
fill = Heap::GetFillToAlign(base, kDoubleAligned);
CHECK_EQ(0, fill);
// Fill is required if address is not double aligned.
fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleAligned);
fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleAligned);
CHECK_EQ(maximum_double_misalignment, fill);
// kDoubleUnaligned has the opposite fill amounts.
fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
CHECK_EQ(maximum_double_misalignment, fill);
fill = Heap::GetFillToAlign(base + kPointerSize, kDoubleUnaligned);
fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleUnaligned);
CHECK_EQ(0, fill);
}
@ -1810,8 +1810,9 @@ static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
TEST(TestAlignedAllocation) {
// Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
const intptr_t double_misalignment = kDoubleSize - kPointerSize;
// Double misalignment is 4 on 32-bit platforms or when pointer compression
// is enabled, 0 on 64-bit ones when pointer compression is disabled.
const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
Address start;
HeapObject* obj;
@ -1820,35 +1821,33 @@ TEST(TestAlignedAllocation) {
// Allocate a pointer sized object that must be double aligned at an
// aligned address.
start = AlignNewSpace(kDoubleAligned, 0);
obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
CHECK(IsAligned(obj->address(), kDoubleAlignment));
// There is no filler.
CHECK_EQ(kPointerSize, *top_addr - start);
CHECK_EQ(kTaggedSize, *top_addr - start);
// Allocate a second pointer sized object that must be double aligned at an
// unaligned address.
start = AlignNewSpace(kDoubleAligned, kPointerSize);
obj = NewSpaceAllocateAligned(kPointerSize, kDoubleAligned);
start = AlignNewSpace(kDoubleAligned, kTaggedSize);
obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
CHECK(IsAligned(obj->address(), kDoubleAlignment));
// There is a filler object before the object.
filler = HeapObject::FromAddress(start);
CHECK(obj != filler && filler->IsFiller() &&
filler->Size() == kPointerSize);
CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
// Similarly for kDoubleUnaligned.
start = AlignNewSpace(kDoubleUnaligned, 0);
obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
CHECK(IsAligned(obj->address() + kPointerSize, kDoubleAlignment));
CHECK_EQ(kPointerSize, *top_addr - start);
start = AlignNewSpace(kDoubleUnaligned, kPointerSize);
obj = NewSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
CHECK(IsAligned(obj->address() + kPointerSize, kDoubleAlignment));
obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
CHECK_EQ(kTaggedSize, *top_addr - start);
start = AlignNewSpace(kDoubleUnaligned, kTaggedSize);
obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
// There is a filler object before the object.
filler = HeapObject::FromAddress(start);
CHECK(obj != filler && filler->IsFiller() &&
filler->Size() == kPointerSize);
CHECK_EQ(kPointerSize + double_misalignment, *top_addr - start);
CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
}
}
@ -1888,45 +1887,43 @@ TEST(TestAlignedOverAllocation) {
// page and empty free list.
heap::AbandonCurrentlyFreeMemory(heap->old_space());
// Allocate a dummy object to properly set up the linear allocation info.
AllocationResult dummy =
heap->old_space()->AllocateRawUnaligned(kPointerSize);
AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
CHECK(!dummy.IsRetry());
heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kPointerSize,
heap->CreateFillerObjectAt(dummy.ToObjectChecked()->address(), kTaggedSize,
ClearRecordedSlots::kNo);
// Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones.
const intptr_t double_misalignment = kDoubleSize - kPointerSize;
// Double misalignment is 4 on 32-bit platforms or when pointer compression
// is enabled, 0 on 64-bit ones when pointer compression is disabled.
const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
Address start;
HeapObject* obj;
HeapObject* filler;
if (double_misalignment) {
start = AlignOldSpace(kDoubleAligned, 0);
obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
// The object is aligned.
CHECK(IsAligned(obj->address(), kDoubleAlignment));
// Try the opposite alignment case.
start = AlignOldSpace(kDoubleAligned, kPointerSize);
obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned);
start = AlignOldSpace(kDoubleAligned, kTaggedSize);
obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
CHECK(IsAligned(obj->address(), kDoubleAlignment));
filler = HeapObject::FromAddress(start);
CHECK(obj != filler);
CHECK(filler->IsFiller());
CHECK_EQ(kPointerSize, filler->Size());
CHECK(obj != filler && filler->IsFiller() &&
filler->Size() == kPointerSize);
CHECK_EQ(kTaggedSize, filler->Size());
CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
// Similarly for kDoubleUnaligned.
start = AlignOldSpace(kDoubleUnaligned, 0);
obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
// The object is aligned.
CHECK(IsAligned(obj->address() + kPointerSize, kDoubleAlignment));
CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
// Try the opposite alignment case.
start = AlignOldSpace(kDoubleUnaligned, kPointerSize);
obj = OldSpaceAllocateAligned(kPointerSize, kDoubleUnaligned);
CHECK(IsAligned(obj->address() + kPointerSize, kDoubleAlignment));
start = AlignOldSpace(kDoubleUnaligned, kTaggedSize);
obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
CHECK(IsAligned(obj->address() + kTaggedSize, kDoubleAlignment));
filler = HeapObject::FromAddress(start);
CHECK(obj != filler && filler->IsFiller() &&
filler->Size() == kPointerSize);
CHECK(obj != filler && filler->IsFiller() && filler->Size() == kTaggedSize);
}
}
@ -3544,7 +3541,7 @@ TEST(Regress169928) {
heap::AllocateAllButNBytes(
CcTest::heap()->new_space(),
JSArray::kSize + AllocationMemento::kSize + kPointerSize);
JSArray::kSize + AllocationMemento::kSize + kTaggedSize);
Handle<JSArray> array =
factory->NewJSArrayWithElements(array_data, PACKED_SMI_ELEMENTS);
@ -3557,11 +3554,11 @@ TEST(Regress169928) {
HeapObject* obj = nullptr;
AllocationResult allocation =
CcTest::heap()->new_space()->AllocateRawUnaligned(
AllocationMemento::kSize + kPointerSize);
AllocationMemento::kSize + kTaggedSize);
CHECK(allocation.To(&obj));
Address addr_obj = obj->address();
CcTest::heap()->CreateFillerObjectAt(addr_obj,
AllocationMemento::kSize + kPointerSize,
AllocationMemento::kSize + kTaggedSize,
ClearRecordedSlots::kNo);
// Give the array a name, making sure not to allocate strings.
@ -5034,12 +5031,12 @@ TEST(BootstrappingExports) {
void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
CHECK_LE(FixedArray::kHeaderSize, bytes);
CHECK_EQ(0, bytes % kPointerSize);
CHECK(IsAligned(bytes, kTaggedSize));
Factory* factory = isolate->factory();
HandleScope scope(isolate);
AlwaysAllocateScope always_allocate(isolate);
int elements =
static_cast<int>((bytes - FixedArray::kHeaderSize) / kPointerSize);
static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
Handle<FixedArray> array = factory->NewFixedArray(
elements, space == NEW_SPACE ? NOT_TENURED : TENURED);
CHECK((space == NEW_SPACE) == Heap::InNewSpace(*array));
@ -5286,7 +5283,7 @@ HEAP_TEST(Regress587004) {
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
const int N =
(kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kPointerSize;
(kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kTaggedSize;
Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
CHECK(heap->old_space()->Contains(*array));
Handle<Object> number = factory->NewHeapNumber(1.0);
@ -5400,7 +5397,7 @@ TEST(Regress598319) {
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
const int kNumberOfObjects = kMaxRegularHeapObjectSize / kPointerSize;
const int kNumberOfObjects = kMaxRegularHeapObjectSize / kTaggedSize;
struct Arr {
Arr(Isolate* isolate, int number_of_objects) {
@ -5522,7 +5519,7 @@ TEST(Regress609761) {
CcTest::InitializeVM();
v8::HandleScope scope(CcTest::isolate());
Heap* heap = CcTest::heap();
int length = kMaxRegularHeapObjectSize / kPointerSize + 1;
int length = kMaxRegularHeapObjectSize / kTaggedSize + 1;
Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, length);
CHECK(heap->lo_space()->Contains(*array));
}
@ -5788,7 +5785,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
CHECK(heap->old_space()->Contains(*array));
// Trim it once by one word to make checking for white marking color uniform.
Address previous = end_address - kPointerSize;
Address previous = end_address - kTaggedSize;
isolate->heap()->RightTrimFixedArray(*array, 1);
HeapObject* filler = HeapObject::FromAddress(previous);
@ -5798,7 +5795,7 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
// Trim 10 times by one, two, and three word.
for (int i = 1; i <= 3; i++) {
for (int j = 0; j < 10; j++) {
previous -= kPointerSize * i;
previous -= kTaggedSize * i;
isolate->heap()->RightTrimFixedArray(*array, i);
HeapObject* filler = HeapObject::FromAddress(previous);
CHECK(filler->IsFiller());
@ -5943,19 +5940,19 @@ TEST(RememberedSetRemoveRange) {
Heap* heap = CcTest::heap();
Isolate* isolate = heap->isolate();
Handle<FixedArray> array = isolate->factory()->NewFixedArray(
Page::kPageSize / kPointerSize, TENURED);
Handle<FixedArray> array =
isolate->factory()->NewFixedArray(Page::kPageSize / kTaggedSize, TENURED);
MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
CHECK(chunk->owner()->identity() == LO_SPACE);
Address start = array->address();
// Maps slot to boolean indicator of whether the slot should be in the set.
std::map<Address, bool> slots;
slots[start + 0] = true;
slots[start + kPointerSize] = true;
slots[start + Page::kPageSize - kPointerSize] = true;
slots[start + kTaggedSize] = true;
slots[start + Page::kPageSize - kTaggedSize] = true;
slots[start + Page::kPageSize] = true;
slots[start + Page::kPageSize + kPointerSize] = true;
slots[chunk->area_end() - kPointerSize] = true;
slots[start + Page::kPageSize + kTaggedSize] = true;
slots[chunk->area_end() - kTaggedSize] = true;
for (auto x : slots) {
RememberedSet<OLD_TO_NEW>::Insert(chunk, x.first);
@ -5968,7 +5965,7 @@ TEST(RememberedSetRemoveRange) {
},
SlotSet::PREFREE_EMPTY_BUCKETS);
RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kPointerSize,
RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kTaggedSize,
SlotSet::FREE_EMPTY_BUCKETS);
slots[start] = false;
RememberedSet<OLD_TO_NEW>::Iterate(chunk,
@ -5978,11 +5975,11 @@ TEST(RememberedSetRemoveRange) {
},
SlotSet::PREFREE_EMPTY_BUCKETS);
RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kPointerSize,
RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kTaggedSize,
start + Page::kPageSize,
SlotSet::FREE_EMPTY_BUCKETS);
slots[start + kPointerSize] = false;
slots[start + Page::kPageSize - kPointerSize] = false;
slots[start + kTaggedSize] = false;
slots[start + Page::kPageSize - kTaggedSize] = false;
RememberedSet<OLD_TO_NEW>::Iterate(chunk,
[&slots](MaybeObjectSlot slot) {
CHECK(slots[slot.address()]);
@ -5991,7 +5988,7 @@ TEST(RememberedSetRemoveRange) {
SlotSet::PREFREE_EMPTY_BUCKETS);
RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start,
start + Page::kPageSize + kPointerSize,
start + Page::kPageSize + kTaggedSize,
SlotSet::FREE_EMPTY_BUCKETS);
slots[start + Page::kPageSize] = false;
RememberedSet<OLD_TO_NEW>::Iterate(chunk,
@ -6001,10 +5998,10 @@ TEST(RememberedSetRemoveRange) {
},
SlotSet::PREFREE_EMPTY_BUCKETS);
RememberedSet<OLD_TO_NEW>::RemoveRange(
chunk, chunk->area_end() - kPointerSize, chunk->area_end(),
SlotSet::FREE_EMPTY_BUCKETS);
slots[chunk->area_end() - kPointerSize] = false;
RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, chunk->area_end() - kTaggedSize,
chunk->area_end(),
SlotSet::FREE_EMPTY_BUCKETS);
slots[chunk->area_end() - kTaggedSize] = false;
RememberedSet<OLD_TO_NEW>::Iterate(chunk,
[&slots](MaybeObjectSlot slot) {
CHECK(slots[slot.address()]);
@ -6030,7 +6027,7 @@ HEAP_TEST(Regress670675) {
if (marking->IsStopped()) {
marking->Start(i::GarbageCollectionReason::kTesting);
}
size_t array_length = Page::kPageSize / kPointerSize + 100;
size_t array_length = Page::kPageSize / kTaggedSize + 100;
size_t n = heap->OldGenerationSpaceAvailable() / array_length;
for (size_t i = 0; i < n + 40; i++) {
{
@ -6135,7 +6132,7 @@ TEST(Regress6800LargeObject) {
Isolate* isolate = CcTest::i_isolate();
HandleScope handle_scope(isolate);
const int kRootLength = i::kMaxRegularHeapObjectSize / kPointerSize;
const int kRootLength = i::kMaxRegularHeapObjectSize / kTaggedSize;
Handle<FixedArray> root =
isolate->factory()->NewFixedArray(kRootLength, TENURED);
CcTest::heap()->lo_space()->Contains(*root);

@ -55,7 +55,7 @@ HEAP_TEST(InvalidatedSlotsNoInvalidatedRanges) {
for (ByteArray byte_array : byte_arrays) {
Address start = byte_array->address() + ByteArray::kHeaderSize;
Address end = byte_array->address() + byte_array->Size();
for (Address addr = start; addr < end; addr += kPointerSize) {
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK(filter.IsValid(addr));
}
}
@ -76,7 +76,7 @@ HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array->address() + ByteArray::kHeaderSize;
Address end = byte_array->address() + byte_array->Size();
for (Address addr = start; addr < end; addr += kPointerSize) {
for (Address addr = start; addr < end; addr += kTaggedSize) {
if (i % 2 == 0) {
CHECK(!filter.IsValid(addr));
} else {
@ -101,7 +101,7 @@ HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array->address() + ByteArray::kHeaderSize;
Address end = byte_array->address() + byte_array->Size();
for (Address addr = start; addr < end; addr += kPointerSize) {
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK(!filter.IsValid(addr));
}
}
@ -126,7 +126,7 @@ HEAP_TEST(InvalidatedSlotsAfterTrimming) {
Address start = byte_array->address() + ByteArray::kHeaderSize;
Address end = byte_array->address() + byte_array->Size();
heap->RightTrimFixedArray(byte_array, byte_array->length());
for (Address addr = start; addr < end; addr += kPointerSize) {
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK_EQ(filter.IsValid(addr), page->SweepingDone());
}
}
@ -152,7 +152,7 @@ HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array->address() + ByteArray::kHeaderSize;
Address end = byte_array->address() + byte_array->Size();
for (Address addr = start; addr < end; addr += kPointerSize) {
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK(filter.IsValid(addr));
}
}
@ -176,7 +176,7 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
ByteArray byte_array = byte_arrays[i];
Address start = byte_array->address() + ByteArray::kHeaderSize;
Address end = byte_array->address() + byte_array->Size();
for (Address addr = start; addr < end; addr += kPointerSize) {
for (Address addr = start; addr < end; addr += kTaggedSize) {
CHECK(!filter.IsValid(addr));
}
}
@ -247,7 +247,7 @@ HEAP_TEST(InvalidatedSlotsRightTrimLargeFixedArray) {
{
AlwaysAllocateScope always_allocate(isolate);
trimmed = factory->NewFixedArray(
kMaxRegularHeapObjectSize / kPointerSize + 100, TENURED);
kMaxRegularHeapObjectSize / kTaggedSize + 100, TENURED);
DCHECK(MemoryChunk::FromHeapObject(*trimmed)->InLargeObjectSpace());
}
heap::SimulateIncrementalMarking(heap);

@ -366,7 +366,7 @@ TEST(Regress5829) {
Address old_end = array->address() + array->Size();
// Right trim the array without clearing the mark bits.
array->set_length(9);
heap->CreateFillerObjectAt(old_end - kPointerSize, kPointerSize,
heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize,
ClearRecordedSlots::kNo);
heap->old_space()->FreeLinearAllocationArea();
Page* page = Page::FromAddress(array->address());

@ -617,7 +617,7 @@ HEAP_TEST(Regress791582) {
int until_page_end = static_cast<int>(new_space->limit() - new_space->top());
if (until_page_end % kPointerSize != 0) {
if (!IsAligned(until_page_end, kTaggedSize)) {
// The test works if the size of the allocation area is a multiple of
// pointer size. This is usually the case unless some allocation observer
// is already active (e.g. incremental marking observer).
@ -704,7 +704,7 @@ TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
heap::SealCurrentObjects(CcTest::heap());
const int kFillerSize = kPointerSize;
const int kFillerSize = kTaggedSize;
std::vector<Handle<FixedArray>> arrays =
heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize);
Handle<FixedArray> array = arrays.back();
@ -732,7 +732,7 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
heap::SealCurrentObjects(CcTest::heap());
const int kFillerSize = 2 * kPointerSize;
const int kFillerSize = 2 * kTaggedSize;
std::vector<Handle<FixedArray>> arrays =
heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize);
Handle<FixedArray> array = arrays.back();

@ -3849,7 +3849,7 @@ TEST(SamplingHeapProfilerSampleDuringDeopt) {
v8::internal::FLAG_sampling_heap_profiler_suppress_randomness = true;
// Small sample interval to force each object to be sampled.
heap_profiler->StartSamplingHeapProfiler(i::kPointerSize);
heap_profiler->StartSamplingHeapProfiler(i::kTaggedSize);
// Lazy deopt from runtime call from inlined callback function.
const char* source =

@ -14,7 +14,7 @@ namespace internal {
TEST(Marking, TransitionWhiteBlackWhite) {
Bitmap* bitmap = reinterpret_cast<Bitmap*>(
calloc(Bitmap::kSize / kPointerSize, kPointerSize));
calloc(Bitmap::kSize / kTaggedSize, kTaggedSize));
const int kLocationsSize = 3;
int position[kLocationsSize] = {
Bitmap::kBitsPerCell - 2, Bitmap::kBitsPerCell - 1, Bitmap::kBitsPerCell};
@ -34,7 +34,7 @@ TEST(Marking, TransitionWhiteBlackWhite) {
TEST(Marking, TransitionWhiteGreyBlack) {
Bitmap* bitmap = reinterpret_cast<Bitmap*>(
calloc(Bitmap::kSize / kPointerSize, kPointerSize));
calloc(Bitmap::kSize / kTaggedSize, kTaggedSize));
const int kLocationsSize = 3;
int position[kLocationsSize] = {
Bitmap::kBitsPerCell - 2, Bitmap::kBitsPerCell - 1, Bitmap::kBitsPerCell};
@ -60,7 +60,7 @@ TEST(Marking, TransitionWhiteGreyBlack) {
TEST(Marking, SetAndClearRange) {
Bitmap* bitmap = reinterpret_cast<Bitmap*>(
calloc(Bitmap::kSize / kPointerSize, kPointerSize));
calloc(Bitmap::kSize / kTaggedSize, kTaggedSize));
for (int i = 0; i < 3; i++) {
bitmap->SetRange(i, Bitmap::kBitsPerCell + i);
CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[0], 0xFFFFFFFFu << i);
@@ -74,7 +74,7 @@ TEST(Marking, SetAndClearRange) {
TEST(Marking, ClearMultipleRanges) {
Bitmap* bitmap = reinterpret_cast<Bitmap*>(
calloc(Bitmap::kSize / kPointerSize, kPointerSize));
calloc(Bitmap::kSize / kTaggedSize, kTaggedSize));
CHECK(bitmap->AllBitsClearInRange(0, Bitmap::kBitsPerCell * 3));
bitmap->SetRange(0, Bitmap::kBitsPerCell * 3);
CHECK_EQ(reinterpret_cast<uint32_t*>(bitmap)[0], 0xFFFFFFFFu);
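
These tests allocate a zeroed, stand-alone bitmap; calloc(Bitmap::kSize / kTaggedSize, kTaggedSize) still reserves exactly Bitmap::kSize bytes, it merely states the element size in tagged words. A minimal sketch of the invariant being relied on, assuming Bitmap::kSize is a multiple of kTaggedSize:

// Sketch: the two calloc arguments must multiply out to the bitmap's byte size.
static_assert(Bitmap::kSize % kTaggedSize == 0, "bitmap size is word-granular");
Bitmap* bitmap =
    reinterpret_cast<Bitmap*>(calloc(Bitmap::kSize / kTaggedSize, kTaggedSize));
// ... exercise marking as in the tests above ...
free(bitmap);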

@@ -17,13 +17,13 @@ namespace internal {
TEST(SlotSet, InsertAndLookup1) {
SlotSet set;
set.SetPageStart(0);
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
EXPECT_FALSE(set.Lookup(i));
}
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
set.Insert(i);
}
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
EXPECT_TRUE(set.Lookup(i));
}
}
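
All of these loops step by the slot granularity: a SlotSet records at most one slot per kTaggedSize bytes of a page, so the candidate offsets are exactly the multiples of kTaggedSize. A small sketch with a hypothetical helper:

// Visit every offset a SlotSet can represent on a page starting at 0.
template <typename Callback>
void ForEachPossibleSlotOffset(Callback callback) {
  for (uint32_t offset = 0; offset < Page::kPageSize; offset += kTaggedSize) {
    callback(offset);  // slots are tagged-word aligned
  }
}
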
@@ -31,12 +31,12 @@ TEST(SlotSet, InsertAndLookup1) {
TEST(SlotSet, InsertAndLookup2) {
SlotSet set;
set.SetPageStart(0);
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
if (i % 7 == 0) {
set.Insert(i);
}
}
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
if (i % 7 == 0) {
EXPECT_TRUE(set.Lookup(i));
} else {
@@ -48,7 +48,7 @@ TEST(SlotSet, InsertAndLookup2) {
TEST(SlotSet, Iterate) {
SlotSet set;
set.SetPageStart(0);
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
if (i % 7 == 0) {
set.Insert(i);
}
@@ -64,7 +64,7 @@ TEST(SlotSet, Iterate) {
},
SlotSet::KEEP_EMPTY_BUCKETS);
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
if (i % 21 == 0) {
EXPECT_TRUE(set.Lookup(i));
} else {
@@ -76,19 +76,19 @@ TEST(SlotSet, Iterate) {
TEST(SlotSet, Remove) {
SlotSet set;
set.SetPageStart(0);
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
if (i % 7 == 0) {
set.Insert(i);
}
}
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
if (i % 3 != 0) {
set.Remove(i);
}
}
for (int i = 0; i < Page::kPageSize; i += kPointerSize) {
for (int i = 0; i < Page::kPageSize; i += kTaggedSize) {
if (i % 21 == 0) {
EXPECT_TRUE(set.Lookup(i));
} else {
@@ -100,11 +100,11 @@ TEST(SlotSet, Remove) {
void CheckRemoveRangeOn(uint32_t start, uint32_t end) {
SlotSet set;
set.SetPageStart(0);
uint32_t first = start == 0 ? 0 : start - kPointerSize;
uint32_t last = end == Page::kPageSize ? end - kPointerSize : end;
uint32_t first = start == 0 ? 0 : start - kTaggedSize;
uint32_t last = end == Page::kPageSize ? end - kTaggedSize : end;
for (const auto mode :
{SlotSet::FREE_EMPTY_BUCKETS, SlotSet::KEEP_EMPTY_BUCKETS}) {
for (uint32_t i = first; i <= last; i += kPointerSize) {
for (uint32_t i = first; i <= last; i += kTaggedSize) {
set.Insert(i);
}
set.RemoveRange(start, end, mode);
@@ -114,7 +114,7 @@ void CheckRemoveRangeOn(uint32_t start, uint32_t end) {
if (last == end) {
EXPECT_TRUE(set.Lookup(last));
}
for (uint32_t i = start; i < end; i += kPointerSize) {
for (uint32_t i = start; i < end; i += kTaggedSize) {
EXPECT_FALSE(set.Lookup(i));
}
}
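
CheckRemoveRangeOn pins down the half-open contract of RemoveRange: every slot in [start, end) is cleared while the nearest neighbors outside the range survive. Condensed, the property it verifies is roughly the following (assuming start > 0 and end < Page::kPageSize so both boundary slots exist):

// RemoveRange clears the half-open range [start, end) and nothing else.
set.Insert(start - kTaggedSize);  // just below the range
set.Insert(start);                // first slot inside
set.Insert(end - kTaggedSize);    // last slot inside
set.Insert(end);                  // just past the range
set.RemoveRange(start, end, SlotSet::KEEP_EMPTY_BUCKETS);
EXPECT_TRUE(set.Lookup(start - kTaggedSize));
EXPECT_FALSE(set.Lookup(start));
EXPECT_FALSE(set.Lookup(end - kTaggedSize));
EXPECT_TRUE(set.Lookup(end));
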
@@ -122,16 +122,16 @@ void CheckRemoveRangeOn(uint32_t start, uint32_t end) {
TEST(SlotSet, RemoveRange) {
CheckRemoveRangeOn(0, Page::kPageSize);
CheckRemoveRangeOn(1 * kPointerSize, 1023 * kPointerSize);
CheckRemoveRangeOn(1 * kTaggedSize, 1023 * kTaggedSize);
for (uint32_t start = 0; start <= 32; start++) {
CheckRemoveRangeOn(start * kPointerSize, (start + 1) * kPointerSize);
CheckRemoveRangeOn(start * kPointerSize, (start + 2) * kPointerSize);
CheckRemoveRangeOn(start * kTaggedSize, (start + 1) * kTaggedSize);
CheckRemoveRangeOn(start * kTaggedSize, (start + 2) * kTaggedSize);
const uint32_t kEnds[] = {32, 64, 100, 128, 1024, 1500, 2048};
for (size_t i = 0; i < sizeof(kEnds) / sizeof(uint32_t); i++) {
for (int k = -3; k <= 3; k++) {
uint32_t end = (kEnds[i] + k);
if (start < end) {
CheckRemoveRangeOn(start * kPointerSize, end * kPointerSize);
CheckRemoveRangeOn(start * kTaggedSize, end * kTaggedSize);
}
}
}
@@ -142,7 +142,7 @@ TEST(SlotSet, RemoveRange) {
{SlotSet::FREE_EMPTY_BUCKETS, SlotSet::KEEP_EMPTY_BUCKETS}) {
set.Insert(Page::kPageSize / 2);
set.RemoveRange(0, Page::kPageSize, mode);
for (uint32_t i = 0; i < Page::kPageSize; i += kPointerSize) {
for (uint32_t i = 0; i < Page::kPageSize; i += kTaggedSize) {
EXPECT_FALSE(set.Lookup(i));
}
}

@@ -193,248 +193,248 @@ INSTANCE_TYPES = {
# List of known V8 maps.
KNOWN_MAPS = {
("RO_SPACE", 0x00139): (138, "FreeSpaceMap"),
("RO_SPACE", 0x00189): (132, "MetaMap"),
("RO_SPACE", 0x00209): (131, "NullMap"),
("RO_SPACE", 0x00271): (216, "DescriptorArrayMap"),
("RO_SPACE", 0x002d1): (211, "WeakFixedArrayMap"),
("RO_SPACE", 0x00321): (152, "OnePointerFillerMap"),
("RO_SPACE", 0x00371): (152, "TwoPointerFillerMap"),
("RO_SPACE", 0x003f1): (131, "UninitializedMap"),
("RO_SPACE", 0x00461): (8, "OneByteInternalizedStringMap"),
("RO_SPACE", 0x00501): (131, "UndefinedMap"),
("RO_SPACE", 0x00561): (129, "HeapNumberMap"),
("RO_SPACE", 0x005e1): (131, "TheHoleMap"),
("RO_SPACE", 0x00689): (131, "BooleanMap"),
("RO_SPACE", 0x00761): (136, "ByteArrayMap"),
("RO_SPACE", 0x007b1): (187, "FixedArrayMap"),
("RO_SPACE", 0x00801): (187, "FixedCOWArrayMap"),
("RO_SPACE", 0x00851): (189, "HashTableMap"),
("RO_SPACE", 0x008a1): (128, "SymbolMap"),
("RO_SPACE", 0x008f1): (72, "OneByteStringMap"),
("RO_SPACE", 0x00941): (199, "ScopeInfoMap"),
("RO_SPACE", 0x00991): (223, "SharedFunctionInfoMap"),
("RO_SPACE", 0x009e1): (133, "CodeMap"),
("RO_SPACE", 0x00a31): (206, "FunctionContextMap"),
("RO_SPACE", 0x00a81): (214, "CellMap"),
("RO_SPACE", 0x00ad1): (222, "GlobalPropertyCellMap"),
("RO_SPACE", 0x00b21): (135, "ForeignMap"),
("RO_SPACE", 0x00b71): (212, "TransitionArrayMap"),
("RO_SPACE", 0x00bc1): (218, "FeedbackVectorMap"),
("RO_SPACE", 0x00c61): (131, "ArgumentsMarkerMap"),
("RO_SPACE", 0x00d01): (131, "ExceptionMap"),
("RO_SPACE", 0x00da1): (131, "TerminationExceptionMap"),
("RO_SPACE", 0x00e49): (131, "OptimizedOutMap"),
("RO_SPACE", 0x00ee9): (131, "StaleRegisterMap"),
("RO_SPACE", 0x00f59): (208, "NativeContextMap"),
("RO_SPACE", 0x00fa9): (207, "ModuleContextMap"),
("RO_SPACE", 0x00ff9): (205, "EvalContextMap"),
("RO_SPACE", 0x01049): (209, "ScriptContextMap"),
("RO_SPACE", 0x01099): (201, "AwaitContextMap"),
("RO_SPACE", 0x010e9): (202, "BlockContextMap"),
("RO_SPACE", 0x01139): (203, "CatchContextMap"),
("RO_SPACE", 0x01189): (210, "WithContextMap"),
("RO_SPACE", 0x011d9): (204, "DebugEvaluateContextMap"),
("RO_SPACE", 0x01229): (200, "ScriptContextTableMap"),
("RO_SPACE", 0x01279): (151, "FeedbackMetadataArrayMap"),
("RO_SPACE", 0x012c9): (187, "ArrayListMap"),
("RO_SPACE", 0x01319): (130, "BigIntMap"),
("RO_SPACE", 0x01369): (188, "ObjectBoilerplateDescriptionMap"),
("RO_SPACE", 0x013b9): (137, "BytecodeArrayMap"),
("RO_SPACE", 0x01409): (215, "CodeDataContainerMap"),
("RO_SPACE", 0x01459): (150, "FixedDoubleArrayMap"),
("RO_SPACE", 0x014a9): (194, "GlobalDictionaryMap"),
("RO_SPACE", 0x014f9): (217, "ManyClosuresCellMap"),
("RO_SPACE", 0x01549): (187, "ModuleInfoMap"),
("RO_SPACE", 0x01599): (134, "MutableHeapNumberMap"),
("RO_SPACE", 0x015e9): (193, "NameDictionaryMap"),
("RO_SPACE", 0x01639): (217, "NoClosuresCellMap"),
("RO_SPACE", 0x01689): (217, "NoFeedbackCellMap"),
("RO_SPACE", 0x016d9): (195, "NumberDictionaryMap"),
("RO_SPACE", 0x01729): (217, "OneClosureCellMap"),
("RO_SPACE", 0x01779): (190, "OrderedHashMapMap"),
("RO_SPACE", 0x017c9): (191, "OrderedHashSetMap"),
("RO_SPACE", 0x01819): (192, "OrderedNameDictionaryMap"),
("RO_SPACE", 0x01869): (220, "PreParsedScopeDataMap"),
("RO_SPACE", 0x018b9): (221, "PropertyArrayMap"),
("RO_SPACE", 0x01909): (213, "SideEffectCallHandlerInfoMap"),
("RO_SPACE", 0x01959): (213, "SideEffectFreeCallHandlerInfoMap"),
("RO_SPACE", 0x019a9): (213, "NextCallSideEffectFreeCallHandlerInfoMap"),
("RO_SPACE", 0x019f9): (196, "SimpleNumberDictionaryMap"),
("RO_SPACE", 0x01a49): (187, "SloppyArgumentsElementsMap"),
("RO_SPACE", 0x01a99): (224, "SmallOrderedHashMapMap"),
("RO_SPACE", 0x01ae9): (225, "SmallOrderedHashSetMap"),
("RO_SPACE", 0x01b39): (226, "SmallOrderedNameDictionaryMap"),
("RO_SPACE", 0x01b89): (197, "StringTableMap"),
("RO_SPACE", 0x01bd9): (228, "UncompiledDataWithoutPreParsedScopeMap"),
("RO_SPACE", 0x01c29): (229, "UncompiledDataWithPreParsedScopeMap"),
("RO_SPACE", 0x01c79): (230, "WeakArrayListMap"),
("RO_SPACE", 0x01cc9): (198, "EphemeronHashTableMap"),
("RO_SPACE", 0x01d19): (186, "EmbedderDataArrayMap"),
("RO_SPACE", 0x01d69): (106, "NativeSourceStringMap"),
("RO_SPACE", 0x01db9): (64, "StringMap"),
("RO_SPACE", 0x01e09): (73, "ConsOneByteStringMap"),
("RO_SPACE", 0x01e59): (65, "ConsStringMap"),
("RO_SPACE", 0x01ea9): (77, "ThinOneByteStringMap"),
("RO_SPACE", 0x01ef9): (69, "ThinStringMap"),
("RO_SPACE", 0x01f49): (67, "SlicedStringMap"),
("RO_SPACE", 0x01f99): (75, "SlicedOneByteStringMap"),
("RO_SPACE", 0x01fe9): (66, "ExternalStringMap"),
("RO_SPACE", 0x02039): (82, "ExternalStringWithOneByteDataMap"),
("RO_SPACE", 0x02089): (74, "ExternalOneByteStringMap"),
("RO_SPACE", 0x020d9): (98, "UncachedExternalStringMap"),
("RO_SPACE", 0x02129): (114, "UncachedExternalStringWithOneByteDataMap"),
("RO_SPACE", 0x02179): (0, "InternalizedStringMap"),
("RO_SPACE", 0x021c9): (2, "ExternalInternalizedStringMap"),
("RO_SPACE", 0x02219): (18, "ExternalInternalizedStringWithOneByteDataMap"),
("RO_SPACE", 0x02269): (10, "ExternalOneByteInternalizedStringMap"),
("RO_SPACE", 0x022b9): (34, "UncachedExternalInternalizedStringMap"),
("RO_SPACE", 0x02309): (50, "UncachedExternalInternalizedStringWithOneByteDataMap"),
("RO_SPACE", 0x02359): (42, "UncachedExternalOneByteInternalizedStringMap"),
("RO_SPACE", 0x023a9): (106, "UncachedExternalOneByteStringMap"),
("RO_SPACE", 0x023f9): (140, "FixedUint8ArrayMap"),
("RO_SPACE", 0x02449): (139, "FixedInt8ArrayMap"),
("RO_SPACE", 0x02499): (142, "FixedUint16ArrayMap"),
("RO_SPACE", 0x024e9): (141, "FixedInt16ArrayMap"),
("RO_SPACE", 0x02539): (144, "FixedUint32ArrayMap"),
("RO_SPACE", 0x02589): (143, "FixedInt32ArrayMap"),
("RO_SPACE", 0x025d9): (145, "FixedFloat32ArrayMap"),
("RO_SPACE", 0x02629): (146, "FixedFloat64ArrayMap"),
("RO_SPACE", 0x02679): (147, "FixedUint8ClampedArrayMap"),
("RO_SPACE", 0x026c9): (149, "FixedBigUint64ArrayMap"),
("RO_SPACE", 0x02719): (148, "FixedBigInt64ArrayMap"),
("RO_SPACE", 0x02769): (131, "SelfReferenceMarkerMap"),
("RO_SPACE", 0x027d1): (173, "Tuple2Map"),
("RO_SPACE", 0x02871): (175, "ArrayBoilerplateDescriptionMap"),
("RO_SPACE", 0x02bb1): (163, "InterceptorInfoMap"),
("RO_SPACE", 0x050d1): (153, "AccessCheckInfoMap"),
("RO_SPACE", 0x05121): (154, "AccessorInfoMap"),
("RO_SPACE", 0x05171): (155, "AccessorPairMap"),
("RO_SPACE", 0x051c1): (156, "AliasedArgumentsEntryMap"),
("RO_SPACE", 0x05211): (157, "AllocationMementoMap"),
("RO_SPACE", 0x05261): (158, "AsmWasmDataMap"),
("RO_SPACE", 0x052b1): (159, "AsyncGeneratorRequestMap"),
("RO_SPACE", 0x05301): (160, "DebugInfoMap"),
("RO_SPACE", 0x05351): (161, "FunctionTemplateInfoMap"),
("RO_SPACE", 0x053a1): (162, "FunctionTemplateRareDataMap"),
("RO_SPACE", 0x053f1): (164, "InterpreterDataMap"),
("RO_SPACE", 0x05441): (165, "ModuleInfoEntryMap"),
("RO_SPACE", 0x05491): (166, "ModuleMap"),
("RO_SPACE", 0x054e1): (167, "ObjectTemplateInfoMap"),
("RO_SPACE", 0x05531): (168, "PromiseCapabilityMap"),
("RO_SPACE", 0x05581): (169, "PromiseReactionMap"),
("RO_SPACE", 0x055d1): (170, "PrototypeInfoMap"),
("RO_SPACE", 0x05621): (171, "ScriptMap"),
("RO_SPACE", 0x05671): (172, "StackFrameInfoMap"),
("RO_SPACE", 0x056c1): (174, "Tuple3Map"),
("RO_SPACE", 0x05711): (176, "WasmDebugInfoMap"),
("RO_SPACE", 0x05761): (177, "WasmExceptionTagMap"),
("RO_SPACE", 0x057b1): (178, "WasmExportedFunctionDataMap"),
("RO_SPACE", 0x05801): (179, "CallableTaskMap"),
("RO_SPACE", 0x05851): (180, "CallbackTaskMap"),
("RO_SPACE", 0x058a1): (181, "PromiseFulfillReactionJobTaskMap"),
("RO_SPACE", 0x058f1): (182, "PromiseRejectReactionJobTaskMap"),
("RO_SPACE", 0x05941): (183, "PromiseResolveThenableJobTaskMap"),
("RO_SPACE", 0x05991): (184, "WeakFactoryCleanupJobTaskMap"),
("RO_SPACE", 0x059e1): (185, "AllocationSiteWithWeakNextMap"),
("RO_SPACE", 0x05a31): (185, "AllocationSiteWithoutWeakNextMap"),
("RO_SPACE", 0x05a81): (219, "LoadHandler1Map"),
("RO_SPACE", 0x05ad1): (219, "LoadHandler2Map"),
("RO_SPACE", 0x05b21): (219, "LoadHandler3Map"),
("RO_SPACE", 0x05b71): (227, "StoreHandler0Map"),
("RO_SPACE", 0x05bc1): (227, "StoreHandler1Map"),
("RO_SPACE", 0x05c11): (227, "StoreHandler2Map"),
("RO_SPACE", 0x05c61): (227, "StoreHandler3Map"),
("MAP_SPACE", 0x00139): (1057, "ExternalMap"),
("MAP_SPACE", 0x00189): (1073, "JSMessageObjectMap"),
("RO_SPACE", 0x00131): (138, "FreeSpaceMap"),
("RO_SPACE", 0x00181): (132, "MetaMap"),
("RO_SPACE", 0x00201): (131, "NullMap"),
("RO_SPACE", 0x00269): (216, "DescriptorArrayMap"),
("RO_SPACE", 0x002c9): (211, "WeakFixedArrayMap"),
("RO_SPACE", 0x00319): (152, "OnePointerFillerMap"),
("RO_SPACE", 0x00369): (152, "TwoPointerFillerMap"),
("RO_SPACE", 0x003e9): (131, "UninitializedMap"),
("RO_SPACE", 0x00459): (8, "OneByteInternalizedStringMap"),
("RO_SPACE", 0x004f9): (131, "UndefinedMap"),
("RO_SPACE", 0x00559): (129, "HeapNumberMap"),
("RO_SPACE", 0x005d9): (131, "TheHoleMap"),
("RO_SPACE", 0x00681): (131, "BooleanMap"),
("RO_SPACE", 0x00759): (136, "ByteArrayMap"),
("RO_SPACE", 0x007a9): (187, "FixedArrayMap"),
("RO_SPACE", 0x007f9): (187, "FixedCOWArrayMap"),
("RO_SPACE", 0x00849): (189, "HashTableMap"),
("RO_SPACE", 0x00899): (128, "SymbolMap"),
("RO_SPACE", 0x008e9): (72, "OneByteStringMap"),
("RO_SPACE", 0x00939): (199, "ScopeInfoMap"),
("RO_SPACE", 0x00989): (223, "SharedFunctionInfoMap"),
("RO_SPACE", 0x009d9): (133, "CodeMap"),
("RO_SPACE", 0x00a29): (206, "FunctionContextMap"),
("RO_SPACE", 0x00a79): (214, "CellMap"),
("RO_SPACE", 0x00ac9): (222, "GlobalPropertyCellMap"),
("RO_SPACE", 0x00b19): (135, "ForeignMap"),
("RO_SPACE", 0x00b69): (212, "TransitionArrayMap"),
("RO_SPACE", 0x00bb9): (218, "FeedbackVectorMap"),
("RO_SPACE", 0x00c59): (131, "ArgumentsMarkerMap"),
("RO_SPACE", 0x00cf9): (131, "ExceptionMap"),
("RO_SPACE", 0x00d99): (131, "TerminationExceptionMap"),
("RO_SPACE", 0x00e41): (131, "OptimizedOutMap"),
("RO_SPACE", 0x00ee1): (131, "StaleRegisterMap"),
("RO_SPACE", 0x00f51): (208, "NativeContextMap"),
("RO_SPACE", 0x00fa1): (207, "ModuleContextMap"),
("RO_SPACE", 0x00ff1): (205, "EvalContextMap"),
("RO_SPACE", 0x01041): (209, "ScriptContextMap"),
("RO_SPACE", 0x01091): (201, "AwaitContextMap"),
("RO_SPACE", 0x010e1): (202, "BlockContextMap"),
("RO_SPACE", 0x01131): (203, "CatchContextMap"),
("RO_SPACE", 0x01181): (210, "WithContextMap"),
("RO_SPACE", 0x011d1): (204, "DebugEvaluateContextMap"),
("RO_SPACE", 0x01221): (200, "ScriptContextTableMap"),
("RO_SPACE", 0x01271): (151, "FeedbackMetadataArrayMap"),
("RO_SPACE", 0x012c1): (187, "ArrayListMap"),
("RO_SPACE", 0x01311): (130, "BigIntMap"),
("RO_SPACE", 0x01361): (188, "ObjectBoilerplateDescriptionMap"),
("RO_SPACE", 0x013b1): (137, "BytecodeArrayMap"),
("RO_SPACE", 0x01401): (215, "CodeDataContainerMap"),
("RO_SPACE", 0x01451): (150, "FixedDoubleArrayMap"),
("RO_SPACE", 0x014a1): (194, "GlobalDictionaryMap"),
("RO_SPACE", 0x014f1): (217, "ManyClosuresCellMap"),
("RO_SPACE", 0x01541): (187, "ModuleInfoMap"),
("RO_SPACE", 0x01591): (134, "MutableHeapNumberMap"),
("RO_SPACE", 0x015e1): (193, "NameDictionaryMap"),
("RO_SPACE", 0x01631): (217, "NoClosuresCellMap"),
("RO_SPACE", 0x01681): (217, "NoFeedbackCellMap"),
("RO_SPACE", 0x016d1): (195, "NumberDictionaryMap"),
("RO_SPACE", 0x01721): (217, "OneClosureCellMap"),
("RO_SPACE", 0x01771): (190, "OrderedHashMapMap"),
("RO_SPACE", 0x017c1): (191, "OrderedHashSetMap"),
("RO_SPACE", 0x01811): (192, "OrderedNameDictionaryMap"),
("RO_SPACE", 0x01861): (220, "PreParsedScopeDataMap"),
("RO_SPACE", 0x018b1): (221, "PropertyArrayMap"),
("RO_SPACE", 0x01901): (213, "SideEffectCallHandlerInfoMap"),
("RO_SPACE", 0x01951): (213, "SideEffectFreeCallHandlerInfoMap"),
("RO_SPACE", 0x019a1): (213, "NextCallSideEffectFreeCallHandlerInfoMap"),
("RO_SPACE", 0x019f1): (196, "SimpleNumberDictionaryMap"),
("RO_SPACE", 0x01a41): (187, "SloppyArgumentsElementsMap"),
("RO_SPACE", 0x01a91): (224, "SmallOrderedHashMapMap"),
("RO_SPACE", 0x01ae1): (225, "SmallOrderedHashSetMap"),
("RO_SPACE", 0x01b31): (226, "SmallOrderedNameDictionaryMap"),
("RO_SPACE", 0x01b81): (197, "StringTableMap"),
("RO_SPACE", 0x01bd1): (228, "UncompiledDataWithoutPreParsedScopeMap"),
("RO_SPACE", 0x01c21): (229, "UncompiledDataWithPreParsedScopeMap"),
("RO_SPACE", 0x01c71): (230, "WeakArrayListMap"),
("RO_SPACE", 0x01cc1): (198, "EphemeronHashTableMap"),
("RO_SPACE", 0x01d11): (186, "EmbedderDataArrayMap"),
("RO_SPACE", 0x01d61): (106, "NativeSourceStringMap"),
("RO_SPACE", 0x01db1): (64, "StringMap"),
("RO_SPACE", 0x01e01): (73, "ConsOneByteStringMap"),
("RO_SPACE", 0x01e51): (65, "ConsStringMap"),
("RO_SPACE", 0x01ea1): (77, "ThinOneByteStringMap"),
("RO_SPACE", 0x01ef1): (69, "ThinStringMap"),
("RO_SPACE", 0x01f41): (67, "SlicedStringMap"),
("RO_SPACE", 0x01f91): (75, "SlicedOneByteStringMap"),
("RO_SPACE", 0x01fe1): (66, "ExternalStringMap"),
("RO_SPACE", 0x02031): (82, "ExternalStringWithOneByteDataMap"),
("RO_SPACE", 0x02081): (74, "ExternalOneByteStringMap"),
("RO_SPACE", 0x020d1): (98, "UncachedExternalStringMap"),
("RO_SPACE", 0x02121): (114, "UncachedExternalStringWithOneByteDataMap"),
("RO_SPACE", 0x02171): (0, "InternalizedStringMap"),
("RO_SPACE", 0x021c1): (2, "ExternalInternalizedStringMap"),
("RO_SPACE", 0x02211): (18, "ExternalInternalizedStringWithOneByteDataMap"),
("RO_SPACE", 0x02261): (10, "ExternalOneByteInternalizedStringMap"),
("RO_SPACE", 0x022b1): (34, "UncachedExternalInternalizedStringMap"),
("RO_SPACE", 0x02301): (50, "UncachedExternalInternalizedStringWithOneByteDataMap"),
("RO_SPACE", 0x02351): (42, "UncachedExternalOneByteInternalizedStringMap"),
("RO_SPACE", 0x023a1): (106, "UncachedExternalOneByteStringMap"),
("RO_SPACE", 0x023f1): (140, "FixedUint8ArrayMap"),
("RO_SPACE", 0x02441): (139, "FixedInt8ArrayMap"),
("RO_SPACE", 0x02491): (142, "FixedUint16ArrayMap"),
("RO_SPACE", 0x024e1): (141, "FixedInt16ArrayMap"),
("RO_SPACE", 0x02531): (144, "FixedUint32ArrayMap"),
("RO_SPACE", 0x02581): (143, "FixedInt32ArrayMap"),
("RO_SPACE", 0x025d1): (145, "FixedFloat32ArrayMap"),
("RO_SPACE", 0x02621): (146, "FixedFloat64ArrayMap"),
("RO_SPACE", 0x02671): (147, "FixedUint8ClampedArrayMap"),
("RO_SPACE", 0x026c1): (149, "FixedBigUint64ArrayMap"),
("RO_SPACE", 0x02711): (148, "FixedBigInt64ArrayMap"),
("RO_SPACE", 0x02761): (131, "SelfReferenceMarkerMap"),
("RO_SPACE", 0x027c9): (173, "Tuple2Map"),
("RO_SPACE", 0x02869): (175, "ArrayBoilerplateDescriptionMap"),
("RO_SPACE", 0x02ba9): (163, "InterceptorInfoMap"),
("RO_SPACE", 0x050c9): (153, "AccessCheckInfoMap"),
("RO_SPACE", 0x05119): (154, "AccessorInfoMap"),
("RO_SPACE", 0x05169): (155, "AccessorPairMap"),
("RO_SPACE", 0x051b9): (156, "AliasedArgumentsEntryMap"),
("RO_SPACE", 0x05209): (157, "AllocationMementoMap"),
("RO_SPACE", 0x05259): (158, "AsmWasmDataMap"),
("RO_SPACE", 0x052a9): (159, "AsyncGeneratorRequestMap"),
("RO_SPACE", 0x052f9): (160, "DebugInfoMap"),
("RO_SPACE", 0x05349): (161, "FunctionTemplateInfoMap"),
("RO_SPACE", 0x05399): (162, "FunctionTemplateRareDataMap"),
("RO_SPACE", 0x053e9): (164, "InterpreterDataMap"),
("RO_SPACE", 0x05439): (165, "ModuleInfoEntryMap"),
("RO_SPACE", 0x05489): (166, "ModuleMap"),
("RO_SPACE", 0x054d9): (167, "ObjectTemplateInfoMap"),
("RO_SPACE", 0x05529): (168, "PromiseCapabilityMap"),
("RO_SPACE", 0x05579): (169, "PromiseReactionMap"),
("RO_SPACE", 0x055c9): (170, "PrototypeInfoMap"),
("RO_SPACE", 0x05619): (171, "ScriptMap"),
("RO_SPACE", 0x05669): (172, "StackFrameInfoMap"),
("RO_SPACE", 0x056b9): (174, "Tuple3Map"),
("RO_SPACE", 0x05709): (176, "WasmDebugInfoMap"),
("RO_SPACE", 0x05759): (177, "WasmExceptionTagMap"),
("RO_SPACE", 0x057a9): (178, "WasmExportedFunctionDataMap"),
("RO_SPACE", 0x057f9): (179, "CallableTaskMap"),
("RO_SPACE", 0x05849): (180, "CallbackTaskMap"),
("RO_SPACE", 0x05899): (181, "PromiseFulfillReactionJobTaskMap"),
("RO_SPACE", 0x058e9): (182, "PromiseRejectReactionJobTaskMap"),
("RO_SPACE", 0x05939): (183, "PromiseResolveThenableJobTaskMap"),
("RO_SPACE", 0x05989): (184, "WeakFactoryCleanupJobTaskMap"),
("RO_SPACE", 0x059d9): (185, "AllocationSiteWithWeakNextMap"),
("RO_SPACE", 0x05a29): (185, "AllocationSiteWithoutWeakNextMap"),
("RO_SPACE", 0x05a79): (219, "LoadHandler1Map"),
("RO_SPACE", 0x05ac9): (219, "LoadHandler2Map"),
("RO_SPACE", 0x05b19): (219, "LoadHandler3Map"),
("RO_SPACE", 0x05b69): (227, "StoreHandler0Map"),
("RO_SPACE", 0x05bb9): (227, "StoreHandler1Map"),
("RO_SPACE", 0x05c09): (227, "StoreHandler2Map"),
("RO_SPACE", 0x05c59): (227, "StoreHandler3Map"),
("MAP_SPACE", 0x00131): (1057, "ExternalMap"),
("MAP_SPACE", 0x00181): (1073, "JSMessageObjectMap"),
}
# List of known V8 objects.
KNOWN_OBJECTS = {
("RO_SPACE", 0x001d9): "NullValue",
("RO_SPACE", 0x00259): "EmptyDescriptorArray",
("RO_SPACE", 0x002c1): "EmptyWeakFixedArray",
("RO_SPACE", 0x003c1): "UninitializedValue",
("RO_SPACE", 0x004d1): "UndefinedValue",
("RO_SPACE", 0x00551): "NanValue",
("RO_SPACE", 0x005b1): "TheHoleValue",
("RO_SPACE", 0x00649): "HoleNanValue",
("RO_SPACE", 0x00659): "TrueValue",
("RO_SPACE", 0x00709): "FalseValue",
("RO_SPACE", 0x00751): "empty_string",
("RO_SPACE", 0x00c11): "EmptyScopeInfo",
("RO_SPACE", 0x00c21): "EmptyFixedArray",
("RO_SPACE", 0x00c31): "ArgumentsMarker",
("RO_SPACE", 0x00cd1): "Exception",
("RO_SPACE", 0x00d71): "TerminationException",
("RO_SPACE", 0x00e19): "OptimizedOut",
("RO_SPACE", 0x00eb9): "StaleRegister",
("RO_SPACE", 0x027b9): "EmptyEnumCache",
("RO_SPACE", 0x02821): "EmptyPropertyArray",
("RO_SPACE", 0x02831): "EmptyByteArray",
("RO_SPACE", 0x02841): "EmptyObjectBoilerplateDescription",
("RO_SPACE", 0x02859): "EmptyArrayBoilerplateDescription",
("RO_SPACE", 0x028c1): "EmptyFixedUint8Array",
("RO_SPACE", 0x028e1): "EmptyFixedInt8Array",
("RO_SPACE", 0x02901): "EmptyFixedUint16Array",
("RO_SPACE", 0x02921): "EmptyFixedInt16Array",
("RO_SPACE", 0x02941): "EmptyFixedUint32Array",
("RO_SPACE", 0x02961): "EmptyFixedInt32Array",
("RO_SPACE", 0x02981): "EmptyFixedFloat32Array",
("RO_SPACE", 0x029a1): "EmptyFixedFloat64Array",
("RO_SPACE", 0x029c1): "EmptyFixedUint8ClampedArray",
("RO_SPACE", 0x029e1): "EmptyFixedBigUint64Array",
("RO_SPACE", 0x02a01): "EmptyFixedBigInt64Array",
("RO_SPACE", 0x02a21): "EmptySloppyArgumentsElements",
("RO_SPACE", 0x02a41): "EmptySlowElementDictionary",
("RO_SPACE", 0x02a89): "EmptyOrderedHashMap",
("RO_SPACE", 0x02ab1): "EmptyOrderedHashSet",
("RO_SPACE", 0x02ad9): "EmptyFeedbackMetadata",
("RO_SPACE", 0x02ae9): "EmptyPropertyCell",
("RO_SPACE", 0x02b11): "EmptyPropertyDictionary",
("RO_SPACE", 0x02b61): "NoOpInterceptorInfo",
("RO_SPACE", 0x02c01): "EmptyWeakArrayList",
("RO_SPACE", 0x02c19): "InfinityValue",
("RO_SPACE", 0x02c29): "MinusZeroValue",
("RO_SPACE", 0x02c39): "MinusInfinityValue",
("RO_SPACE", 0x02c49): "SelfReferenceMarker",
("RO_SPACE", 0x02ca1): "OffHeapTrampolineRelocationInfo",
("RO_SPACE", 0x02cb9): "HashSeed",
("OLD_SPACE", 0x00139): "ArgumentsIteratorAccessor",
("OLD_SPACE", 0x001a9): "ArrayLengthAccessor",
("OLD_SPACE", 0x00219): "BoundFunctionLengthAccessor",
("OLD_SPACE", 0x00289): "BoundFunctionNameAccessor",
("OLD_SPACE", 0x002f9): "ErrorStackAccessor",
("OLD_SPACE", 0x00369): "FunctionArgumentsAccessor",
("OLD_SPACE", 0x003d9): "FunctionCallerAccessor",
("OLD_SPACE", 0x00449): "FunctionNameAccessor",
("OLD_SPACE", 0x004b9): "FunctionLengthAccessor",
("OLD_SPACE", 0x00529): "FunctionPrototypeAccessor",
("OLD_SPACE", 0x00599): "StringLengthAccessor",
("OLD_SPACE", 0x00609): "InvalidPrototypeValidityCell",
("OLD_SPACE", 0x00619): "EmptyScript",
("OLD_SPACE", 0x00699): "ManyClosuresCell",
("OLD_SPACE", 0x006a9): "NoFeedbackCell",
("OLD_SPACE", 0x006b9): "ArrayConstructorProtector",
("OLD_SPACE", 0x006c9): "NoElementsProtector",
("OLD_SPACE", 0x006f1): "IsConcatSpreadableProtector",
("OLD_SPACE", 0x00701): "ArraySpeciesProtector",
("OLD_SPACE", 0x00729): "TypedArraySpeciesProtector",
("OLD_SPACE", 0x00751): "RegExpSpeciesProtector",
("OLD_SPACE", 0x00779): "PromiseSpeciesProtector",
("OLD_SPACE", 0x007a1): "StringLengthProtector",
("OLD_SPACE", 0x007b1): "ArrayIteratorProtector",
("OLD_SPACE", 0x007d9): "ArrayBufferDetachingProtector",
("OLD_SPACE", 0x00801): "PromiseHookProtector",
("OLD_SPACE", 0x00829): "PromiseResolveProtector",
("OLD_SPACE", 0x00839): "MapIteratorProtector",
("OLD_SPACE", 0x00861): "PromiseThenProtector",
("OLD_SPACE", 0x00889): "SetIteratorProtector",
("OLD_SPACE", 0x008b1): "StringIteratorProtector",
("OLD_SPACE", 0x008d9): "SingleCharacterStringCache",
("OLD_SPACE", 0x010e9): "StringSplitCache",
("OLD_SPACE", 0x018f9): "RegExpMultipleCache",
("OLD_SPACE", 0x02109): "BuiltinsConstantsTable",
("RO_SPACE", 0x001d1): "NullValue",
("RO_SPACE", 0x00251): "EmptyDescriptorArray",
("RO_SPACE", 0x002b9): "EmptyWeakFixedArray",
("RO_SPACE", 0x003b9): "UninitializedValue",
("RO_SPACE", 0x004c9): "UndefinedValue",
("RO_SPACE", 0x00549): "NanValue",
("RO_SPACE", 0x005a9): "TheHoleValue",
("RO_SPACE", 0x00641): "HoleNanValue",
("RO_SPACE", 0x00651): "TrueValue",
("RO_SPACE", 0x00701): "FalseValue",
("RO_SPACE", 0x00749): "empty_string",
("RO_SPACE", 0x00c09): "EmptyScopeInfo",
("RO_SPACE", 0x00c19): "EmptyFixedArray",
("RO_SPACE", 0x00c29): "ArgumentsMarker",
("RO_SPACE", 0x00cc9): "Exception",
("RO_SPACE", 0x00d69): "TerminationException",
("RO_SPACE", 0x00e11): "OptimizedOut",
("RO_SPACE", 0x00eb1): "StaleRegister",
("RO_SPACE", 0x027b1): "EmptyEnumCache",
("RO_SPACE", 0x02819): "EmptyPropertyArray",
("RO_SPACE", 0x02829): "EmptyByteArray",
("RO_SPACE", 0x02839): "EmptyObjectBoilerplateDescription",
("RO_SPACE", 0x02851): "EmptyArrayBoilerplateDescription",
("RO_SPACE", 0x028b9): "EmptyFixedUint8Array",
("RO_SPACE", 0x028d9): "EmptyFixedInt8Array",
("RO_SPACE", 0x028f9): "EmptyFixedUint16Array",
("RO_SPACE", 0x02919): "EmptyFixedInt16Array",
("RO_SPACE", 0x02939): "EmptyFixedUint32Array",
("RO_SPACE", 0x02959): "EmptyFixedInt32Array",
("RO_SPACE", 0x02979): "EmptyFixedFloat32Array",
("RO_SPACE", 0x02999): "EmptyFixedFloat64Array",
("RO_SPACE", 0x029b9): "EmptyFixedUint8ClampedArray",
("RO_SPACE", 0x029d9): "EmptyFixedBigUint64Array",
("RO_SPACE", 0x029f9): "EmptyFixedBigInt64Array",
("RO_SPACE", 0x02a19): "EmptySloppyArgumentsElements",
("RO_SPACE", 0x02a39): "EmptySlowElementDictionary",
("RO_SPACE", 0x02a81): "EmptyOrderedHashMap",
("RO_SPACE", 0x02aa9): "EmptyOrderedHashSet",
("RO_SPACE", 0x02ad1): "EmptyFeedbackMetadata",
("RO_SPACE", 0x02ae1): "EmptyPropertyCell",
("RO_SPACE", 0x02b09): "EmptyPropertyDictionary",
("RO_SPACE", 0x02b59): "NoOpInterceptorInfo",
("RO_SPACE", 0x02bf9): "EmptyWeakArrayList",
("RO_SPACE", 0x02c11): "InfinityValue",
("RO_SPACE", 0x02c21): "MinusZeroValue",
("RO_SPACE", 0x02c31): "MinusInfinityValue",
("RO_SPACE", 0x02c41): "SelfReferenceMarker",
("RO_SPACE", 0x02c99): "OffHeapTrampolineRelocationInfo",
("RO_SPACE", 0x02cb1): "HashSeed",
("OLD_SPACE", 0x00131): "ArgumentsIteratorAccessor",
("OLD_SPACE", 0x001a1): "ArrayLengthAccessor",
("OLD_SPACE", 0x00211): "BoundFunctionLengthAccessor",
("OLD_SPACE", 0x00281): "BoundFunctionNameAccessor",
("OLD_SPACE", 0x002f1): "ErrorStackAccessor",
("OLD_SPACE", 0x00361): "FunctionArgumentsAccessor",
("OLD_SPACE", 0x003d1): "FunctionCallerAccessor",
("OLD_SPACE", 0x00441): "FunctionNameAccessor",
("OLD_SPACE", 0x004b1): "FunctionLengthAccessor",
("OLD_SPACE", 0x00521): "FunctionPrototypeAccessor",
("OLD_SPACE", 0x00591): "StringLengthAccessor",
("OLD_SPACE", 0x00601): "InvalidPrototypeValidityCell",
("OLD_SPACE", 0x00611): "EmptyScript",
("OLD_SPACE", 0x00691): "ManyClosuresCell",
("OLD_SPACE", 0x006a1): "NoFeedbackCell",
("OLD_SPACE", 0x006b1): "ArrayConstructorProtector",
("OLD_SPACE", 0x006c1): "NoElementsProtector",
("OLD_SPACE", 0x006e9): "IsConcatSpreadableProtector",
("OLD_SPACE", 0x006f9): "ArraySpeciesProtector",
("OLD_SPACE", 0x00721): "TypedArraySpeciesProtector",
("OLD_SPACE", 0x00749): "RegExpSpeciesProtector",
("OLD_SPACE", 0x00771): "PromiseSpeciesProtector",
("OLD_SPACE", 0x00799): "StringLengthProtector",
("OLD_SPACE", 0x007a9): "ArrayIteratorProtector",
("OLD_SPACE", 0x007d1): "ArrayBufferDetachingProtector",
("OLD_SPACE", 0x007f9): "PromiseHookProtector",
("OLD_SPACE", 0x00821): "PromiseResolveProtector",
("OLD_SPACE", 0x00831): "MapIteratorProtector",
("OLD_SPACE", 0x00859): "PromiseThenProtector",
("OLD_SPACE", 0x00881): "SetIteratorProtector",
("OLD_SPACE", 0x008a9): "StringIteratorProtector",
("OLD_SPACE", 0x008d1): "SingleCharacterStringCache",
("OLD_SPACE", 0x010e1): "StringSplitCache",
("OLD_SPACE", 0x018f1): "RegExpMultipleCache",
("OLD_SPACE", 0x02101): "BuiltinsConstantsTable",
}
# List of known V8 Frame Markers.