diff --git a/BUILD.gn b/BUILD.gn
index 357c7f493b..162d8cc1ca 100644
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -404,6 +404,10 @@ if (v8_enable_shared_ro_heap && v8_enable_pointer_compression) {
          "Sharing read-only heap with pointer compression is only supported on Linux or Android")
 }
 
+assert(
+    !v8_enable_pointer_compression_shared_cage || !v8_enable_shared_ro_heap,
+    "Sharing read-only heap is not yet supported when sharing a pointer compression cage")
+
 assert(!v8_use_multi_snapshots || !v8_control_flow_integrity,
        "Control-flow integrity does not support multisnapshots")
 
@@ -554,6 +558,7 @@ external_v8_defines = [
   "V8_ENABLE_CHECKS",
   "V8_COMPRESS_POINTERS",
   "V8_COMPRESS_POINTERS_IN_SHARED_CAGE",
+  "V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE",
   "V8_31BIT_SMIS_ON_64BIT_ARCH",
   "V8_COMPRESS_ZONES",
   "V8_HEAP_SANDBOX",
@@ -573,6 +578,8 @@ if (v8_enable_pointer_compression) {
 }
 if (v8_enable_pointer_compression_shared_cage) {
   enabled_external_v8_defines += [ "V8_COMPRESS_POINTERS_IN_SHARED_CAGE" ]
+} else if (v8_enable_pointer_compression) {
+  enabled_external_v8_defines += [ "V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE" ]
 }
 if (v8_enable_pointer_compression || v8_enable_31bit_smis_on_64bit_arch) {
   enabled_external_v8_defines += [ "V8_31BIT_SMIS_ON_64BIT_ARCH" ]
diff --git a/include/v8-internal.h b/include/v8-internal.h
index 8abbcfb416..eb18f76504 100644
--- a/include/v8-internal.h
+++ b/include/v8-internal.h
@@ -358,8 +358,9 @@ class Internals {
       internal::Address heap_object_ptr, int offset) {
 #ifdef V8_COMPRESS_POINTERS
     uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
-    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
-    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+    internal::Address base =
+        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
 #else
     return ReadRawField<internal::Address>(heap_object_ptr, offset);
 #endif
   }
@@ -411,18 +412,19 @@
 #ifdef V8_COMPRESS_POINTERS
   // See v8:7703 or src/ptr-compr.* for details about pointer compression.
-  static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
-  static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;
+  static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
+  static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
 
-  V8_INLINE static internal::Address GetRootFromOnHeapAddress(
+  V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
       internal::Address addr) {
-    return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
+    return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
   }
 
   V8_INLINE static internal::Address DecompressTaggedAnyField(
       internal::Address heap_object_ptr, uint32_t value) {
-    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
-    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+    internal::Address base =
+        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
   }
 
 #endif  // V8_COMPRESS_POINTERS
diff --git a/src/ast/ast.cc b/src/ast/ast.cc
index 64577de0a7..5515a4a3fc 100644
--- a/src/ast/ast.cc
+++ b/src/ast/ast.cc
@@ -661,7 +661,7 @@ void ArrayLiteral::BuildBoilerplateDescription(LocalIsolate* isolate) {
           boilerplate_descriptor_kind(),
           GetMoreGeneralElementsKind(boilerplate_descriptor_kind(),
                                      boilerplate_value.OptimalElementsKind(
-                                         GetIsolateForPtrCompr(*elements))));
+                                         GetPtrComprCageBase(*elements))));
     FixedArray::cast(*elements).set(array_index, boilerplate_value);
   }
diff --git a/src/builtins/builtins-typed-array-gen.cc b/src/builtins/builtins-typed-array-gen.cc
index a0ba890167..65b1ab2f2b 100644
--- a/src/builtins/builtins-typed-array-gen.cc
+++ b/src/builtins/builtins-typed-array-gen.cc
@@ -370,14 +370,14 @@ void TypedArrayBuiltinsAssembler::SetJSTypedArrayOnHeapDataPtr(
     TNode<IntPtrT> full_base = Signed(BitcastTaggedToWord(base));
     TNode<Int32T> compressed_base = TruncateIntPtrToInt32(full_base);
     // TODO(v8:9706): Add a way to directly use kRootRegister value.
-    TNode<IntPtrT> isolate_root =
+    TNode<IntPtrT> ptr_compr_cage_base =
         IntPtrSub(full_base, Signed(ChangeUint32ToWord(compressed_base)));
     // Add JSTypedArray::ExternalPointerCompensationForOnHeapArray() to offset.
     DCHECK_EQ(
         isolate()->isolate_root(),
         JSTypedArray::ExternalPointerCompensationForOnHeapArray(isolate()));
     // See JSTypedArray::SetOnHeapDataPtr() for details.
- offset = Unsigned(IntPtrAdd(offset, isolate_root)); + offset = Unsigned(IntPtrAdd(offset, ptr_compr_cage_base)); } StoreJSTypedArrayBasePointer(holder, base); diff --git a/src/common/external-pointer-inl.h b/src/common/external-pointer-inl.h index 070d787b63..bc7aea3691 100644 --- a/src/common/external-pointer-inl.h +++ b/src/common/external-pointer-inl.h @@ -12,11 +12,17 @@ namespace v8 { namespace internal { -V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate_root, +V8_INLINE Address DecodeExternalPointer(PtrComprCageBase isolate_root, ExternalPointer_t encoded_pointer, ExternalPointerTag tag) { STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize); #ifdef V8_HEAP_SANDBOX + + // TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage +#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE +#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage" +#endif + uint32_t index = static_cast(encoded_pointer); const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address()); return isolate->external_pointer_table().get(index) ^ tag; @@ -62,7 +68,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate, } V8_INLINE Address ReadExternalPointerField(Address field_address, - IsolateRoot isolate_root, + PtrComprCageBase cage_base, ExternalPointerTag tag) { // Pointer compression causes types larger than kTaggedSize to be unaligned. constexpr bool v8_pointer_compression_unaligned = @@ -73,7 +79,7 @@ V8_INLINE Address ReadExternalPointerField(Address field_address, } else { encoded_value = base::Memory(field_address); } - return DecodeExternalPointer(isolate_root, encoded_value, tag); + return DecodeExternalPointer(cage_base, encoded_value, tag); } V8_INLINE void WriteExternalPointerField(Address field_address, diff --git a/src/common/external-pointer.h b/src/common/external-pointer.h index 5a380df762..c0941f2978 100644 --- a/src/common/external-pointer.h +++ b/src/common/external-pointer.h @@ -12,7 +12,7 @@ namespace internal { // Convert external pointer from on-V8-heap representation to an actual external // pointer value. -V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate, +V8_INLINE Address DecodeExternalPointer(PtrComprCageBase isolate, ExternalPointer_t encoded_pointer, ExternalPointerTag tag); @@ -34,7 +34,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate, // Reads external pointer for the field, and decodes it if the sandbox is // enabled. V8_INLINE Address ReadExternalPointerField(Address field_address, - IsolateRoot isolate, + PtrComprCageBase isolate, ExternalPointerTag tag); // Encodes value if the sandbox is enabled and writes it into the field. 
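The helpers being renamed above all revolve around one trick: a compressed tagged value is just the low 32 bits of a full pointer, and the 4 GB-aligned cage base can be recovered by masking any address that lies inside the cage. Below is a minimal standalone sketch of that arithmetic, using invented names (CageBaseFromOnHeapAddress, Compress, Decompress) and a 64-bit host assumption; it is an illustration of the scheme, not V8's actual headers.

// ptr_compr_sketch.cc -- illustrative only; names are invented, not V8's.
#include <cstddef>
#include <cstdint>
#include <cstdlib>

using Address = uintptr_t;  // full 64-bit pointer (assumes a 64-bit host)
using Tagged_t = uint32_t;  // compressed 32-bit on-heap representation

// The cage base is the 4 GB-aligned start of the reservation holding the
// object; masking any address inside the cage recovers it.
constexpr size_t kCageBaseAlignment = size_t{1} << 32;

constexpr Address CageBaseFromOnHeapAddress(Address on_heap_addr) {
  return on_heap_addr & ~(kCageBaseAlignment - 1);
}

// Compression keeps only the low 32 bits.
constexpr Tagged_t Compress(Address full) {
  return static_cast<Tagged_t>(full);
}

// Decompression adds the low 32 bits back onto the cage base derived from
// any on-heap address in the same cage (e.g. the holder object itself).
constexpr Address Decompress(Address on_heap_addr, Tagged_t raw) {
  return CageBaseFromOnHeapAddress(on_heap_addr) + static_cast<Address>(raw);
}

int main() {
  const Address cage_base = Address{0x7f30} << 32;  // made-up, 4 GB-aligned
  const Address object = cage_base + 0x12345678;
  const Tagged_t compressed = Compress(object);
  return Decompress(object, compressed) == object ? EXIT_SUCCESS : EXIT_FAILURE;
}

Whether each Isolate gets its own cage or all Isolates share one only changes which reservation the base points into; the mask-and-add arithmetic stays the same, which is why the patch can rename the type without touching the decompression code paths.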
diff --git a/src/common/globals.h b/src/common/globals.h
index 0891e6eddb..f51c3210f8 100644
--- a/src/common/globals.h
+++ b/src/common/globals.h
@@ -1748,13 +1748,13 @@ enum class DynamicCheckMapsStatus : uint8_t {
 };
 
 #ifdef V8_COMPRESS_POINTERS
-class IsolateRoot {
+class PtrComprCageBase {
  public:
-  explicit constexpr IsolateRoot(Address address) : address_(address) {}
+  explicit constexpr PtrComprCageBase(Address address) : address_(address) {}
   // NOLINTNEXTLINE
-  inline IsolateRoot(const Isolate* isolate);
+  inline PtrComprCageBase(const Isolate* isolate);
   // NOLINTNEXTLINE
-  inline IsolateRoot(const LocalIsolate* isolate);
+  inline PtrComprCageBase(const LocalIsolate* isolate);
 
   inline Address address() const;
 
@@ -1762,13 +1762,13 @@ class IsolateRoot {
   Address address_;
 };
 #else
-class IsolateRoot {
+class PtrComprCageBase {
  public:
-  IsolateRoot() = default;
+  PtrComprCageBase() = default;
   // NOLINTNEXTLINE
-  IsolateRoot(const Isolate* isolate) {}
+  PtrComprCageBase(const Isolate* isolate) {}
   // NOLINTNEXTLINE
-  IsolateRoot(const LocalIsolate* isolate) {}
+  PtrComprCageBase(const LocalIsolate* isolate) {}
 };
 #endif
diff --git a/src/common/ptr-compr-inl.h b/src/common/ptr-compr-inl.h
index f74c4d82c9..66c22311b0 100644
--- a/src/common/ptr-compr-inl.h
+++ b/src/common/ptr-compr-inl.h
@@ -15,15 +15,35 @@ namespace internal {
 
 #ifdef V8_COMPRESS_POINTERS
 
-IsolateRoot::IsolateRoot(const Isolate* isolate)
+#if defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
+
+PtrComprCageBase::PtrComprCageBase(const Isolate* isolate)
     : address_(isolate->isolate_root()) {}
-IsolateRoot::IsolateRoot(const LocalIsolate* isolate)
+PtrComprCageBase::PtrComprCageBase(const LocalIsolate* isolate)
     : address_(isolate->isolate_root()) {}
 
-Address IsolateRoot::address() const {
+#elif defined V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+
+PtrComprCageBase::PtrComprCageBase(const Isolate* isolate)
+    : address_(isolate->isolate_root()) {
+  UNIMPLEMENTED();
+}
+PtrComprCageBase::PtrComprCageBase(const LocalIsolate* isolate)
+    : address_(isolate->isolate_root()) {
+  UNIMPLEMENTED();
+}
+
+#else
+
+#error "Pointer compression build configuration error"
+
+#endif  // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE,
+        // V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+
+Address PtrComprCageBase::address() const {
   Address ret = address_;
   ret = reinterpret_cast<Address>(V8_ASSUME_ALIGNED(
-      reinterpret_cast<void*>(ret), kPtrComprIsolateRootAlignment));
+      reinterpret_cast<void*>(ret), kPtrComprCageBaseAlignment));
   return ret;
 }
 
@@ -33,12 +53,17 @@ V8_INLINE Tagged_t CompressTagged(Address tagged) {
   return static_cast<Tagged_t>(static_cast<uint32_t>(tagged));
 }
 
-V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) {
-  return RoundDown<kPtrComprIsolateRootAlignment>(on_heap_addr);
+V8_INLINE constexpr Address GetPtrComprCageBaseAddress(Address on_heap_addr) {
+  return RoundDown<kPtrComprCageBaseAlignment>(on_heap_addr);
 }
 
-V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) {
-  return isolate.address();
+V8_INLINE Address GetPtrComprCageBaseAddress(PtrComprCageBase cage_base) {
+  return cage_base.address();
+}
+
+V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(
+    Address address) {
+  return PtrComprCageBase(GetPtrComprCageBaseAddress(address));
 }
 
 // Decompresses smi value.
@@ -52,7 +77,8 @@ V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) {
 template <typename TOnHeapAddress>
 V8_INLINE Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr,
                                           Tagged_t raw_value) {
-  return GetIsolateRootAddress(on_heap_addr) + static_cast<Address>(raw_value);
+  return GetPtrComprCageBaseAddress(on_heap_addr) +
+         static_cast<Address>(raw_value);
 }
 
 // Decompresses any tagged value, preserving both weak- and smi- tags.
@@ -62,18 +88,19 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
   return DecompressTaggedPointer(on_heap_addr, raw_value);
 }
 
-STATIC_ASSERT(kPtrComprHeapReservationSize ==
-              Internals::kPtrComprHeapReservationSize);
-STATIC_ASSERT(kPtrComprIsolateRootAlignment ==
-              Internals::kPtrComprIsolateRootAlignment);
+STATIC_ASSERT(kPtrComprCageReservationSize ==
+              Internals::kPtrComprCageReservationSize);
+STATIC_ASSERT(kPtrComprCageBaseAlignment ==
+              Internals::kPtrComprCageBaseAlignment);
 
 #else
 
 V8_INLINE Tagged_t CompressTagged(Address tagged) { UNREACHABLE(); }
 
-V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); }
-
-V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) { UNREACHABLE(); }
+V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(
+    Address address) {
+  return PtrComprCageBase();
+}
 
 V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) { UNREACHABLE(); }
 
@@ -90,6 +117,11 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
 }
 
 #endif  // V8_COMPRESS_POINTERS
+
+inline PtrComprCageBase GetPtrComprCageBase(HeapObject object) {
+  return GetPtrComprCageBaseFromOnHeapAddress(object.ptr());
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/src/common/ptr-compr.h b/src/common/ptr-compr.h
index 0c82c2328c..1d5668208a 100644
--- a/src/common/ptr-compr.h
+++ b/src/common/ptr-compr.h
@@ -13,8 +13,8 @@ namespace v8 {
 namespace internal {
 
 // See v8:7703 for details about how pointer compression works.
-constexpr size_t kPtrComprHeapReservationSize = size_t{4} * GB;
-constexpr size_t kPtrComprIsolateRootAlignment = size_t{4} * GB;
+constexpr size_t kPtrComprCageReservationSize = size_t{4} * GB;
+constexpr size_t kPtrComprCageBaseAlignment = size_t{4} * GB;
 
 }  // namespace internal
 }  // namespace v8
diff --git a/src/deoptimizer/translated-state.cc b/src/deoptimizer/translated-state.cc
index c6f88afdc4..02c473d22b 100644
--- a/src/deoptimizer/translated-state.cc
+++ b/src/deoptimizer/translated-state.cc
@@ -1275,8 +1275,7 @@ int TranslatedState::CreateNextTranslatedValue(
 
 Address TranslatedState::DecompressIfNeeded(intptr_t value) {
   if (COMPRESS_POINTERS_BOOL) {
-    return DecompressTaggedAny(isolate()->isolate_root(),
-                               static_cast<uint32_t>(value));
+    return DecompressTaggedAny(isolate(), static_cast<uint32_t>(value));
   } else {
     return value;
   }
diff --git a/src/diagnostics/objects-debug.cc b/src/diagnostics/objects-debug.cc
index f197c7f21c..b48df9385a 100644
--- a/src/diagnostics/objects-debug.cc
+++ b/src/diagnostics/objects-debug.cc
@@ -325,11 +325,11 @@ void BytecodeArray::BytecodeArrayVerify(Isolate* isolate) {
 
 USE_TORQUE_VERIFIER(JSReceiver)
 
-bool JSObject::ElementsAreSafeToExamine(IsolateRoot isolate) const {
+bool JSObject::ElementsAreSafeToExamine(PtrComprCageBase cage_base) const {
   // If a GC was caused while constructing this object, the elements
   // pointer may point to a one pointer filler map.
- return elements(isolate) != - GetReadOnlyRoots(isolate).one_pointer_filler_map(); + return elements(cage_base) != + GetReadOnlyRoots(cage_base).one_pointer_filler_map(); } namespace { diff --git a/src/diagnostics/objects-printer.cc b/src/diagnostics/objects-printer.cc index 4d5abb567a..aa1713e3b3 100644 --- a/src/diagnostics/objects-printer.cc +++ b/src/diagnostics/objects-printer.cc @@ -468,13 +468,13 @@ void PrintSloppyArgumentElements(std::ostream& os, ElementsKind kind, } } -void PrintEmbedderData(IsolateRoot isolate, std::ostream& os, +void PrintEmbedderData(PtrComprCageBase cage_base, std::ostream& os, EmbedderDataSlot slot) { DisallowGarbageCollection no_gc; Object value = slot.load_tagged(); os << Brief(value); void* raw_pointer; - if (slot.ToAlignedPointer(isolate, &raw_pointer)) { + if (slot.ToAlignedPointer(cage_base, &raw_pointer)) { os << ", aligned pointer: " << raw_pointer; } } @@ -579,11 +579,11 @@ static void JSObjectPrintBody(std::ostream& os, } int embedder_fields = obj.GetEmbedderFieldCount(); if (embedder_fields > 0) { - IsolateRoot isolate = GetIsolateForPtrCompr(obj); + PtrComprCageBase cage_base = GetPtrComprCageBase(obj); os << " - embedder fields = {"; for (int i = 0; i < embedder_fields; i++) { os << "\n "; - PrintEmbedderData(isolate, os, EmbedderDataSlot(obj, i)); + PrintEmbedderData(cage_base, os, EmbedderDataSlot(obj, i)); } os << "\n }\n"; } @@ -762,14 +762,14 @@ void ObjectBoilerplateDescription::ObjectBoilerplateDescriptionPrint( } void EmbedderDataArray::EmbedderDataArrayPrint(std::ostream& os) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); PrintHeader(os, "EmbedderDataArray"); os << "\n - length: " << length(); EmbedderDataSlot start(*this, 0); EmbedderDataSlot end(*this, length()); for (EmbedderDataSlot slot = start; slot < end; ++slot) { os << "\n "; - PrintEmbedderData(isolate, os, slot); + PrintEmbedderData(cage_base, os, slot); } os << "\n"; } @@ -2747,12 +2747,11 @@ namespace { inline i::Object GetObjectFromRaw(void* object) { i::Address object_ptr = reinterpret_cast(object); #ifdef V8_COMPRESS_POINTERS - if (RoundDown(object_ptr) == - i::kNullAddress) { + if (RoundDown(object_ptr) == i::kNullAddress) { // Try to decompress pointer. i::Isolate* isolate = i::Isolate::Current(); - object_ptr = i::DecompressTaggedAny(isolate->isolate_root(), - static_cast(object_ptr)); + object_ptr = + i::DecompressTaggedAny(isolate, static_cast(object_ptr)); } #endif return i::Object(object_ptr); diff --git a/src/execution/isolate-utils-inl.h b/src/execution/isolate-utils-inl.h index 2cc66a473c..f199b525aa 100644 --- a/src/execution/isolate-utils-inl.h +++ b/src/execution/isolate-utils-inl.h @@ -13,26 +13,36 @@ namespace v8 { namespace internal { -inline constexpr IsolateRoot GetIsolateForPtrComprFromOnHeapAddress( - Address address) { -#ifdef V8_COMPRESS_POINTERS - return IsolateRoot(GetIsolateRootAddress(address)); -#else - return IsolateRoot(); -#endif // V8_COMPRESS_POINTERS +#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE + +// Aliases for GetPtrComprCageBase when +// V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE. Each Isolate has its own cage, whose +// base address is also the Isolate root. 
+V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) { + return GetPtrComprCageBaseAddress(on_heap_addr); } -inline IsolateRoot GetIsolateForPtrCompr(HeapObject object) { - return GetIsolateForPtrComprFromOnHeapAddress(object.ptr()); +V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) { + return cage_base.address(); } +#else + +V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); } + +V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) { + UNREACHABLE(); +} + +#endif // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE + V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) { // Avoid using the below GetIsolateFromWritableObject because we want to be // able to get the heap, but not the isolate, for off-thread objects. #if defined V8_ENABLE_THIRD_PARTY_HEAP return Heap::GetIsolateFromWritableObject(object)->heap(); -#elif defined V8_COMPRESS_POINTERS +#elif defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE Isolate* isolate = Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr())); DCHECK_NOT_NULL(isolate); @@ -47,7 +57,7 @@ V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) { V8_INLINE Isolate* GetIsolateFromWritableObject(HeapObject object) { #ifdef V8_ENABLE_THIRD_PARTY_HEAP return Heap::GetIsolateFromWritableObject(object); -#elif defined V8_COMPRESS_POINTERS +#elif defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE Isolate* isolate = Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr())); DCHECK_NOT_NULL(isolate); diff --git a/src/execution/isolate-utils.h b/src/execution/isolate-utils.h index 2204b2cd96..c41788d945 100644 --- a/src/execution/isolate-utils.h +++ b/src/execution/isolate-utils.h @@ -10,11 +10,12 @@ namespace v8 { namespace internal { -// Computes isolate from any read only or writable heap object. The resulting -// value is intended to be used only as a hoisted computation of isolate root -// inside trivial accessors for optmizing value decompression. -// When pointer compression is disabled this function always returns nullptr. -V8_INLINE IsolateRoot GetIsolateForPtrCompr(HeapObject object); +// Computes the pointer compression cage base from any read only or writable +// heap object. The resulting value is intended to be used only as a hoisted +// computation of cage base inside trivial accessors for optimizing value +// decompression. When pointer compression is disabled this function always +// returns nullptr. +V8_INLINE PtrComprCageBase GetPtrComprCageBase(HeapObject object); V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object); diff --git a/src/execution/isolate.cc b/src/execution/isolate.cc index f3cb313187..8f98ee443e 100644 --- a/src/execution/isolate.cc +++ b/src/execution/isolate.cc @@ -2861,8 +2861,8 @@ Isolate* Isolate::New() { // Construct Isolate object in the allocated memory. 
void* isolate_ptr = isolate_allocator->isolate_memory(); Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator)); -#ifdef V8_COMPRESS_POINTERS - DCHECK(IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment)); +#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE + DCHECK(IsAligned(isolate->isolate_root(), kPtrComprCageBaseAlignment)); #endif #ifdef DEBUG diff --git a/src/flags/flag-definitions.h b/src/flags/flag-definitions.h index 379c02138e..aa1e70ce77 100644 --- a/src/flags/flag-definitions.h +++ b/src/flags/flag-definitions.h @@ -151,6 +151,18 @@ struct MaybeBoolFlag { #define COMPRESS_POINTERS_BOOL false #endif +#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE +#define COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL true +#else +#define COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL false +#endif + +#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE +#define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL true +#else +#define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL false +#endif + #ifdef V8_HEAP_SANDBOX #define V8_HEAP_SANDBOX_BOOL true #else diff --git a/src/handles/global-handles.cc b/src/handles/global-handles.cc index 8b24de2a05..6bc290eac7 100644 --- a/src/handles/global-handles.cc +++ b/src/handles/global-handles.cc @@ -382,11 +382,11 @@ namespace { void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) { int field_count = jsobject.GetEmbedderFieldCount(); - IsolateRoot isolate = GetIsolateForPtrCompr(jsobject); + PtrComprCageBase cage_base = GetPtrComprCageBase(jsobject); for (int i = 0; i < len; ++i) { if (field_count == i) break; void* pointer; - if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(isolate, &pointer)) { + if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(cage_base, &pointer)) { embedder_fields[i] = pointer; } } diff --git a/src/heap/heap.cc b/src/heap/heap.cc index ab019c8e1b..ff44912070 100644 --- a/src/heap/heap.cc +++ b/src/heap/heap.cc @@ -289,7 +289,7 @@ size_t Heap::MinOldGenerationSize() { size_t Heap::AllocatorLimitOnMaxOldGenerationSize() { #ifdef V8_COMPRESS_POINTERS // Isolate and the young generation are also allocated on the heap. 
- return kPtrComprHeapReservationSize - + return kPtrComprCageReservationSize - YoungGenerationSizeFromSemiSpaceSize(kMaxSemiSpaceSize) - RoundUp(sizeof(Isolate), size_t{1} << kPageSizeBits); #endif diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc index 258596c1c5..ab4c64dc08 100644 --- a/src/heap/mark-compact.cc +++ b/src/heap/mark-compact.cc @@ -2704,8 +2704,9 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot, } template -static inline SlotCallbackResult UpdateSlot(IsolateRoot isolate, TSlot slot) { - typename TSlot::TObject obj = slot.Relaxed_Load(isolate); +static inline SlotCallbackResult UpdateSlot(PtrComprCageBase cage_base, + TSlot slot) { + typename TSlot::TObject obj = slot.Relaxed_Load(cage_base); HeapObject heap_obj; if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) { UpdateSlot(slot, obj, heap_obj); @@ -2717,9 +2718,9 @@ static inline SlotCallbackResult UpdateSlot(IsolateRoot isolate, TSlot slot) { } template -static inline SlotCallbackResult UpdateStrongSlot(IsolateRoot isolate, +static inline SlotCallbackResult UpdateStrongSlot(PtrComprCageBase cage_base, TSlot slot) { - typename TSlot::TObject obj = slot.Relaxed_Load(isolate); + typename TSlot::TObject obj = slot.Relaxed_Load(cage_base); DCHECK(!HAS_WEAK_HEAP_OBJECT_TAG(obj.ptr())); HeapObject heap_obj; if (obj.GetHeapObject(&heap_obj)) { @@ -2735,39 +2736,40 @@ static inline SlotCallbackResult UpdateStrongSlot(IsolateRoot isolate, // It does not expect to encounter pointers to dead objects. class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor { public: - explicit PointersUpdatingVisitor(IsolateRoot isolate) : isolate_(isolate) {} + explicit PointersUpdatingVisitor(PtrComprCageBase cage_base) + : cage_base_(cage_base) {} void VisitPointer(HeapObject host, ObjectSlot p) override { - UpdateStrongSlotInternal(isolate_, p); + UpdateStrongSlotInternal(cage_base_, p); } void VisitPointer(HeapObject host, MaybeObjectSlot p) override { - UpdateSlotInternal(isolate_, p); + UpdateSlotInternal(cage_base_, p); } void VisitPointers(HeapObject host, ObjectSlot start, ObjectSlot end) override { for (ObjectSlot p = start; p < end; ++p) { - UpdateStrongSlotInternal(isolate_, p); + UpdateStrongSlotInternal(cage_base_, p); } } void VisitPointers(HeapObject host, MaybeObjectSlot start, MaybeObjectSlot end) final { for (MaybeObjectSlot p = start; p < end; ++p) { - UpdateSlotInternal(isolate_, p); + UpdateSlotInternal(cage_base_, p); } } void VisitRootPointer(Root root, const char* description, FullObjectSlot p) override { - UpdateRootSlotInternal(isolate_, p); + UpdateRootSlotInternal(cage_base_, p); } void VisitRootPointers(Root root, const char* description, FullObjectSlot start, FullObjectSlot end) override { for (FullObjectSlot p = start; p < end; ++p) { - UpdateRootSlotInternal(isolate_, p); + UpdateRootSlotInternal(cage_base_, p); } } @@ -2775,7 +2777,7 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor { OffHeapObjectSlot start, OffHeapObjectSlot end) override { for (OffHeapObjectSlot p = start; p < end; ++p) { - UpdateRootSlotInternal(isolate_, p); + UpdateRootSlotInternal(cage_base_, p); } } @@ -2790,32 +2792,32 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor { } private: - static inline SlotCallbackResult UpdateRootSlotInternal(IsolateRoot isolate, - FullObjectSlot slot) { - return UpdateStrongSlot(isolate, slot); + static inline SlotCallbackResult UpdateRootSlotInternal( + PtrComprCageBase cage_base, FullObjectSlot slot) { + 
return UpdateStrongSlot(cage_base, slot); } static inline SlotCallbackResult UpdateRootSlotInternal( - IsolateRoot isolate, OffHeapObjectSlot slot) { - return UpdateStrongSlot(isolate, slot); + PtrComprCageBase cage_base, OffHeapObjectSlot slot) { + return UpdateStrongSlot(cage_base, slot); } static inline SlotCallbackResult UpdateStrongMaybeObjectSlotInternal( - IsolateRoot isolate, MaybeObjectSlot slot) { - return UpdateStrongSlot(isolate, slot); + PtrComprCageBase cage_base, MaybeObjectSlot slot) { + return UpdateStrongSlot(cage_base, slot); } - static inline SlotCallbackResult UpdateStrongSlotInternal(IsolateRoot isolate, - ObjectSlot slot) { - return UpdateStrongSlot(isolate, slot); + static inline SlotCallbackResult UpdateStrongSlotInternal( + PtrComprCageBase cage_base, ObjectSlot slot) { + return UpdateStrongSlot(cage_base, slot); } - static inline SlotCallbackResult UpdateSlotInternal(IsolateRoot isolate, - MaybeObjectSlot slot) { - return UpdateSlot(isolate, slot); + static inline SlotCallbackResult UpdateSlotInternal( + PtrComprCageBase cage_base, MaybeObjectSlot slot) { + return UpdateSlot(cage_base, slot); } - IsolateRoot isolate_; + PtrComprCageBase cage_base_; }; static String UpdateReferenceInExternalStringTableEntry(Heap* heap, @@ -3581,7 +3583,7 @@ class ToSpaceUpdatingItem : public UpdatingItem { TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"), "ToSpaceUpdatingItem::ProcessVisitAll"); PointersUpdatingVisitor visitor( - GetIsolateForPtrComprFromOnHeapAddress(start_)); + GetPtrComprCageBaseFromOnHeapAddress(start_)); for (Address cur = start_; cur < end_;) { HeapObject object = HeapObject::FromAddress(cur); Map map = object.map(); @@ -3597,7 +3599,7 @@ class ToSpaceUpdatingItem : public UpdatingItem { // For young generation evacuations we want to visit grey objects, for // full MC, we need to visit black objects. PointersUpdatingVisitor visitor( - GetIsolateForPtrComprFromOnHeapAddress(start_)); + GetPtrComprCageBaseFromOnHeapAddress(start_)); for (auto object_and_size : LiveObjectRange( chunk_, marking_state_->bitmap(chunk_))) { object_and_size.first.IterateBodyFast(&visitor); @@ -3743,12 +3745,12 @@ class RememberedSetUpdatingItem : public UpdatingItem { if ((updating_mode_ == RememberedSetUpdatingMode::ALL) && (chunk_->slot_set() != nullptr)) { InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_); - IsolateRoot isolate = heap_->isolate(); + PtrComprCageBase cage_base = heap_->isolate(); RememberedSet::Iterate( chunk_, - [&filter, isolate](MaybeObjectSlot slot) { + [&filter, cage_base](MaybeObjectSlot slot) { if (!filter.IsValid(slot.address())) return REMOVE_SLOT; - return UpdateSlot(isolate, slot); + return UpdateSlot(cage_base, slot); }, SlotSet::FREE_EMPTY_BUCKETS); chunk_->ReleaseSlotSet(); @@ -3783,10 +3785,10 @@ class RememberedSetUpdatingItem : public UpdatingItem { Address slot) { // Using UpdateStrongSlot is OK here, because there are no weak // typed slots. 
- IsolateRoot isolate = heap_->isolate(); + PtrComprCageBase cage_base = heap_->isolate(); return UpdateTypedSlotHelper::UpdateTypedSlot( - heap_, slot_type, slot, [isolate](FullMaybeObjectSlot slot) { - return UpdateStrongSlot(isolate, slot); + heap_, slot_type, slot, [cage_base](FullMaybeObjectSlot slot) { + return UpdateStrongSlot(cage_base, slot); }); }); } diff --git a/src/heap/read-only-heap-inl.h b/src/heap/read-only-heap-inl.h index 316f455013..0c12828584 100644 --- a/src/heap/read-only-heap-inl.h +++ b/src/heap/read-only-heap-inl.h @@ -14,9 +14,9 @@ namespace internal { // static ReadOnlyRoots ReadOnlyHeap::GetReadOnlyRoots(HeapObject object) { -#ifdef V8_COMPRESS_POINTERS - IsolateRoot isolate = GetIsolateForPtrCompr(object); - return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address())); +#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE + return ReadOnlyRoots( + Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()))); #else #ifdef V8_SHARED_RO_HEAP // This fails if we are creating heap objects and the roots haven't yet been diff --git a/src/heap/read-only-heap.cc b/src/heap/read-only-heap.cc index 342ad1d031..d5f7e843ef 100644 --- a/src/heap/read-only-heap.cc +++ b/src/heap/read-only-heap.cc @@ -37,7 +37,7 @@ base::LazyInstance>::type std::shared_ptr InitializeSharedReadOnlyArtifacts() { std::shared_ptr artifacts; - if (COMPRESS_POINTERS_BOOL) { + if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) { artifacts = std::make_shared(); } else { artifacts = std::make_shared(); @@ -129,7 +129,7 @@ ReadOnlyHeap::ReadOnlyHeap(ReadOnlyHeap* ro_heap, ReadOnlySpace* ro_space) : read_only_space_(ro_space), read_only_object_cache_(ro_heap->read_only_object_cache_) { DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared()); - DCHECK(COMPRESS_POINTERS_BOOL); + DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL); } // static @@ -139,7 +139,7 @@ ReadOnlyHeap* ReadOnlyHeap::CreateInitalHeapForBootstrapping( std::unique_ptr ro_heap; auto* ro_space = new ReadOnlySpace(isolate->heap()); - if (COMPRESS_POINTERS_BOOL) { + if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) { ro_heap.reset(new ReadOnlyHeap(ro_space)); } else { std::unique_ptr sole_ro_heap( diff --git a/src/heap/read-only-heap.h b/src/heap/read-only-heap.h index c78ea77452..f947832c5f 100644 --- a/src/heap/read-only-heap.h +++ b/src/heap/read-only-heap.h @@ -87,8 +87,8 @@ class ReadOnlyHeap { // Returns whether the ReadOnlySpace will actually be shared taking into // account whether shared memory is available with pointer compression. static bool IsReadOnlySpaceShared() { - return V8_SHARED_RO_HEAP_BOOL && - (!COMPRESS_POINTERS_BOOL || IsSharedMemoryAvailable()); + return V8_SHARED_RO_HEAP_BOOL && (!COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL || + IsSharedMemoryAvailable()); } virtual void InitializeIsolateRoots(Isolate* isolate) {} diff --git a/src/heap/read-only-spaces.cc b/src/heap/read-only-spaces.cc index b54bfc0389..5adac66afe 100644 --- a/src/heap/read-only-spaces.cc +++ b/src/heap/read-only-spaces.cc @@ -755,9 +755,10 @@ SharedReadOnlySpace::SharedReadOnlySpace( Heap* heap, PointerCompressedReadOnlyArtifacts* artifacts) : SharedReadOnlySpace(heap) { // This constructor should only be used when RO_SPACE is shared with pointer - // compression. + // compression in a per-Isolate cage. 
DCHECK(V8_SHARED_RO_HEAP_BOOL); DCHECK(COMPRESS_POINTERS_BOOL); + DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL); DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared()); DCHECK(!artifacts->pages().empty()); @@ -776,6 +777,7 @@ SharedReadOnlySpace::SharedReadOnlySpace( : SharedReadOnlySpace(heap) { DCHECK(V8_SHARED_RO_HEAP_BOOL); DCHECK(COMPRESS_POINTERS_BOOL); + DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL); DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared()); accounting_stats_ = std::move(new_stats); diff --git a/src/heap/read-only-spaces.h b/src/heap/read-only-spaces.h index ffadcb55b3..ee4b2a8223 100644 --- a/src/heap/read-only-spaces.h +++ b/src/heap/read-only-spaces.h @@ -35,10 +35,11 @@ class ReadOnlyPage : public BasicMemoryChunk { // Returns the address for a given offset in this page. Address OffsetToAddress(size_t offset) const { Address address_in_page = address() + offset; - if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_BOOL) { - // Pointer compression with share ReadOnlyPages means that the area_start - // and area_end cannot be defined since they are stored within the pages - // which can be mapped at multiple memory addresses. + if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) { + // Pointer compression with a per-Isolate cage and shared ReadOnlyPages + // means that the area_start and area_end cannot be defined since they are + // stored within the pages which can be mapped at multiple memory + // addresses. DCHECK_LT(offset, size()); } else { DCHECK_GE(address_in_page, area_start()); diff --git a/src/init/isolate-allocator.cc b/src/init/isolate-allocator.cc index 01ae416181..5db27d288b 100644 --- a/src/init/isolate-allocator.cc +++ b/src/init/isolate-allocator.cc @@ -59,8 +59,8 @@ Address IsolateAllocator::InitReservation() { // Reserve a |4Gb + kIsolateRootBiasPageSize| region such as that the // resevation address plus |kIsolateRootBiasPageSize| is 4Gb aligned. const size_t reservation_size = - kPtrComprHeapReservationSize + kIsolateRootBiasPageSize; - const size_t base_alignment = kPtrComprIsolateRootAlignment; + kPtrComprCageReservationSize + kIsolateRootBiasPageSize; + const size_t base_alignment = kPtrComprCageBaseAlignment; const int kMaxAttempts = 4; for (int attempt = 0; attempt < kMaxAttempts; ++attempt) { @@ -137,11 +137,11 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) { GetIsolateRootBiasPageSize(platform_page_allocator); Address isolate_root = heap_reservation_address + kIsolateRootBiasPageSize; - CHECK(IsAligned(isolate_root, kPtrComprIsolateRootAlignment)); + CHECK(IsAligned(isolate_root, kPtrComprCageBaseAlignment)); CHECK(reservation_.InVM( heap_reservation_address, - kPtrComprHeapReservationSize + kIsolateRootBiasPageSize)); + kPtrComprCageReservationSize + kIsolateRootBiasPageSize)); // Simplify BoundedPageAllocator's life by configuring it to use same page // size as the Heap will use (MemoryChunk::kPageSize). 
@@ -149,7 +149,7 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) { platform_page_allocator->AllocatePageSize()); page_allocator_instance_ = std::make_unique( - platform_page_allocator, isolate_root, kPtrComprHeapReservationSize, + platform_page_allocator, isolate_root, kPtrComprCageReservationSize, page_size); page_allocator_ = page_allocator_instance_.get(); diff --git a/src/objects/code-inl.h b/src/objects/code-inl.h index 49eb3480fb..d6203f0597 100644 --- a/src/objects/code-inl.h +++ b/src/objects/code-inl.h @@ -323,9 +323,9 @@ int Code::SizeIncludingMetadata() const { } ByteArray Code::unchecked_relocation_info() const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); return ByteArray::unchecked_cast( - TaggedField::load(isolate, *this)); + TaggedField::load(cage_base, *this)); } byte* Code::relocation_start() const { diff --git a/src/objects/compressed-slots-inl.h b/src/objects/compressed-slots-inl.h index ecb276ce36..54c828d919 100644 --- a/src/objects/compressed-slots-inl.h +++ b/src/objects/compressed-slots-inl.h @@ -33,9 +33,9 @@ Object CompressedObjectSlot::operator*() const { return Object(DecompressTaggedAny(address(), value)); } -Object CompressedObjectSlot::load(IsolateRoot isolate) const { +Object CompressedObjectSlot::load(PtrComprCageBase cage_base) const { Tagged_t value = *location(); - return Object(DecompressTaggedAny(isolate, value)); + return Object(DecompressTaggedAny(cage_base, value)); } void CompressedObjectSlot::store(Object value) const { @@ -52,9 +52,9 @@ Object CompressedObjectSlot::Relaxed_Load() const { return Object(DecompressTaggedAny(address(), value)); } -Object CompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const { +Object CompressedObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location()); - return Object(DecompressTaggedAny(isolate, value)); + return Object(DecompressTaggedAny(cage_base, value)); } void CompressedObjectSlot::Relaxed_Store(Object value) const { @@ -85,9 +85,9 @@ MaybeObject CompressedMaybeObjectSlot::operator*() const { return MaybeObject(DecompressTaggedAny(address(), value)); } -MaybeObject CompressedMaybeObjectSlot::load(IsolateRoot isolate) const { +MaybeObject CompressedMaybeObjectSlot::load(PtrComprCageBase cage_base) const { Tagged_t value = *location(); - return MaybeObject(DecompressTaggedAny(isolate, value)); + return MaybeObject(DecompressTaggedAny(cage_base, value)); } void CompressedMaybeObjectSlot::store(MaybeObject value) const { @@ -99,9 +99,10 @@ MaybeObject CompressedMaybeObjectSlot::Relaxed_Load() const { return MaybeObject(DecompressTaggedAny(address(), value)); } -MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const { +MaybeObject CompressedMaybeObjectSlot::Relaxed_Load( + PtrComprCageBase cage_base) const { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location()); - return MaybeObject(DecompressTaggedAny(isolate, value)); + return MaybeObject(DecompressTaggedAny(cage_base, value)); } void CompressedMaybeObjectSlot::Relaxed_Store(MaybeObject value) const { @@ -125,9 +126,10 @@ HeapObjectReference CompressedHeapObjectSlot::operator*() const { return HeapObjectReference(DecompressTaggedPointer(address(), value)); } -HeapObjectReference CompressedHeapObjectSlot::load(IsolateRoot isolate) const { +HeapObjectReference CompressedHeapObjectSlot::load( + PtrComprCageBase cage_base) const { Tagged_t value = 
*location(); - return HeapObjectReference(DecompressTaggedPointer(isolate, value)); + return HeapObjectReference(DecompressTaggedPointer(cage_base, value)); } void CompressedHeapObjectSlot::store(HeapObjectReference value) const { @@ -148,23 +150,25 @@ void CompressedHeapObjectSlot::StoreHeapObject(HeapObject value) const { // OffHeapCompressedObjectSlot implementation. // -Object OffHeapCompressedObjectSlot::load(IsolateRoot isolate) const { +Object OffHeapCompressedObjectSlot::load(PtrComprCageBase cage_base) const { Tagged_t value = *location(); - return Object(DecompressTaggedAny(isolate, value)); + return Object(DecompressTaggedAny(cage_base, value)); } void OffHeapCompressedObjectSlot::store(Object value) const { *location() = CompressTagged(value.ptr()); } -Object OffHeapCompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const { +Object OffHeapCompressedObjectSlot::Relaxed_Load( + PtrComprCageBase cage_base) const { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location()); - return Object(DecompressTaggedAny(isolate, value)); + return Object(DecompressTaggedAny(cage_base, value)); } -Object OffHeapCompressedObjectSlot::Acquire_Load(IsolateRoot isolate) const { +Object OffHeapCompressedObjectSlot::Acquire_Load( + PtrComprCageBase cage_base) const { AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location()); - return Object(DecompressTaggedAny(isolate, value)); + return Object(DecompressTaggedAny(cage_base, value)); } void OffHeapCompressedObjectSlot::Relaxed_Store(Object value) const { diff --git a/src/objects/compressed-slots.h b/src/objects/compressed-slots.h index 6f74b723c8..7737e685fe 100644 --- a/src/objects/compressed-slots.h +++ b/src/objects/compressed-slots.h @@ -41,12 +41,12 @@ class CompressedObjectSlot : public SlotBase { // TODO(leszeks): Consider deprecating the operator* load, and always pass the // Isolate. inline Object operator*() const; - inline Object load(IsolateRoot isolate) const; + inline Object load(PtrComprCageBase cage_base) const; inline void store(Object value) const; inline Object Acquire_Load() const; inline Object Relaxed_Load() const; - inline Object Relaxed_Load(IsolateRoot isolate) const; + inline Object Relaxed_Load(PtrComprCageBase cage_base) const; inline void Relaxed_Store(Object value) const; inline void Release_Store(Object value) const; inline Object Release_CompareAndSwap(Object old, Object target) const; @@ -77,11 +77,11 @@ class CompressedMaybeObjectSlot : SlotBase(slot.address()) {} inline MaybeObject operator*() const; - inline MaybeObject load(IsolateRoot isolate) const; + inline MaybeObject load(PtrComprCageBase cage_base) const; inline void store(MaybeObject value) const; inline MaybeObject Relaxed_Load() const; - inline MaybeObject Relaxed_Load(IsolateRoot isolate) const; + inline MaybeObject Relaxed_Load(PtrComprCageBase cage_base) const; inline void Relaxed_Store(MaybeObject value) const; inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const; }; @@ -105,7 +105,7 @@ class CompressedHeapObjectSlot : SlotBase(slot.address()) {} inline HeapObjectReference operator*() const; - inline HeapObjectReference load(IsolateRoot isolate) const; + inline HeapObjectReference load(PtrComprCageBase cage_base) const; inline void store(HeapObjectReference value) const; inline HeapObject ToHeapObject() const; @@ -131,11 +131,11 @@ class OffHeapCompressedObjectSlot explicit OffHeapCompressedObjectSlot(const uint32_t* ptr) : SlotBase(reinterpret_cast
(ptr)) {} - inline Object load(IsolateRoot isolate) const; + inline Object load(PtrComprCageBase cage_base) const; inline void store(Object value) const; - inline Object Relaxed_Load(IsolateRoot isolate) const; - inline Object Acquire_Load(IsolateRoot isolate) const; + inline Object Relaxed_Load(PtrComprCageBase cage_base) const; + inline Object Acquire_Load(PtrComprCageBase cage_base) const; inline void Relaxed_Store(Object value) const; inline void Release_Store(Object value) const; inline void Release_CompareAndSwap(Object old, Object target) const; diff --git a/src/objects/contexts-inl.h b/src/objects/contexts-inl.h index b036b00686..356df687de 100644 --- a/src/objects/contexts-inl.h +++ b/src/objects/contexts-inl.h @@ -56,8 +56,8 @@ NEVER_READ_ONLY_SPACE_IMPL(Context) CAST_ACCESSOR(NativeContext) V8_INLINE Object Context::get(int index) const { return elements(index); } -V8_INLINE Object Context::get(IsolateRoot isolate, int index) const { - return elements(isolate, index); +V8_INLINE Object Context::get(PtrComprCageBase cage_base, int index) const { + return elements(cage_base, index); } V8_INLINE void Context::set(int index, Object value) { set_elements(index, value); @@ -71,11 +71,11 @@ void Context::set_scope_info(ScopeInfo scope_info, WriteBarrierMode mode) { } Object Context::synchronized_get(int index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return synchronized_get(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return synchronized_get(cage_base, index); } -Object Context::synchronized_get(IsolateRoot isolate, int index) const { +Object Context::synchronized_get(PtrComprCageBase cage_base, int index) const { DCHECK_LT(static_cast(index), static_cast(this->length())); return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index)); @@ -243,7 +243,7 @@ Map Context::GetInitialJSArrayMap(ElementsKind kind) const { DEF_GETTER(NativeContext, microtask_queue, MicrotaskQueue*) { return reinterpret_cast(ReadExternalPointerField( - kMicrotaskQueueOffset, isolate, kNativeContextMicrotaskQueueTag)); + kMicrotaskQueueOffset, cage_base, kNativeContextMicrotaskQueueTag)); } void NativeContext::AllocateExternalPointerEntries(Isolate* isolate) { diff --git a/src/objects/contexts.h b/src/objects/contexts.h index cae923ef8e..79aed5d40f 100644 --- a/src/objects/contexts.h +++ b/src/objects/contexts.h @@ -422,13 +422,14 @@ class Context : public TorqueGeneratedContext { // Setter and getter for elements. V8_INLINE Object get(int index) const; - V8_INLINE Object get(IsolateRoot isolate, int index) const; + V8_INLINE Object get(PtrComprCageBase cage_base, int index) const; V8_INLINE void set(int index, Object value); // Setter with explicit barrier mode. V8_INLINE void set(int index, Object value, WriteBarrierMode mode); // Setter and getter with synchronization semantics. 
V8_INLINE Object synchronized_get(int index) const; - V8_INLINE Object synchronized_get(IsolateRoot isolate, int index) const; + V8_INLINE Object synchronized_get(PtrComprCageBase cage_base, + int index) const; V8_INLINE void synchronized_set(int index, Object value); static const int kScopeInfoOffset = kElementsOffset; diff --git a/src/objects/descriptor-array-inl.h b/src/objects/descriptor-array-inl.h index 9a40298420..9bb01ffc4d 100644 --- a/src/objects/descriptor-array-inl.h +++ b/src/objects/descriptor-array-inl.h @@ -106,15 +106,16 @@ ObjectSlot DescriptorArray::GetDescriptorSlot(int descriptor) { } Name DescriptorArray::GetKey(InternalIndex descriptor_number) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return GetKey(isolate, descriptor_number); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return GetKey(cage_base, descriptor_number); } -Name DescriptorArray::GetKey(IsolateRoot isolate, +Name DescriptorArray::GetKey(PtrComprCageBase cage_base, InternalIndex descriptor_number) const { DCHECK_LT(descriptor_number.as_int(), number_of_descriptors()); int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int()); - return Name::cast(EntryKeyField::Relaxed_Load(isolate, *this, entry_offset)); + return Name::cast( + EntryKeyField::Relaxed_Load(cage_base, *this, entry_offset)); } void DescriptorArray::SetKey(InternalIndex descriptor_number, Name key) { @@ -129,12 +130,13 @@ int DescriptorArray::GetSortedKeyIndex(int descriptor_number) { } Name DescriptorArray::GetSortedKey(int descriptor_number) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return GetSortedKey(isolate, descriptor_number); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return GetSortedKey(cage_base, descriptor_number); } -Name DescriptorArray::GetSortedKey(IsolateRoot isolate, int descriptor_number) { - return GetKey(isolate, InternalIndex(GetSortedKeyIndex(descriptor_number))); +Name DescriptorArray::GetSortedKey(PtrComprCageBase cage_base, + int descriptor_number) { + return GetKey(cage_base, InternalIndex(GetSortedKeyIndex(descriptor_number))); } void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) { @@ -143,13 +145,13 @@ void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) { } Object DescriptorArray::GetStrongValue(InternalIndex descriptor_number) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return GetStrongValue(isolate, descriptor_number); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return GetStrongValue(cage_base, descriptor_number); } -Object DescriptorArray::GetStrongValue(IsolateRoot isolate, +Object DescriptorArray::GetStrongValue(PtrComprCageBase cage_base, InternalIndex descriptor_number) { - return GetValue(isolate, descriptor_number).cast(); + return GetValue(cage_base, descriptor_number).cast(); } void DescriptorArray::SetValue(InternalIndex descriptor_number, @@ -161,15 +163,15 @@ void DescriptorArray::SetValue(InternalIndex descriptor_number, } MaybeObject DescriptorArray::GetValue(InternalIndex descriptor_number) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return GetValue(isolate, descriptor_number); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return GetValue(cage_base, descriptor_number); } -MaybeObject DescriptorArray::GetValue(IsolateRoot isolate, +MaybeObject DescriptorArray::GetValue(PtrComprCageBase cage_base, InternalIndex descriptor_number) { DCHECK_LT(descriptor_number.as_int(), number_of_descriptors()); int entry_offset = 
OffsetOfDescriptorAt(descriptor_number.as_int()); - return EntryValueField::Relaxed_Load(isolate, *this, entry_offset); + return EntryValueField::Relaxed_Load(cage_base, *this, entry_offset); } PropertyDetails DescriptorArray::GetDetails(InternalIndex descriptor_number) { @@ -192,14 +194,14 @@ int DescriptorArray::GetFieldIndex(InternalIndex descriptor_number) { } FieldType DescriptorArray::GetFieldType(InternalIndex descriptor_number) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return GetFieldType(isolate, descriptor_number); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return GetFieldType(cage_base, descriptor_number); } -FieldType DescriptorArray::GetFieldType(IsolateRoot isolate, +FieldType DescriptorArray::GetFieldType(PtrComprCageBase cage_base, InternalIndex descriptor_number) { DCHECK_EQ(GetDetails(descriptor_number).location(), kField); - MaybeObject wrapped_type = GetValue(isolate, descriptor_number); + MaybeObject wrapped_type = GetValue(cage_base, descriptor_number); return Map::UnwrapFieldType(wrapped_type); } diff --git a/src/objects/descriptor-array.h b/src/objects/descriptor-array.h index 8120a0eaa0..327931a421 100644 --- a/src/objects/descriptor-array.h +++ b/src/objects/descriptor-array.h @@ -69,22 +69,22 @@ class DescriptorArray // Accessors for fetching instance descriptor at descriptor number. inline Name GetKey(InternalIndex descriptor_number) const; - inline Name GetKey(IsolateRoot isolate, + inline Name GetKey(PtrComprCageBase cage_base, InternalIndex descriptor_number) const; inline Object GetStrongValue(InternalIndex descriptor_number); - inline Object GetStrongValue(IsolateRoot isolate, + inline Object GetStrongValue(PtrComprCageBase cage_base, InternalIndex descriptor_number); inline MaybeObject GetValue(InternalIndex descriptor_number); - inline MaybeObject GetValue(IsolateRoot isolate, + inline MaybeObject GetValue(PtrComprCageBase cage_base, InternalIndex descriptor_number); inline PropertyDetails GetDetails(InternalIndex descriptor_number); inline int GetFieldIndex(InternalIndex descriptor_number); inline FieldType GetFieldType(InternalIndex descriptor_number); - inline FieldType GetFieldType(IsolateRoot isolate, + inline FieldType GetFieldType(PtrComprCageBase cage_base, InternalIndex descriptor_number); inline Name GetSortedKey(int descriptor_number); - inline Name GetSortedKey(IsolateRoot isolate, int descriptor_number); + inline Name GetSortedKey(PtrComprCageBase cage_base, int descriptor_number); inline int GetSortedKeyIndex(int descriptor_number); // Accessor for complete descriptor. 
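The DescriptorArray changes above follow a pattern repeated throughout this patch: compute the cage base once with GetPtrComprCageBase(*this) and thread it through the hot accessors, so each slot load does not re-derive the base from the object address. A rough sketch of that hoisting pattern under the same 4 GB-cage assumption, using invented stand-in types (CageBase, FakeDescriptorArray) rather than V8's:

// cage_base_hoisting_sketch.cc -- illustrative only; not V8 types.
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <vector>

using Address = uintptr_t;
using Tagged_t = uint32_t;
constexpr size_t kCageBaseAlignment = size_t{1} << 32;

struct CageBase {
  Address value;
};

inline CageBase CageBaseOf(Address on_heap_addr) {
  return CageBase{on_heap_addr & ~(kCageBaseAlignment - 1)};
}

struct FakeDescriptorArray {
  Address self;                   // address this "object" pretends to live at
  std::vector<Tagged_t> entries;  // compressed slots

  // Convenience overload: re-derives the cage base from |self| on every call.
  Address Get(size_t i) const { return Get(CageBaseOf(self), i); }

  // Hot-path overload: the caller hoists the cage base out of its loop and
  // passes it in, so the masking happens once rather than per element.
  Address Get(CageBase cage_base, size_t i) const {
    return cage_base.value + static_cast<Address>(entries[i]);
  }
};

Address SumAll(const FakeDescriptorArray& array) {
  const CageBase cage_base = CageBaseOf(array.self);  // hoisted once
  Address sum = 0;
  for (size_t i = 0; i < array.entries.size(); ++i) {
    sum += array.Get(cage_base, i);
  }
  return sum;
}

int main() {
  const Address base = Address{0x7f30} << 32;  // made-up cage base
  FakeDescriptorArray array{base + 0x1000, {0x10, 0x20, 0x30}};
  return SumAll(array) != 0 ? EXIT_SUCCESS : EXIT_FAILURE;
}

Renaming the parameter from IsolateRoot to PtrComprCageBase does not change this calling convention; it only stops implying that the hoisted value is necessarily an Isolate root, which is no longer true once a shared cage exists.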
diff --git a/src/objects/dictionary-inl.h b/src/objects/dictionary-inl.h index 981f5aac93..bb3d8d5879 100644 --- a/src/objects/dictionary-inl.h +++ b/src/objects/dictionary-inl.h @@ -30,15 +30,15 @@ Dictionary::Dictionary(Address ptr) template Object Dictionary::ValueAt(InternalIndex entry) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return ValueAt(isolate, entry); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return ValueAt(cage_base, entry); } template -Object Dictionary::ValueAt(IsolateRoot isolate, +Object Dictionary::ValueAt(PtrComprCageBase cage_base, InternalIndex entry) { - return this->get(isolate, DerivedHashTable::EntryToIndex(entry) + - Derived::kEntryValueIndex); + return this->get(cage_base, DerivedHashTable::EntryToIndex(entry) + + Derived::kEntryValueIndex); } template @@ -181,12 +181,12 @@ Handle GlobalDictionary::GetMap(ReadOnlyRoots roots) { } Name NameDictionary::NameAt(InternalIndex entry) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return NameAt(isolate, entry); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return NameAt(cage_base, entry); } -Name NameDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) { - return Name::cast(KeyAt(isolate, entry)); +Name NameDictionary::NameAt(PtrComprCageBase cage_base, InternalIndex entry) { + return Name::cast(KeyAt(cage_base, entry)); } Handle NameDictionary::GetMap(ReadOnlyRoots roots) { @@ -194,32 +194,33 @@ Handle NameDictionary::GetMap(ReadOnlyRoots roots) { } PropertyCell GlobalDictionary::CellAt(InternalIndex entry) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return CellAt(isolate, entry); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return CellAt(cage_base, entry); } -PropertyCell GlobalDictionary::CellAt(IsolateRoot isolate, +PropertyCell GlobalDictionary::CellAt(PtrComprCageBase cage_base, InternalIndex entry) { - DCHECK(KeyAt(isolate, entry).IsPropertyCell(isolate)); - return PropertyCell::cast(KeyAt(isolate, entry)); + DCHECK(KeyAt(cage_base, entry).IsPropertyCell(cage_base)); + return PropertyCell::cast(KeyAt(cage_base, entry)); } Name GlobalDictionary::NameAt(InternalIndex entry) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return NameAt(isolate, entry); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return NameAt(cage_base, entry); } -Name GlobalDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) { - return CellAt(isolate, entry).name(isolate); +Name GlobalDictionary::NameAt(PtrComprCageBase cage_base, InternalIndex entry) { + return CellAt(cage_base, entry).name(cage_base); } Object GlobalDictionary::ValueAt(InternalIndex entry) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return ValueAt(isolate, entry); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return ValueAt(cage_base, entry); } -Object GlobalDictionary::ValueAt(IsolateRoot isolate, InternalIndex entry) { - return CellAt(isolate, entry).value(isolate); +Object GlobalDictionary::ValueAt(PtrComprCageBase cage_base, + InternalIndex entry) { + return CellAt(cage_base, entry).value(cage_base); } void GlobalDictionary::SetEntry(InternalIndex entry, Object key, Object value, diff --git a/src/objects/dictionary.h b/src/objects/dictionary.h index 2a650f7019..be255f8162 100644 --- a/src/objects/dictionary.h +++ b/src/objects/dictionary.h @@ -39,7 +39,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) Dictionary using Key = typename Shape::Key; // Returns the value at entry. 
inline Object ValueAt(InternalIndex entry); - inline Object ValueAt(IsolateRoot isolate, InternalIndex entry); + inline Object ValueAt(PtrComprCageBase cage_base, InternalIndex entry); // Set the value for entry. inline void ValueAtPut(InternalIndex entry, Object value); @@ -193,7 +193,7 @@ class V8_EXPORT_PRIVATE NameDictionary static const int kInitialCapacity = 2; inline Name NameAt(InternalIndex entry); - inline Name NameAt(IsolateRoot isolate, InternalIndex entry); + inline Name NameAt(PtrComprCageBase cage_base, InternalIndex entry); inline void set_hash(int hash); inline int hash() const; @@ -231,14 +231,14 @@ class V8_EXPORT_PRIVATE GlobalDictionary DECL_PRINTER(GlobalDictionary) inline Object ValueAt(InternalIndex entry); - inline Object ValueAt(IsolateRoot isolate, InternalIndex entry); + inline Object ValueAt(PtrComprCageBase cage_base, InternalIndex entry); inline PropertyCell CellAt(InternalIndex entry); - inline PropertyCell CellAt(IsolateRoot isolate, InternalIndex entry); + inline PropertyCell CellAt(PtrComprCageBase cage_base, InternalIndex entry); inline void SetEntry(InternalIndex entry, Object key, Object value, PropertyDetails details); inline void ClearEntry(InternalIndex entry); inline Name NameAt(InternalIndex entry); - inline Name NameAt(IsolateRoot isolate, InternalIndex entry); + inline Name NameAt(PtrComprCageBase cage_base, InternalIndex entry); inline void ValueAtPut(InternalIndex entry, Object value); OBJECT_CONSTRUCTORS( diff --git a/src/objects/elements.cc b/src/objects/elements.cc index 1882024d4e..9b1c7936bb 100644 --- a/src/objects/elements.cc +++ b/src/objects/elements.cc @@ -1421,10 +1421,10 @@ class DictionaryElementsAccessor DisallowGarbageCollection no_gc; NumberDictionary dict = NumberDictionary::cast(backing_store); if (!dict.requires_slow_elements()) return false; - IsolateRoot isolate = GetIsolateForPtrCompr(holder); - ReadOnlyRoots roots = holder.GetReadOnlyRoots(isolate); + PtrComprCageBase cage_base = GetPtrComprCageBase(holder); + ReadOnlyRoots roots = holder.GetReadOnlyRoots(cage_base); for (InternalIndex i : dict.IterateEntries()) { - Object key = dict.KeyAt(isolate, i); + Object key = dict.KeyAt(cage_base, i); if (!dict.IsKey(roots, key)) continue; PropertyDetails details = dict.DetailsAt(i); if (details.kind() == kAccessor) return true; diff --git a/src/objects/embedder-data-slot-inl.h b/src/objects/embedder-data-slot-inl.h index f9ef6e1e56..3f8deb39f0 100644 --- a/src/objects/embedder-data-slot-inl.h +++ b/src/objects/embedder-data-slot-inl.h @@ -81,7 +81,7 @@ void EmbedderDataSlot::store_tagged(JSObject object, int embedder_field_index, #endif } -bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root, +bool EmbedderDataSlot::ToAlignedPointer(PtrComprCageBase isolate_root, void** out_pointer) const { // We don't care about atomicity of access here because embedder slots // are accessed this way only from the main thread via API during "mutator" @@ -89,6 +89,12 @@ bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root, // at the tagged part of the embedder slot but read-only access is ok). 
Address raw_value; #ifdef V8_HEAP_SANDBOX + + // TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage +#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE +#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage" +#endif + uint32_t index = base::Memory(address() + kRawPayloadOffset); const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address()); raw_value = isolate->external_pointer_table().get(index) ^ @@ -108,9 +114,15 @@ bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root, return HAS_SMI_TAG(raw_value); } -bool EmbedderDataSlot::ToAlignedPointerSafe(IsolateRoot isolate_root, +bool EmbedderDataSlot::ToAlignedPointerSafe(PtrComprCageBase isolate_root, void** out_pointer) const { #ifdef V8_HEAP_SANDBOX + + // TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage +#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE +#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage" +#endif + uint32_t index = base::Memory(address() + kRawPayloadOffset); Address raw_value; const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address()); diff --git a/src/objects/embedder-data-slot.h b/src/objects/embedder-data-slot.h index 8f4fcc8af2..65fe78403a 100644 --- a/src/objects/embedder-data-slot.h +++ b/src/objects/embedder-data-slot.h @@ -75,7 +75,8 @@ class EmbedderDataSlot // When V8 heap sandbox is enabled, calling this method when the raw part of // the slot does not contain valid external pointer table index is undefined // behaviour and most likely result in crashes. - V8_INLINE bool ToAlignedPointer(IsolateRoot isolate, void** out_result) const; + V8_INLINE bool ToAlignedPointer(PtrComprCageBase isolate_root, + void** out_result) const; // Same as ToAlignedPointer() but with a workaround for V8 heap sandbox. // When V8 heap sandbox is enabled, this method doesn't crash when the raw @@ -86,7 +87,7 @@ class EmbedderDataSlot // // Call this function if you are not sure whether the slot contains valid // external pointer or not. - V8_INLINE bool ToAlignedPointerSafe(IsolateRoot isolate, + V8_INLINE bool ToAlignedPointerSafe(PtrComprCageBase isolate_root, void** out_result) const; // Returns true if the pointer was successfully stored or false it the pointer diff --git a/src/objects/feedback-vector-inl.h b/src/objects/feedback-vector-inl.h index a66ec312f6..8853dabdbd 100644 --- a/src/objects/feedback-vector-inl.h +++ b/src/objects/feedback-vector-inl.h @@ -187,8 +187,9 @@ MaybeObject FeedbackVector::Get(FeedbackSlot slot) const { return value; } -MaybeObject FeedbackVector::Get(IsolateRoot isolate, FeedbackSlot slot) const { - MaybeObject value = raw_feedback_slots(isolate, GetIndex(slot)); +MaybeObject FeedbackVector::Get(PtrComprCageBase cage_base, + FeedbackSlot slot) const { + MaybeObject value = raw_feedback_slots(cage_base, GetIndex(slot)); DCHECK(!IsOfLegacyType(value)); return value; } diff --git a/src/objects/feedback-vector.h b/src/objects/feedback-vector.h index e6a850fe52..cc5e867f72 100644 --- a/src/objects/feedback-vector.h +++ b/src/objects/feedback-vector.h @@ -259,7 +259,7 @@ class FeedbackVector WriteBarrierMode mode = UPDATE_WRITE_BARRIER); inline MaybeObject Get(FeedbackSlot slot) const; - inline MaybeObject Get(IsolateRoot isolate, FeedbackSlot slot) const; + inline MaybeObject Get(PtrComprCageBase cage_base, FeedbackSlot slot) const; // Returns the feedback cell at |index| that is used to create the // closure. 
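The two hunks above also add a compile-time guard: the sandboxed slot lookup recovers the owning Isolate from the cage base via Isolate::FromRootAddress(), which is only meaningful while each Isolate has its own cage, so a build combining V8_HEAP_SANDBOX with the shared cage is rejected up front. The guard shown un-flattened (macro names and message are taken from the diff; the comment is an explanatory gloss):

  #ifdef V8_HEAP_SANDBOX
  // The sandbox path resolves external pointers through a per-Isolate table,
  // looked up via Isolate::FromRootAddress(cage_base); a process-wide shared
  // cage would make that mapping ambiguous, so such builds fail to compile.
  #ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
  #error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
  #endif
  #endif  // V8_HEAP_SANDBOX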
diff --git a/src/objects/field-index-inl.h b/src/objects/field-index-inl.h index 09056cfd99..64bb421301 100644 --- a/src/objects/field-index-inl.h +++ b/src/objects/field-index-inl.h @@ -61,13 +61,13 @@ int FieldIndex::GetLoadByFieldIndex() const { } FieldIndex FieldIndex::ForDescriptor(Map map, InternalIndex descriptor_index) { - IsolateRoot isolate = GetIsolateForPtrCompr(map); - return ForDescriptor(isolate, map, descriptor_index); + PtrComprCageBase cage_base = GetPtrComprCageBase(map); + return ForDescriptor(cage_base, map, descriptor_index); } -FieldIndex FieldIndex::ForDescriptor(IsolateRoot isolate, Map map, +FieldIndex FieldIndex::ForDescriptor(PtrComprCageBase cage_base, Map map, InternalIndex descriptor_index) { - PropertyDetails details = map.instance_descriptors(isolate, kRelaxedLoad) + PropertyDetails details = map.instance_descriptors(cage_base, kRelaxedLoad) .GetDetails(descriptor_index); int field_index = details.field_index(); return ForPropertyIndex(map, field_index, details.representation()); diff --git a/src/objects/field-index.h b/src/objects/field-index.h index 7819c8c06b..7ccf049269 100644 --- a/src/objects/field-index.h +++ b/src/objects/field-index.h @@ -31,7 +31,7 @@ class FieldIndex final { static inline FieldIndex ForInObjectOffset(int offset, Encoding encoding); static inline FieldIndex ForDescriptor(Map map, InternalIndex descriptor_index); - static inline FieldIndex ForDescriptor(IsolateRoot isolate, Map map, + static inline FieldIndex ForDescriptor(PtrComprCageBase cage_base, Map map, InternalIndex descriptor_index); inline int GetLoadByFieldIndex() const; diff --git a/src/objects/fixed-array-inl.h b/src/objects/fixed-array-inl.h index a91f89784f..3a7c292fe5 100644 --- a/src/objects/fixed-array-inl.h +++ b/src/objects/fixed-array-inl.h @@ -70,13 +70,13 @@ bool FixedArray::ContainsOnlySmisOrHoles() { } Object FixedArray::get(int index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return get(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return get(cage_base, index); } -Object FixedArray::get(IsolateRoot isolate, int index) const { +Object FixedArray::get(PtrComprCageBase cage_base, int index) const { DCHECK_LT(static_cast(index), static_cast(length())); - return TaggedField::Relaxed_Load(isolate, *this, + return TaggedField::Relaxed_Load(cage_base, *this, OffsetOfElementAt(index)); } @@ -124,11 +124,12 @@ void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) { } Object FixedArray::get(int index, RelaxedLoadTag) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return get(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return get(cage_base, index); } -Object FixedArray::get(IsolateRoot isolate, int index, RelaxedLoadTag) const { +Object FixedArray::get(PtrComprCageBase cage_base, int index, + RelaxedLoadTag) const { DCHECK_LT(static_cast(index), static_cast(length())); return RELAXED_READ_FIELD(*this, OffsetOfElementAt(index)); } @@ -147,11 +148,12 @@ void FixedArray::set(int index, Smi value, RelaxedStoreTag tag) { } Object FixedArray::get(int index, AcquireLoadTag) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return get(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return get(cage_base, index); } -Object FixedArray::get(IsolateRoot isolate, int index, AcquireLoadTag) const { +Object FixedArray::get(PtrComprCageBase cage_base, int index, + AcquireLoadTag) const { DCHECK_LT(static_cast(index), 
static_cast(length())); return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index)); } @@ -435,13 +437,13 @@ void FixedDoubleArray::FillWithHoles(int from, int to) { } MaybeObject WeakFixedArray::Get(int index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return Get(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return Get(cage_base, index); } -MaybeObject WeakFixedArray::Get(IsolateRoot isolate, int index) const { +MaybeObject WeakFixedArray::Get(PtrComprCageBase cage_base, int index) const { DCHECK_LT(static_cast(index), static_cast(length())); - return objects(isolate, index); + return objects(cage_base, index); } void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) { @@ -470,13 +472,13 @@ void WeakFixedArray::CopyElements(Isolate* isolate, int dst_index, } MaybeObject WeakArrayList::Get(int index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return Get(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return Get(cage_base, index); } -MaybeObject WeakArrayList::Get(IsolateRoot isolate, int index) const { +MaybeObject WeakArrayList::Get(PtrComprCageBase cage_base, int index) const { DCHECK_LT(static_cast(index), static_cast(capacity())); - return objects(isolate, index); + return objects(cage_base, index); } void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) { @@ -525,8 +527,8 @@ Object ArrayList::Get(int index) const { return FixedArray::cast(*this).get(kFirstIndex + index); } -Object ArrayList::Get(IsolateRoot isolate, int index) const { - return FixedArray::cast(*this).get(isolate, kFirstIndex + index); +Object ArrayList::Get(PtrComprCageBase cage_base, int index) const { + return FixedArray::cast(*this).get(cage_base, kFirstIndex + index); } ObjectSlot ArrayList::Slot(int index) { @@ -650,8 +652,8 @@ Object TemplateList::get(int index) const { return FixedArray::cast(*this).get(kFirstElementIndex + index); } -Object TemplateList::get(IsolateRoot isolate, int index) const { - return FixedArray::cast(*this).get(isolate, kFirstElementIndex + index); +Object TemplateList::get(PtrComprCageBase cage_base, int index) const { + return FixedArray::cast(*this).get(cage_base, kFirstElementIndex + index); } void TemplateList::set(int index, Object value) { diff --git a/src/objects/fixed-array.h b/src/objects/fixed-array.h index 53b4cbb22b..98c5d8d5b5 100644 --- a/src/objects/fixed-array.h +++ b/src/objects/fixed-array.h @@ -101,7 +101,7 @@ class FixedArray public: // Setter and getter for elements. inline Object get(int index) const; - inline Object get(IsolateRoot isolate, int index) const; + inline Object get(PtrComprCageBase cage_base, int index) const; static inline Handle get(FixedArray array, int index, Isolate* isolate); @@ -113,14 +113,16 @@ class FixedArray // Relaxed accessors. inline Object get(int index, RelaxedLoadTag) const; - inline Object get(IsolateRoot isolate, int index, RelaxedLoadTag) const; + inline Object get(PtrComprCageBase cage_base, int index, + RelaxedLoadTag) const; inline void set(int index, Object value, RelaxedStoreTag, WriteBarrierMode mode = UPDATE_WRITE_BARRIER); inline void set(int index, Smi value, RelaxedStoreTag); // Acquire/release accessors. 
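The cage_base-taking get()/Get() overloads declared above exist mainly so that hot callers can hoist the base computation out of their loops, as the DictionaryElementsAccessor change in elements.cc does with GetPtrComprCageBase(holder). A stand-alone sketch of that usage (not V8 code; the types are illustrative stand-ins):

  #include <cstddef>
  #include <cstdint>
  #include <utility>
  #include <vector>

  struct CageBaseSketch {
    std::uintptr_t address = 0;
  };

  class FixedArraySketch {
   public:
    explicit FixedArraySketch(std::vector<int> values)
        : values_(std::move(values)) {}

    // Stand-in for GetPtrComprCageBase(*this).
    CageBaseSketch cage_base() const { return CageBaseSketch{}; }

    int get(CageBaseSketch /*cage_base*/, std::size_t index) const {
      return values_[index];
    }
    std::size_t length() const { return values_.size(); }

   private:
    std::vector<int> values_;
  };

  long SumAll(const FixedArraySketch& array) {
    // Compute the base once, outside the loop, mirroring
    //   PtrComprCageBase cage_base = GetPtrComprCageBase(holder);
    //   ... dict.KeyAt(cage_base, i) ...
    CageBaseSketch cage_base = array.cage_base();
    long sum = 0;
    for (std::size_t i = 0; i < array.length(); ++i) {
      sum += array.get(cage_base, i);
    }
    return sum;
  }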
inline Object get(int index, AcquireLoadTag) const; - inline Object get(IsolateRoot isolate, int index, AcquireLoadTag) const; + inline Object get(PtrComprCageBase cage_base, int index, + AcquireLoadTag) const; inline void set(int index, Object value, ReleaseStoreTag, WriteBarrierMode mode = UPDATE_WRITE_BARRIER); inline void set(int index, Smi value, ReleaseStoreTag); @@ -275,7 +277,7 @@ class WeakFixedArray : public TorqueGeneratedWeakFixedArray { public: inline MaybeObject Get(int index) const; - inline MaybeObject Get(IsolateRoot isolate, int index) const; + inline MaybeObject Get(PtrComprCageBase cage_base, int index) const; inline void Set( int index, MaybeObject value, @@ -350,7 +352,7 @@ class WeakArrayList V8_EXPORT_PRIVATE void Compact(Isolate* isolate); inline MaybeObject Get(int index) const; - inline MaybeObject Get(IsolateRoot isolate, int index) const; + inline MaybeObject Get(PtrComprCageBase cage_base, int index) const; // Set the element at index to obj. The underlying array must be large enough. // If you need to grow the WeakArrayList, use the static AddToEnd() method @@ -450,7 +452,7 @@ class ArrayList : public TorqueGeneratedArrayList { // storage capacity, i.e., length(). inline void SetLength(int length); inline Object Get(int index) const; - inline Object Get(IsolateRoot isolate, int index) const; + inline Object Get(PtrComprCageBase cage_base, int index) const; inline ObjectSlot Slot(int index); // Set the element at index to obj. The underlying array must be large enough. @@ -596,7 +598,7 @@ class TemplateList static Handle New(Isolate* isolate, int size); inline int length() const; inline Object get(int index) const; - inline Object get(IsolateRoot isolate, int index) const; + inline Object get(PtrComprCageBase cage_base, int index) const; inline void set(int index, Object value); static Handle Add(Isolate* isolate, Handle list, Handle value); diff --git a/src/objects/foreign-inl.h b/src/objects/foreign-inl.h index cb3dac91eb..150857f49a 100644 --- a/src/objects/foreign-inl.h +++ b/src/objects/foreign-inl.h @@ -29,7 +29,7 @@ bool Foreign::IsNormalized(Object value) { } DEF_GETTER(Foreign, foreign_address, Address) { - return ReadExternalPointerField(kForeignAddressOffset, isolate, + return ReadExternalPointerField(kForeignAddressOffset, cage_base, kForeignForeignAddressTag); } diff --git a/src/objects/hash-table-inl.h b/src/objects/hash-table-inl.h index 08f30ad004..27645058b3 100644 --- a/src/objects/hash-table-inl.h +++ b/src/objects/hash-table-inl.h @@ -139,7 +139,7 @@ InternalIndex HashTable::FindEntry(LocalIsolate* isolate, // Find entry for key otherwise return kNotFound. template -InternalIndex HashTable::FindEntry(IsolateRoot isolate, +InternalIndex HashTable::FindEntry(PtrComprCageBase cage_base, ReadOnlyRoots roots, Key key, int32_t hash) { DisallowGarbageCollection no_gc; @@ -151,7 +151,7 @@ InternalIndex HashTable::FindEntry(IsolateRoot isolate, // EnsureCapacity will guarantee the hash table is never full. for (InternalIndex entry = FirstProbe(hash, capacity);; entry = NextProbe(entry, count++, capacity)) { - Object element = KeyAt(isolate, entry); + Object element = KeyAt(cage_base, entry); // Empty entry. Uses raw unchecked accessors because it is called by the // string table during bootstrapping. 
if (element == undefined) return InternalIndex::NotFound(); @@ -177,24 +177,24 @@ bool HashTable::ToKey(ReadOnlyRoots roots, InternalIndex entry, } template -bool HashTable::ToKey(IsolateRoot isolate, InternalIndex entry, - Object* out_k) { - Object k = KeyAt(isolate, entry); - if (!IsKey(GetReadOnlyRoots(isolate), k)) return false; +bool HashTable::ToKey(PtrComprCageBase cage_base, + InternalIndex entry, Object* out_k) { + Object k = KeyAt(cage_base, entry); + if (!IsKey(GetReadOnlyRoots(cage_base), k)) return false; *out_k = Shape::Unwrap(k); return true; } template Object HashTable::KeyAt(InternalIndex entry) { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return KeyAt(isolate, entry); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return KeyAt(cage_base, entry); } template -Object HashTable::KeyAt(IsolateRoot isolate, +Object HashTable::KeyAt(PtrComprCageBase cage_base, InternalIndex entry) { - return get(isolate, EntryToIndex(entry) + kEntryKeyIndex); + return get(cage_base, EntryToIndex(entry) + kEntryKeyIndex); } template diff --git a/src/objects/hash-table.h b/src/objects/hash-table.h index 39d8e326f6..12ac020fb7 100644 --- a/src/objects/hash-table.h +++ b/src/objects/hash-table.h @@ -138,24 +138,25 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable void IterateElements(ObjectVisitor* visitor); // Find entry for key otherwise return kNotFound. - inline InternalIndex FindEntry(IsolateRoot isolate, ReadOnlyRoots roots, - Key key, int32_t hash); + inline InternalIndex FindEntry(PtrComprCageBase cage_base, + ReadOnlyRoots roots, Key key, int32_t hash); template inline InternalIndex FindEntry(LocalIsolate* isolate, Key key); // Rehashes the table in-place. - void Rehash(IsolateRoot isolate); + void Rehash(PtrComprCageBase cage_base); // Returns whether k is a real key. The hole and undefined are not allowed as // keys and can be used to indicate missing or deleted elements. static inline bool IsKey(ReadOnlyRoots roots, Object k); inline bool ToKey(ReadOnlyRoots roots, InternalIndex entry, Object* out_k); - inline bool ToKey(IsolateRoot isolate, InternalIndex entry, Object* out_k); + inline bool ToKey(PtrComprCageBase cage_base, InternalIndex entry, + Object* out_k); // Returns the key at entry. inline Object KeyAt(InternalIndex entry); - inline Object KeyAt(IsolateRoot isolate, InternalIndex entry); + inline Object KeyAt(PtrComprCageBase cage_base, InternalIndex entry); static const int kElementsStartIndex = kPrefixStartIndex + Shape::kPrefixSize; static const int kEntrySize = Shape::kEntrySize; @@ -217,8 +218,8 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable // Find the entry at which to insert element with the given key that // has the given hash value. - InternalIndex FindInsertionEntry(IsolateRoot isolate, ReadOnlyRoots roots, - uint32_t hash); + InternalIndex FindInsertionEntry(PtrComprCageBase cage_base, + ReadOnlyRoots roots, uint32_t hash); InternalIndex FindInsertionEntry(Isolate* isolate, uint32_t hash); // Computes the capacity a table with the given capacity would need to have @@ -231,7 +232,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable Isolate* isolate, Handle table, int additionalCapacity = 0); // Rehashes this hash-table into the new table. 
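For context on the FindEntry() loop touched above: it is an open-addressing probe that starts at a hash-derived slot and walks a probe sequence until it reaches an empty slot (key absent) or a matching key. A stand-alone sketch of that structure (not V8 code; the (entry + count) step is an assumption about NextProbe, the diff only shows the FirstProbe/NextProbe calls):

  #include <cstdint>
  #include <optional>
  #include <vector>

  struct SlotSketch {
    bool empty = true;
    std::uint32_t key = 0;
    int value = 0;
  };

  // table.size() must be a power of two and the table must never be full,
  // mirroring the EnsureCapacity invariant noted in the hunk above; otherwise
  // this loop would not terminate.
  std::optional<int> FindEntrySketch(const std::vector<SlotSketch>& table,
                                     std::uint32_t hash, std::uint32_t key) {
    const std::uint32_t capacity = static_cast<std::uint32_t>(table.size());
    std::uint32_t entry = hash & (capacity - 1);    // like FirstProbe
    for (std::uint32_t count = 1;; ++count) {
      const SlotSketch& slot = table[entry];
      if (slot.empty) return std::nullopt;          // like InternalIndex::NotFound()
      if (slot.key == key) return slot.value;
      entry = (entry + count) & (capacity - 1);     // like NextProbe (assumed step)
    }
  }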
- void Rehash(IsolateRoot isolate, Derived new_table); + void Rehash(PtrComprCageBase cage_base, Derived new_table); inline void set_key(int index, Object value); inline void set_key(int index, Object value, WriteBarrierMode mode); @@ -322,7 +323,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) ObjectHashTableBase // returned in case the key is not present. Object Lookup(Handle key); Object Lookup(Handle key, int32_t hash); - Object Lookup(IsolateRoot isolate, Handle key, int32_t hash); + Object Lookup(PtrComprCageBase cage_base, Handle key, int32_t hash); // Returns the value at entry. Object ValueAt(InternalIndex entry); diff --git a/src/objects/heap-object.h b/src/objects/heap-object.h index e62356218d..e0aea97537 100644 --- a/src/objects/heap-object.h +++ b/src/objects/heap-object.h @@ -70,12 +70,12 @@ class HeapObject : public Object { // places where it might not be safe to access it. inline ReadOnlyRoots GetReadOnlyRoots() const; // This version is intended to be used for the isolate values produced by - // i::GetIsolateForPtrCompr(HeapObject) function which may return nullptr. - inline ReadOnlyRoots GetReadOnlyRoots(IsolateRoot isolate) const; + // i::GetPtrComprCageBase(HeapObject) function which may return nullptr. + inline ReadOnlyRoots GetReadOnlyRoots(PtrComprCageBase cage_base) const; #define IS_TYPE_FUNCTION_DECL(Type) \ V8_INLINE bool Is##Type() const; \ - V8_INLINE bool Is##Type(IsolateRoot isolate) const; + V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const; HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL) IS_TYPE_FUNCTION_DECL(HashTableBase) IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable) @@ -96,7 +96,7 @@ class HeapObject : public Object { #define DECL_STRUCT_PREDICATE(NAME, Name, name) \ V8_INLINE bool Is##Name() const; \ - V8_INLINE bool Is##Name(IsolateRoot isolate) const; + V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const; STRUCT_LIST(DECL_STRUCT_PREDICATE) #undef DECL_STRUCT_PREDICATE diff --git a/src/objects/js-array-buffer-inl.h b/src/objects/js-array-buffer-inl.h index 66389ce431..b4aa5e33b9 100644 --- a/src/objects/js-array-buffer-inl.h +++ b/src/objects/js-array-buffer-inl.h @@ -43,7 +43,7 @@ void JSArrayBuffer::set_byte_length(size_t value) { } DEF_GETTER(JSArrayBuffer, backing_store, void*) { - Address value = ReadExternalPointerField(kBackingStoreOffset, isolate, + Address value = ReadExternalPointerField(kBackingStoreOffset, cage_base, kArrayBufferBackingStoreTag); return reinterpret_cast(value); } @@ -199,7 +199,7 @@ void JSTypedArray::set_length(size_t value) { } DEF_GETTER(JSTypedArray, external_pointer, Address) { - return ReadExternalPointerField(kExternalPointerOffset, isolate, + return ReadExternalPointerField(kExternalPointerOffset, cage_base, kTypedArrayExternalPointerTag); } @@ -213,9 +213,9 @@ void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) { } Address JSTypedArray::ExternalPointerCompensationForOnHeapArray( - IsolateRoot isolate) { + PtrComprCageBase cage_base) { #ifdef V8_COMPRESS_POINTERS - return isolate.address(); + return cage_base.address(); #else return 0; #endif @@ -321,7 +321,7 @@ MaybeHandle JSTypedArray::Validate(Isolate* isolate, DEF_GETTER(JSDataView, data_pointer, void*) { return reinterpret_cast(ReadExternalPointerField( - kDataPointerOffset, isolate, kDataViewDataPointerTag)); + kDataPointerOffset, cage_base, kDataViewDataPointerTag)); } void JSDataView::AllocateExternalPointerEntries(Isolate* isolate) { diff --git a/src/objects/js-array-buffer.h b/src/objects/js-array-buffer.h index 
0c259ddece..3ec5e0d517 100644 --- a/src/objects/js-array-buffer.h +++ b/src/objects/js-array-buffer.h @@ -300,7 +300,7 @@ class JSTypedArray // as Tagged_t value and an |external_pointer| value. // For full-pointer mode the compensation value is zero. static inline Address ExternalPointerCompensationForOnHeapArray( - IsolateRoot isolate); + PtrComprCageBase cage_base); // // Serializer/deserializer support. diff --git a/src/objects/js-array-inl.h b/src/objects/js-array-inl.h index b53a8919a5..ed7ab4e003 100644 --- a/src/objects/js-array-inl.h +++ b/src/objects/js-array-inl.h @@ -22,7 +22,7 @@ CAST_ACCESSOR(JSArray) CAST_ACCESSOR(JSArrayIterator) DEF_GETTER(JSArray, length, Object) { - return TaggedField::load(isolate, *this); + return TaggedField::load(cage_base, *this); } void JSArray::set_length(Object value, WriteBarrierMode mode) { @@ -31,8 +31,8 @@ void JSArray::set_length(Object value, WriteBarrierMode mode) { CONDITIONAL_WRITE_BARRIER(*this, kLengthOffset, value, mode); } -Object JSArray::length(IsolateRoot isolate, RelaxedLoadTag tag) const { - return TaggedField::Relaxed_Load(isolate, *this); +Object JSArray::length(PtrComprCageBase cage_base, RelaxedLoadTag tag) const { + return TaggedField::Relaxed_Load(cage_base, *this); } void JSArray::set_length(Smi length) { diff --git a/src/objects/js-array.h b/src/objects/js-array.h index b1bdad314c..a8b336d2be 100644 --- a/src/objects/js-array.h +++ b/src/objects/js-array.h @@ -32,7 +32,7 @@ class JSArray : public JSObject { // acquire/release semantics ever become necessary, the default setter should // be reverted to non-atomic behavior, and setters with explicit tags // introduced and used when required. - Object length(IsolateRoot isolate, AcquireLoadTag tag) const = delete; + Object length(PtrComprCageBase cage_base, AcquireLoadTag tag) const = delete; void set_length(Object value, ReleaseStoreTag tag, WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete; diff --git a/src/objects/js-function-inl.h b/src/objects/js-function-inl.h index adf52a908d..5c8cb5b644 100644 --- a/src/objects/js-function-inl.h +++ b/src/objects/js-function-inl.h @@ -210,63 +210,62 @@ ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, HeapObject, kPrototypeOrInitialMapOffset, map().has_prototype_slot()) DEF_GETTER(JSFunction, has_prototype_slot, bool) { - return map(isolate).has_prototype_slot(); + return map(cage_base).has_prototype_slot(); } DEF_GETTER(JSFunction, initial_map, Map) { - return Map::cast(prototype_or_initial_map(isolate)); + return Map::cast(prototype_or_initial_map(cage_base)); } DEF_GETTER(JSFunction, has_initial_map, bool) { - DCHECK(has_prototype_slot(isolate)); - return prototype_or_initial_map(isolate).IsMap(isolate); + DCHECK(has_prototype_slot(cage_base)); + return prototype_or_initial_map(cage_base).IsMap(cage_base); } DEF_GETTER(JSFunction, has_instance_prototype, bool) { - DCHECK(has_prototype_slot(isolate)); - // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by - // i::GetIsolateForPtrCompr(HeapObject). 
- return has_initial_map(isolate) || - !prototype_or_initial_map(isolate).IsTheHole( - GetReadOnlyRoots(isolate)); + DCHECK(has_prototype_slot(cage_base)); + return has_initial_map(cage_base) || + !prototype_or_initial_map(cage_base).IsTheHole( + GetReadOnlyRoots(cage_base)); } DEF_GETTER(JSFunction, has_prototype, bool) { - DCHECK(has_prototype_slot(isolate)); - return map(isolate).has_non_instance_prototype() || - has_instance_prototype(isolate); + DCHECK(has_prototype_slot(cage_base)); + return map(cage_base).has_non_instance_prototype() || + has_instance_prototype(cage_base); } DEF_GETTER(JSFunction, has_prototype_property, bool) { - return (has_prototype_slot(isolate) && IsConstructor(isolate)) || - IsGeneratorFunction(shared(isolate).kind()); + return (has_prototype_slot(cage_base) && IsConstructor(cage_base)) || + IsGeneratorFunction(shared(cage_base).kind()); } DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) { - return !has_prototype_property(isolate) || - map(isolate).has_non_instance_prototype(); + return !has_prototype_property(cage_base) || + map(cage_base).has_non_instance_prototype(); } DEF_GETTER(JSFunction, instance_prototype, HeapObject) { - DCHECK(has_instance_prototype(isolate)); - if (has_initial_map(isolate)) return initial_map(isolate).prototype(isolate); + DCHECK(has_instance_prototype(cage_base)); + if (has_initial_map(cage_base)) + return initial_map(cage_base).prototype(cage_base); // When there is no initial map and the prototype is a JSReceiver, the // initial map field is used for the prototype field. - return HeapObject::cast(prototype_or_initial_map(isolate)); + return HeapObject::cast(prototype_or_initial_map(cage_base)); } DEF_GETTER(JSFunction, prototype, Object) { - DCHECK(has_prototype(isolate)); + DCHECK(has_prototype(cage_base)); // If the function's prototype property has been set to a non-JSReceiver // value, that value is stored in the constructor field of the map. - if (map(isolate).has_non_instance_prototype()) { - Object prototype = map(isolate).GetConstructor(isolate); + if (map(cage_base).has_non_instance_prototype()) { + Object prototype = map(cage_base).GetConstructor(cage_base); // The map must have a prototype in that field, not a back pointer. 
- DCHECK(!prototype.IsMap(isolate)); - DCHECK(!prototype.IsFunctionTemplateInfo(isolate)); + DCHECK(!prototype.IsMap(cage_base)); + DCHECK(!prototype.IsFunctionTemplateInfo(cage_base)); return prototype; } - return instance_prototype(isolate); + return instance_prototype(cage_base); } bool JSFunction::is_compiled() const { diff --git a/src/objects/js-objects-inl.h b/src/objects/js-objects-inl.h index 4c577b45f8..cbbbc9fc9e 100644 --- a/src/objects/js-objects-inl.h +++ b/src/objects/js-objects-inl.h @@ -52,11 +52,12 @@ CAST_ACCESSOR(JSMessageObject) CAST_ACCESSOR(JSReceiver) DEF_GETTER(JSObject, elements, FixedArrayBase) { - return TaggedField::load(isolate, *this); + return TaggedField::load(cage_base, *this); } -FixedArrayBase JSObject::elements(IsolateRoot isolate, RelaxedLoadTag) const { - return TaggedField::Relaxed_Load(isolate, +FixedArrayBase JSObject::elements(PtrComprCageBase cage_base, + RelaxedLoadTag) const { + return TaggedField::Relaxed_Load(cage_base, *this); } @@ -249,11 +250,11 @@ void JSObject::initialize_elements() { } DEF_GETTER(JSObject, GetIndexedInterceptor, InterceptorInfo) { - return map(isolate).GetIndexedInterceptor(isolate); + return map(cage_base).GetIndexedInterceptor(cage_base); } DEF_GETTER(JSObject, GetNamedInterceptor, InterceptorInfo) { - return map(isolate).GetNamedInterceptor(isolate); + return map(cage_base).GetNamedInterceptor(cage_base); } // static @@ -322,16 +323,17 @@ void JSObject::SetEmbedderField(int index, Smi value) { // is needed to correctly distinguish between properties stored in-object and // properties stored in the properties array. Object JSObject::RawFastPropertyAt(FieldIndex index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return RawFastPropertyAt(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return RawFastPropertyAt(cage_base, index); } -Object JSObject::RawFastPropertyAt(IsolateRoot isolate, +Object JSObject::RawFastPropertyAt(PtrComprCageBase cage_base, FieldIndex index) const { if (index.is_inobject()) { - return TaggedField::load(isolate, *this, index.offset()); + return TaggedField::load(cage_base, *this, index.offset()); } else { - return property_array(isolate).get(isolate, index.outobject_array_index()); + return property_array(cage_base).get(cage_base, + index.outobject_array_index()); } } @@ -425,7 +427,7 @@ ACCESSORS(JSGlobalObject, native_context, NativeContext, kNativeContextOffset) ACCESSORS(JSGlobalObject, global_proxy, JSGlobalProxy, kGlobalProxyOffset) DEF_GETTER(JSGlobalObject, native_context_unchecked, Object) { - return TaggedField::load(isolate, *this); + return TaggedField::load(cage_base, *this); } bool JSMessageObject::DidEnsureSourcePositionsAvailable() const { @@ -461,119 +463,119 @@ SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset) SMI_ACCESSORS(JSMessageObject, raw_type, kMessageTypeOffset) DEF_GETTER(JSObject, GetElementsKind, ElementsKind) { - ElementsKind kind = map(isolate).elements_kind(); + ElementsKind kind = map(cage_base).elements_kind(); #if VERIFY_HEAP && DEBUG FixedArrayBase fixed_array = FixedArrayBase::unchecked_cast( - TaggedField::load(isolate, *this)); + TaggedField::load(cage_base, *this)); // If a GC was caused while constructing this object, the elements // pointer may point to a one pointer filler map. 
- if (ElementsAreSafeToExamine(isolate)) { - Map map = fixed_array.map(isolate); + if (ElementsAreSafeToExamine(cage_base)) { + Map map = fixed_array.map(cage_base); if (IsSmiOrObjectElementsKind(kind)) { - DCHECK(map == GetReadOnlyRoots(isolate).fixed_array_map() || - map == GetReadOnlyRoots(isolate).fixed_cow_array_map()); + DCHECK(map == GetReadOnlyRoots(cage_base).fixed_array_map() || + map == GetReadOnlyRoots(cage_base).fixed_cow_array_map()); } else if (IsDoubleElementsKind(kind)) { - DCHECK(fixed_array.IsFixedDoubleArray(isolate) || - fixed_array == GetReadOnlyRoots(isolate).empty_fixed_array()); + DCHECK(fixed_array.IsFixedDoubleArray(cage_base) || + fixed_array == GetReadOnlyRoots(cage_base).empty_fixed_array()); } else if (kind == DICTIONARY_ELEMENTS) { - DCHECK(fixed_array.IsFixedArray(isolate)); - DCHECK(fixed_array.IsNumberDictionary(isolate)); + DCHECK(fixed_array.IsFixedArray(cage_base)); + DCHECK(fixed_array.IsNumberDictionary(cage_base)); } else { DCHECK(kind > DICTIONARY_ELEMENTS || IsAnyNonextensibleElementsKind(kind)); } DCHECK(!IsSloppyArgumentsElementsKind(kind) || - elements(isolate).IsSloppyArgumentsElements()); + elements(cage_base).IsSloppyArgumentsElements()); } #endif return kind; } DEF_GETTER(JSObject, GetElementsAccessor, ElementsAccessor*) { - return ElementsAccessor::ForKind(GetElementsKind(isolate)); + return ElementsAccessor::ForKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasObjectElements, bool) { - return IsObjectElementsKind(GetElementsKind(isolate)); + return IsObjectElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasSmiElements, bool) { - return IsSmiElementsKind(GetElementsKind(isolate)); + return IsSmiElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasSmiOrObjectElements, bool) { - return IsSmiOrObjectElementsKind(GetElementsKind(isolate)); + return IsSmiOrObjectElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasDoubleElements, bool) { - return IsDoubleElementsKind(GetElementsKind(isolate)); + return IsDoubleElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasHoleyElements, bool) { - return IsHoleyElementsKind(GetElementsKind(isolate)); + return IsHoleyElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasFastElements, bool) { - return IsFastElementsKind(GetElementsKind(isolate)); + return IsFastElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasFastPackedElements, bool) { - return IsFastPackedElementsKind(GetElementsKind(isolate)); + return IsFastPackedElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasDictionaryElements, bool) { - return IsDictionaryElementsKind(GetElementsKind(isolate)); + return IsDictionaryElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasPackedElements, bool) { - return GetElementsKind(isolate) == PACKED_ELEMENTS; + return GetElementsKind(cage_base) == PACKED_ELEMENTS; } DEF_GETTER(JSObject, HasAnyNonextensibleElements, bool) { - return IsAnyNonextensibleElementsKind(GetElementsKind(isolate)); + return IsAnyNonextensibleElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasSealedElements, bool) { - return IsSealedElementsKind(GetElementsKind(isolate)); + return IsSealedElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasNonextensibleElements, bool) { - return IsNonextensibleElementsKind(GetElementsKind(isolate)); + return IsNonextensibleElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasFastArgumentsElements, bool) { - return 
IsFastArgumentsElementsKind(GetElementsKind(isolate)); + return IsFastArgumentsElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasSlowArgumentsElements, bool) { - return IsSlowArgumentsElementsKind(GetElementsKind(isolate)); + return IsSlowArgumentsElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasSloppyArgumentsElements, bool) { - return IsSloppyArgumentsElementsKind(GetElementsKind(isolate)); + return IsSloppyArgumentsElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasStringWrapperElements, bool) { - return IsStringWrapperElementsKind(GetElementsKind(isolate)); + return IsStringWrapperElementsKind(GetElementsKind(cage_base)); } DEF_GETTER(JSObject, HasFastStringWrapperElements, bool) { - return GetElementsKind(isolate) == FAST_STRING_WRAPPER_ELEMENTS; + return GetElementsKind(cage_base) == FAST_STRING_WRAPPER_ELEMENTS; } DEF_GETTER(JSObject, HasSlowStringWrapperElements, bool) { - return GetElementsKind(isolate) == SLOW_STRING_WRAPPER_ELEMENTS; + return GetElementsKind(cage_base) == SLOW_STRING_WRAPPER_ELEMENTS; } DEF_GETTER(JSObject, HasTypedArrayElements, bool) { - DCHECK(!elements(isolate).is_null()); - return map(isolate).has_typed_array_elements(); + DCHECK(!elements(cage_base).is_null()); + return map(cage_base).has_typed_array_elements(); } -#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \ - DEF_GETTER(JSObject, HasFixed##Type##Elements, bool) { \ - return map(isolate).elements_kind() == TYPE##_ELEMENTS; \ +#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \ + DEF_GETTER(JSObject, HasFixed##Type##Elements, bool) { \ + return map(cage_base).elements_kind() == TYPE##_ELEMENTS; \ } TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK) @@ -581,21 +583,21 @@ TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK) #undef FIXED_TYPED_ELEMENTS_CHECK DEF_GETTER(JSObject, HasNamedInterceptor, bool) { - return map(isolate).has_named_interceptor(); + return map(cage_base).has_named_interceptor(); } DEF_GETTER(JSObject, HasIndexedInterceptor, bool) { - return map(isolate).has_indexed_interceptor(); + return map(cage_base).has_indexed_interceptor(); } RELEASE_ACQUIRE_ACCESSORS_CHECKED2(JSGlobalObject, global_dictionary, GlobalDictionary, kPropertiesOrHashOffset, - !HasFastProperties(isolate), true) + !HasFastProperties(cage_base), true) DEF_GETTER(JSObject, element_dictionary, NumberDictionary) { - DCHECK(HasDictionaryElements(isolate) || - HasSlowStringWrapperElements(isolate)); - return NumberDictionary::cast(elements(isolate)); + DCHECK(HasDictionaryElements(cage_base) || + HasSlowStringWrapperElements(cage_base)); + return NumberDictionary::cast(elements(cage_base)); } void JSReceiver::initialize_properties(Isolate* isolate) { @@ -617,38 +619,34 @@ void JSReceiver::initialize_properties(Isolate* isolate) { } DEF_GETTER(JSReceiver, HasFastProperties, bool) { - DCHECK(raw_properties_or_hash(isolate).IsSmi() || - ((raw_properties_or_hash(isolate).IsGlobalDictionary(isolate) || - raw_properties_or_hash(isolate).IsNameDictionary(isolate) || - raw_properties_or_hash(isolate).IsSwissNameDictionary(isolate)) == - map(isolate).is_dictionary_map())); - return !map(isolate).is_dictionary_map(); + DCHECK(raw_properties_or_hash(cage_base).IsSmi() || + ((raw_properties_or_hash(cage_base).IsGlobalDictionary(cage_base) || + raw_properties_or_hash(cage_base).IsNameDictionary(cage_base) || + raw_properties_or_hash(cage_base).IsSwissNameDictionary( + cage_base)) == map(cage_base).is_dictionary_map())); + return !map(cage_base).is_dictionary_map(); } 
DEF_GETTER(JSReceiver, property_dictionary, NameDictionary) { - DCHECK(!IsJSGlobalObject(isolate)); - DCHECK(!HasFastProperties(isolate)); + DCHECK(!IsJSGlobalObject(cage_base)); + DCHECK(!HasFastProperties(cage_base)); DCHECK(!V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL); - // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by - // i::GetIsolateForPtrCompr(HeapObject). - Object prop = raw_properties_or_hash(isolate); + Object prop = raw_properties_or_hash(cage_base); if (prop.IsSmi()) { - return GetReadOnlyRoots(isolate).empty_property_dictionary(); + return GetReadOnlyRoots(cage_base).empty_property_dictionary(); } return NameDictionary::cast(prop); } DEF_GETTER(JSReceiver, property_dictionary_swiss, SwissNameDictionary) { - DCHECK(!IsJSGlobalObject(isolate)); - DCHECK(!HasFastProperties(isolate)); + DCHECK(!IsJSGlobalObject(cage_base)); + DCHECK(!HasFastProperties(cage_base)); DCHECK(V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL); - // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by - // i::GetIsolateForPtrCompr(HeapObject). - Object prop = raw_properties_or_hash(isolate); + Object prop = raw_properties_or_hash(cage_base); if (prop.IsSmi()) { - return GetReadOnlyRoots(isolate).empty_swiss_property_dictionary(); + return GetReadOnlyRoots(cage_base).empty_swiss_property_dictionary(); } return SwissNameDictionary::cast(prop); } @@ -656,12 +654,10 @@ DEF_GETTER(JSReceiver, property_dictionary_swiss, SwissNameDictionary) { // TODO(gsathya): Pass isolate directly to this function and access // the heap from this. DEF_GETTER(JSReceiver, property_array, PropertyArray) { - DCHECK(HasFastProperties(isolate)); - // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by - // i::GetIsolateForPtrCompr(HeapObject). - Object prop = raw_properties_or_hash(isolate); - if (prop.IsSmi() || prop == GetReadOnlyRoots(isolate).empty_fixed_array()) { - return GetReadOnlyRoots(isolate).empty_property_array(); + DCHECK(HasFastProperties(cage_base)); + Object prop = raw_properties_or_hash(cage_base); + if (prop.IsSmi() || prop == GetReadOnlyRoots(cage_base).empty_fixed_array()) { + return GetReadOnlyRoots(cage_base).empty_property_array(); } return PropertyArray::cast(prop); } diff --git a/src/objects/js-objects.h b/src/objects/js-objects.h index aeb2504fc4..b1f22ed8f6 100644 --- a/src/objects/js-objects.h +++ b/src/objects/js-objects.h @@ -319,7 +319,7 @@ class JSObject : public TorqueGeneratedJSObject { // acquire/release semantics ever become necessary, the default setter should // be reverted to non-atomic behavior, and setters with explicit tags // introduced and used when required. - FixedArrayBase elements(IsolateRoot isolate, + FixedArrayBase elements(PtrComprCageBase cage_base, AcquireLoadTag tag) const = delete; void set_elements(FixedArrayBase value, ReleaseStoreTag tag, WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete; @@ -652,7 +652,8 @@ class JSObject : public TorqueGeneratedJSObject { Representation representation, FieldIndex index); inline Object RawFastPropertyAt(FieldIndex index) const; - inline Object RawFastPropertyAt(IsolateRoot isolate, FieldIndex index) const; + inline Object RawFastPropertyAt(PtrComprCageBase cage_base, + FieldIndex index) const; inline void FastPropertyAtPut(FieldIndex index, Object value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER); @@ -742,7 +743,8 @@ class JSObject : public TorqueGeneratedJSObject { // If a GC was caused while constructing this object, the elements pointer // may point to a one pointer filler map. 
The object won't be rooted, but // our heap verification code could stumble across it. - V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(IsolateRoot isolate) const; + V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine( + PtrComprCageBase cage_base) const; #endif Object SlowReverseLookup(Object value); diff --git a/src/objects/literal-objects-inl.h b/src/objects/literal-objects-inl.h index 4a2329ee55..26c0829f3e 100644 --- a/src/objects/literal-objects-inl.h +++ b/src/objects/literal-objects-inl.h @@ -29,26 +29,26 @@ SMI_ACCESSORS(ObjectBoilerplateDescription, flags, FixedArray::OffsetOfElementAt(kLiteralTypeOffset)) Object ObjectBoilerplateDescription::name(int index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return name(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return name(cage_base, index); } -Object ObjectBoilerplateDescription::name(IsolateRoot isolate, +Object ObjectBoilerplateDescription::name(PtrComprCageBase cage_base, int index) const { // get() already checks for out of bounds access, but we do not want to allow // access to the last element, if it is the number of properties. DCHECK_NE(size(), index); - return get(isolate, 2 * index + kDescriptionStartIndex); + return get(cage_base, 2 * index + kDescriptionStartIndex); } Object ObjectBoilerplateDescription::value(int index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return value(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return value(cage_base, index); } -Object ObjectBoilerplateDescription::value(IsolateRoot isolate, +Object ObjectBoilerplateDescription::value(PtrComprCageBase cage_base, int index) const { - return get(isolate, 2 * index + 1 + kDescriptionStartIndex); + return get(cage_base, 2 * index + 1 + kDescriptionStartIndex); } void ObjectBoilerplateDescription::set_key_value(int index, Object key, diff --git a/src/objects/literal-objects.h b/src/objects/literal-objects.h index 78fa53011b..3377bcd4c2 100644 --- a/src/objects/literal-objects.h +++ b/src/objects/literal-objects.h @@ -28,10 +28,10 @@ class ClassLiteral; class ObjectBoilerplateDescription : public FixedArray { public: inline Object name(int index) const; - inline Object name(IsolateRoot isolate, int index) const; + inline Object name(PtrComprCageBase cage_base, int index) const; inline Object value(int index) const; - inline Object value(IsolateRoot isolate, int index) const; + inline Object value(PtrComprCageBase cage_base, int index) const; inline void set_key_value(int index, Object key, Object value); diff --git a/src/objects/map-inl.h b/src/objects/map-inl.h index 4bc32125b5..3a1265d848 100644 --- a/src/objects/map-inl.h +++ b/src/objects/map-inl.h @@ -107,14 +107,14 @@ BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter, DEF_GETTER(Map, GetNamedInterceptor, InterceptorInfo) { DCHECK(has_named_interceptor()); - FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate); - return InterceptorInfo::cast(info.GetNamedPropertyHandler(isolate)); + FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base); + return InterceptorInfo::cast(info.GetNamedPropertyHandler(cage_base)); } DEF_GETTER(Map, GetIndexedInterceptor, InterceptorInfo) { DCHECK(has_indexed_interceptor()); - FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate); - return InterceptorInfo::cast(info.GetIndexedPropertyHandler(isolate)); + FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base); + return 
InterceptorInfo::cast(info.GetIndexedPropertyHandler(cage_base)); } bool Map::IsMostGeneralFieldType(Representation representation, @@ -657,19 +657,18 @@ void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) { #endif } -bool Map::ConcurrentIsMap(IsolateRoot isolate, const Object& object) const { - return object.IsHeapObject() && HeapObject::cast(object).map(isolate) == - GetReadOnlyRoots(isolate).meta_map(); +bool Map::ConcurrentIsMap(PtrComprCageBase cage_base, + const Object& object) const { + return object.IsHeapObject() && HeapObject::cast(object).map(cage_base) == + GetReadOnlyRoots(cage_base).meta_map(); } DEF_GETTER(Map, GetBackPointer, HeapObject) { - Object object = constructor_or_back_pointer(isolate); - if (ConcurrentIsMap(isolate, object)) { + Object object = constructor_or_back_pointer(cage_base); + if (ConcurrentIsMap(cage_base, object)) { return Map::cast(object); } - // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by - // i::GetIsolateForPtrCompr(HeapObject). - return GetReadOnlyRoots(isolate).undefined_value(); + return GetReadOnlyRoots(cage_base).undefined_value(); } void Map::SetBackPointer(HeapObject value, WriteBarrierMode mode) { @@ -709,11 +708,11 @@ bool Map::IsPrototypeValidityCellValid() const { } DEF_GETTER(Map, GetConstructor, Object) { - Object maybe_constructor = constructor_or_back_pointer(isolate); + Object maybe_constructor = constructor_or_back_pointer(cage_base); // Follow any back pointers. - while (ConcurrentIsMap(isolate, maybe_constructor)) { + while (ConcurrentIsMap(cage_base, maybe_constructor)) { maybe_constructor = - Map::cast(maybe_constructor).constructor_or_back_pointer(isolate); + Map::cast(maybe_constructor).constructor_or_back_pointer(cage_base); } return maybe_constructor; } @@ -730,13 +729,13 @@ Object Map::TryGetConstructor(Isolate* isolate, int max_steps) { } DEF_GETTER(Map, GetFunctionTemplateInfo, FunctionTemplateInfo) { - Object constructor = GetConstructor(isolate); - if (constructor.IsJSFunction(isolate)) { + Object constructor = GetConstructor(cage_base); + if (constructor.IsJSFunction(cage_base)) { // TODO(ishell): IsApiFunction(isolate) and get_api_func_data(isolate) - DCHECK(JSFunction::cast(constructor).shared(isolate).IsApiFunction()); - return JSFunction::cast(constructor).shared(isolate).get_api_func_data(); + DCHECK(JSFunction::cast(constructor).shared(cage_base).IsApiFunction()); + return JSFunction::cast(constructor).shared(cage_base).get_api_func_data(); } - DCHECK(constructor.IsFunctionTemplateInfo(isolate)); + DCHECK(constructor.IsFunctionTemplateInfo(cage_base)); return FunctionTemplateInfo::cast(constructor); } @@ -791,7 +790,7 @@ int NormalizedMapCache::GetIndex(Handle map) { } DEF_GETTER(HeapObject, IsNormalizedMapCache, bool) { - if (!IsWeakFixedArray(isolate)) return false; + if (!IsWeakFixedArray(cage_base)) return false; if (WeakFixedArray::cast(*this).length() != NormalizedMapCache::kEntries) { return false; } diff --git a/src/objects/map.h b/src/objects/map.h index 68ba2d7fe9..6eeb68d2cc 100644 --- a/src/objects/map.h +++ b/src/objects/map.h @@ -943,7 +943,7 @@ class Map : public HeapObject { // This is the equivalent of IsMap() but avoids reading the instance type so // it can be used concurrently without acquire load. - V8_INLINE bool ConcurrentIsMap(IsolateRoot isolate, + V8_INLINE bool ConcurrentIsMap(PtrComprCageBase cage_base, const Object& object) const; // Use the high-level instance_descriptors/SetInstanceDescriptors instead. 
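The ObjectBoilerplateDescription::name()/value() accessors earlier (literal-objects-inl.h) imply an interleaved layout: after the header slot(s), property i is stored as a (name, value) pair at indices 2*i and 2*i+1. A stand-alone sketch of that layout (not V8 code; the single header slot is an assumption standing in for kDescriptionStartIndex):

  #include <cstddef>
  #include <string>
  #include <utility>
  #include <vector>

  class BoilerplateDescriptionSketch {
   public:
    // Assumed: one leading header slot, standing in for kDescriptionStartIndex.
    static constexpr std::size_t kDescriptionStartIndex = 1;

    BoilerplateDescriptionSketch() : slots_(kDescriptionStartIndex) {}

    void AddProperty(std::string name, std::string value) {
      slots_.push_back(std::move(name));
      slots_.push_back(std::move(value));
    }

    // Same index arithmetic as the name()/value() accessors in the diff.
    const std::string& name(std::size_t index) const {
      return slots_[2 * index + kDescriptionStartIndex];
    }
    const std::string& value(std::size_t index) const {
      return slots_[2 * index + 1 + kDescriptionStartIndex];
    }

   private:
    std::vector<std::string> slots_;
  };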
@@ -976,7 +976,8 @@ class NormalizedMapCache : public WeakFixedArray { DECL_VERIFIER(NormalizedMapCache) private: - friend bool HeapObject::IsNormalizedMapCache(IsolateRoot isolate) const; + friend bool HeapObject::IsNormalizedMapCache( + PtrComprCageBase cage_base) const; static const int kEntries = 64; diff --git a/src/objects/maybe-object-inl.h b/src/objects/maybe-object-inl.h index 6cabc52312..4b06fec5cb 100644 --- a/src/objects/maybe-object-inl.h +++ b/src/objects/maybe-object-inl.h @@ -78,13 +78,14 @@ HeapObjectReference HeapObjectReference::From(Object object, } // static -HeapObjectReference HeapObjectReference::ClearedValue(IsolateRoot isolate) { +HeapObjectReference HeapObjectReference::ClearedValue( + PtrComprCageBase cage_base) { // Construct cleared weak ref value. #ifdef V8_COMPRESS_POINTERS // This is necessary to make pointer decompression computation also // suitable for cleared weak references. Address raw_value = - DecompressTaggedPointer(isolate, kClearedWeakHeapObjectLower32); + DecompressTaggedPointer(cage_base, kClearedWeakHeapObjectLower32); #else Address raw_value = kClearedWeakHeapObjectLower32; #endif diff --git a/src/objects/maybe-object.h b/src/objects/maybe-object.h index 3fe69ee5ec..0393ef6497 100644 --- a/src/objects/maybe-object.h +++ b/src/objects/maybe-object.h @@ -54,7 +54,7 @@ class HeapObjectReference : public MaybeObject { V8_INLINE static HeapObjectReference From(Object object, HeapObjectReferenceType type); - V8_INLINE static HeapObjectReference ClearedValue(IsolateRoot isolate); + V8_INLINE static HeapObjectReference ClearedValue(PtrComprCageBase cage_base); template V8_INLINE static void Update(THeapObjectSlot slot, HeapObject value); diff --git a/src/objects/name-inl.h b/src/objects/name-inl.h index f07e1bb9ce..93c0cd3fa9 100644 --- a/src/objects/name-inl.h +++ b/src/objects/name-inl.h @@ -56,7 +56,7 @@ void Symbol::set_is_private_name() { } DEF_GETTER(Name, IsUniqueName, bool) { - uint32_t type = map(isolate).instance_type(); + uint32_t type = map(cage_base).instance_type(); bool result = (type & (kIsNotStringMask | kIsNotInternalizedMask)) != (kStringTag | kNotInternalizedTag); SLOW_DCHECK(result == HeapObject::IsUniqueName()); @@ -104,23 +104,23 @@ uint32_t Name::hash() const { } DEF_GETTER(Name, IsInterestingSymbol, bool) { - return IsSymbol(isolate) && Symbol::cast(*this).is_interesting_symbol(); + return IsSymbol(cage_base) && Symbol::cast(*this).is_interesting_symbol(); } DEF_GETTER(Name, IsPrivate, bool) { - return this->IsSymbol(isolate) && Symbol::cast(*this).is_private(); + return this->IsSymbol(cage_base) && Symbol::cast(*this).is_private(); } DEF_GETTER(Name, IsPrivateName, bool) { bool is_private_name = - this->IsSymbol(isolate) && Symbol::cast(*this).is_private_name(); + this->IsSymbol(cage_base) && Symbol::cast(*this).is_private_name(); DCHECK_IMPLIES(is_private_name, IsPrivate()); return is_private_name; } DEF_GETTER(Name, IsPrivateBrand, bool) { bool is_private_brand = - this->IsSymbol(isolate) && Symbol::cast(*this).is_private_brand(); + this->IsSymbol(cage_base) && Symbol::cast(*this).is_private_brand(); DCHECK_IMPLIES(is_private_brand, IsPrivateName()); return is_private_brand; } diff --git a/src/objects/object-macros.h b/src/objects/object-macros.h index 2d0abc1bb2..2a742d5d77 100644 --- a/src/objects/object-macros.h +++ b/src/objects/object-macros.h @@ -86,14 +86,14 @@ // parameter. 
#define DECL_GETTER(name, type) \ inline type name() const; \ - inline type name(IsolateRoot isolate) const; + inline type name(PtrComprCageBase cage_base) const; -#define DEF_GETTER(holder, name, type) \ - type holder::name() const { \ - IsolateRoot isolate = GetIsolateForPtrCompr(*this); \ - return holder::name(isolate); \ - } \ - type holder::name(IsolateRoot isolate) const +#define DEF_GETTER(holder, name, type) \ + type holder::name() const { \ + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \ + return holder::name(cage_base); \ + } \ + type holder::name(PtrComprCageBase cage_base) const #define DECL_SETTER(name, type) \ inline void set_##name(type value, \ @@ -105,7 +105,7 @@ #define DECL_ACCESSORS_LOAD_TAG(name, type, tag_type) \ inline type name(tag_type tag) const; \ - inline type name(IsolateRoot isolate, tag_type) const; + inline type name(PtrComprCageBase cage_base, tag_type) const; #define DECL_ACCESSORS_STORE_TAG(name, type, tag_type) \ inline void set_##name(type value, tag_type, \ @@ -179,7 +179,7 @@ #define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \ set_condition) \ DEF_GETTER(holder, name, type) { \ - type value = TaggedField::load(isolate, *this); \ + type value = TaggedField::load(cage_base, *this); \ DCHECK(get_condition); \ return value; \ } \ @@ -215,11 +215,11 @@ #define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \ set_condition) \ type holder::name(RelaxedLoadTag tag) const { \ - IsolateRoot isolate = GetIsolateForPtrCompr(*this); \ - return holder::name(isolate, tag); \ + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \ + return holder::name(cage_base, tag); \ } \ - type holder::name(IsolateRoot isolate, RelaxedLoadTag) const { \ - type value = TaggedField::Relaxed_Load(isolate, *this); \ + type holder::name(PtrComprCageBase cage_base, RelaxedLoadTag) const { \ + type value = TaggedField::Relaxed_Load(cage_base, *this); \ DCHECK(get_condition); \ return value; \ } \ @@ -236,22 +236,22 @@ #define RELAXED_ACCESSORS(holder, name, type, offset) \ RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, true) -#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \ - get_condition, set_condition) \ - type holder::name(AcquireLoadTag tag) const { \ - IsolateRoot isolate = GetIsolateForPtrCompr(*this); \ - return holder::name(isolate, tag); \ - } \ - type holder::name(IsolateRoot isolate, AcquireLoadTag) const { \ - type value = TaggedField::Acquire_Load(isolate, *this); \ - DCHECK(get_condition); \ - return value; \ - } \ - void holder::set_##name(type value, ReleaseStoreTag, \ - WriteBarrierMode mode) { \ - DCHECK(set_condition); \ - TaggedField::Release_Store(*this, value); \ - CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \ +#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \ + get_condition, set_condition) \ + type holder::name(AcquireLoadTag tag) const { \ + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \ + return holder::name(cage_base, tag); \ + } \ + type holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const { \ + type value = TaggedField::Acquire_Load(cage_base, *this); \ + DCHECK(get_condition); \ + return value; \ + } \ + void holder::set_##name(type value, ReleaseStoreTag, \ + WriteBarrierMode mode) { \ + DCHECK(set_condition); \ + TaggedField::Release_Store(*this, value); \ + CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \ } #define RELEASE_ACQUIRE_ACCESSORS_CHECKED(holder, name, type, offset, \ @@ -266,7 +266,7 @@ 
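After the object-macros.h change above, DEF_GETTER(holder, name, type) emits the same two-overload shape seen throughout this patch: the no-argument getter computes the cage base with GetPtrComprCageBase(*this) and forwards to the overload that takes it, whose body follows the macro at the use site. A compilable stand-alone sketch of the mechanics (not V8 code; the *_SKETCH names are illustrative):

  #include <cstdint>

  struct CageBaseSketch {
    std::uintptr_t address = 0;
  };

  template <typename T>
  CageBaseSketch GetCageBaseSketch(const T&) {
    return CageBaseSketch{};
  }

  // Same shape as the updated DEF_GETTER: the no-argument getter derives the
  // cage base and forwards to the overload that takes it explicitly.
  #define DEF_GETTER_SKETCH(holder, name, type)            \
    type holder::name() const {                            \
      CageBaseSketch cage_base = GetCageBaseSketch(*this); \
      return holder::name(cage_base);                      \
    }                                                      \
    type holder::name(CageBaseSketch cage_base) const

  class HolderSketch {
   public:
    int field() const;
    int field(CageBaseSketch cage_base) const;

   private:
    int field_ = 42;
  };

  DEF_GETTER_SKETCH(HolderSketch, field, int) {
    (void)cage_base;  // a real getter would load a tagged field via the base
    return field_;
  }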
set_condition) \ DEF_GETTER(holder, name, MaybeObject) { \ MaybeObject value = \ - TaggedField::load(isolate, *this); \ + TaggedField::load(cage_base, *this); \ DCHECK(get_condition); \ return value; \ } \ @@ -282,23 +282,23 @@ #define WEAK_ACCESSORS(holder, name, offset) \ WEAK_ACCESSORS_CHECKED(holder, name, offset, true) -#define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED2(holder, name, offset, \ - get_condition, set_condition) \ - MaybeObject holder::name(AcquireLoadTag tag) const { \ - IsolateRoot isolate = GetIsolateForPtrCompr(*this); \ - return holder::name(isolate, tag); \ - } \ - MaybeObject holder::name(IsolateRoot isolate, AcquireLoadTag) const { \ - MaybeObject value = \ - TaggedField::Acquire_Load(isolate, *this); \ - DCHECK(get_condition); \ - return value; \ - } \ - void holder::set_##name(MaybeObject value, ReleaseStoreTag, \ - WriteBarrierMode mode) { \ - DCHECK(set_condition); \ - TaggedField::Release_Store(*this, value); \ - CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode); \ +#define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED2(holder, name, offset, \ + get_condition, set_condition) \ + MaybeObject holder::name(AcquireLoadTag tag) const { \ + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \ + return holder::name(cage_base, tag); \ + } \ + MaybeObject holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const { \ + MaybeObject value = \ + TaggedField::Acquire_Load(cage_base, *this); \ + DCHECK(get_condition); \ + return value; \ + } \ + void holder::set_##name(MaybeObject value, ReleaseStoreTag, \ + WriteBarrierMode mode) { \ + DCHECK(set_condition); \ + TaggedField::Release_Store(*this, value); \ + CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode); \ } #define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED(holder, name, offset, \ @@ -380,9 +380,9 @@ return instance_type == forinstancetype; \ } -#define TYPE_CHECKER(type, ...) \ - DEF_GETTER(HeapObject, Is##type, bool) { \ - return InstanceTypeChecker::Is##type(map(isolate).instance_type()); \ +#define TYPE_CHECKER(type, ...) 
\ + DEF_GETTER(HeapObject, Is##type, bool) { \ + return InstanceTypeChecker::Is##type(map(cage_base).instance_type()); \ } #define RELAXED_INT16_ACCESSORS(holder, name, offset) \ diff --git a/src/objects/objects-inl.h b/src/objects/objects-inl.h index df2f0c64ab..c94feca250 100644 --- a/src/objects/objects-inl.h +++ b/src/objects/objects-inl.h @@ -65,19 +65,19 @@ int PropertyDetails::field_width_in_words() const { } DEF_GETTER(HeapObject, IsClassBoilerplate, bool) { - return IsFixedArrayExact(isolate); + return IsFixedArrayExact(cage_base); } bool Object::IsTaggedIndex() const { return IsSmi() && TaggedIndex::IsValid(TaggedIndex(ptr()).value()); } -#define IS_TYPE_FUNCTION_DEF(type_) \ - bool Object::Is##type_() const { \ - return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \ - } \ - bool Object::Is##type_(IsolateRoot isolate) const { \ - return IsHeapObject() && HeapObject::cast(*this).Is##type_(isolate); \ +#define IS_TYPE_FUNCTION_DEF(type_) \ + bool Object::Is##type_() const { \ + return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \ + } \ + bool Object::Is##type_(PtrComprCageBase cage_base) const { \ + return IsHeapObject() && HeapObject::cast(*this).Is##type_(cage_base); \ } HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF) IS_TYPE_FUNCTION_DEF(HashTableBase) @@ -148,127 +148,125 @@ bool HeapObject::IsNullOrUndefined() const { } DEF_GETTER(HeapObject, IsUniqueName, bool) { - return IsInternalizedString(isolate) || IsSymbol(isolate); + return IsInternalizedString(cage_base) || IsSymbol(cage_base); } DEF_GETTER(HeapObject, IsFunction, bool) { return IsJSFunctionOrBoundFunction(); } -DEF_GETTER(HeapObject, IsCallable, bool) { return map(isolate).is_callable(); } +DEF_GETTER(HeapObject, IsCallable, bool) { + return map(cage_base).is_callable(); +} DEF_GETTER(HeapObject, IsCallableJSProxy, bool) { - return IsCallable(isolate) && IsJSProxy(isolate); + return IsCallable(cage_base) && IsJSProxy(cage_base); } DEF_GETTER(HeapObject, IsCallableApiObject, bool) { - InstanceType type = map(isolate).instance_type(); - return IsCallable(isolate) && + InstanceType type = map(cage_base).instance_type(); + return IsCallable(cage_base) && (type == JS_API_OBJECT_TYPE || type == JS_SPECIAL_API_OBJECT_TYPE); } DEF_GETTER(HeapObject, IsNonNullForeign, bool) { - return IsForeign(isolate) && + return IsForeign(cage_base) && Foreign::cast(*this).foreign_address() != kNullAddress; } DEF_GETTER(HeapObject, IsConstructor, bool) { - return map(isolate).is_constructor(); + return map(cage_base).is_constructor(); } DEF_GETTER(HeapObject, IsSourceTextModuleInfo, bool) { - // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by - // i::GetIsolateForPtrCompr(HeapObject). 
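TYPE_CHECKER and IS_TYPE_FUNCTION_DEF above stamp out the same pair per type: Is##Type() delegating to Is##Type(cage_base), which reads the instance type from map(cage_base). A stand-alone sketch of the token-pasting pattern (not V8 code; the *Sketch names are illustrative):

  #include <cstdint>

  struct CageBaseSketch {
    std::uintptr_t address = 0;
  };

  enum class InstanceTypeSketch { kSymbol, kString };

  class HeapObjectSketch {
   public:
    explicit HeapObjectSketch(InstanceTypeSketch type) : type_(type) {}

    // Stand-in for map(cage_base).instance_type().
    InstanceTypeSketch instance_type(CageBaseSketch /*cage_base*/) const {
      return type_;
    }

  // Same shape as TYPE_CHECKER above: one delegating pair per type.
  #define TYPE_CHECKER_SKETCH(Type)                                   \
    bool Is##Type() const { return Is##Type(CageBaseSketch{}); }      \
    bool Is##Type(CageBaseSketch cage_base) const {                   \
      return instance_type(cage_base) == InstanceTypeSketch::k##Type; \
    }

    TYPE_CHECKER_SKETCH(Symbol)
    TYPE_CHECKER_SKETCH(String)
  #undef TYPE_CHECKER_SKETCH

   private:
    InstanceTypeSketch type_;
  };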
- return map(isolate) == GetReadOnlyRoots(isolate).module_info_map(); + return map(cage_base) == GetReadOnlyRoots(cage_base).module_info_map(); } DEF_GETTER(HeapObject, IsConsString, bool) { - if (!IsString(isolate)) return false; - return StringShape(String::cast(*this).map(isolate)).IsCons(); + if (!IsString(cage_base)) return false; + return StringShape(String::cast(*this).map(cage_base)).IsCons(); } DEF_GETTER(HeapObject, IsThinString, bool) { - if (!IsString(isolate)) return false; - return StringShape(String::cast(*this).map(isolate)).IsThin(); + if (!IsString(cage_base)) return false; + return StringShape(String::cast(*this).map(cage_base)).IsThin(); } DEF_GETTER(HeapObject, IsSlicedString, bool) { - if (!IsString(isolate)) return false; - return StringShape(String::cast(*this).map(isolate)).IsSliced(); + if (!IsString(cage_base)) return false; + return StringShape(String::cast(*this).map(cage_base)).IsSliced(); } DEF_GETTER(HeapObject, IsSeqString, bool) { - if (!IsString(isolate)) return false; - return StringShape(String::cast(*this).map(isolate)).IsSequential(); + if (!IsString(cage_base)) return false; + return StringShape(String::cast(*this).map(cage_base)).IsSequential(); } DEF_GETTER(HeapObject, IsSeqOneByteString, bool) { - if (!IsString(isolate)) return false; - return StringShape(String::cast(*this).map(isolate)).IsSequential() && - String::cast(*this).IsOneByteRepresentation(isolate); + if (!IsString(cage_base)) return false; + return StringShape(String::cast(*this).map(cage_base)).IsSequential() && + String::cast(*this).IsOneByteRepresentation(cage_base); } DEF_GETTER(HeapObject, IsSeqTwoByteString, bool) { - if (!IsString(isolate)) return false; - return StringShape(String::cast(*this).map(isolate)).IsSequential() && - String::cast(*this).IsTwoByteRepresentation(isolate); + if (!IsString(cage_base)) return false; + return StringShape(String::cast(*this).map(cage_base)).IsSequential() && + String::cast(*this).IsTwoByteRepresentation(cage_base); } DEF_GETTER(HeapObject, IsExternalOneByteString, bool) { - if (!IsString(isolate)) return false; - return StringShape(String::cast(*this).map(isolate)).IsExternal() && - String::cast(*this).IsOneByteRepresentation(isolate); + if (!IsString(cage_base)) return false; + return StringShape(String::cast(*this).map(cage_base)).IsExternal() && + String::cast(*this).IsOneByteRepresentation(cage_base); } DEF_GETTER(HeapObject, IsExternalTwoByteString, bool) { - if (!IsString(isolate)) return false; - return StringShape(String::cast(*this).map(isolate)).IsExternal() && - String::cast(*this).IsTwoByteRepresentation(isolate); + if (!IsString(cage_base)) return false; + return StringShape(String::cast(*this).map(cage_base)).IsExternal() && + String::cast(*this).IsTwoByteRepresentation(cage_base); } bool Object::IsNumber() const { if (IsSmi()) return true; HeapObject this_heap_object = HeapObject::cast(*this); - IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object); - return this_heap_object.IsHeapNumber(isolate); + PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object); + return this_heap_object.IsHeapNumber(cage_base); } -bool Object::IsNumber(IsolateRoot isolate) const { - return IsSmi() || IsHeapNumber(isolate); +bool Object::IsNumber(PtrComprCageBase cage_base) const { + return IsSmi() || IsHeapNumber(cage_base); } bool Object::IsNumeric() const { if (IsSmi()) return true; HeapObject this_heap_object = HeapObject::cast(*this); - IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object); - return 
this_heap_object.IsHeapNumber(isolate) || - this_heap_object.IsBigInt(isolate); + PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object); + return this_heap_object.IsHeapNumber(cage_base) || + this_heap_object.IsBigInt(cage_base); } -bool Object::IsNumeric(IsolateRoot isolate) const { - return IsNumber(isolate) || IsBigInt(isolate); +bool Object::IsNumeric(PtrComprCageBase cage_base) const { + return IsNumber(cage_base) || IsBigInt(cage_base); } DEF_GETTER(HeapObject, IsFreeSpaceOrFiller, bool) { - InstanceType instance_type = map(isolate).instance_type(); + InstanceType instance_type = map(cage_base).instance_type(); return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE; } DEF_GETTER(HeapObject, IsArrayList, bool) { - // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by - // i::GetIsolateForPtrCompr(HeapObject). - ReadOnlyRoots roots = GetReadOnlyRoots(isolate); + ReadOnlyRoots roots = GetReadOnlyRoots(cage_base); return *this == roots.empty_fixed_array() || - map(isolate) == roots.array_list_map(); + map(cage_base) == roots.array_list_map(); } DEF_GETTER(HeapObject, IsRegExpMatchInfo, bool) { - return IsFixedArrayExact(isolate); + return IsFixedArrayExact(cage_base); } DEF_GETTER(HeapObject, IsDeoptimizationData, bool) { // Must be a fixed array. - if (!IsFixedArrayExact(isolate)) return false; + if (!IsFixedArrayExact(cage_base)) return false; // There's no sure way to detect the difference between a fixed array and // a deoptimization data array. Since this is used for asserts we can @@ -282,14 +280,14 @@ DEF_GETTER(HeapObject, IsDeoptimizationData, bool) { } DEF_GETTER(HeapObject, IsHandlerTable, bool) { - if (!IsFixedArrayExact(isolate)) return false; + if (!IsFixedArrayExact(cage_base)) return false; // There's actually no way to see the difference between a fixed array and // a handler table array. return true; } DEF_GETTER(HeapObject, IsTemplateList, bool) { - if (!IsFixedArrayExact(isolate)) return false; + if (!IsFixedArrayExact(cage_base)) return false; // There's actually no way to see the difference between a fixed array and // a template list. if (FixedArray::cast(*this).length() < 1) return false; @@ -297,84 +295,86 @@ DEF_GETTER(HeapObject, IsTemplateList, bool) { } DEF_GETTER(HeapObject, IsDependentCode, bool) { - if (!IsWeakFixedArray(isolate)) return false; + if (!IsWeakFixedArray(cage_base)) return false; // There's actually no way to see the difference between a weak fixed array // and a dependent codes array. return true; } DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) { - if (!IsWeakFixedArray(isolate)) return false; + if (!IsWeakFixedArray(cage_base)) return false; // There's actually no way to see the difference between a weak fixed array // and a osr optimized code cache. 
return true; } DEF_GETTER(HeapObject, IsAbstractCode, bool) { - return IsBytecodeArray(isolate) || IsCode(isolate); + return IsBytecodeArray(cage_base) || IsCode(cage_base); } DEF_GETTER(HeapObject, IsStringWrapper, bool) { - return IsJSPrimitiveWrapper(isolate) && - JSPrimitiveWrapper::cast(*this).value().IsString(isolate); + return IsJSPrimitiveWrapper(cage_base) && + JSPrimitiveWrapper::cast(*this).value().IsString(cage_base); } DEF_GETTER(HeapObject, IsBooleanWrapper, bool) { - return IsJSPrimitiveWrapper(isolate) && - JSPrimitiveWrapper::cast(*this).value().IsBoolean(isolate); + return IsJSPrimitiveWrapper(cage_base) && + JSPrimitiveWrapper::cast(*this).value().IsBoolean(cage_base); } DEF_GETTER(HeapObject, IsScriptWrapper, bool) { - return IsJSPrimitiveWrapper(isolate) && - JSPrimitiveWrapper::cast(*this).value().IsScript(isolate); + return IsJSPrimitiveWrapper(cage_base) && + JSPrimitiveWrapper::cast(*this).value().IsScript(cage_base); } DEF_GETTER(HeapObject, IsNumberWrapper, bool) { - return IsJSPrimitiveWrapper(isolate) && - JSPrimitiveWrapper::cast(*this).value().IsNumber(isolate); + return IsJSPrimitiveWrapper(cage_base) && + JSPrimitiveWrapper::cast(*this).value().IsNumber(cage_base); } DEF_GETTER(HeapObject, IsBigIntWrapper, bool) { - return IsJSPrimitiveWrapper(isolate) && - JSPrimitiveWrapper::cast(*this).value().IsBigInt(isolate); + return IsJSPrimitiveWrapper(cage_base) && + JSPrimitiveWrapper::cast(*this).value().IsBigInt(cage_base); } DEF_GETTER(HeapObject, IsSymbolWrapper, bool) { - return IsJSPrimitiveWrapper(isolate) && - JSPrimitiveWrapper::cast(*this).value().IsSymbol(isolate); + return IsJSPrimitiveWrapper(cage_base) && + JSPrimitiveWrapper::cast(*this).value().IsSymbol(cage_base); } -DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(isolate); } +DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(cage_base); } -DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(isolate); } +DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(cage_base); } DEF_GETTER(HeapObject, IsCompilationCacheTable, bool) { - return IsHashTable(isolate); + return IsHashTable(cage_base); } -DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(isolate); } +DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(cage_base); } -DEF_GETTER(HeapObject, IsObjectHashTable, bool) { return IsHashTable(isolate); } +DEF_GETTER(HeapObject, IsObjectHashTable, bool) { + return IsHashTable(cage_base); +} -DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(isolate); } +DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(cage_base); } #if V8_ENABLE_WEBASSEMBLY DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) { // It is not possible to check for the existence of certain properties on the // underlying {JSReceiver} here because that requires calling handlified code. 
- return IsJSReceiver(isolate); + return IsJSReceiver(cage_base); } #endif // V8_ENABLE_WEBASSEMBLY bool Object::IsPrimitive() const { if (IsSmi()) return true; HeapObject this_heap_object = HeapObject::cast(*this); - IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object); - return this_heap_object.map(isolate).IsPrimitiveMap(); + PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object); + return this_heap_object.map(cage_base).IsPrimitiveMap(); } -bool Object::IsPrimitive(IsolateRoot isolate) const { - return IsSmi() || HeapObject::cast(*this).map(isolate).IsPrimitiveMap(); +bool Object::IsPrimitive(PtrComprCageBase cage_base) const { + return IsSmi() || HeapObject::cast(*this).map(cage_base).IsPrimitiveMap(); } // static @@ -387,24 +387,24 @@ Maybe Object::IsArray(Handle object) { } DEF_GETTER(HeapObject, IsUndetectable, bool) { - return map(isolate).is_undetectable(); + return map(cage_base).is_undetectable(); } DEF_GETTER(HeapObject, IsAccessCheckNeeded, bool) { - if (IsJSGlobalProxy(isolate)) { + if (IsJSGlobalProxy(cage_base)) { const JSGlobalProxy proxy = JSGlobalProxy::cast(*this); JSGlobalObject global = proxy.GetIsolate()->context().global_object(); return proxy.IsDetachedFrom(global); } - return map(isolate).is_access_check_needed(); + return map(cage_base).is_access_check_needed(); } -#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \ - bool Object::Is##Name() const { \ - return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \ - } \ - bool Object::Is##Name(IsolateRoot isolate) const { \ - return IsHeapObject() && HeapObject::cast(*this).Is##Name(isolate); \ +#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \ + bool Object::Is##Name() const { \ + return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \ + } \ + bool Object::Is##Name(PtrComprCageBase cage_base) const { \ + return IsHeapObject() && HeapObject::cast(*this).Is##Name(cage_base); \ } STRUCT_LIST(MAKE_STRUCT_PREDICATE) #undef MAKE_STRUCT_PREDICATE @@ -467,17 +467,17 @@ bool Object::FilterKey(PropertyFilter filter) { return false; } -Representation Object::OptimalRepresentation(IsolateRoot isolate) const { +Representation Object::OptimalRepresentation(PtrComprCageBase cage_base) const { if (!FLAG_track_fields) return Representation::Tagged(); if (IsSmi()) { return Representation::Smi(); } HeapObject heap_object = HeapObject::cast(*this); - if (FLAG_track_double_fields && heap_object.IsHeapNumber(isolate)) { + if (FLAG_track_double_fields && heap_object.IsHeapNumber(cage_base)) { return Representation::Double(); } else if (FLAG_track_computed_fields && heap_object.IsUninitialized( - heap_object.GetReadOnlyRoots(isolate))) { + heap_object.GetReadOnlyRoots(cage_base))) { return Representation::None(); } else if (FLAG_track_heap_object_fields) { return Representation::HeapObject(); @@ -486,9 +486,9 @@ Representation Object::OptimalRepresentation(IsolateRoot isolate) const { } } -ElementsKind Object::OptimalElementsKind(IsolateRoot isolate) const { +ElementsKind Object::OptimalElementsKind(PtrComprCageBase cage_base) const { if (IsSmi()) return PACKED_SMI_ELEMENTS; - if (IsNumber(isolate)) return PACKED_DOUBLE_ELEMENTS; + if (IsNumber(cage_base)) return PACKED_DOUBLE_ELEMENTS; return PACKED_ELEMENTS; } @@ -631,9 +631,10 @@ void Object::InitExternalPointerField(size_t offset, Isolate* isolate, i::InitExternalPointerField(field_address(offset), isolate, value, tag); } -Address Object::ReadExternalPointerField(size_t offset, IsolateRoot isolate, +Address Object::ReadExternalPointerField(size_t offset, 
+ PtrComprCageBase isolate_root, ExternalPointerTag tag) const { - return i::ReadExternalPointerField(field_address(offset), isolate, tag); + return i::ReadExternalPointerField(field_address(offset), isolate_root, tag); } void Object::WriteExternalPointerField(size_t offset, Isolate* isolate, @@ -687,16 +688,16 @@ ReadOnlyRoots HeapObject::GetReadOnlyRoots() const { return ReadOnlyHeap::GetReadOnlyRoots(*this); } -ReadOnlyRoots HeapObject::GetReadOnlyRoots(IsolateRoot isolate) const { -#ifdef V8_COMPRESS_POINTERS - DCHECK_NE(isolate.address(), 0); - return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address())); +ReadOnlyRoots HeapObject::GetReadOnlyRoots(PtrComprCageBase cage_base) const { +#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE + DCHECK_NE(cage_base.address(), 0); + return ReadOnlyRoots(Isolate::FromRootAddress(cage_base.address())); #else return GetReadOnlyRoots(); #endif } -DEF_GETTER(HeapObject, map, Map) { return map_word(isolate).ToMap(); } +DEF_GETTER(HeapObject, map, Map) { return map_word(cage_base).ToMap(); } void HeapObject::set_map(Map value) { #ifdef VERIFY_HEAP @@ -715,7 +716,7 @@ void HeapObject::set_map(Map value) { } DEF_GETTER(HeapObject, synchronized_map, Map) { - return synchronized_map_word(isolate).ToMap(); + return synchronized_map_word(cage_base).ToMap(); } void HeapObject::synchronized_set_map(Map value) { @@ -761,7 +762,7 @@ ObjectSlot HeapObject::map_slot() const { } DEF_GETTER(HeapObject, map_word, MapWord) { - return MapField::Relaxed_Load(isolate, *this); + return MapField::Relaxed_Load(cage_base, *this); } void HeapObject::set_map_word(MapWord map_word) { @@ -769,7 +770,7 @@ void HeapObject::set_map_word(MapWord map_word) { } DEF_GETTER(HeapObject, synchronized_map_word, MapWord) { - return MapField::Acquire_Load(isolate, *this); + return MapField::Acquire_Load(cage_base, *this); } void HeapObject::synchronized_set_map_word(MapWord map_word) { diff --git a/src/objects/objects.cc b/src/objects/objects.cc index 276157563e..122bc8718b 100644 --- a/src/objects/objects.cc +++ b/src/objects/objects.cc @@ -5567,7 +5567,8 @@ Handle HashTable::NewInternal( } template -void HashTable::Rehash(IsolateRoot isolate, Derived new_table) { +void HashTable::Rehash(PtrComprCageBase cage_base, + Derived new_table) { DisallowGarbageCollection no_gc; WriteBarrierMode mode = new_table.GetWriteBarrierMode(no_gc); @@ -5575,21 +5576,21 @@ void HashTable::Rehash(IsolateRoot isolate, Derived new_table) { // Copy prefix to new array. for (int i = kPrefixStartIndex; i < kElementsStartIndex; i++) { - new_table.set(i, get(isolate, i), mode); + new_table.set(i, get(cage_base, i), mode); } // Rehash the elements. 
- ReadOnlyRoots roots = GetReadOnlyRoots(isolate); + ReadOnlyRoots roots = GetReadOnlyRoots(cage_base); for (InternalIndex i : this->IterateEntries()) { uint32_t from_index = EntryToIndex(i); - Object k = this->get(isolate, from_index); + Object k = this->get(cage_base, from_index); if (!IsKey(roots, k)) continue; uint32_t hash = Shape::HashForObject(roots, k); uint32_t insertion_index = - EntryToIndex(new_table.FindInsertionEntry(isolate, roots, hash)); - new_table.set_key(insertion_index, get(isolate, from_index), mode); + EntryToIndex(new_table.FindInsertionEntry(cage_base, roots, hash)); + new_table.set_key(insertion_index, get(cage_base, from_index), mode); for (int j = 1; j < Shape::kEntrySize; j++) { - new_table.set(insertion_index + j, get(isolate, from_index + j), mode); + new_table.set(insertion_index + j, get(cage_base, from_index + j), mode); } } new_table.SetNumberOfElements(NumberOfElements()); @@ -5631,10 +5632,10 @@ void HashTable::Swap(InternalIndex entry1, InternalIndex entry2, } template -void HashTable::Rehash(IsolateRoot isolate) { +void HashTable::Rehash(PtrComprCageBase cage_base) { DisallowGarbageCollection no_gc; WriteBarrierMode mode = GetWriteBarrierMode(no_gc); - ReadOnlyRoots roots = GetReadOnlyRoots(isolate); + ReadOnlyRoots roots = GetReadOnlyRoots(cage_base); uint32_t capacity = Capacity(); bool done = false; for (int probe = 1; !done; probe++) { @@ -5643,7 +5644,7 @@ void HashTable::Rehash(IsolateRoot isolate) { done = true; for (InternalIndex current(0); current.raw_value() < capacity; /* {current} is advanced manually below, when appropriate.*/) { - Object current_key = KeyAt(isolate, current); + Object current_key = KeyAt(cage_base, current); if (!IsKey(roots, current_key)) { ++current; // Advance to next entry. continue; @@ -5653,7 +5654,7 @@ void HashTable::Rehash(IsolateRoot isolate) { ++current; // Advance to next entry. continue; } - Object target_key = KeyAt(isolate, target); + Object target_key = KeyAt(cage_base, target); if (!IsKey(roots, target_key) || EntryForProbe(roots, target_key, probe, target) != target) { // Put the current element into the correct position. @@ -5673,7 +5674,7 @@ void HashTable::Rehash(IsolateRoot isolate) { HeapObject undefined = roots.undefined_value(); Derived* self = static_cast(this); for (InternalIndex current : InternalIndex::Range(capacity)) { - if (KeyAt(isolate, current) == the_hole) { + if (KeyAt(cage_base, current) == the_hole) { self->set_key(EntryToIndex(current) + kEntryKeyIndex, undefined, SKIP_WRITE_BARRIER); } @@ -5764,15 +5765,14 @@ Handle HashTable::Shrink(Isolate* isolate, } template -InternalIndex HashTable::FindInsertionEntry(IsolateRoot isolate, - ReadOnlyRoots roots, - uint32_t hash) { +InternalIndex HashTable::FindInsertionEntry( + PtrComprCageBase cage_base, ReadOnlyRoots roots, uint32_t hash) { uint32_t capacity = Capacity(); uint32_t count = 1; // EnsureCapacity will guarantee the hash table is never full. 
for (InternalIndex entry = FirstProbe(hash, capacity);; entry = NextProbe(entry, count++, capacity)) { - if (!IsKey(roots, KeyAt(isolate, entry))) return entry; + if (!IsKey(roots, KeyAt(cage_base, entry))) return entry; } } @@ -6080,14 +6080,14 @@ void ObjectHashTableBase::FillEntriesWithHoles( } template -Object ObjectHashTableBase::Lookup(IsolateRoot isolate, +Object ObjectHashTableBase::Lookup(PtrComprCageBase cage_base, Handle key, int32_t hash) { DisallowGarbageCollection no_gc; - ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate); + ReadOnlyRoots roots = this->GetReadOnlyRoots(cage_base); DCHECK(this->IsKey(roots, *key)); - InternalIndex entry = this->FindEntry(isolate, roots, key, hash); + InternalIndex entry = this->FindEntry(cage_base, roots, key, hash); if (entry.is_not_found()) return roots.the_hole_value(); return this->get(Derived::EntryToIndex(entry) + 1); } @@ -6096,8 +6096,8 @@ template Object ObjectHashTableBase::Lookup(Handle key) { DisallowGarbageCollection no_gc; - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + ReadOnlyRoots roots = this->GetReadOnlyRoots(cage_base); DCHECK(this->IsKey(roots, *key)); // If the object does not have an identity hash, it was never used as a key. @@ -6105,13 +6105,13 @@ Object ObjectHashTableBase::Lookup(Handle key) { if (hash.IsUndefined(roots)) { return roots.the_hole_value(); } - return Lookup(isolate, key, Smi::ToInt(hash)); + return Lookup(cage_base, key, Smi::ToInt(hash)); } template Object ObjectHashTableBase::Lookup(Handle key, int32_t hash) { - return Lookup(GetIsolateForPtrCompr(*this), key, hash); + return Lookup(GetPtrComprCageBase(*this), key, hash); } template diff --git a/src/objects/objects.h b/src/objects/objects.h index c68445597f..e4532bb0e5 100644 --- a/src/objects/objects.h +++ b/src/objects/objects.h @@ -279,7 +279,7 @@ class Object : public TaggedImpl { #define IS_TYPE_FUNCTION_DECL(Type) \ V8_INLINE bool Is##Type() const; \ - V8_INLINE bool Is##Type(IsolateRoot isolate) const; + V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const; OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL) HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL) IS_TYPE_FUNCTION_DECL(HashTableBase) @@ -307,7 +307,7 @@ class Object : public TaggedImpl { #define DECL_STRUCT_PREDICATE(NAME, Name, name) \ V8_INLINE bool Is##Name() const; \ - V8_INLINE bool Is##Name(IsolateRoot isolate) const; + V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const; STRUCT_LIST(DECL_STRUCT_PREDICATE) #undef DECL_STRUCT_PREDICATE @@ -322,9 +322,9 @@ class Object : public TaggedImpl { V8_EXPORT_PRIVATE bool ToInt32(int32_t* value); inline bool ToUint32(uint32_t* value) const; - inline Representation OptimalRepresentation(IsolateRoot isolate) const; + inline Representation OptimalRepresentation(PtrComprCageBase cage_base) const; - inline ElementsKind OptimalElementsKind(IsolateRoot isolate) const; + inline ElementsKind OptimalElementsKind(PtrComprCageBase cage_base) const; inline bool FitsRepresentation(Representation representation); @@ -673,7 +673,8 @@ class Object : public TaggedImpl { inline void InitExternalPointerField(size_t offset, Isolate* isolate); inline void InitExternalPointerField(size_t offset, Isolate* isolate, Address value, ExternalPointerTag tag); - inline Address ReadExternalPointerField(size_t offset, IsolateRoot isolate, + inline Address ReadExternalPointerField(size_t offset, + PtrComprCageBase isolate_root, ExternalPointerTag tag) 
const; inline void WriteExternalPointerField(size_t offset, Isolate* isolate, Address value, ExternalPointerTag tag); diff --git a/src/objects/oddball-inl.h b/src/objects/oddball-inl.h index 4a022831be..df7829e2b4 100644 --- a/src/objects/oddball-inl.h +++ b/src/objects/oddball-inl.h @@ -37,7 +37,7 @@ Handle Oddball::ToNumber(Isolate* isolate, Handle input) { } DEF_GETTER(HeapObject, IsBoolean, bool) { - return IsOddball(isolate) && + return IsOddball(cage_base) && ((Oddball::cast(*this).kind() & Oddball::kNotBooleanMask) == 0); } diff --git a/src/objects/property-array-inl.h b/src/objects/property-array-inl.h index e2e905fbb3..fe884b043f 100644 --- a/src/objects/property-array-inl.h +++ b/src/objects/property-array-inl.h @@ -25,14 +25,14 @@ SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset) SYNCHRONIZED_SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset) Object PropertyArray::get(int index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return get(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return get(cage_base, index); } -Object PropertyArray::get(IsolateRoot isolate, int index) const { +Object PropertyArray::get(PtrComprCageBase cage_base, int index) const { DCHECK_LT(static_cast(index), static_cast(this->length())); - return TaggedField::Relaxed_Load(isolate, *this, + return TaggedField::Relaxed_Load(cage_base, *this, OffsetOfElementAt(index)); } diff --git a/src/objects/property-array.h b/src/objects/property-array.h index da15e8d732..f4cc5c9fb1 100644 --- a/src/objects/property-array.h +++ b/src/objects/property-array.h @@ -30,7 +30,7 @@ class PropertyArray : public HeapObject { inline int Hash() const; inline Object get(int index) const; - inline Object get(IsolateRoot isolate, int index) const; + inline Object get(PtrComprCageBase cage_base, int index) const; inline void set(int index, Object value); // Setter with explicit barrier mode. 
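The accessor pairs introduced above (PropertyArray::get, the DEF_GETTER bodies, and the ScopeInfo::get change that follows) all use one pattern: the parameterless overload derives the pointer-compression cage base from the object's own address and forwards to an overload taking the base explicitly, so code that touches many fields can compute the base once. The standalone sketch below illustrates that pattern outside of V8; the ToyObject type, the helper names, and the hard-coded 4 GiB cage constant are illustrative assumptions (mirroring V8's pointer-compression layout on a 64-bit build), not part of this patch.

// Standalone sketch (not V8 code): with pointer compression, an on-heap field
// stores only the low 32 bits of a pointer. Decompression adds that value to
// the cage base, and the base can be recovered from any address inside the
// cage because the cage is assumed 4 GiB-sized and 4 GiB-aligned (64-bit only).
#include <cstdint>
#include <cstdio>

using Address = uintptr_t;

constexpr Address kCageBaseAlignment = Address{1} << 32;  // assumed 4 GiB cage

struct PtrComprCageBase {
  Address address;
};

// Recover the cage base from any address inside the cage by masking.
inline PtrComprCageBase GetCageBase(Address on_heap_addr) {
  return {on_heap_addr & ~(kCageBaseAlignment - 1)};
}

// Decompress a 32-bit field value against an explicit cage base.
inline Address Decompress(PtrComprCageBase cage_base, uint32_t compressed) {
  return cage_base.address + compressed;
}

// Hypothetical object mirroring the two-overload getters in this patch: the
// short form derives the cage base itself; the long form takes it as a
// parameter so callers can hoist the derivation out of a loop.
struct ToyObject {
  Address self;        // address of this object inside the cage
  uint32_t raw_field;  // compressed pointer stored in the object

  Address field() const { return field(GetCageBase(self)); }
  Address field(PtrComprCageBase cage_base) const {
    return Decompress(cage_base, raw_field);
  }
};

int main() {
  const Address cage = Address{7} * kCageBaseAlignment;  // fake, aligned base
  ToyObject o{cage + 0x1000, 0x2000};
  std::printf("one-shot: %#zx\n", static_cast<size_t>(o.field()));

  // Loop-style usage: derive the base once, pass it explicitly.
  PtrComprCageBase base = GetCageBase(o.self);
  std::printf("hoisted:  %#zx\n", static_cast<size_t>(o.field(base)));
  return 0;
}

Methods in this patch such as HashTable::Rehash above follow the second form: they take (or derive once) a PtrComprCageBase and thread it through every element access instead of recomputing it per field.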
diff --git a/src/objects/property.cc b/src/objects/property.cc index b0bb79c601..c21a618cb1 100644 --- a/src/objects/property.cc +++ b/src/objects/property.cc @@ -75,10 +75,10 @@ Descriptor Descriptor::DataField(Handle key, int field_index, Descriptor Descriptor::DataConstant(Handle key, Handle value, PropertyAttributes attributes) { - IsolateRoot isolate = GetIsolateForPtrCompr(*key); + PtrComprCageBase cage_base = GetPtrComprCageBase(*key); return Descriptor(key, MaybeObjectHandle(value), kData, attributes, kDescriptor, PropertyConstness::kConst, - value->OptimalRepresentation(isolate), 0); + value->OptimalRepresentation(cage_base), 0); } Descriptor Descriptor::DataConstant(Isolate* isolate, Handle key, diff --git a/src/objects/scope-info.cc b/src/objects/scope-info.cc index 9e37b0ef11..308b57a309 100644 --- a/src/objects/scope-info.cc +++ b/src/objects/scope-info.cc @@ -575,13 +575,13 @@ Handle ScopeInfo::CreateForBootstrapping(Isolate* isolate, } Object ScopeInfo::get(int index) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return get(isolate, index); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return get(cage_base, index); } -Object ScopeInfo::get(IsolateRoot isolate, int index) const { +Object ScopeInfo::get(PtrComprCageBase cage_base, int index) const { DCHECK_LT(static_cast(index), static_cast(length())); - return TaggedField::Relaxed_Load(isolate, *this, + return TaggedField::Relaxed_Load(cage_base, *this, OffsetOfElementAt(index)); } diff --git a/src/objects/scope-info.h b/src/objects/scope-info.h index d773115bbd..57e5d2e308 100644 --- a/src/objects/scope-info.h +++ b/src/objects/scope-info.h @@ -293,7 +293,7 @@ class ScopeInfo : public TorqueGeneratedScopeInfo { // 'flags', the first field defined by ScopeInfo after the standard-size // HeapObject header. V8_EXPORT_PRIVATE Object get(int index) const; - Object get(IsolateRoot isolate, int index) const; + Object get(PtrComprCageBase cage_base, int index) const; // Setter that doesn't need write barrier. void set(int index, Smi value); // Setter with explicit barrier mode. 
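A note on why the rename matters, with the GetReadOnlyRoots(PtrComprCageBase) change above as the clearest example: a cage base identifies an Isolate only when each Isolate owns its own compression cage, which is why that overload recovers the Isolate via Isolate::FromRootAddress only under V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE. The toy model below illustrates the distinction; the registry and the CageRegistry helper are hypothetical stand-ins, not V8 APIs, and a 64-bit build with 4 GiB cages is assumed.

// Toy model (not V8 code), assuming a 64-bit build and 4 GiB-aligned cages.
#include <cassert>
#include <cstdint>
#include <map>

using Address = uintptr_t;
constexpr Address kCageAlignment = Address{1} << 32;

struct Isolate {
  Address cage_base;  // per-isolate configuration: the base doubles as the root
};

// Hypothetical stand-in for Isolate::FromRootAddress(): a lookup that is only
// well defined when the mapping cage base -> isolate is one-to-one, i.e. when
// every isolate owns its own cage.
std::map<Address, Isolate*>& CageRegistry() {
  static std::map<Address, Isolate*> registry;
  return registry;
}

int main() {
  // Per-isolate cages: distinct bases, so masking any object address down to
  // its cage base is enough to find the owning isolate (and its roots).
  Isolate a{Address{5} * kCageAlignment};
  Isolate b{Address{9} * kCageAlignment};
  CageRegistry()[a.cage_base] = &a;
  CageRegistry()[b.cage_base] = &b;

  Address object_in_a = a.cage_base + 0x4000;
  Address base = object_in_a & ~(kCageAlignment - 1);
  assert(CageRegistry().at(base) == &a);

  // Shared cage: a and b would report the same base, the mapping stops being
  // one-to-one, and only cage-relative decompression remains meaningful; the
  // base no longer names an isolate, hence the
  // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE guard in the hunk above.
  return 0;
}

In the shared-cage configuration the same base value is deliberately no longer tied to a single Isolate, which is presumably why the patch renames IsolateRoot to PtrComprCageBase rather than keeping the isolate-centric name.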
diff --git a/src/objects/slots-inl.h b/src/objects/slots-inl.h index 2943c117c7..c0d35c525f 100644 --- a/src/objects/slots-inl.h +++ b/src/objects/slots-inl.h @@ -31,7 +31,7 @@ bool FullObjectSlot::contains_value(Address raw_value) const { Object FullObjectSlot::operator*() const { return Object(*location()); } -Object FullObjectSlot::load(IsolateRoot isolate) const { return **this; } +Object FullObjectSlot::load(PtrComprCageBase cage_base) const { return **this; } void FullObjectSlot::store(Object value) const { *location() = value.ptr(); } @@ -39,7 +39,7 @@ Object FullObjectSlot::Acquire_Load() const { return Object(base::AsAtomicPointer::Acquire_Load(location())); } -Object FullObjectSlot::Acquire_Load(IsolateRoot isolate) const { +Object FullObjectSlot::Acquire_Load(PtrComprCageBase cage_base) const { return Acquire_Load(); } @@ -47,7 +47,7 @@ Object FullObjectSlot::Relaxed_Load() const { return Object(base::AsAtomicPointer::Relaxed_Load(location())); } -Object FullObjectSlot::Relaxed_Load(IsolateRoot isolate) const { +Object FullObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const { return Relaxed_Load(); } @@ -79,7 +79,7 @@ MaybeObject FullMaybeObjectSlot::operator*() const { return MaybeObject(*location()); } -MaybeObject FullMaybeObjectSlot::load(IsolateRoot isolate) const { +MaybeObject FullMaybeObjectSlot::load(PtrComprCageBase cage_base) const { return **this; } @@ -91,7 +91,8 @@ MaybeObject FullMaybeObjectSlot::Relaxed_Load() const { return MaybeObject(base::AsAtomicPointer::Relaxed_Load(location())); } -MaybeObject FullMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const { +MaybeObject FullMaybeObjectSlot::Relaxed_Load( + PtrComprCageBase cage_base) const { return Relaxed_Load(); } @@ -113,7 +114,7 @@ HeapObjectReference FullHeapObjectSlot::operator*() const { return HeapObjectReference(*location()); } -HeapObjectReference FullHeapObjectSlot::load(IsolateRoot isolate) const { +HeapObjectReference FullHeapObjectSlot::load(PtrComprCageBase cage_base) const { return **this; } diff --git a/src/objects/slots.h b/src/objects/slots.h index 2221fb41c8..69c6a8a80b 100644 --- a/src/objects/slots.h +++ b/src/objects/slots.h @@ -110,13 +110,13 @@ class FullObjectSlot : public SlotBase { inline bool contains_value(Address raw_value) const; inline Object operator*() const; - inline Object load(IsolateRoot isolate) const; + inline Object load(PtrComprCageBase cage_base) const; inline void store(Object value) const; inline Object Acquire_Load() const; - inline Object Acquire_Load(IsolateRoot isolate) const; + inline Object Acquire_Load(PtrComprCageBase cage_base) const; inline Object Relaxed_Load() const; - inline Object Relaxed_Load(IsolateRoot isolate) const; + inline Object Relaxed_Load(PtrComprCageBase cage_base) const; inline void Relaxed_Store(Object value) const; inline void Release_Store(Object value) const; inline Object Relaxed_CompareAndSwap(Object old, Object target) const; @@ -147,11 +147,11 @@ class FullMaybeObjectSlot : SlotBase(slot.address()) {} inline MaybeObject operator*() const; - inline MaybeObject load(IsolateRoot isolate) const; + inline MaybeObject load(PtrComprCageBase cage_base) const; inline void store(MaybeObject value) const; inline MaybeObject Relaxed_Load() const; - inline MaybeObject Relaxed_Load(IsolateRoot isolate) const; + inline MaybeObject Relaxed_Load(PtrComprCageBase cage_base) const; inline void Relaxed_Store(MaybeObject value) const; inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const; }; @@ -174,7 +174,7 @@ class 
FullHeapObjectSlot : public SlotBase { : SlotBase(slot.address()) {} inline HeapObjectReference operator*() const; - inline HeapObjectReference load(IsolateRoot isolate) const; + inline HeapObjectReference load(PtrComprCageBase cage_base) const; inline void store(HeapObjectReference value) const; inline HeapObject ToHeapObject() const; diff --git a/src/objects/string-inl.h b/src/objects/string-inl.h index c610e7b2f4..912109b2e0 100644 --- a/src/objects/string-inl.h +++ b/src/objects/string-inl.h @@ -274,12 +274,12 @@ inline TResult StringShape::DispatchToSpecificType(String str, } DEF_GETTER(String, IsOneByteRepresentation, bool) { - uint32_t type = map(isolate).instance_type(); + uint32_t type = map(cage_base).instance_type(); return (type & kStringEncodingMask) == kOneByteStringTag; } DEF_GETTER(String, IsTwoByteRepresentation, bool) { - uint32_t type = map(isolate).instance_type(); + uint32_t type = map(cage_base).instance_type(); return (type & kStringEncodingMask) == kTwoByteStringTag; } @@ -463,7 +463,7 @@ bool String::IsEqualTo(Vector str, Isolate* isolate) const { template bool String::IsEqualTo(Vector str) const { DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this)); - return IsEqualToImpl(str, GetIsolateForPtrCompr(*this), + return IsEqualToImpl(str, GetPtrComprCageBase(*this), SharedStringAccessGuardIfNeeded::NotNeeded()); } @@ -475,7 +475,7 @@ bool String::IsEqualTo(Vector str, LocalIsolate* isolate) const { template bool String::IsEqualToImpl( - Vector str, IsolateRoot isolate, + Vector str, PtrComprCageBase cage_base, const SharedStringAccessGuardIfNeeded& access_guard) const { size_t len = str.size(); switch (kEqType) { @@ -496,7 +496,7 @@ bool String::IsEqualToImpl( String string = *this; const Char* data = str.data(); while (true) { - int32_t type = string.map(isolate).instance_type(); + int32_t type = string.map(cage_base).instance_type(); switch (type & (kStringRepresentationMask | kStringEncodingMask)) { case kSeqStringTag | kOneByteStringTag: return CompareCharsEqual( @@ -521,7 +521,7 @@ bool String::IsEqualToImpl( case kSlicedStringTag | kTwoByteStringTag: { SlicedString slicedString = SlicedString::cast(string); slice_offset += slicedString.offset(); - string = slicedString.parent(isolate); + string = slicedString.parent(cage_base); continue; } @@ -529,13 +529,14 @@ bool String::IsEqualToImpl( case kConsStringTag | kTwoByteStringTag: { // The ConsString path is more complex and rare, so call out to an // out-of-line handler. - return IsConsStringEqualToImpl( - ConsString::cast(string), slice_offset, str, isolate, access_guard); + return IsConsStringEqualToImpl(ConsString::cast(string), + slice_offset, str, cage_base, + access_guard); } case kThinStringTag | kOneByteStringTag: case kThinStringTag | kTwoByteStringTag: - string = ThinString::cast(string).actual(isolate); + string = ThinString::cast(string).actual(cage_base); continue; default: @@ -548,7 +549,8 @@ bool String::IsEqualToImpl( template bool String::IsConsStringEqualToImpl( ConsString string, int slice_offset, Vector str, - IsolateRoot isolate, const SharedStringAccessGuardIfNeeded& access_guard) { + PtrComprCageBase cage_base, + const SharedStringAccessGuardIfNeeded& access_guard) { // Already checked the len in IsEqualToImpl. Check GE rather than EQ in case // this is a prefix check. DCHECK_GE(string.length(), str.size()); @@ -561,7 +563,7 @@ bool String::IsConsStringEqualToImpl( // remaining string. 
size_t len = std::min(segment.length(), remaining_str.size()); Vector sub_str = remaining_str.SubVector(0, len); - if (!segment.IsEqualToImpl(sub_str, isolate, + if (!segment.IsEqualToImpl(sub_str, cage_base, access_guard)) { return false; } @@ -845,7 +847,7 @@ Object ConsString::unchecked_second() { } DEF_GETTER(ThinString, unchecked_actual, HeapObject) { - return TaggedField::load(isolate, *this); + return TaggedField::load(cage_base, *this); } bool ExternalString::is_uncached() const { @@ -860,7 +862,7 @@ void ExternalString::AllocateExternalPointerEntries(Isolate* isolate) { } DEF_GETTER(ExternalString, resource_as_address, Address) { - return ReadExternalPointerField(kResourceOffset, isolate, + return ReadExternalPointerField(kResourceOffset, cage_base, kExternalStringResourceTag); } @@ -908,7 +910,7 @@ DEF_GETTER(ExternalOneByteString, resource, DEF_GETTER(ExternalOneByteString, mutable_resource, ExternalOneByteString::Resource*) { - return reinterpret_cast(resource_as_address(isolate)); + return reinterpret_cast(resource_as_address(cage_base)); } void ExternalOneByteString::update_data_cache(Isolate* isolate) { @@ -973,7 +975,7 @@ DEF_GETTER(ExternalTwoByteString, resource, DEF_GETTER(ExternalTwoByteString, mutable_resource, ExternalTwoByteString::Resource*) { - return reinterpret_cast(resource_as_address(isolate)); + return reinterpret_cast(resource_as_address(cage_base)); } void ExternalTwoByteString::update_data_cache(Isolate* isolate) { diff --git a/src/objects/string-table.cc b/src/objects/string-table.cc index 8d5b44c6c5..a549376116 100644 --- a/src/objects/string-table.cc +++ b/src/objects/string-table.cc @@ -91,15 +91,15 @@ bool KeyIsMatch(LocalIsolate* isolate, StringTableKey* key, String string) { class StringTable::Data { public: static std::unique_ptr New(int capacity); - static std::unique_ptr Resize(IsolateRoot isolate, + static std::unique_ptr Resize(PtrComprCageBase cage_base, std::unique_ptr data, int capacity); OffHeapObjectSlot slot(InternalIndex index) const { return OffHeapObjectSlot(&elements_[index.as_uint32()]); } - Object Get(IsolateRoot isolate, InternalIndex index) const { - return slot(index).Acquire_Load(isolate); + Object Get(PtrComprCageBase cage_base, InternalIndex index) const { + return slot(index).Acquire_Load(cage_base); } void Set(InternalIndex index, String entry) { @@ -139,7 +139,8 @@ class StringTable::Data { InternalIndex FindEntry(LocalIsolate* isolate, StringTableKey* key, uint32_t hash) const; - InternalIndex FindInsertionEntry(IsolateRoot isolate, uint32_t hash) const; + InternalIndex FindInsertionEntry(PtrComprCageBase cage_base, + uint32_t hash) const; template InternalIndex FindEntryOrInsertionEntry(LocalIsolate* isolate, @@ -157,7 +158,7 @@ class StringTable::Data { Data* PreviousData() { return previous_data_.get(); } void DropPreviousData() { previous_data_.reset(); } - void Print(IsolateRoot isolate) const; + void Print(PtrComprCageBase cage_base) const; size_t GetCurrentMemoryUsage() const; private: @@ -224,7 +225,7 @@ std::unique_ptr StringTable::Data::New(int capacity) { } std::unique_ptr StringTable::Data::Resize( - IsolateRoot isolate, std::unique_ptr data, int capacity) { + PtrComprCageBase cage_base, std::unique_ptr data, int capacity) { std::unique_ptr new_data(new (capacity) Data(capacity)); DCHECK_LT(data->number_of_elements(), new_data->capacity()); @@ -234,11 +235,12 @@ std::unique_ptr StringTable::Data::Resize( // Rehash the elements. 
for (InternalIndex i : InternalIndex::Range(data->capacity())) { - Object element = data->Get(isolate, i); + Object element = data->Get(cage_base, i); if (element == empty_element() || element == deleted_element()) continue; String string = String::cast(element); uint32_t hash = string.hash(); - InternalIndex insertion_index = new_data->FindInsertionEntry(isolate, hash); + InternalIndex insertion_index = + new_data->FindInsertionEntry(cage_base, hash); new_data->Set(insertion_index, string); } new_data->number_of_elements_ = data->number_of_elements(); @@ -265,7 +267,7 @@ InternalIndex StringTable::Data::FindEntry(LocalIsolate* isolate, } } -InternalIndex StringTable::Data::FindInsertionEntry(IsolateRoot isolate, +InternalIndex StringTable::Data::FindInsertionEntry(PtrComprCageBase cage_base, uint32_t hash) const { uint32_t count = 1; // EnsureCapacity will guarantee the hash table is never full. @@ -273,7 +275,7 @@ InternalIndex StringTable::Data::FindInsertionEntry(IsolateRoot isolate, entry = NextProbe(entry, count++, capacity_)) { // TODO(leszeks): Consider delaying the decompression until after the // comparisons against empty/deleted. - Object element = Get(isolate, entry); + Object element = Get(cage_base, entry); if (element == empty_element() || element == deleted_element()) return entry; } @@ -314,11 +316,12 @@ void StringTable::Data::IterateElements(RootVisitor* visitor) { visitor->VisitRootPointers(Root::kStringTable, nullptr, first_slot, end_slot); } -void StringTable::Data::Print(IsolateRoot isolate) const { +void StringTable::Data::Print(PtrComprCageBase cage_base) const { OFStream os(stdout); os << "StringTable {" << std::endl; for (InternalIndex i : InternalIndex::Range(capacity_)) { - os << " " << i.as_uint32() << ": " << Brief(Get(isolate, i)) << std::endl; + os << " " << i.as_uint32() << ": " << Brief(Get(cage_base, i)) + << std::endl; } os << "}" << std::endl; } @@ -530,7 +533,7 @@ template Handle StringTable::LookupKey(LocalIsolate* isolate, template Handle StringTable::LookupKey(Isolate* isolate, StringTableInsertionKey* key); -StringTable::Data* StringTable::EnsureCapacity(IsolateRoot isolate, +StringTable::Data* StringTable::EnsureCapacity(PtrComprCageBase cage_base, int additional_elements) { // This call is only allowed while the write mutex is held. write_mutex_.AssertHeld(); @@ -560,7 +563,7 @@ StringTable::Data* StringTable::EnsureCapacity(IsolateRoot isolate, if (new_capacity != -1) { std::unique_ptr new_data = - Data::Resize(isolate, std::unique_ptr(data), new_capacity); + Data::Resize(cage_base, std::unique_ptr(data), new_capacity); // `new_data` is the new owner of `data`. 
DCHECK_EQ(new_data->PreviousData(), data); // Release-store the new data pointer as `data_`, so that it can be @@ -669,8 +672,8 @@ Address StringTable::TryStringToIndexOrLookupExisting(Isolate* isolate, isolate, string, source, start); } -void StringTable::Print(IsolateRoot isolate) const { - data_.load(std::memory_order_acquire)->Print(isolate); +void StringTable::Print(PtrComprCageBase cage_base) const { + data_.load(std::memory_order_acquire)->Print(cage_base); } size_t StringTable::GetCurrentMemoryUsage() const { diff --git a/src/objects/string-table.h b/src/objects/string-table.h index baf9518ea3..fe87ce15f2 100644 --- a/src/objects/string-table.h +++ b/src/objects/string-table.h @@ -72,7 +72,7 @@ class V8_EXPORT_PRIVATE StringTable { static Address TryStringToIndexOrLookupExisting(Isolate* isolate, Address raw_string); - void Print(IsolateRoot isolate) const; + void Print(PtrComprCageBase cage_base) const; size_t GetCurrentMemoryUsage() const; // The following methods must be called either while holding the write lock, @@ -84,7 +84,7 @@ class V8_EXPORT_PRIVATE StringTable { private: class Data; - Data* EnsureCapacity(IsolateRoot isolate, int additional_elements); + Data* EnsureCapacity(PtrComprCageBase cage_base, int additional_elements); std::atomic data_; // Write mutex is mutable so that readers of concurrently mutated values (e.g. diff --git a/src/objects/string.cc b/src/objects/string.cc index a65cfc9bcf..ffa1be3aa3 100644 --- a/src/objects/string.cc +++ b/src/objects/string.cc @@ -1289,7 +1289,7 @@ Object String::LastIndexOf(Isolate* isolate, Handle receiver, bool String::HasOneBytePrefix(Vector str) { DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this)); return IsEqualToImpl( - str, GetIsolateForPtrCompr(*this), + str, GetPtrComprCageBase(*this), SharedStringAccessGuardIfNeeded::NotNeeded()); } diff --git a/src/objects/string.h b/src/objects/string.h index 47cdacab96..b8d47b5551 100644 --- a/src/objects/string.h +++ b/src/objects/string.h @@ -332,7 +332,7 @@ class String : public TorqueGeneratedString { // whole string or just a prefix. // // This is main-thread only, like the Isolate* overload, but additionally - // computes the IsolateRoot for IsEqualToImpl. + // computes the PtrComprCageBase for IsEqualToImpl. template inline bool IsEqualTo(Vector str) const; @@ -546,14 +546,15 @@ class String : public TorqueGeneratedString { // Implementation of the IsEqualTo() public methods. Do not use directly. template V8_INLINE bool IsEqualToImpl( - Vector str, IsolateRoot isolate, + Vector str, PtrComprCageBase cage_base, const SharedStringAccessGuardIfNeeded& access_guard) const; // Out-of-line IsEqualToImpl for ConsString. 
template V8_NOINLINE static bool IsConsStringEqualToImpl( ConsString string, int slice_offset, Vector str, - IsolateRoot isolate, const SharedStringAccessGuardIfNeeded& access_guard); + PtrComprCageBase cage_base, + const SharedStringAccessGuardIfNeeded& access_guard); V8_EXPORT_PRIVATE static Handle SlowFlatten( Isolate* isolate, Handle cons, AllocationType allocation); diff --git a/src/objects/swiss-name-dictionary-inl.h b/src/objects/swiss-name-dictionary-inl.h index 71e21f1536..343abfc8cc 100644 --- a/src/objects/swiss-name-dictionary-inl.h +++ b/src/objects/swiss-name-dictionary-inl.h @@ -219,15 +219,15 @@ InternalIndex SwissNameDictionary::FindEntry(LocalIsolate* isolate, } Object SwissNameDictionary::LoadFromDataTable(int entry, int data_offset) { - return LoadFromDataTable(GetIsolateForPtrCompr(*this), entry, data_offset); + return LoadFromDataTable(GetPtrComprCageBase(*this), entry, data_offset); } -Object SwissNameDictionary::LoadFromDataTable(IsolateRoot isolate, int entry, - int data_offset) { +Object SwissNameDictionary::LoadFromDataTable(PtrComprCageBase cage_base, + int entry, int data_offset) { DCHECK_LT(static_cast(entry), static_cast(Capacity())); int offset = DataTableStartOffset() + (entry * kDataTableEntryCount + data_offset) * kTaggedSize; - return TaggedField::Relaxed_Load(isolate, *this, offset); + return TaggedField::Relaxed_Load(cage_base, *this, offset); } void SwissNameDictionary::StoreToDataTable(int entry, int data_offset, diff --git a/src/objects/swiss-name-dictionary.h b/src/objects/swiss-name-dictionary.h index 9849b3fa55..9ab225dd34 100644 --- a/src/objects/swiss-name-dictionary.h +++ b/src/objects/swiss-name-dictionary.h @@ -306,7 +306,8 @@ class V8_EXPORT_PRIVATE SwissNameDictionary : public HeapObject { inline ctrl_t GetCtrl(int entry); inline Object LoadFromDataTable(int entry, int data_offset); - inline Object LoadFromDataTable(IsolateRoot root, int entry, int data_offset); + inline Object LoadFromDataTable(PtrComprCageBase cage_base, int entry, + int data_offset); inline void StoreToDataTable(int entry, int data_offset, Object data); inline void StoreToDataTableNoBarrier(int entry, int data_offset, Object data); diff --git a/src/objects/tagged-field-inl.h b/src/objects/tagged-field-inl.h index eaaa557431..513f6a02d9 100644 --- a/src/objects/tagged-field-inl.h +++ b/src/objects/tagged-field-inl.h @@ -61,10 +61,10 @@ T TaggedField::load(HeapObject host, int offset) { // static template -T TaggedField::load(IsolateRoot isolate, HeapObject host, - int offset) { +T TaggedField::load(PtrComprCageBase cage_base, + HeapObject host, int offset) { Tagged_t value = *location(host, offset); - return T(tagged_to_full(isolate, value)); + return T(tagged_to_full(cage_base, value)); } // static @@ -96,10 +96,10 @@ T TaggedField::Relaxed_Load(HeapObject host, int offset) { // static template -T TaggedField::Relaxed_Load(IsolateRoot isolate, +T TaggedField::Relaxed_Load(PtrComprCageBase cage_base, HeapObject host, int offset) { AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location(host, offset)); - return T(tagged_to_full(isolate, value)); + return T(tagged_to_full(cage_base, value)); } // static @@ -125,10 +125,10 @@ T TaggedField::Acquire_Load(HeapObject host, int offset) { // static template -T TaggedField::Acquire_Load(IsolateRoot isolate, +T TaggedField::Acquire_Load(PtrComprCageBase cage_base, HeapObject host, int offset) { AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location(host, offset)); - return T(tagged_to_full(isolate, value)); + return 
T(tagged_to_full(cage_base, value)); } // static diff --git a/src/objects/tagged-field.h b/src/objects/tagged-field.h index 8560c54cc4..e3950fa0af 100644 --- a/src/objects/tagged-field.h +++ b/src/objects/tagged-field.h @@ -38,20 +38,21 @@ class TaggedField : public AllStatic { static inline Address address(HeapObject host, int offset = 0); static inline T load(HeapObject host, int offset = 0); - static inline T load(IsolateRoot isolate, HeapObject host, int offset = 0); + static inline T load(PtrComprCageBase cage_base, HeapObject host, + int offset = 0); static inline void store(HeapObject host, T value); static inline void store(HeapObject host, int offset, T value); static inline T Relaxed_Load(HeapObject host, int offset = 0); - static inline T Relaxed_Load(IsolateRoot isolate, HeapObject host, + static inline T Relaxed_Load(PtrComprCageBase cage_base, HeapObject host, int offset = 0); static inline void Relaxed_Store(HeapObject host, T value); static inline void Relaxed_Store(HeapObject host, int offset, T value); static inline T Acquire_Load(HeapObject host, int offset = 0); - static inline T Acquire_Load(IsolateRoot isolate, HeapObject host, + static inline T Acquire_Load(PtrComprCageBase cage_base, HeapObject host, int offset = 0); static inline void Release_Store(HeapObject host, T value); diff --git a/src/objects/templates-inl.h b/src/objects/templates-inl.h index 8d9107ec7d..d5a08fd88e 100644 --- a/src/objects/templates-inl.h +++ b/src/objects/templates-inl.h @@ -45,13 +45,13 @@ RELEASE_ACQUIRE_ACCESSORS(FunctionTemplateInfo, call_code, HeapObject, // TODO(nicohartmann@, v8:11122): Let Torque generate this accessor. HeapObject FunctionTemplateInfo::rare_data(AcquireLoadTag) const { - IsolateRoot isolate = GetIsolateForPtrCompr(*this); - return rare_data(isolate, kAcquireLoad); + PtrComprCageBase cage_base = GetPtrComprCageBase(*this); + return rare_data(cage_base, kAcquireLoad); } -HeapObject FunctionTemplateInfo::rare_data(IsolateRoot isolate, +HeapObject FunctionTemplateInfo::rare_data(PtrComprCageBase cage_base, AcquireLoadTag) const { HeapObject value = - TaggedField::Acquire_Load(isolate, *this, kRareDataOffset); + TaggedField::Acquire_Load(cage_base, *this, kRareDataOffset); DCHECK(value.IsUndefined() || value.IsFunctionTemplateRareData()); return value; } @@ -75,8 +75,8 @@ FunctionTemplateRareData FunctionTemplateInfo::EnsureFunctionTemplateRareData( #define RARE_ACCESSORS(Name, CamelName, Type, Default) \ DEF_GETTER(FunctionTemplateInfo, Get##CamelName, Type) { \ - HeapObject extra = rare_data(isolate, kAcquireLoad); \ - HeapObject undefined = GetReadOnlyRoots(isolate).undefined_value(); \ + HeapObject extra = rare_data(cage_base, kAcquireLoad); \ + HeapObject undefined = GetReadOnlyRoots(cage_base).undefined_value(); \ return extra == undefined ? Default \ : FunctionTemplateRareData::cast(extra).Name(); \ } \ diff --git a/src/objects/templates.h b/src/objects/templates.h index 33c27b1182..966b81167c 100644 --- a/src/objects/templates.h +++ b/src/objects/templates.h @@ -92,7 +92,7 @@ class FunctionTemplateInfo // TODO(nicohartmann@, v8:11122): Let Torque generate the following accessor. 
inline HeapObject rare_data(AcquireLoadTag) const; - inline HeapObject rare_data(IsolateRoot isolate, AcquireLoadTag) const; + inline HeapObject rare_data(PtrComprCageBase cage_base, AcquireLoadTag) const; inline void set_rare_data( HeapObject value, ReleaseStoreTag, WriteBarrierMode mode = WriteBarrierMode::UPDATE_WRITE_BARRIER); diff --git a/src/profiler/heap-snapshot-generator.cc b/src/profiler/heap-snapshot-generator.cc index a9a6282abc..9cc26fa3e2 100644 --- a/src/profiler/heap-snapshot-generator.cc +++ b/src/profiler/heap-snapshot-generator.cc @@ -1508,10 +1508,10 @@ class RootsReferencesExtractor : public RootVisitor { OffHeapObjectSlot start, OffHeapObjectSlot end) override { DCHECK_EQ(root, Root::kStringTable); - IsolateRoot isolate = Isolate::FromHeap(explorer_->heap_); + PtrComprCageBase cage_base = Isolate::FromHeap(explorer_->heap_); for (OffHeapObjectSlot p = start; p < end; ++p) { explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_, - p.load(isolate)); + p.load(cage_base)); } } diff --git a/src/torque/cc-generator.cc b/src/torque/cc-generator.cc index 7f9f662979..0dea634ba4 100644 --- a/src/torque/cc-generator.cc +++ b/src/torque/cc-generator.cc @@ -386,10 +386,10 @@ void CCGenerator::EmitInstruction(const LoadReferenceInstruction& instruction, out() << " " << result_name << " = "; if (instruction.type->IsSubtypeOf(TypeOracle::GetTaggedType())) { // Currently, all of the tagged loads we emit are for smi values, so there - // is no point in providing an IsolateRoot. If at some point we start + // is no point in providing a PtrComprCageBase. If at some point we start // emitting loads for tagged fields which might be HeapObjects, then we - // should plumb an IsolateRoot through the generated functions that need - // it. + // should plumb a PtrComprCageBase through the generated functions that + // need it. if (!instruction.type->IsSubtypeOf(TypeOracle::GetSmiType())) { Error( "Not supported in C++ output: LoadReference on non-smi tagged " diff --git a/src/torque/implementation-visitor.cc b/src/torque/implementation-visitor.cc index e4f66777e9..a2cf0fee86 100644 --- a/src/torque/implementation-visitor.cc +++ b/src/torque/implementation-visitor.cc @@ -4223,8 +4223,9 @@ void CppClassGenerator::GenerateFieldAccessors( hdr_ << " inline " << type_name << " " << name << "(" << (indexed ? "int i" : "") << ") const;\n"; if (can_contain_heap_objects) { - hdr_ << " inline " << type_name << " " << name << "(IsolateRoot isolate" - << (indexed ? ", int i" : "") << ") const;\n"; + hdr_ << " inline " << type_name << " " << name + << "(PtrComprCageBase cage_base" << (indexed ? ", int i" : "") + << ") const;\n"; } hdr_ << " inline void set_" << name << "(" << (indexed ? "int i, " : "") << type_name << " value" << (can_contain_heap_objects ? ", WriteBarrierMode mode = UPDATE_WRITE_BARRIER" : "") << ");\n\n"; - // For tagged data, generate the extra getter that derives an IsolateRoot from - // the current object's pointer. + // For tagged data, generate the extra getter that derives a PtrComprCageBase + // from the current object's pointer. if (can_contain_heap_objects) { inl_ << "template \n"; inl_ << type_name << " " << gen_name_ << "::" << name << "(" << (indexed ? "int i" : "") << ") const {\n"; - inl_ << " IsolateRoot isolate = GetIsolateForPtrCompr(*this);\n"; - inl_ << " return " << gen_name_ << "::" << name << "(isolate" + inl_ << " PtrComprCageBase cage_base = GetPtrComprCageBase(*this);\n"; + inl_ << " return " << gen_name_ << "::" << name << "(cage_base" << (indexed ?
", i" : "") << ");\n"; inl_ << "}\n"; } @@ -4248,7 +4249,7 @@ void CppClassGenerator::GenerateFieldAccessors( // Generate the getter implementation. inl_ << "template \n"; inl_ << type_name << " " << gen_name_ << "::" << name << "("; - if (can_contain_heap_objects) inl_ << "IsolateRoot isolate"; + if (can_contain_heap_objects) inl_ << "PtrComprCageBase cage_base"; if (can_contain_heap_objects && indexed) inl_ << ", "; if (indexed) inl_ << "int i"; inl_ << ") const {\n"; @@ -4361,10 +4362,11 @@ void CppClassGenerator::EmitLoadFieldStatement( bool is_smi = field_type->IsSubtypeOf(TypeOracle::GetSmiType()); const std::string load_type = is_smi ? "Smi" : type_name; const char* postfix = is_smi ? ".value()" : ""; - const char* optional_isolate = is_smi ? "" : "isolate, "; + const char* optional_cage_base = is_smi ? "" : "cage_base, "; inl_ << "TaggedField<" << load_type << ">::" << load << "(" - << optional_isolate << "*this, " << offset << ")" << postfix << ";\n"; + << optional_cage_base << "*this, " << offset << ")" << postfix + << ";\n"; } if (CanContainHeapObjects(field_type)) { diff --git a/src/wasm/wasm-objects-inl.h b/src/wasm/wasm-objects-inl.h index abae4ee951..3da7e1650a 100644 --- a/src/wasm/wasm-objects-inl.h +++ b/src/wasm/wasm-objects-inl.h @@ -59,13 +59,13 @@ CAST_ACCESSOR(WasmTypeInfo) CAST_ACCESSOR(WasmStruct) CAST_ACCESSOR(WasmArray) -#define OPTIONAL_ACCESSORS(holder, name, type, offset) \ - DEF_GETTER(holder, has_##name, bool) { \ - Object value = TaggedField::load(isolate, *this); \ - return !value.IsUndefined(GetReadOnlyRoots(isolate)); \ - } \ - ACCESSORS_CHECKED2(holder, name, type, offset, \ - !value.IsUndefined(GetReadOnlyRoots(isolate)), true) +#define OPTIONAL_ACCESSORS(holder, name, type, offset) \ + DEF_GETTER(holder, has_##name, bool) { \ + Object value = TaggedField::load(cage_base, *this); \ + return !value.IsUndefined(GetReadOnlyRoots(cage_base)); \ + } \ + ACCESSORS_CHECKED2(holder, name, type, offset, \ + !value.IsUndefined(GetReadOnlyRoots(cage_base)), true) #define PRIMITIVE_ACCESSORS(holder, name, type, offset) \ type holder::name() const { \ @@ -460,6 +460,12 @@ int WasmArray::GcSafeSizeFor(Map map, int length) { void WasmTypeInfo::clear_foreign_address(Isolate* isolate) { #ifdef V8_HEAP_SANDBOX + + // TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage +#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE +#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage" +#endif + // Due to the type-specific pointer tags for external pointers, we need to // allocate an entry in the table here even though it will just store nullptr. 
AllocateExternalPointerEntries(isolate); diff --git a/tools/debug_helper/debug-helper-internal.cc b/tools/debug_helper/debug-helper-internal.cc index 29af7ebdd7..51c8da6f27 100644 --- a/tools/debug_helper/debug-helper-internal.cc +++ b/tools/debug_helper/debug-helper-internal.cc @@ -14,7 +14,7 @@ namespace debug_helper_internal { bool IsPointerCompressed(uintptr_t address) { #if COMPRESS_POINTERS_BOOL - return address < i::kPtrComprHeapReservationSize; + return address < i::kPtrComprCageReservationSize; #else return false; #endif diff --git a/tools/debug_helper/get-object-properties.cc b/tools/debug_helper/get-object-properties.cc index a7cc1414df..7199bc51d2 100644 --- a/tools/debug_helper/get-object-properties.cc +++ b/tools/debug_helper/get-object-properties.cc @@ -348,7 +348,7 @@ class ReadStringVisitor : public TqObjectVisitor { GetOrFinish(object->GetResourceDataValue(accessor_)); #ifdef V8_COMPRESS_POINTERS uintptr_t data_address = static_cast( - DecodeExternalPointer(GetIsolateForPtrComprFromOnHeapAddress( + DecodeExternalPointer(GetPtrComprCageBaseFromOnHeapAddress( heap_addresses_.any_heap_pointer), resource_data, kExternalStringResourceDataTag)); #else