Reland^2 "[ptr-cage] Rename IsolateRoot to PtrComprCageBase"

This is a reland of e28dadc207

The original failure was due to a stale Win32 bot. The reland failure
was due to idempotent task deduplication returning the exact same
failure. See crbug/1196064

Original change's description:
> [ptr-cage] Rename IsolateRoot to PtrComprCageBase
>
> Currently, IsolateRoot is both the address of the Isolate root and the
> base address of the pointer compression reservation. This CL teases the
> two uses apart by renaming IsolateRoot to PtrComprCageBase.
>
> - In addition to V8_COMPRESS_POINTERS, add a
>   V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE (vs SHARED_CAGE).
>
> - Rename GetIsolate* helpers to GetPtrComprCageBase. When
>   V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE is true, the helpers remain as
>   aliases to GetPtrComprCageBase.
>
> - Rename kPtrComprIsolateRootAlignment to kPtrComprCageBaseAlignment.
>
> Bug: v8:11460
> Change-Id: I1d715f678ce9a0b5731895612ca14f56579b1c48
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2783672
> Commit-Queue: Shu-yu Guo <syg@chromium.org>
> Auto-Submit: Shu-yu Guo <syg@chromium.org>
> Reviewed-by: Igor Sheludko <ishell@chromium.org>
> Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#73790}

Bug: v8:11460
No-Try: true
Tbr: ishell@chromium.org
Tbr: rmcilroy@chromium.org
Change-Id: Id69311cf3267ebe1297fff159de0be48b15b65a3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2806546
Reviewed-by: Shu-yu Guo <syg@chromium.org>
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Cr-Commit-Position: refs/heads/master@{#73795}
This commit is contained in:
Shu-yu Guo 2021-04-05 12:42:59 -07:00 committed by Commit Bot
parent 4b90ad752a
commit 627b6b2f06
90 changed files with 847 additions and 730 deletions

View File

@ -404,6 +404,10 @@ if (v8_enable_shared_ro_heap && v8_enable_pointer_compression) {
"Sharing read-only heap with pointer compression is only supported on Linux or Android")
}
assert(
!v8_enable_pointer_compression_shared_cage || !v8_enable_shared_ro_heap,
"Sharing read-only heap is not yet supported when sharing a pointer compression cage")
assert(!v8_use_multi_snapshots || !v8_control_flow_integrity,
"Control-flow integrity does not support multisnapshots")
@ -554,6 +558,7 @@ external_v8_defines = [
"V8_ENABLE_CHECKS",
"V8_COMPRESS_POINTERS",
"V8_COMPRESS_POINTERS_IN_SHARED_CAGE",
"V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE",
"V8_31BIT_SMIS_ON_64BIT_ARCH",
"V8_COMPRESS_ZONES",
"V8_HEAP_SANDBOX",
@ -573,6 +578,8 @@ if (v8_enable_pointer_compression) {
}
if (v8_enable_pointer_compression_shared_cage) {
enabled_external_v8_defines += [ "V8_COMPRESS_POINTERS_IN_SHARED_CAGE" ]
} else if (v8_enable_pointer_compression) {
enabled_external_v8_defines += [ "V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE" ]
}
if (v8_enable_pointer_compression || v8_enable_31bit_smis_on_64bit_arch) {
enabled_external_v8_defines += [ "V8_31BIT_SMIS_ON_64BIT_ARCH" ]

View File

@ -358,8 +358,9 @@ class Internals {
internal::Address heap_object_ptr, int offset) {
#ifdef V8_COMPRESS_POINTERS
uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
internal::Address base =
GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
#else
return ReadRawField<internal::Address>(heap_object_ptr, offset);
#endif
@ -411,18 +412,19 @@ class Internals {
#ifdef V8_COMPRESS_POINTERS
// See v8:7703 or src/ptr-compr.* for details about pointer compression.
static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;
static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
V8_INLINE static internal::Address GetRootFromOnHeapAddress(
V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
internal::Address addr) {
return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
}
V8_INLINE static internal::Address DecompressTaggedAnyField(
internal::Address heap_object_ptr, uint32_t value) {
internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
internal::Address base =
GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
}
#endif // V8_COMPRESS_POINTERS

View File

@ -661,7 +661,7 @@ void ArrayLiteral::BuildBoilerplateDescription(LocalIsolate* isolate) {
boilerplate_descriptor_kind(),
GetMoreGeneralElementsKind(boilerplate_descriptor_kind(),
boilerplate_value.OptimalElementsKind(
GetIsolateForPtrCompr(*elements))));
GetPtrComprCageBase(*elements))));
FixedArray::cast(*elements).set(array_index, boilerplate_value);
}

View File

@ -370,14 +370,14 @@ void TypedArrayBuiltinsAssembler::SetJSTypedArrayOnHeapDataPtr(
TNode<IntPtrT> full_base = Signed(BitcastTaggedToWord(base));
TNode<Int32T> compressed_base = TruncateIntPtrToInt32(full_base);
// TODO(v8:9706): Add a way to directly use kRootRegister value.
TNode<IntPtrT> isolate_root =
TNode<IntPtrT> ptr_compr_cage_base =
IntPtrSub(full_base, Signed(ChangeUint32ToWord(compressed_base)));
// Add JSTypedArray::ExternalPointerCompensationForOnHeapArray() to offset.
DCHECK_EQ(
isolate()->isolate_root(),
JSTypedArray::ExternalPointerCompensationForOnHeapArray(isolate()));
// See JSTypedArray::SetOnHeapDataPtr() for details.
offset = Unsigned(IntPtrAdd(offset, isolate_root));
offset = Unsigned(IntPtrAdd(offset, ptr_compr_cage_base));
}
StoreJSTypedArrayBasePointer(holder, base);

View File

@ -12,11 +12,17 @@
namespace v8 {
namespace internal {
V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate_root,
V8_INLINE Address DecodeExternalPointer(PtrComprCageBase isolate_root,
ExternalPointer_t encoded_pointer,
ExternalPointerTag tag) {
STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize);
#ifdef V8_HEAP_SANDBOX
// TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
#endif
uint32_t index = static_cast<uint32_t>(encoded_pointer);
const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());
return isolate->external_pointer_table().get(index) ^ tag;
@ -62,7 +68,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate,
}
V8_INLINE Address ReadExternalPointerField(Address field_address,
IsolateRoot isolate_root,
PtrComprCageBase cage_base,
ExternalPointerTag tag) {
// Pointer compression causes types larger than kTaggedSize to be unaligned.
constexpr bool v8_pointer_compression_unaligned =
@ -73,7 +79,7 @@ V8_INLINE Address ReadExternalPointerField(Address field_address,
} else {
encoded_value = base::Memory<ExternalPointer_t>(field_address);
}
return DecodeExternalPointer(isolate_root, encoded_value, tag);
return DecodeExternalPointer(cage_base, encoded_value, tag);
}
V8_INLINE void WriteExternalPointerField(Address field_address,

View File

@ -12,7 +12,7 @@ namespace internal {
// Convert external pointer from on-V8-heap representation to an actual external
// pointer value.
V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate,
V8_INLINE Address DecodeExternalPointer(PtrComprCageBase isolate,
ExternalPointer_t encoded_pointer,
ExternalPointerTag tag);
@ -34,7 +34,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate,
// Reads external pointer for the field, and decodes it if the sandbox is
// enabled.
V8_INLINE Address ReadExternalPointerField(Address field_address,
IsolateRoot isolate,
PtrComprCageBase isolate,
ExternalPointerTag tag);
// Encodes value if the sandbox is enabled and writes it into the field.

View File

@ -1748,13 +1748,13 @@ enum class DynamicCheckMapsStatus : uint8_t {
};
#ifdef V8_COMPRESS_POINTERS
class IsolateRoot {
class PtrComprCageBase {
public:
explicit constexpr IsolateRoot(Address address) : address_(address) {}
explicit constexpr PtrComprCageBase(Address address) : address_(address) {}
// NOLINTNEXTLINE
inline IsolateRoot(const Isolate* isolate);
inline PtrComprCageBase(const Isolate* isolate);
// NOLINTNEXTLINE
inline IsolateRoot(const LocalIsolate* isolate);
inline PtrComprCageBase(const LocalIsolate* isolate);
inline Address address() const;
@ -1762,13 +1762,13 @@ class IsolateRoot {
Address address_;
};
#else
class IsolateRoot {
class PtrComprCageBase {
public:
IsolateRoot() = default;
PtrComprCageBase() = default;
// NOLINTNEXTLINE
IsolateRoot(const Isolate* isolate) {}
PtrComprCageBase(const Isolate* isolate) {}
// NOLINTNEXTLINE
IsolateRoot(const LocalIsolate* isolate) {}
PtrComprCageBase(const LocalIsolate* isolate) {}
};
#endif

View File

@ -15,15 +15,35 @@ namespace internal {
#ifdef V8_COMPRESS_POINTERS
IsolateRoot::IsolateRoot(const Isolate* isolate)
#if defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
PtrComprCageBase::PtrComprCageBase(const Isolate* isolate)
: address_(isolate->isolate_root()) {}
IsolateRoot::IsolateRoot(const LocalIsolate* isolate)
PtrComprCageBase::PtrComprCageBase(const LocalIsolate* isolate)
: address_(isolate->isolate_root()) {}
Address IsolateRoot::address() const {
#elif defined V8_COMPRESS_POINTERS_IN_SHARED_CAGE
PtrComprCageBase::PtrComprCageBase(const Isolate* isolate)
: address_(isolate->isolate_root()) {
UNIMPLEMENTED();
}
PtrComprCageBase::PtrComprCageBase(const LocalIsolate* isolate)
: address_(isolate->isolate_root()) {
UNIMPLEMENTED();
}
#else
#error "Pointer compression build configuration error"
#endif // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE,
// V8_COMPRESS_POINTERS_IN_SHARED_CAGE
Address PtrComprCageBase::address() const {
Address ret = address_;
ret = reinterpret_cast<Address>(V8_ASSUME_ALIGNED(
reinterpret_cast<void*>(ret), kPtrComprIsolateRootAlignment));
reinterpret_cast<void*>(ret), kPtrComprCageBaseAlignment));
return ret;
}
@ -33,12 +53,17 @@ V8_INLINE Tagged_t CompressTagged(Address tagged) {
return static_cast<Tagged_t>(static_cast<uint32_t>(tagged));
}
V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) {
return RoundDown<kPtrComprIsolateRootAlignment>(on_heap_addr);
V8_INLINE constexpr Address GetPtrComprCageBaseAddress(Address on_heap_addr) {
return RoundDown<kPtrComprCageBaseAlignment>(on_heap_addr);
}
V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) {
return isolate.address();
V8_INLINE Address GetPtrComprCageBaseAddress(PtrComprCageBase cage_base) {
return cage_base.address();
}
V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(
Address address) {
return PtrComprCageBase(GetPtrComprCageBaseAddress(address));
}
// Decompresses smi value.
@ -52,7 +77,8 @@ V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) {
template <typename TOnHeapAddress>
V8_INLINE Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr,
Tagged_t raw_value) {
return GetIsolateRootAddress(on_heap_addr) + static_cast<Address>(raw_value);
return GetPtrComprCageBaseAddress(on_heap_addr) +
static_cast<Address>(raw_value);
}
// Decompresses any tagged value, preserving both weak- and smi- tags.
@ -62,18 +88,19 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
return DecompressTaggedPointer(on_heap_addr, raw_value);
}
STATIC_ASSERT(kPtrComprHeapReservationSize ==
Internals::kPtrComprHeapReservationSize);
STATIC_ASSERT(kPtrComprIsolateRootAlignment ==
Internals::kPtrComprIsolateRootAlignment);
STATIC_ASSERT(kPtrComprCageReservationSize ==
Internals::kPtrComprCageReservationSize);
STATIC_ASSERT(kPtrComprCageBaseAlignment ==
Internals::kPtrComprCageBaseAlignment);
#else
V8_INLINE Tagged_t CompressTagged(Address tagged) { UNREACHABLE(); }
V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); }
V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) { UNREACHABLE(); }
V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(
Address address) {
return PtrComprCageBase();
}
V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) { UNREACHABLE(); }
@ -90,6 +117,11 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
}
#endif // V8_COMPRESS_POINTERS
inline PtrComprCageBase GetPtrComprCageBase(HeapObject object) {
return GetPtrComprCageBaseFromOnHeapAddress(object.ptr());
}
} // namespace internal
} // namespace v8

View File

@ -13,8 +13,8 @@ namespace v8 {
namespace internal {
// See v8:7703 for details about how pointer compression works.
constexpr size_t kPtrComprHeapReservationSize = size_t{4} * GB;
constexpr size_t kPtrComprIsolateRootAlignment = size_t{4} * GB;
constexpr size_t kPtrComprCageReservationSize = size_t{4} * GB;
constexpr size_t kPtrComprCageBaseAlignment = size_t{4} * GB;
} // namespace internal
} // namespace v8

View File

@ -1275,8 +1275,7 @@ int TranslatedState::CreateNextTranslatedValue(
Address TranslatedState::DecompressIfNeeded(intptr_t value) {
if (COMPRESS_POINTERS_BOOL) {
return DecompressTaggedAny(isolate()->isolate_root(),
static_cast<uint32_t>(value));
return DecompressTaggedAny(isolate(), static_cast<uint32_t>(value));
} else {
return value;
}

View File

@ -325,11 +325,11 @@ void BytecodeArray::BytecodeArrayVerify(Isolate* isolate) {
USE_TORQUE_VERIFIER(JSReceiver)
bool JSObject::ElementsAreSafeToExamine(IsolateRoot isolate) const {
bool JSObject::ElementsAreSafeToExamine(PtrComprCageBase cage_base) const {
// If a GC was caused while constructing this object, the elements
// pointer may point to a one pointer filler map.
return elements(isolate) !=
GetReadOnlyRoots(isolate).one_pointer_filler_map();
return elements(cage_base) !=
GetReadOnlyRoots(cage_base).one_pointer_filler_map();
}
namespace {

View File

@ -468,13 +468,13 @@ void PrintSloppyArgumentElements(std::ostream& os, ElementsKind kind,
}
}
void PrintEmbedderData(IsolateRoot isolate, std::ostream& os,
void PrintEmbedderData(PtrComprCageBase cage_base, std::ostream& os,
EmbedderDataSlot slot) {
DisallowGarbageCollection no_gc;
Object value = slot.load_tagged();
os << Brief(value);
void* raw_pointer;
if (slot.ToAlignedPointer(isolate, &raw_pointer)) {
if (slot.ToAlignedPointer(cage_base, &raw_pointer)) {
os << ", aligned pointer: " << raw_pointer;
}
}
@ -579,11 +579,11 @@ static void JSObjectPrintBody(std::ostream& os,
}
int embedder_fields = obj.GetEmbedderFieldCount();
if (embedder_fields > 0) {
IsolateRoot isolate = GetIsolateForPtrCompr(obj);
PtrComprCageBase cage_base = GetPtrComprCageBase(obj);
os << " - embedder fields = {";
for (int i = 0; i < embedder_fields; i++) {
os << "\n ";
PrintEmbedderData(isolate, os, EmbedderDataSlot(obj, i));
PrintEmbedderData(cage_base, os, EmbedderDataSlot(obj, i));
}
os << "\n }\n";
}
@ -762,14 +762,14 @@ void ObjectBoilerplateDescription::ObjectBoilerplateDescriptionPrint(
}
void EmbedderDataArray::EmbedderDataArrayPrint(std::ostream& os) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
PrintHeader(os, "EmbedderDataArray");
os << "\n - length: " << length();
EmbedderDataSlot start(*this, 0);
EmbedderDataSlot end(*this, length());
for (EmbedderDataSlot slot = start; slot < end; ++slot) {
os << "\n ";
PrintEmbedderData(isolate, os, slot);
PrintEmbedderData(cage_base, os, slot);
}
os << "\n";
}
@ -2747,12 +2747,11 @@ namespace {
inline i::Object GetObjectFromRaw(void* object) {
i::Address object_ptr = reinterpret_cast<i::Address>(object);
#ifdef V8_COMPRESS_POINTERS
if (RoundDown<i::kPtrComprIsolateRootAlignment>(object_ptr) ==
i::kNullAddress) {
if (RoundDown<i::kPtrComprCageBaseAlignment>(object_ptr) == i::kNullAddress) {
// Try to decompress pointer.
i::Isolate* isolate = i::Isolate::Current();
object_ptr = i::DecompressTaggedAny(isolate->isolate_root(),
static_cast<i::Tagged_t>(object_ptr));
object_ptr =
i::DecompressTaggedAny(isolate, static_cast<i::Tagged_t>(object_ptr));
}
#endif
return i::Object(object_ptr);

View File

@ -13,26 +13,36 @@
namespace v8 {
namespace internal {
inline constexpr IsolateRoot GetIsolateForPtrComprFromOnHeapAddress(
Address address) {
#ifdef V8_COMPRESS_POINTERS
return IsolateRoot(GetIsolateRootAddress(address));
#else
return IsolateRoot();
#endif // V8_COMPRESS_POINTERS
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
// Aliases for GetPtrComprCageBase when
// V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE. Each Isolate has its own cage, whose
// base address is also the Isolate root.
V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) {
return GetPtrComprCageBaseAddress(on_heap_addr);
}
inline IsolateRoot GetIsolateForPtrCompr(HeapObject object) {
return GetIsolateForPtrComprFromOnHeapAddress(object.ptr());
V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) {
return cage_base.address();
}
#else
V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); }
V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) {
UNREACHABLE();
}
#endif // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
// Avoid using the below GetIsolateFromWritableObject because we want to be
// able to get the heap, but not the isolate, for off-thread objects.
#if defined V8_ENABLE_THIRD_PARTY_HEAP
return Heap::GetIsolateFromWritableObject(object)->heap();
#elif defined V8_COMPRESS_POINTERS
#elif defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
Isolate* isolate =
Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()));
DCHECK_NOT_NULL(isolate);
@ -47,7 +57,7 @@ V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
V8_INLINE Isolate* GetIsolateFromWritableObject(HeapObject object) {
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
return Heap::GetIsolateFromWritableObject(object);
#elif defined V8_COMPRESS_POINTERS
#elif defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
Isolate* isolate =
Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()));
DCHECK_NOT_NULL(isolate);

View File

@ -10,11 +10,12 @@
namespace v8 {
namespace internal {
// Computes isolate from any read only or writable heap object. The resulting
// value is intended to be used only as a hoisted computation of isolate root
// inside trivial accessors for optmizing value decompression.
// When pointer compression is disabled this function always returns nullptr.
V8_INLINE IsolateRoot GetIsolateForPtrCompr(HeapObject object);
// Computes the pointer compression cage base from any read only or writable
// heap object. The resulting value is intended to be used only as a hoisted
// computation of cage base inside trivial accessors for optimizing value
// decompression. When pointer compression is disabled this function always
// returns nullptr.
V8_INLINE PtrComprCageBase GetPtrComprCageBase(HeapObject object);
V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object);

View File

@ -2861,8 +2861,8 @@ Isolate* Isolate::New() {
// Construct Isolate object in the allocated memory.
void* isolate_ptr = isolate_allocator->isolate_memory();
Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator));
#ifdef V8_COMPRESS_POINTERS
DCHECK(IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
DCHECK(IsAligned(isolate->isolate_root(), kPtrComprCageBaseAlignment));
#endif
#ifdef DEBUG

View File

@ -151,6 +151,18 @@ struct MaybeBoolFlag {
#define COMPRESS_POINTERS_BOOL false
#endif
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
#define COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL true
#else
#define COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL false
#endif
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
#define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL true
#else
#define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL false
#endif
#ifdef V8_HEAP_SANDBOX
#define V8_HEAP_SANDBOX_BOOL true
#else

View File

@ -382,11 +382,11 @@ namespace {
void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) {
int field_count = jsobject.GetEmbedderFieldCount();
IsolateRoot isolate = GetIsolateForPtrCompr(jsobject);
PtrComprCageBase cage_base = GetPtrComprCageBase(jsobject);
for (int i = 0; i < len; ++i) {
if (field_count == i) break;
void* pointer;
if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(isolate, &pointer)) {
if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(cage_base, &pointer)) {
embedder_fields[i] = pointer;
}
}

View File

@ -289,7 +289,7 @@ size_t Heap::MinOldGenerationSize() {
size_t Heap::AllocatorLimitOnMaxOldGenerationSize() {
#ifdef V8_COMPRESS_POINTERS
// Isolate and the young generation are also allocated on the heap.
return kPtrComprHeapReservationSize -
return kPtrComprCageReservationSize -
YoungGenerationSizeFromSemiSpaceSize(kMaxSemiSpaceSize) -
RoundUp(sizeof(Isolate), size_t{1} << kPageSizeBits);
#endif

View File

@ -2704,8 +2704,9 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot,
}
template <AccessMode access_mode, typename TSlot>
static inline SlotCallbackResult UpdateSlot(IsolateRoot isolate, TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
static inline SlotCallbackResult UpdateSlot(PtrComprCageBase cage_base,
TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load(cage_base);
HeapObject heap_obj;
if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) {
UpdateSlot<access_mode, HeapObjectReferenceType::WEAK>(slot, obj, heap_obj);
@ -2717,9 +2718,9 @@ static inline SlotCallbackResult UpdateSlot(IsolateRoot isolate, TSlot slot) {
}
template <AccessMode access_mode, typename TSlot>
static inline SlotCallbackResult UpdateStrongSlot(IsolateRoot isolate,
static inline SlotCallbackResult UpdateStrongSlot(PtrComprCageBase cage_base,
TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
typename TSlot::TObject obj = slot.Relaxed_Load(cage_base);
DCHECK(!HAS_WEAK_HEAP_OBJECT_TAG(obj.ptr()));
HeapObject heap_obj;
if (obj.GetHeapObject(&heap_obj)) {
@ -2735,39 +2736,40 @@ static inline SlotCallbackResult UpdateStrongSlot(IsolateRoot isolate,
// It does not expect to encounter pointers to dead objects.
class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
public:
explicit PointersUpdatingVisitor(IsolateRoot isolate) : isolate_(isolate) {}
explicit PointersUpdatingVisitor(PtrComprCageBase cage_base)
: cage_base_(cage_base) {}
void VisitPointer(HeapObject host, ObjectSlot p) override {
UpdateStrongSlotInternal(isolate_, p);
UpdateStrongSlotInternal(cage_base_, p);
}
void VisitPointer(HeapObject host, MaybeObjectSlot p) override {
UpdateSlotInternal(isolate_, p);
UpdateSlotInternal(cage_base_, p);
}
void VisitPointers(HeapObject host, ObjectSlot start,
ObjectSlot end) override {
for (ObjectSlot p = start; p < end; ++p) {
UpdateStrongSlotInternal(isolate_, p);
UpdateStrongSlotInternal(cage_base_, p);
}
}
void VisitPointers(HeapObject host, MaybeObjectSlot start,
MaybeObjectSlot end) final {
for (MaybeObjectSlot p = start; p < end; ++p) {
UpdateSlotInternal(isolate_, p);
UpdateSlotInternal(cage_base_, p);
}
}
void VisitRootPointer(Root root, const char* description,
FullObjectSlot p) override {
UpdateRootSlotInternal(isolate_, p);
UpdateRootSlotInternal(cage_base_, p);
}
void VisitRootPointers(Root root, const char* description,
FullObjectSlot start, FullObjectSlot end) override {
for (FullObjectSlot p = start; p < end; ++p) {
UpdateRootSlotInternal(isolate_, p);
UpdateRootSlotInternal(cage_base_, p);
}
}
@ -2775,7 +2777,7 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
for (OffHeapObjectSlot p = start; p < end; ++p) {
UpdateRootSlotInternal(isolate_, p);
UpdateRootSlotInternal(cage_base_, p);
}
}
@ -2790,32 +2792,32 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
}
private:
static inline SlotCallbackResult UpdateRootSlotInternal(IsolateRoot isolate,
FullObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
static inline SlotCallbackResult UpdateRootSlotInternal(
PtrComprCageBase cage_base, FullObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
}
static inline SlotCallbackResult UpdateRootSlotInternal(
IsolateRoot isolate, OffHeapObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
PtrComprCageBase cage_base, OffHeapObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
}
static inline SlotCallbackResult UpdateStrongMaybeObjectSlotInternal(
IsolateRoot isolate, MaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
PtrComprCageBase cage_base, MaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
}
static inline SlotCallbackResult UpdateStrongSlotInternal(IsolateRoot isolate,
ObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
static inline SlotCallbackResult UpdateStrongSlotInternal(
PtrComprCageBase cage_base, ObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
}
static inline SlotCallbackResult UpdateSlotInternal(IsolateRoot isolate,
MaybeObjectSlot slot) {
return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
static inline SlotCallbackResult UpdateSlotInternal(
PtrComprCageBase cage_base, MaybeObjectSlot slot) {
return UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
}
IsolateRoot isolate_;
PtrComprCageBase cage_base_;
};
static String UpdateReferenceInExternalStringTableEntry(Heap* heap,
@ -3581,7 +3583,7 @@ class ToSpaceUpdatingItem : public UpdatingItem {
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
"ToSpaceUpdatingItem::ProcessVisitAll");
PointersUpdatingVisitor visitor(
GetIsolateForPtrComprFromOnHeapAddress(start_));
GetPtrComprCageBaseFromOnHeapAddress(start_));
for (Address cur = start_; cur < end_;) {
HeapObject object = HeapObject::FromAddress(cur);
Map map = object.map();
@ -3597,7 +3599,7 @@ class ToSpaceUpdatingItem : public UpdatingItem {
// For young generation evacuations we want to visit grey objects, for
// full MC, we need to visit black objects.
PointersUpdatingVisitor visitor(
GetIsolateForPtrComprFromOnHeapAddress(start_));
GetPtrComprCageBaseFromOnHeapAddress(start_));
for (auto object_and_size : LiveObjectRange<kAllLiveObjects>(
chunk_, marking_state_->bitmap(chunk_))) {
object_and_size.first.IterateBodyFast(&visitor);
@ -3743,12 +3745,12 @@ class RememberedSetUpdatingItem : public UpdatingItem {
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_);
IsolateRoot isolate = heap_->isolate();
PtrComprCageBase cage_base = heap_->isolate();
RememberedSet<OLD_TO_OLD>::Iterate(
chunk_,
[&filter, isolate](MaybeObjectSlot slot) {
[&filter, cage_base](MaybeObjectSlot slot) {
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
return UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
},
SlotSet::FREE_EMPTY_BUCKETS);
chunk_->ReleaseSlotSet<OLD_TO_OLD>();
@ -3783,10 +3785,10 @@ class RememberedSetUpdatingItem : public UpdatingItem {
Address slot) {
// Using UpdateStrongSlot is OK here, because there are no weak
// typed slots.
IsolateRoot isolate = heap_->isolate();
PtrComprCageBase cage_base = heap_->isolate();
return UpdateTypedSlotHelper::UpdateTypedSlot(
heap_, slot_type, slot, [isolate](FullMaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
heap_, slot_type, slot, [cage_base](FullMaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
});
});
}

View File

@ -14,9 +14,9 @@ namespace internal {
// static
ReadOnlyRoots ReadOnlyHeap::GetReadOnlyRoots(HeapObject object) {
#ifdef V8_COMPRESS_POINTERS
IsolateRoot isolate = GetIsolateForPtrCompr(object);
return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address()));
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
return ReadOnlyRoots(
Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr())));
#else
#ifdef V8_SHARED_RO_HEAP
// This fails if we are creating heap objects and the roots haven't yet been

View File

@ -37,7 +37,7 @@ base::LazyInstance<std::weak_ptr<ReadOnlyArtifacts>>::type
std::shared_ptr<ReadOnlyArtifacts> InitializeSharedReadOnlyArtifacts() {
std::shared_ptr<ReadOnlyArtifacts> artifacts;
if (COMPRESS_POINTERS_BOOL) {
if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
artifacts = std::make_shared<PointerCompressedReadOnlyArtifacts>();
} else {
artifacts = std::make_shared<SingleCopyReadOnlyArtifacts>();
@ -129,7 +129,7 @@ ReadOnlyHeap::ReadOnlyHeap(ReadOnlyHeap* ro_heap, ReadOnlySpace* ro_space)
: read_only_space_(ro_space),
read_only_object_cache_(ro_heap->read_only_object_cache_) {
DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
DCHECK(COMPRESS_POINTERS_BOOL);
DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
}
// static
@ -139,7 +139,7 @@ ReadOnlyHeap* ReadOnlyHeap::CreateInitalHeapForBootstrapping(
std::unique_ptr<ReadOnlyHeap> ro_heap;
auto* ro_space = new ReadOnlySpace(isolate->heap());
if (COMPRESS_POINTERS_BOOL) {
if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
ro_heap.reset(new ReadOnlyHeap(ro_space));
} else {
std::unique_ptr<SoleReadOnlyHeap> sole_ro_heap(

View File

@ -87,8 +87,8 @@ class ReadOnlyHeap {
// Returns whether the ReadOnlySpace will actually be shared taking into
// account whether shared memory is available with pointer compression.
static bool IsReadOnlySpaceShared() {
return V8_SHARED_RO_HEAP_BOOL &&
(!COMPRESS_POINTERS_BOOL || IsSharedMemoryAvailable());
return V8_SHARED_RO_HEAP_BOOL && (!COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL ||
IsSharedMemoryAvailable());
}
virtual void InitializeIsolateRoots(Isolate* isolate) {}

View File

@ -755,9 +755,10 @@ SharedReadOnlySpace::SharedReadOnlySpace(
Heap* heap, PointerCompressedReadOnlyArtifacts* artifacts)
: SharedReadOnlySpace(heap) {
// This constructor should only be used when RO_SPACE is shared with pointer
// compression.
// compression in a per-Isolate cage.
DCHECK(V8_SHARED_RO_HEAP_BOOL);
DCHECK(COMPRESS_POINTERS_BOOL);
DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
DCHECK(!artifacts->pages().empty());
@ -776,6 +777,7 @@ SharedReadOnlySpace::SharedReadOnlySpace(
: SharedReadOnlySpace(heap) {
DCHECK(V8_SHARED_RO_HEAP_BOOL);
DCHECK(COMPRESS_POINTERS_BOOL);
DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
accounting_stats_ = std::move(new_stats);

View File

@ -35,10 +35,11 @@ class ReadOnlyPage : public BasicMemoryChunk {
// Returns the address for a given offset in this page.
Address OffsetToAddress(size_t offset) const {
Address address_in_page = address() + offset;
if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_BOOL) {
// Pointer compression with share ReadOnlyPages means that the area_start
// and area_end cannot be defined since they are stored within the pages
// which can be mapped at multiple memory addresses.
if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
// Pointer compression with a per-Isolate cage and shared ReadOnlyPages
// means that the area_start and area_end cannot be defined since they are
// stored within the pages which can be mapped at multiple memory
// addresses.
DCHECK_LT(offset, size());
} else {
DCHECK_GE(address_in_page, area_start());

View File

@ -59,8 +59,8 @@ Address IsolateAllocator::InitReservation() {
// Reserve a |4Gb + kIsolateRootBiasPageSize| region such that the
// reservation address plus |kIsolateRootBiasPageSize| is 4Gb aligned.
const size_t reservation_size =
kPtrComprHeapReservationSize + kIsolateRootBiasPageSize;
const size_t base_alignment = kPtrComprIsolateRootAlignment;
kPtrComprCageReservationSize + kIsolateRootBiasPageSize;
const size_t base_alignment = kPtrComprCageBaseAlignment;
const int kMaxAttempts = 4;
for (int attempt = 0; attempt < kMaxAttempts; ++attempt) {
@ -137,11 +137,11 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) {
GetIsolateRootBiasPageSize(platform_page_allocator);
Address isolate_root = heap_reservation_address + kIsolateRootBiasPageSize;
CHECK(IsAligned(isolate_root, kPtrComprIsolateRootAlignment));
CHECK(IsAligned(isolate_root, kPtrComprCageBaseAlignment));
CHECK(reservation_.InVM(
heap_reservation_address,
kPtrComprHeapReservationSize + kIsolateRootBiasPageSize));
kPtrComprCageReservationSize + kIsolateRootBiasPageSize));
// Simplify BoundedPageAllocator's life by configuring it to use same page
// size as the Heap will use (MemoryChunk::kPageSize).
@ -149,7 +149,7 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) {
platform_page_allocator->AllocatePageSize());
page_allocator_instance_ = std::make_unique<base::BoundedPageAllocator>(
platform_page_allocator, isolate_root, kPtrComprHeapReservationSize,
platform_page_allocator, isolate_root, kPtrComprCageReservationSize,
page_size);
page_allocator_ = page_allocator_instance_.get();

View File

@ -323,9 +323,9 @@ int Code::SizeIncludingMetadata() const {
}
ByteArray Code::unchecked_relocation_info() const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return ByteArray::unchecked_cast(
TaggedField<HeapObject, kRelocationInfoOffset>::load(isolate, *this));
TaggedField<HeapObject, kRelocationInfoOffset>::load(cage_base, *this));
}
byte* Code::relocation_start() const {

View File

@ -33,9 +33,9 @@ Object CompressedObjectSlot::operator*() const {
return Object(DecompressTaggedAny(address(), value));
}
Object CompressedObjectSlot::load(IsolateRoot isolate) const {
Object CompressedObjectSlot::load(PtrComprCageBase cage_base) const {
Tagged_t value = *location();
return Object(DecompressTaggedAny(isolate, value));
return Object(DecompressTaggedAny(cage_base, value));
}
void CompressedObjectSlot::store(Object value) const {
@ -52,9 +52,9 @@ Object CompressedObjectSlot::Relaxed_Load() const {
return Object(DecompressTaggedAny(address(), value));
}
Object CompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
Object CompressedObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return Object(DecompressTaggedAny(isolate, value));
return Object(DecompressTaggedAny(cage_base, value));
}
void CompressedObjectSlot::Relaxed_Store(Object value) const {
@ -85,9 +85,9 @@ MaybeObject CompressedMaybeObjectSlot::operator*() const {
return MaybeObject(DecompressTaggedAny(address(), value));
}
MaybeObject CompressedMaybeObjectSlot::load(IsolateRoot isolate) const {
MaybeObject CompressedMaybeObjectSlot::load(PtrComprCageBase cage_base) const {
Tagged_t value = *location();
return MaybeObject(DecompressTaggedAny(isolate, value));
return MaybeObject(DecompressTaggedAny(cage_base, value));
}
void CompressedMaybeObjectSlot::store(MaybeObject value) const {
@ -99,9 +99,10 @@ MaybeObject CompressedMaybeObjectSlot::Relaxed_Load() const {
return MaybeObject(DecompressTaggedAny(address(), value));
}
MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(
PtrComprCageBase cage_base) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return MaybeObject(DecompressTaggedAny(isolate, value));
return MaybeObject(DecompressTaggedAny(cage_base, value));
}
void CompressedMaybeObjectSlot::Relaxed_Store(MaybeObject value) const {
@ -125,9 +126,10 @@ HeapObjectReference CompressedHeapObjectSlot::operator*() const {
return HeapObjectReference(DecompressTaggedPointer(address(), value));
}
HeapObjectReference CompressedHeapObjectSlot::load(IsolateRoot isolate) const {
HeapObjectReference CompressedHeapObjectSlot::load(
PtrComprCageBase cage_base) const {
Tagged_t value = *location();
return HeapObjectReference(DecompressTaggedPointer(isolate, value));
return HeapObjectReference(DecompressTaggedPointer(cage_base, value));
}
void CompressedHeapObjectSlot::store(HeapObjectReference value) const {
@ -148,23 +150,25 @@ void CompressedHeapObjectSlot::StoreHeapObject(HeapObject value) const {
// OffHeapCompressedObjectSlot implementation.
//
Object OffHeapCompressedObjectSlot::load(IsolateRoot isolate) const {
Object OffHeapCompressedObjectSlot::load(PtrComprCageBase cage_base) const {
Tagged_t value = *location();
return Object(DecompressTaggedAny(isolate, value));
return Object(DecompressTaggedAny(cage_base, value));
}
void OffHeapCompressedObjectSlot::store(Object value) const {
*location() = CompressTagged(value.ptr());
}
Object OffHeapCompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
Object OffHeapCompressedObjectSlot::Relaxed_Load(
PtrComprCageBase cage_base) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return Object(DecompressTaggedAny(isolate, value));
return Object(DecompressTaggedAny(cage_base, value));
}
Object OffHeapCompressedObjectSlot::Acquire_Load(IsolateRoot isolate) const {
Object OffHeapCompressedObjectSlot::Acquire_Load(
PtrComprCageBase cage_base) const {
AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location());
return Object(DecompressTaggedAny(isolate, value));
return Object(DecompressTaggedAny(cage_base, value));
}
void OffHeapCompressedObjectSlot::Relaxed_Store(Object value) const {

View File

@ -41,12 +41,12 @@ class CompressedObjectSlot : public SlotBase<CompressedObjectSlot, Tagged_t> {
// TODO(leszeks): Consider deprecating the operator* load, and always pass the
// Isolate.
inline Object operator*() const;
inline Object load(IsolateRoot isolate) const;
inline Object load(PtrComprCageBase cage_base) const;
inline void store(Object value) const;
inline Object Acquire_Load() const;
inline Object Relaxed_Load() const;
inline Object Relaxed_Load(IsolateRoot isolate) const;
inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
inline void Relaxed_Store(Object value) const;
inline void Release_Store(Object value) const;
inline Object Release_CompareAndSwap(Object old, Object target) const;
@ -77,11 +77,11 @@ class CompressedMaybeObjectSlot
: SlotBase(slot.address()) {}
inline MaybeObject operator*() const;
inline MaybeObject load(IsolateRoot isolate) const;
inline MaybeObject load(PtrComprCageBase cage_base) const;
inline void store(MaybeObject value) const;
inline MaybeObject Relaxed_Load() const;
inline MaybeObject Relaxed_Load(IsolateRoot isolate) const;
inline MaybeObject Relaxed_Load(PtrComprCageBase cage_base) const;
inline void Relaxed_Store(MaybeObject value) const;
inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
};
@ -105,7 +105,7 @@ class CompressedHeapObjectSlot
: SlotBase(slot.address()) {}
inline HeapObjectReference operator*() const;
inline HeapObjectReference load(IsolateRoot isolate) const;
inline HeapObjectReference load(PtrComprCageBase cage_base) const;
inline void store(HeapObjectReference value) const;
inline HeapObject ToHeapObject() const;
@ -131,11 +131,11 @@ class OffHeapCompressedObjectSlot
explicit OffHeapCompressedObjectSlot(const uint32_t* ptr)
: SlotBase(reinterpret_cast<Address>(ptr)) {}
inline Object load(IsolateRoot isolate) const;
inline Object load(PtrComprCageBase cage_base) const;
inline void store(Object value) const;
inline Object Relaxed_Load(IsolateRoot isolate) const;
inline Object Acquire_Load(IsolateRoot isolate) const;
inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
inline Object Acquire_Load(PtrComprCageBase cage_base) const;
inline void Relaxed_Store(Object value) const;
inline void Release_Store(Object value) const;
inline void Release_CompareAndSwap(Object old, Object target) const;

View File

@ -56,8 +56,8 @@ NEVER_READ_ONLY_SPACE_IMPL(Context)
CAST_ACCESSOR(NativeContext)
V8_INLINE Object Context::get(int index) const { return elements(index); }
V8_INLINE Object Context::get(IsolateRoot isolate, int index) const {
return elements(isolate, index);
V8_INLINE Object Context::get(PtrComprCageBase cage_base, int index) const {
return elements(cage_base, index);
}
V8_INLINE void Context::set(int index, Object value) {
set_elements(index, value);
@ -71,11 +71,11 @@ void Context::set_scope_info(ScopeInfo scope_info, WriteBarrierMode mode) {
}
Object Context::synchronized_get(int index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return synchronized_get(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return synchronized_get(cage_base, index);
}
Object Context::synchronized_get(IsolateRoot isolate, int index) const {
Object Context::synchronized_get(PtrComprCageBase cage_base, int index) const {
DCHECK_LT(static_cast<unsigned int>(index),
static_cast<unsigned int>(this->length()));
return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
@ -243,7 +243,7 @@ Map Context::GetInitialJSArrayMap(ElementsKind kind) const {
DEF_GETTER(NativeContext, microtask_queue, MicrotaskQueue*) {
return reinterpret_cast<MicrotaskQueue*>(ReadExternalPointerField(
kMicrotaskQueueOffset, isolate, kNativeContextMicrotaskQueueTag));
kMicrotaskQueueOffset, cage_base, kNativeContextMicrotaskQueueTag));
}
void NativeContext::AllocateExternalPointerEntries(Isolate* isolate) {

View File

@ -422,13 +422,14 @@ class Context : public TorqueGeneratedContext<Context, HeapObject> {
// Setter and getter for elements.
V8_INLINE Object get(int index) const;
V8_INLINE Object get(IsolateRoot isolate, int index) const;
V8_INLINE Object get(PtrComprCageBase cage_base, int index) const;
V8_INLINE void set(int index, Object value);
// Setter with explicit barrier mode.
V8_INLINE void set(int index, Object value, WriteBarrierMode mode);
// Setter and getter with synchronization semantics.
V8_INLINE Object synchronized_get(int index) const;
V8_INLINE Object synchronized_get(IsolateRoot isolate, int index) const;
V8_INLINE Object synchronized_get(PtrComprCageBase cage_base,
int index) const;
V8_INLINE void synchronized_set(int index, Object value);
static const int kScopeInfoOffset = kElementsOffset;

View File

@ -106,15 +106,16 @@ ObjectSlot DescriptorArray::GetDescriptorSlot(int descriptor) {
}
Name DescriptorArray::GetKey(InternalIndex descriptor_number) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetKey(isolate, descriptor_number);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return GetKey(cage_base, descriptor_number);
}
Name DescriptorArray::GetKey(IsolateRoot isolate,
Name DescriptorArray::GetKey(PtrComprCageBase cage_base,
InternalIndex descriptor_number) const {
DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
return Name::cast(EntryKeyField::Relaxed_Load(isolate, *this, entry_offset));
return Name::cast(
EntryKeyField::Relaxed_Load(cage_base, *this, entry_offset));
}
void DescriptorArray::SetKey(InternalIndex descriptor_number, Name key) {
@ -129,12 +130,13 @@ int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
}
Name DescriptorArray::GetSortedKey(int descriptor_number) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetSortedKey(isolate, descriptor_number);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return GetSortedKey(cage_base, descriptor_number);
}
Name DescriptorArray::GetSortedKey(IsolateRoot isolate, int descriptor_number) {
return GetKey(isolate, InternalIndex(GetSortedKeyIndex(descriptor_number)));
Name DescriptorArray::GetSortedKey(PtrComprCageBase cage_base,
int descriptor_number) {
return GetKey(cage_base, InternalIndex(GetSortedKeyIndex(descriptor_number)));
}
void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) {
@ -143,13 +145,13 @@ void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) {
}
Object DescriptorArray::GetStrongValue(InternalIndex descriptor_number) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetStrongValue(isolate, descriptor_number);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return GetStrongValue(cage_base, descriptor_number);
}
Object DescriptorArray::GetStrongValue(IsolateRoot isolate,
Object DescriptorArray::GetStrongValue(PtrComprCageBase cage_base,
InternalIndex descriptor_number) {
return GetValue(isolate, descriptor_number).cast<Object>();
return GetValue(cage_base, descriptor_number).cast<Object>();
}
void DescriptorArray::SetValue(InternalIndex descriptor_number,
@ -161,15 +163,15 @@ void DescriptorArray::SetValue(InternalIndex descriptor_number,
}
MaybeObject DescriptorArray::GetValue(InternalIndex descriptor_number) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetValue(isolate, descriptor_number);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return GetValue(cage_base, descriptor_number);
}
MaybeObject DescriptorArray::GetValue(IsolateRoot isolate,
MaybeObject DescriptorArray::GetValue(PtrComprCageBase cage_base,
InternalIndex descriptor_number) {
DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
return EntryValueField::Relaxed_Load(isolate, *this, entry_offset);
return EntryValueField::Relaxed_Load(cage_base, *this, entry_offset);
}
PropertyDetails DescriptorArray::GetDetails(InternalIndex descriptor_number) {
@ -192,14 +194,14 @@ int DescriptorArray::GetFieldIndex(InternalIndex descriptor_number) {
}
FieldType DescriptorArray::GetFieldType(InternalIndex descriptor_number) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetFieldType(isolate, descriptor_number);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return GetFieldType(cage_base, descriptor_number);
}
FieldType DescriptorArray::GetFieldType(IsolateRoot isolate,
FieldType DescriptorArray::GetFieldType(PtrComprCageBase cage_base,
InternalIndex descriptor_number) {
DCHECK_EQ(GetDetails(descriptor_number).location(), kField);
MaybeObject wrapped_type = GetValue(isolate, descriptor_number);
MaybeObject wrapped_type = GetValue(cage_base, descriptor_number);
return Map::UnwrapFieldType(wrapped_type);
}

View File

@ -69,22 +69,22 @@ class DescriptorArray
// Accessors for fetching instance descriptor at descriptor number.
inline Name GetKey(InternalIndex descriptor_number) const;
inline Name GetKey(IsolateRoot isolate,
inline Name GetKey(PtrComprCageBase cage_base,
InternalIndex descriptor_number) const;
inline Object GetStrongValue(InternalIndex descriptor_number);
inline Object GetStrongValue(IsolateRoot isolate,
inline Object GetStrongValue(PtrComprCageBase cage_base,
InternalIndex descriptor_number);
inline MaybeObject GetValue(InternalIndex descriptor_number);
inline MaybeObject GetValue(IsolateRoot isolate,
inline MaybeObject GetValue(PtrComprCageBase cage_base,
InternalIndex descriptor_number);
inline PropertyDetails GetDetails(InternalIndex descriptor_number);
inline int GetFieldIndex(InternalIndex descriptor_number);
inline FieldType GetFieldType(InternalIndex descriptor_number);
inline FieldType GetFieldType(IsolateRoot isolate,
inline FieldType GetFieldType(PtrComprCageBase cage_base,
InternalIndex descriptor_number);
inline Name GetSortedKey(int descriptor_number);
inline Name GetSortedKey(IsolateRoot isolate, int descriptor_number);
inline Name GetSortedKey(PtrComprCageBase cage_base, int descriptor_number);
inline int GetSortedKeyIndex(int descriptor_number);
// Accessor for complete descriptor.

View File

@ -30,15 +30,15 @@ Dictionary<Derived, Shape>::Dictionary(Address ptr)
template <typename Derived, typename Shape>
Object Dictionary<Derived, Shape>::ValueAt(InternalIndex entry) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return ValueAt(isolate, entry);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return ValueAt(cage_base, entry);
}
template <typename Derived, typename Shape>
Object Dictionary<Derived, Shape>::ValueAt(IsolateRoot isolate,
Object Dictionary<Derived, Shape>::ValueAt(PtrComprCageBase cage_base,
InternalIndex entry) {
return this->get(isolate, DerivedHashTable::EntryToIndex(entry) +
Derived::kEntryValueIndex);
return this->get(cage_base, DerivedHashTable::EntryToIndex(entry) +
Derived::kEntryValueIndex);
}
template <typename Derived, typename Shape>
@ -181,12 +181,12 @@ Handle<Map> GlobalDictionary::GetMap(ReadOnlyRoots roots) {
}
Name NameDictionary::NameAt(InternalIndex entry) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return NameAt(isolate, entry);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return NameAt(cage_base, entry);
}
Name NameDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) {
return Name::cast(KeyAt(isolate, entry));
Name NameDictionary::NameAt(PtrComprCageBase cage_base, InternalIndex entry) {
return Name::cast(KeyAt(cage_base, entry));
}
Handle<Map> NameDictionary::GetMap(ReadOnlyRoots roots) {
@ -194,32 +194,33 @@ Handle<Map> NameDictionary::GetMap(ReadOnlyRoots roots) {
}
PropertyCell GlobalDictionary::CellAt(InternalIndex entry) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return CellAt(isolate, entry);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return CellAt(cage_base, entry);
}
PropertyCell GlobalDictionary::CellAt(IsolateRoot isolate,
PropertyCell GlobalDictionary::CellAt(PtrComprCageBase cage_base,
InternalIndex entry) {
DCHECK(KeyAt(isolate, entry).IsPropertyCell(isolate));
return PropertyCell::cast(KeyAt(isolate, entry));
DCHECK(KeyAt(cage_base, entry).IsPropertyCell(cage_base));
return PropertyCell::cast(KeyAt(cage_base, entry));
}
Name GlobalDictionary::NameAt(InternalIndex entry) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return NameAt(isolate, entry);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return NameAt(cage_base, entry);
}
Name GlobalDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) {
return CellAt(isolate, entry).name(isolate);
Name GlobalDictionary::NameAt(PtrComprCageBase cage_base, InternalIndex entry) {
return CellAt(cage_base, entry).name(cage_base);
}
Object GlobalDictionary::ValueAt(InternalIndex entry) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return ValueAt(isolate, entry);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return ValueAt(cage_base, entry);
}
Object GlobalDictionary::ValueAt(IsolateRoot isolate, InternalIndex entry) {
return CellAt(isolate, entry).value(isolate);
Object GlobalDictionary::ValueAt(PtrComprCageBase cage_base,
InternalIndex entry) {
return CellAt(cage_base, entry).value(cage_base);
}
void GlobalDictionary::SetEntry(InternalIndex entry, Object key, Object value,

View File

@ -39,7 +39,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) Dictionary
using Key = typename Shape::Key;
// Returns the value at entry.
inline Object ValueAt(InternalIndex entry);
inline Object ValueAt(IsolateRoot isolate, InternalIndex entry);
inline Object ValueAt(PtrComprCageBase cage_base, InternalIndex entry);
// Set the value for entry.
inline void ValueAtPut(InternalIndex entry, Object value);
@ -193,7 +193,7 @@ class V8_EXPORT_PRIVATE NameDictionary
static const int kInitialCapacity = 2;
inline Name NameAt(InternalIndex entry);
inline Name NameAt(IsolateRoot isolate, InternalIndex entry);
inline Name NameAt(PtrComprCageBase cage_base, InternalIndex entry);
inline void set_hash(int hash);
inline int hash() const;
@ -231,14 +231,14 @@ class V8_EXPORT_PRIVATE GlobalDictionary
DECL_PRINTER(GlobalDictionary)
inline Object ValueAt(InternalIndex entry);
inline Object ValueAt(IsolateRoot isolate, InternalIndex entry);
inline Object ValueAt(PtrComprCageBase cage_base, InternalIndex entry);
inline PropertyCell CellAt(InternalIndex entry);
inline PropertyCell CellAt(IsolateRoot isolate, InternalIndex entry);
inline PropertyCell CellAt(PtrComprCageBase cage_base, InternalIndex entry);
inline void SetEntry(InternalIndex entry, Object key, Object value,
PropertyDetails details);
inline void ClearEntry(InternalIndex entry);
inline Name NameAt(InternalIndex entry);
inline Name NameAt(IsolateRoot isolate, InternalIndex entry);
inline Name NameAt(PtrComprCageBase cage_base, InternalIndex entry);
inline void ValueAtPut(InternalIndex entry, Object value);
OBJECT_CONSTRUCTORS(

View File

@ -1421,10 +1421,10 @@ class DictionaryElementsAccessor
DisallowGarbageCollection no_gc;
NumberDictionary dict = NumberDictionary::cast(backing_store);
if (!dict.requires_slow_elements()) return false;
IsolateRoot isolate = GetIsolateForPtrCompr(holder);
ReadOnlyRoots roots = holder.GetReadOnlyRoots(isolate);
PtrComprCageBase cage_base = GetPtrComprCageBase(holder);
ReadOnlyRoots roots = holder.GetReadOnlyRoots(cage_base);
for (InternalIndex i : dict.IterateEntries()) {
Object key = dict.KeyAt(isolate, i);
Object key = dict.KeyAt(cage_base, i);
if (!dict.IsKey(roots, key)) continue;
PropertyDetails details = dict.DetailsAt(i);
if (details.kind() == kAccessor) return true;

View File

@ -81,7 +81,7 @@ void EmbedderDataSlot::store_tagged(JSObject object, int embedder_field_index,
#endif
}
bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root,
bool EmbedderDataSlot::ToAlignedPointer(PtrComprCageBase isolate_root,
void** out_pointer) const {
// We don't care about atomicity of access here because embedder slots
// are accessed this way only from the main thread via API during "mutator"
@ -89,6 +89,12 @@ bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root,
// at the tagged part of the embedder slot but read-only access is ok).
Address raw_value;
#ifdef V8_HEAP_SANDBOX
// TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
#endif
uint32_t index = base::Memory<uint32_t>(address() + kRawPayloadOffset);
const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());
raw_value = isolate->external_pointer_table().get(index) ^
@ -108,9 +114,15 @@ bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root,
return HAS_SMI_TAG(raw_value);
}
bool EmbedderDataSlot::ToAlignedPointerSafe(IsolateRoot isolate_root,
bool EmbedderDataSlot::ToAlignedPointerSafe(PtrComprCageBase isolate_root,
void** out_pointer) const {
#ifdef V8_HEAP_SANDBOX
// TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
#endif
uint32_t index = base::Memory<uint32_t>(address() + kRawPayloadOffset);
Address raw_value;
const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());

View File

@ -75,7 +75,8 @@ class EmbedderDataSlot
// When V8 heap sandbox is enabled, calling this method when the raw part of
// the slot does not contain valid external pointer table index is undefined
// behaviour and most likely result in crashes.
V8_INLINE bool ToAlignedPointer(IsolateRoot isolate, void** out_result) const;
V8_INLINE bool ToAlignedPointer(PtrComprCageBase isolate_root,
void** out_result) const;
// Same as ToAlignedPointer() but with a workaround for V8 heap sandbox.
// When V8 heap sandbox is enabled, this method doesn't crash when the raw
@ -86,7 +87,7 @@ class EmbedderDataSlot
//
// Call this function if you are not sure whether the slot contains valid
// external pointer or not.
V8_INLINE bool ToAlignedPointerSafe(IsolateRoot isolate,
V8_INLINE bool ToAlignedPointerSafe(PtrComprCageBase isolate_root,
void** out_result) const;
// Returns true if the pointer was successfully stored or false it the pointer

View File

@ -187,8 +187,9 @@ MaybeObject FeedbackVector::Get(FeedbackSlot slot) const {
return value;
}
MaybeObject FeedbackVector::Get(IsolateRoot isolate, FeedbackSlot slot) const {
MaybeObject value = raw_feedback_slots(isolate, GetIndex(slot));
MaybeObject FeedbackVector::Get(PtrComprCageBase cage_base,
FeedbackSlot slot) const {
MaybeObject value = raw_feedback_slots(cage_base, GetIndex(slot));
DCHECK(!IsOfLegacyType(value));
return value;
}

View File

@ -259,7 +259,7 @@ class FeedbackVector
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline MaybeObject Get(FeedbackSlot slot) const;
inline MaybeObject Get(IsolateRoot isolate, FeedbackSlot slot) const;
inline MaybeObject Get(PtrComprCageBase cage_base, FeedbackSlot slot) const;
// Returns the feedback cell at |index| that is used to create the
// closure.

View File

@ -61,13 +61,13 @@ int FieldIndex::GetLoadByFieldIndex() const {
}
FieldIndex FieldIndex::ForDescriptor(Map map, InternalIndex descriptor_index) {
IsolateRoot isolate = GetIsolateForPtrCompr(map);
return ForDescriptor(isolate, map, descriptor_index);
PtrComprCageBase cage_base = GetPtrComprCageBase(map);
return ForDescriptor(cage_base, map, descriptor_index);
}
FieldIndex FieldIndex::ForDescriptor(IsolateRoot isolate, Map map,
FieldIndex FieldIndex::ForDescriptor(PtrComprCageBase cage_base, Map map,
InternalIndex descriptor_index) {
PropertyDetails details = map.instance_descriptors(isolate, kRelaxedLoad)
PropertyDetails details = map.instance_descriptors(cage_base, kRelaxedLoad)
.GetDetails(descriptor_index);
int field_index = details.field_index();
return ForPropertyIndex(map, field_index, details.representation());

View File

@ -31,7 +31,7 @@ class FieldIndex final {
static inline FieldIndex ForInObjectOffset(int offset, Encoding encoding);
static inline FieldIndex ForDescriptor(Map map,
InternalIndex descriptor_index);
static inline FieldIndex ForDescriptor(IsolateRoot isolate, Map map,
static inline FieldIndex ForDescriptor(PtrComprCageBase cage_base, Map map,
InternalIndex descriptor_index);
inline int GetLoadByFieldIndex() const;

View File

@ -70,13 +70,13 @@ bool FixedArray::ContainsOnlySmisOrHoles() {
}
Object FixedArray::get(int index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return get(cage_base, index);
}
Object FixedArray::get(IsolateRoot isolate, int index) const {
Object FixedArray::get(PtrComprCageBase cage_base, int index) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return TaggedField<Object>::Relaxed_Load(isolate, *this,
return TaggedField<Object>::Relaxed_Load(cage_base, *this,
OffsetOfElementAt(index));
}
@ -124,11 +124,12 @@ void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) {
}
Object FixedArray::get(int index, RelaxedLoadTag) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return get(cage_base, index);
}
Object FixedArray::get(IsolateRoot isolate, int index, RelaxedLoadTag) const {
Object FixedArray::get(PtrComprCageBase cage_base, int index,
RelaxedLoadTag) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return RELAXED_READ_FIELD(*this, OffsetOfElementAt(index));
}
@ -147,11 +148,12 @@ void FixedArray::set(int index, Smi value, RelaxedStoreTag tag) {
}
Object FixedArray::get(int index, AcquireLoadTag) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return get(cage_base, index);
}
Object FixedArray::get(IsolateRoot isolate, int index, AcquireLoadTag) const {
Object FixedArray::get(PtrComprCageBase cage_base, int index,
AcquireLoadTag) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
}
@ -435,13 +437,13 @@ void FixedDoubleArray::FillWithHoles(int from, int to) {
}
MaybeObject WeakFixedArray::Get(int index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return Get(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return Get(cage_base, index);
}
MaybeObject WeakFixedArray::Get(IsolateRoot isolate, int index) const {
MaybeObject WeakFixedArray::Get(PtrComprCageBase cage_base, int index) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return objects(isolate, index);
return objects(cage_base, index);
}
void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) {
@ -470,13 +472,13 @@ void WeakFixedArray::CopyElements(Isolate* isolate, int dst_index,
}
MaybeObject WeakArrayList::Get(int index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return Get(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return Get(cage_base, index);
}
MaybeObject WeakArrayList::Get(IsolateRoot isolate, int index) const {
MaybeObject WeakArrayList::Get(PtrComprCageBase cage_base, int index) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(capacity()));
return objects(isolate, index);
return objects(cage_base, index);
}
void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) {
@ -525,8 +527,8 @@ Object ArrayList::Get(int index) const {
return FixedArray::cast(*this).get(kFirstIndex + index);
}
Object ArrayList::Get(IsolateRoot isolate, int index) const {
return FixedArray::cast(*this).get(isolate, kFirstIndex + index);
Object ArrayList::Get(PtrComprCageBase cage_base, int index) const {
return FixedArray::cast(*this).get(cage_base, kFirstIndex + index);
}
ObjectSlot ArrayList::Slot(int index) {
@ -650,8 +652,8 @@ Object TemplateList::get(int index) const {
return FixedArray::cast(*this).get(kFirstElementIndex + index);
}
Object TemplateList::get(IsolateRoot isolate, int index) const {
return FixedArray::cast(*this).get(isolate, kFirstElementIndex + index);
Object TemplateList::get(PtrComprCageBase cage_base, int index) const {
return FixedArray::cast(*this).get(cage_base, kFirstElementIndex + index);
}
void TemplateList::set(int index, Object value) {

View File

@ -101,7 +101,7 @@ class FixedArray
public:
// Setter and getter for elements.
inline Object get(int index) const;
inline Object get(IsolateRoot isolate, int index) const;
inline Object get(PtrComprCageBase cage_base, int index) const;
static inline Handle<Object> get(FixedArray array, int index,
Isolate* isolate);
@ -113,14 +113,16 @@ class FixedArray
// Relaxed accessors.
inline Object get(int index, RelaxedLoadTag) const;
inline Object get(IsolateRoot isolate, int index, RelaxedLoadTag) const;
inline Object get(PtrComprCageBase cage_base, int index,
RelaxedLoadTag) const;
inline void set(int index, Object value, RelaxedStoreTag,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline void set(int index, Smi value, RelaxedStoreTag);
// Acquire/release accessors.
inline Object get(int index, AcquireLoadTag) const;
inline Object get(IsolateRoot isolate, int index, AcquireLoadTag) const;
inline Object get(PtrComprCageBase cage_base, int index,
AcquireLoadTag) const;
inline void set(int index, Object value, ReleaseStoreTag,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline void set(int index, Smi value, ReleaseStoreTag);
@ -275,7 +277,7 @@ class WeakFixedArray
: public TorqueGeneratedWeakFixedArray<WeakFixedArray, HeapObject> {
public:
inline MaybeObject Get(int index) const;
inline MaybeObject Get(IsolateRoot isolate, int index) const;
inline MaybeObject Get(PtrComprCageBase cage_base, int index) const;
inline void Set(
int index, MaybeObject value,
@ -350,7 +352,7 @@ class WeakArrayList
V8_EXPORT_PRIVATE void Compact(Isolate* isolate);
inline MaybeObject Get(int index) const;
inline MaybeObject Get(IsolateRoot isolate, int index) const;
inline MaybeObject Get(PtrComprCageBase cage_base, int index) const;
// Set the element at index to obj. The underlying array must be large enough.
// If you need to grow the WeakArrayList, use the static AddToEnd() method
@ -450,7 +452,7 @@ class ArrayList : public TorqueGeneratedArrayList<ArrayList, FixedArray> {
// storage capacity, i.e., length().
inline void SetLength(int length);
inline Object Get(int index) const;
inline Object Get(IsolateRoot isolate, int index) const;
inline Object Get(PtrComprCageBase cage_base, int index) const;
inline ObjectSlot Slot(int index);
// Set the element at index to obj. The underlying array must be large enough.
@ -596,7 +598,7 @@ class TemplateList
static Handle<TemplateList> New(Isolate* isolate, int size);
inline int length() const;
inline Object get(int index) const;
inline Object get(IsolateRoot isolate, int index) const;
inline Object get(PtrComprCageBase cage_base, int index) const;
inline void set(int index, Object value);
static Handle<TemplateList> Add(Isolate* isolate, Handle<TemplateList> list,
Handle<Object> value);

View File

@ -29,7 +29,7 @@ bool Foreign::IsNormalized(Object value) {
}
DEF_GETTER(Foreign, foreign_address, Address) {
return ReadExternalPointerField(kForeignAddressOffset, isolate,
return ReadExternalPointerField(kForeignAddressOffset, cage_base,
kForeignForeignAddressTag);
}

View File

@ -139,7 +139,7 @@ InternalIndex HashTable<Derived, Shape>::FindEntry(LocalIsolate* isolate,
// Find entry for key otherwise return kNotFound.
template <typename Derived, typename Shape>
InternalIndex HashTable<Derived, Shape>::FindEntry(IsolateRoot isolate,
InternalIndex HashTable<Derived, Shape>::FindEntry(PtrComprCageBase cage_base,
ReadOnlyRoots roots, Key key,
int32_t hash) {
DisallowGarbageCollection no_gc;
@ -151,7 +151,7 @@ InternalIndex HashTable<Derived, Shape>::FindEntry(IsolateRoot isolate,
// EnsureCapacity will guarantee the hash table is never full.
for (InternalIndex entry = FirstProbe(hash, capacity);;
entry = NextProbe(entry, count++, capacity)) {
Object element = KeyAt(isolate, entry);
Object element = KeyAt(cage_base, entry);
// Empty entry. Uses raw unchecked accessors because it is called by the
// string table during bootstrapping.
if (element == undefined) return InternalIndex::NotFound();
@ -177,24 +177,24 @@ bool HashTable<Derived, Shape>::ToKey(ReadOnlyRoots roots, InternalIndex entry,
}
template <typename Derived, typename Shape>
bool HashTable<Derived, Shape>::ToKey(IsolateRoot isolate, InternalIndex entry,
Object* out_k) {
Object k = KeyAt(isolate, entry);
if (!IsKey(GetReadOnlyRoots(isolate), k)) return false;
bool HashTable<Derived, Shape>::ToKey(PtrComprCageBase cage_base,
InternalIndex entry, Object* out_k) {
Object k = KeyAt(cage_base, entry);
if (!IsKey(GetReadOnlyRoots(cage_base), k)) return false;
*out_k = Shape::Unwrap(k);
return true;
}
template <typename Derived, typename Shape>
Object HashTable<Derived, Shape>::KeyAt(InternalIndex entry) {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return KeyAt(isolate, entry);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return KeyAt(cage_base, entry);
}
template <typename Derived, typename Shape>
Object HashTable<Derived, Shape>::KeyAt(IsolateRoot isolate,
Object HashTable<Derived, Shape>::KeyAt(PtrComprCageBase cage_base,
InternalIndex entry) {
return get(isolate, EntryToIndex(entry) + kEntryKeyIndex);
return get(cage_base, EntryToIndex(entry) + kEntryKeyIndex);
}
template <typename Derived, typename Shape>

View File

@ -138,24 +138,25 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
void IterateElements(ObjectVisitor* visitor);
// Find entry for key otherwise return kNotFound.
inline InternalIndex FindEntry(IsolateRoot isolate, ReadOnlyRoots roots,
Key key, int32_t hash);
inline InternalIndex FindEntry(PtrComprCageBase cage_base,
ReadOnlyRoots roots, Key key, int32_t hash);
template <typename LocalIsolate>
inline InternalIndex FindEntry(LocalIsolate* isolate, Key key);
// Rehashes the table in-place.
void Rehash(IsolateRoot isolate);
void Rehash(PtrComprCageBase cage_base);
// Returns whether k is a real key. The hole and undefined are not allowed as
// keys and can be used to indicate missing or deleted elements.
static inline bool IsKey(ReadOnlyRoots roots, Object k);
inline bool ToKey(ReadOnlyRoots roots, InternalIndex entry, Object* out_k);
inline bool ToKey(IsolateRoot isolate, InternalIndex entry, Object* out_k);
inline bool ToKey(PtrComprCageBase cage_base, InternalIndex entry,
Object* out_k);
// Returns the key at entry.
inline Object KeyAt(InternalIndex entry);
inline Object KeyAt(IsolateRoot isolate, InternalIndex entry);
inline Object KeyAt(PtrComprCageBase cage_base, InternalIndex entry);
static const int kElementsStartIndex = kPrefixStartIndex + Shape::kPrefixSize;
static const int kEntrySize = Shape::kEntrySize;
@ -217,8 +218,8 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
// Find the entry at which to insert element with the given key that
// has the given hash value.
InternalIndex FindInsertionEntry(IsolateRoot isolate, ReadOnlyRoots roots,
uint32_t hash);
InternalIndex FindInsertionEntry(PtrComprCageBase cage_base,
ReadOnlyRoots roots, uint32_t hash);
InternalIndex FindInsertionEntry(Isolate* isolate, uint32_t hash);
// Computes the capacity a table with the given capacity would need to have
@ -231,7 +232,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
Isolate* isolate, Handle<Derived> table, int additionalCapacity = 0);
// Rehashes this hash-table into the new table.
void Rehash(IsolateRoot isolate, Derived new_table);
void Rehash(PtrComprCageBase cage_base, Derived new_table);
inline void set_key(int index, Object value);
inline void set_key(int index, Object value, WriteBarrierMode mode);
@ -322,7 +323,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) ObjectHashTableBase
// returned in case the key is not present.
Object Lookup(Handle<Object> key);
Object Lookup(Handle<Object> key, int32_t hash);
Object Lookup(IsolateRoot isolate, Handle<Object> key, int32_t hash);
Object Lookup(PtrComprCageBase cage_base, Handle<Object> key, int32_t hash);
// Returns the value at entry.
Object ValueAt(InternalIndex entry);

View File

@ -70,12 +70,12 @@ class HeapObject : public Object {
// places where it might not be safe to access it.
inline ReadOnlyRoots GetReadOnlyRoots() const;
// This version is intended to be used for the isolate values produced by
// i::GetIsolateForPtrCompr(HeapObject) function which may return nullptr.
inline ReadOnlyRoots GetReadOnlyRoots(IsolateRoot isolate) const;
// i::GetPtrComprCageBase(HeapObject) function which may return nullptr.
inline ReadOnlyRoots GetReadOnlyRoots(PtrComprCageBase cage_base) const;
#define IS_TYPE_FUNCTION_DECL(Type) \
V8_INLINE bool Is##Type() const; \
V8_INLINE bool Is##Type(IsolateRoot isolate) const;
V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
IS_TYPE_FUNCTION_DECL(HashTableBase)
IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable)
@ -96,7 +96,7 @@ class HeapObject : public Object {
#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
V8_INLINE bool Is##Name() const; \
V8_INLINE bool Is##Name(IsolateRoot isolate) const;
V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const;
STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE

View File

@ -43,7 +43,7 @@ void JSArrayBuffer::set_byte_length(size_t value) {
}
DEF_GETTER(JSArrayBuffer, backing_store, void*) {
Address value = ReadExternalPointerField(kBackingStoreOffset, isolate,
Address value = ReadExternalPointerField(kBackingStoreOffset, cage_base,
kArrayBufferBackingStoreTag);
return reinterpret_cast<void*>(value);
}
@ -199,7 +199,7 @@ void JSTypedArray::set_length(size_t value) {
}
DEF_GETTER(JSTypedArray, external_pointer, Address) {
return ReadExternalPointerField(kExternalPointerOffset, isolate,
return ReadExternalPointerField(kExternalPointerOffset, cage_base,
kTypedArrayExternalPointerTag);
}
@ -213,9 +213,9 @@ void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) {
}
Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
IsolateRoot isolate) {
PtrComprCageBase cage_base) {
#ifdef V8_COMPRESS_POINTERS
return isolate.address();
return cage_base.address();
#else
return 0;
#endif
@ -321,7 +321,7 @@ MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
DEF_GETTER(JSDataView, data_pointer, void*) {
return reinterpret_cast<void*>(ReadExternalPointerField(
kDataPointerOffset, isolate, kDataViewDataPointerTag));
kDataPointerOffset, cage_base, kDataViewDataPointerTag));
}
void JSDataView::AllocateExternalPointerEntries(Isolate* isolate) {

View File

@ -300,7 +300,7 @@ class JSTypedArray
// as Tagged_t value and an |external_pointer| value.
// For full-pointer mode the compensation value is zero.
static inline Address ExternalPointerCompensationForOnHeapArray(
IsolateRoot isolate);
PtrComprCageBase cage_base);
//
// Serializer/deserializer support.

View File

@ -22,7 +22,7 @@ CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayIterator)
DEF_GETTER(JSArray, length, Object) {
return TaggedField<Object, kLengthOffset>::load(isolate, *this);
return TaggedField<Object, kLengthOffset>::load(cage_base, *this);
}
void JSArray::set_length(Object value, WriteBarrierMode mode) {
@ -31,8 +31,8 @@ void JSArray::set_length(Object value, WriteBarrierMode mode) {
CONDITIONAL_WRITE_BARRIER(*this, kLengthOffset, value, mode);
}
Object JSArray::length(IsolateRoot isolate, RelaxedLoadTag tag) const {
return TaggedField<Object, kLengthOffset>::Relaxed_Load(isolate, *this);
Object JSArray::length(PtrComprCageBase cage_base, RelaxedLoadTag tag) const {
return TaggedField<Object, kLengthOffset>::Relaxed_Load(cage_base, *this);
}
void JSArray::set_length(Smi length) {

View File

@ -32,7 +32,7 @@ class JSArray : public JSObject {
// acquire/release semantics ever become necessary, the default setter should
// be reverted to non-atomic behavior, and setters with explicit tags
// introduced and used when required.
Object length(IsolateRoot isolate, AcquireLoadTag tag) const = delete;
Object length(PtrComprCageBase cage_base, AcquireLoadTag tag) const = delete;
void set_length(Object value, ReleaseStoreTag tag,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete;

View File

@ -210,63 +210,62 @@ ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, HeapObject,
kPrototypeOrInitialMapOffset, map().has_prototype_slot())
DEF_GETTER(JSFunction, has_prototype_slot, bool) {
return map(isolate).has_prototype_slot();
return map(cage_base).has_prototype_slot();
}
DEF_GETTER(JSFunction, initial_map, Map) {
return Map::cast(prototype_or_initial_map(isolate));
return Map::cast(prototype_or_initial_map(cage_base));
}
DEF_GETTER(JSFunction, has_initial_map, bool) {
DCHECK(has_prototype_slot(isolate));
return prototype_or_initial_map(isolate).IsMap(isolate);
DCHECK(has_prototype_slot(cage_base));
return prototype_or_initial_map(cage_base).IsMap(cage_base);
}
DEF_GETTER(JSFunction, has_instance_prototype, bool) {
DCHECK(has_prototype_slot(isolate));
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
return has_initial_map(isolate) ||
!prototype_or_initial_map(isolate).IsTheHole(
GetReadOnlyRoots(isolate));
DCHECK(has_prototype_slot(cage_base));
return has_initial_map(cage_base) ||
!prototype_or_initial_map(cage_base).IsTheHole(
GetReadOnlyRoots(cage_base));
}
DEF_GETTER(JSFunction, has_prototype, bool) {
DCHECK(has_prototype_slot(isolate));
return map(isolate).has_non_instance_prototype() ||
has_instance_prototype(isolate);
DCHECK(has_prototype_slot(cage_base));
return map(cage_base).has_non_instance_prototype() ||
has_instance_prototype(cage_base);
}
DEF_GETTER(JSFunction, has_prototype_property, bool) {
return (has_prototype_slot(isolate) && IsConstructor(isolate)) ||
IsGeneratorFunction(shared(isolate).kind());
return (has_prototype_slot(cage_base) && IsConstructor(cage_base)) ||
IsGeneratorFunction(shared(cage_base).kind());
}
DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
return !has_prototype_property(isolate) ||
map(isolate).has_non_instance_prototype();
return !has_prototype_property(cage_base) ||
map(cage_base).has_non_instance_prototype();
}
DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
DCHECK(has_instance_prototype(isolate));
if (has_initial_map(isolate)) return initial_map(isolate).prototype(isolate);
DCHECK(has_instance_prototype(cage_base));
if (has_initial_map(cage_base))
return initial_map(cage_base).prototype(cage_base);
// When there is no initial map and the prototype is a JSReceiver, the
// initial map field is used for the prototype field.
return HeapObject::cast(prototype_or_initial_map(isolate));
return HeapObject::cast(prototype_or_initial_map(cage_base));
}
DEF_GETTER(JSFunction, prototype, Object) {
DCHECK(has_prototype(isolate));
DCHECK(has_prototype(cage_base));
// If the function's prototype property has been set to a non-JSReceiver
// value, that value is stored in the constructor field of the map.
if (map(isolate).has_non_instance_prototype()) {
Object prototype = map(isolate).GetConstructor(isolate);
if (map(cage_base).has_non_instance_prototype()) {
Object prototype = map(cage_base).GetConstructor(cage_base);
// The map must have a prototype in that field, not a back pointer.
DCHECK(!prototype.IsMap(isolate));
DCHECK(!prototype.IsFunctionTemplateInfo(isolate));
DCHECK(!prototype.IsMap(cage_base));
DCHECK(!prototype.IsFunctionTemplateInfo(cage_base));
return prototype;
}
return instance_prototype(isolate);
return instance_prototype(cage_base);
}
bool JSFunction::is_compiled() const {

View File

@ -52,11 +52,12 @@ CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSReceiver)
DEF_GETTER(JSObject, elements, FixedArrayBase) {
return TaggedField<FixedArrayBase, kElementsOffset>::load(isolate, *this);
return TaggedField<FixedArrayBase, kElementsOffset>::load(cage_base, *this);
}
FixedArrayBase JSObject::elements(IsolateRoot isolate, RelaxedLoadTag) const {
return TaggedField<FixedArrayBase, kElementsOffset>::Relaxed_Load(isolate,
FixedArrayBase JSObject::elements(PtrComprCageBase cage_base,
RelaxedLoadTag) const {
return TaggedField<FixedArrayBase, kElementsOffset>::Relaxed_Load(cage_base,
*this);
}
@ -249,11 +250,11 @@ void JSObject::initialize_elements() {
}
DEF_GETTER(JSObject, GetIndexedInterceptor, InterceptorInfo) {
return map(isolate).GetIndexedInterceptor(isolate);
return map(cage_base).GetIndexedInterceptor(cage_base);
}
DEF_GETTER(JSObject, GetNamedInterceptor, InterceptorInfo) {
return map(isolate).GetNamedInterceptor(isolate);
return map(cage_base).GetNamedInterceptor(cage_base);
}
// static
@ -322,16 +323,17 @@ void JSObject::SetEmbedderField(int index, Smi value) {
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object JSObject::RawFastPropertyAt(FieldIndex index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return RawFastPropertyAt(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return RawFastPropertyAt(cage_base, index);
}
Object JSObject::RawFastPropertyAt(IsolateRoot isolate,
Object JSObject::RawFastPropertyAt(PtrComprCageBase cage_base,
FieldIndex index) const {
if (index.is_inobject()) {
return TaggedField<Object>::load(isolate, *this, index.offset());
return TaggedField<Object>::load(cage_base, *this, index.offset());
} else {
return property_array(isolate).get(isolate, index.outobject_array_index());
return property_array(cage_base).get(cage_base,
index.outobject_array_index());
}
}
@ -425,7 +427,7 @@ ACCESSORS(JSGlobalObject, native_context, NativeContext, kNativeContextOffset)
ACCESSORS(JSGlobalObject, global_proxy, JSGlobalProxy, kGlobalProxyOffset)
DEF_GETTER(JSGlobalObject, native_context_unchecked, Object) {
return TaggedField<Object, kNativeContextOffset>::load(isolate, *this);
return TaggedField<Object, kNativeContextOffset>::load(cage_base, *this);
}
bool JSMessageObject::DidEnsureSourcePositionsAvailable() const {
@ -461,119 +463,119 @@ SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset)
SMI_ACCESSORS(JSMessageObject, raw_type, kMessageTypeOffset)
DEF_GETTER(JSObject, GetElementsKind, ElementsKind) {
ElementsKind kind = map(isolate).elements_kind();
ElementsKind kind = map(cage_base).elements_kind();
#if VERIFY_HEAP && DEBUG
FixedArrayBase fixed_array = FixedArrayBase::unchecked_cast(
TaggedField<HeapObject, kElementsOffset>::load(isolate, *this));
TaggedField<HeapObject, kElementsOffset>::load(cage_base, *this));
// If a GC was caused while constructing this object, the elements
// pointer may point to a one pointer filler map.
if (ElementsAreSafeToExamine(isolate)) {
Map map = fixed_array.map(isolate);
if (ElementsAreSafeToExamine(cage_base)) {
Map map = fixed_array.map(cage_base);
if (IsSmiOrObjectElementsKind(kind)) {
DCHECK(map == GetReadOnlyRoots(isolate).fixed_array_map() ||
map == GetReadOnlyRoots(isolate).fixed_cow_array_map());
DCHECK(map == GetReadOnlyRoots(cage_base).fixed_array_map() ||
map == GetReadOnlyRoots(cage_base).fixed_cow_array_map());
} else if (IsDoubleElementsKind(kind)) {
DCHECK(fixed_array.IsFixedDoubleArray(isolate) ||
fixed_array == GetReadOnlyRoots(isolate).empty_fixed_array());
DCHECK(fixed_array.IsFixedDoubleArray(cage_base) ||
fixed_array == GetReadOnlyRoots(cage_base).empty_fixed_array());
} else if (kind == DICTIONARY_ELEMENTS) {
DCHECK(fixed_array.IsFixedArray(isolate));
DCHECK(fixed_array.IsNumberDictionary(isolate));
DCHECK(fixed_array.IsFixedArray(cage_base));
DCHECK(fixed_array.IsNumberDictionary(cage_base));
} else {
DCHECK(kind > DICTIONARY_ELEMENTS ||
IsAnyNonextensibleElementsKind(kind));
}
DCHECK(!IsSloppyArgumentsElementsKind(kind) ||
elements(isolate).IsSloppyArgumentsElements());
elements(cage_base).IsSloppyArgumentsElements());
}
#endif
return kind;
}
DEF_GETTER(JSObject, GetElementsAccessor, ElementsAccessor*) {
return ElementsAccessor::ForKind(GetElementsKind(isolate));
return ElementsAccessor::ForKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasObjectElements, bool) {
return IsObjectElementsKind(GetElementsKind(isolate));
return IsObjectElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasSmiElements, bool) {
return IsSmiElementsKind(GetElementsKind(isolate));
return IsSmiElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasSmiOrObjectElements, bool) {
return IsSmiOrObjectElementsKind(GetElementsKind(isolate));
return IsSmiOrObjectElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasDoubleElements, bool) {
return IsDoubleElementsKind(GetElementsKind(isolate));
return IsDoubleElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasHoleyElements, bool) {
return IsHoleyElementsKind(GetElementsKind(isolate));
return IsHoleyElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasFastElements, bool) {
return IsFastElementsKind(GetElementsKind(isolate));
return IsFastElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasFastPackedElements, bool) {
return IsFastPackedElementsKind(GetElementsKind(isolate));
return IsFastPackedElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasDictionaryElements, bool) {
return IsDictionaryElementsKind(GetElementsKind(isolate));
return IsDictionaryElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasPackedElements, bool) {
return GetElementsKind(isolate) == PACKED_ELEMENTS;
return GetElementsKind(cage_base) == PACKED_ELEMENTS;
}
DEF_GETTER(JSObject, HasAnyNonextensibleElements, bool) {
return IsAnyNonextensibleElementsKind(GetElementsKind(isolate));
return IsAnyNonextensibleElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasSealedElements, bool) {
return IsSealedElementsKind(GetElementsKind(isolate));
return IsSealedElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasNonextensibleElements, bool) {
return IsNonextensibleElementsKind(GetElementsKind(isolate));
return IsNonextensibleElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasFastArgumentsElements, bool) {
return IsFastArgumentsElementsKind(GetElementsKind(isolate));
return IsFastArgumentsElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasSlowArgumentsElements, bool) {
return IsSlowArgumentsElementsKind(GetElementsKind(isolate));
return IsSlowArgumentsElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasSloppyArgumentsElements, bool) {
return IsSloppyArgumentsElementsKind(GetElementsKind(isolate));
return IsSloppyArgumentsElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasStringWrapperElements, bool) {
return IsStringWrapperElementsKind(GetElementsKind(isolate));
return IsStringWrapperElementsKind(GetElementsKind(cage_base));
}
DEF_GETTER(JSObject, HasFastStringWrapperElements, bool) {
return GetElementsKind(isolate) == FAST_STRING_WRAPPER_ELEMENTS;
return GetElementsKind(cage_base) == FAST_STRING_WRAPPER_ELEMENTS;
}
DEF_GETTER(JSObject, HasSlowStringWrapperElements, bool) {
return GetElementsKind(isolate) == SLOW_STRING_WRAPPER_ELEMENTS;
return GetElementsKind(cage_base) == SLOW_STRING_WRAPPER_ELEMENTS;
}
DEF_GETTER(JSObject, HasTypedArrayElements, bool) {
DCHECK(!elements(isolate).is_null());
return map(isolate).has_typed_array_elements();
DCHECK(!elements(cage_base).is_null());
return map(cage_base).has_typed_array_elements();
}
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
DEF_GETTER(JSObject, HasFixed##Type##Elements, bool) { \
return map(isolate).elements_kind() == TYPE##_ELEMENTS; \
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
DEF_GETTER(JSObject, HasFixed##Type##Elements, bool) { \
return map(cage_base).elements_kind() == TYPE##_ELEMENTS; \
}
TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
@ -581,21 +583,21 @@ TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
#undef FIXED_TYPED_ELEMENTS_CHECK
DEF_GETTER(JSObject, HasNamedInterceptor, bool) {
return map(isolate).has_named_interceptor();
return map(cage_base).has_named_interceptor();
}
DEF_GETTER(JSObject, HasIndexedInterceptor, bool) {
return map(isolate).has_indexed_interceptor();
return map(cage_base).has_indexed_interceptor();
}
RELEASE_ACQUIRE_ACCESSORS_CHECKED2(JSGlobalObject, global_dictionary,
GlobalDictionary, kPropertiesOrHashOffset,
!HasFastProperties(isolate), true)
!HasFastProperties(cage_base), true)
DEF_GETTER(JSObject, element_dictionary, NumberDictionary) {
DCHECK(HasDictionaryElements(isolate) ||
HasSlowStringWrapperElements(isolate));
return NumberDictionary::cast(elements(isolate));
DCHECK(HasDictionaryElements(cage_base) ||
HasSlowStringWrapperElements(cage_base));
return NumberDictionary::cast(elements(cage_base));
}
void JSReceiver::initialize_properties(Isolate* isolate) {
@ -617,38 +619,34 @@ void JSReceiver::initialize_properties(Isolate* isolate) {
}
DEF_GETTER(JSReceiver, HasFastProperties, bool) {
DCHECK(raw_properties_or_hash(isolate).IsSmi() ||
((raw_properties_or_hash(isolate).IsGlobalDictionary(isolate) ||
raw_properties_or_hash(isolate).IsNameDictionary(isolate) ||
raw_properties_or_hash(isolate).IsSwissNameDictionary(isolate)) ==
map(isolate).is_dictionary_map()));
return !map(isolate).is_dictionary_map();
DCHECK(raw_properties_or_hash(cage_base).IsSmi() ||
((raw_properties_or_hash(cage_base).IsGlobalDictionary(cage_base) ||
raw_properties_or_hash(cage_base).IsNameDictionary(cage_base) ||
raw_properties_or_hash(cage_base).IsSwissNameDictionary(
cage_base)) == map(cage_base).is_dictionary_map()));
return !map(cage_base).is_dictionary_map();
}
DEF_GETTER(JSReceiver, property_dictionary, NameDictionary) {
DCHECK(!IsJSGlobalObject(isolate));
DCHECK(!HasFastProperties(isolate));
DCHECK(!IsJSGlobalObject(cage_base));
DCHECK(!HasFastProperties(cage_base));
DCHECK(!V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL);
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
Object prop = raw_properties_or_hash(isolate);
Object prop = raw_properties_or_hash(cage_base);
if (prop.IsSmi()) {
return GetReadOnlyRoots(isolate).empty_property_dictionary();
return GetReadOnlyRoots(cage_base).empty_property_dictionary();
}
return NameDictionary::cast(prop);
}
DEF_GETTER(JSReceiver, property_dictionary_swiss, SwissNameDictionary) {
DCHECK(!IsJSGlobalObject(isolate));
DCHECK(!HasFastProperties(isolate));
DCHECK(!IsJSGlobalObject(cage_base));
DCHECK(!HasFastProperties(cage_base));
DCHECK(V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL);
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
Object prop = raw_properties_or_hash(isolate);
Object prop = raw_properties_or_hash(cage_base);
if (prop.IsSmi()) {
return GetReadOnlyRoots(isolate).empty_swiss_property_dictionary();
return GetReadOnlyRoots(cage_base).empty_swiss_property_dictionary();
}
return SwissNameDictionary::cast(prop);
}
@ -656,12 +654,10 @@ DEF_GETTER(JSReceiver, property_dictionary_swiss, SwissNameDictionary) {
// TODO(gsathya): Pass isolate directly to this function and access
// the heap from this.
DEF_GETTER(JSReceiver, property_array, PropertyArray) {
DCHECK(HasFastProperties(isolate));
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
Object prop = raw_properties_or_hash(isolate);
if (prop.IsSmi() || prop == GetReadOnlyRoots(isolate).empty_fixed_array()) {
return GetReadOnlyRoots(isolate).empty_property_array();
DCHECK(HasFastProperties(cage_base));
Object prop = raw_properties_or_hash(cage_base);
if (prop.IsSmi() || prop == GetReadOnlyRoots(cage_base).empty_fixed_array()) {
return GetReadOnlyRoots(cage_base).empty_property_array();
}
return PropertyArray::cast(prop);
}

View File

@ -319,7 +319,7 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
// acquire/release semantics ever become necessary, the default setter should
// be reverted to non-atomic behavior, and setters with explicit tags
// introduced and used when required.
FixedArrayBase elements(IsolateRoot isolate,
FixedArrayBase elements(PtrComprCageBase cage_base,
AcquireLoadTag tag) const = delete;
void set_elements(FixedArrayBase value, ReleaseStoreTag tag,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete;
@ -652,7 +652,8 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
Representation representation,
FieldIndex index);
inline Object RawFastPropertyAt(FieldIndex index) const;
inline Object RawFastPropertyAt(IsolateRoot isolate, FieldIndex index) const;
inline Object RawFastPropertyAt(PtrComprCageBase cage_base,
FieldIndex index) const;
inline void FastPropertyAtPut(FieldIndex index, Object value,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
@ -742,7 +743,8 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
// If a GC was caused while constructing this object, the elements pointer
// may point to a one pointer filler map. The object won't be rooted, but
// our heap verification code could stumble across it.
V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(IsolateRoot isolate) const;
V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(
PtrComprCageBase cage_base) const;
#endif
Object SlowReverseLookup(Object value);

View File

@ -29,26 +29,26 @@ SMI_ACCESSORS(ObjectBoilerplateDescription, flags,
FixedArray::OffsetOfElementAt(kLiteralTypeOffset))
Object ObjectBoilerplateDescription::name(int index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return name(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return name(cage_base, index);
}
Object ObjectBoilerplateDescription::name(IsolateRoot isolate,
Object ObjectBoilerplateDescription::name(PtrComprCageBase cage_base,
int index) const {
// get() already checks for out of bounds access, but we do not want to allow
// access to the last element, if it is the number of properties.
DCHECK_NE(size(), index);
return get(isolate, 2 * index + kDescriptionStartIndex);
return get(cage_base, 2 * index + kDescriptionStartIndex);
}
Object ObjectBoilerplateDescription::value(int index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return value(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return value(cage_base, index);
}
Object ObjectBoilerplateDescription::value(IsolateRoot isolate,
Object ObjectBoilerplateDescription::value(PtrComprCageBase cage_base,
int index) const {
return get(isolate, 2 * index + 1 + kDescriptionStartIndex);
return get(cage_base, 2 * index + 1 + kDescriptionStartIndex);
}
void ObjectBoilerplateDescription::set_key_value(int index, Object key,

View File

@ -28,10 +28,10 @@ class ClassLiteral;
class ObjectBoilerplateDescription : public FixedArray {
public:
inline Object name(int index) const;
inline Object name(IsolateRoot isolate, int index) const;
inline Object name(PtrComprCageBase cage_base, int index) const;
inline Object value(int index) const;
inline Object value(IsolateRoot isolate, int index) const;
inline Object value(PtrComprCageBase cage_base, int index) const;
inline void set_key_value(int index, Object key, Object value);

View File

@ -107,14 +107,14 @@ BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
DEF_GETTER(Map, GetNamedInterceptor, InterceptorInfo) {
DCHECK(has_named_interceptor());
FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate);
return InterceptorInfo::cast(info.GetNamedPropertyHandler(isolate));
FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
return InterceptorInfo::cast(info.GetNamedPropertyHandler(cage_base));
}
DEF_GETTER(Map, GetIndexedInterceptor, InterceptorInfo) {
DCHECK(has_indexed_interceptor());
FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate);
return InterceptorInfo::cast(info.GetIndexedPropertyHandler(isolate));
FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
return InterceptorInfo::cast(info.GetIndexedPropertyHandler(cage_base));
}
bool Map::IsMostGeneralFieldType(Representation representation,
@ -657,19 +657,18 @@ void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
#endif
}
bool Map::ConcurrentIsMap(IsolateRoot isolate, const Object& object) const {
return object.IsHeapObject() && HeapObject::cast(object).map(isolate) ==
GetReadOnlyRoots(isolate).meta_map();
bool Map::ConcurrentIsMap(PtrComprCageBase cage_base,
const Object& object) const {
return object.IsHeapObject() && HeapObject::cast(object).map(cage_base) ==
GetReadOnlyRoots(cage_base).meta_map();
}
DEF_GETTER(Map, GetBackPointer, HeapObject) {
Object object = constructor_or_back_pointer(isolate);
if (ConcurrentIsMap(isolate, object)) {
Object object = constructor_or_back_pointer(cage_base);
if (ConcurrentIsMap(cage_base, object)) {
return Map::cast(object);
}
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
return GetReadOnlyRoots(isolate).undefined_value();
return GetReadOnlyRoots(cage_base).undefined_value();
}
void Map::SetBackPointer(HeapObject value, WriteBarrierMode mode) {
@ -709,11 +708,11 @@ bool Map::IsPrototypeValidityCellValid() const {
}
DEF_GETTER(Map, GetConstructor, Object) {
Object maybe_constructor = constructor_or_back_pointer(isolate);
Object maybe_constructor = constructor_or_back_pointer(cage_base);
// Follow any back pointers.
while (ConcurrentIsMap(isolate, maybe_constructor)) {
while (ConcurrentIsMap(cage_base, maybe_constructor)) {
maybe_constructor =
Map::cast(maybe_constructor).constructor_or_back_pointer(isolate);
Map::cast(maybe_constructor).constructor_or_back_pointer(cage_base);
}
return maybe_constructor;
}
@ -730,13 +729,13 @@ Object Map::TryGetConstructor(Isolate* isolate, int max_steps) {
}
DEF_GETTER(Map, GetFunctionTemplateInfo, FunctionTemplateInfo) {
Object constructor = GetConstructor(isolate);
if (constructor.IsJSFunction(isolate)) {
Object constructor = GetConstructor(cage_base);
if (constructor.IsJSFunction(cage_base)) {
// TODO(ishell): IsApiFunction(isolate) and get_api_func_data(isolate)
DCHECK(JSFunction::cast(constructor).shared(isolate).IsApiFunction());
return JSFunction::cast(constructor).shared(isolate).get_api_func_data();
DCHECK(JSFunction::cast(constructor).shared(cage_base).IsApiFunction());
return JSFunction::cast(constructor).shared(cage_base).get_api_func_data();
}
DCHECK(constructor.IsFunctionTemplateInfo(isolate));
DCHECK(constructor.IsFunctionTemplateInfo(cage_base));
return FunctionTemplateInfo::cast(constructor);
}
@ -791,7 +790,7 @@ int NormalizedMapCache::GetIndex(Handle<Map> map) {
}
DEF_GETTER(HeapObject, IsNormalizedMapCache, bool) {
if (!IsWeakFixedArray(isolate)) return false;
if (!IsWeakFixedArray(cage_base)) return false;
if (WeakFixedArray::cast(*this).length() != NormalizedMapCache::kEntries) {
return false;
}

View File

@ -943,7 +943,7 @@ class Map : public HeapObject {
// This is the equivalent of IsMap() but avoids reading the instance type so
// it can be used concurrently without acquire load.
V8_INLINE bool ConcurrentIsMap(IsolateRoot isolate,
V8_INLINE bool ConcurrentIsMap(PtrComprCageBase cage_base,
const Object& object) const;
// Use the high-level instance_descriptors/SetInstanceDescriptors instead.
@ -976,7 +976,8 @@ class NormalizedMapCache : public WeakFixedArray {
DECL_VERIFIER(NormalizedMapCache)
private:
friend bool HeapObject::IsNormalizedMapCache(IsolateRoot isolate) const;
friend bool HeapObject::IsNormalizedMapCache(
PtrComprCageBase cage_base) const;
static const int kEntries = 64;

View File

@ -78,13 +78,14 @@ HeapObjectReference HeapObjectReference::From(Object object,
}
// static
HeapObjectReference HeapObjectReference::ClearedValue(IsolateRoot isolate) {
HeapObjectReference HeapObjectReference::ClearedValue(
PtrComprCageBase cage_base) {
// Construct cleared weak ref value.
#ifdef V8_COMPRESS_POINTERS
// This is necessary to make pointer decompression computation also
// suitable for cleared weak references.
Address raw_value =
DecompressTaggedPointer(isolate, kClearedWeakHeapObjectLower32);
DecompressTaggedPointer(cage_base, kClearedWeakHeapObjectLower32);
#else
Address raw_value = kClearedWeakHeapObjectLower32;
#endif

View File

@ -54,7 +54,7 @@ class HeapObjectReference : public MaybeObject {
V8_INLINE static HeapObjectReference From(Object object,
HeapObjectReferenceType type);
V8_INLINE static HeapObjectReference ClearedValue(IsolateRoot isolate);
V8_INLINE static HeapObjectReference ClearedValue(PtrComprCageBase cage_base);
template <typename THeapObjectSlot>
V8_INLINE static void Update(THeapObjectSlot slot, HeapObject value);

View File

@ -56,7 +56,7 @@ void Symbol::set_is_private_name() {
}
DEF_GETTER(Name, IsUniqueName, bool) {
uint32_t type = map(isolate).instance_type();
uint32_t type = map(cage_base).instance_type();
bool result = (type & (kIsNotStringMask | kIsNotInternalizedMask)) !=
(kStringTag | kNotInternalizedTag);
SLOW_DCHECK(result == HeapObject::IsUniqueName());
@ -104,23 +104,23 @@ uint32_t Name::hash() const {
}
DEF_GETTER(Name, IsInterestingSymbol, bool) {
return IsSymbol(isolate) && Symbol::cast(*this).is_interesting_symbol();
return IsSymbol(cage_base) && Symbol::cast(*this).is_interesting_symbol();
}
DEF_GETTER(Name, IsPrivate, bool) {
return this->IsSymbol(isolate) && Symbol::cast(*this).is_private();
return this->IsSymbol(cage_base) && Symbol::cast(*this).is_private();
}
DEF_GETTER(Name, IsPrivateName, bool) {
bool is_private_name =
this->IsSymbol(isolate) && Symbol::cast(*this).is_private_name();
this->IsSymbol(cage_base) && Symbol::cast(*this).is_private_name();
DCHECK_IMPLIES(is_private_name, IsPrivate());
return is_private_name;
}
DEF_GETTER(Name, IsPrivateBrand, bool) {
bool is_private_brand =
this->IsSymbol(isolate) && Symbol::cast(*this).is_private_brand();
this->IsSymbol(cage_base) && Symbol::cast(*this).is_private_brand();
DCHECK_IMPLIES(is_private_brand, IsPrivateName());
return is_private_brand;
}

View File

@ -86,14 +86,14 @@
// parameter.
#define DECL_GETTER(name, type) \
inline type name() const; \
inline type name(IsolateRoot isolate) const;
inline type name(PtrComprCageBase cage_base) const;
#define DEF_GETTER(holder, name, type) \
type holder::name() const { \
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate); \
} \
type holder::name(IsolateRoot isolate) const
#define DEF_GETTER(holder, name, type) \
type holder::name() const { \
PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
return holder::name(cage_base); \
} \
type holder::name(PtrComprCageBase cage_base) const
#define DECL_SETTER(name, type) \
inline void set_##name(type value, \
@ -105,7 +105,7 @@
#define DECL_ACCESSORS_LOAD_TAG(name, type, tag_type) \
inline type name(tag_type tag) const; \
inline type name(IsolateRoot isolate, tag_type) const;
inline type name(PtrComprCageBase cage_base, tag_type) const;
#define DECL_ACCESSORS_STORE_TAG(name, type, tag_type) \
inline void set_##name(type value, tag_type, \
@ -179,7 +179,7 @@
#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
set_condition) \
DEF_GETTER(holder, name, type) { \
type value = TaggedField<type, offset>::load(isolate, *this); \
type value = TaggedField<type, offset>::load(cage_base, *this); \
DCHECK(get_condition); \
return value; \
} \
@ -215,11 +215,11 @@
#define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
set_condition) \
type holder::name(RelaxedLoadTag tag) const { \
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate, tag); \
PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
return holder::name(cage_base, tag); \
} \
type holder::name(IsolateRoot isolate, RelaxedLoadTag) const { \
type value = TaggedField<type, offset>::Relaxed_Load(isolate, *this); \
type holder::name(PtrComprCageBase cage_base, RelaxedLoadTag) const { \
type value = TaggedField<type, offset>::Relaxed_Load(cage_base, *this); \
DCHECK(get_condition); \
return value; \
} \
@ -236,22 +236,22 @@
#define RELAXED_ACCESSORS(holder, name, type, offset) \
RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, true)
#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \
get_condition, set_condition) \
type holder::name(AcquireLoadTag tag) const { \
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate, tag); \
} \
type holder::name(IsolateRoot isolate, AcquireLoadTag) const { \
type value = TaggedField<type, offset>::Acquire_Load(isolate, *this); \
DCHECK(get_condition); \
return value; \
} \
void holder::set_##name(type value, ReleaseStoreTag, \
WriteBarrierMode mode) { \
DCHECK(set_condition); \
TaggedField<type, offset>::Release_Store(*this, value); \
CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \
get_condition, set_condition) \
type holder::name(AcquireLoadTag tag) const { \
PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
return holder::name(cage_base, tag); \
} \
type holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const { \
type value = TaggedField<type, offset>::Acquire_Load(cage_base, *this); \
DCHECK(get_condition); \
return value; \
} \
void holder::set_##name(type value, ReleaseStoreTag, \
WriteBarrierMode mode) { \
DCHECK(set_condition); \
TaggedField<type, offset>::Release_Store(*this, value); \
CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
}
#define RELEASE_ACQUIRE_ACCESSORS_CHECKED(holder, name, type, offset, \
@ -266,7 +266,7 @@
set_condition) \
DEF_GETTER(holder, name, MaybeObject) { \
MaybeObject value = \
TaggedField<MaybeObject, offset>::load(isolate, *this); \
TaggedField<MaybeObject, offset>::load(cage_base, *this); \
DCHECK(get_condition); \
return value; \
} \
@ -282,23 +282,23 @@
#define WEAK_ACCESSORS(holder, name, offset) \
WEAK_ACCESSORS_CHECKED(holder, name, offset, true)
#define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED2(holder, name, offset, \
get_condition, set_condition) \
MaybeObject holder::name(AcquireLoadTag tag) const { \
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate, tag); \
} \
MaybeObject holder::name(IsolateRoot isolate, AcquireLoadTag) const { \
MaybeObject value = \
TaggedField<MaybeObject, offset>::Acquire_Load(isolate, *this); \
DCHECK(get_condition); \
return value; \
} \
void holder::set_##name(MaybeObject value, ReleaseStoreTag, \
WriteBarrierMode mode) { \
DCHECK(set_condition); \
TaggedField<MaybeObject, offset>::Release_Store(*this, value); \
CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode); \
#define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED2(holder, name, offset, \
get_condition, set_condition) \
MaybeObject holder::name(AcquireLoadTag tag) const { \
PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
return holder::name(cage_base, tag); \
} \
MaybeObject holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const { \
MaybeObject value = \
TaggedField<MaybeObject, offset>::Acquire_Load(cage_base, *this); \
DCHECK(get_condition); \
return value; \
} \
void holder::set_##name(MaybeObject value, ReleaseStoreTag, \
WriteBarrierMode mode) { \
DCHECK(set_condition); \
TaggedField<MaybeObject, offset>::Release_Store(*this, value); \
CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode); \
}
#define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED(holder, name, offset, \
@ -380,9 +380,9 @@
return instance_type == forinstancetype; \
}
#define TYPE_CHECKER(type, ...) \
DEF_GETTER(HeapObject, Is##type, bool) { \
return InstanceTypeChecker::Is##type(map(isolate).instance_type()); \
#define TYPE_CHECKER(type, ...) \
DEF_GETTER(HeapObject, Is##type, bool) { \
return InstanceTypeChecker::Is##type(map(cage_base).instance_type()); \
}
#define RELAXED_INT16_ACCESSORS(holder, name, offset) \

View File

@ -65,19 +65,19 @@ int PropertyDetails::field_width_in_words() const {
}
DEF_GETTER(HeapObject, IsClassBoilerplate, bool) {
return IsFixedArrayExact(isolate);
return IsFixedArrayExact(cage_base);
}
bool Object::IsTaggedIndex() const {
return IsSmi() && TaggedIndex::IsValid(TaggedIndex(ptr()).value());
}
#define IS_TYPE_FUNCTION_DEF(type_) \
bool Object::Is##type_() const { \
return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \
} \
bool Object::Is##type_(IsolateRoot isolate) const { \
return IsHeapObject() && HeapObject::cast(*this).Is##type_(isolate); \
#define IS_TYPE_FUNCTION_DEF(type_) \
bool Object::Is##type_() const { \
return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \
} \
bool Object::Is##type_(PtrComprCageBase cage_base) const { \
return IsHeapObject() && HeapObject::cast(*this).Is##type_(cage_base); \
}
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
IS_TYPE_FUNCTION_DEF(HashTableBase)
@ -148,127 +148,125 @@ bool HeapObject::IsNullOrUndefined() const {
}
DEF_GETTER(HeapObject, IsUniqueName, bool) {
return IsInternalizedString(isolate) || IsSymbol(isolate);
return IsInternalizedString(cage_base) || IsSymbol(cage_base);
}
DEF_GETTER(HeapObject, IsFunction, bool) {
return IsJSFunctionOrBoundFunction();
}
DEF_GETTER(HeapObject, IsCallable, bool) { return map(isolate).is_callable(); }
DEF_GETTER(HeapObject, IsCallable, bool) {
return map(cage_base).is_callable();
}
DEF_GETTER(HeapObject, IsCallableJSProxy, bool) {
return IsCallable(isolate) && IsJSProxy(isolate);
return IsCallable(cage_base) && IsJSProxy(cage_base);
}
DEF_GETTER(HeapObject, IsCallableApiObject, bool) {
InstanceType type = map(isolate).instance_type();
return IsCallable(isolate) &&
InstanceType type = map(cage_base).instance_type();
return IsCallable(cage_base) &&
(type == JS_API_OBJECT_TYPE || type == JS_SPECIAL_API_OBJECT_TYPE);
}
DEF_GETTER(HeapObject, IsNonNullForeign, bool) {
return IsForeign(isolate) &&
return IsForeign(cage_base) &&
Foreign::cast(*this).foreign_address() != kNullAddress;
}
DEF_GETTER(HeapObject, IsConstructor, bool) {
return map(isolate).is_constructor();
return map(cage_base).is_constructor();
}
DEF_GETTER(HeapObject, IsSourceTextModuleInfo, bool) {
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
return map(isolate) == GetReadOnlyRoots(isolate).module_info_map();
return map(cage_base) == GetReadOnlyRoots(cage_base).module_info_map();
}
DEF_GETTER(HeapObject, IsConsString, bool) {
if (!IsString(isolate)) return false;
return StringShape(String::cast(*this).map(isolate)).IsCons();
if (!IsString(cage_base)) return false;
return StringShape(String::cast(*this).map(cage_base)).IsCons();
}
DEF_GETTER(HeapObject, IsThinString, bool) {
if (!IsString(isolate)) return false;
return StringShape(String::cast(*this).map(isolate)).IsThin();
if (!IsString(cage_base)) return false;
return StringShape(String::cast(*this).map(cage_base)).IsThin();
}
DEF_GETTER(HeapObject, IsSlicedString, bool) {
if (!IsString(isolate)) return false;
return StringShape(String::cast(*this).map(isolate)).IsSliced();
if (!IsString(cage_base)) return false;
return StringShape(String::cast(*this).map(cage_base)).IsSliced();
}
DEF_GETTER(HeapObject, IsSeqString, bool) {
if (!IsString(isolate)) return false;
return StringShape(String::cast(*this).map(isolate)).IsSequential();
if (!IsString(cage_base)) return false;
return StringShape(String::cast(*this).map(cage_base)).IsSequential();
}
DEF_GETTER(HeapObject, IsSeqOneByteString, bool) {
if (!IsString(isolate)) return false;
return StringShape(String::cast(*this).map(isolate)).IsSequential() &&
String::cast(*this).IsOneByteRepresentation(isolate);
if (!IsString(cage_base)) return false;
return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
String::cast(*this).IsOneByteRepresentation(cage_base);
}
DEF_GETTER(HeapObject, IsSeqTwoByteString, bool) {
if (!IsString(isolate)) return false;
return StringShape(String::cast(*this).map(isolate)).IsSequential() &&
String::cast(*this).IsTwoByteRepresentation(isolate);
if (!IsString(cage_base)) return false;
return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
String::cast(*this).IsTwoByteRepresentation(cage_base);
}
DEF_GETTER(HeapObject, IsExternalOneByteString, bool) {
if (!IsString(isolate)) return false;
return StringShape(String::cast(*this).map(isolate)).IsExternal() &&
String::cast(*this).IsOneByteRepresentation(isolate);
if (!IsString(cage_base)) return false;
return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
String::cast(*this).IsOneByteRepresentation(cage_base);
}
DEF_GETTER(HeapObject, IsExternalTwoByteString, bool) {
if (!IsString(isolate)) return false;
return StringShape(String::cast(*this).map(isolate)).IsExternal() &&
String::cast(*this).IsTwoByteRepresentation(isolate);
if (!IsString(cage_base)) return false;
return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
String::cast(*this).IsTwoByteRepresentation(cage_base);
}
bool Object::IsNumber() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
return this_heap_object.IsHeapNumber(isolate);
PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
return this_heap_object.IsHeapNumber(cage_base);
}
bool Object::IsNumber(IsolateRoot isolate) const {
return IsSmi() || IsHeapNumber(isolate);
bool Object::IsNumber(PtrComprCageBase cage_base) const {
return IsSmi() || IsHeapNumber(cage_base);
}
bool Object::IsNumeric() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
return this_heap_object.IsHeapNumber(isolate) ||
this_heap_object.IsBigInt(isolate);
PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
return this_heap_object.IsHeapNumber(cage_base) ||
this_heap_object.IsBigInt(cage_base);
}
bool Object::IsNumeric(IsolateRoot isolate) const {
return IsNumber(isolate) || IsBigInt(isolate);
bool Object::IsNumeric(PtrComprCageBase cage_base) const {
return IsNumber(cage_base) || IsBigInt(cage_base);
}
DEF_GETTER(HeapObject, IsFreeSpaceOrFiller, bool) {
InstanceType instance_type = map(isolate).instance_type();
InstanceType instance_type = map(cage_base).instance_type();
return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}
DEF_GETTER(HeapObject, IsArrayList, bool) {
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
// i::GetIsolateForPtrCompr(HeapObject).
ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
return *this == roots.empty_fixed_array() ||
map(isolate) == roots.array_list_map();
map(cage_base) == roots.array_list_map();
}
DEF_GETTER(HeapObject, IsRegExpMatchInfo, bool) {
return IsFixedArrayExact(isolate);
return IsFixedArrayExact(cage_base);
}
DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
// Must be a fixed array.
if (!IsFixedArrayExact(isolate)) return false;
if (!IsFixedArrayExact(cage_base)) return false;
// There's no sure way to detect the difference between a fixed array and
// a deoptimization data array. Since this is used for asserts we can
@ -282,14 +280,14 @@ DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
}
DEF_GETTER(HeapObject, IsHandlerTable, bool) {
if (!IsFixedArrayExact(isolate)) return false;
if (!IsFixedArrayExact(cage_base)) return false;
// There's actually no way to see the difference between a fixed array and
// a handler table array.
return true;
}
DEF_GETTER(HeapObject, IsTemplateList, bool) {
if (!IsFixedArrayExact(isolate)) return false;
if (!IsFixedArrayExact(cage_base)) return false;
// There's actually no way to see the difference between a fixed array and
// a template list.
if (FixedArray::cast(*this).length() < 1) return false;
@ -297,84 +295,86 @@ DEF_GETTER(HeapObject, IsTemplateList, bool) {
}
DEF_GETTER(HeapObject, IsDependentCode, bool) {
if (!IsWeakFixedArray(isolate)) return false;
if (!IsWeakFixedArray(cage_base)) return false;
// There's actually no way to see the difference between a weak fixed array
// and a dependent codes array.
return true;
}
DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
if (!IsWeakFixedArray(isolate)) return false;
if (!IsWeakFixedArray(cage_base)) return false;
// There's actually no way to see the difference between a weak fixed array
// and a osr optimized code cache.
return true;
}
DEF_GETTER(HeapObject, IsAbstractCode, bool) {
return IsBytecodeArray(isolate) || IsCode(isolate);
return IsBytecodeArray(cage_base) || IsCode(cage_base);
}
DEF_GETTER(HeapObject, IsStringWrapper, bool) {
return IsJSPrimitiveWrapper(isolate) &&
JSPrimitiveWrapper::cast(*this).value().IsString(isolate);
return IsJSPrimitiveWrapper(cage_base) &&
JSPrimitiveWrapper::cast(*this).value().IsString(cage_base);
}
DEF_GETTER(HeapObject, IsBooleanWrapper, bool) {
return IsJSPrimitiveWrapper(isolate) &&
JSPrimitiveWrapper::cast(*this).value().IsBoolean(isolate);
return IsJSPrimitiveWrapper(cage_base) &&
JSPrimitiveWrapper::cast(*this).value().IsBoolean(cage_base);
}
DEF_GETTER(HeapObject, IsScriptWrapper, bool) {
return IsJSPrimitiveWrapper(isolate) &&
JSPrimitiveWrapper::cast(*this).value().IsScript(isolate);
return IsJSPrimitiveWrapper(cage_base) &&
JSPrimitiveWrapper::cast(*this).value().IsScript(cage_base);
}
DEF_GETTER(HeapObject, IsNumberWrapper, bool) {
return IsJSPrimitiveWrapper(isolate) &&
JSPrimitiveWrapper::cast(*this).value().IsNumber(isolate);
return IsJSPrimitiveWrapper(cage_base) &&
JSPrimitiveWrapper::cast(*this).value().IsNumber(cage_base);
}
DEF_GETTER(HeapObject, IsBigIntWrapper, bool) {
return IsJSPrimitiveWrapper(isolate) &&
JSPrimitiveWrapper::cast(*this).value().IsBigInt(isolate);
return IsJSPrimitiveWrapper(cage_base) &&
JSPrimitiveWrapper::cast(*this).value().IsBigInt(cage_base);
}
DEF_GETTER(HeapObject, IsSymbolWrapper, bool) {
return IsJSPrimitiveWrapper(isolate) &&
JSPrimitiveWrapper::cast(*this).value().IsSymbol(isolate);
return IsJSPrimitiveWrapper(cage_base) &&
JSPrimitiveWrapper::cast(*this).value().IsSymbol(cage_base);
}
DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(isolate); }
DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(cage_base); }
DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(isolate); }
DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(cage_base); }
DEF_GETTER(HeapObject, IsCompilationCacheTable, bool) {
return IsHashTable(isolate);
return IsHashTable(cage_base);
}
DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(isolate); }
DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(cage_base); }
DEF_GETTER(HeapObject, IsObjectHashTable, bool) { return IsHashTable(isolate); }
DEF_GETTER(HeapObject, IsObjectHashTable, bool) {
return IsHashTable(cage_base);
}
DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(isolate); }
DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(cage_base); }
#if V8_ENABLE_WEBASSEMBLY
DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) {
// It is not possible to check for the existence of certain properties on the
// underlying {JSReceiver} here because that requires calling handlified code.
return IsJSReceiver(isolate);
return IsJSReceiver(cage_base);
}
#endif // V8_ENABLE_WEBASSEMBLY
bool Object::IsPrimitive() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
return this_heap_object.map(isolate).IsPrimitiveMap();
PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
return this_heap_object.map(cage_base).IsPrimitiveMap();
}
bool Object::IsPrimitive(IsolateRoot isolate) const {
return IsSmi() || HeapObject::cast(*this).map(isolate).IsPrimitiveMap();
bool Object::IsPrimitive(PtrComprCageBase cage_base) const {
return IsSmi() || HeapObject::cast(*this).map(cage_base).IsPrimitiveMap();
}
// static
@ -387,24 +387,24 @@ Maybe<bool> Object::IsArray(Handle<Object> object) {
}
DEF_GETTER(HeapObject, IsUndetectable, bool) {
return map(isolate).is_undetectable();
return map(cage_base).is_undetectable();
}
DEF_GETTER(HeapObject, IsAccessCheckNeeded, bool) {
if (IsJSGlobalProxy(isolate)) {
if (IsJSGlobalProxy(cage_base)) {
const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
JSGlobalObject global = proxy.GetIsolate()->context().global_object();
return proxy.IsDetachedFrom(global);
}
return map(isolate).is_access_check_needed();
return map(cage_base).is_access_check_needed();
}
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
bool Object::Is##Name() const { \
return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \
} \
bool Object::Is##Name(IsolateRoot isolate) const { \
return IsHeapObject() && HeapObject::cast(*this).Is##Name(isolate); \
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
bool Object::Is##Name() const { \
return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \
} \
bool Object::Is##Name(PtrComprCageBase cage_base) const { \
return IsHeapObject() && HeapObject::cast(*this).Is##Name(cage_base); \
}
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
@ -467,17 +467,17 @@ bool Object::FilterKey(PropertyFilter filter) {
return false;
}
Representation Object::OptimalRepresentation(IsolateRoot isolate) const {
Representation Object::OptimalRepresentation(PtrComprCageBase cage_base) const {
if (!FLAG_track_fields) return Representation::Tagged();
if (IsSmi()) {
return Representation::Smi();
}
HeapObject heap_object = HeapObject::cast(*this);
if (FLAG_track_double_fields && heap_object.IsHeapNumber(isolate)) {
if (FLAG_track_double_fields && heap_object.IsHeapNumber(cage_base)) {
return Representation::Double();
} else if (FLAG_track_computed_fields &&
heap_object.IsUninitialized(
heap_object.GetReadOnlyRoots(isolate))) {
heap_object.GetReadOnlyRoots(cage_base))) {
return Representation::None();
} else if (FLAG_track_heap_object_fields) {
return Representation::HeapObject();
@ -486,9 +486,9 @@ Representation Object::OptimalRepresentation(IsolateRoot isolate) const {
}
}
ElementsKind Object::OptimalElementsKind(IsolateRoot isolate) const {
ElementsKind Object::OptimalElementsKind(PtrComprCageBase cage_base) const {
if (IsSmi()) return PACKED_SMI_ELEMENTS;
if (IsNumber(isolate)) return PACKED_DOUBLE_ELEMENTS;
if (IsNumber(cage_base)) return PACKED_DOUBLE_ELEMENTS;
return PACKED_ELEMENTS;
}
@ -631,9 +631,10 @@ void Object::InitExternalPointerField(size_t offset, Isolate* isolate,
i::InitExternalPointerField(field_address(offset), isolate, value, tag);
}
Address Object::ReadExternalPointerField(size_t offset, IsolateRoot isolate,
Address Object::ReadExternalPointerField(size_t offset,
PtrComprCageBase isolate_root,
ExternalPointerTag tag) const {
return i::ReadExternalPointerField(field_address(offset), isolate, tag);
return i::ReadExternalPointerField(field_address(offset), isolate_root, tag);
}
void Object::WriteExternalPointerField(size_t offset, Isolate* isolate,
@ -687,16 +688,16 @@ ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
return ReadOnlyHeap::GetReadOnlyRoots(*this);
}
ReadOnlyRoots HeapObject::GetReadOnlyRoots(IsolateRoot isolate) const {
#ifdef V8_COMPRESS_POINTERS
DCHECK_NE(isolate.address(), 0);
return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address()));
ReadOnlyRoots HeapObject::GetReadOnlyRoots(PtrComprCageBase cage_base) const {
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
DCHECK_NE(cage_base.address(), 0);
return ReadOnlyRoots(Isolate::FromRootAddress(cage_base.address()));
#else
return GetReadOnlyRoots();
#endif
}
DEF_GETTER(HeapObject, map, Map) { return map_word(isolate).ToMap(); }
DEF_GETTER(HeapObject, map, Map) { return map_word(cage_base).ToMap(); }
void HeapObject::set_map(Map value) {
#ifdef VERIFY_HEAP
@ -715,7 +716,7 @@ void HeapObject::set_map(Map value) {
}
DEF_GETTER(HeapObject, synchronized_map, Map) {
return synchronized_map_word(isolate).ToMap();
return synchronized_map_word(cage_base).ToMap();
}
void HeapObject::synchronized_set_map(Map value) {
@ -761,7 +762,7 @@ ObjectSlot HeapObject::map_slot() const {
}
DEF_GETTER(HeapObject, map_word, MapWord) {
return MapField::Relaxed_Load(isolate, *this);
return MapField::Relaxed_Load(cage_base, *this);
}
void HeapObject::set_map_word(MapWord map_word) {
@ -769,7 +770,7 @@ void HeapObject::set_map_word(MapWord map_word) {
}
DEF_GETTER(HeapObject, synchronized_map_word, MapWord) {
return MapField::Acquire_Load(isolate, *this);
return MapField::Acquire_Load(cage_base, *this);
}
void HeapObject::synchronized_set_map_word(MapWord map_word) {

View File

@ -5567,7 +5567,8 @@ Handle<Derived> HashTable<Derived, Shape>::NewInternal(
}
template <typename Derived, typename Shape>
void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate, Derived new_table) {
void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base,
Derived new_table) {
DisallowGarbageCollection no_gc;
WriteBarrierMode mode = new_table.GetWriteBarrierMode(no_gc);
@ -5575,21 +5576,21 @@ void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate, Derived new_table) {
// Copy prefix to new array.
for (int i = kPrefixStartIndex; i < kElementsStartIndex; i++) {
new_table.set(i, get(isolate, i), mode);
new_table.set(i, get(cage_base, i), mode);
}
// Rehash the elements.
ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
for (InternalIndex i : this->IterateEntries()) {
uint32_t from_index = EntryToIndex(i);
Object k = this->get(isolate, from_index);
Object k = this->get(cage_base, from_index);
if (!IsKey(roots, k)) continue;
uint32_t hash = Shape::HashForObject(roots, k);
uint32_t insertion_index =
EntryToIndex(new_table.FindInsertionEntry(isolate, roots, hash));
new_table.set_key(insertion_index, get(isolate, from_index), mode);
EntryToIndex(new_table.FindInsertionEntry(cage_base, roots, hash));
new_table.set_key(insertion_index, get(cage_base, from_index), mode);
for (int j = 1; j < Shape::kEntrySize; j++) {
new_table.set(insertion_index + j, get(isolate, from_index + j), mode);
new_table.set(insertion_index + j, get(cage_base, from_index + j), mode);
}
}
new_table.SetNumberOfElements(NumberOfElements());
@ -5631,10 +5632,10 @@ void HashTable<Derived, Shape>::Swap(InternalIndex entry1, InternalIndex entry2,
}
template <typename Derived, typename Shape>
void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base) {
DisallowGarbageCollection no_gc;
WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
uint32_t capacity = Capacity();
bool done = false;
for (int probe = 1; !done; probe++) {
@ -5643,7 +5644,7 @@ void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
done = true;
for (InternalIndex current(0); current.raw_value() < capacity;
/* {current} is advanced manually below, when appropriate.*/) {
Object current_key = KeyAt(isolate, current);
Object current_key = KeyAt(cage_base, current);
if (!IsKey(roots, current_key)) {
++current; // Advance to next entry.
continue;
@ -5653,7 +5654,7 @@ void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
++current; // Advance to next entry.
continue;
}
Object target_key = KeyAt(isolate, target);
Object target_key = KeyAt(cage_base, target);
if (!IsKey(roots, target_key) ||
EntryForProbe(roots, target_key, probe, target) != target) {
// Put the current element into the correct position.
@ -5673,7 +5674,7 @@ void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
HeapObject undefined = roots.undefined_value();
Derived* self = static_cast<Derived*>(this);
for (InternalIndex current : InternalIndex::Range(capacity)) {
if (KeyAt(isolate, current) == the_hole) {
if (KeyAt(cage_base, current) == the_hole) {
self->set_key(EntryToIndex(current) + kEntryKeyIndex, undefined,
SKIP_WRITE_BARRIER);
}
@ -5764,15 +5765,14 @@ Handle<Derived> HashTable<Derived, Shape>::Shrink(Isolate* isolate,
}
template <typename Derived, typename Shape>
InternalIndex HashTable<Derived, Shape>::FindInsertionEntry(IsolateRoot isolate,
ReadOnlyRoots roots,
uint32_t hash) {
InternalIndex HashTable<Derived, Shape>::FindInsertionEntry(
PtrComprCageBase cage_base, ReadOnlyRoots roots, uint32_t hash) {
uint32_t capacity = Capacity();
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
for (InternalIndex entry = FirstProbe(hash, capacity);;
entry = NextProbe(entry, count++, capacity)) {
if (!IsKey(roots, KeyAt(isolate, entry))) return entry;
if (!IsKey(roots, KeyAt(cage_base, entry))) return entry;
}
}
@ -6080,14 +6080,14 @@ void ObjectHashTableBase<Derived, Shape>::FillEntriesWithHoles(
}
template <typename Derived, typename Shape>
Object ObjectHashTableBase<Derived, Shape>::Lookup(IsolateRoot isolate,
Object ObjectHashTableBase<Derived, Shape>::Lookup(PtrComprCageBase cage_base,
Handle<Object> key,
int32_t hash) {
DisallowGarbageCollection no_gc;
ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate);
ReadOnlyRoots roots = this->GetReadOnlyRoots(cage_base);
DCHECK(this->IsKey(roots, *key));
InternalIndex entry = this->FindEntry(isolate, roots, key, hash);
InternalIndex entry = this->FindEntry(cage_base, roots, key, hash);
if (entry.is_not_found()) return roots.the_hole_value();
return this->get(Derived::EntryToIndex(entry) + 1);
}
@ -6096,8 +6096,8 @@ template <typename Derived, typename Shape>
Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key) {
DisallowGarbageCollection no_gc;
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
ReadOnlyRoots roots = this->GetReadOnlyRoots(cage_base);
DCHECK(this->IsKey(roots, *key));
// If the object does not have an identity hash, it was never used as a key.
@ -6105,13 +6105,13 @@ Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key) {
if (hash.IsUndefined(roots)) {
return roots.the_hole_value();
}
return Lookup(isolate, key, Smi::ToInt(hash));
return Lookup(cage_base, key, Smi::ToInt(hash));
}
template <typename Derived, typename Shape>
Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key,
int32_t hash) {
return Lookup(GetIsolateForPtrCompr(*this), key, hash);
return Lookup(GetPtrComprCageBase(*this), key, hash);
}
template <typename Derived, typename Shape>

View File

@ -279,7 +279,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
#define IS_TYPE_FUNCTION_DECL(Type) \
V8_INLINE bool Is##Type() const; \
V8_INLINE bool Is##Type(IsolateRoot isolate) const;
V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
IS_TYPE_FUNCTION_DECL(HashTableBase)
@ -307,7 +307,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
V8_INLINE bool Is##Name() const; \
V8_INLINE bool Is##Name(IsolateRoot isolate) const;
V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const;
STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE
@ -322,9 +322,9 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
V8_EXPORT_PRIVATE bool ToInt32(int32_t* value);
inline bool ToUint32(uint32_t* value) const;
inline Representation OptimalRepresentation(IsolateRoot isolate) const;
inline Representation OptimalRepresentation(PtrComprCageBase cage_base) const;
inline ElementsKind OptimalElementsKind(IsolateRoot isolate) const;
inline ElementsKind OptimalElementsKind(PtrComprCageBase cage_base) const;
inline bool FitsRepresentation(Representation representation);
@ -673,7 +673,8 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
inline void InitExternalPointerField(size_t offset, Isolate* isolate);
inline void InitExternalPointerField(size_t offset, Isolate* isolate,
Address value, ExternalPointerTag tag);
inline Address ReadExternalPointerField(size_t offset, IsolateRoot isolate,
inline Address ReadExternalPointerField(size_t offset,
PtrComprCageBase isolate_root,
ExternalPointerTag tag) const;
inline void WriteExternalPointerField(size_t offset, Isolate* isolate,
Address value, ExternalPointerTag tag);

View File

@ -37,7 +37,7 @@ Handle<Object> Oddball::ToNumber(Isolate* isolate, Handle<Oddball> input) {
}
DEF_GETTER(HeapObject, IsBoolean, bool) {
return IsOddball(isolate) &&
return IsOddball(cage_base) &&
((Oddball::cast(*this).kind() & Oddball::kNotBooleanMask) == 0);
}

View File

@ -25,14 +25,14 @@ SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset)
SYNCHRONIZED_SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset)
Object PropertyArray::get(int index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return get(cage_base, index);
}
Object PropertyArray::get(IsolateRoot isolate, int index) const {
Object PropertyArray::get(PtrComprCageBase cage_base, int index) const {
DCHECK_LT(static_cast<unsigned>(index),
static_cast<unsigned>(this->length()));
return TaggedField<Object>::Relaxed_Load(isolate, *this,
return TaggedField<Object>::Relaxed_Load(cage_base, *this,
OffsetOfElementAt(index));
}

View File

@ -30,7 +30,7 @@ class PropertyArray : public HeapObject {
inline int Hash() const;
inline Object get(int index) const;
inline Object get(IsolateRoot isolate, int index) const;
inline Object get(PtrComprCageBase cage_base, int index) const;
inline void set(int index, Object value);
// Setter with explicit barrier mode.

View File

@ -75,10 +75,10 @@ Descriptor Descriptor::DataField(Handle<Name> key, int field_index,
Descriptor Descriptor::DataConstant(Handle<Name> key, Handle<Object> value,
PropertyAttributes attributes) {
IsolateRoot isolate = GetIsolateForPtrCompr(*key);
PtrComprCageBase cage_base = GetPtrComprCageBase(*key);
return Descriptor(key, MaybeObjectHandle(value), kData, attributes,
kDescriptor, PropertyConstness::kConst,
value->OptimalRepresentation(isolate), 0);
value->OptimalRepresentation(cage_base), 0);
}
Descriptor Descriptor::DataConstant(Isolate* isolate, Handle<Name> key,

View File

@ -575,13 +575,13 @@ Handle<ScopeInfo> ScopeInfo::CreateForBootstrapping(Isolate* isolate,
}
Object ScopeInfo::get(int index) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return get(cage_base, index);
}
Object ScopeInfo::get(IsolateRoot isolate, int index) const {
Object ScopeInfo::get(PtrComprCageBase cage_base, int index) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return TaggedField<Object>::Relaxed_Load(isolate, *this,
return TaggedField<Object>::Relaxed_Load(cage_base, *this,
OffsetOfElementAt(index));
}

View File

@ -293,7 +293,7 @@ class ScopeInfo : public TorqueGeneratedScopeInfo<ScopeInfo, HeapObject> {
// 'flags', the first field defined by ScopeInfo after the standard-size
// HeapObject header.
V8_EXPORT_PRIVATE Object get(int index) const;
Object get(IsolateRoot isolate, int index) const;
Object get(PtrComprCageBase cage_base, int index) const;
// Setter that doesn't need write barrier.
void set(int index, Smi value);
// Setter with explicit barrier mode.

View File

@ -31,7 +31,7 @@ bool FullObjectSlot::contains_value(Address raw_value) const {
Object FullObjectSlot::operator*() const { return Object(*location()); }
Object FullObjectSlot::load(IsolateRoot isolate) const { return **this; }
Object FullObjectSlot::load(PtrComprCageBase cage_base) const { return **this; }
void FullObjectSlot::store(Object value) const { *location() = value.ptr(); }
@ -39,7 +39,7 @@ Object FullObjectSlot::Acquire_Load() const {
return Object(base::AsAtomicPointer::Acquire_Load(location()));
}
Object FullObjectSlot::Acquire_Load(IsolateRoot isolate) const {
Object FullObjectSlot::Acquire_Load(PtrComprCageBase cage_base) const {
return Acquire_Load();
}
@ -47,7 +47,7 @@ Object FullObjectSlot::Relaxed_Load() const {
return Object(base::AsAtomicPointer::Relaxed_Load(location()));
}
Object FullObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
Object FullObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const {
return Relaxed_Load();
}
@ -79,7 +79,7 @@ MaybeObject FullMaybeObjectSlot::operator*() const {
return MaybeObject(*location());
}
MaybeObject FullMaybeObjectSlot::load(IsolateRoot isolate) const {
MaybeObject FullMaybeObjectSlot::load(PtrComprCageBase cage_base) const {
return **this;
}
@ -91,7 +91,8 @@ MaybeObject FullMaybeObjectSlot::Relaxed_Load() const {
return MaybeObject(base::AsAtomicPointer::Relaxed_Load(location()));
}
MaybeObject FullMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
MaybeObject FullMaybeObjectSlot::Relaxed_Load(
PtrComprCageBase cage_base) const {
return Relaxed_Load();
}
@ -113,7 +114,7 @@ HeapObjectReference FullHeapObjectSlot::operator*() const {
return HeapObjectReference(*location());
}
HeapObjectReference FullHeapObjectSlot::load(IsolateRoot isolate) const {
HeapObjectReference FullHeapObjectSlot::load(PtrComprCageBase cage_base) const {
return **this;
}

View File

@ -110,13 +110,13 @@ class FullObjectSlot : public SlotBase<FullObjectSlot, Address> {
inline bool contains_value(Address raw_value) const;
inline Object operator*() const;
inline Object load(IsolateRoot isolate) const;
inline Object load(PtrComprCageBase cage_base) const;
inline void store(Object value) const;
inline Object Acquire_Load() const;
inline Object Acquire_Load(IsolateRoot isolate) const;
inline Object Acquire_Load(PtrComprCageBase cage_base) const;
inline Object Relaxed_Load() const;
inline Object Relaxed_Load(IsolateRoot isolate) const;
inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
inline void Relaxed_Store(Object value) const;
inline void Release_Store(Object value) const;
inline Object Relaxed_CompareAndSwap(Object old, Object target) const;
@ -147,11 +147,11 @@ class FullMaybeObjectSlot
: SlotBase(slot.address()) {}
inline MaybeObject operator*() const;
inline MaybeObject load(IsolateRoot isolate) const;
inline MaybeObject load(PtrComprCageBase cage_base) const;
inline void store(MaybeObject value) const;
inline MaybeObject Relaxed_Load() const;
inline MaybeObject Relaxed_Load(IsolateRoot isolate) const;
inline MaybeObject Relaxed_Load(PtrComprCageBase cage_base) const;
inline void Relaxed_Store(MaybeObject value) const;
inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
};
@ -174,7 +174,7 @@ class FullHeapObjectSlot : public SlotBase<FullHeapObjectSlot, Address> {
: SlotBase(slot.address()) {}
inline HeapObjectReference operator*() const;
inline HeapObjectReference load(IsolateRoot isolate) const;
inline HeapObjectReference load(PtrComprCageBase cage_base) const;
inline void store(HeapObjectReference value) const;
inline HeapObject ToHeapObject() const;

View File

@ -274,12 +274,12 @@ inline TResult StringShape::DispatchToSpecificType(String str,
}
DEF_GETTER(String, IsOneByteRepresentation, bool) {
uint32_t type = map(isolate).instance_type();
uint32_t type = map(cage_base).instance_type();
return (type & kStringEncodingMask) == kOneByteStringTag;
}
DEF_GETTER(String, IsTwoByteRepresentation, bool) {
uint32_t type = map(isolate).instance_type();
uint32_t type = map(cage_base).instance_type();
return (type & kStringEncodingMask) == kTwoByteStringTag;
}
@ -463,7 +463,7 @@ bool String::IsEqualTo(Vector<const Char> str, Isolate* isolate) const {
template <String::EqualityType kEqType, typename Char>
bool String::IsEqualTo(Vector<const Char> str) const {
DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this));
return IsEqualToImpl<kEqType>(str, GetIsolateForPtrCompr(*this),
return IsEqualToImpl<kEqType>(str, GetPtrComprCageBase(*this),
SharedStringAccessGuardIfNeeded::NotNeeded());
}
@ -475,7 +475,7 @@ bool String::IsEqualTo(Vector<const Char> str, LocalIsolate* isolate) const {
template <String::EqualityType kEqType, typename Char>
bool String::IsEqualToImpl(
Vector<const Char> str, IsolateRoot isolate,
Vector<const Char> str, PtrComprCageBase cage_base,
const SharedStringAccessGuardIfNeeded& access_guard) const {
size_t len = str.size();
switch (kEqType) {
@ -496,7 +496,7 @@ bool String::IsEqualToImpl(
String string = *this;
const Char* data = str.data();
while (true) {
int32_t type = string.map(isolate).instance_type();
int32_t type = string.map(cage_base).instance_type();
switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
case kSeqStringTag | kOneByteStringTag:
return CompareCharsEqual(
@ -521,7 +521,7 @@ bool String::IsEqualToImpl(
case kSlicedStringTag | kTwoByteStringTag: {
SlicedString slicedString = SlicedString::cast(string);
slice_offset += slicedString.offset();
string = slicedString.parent(isolate);
string = slicedString.parent(cage_base);
continue;
}
@ -529,13 +529,14 @@ bool String::IsEqualToImpl(
case kConsStringTag | kTwoByteStringTag: {
// The ConsString path is more complex and rare, so call out to an
// out-of-line handler.
return IsConsStringEqualToImpl<Char>(
ConsString::cast(string), slice_offset, str, isolate, access_guard);
return IsConsStringEqualToImpl<Char>(ConsString::cast(string),
slice_offset, str, cage_base,
access_guard);
}
case kThinStringTag | kOneByteStringTag:
case kThinStringTag | kTwoByteStringTag:
string = ThinString::cast(string).actual(isolate);
string = ThinString::cast(string).actual(cage_base);
continue;
default:
@ -548,7 +549,8 @@ bool String::IsEqualToImpl(
template <typename Char>
bool String::IsConsStringEqualToImpl(
ConsString string, int slice_offset, Vector<const Char> str,
IsolateRoot isolate, const SharedStringAccessGuardIfNeeded& access_guard) {
PtrComprCageBase cage_base,
const SharedStringAccessGuardIfNeeded& access_guard) {
// Already checked the len in IsEqualToImpl. Check GE rather than EQ in case
// this is a prefix check.
DCHECK_GE(string.length(), str.size());
@ -561,7 +563,7 @@ bool String::IsConsStringEqualToImpl(
// remaining string.
size_t len = std::min<size_t>(segment.length(), remaining_str.size());
Vector<const Char> sub_str = remaining_str.SubVector(0, len);
if (!segment.IsEqualToImpl<EqualityType::kNoLengthCheck>(sub_str, isolate,
if (!segment.IsEqualToImpl<EqualityType::kNoLengthCheck>(sub_str, cage_base,
access_guard)) {
return false;
}
@ -845,7 +847,7 @@ Object ConsString::unchecked_second() {
}
DEF_GETTER(ThinString, unchecked_actual, HeapObject) {
return TaggedField<HeapObject, kActualOffset>::load(isolate, *this);
return TaggedField<HeapObject, kActualOffset>::load(cage_base, *this);
}
bool ExternalString::is_uncached() const {
@ -860,7 +862,7 @@ void ExternalString::AllocateExternalPointerEntries(Isolate* isolate) {
}
DEF_GETTER(ExternalString, resource_as_address, Address) {
return ReadExternalPointerField(kResourceOffset, isolate,
return ReadExternalPointerField(kResourceOffset, cage_base,
kExternalStringResourceTag);
}
@ -908,7 +910,7 @@ DEF_GETTER(ExternalOneByteString, resource,
DEF_GETTER(ExternalOneByteString, mutable_resource,
ExternalOneByteString::Resource*) {
return reinterpret_cast<Resource*>(resource_as_address(isolate));
return reinterpret_cast<Resource*>(resource_as_address(cage_base));
}
void ExternalOneByteString::update_data_cache(Isolate* isolate) {
@ -973,7 +975,7 @@ DEF_GETTER(ExternalTwoByteString, resource,
DEF_GETTER(ExternalTwoByteString, mutable_resource,
ExternalTwoByteString::Resource*) {
return reinterpret_cast<Resource*>(resource_as_address(isolate));
return reinterpret_cast<Resource*>(resource_as_address(cage_base));
}
void ExternalTwoByteString::update_data_cache(Isolate* isolate) {

View File

@ -91,15 +91,15 @@ bool KeyIsMatch(LocalIsolate* isolate, StringTableKey* key, String string) {
class StringTable::Data {
public:
static std::unique_ptr<Data> New(int capacity);
static std::unique_ptr<Data> Resize(IsolateRoot isolate,
static std::unique_ptr<Data> Resize(PtrComprCageBase cage_base,
std::unique_ptr<Data> data, int capacity);
OffHeapObjectSlot slot(InternalIndex index) const {
return OffHeapObjectSlot(&elements_[index.as_uint32()]);
}
Object Get(IsolateRoot isolate, InternalIndex index) const {
return slot(index).Acquire_Load(isolate);
Object Get(PtrComprCageBase cage_base, InternalIndex index) const {
return slot(index).Acquire_Load(cage_base);
}
void Set(InternalIndex index, String entry) {
@ -139,7 +139,8 @@ class StringTable::Data {
InternalIndex FindEntry(LocalIsolate* isolate, StringTableKey* key,
uint32_t hash) const;
InternalIndex FindInsertionEntry(IsolateRoot isolate, uint32_t hash) const;
InternalIndex FindInsertionEntry(PtrComprCageBase cage_base,
uint32_t hash) const;
template <typename LocalIsolate, typename StringTableKey>
InternalIndex FindEntryOrInsertionEntry(LocalIsolate* isolate,
@ -157,7 +158,7 @@ class StringTable::Data {
Data* PreviousData() { return previous_data_.get(); }
void DropPreviousData() { previous_data_.reset(); }
void Print(IsolateRoot isolate) const;
void Print(PtrComprCageBase cage_base) const;
size_t GetCurrentMemoryUsage() const;
private:
@ -224,7 +225,7 @@ std::unique_ptr<StringTable::Data> StringTable::Data::New(int capacity) {
}
std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
IsolateRoot isolate, std::unique_ptr<Data> data, int capacity) {
PtrComprCageBase cage_base, std::unique_ptr<Data> data, int capacity) {
std::unique_ptr<Data> new_data(new (capacity) Data(capacity));
DCHECK_LT(data->number_of_elements(), new_data->capacity());
@ -234,11 +235,12 @@ std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
// Rehash the elements.
for (InternalIndex i : InternalIndex::Range(data->capacity())) {
Object element = data->Get(isolate, i);
Object element = data->Get(cage_base, i);
if (element == empty_element() || element == deleted_element()) continue;
String string = String::cast(element);
uint32_t hash = string.hash();
InternalIndex insertion_index = new_data->FindInsertionEntry(isolate, hash);
InternalIndex insertion_index =
new_data->FindInsertionEntry(cage_base, hash);
new_data->Set(insertion_index, string);
}
new_data->number_of_elements_ = data->number_of_elements();
@ -265,7 +267,7 @@ InternalIndex StringTable::Data::FindEntry(LocalIsolate* isolate,
}
}
InternalIndex StringTable::Data::FindInsertionEntry(IsolateRoot isolate,
InternalIndex StringTable::Data::FindInsertionEntry(PtrComprCageBase cage_base,
uint32_t hash) const {
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
@ -273,7 +275,7 @@ InternalIndex StringTable::Data::FindInsertionEntry(IsolateRoot isolate,
entry = NextProbe(entry, count++, capacity_)) {
// TODO(leszeks): Consider delaying the decompression until after the
// comparisons against empty/deleted.
Object element = Get(isolate, entry);
Object element = Get(cage_base, entry);
if (element == empty_element() || element == deleted_element())
return entry;
}
@ -314,11 +316,12 @@ void StringTable::Data::IterateElements(RootVisitor* visitor) {
visitor->VisitRootPointers(Root::kStringTable, nullptr, first_slot, end_slot);
}
void StringTable::Data::Print(IsolateRoot isolate) const {
void StringTable::Data::Print(PtrComprCageBase cage_base) const {
OFStream os(stdout);
os << "StringTable {" << std::endl;
for (InternalIndex i : InternalIndex::Range(capacity_)) {
os << " " << i.as_uint32() << ": " << Brief(Get(isolate, i)) << std::endl;
os << " " << i.as_uint32() << ": " << Brief(Get(cage_base, i))
<< std::endl;
}
os << "}" << std::endl;
}
@ -530,7 +533,7 @@ template Handle<String> StringTable::LookupKey(LocalIsolate* isolate,
template Handle<String> StringTable::LookupKey(Isolate* isolate,
StringTableInsertionKey* key);
StringTable::Data* StringTable::EnsureCapacity(IsolateRoot isolate,
StringTable::Data* StringTable::EnsureCapacity(PtrComprCageBase cage_base,
int additional_elements) {
// This call is only allowed while the write mutex is held.
write_mutex_.AssertHeld();
@ -560,7 +563,7 @@ StringTable::Data* StringTable::EnsureCapacity(IsolateRoot isolate,
if (new_capacity != -1) {
std::unique_ptr<Data> new_data =
Data::Resize(isolate, std::unique_ptr<Data>(data), new_capacity);
Data::Resize(cage_base, std::unique_ptr<Data>(data), new_capacity);
// `new_data` is the new owner of `data`.
DCHECK_EQ(new_data->PreviousData(), data);
// Release-store the new data pointer as `data_`, so that it can be
@ -669,8 +672,8 @@ Address StringTable::TryStringToIndexOrLookupExisting(Isolate* isolate,
isolate, string, source, start);
}
void StringTable::Print(IsolateRoot isolate) const {
data_.load(std::memory_order_acquire)->Print(isolate);
void StringTable::Print(PtrComprCageBase cage_base) const {
data_.load(std::memory_order_acquire)->Print(cage_base);
}
size_t StringTable::GetCurrentMemoryUsage() const {

View File

@ -72,7 +72,7 @@ class V8_EXPORT_PRIVATE StringTable {
static Address TryStringToIndexOrLookupExisting(Isolate* isolate,
Address raw_string);
void Print(IsolateRoot isolate) const;
void Print(PtrComprCageBase cage_base) const;
size_t GetCurrentMemoryUsage() const;
// The following methods must be called either while holding the write lock,
@ -84,7 +84,7 @@ class V8_EXPORT_PRIVATE StringTable {
private:
class Data;
Data* EnsureCapacity(IsolateRoot isolate, int additional_elements);
Data* EnsureCapacity(PtrComprCageBase cage_base, int additional_elements);
std::atomic<Data*> data_;
// Write mutex is mutable so that readers of concurrently mutated values (e.g.

View File

@ -1289,7 +1289,7 @@ Object String::LastIndexOf(Isolate* isolate, Handle<Object> receiver,
bool String::HasOneBytePrefix(Vector<const char> str) {
DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this));
return IsEqualToImpl<EqualityType::kPrefix>(
str, GetIsolateForPtrCompr(*this),
str, GetPtrComprCageBase(*this),
SharedStringAccessGuardIfNeeded::NotNeeded());
}

View File

@ -332,7 +332,7 @@ class String : public TorqueGeneratedString<String, Name> {
// whole string or just a prefix.
//
// This is main-thread only, like the Isolate* overload, but additionally
// computes the IsolateRoot for IsEqualToImpl.
// computes the PtrComprCageBase for IsEqualToImpl.
template <EqualityType kEqType = EqualityType::kWholeString, typename Char>
inline bool IsEqualTo(Vector<const Char> str) const;
@ -546,14 +546,15 @@ class String : public TorqueGeneratedString<String, Name> {
// Implementation of the IsEqualTo() public methods. Do not use directly.
template <EqualityType kEqType, typename Char>
V8_INLINE bool IsEqualToImpl(
Vector<const Char> str, IsolateRoot isolate,
Vector<const Char> str, PtrComprCageBase cage_base,
const SharedStringAccessGuardIfNeeded& access_guard) const;
// Out-of-line IsEqualToImpl for ConsString.
template <typename Char>
V8_NOINLINE static bool IsConsStringEqualToImpl(
ConsString string, int slice_offset, Vector<const Char> str,
IsolateRoot isolate, const SharedStringAccessGuardIfNeeded& access_guard);
PtrComprCageBase cage_base,
const SharedStringAccessGuardIfNeeded& access_guard);
V8_EXPORT_PRIVATE static Handle<String> SlowFlatten(
Isolate* isolate, Handle<ConsString> cons, AllocationType allocation);

View File

@ -219,15 +219,15 @@ InternalIndex SwissNameDictionary::FindEntry(LocalIsolate* isolate,
}
Object SwissNameDictionary::LoadFromDataTable(int entry, int data_offset) {
return LoadFromDataTable(GetIsolateForPtrCompr(*this), entry, data_offset);
return LoadFromDataTable(GetPtrComprCageBase(*this), entry, data_offset);
}
Object SwissNameDictionary::LoadFromDataTable(IsolateRoot isolate, int entry,
int data_offset) {
Object SwissNameDictionary::LoadFromDataTable(PtrComprCageBase cage_base,
int entry, int data_offset) {
DCHECK_LT(static_cast<unsigned>(entry), static_cast<unsigned>(Capacity()));
int offset = DataTableStartOffset() +
(entry * kDataTableEntryCount + data_offset) * kTaggedSize;
return TaggedField<Object>::Relaxed_Load(isolate, *this, offset);
return TaggedField<Object>::Relaxed_Load(cage_base, *this, offset);
}
void SwissNameDictionary::StoreToDataTable(int entry, int data_offset,

View File

@ -306,7 +306,8 @@ class V8_EXPORT_PRIVATE SwissNameDictionary : public HeapObject {
inline ctrl_t GetCtrl(int entry);
inline Object LoadFromDataTable(int entry, int data_offset);
inline Object LoadFromDataTable(IsolateRoot root, int entry, int data_offset);
inline Object LoadFromDataTable(PtrComprCageBase cage_base, int entry,
int data_offset);
inline void StoreToDataTable(int entry, int data_offset, Object data);
inline void StoreToDataTableNoBarrier(int entry, int data_offset,
Object data);

View File

@ -61,10 +61,10 @@ T TaggedField<T, kFieldOffset>::load(HeapObject host, int offset) {
// static
template <typename T, int kFieldOffset>
T TaggedField<T, kFieldOffset>::load(IsolateRoot isolate, HeapObject host,
int offset) {
T TaggedField<T, kFieldOffset>::load(PtrComprCageBase cage_base,
HeapObject host, int offset) {
Tagged_t value = *location(host, offset);
return T(tagged_to_full(isolate, value));
return T(tagged_to_full(cage_base, value));
}
// static
@ -96,10 +96,10 @@ T TaggedField<T, kFieldOffset>::Relaxed_Load(HeapObject host, int offset) {
// static
template <typename T, int kFieldOffset>
T TaggedField<T, kFieldOffset>::Relaxed_Load(IsolateRoot isolate,
T TaggedField<T, kFieldOffset>::Relaxed_Load(PtrComprCageBase cage_base,
HeapObject host, int offset) {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location(host, offset));
return T(tagged_to_full(isolate, value));
return T(tagged_to_full(cage_base, value));
}
// static
@ -125,10 +125,10 @@ T TaggedField<T, kFieldOffset>::Acquire_Load(HeapObject host, int offset) {
// static
template <typename T, int kFieldOffset>
T TaggedField<T, kFieldOffset>::Acquire_Load(IsolateRoot isolate,
T TaggedField<T, kFieldOffset>::Acquire_Load(PtrComprCageBase cage_base,
HeapObject host, int offset) {
AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location(host, offset));
return T(tagged_to_full(isolate, value));
return T(tagged_to_full(cage_base, value));
}
// static

View File

@ -38,20 +38,21 @@ class TaggedField : public AllStatic {
static inline Address address(HeapObject host, int offset = 0);
static inline T load(HeapObject host, int offset = 0);
static inline T load(IsolateRoot isolate, HeapObject host, int offset = 0);
static inline T load(PtrComprCageBase cage_base, HeapObject host,
int offset = 0);
static inline void store(HeapObject host, T value);
static inline void store(HeapObject host, int offset, T value);
static inline T Relaxed_Load(HeapObject host, int offset = 0);
static inline T Relaxed_Load(IsolateRoot isolate, HeapObject host,
static inline T Relaxed_Load(PtrComprCageBase cage_base, HeapObject host,
int offset = 0);
static inline void Relaxed_Store(HeapObject host, T value);
static inline void Relaxed_Store(HeapObject host, int offset, T value);
static inline T Acquire_Load(HeapObject host, int offset = 0);
static inline T Acquire_Load(IsolateRoot isolate, HeapObject host,
static inline T Acquire_Load(PtrComprCageBase cage_base, HeapObject host,
int offset = 0);
static inline void Release_Store(HeapObject host, T value);

View File

@ -45,13 +45,13 @@ RELEASE_ACQUIRE_ACCESSORS(FunctionTemplateInfo, call_code, HeapObject,
// TODO(nicohartmann@, v8:11122): Let Torque generate this accessor.
HeapObject FunctionTemplateInfo::rare_data(AcquireLoadTag) const {
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return rare_data(isolate, kAcquireLoad);
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
return rare_data(cage_base, kAcquireLoad);
}
HeapObject FunctionTemplateInfo::rare_data(IsolateRoot isolate,
HeapObject FunctionTemplateInfo::rare_data(PtrComprCageBase cage_base,
AcquireLoadTag) const {
HeapObject value =
TaggedField<HeapObject>::Acquire_Load(isolate, *this, kRareDataOffset);
TaggedField<HeapObject>::Acquire_Load(cage_base, *this, kRareDataOffset);
DCHECK(value.IsUndefined() || value.IsFunctionTemplateRareData());
return value;
}
@ -75,8 +75,8 @@ FunctionTemplateRareData FunctionTemplateInfo::EnsureFunctionTemplateRareData(
#define RARE_ACCESSORS(Name, CamelName, Type, Default) \
DEF_GETTER(FunctionTemplateInfo, Get##CamelName, Type) { \
HeapObject extra = rare_data(isolate, kAcquireLoad); \
HeapObject undefined = GetReadOnlyRoots(isolate).undefined_value(); \
HeapObject extra = rare_data(cage_base, kAcquireLoad); \
HeapObject undefined = GetReadOnlyRoots(cage_base).undefined_value(); \
return extra == undefined ? Default \
: FunctionTemplateRareData::cast(extra).Name(); \
} \

View File

@ -92,7 +92,7 @@ class FunctionTemplateInfo
// TODO(nicohartmann@, v8:11122): Let Torque generate the following accessor.
inline HeapObject rare_data(AcquireLoadTag) const;
inline HeapObject rare_data(IsolateRoot isolate, AcquireLoadTag) const;
inline HeapObject rare_data(PtrComprCageBase cage_base, AcquireLoadTag) const;
inline void set_rare_data(
HeapObject value, ReleaseStoreTag,
WriteBarrierMode mode = WriteBarrierMode::UPDATE_WRITE_BARRIER);

View File

@ -1508,10 +1508,10 @@ class RootsReferencesExtractor : public RootVisitor {
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
DCHECK_EQ(root, Root::kStringTable);
IsolateRoot isolate = Isolate::FromHeap(explorer_->heap_);
PtrComprCageBase cage_base = Isolate::FromHeap(explorer_->heap_);
for (OffHeapObjectSlot p = start; p < end; ++p) {
explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
p.load(isolate));
p.load(cage_base));
}
}

View File

@ -386,10 +386,10 @@ void CCGenerator::EmitInstruction(const LoadReferenceInstruction& instruction,
out() << " " << result_name << " = ";
if (instruction.type->IsSubtypeOf(TypeOracle::GetTaggedType())) {
// Currently, all of the tagged loads we emit are for smi values, so there
// is no point in providing an IsolateRoot. If at some point we start
// is no point in providing a PtrComprCageBase. If at some point we start
// emitting loads for tagged fields which might be HeapObjects, then we
// should plumb an IsolateRoot through the generated functions that need
// it.
// should plumb a PtrComprCageBase through the generated functions that
// need it.
if (!instruction.type->IsSubtypeOf(TypeOracle::GetSmiType())) {
Error(
"Not supported in C++ output: LoadReference on non-smi tagged "

View File

@ -4223,8 +4223,9 @@ void CppClassGenerator::GenerateFieldAccessors(
hdr_ << " inline " << type_name << " " << name << "("
<< (indexed ? "int i" : "") << ") const;\n";
if (can_contain_heap_objects) {
hdr_ << " inline " << type_name << " " << name << "(IsolateRoot isolate"
<< (indexed ? ", int i" : "") << ") const;\n";
hdr_ << " inline " << type_name << " " << name
<< "(PtrComprCageBase cage_base" << (indexed ? ", int i" : "")
<< ") const;\n";
}
hdr_ << " inline void set_" << name << "(" << (indexed ? "int i, " : "")
<< type_name << " value"
@ -4233,14 +4234,14 @@ void CppClassGenerator::GenerateFieldAccessors(
: "")
<< ");\n\n";
// For tagged data, generate the extra getter that derives an IsolateRoot from
// the current object's pointer.
// For tagged data, generate the extra getter that derives a PtrComprCageBase
// from the current object's pointer.
if (can_contain_heap_objects) {
inl_ << "template <class D, class P>\n";
inl_ << type_name << " " << gen_name_ << "<D, P>::" << name << "("
<< (indexed ? "int i" : "") << ") const {\n";
inl_ << " IsolateRoot isolate = GetIsolateForPtrCompr(*this);\n";
inl_ << " return " << gen_name_ << "::" << name << "(isolate"
inl_ << " PtrComprCageBase cage_base = GetPtrComprCageBase(*this);\n";
inl_ << " return " << gen_name_ << "::" << name << "(cage_base"
<< (indexed ? ", i" : "") << ");\n";
inl_ << "}\n";
}
@ -4248,7 +4249,7 @@ void CppClassGenerator::GenerateFieldAccessors(
// Generate the getter implementation.
inl_ << "template <class D, class P>\n";
inl_ << type_name << " " << gen_name_ << "<D, P>::" << name << "(";
if (can_contain_heap_objects) inl_ << "IsolateRoot isolate";
if (can_contain_heap_objects) inl_ << "PtrComprCageBase cage_base";
if (can_contain_heap_objects && indexed) inl_ << ", ";
if (indexed) inl_ << "int i";
inl_ << ") const {\n";
@ -4361,10 +4362,11 @@ void CppClassGenerator::EmitLoadFieldStatement(
bool is_smi = field_type->IsSubtypeOf(TypeOracle::GetSmiType());
const std::string load_type = is_smi ? "Smi" : type_name;
const char* postfix = is_smi ? ".value()" : "";
const char* optional_isolate = is_smi ? "" : "isolate, ";
const char* optional_cage_base = is_smi ? "" : "cage_base, ";
inl_ << "TaggedField<" << load_type << ">::" << load << "("
<< optional_isolate << "*this, " << offset << ")" << postfix << ";\n";
<< optional_cage_base << "*this, " << offset << ")" << postfix
<< ";\n";
}
if (CanContainHeapObjects(field_type)) {

View File

@ -59,13 +59,13 @@ CAST_ACCESSOR(WasmTypeInfo)
CAST_ACCESSOR(WasmStruct)
CAST_ACCESSOR(WasmArray)
#define OPTIONAL_ACCESSORS(holder, name, type, offset) \
DEF_GETTER(holder, has_##name, bool) { \
Object value = TaggedField<Object, offset>::load(isolate, *this); \
return !value.IsUndefined(GetReadOnlyRoots(isolate)); \
} \
ACCESSORS_CHECKED2(holder, name, type, offset, \
!value.IsUndefined(GetReadOnlyRoots(isolate)), true)
#define OPTIONAL_ACCESSORS(holder, name, type, offset) \
DEF_GETTER(holder, has_##name, bool) { \
Object value = TaggedField<Object, offset>::load(cage_base, *this); \
return !value.IsUndefined(GetReadOnlyRoots(cage_base)); \
} \
ACCESSORS_CHECKED2(holder, name, type, offset, \
!value.IsUndefined(GetReadOnlyRoots(cage_base)), true)
#define PRIMITIVE_ACCESSORS(holder, name, type, offset) \
type holder::name() const { \
@ -460,6 +460,12 @@ int WasmArray::GcSafeSizeFor(Map map, int length) {
void WasmTypeInfo::clear_foreign_address(Isolate* isolate) {
#ifdef V8_HEAP_SANDBOX
// TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
#endif
// Due to the type-specific pointer tags for external pointers, we need to
// allocate an entry in the table here even though it will just store nullptr.
AllocateExternalPointerEntries(isolate);

View File

@ -14,7 +14,7 @@ namespace debug_helper_internal {
bool IsPointerCompressed(uintptr_t address) {
#if COMPRESS_POINTERS_BOOL
return address < i::kPtrComprHeapReservationSize;
return address < i::kPtrComprCageReservationSize;
#else
return false;
#endif

View File

@ -348,7 +348,7 @@ class ReadStringVisitor : public TqObjectVisitor {
GetOrFinish(object->GetResourceDataValue(accessor_));
#ifdef V8_COMPRESS_POINTERS
uintptr_t data_address = static_cast<uintptr_t>(
DecodeExternalPointer(GetIsolateForPtrComprFromOnHeapAddress(
DecodeExternalPointer(GetPtrComprCageBaseFromOnHeapAddress(
heap_addresses_.any_heap_pointer),
resource_data, kExternalStringResourceDataTag));
#else