Revert "[ptr-cage] Rename IsolateRoot to PtrComprCageBase"
This reverts commit e28dadc207.
Reason for revert: failed test262 tests; see https://ci.chromium.org/ui/p/v8/builders/ci/V8%20Win32/32275/steps?succeeded=true&debug=false
Original change's description:
> [ptr-cage] Rename IsolateRoot to PtrComprCageBase
>
> Currently, IsolateRoot is both the address of the Isolate root and the
> base address of the pointer compression reservation. This CL teases the
> two uses apart by renaming IsolateRoot to PtrComprCageBase.
>
> - In addition to V8_COMPRESS_POINTERS, add a
> V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE (vs SHARED_CAGE).
>
> - Rename GetIsolate* helpers to GetPtrComprCageBase. When
> V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE is true, the helpers remain as
> aliases to GetPtrComprCageBase.
>
> - Rename kPtrComprIsolateRootAlignment to kPtrComprCageBaseAlignment.
>
> Bug: v8:11460
> Change-Id: I1d715f678ce9a0b5731895612ca14f56579b1c48
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2783672
> Commit-Queue: Shu-yu Guo <syg@chromium.org>
> Auto-Submit: Shu-yu Guo <syg@chromium.org>
> Reviewed-by: Igor Sheludko <ishell@chromium.org>
> Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#73790}
Bug: v8:11460
Change-Id: I19d0e28194fcdb28e89f129a7694ca3fe29fa17a
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2806168
Auto-Submit: Francis McCabe <fgm@chromium.org>
Commit-Queue: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Bot-Commit: Rubber Stamper <rubber-stamper@appspot.gserviceaccount.com>
Cr-Commit-Position: refs/heads/master@{#73791}
This commit is contained in:
parent
e28dadc207
commit
07a9ff4dbb
7
BUILD.gn
7
BUILD.gn
@ -404,10 +404,6 @@ if (v8_enable_shared_ro_heap && v8_enable_pointer_compression) {
|
||||
"Sharing read-only heap with pointer compression is only supported on Linux or Android")
|
||||
}
|
||||
|
||||
assert(
|
||||
!v8_enable_pointer_compression_shared_cage || !v8_enable_shared_ro_heap,
|
||||
"Sharing read-only heap is not yet supported when sharing a pointer compression cage")
|
||||
|
||||
assert(!v8_use_multi_snapshots || !v8_control_flow_integrity,
|
||||
"Control-flow integrity does not support multisnapshots")
|
||||
|
||||
@ -558,7 +554,6 @@ external_v8_defines = [
|
||||
"V8_ENABLE_CHECKS",
|
||||
"V8_COMPRESS_POINTERS",
|
||||
"V8_COMPRESS_POINTERS_IN_SHARED_CAGE",
|
||||
"V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE",
|
||||
"V8_31BIT_SMIS_ON_64BIT_ARCH",
|
||||
"V8_COMPRESS_ZONES",
|
||||
"V8_HEAP_SANDBOX",
|
||||
@ -578,8 +573,6 @@ if (v8_enable_pointer_compression) {
|
||||
}
|
||||
if (v8_enable_pointer_compression_shared_cage) {
|
||||
enabled_external_v8_defines += [ "V8_COMPRESS_POINTERS_IN_SHARED_CAGE" ]
|
||||
} else if (v8_enable_pointer_compression) {
|
||||
enabled_external_v8_defines += [ "V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE" ]
|
||||
}
|
||||
if (v8_enable_pointer_compression || v8_enable_31bit_smis_on_64bit_arch) {
|
||||
enabled_external_v8_defines += [ "V8_31BIT_SMIS_ON_64BIT_ARCH" ]
|
||||
|
@ -358,9 +358,8 @@ class Internals {
|
||||
internal::Address heap_object_ptr, int offset) {
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
|
||||
internal::Address base =
|
||||
GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
|
||||
return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
|
||||
internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
|
||||
return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
|
||||
#else
|
||||
return ReadRawField<internal::Address>(heap_object_ptr, offset);
|
||||
#endif
|
||||
@ -412,19 +411,18 @@ class Internals {
|
||||
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
// See v8:7703 or src/ptr-compr.* for details about pointer compression.
|
||||
static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
|
||||
static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
|
||||
static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
|
||||
static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;
|
||||
|
||||
V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
|
||||
V8_INLINE static internal::Address GetRootFromOnHeapAddress(
|
||||
internal::Address addr) {
|
||||
return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
|
||||
return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
|
||||
}
|
||||
|
||||
V8_INLINE static internal::Address DecompressTaggedAnyField(
|
||||
internal::Address heap_object_ptr, uint32_t value) {
|
||||
internal::Address base =
|
||||
GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
|
||||
return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
|
||||
internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
|
||||
return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
|
||||
}
|
||||
|
||||
#endif // V8_COMPRESS_POINTERS
|
||||
|
@ -661,7 +661,7 @@ void ArrayLiteral::BuildBoilerplateDescription(LocalIsolate* isolate) {
|
||||
boilerplate_descriptor_kind(),
|
||||
GetMoreGeneralElementsKind(boilerplate_descriptor_kind(),
|
||||
boilerplate_value.OptimalElementsKind(
|
||||
GetPtrComprCageBase(*elements))));
|
||||
GetIsolateForPtrCompr(*elements))));
|
||||
|
||||
FixedArray::cast(*elements).set(array_index, boilerplate_value);
|
||||
}
|
||||
|
@ -370,14 +370,14 @@ void TypedArrayBuiltinsAssembler::SetJSTypedArrayOnHeapDataPtr(
|
||||
TNode<IntPtrT> full_base = Signed(BitcastTaggedToWord(base));
|
||||
TNode<Int32T> compressed_base = TruncateIntPtrToInt32(full_base);
|
||||
// TODO(v8:9706): Add a way to directly use kRootRegister value.
|
||||
TNode<IntPtrT> ptr_compr_cage_base =
|
||||
TNode<IntPtrT> isolate_root =
|
||||
IntPtrSub(full_base, Signed(ChangeUint32ToWord(compressed_base)));
|
||||
// Add JSTypedArray::ExternalPointerCompensationForOnHeapArray() to offset.
|
||||
DCHECK_EQ(
|
||||
isolate()->isolate_root(),
|
||||
JSTypedArray::ExternalPointerCompensationForOnHeapArray(isolate()));
|
||||
// See JSTypedArray::SetOnHeapDataPtr() for details.
|
||||
offset = Unsigned(IntPtrAdd(offset, ptr_compr_cage_base));
|
||||
offset = Unsigned(IntPtrAdd(offset, isolate_root));
|
||||
}
|
||||
|
||||
StoreJSTypedArrayBasePointer(holder, base);
|
||||
|
@ -12,17 +12,11 @@
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
V8_INLINE Address DecodeExternalPointer(PtrComprCageBase isolate_root,
|
||||
V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate_root,
|
||||
ExternalPointer_t encoded_pointer,
|
||||
ExternalPointerTag tag) {
|
||||
STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize);
|
||||
#ifdef V8_HEAP_SANDBOX
|
||||
|
||||
// TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
|
||||
#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
|
||||
#endif
|
||||
|
||||
uint32_t index = static_cast<uint32_t>(encoded_pointer);
|
||||
const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());
|
||||
return isolate->external_pointer_table().get(index) ^ tag;
|
||||
@ -68,7 +62,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate,
|
||||
}
|
||||
|
||||
V8_INLINE Address ReadExternalPointerField(Address field_address,
|
||||
PtrComprCageBase cage_base,
|
||||
IsolateRoot isolate_root,
|
||||
ExternalPointerTag tag) {
|
||||
// Pointer compression causes types larger than kTaggedSize to be unaligned.
|
||||
constexpr bool v8_pointer_compression_unaligned =
|
||||
@ -79,7 +73,7 @@ V8_INLINE Address ReadExternalPointerField(Address field_address,
|
||||
} else {
|
||||
encoded_value = base::Memory<ExternalPointer_t>(field_address);
|
||||
}
|
||||
return DecodeExternalPointer(cage_base, encoded_value, tag);
|
||||
return DecodeExternalPointer(isolate_root, encoded_value, tag);
|
||||
}
|
||||
|
||||
V8_INLINE void WriteExternalPointerField(Address field_address,
|
||||
|
@ -12,7 +12,7 @@ namespace internal {
|
||||
|
||||
// Convert external pointer from on-V8-heap representation to an actual external
|
||||
// pointer value.
|
||||
V8_INLINE Address DecodeExternalPointer(PtrComprCageBase isolate,
|
||||
V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate,
|
||||
ExternalPointer_t encoded_pointer,
|
||||
ExternalPointerTag tag);
|
||||
|
||||
@ -34,7 +34,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate,
|
||||
// Reads external pointer for the field, and decodes it if the sandbox is
|
||||
// enabled.
|
||||
V8_INLINE Address ReadExternalPointerField(Address field_address,
|
||||
PtrComprCageBase isolate,
|
||||
IsolateRoot isolate,
|
||||
ExternalPointerTag tag);
|
||||
|
||||
// Encodes value if the sandbox is enabled and writes it into the field.
|
||||
|
@ -1748,13 +1748,13 @@ enum class DynamicCheckMapsStatus : uint8_t {
|
||||
};
|
||||
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
class PtrComprCageBase {
|
||||
class IsolateRoot {
|
||||
public:
|
||||
explicit constexpr PtrComprCageBase(Address address) : address_(address) {}
|
||||
explicit constexpr IsolateRoot(Address address) : address_(address) {}
|
||||
// NOLINTNEXTLINE
|
||||
inline PtrComprCageBase(const Isolate* isolate);
|
||||
inline IsolateRoot(const Isolate* isolate);
|
||||
// NOLINTNEXTLINE
|
||||
inline PtrComprCageBase(const LocalIsolate* isolate);
|
||||
inline IsolateRoot(const LocalIsolate* isolate);
|
||||
|
||||
inline Address address() const;
|
||||
|
||||
@ -1762,13 +1762,13 @@ class PtrComprCageBase {
|
||||
Address address_;
|
||||
};
|
||||
#else
|
||||
class PtrComprCageBase {
|
||||
class IsolateRoot {
|
||||
public:
|
||||
PtrComprCageBase() = default;
|
||||
IsolateRoot() = default;
|
||||
// NOLINTNEXTLINE
|
||||
PtrComprCageBase(const Isolate* isolate) {}
|
||||
IsolateRoot(const Isolate* isolate) {}
|
||||
// NOLINTNEXTLINE
|
||||
PtrComprCageBase(const LocalIsolate* isolate) {}
|
||||
IsolateRoot(const LocalIsolate* isolate) {}
|
||||
};
|
||||
#endif
|
||||
|
||||
|
@ -15,35 +15,15 @@ namespace internal {
|
||||
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
|
||||
#if defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
|
||||
PtrComprCageBase::PtrComprCageBase(const Isolate* isolate)
|
||||
IsolateRoot::IsolateRoot(const Isolate* isolate)
|
||||
: address_(isolate->isolate_root()) {}
|
||||
PtrComprCageBase::PtrComprCageBase(const LocalIsolate* isolate)
|
||||
IsolateRoot::IsolateRoot(const LocalIsolate* isolate)
|
||||
: address_(isolate->isolate_root()) {}
|
||||
|
||||
#elif defined V8_COMPRESS_POINTERS_IN_SHARED_CAGE
|
||||
|
||||
PtrComprCageBase::PtrComprCageBase(const Isolate* isolate)
|
||||
: address_(isolate->isolate_root()) {
|
||||
UNIMPLEMENTED();
|
||||
}
|
||||
PtrComprCageBase::PtrComprCageBase(const LocalIsolate* isolate)
|
||||
: address_(isolate->isolate_root()) {
|
||||
UNIMPLEMENTED();
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
#error "Pointer compression build configuration error"
|
||||
|
||||
#endif // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE,
|
||||
// V8_COMPRESS_POINTERS_IN_SHARED_CAGE
|
||||
|
||||
Address PtrComprCageBase::address() const {
|
||||
Address IsolateRoot::address() const {
|
||||
Address ret = address_;
|
||||
ret = reinterpret_cast<Address>(V8_ASSUME_ALIGNED(
|
||||
reinterpret_cast<void*>(ret), kPtrComprCageBaseAlignment));
|
||||
reinterpret_cast<void*>(ret), kPtrComprIsolateRootAlignment));
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -53,17 +33,12 @@ V8_INLINE Tagged_t CompressTagged(Address tagged) {
|
||||
return static_cast<Tagged_t>(static_cast<uint32_t>(tagged));
|
||||
}
|
||||
|
||||
V8_INLINE constexpr Address GetPtrComprCageBaseAddress(Address on_heap_addr) {
|
||||
return RoundDown<kPtrComprCageBaseAlignment>(on_heap_addr);
|
||||
V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) {
|
||||
return RoundDown<kPtrComprIsolateRootAlignment>(on_heap_addr);
|
||||
}
|
||||
|
||||
V8_INLINE Address GetPtrComprCageBaseAddress(PtrComprCageBase cage_base) {
|
||||
return cage_base.address();
|
||||
}
|
||||
|
||||
V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(
|
||||
Address address) {
|
||||
return PtrComprCageBase(GetPtrComprCageBaseAddress(address));
|
||||
V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) {
|
||||
return isolate.address();
|
||||
}
|
||||
|
||||
// Decompresses smi value.
|
||||
@ -77,8 +52,7 @@ V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) {
|
||||
template <typename TOnHeapAddress>
|
||||
V8_INLINE Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr,
|
||||
Tagged_t raw_value) {
|
||||
return GetPtrComprCageBaseAddress(on_heap_addr) +
|
||||
static_cast<Address>(raw_value);
|
||||
return GetIsolateRootAddress(on_heap_addr) + static_cast<Address>(raw_value);
|
||||
}
|
||||
|
||||
// Decompresses any tagged value, preserving both weak- and smi- tags.
|
||||
@ -88,19 +62,18 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
|
||||
return DecompressTaggedPointer(on_heap_addr, raw_value);
|
||||
}
|
||||
|
||||
STATIC_ASSERT(kPtrComprCageReservationSize ==
|
||||
Internals::kPtrComprCageReservationSize);
|
||||
STATIC_ASSERT(kPtrComprCageBaseAlignment ==
|
||||
Internals::kPtrComprCageBaseAlignment);
|
||||
STATIC_ASSERT(kPtrComprHeapReservationSize ==
|
||||
Internals::kPtrComprHeapReservationSize);
|
||||
STATIC_ASSERT(kPtrComprIsolateRootAlignment ==
|
||||
Internals::kPtrComprIsolateRootAlignment);
|
||||
|
||||
#else
|
||||
|
||||
V8_INLINE Tagged_t CompressTagged(Address tagged) { UNREACHABLE(); }
|
||||
|
||||
V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(
|
||||
Address address) {
|
||||
return PtrComprCageBase();
|
||||
}
|
||||
V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); }
|
||||
|
||||
V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) { UNREACHABLE(); }
|
||||
|
||||
V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) { UNREACHABLE(); }
|
||||
|
||||
@ -117,11 +90,6 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
|
||||
}
|
||||
|
||||
#endif // V8_COMPRESS_POINTERS
|
||||
|
||||
inline PtrComprCageBase GetPtrComprCageBase(HeapObject object) {
|
||||
return GetPtrComprCageBaseFromOnHeapAddress(object.ptr());
|
||||
}
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
||||
|
@ -13,8 +13,8 @@ namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
// See v8:7703 for details about how pointer compression works.
|
||||
constexpr size_t kPtrComprCageReservationSize = size_t{4} * GB;
|
||||
constexpr size_t kPtrComprCageBaseAlignment = size_t{4} * GB;
|
||||
constexpr size_t kPtrComprHeapReservationSize = size_t{4} * GB;
|
||||
constexpr size_t kPtrComprIsolateRootAlignment = size_t{4} * GB;
|
||||
|
||||
} // namespace internal
|
||||
} // namespace v8
|
||||
|
@ -1275,7 +1275,8 @@ int TranslatedState::CreateNextTranslatedValue(
|
||||
|
||||
Address TranslatedState::DecompressIfNeeded(intptr_t value) {
|
||||
if (COMPRESS_POINTERS_BOOL) {
|
||||
return DecompressTaggedAny(isolate(), static_cast<uint32_t>(value));
|
||||
return DecompressTaggedAny(isolate()->isolate_root(),
|
||||
static_cast<uint32_t>(value));
|
||||
} else {
|
||||
return value;
|
||||
}
|
||||
|
@ -325,11 +325,11 @@ void BytecodeArray::BytecodeArrayVerify(Isolate* isolate) {
|
||||
|
||||
USE_TORQUE_VERIFIER(JSReceiver)
|
||||
|
||||
bool JSObject::ElementsAreSafeToExamine(PtrComprCageBase cage_base) const {
|
||||
bool JSObject::ElementsAreSafeToExamine(IsolateRoot isolate) const {
|
||||
// If a GC was caused while constructing this object, the elements
|
||||
// pointer may point to a one pointer filler map.
|
||||
return elements(cage_base) !=
|
||||
GetReadOnlyRoots(cage_base).one_pointer_filler_map();
|
||||
return elements(isolate) !=
|
||||
GetReadOnlyRoots(isolate).one_pointer_filler_map();
|
||||
}
|
||||
|
||||
namespace {
|
||||
|
@ -468,13 +468,13 @@ void PrintSloppyArgumentElements(std::ostream& os, ElementsKind kind,
|
||||
}
|
||||
}
|
||||
|
||||
void PrintEmbedderData(PtrComprCageBase cage_base, std::ostream& os,
|
||||
void PrintEmbedderData(IsolateRoot isolate, std::ostream& os,
|
||||
EmbedderDataSlot slot) {
|
||||
DisallowGarbageCollection no_gc;
|
||||
Object value = slot.load_tagged();
|
||||
os << Brief(value);
|
||||
void* raw_pointer;
|
||||
if (slot.ToAlignedPointer(cage_base, &raw_pointer)) {
|
||||
if (slot.ToAlignedPointer(isolate, &raw_pointer)) {
|
||||
os << ", aligned pointer: " << raw_pointer;
|
||||
}
|
||||
}
|
||||
@ -579,11 +579,11 @@ static void JSObjectPrintBody(std::ostream& os,
|
||||
}
|
||||
int embedder_fields = obj.GetEmbedderFieldCount();
|
||||
if (embedder_fields > 0) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(obj);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(obj);
|
||||
os << " - embedder fields = {";
|
||||
for (int i = 0; i < embedder_fields; i++) {
|
||||
os << "\n ";
|
||||
PrintEmbedderData(cage_base, os, EmbedderDataSlot(obj, i));
|
||||
PrintEmbedderData(isolate, os, EmbedderDataSlot(obj, i));
|
||||
}
|
||||
os << "\n }\n";
|
||||
}
|
||||
@ -762,14 +762,14 @@ void ObjectBoilerplateDescription::ObjectBoilerplateDescriptionPrint(
|
||||
}
|
||||
|
||||
void EmbedderDataArray::EmbedderDataArrayPrint(std::ostream& os) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
PrintHeader(os, "EmbedderDataArray");
|
||||
os << "\n - length: " << length();
|
||||
EmbedderDataSlot start(*this, 0);
|
||||
EmbedderDataSlot end(*this, length());
|
||||
for (EmbedderDataSlot slot = start; slot < end; ++slot) {
|
||||
os << "\n ";
|
||||
PrintEmbedderData(cage_base, os, slot);
|
||||
PrintEmbedderData(isolate, os, slot);
|
||||
}
|
||||
os << "\n";
|
||||
}
|
||||
@ -2747,11 +2747,12 @@ namespace {
|
||||
inline i::Object GetObjectFromRaw(void* object) {
|
||||
i::Address object_ptr = reinterpret_cast<i::Address>(object);
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
if (RoundDown<i::kPtrComprCageBaseAlignment>(object_ptr) == i::kNullAddress) {
|
||||
if (RoundDown<i::kPtrComprIsolateRootAlignment>(object_ptr) ==
|
||||
i::kNullAddress) {
|
||||
// Try to decompress pointer.
|
||||
i::Isolate* isolate = i::Isolate::Current();
|
||||
object_ptr =
|
||||
i::DecompressTaggedAny(isolate, static_cast<i::Tagged_t>(object_ptr));
|
||||
object_ptr = i::DecompressTaggedAny(isolate->isolate_root(),
|
||||
static_cast<i::Tagged_t>(object_ptr));
|
||||
}
|
||||
#endif
|
||||
return i::Object(object_ptr);
|
||||
|
@ -13,28 +13,18 @@
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
|
||||
// Aliases for GetPtrComprCageBase when
|
||||
// V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE. Each Isolate has its own cage, whose
|
||||
// base address is also the Isolate root.
|
||||
V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) {
|
||||
return GetPtrComprCageBaseAddress(on_heap_addr);
|
||||
}
|
||||
|
||||
V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) {
|
||||
return cage_base.address();
|
||||
}
|
||||
|
||||
inline constexpr IsolateRoot GetIsolateForPtrComprFromOnHeapAddress(
|
||||
Address address) {
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
return IsolateRoot(GetIsolateRootAddress(address));
|
||||
#else
|
||||
|
||||
V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); }
|
||||
|
||||
V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) {
|
||||
UNREACHABLE();
|
||||
return IsolateRoot();
|
||||
#endif // V8_COMPRESS_POINTERS
|
||||
}
|
||||
|
||||
#endif // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
inline IsolateRoot GetIsolateForPtrCompr(HeapObject object) {
|
||||
return GetIsolateForPtrComprFromOnHeapAddress(object.ptr());
|
||||
}
|
||||
|
||||
V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
|
||||
// Avoid using the below GetIsolateFromWritableObject because we want to be
|
||||
@ -42,7 +32,7 @@ V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
|
||||
|
||||
#if defined V8_ENABLE_THIRD_PARTY_HEAP
|
||||
return Heap::GetIsolateFromWritableObject(object)->heap();
|
||||
#elif defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
#elif defined V8_COMPRESS_POINTERS
|
||||
Isolate* isolate =
|
||||
Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()));
|
||||
DCHECK_NOT_NULL(isolate);
|
||||
@ -57,7 +47,7 @@ V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
|
||||
V8_INLINE Isolate* GetIsolateFromWritableObject(HeapObject object) {
|
||||
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
|
||||
return Heap::GetIsolateFromWritableObject(object);
|
||||
#elif defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
#elif defined V8_COMPRESS_POINTERS
|
||||
Isolate* isolate =
|
||||
Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()));
|
||||
DCHECK_NOT_NULL(isolate);
|
||||
|
@ -10,12 +10,11 @@
|
||||
namespace v8 {
|
||||
namespace internal {
|
||||
|
||||
// Computes the pointer compression cage base from any read only or writable
|
||||
// heap object. The resulting value is intended to be used only as a hoisted
|
||||
// computation of cage base inside trivial accessors for optimizing value
|
||||
// decompression. When pointer compression is disabled this function always
|
||||
// returns nullptr.
|
||||
V8_INLINE PtrComprCageBase GetPtrComprCageBase(HeapObject object);
|
||||
// Computes isolate from any read only or writable heap object. The resulting
|
||||
// value is intended to be used only as a hoisted computation of isolate root
|
||||
// inside trivial accessors for optmizing value decompression.
|
||||
// When pointer compression is disabled this function always returns nullptr.
|
||||
V8_INLINE IsolateRoot GetIsolateForPtrCompr(HeapObject object);
|
||||
|
||||
V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object);
|
||||
|
||||
|
@ -2861,8 +2861,8 @@ Isolate* Isolate::New() {
|
||||
// Construct Isolate object in the allocated memory.
|
||||
void* isolate_ptr = isolate_allocator->isolate_memory();
|
||||
Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator));
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
DCHECK(IsAligned(isolate->isolate_root(), kPtrComprCageBaseAlignment));
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
DCHECK(IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
|
||||
#endif
|
||||
|
||||
#ifdef DEBUG
|
||||
|
@ -151,18 +151,6 @@ struct MaybeBoolFlag {
|
||||
#define COMPRESS_POINTERS_BOOL false
|
||||
#endif
|
||||
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
#define COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL true
|
||||
#else
|
||||
#define COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL false
|
||||
#endif
|
||||
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
|
||||
#define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL true
|
||||
#else
|
||||
#define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL false
|
||||
#endif
|
||||
|
||||
#ifdef V8_HEAP_SANDBOX
|
||||
#define V8_HEAP_SANDBOX_BOOL true
|
||||
#else
|
||||
|
@ -382,11 +382,11 @@ namespace {
|
||||
|
||||
void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) {
|
||||
int field_count = jsobject.GetEmbedderFieldCount();
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(jsobject);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(jsobject);
|
||||
for (int i = 0; i < len; ++i) {
|
||||
if (field_count == i) break;
|
||||
void* pointer;
|
||||
if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(cage_base, &pointer)) {
|
||||
if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(isolate, &pointer)) {
|
||||
embedder_fields[i] = pointer;
|
||||
}
|
||||
}
|
||||
|
@ -289,7 +289,7 @@ size_t Heap::MinOldGenerationSize() {
|
||||
size_t Heap::AllocatorLimitOnMaxOldGenerationSize() {
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
// Isolate and the young generation are also allocated on the heap.
|
||||
return kPtrComprCageReservationSize -
|
||||
return kPtrComprHeapReservationSize -
|
||||
YoungGenerationSizeFromSemiSpaceSize(kMaxSemiSpaceSize) -
|
||||
RoundUp(sizeof(Isolate), size_t{1} << kPageSizeBits);
|
||||
#endif
|
||||
|
@ -2704,9 +2704,8 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot,
|
||||
}
|
||||
|
||||
template <AccessMode access_mode, typename TSlot>
|
||||
static inline SlotCallbackResult UpdateSlot(PtrComprCageBase cage_base,
|
||||
TSlot slot) {
|
||||
typename TSlot::TObject obj = slot.Relaxed_Load(cage_base);
|
||||
static inline SlotCallbackResult UpdateSlot(IsolateRoot isolate, TSlot slot) {
|
||||
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
|
||||
HeapObject heap_obj;
|
||||
if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) {
|
||||
UpdateSlot<access_mode, HeapObjectReferenceType::WEAK>(slot, obj, heap_obj);
|
||||
@ -2718,9 +2717,9 @@ static inline SlotCallbackResult UpdateSlot(PtrComprCageBase cage_base,
|
||||
}
|
||||
|
||||
template <AccessMode access_mode, typename TSlot>
|
||||
static inline SlotCallbackResult UpdateStrongSlot(PtrComprCageBase cage_base,
|
||||
static inline SlotCallbackResult UpdateStrongSlot(IsolateRoot isolate,
|
||||
TSlot slot) {
|
||||
typename TSlot::TObject obj = slot.Relaxed_Load(cage_base);
|
||||
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
|
||||
DCHECK(!HAS_WEAK_HEAP_OBJECT_TAG(obj.ptr()));
|
||||
HeapObject heap_obj;
|
||||
if (obj.GetHeapObject(&heap_obj)) {
|
||||
@ -2736,40 +2735,39 @@ static inline SlotCallbackResult UpdateStrongSlot(PtrComprCageBase cage_base,
|
||||
// It does not expect to encounter pointers to dead objects.
|
||||
class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
|
||||
public:
|
||||
explicit PointersUpdatingVisitor(PtrComprCageBase cage_base)
|
||||
: cage_base_(cage_base) {}
|
||||
explicit PointersUpdatingVisitor(IsolateRoot isolate) : isolate_(isolate) {}
|
||||
|
||||
void VisitPointer(HeapObject host, ObjectSlot p) override {
|
||||
UpdateStrongSlotInternal(cage_base_, p);
|
||||
UpdateStrongSlotInternal(isolate_, p);
|
||||
}
|
||||
|
||||
void VisitPointer(HeapObject host, MaybeObjectSlot p) override {
|
||||
UpdateSlotInternal(cage_base_, p);
|
||||
UpdateSlotInternal(isolate_, p);
|
||||
}
|
||||
|
||||
void VisitPointers(HeapObject host, ObjectSlot start,
|
||||
ObjectSlot end) override {
|
||||
for (ObjectSlot p = start; p < end; ++p) {
|
||||
UpdateStrongSlotInternal(cage_base_, p);
|
||||
UpdateStrongSlotInternal(isolate_, p);
|
||||
}
|
||||
}
|
||||
|
||||
void VisitPointers(HeapObject host, MaybeObjectSlot start,
|
||||
MaybeObjectSlot end) final {
|
||||
for (MaybeObjectSlot p = start; p < end; ++p) {
|
||||
UpdateSlotInternal(cage_base_, p);
|
||||
UpdateSlotInternal(isolate_, p);
|
||||
}
|
||||
}
|
||||
|
||||
void VisitRootPointer(Root root, const char* description,
|
||||
FullObjectSlot p) override {
|
||||
UpdateRootSlotInternal(cage_base_, p);
|
||||
UpdateRootSlotInternal(isolate_, p);
|
||||
}
|
||||
|
||||
void VisitRootPointers(Root root, const char* description,
|
||||
FullObjectSlot start, FullObjectSlot end) override {
|
||||
for (FullObjectSlot p = start; p < end; ++p) {
|
||||
UpdateRootSlotInternal(cage_base_, p);
|
||||
UpdateRootSlotInternal(isolate_, p);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2777,7 +2775,7 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
|
||||
OffHeapObjectSlot start,
|
||||
OffHeapObjectSlot end) override {
|
||||
for (OffHeapObjectSlot p = start; p < end; ++p) {
|
||||
UpdateRootSlotInternal(cage_base_, p);
|
||||
UpdateRootSlotInternal(isolate_, p);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2792,32 +2790,32 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
|
||||
}
|
||||
|
||||
private:
|
||||
static inline SlotCallbackResult UpdateRootSlotInternal(
|
||||
PtrComprCageBase cage_base, FullObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||
static inline SlotCallbackResult UpdateRootSlotInternal(IsolateRoot isolate,
|
||||
FullObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
||||
}
|
||||
|
||||
static inline SlotCallbackResult UpdateRootSlotInternal(
|
||||
PtrComprCageBase cage_base, OffHeapObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||
IsolateRoot isolate, OffHeapObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
||||
}
|
||||
|
||||
static inline SlotCallbackResult UpdateStrongMaybeObjectSlotInternal(
|
||||
PtrComprCageBase cage_base, MaybeObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||
IsolateRoot isolate, MaybeObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
||||
}
|
||||
|
||||
static inline SlotCallbackResult UpdateStrongSlotInternal(
|
||||
PtrComprCageBase cage_base, ObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||
static inline SlotCallbackResult UpdateStrongSlotInternal(IsolateRoot isolate,
|
||||
ObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
||||
}
|
||||
|
||||
static inline SlotCallbackResult UpdateSlotInternal(
|
||||
PtrComprCageBase cage_base, MaybeObjectSlot slot) {
|
||||
return UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||
static inline SlotCallbackResult UpdateSlotInternal(IsolateRoot isolate,
|
||||
MaybeObjectSlot slot) {
|
||||
return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
||||
}
|
||||
|
||||
PtrComprCageBase cage_base_;
|
||||
IsolateRoot isolate_;
|
||||
};
|
||||
|
||||
static String UpdateReferenceInExternalStringTableEntry(Heap* heap,
|
||||
@ -3583,7 +3581,7 @@ class ToSpaceUpdatingItem : public UpdatingItem {
|
||||
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
|
||||
"ToSpaceUpdatingItem::ProcessVisitAll");
|
||||
PointersUpdatingVisitor visitor(
|
||||
GetPtrComprCageBaseFromOnHeapAddress(start_));
|
||||
GetIsolateForPtrComprFromOnHeapAddress(start_));
|
||||
for (Address cur = start_; cur < end_;) {
|
||||
HeapObject object = HeapObject::FromAddress(cur);
|
||||
Map map = object.map();
|
||||
@ -3599,7 +3597,7 @@ class ToSpaceUpdatingItem : public UpdatingItem {
|
||||
// For young generation evacuations we want to visit grey objects, for
|
||||
// full MC, we need to visit black objects.
|
||||
PointersUpdatingVisitor visitor(
|
||||
GetPtrComprCageBaseFromOnHeapAddress(start_));
|
||||
GetIsolateForPtrComprFromOnHeapAddress(start_));
|
||||
for (auto object_and_size : LiveObjectRange<kAllLiveObjects>(
|
||||
chunk_, marking_state_->bitmap(chunk_))) {
|
||||
object_and_size.first.IterateBodyFast(&visitor);
|
||||
@ -3745,12 +3743,12 @@ class RememberedSetUpdatingItem : public UpdatingItem {
|
||||
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
|
||||
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
|
||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_);
|
||||
PtrComprCageBase cage_base = heap_->isolate();
|
||||
IsolateRoot isolate = heap_->isolate();
|
||||
RememberedSet<OLD_TO_OLD>::Iterate(
|
||||
chunk_,
|
||||
[&filter, cage_base](MaybeObjectSlot slot) {
|
||||
[&filter, isolate](MaybeObjectSlot slot) {
|
||||
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
|
||||
return UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||
return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
||||
},
|
||||
SlotSet::FREE_EMPTY_BUCKETS);
|
||||
chunk_->ReleaseSlotSet<OLD_TO_OLD>();
|
||||
@ -3785,10 +3783,10 @@ class RememberedSetUpdatingItem : public UpdatingItem {
|
||||
Address slot) {
|
||||
// Using UpdateStrongSlot is OK here, because there are no weak
|
||||
// typed slots.
|
||||
PtrComprCageBase cage_base = heap_->isolate();
|
||||
IsolateRoot isolate = heap_->isolate();
|
||||
return UpdateTypedSlotHelper::UpdateTypedSlot(
|
||||
heap_, slot_type, slot, [cage_base](FullMaybeObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||
heap_, slot_type, slot, [isolate](FullMaybeObjectSlot slot) {
|
||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
@ -14,9 +14,9 @@ namespace internal {
|
||||
|
||||
// static
|
||||
ReadOnlyRoots ReadOnlyHeap::GetReadOnlyRoots(HeapObject object) {
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
return ReadOnlyRoots(
|
||||
Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr())));
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(object);
|
||||
return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address()));
|
||||
#else
|
||||
#ifdef V8_SHARED_RO_HEAP
|
||||
// This fails if we are creating heap objects and the roots haven't yet been
|
||||
|
@ -37,7 +37,7 @@ base::LazyInstance<std::weak_ptr<ReadOnlyArtifacts>>::type
|
||||
|
||||
std::shared_ptr<ReadOnlyArtifacts> InitializeSharedReadOnlyArtifacts() {
|
||||
std::shared_ptr<ReadOnlyArtifacts> artifacts;
|
||||
if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
|
||||
if (COMPRESS_POINTERS_BOOL) {
|
||||
artifacts = std::make_shared<PointerCompressedReadOnlyArtifacts>();
|
||||
} else {
|
||||
artifacts = std::make_shared<SingleCopyReadOnlyArtifacts>();
|
||||
@ -129,7 +129,7 @@ ReadOnlyHeap::ReadOnlyHeap(ReadOnlyHeap* ro_heap, ReadOnlySpace* ro_space)
|
||||
: read_only_space_(ro_space),
|
||||
read_only_object_cache_(ro_heap->read_only_object_cache_) {
|
||||
DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
|
||||
DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
|
||||
DCHECK(COMPRESS_POINTERS_BOOL);
|
||||
}
|
||||
|
||||
// static
|
||||
@ -139,7 +139,7 @@ ReadOnlyHeap* ReadOnlyHeap::CreateInitalHeapForBootstrapping(
|
||||
|
||||
std::unique_ptr<ReadOnlyHeap> ro_heap;
|
||||
auto* ro_space = new ReadOnlySpace(isolate->heap());
|
||||
if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
|
||||
if (COMPRESS_POINTERS_BOOL) {
|
||||
ro_heap.reset(new ReadOnlyHeap(ro_space));
|
||||
} else {
|
||||
std::unique_ptr<SoleReadOnlyHeap> sole_ro_heap(
|
||||
|
@ -87,8 +87,8 @@ class ReadOnlyHeap {
|
||||
// Returns whether the ReadOnlySpace will actually be shared taking into
|
||||
// account whether shared memory is available with pointer compression.
|
||||
static bool IsReadOnlySpaceShared() {
|
||||
return V8_SHARED_RO_HEAP_BOOL && (!COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL ||
|
||||
IsSharedMemoryAvailable());
|
||||
return V8_SHARED_RO_HEAP_BOOL &&
|
||||
(!COMPRESS_POINTERS_BOOL || IsSharedMemoryAvailable());
|
||||
}
|
||||
|
||||
virtual void InitializeIsolateRoots(Isolate* isolate) {}
|
||||
|
@ -755,10 +755,9 @@ SharedReadOnlySpace::SharedReadOnlySpace(
|
||||
Heap* heap, PointerCompressedReadOnlyArtifacts* artifacts)
|
||||
: SharedReadOnlySpace(heap) {
|
||||
// This constructor should only be used when RO_SPACE is shared with pointer
|
||||
// compression in a per-Isolate cage.
|
||||
// compression.
|
||||
DCHECK(V8_SHARED_RO_HEAP_BOOL);
|
||||
DCHECK(COMPRESS_POINTERS_BOOL);
|
||||
DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
|
||||
DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
|
||||
DCHECK(!artifacts->pages().empty());
|
||||
|
||||
@ -777,7 +776,6 @@ SharedReadOnlySpace::SharedReadOnlySpace(
|
||||
: SharedReadOnlySpace(heap) {
|
||||
DCHECK(V8_SHARED_RO_HEAP_BOOL);
|
||||
DCHECK(COMPRESS_POINTERS_BOOL);
|
||||
DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
|
||||
DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
|
||||
|
||||
accounting_stats_ = std::move(new_stats);
|
||||
|
@ -35,11 +35,10 @@ class ReadOnlyPage : public BasicMemoryChunk {
|
||||
// Returns the address for a given offset in this page.
|
||||
Address OffsetToAddress(size_t offset) const {
|
||||
Address address_in_page = address() + offset;
|
||||
if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
|
||||
// Pointer compression with a per-Isolate cage and shared ReadOnlyPages
|
||||
// means that the area_start and area_end cannot be defined since they are
|
||||
// stored within the pages which can be mapped at multiple memory
|
||||
// addresses.
|
||||
if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_BOOL) {
|
||||
// Pointer compression with share ReadOnlyPages means that the area_start
|
||||
// and area_end cannot be defined since they are stored within the pages
|
||||
// which can be mapped at multiple memory addresses.
|
||||
DCHECK_LT(offset, size());
|
||||
} else {
|
||||
DCHECK_GE(address_in_page, area_start());
|
||||
|
@ -59,8 +59,8 @@ Address IsolateAllocator::InitReservation() {
|
||||
// Reserve a |4Gb + kIsolateRootBiasPageSize| region such as that the
|
||||
// resevation address plus |kIsolateRootBiasPageSize| is 4Gb aligned.
|
||||
const size_t reservation_size =
|
||||
kPtrComprCageReservationSize + kIsolateRootBiasPageSize;
|
||||
const size_t base_alignment = kPtrComprCageBaseAlignment;
|
||||
kPtrComprHeapReservationSize + kIsolateRootBiasPageSize;
|
||||
const size_t base_alignment = kPtrComprIsolateRootAlignment;
|
||||
|
||||
const int kMaxAttempts = 4;
|
||||
for (int attempt = 0; attempt < kMaxAttempts; ++attempt) {
|
||||
@ -137,11 +137,11 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) {
|
||||
GetIsolateRootBiasPageSize(platform_page_allocator);
|
||||
|
||||
Address isolate_root = heap_reservation_address + kIsolateRootBiasPageSize;
|
||||
CHECK(IsAligned(isolate_root, kPtrComprCageBaseAlignment));
|
||||
CHECK(IsAligned(isolate_root, kPtrComprIsolateRootAlignment));
|
||||
|
||||
CHECK(reservation_.InVM(
|
||||
heap_reservation_address,
|
||||
kPtrComprCageReservationSize + kIsolateRootBiasPageSize));
|
||||
kPtrComprHeapReservationSize + kIsolateRootBiasPageSize));
|
||||
|
||||
// Simplify BoundedPageAllocator's life by configuring it to use same page
|
||||
// size as the Heap will use (MemoryChunk::kPageSize).
|
||||
@ -149,7 +149,7 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) {
|
||||
platform_page_allocator->AllocatePageSize());
|
||||
|
||||
page_allocator_instance_ = std::make_unique<base::BoundedPageAllocator>(
|
||||
platform_page_allocator, isolate_root, kPtrComprCageReservationSize,
|
||||
platform_page_allocator, isolate_root, kPtrComprHeapReservationSize,
|
||||
page_size);
|
||||
page_allocator_ = page_allocator_instance_.get();
|
||||
|
||||
|
@ -323,9 +323,9 @@ int Code::SizeIncludingMetadata() const {
|
||||
}
|
||||
|
||||
ByteArray Code::unchecked_relocation_info() const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return ByteArray::unchecked_cast(
|
||||
TaggedField<HeapObject, kRelocationInfoOffset>::load(cage_base, *this));
|
||||
TaggedField<HeapObject, kRelocationInfoOffset>::load(isolate, *this));
|
||||
}
|
||||
|
||||
byte* Code::relocation_start() const {
|
||||
|
@ -33,9 +33,9 @@ Object CompressedObjectSlot::operator*() const {
|
||||
return Object(DecompressTaggedAny(address(), value));
|
||||
}
|
||||
|
||||
Object CompressedObjectSlot::load(PtrComprCageBase cage_base) const {
|
||||
Object CompressedObjectSlot::load(IsolateRoot isolate) const {
|
||||
Tagged_t value = *location();
|
||||
return Object(DecompressTaggedAny(cage_base, value));
|
||||
return Object(DecompressTaggedAny(isolate, value));
|
||||
}
|
||||
|
||||
void CompressedObjectSlot::store(Object value) const {
|
||||
@ -52,9 +52,9 @@ Object CompressedObjectSlot::Relaxed_Load() const {
|
||||
return Object(DecompressTaggedAny(address(), value));
|
||||
}
|
||||
|
||||
Object CompressedObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const {
|
||||
Object CompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
|
||||
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
|
||||
return Object(DecompressTaggedAny(cage_base, value));
|
||||
return Object(DecompressTaggedAny(isolate, value));
|
||||
}
|
||||
|
||||
void CompressedObjectSlot::Relaxed_Store(Object value) const {
|
||||
@ -85,9 +85,9 @@ MaybeObject CompressedMaybeObjectSlot::operator*() const {
|
||||
return MaybeObject(DecompressTaggedAny(address(), value));
|
||||
}
|
||||
|
||||
MaybeObject CompressedMaybeObjectSlot::load(PtrComprCageBase cage_base) const {
|
||||
MaybeObject CompressedMaybeObjectSlot::load(IsolateRoot isolate) const {
|
||||
Tagged_t value = *location();
|
||||
return MaybeObject(DecompressTaggedAny(cage_base, value));
|
||||
return MaybeObject(DecompressTaggedAny(isolate, value));
|
||||
}
|
||||
|
||||
void CompressedMaybeObjectSlot::store(MaybeObject value) const {
|
||||
@ -99,10 +99,9 @@ MaybeObject CompressedMaybeObjectSlot::Relaxed_Load() const {
|
||||
return MaybeObject(DecompressTaggedAny(address(), value));
|
||||
}
|
||||
|
||||
MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(
|
||||
PtrComprCageBase cage_base) const {
|
||||
MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
|
||||
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
|
||||
return MaybeObject(DecompressTaggedAny(cage_base, value));
|
||||
return MaybeObject(DecompressTaggedAny(isolate, value));
|
||||
}
|
||||
|
||||
void CompressedMaybeObjectSlot::Relaxed_Store(MaybeObject value) const {
|
||||
@ -126,10 +125,9 @@ HeapObjectReference CompressedHeapObjectSlot::operator*() const {
|
||||
return HeapObjectReference(DecompressTaggedPointer(address(), value));
|
||||
}
|
||||
|
||||
HeapObjectReference CompressedHeapObjectSlot::load(
|
||||
PtrComprCageBase cage_base) const {
|
||||
HeapObjectReference CompressedHeapObjectSlot::load(IsolateRoot isolate) const {
|
||||
Tagged_t value = *location();
|
||||
return HeapObjectReference(DecompressTaggedPointer(cage_base, value));
|
||||
return HeapObjectReference(DecompressTaggedPointer(isolate, value));
|
||||
}
|
||||
|
||||
void CompressedHeapObjectSlot::store(HeapObjectReference value) const {
|
||||
@ -150,25 +148,23 @@ void CompressedHeapObjectSlot::StoreHeapObject(HeapObject value) const {
|
||||
// OffHeapCompressedObjectSlot implementation.
|
||||
//
|
||||
|
||||
Object OffHeapCompressedObjectSlot::load(PtrComprCageBase cage_base) const {
|
||||
Object OffHeapCompressedObjectSlot::load(IsolateRoot isolate) const {
|
||||
Tagged_t value = *location();
|
||||
return Object(DecompressTaggedAny(cage_base, value));
|
||||
return Object(DecompressTaggedAny(isolate, value));
|
||||
}
|
||||
|
||||
void OffHeapCompressedObjectSlot::store(Object value) const {
|
||||
*location() = CompressTagged(value.ptr());
|
||||
}
|
||||
|
||||
Object OffHeapCompressedObjectSlot::Relaxed_Load(
|
||||
PtrComprCageBase cage_base) const {
|
||||
Object OffHeapCompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
|
||||
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
|
||||
return Object(DecompressTaggedAny(cage_base, value));
|
||||
return Object(DecompressTaggedAny(isolate, value));
|
||||
}
|
||||
|
||||
Object OffHeapCompressedObjectSlot::Acquire_Load(
|
||||
PtrComprCageBase cage_base) const {
|
||||
Object OffHeapCompressedObjectSlot::Acquire_Load(IsolateRoot isolate) const {
|
||||
AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location());
|
||||
return Object(DecompressTaggedAny(cage_base, value));
|
||||
return Object(DecompressTaggedAny(isolate, value));
|
||||
}
|
||||
|
||||
void OffHeapCompressedObjectSlot::Relaxed_Store(Object value) const {
|
||||
|
@ -41,12 +41,12 @@ class CompressedObjectSlot : public SlotBase<CompressedObjectSlot, Tagged_t> {
|
||||
// TODO(leszeks): Consider deprecating the operator* load, and always pass the
|
||||
// Isolate.
|
||||
inline Object operator*() const;
|
||||
inline Object load(PtrComprCageBase cage_base) const;
|
||||
inline Object load(IsolateRoot isolate) const;
|
||||
inline void store(Object value) const;
|
||||
|
||||
inline Object Acquire_Load() const;
|
||||
inline Object Relaxed_Load() const;
|
||||
inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
|
||||
inline Object Relaxed_Load(IsolateRoot isolate) const;
|
||||
inline void Relaxed_Store(Object value) const;
|
||||
inline void Release_Store(Object value) const;
|
||||
inline Object Release_CompareAndSwap(Object old, Object target) const;
|
||||
@ -77,11 +77,11 @@ class CompressedMaybeObjectSlot
|
||||
: SlotBase(slot.address()) {}
|
||||
|
||||
inline MaybeObject operator*() const;
|
||||
inline MaybeObject load(PtrComprCageBase cage_base) const;
|
||||
inline MaybeObject load(IsolateRoot isolate) const;
|
||||
inline void store(MaybeObject value) const;
|
||||
|
||||
inline MaybeObject Relaxed_Load() const;
|
||||
inline MaybeObject Relaxed_Load(PtrComprCageBase cage_base) const;
|
||||
inline MaybeObject Relaxed_Load(IsolateRoot isolate) const;
|
||||
inline void Relaxed_Store(MaybeObject value) const;
|
||||
inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
|
||||
};
|
||||
@ -105,7 +105,7 @@ class CompressedHeapObjectSlot
|
||||
: SlotBase(slot.address()) {}
|
||||
|
||||
inline HeapObjectReference operator*() const;
|
||||
inline HeapObjectReference load(PtrComprCageBase cage_base) const;
|
||||
inline HeapObjectReference load(IsolateRoot isolate) const;
|
||||
inline void store(HeapObjectReference value) const;
|
||||
|
||||
inline HeapObject ToHeapObject() const;
|
||||
@ -131,11 +131,11 @@ class OffHeapCompressedObjectSlot
|
||||
explicit OffHeapCompressedObjectSlot(const uint32_t* ptr)
|
||||
: SlotBase(reinterpret_cast<Address>(ptr)) {}
|
||||
|
||||
inline Object load(PtrComprCageBase cage_base) const;
|
||||
inline Object load(IsolateRoot isolate) const;
|
||||
inline void store(Object value) const;
|
||||
|
||||
inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
|
||||
inline Object Acquire_Load(PtrComprCageBase cage_base) const;
|
||||
inline Object Relaxed_Load(IsolateRoot isolate) const;
|
||||
inline Object Acquire_Load(IsolateRoot isolate) const;
|
||||
inline void Relaxed_Store(Object value) const;
|
||||
inline void Release_Store(Object value) const;
|
||||
inline void Release_CompareAndSwap(Object old, Object target) const;
|
||||
|
@ -56,8 +56,8 @@ NEVER_READ_ONLY_SPACE_IMPL(Context)
|
||||
CAST_ACCESSOR(NativeContext)
|
||||
|
||||
V8_INLINE Object Context::get(int index) const { return elements(index); }
|
||||
V8_INLINE Object Context::get(PtrComprCageBase cage_base, int index) const {
|
||||
return elements(cage_base, index);
|
||||
V8_INLINE Object Context::get(IsolateRoot isolate, int index) const {
|
||||
return elements(isolate, index);
|
||||
}
|
||||
V8_INLINE void Context::set(int index, Object value) {
|
||||
set_elements(index, value);
|
||||
@ -71,11 +71,11 @@ void Context::set_scope_info(ScopeInfo scope_info, WriteBarrierMode mode) {
|
||||
}
|
||||
|
||||
Object Context::synchronized_get(int index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return synchronized_get(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return synchronized_get(isolate, index);
|
||||
}
|
||||
|
||||
Object Context::synchronized_get(PtrComprCageBase cage_base, int index) const {
|
||||
Object Context::synchronized_get(IsolateRoot isolate, int index) const {
|
||||
DCHECK_LT(static_cast<unsigned int>(index),
|
||||
static_cast<unsigned int>(this->length()));
|
||||
return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
|
||||
@ -243,7 +243,7 @@ Map Context::GetInitialJSArrayMap(ElementsKind kind) const {
|
||||
|
||||
DEF_GETTER(NativeContext, microtask_queue, MicrotaskQueue*) {
|
||||
return reinterpret_cast<MicrotaskQueue*>(ReadExternalPointerField(
|
||||
kMicrotaskQueueOffset, cage_base, kNativeContextMicrotaskQueueTag));
|
||||
kMicrotaskQueueOffset, isolate, kNativeContextMicrotaskQueueTag));
|
||||
}
|
||||
|
||||
void NativeContext::AllocateExternalPointerEntries(Isolate* isolate) {
|
||||
|
@ -422,14 +422,13 @@ class Context : public TorqueGeneratedContext<Context, HeapObject> {
|
||||
|
||||
// Setter and getter for elements.
|
||||
V8_INLINE Object get(int index) const;
|
||||
V8_INLINE Object get(PtrComprCageBase cage_base, int index) const;
|
||||
V8_INLINE Object get(IsolateRoot isolate, int index) const;
|
||||
V8_INLINE void set(int index, Object value);
|
||||
// Setter with explicit barrier mode.
|
||||
V8_INLINE void set(int index, Object value, WriteBarrierMode mode);
|
||||
// Setter and getter with synchronization semantics.
|
||||
V8_INLINE Object synchronized_get(int index) const;
|
||||
V8_INLINE Object synchronized_get(PtrComprCageBase cage_base,
|
||||
int index) const;
|
||||
V8_INLINE Object synchronized_get(IsolateRoot isolate, int index) const;
|
||||
V8_INLINE void synchronized_set(int index, Object value);
|
||||
|
||||
static const int kScopeInfoOffset = kElementsOffset;
|
||||
|
@ -106,16 +106,15 @@ ObjectSlot DescriptorArray::GetDescriptorSlot(int descriptor) {
|
||||
}
|
||||
|
||||
Name DescriptorArray::GetKey(InternalIndex descriptor_number) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return GetKey(cage_base, descriptor_number);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return GetKey(isolate, descriptor_number);
|
||||
}
|
||||
|
||||
Name DescriptorArray::GetKey(PtrComprCageBase cage_base,
|
||||
Name DescriptorArray::GetKey(IsolateRoot isolate,
|
||||
InternalIndex descriptor_number) const {
|
||||
DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
|
||||
int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
|
||||
return Name::cast(
|
||||
EntryKeyField::Relaxed_Load(cage_base, *this, entry_offset));
|
||||
return Name::cast(EntryKeyField::Relaxed_Load(isolate, *this, entry_offset));
|
||||
}
|
||||
|
||||
void DescriptorArray::SetKey(InternalIndex descriptor_number, Name key) {
|
||||
@ -130,13 +129,12 @@ int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
|
||||
}
|
||||
|
||||
Name DescriptorArray::GetSortedKey(int descriptor_number) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return GetSortedKey(cage_base, descriptor_number);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return GetSortedKey(isolate, descriptor_number);
|
||||
}
|
||||
|
||||
Name DescriptorArray::GetSortedKey(PtrComprCageBase cage_base,
|
||||
int descriptor_number) {
|
||||
return GetKey(cage_base, InternalIndex(GetSortedKeyIndex(descriptor_number)));
|
||||
Name DescriptorArray::GetSortedKey(IsolateRoot isolate, int descriptor_number) {
|
||||
return GetKey(isolate, InternalIndex(GetSortedKeyIndex(descriptor_number)));
|
||||
}
|
||||
|
||||
void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) {
|
||||
@ -145,13 +143,13 @@ void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) {
|
||||
}
|
||||
|
||||
Object DescriptorArray::GetStrongValue(InternalIndex descriptor_number) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return GetStrongValue(cage_base, descriptor_number);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return GetStrongValue(isolate, descriptor_number);
|
||||
}
|
||||
|
||||
Object DescriptorArray::GetStrongValue(PtrComprCageBase cage_base,
|
||||
Object DescriptorArray::GetStrongValue(IsolateRoot isolate,
|
||||
InternalIndex descriptor_number) {
|
||||
return GetValue(cage_base, descriptor_number).cast<Object>();
|
||||
return GetValue(isolate, descriptor_number).cast<Object>();
|
||||
}
|
||||
|
||||
void DescriptorArray::SetValue(InternalIndex descriptor_number,
|
||||
@ -163,15 +161,15 @@ void DescriptorArray::SetValue(InternalIndex descriptor_number,
|
||||
}
|
||||
|
||||
MaybeObject DescriptorArray::GetValue(InternalIndex descriptor_number) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return GetValue(cage_base, descriptor_number);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return GetValue(isolate, descriptor_number);
|
||||
}
|
||||
|
||||
MaybeObject DescriptorArray::GetValue(PtrComprCageBase cage_base,
|
||||
MaybeObject DescriptorArray::GetValue(IsolateRoot isolate,
|
||||
InternalIndex descriptor_number) {
|
||||
DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
|
||||
int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
|
||||
return EntryValueField::Relaxed_Load(cage_base, *this, entry_offset);
|
||||
return EntryValueField::Relaxed_Load(isolate, *this, entry_offset);
|
||||
}
|
||||
|
||||
PropertyDetails DescriptorArray::GetDetails(InternalIndex descriptor_number) {
|
||||
@ -194,14 +192,14 @@ int DescriptorArray::GetFieldIndex(InternalIndex descriptor_number) {
|
||||
}
|
||||
|
||||
FieldType DescriptorArray::GetFieldType(InternalIndex descriptor_number) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return GetFieldType(cage_base, descriptor_number);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return GetFieldType(isolate, descriptor_number);
|
||||
}
|
||||
|
||||
FieldType DescriptorArray::GetFieldType(PtrComprCageBase cage_base,
|
||||
FieldType DescriptorArray::GetFieldType(IsolateRoot isolate,
|
||||
InternalIndex descriptor_number) {
|
||||
DCHECK_EQ(GetDetails(descriptor_number).location(), kField);
|
||||
MaybeObject wrapped_type = GetValue(cage_base, descriptor_number);
|
||||
MaybeObject wrapped_type = GetValue(isolate, descriptor_number);
|
||||
return Map::UnwrapFieldType(wrapped_type);
|
||||
}
|
||||
|
||||
|
@ -69,22 +69,22 @@ class DescriptorArray
|
||||
|
||||
// Accessors for fetching instance descriptor at descriptor number.
|
||||
inline Name GetKey(InternalIndex descriptor_number) const;
|
||||
inline Name GetKey(PtrComprCageBase cage_base,
|
||||
inline Name GetKey(IsolateRoot isolate,
|
||||
InternalIndex descriptor_number) const;
|
||||
inline Object GetStrongValue(InternalIndex descriptor_number);
|
||||
inline Object GetStrongValue(PtrComprCageBase cage_base,
|
||||
inline Object GetStrongValue(IsolateRoot isolate,
|
||||
InternalIndex descriptor_number);
|
||||
inline MaybeObject GetValue(InternalIndex descriptor_number);
|
||||
inline MaybeObject GetValue(PtrComprCageBase cage_base,
|
||||
inline MaybeObject GetValue(IsolateRoot isolate,
|
||||
InternalIndex descriptor_number);
|
||||
inline PropertyDetails GetDetails(InternalIndex descriptor_number);
|
||||
inline int GetFieldIndex(InternalIndex descriptor_number);
|
||||
inline FieldType GetFieldType(InternalIndex descriptor_number);
|
||||
inline FieldType GetFieldType(PtrComprCageBase cage_base,
|
||||
inline FieldType GetFieldType(IsolateRoot isolate,
|
||||
InternalIndex descriptor_number);
|
||||
|
||||
inline Name GetSortedKey(int descriptor_number);
|
||||
inline Name GetSortedKey(PtrComprCageBase cage_base, int descriptor_number);
|
||||
inline Name GetSortedKey(IsolateRoot isolate, int descriptor_number);
|
||||
inline int GetSortedKeyIndex(int descriptor_number);
|
||||
|
||||
// Accessor for complete descriptor.
|
||||
|
@ -30,15 +30,15 @@ Dictionary<Derived, Shape>::Dictionary(Address ptr)
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
Object Dictionary<Derived, Shape>::ValueAt(InternalIndex entry) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return ValueAt(cage_base, entry);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return ValueAt(isolate, entry);
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
Object Dictionary<Derived, Shape>::ValueAt(PtrComprCageBase cage_base,
|
||||
Object Dictionary<Derived, Shape>::ValueAt(IsolateRoot isolate,
|
||||
InternalIndex entry) {
|
||||
return this->get(cage_base, DerivedHashTable::EntryToIndex(entry) +
|
||||
Derived::kEntryValueIndex);
|
||||
return this->get(isolate, DerivedHashTable::EntryToIndex(entry) +
|
||||
Derived::kEntryValueIndex);
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
@ -181,12 +181,12 @@ Handle<Map> GlobalDictionary::GetMap(ReadOnlyRoots roots) {
|
||||
}
|
||||
|
||||
Name NameDictionary::NameAt(InternalIndex entry) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return NameAt(cage_base, entry);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return NameAt(isolate, entry);
|
||||
}
|
||||
|
||||
Name NameDictionary::NameAt(PtrComprCageBase cage_base, InternalIndex entry) {
|
||||
return Name::cast(KeyAt(cage_base, entry));
|
||||
Name NameDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) {
|
||||
return Name::cast(KeyAt(isolate, entry));
|
||||
}
|
||||
|
||||
Handle<Map> NameDictionary::GetMap(ReadOnlyRoots roots) {
|
||||
@ -194,33 +194,32 @@ Handle<Map> NameDictionary::GetMap(ReadOnlyRoots roots) {
|
||||
}
|
||||
|
||||
PropertyCell GlobalDictionary::CellAt(InternalIndex entry) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return CellAt(cage_base, entry);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return CellAt(isolate, entry);
|
||||
}
|
||||
|
||||
PropertyCell GlobalDictionary::CellAt(PtrComprCageBase cage_base,
|
||||
PropertyCell GlobalDictionary::CellAt(IsolateRoot isolate,
|
||||
InternalIndex entry) {
|
||||
DCHECK(KeyAt(cage_base, entry).IsPropertyCell(cage_base));
|
||||
return PropertyCell::cast(KeyAt(cage_base, entry));
|
||||
DCHECK(KeyAt(isolate, entry).IsPropertyCell(isolate));
|
||||
return PropertyCell::cast(KeyAt(isolate, entry));
|
||||
}
|
||||
|
||||
Name GlobalDictionary::NameAt(InternalIndex entry) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return NameAt(cage_base, entry);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return NameAt(isolate, entry);
|
||||
}
|
||||
|
||||
Name GlobalDictionary::NameAt(PtrComprCageBase cage_base, InternalIndex entry) {
|
||||
return CellAt(cage_base, entry).name(cage_base);
|
||||
Name GlobalDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) {
|
||||
return CellAt(isolate, entry).name(isolate);
|
||||
}
|
||||
|
||||
Object GlobalDictionary::ValueAt(InternalIndex entry) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return ValueAt(cage_base, entry);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return ValueAt(isolate, entry);
|
||||
}
|
||||
|
||||
Object GlobalDictionary::ValueAt(PtrComprCageBase cage_base,
|
||||
InternalIndex entry) {
|
||||
return CellAt(cage_base, entry).value(cage_base);
|
||||
Object GlobalDictionary::ValueAt(IsolateRoot isolate, InternalIndex entry) {
|
||||
return CellAt(isolate, entry).value(isolate);
|
||||
}
|
||||
|
||||
void GlobalDictionary::SetEntry(InternalIndex entry, Object key, Object value,
|
||||
|
@ -39,7 +39,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) Dictionary
|
||||
using Key = typename Shape::Key;
|
||||
// Returns the value at entry.
|
||||
inline Object ValueAt(InternalIndex entry);
|
||||
inline Object ValueAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||
inline Object ValueAt(IsolateRoot isolate, InternalIndex entry);
|
||||
|
||||
// Set the value for entry.
|
||||
inline void ValueAtPut(InternalIndex entry, Object value);
|
||||
@ -193,7 +193,7 @@ class V8_EXPORT_PRIVATE NameDictionary
|
||||
static const int kInitialCapacity = 2;
|
||||
|
||||
inline Name NameAt(InternalIndex entry);
|
||||
inline Name NameAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||
inline Name NameAt(IsolateRoot isolate, InternalIndex entry);
|
||||
|
||||
inline void set_hash(int hash);
|
||||
inline int hash() const;
|
||||
@ -231,14 +231,14 @@ class V8_EXPORT_PRIVATE GlobalDictionary
|
||||
DECL_PRINTER(GlobalDictionary)
|
||||
|
||||
inline Object ValueAt(InternalIndex entry);
|
||||
inline Object ValueAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||
inline Object ValueAt(IsolateRoot isolate, InternalIndex entry);
|
||||
inline PropertyCell CellAt(InternalIndex entry);
|
||||
inline PropertyCell CellAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||
inline PropertyCell CellAt(IsolateRoot isolate, InternalIndex entry);
|
||||
inline void SetEntry(InternalIndex entry, Object key, Object value,
|
||||
PropertyDetails details);
|
||||
inline void ClearEntry(InternalIndex entry);
|
||||
inline Name NameAt(InternalIndex entry);
|
||||
inline Name NameAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||
inline Name NameAt(IsolateRoot isolate, InternalIndex entry);
|
||||
inline void ValueAtPut(InternalIndex entry, Object value);
|
||||
|
||||
OBJECT_CONSTRUCTORS(
|
||||
|
@ -1421,10 +1421,10 @@ class DictionaryElementsAccessor
|
||||
DisallowGarbageCollection no_gc;
|
||||
NumberDictionary dict = NumberDictionary::cast(backing_store);
|
||||
if (!dict.requires_slow_elements()) return false;
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(holder);
|
||||
ReadOnlyRoots roots = holder.GetReadOnlyRoots(cage_base);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(holder);
|
||||
ReadOnlyRoots roots = holder.GetReadOnlyRoots(isolate);
|
||||
for (InternalIndex i : dict.IterateEntries()) {
|
||||
Object key = dict.KeyAt(cage_base, i);
|
||||
Object key = dict.KeyAt(isolate, i);
|
||||
if (!dict.IsKey(roots, key)) continue;
|
||||
PropertyDetails details = dict.DetailsAt(i);
|
||||
if (details.kind() == kAccessor) return true;
|
||||
|
@ -81,7 +81,7 @@ void EmbedderDataSlot::store_tagged(JSObject object, int embedder_field_index,
|
||||
#endif
|
||||
}
|
||||
|
||||
bool EmbedderDataSlot::ToAlignedPointer(PtrComprCageBase isolate_root,
|
||||
bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root,
|
||||
void** out_pointer) const {
|
||||
// We don't care about atomicity of access here because embedder slots
|
||||
// are accessed this way only from the main thread via API during "mutator"
|
||||
@ -89,12 +89,6 @@ bool EmbedderDataSlot::ToAlignedPointer(PtrComprCageBase isolate_root,
|
||||
// at the tagged part of the embedder slot but read-only access is ok).
|
||||
Address raw_value;
|
||||
#ifdef V8_HEAP_SANDBOX
|
||||
|
||||
// TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
|
||||
#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
|
||||
#endif
|
||||
|
||||
uint32_t index = base::Memory<uint32_t>(address() + kRawPayloadOffset);
|
||||
const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());
|
||||
raw_value = isolate->external_pointer_table().get(index) ^
|
||||
@ -114,15 +108,9 @@ bool EmbedderDataSlot::ToAlignedPointer(PtrComprCageBase isolate_root,
|
||||
return HAS_SMI_TAG(raw_value);
|
||||
}
|
||||
|
||||
bool EmbedderDataSlot::ToAlignedPointerSafe(PtrComprCageBase isolate_root,
|
||||
bool EmbedderDataSlot::ToAlignedPointerSafe(IsolateRoot isolate_root,
|
||||
void** out_pointer) const {
|
||||
#ifdef V8_HEAP_SANDBOX
|
||||
|
||||
// TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
|
||||
#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
|
||||
#endif
|
||||
|
||||
uint32_t index = base::Memory<uint32_t>(address() + kRawPayloadOffset);
|
||||
Address raw_value;
|
||||
const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());
|
||||
|
@ -75,8 +75,7 @@ class EmbedderDataSlot
|
||||
// When V8 heap sandbox is enabled, calling this method when the raw part of
|
||||
// the slot does not contain valid external pointer table index is undefined
|
||||
// behaviour and most likely result in crashes.
|
||||
V8_INLINE bool ToAlignedPointer(PtrComprCageBase isolate_root,
|
||||
void** out_result) const;
|
||||
V8_INLINE bool ToAlignedPointer(IsolateRoot isolate, void** out_result) const;
|
||||
|
||||
// Same as ToAlignedPointer() but with a workaround for V8 heap sandbox.
|
||||
// When V8 heap sandbox is enabled, this method doesn't crash when the raw
|
||||
@ -87,7 +86,7 @@ class EmbedderDataSlot
|
||||
//
|
||||
// Call this function if you are not sure whether the slot contains valid
|
||||
// external pointer or not.
|
||||
V8_INLINE bool ToAlignedPointerSafe(PtrComprCageBase isolate_root,
|
||||
V8_INLINE bool ToAlignedPointerSafe(IsolateRoot isolate,
|
||||
void** out_result) const;
|
||||
|
||||
// Returns true if the pointer was successfully stored or false it the pointer
|
||||
|
@ -187,9 +187,8 @@ MaybeObject FeedbackVector::Get(FeedbackSlot slot) const {
|
||||
return value;
|
||||
}
|
||||
|
||||
MaybeObject FeedbackVector::Get(PtrComprCageBase cage_base,
|
||||
FeedbackSlot slot) const {
|
||||
MaybeObject value = raw_feedback_slots(cage_base, GetIndex(slot));
|
||||
MaybeObject FeedbackVector::Get(IsolateRoot isolate, FeedbackSlot slot) const {
|
||||
MaybeObject value = raw_feedback_slots(isolate, GetIndex(slot));
|
||||
DCHECK(!IsOfLegacyType(value));
|
||||
return value;
|
||||
}
|
||||
|
@ -259,7 +259,7 @@ class FeedbackVector
|
||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
||||
|
||||
inline MaybeObject Get(FeedbackSlot slot) const;
|
||||
inline MaybeObject Get(PtrComprCageBase cage_base, FeedbackSlot slot) const;
|
||||
inline MaybeObject Get(IsolateRoot isolate, FeedbackSlot slot) const;
|
||||
|
||||
// Returns the feedback cell at |index| that is used to create the
|
||||
// closure.
|
||||
|
@ -61,13 +61,13 @@ int FieldIndex::GetLoadByFieldIndex() const {
|
||||
}
|
||||
|
||||
FieldIndex FieldIndex::ForDescriptor(Map map, InternalIndex descriptor_index) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(map);
|
||||
return ForDescriptor(cage_base, map, descriptor_index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(map);
|
||||
return ForDescriptor(isolate, map, descriptor_index);
|
||||
}
|
||||
|
||||
FieldIndex FieldIndex::ForDescriptor(PtrComprCageBase cage_base, Map map,
|
||||
FieldIndex FieldIndex::ForDescriptor(IsolateRoot isolate, Map map,
|
||||
InternalIndex descriptor_index) {
|
||||
PropertyDetails details = map.instance_descriptors(cage_base, kRelaxedLoad)
|
||||
PropertyDetails details = map.instance_descriptors(isolate, kRelaxedLoad)
|
||||
.GetDetails(descriptor_index);
|
||||
int field_index = details.field_index();
|
||||
return ForPropertyIndex(map, field_index, details.representation());
|
||||
|
@ -31,7 +31,7 @@ class FieldIndex final {
|
||||
static inline FieldIndex ForInObjectOffset(int offset, Encoding encoding);
|
||||
static inline FieldIndex ForDescriptor(Map map,
|
||||
InternalIndex descriptor_index);
|
||||
static inline FieldIndex ForDescriptor(PtrComprCageBase cage_base, Map map,
|
||||
static inline FieldIndex ForDescriptor(IsolateRoot isolate, Map map,
|
||||
InternalIndex descriptor_index);
|
||||
|
||||
inline int GetLoadByFieldIndex() const;
|
||||
|
@ -70,13 +70,13 @@ bool FixedArray::ContainsOnlySmisOrHoles() {
|
||||
}
|
||||
|
||||
Object FixedArray::get(int index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return get(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return get(isolate, index);
|
||||
}
|
||||
|
||||
Object FixedArray::get(PtrComprCageBase cage_base, int index) const {
|
||||
Object FixedArray::get(IsolateRoot isolate, int index) const {
|
||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||
return TaggedField<Object>::Relaxed_Load(cage_base, *this,
|
||||
return TaggedField<Object>::Relaxed_Load(isolate, *this,
|
||||
OffsetOfElementAt(index));
|
||||
}
|
||||
|
||||
@ -124,12 +124,11 @@ void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) {
|
||||
}
|
||||
|
||||
Object FixedArray::get(int index, RelaxedLoadTag) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return get(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return get(isolate, index);
|
||||
}
|
||||
|
||||
Object FixedArray::get(PtrComprCageBase cage_base, int index,
|
||||
RelaxedLoadTag) const {
|
||||
Object FixedArray::get(IsolateRoot isolate, int index, RelaxedLoadTag) const {
|
||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||
return RELAXED_READ_FIELD(*this, OffsetOfElementAt(index));
|
||||
}
|
||||
@ -148,12 +147,11 @@ void FixedArray::set(int index, Smi value, RelaxedStoreTag tag) {
|
||||
}
|
||||
|
||||
Object FixedArray::get(int index, AcquireLoadTag) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return get(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return get(isolate, index);
|
||||
}
|
||||
|
||||
Object FixedArray::get(PtrComprCageBase cage_base, int index,
|
||||
AcquireLoadTag) const {
|
||||
Object FixedArray::get(IsolateRoot isolate, int index, AcquireLoadTag) const {
|
||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||
return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
|
||||
}
|
||||
@ -437,13 +435,13 @@ void FixedDoubleArray::FillWithHoles(int from, int to) {
|
||||
}
|
||||
|
||||
MaybeObject WeakFixedArray::Get(int index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return Get(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return Get(isolate, index);
|
||||
}
|
||||
|
||||
MaybeObject WeakFixedArray::Get(PtrComprCageBase cage_base, int index) const {
|
||||
MaybeObject WeakFixedArray::Get(IsolateRoot isolate, int index) const {
|
||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||
return objects(cage_base, index);
|
||||
return objects(isolate, index);
|
||||
}
|
||||
|
||||
void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) {
|
||||
@ -472,13 +470,13 @@ void WeakFixedArray::CopyElements(Isolate* isolate, int dst_index,
|
||||
}
|
||||
|
||||
MaybeObject WeakArrayList::Get(int index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return Get(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return Get(isolate, index);
|
||||
}
|
||||
|
||||
MaybeObject WeakArrayList::Get(PtrComprCageBase cage_base, int index) const {
|
||||
MaybeObject WeakArrayList::Get(IsolateRoot isolate, int index) const {
|
||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(capacity()));
|
||||
return objects(cage_base, index);
|
||||
return objects(isolate, index);
|
||||
}
|
||||
|
||||
void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) {
|
||||
@ -527,8 +525,8 @@ Object ArrayList::Get(int index) const {
|
||||
return FixedArray::cast(*this).get(kFirstIndex + index);
|
||||
}
|
||||
|
||||
Object ArrayList::Get(PtrComprCageBase cage_base, int index) const {
|
||||
return FixedArray::cast(*this).get(cage_base, kFirstIndex + index);
|
||||
Object ArrayList::Get(IsolateRoot isolate, int index) const {
|
||||
return FixedArray::cast(*this).get(isolate, kFirstIndex + index);
|
||||
}
|
||||
|
||||
ObjectSlot ArrayList::Slot(int index) {
|
||||
@ -652,8 +650,8 @@ Object TemplateList::get(int index) const {
|
||||
return FixedArray::cast(*this).get(kFirstElementIndex + index);
|
||||
}
|
||||
|
||||
Object TemplateList::get(PtrComprCageBase cage_base, int index) const {
|
||||
return FixedArray::cast(*this).get(cage_base, kFirstElementIndex + index);
|
||||
Object TemplateList::get(IsolateRoot isolate, int index) const {
|
||||
return FixedArray::cast(*this).get(isolate, kFirstElementIndex + index);
|
||||
}
|
||||
|
||||
void TemplateList::set(int index, Object value) {
|
||||
|
@ -101,7 +101,7 @@ class FixedArray
|
||||
public:
|
||||
// Setter and getter for elements.
|
||||
inline Object get(int index) const;
|
||||
inline Object get(PtrComprCageBase cage_base, int index) const;
|
||||
inline Object get(IsolateRoot isolate, int index) const;
|
||||
|
||||
static inline Handle<Object> get(FixedArray array, int index,
|
||||
Isolate* isolate);
|
||||
@ -113,16 +113,14 @@ class FixedArray
|
||||
|
||||
// Relaxed accessors.
|
||||
inline Object get(int index, RelaxedLoadTag) const;
|
||||
inline Object get(PtrComprCageBase cage_base, int index,
|
||||
RelaxedLoadTag) const;
|
||||
inline Object get(IsolateRoot isolate, int index, RelaxedLoadTag) const;
|
||||
inline void set(int index, Object value, RelaxedStoreTag,
|
||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
||||
inline void set(int index, Smi value, RelaxedStoreTag);
|
||||
|
||||
// Acquire/release accessors.
|
||||
inline Object get(int index, AcquireLoadTag) const;
|
||||
inline Object get(PtrComprCageBase cage_base, int index,
|
||||
AcquireLoadTag) const;
|
||||
inline Object get(IsolateRoot isolate, int index, AcquireLoadTag) const;
|
||||
inline void set(int index, Object value, ReleaseStoreTag,
|
||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
||||
inline void set(int index, Smi value, ReleaseStoreTag);
|
||||
@ -277,7 +275,7 @@ class WeakFixedArray
|
||||
: public TorqueGeneratedWeakFixedArray<WeakFixedArray, HeapObject> {
|
||||
public:
|
||||
inline MaybeObject Get(int index) const;
|
||||
inline MaybeObject Get(PtrComprCageBase cage_base, int index) const;
|
||||
inline MaybeObject Get(IsolateRoot isolate, int index) const;
|
||||
|
||||
inline void Set(
|
||||
int index, MaybeObject value,
|
||||
@ -352,7 +350,7 @@ class WeakArrayList
|
||||
V8_EXPORT_PRIVATE void Compact(Isolate* isolate);
|
||||
|
||||
inline MaybeObject Get(int index) const;
|
||||
inline MaybeObject Get(PtrComprCageBase cage_base, int index) const;
|
||||
inline MaybeObject Get(IsolateRoot isolate, int index) const;
|
||||
|
||||
// Set the element at index to obj. The underlying array must be large enough.
|
||||
// If you need to grow the WeakArrayList, use the static AddToEnd() method
|
||||
@ -452,7 +450,7 @@ class ArrayList : public TorqueGeneratedArrayList<ArrayList, FixedArray> {
|
||||
// storage capacity, i.e., length().
|
||||
inline void SetLength(int length);
|
||||
inline Object Get(int index) const;
|
||||
inline Object Get(PtrComprCageBase cage_base, int index) const;
|
||||
inline Object Get(IsolateRoot isolate, int index) const;
|
||||
inline ObjectSlot Slot(int index);
|
||||
|
||||
// Set the element at index to obj. The underlying array must be large enough.
|
||||
@ -598,7 +596,7 @@ class TemplateList
|
||||
static Handle<TemplateList> New(Isolate* isolate, int size);
|
||||
inline int length() const;
|
||||
inline Object get(int index) const;
|
||||
inline Object get(PtrComprCageBase cage_base, int index) const;
|
||||
inline Object get(IsolateRoot isolate, int index) const;
|
||||
inline void set(int index, Object value);
|
||||
static Handle<TemplateList> Add(Isolate* isolate, Handle<TemplateList> list,
|
||||
Handle<Object> value);
|
||||
|
@ -29,7 +29,7 @@ bool Foreign::IsNormalized(Object value) {
|
||||
}
|
||||
|
||||
DEF_GETTER(Foreign, foreign_address, Address) {
|
||||
return ReadExternalPointerField(kForeignAddressOffset, cage_base,
|
||||
return ReadExternalPointerField(kForeignAddressOffset, isolate,
|
||||
kForeignForeignAddressTag);
|
||||
}
|
||||
|
||||
|
@ -139,7 +139,7 @@ InternalIndex HashTable<Derived, Shape>::FindEntry(LocalIsolate* isolate,
|
||||
|
||||
// Find entry for key otherwise return kNotFound.
|
||||
template <typename Derived, typename Shape>
|
||||
InternalIndex HashTable<Derived, Shape>::FindEntry(PtrComprCageBase cage_base,
|
||||
InternalIndex HashTable<Derived, Shape>::FindEntry(IsolateRoot isolate,
|
||||
ReadOnlyRoots roots, Key key,
|
||||
int32_t hash) {
|
||||
DisallowGarbageCollection no_gc;
|
||||
@ -151,7 +151,7 @@ InternalIndex HashTable<Derived, Shape>::FindEntry(PtrComprCageBase cage_base,
|
||||
// EnsureCapacity will guarantee the hash table is never full.
|
||||
for (InternalIndex entry = FirstProbe(hash, capacity);;
|
||||
entry = NextProbe(entry, count++, capacity)) {
|
||||
Object element = KeyAt(cage_base, entry);
|
||||
Object element = KeyAt(isolate, entry);
|
||||
// Empty entry. Uses raw unchecked accessors because it is called by the
|
||||
// string table during bootstrapping.
|
||||
if (element == undefined) return InternalIndex::NotFound();
|
||||
@ -177,24 +177,24 @@ bool HashTable<Derived, Shape>::ToKey(ReadOnlyRoots roots, InternalIndex entry,
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
bool HashTable<Derived, Shape>::ToKey(PtrComprCageBase cage_base,
|
||||
InternalIndex entry, Object* out_k) {
|
||||
Object k = KeyAt(cage_base, entry);
|
||||
if (!IsKey(GetReadOnlyRoots(cage_base), k)) return false;
|
||||
bool HashTable<Derived, Shape>::ToKey(IsolateRoot isolate, InternalIndex entry,
|
||||
Object* out_k) {
|
||||
Object k = KeyAt(isolate, entry);
|
||||
if (!IsKey(GetReadOnlyRoots(isolate), k)) return false;
|
||||
*out_k = Shape::Unwrap(k);
|
||||
return true;
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
Object HashTable<Derived, Shape>::KeyAt(InternalIndex entry) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return KeyAt(cage_base, entry);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return KeyAt(isolate, entry);
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
Object HashTable<Derived, Shape>::KeyAt(PtrComprCageBase cage_base,
|
||||
Object HashTable<Derived, Shape>::KeyAt(IsolateRoot isolate,
|
||||
InternalIndex entry) {
|
||||
return get(cage_base, EntryToIndex(entry) + kEntryKeyIndex);
|
||||
return get(isolate, EntryToIndex(entry) + kEntryKeyIndex);
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
|
@ -138,25 +138,24 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
|
||||
void IterateElements(ObjectVisitor* visitor);
|
||||
|
||||
// Find entry for key otherwise return kNotFound.
|
||||
inline InternalIndex FindEntry(PtrComprCageBase cage_base,
|
||||
ReadOnlyRoots roots, Key key, int32_t hash);
|
||||
inline InternalIndex FindEntry(IsolateRoot isolate, ReadOnlyRoots roots,
|
||||
Key key, int32_t hash);
|
||||
template <typename LocalIsolate>
|
||||
inline InternalIndex FindEntry(LocalIsolate* isolate, Key key);
|
||||
|
||||
// Rehashes the table in-place.
|
||||
void Rehash(PtrComprCageBase cage_base);
|
||||
void Rehash(IsolateRoot isolate);
|
||||
|
||||
// Returns whether k is a real key. The hole and undefined are not allowed as
|
||||
// keys and can be used to indicate missing or deleted elements.
|
||||
static inline bool IsKey(ReadOnlyRoots roots, Object k);
|
||||
|
||||
inline bool ToKey(ReadOnlyRoots roots, InternalIndex entry, Object* out_k);
|
||||
inline bool ToKey(PtrComprCageBase cage_base, InternalIndex entry,
|
||||
Object* out_k);
|
||||
inline bool ToKey(IsolateRoot isolate, InternalIndex entry, Object* out_k);
|
||||
|
||||
// Returns the key at entry.
|
||||
inline Object KeyAt(InternalIndex entry);
|
||||
inline Object KeyAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||
inline Object KeyAt(IsolateRoot isolate, InternalIndex entry);
|
||||
|
||||
static const int kElementsStartIndex = kPrefixStartIndex + Shape::kPrefixSize;
|
||||
static const int kEntrySize = Shape::kEntrySize;
|
||||
@ -218,8 +217,8 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
|
||||
|
||||
// Find the entry at which to insert element with the given key that
|
||||
// has the given hash value.
|
||||
InternalIndex FindInsertionEntry(PtrComprCageBase cage_base,
|
||||
ReadOnlyRoots roots, uint32_t hash);
|
||||
InternalIndex FindInsertionEntry(IsolateRoot isolate, ReadOnlyRoots roots,
|
||||
uint32_t hash);
|
||||
InternalIndex FindInsertionEntry(Isolate* isolate, uint32_t hash);
|
||||
|
||||
// Computes the capacity a table with the given capacity would need to have
|
||||
@ -232,7 +231,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
|
||||
Isolate* isolate, Handle<Derived> table, int additionalCapacity = 0);
|
||||
|
||||
// Rehashes this hash-table into the new table.
|
||||
void Rehash(PtrComprCageBase cage_base, Derived new_table);
|
||||
void Rehash(IsolateRoot isolate, Derived new_table);
|
||||
|
||||
inline void set_key(int index, Object value);
|
||||
inline void set_key(int index, Object value, WriteBarrierMode mode);
|
||||
@ -323,7 +322,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) ObjectHashTableBase
|
||||
// returned in case the key is not present.
|
||||
Object Lookup(Handle<Object> key);
|
||||
Object Lookup(Handle<Object> key, int32_t hash);
|
||||
Object Lookup(PtrComprCageBase cage_base, Handle<Object> key, int32_t hash);
|
||||
Object Lookup(IsolateRoot isolate, Handle<Object> key, int32_t hash);
|
||||
|
||||
// Returns the value at entry.
|
||||
Object ValueAt(InternalIndex entry);
|
||||
|
@ -70,12 +70,12 @@ class HeapObject : public Object {
|
||||
// places where it might not be safe to access it.
|
||||
inline ReadOnlyRoots GetReadOnlyRoots() const;
|
||||
// This version is intended to be used for the isolate values produced by
|
||||
// i::GetPtrComprCageBase(HeapObject) function which may return nullptr.
|
||||
inline ReadOnlyRoots GetReadOnlyRoots(PtrComprCageBase cage_base) const;
|
||||
// i::GetIsolateForPtrCompr(HeapObject) function which may return nullptr.
|
||||
inline ReadOnlyRoots GetReadOnlyRoots(IsolateRoot isolate) const;
|
||||
|
||||
#define IS_TYPE_FUNCTION_DECL(Type) \
|
||||
V8_INLINE bool Is##Type() const; \
|
||||
V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
|
||||
V8_INLINE bool Is##Type(IsolateRoot isolate) const;
|
||||
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
|
||||
IS_TYPE_FUNCTION_DECL(HashTableBase)
|
||||
IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable)
|
||||
@ -96,7 +96,7 @@ class HeapObject : public Object {
|
||||
|
||||
#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
|
||||
V8_INLINE bool Is##Name() const; \
|
||||
V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const;
|
||||
V8_INLINE bool Is##Name(IsolateRoot isolate) const;
|
||||
STRUCT_LIST(DECL_STRUCT_PREDICATE)
|
||||
#undef DECL_STRUCT_PREDICATE
|
||||
|
||||
|
@ -43,7 +43,7 @@ void JSArrayBuffer::set_byte_length(size_t value) {
|
||||
}
|
||||
|
||||
DEF_GETTER(JSArrayBuffer, backing_store, void*) {
|
||||
Address value = ReadExternalPointerField(kBackingStoreOffset, cage_base,
|
||||
Address value = ReadExternalPointerField(kBackingStoreOffset, isolate,
|
||||
kArrayBufferBackingStoreTag);
|
||||
return reinterpret_cast<void*>(value);
|
||||
}
|
||||
@ -199,7 +199,7 @@ void JSTypedArray::set_length(size_t value) {
|
||||
}
|
||||
|
||||
DEF_GETTER(JSTypedArray, external_pointer, Address) {
|
||||
return ReadExternalPointerField(kExternalPointerOffset, cage_base,
|
||||
return ReadExternalPointerField(kExternalPointerOffset, isolate,
|
||||
kTypedArrayExternalPointerTag);
|
||||
}
|
||||
|
||||
@ -213,9 +213,9 @@ void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) {
|
||||
}
|
||||
|
||||
Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
|
||||
PtrComprCageBase cage_base) {
|
||||
IsolateRoot isolate) {
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
return cage_base.address();
|
||||
return isolate.address();
|
||||
#else
|
||||
return 0;
|
||||
#endif
|
||||
@ -321,7 +321,7 @@ MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
|
||||
|
||||
DEF_GETTER(JSDataView, data_pointer, void*) {
|
||||
return reinterpret_cast<void*>(ReadExternalPointerField(
|
||||
kDataPointerOffset, cage_base, kDataViewDataPointerTag));
|
||||
kDataPointerOffset, isolate, kDataViewDataPointerTag));
|
||||
}
|
||||
|
||||
void JSDataView::AllocateExternalPointerEntries(Isolate* isolate) {
|
||||
|
@ -300,7 +300,7 @@ class JSTypedArray
|
||||
// as Tagged_t value and an |external_pointer| value.
|
||||
// For full-pointer mode the compensation value is zero.
|
||||
static inline Address ExternalPointerCompensationForOnHeapArray(
|
||||
PtrComprCageBase cage_base);
|
||||
IsolateRoot isolate);
|
||||
|
||||
//
|
||||
// Serializer/deserializer support.
|
||||
|
@ -22,7 +22,7 @@ CAST_ACCESSOR(JSArray)
|
||||
CAST_ACCESSOR(JSArrayIterator)
|
||||
|
||||
DEF_GETTER(JSArray, length, Object) {
|
||||
return TaggedField<Object, kLengthOffset>::load(cage_base, *this);
|
||||
return TaggedField<Object, kLengthOffset>::load(isolate, *this);
|
||||
}
|
||||
|
||||
void JSArray::set_length(Object value, WriteBarrierMode mode) {
|
||||
@ -31,8 +31,8 @@ void JSArray::set_length(Object value, WriteBarrierMode mode) {
|
||||
CONDITIONAL_WRITE_BARRIER(*this, kLengthOffset, value, mode);
|
||||
}
|
||||
|
||||
Object JSArray::length(PtrComprCageBase cage_base, RelaxedLoadTag tag) const {
|
||||
return TaggedField<Object, kLengthOffset>::Relaxed_Load(cage_base, *this);
|
||||
Object JSArray::length(IsolateRoot isolate, RelaxedLoadTag tag) const {
|
||||
return TaggedField<Object, kLengthOffset>::Relaxed_Load(isolate, *this);
|
||||
}
|
||||
|
||||
void JSArray::set_length(Smi length) {
|
||||
|
@ -32,7 +32,7 @@ class JSArray : public JSObject {
|
||||
// acquire/release semantics ever become necessary, the default setter should
|
||||
// be reverted to non-atomic behavior, and setters with explicit tags
|
||||
// introduced and used when required.
|
||||
Object length(PtrComprCageBase cage_base, AcquireLoadTag tag) const = delete;
|
||||
Object length(IsolateRoot isolate, AcquireLoadTag tag) const = delete;
|
||||
void set_length(Object value, ReleaseStoreTag tag,
|
||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete;
|
||||
|
||||
|
@ -210,62 +210,63 @@ ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, HeapObject,
|
||||
kPrototypeOrInitialMapOffset, map().has_prototype_slot())
|
||||
|
||||
DEF_GETTER(JSFunction, has_prototype_slot, bool) {
|
||||
return map(cage_base).has_prototype_slot();
|
||||
return map(isolate).has_prototype_slot();
|
||||
}
|
||||
|
||||
DEF_GETTER(JSFunction, initial_map, Map) {
|
||||
return Map::cast(prototype_or_initial_map(cage_base));
|
||||
return Map::cast(prototype_or_initial_map(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSFunction, has_initial_map, bool) {
|
||||
DCHECK(has_prototype_slot(cage_base));
|
||||
return prototype_or_initial_map(cage_base).IsMap(cage_base);
|
||||
DCHECK(has_prototype_slot(isolate));
|
||||
return prototype_or_initial_map(isolate).IsMap(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(JSFunction, has_instance_prototype, bool) {
|
||||
DCHECK(has_prototype_slot(cage_base));
|
||||
return has_initial_map(cage_base) ||
|
||||
!prototype_or_initial_map(cage_base).IsTheHole(
|
||||
GetReadOnlyRoots(cage_base));
|
||||
DCHECK(has_prototype_slot(isolate));
|
||||
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
|
||||
// i::GetIsolateForPtrCompr(HeapObject).
|
||||
return has_initial_map(isolate) ||
|
||||
!prototype_or_initial_map(isolate).IsTheHole(
|
||||
GetReadOnlyRoots(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSFunction, has_prototype, bool) {
|
||||
DCHECK(has_prototype_slot(cage_base));
|
||||
return map(cage_base).has_non_instance_prototype() ||
|
||||
has_instance_prototype(cage_base);
|
||||
DCHECK(has_prototype_slot(isolate));
|
||||
return map(isolate).has_non_instance_prototype() ||
|
||||
has_instance_prototype(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(JSFunction, has_prototype_property, bool) {
|
||||
return (has_prototype_slot(cage_base) && IsConstructor(cage_base)) ||
|
||||
IsGeneratorFunction(shared(cage_base).kind());
|
||||
return (has_prototype_slot(isolate) && IsConstructor(isolate)) ||
|
||||
IsGeneratorFunction(shared(isolate).kind());
|
||||
}
|
||||
|
||||
DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
|
||||
return !has_prototype_property(cage_base) ||
|
||||
map(cage_base).has_non_instance_prototype();
|
||||
return !has_prototype_property(isolate) ||
|
||||
map(isolate).has_non_instance_prototype();
|
||||
}
|
||||
|
||||
DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
|
||||
DCHECK(has_instance_prototype(cage_base));
|
||||
if (has_initial_map(cage_base))
|
||||
return initial_map(cage_base).prototype(cage_base);
|
||||
DCHECK(has_instance_prototype(isolate));
|
||||
if (has_initial_map(isolate)) return initial_map(isolate).prototype(isolate);
|
||||
// When there is no initial map and the prototype is a JSReceiver, the
|
||||
// initial map field is used for the prototype field.
|
||||
return HeapObject::cast(prototype_or_initial_map(cage_base));
|
||||
return HeapObject::cast(prototype_or_initial_map(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSFunction, prototype, Object) {
|
||||
DCHECK(has_prototype(cage_base));
|
||||
DCHECK(has_prototype(isolate));
|
||||
// If the function's prototype property has been set to a non-JSReceiver
|
||||
// value, that value is stored in the constructor field of the map.
|
||||
if (map(cage_base).has_non_instance_prototype()) {
|
||||
Object prototype = map(cage_base).GetConstructor(cage_base);
|
||||
if (map(isolate).has_non_instance_prototype()) {
|
||||
Object prototype = map(isolate).GetConstructor(isolate);
|
||||
// The map must have a prototype in that field, not a back pointer.
|
||||
DCHECK(!prototype.IsMap(cage_base));
|
||||
DCHECK(!prototype.IsFunctionTemplateInfo(cage_base));
|
||||
DCHECK(!prototype.IsMap(isolate));
|
||||
DCHECK(!prototype.IsFunctionTemplateInfo(isolate));
|
||||
return prototype;
|
||||
}
|
||||
return instance_prototype(cage_base);
|
||||
return instance_prototype(isolate);
|
||||
}
|
||||
|
||||
bool JSFunction::is_compiled() const {
|
||||
|
@ -52,12 +52,11 @@ CAST_ACCESSOR(JSMessageObject)
|
||||
CAST_ACCESSOR(JSReceiver)
|
||||
|
||||
DEF_GETTER(JSObject, elements, FixedArrayBase) {
|
||||
return TaggedField<FixedArrayBase, kElementsOffset>::load(cage_base, *this);
|
||||
return TaggedField<FixedArrayBase, kElementsOffset>::load(isolate, *this);
|
||||
}
|
||||
|
||||
FixedArrayBase JSObject::elements(PtrComprCageBase cage_base,
|
||||
RelaxedLoadTag) const {
|
||||
return TaggedField<FixedArrayBase, kElementsOffset>::Relaxed_Load(cage_base,
|
||||
FixedArrayBase JSObject::elements(IsolateRoot isolate, RelaxedLoadTag) const {
|
||||
return TaggedField<FixedArrayBase, kElementsOffset>::Relaxed_Load(isolate,
|
||||
*this);
|
||||
}
|
||||
|
||||
@ -250,11 +249,11 @@ void JSObject::initialize_elements() {
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, GetIndexedInterceptor, InterceptorInfo) {
|
||||
return map(cage_base).GetIndexedInterceptor(cage_base);
|
||||
return map(isolate).GetIndexedInterceptor(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, GetNamedInterceptor, InterceptorInfo) {
|
||||
return map(cage_base).GetNamedInterceptor(cage_base);
|
||||
return map(isolate).GetNamedInterceptor(isolate);
|
||||
}
|
||||
|
||||
// static
|
||||
@ -323,17 +322,16 @@ void JSObject::SetEmbedderField(int index, Smi value) {
|
||||
// is needed to correctly distinguish between properties stored in-object and
|
||||
// properties stored in the properties array.
|
||||
Object JSObject::RawFastPropertyAt(FieldIndex index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return RawFastPropertyAt(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return RawFastPropertyAt(isolate, index);
|
||||
}
|
||||
|
||||
Object JSObject::RawFastPropertyAt(PtrComprCageBase cage_base,
|
||||
Object JSObject::RawFastPropertyAt(IsolateRoot isolate,
|
||||
FieldIndex index) const {
|
||||
if (index.is_inobject()) {
|
||||
return TaggedField<Object>::load(cage_base, *this, index.offset());
|
||||
return TaggedField<Object>::load(isolate, *this, index.offset());
|
||||
} else {
|
||||
return property_array(cage_base).get(cage_base,
|
||||
index.outobject_array_index());
|
||||
return property_array(isolate).get(isolate, index.outobject_array_index());
|
||||
}
|
||||
}
|
||||
|
||||
@ -427,7 +425,7 @@ ACCESSORS(JSGlobalObject, native_context, NativeContext, kNativeContextOffset)
|
||||
ACCESSORS(JSGlobalObject, global_proxy, JSGlobalProxy, kGlobalProxyOffset)
|
||||
|
||||
DEF_GETTER(JSGlobalObject, native_context_unchecked, Object) {
|
||||
return TaggedField<Object, kNativeContextOffset>::load(cage_base, *this);
|
||||
return TaggedField<Object, kNativeContextOffset>::load(isolate, *this);
|
||||
}
|
||||
|
||||
bool JSMessageObject::DidEnsureSourcePositionsAvailable() const {
|
||||
@ -463,119 +461,119 @@ SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset)
|
||||
SMI_ACCESSORS(JSMessageObject, raw_type, kMessageTypeOffset)
|
||||
|
||||
DEF_GETTER(JSObject, GetElementsKind, ElementsKind) {
|
||||
ElementsKind kind = map(cage_base).elements_kind();
|
||||
ElementsKind kind = map(isolate).elements_kind();
|
||||
#if VERIFY_HEAP && DEBUG
|
||||
FixedArrayBase fixed_array = FixedArrayBase::unchecked_cast(
|
||||
TaggedField<HeapObject, kElementsOffset>::load(cage_base, *this));
|
||||
TaggedField<HeapObject, kElementsOffset>::load(isolate, *this));
|
||||
|
||||
// If a GC was caused while constructing this object, the elements
|
||||
// pointer may point to a one pointer filler map.
|
||||
if (ElementsAreSafeToExamine(cage_base)) {
|
||||
Map map = fixed_array.map(cage_base);
|
||||
if (ElementsAreSafeToExamine(isolate)) {
|
||||
Map map = fixed_array.map(isolate);
|
||||
if (IsSmiOrObjectElementsKind(kind)) {
|
||||
DCHECK(map == GetReadOnlyRoots(cage_base).fixed_array_map() ||
|
||||
map == GetReadOnlyRoots(cage_base).fixed_cow_array_map());
|
||||
DCHECK(map == GetReadOnlyRoots(isolate).fixed_array_map() ||
|
||||
map == GetReadOnlyRoots(isolate).fixed_cow_array_map());
|
||||
} else if (IsDoubleElementsKind(kind)) {
|
||||
DCHECK(fixed_array.IsFixedDoubleArray(cage_base) ||
|
||||
fixed_array == GetReadOnlyRoots(cage_base).empty_fixed_array());
|
||||
DCHECK(fixed_array.IsFixedDoubleArray(isolate) ||
|
||||
fixed_array == GetReadOnlyRoots(isolate).empty_fixed_array());
|
||||
} else if (kind == DICTIONARY_ELEMENTS) {
|
||||
DCHECK(fixed_array.IsFixedArray(cage_base));
|
||||
DCHECK(fixed_array.IsNumberDictionary(cage_base));
|
||||
DCHECK(fixed_array.IsFixedArray(isolate));
|
||||
DCHECK(fixed_array.IsNumberDictionary(isolate));
|
||||
} else {
|
||||
DCHECK(kind > DICTIONARY_ELEMENTS ||
|
||||
IsAnyNonextensibleElementsKind(kind));
|
||||
}
|
||||
DCHECK(!IsSloppyArgumentsElementsKind(kind) ||
|
||||
elements(cage_base).IsSloppyArgumentsElements());
|
||||
elements(isolate).IsSloppyArgumentsElements());
|
||||
}
|
||||
#endif
|
||||
return kind;
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, GetElementsAccessor, ElementsAccessor*) {
|
||||
return ElementsAccessor::ForKind(GetElementsKind(cage_base));
|
||||
return ElementsAccessor::ForKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasObjectElements, bool) {
|
||||
return IsObjectElementsKind(GetElementsKind(cage_base));
|
||||
return IsObjectElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasSmiElements, bool) {
|
||||
return IsSmiElementsKind(GetElementsKind(cage_base));
|
||||
return IsSmiElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasSmiOrObjectElements, bool) {
|
||||
return IsSmiOrObjectElementsKind(GetElementsKind(cage_base));
|
||||
return IsSmiOrObjectElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasDoubleElements, bool) {
|
||||
return IsDoubleElementsKind(GetElementsKind(cage_base));
|
||||
return IsDoubleElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasHoleyElements, bool) {
|
||||
return IsHoleyElementsKind(GetElementsKind(cage_base));
|
||||
return IsHoleyElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasFastElements, bool) {
|
||||
return IsFastElementsKind(GetElementsKind(cage_base));
|
||||
return IsFastElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasFastPackedElements, bool) {
|
||||
return IsFastPackedElementsKind(GetElementsKind(cage_base));
|
||||
return IsFastPackedElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasDictionaryElements, bool) {
|
||||
return IsDictionaryElementsKind(GetElementsKind(cage_base));
|
||||
return IsDictionaryElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasPackedElements, bool) {
|
||||
return GetElementsKind(cage_base) == PACKED_ELEMENTS;
|
||||
return GetElementsKind(isolate) == PACKED_ELEMENTS;
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasAnyNonextensibleElements, bool) {
|
||||
return IsAnyNonextensibleElementsKind(GetElementsKind(cage_base));
|
||||
return IsAnyNonextensibleElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasSealedElements, bool) {
|
||||
return IsSealedElementsKind(GetElementsKind(cage_base));
|
||||
return IsSealedElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasNonextensibleElements, bool) {
|
||||
return IsNonextensibleElementsKind(GetElementsKind(cage_base));
|
||||
return IsNonextensibleElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasFastArgumentsElements, bool) {
|
||||
return IsFastArgumentsElementsKind(GetElementsKind(cage_base));
|
||||
return IsFastArgumentsElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasSlowArgumentsElements, bool) {
|
||||
return IsSlowArgumentsElementsKind(GetElementsKind(cage_base));
|
||||
return IsSlowArgumentsElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasSloppyArgumentsElements, bool) {
|
||||
return IsSloppyArgumentsElementsKind(GetElementsKind(cage_base));
|
||||
return IsSloppyArgumentsElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasStringWrapperElements, bool) {
|
||||
return IsStringWrapperElementsKind(GetElementsKind(cage_base));
|
||||
return IsStringWrapperElementsKind(GetElementsKind(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasFastStringWrapperElements, bool) {
|
||||
return GetElementsKind(cage_base) == FAST_STRING_WRAPPER_ELEMENTS;
|
||||
return GetElementsKind(isolate) == FAST_STRING_WRAPPER_ELEMENTS;
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasSlowStringWrapperElements, bool) {
|
||||
return GetElementsKind(cage_base) == SLOW_STRING_WRAPPER_ELEMENTS;
|
||||
return GetElementsKind(isolate) == SLOW_STRING_WRAPPER_ELEMENTS;
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasTypedArrayElements, bool) {
|
||||
DCHECK(!elements(cage_base).is_null());
|
||||
return map(cage_base).has_typed_array_elements();
|
||||
DCHECK(!elements(isolate).is_null());
|
||||
return map(isolate).has_typed_array_elements();
|
||||
}
|
||||
|
||||
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
|
||||
DEF_GETTER(JSObject, HasFixed##Type##Elements, bool) { \
|
||||
return map(cage_base).elements_kind() == TYPE##_ELEMENTS; \
|
||||
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
|
||||
DEF_GETTER(JSObject, HasFixed##Type##Elements, bool) { \
|
||||
return map(isolate).elements_kind() == TYPE##_ELEMENTS; \
|
||||
}
|
||||
|
||||
TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
|
||||
@ -583,21 +581,21 @@ TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
|
||||
#undef FIXED_TYPED_ELEMENTS_CHECK
|
||||
|
||||
DEF_GETTER(JSObject, HasNamedInterceptor, bool) {
|
||||
return map(cage_base).has_named_interceptor();
|
||||
return map(isolate).has_named_interceptor();
|
||||
}
|
||||
|
||||
DEF_GETTER(JSObject, HasIndexedInterceptor, bool) {
|
||||
return map(cage_base).has_indexed_interceptor();
|
||||
return map(isolate).has_indexed_interceptor();
|
||||
}
|
||||
|
||||
RELEASE_ACQUIRE_ACCESSORS_CHECKED2(JSGlobalObject, global_dictionary,
|
||||
GlobalDictionary, kPropertiesOrHashOffset,
|
||||
!HasFastProperties(cage_base), true)
|
||||
!HasFastProperties(isolate), true)
|
||||
|
||||
DEF_GETTER(JSObject, element_dictionary, NumberDictionary) {
|
||||
DCHECK(HasDictionaryElements(cage_base) ||
|
||||
HasSlowStringWrapperElements(cage_base));
|
||||
return NumberDictionary::cast(elements(cage_base));
|
||||
DCHECK(HasDictionaryElements(isolate) ||
|
||||
HasSlowStringWrapperElements(isolate));
|
||||
return NumberDictionary::cast(elements(isolate));
|
||||
}
|
||||
|
||||
void JSReceiver::initialize_properties(Isolate* isolate) {
|
||||
@ -619,34 +617,38 @@ void JSReceiver::initialize_properties(Isolate* isolate) {
|
||||
}
|
||||
|
||||
DEF_GETTER(JSReceiver, HasFastProperties, bool) {
|
||||
DCHECK(raw_properties_or_hash(cage_base).IsSmi() ||
|
||||
((raw_properties_or_hash(cage_base).IsGlobalDictionary(cage_base) ||
|
||||
raw_properties_or_hash(cage_base).IsNameDictionary(cage_base) ||
|
||||
raw_properties_or_hash(cage_base).IsSwissNameDictionary(
|
||||
cage_base)) == map(cage_base).is_dictionary_map()));
|
||||
return !map(cage_base).is_dictionary_map();
|
||||
DCHECK(raw_properties_or_hash(isolate).IsSmi() ||
|
||||
((raw_properties_or_hash(isolate).IsGlobalDictionary(isolate) ||
|
||||
raw_properties_or_hash(isolate).IsNameDictionary(isolate) ||
|
||||
raw_properties_or_hash(isolate).IsSwissNameDictionary(isolate)) ==
|
||||
map(isolate).is_dictionary_map()));
|
||||
return !map(isolate).is_dictionary_map();
|
||||
}
|
||||
|
||||
DEF_GETTER(JSReceiver, property_dictionary, NameDictionary) {
|
||||
DCHECK(!IsJSGlobalObject(cage_base));
|
||||
DCHECK(!HasFastProperties(cage_base));
|
||||
DCHECK(!IsJSGlobalObject(isolate));
|
||||
DCHECK(!HasFastProperties(isolate));
|
||||
DCHECK(!V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL);
|
||||
|
||||
Object prop = raw_properties_or_hash(cage_base);
|
||||
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
|
||||
// i::GetIsolateForPtrCompr(HeapObject).
|
||||
Object prop = raw_properties_or_hash(isolate);
|
||||
if (prop.IsSmi()) {
|
||||
return GetReadOnlyRoots(cage_base).empty_property_dictionary();
|
||||
return GetReadOnlyRoots(isolate).empty_property_dictionary();
|
||||
}
|
||||
return NameDictionary::cast(prop);
|
||||
}
|
||||
|
||||
DEF_GETTER(JSReceiver, property_dictionary_swiss, SwissNameDictionary) {
|
||||
DCHECK(!IsJSGlobalObject(cage_base));
|
||||
DCHECK(!HasFastProperties(cage_base));
|
||||
DCHECK(!IsJSGlobalObject(isolate));
|
||||
DCHECK(!HasFastProperties(isolate));
|
||||
DCHECK(V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL);
|
||||
|
||||
Object prop = raw_properties_or_hash(cage_base);
|
||||
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
|
||||
// i::GetIsolateForPtrCompr(HeapObject).
|
||||
Object prop = raw_properties_or_hash(isolate);
|
||||
if (prop.IsSmi()) {
|
||||
return GetReadOnlyRoots(cage_base).empty_swiss_property_dictionary();
|
||||
return GetReadOnlyRoots(isolate).empty_swiss_property_dictionary();
|
||||
}
|
||||
return SwissNameDictionary::cast(prop);
|
||||
}
|
||||
@ -654,10 +656,12 @@ DEF_GETTER(JSReceiver, property_dictionary_swiss, SwissNameDictionary) {
|
||||
// TODO(gsathya): Pass isolate directly to this function and access
|
||||
// the heap from this.
|
||||
DEF_GETTER(JSReceiver, property_array, PropertyArray) {
|
||||
DCHECK(HasFastProperties(cage_base));
|
||||
Object prop = raw_properties_or_hash(cage_base);
|
||||
if (prop.IsSmi() || prop == GetReadOnlyRoots(cage_base).empty_fixed_array()) {
|
||||
return GetReadOnlyRoots(cage_base).empty_property_array();
|
||||
DCHECK(HasFastProperties(isolate));
|
||||
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
|
||||
// i::GetIsolateForPtrCompr(HeapObject).
|
||||
Object prop = raw_properties_or_hash(isolate);
|
||||
if (prop.IsSmi() || prop == GetReadOnlyRoots(isolate).empty_fixed_array()) {
|
||||
return GetReadOnlyRoots(isolate).empty_property_array();
|
||||
}
|
||||
return PropertyArray::cast(prop);
|
||||
}
|
||||
|
@ -319,7 +319,7 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
|
||||
// acquire/release semantics ever become necessary, the default setter should
|
||||
// be reverted to non-atomic behavior, and setters with explicit tags
|
||||
// introduced and used when required.
|
||||
FixedArrayBase elements(PtrComprCageBase cage_base,
|
||||
FixedArrayBase elements(IsolateRoot isolate,
|
||||
AcquireLoadTag tag) const = delete;
|
||||
void set_elements(FixedArrayBase value, ReleaseStoreTag tag,
|
||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete;
|
||||
@ -652,8 +652,7 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
|
||||
Representation representation,
|
||||
FieldIndex index);
|
||||
inline Object RawFastPropertyAt(FieldIndex index) const;
|
||||
inline Object RawFastPropertyAt(PtrComprCageBase cage_base,
|
||||
FieldIndex index) const;
|
||||
inline Object RawFastPropertyAt(IsolateRoot isolate, FieldIndex index) const;
|
||||
|
||||
inline void FastPropertyAtPut(FieldIndex index, Object value,
|
||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
||||
@ -743,8 +742,7 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
|
||||
// If a GC was caused while constructing this object, the elements pointer
|
||||
// may point to a one pointer filler map. The object won't be rooted, but
|
||||
// our heap verification code could stumble across it.
|
||||
V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(
|
||||
PtrComprCageBase cage_base) const;
|
||||
V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(IsolateRoot isolate) const;
|
||||
#endif
|
||||
|
||||
Object SlowReverseLookup(Object value);
|
||||
|
@ -29,26 +29,26 @@ SMI_ACCESSORS(ObjectBoilerplateDescription, flags,
|
||||
FixedArray::OffsetOfElementAt(kLiteralTypeOffset))
|
||||
|
||||
Object ObjectBoilerplateDescription::name(int index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return name(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return name(isolate, index);
|
||||
}
|
||||
|
||||
Object ObjectBoilerplateDescription::name(PtrComprCageBase cage_base,
|
||||
Object ObjectBoilerplateDescription::name(IsolateRoot isolate,
|
||||
int index) const {
|
||||
// get() already checks for out of bounds access, but we do not want to allow
|
||||
// access to the last element, if it is the number of properties.
|
||||
DCHECK_NE(size(), index);
|
||||
return get(cage_base, 2 * index + kDescriptionStartIndex);
|
||||
return get(isolate, 2 * index + kDescriptionStartIndex);
|
||||
}
|
||||
|
||||
Object ObjectBoilerplateDescription::value(int index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return value(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return value(isolate, index);
|
||||
}
|
||||
|
||||
Object ObjectBoilerplateDescription::value(PtrComprCageBase cage_base,
|
||||
Object ObjectBoilerplateDescription::value(IsolateRoot isolate,
|
||||
int index) const {
|
||||
return get(cage_base, 2 * index + 1 + kDescriptionStartIndex);
|
||||
return get(isolate, 2 * index + 1 + kDescriptionStartIndex);
|
||||
}
|
||||
|
||||
void ObjectBoilerplateDescription::set_key_value(int index, Object key,
|
||||
|
@ -28,10 +28,10 @@ class ClassLiteral;
|
||||
class ObjectBoilerplateDescription : public FixedArray {
|
||||
public:
|
||||
inline Object name(int index) const;
|
||||
inline Object name(PtrComprCageBase cage_base, int index) const;
|
||||
inline Object name(IsolateRoot isolate, int index) const;
|
||||
|
||||
inline Object value(int index) const;
|
||||
inline Object value(PtrComprCageBase cage_base, int index) const;
|
||||
inline Object value(IsolateRoot isolate, int index) const;
|
||||
|
||||
inline void set_key_value(int index, Object key, Object value);
|
||||
|
||||
|
@ -107,14 +107,14 @@ BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,
|
||||
|
||||
DEF_GETTER(Map, GetNamedInterceptor, InterceptorInfo) {
|
||||
DCHECK(has_named_interceptor());
|
||||
FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
|
||||
return InterceptorInfo::cast(info.GetNamedPropertyHandler(cage_base));
|
||||
FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate);
|
||||
return InterceptorInfo::cast(info.GetNamedPropertyHandler(isolate));
|
||||
}
|
||||
|
||||
DEF_GETTER(Map, GetIndexedInterceptor, InterceptorInfo) {
|
||||
DCHECK(has_indexed_interceptor());
|
||||
FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
|
||||
return InterceptorInfo::cast(info.GetIndexedPropertyHandler(cage_base));
|
||||
FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate);
|
||||
return InterceptorInfo::cast(info.GetIndexedPropertyHandler(isolate));
|
||||
}
|
||||
|
||||
bool Map::IsMostGeneralFieldType(Representation representation,
|
||||
@ -657,18 +657,19 @@ void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
|
||||
#endif
|
||||
}
|
||||
|
||||
bool Map::ConcurrentIsMap(PtrComprCageBase cage_base,
|
||||
const Object& object) const {
|
||||
return object.IsHeapObject() && HeapObject::cast(object).map(cage_base) ==
|
||||
GetReadOnlyRoots(cage_base).meta_map();
|
||||
bool Map::ConcurrentIsMap(IsolateRoot isolate, const Object& object) const {
|
||||
return object.IsHeapObject() && HeapObject::cast(object).map(isolate) ==
|
||||
GetReadOnlyRoots(isolate).meta_map();
|
||||
}
|
||||
|
||||
DEF_GETTER(Map, GetBackPointer, HeapObject) {
|
||||
Object object = constructor_or_back_pointer(cage_base);
|
||||
if (ConcurrentIsMap(cage_base, object)) {
|
||||
Object object = constructor_or_back_pointer(isolate);
|
||||
if (ConcurrentIsMap(isolate, object)) {
|
||||
return Map::cast(object);
|
||||
}
|
||||
return GetReadOnlyRoots(cage_base).undefined_value();
|
||||
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
|
||||
// i::GetIsolateForPtrCompr(HeapObject).
|
||||
return GetReadOnlyRoots(isolate).undefined_value();
|
||||
}
|
||||
|
||||
void Map::SetBackPointer(HeapObject value, WriteBarrierMode mode) {
|
||||
@ -708,11 +709,11 @@ bool Map::IsPrototypeValidityCellValid() const {
|
||||
}
|
||||
|
||||
DEF_GETTER(Map, GetConstructor, Object) {
|
||||
Object maybe_constructor = constructor_or_back_pointer(cage_base);
|
||||
Object maybe_constructor = constructor_or_back_pointer(isolate);
|
||||
// Follow any back pointers.
|
||||
while (ConcurrentIsMap(cage_base, maybe_constructor)) {
|
||||
while (ConcurrentIsMap(isolate, maybe_constructor)) {
|
||||
maybe_constructor =
|
||||
Map::cast(maybe_constructor).constructor_or_back_pointer(cage_base);
|
||||
Map::cast(maybe_constructor).constructor_or_back_pointer(isolate);
|
||||
}
|
||||
return maybe_constructor;
|
||||
}
|
||||
@ -729,13 +730,13 @@ Object Map::TryGetConstructor(Isolate* isolate, int max_steps) {
|
||||
}
|
||||
|
||||
DEF_GETTER(Map, GetFunctionTemplateInfo, FunctionTemplateInfo) {
|
||||
Object constructor = GetConstructor(cage_base);
|
||||
if (constructor.IsJSFunction(cage_base)) {
|
||||
Object constructor = GetConstructor(isolate);
|
||||
if (constructor.IsJSFunction(isolate)) {
|
||||
// TODO(ishell): IsApiFunction(isolate) and get_api_func_data(isolate)
|
||||
DCHECK(JSFunction::cast(constructor).shared(cage_base).IsApiFunction());
|
||||
return JSFunction::cast(constructor).shared(cage_base).get_api_func_data();
|
||||
DCHECK(JSFunction::cast(constructor).shared(isolate).IsApiFunction());
|
||||
return JSFunction::cast(constructor).shared(isolate).get_api_func_data();
|
||||
}
|
||||
DCHECK(constructor.IsFunctionTemplateInfo(cage_base));
|
||||
DCHECK(constructor.IsFunctionTemplateInfo(isolate));
|
||||
return FunctionTemplateInfo::cast(constructor);
|
||||
}
|
||||
|
||||
@ -790,7 +791,7 @@ int NormalizedMapCache::GetIndex(Handle<Map> map) {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsNormalizedMapCache, bool) {
|
||||
if (!IsWeakFixedArray(cage_base)) return false;
|
||||
if (!IsWeakFixedArray(isolate)) return false;
|
||||
if (WeakFixedArray::cast(*this).length() != NormalizedMapCache::kEntries) {
|
||||
return false;
|
||||
}
|
||||
|
@ -943,7 +943,7 @@ class Map : public HeapObject {
|
||||
|
||||
// This is the equivalent of IsMap() but avoids reading the instance type so
|
||||
// it can be used concurrently without acquire load.
|
||||
V8_INLINE bool ConcurrentIsMap(PtrComprCageBase cage_base,
|
||||
V8_INLINE bool ConcurrentIsMap(IsolateRoot isolate,
|
||||
const Object& object) const;
|
||||
|
||||
// Use the high-level instance_descriptors/SetInstanceDescriptors instead.
|
||||
@ -976,8 +976,7 @@ class NormalizedMapCache : public WeakFixedArray {
|
||||
DECL_VERIFIER(NormalizedMapCache)
|
||||
|
||||
private:
|
||||
friend bool HeapObject::IsNormalizedMapCache(
|
||||
PtrComprCageBase cage_base) const;
|
||||
friend bool HeapObject::IsNormalizedMapCache(IsolateRoot isolate) const;
|
||||
|
||||
static const int kEntries = 64;
|
||||
|
||||
|
@ -78,14 +78,13 @@ HeapObjectReference HeapObjectReference::From(Object object,
|
||||
}
|
||||
|
||||
// static
|
||||
HeapObjectReference HeapObjectReference::ClearedValue(
|
||||
PtrComprCageBase cage_base) {
|
||||
HeapObjectReference HeapObjectReference::ClearedValue(IsolateRoot isolate) {
|
||||
// Construct cleared weak ref value.
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
// This is necessary to make pointer decompression computation also
|
||||
// suitable for cleared weak references.
|
||||
Address raw_value =
|
||||
DecompressTaggedPointer(cage_base, kClearedWeakHeapObjectLower32);
|
||||
DecompressTaggedPointer(isolate, kClearedWeakHeapObjectLower32);
|
||||
#else
|
||||
Address raw_value = kClearedWeakHeapObjectLower32;
|
||||
#endif
|
||||
|
@ -54,7 +54,7 @@ class HeapObjectReference : public MaybeObject {
|
||||
V8_INLINE static HeapObjectReference From(Object object,
|
||||
HeapObjectReferenceType type);
|
||||
|
||||
V8_INLINE static HeapObjectReference ClearedValue(PtrComprCageBase cage_base);
|
||||
V8_INLINE static HeapObjectReference ClearedValue(IsolateRoot isolate);
|
||||
|
||||
template <typename THeapObjectSlot>
|
||||
V8_INLINE static void Update(THeapObjectSlot slot, HeapObject value);
|
||||
|
@ -56,7 +56,7 @@ void Symbol::set_is_private_name() {
|
||||
}
|
||||
|
||||
DEF_GETTER(Name, IsUniqueName, bool) {
|
||||
uint32_t type = map(cage_base).instance_type();
|
||||
uint32_t type = map(isolate).instance_type();
|
||||
bool result = (type & (kIsNotStringMask | kIsNotInternalizedMask)) !=
|
||||
(kStringTag | kNotInternalizedTag);
|
||||
SLOW_DCHECK(result == HeapObject::IsUniqueName());
|
||||
@ -104,23 +104,23 @@ uint32_t Name::hash() const {
|
||||
}
|
||||
|
||||
DEF_GETTER(Name, IsInterestingSymbol, bool) {
|
||||
return IsSymbol(cage_base) && Symbol::cast(*this).is_interesting_symbol();
|
||||
return IsSymbol(isolate) && Symbol::cast(*this).is_interesting_symbol();
|
||||
}
|
||||
|
||||
DEF_GETTER(Name, IsPrivate, bool) {
|
||||
return this->IsSymbol(cage_base) && Symbol::cast(*this).is_private();
|
||||
return this->IsSymbol(isolate) && Symbol::cast(*this).is_private();
|
||||
}
|
||||
|
||||
DEF_GETTER(Name, IsPrivateName, bool) {
|
||||
bool is_private_name =
|
||||
this->IsSymbol(cage_base) && Symbol::cast(*this).is_private_name();
|
||||
this->IsSymbol(isolate) && Symbol::cast(*this).is_private_name();
|
||||
DCHECK_IMPLIES(is_private_name, IsPrivate());
|
||||
return is_private_name;
|
||||
}
|
||||
|
||||
DEF_GETTER(Name, IsPrivateBrand, bool) {
|
||||
bool is_private_brand =
|
||||
this->IsSymbol(cage_base) && Symbol::cast(*this).is_private_brand();
|
||||
this->IsSymbol(isolate) && Symbol::cast(*this).is_private_brand();
|
||||
DCHECK_IMPLIES(is_private_brand, IsPrivateName());
|
||||
return is_private_brand;
|
||||
}
|
||||
|
@ -86,14 +86,14 @@
|
||||
// parameter.
|
||||
#define DECL_GETTER(name, type) \
|
||||
inline type name() const; \
|
||||
inline type name(PtrComprCageBase cage_base) const;
|
||||
inline type name(IsolateRoot isolate) const;
|
||||
|
||||
#define DEF_GETTER(holder, name, type) \
|
||||
type holder::name() const { \
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
|
||||
return holder::name(cage_base); \
|
||||
} \
|
||||
type holder::name(PtrComprCageBase cage_base) const
|
||||
#define DEF_GETTER(holder, name, type) \
|
||||
type holder::name() const { \
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
|
||||
return holder::name(isolate); \
|
||||
} \
|
||||
type holder::name(IsolateRoot isolate) const
|
||||
|
||||
#define DECL_SETTER(name, type) \
|
||||
inline void set_##name(type value, \
|
||||
@ -105,7 +105,7 @@
|
||||
|
||||
#define DECL_ACCESSORS_LOAD_TAG(name, type, tag_type) \
|
||||
inline type name(tag_type tag) const; \
|
||||
inline type name(PtrComprCageBase cage_base, tag_type) const;
|
||||
inline type name(IsolateRoot isolate, tag_type) const;
|
||||
|
||||
#define DECL_ACCESSORS_STORE_TAG(name, type, tag_type) \
|
||||
inline void set_##name(type value, tag_type, \
|
||||
@ -179,7 +179,7 @@
|
||||
#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
|
||||
set_condition) \
|
||||
DEF_GETTER(holder, name, type) { \
|
||||
type value = TaggedField<type, offset>::load(cage_base, *this); \
|
||||
type value = TaggedField<type, offset>::load(isolate, *this); \
|
||||
DCHECK(get_condition); \
|
||||
return value; \
|
||||
} \
|
||||
@ -215,11 +215,11 @@
|
||||
#define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
|
||||
set_condition) \
|
||||
type holder::name(RelaxedLoadTag tag) const { \
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
|
||||
return holder::name(cage_base, tag); \
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
|
||||
return holder::name(isolate, tag); \
|
||||
} \
|
||||
type holder::name(PtrComprCageBase cage_base, RelaxedLoadTag) const { \
|
||||
type value = TaggedField<type, offset>::Relaxed_Load(cage_base, *this); \
|
||||
type holder::name(IsolateRoot isolate, RelaxedLoadTag) const { \
|
||||
type value = TaggedField<type, offset>::Relaxed_Load(isolate, *this); \
|
||||
DCHECK(get_condition); \
|
||||
return value; \
|
||||
} \
|
||||
@ -236,22 +236,22 @@
|
||||
#define RELAXED_ACCESSORS(holder, name, type, offset) \
|
||||
RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, true)
|
||||
|
||||
#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \
|
||||
get_condition, set_condition) \
|
||||
type holder::name(AcquireLoadTag tag) const { \
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
|
||||
return holder::name(cage_base, tag); \
|
||||
} \
|
||||
type holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const { \
|
||||
type value = TaggedField<type, offset>::Acquire_Load(cage_base, *this); \
|
||||
DCHECK(get_condition); \
|
||||
return value; \
|
||||
} \
|
||||
void holder::set_##name(type value, ReleaseStoreTag, \
|
||||
WriteBarrierMode mode) { \
|
||||
DCHECK(set_condition); \
|
||||
TaggedField<type, offset>::Release_Store(*this, value); \
|
||||
CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
|
||||
#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \
|
||||
get_condition, set_condition) \
|
||||
type holder::name(AcquireLoadTag tag) const { \
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
|
||||
return holder::name(isolate, tag); \
|
||||
} \
|
||||
type holder::name(IsolateRoot isolate, AcquireLoadTag) const { \
|
||||
type value = TaggedField<type, offset>::Acquire_Load(isolate, *this); \
|
||||
DCHECK(get_condition); \
|
||||
return value; \
|
||||
} \
|
||||
void holder::set_##name(type value, ReleaseStoreTag, \
|
||||
WriteBarrierMode mode) { \
|
||||
DCHECK(set_condition); \
|
||||
TaggedField<type, offset>::Release_Store(*this, value); \
|
||||
CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
|
||||
}
|
||||
|
||||
#define RELEASE_ACQUIRE_ACCESSORS_CHECKED(holder, name, type, offset, \
|
||||
@ -266,7 +266,7 @@
|
||||
set_condition) \
|
||||
DEF_GETTER(holder, name, MaybeObject) { \
|
||||
MaybeObject value = \
|
||||
TaggedField<MaybeObject, offset>::load(cage_base, *this); \
|
||||
TaggedField<MaybeObject, offset>::load(isolate, *this); \
|
||||
DCHECK(get_condition); \
|
||||
return value; \
|
||||
} \
|
||||
@ -282,23 +282,23 @@
|
||||
#define WEAK_ACCESSORS(holder, name, offset) \
|
||||
WEAK_ACCESSORS_CHECKED(holder, name, offset, true)
|
||||
|
||||
#define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED2(holder, name, offset, \
|
||||
get_condition, set_condition) \
|
||||
MaybeObject holder::name(AcquireLoadTag tag) const { \
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
|
||||
return holder::name(cage_base, tag); \
|
||||
} \
|
||||
MaybeObject holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const { \
|
||||
MaybeObject value = \
|
||||
TaggedField<MaybeObject, offset>::Acquire_Load(cage_base, *this); \
|
||||
DCHECK(get_condition); \
|
||||
return value; \
|
||||
} \
|
||||
void holder::set_##name(MaybeObject value, ReleaseStoreTag, \
|
||||
WriteBarrierMode mode) { \
|
||||
DCHECK(set_condition); \
|
||||
TaggedField<MaybeObject, offset>::Release_Store(*this, value); \
|
||||
CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode); \
|
||||
#define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED2(holder, name, offset, \
|
||||
get_condition, set_condition) \
|
||||
MaybeObject holder::name(AcquireLoadTag tag) const { \
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
|
||||
return holder::name(isolate, tag); \
|
||||
} \
|
||||
MaybeObject holder::name(IsolateRoot isolate, AcquireLoadTag) const { \
|
||||
MaybeObject value = \
|
||||
TaggedField<MaybeObject, offset>::Acquire_Load(isolate, *this); \
|
||||
DCHECK(get_condition); \
|
||||
return value; \
|
||||
} \
|
||||
void holder::set_##name(MaybeObject value, ReleaseStoreTag, \
|
||||
WriteBarrierMode mode) { \
|
||||
DCHECK(set_condition); \
|
||||
TaggedField<MaybeObject, offset>::Release_Store(*this, value); \
|
||||
CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode); \
|
||||
}
|
||||
|
||||
#define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED(holder, name, offset, \
|
||||
@ -380,9 +380,9 @@
|
||||
return instance_type == forinstancetype; \
|
||||
}
|
||||
|
||||
#define TYPE_CHECKER(type, ...) \
|
||||
DEF_GETTER(HeapObject, Is##type, bool) { \
|
||||
return InstanceTypeChecker::Is##type(map(cage_base).instance_type()); \
|
||||
#define TYPE_CHECKER(type, ...) \
|
||||
DEF_GETTER(HeapObject, Is##type, bool) { \
|
||||
return InstanceTypeChecker::Is##type(map(isolate).instance_type()); \
|
||||
}
|
||||
|
||||
#define RELAXED_INT16_ACCESSORS(holder, name, offset) \
|
||||
|
@ -65,19 +65,19 @@ int PropertyDetails::field_width_in_words() const {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsClassBoilerplate, bool) {
|
||||
return IsFixedArrayExact(cage_base);
|
||||
return IsFixedArrayExact(isolate);
|
||||
}
|
||||
|
||||
bool Object::IsTaggedIndex() const {
|
||||
return IsSmi() && TaggedIndex::IsValid(TaggedIndex(ptr()).value());
|
||||
}
|
||||
|
||||
#define IS_TYPE_FUNCTION_DEF(type_) \
|
||||
bool Object::Is##type_() const { \
|
||||
return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \
|
||||
} \
|
||||
bool Object::Is##type_(PtrComprCageBase cage_base) const { \
|
||||
return IsHeapObject() && HeapObject::cast(*this).Is##type_(cage_base); \
|
||||
#define IS_TYPE_FUNCTION_DEF(type_) \
|
||||
bool Object::Is##type_() const { \
|
||||
return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \
|
||||
} \
|
||||
bool Object::Is##type_(IsolateRoot isolate) const { \
|
||||
return IsHeapObject() && HeapObject::cast(*this).Is##type_(isolate); \
|
||||
}
|
||||
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
|
||||
IS_TYPE_FUNCTION_DEF(HashTableBase)
|
||||
@ -148,125 +148,127 @@ bool HeapObject::IsNullOrUndefined() const {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsUniqueName, bool) {
|
||||
return IsInternalizedString(cage_base) || IsSymbol(cage_base);
|
||||
return IsInternalizedString(isolate) || IsSymbol(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsFunction, bool) {
|
||||
return IsJSFunctionOrBoundFunction();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsCallable, bool) {
|
||||
return map(cage_base).is_callable();
|
||||
}
|
||||
DEF_GETTER(HeapObject, IsCallable, bool) { return map(isolate).is_callable(); }
|
||||
|
||||
DEF_GETTER(HeapObject, IsCallableJSProxy, bool) {
|
||||
return IsCallable(cage_base) && IsJSProxy(cage_base);
|
||||
return IsCallable(isolate) && IsJSProxy(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsCallableApiObject, bool) {
|
||||
InstanceType type = map(cage_base).instance_type();
|
||||
return IsCallable(cage_base) &&
|
||||
InstanceType type = map(isolate).instance_type();
|
||||
return IsCallable(isolate) &&
|
||||
(type == JS_API_OBJECT_TYPE || type == JS_SPECIAL_API_OBJECT_TYPE);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsNonNullForeign, bool) {
|
||||
return IsForeign(cage_base) &&
|
||||
return IsForeign(isolate) &&
|
||||
Foreign::cast(*this).foreign_address() != kNullAddress;
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsConstructor, bool) {
|
||||
return map(cage_base).is_constructor();
|
||||
return map(isolate).is_constructor();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsSourceTextModuleInfo, bool) {
|
||||
return map(cage_base) == GetReadOnlyRoots(cage_base).module_info_map();
|
||||
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
|
||||
// i::GetIsolateForPtrCompr(HeapObject).
|
||||
return map(isolate) == GetReadOnlyRoots(isolate).module_info_map();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsConsString, bool) {
|
||||
if (!IsString(cage_base)) return false;
|
||||
return StringShape(String::cast(*this).map(cage_base)).IsCons();
|
||||
if (!IsString(isolate)) return false;
|
||||
return StringShape(String::cast(*this).map(isolate)).IsCons();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsThinString, bool) {
|
||||
if (!IsString(cage_base)) return false;
|
||||
return StringShape(String::cast(*this).map(cage_base)).IsThin();
|
||||
if (!IsString(isolate)) return false;
|
||||
return StringShape(String::cast(*this).map(isolate)).IsThin();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsSlicedString, bool) {
|
||||
if (!IsString(cage_base)) return false;
|
||||
return StringShape(String::cast(*this).map(cage_base)).IsSliced();
|
||||
if (!IsString(isolate)) return false;
|
||||
return StringShape(String::cast(*this).map(isolate)).IsSliced();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsSeqString, bool) {
|
||||
if (!IsString(cage_base)) return false;
|
||||
return StringShape(String::cast(*this).map(cage_base)).IsSequential();
|
||||
if (!IsString(isolate)) return false;
|
||||
return StringShape(String::cast(*this).map(isolate)).IsSequential();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsSeqOneByteString, bool) {
|
||||
if (!IsString(cage_base)) return false;
|
||||
return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
|
||||
String::cast(*this).IsOneByteRepresentation(cage_base);
|
||||
if (!IsString(isolate)) return false;
|
||||
return StringShape(String::cast(*this).map(isolate)).IsSequential() &&
|
||||
String::cast(*this).IsOneByteRepresentation(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsSeqTwoByteString, bool) {
|
||||
if (!IsString(cage_base)) return false;
|
||||
return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
|
||||
String::cast(*this).IsTwoByteRepresentation(cage_base);
|
||||
if (!IsString(isolate)) return false;
|
||||
return StringShape(String::cast(*this).map(isolate)).IsSequential() &&
|
||||
String::cast(*this).IsTwoByteRepresentation(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsExternalOneByteString, bool) {
|
||||
if (!IsString(cage_base)) return false;
|
||||
return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
|
||||
String::cast(*this).IsOneByteRepresentation(cage_base);
|
||||
if (!IsString(isolate)) return false;
|
||||
return StringShape(String::cast(*this).map(isolate)).IsExternal() &&
|
||||
String::cast(*this).IsOneByteRepresentation(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsExternalTwoByteString, bool) {
|
||||
if (!IsString(cage_base)) return false;
|
||||
return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
|
||||
String::cast(*this).IsTwoByteRepresentation(cage_base);
|
||||
if (!IsString(isolate)) return false;
|
||||
return StringShape(String::cast(*this).map(isolate)).IsExternal() &&
|
||||
String::cast(*this).IsTwoByteRepresentation(isolate);
|
||||
}
|
||||
|
||||
bool Object::IsNumber() const {
|
||||
if (IsSmi()) return true;
|
||||
HeapObject this_heap_object = HeapObject::cast(*this);
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
|
||||
return this_heap_object.IsHeapNumber(cage_base);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
|
||||
return this_heap_object.IsHeapNumber(isolate);
|
||||
}
|
||||
|
||||
bool Object::IsNumber(PtrComprCageBase cage_base) const {
|
||||
return IsSmi() || IsHeapNumber(cage_base);
|
||||
bool Object::IsNumber(IsolateRoot isolate) const {
|
||||
return IsSmi() || IsHeapNumber(isolate);
|
||||
}
|
||||
|
||||
bool Object::IsNumeric() const {
|
||||
if (IsSmi()) return true;
|
||||
HeapObject this_heap_object = HeapObject::cast(*this);
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
|
||||
return this_heap_object.IsHeapNumber(cage_base) ||
|
||||
this_heap_object.IsBigInt(cage_base);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
|
||||
return this_heap_object.IsHeapNumber(isolate) ||
|
||||
this_heap_object.IsBigInt(isolate);
|
||||
}
|
||||
|
||||
bool Object::IsNumeric(PtrComprCageBase cage_base) const {
|
||||
return IsNumber(cage_base) || IsBigInt(cage_base);
|
||||
bool Object::IsNumeric(IsolateRoot isolate) const {
|
||||
return IsNumber(isolate) || IsBigInt(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsFreeSpaceOrFiller, bool) {
|
||||
InstanceType instance_type = map(cage_base).instance_type();
|
||||
InstanceType instance_type = map(isolate).instance_type();
|
||||
return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsArrayList, bool) {
|
||||
ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
|
||||
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
|
||||
// i::GetIsolateForPtrCompr(HeapObject).
|
||||
ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
|
||||
return *this == roots.empty_fixed_array() ||
|
||||
map(cage_base) == roots.array_list_map();
|
||||
map(isolate) == roots.array_list_map();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsRegExpMatchInfo, bool) {
|
||||
return IsFixedArrayExact(cage_base);
|
||||
return IsFixedArrayExact(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
|
||||
// Must be a fixed array.
|
||||
if (!IsFixedArrayExact(cage_base)) return false;
|
||||
if (!IsFixedArrayExact(isolate)) return false;
|
||||
|
||||
// There's no sure way to detect the difference between a fixed array and
|
||||
// a deoptimization data array. Since this is used for asserts we can
|
||||
@ -280,14 +282,14 @@ DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsHandlerTable, bool) {
|
||||
if (!IsFixedArrayExact(cage_base)) return false;
|
||||
if (!IsFixedArrayExact(isolate)) return false;
|
||||
// There's actually no way to see the difference between a fixed array and
|
||||
// a handler table array.
|
||||
return true;
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsTemplateList, bool) {
|
||||
if (!IsFixedArrayExact(cage_base)) return false;
|
||||
if (!IsFixedArrayExact(isolate)) return false;
|
||||
// There's actually no way to see the difference between a fixed array and
|
||||
// a template list.
|
||||
if (FixedArray::cast(*this).length() < 1) return false;
|
||||
@ -295,86 +297,84 @@ DEF_GETTER(HeapObject, IsTemplateList, bool) {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsDependentCode, bool) {
|
||||
if (!IsWeakFixedArray(cage_base)) return false;
|
||||
if (!IsWeakFixedArray(isolate)) return false;
|
||||
// There's actually no way to see the difference between a weak fixed array
|
||||
// and a dependent codes array.
|
||||
return true;
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
|
||||
if (!IsWeakFixedArray(cage_base)) return false;
|
||||
if (!IsWeakFixedArray(isolate)) return false;
|
||||
// There's actually no way to see the difference between a weak fixed array
|
||||
// and a osr optimized code cache.
|
||||
return true;
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsAbstractCode, bool) {
|
||||
return IsBytecodeArray(cage_base) || IsCode(cage_base);
|
||||
return IsBytecodeArray(isolate) || IsCode(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsStringWrapper, bool) {
|
||||
return IsJSPrimitiveWrapper(cage_base) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsString(cage_base);
|
||||
return IsJSPrimitiveWrapper(isolate) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsString(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsBooleanWrapper, bool) {
|
||||
return IsJSPrimitiveWrapper(cage_base) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsBoolean(cage_base);
|
||||
return IsJSPrimitiveWrapper(isolate) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsBoolean(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsScriptWrapper, bool) {
|
||||
return IsJSPrimitiveWrapper(cage_base) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsScript(cage_base);
|
||||
return IsJSPrimitiveWrapper(isolate) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsScript(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsNumberWrapper, bool) {
|
||||
return IsJSPrimitiveWrapper(cage_base) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsNumber(cage_base);
|
||||
return IsJSPrimitiveWrapper(isolate) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsNumber(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsBigIntWrapper, bool) {
|
||||
return IsJSPrimitiveWrapper(cage_base) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsBigInt(cage_base);
|
||||
return IsJSPrimitiveWrapper(isolate) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsBigInt(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsSymbolWrapper, bool) {
|
||||
return IsJSPrimitiveWrapper(cage_base) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsSymbol(cage_base);
|
||||
return IsJSPrimitiveWrapper(isolate) &&
|
||||
JSPrimitiveWrapper::cast(*this).value().IsSymbol(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(cage_base); }
|
||||
DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(isolate); }
|
||||
|
||||
DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(cage_base); }
|
||||
DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(isolate); }
|
||||
|
||||
DEF_GETTER(HeapObject, IsCompilationCacheTable, bool) {
|
||||
return IsHashTable(cage_base);
|
||||
return IsHashTable(isolate);
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(cage_base); }
|
||||
DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(isolate); }
|
||||
|
||||
DEF_GETTER(HeapObject, IsObjectHashTable, bool) {
|
||||
return IsHashTable(cage_base);
|
||||
}
|
||||
DEF_GETTER(HeapObject, IsObjectHashTable, bool) { return IsHashTable(isolate); }
|
||||
|
||||
DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(cage_base); }
|
||||
DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(isolate); }
|
||||
|
||||
#if V8_ENABLE_WEBASSEMBLY
|
||||
DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) {
|
||||
// It is not possible to check for the existence of certain properties on the
|
||||
// underlying {JSReceiver} here because that requires calling handlified code.
|
||||
return IsJSReceiver(cage_base);
|
||||
return IsJSReceiver(isolate);
|
||||
}
|
||||
#endif // V8_ENABLE_WEBASSEMBLY
|
||||
|
||||
bool Object::IsPrimitive() const {
|
||||
if (IsSmi()) return true;
|
||||
HeapObject this_heap_object = HeapObject::cast(*this);
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
|
||||
return this_heap_object.map(cage_base).IsPrimitiveMap();
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
|
||||
return this_heap_object.map(isolate).IsPrimitiveMap();
|
||||
}
|
||||
|
||||
bool Object::IsPrimitive(PtrComprCageBase cage_base) const {
|
||||
return IsSmi() || HeapObject::cast(*this).map(cage_base).IsPrimitiveMap();
|
||||
bool Object::IsPrimitive(IsolateRoot isolate) const {
|
||||
return IsSmi() || HeapObject::cast(*this).map(isolate).IsPrimitiveMap();
|
||||
}
|
||||
|
||||
// static
|
||||
@ -387,24 +387,24 @@ Maybe<bool> Object::IsArray(Handle<Object> object) {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsUndetectable, bool) {
|
||||
return map(cage_base).is_undetectable();
|
||||
return map(isolate).is_undetectable();
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsAccessCheckNeeded, bool) {
|
||||
if (IsJSGlobalProxy(cage_base)) {
|
||||
if (IsJSGlobalProxy(isolate)) {
|
||||
const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
|
||||
JSGlobalObject global = proxy.GetIsolate()->context().global_object();
|
||||
return proxy.IsDetachedFrom(global);
|
||||
}
|
||||
return map(cage_base).is_access_check_needed();
|
||||
return map(isolate).is_access_check_needed();
|
||||
}
|
||||
|
||||
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
|
||||
bool Object::Is##Name() const { \
|
||||
return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \
|
||||
} \
|
||||
bool Object::Is##Name(PtrComprCageBase cage_base) const { \
|
||||
return IsHeapObject() && HeapObject::cast(*this).Is##Name(cage_base); \
|
||||
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
|
||||
bool Object::Is##Name() const { \
|
||||
return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \
|
||||
} \
|
||||
bool Object::Is##Name(IsolateRoot isolate) const { \
|
||||
return IsHeapObject() && HeapObject::cast(*this).Is##Name(isolate); \
|
||||
}
|
||||
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
|
||||
#undef MAKE_STRUCT_PREDICATE
|
||||
@ -467,17 +467,17 @@ bool Object::FilterKey(PropertyFilter filter) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Representation Object::OptimalRepresentation(PtrComprCageBase cage_base) const {
|
||||
Representation Object::OptimalRepresentation(IsolateRoot isolate) const {
|
||||
if (!FLAG_track_fields) return Representation::Tagged();
|
||||
if (IsSmi()) {
|
||||
return Representation::Smi();
|
||||
}
|
||||
HeapObject heap_object = HeapObject::cast(*this);
|
||||
if (FLAG_track_double_fields && heap_object.IsHeapNumber(cage_base)) {
|
||||
if (FLAG_track_double_fields && heap_object.IsHeapNumber(isolate)) {
|
||||
return Representation::Double();
|
||||
} else if (FLAG_track_computed_fields &&
|
||||
heap_object.IsUninitialized(
|
||||
heap_object.GetReadOnlyRoots(cage_base))) {
|
||||
heap_object.GetReadOnlyRoots(isolate))) {
|
||||
return Representation::None();
|
||||
} else if (FLAG_track_heap_object_fields) {
|
||||
return Representation::HeapObject();
|
||||
@ -486,9 +486,9 @@ Representation Object::OptimalRepresentation(PtrComprCageBase cage_base) const {
|
||||
}
|
||||
}
|
||||
|
||||
ElementsKind Object::OptimalElementsKind(PtrComprCageBase cage_base) const {
|
||||
ElementsKind Object::OptimalElementsKind(IsolateRoot isolate) const {
|
||||
if (IsSmi()) return PACKED_SMI_ELEMENTS;
|
||||
if (IsNumber(cage_base)) return PACKED_DOUBLE_ELEMENTS;
|
||||
if (IsNumber(isolate)) return PACKED_DOUBLE_ELEMENTS;
|
||||
return PACKED_ELEMENTS;
|
||||
}
|
||||
|
||||
@ -631,10 +631,9 @@ void Object::InitExternalPointerField(size_t offset, Isolate* isolate,
|
||||
i::InitExternalPointerField(field_address(offset), isolate, value, tag);
|
||||
}
|
||||
|
||||
Address Object::ReadExternalPointerField(size_t offset,
|
||||
PtrComprCageBase isolate_root,
|
||||
Address Object::ReadExternalPointerField(size_t offset, IsolateRoot isolate,
|
||||
ExternalPointerTag tag) const {
|
||||
return i::ReadExternalPointerField(field_address(offset), isolate_root, tag);
|
||||
return i::ReadExternalPointerField(field_address(offset), isolate, tag);
|
||||
}
|
||||
|
||||
void Object::WriteExternalPointerField(size_t offset, Isolate* isolate,
|
||||
@ -688,16 +687,16 @@ ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
|
||||
return ReadOnlyHeap::GetReadOnlyRoots(*this);
|
||||
}
|
||||
|
||||
ReadOnlyRoots HeapObject::GetReadOnlyRoots(PtrComprCageBase cage_base) const {
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||
DCHECK_NE(cage_base.address(), 0);
|
||||
return ReadOnlyRoots(Isolate::FromRootAddress(cage_base.address()));
|
||||
ReadOnlyRoots HeapObject::GetReadOnlyRoots(IsolateRoot isolate) const {
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
DCHECK_NE(isolate.address(), 0);
|
||||
return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address()));
|
||||
#else
|
||||
return GetReadOnlyRoots();
|
||||
#endif
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, map, Map) { return map_word(cage_base).ToMap(); }
|
||||
DEF_GETTER(HeapObject, map, Map) { return map_word(isolate).ToMap(); }
|
||||
|
||||
void HeapObject::set_map(Map value) {
|
||||
#ifdef VERIFY_HEAP
|
||||
@ -716,7 +715,7 @@ void HeapObject::set_map(Map value) {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, synchronized_map, Map) {
|
||||
return synchronized_map_word(cage_base).ToMap();
|
||||
return synchronized_map_word(isolate).ToMap();
|
||||
}
|
||||
|
||||
void HeapObject::synchronized_set_map(Map value) {
|
||||
@ -762,7 +761,7 @@ ObjectSlot HeapObject::map_slot() const {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, map_word, MapWord) {
|
||||
return MapField::Relaxed_Load(cage_base, *this);
|
||||
return MapField::Relaxed_Load(isolate, *this);
|
||||
}
|
||||
|
||||
void HeapObject::set_map_word(MapWord map_word) {
|
||||
@ -770,7 +769,7 @@ void HeapObject::set_map_word(MapWord map_word) {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, synchronized_map_word, MapWord) {
|
||||
return MapField::Acquire_Load(cage_base, *this);
|
||||
return MapField::Acquire_Load(isolate, *this);
|
||||
}
|
||||
|
||||
void HeapObject::synchronized_set_map_word(MapWord map_word) {
|
||||
|
@ -5567,8 +5567,7 @@ Handle<Derived> HashTable<Derived, Shape>::NewInternal(
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base,
|
||||
Derived new_table) {
|
||||
void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate, Derived new_table) {
|
||||
DisallowGarbageCollection no_gc;
|
||||
WriteBarrierMode mode = new_table.GetWriteBarrierMode(no_gc);
|
||||
|
||||
@ -5576,21 +5575,21 @@ void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base,
|
||||
|
||||
// Copy prefix to new array.
|
||||
for (int i = kPrefixStartIndex; i < kElementsStartIndex; i++) {
|
||||
new_table.set(i, get(cage_base, i), mode);
|
||||
new_table.set(i, get(isolate, i), mode);
|
||||
}
|
||||
|
||||
// Rehash the elements.
|
||||
ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
|
||||
ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
|
||||
for (InternalIndex i : this->IterateEntries()) {
|
||||
uint32_t from_index = EntryToIndex(i);
|
||||
Object k = this->get(cage_base, from_index);
|
||||
Object k = this->get(isolate, from_index);
|
||||
if (!IsKey(roots, k)) continue;
|
||||
uint32_t hash = Shape::HashForObject(roots, k);
|
||||
uint32_t insertion_index =
|
||||
EntryToIndex(new_table.FindInsertionEntry(cage_base, roots, hash));
|
||||
new_table.set_key(insertion_index, get(cage_base, from_index), mode);
|
||||
EntryToIndex(new_table.FindInsertionEntry(isolate, roots, hash));
|
||||
new_table.set_key(insertion_index, get(isolate, from_index), mode);
|
||||
for (int j = 1; j < Shape::kEntrySize; j++) {
|
||||
new_table.set(insertion_index + j, get(cage_base, from_index + j), mode);
|
||||
new_table.set(insertion_index + j, get(isolate, from_index + j), mode);
|
||||
}
|
||||
}
|
||||
new_table.SetNumberOfElements(NumberOfElements());
|
||||
@ -5632,10 +5631,10 @@ void HashTable<Derived, Shape>::Swap(InternalIndex entry1, InternalIndex entry2,
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base) {
|
||||
void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
|
||||
DisallowGarbageCollection no_gc;
|
||||
WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
|
||||
ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
|
||||
ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
|
||||
uint32_t capacity = Capacity();
|
||||
bool done = false;
|
||||
for (int probe = 1; !done; probe++) {
|
||||
@ -5644,7 +5643,7 @@ void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base) {
|
||||
done = true;
|
||||
for (InternalIndex current(0); current.raw_value() < capacity;
|
||||
/* {current} is advanced manually below, when appropriate.*/) {
|
||||
Object current_key = KeyAt(cage_base, current);
|
||||
Object current_key = KeyAt(isolate, current);
|
||||
if (!IsKey(roots, current_key)) {
|
||||
++current; // Advance to next entry.
|
||||
continue;
|
||||
@ -5654,7 +5653,7 @@ void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base) {
|
||||
++current; // Advance to next entry.
|
||||
continue;
|
||||
}
|
||||
Object target_key = KeyAt(cage_base, target);
|
||||
Object target_key = KeyAt(isolate, target);
|
||||
if (!IsKey(roots, target_key) ||
|
||||
EntryForProbe(roots, target_key, probe, target) != target) {
|
||||
// Put the current element into the correct position.
|
||||
@ -5674,7 +5673,7 @@ void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base) {
|
||||
HeapObject undefined = roots.undefined_value();
|
||||
Derived* self = static_cast<Derived*>(this);
|
||||
for (InternalIndex current : InternalIndex::Range(capacity)) {
|
||||
if (KeyAt(cage_base, current) == the_hole) {
|
||||
if (KeyAt(isolate, current) == the_hole) {
|
||||
self->set_key(EntryToIndex(current) + kEntryKeyIndex, undefined,
|
||||
SKIP_WRITE_BARRIER);
|
||||
}
|
||||
@ -5765,14 +5764,15 @@ Handle<Derived> HashTable<Derived, Shape>::Shrink(Isolate* isolate,
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
InternalIndex HashTable<Derived, Shape>::FindInsertionEntry(
|
||||
PtrComprCageBase cage_base, ReadOnlyRoots roots, uint32_t hash) {
|
||||
InternalIndex HashTable<Derived, Shape>::FindInsertionEntry(IsolateRoot isolate,
|
||||
ReadOnlyRoots roots,
|
||||
uint32_t hash) {
|
||||
uint32_t capacity = Capacity();
|
||||
uint32_t count = 1;
|
||||
// EnsureCapacity will guarantee the hash table is never full.
|
||||
for (InternalIndex entry = FirstProbe(hash, capacity);;
|
||||
entry = NextProbe(entry, count++, capacity)) {
|
||||
if (!IsKey(roots, KeyAt(cage_base, entry))) return entry;
|
||||
if (!IsKey(roots, KeyAt(isolate, entry))) return entry;
|
||||
}
|
||||
}
|
||||
|
||||
@ -6080,14 +6080,14 @@ void ObjectHashTableBase<Derived, Shape>::FillEntriesWithHoles(
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
Object ObjectHashTableBase<Derived, Shape>::Lookup(PtrComprCageBase cage_base,
|
||||
Object ObjectHashTableBase<Derived, Shape>::Lookup(IsolateRoot isolate,
|
||||
Handle<Object> key,
|
||||
int32_t hash) {
|
||||
DisallowGarbageCollection no_gc;
|
||||
ReadOnlyRoots roots = this->GetReadOnlyRoots(cage_base);
|
||||
ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate);
|
||||
DCHECK(this->IsKey(roots, *key));
|
||||
|
||||
InternalIndex entry = this->FindEntry(cage_base, roots, key, hash);
|
||||
InternalIndex entry = this->FindEntry(isolate, roots, key, hash);
|
||||
if (entry.is_not_found()) return roots.the_hole_value();
|
||||
return this->get(Derived::EntryToIndex(entry) + 1);
|
||||
}
|
||||
@ -6096,8 +6096,8 @@ template <typename Derived, typename Shape>
|
||||
Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key) {
|
||||
DisallowGarbageCollection no_gc;
|
||||
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
ReadOnlyRoots roots = this->GetReadOnlyRoots(cage_base);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate);
|
||||
DCHECK(this->IsKey(roots, *key));
|
||||
|
||||
// If the object does not have an identity hash, it was never used as a key.
|
||||
@ -6105,13 +6105,13 @@ Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key) {
|
||||
if (hash.IsUndefined(roots)) {
|
||||
return roots.the_hole_value();
|
||||
}
|
||||
return Lookup(cage_base, key, Smi::ToInt(hash));
|
||||
return Lookup(isolate, key, Smi::ToInt(hash));
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key,
|
||||
int32_t hash) {
|
||||
return Lookup(GetPtrComprCageBase(*this), key, hash);
|
||||
return Lookup(GetIsolateForPtrCompr(*this), key, hash);
|
||||
}
|
||||
|
||||
template <typename Derived, typename Shape>
|
||||
|
@ -279,7 +279,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
|
||||
|
||||
#define IS_TYPE_FUNCTION_DECL(Type) \
|
||||
V8_INLINE bool Is##Type() const; \
|
||||
V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
|
||||
V8_INLINE bool Is##Type(IsolateRoot isolate) const;
|
||||
OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
|
||||
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
|
||||
IS_TYPE_FUNCTION_DECL(HashTableBase)
|
||||
@ -307,7 +307,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
|
||||
|
||||
#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
|
||||
V8_INLINE bool Is##Name() const; \
|
||||
V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const;
|
||||
V8_INLINE bool Is##Name(IsolateRoot isolate) const;
|
||||
STRUCT_LIST(DECL_STRUCT_PREDICATE)
|
||||
#undef DECL_STRUCT_PREDICATE
|
||||
|
||||
@ -322,9 +322,9 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
|
||||
V8_EXPORT_PRIVATE bool ToInt32(int32_t* value);
|
||||
inline bool ToUint32(uint32_t* value) const;
|
||||
|
||||
inline Representation OptimalRepresentation(PtrComprCageBase cage_base) const;
|
||||
inline Representation OptimalRepresentation(IsolateRoot isolate) const;
|
||||
|
||||
inline ElementsKind OptimalElementsKind(PtrComprCageBase cage_base) const;
|
||||
inline ElementsKind OptimalElementsKind(IsolateRoot isolate) const;
|
||||
|
||||
inline bool FitsRepresentation(Representation representation);
|
||||
|
||||
@ -673,8 +673,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
|
||||
inline void InitExternalPointerField(size_t offset, Isolate* isolate);
|
||||
inline void InitExternalPointerField(size_t offset, Isolate* isolate,
|
||||
Address value, ExternalPointerTag tag);
|
||||
inline Address ReadExternalPointerField(size_t offset,
|
||||
PtrComprCageBase isolate_root,
|
||||
inline Address ReadExternalPointerField(size_t offset, IsolateRoot isolate,
|
||||
ExternalPointerTag tag) const;
|
||||
inline void WriteExternalPointerField(size_t offset, Isolate* isolate,
|
||||
Address value, ExternalPointerTag tag);
|
||||
|
@ -37,7 +37,7 @@ Handle<Object> Oddball::ToNumber(Isolate* isolate, Handle<Oddball> input) {
|
||||
}
|
||||
|
||||
DEF_GETTER(HeapObject, IsBoolean, bool) {
|
||||
return IsOddball(cage_base) &&
|
||||
return IsOddball(isolate) &&
|
||||
((Oddball::cast(*this).kind() & Oddball::kNotBooleanMask) == 0);
|
||||
}
|
||||
|
||||
|
@ -25,14 +25,14 @@ SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset)
|
||||
SYNCHRONIZED_SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset)
|
||||
|
||||
Object PropertyArray::get(int index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return get(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return get(isolate, index);
|
||||
}
|
||||
|
||||
Object PropertyArray::get(PtrComprCageBase cage_base, int index) const {
|
||||
Object PropertyArray::get(IsolateRoot isolate, int index) const {
|
||||
DCHECK_LT(static_cast<unsigned>(index),
|
||||
static_cast<unsigned>(this->length()));
|
||||
return TaggedField<Object>::Relaxed_Load(cage_base, *this,
|
||||
return TaggedField<Object>::Relaxed_Load(isolate, *this,
|
||||
OffsetOfElementAt(index));
|
||||
}
|
||||
|
||||
|
@ -30,7 +30,7 @@ class PropertyArray : public HeapObject {
|
||||
inline int Hash() const;
|
||||
|
||||
inline Object get(int index) const;
|
||||
inline Object get(PtrComprCageBase cage_base, int index) const;
|
||||
inline Object get(IsolateRoot isolate, int index) const;
|
||||
|
||||
inline void set(int index, Object value);
|
||||
// Setter with explicit barrier mode.
|
||||
|
@ -75,10 +75,10 @@ Descriptor Descriptor::DataField(Handle<Name> key, int field_index,
|
||||
|
||||
Descriptor Descriptor::DataConstant(Handle<Name> key, Handle<Object> value,
|
||||
PropertyAttributes attributes) {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*key);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*key);
|
||||
return Descriptor(key, MaybeObjectHandle(value), kData, attributes,
|
||||
kDescriptor, PropertyConstness::kConst,
|
||||
value->OptimalRepresentation(cage_base), 0);
|
||||
value->OptimalRepresentation(isolate), 0);
|
||||
}
|
||||
|
||||
Descriptor Descriptor::DataConstant(Isolate* isolate, Handle<Name> key,
|
||||
|
@ -575,13 +575,13 @@ Handle<ScopeInfo> ScopeInfo::CreateForBootstrapping(Isolate* isolate,
|
||||
}
|
||||
|
||||
Object ScopeInfo::get(int index) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return get(cage_base, index);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return get(isolate, index);
|
||||
}
|
||||
|
||||
Object ScopeInfo::get(PtrComprCageBase cage_base, int index) const {
|
||||
Object ScopeInfo::get(IsolateRoot isolate, int index) const {
|
||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||
return TaggedField<Object>::Relaxed_Load(cage_base, *this,
|
||||
return TaggedField<Object>::Relaxed_Load(isolate, *this,
|
||||
OffsetOfElementAt(index));
|
||||
}
|
||||
|
||||
|
@ -293,7 +293,7 @@ class ScopeInfo : public TorqueGeneratedScopeInfo<ScopeInfo, HeapObject> {
|
||||
// 'flags', the first field defined by ScopeInfo after the standard-size
|
||||
// HeapObject header.
|
||||
V8_EXPORT_PRIVATE Object get(int index) const;
|
||||
Object get(PtrComprCageBase cage_base, int index) const;
|
||||
Object get(IsolateRoot isolate, int index) const;
|
||||
// Setter that doesn't need write barrier.
|
||||
void set(int index, Smi value);
|
||||
// Setter with explicit barrier mode.
|
||||
|
@ -31,7 +31,7 @@ bool FullObjectSlot::contains_value(Address raw_value) const {
|
||||
|
||||
Object FullObjectSlot::operator*() const { return Object(*location()); }
|
||||
|
||||
Object FullObjectSlot::load(PtrComprCageBase cage_base) const { return **this; }
|
||||
Object FullObjectSlot::load(IsolateRoot isolate) const { return **this; }
|
||||
|
||||
void FullObjectSlot::store(Object value) const { *location() = value.ptr(); }
|
||||
|
||||
@ -39,7 +39,7 @@ Object FullObjectSlot::Acquire_Load() const {
|
||||
return Object(base::AsAtomicPointer::Acquire_Load(location()));
|
||||
}
|
||||
|
||||
Object FullObjectSlot::Acquire_Load(PtrComprCageBase cage_base) const {
|
||||
Object FullObjectSlot::Acquire_Load(IsolateRoot isolate) const {
|
||||
return Acquire_Load();
|
||||
}
|
||||
|
||||
@ -47,7 +47,7 @@ Object FullObjectSlot::Relaxed_Load() const {
|
||||
return Object(base::AsAtomicPointer::Relaxed_Load(location()));
|
||||
}
|
||||
|
||||
Object FullObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const {
|
||||
Object FullObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
|
||||
return Relaxed_Load();
|
||||
}
|
||||
|
||||
@ -79,7 +79,7 @@ MaybeObject FullMaybeObjectSlot::operator*() const {
|
||||
return MaybeObject(*location());
|
||||
}
|
||||
|
||||
MaybeObject FullMaybeObjectSlot::load(PtrComprCageBase cage_base) const {
|
||||
MaybeObject FullMaybeObjectSlot::load(IsolateRoot isolate) const {
|
||||
return **this;
|
||||
}
|
||||
|
||||
@ -91,8 +91,7 @@ MaybeObject FullMaybeObjectSlot::Relaxed_Load() const {
|
||||
return MaybeObject(base::AsAtomicPointer::Relaxed_Load(location()));
|
||||
}
|
||||
|
||||
MaybeObject FullMaybeObjectSlot::Relaxed_Load(
|
||||
PtrComprCageBase cage_base) const {
|
||||
MaybeObject FullMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
|
||||
return Relaxed_Load();
|
||||
}
|
||||
|
||||
@ -114,7 +113,7 @@ HeapObjectReference FullHeapObjectSlot::operator*() const {
|
||||
return HeapObjectReference(*location());
|
||||
}
|
||||
|
||||
HeapObjectReference FullHeapObjectSlot::load(PtrComprCageBase cage_base) const {
|
||||
HeapObjectReference FullHeapObjectSlot::load(IsolateRoot isolate) const {
|
||||
return **this;
|
||||
}
|
||||
|
||||
|
@ -110,13 +110,13 @@ class FullObjectSlot : public SlotBase<FullObjectSlot, Address> {
|
||||
inline bool contains_value(Address raw_value) const;
|
||||
|
||||
inline Object operator*() const;
|
||||
inline Object load(PtrComprCageBase cage_base) const;
|
||||
inline Object load(IsolateRoot isolate) const;
|
||||
inline void store(Object value) const;
|
||||
|
||||
inline Object Acquire_Load() const;
|
||||
inline Object Acquire_Load(PtrComprCageBase cage_base) const;
|
||||
inline Object Acquire_Load(IsolateRoot isolate) const;
|
||||
inline Object Relaxed_Load() const;
|
||||
inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
|
||||
inline Object Relaxed_Load(IsolateRoot isolate) const;
|
||||
inline void Relaxed_Store(Object value) const;
|
||||
inline void Release_Store(Object value) const;
|
||||
inline Object Relaxed_CompareAndSwap(Object old, Object target) const;
|
||||
@ -147,11 +147,11 @@ class FullMaybeObjectSlot
|
||||
: SlotBase(slot.address()) {}
|
||||
|
||||
inline MaybeObject operator*() const;
|
||||
inline MaybeObject load(PtrComprCageBase cage_base) const;
|
||||
inline MaybeObject load(IsolateRoot isolate) const;
|
||||
inline void store(MaybeObject value) const;
|
||||
|
||||
inline MaybeObject Relaxed_Load() const;
|
||||
inline MaybeObject Relaxed_Load(PtrComprCageBase cage_base) const;
|
||||
inline MaybeObject Relaxed_Load(IsolateRoot isolate) const;
|
||||
inline void Relaxed_Store(MaybeObject value) const;
|
||||
inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
|
||||
};
|
||||
@ -174,7 +174,7 @@ class FullHeapObjectSlot : public SlotBase<FullHeapObjectSlot, Address> {
|
||||
: SlotBase(slot.address()) {}
|
||||
|
||||
inline HeapObjectReference operator*() const;
|
||||
inline HeapObjectReference load(PtrComprCageBase cage_base) const;
|
||||
inline HeapObjectReference load(IsolateRoot isolate) const;
|
||||
inline void store(HeapObjectReference value) const;
|
||||
|
||||
inline HeapObject ToHeapObject() const;
|
||||
|
@ -274,12 +274,12 @@ inline TResult StringShape::DispatchToSpecificType(String str,
|
||||
}
|
||||
|
||||
DEF_GETTER(String, IsOneByteRepresentation, bool) {
|
||||
uint32_t type = map(cage_base).instance_type();
|
||||
uint32_t type = map(isolate).instance_type();
|
||||
return (type & kStringEncodingMask) == kOneByteStringTag;
|
||||
}
|
||||
|
||||
DEF_GETTER(String, IsTwoByteRepresentation, bool) {
|
||||
uint32_t type = map(cage_base).instance_type();
|
||||
uint32_t type = map(isolate).instance_type();
|
||||
return (type & kStringEncodingMask) == kTwoByteStringTag;
|
||||
}
|
||||
|
||||
@ -463,7 +463,7 @@ bool String::IsEqualTo(Vector<const Char> str, Isolate* isolate) const {
|
||||
template <String::EqualityType kEqType, typename Char>
|
||||
bool String::IsEqualTo(Vector<const Char> str) const {
|
||||
DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this));
|
||||
return IsEqualToImpl<kEqType>(str, GetPtrComprCageBase(*this),
|
||||
return IsEqualToImpl<kEqType>(str, GetIsolateForPtrCompr(*this),
|
||||
SharedStringAccessGuardIfNeeded::NotNeeded());
|
||||
}
|
||||
|
||||
@ -475,7 +475,7 @@ bool String::IsEqualTo(Vector<const Char> str, LocalIsolate* isolate) const {
|
||||
|
||||
template <String::EqualityType kEqType, typename Char>
|
||||
bool String::IsEqualToImpl(
|
||||
Vector<const Char> str, PtrComprCageBase cage_base,
|
||||
Vector<const Char> str, IsolateRoot isolate,
|
||||
const SharedStringAccessGuardIfNeeded& access_guard) const {
|
||||
size_t len = str.size();
|
||||
switch (kEqType) {
|
||||
@ -496,7 +496,7 @@ bool String::IsEqualToImpl(
|
||||
String string = *this;
|
||||
const Char* data = str.data();
|
||||
while (true) {
|
||||
int32_t type = string.map(cage_base).instance_type();
|
||||
int32_t type = string.map(isolate).instance_type();
|
||||
switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
|
||||
case kSeqStringTag | kOneByteStringTag:
|
||||
return CompareCharsEqual(
|
||||
@ -521,7 +521,7 @@ bool String::IsEqualToImpl(
|
||||
case kSlicedStringTag | kTwoByteStringTag: {
|
||||
SlicedString slicedString = SlicedString::cast(string);
|
||||
slice_offset += slicedString.offset();
|
||||
string = slicedString.parent(cage_base);
|
||||
string = slicedString.parent(isolate);
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -529,14 +529,13 @@ bool String::IsEqualToImpl(
|
||||
case kConsStringTag | kTwoByteStringTag: {
|
||||
// The ConsString path is more complex and rare, so call out to an
|
||||
// out-of-line handler.
|
||||
return IsConsStringEqualToImpl<Char>(ConsString::cast(string),
|
||||
slice_offset, str, cage_base,
|
||||
access_guard);
|
||||
return IsConsStringEqualToImpl<Char>(
|
||||
ConsString::cast(string), slice_offset, str, isolate, access_guard);
|
||||
}
|
||||
|
||||
case kThinStringTag | kOneByteStringTag:
|
||||
case kThinStringTag | kTwoByteStringTag:
|
||||
string = ThinString::cast(string).actual(cage_base);
|
||||
string = ThinString::cast(string).actual(isolate);
|
||||
continue;
|
||||
|
||||
default:
|
||||
@ -549,8 +548,7 @@ bool String::IsEqualToImpl(
|
||||
template <typename Char>
|
||||
bool String::IsConsStringEqualToImpl(
|
||||
ConsString string, int slice_offset, Vector<const Char> str,
|
||||
PtrComprCageBase cage_base,
|
||||
const SharedStringAccessGuardIfNeeded& access_guard) {
|
||||
IsolateRoot isolate, const SharedStringAccessGuardIfNeeded& access_guard) {
|
||||
// Already checked the len in IsEqualToImpl. Check GE rather than EQ in case
|
||||
// this is a prefix check.
|
||||
DCHECK_GE(string.length(), str.size());
|
||||
@ -563,7 +561,7 @@ bool String::IsConsStringEqualToImpl(
|
||||
// remaining string.
|
||||
size_t len = std::min<size_t>(segment.length(), remaining_str.size());
|
||||
Vector<const Char> sub_str = remaining_str.SubVector(0, len);
|
||||
if (!segment.IsEqualToImpl<EqualityType::kNoLengthCheck>(sub_str, cage_base,
|
||||
if (!segment.IsEqualToImpl<EqualityType::kNoLengthCheck>(sub_str, isolate,
|
||||
access_guard)) {
|
||||
return false;
|
||||
}
|
||||
@ -847,7 +845,7 @@ Object ConsString::unchecked_second() {
|
||||
}
|
||||
|
||||
DEF_GETTER(ThinString, unchecked_actual, HeapObject) {
|
||||
return TaggedField<HeapObject, kActualOffset>::load(cage_base, *this);
|
||||
return TaggedField<HeapObject, kActualOffset>::load(isolate, *this);
|
||||
}
|
||||
|
||||
bool ExternalString::is_uncached() const {
|
||||
@ -862,7 +860,7 @@ void ExternalString::AllocateExternalPointerEntries(Isolate* isolate) {
|
||||
}
|
||||
|
||||
DEF_GETTER(ExternalString, resource_as_address, Address) {
|
||||
return ReadExternalPointerField(kResourceOffset, cage_base,
|
||||
return ReadExternalPointerField(kResourceOffset, isolate,
|
||||
kExternalStringResourceTag);
|
||||
}
|
||||
|
||||
@ -910,7 +908,7 @@ DEF_GETTER(ExternalOneByteString, resource,
|
||||
|
||||
DEF_GETTER(ExternalOneByteString, mutable_resource,
|
||||
ExternalOneByteString::Resource*) {
|
||||
return reinterpret_cast<Resource*>(resource_as_address(cage_base));
|
||||
return reinterpret_cast<Resource*>(resource_as_address(isolate));
|
||||
}
|
||||
|
||||
void ExternalOneByteString::update_data_cache(Isolate* isolate) {
|
||||
@ -975,7 +973,7 @@ DEF_GETTER(ExternalTwoByteString, resource,
|
||||
|
||||
DEF_GETTER(ExternalTwoByteString, mutable_resource,
|
||||
ExternalTwoByteString::Resource*) {
|
||||
return reinterpret_cast<Resource*>(resource_as_address(cage_base));
|
||||
return reinterpret_cast<Resource*>(resource_as_address(isolate));
|
||||
}
|
||||
|
||||
void ExternalTwoByteString::update_data_cache(Isolate* isolate) {
|
||||
|
@ -91,15 +91,15 @@ bool KeyIsMatch(LocalIsolate* isolate, StringTableKey* key, String string) {
|
||||
class StringTable::Data {
|
||||
public:
|
||||
static std::unique_ptr<Data> New(int capacity);
|
||||
static std::unique_ptr<Data> Resize(PtrComprCageBase cage_base,
|
||||
static std::unique_ptr<Data> Resize(IsolateRoot isolate,
|
||||
std::unique_ptr<Data> data, int capacity);
|
||||
|
||||
OffHeapObjectSlot slot(InternalIndex index) const {
|
||||
return OffHeapObjectSlot(&elements_[index.as_uint32()]);
|
||||
}
|
||||
|
||||
Object Get(PtrComprCageBase cage_base, InternalIndex index) const {
|
||||
return slot(index).Acquire_Load(cage_base);
|
||||
Object Get(IsolateRoot isolate, InternalIndex index) const {
|
||||
return slot(index).Acquire_Load(isolate);
|
||||
}
|
||||
|
||||
void Set(InternalIndex index, String entry) {
|
||||
@ -139,8 +139,7 @@ class StringTable::Data {
|
||||
InternalIndex FindEntry(LocalIsolate* isolate, StringTableKey* key,
|
||||
uint32_t hash) const;
|
||||
|
||||
InternalIndex FindInsertionEntry(PtrComprCageBase cage_base,
|
||||
uint32_t hash) const;
|
||||
InternalIndex FindInsertionEntry(IsolateRoot isolate, uint32_t hash) const;
|
||||
|
||||
template <typename LocalIsolate, typename StringTableKey>
|
||||
InternalIndex FindEntryOrInsertionEntry(LocalIsolate* isolate,
|
||||
@ -158,7 +157,7 @@ class StringTable::Data {
|
||||
Data* PreviousData() { return previous_data_.get(); }
|
||||
void DropPreviousData() { previous_data_.reset(); }
|
||||
|
||||
void Print(PtrComprCageBase cage_base) const;
|
||||
void Print(IsolateRoot isolate) const;
|
||||
size_t GetCurrentMemoryUsage() const;
|
||||
|
||||
private:
|
||||
@ -225,7 +224,7 @@ std::unique_ptr<StringTable::Data> StringTable::Data::New(int capacity) {
|
||||
}
|
||||
|
||||
std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
|
||||
PtrComprCageBase cage_base, std::unique_ptr<Data> data, int capacity) {
|
||||
IsolateRoot isolate, std::unique_ptr<Data> data, int capacity) {
|
||||
std::unique_ptr<Data> new_data(new (capacity) Data(capacity));
|
||||
|
||||
DCHECK_LT(data->number_of_elements(), new_data->capacity());
|
||||
@ -235,12 +234,11 @@ std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
|
||||
|
||||
// Rehash the elements.
|
||||
for (InternalIndex i : InternalIndex::Range(data->capacity())) {
|
||||
Object element = data->Get(cage_base, i);
|
||||
Object element = data->Get(isolate, i);
|
||||
if (element == empty_element() || element == deleted_element()) continue;
|
||||
String string = String::cast(element);
|
||||
uint32_t hash = string.hash();
|
||||
InternalIndex insertion_index =
|
||||
new_data->FindInsertionEntry(cage_base, hash);
|
||||
InternalIndex insertion_index = new_data->FindInsertionEntry(isolate, hash);
|
||||
new_data->Set(insertion_index, string);
|
||||
}
|
||||
new_data->number_of_elements_ = data->number_of_elements();
|
||||
@ -267,7 +265,7 @@ InternalIndex StringTable::Data::FindEntry(LocalIsolate* isolate,
|
||||
}
|
||||
}
|
||||
|
||||
InternalIndex StringTable::Data::FindInsertionEntry(PtrComprCageBase cage_base,
|
||||
InternalIndex StringTable::Data::FindInsertionEntry(IsolateRoot isolate,
|
||||
uint32_t hash) const {
|
||||
uint32_t count = 1;
|
||||
// EnsureCapacity will guarantee the hash table is never full.
|
||||
@ -275,7 +273,7 @@ InternalIndex StringTable::Data::FindInsertionEntry(PtrComprCageBase cage_base,
|
||||
entry = NextProbe(entry, count++, capacity_)) {
|
||||
// TODO(leszeks): Consider delaying the decompression until after the
|
||||
// comparisons against empty/deleted.
|
||||
Object element = Get(cage_base, entry);
|
||||
Object element = Get(isolate, entry);
|
||||
if (element == empty_element() || element == deleted_element())
|
||||
return entry;
|
||||
}
|
||||
@ -316,12 +314,11 @@ void StringTable::Data::IterateElements(RootVisitor* visitor) {
|
||||
visitor->VisitRootPointers(Root::kStringTable, nullptr, first_slot, end_slot);
|
||||
}
|
||||
|
||||
void StringTable::Data::Print(PtrComprCageBase cage_base) const {
|
||||
void StringTable::Data::Print(IsolateRoot isolate) const {
|
||||
OFStream os(stdout);
|
||||
os << "StringTable {" << std::endl;
|
||||
for (InternalIndex i : InternalIndex::Range(capacity_)) {
|
||||
os << " " << i.as_uint32() << ": " << Brief(Get(cage_base, i))
|
||||
<< std::endl;
|
||||
os << " " << i.as_uint32() << ": " << Brief(Get(isolate, i)) << std::endl;
|
||||
}
|
||||
os << "}" << std::endl;
|
||||
}
|
||||
@ -533,7 +530,7 @@ template Handle<String> StringTable::LookupKey(LocalIsolate* isolate,
|
||||
template Handle<String> StringTable::LookupKey(Isolate* isolate,
|
||||
StringTableInsertionKey* key);
|
||||
|
||||
StringTable::Data* StringTable::EnsureCapacity(PtrComprCageBase cage_base,
|
||||
StringTable::Data* StringTable::EnsureCapacity(IsolateRoot isolate,
|
||||
int additional_elements) {
|
||||
// This call is only allowed while the write mutex is held.
|
||||
write_mutex_.AssertHeld();
|
||||
@ -563,7 +560,7 @@ StringTable::Data* StringTable::EnsureCapacity(PtrComprCageBase cage_base,
|
||||
|
||||
if (new_capacity != -1) {
|
||||
std::unique_ptr<Data> new_data =
|
||||
Data::Resize(cage_base, std::unique_ptr<Data>(data), new_capacity);
|
||||
Data::Resize(isolate, std::unique_ptr<Data>(data), new_capacity);
|
||||
// `new_data` is the new owner of `data`.
|
||||
DCHECK_EQ(new_data->PreviousData(), data);
|
||||
// Release-store the new data pointer as `data_`, so that it can be
|
||||
@ -672,8 +669,8 @@ Address StringTable::TryStringToIndexOrLookupExisting(Isolate* isolate,
|
||||
isolate, string, source, start);
|
||||
}
|
||||
|
||||
void StringTable::Print(PtrComprCageBase cage_base) const {
|
||||
data_.load(std::memory_order_acquire)->Print(cage_base);
|
||||
void StringTable::Print(IsolateRoot isolate) const {
|
||||
data_.load(std::memory_order_acquire)->Print(isolate);
|
||||
}
|
||||
|
||||
size_t StringTable::GetCurrentMemoryUsage() const {
|
||||
|
@ -72,7 +72,7 @@ class V8_EXPORT_PRIVATE StringTable {
|
||||
static Address TryStringToIndexOrLookupExisting(Isolate* isolate,
|
||||
Address raw_string);
|
||||
|
||||
void Print(PtrComprCageBase cage_base) const;
|
||||
void Print(IsolateRoot isolate) const;
|
||||
size_t GetCurrentMemoryUsage() const;
|
||||
|
||||
// The following methods must be called either while holding the write lock,
|
||||
@ -84,7 +84,7 @@ class V8_EXPORT_PRIVATE StringTable {
|
||||
private:
|
||||
class Data;
|
||||
|
||||
Data* EnsureCapacity(PtrComprCageBase cage_base, int additional_elements);
|
||||
Data* EnsureCapacity(IsolateRoot isolate, int additional_elements);
|
||||
|
||||
std::atomic<Data*> data_;
|
||||
// Write mutex is mutable so that readers of concurrently mutated values (e.g.
|
||||
|
@ -1289,7 +1289,7 @@ Object String::LastIndexOf(Isolate* isolate, Handle<Object> receiver,
|
||||
bool String::HasOneBytePrefix(Vector<const char> str) {
|
||||
DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this));
|
||||
return IsEqualToImpl<EqualityType::kPrefix>(
|
||||
str, GetPtrComprCageBase(*this),
|
||||
str, GetIsolateForPtrCompr(*this),
|
||||
SharedStringAccessGuardIfNeeded::NotNeeded());
|
||||
}
|
||||
|
||||
|
@ -332,7 +332,7 @@ class String : public TorqueGeneratedString<String, Name> {
|
||||
// whole string or just a prefix.
|
||||
//
|
||||
// This is main-thread only, like the Isolate* overload, but additionally
|
||||
// computes the PtrComprCageBase for IsEqualToImpl.
|
||||
// computes the IsolateRoot for IsEqualToImpl.
|
||||
template <EqualityType kEqType = EqualityType::kWholeString, typename Char>
|
||||
inline bool IsEqualTo(Vector<const Char> str) const;
|
||||
|
||||
@ -546,15 +546,14 @@ class String : public TorqueGeneratedString<String, Name> {
|
||||
// Implementation of the IsEqualTo() public methods. Do not use directly.
|
||||
template <EqualityType kEqType, typename Char>
|
||||
V8_INLINE bool IsEqualToImpl(
|
||||
Vector<const Char> str, PtrComprCageBase cage_base,
|
||||
Vector<const Char> str, IsolateRoot isolate,
|
||||
const SharedStringAccessGuardIfNeeded& access_guard) const;
|
||||
|
||||
// Out-of-line IsEqualToImpl for ConsString.
|
||||
template <typename Char>
|
||||
V8_NOINLINE static bool IsConsStringEqualToImpl(
|
||||
ConsString string, int slice_offset, Vector<const Char> str,
|
||||
PtrComprCageBase cage_base,
|
||||
const SharedStringAccessGuardIfNeeded& access_guard);
|
||||
IsolateRoot isolate, const SharedStringAccessGuardIfNeeded& access_guard);
|
||||
|
||||
V8_EXPORT_PRIVATE static Handle<String> SlowFlatten(
|
||||
Isolate* isolate, Handle<ConsString> cons, AllocationType allocation);
|
||||
|
@ -219,15 +219,15 @@ InternalIndex SwissNameDictionary::FindEntry(LocalIsolate* isolate,
|
||||
}
|
||||
|
||||
Object SwissNameDictionary::LoadFromDataTable(int entry, int data_offset) {
|
||||
return LoadFromDataTable(GetPtrComprCageBase(*this), entry, data_offset);
|
||||
return LoadFromDataTable(GetIsolateForPtrCompr(*this), entry, data_offset);
|
||||
}
|
||||
|
||||
Object SwissNameDictionary::LoadFromDataTable(PtrComprCageBase cage_base,
|
||||
int entry, int data_offset) {
|
||||
Object SwissNameDictionary::LoadFromDataTable(IsolateRoot isolate, int entry,
|
||||
int data_offset) {
|
||||
DCHECK_LT(static_cast<unsigned>(entry), static_cast<unsigned>(Capacity()));
|
||||
int offset = DataTableStartOffset() +
|
||||
(entry * kDataTableEntryCount + data_offset) * kTaggedSize;
|
||||
return TaggedField<Object>::Relaxed_Load(cage_base, *this, offset);
|
||||
return TaggedField<Object>::Relaxed_Load(isolate, *this, offset);
|
||||
}
|
||||
|
||||
void SwissNameDictionary::StoreToDataTable(int entry, int data_offset,
|
||||
|
@ -306,8 +306,7 @@ class V8_EXPORT_PRIVATE SwissNameDictionary : public HeapObject {
|
||||
inline ctrl_t GetCtrl(int entry);
|
||||
|
||||
inline Object LoadFromDataTable(int entry, int data_offset);
|
||||
inline Object LoadFromDataTable(PtrComprCageBase cage_base, int entry,
|
||||
int data_offset);
|
||||
inline Object LoadFromDataTable(IsolateRoot root, int entry, int data_offset);
|
||||
inline void StoreToDataTable(int entry, int data_offset, Object data);
|
||||
inline void StoreToDataTableNoBarrier(int entry, int data_offset,
|
||||
Object data);
|
||||
|
@ -61,10 +61,10 @@ T TaggedField<T, kFieldOffset>::load(HeapObject host, int offset) {
|
||||
|
||||
// static
|
||||
template <typename T, int kFieldOffset>
|
||||
T TaggedField<T, kFieldOffset>::load(PtrComprCageBase cage_base,
|
||||
HeapObject host, int offset) {
|
||||
T TaggedField<T, kFieldOffset>::load(IsolateRoot isolate, HeapObject host,
|
||||
int offset) {
|
||||
Tagged_t value = *location(host, offset);
|
||||
return T(tagged_to_full(cage_base, value));
|
||||
return T(tagged_to_full(isolate, value));
|
||||
}
|
||||
|
||||
// static
|
||||
@ -96,10 +96,10 @@ T TaggedField<T, kFieldOffset>::Relaxed_Load(HeapObject host, int offset) {
|
||||
|
||||
// static
|
||||
template <typename T, int kFieldOffset>
|
||||
T TaggedField<T, kFieldOffset>::Relaxed_Load(PtrComprCageBase cage_base,
|
||||
T TaggedField<T, kFieldOffset>::Relaxed_Load(IsolateRoot isolate,
|
||||
HeapObject host, int offset) {
|
||||
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location(host, offset));
|
||||
return T(tagged_to_full(cage_base, value));
|
||||
return T(tagged_to_full(isolate, value));
|
||||
}
|
||||
|
||||
// static
|
||||
@ -125,10 +125,10 @@ T TaggedField<T, kFieldOffset>::Acquire_Load(HeapObject host, int offset) {
|
||||
|
||||
// static
|
||||
template <typename T, int kFieldOffset>
|
||||
T TaggedField<T, kFieldOffset>::Acquire_Load(PtrComprCageBase cage_base,
|
||||
T TaggedField<T, kFieldOffset>::Acquire_Load(IsolateRoot isolate,
|
||||
HeapObject host, int offset) {
|
||||
AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location(host, offset));
|
||||
return T(tagged_to_full(cage_base, value));
|
||||
return T(tagged_to_full(isolate, value));
|
||||
}
|
||||
|
||||
// static
|
||||
|
@ -38,21 +38,20 @@ class TaggedField : public AllStatic {
|
||||
static inline Address address(HeapObject host, int offset = 0);
|
||||
|
||||
static inline T load(HeapObject host, int offset = 0);
|
||||
static inline T load(PtrComprCageBase cage_base, HeapObject host,
|
||||
int offset = 0);
|
||||
static inline T load(IsolateRoot isolate, HeapObject host, int offset = 0);
|
||||
|
||||
static inline void store(HeapObject host, T value);
|
||||
static inline void store(HeapObject host, int offset, T value);
|
||||
|
||||
static inline T Relaxed_Load(HeapObject host, int offset = 0);
|
||||
static inline T Relaxed_Load(PtrComprCageBase cage_base, HeapObject host,
|
||||
static inline T Relaxed_Load(IsolateRoot isolate, HeapObject host,
|
||||
int offset = 0);
|
||||
|
||||
static inline void Relaxed_Store(HeapObject host, T value);
|
||||
static inline void Relaxed_Store(HeapObject host, int offset, T value);
|
||||
|
||||
static inline T Acquire_Load(HeapObject host, int offset = 0);
|
||||
static inline T Acquire_Load(PtrComprCageBase cage_base, HeapObject host,
|
||||
static inline T Acquire_Load(IsolateRoot isolate, HeapObject host,
|
||||
int offset = 0);
|
||||
|
||||
static inline void Release_Store(HeapObject host, T value);
|
||||
|
@ -45,13 +45,13 @@ RELEASE_ACQUIRE_ACCESSORS(FunctionTemplateInfo, call_code, HeapObject,
|
||||
|
||||
// TODO(nicohartmann@, v8:11122): Let Torque generate this accessor.
|
||||
HeapObject FunctionTemplateInfo::rare_data(AcquireLoadTag) const {
|
||||
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||
return rare_data(cage_base, kAcquireLoad);
|
||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
||||
return rare_data(isolate, kAcquireLoad);
|
||||
}
|
||||
HeapObject FunctionTemplateInfo::rare_data(PtrComprCageBase cage_base,
|
||||
HeapObject FunctionTemplateInfo::rare_data(IsolateRoot isolate,
|
||||
AcquireLoadTag) const {
|
||||
HeapObject value =
|
||||
TaggedField<HeapObject>::Acquire_Load(cage_base, *this, kRareDataOffset);
|
||||
TaggedField<HeapObject>::Acquire_Load(isolate, *this, kRareDataOffset);
|
||||
DCHECK(value.IsUndefined() || value.IsFunctionTemplateRareData());
|
||||
return value;
|
||||
}
|
||||
@ -75,8 +75,8 @@ FunctionTemplateRareData FunctionTemplateInfo::EnsureFunctionTemplateRareData(
|
||||
|
||||
#define RARE_ACCESSORS(Name, CamelName, Type, Default) \
|
||||
DEF_GETTER(FunctionTemplateInfo, Get##CamelName, Type) { \
|
||||
HeapObject extra = rare_data(cage_base, kAcquireLoad); \
|
||||
HeapObject undefined = GetReadOnlyRoots(cage_base).undefined_value(); \
|
||||
HeapObject extra = rare_data(isolate, kAcquireLoad); \
|
||||
HeapObject undefined = GetReadOnlyRoots(isolate).undefined_value(); \
|
||||
return extra == undefined ? Default \
|
||||
: FunctionTemplateRareData::cast(extra).Name(); \
|
||||
} \
|
||||
|
@ -92,7 +92,7 @@ class FunctionTemplateInfo
|
||||
|
||||
// TODO(nicohartmann@, v8:11122): Let Torque generate the following accessor.
|
||||
inline HeapObject rare_data(AcquireLoadTag) const;
|
||||
inline HeapObject rare_data(PtrComprCageBase cage_base, AcquireLoadTag) const;
|
||||
inline HeapObject rare_data(IsolateRoot isolate, AcquireLoadTag) const;
|
||||
inline void set_rare_data(
|
||||
HeapObject value, ReleaseStoreTag,
|
||||
WriteBarrierMode mode = WriteBarrierMode::UPDATE_WRITE_BARRIER);
|
||||
|
@ -1508,10 +1508,10 @@ class RootsReferencesExtractor : public RootVisitor {
|
||||
OffHeapObjectSlot start,
|
||||
OffHeapObjectSlot end) override {
|
||||
DCHECK_EQ(root, Root::kStringTable);
|
||||
PtrComprCageBase cage_base = Isolate::FromHeap(explorer_->heap_);
|
||||
IsolateRoot isolate = Isolate::FromHeap(explorer_->heap_);
|
||||
for (OffHeapObjectSlot p = start; p < end; ++p) {
|
||||
explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
|
||||
p.load(cage_base));
|
||||
p.load(isolate));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -386,10 +386,10 @@ void CCGenerator::EmitInstruction(const LoadReferenceInstruction& instruction,
|
||||
out() << " " << result_name << " = ";
|
||||
if (instruction.type->IsSubtypeOf(TypeOracle::GetTaggedType())) {
|
||||
// Currently, all of the tagged loads we emit are for smi values, so there
|
||||
// is no point in providing an PtrComprCageBase. If at some point we start
|
||||
// is no point in providing an IsolateRoot. If at some point we start
|
||||
// emitting loads for tagged fields which might be HeapObjects, then we
|
||||
// should plumb an PtrComprCageBase through the generated functions that
|
||||
// need it.
|
||||
// should plumb an IsolateRoot through the generated functions that need
|
||||
// it.
|
||||
if (!instruction.type->IsSubtypeOf(TypeOracle::GetSmiType())) {
|
||||
Error(
|
||||
"Not supported in C++ output: LoadReference on non-smi tagged "
|
||||
|
@ -4223,9 +4223,8 @@ void CppClassGenerator::GenerateFieldAccessors(
|
||||
hdr_ << " inline " << type_name << " " << name << "("
|
||||
<< (indexed ? "int i" : "") << ") const;\n";
|
||||
if (can_contain_heap_objects) {
|
||||
hdr_ << " inline " << type_name << " " << name
|
||||
<< "(PtrComprCageBase cage_base" << (indexed ? ", int i" : "")
|
||||
<< ") const;\n";
|
||||
hdr_ << " inline " << type_name << " " << name << "(IsolateRoot isolate"
|
||||
<< (indexed ? ", int i" : "") << ") const;\n";
|
||||
}
|
||||
hdr_ << " inline void set_" << name << "(" << (indexed ? "int i, " : "")
|
||||
<< type_name << " value"
|
||||
@ -4234,14 +4233,14 @@ void CppClassGenerator::GenerateFieldAccessors(
|
||||
: "")
|
||||
<< ");\n\n";
|
||||
|
||||
// For tagged data, generate the extra getter that derives an PtrComprCageBase
|
||||
// from the current object's pointer.
|
||||
// For tagged data, generate the extra getter that derives an IsolateRoot from
|
||||
// the current object's pointer.
|
||||
if (can_contain_heap_objects) {
|
||||
inl_ << "template <class D, class P>\n";
|
||||
inl_ << type_name << " " << gen_name_ << "<D, P>::" << name << "("
|
||||
<< (indexed ? "int i" : "") << ") const {\n";
|
||||
inl_ << " PtrComprCageBase cage_base = GetPtrComprCageBase(*this);\n";
|
||||
inl_ << " return " << gen_name_ << "::" << name << "(cage_base"
|
||||
inl_ << " IsolateRoot isolate = GetIsolateForPtrCompr(*this);\n";
|
||||
inl_ << " return " << gen_name_ << "::" << name << "(isolate"
|
||||
<< (indexed ? ", i" : "") << ");\n";
|
||||
inl_ << "}\n";
|
||||
}
|
||||
@ -4249,7 +4248,7 @@ void CppClassGenerator::GenerateFieldAccessors(
|
||||
// Generate the getter implementation.
|
||||
inl_ << "template <class D, class P>\n";
|
||||
inl_ << type_name << " " << gen_name_ << "<D, P>::" << name << "(";
|
||||
if (can_contain_heap_objects) inl_ << "PtrComprCageBase cage_base";
|
||||
if (can_contain_heap_objects) inl_ << "IsolateRoot isolate";
|
||||
if (can_contain_heap_objects && indexed) inl_ << ", ";
|
||||
if (indexed) inl_ << "int i";
|
||||
inl_ << ") const {\n";
|
||||
@ -4362,11 +4361,10 @@ void CppClassGenerator::EmitLoadFieldStatement(
|
||||
bool is_smi = field_type->IsSubtypeOf(TypeOracle::GetSmiType());
|
||||
const std::string load_type = is_smi ? "Smi" : type_name;
|
||||
const char* postfix = is_smi ? ".value()" : "";
|
||||
const char* optional_cage_base = is_smi ? "" : "cage_base, ";
|
||||
const char* optional_isolate = is_smi ? "" : "isolate, ";
|
||||
|
||||
inl_ << "TaggedField<" << load_type << ">::" << load << "("
|
||||
<< optional_cage_base << "*this, " << offset << ")" << postfix
|
||||
<< ";\n";
|
||||
<< optional_isolate << "*this, " << offset << ")" << postfix << ";\n";
|
||||
}
|
||||
|
||||
if (CanContainHeapObjects(field_type)) {
|
||||
|
@ -59,13 +59,13 @@ CAST_ACCESSOR(WasmTypeInfo)
|
||||
CAST_ACCESSOR(WasmStruct)
|
||||
CAST_ACCESSOR(WasmArray)
|
||||
|
||||
#define OPTIONAL_ACCESSORS(holder, name, type, offset) \
|
||||
DEF_GETTER(holder, has_##name, bool) { \
|
||||
Object value = TaggedField<Object, offset>::load(cage_base, *this); \
|
||||
return !value.IsUndefined(GetReadOnlyRoots(cage_base)); \
|
||||
} \
|
||||
ACCESSORS_CHECKED2(holder, name, type, offset, \
|
||||
!value.IsUndefined(GetReadOnlyRoots(cage_base)), true)
|
||||
#define OPTIONAL_ACCESSORS(holder, name, type, offset) \
|
||||
DEF_GETTER(holder, has_##name, bool) { \
|
||||
Object value = TaggedField<Object, offset>::load(isolate, *this); \
|
||||
return !value.IsUndefined(GetReadOnlyRoots(isolate)); \
|
||||
} \
|
||||
ACCESSORS_CHECKED2(holder, name, type, offset, \
|
||||
!value.IsUndefined(GetReadOnlyRoots(isolate)), true)
|
||||
|
||||
#define PRIMITIVE_ACCESSORS(holder, name, type, offset) \
|
||||
type holder::name() const { \
|
||||
@ -460,12 +460,6 @@ int WasmArray::GcSafeSizeFor(Map map, int length) {
|
||||
|
||||
void WasmTypeInfo::clear_foreign_address(Isolate* isolate) {
|
||||
#ifdef V8_HEAP_SANDBOX
|
||||
|
||||
// TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
|
||||
#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
|
||||
#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
|
||||
#endif
|
||||
|
||||
// Due to the type-specific pointer tags for external pointers, we need to
|
||||
// allocate an entry in the table here even though it will just store nullptr.
|
||||
AllocateExternalPointerEntries(isolate);
|
||||
|
@ -14,7 +14,7 @@ namespace debug_helper_internal {
|
||||
|
||||
bool IsPointerCompressed(uintptr_t address) {
|
||||
#if COMPRESS_POINTERS_BOOL
|
||||
return address < i::kPtrComprCageReservationSize;
|
||||
return address < i::kPtrComprHeapReservationSize;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
|
@ -348,7 +348,7 @@ class ReadStringVisitor : public TqObjectVisitor {
|
||||
GetOrFinish(object->GetResourceDataValue(accessor_));
|
||||
#ifdef V8_COMPRESS_POINTERS
|
||||
uintptr_t data_address = static_cast<uintptr_t>(
|
||||
DecodeExternalPointer(GetPtrComprCageBaseFromOnHeapAddress(
|
||||
DecodeExternalPointer(GetIsolateForPtrComprFromOnHeapAddress(
|
||||
heap_addresses_.any_heap_pointer),
|
||||
resource_data, kExternalStringResourceDataTag));
|
||||
#else
|
||||
|
Loading…
Reference in New Issue
Block a user