Reland^2 "[ptr-cage] Rename IsolateRoot to PtrComprCageBase"
This is a reland of e28dadc207
The original failure was due to a stale Win32 bot. The reland failure
was due to idempotent task deduplication returning the exact same
failure. See crbug/1196064
Original change's description:
> [ptr-cage] Rename IsolateRoot to PtrComprCageBase
>
> Currently, IsolateRoot is both the address of the Isolate root and the
> base address of the pointer compression reservation. This CL teases the
> two uses apart by renaming IsolateRoot to PtrComprCageBase.
>
> - In addition to V8_COMPRESS_POINTERS, add a
> V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE (vs SHARED_CAGE).
>
> - Rename GetIsolate* helpers to GetPtrComprCageBase. When
> V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE is true, the helpers remain as
> aliases to GetPtrComprCageBase.
>
> - Rename kPtrComprIsolateRootAlignment to kPtrComprCageBaseAlignment.
>
> Bug: v8:11460
> Change-Id: I1d715f678ce9a0b5731895612ca14f56579b1c48
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2783672
> Commit-Queue: Shu-yu Guo <syg@chromium.org>
> Auto-Submit: Shu-yu Guo <syg@chromium.org>
> Reviewed-by: Igor Sheludko <ishell@chromium.org>
> Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#73790}
Bug: v8:11460
No-Try: true
Tbr: ishell@chromium.org
Tbr: rmcilroy@chromium.org
Change-Id: Id69311cf3267ebe1297fff159de0be48b15b65a3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2806546
Reviewed-by: Shu-yu Guo <syg@chromium.org>
Commit-Queue: Shu-yu Guo <syg@chromium.org>
Cr-Commit-Position: refs/heads/master@{#73795}
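
For context, the core of the rename is that the "cage base" is simply the 4GB-aligned base of the pointer compression reservation, recovered by masking any on-heap address; with V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE it coincides with the Isolate root. The following is a minimal sketch of the decompression path this CL renames (simplified from the helpers in the diff below; the type aliases and constant here are illustrative stand-ins, not the exact V8 declarations):

    #include <cstddef>
    #include <cstdint>

    // Simplified stand-ins for V8-internal types (illustration only).
    using Address = uintptr_t;
    using Tagged_t = uint32_t;

    // 4GB cage: base alignment, mirroring kPtrComprCageBaseAlignment.
    constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;

    // The cage base is the enclosing 4GB-aligned address; with a per-Isolate
    // cage this is also the Isolate root.
    constexpr Address GetPtrComprCageBaseAddress(Address on_heap_addr) {
      return on_heap_addr & ~(kPtrComprCageBaseAlignment - 1);
    }

    // Decompressing a tagged pointer adds the 32-bit compressed value to the
    // cage base derived from any address inside the same cage.
    inline Address DecompressTaggedPointer(Address on_heap_addr,
                                           Tagged_t raw_value) {
      return GetPtrComprCageBaseAddress(on_heap_addr) +
             static_cast<Address>(raw_value);
    }
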
This commit is contained in:
commit 627b6b2f06 (parent 4b90ad752a)

BUILD.gn
@@ -404,6 +404,10 @@ if (v8_enable_shared_ro_heap && v8_enable_pointer_compression) {
       "Sharing read-only heap with pointer compression is only supported on Linux or Android")
 }
 
+assert(
+    !v8_enable_pointer_compression_shared_cage || !v8_enable_shared_ro_heap,
+    "Sharing read-only heap is not yet supported when sharing a pointer compression cage")
+
 assert(!v8_use_multi_snapshots || !v8_control_flow_integrity,
        "Control-flow integrity does not support multisnapshots")
 
@@ -554,6 +558,7 @@ external_v8_defines = [
   "V8_ENABLE_CHECKS",
   "V8_COMPRESS_POINTERS",
   "V8_COMPRESS_POINTERS_IN_SHARED_CAGE",
+  "V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE",
   "V8_31BIT_SMIS_ON_64BIT_ARCH",
   "V8_COMPRESS_ZONES",
   "V8_HEAP_SANDBOX",
@@ -573,6 +578,8 @@ if (v8_enable_pointer_compression) {
 }
 if (v8_enable_pointer_compression_shared_cage) {
   enabled_external_v8_defines += [ "V8_COMPRESS_POINTERS_IN_SHARED_CAGE" ]
+} else if (v8_enable_pointer_compression) {
+  enabled_external_v8_defines += [ "V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE" ]
 }
 if (v8_enable_pointer_compression || v8_enable_31bit_smis_on_64bit_arch) {
   enabled_external_v8_defines += [ "V8_31BIT_SMIS_ON_64BIT_ARCH" ]
 
@@ -358,8 +358,9 @@ class Internals {
       internal::Address heap_object_ptr, int offset) {
 #ifdef V8_COMPRESS_POINTERS
     uint32_t value = ReadRawField<uint32_t>(heap_object_ptr, offset);
-    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
-    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+    internal::Address base =
+        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
 #else
     return ReadRawField<internal::Address>(heap_object_ptr, offset);
 #endif
@@ -411,18 +412,19 @@ class Internals {
 
 #ifdef V8_COMPRESS_POINTERS
   // See v8:7703 or src/ptr-compr.* for details about pointer compression.
-  static constexpr size_t kPtrComprHeapReservationSize = size_t{1} << 32;
-  static constexpr size_t kPtrComprIsolateRootAlignment = size_t{1} << 32;
+  static constexpr size_t kPtrComprCageReservationSize = size_t{1} << 32;
+  static constexpr size_t kPtrComprCageBaseAlignment = size_t{1} << 32;
 
-  V8_INLINE static internal::Address GetRootFromOnHeapAddress(
+  V8_INLINE static internal::Address GetPtrComprCageBaseFromOnHeapAddress(
       internal::Address addr) {
-    return addr & -static_cast<intptr_t>(kPtrComprIsolateRootAlignment);
+    return addr & -static_cast<intptr_t>(kPtrComprCageBaseAlignment);
   }
 
   V8_INLINE static internal::Address DecompressTaggedAnyField(
       internal::Address heap_object_ptr, uint32_t value) {
-    internal::Address root = GetRootFromOnHeapAddress(heap_object_ptr);
-    return root + static_cast<internal::Address>(static_cast<uintptr_t>(value));
+    internal::Address base =
+        GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
+    return base + static_cast<internal::Address>(static_cast<uintptr_t>(value));
   }
 
 #endif  // V8_COMPRESS_POINTERS
 
@@ -661,7 +661,7 @@ void ArrayLiteral::BuildBoilerplateDescription(LocalIsolate* isolate) {
           boilerplate_descriptor_kind(),
           GetMoreGeneralElementsKind(boilerplate_descriptor_kind(),
                                      boilerplate_value.OptimalElementsKind(
-                                         GetIsolateForPtrCompr(*elements))));
+                                         GetPtrComprCageBase(*elements))));
 
       FixedArray::cast(*elements).set(array_index, boilerplate_value);
     }
@@ -370,14 +370,14 @@ void TypedArrayBuiltinsAssembler::SetJSTypedArrayOnHeapDataPtr(
     TNode<IntPtrT> full_base = Signed(BitcastTaggedToWord(base));
     TNode<Int32T> compressed_base = TruncateIntPtrToInt32(full_base);
     // TODO(v8:9706): Add a way to directly use kRootRegister value.
-    TNode<IntPtrT> isolate_root =
+    TNode<IntPtrT> ptr_compr_cage_base =
         IntPtrSub(full_base, Signed(ChangeUint32ToWord(compressed_base)));
     // Add JSTypedArray::ExternalPointerCompensationForOnHeapArray() to offset.
     DCHECK_EQ(
         isolate()->isolate_root(),
         JSTypedArray::ExternalPointerCompensationForOnHeapArray(isolate()));
     // See JSTypedArray::SetOnHeapDataPtr() for details.
-    offset = Unsigned(IntPtrAdd(offset, isolate_root));
+    offset = Unsigned(IntPtrAdd(offset, ptr_compr_cage_base));
   }
 
   StoreJSTypedArrayBasePointer(holder, base);
@@ -12,11 +12,17 @@
 namespace v8 {
 namespace internal {
 
-V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate_root,
+V8_INLINE Address DecodeExternalPointer(PtrComprCageBase isolate_root,
                                         ExternalPointer_t encoded_pointer,
                                         ExternalPointerTag tag) {
   STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize);
 #ifdef V8_HEAP_SANDBOX
+
+  // TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
+#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
+#endif
+
   uint32_t index = static_cast<uint32_t>(encoded_pointer);
   const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());
   return isolate->external_pointer_table().get(index) ^ tag;
@@ -62,7 +68,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate,
 }
 
 V8_INLINE Address ReadExternalPointerField(Address field_address,
-                                           IsolateRoot isolate_root,
+                                           PtrComprCageBase cage_base,
                                            ExternalPointerTag tag) {
   // Pointer compression causes types larger than kTaggedSize to be unaligned.
   constexpr bool v8_pointer_compression_unaligned =
@@ -73,7 +79,7 @@ V8_INLINE Address ReadExternalPointerField(Address field_address,
   } else {
     encoded_value = base::Memory<ExternalPointer_t>(field_address);
   }
-  return DecodeExternalPointer(isolate_root, encoded_value, tag);
+  return DecodeExternalPointer(cage_base, encoded_value, tag);
 }
 
 V8_INLINE void WriteExternalPointerField(Address field_address,
@@ -12,7 +12,7 @@ namespace internal {
 
 // Convert external pointer from on-V8-heap representation to an actual external
 // pointer value.
-V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate,
+V8_INLINE Address DecodeExternalPointer(PtrComprCageBase isolate,
                                         ExternalPointer_t encoded_pointer,
                                         ExternalPointerTag tag);
 
@@ -34,7 +34,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate,
 // Reads external pointer for the field, and decodes it if the sandbox is
 // enabled.
 V8_INLINE Address ReadExternalPointerField(Address field_address,
-                                           IsolateRoot isolate,
+                                           PtrComprCageBase isolate,
                                            ExternalPointerTag tag);
 
 // Encodes value if the sandbox is enabled and writes it into the field.
@@ -1748,13 +1748,13 @@ enum class DynamicCheckMapsStatus : uint8_t {
 };
 
 #ifdef V8_COMPRESS_POINTERS
-class IsolateRoot {
+class PtrComprCageBase {
  public:
-  explicit constexpr IsolateRoot(Address address) : address_(address) {}
+  explicit constexpr PtrComprCageBase(Address address) : address_(address) {}
   // NOLINTNEXTLINE
-  inline IsolateRoot(const Isolate* isolate);
+  inline PtrComprCageBase(const Isolate* isolate);
   // NOLINTNEXTLINE
-  inline IsolateRoot(const LocalIsolate* isolate);
+  inline PtrComprCageBase(const LocalIsolate* isolate);
 
   inline Address address() const;
 
@@ -1762,13 +1762,13 @@ class IsolateRoot {
   Address address_;
 };
 #else
-class IsolateRoot {
+class PtrComprCageBase {
  public:
-  IsolateRoot() = default;
+  PtrComprCageBase() = default;
   // NOLINTNEXTLINE
-  IsolateRoot(const Isolate* isolate) {}
+  PtrComprCageBase(const Isolate* isolate) {}
   // NOLINTNEXTLINE
-  IsolateRoot(const LocalIsolate* isolate) {}
+  PtrComprCageBase(const LocalIsolate* isolate) {}
 };
 #endif
 
@@ -15,15 +15,35 @@ namespace internal {
 
 #ifdef V8_COMPRESS_POINTERS
 
-IsolateRoot::IsolateRoot(const Isolate* isolate)
+#if defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
+
+PtrComprCageBase::PtrComprCageBase(const Isolate* isolate)
     : address_(isolate->isolate_root()) {}
-IsolateRoot::IsolateRoot(const LocalIsolate* isolate)
+PtrComprCageBase::PtrComprCageBase(const LocalIsolate* isolate)
     : address_(isolate->isolate_root()) {}
 
-Address IsolateRoot::address() const {
+#elif defined V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+
+PtrComprCageBase::PtrComprCageBase(const Isolate* isolate)
+    : address_(isolate->isolate_root()) {
+  UNIMPLEMENTED();
+}
+PtrComprCageBase::PtrComprCageBase(const LocalIsolate* isolate)
+    : address_(isolate->isolate_root()) {
+  UNIMPLEMENTED();
+}
+
+#else
+
+#error "Pointer compression build configuration error"
+
+#endif  // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE,
+        // V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+
+Address PtrComprCageBase::address() const {
   Address ret = address_;
   ret = reinterpret_cast<Address>(V8_ASSUME_ALIGNED(
-      reinterpret_cast<void*>(ret), kPtrComprIsolateRootAlignment));
+      reinterpret_cast<void*>(ret), kPtrComprCageBaseAlignment));
   return ret;
 }
 
@@ -33,12 +53,17 @@ V8_INLINE Tagged_t CompressTagged(Address tagged) {
   return static_cast<Tagged_t>(static_cast<uint32_t>(tagged));
 }
 
-V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) {
-  return RoundDown<kPtrComprIsolateRootAlignment>(on_heap_addr);
+V8_INLINE constexpr Address GetPtrComprCageBaseAddress(Address on_heap_addr) {
+  return RoundDown<kPtrComprCageBaseAlignment>(on_heap_addr);
 }
 
-V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) {
-  return isolate.address();
+V8_INLINE Address GetPtrComprCageBaseAddress(PtrComprCageBase cage_base) {
+  return cage_base.address();
+}
+
+V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(
+    Address address) {
+  return PtrComprCageBase(GetPtrComprCageBaseAddress(address));
 }
 
 // Decompresses smi value.
@@ -52,7 +77,8 @@ V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) {
 template <typename TOnHeapAddress>
 V8_INLINE Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr,
                                           Tagged_t raw_value) {
-  return GetIsolateRootAddress(on_heap_addr) + static_cast<Address>(raw_value);
+  return GetPtrComprCageBaseAddress(on_heap_addr) +
+         static_cast<Address>(raw_value);
 }
 
 // Decompresses any tagged value, preserving both weak- and smi- tags.
@@ -62,18 +88,19 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
   return DecompressTaggedPointer(on_heap_addr, raw_value);
 }
 
-STATIC_ASSERT(kPtrComprHeapReservationSize ==
-              Internals::kPtrComprHeapReservationSize);
-STATIC_ASSERT(kPtrComprIsolateRootAlignment ==
-              Internals::kPtrComprIsolateRootAlignment);
+STATIC_ASSERT(kPtrComprCageReservationSize ==
+              Internals::kPtrComprCageReservationSize);
+STATIC_ASSERT(kPtrComprCageBaseAlignment ==
+              Internals::kPtrComprCageBaseAlignment);
 
 #else
 
 V8_INLINE Tagged_t CompressTagged(Address tagged) { UNREACHABLE(); }
 
-V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); }
-
-V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) { UNREACHABLE(); }
+V8_INLINE constexpr PtrComprCageBase GetPtrComprCageBaseFromOnHeapAddress(
+    Address address) {
+  return PtrComprCageBase();
+}
 
 V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) { UNREACHABLE(); }
 
@@ -90,6 +117,11 @@ V8_INLINE Address DecompressTaggedAny(TOnHeapAddress on_heap_addr,
 }
 
 #endif  // V8_COMPRESS_POINTERS
 
+inline PtrComprCageBase GetPtrComprCageBase(HeapObject object) {
+  return GetPtrComprCageBaseFromOnHeapAddress(object.ptr());
+}
+
 }  // namespace internal
 }  // namespace v8
 
@@ -13,8 +13,8 @@ namespace v8 {
 namespace internal {
 
 // See v8:7703 for details about how pointer compression works.
-constexpr size_t kPtrComprHeapReservationSize = size_t{4} * GB;
-constexpr size_t kPtrComprIsolateRootAlignment = size_t{4} * GB;
+constexpr size_t kPtrComprCageReservationSize = size_t{4} * GB;
+constexpr size_t kPtrComprCageBaseAlignment = size_t{4} * GB;
 
 }  // namespace internal
 }  // namespace v8
@@ -1275,8 +1275,7 @@ int TranslatedState::CreateNextTranslatedValue(
 
 Address TranslatedState::DecompressIfNeeded(intptr_t value) {
   if (COMPRESS_POINTERS_BOOL) {
-    return DecompressTaggedAny(isolate()->isolate_root(),
-                               static_cast<uint32_t>(value));
+    return DecompressTaggedAny(isolate(), static_cast<uint32_t>(value));
   } else {
     return value;
   }
@@ -325,11 +325,11 @@ void BytecodeArray::BytecodeArrayVerify(Isolate* isolate) {
 
 USE_TORQUE_VERIFIER(JSReceiver)
 
-bool JSObject::ElementsAreSafeToExamine(IsolateRoot isolate) const {
+bool JSObject::ElementsAreSafeToExamine(PtrComprCageBase cage_base) const {
   // If a GC was caused while constructing this object, the elements
   // pointer may point to a one pointer filler map.
-  return elements(isolate) !=
-         GetReadOnlyRoots(isolate).one_pointer_filler_map();
+  return elements(cage_base) !=
+         GetReadOnlyRoots(cage_base).one_pointer_filler_map();
 }
 
 namespace {
@@ -468,13 +468,13 @@ void PrintSloppyArgumentElements(std::ostream& os, ElementsKind kind,
   }
 }
 
-void PrintEmbedderData(IsolateRoot isolate, std::ostream& os,
+void PrintEmbedderData(PtrComprCageBase cage_base, std::ostream& os,
                        EmbedderDataSlot slot) {
   DisallowGarbageCollection no_gc;
   Object value = slot.load_tagged();
   os << Brief(value);
   void* raw_pointer;
-  if (slot.ToAlignedPointer(isolate, &raw_pointer)) {
+  if (slot.ToAlignedPointer(cage_base, &raw_pointer)) {
     os << ", aligned pointer: " << raw_pointer;
   }
 }
@@ -579,11 +579,11 @@ static void JSObjectPrintBody(std::ostream& os,
   }
   int embedder_fields = obj.GetEmbedderFieldCount();
   if (embedder_fields > 0) {
-    IsolateRoot isolate = GetIsolateForPtrCompr(obj);
+    PtrComprCageBase cage_base = GetPtrComprCageBase(obj);
    os << " - embedder fields = {";
    for (int i = 0; i < embedder_fields; i++) {
      os << "\n    ";
-      PrintEmbedderData(isolate, os, EmbedderDataSlot(obj, i));
+      PrintEmbedderData(cage_base, os, EmbedderDataSlot(obj, i));
    }
    os << "\n }\n";
  }
@@ -762,14 +762,14 @@ void ObjectBoilerplateDescription::ObjectBoilerplateDescriptionPrint(
 }
 
 void EmbedderDataArray::EmbedderDataArrayPrint(std::ostream& os) {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
   PrintHeader(os, "EmbedderDataArray");
   os << "\n - length: " << length();
   EmbedderDataSlot start(*this, 0);
   EmbedderDataSlot end(*this, length());
   for (EmbedderDataSlot slot = start; slot < end; ++slot) {
     os << "\n    ";
-    PrintEmbedderData(isolate, os, slot);
+    PrintEmbedderData(cage_base, os, slot);
   }
   os << "\n";
 }
@@ -2747,12 +2747,11 @@ namespace {
 inline i::Object GetObjectFromRaw(void* object) {
   i::Address object_ptr = reinterpret_cast<i::Address>(object);
 #ifdef V8_COMPRESS_POINTERS
-  if (RoundDown<i::kPtrComprIsolateRootAlignment>(object_ptr) ==
-      i::kNullAddress) {
+  if (RoundDown<i::kPtrComprCageBaseAlignment>(object_ptr) == i::kNullAddress) {
     // Try to decompress pointer.
     i::Isolate* isolate = i::Isolate::Current();
-    object_ptr = i::DecompressTaggedAny(isolate->isolate_root(),
-                                        static_cast<i::Tagged_t>(object_ptr));
+    object_ptr =
+        i::DecompressTaggedAny(isolate, static_cast<i::Tagged_t>(object_ptr));
   }
 #endif
   return i::Object(object_ptr);
@@ -13,26 +13,36 @@
 namespace v8 {
 namespace internal {
 
-inline constexpr IsolateRoot GetIsolateForPtrComprFromOnHeapAddress(
-    Address address) {
-#ifdef V8_COMPRESS_POINTERS
-  return IsolateRoot(GetIsolateRootAddress(address));
-#else
-  return IsolateRoot();
-#endif  // V8_COMPRESS_POINTERS
-}
+#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
 
-inline IsolateRoot GetIsolateForPtrCompr(HeapObject object) {
-  return GetIsolateForPtrComprFromOnHeapAddress(object.ptr());
+// Aliases for GetPtrComprCageBase when
+// V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE. Each Isolate has its own cage, whose
+// base address is also the Isolate root.
+V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) {
+  return GetPtrComprCageBaseAddress(on_heap_addr);
 }
 
+V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) {
+  return cage_base.address();
+}
+
+#else
+
+V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); }
+
+V8_INLINE Address GetIsolateRootAddress(PtrComprCageBase cage_base) {
+  UNREACHABLE();
+}
+
+#endif  // V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
+
 V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
   // Avoid using the below GetIsolateFromWritableObject because we want to be
   // able to get the heap, but not the isolate, for off-thread objects.
 
 #if defined V8_ENABLE_THIRD_PARTY_HEAP
   return Heap::GetIsolateFromWritableObject(object)->heap();
-#elif defined V8_COMPRESS_POINTERS
+#elif defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
   Isolate* isolate =
       Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()));
   DCHECK_NOT_NULL(isolate);
@@ -47,7 +57,7 @@ V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
 V8_INLINE Isolate* GetIsolateFromWritableObject(HeapObject object) {
 #ifdef V8_ENABLE_THIRD_PARTY_HEAP
   return Heap::GetIsolateFromWritableObject(object);
-#elif defined V8_COMPRESS_POINTERS
+#elif defined V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
   Isolate* isolate =
       Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()));
   DCHECK_NOT_NULL(isolate);
@@ -10,11 +10,12 @@
 namespace v8 {
 namespace internal {
 
-// Computes isolate from any read only or writable heap object. The resulting
-// value is intended to be used only as a hoisted computation of isolate root
-// inside trivial accessors for optmizing value decompression.
-// When pointer compression is disabled this function always returns nullptr.
-V8_INLINE IsolateRoot GetIsolateForPtrCompr(HeapObject object);
+// Computes the pointer compression cage base from any read only or writable
+// heap object. The resulting value is intended to be used only as a hoisted
+// computation of cage base inside trivial accessors for optimizing value
+// decompression. When pointer compression is disabled this function always
+// returns nullptr.
+V8_INLINE PtrComprCageBase GetPtrComprCageBase(HeapObject object);
 
 V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object);
 
@@ -2861,8 +2861,8 @@ Isolate* Isolate::New() {
   // Construct Isolate object in the allocated memory.
   void* isolate_ptr = isolate_allocator->isolate_memory();
   Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator));
-#ifdef V8_COMPRESS_POINTERS
-  DCHECK(IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
+#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
+  DCHECK(IsAligned(isolate->isolate_root(), kPtrComprCageBaseAlignment));
 #endif
 
 #ifdef DEBUG
@@ -151,6 +151,18 @@ struct MaybeBoolFlag {
 #define COMPRESS_POINTERS_BOOL false
 #endif
 
+#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
+#define COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL true
+#else
+#define COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL false
+#endif
+
+#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+#define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL true
+#else
+#define COMPRESS_POINTERS_IN_SHARED_CAGE_BOOL false
+#endif
+
 #ifdef V8_HEAP_SANDBOX
 #define V8_HEAP_SANDBOX_BOOL true
 #else
@@ -382,11 +382,11 @@ namespace {
 
 void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) {
   int field_count = jsobject.GetEmbedderFieldCount();
-  IsolateRoot isolate = GetIsolateForPtrCompr(jsobject);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(jsobject);
   for (int i = 0; i < len; ++i) {
     if (field_count == i) break;
     void* pointer;
-    if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(isolate, &pointer)) {
+    if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(cage_base, &pointer)) {
       embedder_fields[i] = pointer;
     }
   }
@@ -289,7 +289,7 @@ size_t Heap::MinOldGenerationSize() {
 size_t Heap::AllocatorLimitOnMaxOldGenerationSize() {
 #ifdef V8_COMPRESS_POINTERS
   // Isolate and the young generation are also allocated on the heap.
-  return kPtrComprHeapReservationSize -
+  return kPtrComprCageReservationSize -
          YoungGenerationSizeFromSemiSpaceSize(kMaxSemiSpaceSize) -
          RoundUp(sizeof(Isolate), size_t{1} << kPageSizeBits);
 #endif
 
|
|||||||
}
|
}
|
||||||
|
|
||||||
template <AccessMode access_mode, typename TSlot>
|
template <AccessMode access_mode, typename TSlot>
|
||||||
static inline SlotCallbackResult UpdateSlot(IsolateRoot isolate, TSlot slot) {
|
static inline SlotCallbackResult UpdateSlot(PtrComprCageBase cage_base,
|
||||||
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
|
TSlot slot) {
|
||||||
|
typename TSlot::TObject obj = slot.Relaxed_Load(cage_base);
|
||||||
HeapObject heap_obj;
|
HeapObject heap_obj;
|
||||||
if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) {
|
if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) {
|
||||||
UpdateSlot<access_mode, HeapObjectReferenceType::WEAK>(slot, obj, heap_obj);
|
UpdateSlot<access_mode, HeapObjectReferenceType::WEAK>(slot, obj, heap_obj);
|
||||||
@@ -2717,9 +2718,9 @@ static inline SlotCallbackResult UpdateSlot(IsolateRoot isolate, TSlot slot) {
 }
 
 template <AccessMode access_mode, typename TSlot>
-static inline SlotCallbackResult UpdateStrongSlot(IsolateRoot isolate,
+static inline SlotCallbackResult UpdateStrongSlot(PtrComprCageBase cage_base,
                                                   TSlot slot) {
-  typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
+  typename TSlot::TObject obj = slot.Relaxed_Load(cage_base);
   DCHECK(!HAS_WEAK_HEAP_OBJECT_TAG(obj.ptr()));
   HeapObject heap_obj;
   if (obj.GetHeapObject(&heap_obj)) {
@@ -2735,39 +2736,40 @@ static inline SlotCallbackResult UpdateStrongSlot(IsolateRoot isolate,
 // It does not expect to encounter pointers to dead objects.
 class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
  public:
-  explicit PointersUpdatingVisitor(IsolateRoot isolate) : isolate_(isolate) {}
+  explicit PointersUpdatingVisitor(PtrComprCageBase cage_base)
+      : cage_base_(cage_base) {}
 
   void VisitPointer(HeapObject host, ObjectSlot p) override {
-    UpdateStrongSlotInternal(isolate_, p);
+    UpdateStrongSlotInternal(cage_base_, p);
   }
 
   void VisitPointer(HeapObject host, MaybeObjectSlot p) override {
-    UpdateSlotInternal(isolate_, p);
+    UpdateSlotInternal(cage_base_, p);
   }
 
   void VisitPointers(HeapObject host, ObjectSlot start,
                      ObjectSlot end) override {
     for (ObjectSlot p = start; p < end; ++p) {
-      UpdateStrongSlotInternal(isolate_, p);
+      UpdateStrongSlotInternal(cage_base_, p);
     }
   }
 
   void VisitPointers(HeapObject host, MaybeObjectSlot start,
                      MaybeObjectSlot end) final {
     for (MaybeObjectSlot p = start; p < end; ++p) {
-      UpdateSlotInternal(isolate_, p);
+      UpdateSlotInternal(cage_base_, p);
     }
   }
 
   void VisitRootPointer(Root root, const char* description,
                         FullObjectSlot p) override {
-    UpdateRootSlotInternal(isolate_, p);
+    UpdateRootSlotInternal(cage_base_, p);
   }
 
   void VisitRootPointers(Root root, const char* description,
                          FullObjectSlot start, FullObjectSlot end) override {
     for (FullObjectSlot p = start; p < end; ++p) {
-      UpdateRootSlotInternal(isolate_, p);
+      UpdateRootSlotInternal(cage_base_, p);
     }
   }
 
@@ -2775,7 +2777,7 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
                          OffHeapObjectSlot start,
                          OffHeapObjectSlot end) override {
     for (OffHeapObjectSlot p = start; p < end; ++p) {
-      UpdateRootSlotInternal(isolate_, p);
+      UpdateRootSlotInternal(cage_base_, p);
     }
   }
 
@@ -2790,32 +2792,32 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
   }
 
  private:
-  static inline SlotCallbackResult UpdateRootSlotInternal(IsolateRoot isolate,
-                                                          FullObjectSlot slot) {
-    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
+  static inline SlotCallbackResult UpdateRootSlotInternal(
+      PtrComprCageBase cage_base, FullObjectSlot slot) {
+    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
   }
 
   static inline SlotCallbackResult UpdateRootSlotInternal(
-      IsolateRoot isolate, OffHeapObjectSlot slot) {
-    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
+      PtrComprCageBase cage_base, OffHeapObjectSlot slot) {
+    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
   }
 
   static inline SlotCallbackResult UpdateStrongMaybeObjectSlotInternal(
-      IsolateRoot isolate, MaybeObjectSlot slot) {
-    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
+      PtrComprCageBase cage_base, MaybeObjectSlot slot) {
+    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
   }
 
-  static inline SlotCallbackResult UpdateStrongSlotInternal(IsolateRoot isolate,
-                                                            ObjectSlot slot) {
-    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
+  static inline SlotCallbackResult UpdateStrongSlotInternal(
+      PtrComprCageBase cage_base, ObjectSlot slot) {
+    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
   }
 
-  static inline SlotCallbackResult UpdateSlotInternal(IsolateRoot isolate,
-                                                      MaybeObjectSlot slot) {
-    return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
+  static inline SlotCallbackResult UpdateSlotInternal(
+      PtrComprCageBase cage_base, MaybeObjectSlot slot) {
+    return UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
   }
 
-  IsolateRoot isolate_;
+  PtrComprCageBase cage_base_;
 };
 
 static String UpdateReferenceInExternalStringTableEntry(Heap* heap,
|
|||||||
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
|
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
|
||||||
"ToSpaceUpdatingItem::ProcessVisitAll");
|
"ToSpaceUpdatingItem::ProcessVisitAll");
|
||||||
PointersUpdatingVisitor visitor(
|
PointersUpdatingVisitor visitor(
|
||||||
GetIsolateForPtrComprFromOnHeapAddress(start_));
|
GetPtrComprCageBaseFromOnHeapAddress(start_));
|
||||||
for (Address cur = start_; cur < end_;) {
|
for (Address cur = start_; cur < end_;) {
|
||||||
HeapObject object = HeapObject::FromAddress(cur);
|
HeapObject object = HeapObject::FromAddress(cur);
|
||||||
Map map = object.map();
|
Map map = object.map();
|
||||||
@ -3597,7 +3599,7 @@ class ToSpaceUpdatingItem : public UpdatingItem {
|
|||||||
// For young generation evacuations we want to visit grey objects, for
|
// For young generation evacuations we want to visit grey objects, for
|
||||||
// full MC, we need to visit black objects.
|
// full MC, we need to visit black objects.
|
||||||
PointersUpdatingVisitor visitor(
|
PointersUpdatingVisitor visitor(
|
||||||
GetIsolateForPtrComprFromOnHeapAddress(start_));
|
GetPtrComprCageBaseFromOnHeapAddress(start_));
|
||||||
for (auto object_and_size : LiveObjectRange<kAllLiveObjects>(
|
for (auto object_and_size : LiveObjectRange<kAllLiveObjects>(
|
||||||
chunk_, marking_state_->bitmap(chunk_))) {
|
chunk_, marking_state_->bitmap(chunk_))) {
|
||||||
object_and_size.first.IterateBodyFast(&visitor);
|
object_and_size.first.IterateBodyFast(&visitor);
|
||||||
@ -3743,12 +3745,12 @@ class RememberedSetUpdatingItem : public UpdatingItem {
|
|||||||
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
|
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
|
||||||
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
|
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
|
||||||
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_);
|
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_);
|
||||||
IsolateRoot isolate = heap_->isolate();
|
PtrComprCageBase cage_base = heap_->isolate();
|
||||||
RememberedSet<OLD_TO_OLD>::Iterate(
|
RememberedSet<OLD_TO_OLD>::Iterate(
|
||||||
chunk_,
|
chunk_,
|
||||||
[&filter, isolate](MaybeObjectSlot slot) {
|
[&filter, cage_base](MaybeObjectSlot slot) {
|
||||||
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
|
if (!filter.IsValid(slot.address())) return REMOVE_SLOT;
|
||||||
return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
return UpdateSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||||
},
|
},
|
||||||
SlotSet::FREE_EMPTY_BUCKETS);
|
SlotSet::FREE_EMPTY_BUCKETS);
|
||||||
chunk_->ReleaseSlotSet<OLD_TO_OLD>();
|
chunk_->ReleaseSlotSet<OLD_TO_OLD>();
|
||||||
@ -3783,10 +3785,10 @@ class RememberedSetUpdatingItem : public UpdatingItem {
|
|||||||
Address slot) {
|
Address slot) {
|
||||||
// Using UpdateStrongSlot is OK here, because there are no weak
|
// Using UpdateStrongSlot is OK here, because there are no weak
|
||||||
// typed slots.
|
// typed slots.
|
||||||
IsolateRoot isolate = heap_->isolate();
|
PtrComprCageBase cage_base = heap_->isolate();
|
||||||
return UpdateTypedSlotHelper::UpdateTypedSlot(
|
return UpdateTypedSlotHelper::UpdateTypedSlot(
|
||||||
heap_, slot_type, slot, [isolate](FullMaybeObjectSlot slot) {
|
heap_, slot_type, slot, [cage_base](FullMaybeObjectSlot slot) {
|
||||||
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
|
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(cage_base, slot);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@@ -14,9 +14,9 @@ namespace internal {
 
 // static
 ReadOnlyRoots ReadOnlyHeap::GetReadOnlyRoots(HeapObject object) {
-#ifdef V8_COMPRESS_POINTERS
-  IsolateRoot isolate = GetIsolateForPtrCompr(object);
-  return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address()));
+#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
+  return ReadOnlyRoots(
+      Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr())));
 #else
 #ifdef V8_SHARED_RO_HEAP
   // This fails if we are creating heap objects and the roots haven't yet been
@@ -37,7 +37,7 @@ base::LazyInstance<std::weak_ptr<ReadOnlyArtifacts>>::type
 
 std::shared_ptr<ReadOnlyArtifacts> InitializeSharedReadOnlyArtifacts() {
   std::shared_ptr<ReadOnlyArtifacts> artifacts;
-  if (COMPRESS_POINTERS_BOOL) {
+  if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
     artifacts = std::make_shared<PointerCompressedReadOnlyArtifacts>();
   } else {
     artifacts = std::make_shared<SingleCopyReadOnlyArtifacts>();
@@ -129,7 +129,7 @@ ReadOnlyHeap::ReadOnlyHeap(ReadOnlyHeap* ro_heap, ReadOnlySpace* ro_space)
     : read_only_space_(ro_space),
       read_only_object_cache_(ro_heap->read_only_object_cache_) {
   DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
-  DCHECK(COMPRESS_POINTERS_BOOL);
+  DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
 }
 
 // static
|
|||||||
|
|
||||||
std::unique_ptr<ReadOnlyHeap> ro_heap;
|
std::unique_ptr<ReadOnlyHeap> ro_heap;
|
||||||
auto* ro_space = new ReadOnlySpace(isolate->heap());
|
auto* ro_space = new ReadOnlySpace(isolate->heap());
|
||||||
if (COMPRESS_POINTERS_BOOL) {
|
if (COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
|
||||||
ro_heap.reset(new ReadOnlyHeap(ro_space));
|
ro_heap.reset(new ReadOnlyHeap(ro_space));
|
||||||
} else {
|
} else {
|
||||||
std::unique_ptr<SoleReadOnlyHeap> sole_ro_heap(
|
std::unique_ptr<SoleReadOnlyHeap> sole_ro_heap(
|
||||||
|
@@ -87,8 +87,8 @@ class ReadOnlyHeap {
   // Returns whether the ReadOnlySpace will actually be shared taking into
   // account whether shared memory is available with pointer compression.
   static bool IsReadOnlySpaceShared() {
-    return V8_SHARED_RO_HEAP_BOOL &&
-           (!COMPRESS_POINTERS_BOOL || IsSharedMemoryAvailable());
+    return V8_SHARED_RO_HEAP_BOOL && (!COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL ||
+                                      IsSharedMemoryAvailable());
   }
 
   virtual void InitializeIsolateRoots(Isolate* isolate) {}
@@ -755,9 +755,10 @@ SharedReadOnlySpace::SharedReadOnlySpace(
     Heap* heap, PointerCompressedReadOnlyArtifacts* artifacts)
     : SharedReadOnlySpace(heap) {
   // This constructor should only be used when RO_SPACE is shared with pointer
-  // compression.
+  // compression in a per-Isolate cage.
   DCHECK(V8_SHARED_RO_HEAP_BOOL);
   DCHECK(COMPRESS_POINTERS_BOOL);
+  DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
   DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
   DCHECK(!artifacts->pages().empty());
 
@@ -776,6 +777,7 @@ SharedReadOnlySpace::SharedReadOnlySpace(
     : SharedReadOnlySpace(heap) {
   DCHECK(V8_SHARED_RO_HEAP_BOOL);
   DCHECK(COMPRESS_POINTERS_BOOL);
+  DCHECK(COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL);
   DCHECK(ReadOnlyHeap::IsReadOnlySpaceShared());
 
   accounting_stats_ = std::move(new_stats);
@@ -35,10 +35,11 @@ class ReadOnlyPage : public BasicMemoryChunk {
   // Returns the address for a given offset in this page.
   Address OffsetToAddress(size_t offset) const {
     Address address_in_page = address() + offset;
-    if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_BOOL) {
-      // Pointer compression with share ReadOnlyPages means that the area_start
-      // and area_end cannot be defined since they are stored within the pages
-      // which can be mapped at multiple memory addresses.
+    if (V8_SHARED_RO_HEAP_BOOL && COMPRESS_POINTERS_IN_ISOLATE_CAGE_BOOL) {
+      // Pointer compression with a per-Isolate cage and shared ReadOnlyPages
+      // means that the area_start and area_end cannot be defined since they are
+      // stored within the pages which can be mapped at multiple memory
+      // addresses.
       DCHECK_LT(offset, size());
     } else {
       DCHECK_GE(address_in_page, area_start());
@@ -59,8 +59,8 @@ Address IsolateAllocator::InitReservation() {
   // Reserve a |4Gb + kIsolateRootBiasPageSize| region such as that the
   // resevation address plus |kIsolateRootBiasPageSize| is 4Gb aligned.
   const size_t reservation_size =
-      kPtrComprHeapReservationSize + kIsolateRootBiasPageSize;
-  const size_t base_alignment = kPtrComprIsolateRootAlignment;
+      kPtrComprCageReservationSize + kIsolateRootBiasPageSize;
+  const size_t base_alignment = kPtrComprCageBaseAlignment;
 
   const int kMaxAttempts = 4;
   for (int attempt = 0; attempt < kMaxAttempts; ++attempt) {
|
|||||||
GetIsolateRootBiasPageSize(platform_page_allocator);
|
GetIsolateRootBiasPageSize(platform_page_allocator);
|
||||||
|
|
||||||
Address isolate_root = heap_reservation_address + kIsolateRootBiasPageSize;
|
Address isolate_root = heap_reservation_address + kIsolateRootBiasPageSize;
|
||||||
CHECK(IsAligned(isolate_root, kPtrComprIsolateRootAlignment));
|
CHECK(IsAligned(isolate_root, kPtrComprCageBaseAlignment));
|
||||||
|
|
||||||
CHECK(reservation_.InVM(
|
CHECK(reservation_.InVM(
|
||||||
heap_reservation_address,
|
heap_reservation_address,
|
||||||
kPtrComprHeapReservationSize + kIsolateRootBiasPageSize));
|
kPtrComprCageReservationSize + kIsolateRootBiasPageSize));
|
||||||
|
|
||||||
// Simplify BoundedPageAllocator's life by configuring it to use same page
|
// Simplify BoundedPageAllocator's life by configuring it to use same page
|
||||||
// size as the Heap will use (MemoryChunk::kPageSize).
|
// size as the Heap will use (MemoryChunk::kPageSize).
|
||||||
@ -149,7 +149,7 @@ void IsolateAllocator::CommitPagesForIsolate(Address heap_reservation_address) {
|
|||||||
platform_page_allocator->AllocatePageSize());
|
platform_page_allocator->AllocatePageSize());
|
||||||
|
|
||||||
page_allocator_instance_ = std::make_unique<base::BoundedPageAllocator>(
|
page_allocator_instance_ = std::make_unique<base::BoundedPageAllocator>(
|
||||||
platform_page_allocator, isolate_root, kPtrComprHeapReservationSize,
|
platform_page_allocator, isolate_root, kPtrComprCageReservationSize,
|
||||||
page_size);
|
page_size);
|
||||||
page_allocator_ = page_allocator_instance_.get();
|
page_allocator_ = page_allocator_instance_.get();
|
||||||
|
|
||||||
|
@@ -323,9 +323,9 @@ int Code::SizeIncludingMetadata() const {
 }
 
 ByteArray Code::unchecked_relocation_info() const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
   return ByteArray::unchecked_cast(
-      TaggedField<HeapObject, kRelocationInfoOffset>::load(isolate, *this));
+      TaggedField<HeapObject, kRelocationInfoOffset>::load(cage_base, *this));
 }
 
 byte* Code::relocation_start() const {
@@ -33,9 +33,9 @@ Object CompressedObjectSlot::operator*() const {
   return Object(DecompressTaggedAny(address(), value));
 }
 
-Object CompressedObjectSlot::load(IsolateRoot isolate) const {
+Object CompressedObjectSlot::load(PtrComprCageBase cage_base) const {
   Tagged_t value = *location();
-  return Object(DecompressTaggedAny(isolate, value));
+  return Object(DecompressTaggedAny(cage_base, value));
 }
 
 void CompressedObjectSlot::store(Object value) const {
@@ -52,9 +52,9 @@ Object CompressedObjectSlot::Relaxed_Load() const {
   return Object(DecompressTaggedAny(address(), value));
 }
 
-Object CompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
+Object CompressedObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const {
   AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
-  return Object(DecompressTaggedAny(isolate, value));
+  return Object(DecompressTaggedAny(cage_base, value));
 }
 
 void CompressedObjectSlot::Relaxed_Store(Object value) const {
@@ -85,9 +85,9 @@ MaybeObject CompressedMaybeObjectSlot::operator*() const {
   return MaybeObject(DecompressTaggedAny(address(), value));
 }
 
-MaybeObject CompressedMaybeObjectSlot::load(IsolateRoot isolate) const {
+MaybeObject CompressedMaybeObjectSlot::load(PtrComprCageBase cage_base) const {
   Tagged_t value = *location();
-  return MaybeObject(DecompressTaggedAny(isolate, value));
+  return MaybeObject(DecompressTaggedAny(cage_base, value));
 }
 
 void CompressedMaybeObjectSlot::store(MaybeObject value) const {
@@ -99,9 +99,10 @@ MaybeObject CompressedMaybeObjectSlot::Relaxed_Load() const {
   return MaybeObject(DecompressTaggedAny(address(), value));
 }
 
-MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
+MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(
+    PtrComprCageBase cage_base) const {
   AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
-  return MaybeObject(DecompressTaggedAny(isolate, value));
+  return MaybeObject(DecompressTaggedAny(cage_base, value));
 }
 
 void CompressedMaybeObjectSlot::Relaxed_Store(MaybeObject value) const {
@@ -125,9 +126,10 @@ HeapObjectReference CompressedHeapObjectSlot::operator*() const {
   return HeapObjectReference(DecompressTaggedPointer(address(), value));
 }
 
-HeapObjectReference CompressedHeapObjectSlot::load(IsolateRoot isolate) const {
+HeapObjectReference CompressedHeapObjectSlot::load(
+    PtrComprCageBase cage_base) const {
   Tagged_t value = *location();
-  return HeapObjectReference(DecompressTaggedPointer(isolate, value));
+  return HeapObjectReference(DecompressTaggedPointer(cage_base, value));
 }
 
 void CompressedHeapObjectSlot::store(HeapObjectReference value) const {
@ -148,23 +150,25 @@ void CompressedHeapObjectSlot::StoreHeapObject(HeapObject value) const {
|
|||||||
// OffHeapCompressedObjectSlot implementation.
|
// OffHeapCompressedObjectSlot implementation.
|
||||||
//
|
//
|
||||||
|
|
||||||
Object OffHeapCompressedObjectSlot::load(IsolateRoot isolate) const {
|
Object OffHeapCompressedObjectSlot::load(PtrComprCageBase cage_base) const {
|
||||||
Tagged_t value = *location();
|
Tagged_t value = *location();
|
||||||
return Object(DecompressTaggedAny(isolate, value));
|
return Object(DecompressTaggedAny(cage_base, value));
|
||||||
}
|
}
|
||||||
|
|
||||||
void OffHeapCompressedObjectSlot::store(Object value) const {
|
void OffHeapCompressedObjectSlot::store(Object value) const {
|
||||||
*location() = CompressTagged(value.ptr());
|
*location() = CompressTagged(value.ptr());
|
||||||
}
|
}
|
||||||
|
|
||||||
Object OffHeapCompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
|
Object OffHeapCompressedObjectSlot::Relaxed_Load(
|
||||||
|
PtrComprCageBase cage_base) const {
|
||||||
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
|
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
|
||||||
return Object(DecompressTaggedAny(isolate, value));
|
return Object(DecompressTaggedAny(cage_base, value));
|
||||||
}
|
}
|
||||||
|
|
||||||
Object OffHeapCompressedObjectSlot::Acquire_Load(IsolateRoot isolate) const {
|
Object OffHeapCompressedObjectSlot::Acquire_Load(
|
||||||
|
PtrComprCageBase cage_base) const {
|
||||||
AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location());
|
AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location());
|
||||||
return Object(DecompressTaggedAny(isolate, value));
|
return Object(DecompressTaggedAny(cage_base, value));
|
||||||
}
|
}
|
||||||
|
|
||||||
void OffHeapCompressedObjectSlot::Relaxed_Store(Object value) const {
|
void OffHeapCompressedObjectSlot::Relaxed_Store(Object value) const {
|
||||||
|
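For orientation, the slot loads above only change which value supplies the upper address bits during decompression. A minimal standalone sketch of that arithmetic, assuming a 4 GB-aligned reservation; all names below are placeholders rather than V8's internals, and this is not V8's DecompressTaggedAny:

#include <cassert>
#include <cstdint>

using Address = uintptr_t;   // full (uncompressed) pointer width
using Tagged_t = uint32_t;   // on-heap compressed representation

struct CageBase {            // stand-in for PtrComprCageBase
  Address base;              // assumed 4 GB-aligned reservation start
};

// Models "base + 32-bit offset" decompression; the real helpers also deal
// with Smis and read-only objects, which are ignored here.
Address Decompress(CageBase cage_base, Tagged_t value) {
  return cage_base.base + static_cast<Address>(value);
}

int main() {
  CageBase cage{Address{0x2000} << 32};  // hypothetical cage base
  Tagged_t compressed = 0x0004a5b1;      // hypothetical slot contents
  assert(Decompress(cage, compressed) == (Address{0x2000} << 32) + 0x0004a5b1);
}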
@@ -41,12 +41,12 @@ class CompressedObjectSlot : public SlotBase<CompressedObjectSlot, Tagged_t> {
   // TODO(leszeks): Consider deprecating the operator* load, and always pass the
   // Isolate.
   inline Object operator*() const;
-  inline Object load(IsolateRoot isolate) const;
+  inline Object load(PtrComprCageBase cage_base) const;
   inline void store(Object value) const;
 
   inline Object Acquire_Load() const;
   inline Object Relaxed_Load() const;
-  inline Object Relaxed_Load(IsolateRoot isolate) const;
+  inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
   inline void Relaxed_Store(Object value) const;
   inline void Release_Store(Object value) const;
   inline Object Release_CompareAndSwap(Object old, Object target) const;
@@ -77,11 +77,11 @@ class CompressedMaybeObjectSlot
       : SlotBase(slot.address()) {}
 
   inline MaybeObject operator*() const;
-  inline MaybeObject load(IsolateRoot isolate) const;
+  inline MaybeObject load(PtrComprCageBase cage_base) const;
   inline void store(MaybeObject value) const;
 
   inline MaybeObject Relaxed_Load() const;
-  inline MaybeObject Relaxed_Load(IsolateRoot isolate) const;
+  inline MaybeObject Relaxed_Load(PtrComprCageBase cage_base) const;
   inline void Relaxed_Store(MaybeObject value) const;
   inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
 };
@@ -105,7 +105,7 @@ class CompressedHeapObjectSlot
       : SlotBase(slot.address()) {}
 
   inline HeapObjectReference operator*() const;
-  inline HeapObjectReference load(IsolateRoot isolate) const;
+  inline HeapObjectReference load(PtrComprCageBase cage_base) const;
   inline void store(HeapObjectReference value) const;
 
   inline HeapObject ToHeapObject() const;
@@ -131,11 +131,11 @@ class OffHeapCompressedObjectSlot
   explicit OffHeapCompressedObjectSlot(const uint32_t* ptr)
       : SlotBase(reinterpret_cast<Address>(ptr)) {}
 
-  inline Object load(IsolateRoot isolate) const;
+  inline Object load(PtrComprCageBase cage_base) const;
   inline void store(Object value) const;
 
-  inline Object Relaxed_Load(IsolateRoot isolate) const;
-  inline Object Acquire_Load(IsolateRoot isolate) const;
+  inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
+  inline Object Acquire_Load(PtrComprCageBase cage_base) const;
   inline void Relaxed_Store(Object value) const;
   inline void Release_Store(Object value) const;
   inline void Release_CompareAndSwap(Object old, Object target) const;
@@ -56,8 +56,8 @@ NEVER_READ_ONLY_SPACE_IMPL(Context)
 CAST_ACCESSOR(NativeContext)
 
 V8_INLINE Object Context::get(int index) const { return elements(index); }
-V8_INLINE Object Context::get(IsolateRoot isolate, int index) const {
-  return elements(isolate, index);
+V8_INLINE Object Context::get(PtrComprCageBase cage_base, int index) const {
+  return elements(cage_base, index);
 }
 V8_INLINE void Context::set(int index, Object value) {
   set_elements(index, value);
@@ -71,11 +71,11 @@ void Context::set_scope_info(ScopeInfo scope_info, WriteBarrierMode mode) {
 }
 
 Object Context::synchronized_get(int index) const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return synchronized_get(isolate, index);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return synchronized_get(cage_base, index);
 }
 
-Object Context::synchronized_get(IsolateRoot isolate, int index) const {
+Object Context::synchronized_get(PtrComprCageBase cage_base, int index) const {
   DCHECK_LT(static_cast<unsigned int>(index),
             static_cast<unsigned int>(this->length()));
   return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
@@ -243,7 +243,7 @@ Map Context::GetInitialJSArrayMap(ElementsKind kind) const {
 
 DEF_GETTER(NativeContext, microtask_queue, MicrotaskQueue*) {
   return reinterpret_cast<MicrotaskQueue*>(ReadExternalPointerField(
-      kMicrotaskQueueOffset, isolate, kNativeContextMicrotaskQueueTag));
+      kMicrotaskQueueOffset, cage_base, kNativeContextMicrotaskQueueTag));
 }
 
 void NativeContext::AllocateExternalPointerEntries(Isolate* isolate) {
@@ -422,13 +422,14 @@ class Context : public TorqueGeneratedContext<Context, HeapObject> {
 
   // Setter and getter for elements.
   V8_INLINE Object get(int index) const;
-  V8_INLINE Object get(IsolateRoot isolate, int index) const;
+  V8_INLINE Object get(PtrComprCageBase cage_base, int index) const;
   V8_INLINE void set(int index, Object value);
   // Setter with explicit barrier mode.
   V8_INLINE void set(int index, Object value, WriteBarrierMode mode);
   // Setter and getter with synchronization semantics.
   V8_INLINE Object synchronized_get(int index) const;
-  V8_INLINE Object synchronized_get(IsolateRoot isolate, int index) const;
+  V8_INLINE Object synchronized_get(PtrComprCageBase cage_base,
+                                    int index) const;
   V8_INLINE void synchronized_set(int index, Object value);
 
   static const int kScopeInfoOffset = kElementsOffset;
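A usage sketch (not part of this CL) of the Context overloads declared above, assuming V8-internal headers: the cage base is derived once with GetPtrComprCageBase and reused for every element read rather than recomputed per call. The helper and its names are illustrative.

// Sketch only; function and variable names are hypothetical.
void VisitContextSlots(Context context, int first, int count) {
  PtrComprCageBase cage_base = GetPtrComprCageBase(context);
  for (int i = 0; i < count; ++i) {
    Object value = context.get(cage_base, first + i);
    USE(value);  // placeholder for real per-slot work
  }
}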
@@ -106,15 +106,16 @@ ObjectSlot DescriptorArray::GetDescriptorSlot(int descriptor) {
 }
 
 Name DescriptorArray::GetKey(InternalIndex descriptor_number) const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return GetKey(isolate, descriptor_number);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return GetKey(cage_base, descriptor_number);
 }
 
-Name DescriptorArray::GetKey(IsolateRoot isolate,
+Name DescriptorArray::GetKey(PtrComprCageBase cage_base,
                              InternalIndex descriptor_number) const {
   DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
   int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
-  return Name::cast(EntryKeyField::Relaxed_Load(isolate, *this, entry_offset));
+  return Name::cast(
+      EntryKeyField::Relaxed_Load(cage_base, *this, entry_offset));
 }
 
 void DescriptorArray::SetKey(InternalIndex descriptor_number, Name key) {
@@ -129,12 +130,13 @@ int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
 }
 
 Name DescriptorArray::GetSortedKey(int descriptor_number) {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return GetSortedKey(isolate, descriptor_number);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return GetSortedKey(cage_base, descriptor_number);
 }
 
-Name DescriptorArray::GetSortedKey(IsolateRoot isolate, int descriptor_number) {
-  return GetKey(isolate, InternalIndex(GetSortedKeyIndex(descriptor_number)));
+Name DescriptorArray::GetSortedKey(PtrComprCageBase cage_base,
+                                   int descriptor_number) {
+  return GetKey(cage_base, InternalIndex(GetSortedKeyIndex(descriptor_number)));
 }
 
 void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) {
@@ -143,13 +145,13 @@ void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) {
 }
 
 Object DescriptorArray::GetStrongValue(InternalIndex descriptor_number) {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return GetStrongValue(isolate, descriptor_number);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return GetStrongValue(cage_base, descriptor_number);
 }
 
-Object DescriptorArray::GetStrongValue(IsolateRoot isolate,
+Object DescriptorArray::GetStrongValue(PtrComprCageBase cage_base,
                                        InternalIndex descriptor_number) {
-  return GetValue(isolate, descriptor_number).cast<Object>();
+  return GetValue(cage_base, descriptor_number).cast<Object>();
 }
 
 void DescriptorArray::SetValue(InternalIndex descriptor_number,
@@ -161,15 +163,15 @@ void DescriptorArray::SetValue(InternalIndex descriptor_number,
 }
 
 MaybeObject DescriptorArray::GetValue(InternalIndex descriptor_number) {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return GetValue(isolate, descriptor_number);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return GetValue(cage_base, descriptor_number);
 }
 
-MaybeObject DescriptorArray::GetValue(IsolateRoot isolate,
+MaybeObject DescriptorArray::GetValue(PtrComprCageBase cage_base,
                                       InternalIndex descriptor_number) {
   DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
   int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
-  return EntryValueField::Relaxed_Load(isolate, *this, entry_offset);
+  return EntryValueField::Relaxed_Load(cage_base, *this, entry_offset);
 }
 
 PropertyDetails DescriptorArray::GetDetails(InternalIndex descriptor_number) {
@@ -192,14 +194,14 @@ int DescriptorArray::GetFieldIndex(InternalIndex descriptor_number) {
 }
 
 FieldType DescriptorArray::GetFieldType(InternalIndex descriptor_number) {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return GetFieldType(isolate, descriptor_number);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return GetFieldType(cage_base, descriptor_number);
 }
 
-FieldType DescriptorArray::GetFieldType(IsolateRoot isolate,
+FieldType DescriptorArray::GetFieldType(PtrComprCageBase cage_base,
                                         InternalIndex descriptor_number) {
   DCHECK_EQ(GetDetails(descriptor_number).location(), kField);
-  MaybeObject wrapped_type = GetValue(isolate, descriptor_number);
+  MaybeObject wrapped_type = GetValue(cage_base, descriptor_number);
   return Map::UnwrapFieldType(wrapped_type);
 }
 
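A hypothetical in-tree caller of the accessors above might hoist the cage base out of its descriptor loop; the helper below is illustrative only and assumes nothing beyond the signatures visible in this CL.

// Sketch only; not part of this CL.
void VisitDescriptorKeys(DescriptorArray descriptors) {
  PtrComprCageBase cage_base = GetPtrComprCageBase(descriptors);
  for (int i = 0; i < descriptors.number_of_descriptors(); ++i) {
    Name key = descriptors.GetKey(cage_base, InternalIndex(i));
    USE(key);  // placeholder for real per-descriptor work
  }
}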
@@ -69,22 +69,22 @@ class DescriptorArray
 
   // Accessors for fetching instance descriptor at descriptor number.
   inline Name GetKey(InternalIndex descriptor_number) const;
-  inline Name GetKey(IsolateRoot isolate,
+  inline Name GetKey(PtrComprCageBase cage_base,
                      InternalIndex descriptor_number) const;
   inline Object GetStrongValue(InternalIndex descriptor_number);
-  inline Object GetStrongValue(IsolateRoot isolate,
+  inline Object GetStrongValue(PtrComprCageBase cage_base,
                                InternalIndex descriptor_number);
   inline MaybeObject GetValue(InternalIndex descriptor_number);
-  inline MaybeObject GetValue(IsolateRoot isolate,
+  inline MaybeObject GetValue(PtrComprCageBase cage_base,
                               InternalIndex descriptor_number);
   inline PropertyDetails GetDetails(InternalIndex descriptor_number);
   inline int GetFieldIndex(InternalIndex descriptor_number);
   inline FieldType GetFieldType(InternalIndex descriptor_number);
-  inline FieldType GetFieldType(IsolateRoot isolate,
+  inline FieldType GetFieldType(PtrComprCageBase cage_base,
                                 InternalIndex descriptor_number);
 
   inline Name GetSortedKey(int descriptor_number);
-  inline Name GetSortedKey(IsolateRoot isolate, int descriptor_number);
+  inline Name GetSortedKey(PtrComprCageBase cage_base, int descriptor_number);
   inline int GetSortedKeyIndex(int descriptor_number);
 
   // Accessor for complete descriptor.
@ -30,15 +30,15 @@ Dictionary<Derived, Shape>::Dictionary(Address ptr)
|
|||||||
|
|
||||||
template <typename Derived, typename Shape>
|
template <typename Derived, typename Shape>
|
||||||
Object Dictionary<Derived, Shape>::ValueAt(InternalIndex entry) {
|
Object Dictionary<Derived, Shape>::ValueAt(InternalIndex entry) {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return ValueAt(isolate, entry);
|
return ValueAt(cage_base, entry);
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename Derived, typename Shape>
|
template <typename Derived, typename Shape>
|
||||||
Object Dictionary<Derived, Shape>::ValueAt(IsolateRoot isolate,
|
Object Dictionary<Derived, Shape>::ValueAt(PtrComprCageBase cage_base,
|
||||||
InternalIndex entry) {
|
InternalIndex entry) {
|
||||||
return this->get(isolate, DerivedHashTable::EntryToIndex(entry) +
|
return this->get(cage_base, DerivedHashTable::EntryToIndex(entry) +
|
||||||
Derived::kEntryValueIndex);
|
Derived::kEntryValueIndex);
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename Derived, typename Shape>
|
template <typename Derived, typename Shape>
|
||||||
@ -181,12 +181,12 @@ Handle<Map> GlobalDictionary::GetMap(ReadOnlyRoots roots) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Name NameDictionary::NameAt(InternalIndex entry) {
|
Name NameDictionary::NameAt(InternalIndex entry) {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return NameAt(isolate, entry);
|
return NameAt(cage_base, entry);
|
||||||
}
|
}
|
||||||
|
|
||||||
Name NameDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) {
|
Name NameDictionary::NameAt(PtrComprCageBase cage_base, InternalIndex entry) {
|
||||||
return Name::cast(KeyAt(isolate, entry));
|
return Name::cast(KeyAt(cage_base, entry));
|
||||||
}
|
}
|
||||||
|
|
||||||
Handle<Map> NameDictionary::GetMap(ReadOnlyRoots roots) {
|
Handle<Map> NameDictionary::GetMap(ReadOnlyRoots roots) {
|
||||||
@ -194,32 +194,33 @@ Handle<Map> NameDictionary::GetMap(ReadOnlyRoots roots) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
PropertyCell GlobalDictionary::CellAt(InternalIndex entry) {
|
PropertyCell GlobalDictionary::CellAt(InternalIndex entry) {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return CellAt(isolate, entry);
|
return CellAt(cage_base, entry);
|
||||||
}
|
}
|
||||||
|
|
||||||
PropertyCell GlobalDictionary::CellAt(IsolateRoot isolate,
|
PropertyCell GlobalDictionary::CellAt(PtrComprCageBase cage_base,
|
||||||
InternalIndex entry) {
|
InternalIndex entry) {
|
||||||
DCHECK(KeyAt(isolate, entry).IsPropertyCell(isolate));
|
DCHECK(KeyAt(cage_base, entry).IsPropertyCell(cage_base));
|
||||||
return PropertyCell::cast(KeyAt(isolate, entry));
|
return PropertyCell::cast(KeyAt(cage_base, entry));
|
||||||
}
|
}
|
||||||
|
|
||||||
Name GlobalDictionary::NameAt(InternalIndex entry) {
|
Name GlobalDictionary::NameAt(InternalIndex entry) {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return NameAt(isolate, entry);
|
return NameAt(cage_base, entry);
|
||||||
}
|
}
|
||||||
|
|
||||||
Name GlobalDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) {
|
Name GlobalDictionary::NameAt(PtrComprCageBase cage_base, InternalIndex entry) {
|
||||||
return CellAt(isolate, entry).name(isolate);
|
return CellAt(cage_base, entry).name(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
Object GlobalDictionary::ValueAt(InternalIndex entry) {
|
Object GlobalDictionary::ValueAt(InternalIndex entry) {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return ValueAt(isolate, entry);
|
return ValueAt(cage_base, entry);
|
||||||
}
|
}
|
||||||
|
|
||||||
Object GlobalDictionary::ValueAt(IsolateRoot isolate, InternalIndex entry) {
|
Object GlobalDictionary::ValueAt(PtrComprCageBase cage_base,
|
||||||
return CellAt(isolate, entry).value(isolate);
|
InternalIndex entry) {
|
||||||
|
return CellAt(cage_base, entry).value(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
void GlobalDictionary::SetEntry(InternalIndex entry, Object key, Object value,
|
void GlobalDictionary::SetEntry(InternalIndex entry, Object key, Object value,
|
||||||
|
@ -39,7 +39,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) Dictionary
|
|||||||
using Key = typename Shape::Key;
|
using Key = typename Shape::Key;
|
||||||
// Returns the value at entry.
|
// Returns the value at entry.
|
||||||
inline Object ValueAt(InternalIndex entry);
|
inline Object ValueAt(InternalIndex entry);
|
||||||
inline Object ValueAt(IsolateRoot isolate, InternalIndex entry);
|
inline Object ValueAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||||
|
|
||||||
// Set the value for entry.
|
// Set the value for entry.
|
||||||
inline void ValueAtPut(InternalIndex entry, Object value);
|
inline void ValueAtPut(InternalIndex entry, Object value);
|
||||||
@ -193,7 +193,7 @@ class V8_EXPORT_PRIVATE NameDictionary
|
|||||||
static const int kInitialCapacity = 2;
|
static const int kInitialCapacity = 2;
|
||||||
|
|
||||||
inline Name NameAt(InternalIndex entry);
|
inline Name NameAt(InternalIndex entry);
|
||||||
inline Name NameAt(IsolateRoot isolate, InternalIndex entry);
|
inline Name NameAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||||
|
|
||||||
inline void set_hash(int hash);
|
inline void set_hash(int hash);
|
||||||
inline int hash() const;
|
inline int hash() const;
|
||||||
@ -231,14 +231,14 @@ class V8_EXPORT_PRIVATE GlobalDictionary
|
|||||||
DECL_PRINTER(GlobalDictionary)
|
DECL_PRINTER(GlobalDictionary)
|
||||||
|
|
||||||
inline Object ValueAt(InternalIndex entry);
|
inline Object ValueAt(InternalIndex entry);
|
||||||
inline Object ValueAt(IsolateRoot isolate, InternalIndex entry);
|
inline Object ValueAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||||
inline PropertyCell CellAt(InternalIndex entry);
|
inline PropertyCell CellAt(InternalIndex entry);
|
||||||
inline PropertyCell CellAt(IsolateRoot isolate, InternalIndex entry);
|
inline PropertyCell CellAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||||
inline void SetEntry(InternalIndex entry, Object key, Object value,
|
inline void SetEntry(InternalIndex entry, Object key, Object value,
|
||||||
PropertyDetails details);
|
PropertyDetails details);
|
||||||
inline void ClearEntry(InternalIndex entry);
|
inline void ClearEntry(InternalIndex entry);
|
||||||
inline Name NameAt(InternalIndex entry);
|
inline Name NameAt(InternalIndex entry);
|
||||||
inline Name NameAt(IsolateRoot isolate, InternalIndex entry);
|
inline Name NameAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||||
inline void ValueAtPut(InternalIndex entry, Object value);
|
inline void ValueAtPut(InternalIndex entry, Object value);
|
||||||
|
|
||||||
OBJECT_CONSTRUCTORS(
|
OBJECT_CONSTRUCTORS(
|
||||||
|
@@ -1421,10 +1421,10 @@ class DictionaryElementsAccessor
     DisallowGarbageCollection no_gc;
     NumberDictionary dict = NumberDictionary::cast(backing_store);
    if (!dict.requires_slow_elements()) return false;
-    IsolateRoot isolate = GetIsolateForPtrCompr(holder);
-    ReadOnlyRoots roots = holder.GetReadOnlyRoots(isolate);
+    PtrComprCageBase cage_base = GetPtrComprCageBase(holder);
+    ReadOnlyRoots roots = holder.GetReadOnlyRoots(cage_base);
     for (InternalIndex i : dict.IterateEntries()) {
-      Object key = dict.KeyAt(isolate, i);
+      Object key = dict.KeyAt(cage_base, i);
       if (!dict.IsKey(roots, key)) continue;
       PropertyDetails details = dict.DetailsAt(i);
       if (details.kind() == kAccessor) return true;
@@ -81,7 +81,7 @@ void EmbedderDataSlot::store_tagged(JSObject object, int embedder_field_index,
 #endif
 }
 
-bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root,
+bool EmbedderDataSlot::ToAlignedPointer(PtrComprCageBase isolate_root,
                                         void** out_pointer) const {
   // We don't care about atomicity of access here because embedder slots
   // are accessed this way only from the main thread via API during "mutator"
@@ -89,6 +89,12 @@ bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root,
   // at the tagged part of the embedder slot but read-only access is ok).
   Address raw_value;
 #ifdef V8_HEAP_SANDBOX
+
+  // TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
+#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
+#endif
+
   uint32_t index = base::Memory<uint32_t>(address() + kRawPayloadOffset);
   const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());
   raw_value = isolate->external_pointer_table().get(index) ^
@@ -108,9 +114,15 @@ bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate_root,
   return HAS_SMI_TAG(raw_value);
 }
 
-bool EmbedderDataSlot::ToAlignedPointerSafe(IsolateRoot isolate_root,
+bool EmbedderDataSlot::ToAlignedPointerSafe(PtrComprCageBase isolate_root,
                                             void** out_pointer) const {
 #ifdef V8_HEAP_SANDBOX
+
+  // TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
+#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
+#endif
+
   uint32_t index = base::Memory<uint32_t>(address() + kRawPayloadOffset);
   Address raw_value;
   const Isolate* isolate = Isolate::FromRootAddress(isolate_root.address());
@@ -75,7 +75,8 @@ class EmbedderDataSlot
   // When V8 heap sandbox is enabled, calling this method when the raw part of
   // the slot does not contain valid external pointer table index is undefined
   // behaviour and most likely result in crashes.
-  V8_INLINE bool ToAlignedPointer(IsolateRoot isolate, void** out_result) const;
+  V8_INLINE bool ToAlignedPointer(PtrComprCageBase isolate_root,
+                                  void** out_result) const;
 
   // Same as ToAlignedPointer() but with a workaround for V8 heap sandbox.
   // When V8 heap sandbox is enabled, this method doesn't crash when the raw
@@ -86,7 +87,7 @@ class EmbedderDataSlot
   //
   // Call this function if you are not sure whether the slot contains valid
   // external pointer or not.
-  V8_INLINE bool ToAlignedPointerSafe(IsolateRoot isolate,
+  V8_INLINE bool ToAlignedPointerSafe(PtrComprCageBase isolate_root,
                                       void** out_result) const;
 
   // Returns true if the pointer was successfully stored or false it the pointer
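The #error added above is the usual compile-time guard for mutually exclusive build options. A standalone illustration of the same pattern, with placeholder macro names rather than V8's:

#include <cstdio>

#define FEATURE_SANDBOX 1
// #define FEATURE_SHARED_CAGE 1  // defining both makes the build fail below

#if defined(FEATURE_SANDBOX) && defined(FEATURE_SHARED_CAGE)
#error "FEATURE_SANDBOX requires a per-isolate cage, not a shared one"
#endif

int main() {
  std::printf("configuration is consistent\n");
}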
@@ -187,8 +187,9 @@ MaybeObject FeedbackVector::Get(FeedbackSlot slot) const {
   return value;
 }
 
-MaybeObject FeedbackVector::Get(IsolateRoot isolate, FeedbackSlot slot) const {
-  MaybeObject value = raw_feedback_slots(isolate, GetIndex(slot));
+MaybeObject FeedbackVector::Get(PtrComprCageBase cage_base,
+                                FeedbackSlot slot) const {
+  MaybeObject value = raw_feedback_slots(cage_base, GetIndex(slot));
   DCHECK(!IsOfLegacyType(value));
   return value;
 }
@@ -259,7 +259,7 @@ class FeedbackVector
                   WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
 
   inline MaybeObject Get(FeedbackSlot slot) const;
-  inline MaybeObject Get(IsolateRoot isolate, FeedbackSlot slot) const;
+  inline MaybeObject Get(PtrComprCageBase cage_base, FeedbackSlot slot) const;
 
   // Returns the feedback cell at |index| that is used to create the
   // closure.
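A usage sketch for the FeedbackVector::Get overload above, assuming V8-internal headers; the helper and variable names are illustrative and not part of this CL.

// Sketch only: one cage base serves several slot reads.
void SampleFeedback(FeedbackVector vector, FeedbackSlot a, FeedbackSlot b) {
  PtrComprCageBase cage_base = GetPtrComprCageBase(vector);
  MaybeObject first = vector.Get(cage_base, a);
  MaybeObject second = vector.Get(cage_base, b);
  USE(first);
  USE(second);
}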
@@ -61,13 +61,13 @@ int FieldIndex::GetLoadByFieldIndex() const {
 }
 
 FieldIndex FieldIndex::ForDescriptor(Map map, InternalIndex descriptor_index) {
-  IsolateRoot isolate = GetIsolateForPtrCompr(map);
-  return ForDescriptor(isolate, map, descriptor_index);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(map);
+  return ForDescriptor(cage_base, map, descriptor_index);
 }
 
-FieldIndex FieldIndex::ForDescriptor(IsolateRoot isolate, Map map,
+FieldIndex FieldIndex::ForDescriptor(PtrComprCageBase cage_base, Map map,
                                      InternalIndex descriptor_index) {
-  PropertyDetails details = map.instance_descriptors(isolate, kRelaxedLoad)
+  PropertyDetails details = map.instance_descriptors(cage_base, kRelaxedLoad)
                                 .GetDetails(descriptor_index);
   int field_index = details.field_index();
   return ForPropertyIndex(map, field_index, details.representation());
@@ -31,7 +31,7 @@ class FieldIndex final {
   static inline FieldIndex ForInObjectOffset(int offset, Encoding encoding);
   static inline FieldIndex ForDescriptor(Map map,
                                          InternalIndex descriptor_index);
-  static inline FieldIndex ForDescriptor(IsolateRoot isolate, Map map,
+  static inline FieldIndex ForDescriptor(PtrComprCageBase cage_base, Map map,
                                          InternalIndex descriptor_index);
 
   inline int GetLoadByFieldIndex() const;
@ -70,13 +70,13 @@ bool FixedArray::ContainsOnlySmisOrHoles() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Object FixedArray::get(int index) const {
|
Object FixedArray::get(int index) const {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return get(isolate, index);
|
return get(cage_base, index);
|
||||||
}
|
}
|
||||||
|
|
||||||
Object FixedArray::get(IsolateRoot isolate, int index) const {
|
Object FixedArray::get(PtrComprCageBase cage_base, int index) const {
|
||||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||||
return TaggedField<Object>::Relaxed_Load(isolate, *this,
|
return TaggedField<Object>::Relaxed_Load(cage_base, *this,
|
||||||
OffsetOfElementAt(index));
|
OffsetOfElementAt(index));
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -124,11 +124,12 @@ void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Object FixedArray::get(int index, RelaxedLoadTag) const {
|
Object FixedArray::get(int index, RelaxedLoadTag) const {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return get(isolate, index);
|
return get(cage_base, index);
|
||||||
}
|
}
|
||||||
|
|
||||||
Object FixedArray::get(IsolateRoot isolate, int index, RelaxedLoadTag) const {
|
Object FixedArray::get(PtrComprCageBase cage_base, int index,
|
||||||
|
RelaxedLoadTag) const {
|
||||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||||
return RELAXED_READ_FIELD(*this, OffsetOfElementAt(index));
|
return RELAXED_READ_FIELD(*this, OffsetOfElementAt(index));
|
||||||
}
|
}
|
||||||
@ -147,11 +148,12 @@ void FixedArray::set(int index, Smi value, RelaxedStoreTag tag) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
Object FixedArray::get(int index, AcquireLoadTag) const {
|
Object FixedArray::get(int index, AcquireLoadTag) const {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return get(isolate, index);
|
return get(cage_base, index);
|
||||||
}
|
}
|
||||||
|
|
||||||
Object FixedArray::get(IsolateRoot isolate, int index, AcquireLoadTag) const {
|
Object FixedArray::get(PtrComprCageBase cage_base, int index,
|
||||||
|
AcquireLoadTag) const {
|
||||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||||
return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
|
return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
|
||||||
}
|
}
|
||||||
@ -435,13 +437,13 @@ void FixedDoubleArray::FillWithHoles(int from, int to) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
MaybeObject WeakFixedArray::Get(int index) const {
|
MaybeObject WeakFixedArray::Get(int index) const {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return Get(isolate, index);
|
return Get(cage_base, index);
|
||||||
}
|
}
|
||||||
|
|
||||||
MaybeObject WeakFixedArray::Get(IsolateRoot isolate, int index) const {
|
MaybeObject WeakFixedArray::Get(PtrComprCageBase cage_base, int index) const {
|
||||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
|
||||||
return objects(isolate, index);
|
return objects(cage_base, index);
|
||||||
}
|
}
|
||||||
|
|
||||||
void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) {
|
void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) {
|
||||||
@ -470,13 +472,13 @@ void WeakFixedArray::CopyElements(Isolate* isolate, int dst_index,
|
|||||||
}
|
}
|
||||||
|
|
||||||
MaybeObject WeakArrayList::Get(int index) const {
|
MaybeObject WeakArrayList::Get(int index) const {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return Get(isolate, index);
|
return Get(cage_base, index);
|
||||||
}
|
}
|
||||||
|
|
||||||
MaybeObject WeakArrayList::Get(IsolateRoot isolate, int index) const {
|
MaybeObject WeakArrayList::Get(PtrComprCageBase cage_base, int index) const {
|
||||||
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(capacity()));
|
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(capacity()));
|
||||||
return objects(isolate, index);
|
return objects(cage_base, index);
|
||||||
}
|
}
|
||||||
|
|
||||||
void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) {
|
void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) {
|
||||||
@ -525,8 +527,8 @@ Object ArrayList::Get(int index) const {
|
|||||||
return FixedArray::cast(*this).get(kFirstIndex + index);
|
return FixedArray::cast(*this).get(kFirstIndex + index);
|
||||||
}
|
}
|
||||||
|
|
||||||
Object ArrayList::Get(IsolateRoot isolate, int index) const {
|
Object ArrayList::Get(PtrComprCageBase cage_base, int index) const {
|
||||||
return FixedArray::cast(*this).get(isolate, kFirstIndex + index);
|
return FixedArray::cast(*this).get(cage_base, kFirstIndex + index);
|
||||||
}
|
}
|
||||||
|
|
||||||
ObjectSlot ArrayList::Slot(int index) {
|
ObjectSlot ArrayList::Slot(int index) {
|
||||||
@ -650,8 +652,8 @@ Object TemplateList::get(int index) const {
|
|||||||
return FixedArray::cast(*this).get(kFirstElementIndex + index);
|
return FixedArray::cast(*this).get(kFirstElementIndex + index);
|
||||||
}
|
}
|
||||||
|
|
||||||
Object TemplateList::get(IsolateRoot isolate, int index) const {
|
Object TemplateList::get(PtrComprCageBase cage_base, int index) const {
|
||||||
return FixedArray::cast(*this).get(isolate, kFirstElementIndex + index);
|
return FixedArray::cast(*this).get(cage_base, kFirstElementIndex + index);
|
||||||
}
|
}
|
||||||
|
|
||||||
void TemplateList::set(int index, Object value) {
|
void TemplateList::set(int index, Object value) {
|
||||||
|
@ -101,7 +101,7 @@ class FixedArray
|
|||||||
public:
|
public:
|
||||||
// Setter and getter for elements.
|
// Setter and getter for elements.
|
||||||
inline Object get(int index) const;
|
inline Object get(int index) const;
|
||||||
inline Object get(IsolateRoot isolate, int index) const;
|
inline Object get(PtrComprCageBase cage_base, int index) const;
|
||||||
|
|
||||||
static inline Handle<Object> get(FixedArray array, int index,
|
static inline Handle<Object> get(FixedArray array, int index,
|
||||||
Isolate* isolate);
|
Isolate* isolate);
|
||||||
@ -113,14 +113,16 @@ class FixedArray
|
|||||||
|
|
||||||
// Relaxed accessors.
|
// Relaxed accessors.
|
||||||
inline Object get(int index, RelaxedLoadTag) const;
|
inline Object get(int index, RelaxedLoadTag) const;
|
||||||
inline Object get(IsolateRoot isolate, int index, RelaxedLoadTag) const;
|
inline Object get(PtrComprCageBase cage_base, int index,
|
||||||
|
RelaxedLoadTag) const;
|
||||||
inline void set(int index, Object value, RelaxedStoreTag,
|
inline void set(int index, Object value, RelaxedStoreTag,
|
||||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
||||||
inline void set(int index, Smi value, RelaxedStoreTag);
|
inline void set(int index, Smi value, RelaxedStoreTag);
|
||||||
|
|
||||||
// Acquire/release accessors.
|
// Acquire/release accessors.
|
||||||
inline Object get(int index, AcquireLoadTag) const;
|
inline Object get(int index, AcquireLoadTag) const;
|
||||||
inline Object get(IsolateRoot isolate, int index, AcquireLoadTag) const;
|
inline Object get(PtrComprCageBase cage_base, int index,
|
||||||
|
AcquireLoadTag) const;
|
||||||
inline void set(int index, Object value, ReleaseStoreTag,
|
inline void set(int index, Object value, ReleaseStoreTag,
|
||||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
||||||
inline void set(int index, Smi value, ReleaseStoreTag);
|
inline void set(int index, Smi value, ReleaseStoreTag);
|
||||||
@ -275,7 +277,7 @@ class WeakFixedArray
|
|||||||
: public TorqueGeneratedWeakFixedArray<WeakFixedArray, HeapObject> {
|
: public TorqueGeneratedWeakFixedArray<WeakFixedArray, HeapObject> {
|
||||||
public:
|
public:
|
||||||
inline MaybeObject Get(int index) const;
|
inline MaybeObject Get(int index) const;
|
||||||
inline MaybeObject Get(IsolateRoot isolate, int index) const;
|
inline MaybeObject Get(PtrComprCageBase cage_base, int index) const;
|
||||||
|
|
||||||
inline void Set(
|
inline void Set(
|
||||||
int index, MaybeObject value,
|
int index, MaybeObject value,
|
||||||
@ -350,7 +352,7 @@ class WeakArrayList
|
|||||||
V8_EXPORT_PRIVATE void Compact(Isolate* isolate);
|
V8_EXPORT_PRIVATE void Compact(Isolate* isolate);
|
||||||
|
|
||||||
inline MaybeObject Get(int index) const;
|
inline MaybeObject Get(int index) const;
|
||||||
inline MaybeObject Get(IsolateRoot isolate, int index) const;
|
inline MaybeObject Get(PtrComprCageBase cage_base, int index) const;
|
||||||
|
|
||||||
// Set the element at index to obj. The underlying array must be large enough.
|
// Set the element at index to obj. The underlying array must be large enough.
|
||||||
// If you need to grow the WeakArrayList, use the static AddToEnd() method
|
// If you need to grow the WeakArrayList, use the static AddToEnd() method
|
||||||
@ -450,7 +452,7 @@ class ArrayList : public TorqueGeneratedArrayList<ArrayList, FixedArray> {
|
|||||||
// storage capacity, i.e., length().
|
// storage capacity, i.e., length().
|
||||||
inline void SetLength(int length);
|
inline void SetLength(int length);
|
||||||
inline Object Get(int index) const;
|
inline Object Get(int index) const;
|
||||||
inline Object Get(IsolateRoot isolate, int index) const;
|
inline Object Get(PtrComprCageBase cage_base, int index) const;
|
||||||
inline ObjectSlot Slot(int index);
|
inline ObjectSlot Slot(int index);
|
||||||
|
|
||||||
// Set the element at index to obj. The underlying array must be large enough.
|
// Set the element at index to obj. The underlying array must be large enough.
|
||||||
@ -596,7 +598,7 @@ class TemplateList
|
|||||||
static Handle<TemplateList> New(Isolate* isolate, int size);
|
static Handle<TemplateList> New(Isolate* isolate, int size);
|
||||||
inline int length() const;
|
inline int length() const;
|
||||||
inline Object get(int index) const;
|
inline Object get(int index) const;
|
||||||
inline Object get(IsolateRoot isolate, int index) const;
|
inline Object get(PtrComprCageBase cage_base, int index) const;
|
||||||
inline void set(int index, Object value);
|
inline void set(int index, Object value);
|
||||||
static Handle<TemplateList> Add(Isolate* isolate, Handle<TemplateList> list,
|
static Handle<TemplateList> Add(Isolate* isolate, Handle<TemplateList> list,
|
||||||
Handle<Object> value);
|
Handle<Object> value);
|
||||||
|
@@ -29,7 +29,7 @@ bool Foreign::IsNormalized(Object value) {
 }
 
 DEF_GETTER(Foreign, foreign_address, Address) {
-  return ReadExternalPointerField(kForeignAddressOffset, isolate,
+  return ReadExternalPointerField(kForeignAddressOffset, cage_base,
                                   kForeignForeignAddressTag);
 }
 
@ -139,7 +139,7 @@ InternalIndex HashTable<Derived, Shape>::FindEntry(LocalIsolate* isolate,
|
|||||||
|
|
||||||
// Find entry for key otherwise return kNotFound.
|
// Find entry for key otherwise return kNotFound.
|
||||||
template <typename Derived, typename Shape>
|
template <typename Derived, typename Shape>
|
||||||
InternalIndex HashTable<Derived, Shape>::FindEntry(IsolateRoot isolate,
|
InternalIndex HashTable<Derived, Shape>::FindEntry(PtrComprCageBase cage_base,
|
||||||
ReadOnlyRoots roots, Key key,
|
ReadOnlyRoots roots, Key key,
|
||||||
int32_t hash) {
|
int32_t hash) {
|
||||||
DisallowGarbageCollection no_gc;
|
DisallowGarbageCollection no_gc;
|
||||||
@ -151,7 +151,7 @@ InternalIndex HashTable<Derived, Shape>::FindEntry(IsolateRoot isolate,
|
|||||||
// EnsureCapacity will guarantee the hash table is never full.
|
// EnsureCapacity will guarantee the hash table is never full.
|
||||||
for (InternalIndex entry = FirstProbe(hash, capacity);;
|
for (InternalIndex entry = FirstProbe(hash, capacity);;
|
||||||
entry = NextProbe(entry, count++, capacity)) {
|
entry = NextProbe(entry, count++, capacity)) {
|
||||||
Object element = KeyAt(isolate, entry);
|
Object element = KeyAt(cage_base, entry);
|
||||||
// Empty entry. Uses raw unchecked accessors because it is called by the
|
// Empty entry. Uses raw unchecked accessors because it is called by the
|
||||||
// string table during bootstrapping.
|
// string table during bootstrapping.
|
||||||
if (element == undefined) return InternalIndex::NotFound();
|
if (element == undefined) return InternalIndex::NotFound();
|
||||||
@ -177,24 +177,24 @@ bool HashTable<Derived, Shape>::ToKey(ReadOnlyRoots roots, InternalIndex entry,
|
|||||||
}
|
}
|
||||||
|
|
||||||
template <typename Derived, typename Shape>
|
template <typename Derived, typename Shape>
|
||||||
bool HashTable<Derived, Shape>::ToKey(IsolateRoot isolate, InternalIndex entry,
|
bool HashTable<Derived, Shape>::ToKey(PtrComprCageBase cage_base,
|
||||||
Object* out_k) {
|
InternalIndex entry, Object* out_k) {
|
||||||
Object k = KeyAt(isolate, entry);
|
Object k = KeyAt(cage_base, entry);
|
||||||
if (!IsKey(GetReadOnlyRoots(isolate), k)) return false;
|
if (!IsKey(GetReadOnlyRoots(cage_base), k)) return false;
|
||||||
*out_k = Shape::Unwrap(k);
|
*out_k = Shape::Unwrap(k);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename Derived, typename Shape>
|
template <typename Derived, typename Shape>
|
||||||
Object HashTable<Derived, Shape>::KeyAt(InternalIndex entry) {
|
Object HashTable<Derived, Shape>::KeyAt(InternalIndex entry) {
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
|
||||||
return KeyAt(isolate, entry);
|
return KeyAt(cage_base, entry);
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename Derived, typename Shape>
|
template <typename Derived, typename Shape>
|
||||||
Object HashTable<Derived, Shape>::KeyAt(IsolateRoot isolate,
|
Object HashTable<Derived, Shape>::KeyAt(PtrComprCageBase cage_base,
|
||||||
InternalIndex entry) {
|
InternalIndex entry) {
|
||||||
return get(isolate, EntryToIndex(entry) + kEntryKeyIndex);
|
return get(cage_base, EntryToIndex(entry) + kEntryKeyIndex);
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename Derived, typename Shape>
|
template <typename Derived, typename Shape>
|
||||||
|
@ -138,24 +138,25 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
|
|||||||
void IterateElements(ObjectVisitor* visitor);
|
void IterateElements(ObjectVisitor* visitor);
|
||||||
|
|
||||||
// Find entry for key otherwise return kNotFound.
|
// Find entry for key otherwise return kNotFound.
|
||||||
inline InternalIndex FindEntry(IsolateRoot isolate, ReadOnlyRoots roots,
|
inline InternalIndex FindEntry(PtrComprCageBase cage_base,
|
||||||
Key key, int32_t hash);
|
ReadOnlyRoots roots, Key key, int32_t hash);
|
||||||
template <typename LocalIsolate>
|
template <typename LocalIsolate>
|
||||||
inline InternalIndex FindEntry(LocalIsolate* isolate, Key key);
|
inline InternalIndex FindEntry(LocalIsolate* isolate, Key key);
|
||||||
|
|
||||||
// Rehashes the table in-place.
|
// Rehashes the table in-place.
|
||||||
void Rehash(IsolateRoot isolate);
|
void Rehash(PtrComprCageBase cage_base);
|
||||||
|
|
||||||
// Returns whether k is a real key. The hole and undefined are not allowed as
|
// Returns whether k is a real key. The hole and undefined are not allowed as
|
||||||
// keys and can be used to indicate missing or deleted elements.
|
// keys and can be used to indicate missing or deleted elements.
|
||||||
static inline bool IsKey(ReadOnlyRoots roots, Object k);
|
static inline bool IsKey(ReadOnlyRoots roots, Object k);
|
||||||
|
|
||||||
inline bool ToKey(ReadOnlyRoots roots, InternalIndex entry, Object* out_k);
|
inline bool ToKey(ReadOnlyRoots roots, InternalIndex entry, Object* out_k);
|
||||||
inline bool ToKey(IsolateRoot isolate, InternalIndex entry, Object* out_k);
|
inline bool ToKey(PtrComprCageBase cage_base, InternalIndex entry,
|
||||||
|
Object* out_k);
|
||||||
|
|
||||||
// Returns the key at entry.
|
// Returns the key at entry.
|
||||||
inline Object KeyAt(InternalIndex entry);
|
inline Object KeyAt(InternalIndex entry);
|
||||||
inline Object KeyAt(IsolateRoot isolate, InternalIndex entry);
|
inline Object KeyAt(PtrComprCageBase cage_base, InternalIndex entry);
|
||||||
|
|
||||||
static const int kElementsStartIndex = kPrefixStartIndex + Shape::kPrefixSize;
|
static const int kElementsStartIndex = kPrefixStartIndex + Shape::kPrefixSize;
|
||||||
static const int kEntrySize = Shape::kEntrySize;
|
static const int kEntrySize = Shape::kEntrySize;
|
||||||
@ -217,8 +218,8 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
|
|||||||
|
|
||||||
// Find the entry at which to insert element with the given key that
|
// Find the entry at which to insert element with the given key that
|
||||||
// has the given hash value.
|
// has the given hash value.
|
||||||
InternalIndex FindInsertionEntry(IsolateRoot isolate, ReadOnlyRoots roots,
|
InternalIndex FindInsertionEntry(PtrComprCageBase cage_base,
|
||||||
uint32_t hash);
|
ReadOnlyRoots roots, uint32_t hash);
|
||||||
InternalIndex FindInsertionEntry(Isolate* isolate, uint32_t hash);
|
InternalIndex FindInsertionEntry(Isolate* isolate, uint32_t hash);
|
||||||
|
|
||||||
// Computes the capacity a table with the given capacity would need to have
|
// Computes the capacity a table with the given capacity would need to have
|
||||||
@ -231,7 +232,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
|
|||||||
Isolate* isolate, Handle<Derived> table, int additionalCapacity = 0);
|
Isolate* isolate, Handle<Derived> table, int additionalCapacity = 0);
|
||||||
|
|
||||||
// Rehashes this hash-table into the new table.
|
// Rehashes this hash-table into the new table.
|
||||||
void Rehash(IsolateRoot isolate, Derived new_table);
|
void Rehash(PtrComprCageBase cage_base, Derived new_table);
|
||||||
|
|
||||||
inline void set_key(int index, Object value);
|
inline void set_key(int index, Object value);
|
||||||
inline void set_key(int index, Object value, WriteBarrierMode mode);
|
inline void set_key(int index, Object value, WriteBarrierMode mode);
|
||||||
@ -322,7 +323,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) ObjectHashTableBase
|
|||||||
// returned in case the key is not present.
|
// returned in case the key is not present.
|
||||||
Object Lookup(Handle<Object> key);
|
Object Lookup(Handle<Object> key);
|
||||||
Object Lookup(Handle<Object> key, int32_t hash);
|
Object Lookup(Handle<Object> key, int32_t hash);
|
||||||
Object Lookup(IsolateRoot isolate, Handle<Object> key, int32_t hash);
|
Object Lookup(PtrComprCageBase cage_base, Handle<Object> key, int32_t hash);
|
||||||
|
|
||||||
// Returns the value at entry.
|
// Returns the value at entry.
|
||||||
Object ValueAt(InternalIndex entry);
|
Object ValueAt(InternalIndex entry);
|
||||||
|
@@ -70,12 +70,12 @@ class HeapObject : public Object {
   // places where it might not be safe to access it.
   inline ReadOnlyRoots GetReadOnlyRoots() const;
   // This version is intended to be used for the isolate values produced by
-  // i::GetIsolateForPtrCompr(HeapObject) function which may return nullptr.
-  inline ReadOnlyRoots GetReadOnlyRoots(IsolateRoot isolate) const;
+  // i::GetPtrComprCageBase(HeapObject) function which may return nullptr.
+  inline ReadOnlyRoots GetReadOnlyRoots(PtrComprCageBase cage_base) const;
 
 #define IS_TYPE_FUNCTION_DECL(Type) \
   V8_INLINE bool Is##Type() const; \
-  V8_INLINE bool Is##Type(IsolateRoot isolate) const;
+  V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
   HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
   IS_TYPE_FUNCTION_DECL(HashTableBase)
   IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable)
@@ -96,7 +96,7 @@ class HeapObject : public Object {
 
 #define DECL_STRUCT_PREDICATE(NAME, Name, name) \
   V8_INLINE bool Is##Name() const; \
-  V8_INLINE bool Is##Name(IsolateRoot isolate) const;
+  V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const;
   STRUCT_LIST(DECL_STRUCT_PREDICATE)
 #undef DECL_STRUCT_PREDICATE
 
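GetPtrComprCageBase(HeapObject) can produce the value consumed by the predicates above because the cage is an alignment-constrained reservation, so the base is recoverable from any address inside it. A standalone model of that recovery, assuming a 4 GB alignment; the constant below is a stand-in for kPtrComprCageBaseAlignment, not V8's definition:

#include <cassert>
#include <cstdint>

using Address = uintptr_t;

// Stand-in for kPtrComprCageBaseAlignment; assumes a 64-bit build.
constexpr Address kCageAlignment = Address{1} << 32;  // 4 GB

// Models recovering the cage base: round the object's address down
// to the cage alignment.
Address CageBaseFromObjectAddress(Address object_address) {
  return object_address & ~(kCageAlignment - 1);
}

int main() {
  Address base = Address{0x2001} << 32;  // hypothetical, 4 GB-aligned
  Address object = base + 0x1234;        // some object inside the cage
  assert(CageBaseFromObjectAddress(object) == base);
}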
@@ -43,7 +43,7 @@ void JSArrayBuffer::set_byte_length(size_t value) {
 }

 DEF_GETTER(JSArrayBuffer, backing_store, void*) {
-  Address value = ReadExternalPointerField(kBackingStoreOffset, isolate,
+  Address value = ReadExternalPointerField(kBackingStoreOffset, cage_base,
                                            kArrayBufferBackingStoreTag);
   return reinterpret_cast<void*>(value);
 }
@@ -199,7 +199,7 @@ void JSTypedArray::set_length(size_t value) {
 }

 DEF_GETTER(JSTypedArray, external_pointer, Address) {
-  return ReadExternalPointerField(kExternalPointerOffset, isolate,
+  return ReadExternalPointerField(kExternalPointerOffset, cage_base,
                                   kTypedArrayExternalPointerTag);
 }

@@ -213,9 +213,9 @@ void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) {
 }

 Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
-    IsolateRoot isolate) {
+    PtrComprCageBase cage_base) {
 #ifdef V8_COMPRESS_POINTERS
-  return isolate.address();
+  return cage_base.address();
 #else
   return 0;
 #endif
@@ -321,7 +321,7 @@ MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,

 DEF_GETTER(JSDataView, data_pointer, void*) {
   return reinterpret_cast<void*>(ReadExternalPointerField(
-      kDataPointerOffset, isolate, kDataViewDataPointerTag));
+      kDataPointerOffset, cage_base, kDataViewDataPointerTag));
 }

 void JSDataView::AllocateExternalPointerEntries(Isolate* isolate) {
@@ -300,7 +300,7 @@ class JSTypedArray
   // as Tagged_t value and an |external_pointer| value.
   // For full-pointer mode the compensation value is zero.
   static inline Address ExternalPointerCompensationForOnHeapArray(
-      IsolateRoot isolate);
+      PtrComprCageBase cage_base);

   //
   // Serializer/deserializer support.
@@ -22,7 +22,7 @@ CAST_ACCESSOR(JSArray)
 CAST_ACCESSOR(JSArrayIterator)

 DEF_GETTER(JSArray, length, Object) {
-  return TaggedField<Object, kLengthOffset>::load(isolate, *this);
+  return TaggedField<Object, kLengthOffset>::load(cage_base, *this);
 }

 void JSArray::set_length(Object value, WriteBarrierMode mode) {
@@ -31,8 +31,8 @@ void JSArray::set_length(Object value, WriteBarrierMode mode) {
   CONDITIONAL_WRITE_BARRIER(*this, kLengthOffset, value, mode);
 }

-Object JSArray::length(IsolateRoot isolate, RelaxedLoadTag tag) const {
-  return TaggedField<Object, kLengthOffset>::Relaxed_Load(isolate, *this);
+Object JSArray::length(PtrComprCageBase cage_base, RelaxedLoadTag tag) const {
+  return TaggedField<Object, kLengthOffset>::Relaxed_Load(cage_base, *this);
 }

 void JSArray::set_length(Smi length) {
@@ -32,7 +32,7 @@ class JSArray : public JSObject {
   // acquire/release semantics ever become necessary, the default setter should
   // be reverted to non-atomic behavior, and setters with explicit tags
   // introduced and used when required.
-  Object length(IsolateRoot isolate, AcquireLoadTag tag) const = delete;
+  Object length(PtrComprCageBase cage_base, AcquireLoadTag tag) const = delete;
   void set_length(Object value, ReleaseStoreTag tag,
                   WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete;

@@ -210,63 +210,62 @@ ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map, HeapObject,
                   kPrototypeOrInitialMapOffset, map().has_prototype_slot())

 DEF_GETTER(JSFunction, has_prototype_slot, bool) {
-  return map(isolate).has_prototype_slot();
+  return map(cage_base).has_prototype_slot();
 }

 DEF_GETTER(JSFunction, initial_map, Map) {
-  return Map::cast(prototype_or_initial_map(isolate));
+  return Map::cast(prototype_or_initial_map(cage_base));
 }

 DEF_GETTER(JSFunction, has_initial_map, bool) {
-  DCHECK(has_prototype_slot(isolate));
-  return prototype_or_initial_map(isolate).IsMap(isolate);
+  DCHECK(has_prototype_slot(cage_base));
+  return prototype_or_initial_map(cage_base).IsMap(cage_base);
 }

 DEF_GETTER(JSFunction, has_instance_prototype, bool) {
-  DCHECK(has_prototype_slot(isolate));
-  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
-  // i::GetIsolateForPtrCompr(HeapObject).
-  return has_initial_map(isolate) ||
-         !prototype_or_initial_map(isolate).IsTheHole(
-             GetReadOnlyRoots(isolate));
+  DCHECK(has_prototype_slot(cage_base));
+  return has_initial_map(cage_base) ||
+         !prototype_or_initial_map(cage_base).IsTheHole(
+             GetReadOnlyRoots(cage_base));
 }

 DEF_GETTER(JSFunction, has_prototype, bool) {
-  DCHECK(has_prototype_slot(isolate));
-  return map(isolate).has_non_instance_prototype() ||
-         has_instance_prototype(isolate);
+  DCHECK(has_prototype_slot(cage_base));
+  return map(cage_base).has_non_instance_prototype() ||
+         has_instance_prototype(cage_base);
 }

 DEF_GETTER(JSFunction, has_prototype_property, bool) {
-  return (has_prototype_slot(isolate) && IsConstructor(isolate)) ||
-         IsGeneratorFunction(shared(isolate).kind());
+  return (has_prototype_slot(cage_base) && IsConstructor(cage_base)) ||
+         IsGeneratorFunction(shared(cage_base).kind());
 }

 DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
-  return !has_prototype_property(isolate) ||
-         map(isolate).has_non_instance_prototype();
+  return !has_prototype_property(cage_base) ||
+         map(cage_base).has_non_instance_prototype();
 }

 DEF_GETTER(JSFunction, instance_prototype, HeapObject) {
-  DCHECK(has_instance_prototype(isolate));
-  if (has_initial_map(isolate)) return initial_map(isolate).prototype(isolate);
+  DCHECK(has_instance_prototype(cage_base));
+  if (has_initial_map(cage_base))
+    return initial_map(cage_base).prototype(cage_base);
   // When there is no initial map and the prototype is a JSReceiver, the
   // initial map field is used for the prototype field.
-  return HeapObject::cast(prototype_or_initial_map(isolate));
+  return HeapObject::cast(prototype_or_initial_map(cage_base));
 }

 DEF_GETTER(JSFunction, prototype, Object) {
-  DCHECK(has_prototype(isolate));
+  DCHECK(has_prototype(cage_base));
   // If the function's prototype property has been set to a non-JSReceiver
   // value, that value is stored in the constructor field of the map.
-  if (map(isolate).has_non_instance_prototype()) {
-    Object prototype = map(isolate).GetConstructor(isolate);
+  if (map(cage_base).has_non_instance_prototype()) {
+    Object prototype = map(cage_base).GetConstructor(cage_base);
     // The map must have a prototype in that field, not a back pointer.
-    DCHECK(!prototype.IsMap(isolate));
-    DCHECK(!prototype.IsFunctionTemplateInfo(isolate));
+    DCHECK(!prototype.IsMap(cage_base));
+    DCHECK(!prototype.IsFunctionTemplateInfo(cage_base));
     return prototype;
   }
-  return instance_prototype(isolate);
+  return instance_prototype(cage_base);
 }

 bool JSFunction::is_compiled() const {
@@ -52,11 +52,12 @@ CAST_ACCESSOR(JSMessageObject)
 CAST_ACCESSOR(JSReceiver)

 DEF_GETTER(JSObject, elements, FixedArrayBase) {
-  return TaggedField<FixedArrayBase, kElementsOffset>::load(isolate, *this);
+  return TaggedField<FixedArrayBase, kElementsOffset>::load(cage_base, *this);
 }

-FixedArrayBase JSObject::elements(IsolateRoot isolate, RelaxedLoadTag) const {
-  return TaggedField<FixedArrayBase, kElementsOffset>::Relaxed_Load(isolate,
+FixedArrayBase JSObject::elements(PtrComprCageBase cage_base,
+                                  RelaxedLoadTag) const {
+  return TaggedField<FixedArrayBase, kElementsOffset>::Relaxed_Load(cage_base,
                                                                     *this);
 }

@@ -249,11 +250,11 @@ void JSObject::initialize_elements() {
 }

 DEF_GETTER(JSObject, GetIndexedInterceptor, InterceptorInfo) {
-  return map(isolate).GetIndexedInterceptor(isolate);
+  return map(cage_base).GetIndexedInterceptor(cage_base);
 }

 DEF_GETTER(JSObject, GetNamedInterceptor, InterceptorInfo) {
-  return map(isolate).GetNamedInterceptor(isolate);
+  return map(cage_base).GetNamedInterceptor(cage_base);
 }

 // static
@@ -322,16 +323,17 @@ void JSObject::SetEmbedderField(int index, Smi value) {
 // is needed to correctly distinguish between properties stored in-object and
 // properties stored in the properties array.
 Object JSObject::RawFastPropertyAt(FieldIndex index) const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return RawFastPropertyAt(isolate, index);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return RawFastPropertyAt(cage_base, index);
 }

-Object JSObject::RawFastPropertyAt(IsolateRoot isolate,
+Object JSObject::RawFastPropertyAt(PtrComprCageBase cage_base,
                                    FieldIndex index) const {
   if (index.is_inobject()) {
-    return TaggedField<Object>::load(isolate, *this, index.offset());
+    return TaggedField<Object>::load(cage_base, *this, index.offset());
   } else {
-    return property_array(isolate).get(isolate, index.outobject_array_index());
+    return property_array(cage_base).get(cage_base,
+                                         index.outobject_array_index());
   }
 }

@@ -425,7 +427,7 @@ ACCESSORS(JSGlobalObject, native_context, NativeContext, kNativeContextOffset)
 ACCESSORS(JSGlobalObject, global_proxy, JSGlobalProxy, kGlobalProxyOffset)

 DEF_GETTER(JSGlobalObject, native_context_unchecked, Object) {
-  return TaggedField<Object, kNativeContextOffset>::load(isolate, *this);
+  return TaggedField<Object, kNativeContextOffset>::load(cage_base, *this);
 }

 bool JSMessageObject::DidEnsureSourcePositionsAvailable() const {
@@ -461,119 +463,119 @@ SMI_ACCESSORS(JSMessageObject, error_level, kErrorLevelOffset)
 SMI_ACCESSORS(JSMessageObject, raw_type, kMessageTypeOffset)

 DEF_GETTER(JSObject, GetElementsKind, ElementsKind) {
-  ElementsKind kind = map(isolate).elements_kind();
+  ElementsKind kind = map(cage_base).elements_kind();
 #if VERIFY_HEAP && DEBUG
   FixedArrayBase fixed_array = FixedArrayBase::unchecked_cast(
-      TaggedField<HeapObject, kElementsOffset>::load(isolate, *this));
+      TaggedField<HeapObject, kElementsOffset>::load(cage_base, *this));

   // If a GC was caused while constructing this object, the elements
   // pointer may point to a one pointer filler map.
-  if (ElementsAreSafeToExamine(isolate)) {
-    Map map = fixed_array.map(isolate);
+  if (ElementsAreSafeToExamine(cage_base)) {
+    Map map = fixed_array.map(cage_base);
     if (IsSmiOrObjectElementsKind(kind)) {
-      DCHECK(map == GetReadOnlyRoots(isolate).fixed_array_map() ||
-             map == GetReadOnlyRoots(isolate).fixed_cow_array_map());
+      DCHECK(map == GetReadOnlyRoots(cage_base).fixed_array_map() ||
+             map == GetReadOnlyRoots(cage_base).fixed_cow_array_map());
     } else if (IsDoubleElementsKind(kind)) {
-      DCHECK(fixed_array.IsFixedDoubleArray(isolate) ||
-             fixed_array == GetReadOnlyRoots(isolate).empty_fixed_array());
+      DCHECK(fixed_array.IsFixedDoubleArray(cage_base) ||
+             fixed_array == GetReadOnlyRoots(cage_base).empty_fixed_array());
     } else if (kind == DICTIONARY_ELEMENTS) {
-      DCHECK(fixed_array.IsFixedArray(isolate));
-      DCHECK(fixed_array.IsNumberDictionary(isolate));
+      DCHECK(fixed_array.IsFixedArray(cage_base));
+      DCHECK(fixed_array.IsNumberDictionary(cage_base));
     } else {
       DCHECK(kind > DICTIONARY_ELEMENTS ||
              IsAnyNonextensibleElementsKind(kind));
     }
     DCHECK(!IsSloppyArgumentsElementsKind(kind) ||
-           elements(isolate).IsSloppyArgumentsElements());
+           elements(cage_base).IsSloppyArgumentsElements());
   }
 #endif
   return kind;
 }

 DEF_GETTER(JSObject, GetElementsAccessor, ElementsAccessor*) {
-  return ElementsAccessor::ForKind(GetElementsKind(isolate));
+  return ElementsAccessor::ForKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasObjectElements, bool) {
-  return IsObjectElementsKind(GetElementsKind(isolate));
+  return IsObjectElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasSmiElements, bool) {
-  return IsSmiElementsKind(GetElementsKind(isolate));
+  return IsSmiElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasSmiOrObjectElements, bool) {
-  return IsSmiOrObjectElementsKind(GetElementsKind(isolate));
+  return IsSmiOrObjectElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasDoubleElements, bool) {
-  return IsDoubleElementsKind(GetElementsKind(isolate));
+  return IsDoubleElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasHoleyElements, bool) {
-  return IsHoleyElementsKind(GetElementsKind(isolate));
+  return IsHoleyElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasFastElements, bool) {
-  return IsFastElementsKind(GetElementsKind(isolate));
+  return IsFastElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasFastPackedElements, bool) {
-  return IsFastPackedElementsKind(GetElementsKind(isolate));
+  return IsFastPackedElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasDictionaryElements, bool) {
-  return IsDictionaryElementsKind(GetElementsKind(isolate));
+  return IsDictionaryElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasPackedElements, bool) {
-  return GetElementsKind(isolate) == PACKED_ELEMENTS;
+  return GetElementsKind(cage_base) == PACKED_ELEMENTS;
 }

 DEF_GETTER(JSObject, HasAnyNonextensibleElements, bool) {
-  return IsAnyNonextensibleElementsKind(GetElementsKind(isolate));
+  return IsAnyNonextensibleElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasSealedElements, bool) {
-  return IsSealedElementsKind(GetElementsKind(isolate));
+  return IsSealedElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasNonextensibleElements, bool) {
-  return IsNonextensibleElementsKind(GetElementsKind(isolate));
+  return IsNonextensibleElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasFastArgumentsElements, bool) {
-  return IsFastArgumentsElementsKind(GetElementsKind(isolate));
+  return IsFastArgumentsElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasSlowArgumentsElements, bool) {
-  return IsSlowArgumentsElementsKind(GetElementsKind(isolate));
+  return IsSlowArgumentsElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasSloppyArgumentsElements, bool) {
-  return IsSloppyArgumentsElementsKind(GetElementsKind(isolate));
+  return IsSloppyArgumentsElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasStringWrapperElements, bool) {
-  return IsStringWrapperElementsKind(GetElementsKind(isolate));
+  return IsStringWrapperElementsKind(GetElementsKind(cage_base));
 }

 DEF_GETTER(JSObject, HasFastStringWrapperElements, bool) {
-  return GetElementsKind(isolate) == FAST_STRING_WRAPPER_ELEMENTS;
+  return GetElementsKind(cage_base) == FAST_STRING_WRAPPER_ELEMENTS;
 }

 DEF_GETTER(JSObject, HasSlowStringWrapperElements, bool) {
-  return GetElementsKind(isolate) == SLOW_STRING_WRAPPER_ELEMENTS;
+  return GetElementsKind(cage_base) == SLOW_STRING_WRAPPER_ELEMENTS;
 }

 DEF_GETTER(JSObject, HasTypedArrayElements, bool) {
-  DCHECK(!elements(isolate).is_null());
-  return map(isolate).has_typed_array_elements();
+  DCHECK(!elements(cage_base).is_null());
+  return map(cage_base).has_typed_array_elements();
 }

 #define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype) \
   DEF_GETTER(JSObject, HasFixed##Type##Elements, bool) { \
-    return map(isolate).elements_kind() == TYPE##_ELEMENTS; \
+    return map(cage_base).elements_kind() == TYPE##_ELEMENTS; \
   }

 TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
@@ -581,21 +583,21 @@ TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)
 #undef FIXED_TYPED_ELEMENTS_CHECK

 DEF_GETTER(JSObject, HasNamedInterceptor, bool) {
-  return map(isolate).has_named_interceptor();
+  return map(cage_base).has_named_interceptor();
 }

 DEF_GETTER(JSObject, HasIndexedInterceptor, bool) {
-  return map(isolate).has_indexed_interceptor();
+  return map(cage_base).has_indexed_interceptor();
 }

 RELEASE_ACQUIRE_ACCESSORS_CHECKED2(JSGlobalObject, global_dictionary,
                                    GlobalDictionary, kPropertiesOrHashOffset,
-                                   !HasFastProperties(isolate), true)
+                                   !HasFastProperties(cage_base), true)

 DEF_GETTER(JSObject, element_dictionary, NumberDictionary) {
-  DCHECK(HasDictionaryElements(isolate) ||
-         HasSlowStringWrapperElements(isolate));
-  return NumberDictionary::cast(elements(isolate));
+  DCHECK(HasDictionaryElements(cage_base) ||
+         HasSlowStringWrapperElements(cage_base));
+  return NumberDictionary::cast(elements(cage_base));
 }

 void JSReceiver::initialize_properties(Isolate* isolate) {
@@ -617,38 +619,34 @@ void JSReceiver::initialize_properties(Isolate* isolate) {
 }

 DEF_GETTER(JSReceiver, HasFastProperties, bool) {
-  DCHECK(raw_properties_or_hash(isolate).IsSmi() ||
-         ((raw_properties_or_hash(isolate).IsGlobalDictionary(isolate) ||
-           raw_properties_or_hash(isolate).IsNameDictionary(isolate) ||
-           raw_properties_or_hash(isolate).IsSwissNameDictionary(isolate)) ==
-          map(isolate).is_dictionary_map()));
-  return !map(isolate).is_dictionary_map();
+  DCHECK(raw_properties_or_hash(cage_base).IsSmi() ||
+         ((raw_properties_or_hash(cage_base).IsGlobalDictionary(cage_base) ||
+           raw_properties_or_hash(cage_base).IsNameDictionary(cage_base) ||
+           raw_properties_or_hash(cage_base).IsSwissNameDictionary(
+               cage_base)) == map(cage_base).is_dictionary_map()));
+  return !map(cage_base).is_dictionary_map();
 }

 DEF_GETTER(JSReceiver, property_dictionary, NameDictionary) {
-  DCHECK(!IsJSGlobalObject(isolate));
-  DCHECK(!HasFastProperties(isolate));
+  DCHECK(!IsJSGlobalObject(cage_base));
+  DCHECK(!HasFastProperties(cage_base));
   DCHECK(!V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL);

-  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
-  // i::GetIsolateForPtrCompr(HeapObject).
-  Object prop = raw_properties_or_hash(isolate);
+  Object prop = raw_properties_or_hash(cage_base);
   if (prop.IsSmi()) {
-    return GetReadOnlyRoots(isolate).empty_property_dictionary();
+    return GetReadOnlyRoots(cage_base).empty_property_dictionary();
   }
   return NameDictionary::cast(prop);
 }

 DEF_GETTER(JSReceiver, property_dictionary_swiss, SwissNameDictionary) {
-  DCHECK(!IsJSGlobalObject(isolate));
-  DCHECK(!HasFastProperties(isolate));
+  DCHECK(!IsJSGlobalObject(cage_base));
+  DCHECK(!HasFastProperties(cage_base));
   DCHECK(V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL);

-  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
-  // i::GetIsolateForPtrCompr(HeapObject).
-  Object prop = raw_properties_or_hash(isolate);
+  Object prop = raw_properties_or_hash(cage_base);
   if (prop.IsSmi()) {
-    return GetReadOnlyRoots(isolate).empty_swiss_property_dictionary();
+    return GetReadOnlyRoots(cage_base).empty_swiss_property_dictionary();
   }
   return SwissNameDictionary::cast(prop);
 }
@@ -656,12 +654,10 @@ DEF_GETTER(JSReceiver, property_dictionary_swiss, SwissNameDictionary) {
 // TODO(gsathya): Pass isolate directly to this function and access
 // the heap from this.
 DEF_GETTER(JSReceiver, property_array, PropertyArray) {
-  DCHECK(HasFastProperties(isolate));
-  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
-  // i::GetIsolateForPtrCompr(HeapObject).
-  Object prop = raw_properties_or_hash(isolate);
-  if (prop.IsSmi() || prop == GetReadOnlyRoots(isolate).empty_fixed_array()) {
-    return GetReadOnlyRoots(isolate).empty_property_array();
+  DCHECK(HasFastProperties(cage_base));
+  Object prop = raw_properties_or_hash(cage_base);
+  if (prop.IsSmi() || prop == GetReadOnlyRoots(cage_base).empty_fixed_array()) {
+    return GetReadOnlyRoots(cage_base).empty_property_array();
   }
   return PropertyArray::cast(prop);
 }
|
@ -319,7 +319,7 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
|
|||||||
// acquire/release semantics ever become necessary, the default setter should
|
// acquire/release semantics ever become necessary, the default setter should
|
||||||
// be reverted to non-atomic behavior, and setters with explicit tags
|
// be reverted to non-atomic behavior, and setters with explicit tags
|
||||||
// introduced and used when required.
|
// introduced and used when required.
|
||||||
FixedArrayBase elements(IsolateRoot isolate,
|
FixedArrayBase elements(PtrComprCageBase cage_base,
|
||||||
AcquireLoadTag tag) const = delete;
|
AcquireLoadTag tag) const = delete;
|
||||||
void set_elements(FixedArrayBase value, ReleaseStoreTag tag,
|
void set_elements(FixedArrayBase value, ReleaseStoreTag tag,
|
||||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete;
|
WriteBarrierMode mode = UPDATE_WRITE_BARRIER) = delete;
|
||||||
@ -652,7 +652,8 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
|
|||||||
Representation representation,
|
Representation representation,
|
||||||
FieldIndex index);
|
FieldIndex index);
|
||||||
inline Object RawFastPropertyAt(FieldIndex index) const;
|
inline Object RawFastPropertyAt(FieldIndex index) const;
|
||||||
inline Object RawFastPropertyAt(IsolateRoot isolate, FieldIndex index) const;
|
inline Object RawFastPropertyAt(PtrComprCageBase cage_base,
|
||||||
|
FieldIndex index) const;
|
||||||
|
|
||||||
inline void FastPropertyAtPut(FieldIndex index, Object value,
|
inline void FastPropertyAtPut(FieldIndex index, Object value,
|
||||||
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
|
||||||
@ -742,7 +743,8 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
|
|||||||
// If a GC was caused while constructing this object, the elements pointer
|
// If a GC was caused while constructing this object, the elements pointer
|
||||||
// may point to a one pointer filler map. The object won't be rooted, but
|
// may point to a one pointer filler map. The object won't be rooted, but
|
||||||
// our heap verification code could stumble across it.
|
// our heap verification code could stumble across it.
|
||||||
V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(IsolateRoot isolate) const;
|
V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(
|
||||||
|
PtrComprCageBase cage_base) const;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
Object SlowReverseLookup(Object value);
|
Object SlowReverseLookup(Object value);
|
||||||
|
@@ -29,26 +29,26 @@ SMI_ACCESSORS(ObjectBoilerplateDescription, flags,
               FixedArray::OffsetOfElementAt(kLiteralTypeOffset))

 Object ObjectBoilerplateDescription::name(int index) const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return name(isolate, index);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return name(cage_base, index);
 }

-Object ObjectBoilerplateDescription::name(IsolateRoot isolate,
+Object ObjectBoilerplateDescription::name(PtrComprCageBase cage_base,
                                           int index) const {
   // get() already checks for out of bounds access, but we do not want to allow
   // access to the last element, if it is the number of properties.
   DCHECK_NE(size(), index);
-  return get(isolate, 2 * index + kDescriptionStartIndex);
+  return get(cage_base, 2 * index + kDescriptionStartIndex);
 }

 Object ObjectBoilerplateDescription::value(int index) const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return value(isolate, index);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return value(cage_base, index);
 }

-Object ObjectBoilerplateDescription::value(IsolateRoot isolate,
+Object ObjectBoilerplateDescription::value(PtrComprCageBase cage_base,
                                            int index) const {
-  return get(isolate, 2 * index + 1 + kDescriptionStartIndex);
+  return get(cage_base, 2 * index + 1 + kDescriptionStartIndex);
 }

 void ObjectBoilerplateDescription::set_key_value(int index, Object key,
@@ -28,10 +28,10 @@ class ClassLiteral;
 class ObjectBoilerplateDescription : public FixedArray {
  public:
   inline Object name(int index) const;
-  inline Object name(IsolateRoot isolate, int index) const;
+  inline Object name(PtrComprCageBase cage_base, int index) const;

   inline Object value(int index) const;
-  inline Object value(IsolateRoot isolate, int index) const;
+  inline Object value(PtrComprCageBase cage_base, int index) const;

   inline void set_key_value(int index, Object key, Object value);

@@ -107,14 +107,14 @@ BIT_FIELD_ACCESSORS(Map, bit_field3, construction_counter,

 DEF_GETTER(Map, GetNamedInterceptor, InterceptorInfo) {
   DCHECK(has_named_interceptor());
-  FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate);
-  return InterceptorInfo::cast(info.GetNamedPropertyHandler(isolate));
+  FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
+  return InterceptorInfo::cast(info.GetNamedPropertyHandler(cage_base));
 }

 DEF_GETTER(Map, GetIndexedInterceptor, InterceptorInfo) {
   DCHECK(has_indexed_interceptor());
-  FunctionTemplateInfo info = GetFunctionTemplateInfo(isolate);
-  return InterceptorInfo::cast(info.GetIndexedPropertyHandler(isolate));
+  FunctionTemplateInfo info = GetFunctionTemplateInfo(cage_base);
+  return InterceptorInfo::cast(info.GetIndexedPropertyHandler(cage_base));
 }

 bool Map::IsMostGeneralFieldType(Representation representation,
@@ -657,19 +657,18 @@ void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
 #endif
 }

-bool Map::ConcurrentIsMap(IsolateRoot isolate, const Object& object) const {
-  return object.IsHeapObject() && HeapObject::cast(object).map(isolate) ==
-                                      GetReadOnlyRoots(isolate).meta_map();
+bool Map::ConcurrentIsMap(PtrComprCageBase cage_base,
+                          const Object& object) const {
+  return object.IsHeapObject() && HeapObject::cast(object).map(cage_base) ==
+                                      GetReadOnlyRoots(cage_base).meta_map();
 }

 DEF_GETTER(Map, GetBackPointer, HeapObject) {
-  Object object = constructor_or_back_pointer(isolate);
-  if (ConcurrentIsMap(isolate, object)) {
+  Object object = constructor_or_back_pointer(cage_base);
+  if (ConcurrentIsMap(cage_base, object)) {
     return Map::cast(object);
   }
-  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
-  // i::GetIsolateForPtrCompr(HeapObject).
-  return GetReadOnlyRoots(isolate).undefined_value();
+  return GetReadOnlyRoots(cage_base).undefined_value();
 }

 void Map::SetBackPointer(HeapObject value, WriteBarrierMode mode) {
@@ -709,11 +708,11 @@ bool Map::IsPrototypeValidityCellValid() const {
 }

 DEF_GETTER(Map, GetConstructor, Object) {
-  Object maybe_constructor = constructor_or_back_pointer(isolate);
+  Object maybe_constructor = constructor_or_back_pointer(cage_base);
   // Follow any back pointers.
-  while (ConcurrentIsMap(isolate, maybe_constructor)) {
+  while (ConcurrentIsMap(cage_base, maybe_constructor)) {
     maybe_constructor =
-        Map::cast(maybe_constructor).constructor_or_back_pointer(isolate);
+        Map::cast(maybe_constructor).constructor_or_back_pointer(cage_base);
   }
   return maybe_constructor;
 }
@@ -730,13 +729,13 @@ Object Map::TryGetConstructor(Isolate* isolate, int max_steps) {
 }

 DEF_GETTER(Map, GetFunctionTemplateInfo, FunctionTemplateInfo) {
-  Object constructor = GetConstructor(isolate);
-  if (constructor.IsJSFunction(isolate)) {
+  Object constructor = GetConstructor(cage_base);
+  if (constructor.IsJSFunction(cage_base)) {
     // TODO(ishell): IsApiFunction(isolate) and get_api_func_data(isolate)
-    DCHECK(JSFunction::cast(constructor).shared(isolate).IsApiFunction());
-    return JSFunction::cast(constructor).shared(isolate).get_api_func_data();
+    DCHECK(JSFunction::cast(constructor).shared(cage_base).IsApiFunction());
+    return JSFunction::cast(constructor).shared(cage_base).get_api_func_data();
   }
-  DCHECK(constructor.IsFunctionTemplateInfo(isolate));
+  DCHECK(constructor.IsFunctionTemplateInfo(cage_base));
   return FunctionTemplateInfo::cast(constructor);
 }

@@ -791,7 +790,7 @@ int NormalizedMapCache::GetIndex(Handle<Map> map) {
 }

 DEF_GETTER(HeapObject, IsNormalizedMapCache, bool) {
-  if (!IsWeakFixedArray(isolate)) return false;
+  if (!IsWeakFixedArray(cage_base)) return false;
   if (WeakFixedArray::cast(*this).length() != NormalizedMapCache::kEntries) {
     return false;
   }
@@ -943,7 +943,7 @@ class Map : public HeapObject {

   // This is the equivalent of IsMap() but avoids reading the instance type so
   // it can be used concurrently without acquire load.
-  V8_INLINE bool ConcurrentIsMap(IsolateRoot isolate,
+  V8_INLINE bool ConcurrentIsMap(PtrComprCageBase cage_base,
                                  const Object& object) const;

   // Use the high-level instance_descriptors/SetInstanceDescriptors instead.
@@ -976,7 +976,8 @@ class NormalizedMapCache : public WeakFixedArray {
   DECL_VERIFIER(NormalizedMapCache)

  private:
-  friend bool HeapObject::IsNormalizedMapCache(IsolateRoot isolate) const;
+  friend bool HeapObject::IsNormalizedMapCache(
+      PtrComprCageBase cage_base) const;

   static const int kEntries = 64;

@@ -78,13 +78,14 @@ HeapObjectReference HeapObjectReference::From(Object object,
 }

 // static
-HeapObjectReference HeapObjectReference::ClearedValue(IsolateRoot isolate) {
+HeapObjectReference HeapObjectReference::ClearedValue(
+    PtrComprCageBase cage_base) {
   // Construct cleared weak ref value.
 #ifdef V8_COMPRESS_POINTERS
   // This is necessary to make pointer decompression computation also
   // suitable for cleared weak references.
   Address raw_value =
-      DecompressTaggedPointer(isolate, kClearedWeakHeapObjectLower32);
+      DecompressTaggedPointer(cage_base, kClearedWeakHeapObjectLower32);
 #else
   Address raw_value = kClearedWeakHeapObjectLower32;
 #endif
@@ -54,7 +54,7 @@ class HeapObjectReference : public MaybeObject {
   V8_INLINE static HeapObjectReference From(Object object,
                                             HeapObjectReferenceType type);

-  V8_INLINE static HeapObjectReference ClearedValue(IsolateRoot isolate);
+  V8_INLINE static HeapObjectReference ClearedValue(PtrComprCageBase cage_base);

   template <typename THeapObjectSlot>
   V8_INLINE static void Update(THeapObjectSlot slot, HeapObject value);
@@ -56,7 +56,7 @@ void Symbol::set_is_private_name() {
 }

 DEF_GETTER(Name, IsUniqueName, bool) {
-  uint32_t type = map(isolate).instance_type();
+  uint32_t type = map(cage_base).instance_type();
   bool result = (type & (kIsNotStringMask | kIsNotInternalizedMask)) !=
                 (kStringTag | kNotInternalizedTag);
   SLOW_DCHECK(result == HeapObject::IsUniqueName());
@@ -104,23 +104,23 @@ uint32_t Name::hash() const {
 }

 DEF_GETTER(Name, IsInterestingSymbol, bool) {
-  return IsSymbol(isolate) && Symbol::cast(*this).is_interesting_symbol();
+  return IsSymbol(cage_base) && Symbol::cast(*this).is_interesting_symbol();
 }

 DEF_GETTER(Name, IsPrivate, bool) {
-  return this->IsSymbol(isolate) && Symbol::cast(*this).is_private();
+  return this->IsSymbol(cage_base) && Symbol::cast(*this).is_private();
 }

 DEF_GETTER(Name, IsPrivateName, bool) {
   bool is_private_name =
-      this->IsSymbol(isolate) && Symbol::cast(*this).is_private_name();
+      this->IsSymbol(cage_base) && Symbol::cast(*this).is_private_name();
   DCHECK_IMPLIES(is_private_name, IsPrivate());
   return is_private_name;
 }

 DEF_GETTER(Name, IsPrivateBrand, bool) {
   bool is_private_brand =
-      this->IsSymbol(isolate) && Symbol::cast(*this).is_private_brand();
+      this->IsSymbol(cage_base) && Symbol::cast(*this).is_private_brand();
   DCHECK_IMPLIES(is_private_brand, IsPrivateName());
   return is_private_brand;
 }
@@ -86,14 +86,14 @@
 // parameter.
 #define DECL_GETTER(name, type) \
   inline type name() const; \
-  inline type name(IsolateRoot isolate) const;
+  inline type name(PtrComprCageBase cage_base) const;

 #define DEF_GETTER(holder, name, type) \
   type holder::name() const { \
-    IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
-    return holder::name(isolate); \
+    PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
+    return holder::name(cage_base); \
   } \
-  type holder::name(IsolateRoot isolate) const
+  type holder::name(PtrComprCageBase cage_base) const

 #define DECL_SETTER(name, type) \
   inline void set_##name(type value, \
@@ -105,7 +105,7 @@

 #define DECL_ACCESSORS_LOAD_TAG(name, type, tag_type) \
   inline type name(tag_type tag) const; \
-  inline type name(IsolateRoot isolate, tag_type) const;
+  inline type name(PtrComprCageBase cage_base, tag_type) const;

 #define DECL_ACCESSORS_STORE_TAG(name, type, tag_type) \
   inline void set_##name(type value, tag_type, \
@@ -179,7 +179,7 @@
 #define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
                            set_condition) \
   DEF_GETTER(holder, name, type) { \
-    type value = TaggedField<type, offset>::load(isolate, *this); \
+    type value = TaggedField<type, offset>::load(cage_base, *this); \
     DCHECK(get_condition); \
     return value; \
   } \
@@ -215,11 +215,11 @@
 #define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
                                    set_condition) \
   type holder::name(RelaxedLoadTag tag) const { \
-    IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
-    return holder::name(isolate, tag); \
+    PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
+    return holder::name(cage_base, tag); \
   } \
-  type holder::name(IsolateRoot isolate, RelaxedLoadTag) const { \
-    type value = TaggedField<type, offset>::Relaxed_Load(isolate, *this); \
+  type holder::name(PtrComprCageBase cage_base, RelaxedLoadTag) const { \
+    type value = TaggedField<type, offset>::Relaxed_Load(cage_base, *this); \
     DCHECK(get_condition); \
     return value; \
   } \
@@ -236,22 +236,22 @@
 #define RELAXED_ACCESSORS(holder, name, type, offset) \
   RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, true)

 #define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \
                                            get_condition, set_condition) \
   type holder::name(AcquireLoadTag tag) const { \
-    IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
-    return holder::name(isolate, tag); \
+    PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
+    return holder::name(cage_base, tag); \
   } \
-  type holder::name(IsolateRoot isolate, AcquireLoadTag) const { \
-    type value = TaggedField<type, offset>::Acquire_Load(isolate, *this); \
+  type holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const { \
+    type value = TaggedField<type, offset>::Acquire_Load(cage_base, *this); \
     DCHECK(get_condition); \
     return value; \
   } \
   void holder::set_##name(type value, ReleaseStoreTag, \
                           WriteBarrierMode mode) { \
     DCHECK(set_condition); \
     TaggedField<type, offset>::Release_Store(*this, value); \
     CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
   }

 #define RELEASE_ACQUIRE_ACCESSORS_CHECKED(holder, name, type, offset, \
@@ -266,7 +266,7 @@
                                set_condition) \
   DEF_GETTER(holder, name, MaybeObject) { \
     MaybeObject value = \
-        TaggedField<MaybeObject, offset>::load(isolate, *this); \
+        TaggedField<MaybeObject, offset>::load(cage_base, *this); \
     DCHECK(get_condition); \
     return value; \
   } \
@@ -282,23 +282,23 @@
 #define WEAK_ACCESSORS(holder, name, offset) \
   WEAK_ACCESSORS_CHECKED(holder, name, offset, true)

 #define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED2(holder, name, offset, \
                                                 get_condition, set_condition) \
   MaybeObject holder::name(AcquireLoadTag tag) const { \
-    IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
-    return holder::name(isolate, tag); \
+    PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
+    return holder::name(cage_base, tag); \
   } \
-  MaybeObject holder::name(IsolateRoot isolate, AcquireLoadTag) const { \
+  MaybeObject holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const { \
     MaybeObject value = \
-        TaggedField<MaybeObject, offset>::Acquire_Load(isolate, *this); \
+        TaggedField<MaybeObject, offset>::Acquire_Load(cage_base, *this); \
     DCHECK(get_condition); \
     return value; \
   } \
   void holder::set_##name(MaybeObject value, ReleaseStoreTag, \
                           WriteBarrierMode mode) { \
     DCHECK(set_condition); \
     TaggedField<MaybeObject, offset>::Release_Store(*this, value); \
     CONDITIONAL_WEAK_WRITE_BARRIER(*this, offset, value, mode); \
   }

 #define RELEASE_ACQUIRE_WEAK_ACCESSORS_CHECKED(holder, name, offset, \
@@ -380,9 +380,9 @@
   return instance_type == forinstancetype; \
 }

 #define TYPE_CHECKER(type, ...) \
   DEF_GETTER(HeapObject, Is##type, bool) { \
-    return InstanceTypeChecker::Is##type(map(isolate).instance_type()); \
+    return InstanceTypeChecker::Is##type(map(cage_base).instance_type()); \
   }

 #define RELAXED_INT16_ACCESSORS(holder, name, offset) \
@@ -65,19 +65,19 @@ int PropertyDetails::field_width_in_words() const {
 }

 DEF_GETTER(HeapObject, IsClassBoilerplate, bool) {
-  return IsFixedArrayExact(isolate);
+  return IsFixedArrayExact(cage_base);
 }

 bool Object::IsTaggedIndex() const {
   return IsSmi() && TaggedIndex::IsValid(TaggedIndex(ptr()).value());
 }

 #define IS_TYPE_FUNCTION_DEF(type_) \
   bool Object::Is##type_() const { \
     return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \
   } \
-  bool Object::Is##type_(IsolateRoot isolate) const { \
-    return IsHeapObject() && HeapObject::cast(*this).Is##type_(isolate); \
+  bool Object::Is##type_(PtrComprCageBase cage_base) const { \
+    return IsHeapObject() && HeapObject::cast(*this).Is##type_(cage_base); \
   }
 HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
 IS_TYPE_FUNCTION_DEF(HashTableBase)
@@ -148,127 +148,125 @@ bool HeapObject::IsNullOrUndefined() const {
 }

 DEF_GETTER(HeapObject, IsUniqueName, bool) {
-  return IsInternalizedString(isolate) || IsSymbol(isolate);
+  return IsInternalizedString(cage_base) || IsSymbol(cage_base);
 }

 DEF_GETTER(HeapObject, IsFunction, bool) {
   return IsJSFunctionOrBoundFunction();
 }

-DEF_GETTER(HeapObject, IsCallable, bool) { return map(isolate).is_callable(); }
+DEF_GETTER(HeapObject, IsCallable, bool) {
+  return map(cage_base).is_callable();
+}

 DEF_GETTER(HeapObject, IsCallableJSProxy, bool) {
-  return IsCallable(isolate) && IsJSProxy(isolate);
+  return IsCallable(cage_base) && IsJSProxy(cage_base);
 }

 DEF_GETTER(HeapObject, IsCallableApiObject, bool) {
-  InstanceType type = map(isolate).instance_type();
-  return IsCallable(isolate) &&
+  InstanceType type = map(cage_base).instance_type();
+  return IsCallable(cage_base) &&
          (type == JS_API_OBJECT_TYPE || type == JS_SPECIAL_API_OBJECT_TYPE);
 }

 DEF_GETTER(HeapObject, IsNonNullForeign, bool) {
-  return IsForeign(isolate) &&
+  return IsForeign(cage_base) &&
         Foreign::cast(*this).foreign_address() != kNullAddress;
 }

 DEF_GETTER(HeapObject, IsConstructor, bool) {
-  return map(isolate).is_constructor();
+  return map(cage_base).is_constructor();
 }

 DEF_GETTER(HeapObject, IsSourceTextModuleInfo, bool) {
-  // Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
-  // i::GetIsolateForPtrCompr(HeapObject).
-  return map(isolate) == GetReadOnlyRoots(isolate).module_info_map();
+  return map(cage_base) == GetReadOnlyRoots(cage_base).module_info_map();
 }

 DEF_GETTER(HeapObject, IsConsString, bool) {
-  if (!IsString(isolate)) return false;
-  return StringShape(String::cast(*this).map(isolate)).IsCons();
+  if (!IsString(cage_base)) return false;
+  return StringShape(String::cast(*this).map(cage_base)).IsCons();
 }

 DEF_GETTER(HeapObject, IsThinString, bool) {
-  if (!IsString(isolate)) return false;
-  return StringShape(String::cast(*this).map(isolate)).IsThin();
+  if (!IsString(cage_base)) return false;
+  return StringShape(String::cast(*this).map(cage_base)).IsThin();
 }

 DEF_GETTER(HeapObject, IsSlicedString, bool) {
-  if (!IsString(isolate)) return false;
-  return StringShape(String::cast(*this).map(isolate)).IsSliced();
+  if (!IsString(cage_base)) return false;
+  return StringShape(String::cast(*this).map(cage_base)).IsSliced();
 }

 DEF_GETTER(HeapObject, IsSeqString, bool) {
-  if (!IsString(isolate)) return false;
-  return StringShape(String::cast(*this).map(isolate)).IsSequential();
+  if (!IsString(cage_base)) return false;
+  return StringShape(String::cast(*this).map(cage_base)).IsSequential();
 }

 DEF_GETTER(HeapObject, IsSeqOneByteString, bool) {
-  if (!IsString(isolate)) return false;
-  return StringShape(String::cast(*this).map(isolate)).IsSequential() &&
-         String::cast(*this).IsOneByteRepresentation(isolate);
+  if (!IsString(cage_base)) return false;
+  return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
+         String::cast(*this).IsOneByteRepresentation(cage_base);
 }

 DEF_GETTER(HeapObject, IsSeqTwoByteString, bool) {
-  if (!IsString(isolate)) return false;
-  return StringShape(String::cast(*this).map(isolate)).IsSequential() &&
-         String::cast(*this).IsTwoByteRepresentation(isolate);
+  if (!IsString(cage_base)) return false;
+  return StringShape(String::cast(*this).map(cage_base)).IsSequential() &&
+         String::cast(*this).IsTwoByteRepresentation(cage_base);
 }

 DEF_GETTER(HeapObject, IsExternalOneByteString, bool) {
-  if (!IsString(isolate)) return false;
-  return StringShape(String::cast(*this).map(isolate)).IsExternal() &&
-         String::cast(*this).IsOneByteRepresentation(isolate);
+  if (!IsString(cage_base)) return false;
+  return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
+         String::cast(*this).IsOneByteRepresentation(cage_base);
 }

 DEF_GETTER(HeapObject, IsExternalTwoByteString, bool) {
-  if (!IsString(isolate)) return false;
-  return StringShape(String::cast(*this).map(isolate)).IsExternal() &&
-         String::cast(*this).IsTwoByteRepresentation(isolate);
+  if (!IsString(cage_base)) return false;
+  return StringShape(String::cast(*this).map(cage_base)).IsExternal() &&
+         String::cast(*this).IsTwoByteRepresentation(cage_base);
 }

 bool Object::IsNumber() const {
   if (IsSmi()) return true;
   HeapObject this_heap_object = HeapObject::cast(*this);
-  IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
-  return this_heap_object.IsHeapNumber(isolate);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
+  return this_heap_object.IsHeapNumber(cage_base);
 }

-bool Object::IsNumber(IsolateRoot isolate) const {
-  return IsSmi() || IsHeapNumber(isolate);
+bool Object::IsNumber(PtrComprCageBase cage_base) const {
+  return IsSmi() || IsHeapNumber(cage_base);
 }

 bool Object::IsNumeric() const {
   if (IsSmi()) return true;
   HeapObject this_heap_object = HeapObject::cast(*this);
-  IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
-  return this_heap_object.IsHeapNumber(isolate) ||
-         this_heap_object.IsBigInt(isolate);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
+  return this_heap_object.IsHeapNumber(cage_base) ||
+         this_heap_object.IsBigInt(cage_base);
 }

-bool Object::IsNumeric(IsolateRoot isolate) const {
-  return IsNumber(isolate) || IsBigInt(isolate);
+bool Object::IsNumeric(PtrComprCageBase cage_base) const {
+  return IsNumber(cage_base) || IsBigInt(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsFreeSpaceOrFiller, bool) {
|
DEF_GETTER(HeapObject, IsFreeSpaceOrFiller, bool) {
|
||||||
InstanceType instance_type = map(isolate).instance_type();
|
InstanceType instance_type = map(cage_base).instance_type();
|
||||||
return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
|
return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsArrayList, bool) {
|
DEF_GETTER(HeapObject, IsArrayList, bool) {
|
||||||
// Can't use ReadOnlyRoots(isolate) as this isolate could be produced by
|
ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
|
||||||
// i::GetIsolateForPtrCompr(HeapObject).
|
|
||||||
ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
|
|
||||||
return *this == roots.empty_fixed_array() ||
|
return *this == roots.empty_fixed_array() ||
|
||||||
map(isolate) == roots.array_list_map();
|
map(cage_base) == roots.array_list_map();
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsRegExpMatchInfo, bool) {
|
DEF_GETTER(HeapObject, IsRegExpMatchInfo, bool) {
|
||||||
return IsFixedArrayExact(isolate);
|
return IsFixedArrayExact(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
|
DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
|
||||||
// Must be a fixed array.
|
// Must be a fixed array.
|
||||||
if (!IsFixedArrayExact(isolate)) return false;
|
if (!IsFixedArrayExact(cage_base)) return false;
|
||||||
|
|
||||||
// There's no sure way to detect the difference between a fixed array and
|
// There's no sure way to detect the difference between a fixed array and
|
||||||
// a deoptimization data array. Since this is used for asserts we can
|
// a deoptimization data array. Since this is used for asserts we can
|
||||||
@ -282,14 +280,14 @@ DEF_GETTER(HeapObject, IsDeoptimizationData, bool) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsHandlerTable, bool) {
|
DEF_GETTER(HeapObject, IsHandlerTable, bool) {
|
||||||
if (!IsFixedArrayExact(isolate)) return false;
|
if (!IsFixedArrayExact(cage_base)) return false;
|
||||||
// There's actually no way to see the difference between a fixed array and
|
// There's actually no way to see the difference between a fixed array and
|
||||||
// a handler table array.
|
// a handler table array.
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsTemplateList, bool) {
|
DEF_GETTER(HeapObject, IsTemplateList, bool) {
|
||||||
if (!IsFixedArrayExact(isolate)) return false;
|
if (!IsFixedArrayExact(cage_base)) return false;
|
||||||
// There's actually no way to see the difference between a fixed array and
|
// There's actually no way to see the difference between a fixed array and
|
||||||
// a template list.
|
// a template list.
|
||||||
if (FixedArray::cast(*this).length() < 1) return false;
|
if (FixedArray::cast(*this).length() < 1) return false;
|
||||||
@ -297,84 +295,86 @@ DEF_GETTER(HeapObject, IsTemplateList, bool) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsDependentCode, bool) {
|
DEF_GETTER(HeapObject, IsDependentCode, bool) {
|
||||||
if (!IsWeakFixedArray(isolate)) return false;
|
if (!IsWeakFixedArray(cage_base)) return false;
|
||||||
// There's actually no way to see the difference between a weak fixed array
|
// There's actually no way to see the difference between a weak fixed array
|
||||||
// and a dependent codes array.
|
// and a dependent codes array.
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
|
DEF_GETTER(HeapObject, IsOSROptimizedCodeCache, bool) {
|
||||||
if (!IsWeakFixedArray(isolate)) return false;
|
if (!IsWeakFixedArray(cage_base)) return false;
|
||||||
// There's actually no way to see the difference between a weak fixed array
|
// There's actually no way to see the difference between a weak fixed array
|
||||||
// and a osr optimized code cache.
|
// and a osr optimized code cache.
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsAbstractCode, bool) {
|
DEF_GETTER(HeapObject, IsAbstractCode, bool) {
|
||||||
return IsBytecodeArray(isolate) || IsCode(isolate);
|
return IsBytecodeArray(cage_base) || IsCode(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsStringWrapper, bool) {
|
DEF_GETTER(HeapObject, IsStringWrapper, bool) {
|
||||||
return IsJSPrimitiveWrapper(isolate) &&
|
return IsJSPrimitiveWrapper(cage_base) &&
|
||||||
JSPrimitiveWrapper::cast(*this).value().IsString(isolate);
|
JSPrimitiveWrapper::cast(*this).value().IsString(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsBooleanWrapper, bool) {
|
DEF_GETTER(HeapObject, IsBooleanWrapper, bool) {
|
||||||
return IsJSPrimitiveWrapper(isolate) &&
|
return IsJSPrimitiveWrapper(cage_base) &&
|
||||||
JSPrimitiveWrapper::cast(*this).value().IsBoolean(isolate);
|
JSPrimitiveWrapper::cast(*this).value().IsBoolean(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsScriptWrapper, bool) {
|
DEF_GETTER(HeapObject, IsScriptWrapper, bool) {
|
||||||
return IsJSPrimitiveWrapper(isolate) &&
|
return IsJSPrimitiveWrapper(cage_base) &&
|
||||||
JSPrimitiveWrapper::cast(*this).value().IsScript(isolate);
|
JSPrimitiveWrapper::cast(*this).value().IsScript(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsNumberWrapper, bool) {
|
DEF_GETTER(HeapObject, IsNumberWrapper, bool) {
|
||||||
return IsJSPrimitiveWrapper(isolate) &&
|
return IsJSPrimitiveWrapper(cage_base) &&
|
||||||
JSPrimitiveWrapper::cast(*this).value().IsNumber(isolate);
|
JSPrimitiveWrapper::cast(*this).value().IsNumber(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsBigIntWrapper, bool) {
|
DEF_GETTER(HeapObject, IsBigIntWrapper, bool) {
|
||||||
return IsJSPrimitiveWrapper(isolate) &&
|
return IsJSPrimitiveWrapper(cage_base) &&
|
||||||
JSPrimitiveWrapper::cast(*this).value().IsBigInt(isolate);
|
JSPrimitiveWrapper::cast(*this).value().IsBigInt(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsSymbolWrapper, bool) {
|
DEF_GETTER(HeapObject, IsSymbolWrapper, bool) {
|
||||||
return IsJSPrimitiveWrapper(isolate) &&
|
return IsJSPrimitiveWrapper(cage_base) &&
|
||||||
JSPrimitiveWrapper::cast(*this).value().IsSymbol(isolate);
|
JSPrimitiveWrapper::cast(*this).value().IsSymbol(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(isolate); }
|
DEF_GETTER(HeapObject, IsStringSet, bool) { return IsHashTable(cage_base); }
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(isolate); }
|
DEF_GETTER(HeapObject, IsObjectHashSet, bool) { return IsHashTable(cage_base); }
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsCompilationCacheTable, bool) {
|
DEF_GETTER(HeapObject, IsCompilationCacheTable, bool) {
|
||||||
return IsHashTable(isolate);
|
return IsHashTable(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(isolate); }
|
DEF_GETTER(HeapObject, IsMapCache, bool) { return IsHashTable(cage_base); }
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsObjectHashTable, bool) { return IsHashTable(isolate); }
|
DEF_GETTER(HeapObject, IsObjectHashTable, bool) {
|
||||||
|
return IsHashTable(cage_base);
|
||||||
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(isolate); }
|
DEF_GETTER(HeapObject, IsHashTableBase, bool) { return IsHashTable(cage_base); }
|
||||||
|
|
||||||
#if V8_ENABLE_WEBASSEMBLY
|
#if V8_ENABLE_WEBASSEMBLY
|
||||||
DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) {
|
DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) {
|
||||||
// It is not possible to check for the existence of certain properties on the
|
// It is not possible to check for the existence of certain properties on the
|
||||||
// underlying {JSReceiver} here because that requires calling handlified code.
|
// underlying {JSReceiver} here because that requires calling handlified code.
|
||||||
return IsJSReceiver(isolate);
|
return IsJSReceiver(cage_base);
|
||||||
}
|
}
|
||||||
#endif // V8_ENABLE_WEBASSEMBLY
|
#endif // V8_ENABLE_WEBASSEMBLY
|
||||||
|
|
||||||
bool Object::IsPrimitive() const {
|
bool Object::IsPrimitive() const {
|
||||||
if (IsSmi()) return true;
|
if (IsSmi()) return true;
|
||||||
HeapObject this_heap_object = HeapObject::cast(*this);
|
HeapObject this_heap_object = HeapObject::cast(*this);
|
||||||
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
|
PtrComprCageBase cage_base = GetPtrComprCageBase(this_heap_object);
|
||||||
return this_heap_object.map(isolate).IsPrimitiveMap();
|
return this_heap_object.map(cage_base).IsPrimitiveMap();
|
||||||
}
|
}
|
||||||
|
|
||||||
bool Object::IsPrimitive(IsolateRoot isolate) const {
|
bool Object::IsPrimitive(PtrComprCageBase cage_base) const {
|
||||||
return IsSmi() || HeapObject::cast(*this).map(isolate).IsPrimitiveMap();
|
return IsSmi() || HeapObject::cast(*this).map(cage_base).IsPrimitiveMap();
|
||||||
}
|
}
|
||||||
|
|
||||||
// static
|
// static
|
||||||
@ -387,24 +387,24 @@ Maybe<bool> Object::IsArray(Handle<Object> object) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsUndetectable, bool) {
|
DEF_GETTER(HeapObject, IsUndetectable, bool) {
|
||||||
return map(isolate).is_undetectable();
|
return map(cage_base).is_undetectable();
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, IsAccessCheckNeeded, bool) {
|
DEF_GETTER(HeapObject, IsAccessCheckNeeded, bool) {
|
||||||
if (IsJSGlobalProxy(isolate)) {
|
if (IsJSGlobalProxy(cage_base)) {
|
||||||
const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
|
const JSGlobalProxy proxy = JSGlobalProxy::cast(*this);
|
||||||
JSGlobalObject global = proxy.GetIsolate()->context().global_object();
|
JSGlobalObject global = proxy.GetIsolate()->context().global_object();
|
||||||
return proxy.IsDetachedFrom(global);
|
return proxy.IsDetachedFrom(global);
|
||||||
}
|
}
|
||||||
return map(isolate).is_access_check_needed();
|
return map(cage_base).is_access_check_needed();
|
||||||
}
|
}
|
||||||
|
|
||||||
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
|
#define MAKE_STRUCT_PREDICATE(NAME, Name, name) \
|
||||||
bool Object::Is##Name() const { \
|
bool Object::Is##Name() const { \
|
||||||
return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \
|
return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \
|
||||||
} \
|
} \
|
||||||
bool Object::Is##Name(IsolateRoot isolate) const { \
|
bool Object::Is##Name(PtrComprCageBase cage_base) const { \
|
||||||
return IsHeapObject() && HeapObject::cast(*this).Is##Name(isolate); \
|
return IsHeapObject() && HeapObject::cast(*this).Is##Name(cage_base); \
|
||||||
}
|
}
|
||||||
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
|
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
|
||||||
#undef MAKE_STRUCT_PREDICATE
|
#undef MAKE_STRUCT_PREDICATE
|
||||||
@ -467,17 +467,17 @@ bool Object::FilterKey(PropertyFilter filter) {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
Representation Object::OptimalRepresentation(IsolateRoot isolate) const {
|
Representation Object::OptimalRepresentation(PtrComprCageBase cage_base) const {
|
||||||
if (!FLAG_track_fields) return Representation::Tagged();
|
if (!FLAG_track_fields) return Representation::Tagged();
|
||||||
if (IsSmi()) {
|
if (IsSmi()) {
|
||||||
return Representation::Smi();
|
return Representation::Smi();
|
||||||
}
|
}
|
||||||
HeapObject heap_object = HeapObject::cast(*this);
|
HeapObject heap_object = HeapObject::cast(*this);
|
||||||
if (FLAG_track_double_fields && heap_object.IsHeapNumber(isolate)) {
|
if (FLAG_track_double_fields && heap_object.IsHeapNumber(cage_base)) {
|
||||||
return Representation::Double();
|
return Representation::Double();
|
||||||
} else if (FLAG_track_computed_fields &&
|
} else if (FLAG_track_computed_fields &&
|
||||||
heap_object.IsUninitialized(
|
heap_object.IsUninitialized(
|
||||||
heap_object.GetReadOnlyRoots(isolate))) {
|
heap_object.GetReadOnlyRoots(cage_base))) {
|
||||||
return Representation::None();
|
return Representation::None();
|
||||||
} else if (FLAG_track_heap_object_fields) {
|
} else if (FLAG_track_heap_object_fields) {
|
||||||
return Representation::HeapObject();
|
return Representation::HeapObject();
|
||||||
@ -486,9 +486,9 @@ Representation Object::OptimalRepresentation(IsolateRoot isolate) const {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ElementsKind Object::OptimalElementsKind(IsolateRoot isolate) const {
|
ElementsKind Object::OptimalElementsKind(PtrComprCageBase cage_base) const {
|
||||||
if (IsSmi()) return PACKED_SMI_ELEMENTS;
|
if (IsSmi()) return PACKED_SMI_ELEMENTS;
|
||||||
if (IsNumber(isolate)) return PACKED_DOUBLE_ELEMENTS;
|
if (IsNumber(cage_base)) return PACKED_DOUBLE_ELEMENTS;
|
||||||
return PACKED_ELEMENTS;
|
return PACKED_ELEMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -631,9 +631,10 @@ void Object::InitExternalPointerField(size_t offset, Isolate* isolate,
|
|||||||
i::InitExternalPointerField(field_address(offset), isolate, value, tag);
|
i::InitExternalPointerField(field_address(offset), isolate, value, tag);
|
||||||
}
|
}
|
||||||
|
|
||||||
Address Object::ReadExternalPointerField(size_t offset, IsolateRoot isolate,
|
Address Object::ReadExternalPointerField(size_t offset,
|
||||||
|
PtrComprCageBase isolate_root,
|
||||||
ExternalPointerTag tag) const {
|
ExternalPointerTag tag) const {
|
||||||
return i::ReadExternalPointerField(field_address(offset), isolate, tag);
|
return i::ReadExternalPointerField(field_address(offset), isolate_root, tag);
|
||||||
}
|
}
|
||||||
|
|
||||||
void Object::WriteExternalPointerField(size_t offset, Isolate* isolate,
|
void Object::WriteExternalPointerField(size_t offset, Isolate* isolate,
|
||||||
@ -687,16 +688,16 @@ ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
|
|||||||
return ReadOnlyHeap::GetReadOnlyRoots(*this);
|
return ReadOnlyHeap::GetReadOnlyRoots(*this);
|
||||||
}
|
}
|
||||||
|
|
||||||
ReadOnlyRoots HeapObject::GetReadOnlyRoots(IsolateRoot isolate) const {
|
ReadOnlyRoots HeapObject::GetReadOnlyRoots(PtrComprCageBase cage_base) const {
|
||||||
#ifdef V8_COMPRESS_POINTERS
|
#ifdef V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE
|
||||||
DCHECK_NE(isolate.address(), 0);
|
DCHECK_NE(cage_base.address(), 0);
|
||||||
return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address()));
|
return ReadOnlyRoots(Isolate::FromRootAddress(cage_base.address()));
|
||||||
#else
|
#else
|
||||||
return GetReadOnlyRoots();
|
return GetReadOnlyRoots();
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, map, Map) { return map_word(isolate).ToMap(); }
|
DEF_GETTER(HeapObject, map, Map) { return map_word(cage_base).ToMap(); }
|
||||||
|
|
||||||
void HeapObject::set_map(Map value) {
|
void HeapObject::set_map(Map value) {
|
||||||
#ifdef VERIFY_HEAP
|
#ifdef VERIFY_HEAP
|
||||||
@ -715,7 +716,7 @@ void HeapObject::set_map(Map value) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, synchronized_map, Map) {
|
DEF_GETTER(HeapObject, synchronized_map, Map) {
|
||||||
return synchronized_map_word(isolate).ToMap();
|
return synchronized_map_word(cage_base).ToMap();
|
||||||
}
|
}
|
||||||
|
|
||||||
void HeapObject::synchronized_set_map(Map value) {
|
void HeapObject::synchronized_set_map(Map value) {
|
||||||
@ -761,7 +762,7 @@ ObjectSlot HeapObject::map_slot() const {
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, map_word, MapWord) {
|
DEF_GETTER(HeapObject, map_word, MapWord) {
|
||||||
return MapField::Relaxed_Load(isolate, *this);
|
return MapField::Relaxed_Load(cage_base, *this);
|
||||||
}
|
}
|
||||||
|
|
||||||
void HeapObject::set_map_word(MapWord map_word) {
|
void HeapObject::set_map_word(MapWord map_word) {
|
||||||
@ -769,7 +770,7 @@ void HeapObject::set_map_word(MapWord map_word) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(HeapObject, synchronized_map_word, MapWord) {
|
DEF_GETTER(HeapObject, synchronized_map_word, MapWord) {
|
||||||
return MapField::Acquire_Load(isolate, *this);
|
return MapField::Acquire_Load(cage_base, *this);
|
||||||
}
|
}
|
||||||
|
|
||||||
void HeapObject::synchronized_set_map_word(MapWord map_word) {
|
void HeapObject::synchronized_set_map_word(MapWord map_word) {
|
||||||
|
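Note on the rename threaded through the hunks above: a PtrComprCageBase is the base address of the pointer-compression reservation, recoverable from any address inside it. The sketch below is a minimal, standalone model of that idea written for this note rather than taken from V8; cage_model, GetCageBase, Compress and Decompress are illustrative names, and the real code derives the base with its own helpers.

#include <cstdint>
#include <iostream>

namespace cage_model {

// Assume a 4 GB cage aligned to its own size, as pointer compression requires.
constexpr uint64_t kCageSize = uint64_t{4} * 1024 * 1024 * 1024;
constexpr uint64_t kCageBaseAlignment = kCageSize;

// Any address inside the cage maps back to the cage base by masking.
uint64_t GetCageBase(uint64_t any_address_in_cage) {
  return any_address_in_cage & ~(kCageBaseAlignment - 1);
}

// A compressed field stores only the 32-bit offset from the cage base.
uint32_t Compress(uint64_t full_address) {
  return static_cast<uint32_t>(full_address);
}

uint64_t Decompress(uint64_t cage_base, uint32_t compressed) {
  return cage_base + compressed;
}

}  // namespace cage_model

int main() {
  using namespace cage_model;
  const uint64_t cage_base = uint64_t{17} * kCageBaseAlignment;  // fake reservation
  const uint64_t object = cage_base + 0x12345678;                // fake object inside it
  const uint32_t field = Compress(object);
  // The base recovered from the object address decompresses the field correctly.
  std::cout << (Decompress(GetCageBase(object), field) == object) << "\n";  // 1
  return 0;
}

Under a shared cage this base is no longer the Isolate root, which is why the getters above stop calling it "isolate".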
@@ -5567,7 +5567,8 @@ Handle<Derived> HashTable<Derived, Shape>::NewInternal(
 }

 template <typename Derived, typename Shape>
-void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate, Derived new_table) {
+void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base,
+                                       Derived new_table) {
   DisallowGarbageCollection no_gc;
   WriteBarrierMode mode = new_table.GetWriteBarrierMode(no_gc);

@@ -5575,21 +5576,21 @@ void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate, Derived new_table) {

   // Copy prefix to new array.
   for (int i = kPrefixStartIndex; i < kElementsStartIndex; i++) {
-    new_table.set(i, get(isolate, i), mode);
+    new_table.set(i, get(cage_base, i), mode);
   }

   // Rehash the elements.
-  ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
+  ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
   for (InternalIndex i : this->IterateEntries()) {
     uint32_t from_index = EntryToIndex(i);
-    Object k = this->get(isolate, from_index);
+    Object k = this->get(cage_base, from_index);
     if (!IsKey(roots, k)) continue;
     uint32_t hash = Shape::HashForObject(roots, k);
     uint32_t insertion_index =
-        EntryToIndex(new_table.FindInsertionEntry(isolate, roots, hash));
-    new_table.set_key(insertion_index, get(isolate, from_index), mode);
+        EntryToIndex(new_table.FindInsertionEntry(cage_base, roots, hash));
+    new_table.set_key(insertion_index, get(cage_base, from_index), mode);
     for (int j = 1; j < Shape::kEntrySize; j++) {
-      new_table.set(insertion_index + j, get(isolate, from_index + j), mode);
+      new_table.set(insertion_index + j, get(cage_base, from_index + j), mode);
     }
   }
   new_table.SetNumberOfElements(NumberOfElements());
@@ -5631,10 +5632,10 @@ void HashTable<Derived, Shape>::Swap(InternalIndex entry1, InternalIndex entry2,
 }

 template <typename Derived, typename Shape>
-void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
+void HashTable<Derived, Shape>::Rehash(PtrComprCageBase cage_base) {
   DisallowGarbageCollection no_gc;
   WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
-  ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
+  ReadOnlyRoots roots = GetReadOnlyRoots(cage_base);
   uint32_t capacity = Capacity();
   bool done = false;
   for (int probe = 1; !done; probe++) {
@@ -5643,7 +5644,7 @@ void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
     done = true;
     for (InternalIndex current(0); current.raw_value() < capacity;
         /* {current} is advanced manually below, when appropriate.*/) {
-      Object current_key = KeyAt(isolate, current);
+      Object current_key = KeyAt(cage_base, current);
       if (!IsKey(roots, current_key)) {
         ++current;  // Advance to next entry.
         continue;
@@ -5653,7 +5654,7 @@ void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
         ++current;  // Advance to next entry.
         continue;
       }
-      Object target_key = KeyAt(isolate, target);
+      Object target_key = KeyAt(cage_base, target);
      if (!IsKey(roots, target_key) ||
          EntryForProbe(roots, target_key, probe, target) != target) {
        // Put the current element into the correct position.
@@ -5673,7 +5674,7 @@ void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
   HeapObject undefined = roots.undefined_value();
   Derived* self = static_cast<Derived*>(this);
   for (InternalIndex current : InternalIndex::Range(capacity)) {
-    if (KeyAt(isolate, current) == the_hole) {
+    if (KeyAt(cage_base, current) == the_hole) {
       self->set_key(EntryToIndex(current) + kEntryKeyIndex, undefined,
                     SKIP_WRITE_BARRIER);
     }
@@ -5764,15 +5765,14 @@ Handle<Derived> HashTable<Derived, Shape>::Shrink(Isolate* isolate,
 }

 template <typename Derived, typename Shape>
-InternalIndex HashTable<Derived, Shape>::FindInsertionEntry(IsolateRoot isolate,
-                                                            ReadOnlyRoots roots,
-                                                            uint32_t hash) {
+InternalIndex HashTable<Derived, Shape>::FindInsertionEntry(
+    PtrComprCageBase cage_base, ReadOnlyRoots roots, uint32_t hash) {
   uint32_t capacity = Capacity();
   uint32_t count = 1;
   // EnsureCapacity will guarantee the hash table is never full.
   for (InternalIndex entry = FirstProbe(hash, capacity);;
        entry = NextProbe(entry, count++, capacity)) {
-    if (!IsKey(roots, KeyAt(isolate, entry))) return entry;
+    if (!IsKey(roots, KeyAt(cage_base, entry))) return entry;
   }
 }

@@ -6080,14 +6080,14 @@ void ObjectHashTableBase<Derived, Shape>::FillEntriesWithHoles(
 }

 template <typename Derived, typename Shape>
-Object ObjectHashTableBase<Derived, Shape>::Lookup(IsolateRoot isolate,
+Object ObjectHashTableBase<Derived, Shape>::Lookup(PtrComprCageBase cage_base,
                                                    Handle<Object> key,
                                                    int32_t hash) {
   DisallowGarbageCollection no_gc;
-  ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate);
+  ReadOnlyRoots roots = this->GetReadOnlyRoots(cage_base);
   DCHECK(this->IsKey(roots, *key));

-  InternalIndex entry = this->FindEntry(isolate, roots, key, hash);
+  InternalIndex entry = this->FindEntry(cage_base, roots, key, hash);
   if (entry.is_not_found()) return roots.the_hole_value();
   return this->get(Derived::EntryToIndex(entry) + 1);
 }
@@ -6096,8 +6096,8 @@ template <typename Derived, typename Shape>
 Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key) {
   DisallowGarbageCollection no_gc;

-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  ReadOnlyRoots roots = this->GetReadOnlyRoots(cage_base);
   DCHECK(this->IsKey(roots, *key));

   // If the object does not have an identity hash, it was never used as a key.
@@ -6105,13 +6105,13 @@ Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key) {
   if (hash.IsUndefined(roots)) {
     return roots.the_hole_value();
   }
-  return Lookup(isolate, key, Smi::ToInt(hash));
+  return Lookup(cage_base, key, Smi::ToInt(hash));
 }

 template <typename Derived, typename Shape>
 Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key,
                                                    int32_t hash) {
-  return Lookup(GetIsolateForPtrCompr(*this), key, hash);
+  return Lookup(GetPtrComprCageBase(*this), key, hash);
 }

 template <typename Derived, typename Shape>
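The Rehash and FindInsertionEntry hunks above only change which value is threaded through; the probing logic itself is untouched. For reference, here is a small self-contained open-addressing table with linear probing, analogous in shape to the code above but not derived from it (ToyTable, Grow and kEmpty are invented names):

#include <cstdint>
#include <iostream>
#include <vector>

// Toy open-addressing table. Growing walks the old backing store, skips empty
// slots, and re-probes every live key into the new store, just as the Rehash
// above re-probes entries into new_table.
class ToyTable {
 public:
  explicit ToyTable(size_t capacity) : slots_(capacity, kEmpty) {}

  void Insert(uint32_t key) { slots_[FindInsertionEntry(key)] = key; }

  bool Contains(uint32_t key) const {
    for (size_t i = key % slots_.size(), n = 0; n < slots_.size();
         i = (i + 1) % slots_.size(), ++n) {
      if (slots_[i] == key) return true;
      if (slots_[i] == kEmpty) return false;
    }
    return false;
  }

  // Rehash into a larger table and adopt its backing store.
  void Grow() {
    ToyTable bigger(slots_.size() * 2);
    for (uint32_t key : slots_) {
      if (key != kEmpty) bigger.Insert(key);
    }
    slots_ = std::move(bigger.slots_);
  }

 private:
  static constexpr uint32_t kEmpty = 0xFFFFFFFF;

  // Callers keep the table below capacity, so a free slot always exists.
  size_t FindInsertionEntry(uint32_t key) const {
    size_t i = key % slots_.size();
    while (slots_[i] != kEmpty) i = (i + 1) % slots_.size();
    return i;
  }

  std::vector<uint32_t> slots_;
};

int main() {
  ToyTable table(4);
  table.Insert(7);
  table.Insert(11);
  table.Grow();
  std::cout << table.Contains(7) << table.Contains(11) << table.Contains(5) << "\n";  // 110
}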
@@ -279,7 +279,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {

 #define IS_TYPE_FUNCTION_DECL(Type) \
   V8_INLINE bool Is##Type() const; \
-  V8_INLINE bool Is##Type(IsolateRoot isolate) const;
+  V8_INLINE bool Is##Type(PtrComprCageBase cage_base) const;
   OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
   HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
   IS_TYPE_FUNCTION_DECL(HashTableBase)
@@ -307,7 +307,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {

 #define DECL_STRUCT_PREDICATE(NAME, Name, name) \
   V8_INLINE bool Is##Name() const; \
-  V8_INLINE bool Is##Name(IsolateRoot isolate) const;
+  V8_INLINE bool Is##Name(PtrComprCageBase cage_base) const;
   STRUCT_LIST(DECL_STRUCT_PREDICATE)
 #undef DECL_STRUCT_PREDICATE

@@ -322,9 +322,9 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
   V8_EXPORT_PRIVATE bool ToInt32(int32_t* value);
   inline bool ToUint32(uint32_t* value) const;

-  inline Representation OptimalRepresentation(IsolateRoot isolate) const;
+  inline Representation OptimalRepresentation(PtrComprCageBase cage_base) const;

-  inline ElementsKind OptimalElementsKind(IsolateRoot isolate) const;
+  inline ElementsKind OptimalElementsKind(PtrComprCageBase cage_base) const;

   inline bool FitsRepresentation(Representation representation);

@@ -673,7 +673,8 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
   inline void InitExternalPointerField(size_t offset, Isolate* isolate);
   inline void InitExternalPointerField(size_t offset, Isolate* isolate,
                                        Address value, ExternalPointerTag tag);
-  inline Address ReadExternalPointerField(size_t offset, IsolateRoot isolate,
+  inline Address ReadExternalPointerField(size_t offset,
+                                          PtrComprCageBase isolate_root,
                                           ExternalPointerTag tag) const;
   inline void WriteExternalPointerField(size_t offset, Isolate* isolate,
                                         Address value, ExternalPointerTag tag);
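The declarations above give each predicate two overloads: a convenience form that derives the compression base from the object itself, and a form that takes a PtrComprCageBase the caller already holds so the derivation is done once per batch of checks. A hypothetical standalone illustration of that pattern (Thing, CageBase and IsFoo are invented for this sketch; the real macros expand to many such pairs):

#include <cstdint>
#include <iostream>

struct CageBase {
  uint64_t address;
};

class Thing {
 public:
  Thing(uint64_t addr, bool foo) : addr_(addr), foo_(foo) {}

  // Convenience overload: recompute the cage base from this object.
  bool IsFoo() const { return IsFoo(GetCageBase()); }

  // Hot-path overload: reuse a base the caller already computed.
  bool IsFoo(CageBase cage_base) const {
    (void)cage_base;  // the toy check does not actually need it
    return foo_;
  }

  // In the real scheme this is a mask of the object address; here it only
  // models that idea.
  CageBase GetCageBase() const { return CageBase{addr_ & ~uint64_t{0xFFFFFFFF}}; }

 private:
  uint64_t addr_;
  bool foo_;
};

int main() {
  Thing t(uint64_t{0x100000000} + 0x40, true);
  CageBase base = t.GetCageBase();
  std::cout << t.IsFoo() << " " << t.IsFoo(base) << "\n";  // 1 1
}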
@@ -37,7 +37,7 @@ Handle<Object> Oddball::ToNumber(Isolate* isolate, Handle<Oddball> input) {
 }

 DEF_GETTER(HeapObject, IsBoolean, bool) {
-  return IsOddball(isolate) &&
+  return IsOddball(cage_base) &&
          ((Oddball::cast(*this).kind() & Oddball::kNotBooleanMask) == 0);
 }

@@ -25,14 +25,14 @@ SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset)
 SYNCHRONIZED_SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset)

 Object PropertyArray::get(int index) const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return get(isolate, index);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return get(cage_base, index);
 }

-Object PropertyArray::get(IsolateRoot isolate, int index) const {
+Object PropertyArray::get(PtrComprCageBase cage_base, int index) const {
   DCHECK_LT(static_cast<unsigned>(index),
             static_cast<unsigned>(this->length()));
-  return TaggedField<Object>::Relaxed_Load(isolate, *this,
+  return TaggedField<Object>::Relaxed_Load(cage_base, *this,
                                            OffsetOfElementAt(index));
 }

@@ -30,7 +30,7 @@ class PropertyArray : public HeapObject {
   inline int Hash() const;

   inline Object get(int index) const;
-  inline Object get(IsolateRoot isolate, int index) const;
+  inline Object get(PtrComprCageBase cage_base, int index) const;

   inline void set(int index, Object value);
   // Setter with explicit barrier mode.
@@ -75,10 +75,10 @@ Descriptor Descriptor::DataField(Handle<Name> key, int field_index,

 Descriptor Descriptor::DataConstant(Handle<Name> key, Handle<Object> value,
                                     PropertyAttributes attributes) {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*key);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*key);
   return Descriptor(key, MaybeObjectHandle(value), kData, attributes,
                     kDescriptor, PropertyConstness::kConst,
-                    value->OptimalRepresentation(isolate), 0);
+                    value->OptimalRepresentation(cage_base), 0);
 }

 Descriptor Descriptor::DataConstant(Isolate* isolate, Handle<Name> key,
@@ -575,13 +575,13 @@ Handle<ScopeInfo> ScopeInfo::CreateForBootstrapping(Isolate* isolate,
 }

 Object ScopeInfo::get(int index) const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return get(isolate, index);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return get(cage_base, index);
 }

-Object ScopeInfo::get(IsolateRoot isolate, int index) const {
+Object ScopeInfo::get(PtrComprCageBase cage_base, int index) const {
   DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
-  return TaggedField<Object>::Relaxed_Load(isolate, *this,
+  return TaggedField<Object>::Relaxed_Load(cage_base, *this,
                                            OffsetOfElementAt(index));
 }

@@ -293,7 +293,7 @@ class ScopeInfo : public TorqueGeneratedScopeInfo<ScopeInfo, HeapObject> {
   // 'flags', the first field defined by ScopeInfo after the standard-size
   // HeapObject header.
   V8_EXPORT_PRIVATE Object get(int index) const;
-  Object get(IsolateRoot isolate, int index) const;
+  Object get(PtrComprCageBase cage_base, int index) const;
   // Setter that doesn't need write barrier.
   void set(int index, Smi value);
   // Setter with explicit barrier mode.
@@ -31,7 +31,7 @@ bool FullObjectSlot::contains_value(Address raw_value) const {

 Object FullObjectSlot::operator*() const { return Object(*location()); }

-Object FullObjectSlot::load(IsolateRoot isolate) const { return **this; }
+Object FullObjectSlot::load(PtrComprCageBase cage_base) const { return **this; }

 void FullObjectSlot::store(Object value) const { *location() = value.ptr(); }

@@ -39,7 +39,7 @@ Object FullObjectSlot::Acquire_Load() const {
   return Object(base::AsAtomicPointer::Acquire_Load(location()));
 }

-Object FullObjectSlot::Acquire_Load(IsolateRoot isolate) const {
+Object FullObjectSlot::Acquire_Load(PtrComprCageBase cage_base) const {
   return Acquire_Load();
 }

@@ -47,7 +47,7 @@ Object FullObjectSlot::Relaxed_Load() const {
   return Object(base::AsAtomicPointer::Relaxed_Load(location()));
 }

-Object FullObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
+Object FullObjectSlot::Relaxed_Load(PtrComprCageBase cage_base) const {
   return Relaxed_Load();
 }

@@ -79,7 +79,7 @@ MaybeObject FullMaybeObjectSlot::operator*() const {
   return MaybeObject(*location());
 }

-MaybeObject FullMaybeObjectSlot::load(IsolateRoot isolate) const {
+MaybeObject FullMaybeObjectSlot::load(PtrComprCageBase cage_base) const {
   return **this;
 }

@@ -91,7 +91,8 @@ MaybeObject FullMaybeObjectSlot::Relaxed_Load() const {
   return MaybeObject(base::AsAtomicPointer::Relaxed_Load(location()));
 }

-MaybeObject FullMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
+MaybeObject FullMaybeObjectSlot::Relaxed_Load(
+    PtrComprCageBase cage_base) const {
   return Relaxed_Load();
 }

@@ -113,7 +114,7 @@ HeapObjectReference FullHeapObjectSlot::operator*() const {
   return HeapObjectReference(*location());
 }

-HeapObjectReference FullHeapObjectSlot::load(IsolateRoot isolate) const {
+HeapObjectReference FullHeapObjectSlot::load(PtrComprCageBase cage_base) const {
   return **this;
 }

@@ -110,13 +110,13 @@ class FullObjectSlot : public SlotBase<FullObjectSlot, Address> {
   inline bool contains_value(Address raw_value) const;

   inline Object operator*() const;
-  inline Object load(IsolateRoot isolate) const;
+  inline Object load(PtrComprCageBase cage_base) const;
   inline void store(Object value) const;

   inline Object Acquire_Load() const;
-  inline Object Acquire_Load(IsolateRoot isolate) const;
+  inline Object Acquire_Load(PtrComprCageBase cage_base) const;
   inline Object Relaxed_Load() const;
-  inline Object Relaxed_Load(IsolateRoot isolate) const;
+  inline Object Relaxed_Load(PtrComprCageBase cage_base) const;
   inline void Relaxed_Store(Object value) const;
   inline void Release_Store(Object value) const;
   inline Object Relaxed_CompareAndSwap(Object old, Object target) const;
@@ -147,11 +147,11 @@ class FullMaybeObjectSlot
       : SlotBase(slot.address()) {}

   inline MaybeObject operator*() const;
-  inline MaybeObject load(IsolateRoot isolate) const;
+  inline MaybeObject load(PtrComprCageBase cage_base) const;
   inline void store(MaybeObject value) const;

   inline MaybeObject Relaxed_Load() const;
-  inline MaybeObject Relaxed_Load(IsolateRoot isolate) const;
+  inline MaybeObject Relaxed_Load(PtrComprCageBase cage_base) const;
   inline void Relaxed_Store(MaybeObject value) const;
   inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
 };
@@ -174,7 +174,7 @@ class FullHeapObjectSlot : public SlotBase<FullHeapObjectSlot, Address> {
       : SlotBase(slot.address()) {}

   inline HeapObjectReference operator*() const;
-  inline HeapObjectReference load(IsolateRoot isolate) const;
+  inline HeapObjectReference load(PtrComprCageBase cage_base) const;
   inline void store(HeapObjectReference value) const;

   inline HeapObject ToHeapObject() const;
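Full slots hold uncompressed pointers, so their load(cage_base) overloads above simply ignore the argument; the parameter exists so templated callers can use one shape for full and compressed slots alike. A standalone sketch of that design, with invented FullSlot and CompressedSlot types rather than V8's:

#include <cstdint>
#include <iostream>

struct CageBase {
  uint64_t address;
};

class FullSlot {
 public:
  explicit FullSlot(const uint64_t* location) : location_(location) {}
  // The base is unused; a full slot already holds the whole pointer.
  uint64_t load(CageBase) const { return *location_; }

 private:
  const uint64_t* location_;
};

class CompressedSlot {
 public:
  explicit CompressedSlot(const uint32_t* location) : location_(location) {}
  // A compressed slot needs the base to rebuild the full pointer.
  uint64_t load(CageBase cage_base) const { return cage_base.address + *location_; }

 private:
  const uint32_t* location_;
};

// Generic code is written once against the common load(cage_base) shape.
template <typename Slot>
uint64_t ReadPointer(Slot slot, CageBase cage_base) {
  return slot.load(cage_base);
}

int main() {
  const CageBase base{uint64_t{0x400000000}};
  const uint64_t full_value = base.address + 0x1234;
  const uint32_t compressed_value = 0x1234;
  std::cout << std::hex << ReadPointer(FullSlot(&full_value), base) << " "
            << ReadPointer(CompressedSlot(&compressed_value), base) << "\n";
}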
@ -274,12 +274,12 @@ inline TResult StringShape::DispatchToSpecificType(String str,
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(String, IsOneByteRepresentation, bool) {
|
DEF_GETTER(String, IsOneByteRepresentation, bool) {
|
||||||
uint32_t type = map(isolate).instance_type();
|
uint32_t type = map(cage_base).instance_type();
|
||||||
return (type & kStringEncodingMask) == kOneByteStringTag;
|
return (type & kStringEncodingMask) == kOneByteStringTag;
|
||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(String, IsTwoByteRepresentation, bool) {
|
DEF_GETTER(String, IsTwoByteRepresentation, bool) {
|
||||||
uint32_t type = map(isolate).instance_type();
|
uint32_t type = map(cage_base).instance_type();
|
||||||
return (type & kStringEncodingMask) == kTwoByteStringTag;
|
return (type & kStringEncodingMask) == kTwoByteStringTag;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -463,7 +463,7 @@ bool String::IsEqualTo(Vector<const Char> str, Isolate* isolate) const {
|
|||||||
template <String::EqualityType kEqType, typename Char>
|
template <String::EqualityType kEqType, typename Char>
|
||||||
bool String::IsEqualTo(Vector<const Char> str) const {
|
bool String::IsEqualTo(Vector<const Char> str) const {
|
||||||
DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this));
|
DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this));
|
||||||
return IsEqualToImpl<kEqType>(str, GetIsolateForPtrCompr(*this),
|
return IsEqualToImpl<kEqType>(str, GetPtrComprCageBase(*this),
|
||||||
SharedStringAccessGuardIfNeeded::NotNeeded());
|
SharedStringAccessGuardIfNeeded::NotNeeded());
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -475,7 +475,7 @@ bool String::IsEqualTo(Vector<const Char> str, LocalIsolate* isolate) const {
|
|||||||
|
|
||||||
template <String::EqualityType kEqType, typename Char>
|
template <String::EqualityType kEqType, typename Char>
|
||||||
bool String::IsEqualToImpl(
|
bool String::IsEqualToImpl(
|
||||||
Vector<const Char> str, IsolateRoot isolate,
|
Vector<const Char> str, PtrComprCageBase cage_base,
|
||||||
const SharedStringAccessGuardIfNeeded& access_guard) const {
|
const SharedStringAccessGuardIfNeeded& access_guard) const {
|
||||||
size_t len = str.size();
|
size_t len = str.size();
|
||||||
switch (kEqType) {
|
switch (kEqType) {
|
||||||
@ -496,7 +496,7 @@ bool String::IsEqualToImpl(
|
|||||||
String string = *this;
|
String string = *this;
|
||||||
const Char* data = str.data();
|
const Char* data = str.data();
|
||||||
while (true) {
|
while (true) {
|
||||||
int32_t type = string.map(isolate).instance_type();
|
int32_t type = string.map(cage_base).instance_type();
|
||||||
switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
|
switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
|
||||||
case kSeqStringTag | kOneByteStringTag:
|
case kSeqStringTag | kOneByteStringTag:
|
||||||
return CompareCharsEqual(
|
return CompareCharsEqual(
|
||||||
@ -521,7 +521,7 @@ bool String::IsEqualToImpl(
|
|||||||
case kSlicedStringTag | kTwoByteStringTag: {
|
case kSlicedStringTag | kTwoByteStringTag: {
|
||||||
SlicedString slicedString = SlicedString::cast(string);
|
SlicedString slicedString = SlicedString::cast(string);
|
||||||
slice_offset += slicedString.offset();
|
slice_offset += slicedString.offset();
|
||||||
string = slicedString.parent(isolate);
|
string = slicedString.parent(cage_base);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -529,13 +529,14 @@ bool String::IsEqualToImpl(
|
|||||||
case kConsStringTag | kTwoByteStringTag: {
|
case kConsStringTag | kTwoByteStringTag: {
|
||||||
// The ConsString path is more complex and rare, so call out to an
|
// The ConsString path is more complex and rare, so call out to an
|
||||||
// out-of-line handler.
|
// out-of-line handler.
|
||||||
return IsConsStringEqualToImpl<Char>(
|
return IsConsStringEqualToImpl<Char>(ConsString::cast(string),
|
||||||
ConsString::cast(string), slice_offset, str, isolate, access_guard);
|
slice_offset, str, cage_base,
|
||||||
|
access_guard);
|
||||||
}
|
}
|
||||||
|
|
||||||
case kThinStringTag | kOneByteStringTag:
|
case kThinStringTag | kOneByteStringTag:
|
||||||
case kThinStringTag | kTwoByteStringTag:
|
case kThinStringTag | kTwoByteStringTag:
|
||||||
string = ThinString::cast(string).actual(isolate);
|
string = ThinString::cast(string).actual(cage_base);
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
default:
|
default:
|
||||||
@ -548,7 +549,8 @@ bool String::IsEqualToImpl(
|
|||||||
template <typename Char>
|
template <typename Char>
|
||||||
bool String::IsConsStringEqualToImpl(
|
bool String::IsConsStringEqualToImpl(
|
||||||
ConsString string, int slice_offset, Vector<const Char> str,
|
ConsString string, int slice_offset, Vector<const Char> str,
|
||||||
IsolateRoot isolate, const SharedStringAccessGuardIfNeeded& access_guard) {
|
PtrComprCageBase cage_base,
|
||||||
|
const SharedStringAccessGuardIfNeeded& access_guard) {
|
||||||
// Already checked the len in IsEqualToImpl. Check GE rather than EQ in case
|
// Already checked the len in IsEqualToImpl. Check GE rather than EQ in case
|
||||||
// this is a prefix check.
|
// this is a prefix check.
|
||||||
DCHECK_GE(string.length(), str.size());
|
DCHECK_GE(string.length(), str.size());
|
||||||
@ -561,7 +563,7 @@ bool String::IsConsStringEqualToImpl(
|
|||||||
// remaining string.
|
// remaining string.
|
||||||
size_t len = std::min<size_t>(segment.length(), remaining_str.size());
|
size_t len = std::min<size_t>(segment.length(), remaining_str.size());
|
||||||
Vector<const Char> sub_str = remaining_str.SubVector(0, len);
|
Vector<const Char> sub_str = remaining_str.SubVector(0, len);
|
||||||
if (!segment.IsEqualToImpl<EqualityType::kNoLengthCheck>(sub_str, isolate,
|
if (!segment.IsEqualToImpl<EqualityType::kNoLengthCheck>(sub_str, cage_base,
|
||||||
access_guard)) {
|
access_guard)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@ -845,7 +847,7 @@ Object ConsString::unchecked_second() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(ThinString, unchecked_actual, HeapObject) {
|
DEF_GETTER(ThinString, unchecked_actual, HeapObject) {
|
||||||
return TaggedField<HeapObject, kActualOffset>::load(isolate, *this);
|
return TaggedField<HeapObject, kActualOffset>::load(cage_base, *this);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool ExternalString::is_uncached() const {
|
bool ExternalString::is_uncached() const {
|
||||||
@ -860,7 +862,7 @@ void ExternalString::AllocateExternalPointerEntries(Isolate* isolate) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
DEF_GETTER(ExternalString, resource_as_address, Address) {
|
DEF_GETTER(ExternalString, resource_as_address, Address) {
|
||||||
return ReadExternalPointerField(kResourceOffset, isolate,
|
return ReadExternalPointerField(kResourceOffset, cage_base,
|
||||||
kExternalStringResourceTag);
|
kExternalStringResourceTag);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -908,7 +910,7 @@ DEF_GETTER(ExternalOneByteString, resource,
|
|||||||
|
|
||||||
DEF_GETTER(ExternalOneByteString, mutable_resource,
|
DEF_GETTER(ExternalOneByteString, mutable_resource,
|
||||||
ExternalOneByteString::Resource*) {
|
ExternalOneByteString::Resource*) {
|
||||||
return reinterpret_cast<Resource*>(resource_as_address(isolate));
|
return reinterpret_cast<Resource*>(resource_as_address(cage_base));
|
||||||
}
|
}
|
||||||
|
|
||||||
void ExternalOneByteString::update_data_cache(Isolate* isolate) {
|
void ExternalOneByteString::update_data_cache(Isolate* isolate) {
|
||||||
@ -973,7 +975,7 @@ DEF_GETTER(ExternalTwoByteString, resource,
|
|||||||
|
|
||||||
DEF_GETTER(ExternalTwoByteString, mutable_resource,
|
DEF_GETTER(ExternalTwoByteString, mutable_resource,
|
||||||
ExternalTwoByteString::Resource*) {
|
ExternalTwoByteString::Resource*) {
|
||||||
return reinterpret_cast<Resource*>(resource_as_address(isolate));
|
return reinterpret_cast<Resource*>(resource_as_address(cage_base));
|
||||||
}
|
}
|
||||||
|
|
||||||
void ExternalTwoByteString::update_data_cache(Isolate* isolate) {
|
void ExternalTwoByteString::update_data_cache(Isolate* isolate) {
|
||||||
|
@ -91,15 +91,15 @@ bool KeyIsMatch(LocalIsolate* isolate, StringTableKey* key, String string) {
|
|||||||
class StringTable::Data {
|
class StringTable::Data {
|
||||||
public:
|
public:
|
||||||
static std::unique_ptr<Data> New(int capacity);
|
static std::unique_ptr<Data> New(int capacity);
|
||||||
static std::unique_ptr<Data> Resize(IsolateRoot isolate,
|
static std::unique_ptr<Data> Resize(PtrComprCageBase cage_base,
|
||||||
std::unique_ptr<Data> data, int capacity);
|
std::unique_ptr<Data> data, int capacity);
|
||||||
|
|
||||||
OffHeapObjectSlot slot(InternalIndex index) const {
|
OffHeapObjectSlot slot(InternalIndex index) const {
|
||||||
return OffHeapObjectSlot(&elements_[index.as_uint32()]);
|
return OffHeapObjectSlot(&elements_[index.as_uint32()]);
|
||||||
}
|
}
|
||||||
|
|
||||||
Object Get(IsolateRoot isolate, InternalIndex index) const {
|
Object Get(PtrComprCageBase cage_base, InternalIndex index) const {
|
||||||
return slot(index).Acquire_Load(isolate);
|
return slot(index).Acquire_Load(cage_base);
|
||||||
}
|
}
|
||||||
|
|
||||||
void Set(InternalIndex index, String entry) {
|
void Set(InternalIndex index, String entry) {
|
||||||
@ -139,7 +139,8 @@ class StringTable::Data {
|
|||||||
InternalIndex FindEntry(LocalIsolate* isolate, StringTableKey* key,
|
InternalIndex FindEntry(LocalIsolate* isolate, StringTableKey* key,
|
||||||
uint32_t hash) const;
|
uint32_t hash) const;
|
||||||
|
|
||||||
InternalIndex FindInsertionEntry(IsolateRoot isolate, uint32_t hash) const;
|
InternalIndex FindInsertionEntry(PtrComprCageBase cage_base,
|
||||||
|
uint32_t hash) const;
|
||||||
|
|
||||||
template <typename LocalIsolate, typename StringTableKey>
|
template <typename LocalIsolate, typename StringTableKey>
|
||||||
InternalIndex FindEntryOrInsertionEntry(LocalIsolate* isolate,
|
InternalIndex FindEntryOrInsertionEntry(LocalIsolate* isolate,
|
||||||
@ -157,7 +158,7 @@ class StringTable::Data {
|
|||||||
Data* PreviousData() { return previous_data_.get(); }
|
Data* PreviousData() { return previous_data_.get(); }
|
||||||
void DropPreviousData() { previous_data_.reset(); }
|
void DropPreviousData() { previous_data_.reset(); }
|
||||||
|
|
||||||
void Print(IsolateRoot isolate) const;
|
void Print(PtrComprCageBase cage_base) const;
|
||||||
size_t GetCurrentMemoryUsage() const;
|
size_t GetCurrentMemoryUsage() const;
|
||||||
|
|
||||||
private:
|
private:
|
||||||
@ -224,7 +225,7 @@ std::unique_ptr<StringTable::Data> StringTable::Data::New(int capacity) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
|
std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
|
||||||
IsolateRoot isolate, std::unique_ptr<Data> data, int capacity) {
|
PtrComprCageBase cage_base, std::unique_ptr<Data> data, int capacity) {
|
||||||
std::unique_ptr<Data> new_data(new (capacity) Data(capacity));
|
std::unique_ptr<Data> new_data(new (capacity) Data(capacity));
|
||||||
|
|
||||||
DCHECK_LT(data->number_of_elements(), new_data->capacity());
|
DCHECK_LT(data->number_of_elements(), new_data->capacity());
|
||||||
@ -234,11 +235,12 @@ std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
|
|||||||
|
|
||||||
// Rehash the elements.
|
// Rehash the elements.
|
||||||
for (InternalIndex i : InternalIndex::Range(data->capacity())) {
|
for (InternalIndex i : InternalIndex::Range(data->capacity())) {
|
||||||
Object element = data->Get(isolate, i);
|
Object element = data->Get(cage_base, i);
|
||||||
if (element == empty_element() || element == deleted_element()) continue;
|
if (element == empty_element() || element == deleted_element()) continue;
|
||||||
String string = String::cast(element);
|
String string = String::cast(element);
|
||||||
uint32_t hash = string.hash();
|
uint32_t hash = string.hash();
|
||||||
InternalIndex insertion_index = new_data->FindInsertionEntry(isolate, hash);
|
InternalIndex insertion_index =
|
||||||
|
new_data->FindInsertionEntry(cage_base, hash);
|
||||||
new_data->Set(insertion_index, string);
|
new_data->Set(insertion_index, string);
|
||||||
}
|
}
|
||||||
new_data->number_of_elements_ = data->number_of_elements();
|
new_data->number_of_elements_ = data->number_of_elements();
|
||||||
@ -265,7 +267,7 @@ InternalIndex StringTable::Data::FindEntry(LocalIsolate* isolate,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
InternalIndex StringTable::Data::FindInsertionEntry(IsolateRoot isolate,
|
InternalIndex StringTable::Data::FindInsertionEntry(PtrComprCageBase cage_base,
|
||||||
uint32_t hash) const {
|
uint32_t hash) const {
|
||||||
uint32_t count = 1;
|
uint32_t count = 1;
|
||||||
// EnsureCapacity will guarantee the hash table is never full.
|
// EnsureCapacity will guarantee the hash table is never full.
|
||||||
@@ -273,7 +275,7 @@ InternalIndex StringTable::Data::FindInsertionEntry(IsolateRoot isolate,
        entry = NextProbe(entry, count++, capacity_)) {
     // TODO(leszeks): Consider delaying the decompression until after the
     // comparisons against empty/deleted.
-    Object element = Get(isolate, entry);
+    Object element = Get(cage_base, entry);
     if (element == empty_element() || element == deleted_element())
       return entry;
   }
@@ -314,11 +316,12 @@ void StringTable::Data::IterateElements(RootVisitor* visitor) {
   visitor->VisitRootPointers(Root::kStringTable, nullptr, first_slot, end_slot);
 }
 
-void StringTable::Data::Print(IsolateRoot isolate) const {
+void StringTable::Data::Print(PtrComprCageBase cage_base) const {
   OFStream os(stdout);
   os << "StringTable {" << std::endl;
   for (InternalIndex i : InternalIndex::Range(capacity_)) {
-    os << " " << i.as_uint32() << ": " << Brief(Get(isolate, i)) << std::endl;
+    os << " " << i.as_uint32() << ": " << Brief(Get(cage_base, i))
+       << std::endl;
   }
   os << "}" << std::endl;
 }
@@ -530,7 +533,7 @@ template Handle<String> StringTable::LookupKey(LocalIsolate* isolate,
 template Handle<String> StringTable::LookupKey(Isolate* isolate,
                                                StringTableInsertionKey* key);
 
-StringTable::Data* StringTable::EnsureCapacity(IsolateRoot isolate,
+StringTable::Data* StringTable::EnsureCapacity(PtrComprCageBase cage_base,
                                                int additional_elements) {
   // This call is only allowed while the write mutex is held.
   write_mutex_.AssertHeld();
@@ -560,7 +563,7 @@ StringTable::Data* StringTable::EnsureCapacity(IsolateRoot isolate,
 
   if (new_capacity != -1) {
     std::unique_ptr<Data> new_data =
-        Data::Resize(isolate, std::unique_ptr<Data>(data), new_capacity);
+        Data::Resize(cage_base, std::unique_ptr<Data>(data), new_capacity);
     // `new_data` is the new owner of `data`.
     DCHECK_EQ(new_data->PreviousData(), data);
     // Release-store the new data pointer as `data_`, so that it can be
@@ -669,8 +672,8 @@ Address StringTable::TryStringToIndexOrLookupExisting(Isolate* isolate,
       isolate, string, source, start);
 }
 
-void StringTable::Print(IsolateRoot isolate) const {
-  data_.load(std::memory_order_acquire)->Print(isolate);
+void StringTable::Print(PtrComprCageBase cage_base) const {
+  data_.load(std::memory_order_acquire)->Print(cage_base);
 }
 
 size_t StringTable::GetCurrentMemoryUsage() const {
@@ -72,7 +72,7 @@ class V8_EXPORT_PRIVATE StringTable {
   static Address TryStringToIndexOrLookupExisting(Isolate* isolate,
                                                   Address raw_string);
 
-  void Print(IsolateRoot isolate) const;
+  void Print(PtrComprCageBase cage_base) const;
   size_t GetCurrentMemoryUsage() const;
 
   // The following methods must be called either while holding the write lock,
@@ -84,7 +84,7 @@ class V8_EXPORT_PRIVATE StringTable {
  private:
   class Data;
 
-  Data* EnsureCapacity(IsolateRoot isolate, int additional_elements);
+  Data* EnsureCapacity(PtrComprCageBase cage_base, int additional_elements);
 
   std::atomic<Data*> data_;
   // Write mutex is mutable so that readers of concurrently mutated values (e.g.
@@ -1289,7 +1289,7 @@ Object String::LastIndexOf(Isolate* isolate, Handle<Object> receiver,
 bool String::HasOneBytePrefix(Vector<const char> str) {
   DCHECK(!SharedStringAccessGuardIfNeeded::IsNeeded(*this));
   return IsEqualToImpl<EqualityType::kPrefix>(
-      str, GetIsolateForPtrCompr(*this),
+      str, GetPtrComprCageBase(*this),
       SharedStringAccessGuardIfNeeded::NotNeeded());
 }
 
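The GetPtrComprCageBase(*this) helper used above recovers the cage base from the receiver's own address. A minimal sketch of that derivation, assuming the cage reservation is aligned to a 4 GB boundary (the constant name and size below are assumptions, not the V8 source):

#include <cstdint>

// Assumed: the pointer compression cage is reserved at a 4 GB-aligned base.
constexpr uintptr_t kAssumedCageBaseAlignment = uintptr_t{4} << 30;

// Any address inside the cage rounds down to the same base, so the cage base
// can be recovered from an arbitrary on-heap object address.
inline uintptr_t CageBaseFromOnHeapAddress(uintptr_t on_heap_address) {
  return on_heap_address & ~(kAssumedCageBaseAlignment - 1);
}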
@@ -332,7 +332,7 @@ class String : public TorqueGeneratedString<String, Name> {
   // whole string or just a prefix.
   //
   // This is main-thread only, like the Isolate* overload, but additionally
-  // computes the IsolateRoot for IsEqualToImpl.
+  // computes the PtrComprCageBase for IsEqualToImpl.
   template <EqualityType kEqType = EqualityType::kWholeString, typename Char>
   inline bool IsEqualTo(Vector<const Char> str) const;
 
@@ -546,14 +546,15 @@ class String : public TorqueGeneratedString<String, Name> {
   // Implementation of the IsEqualTo() public methods. Do not use directly.
   template <EqualityType kEqType, typename Char>
   V8_INLINE bool IsEqualToImpl(
-      Vector<const Char> str, IsolateRoot isolate,
+      Vector<const Char> str, PtrComprCageBase cage_base,
       const SharedStringAccessGuardIfNeeded& access_guard) const;
 
   // Out-of-line IsEqualToImpl for ConsString.
   template <typename Char>
   V8_NOINLINE static bool IsConsStringEqualToImpl(
       ConsString string, int slice_offset, Vector<const Char> str,
-      IsolateRoot isolate, const SharedStringAccessGuardIfNeeded& access_guard);
+      PtrComprCageBase cage_base,
+      const SharedStringAccessGuardIfNeeded& access_guard);
 
   V8_EXPORT_PRIVATE static Handle<String> SlowFlatten(
       Isolate* isolate, Handle<ConsString> cons, AllocationType allocation);
@@ -219,15 +219,15 @@ InternalIndex SwissNameDictionary::FindEntry(LocalIsolate* isolate,
 }
 
 Object SwissNameDictionary::LoadFromDataTable(int entry, int data_offset) {
-  return LoadFromDataTable(GetIsolateForPtrCompr(*this), entry, data_offset);
+  return LoadFromDataTable(GetPtrComprCageBase(*this), entry, data_offset);
 }
 
-Object SwissNameDictionary::LoadFromDataTable(IsolateRoot isolate, int entry,
-                                              int data_offset) {
+Object SwissNameDictionary::LoadFromDataTable(PtrComprCageBase cage_base,
+                                              int entry, int data_offset) {
   DCHECK_LT(static_cast<unsigned>(entry), static_cast<unsigned>(Capacity()));
   int offset = DataTableStartOffset() +
                (entry * kDataTableEntryCount + data_offset) * kTaggedSize;
-  return TaggedField<Object>::Relaxed_Load(isolate, *this, offset);
+  return TaggedField<Object>::Relaxed_Load(cage_base, *this, offset);
 }
 
 void SwissNameDictionary::StoreToDataTable(int entry, int data_offset,
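The offset arithmetic above is untouched by the rename; only the base used for decompression changes. A hedged worked example of that computation, assuming two tagged slots per entry (key and value) and a 4-byte kTaggedSize under pointer compression (both values are assumptions for illustration):

// Assumed layout constants, for illustration only.
constexpr int kAssumedDataTableEntryCount = 2;  // key + value per entry
constexpr int kAssumedTaggedSize = 4;           // compressed tagged slot

constexpr int DataTableByteOffset(int data_table_start, int entry,
                                  int data_offset) {
  return data_table_start +
         (entry * kAssumedDataTableEntryCount + data_offset) *
             kAssumedTaggedSize;
}

// Entry 3, value slot (data_offset == 1), table starting at byte 16:
static_assert(DataTableByteOffset(16, 3, 1) == 44, "16 + (3 * 2 + 1) * 4");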
@@ -306,7 +306,8 @@ class V8_EXPORT_PRIVATE SwissNameDictionary : public HeapObject {
   inline ctrl_t GetCtrl(int entry);
 
   inline Object LoadFromDataTable(int entry, int data_offset);
-  inline Object LoadFromDataTable(IsolateRoot root, int entry, int data_offset);
+  inline Object LoadFromDataTable(PtrComprCageBase cage_base, int entry,
+                                  int data_offset);
   inline void StoreToDataTable(int entry, int data_offset, Object data);
   inline void StoreToDataTableNoBarrier(int entry, int data_offset,
                                         Object data);
@@ -61,10 +61,10 @@ T TaggedField<T, kFieldOffset>::load(HeapObject host, int offset) {
 
 // static
 template <typename T, int kFieldOffset>
-T TaggedField<T, kFieldOffset>::load(IsolateRoot isolate, HeapObject host,
-                                     int offset) {
+T TaggedField<T, kFieldOffset>::load(PtrComprCageBase cage_base,
+                                     HeapObject host, int offset) {
   Tagged_t value = *location(host, offset);
-  return T(tagged_to_full(isolate, value));
+  return T(tagged_to_full(cage_base, value));
 }
 
 // static
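For orientation, a simplified sketch of what a tagged_to_full-style helper does once it is handed a cage base. This is not the V8 implementation; it ignores Smis, the map word, and the configuration where pointer compression is off:

#include <cstdint>

using Tagged_t = uint32_t;  // compressed on-heap field value
using Address = uintptr_t;  // full machine address

// The field stores only the low 32 bits; adding the cage base rebuilds the
// full pointer, because every object in the cage shares that base.
inline Address TaggedToFullSketch(Address cage_base, Tagged_t raw_value) {
  return cage_base + raw_value;
}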
@@ -96,10 +96,10 @@ T TaggedField<T, kFieldOffset>::Relaxed_Load(HeapObject host, int offset) {
 
 // static
 template <typename T, int kFieldOffset>
-T TaggedField<T, kFieldOffset>::Relaxed_Load(IsolateRoot isolate,
+T TaggedField<T, kFieldOffset>::Relaxed_Load(PtrComprCageBase cage_base,
                                              HeapObject host, int offset) {
   AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location(host, offset));
-  return T(tagged_to_full(isolate, value));
+  return T(tagged_to_full(cage_base, value));
 }
 
 // static
@@ -125,10 +125,10 @@ T TaggedField<T, kFieldOffset>::Acquire_Load(HeapObject host, int offset) {
 
 // static
 template <typename T, int kFieldOffset>
-T TaggedField<T, kFieldOffset>::Acquire_Load(IsolateRoot isolate,
+T TaggedField<T, kFieldOffset>::Acquire_Load(PtrComprCageBase cage_base,
                                              HeapObject host, int offset) {
   AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location(host, offset));
-  return T(tagged_to_full(isolate, value));
+  return T(tagged_to_full(cage_base, value));
 }
 
 // static
@@ -38,20 +38,21 @@ class TaggedField : public AllStatic {
   static inline Address address(HeapObject host, int offset = 0);
 
   static inline T load(HeapObject host, int offset = 0);
-  static inline T load(IsolateRoot isolate, HeapObject host, int offset = 0);
+  static inline T load(PtrComprCageBase cage_base, HeapObject host,
+                       int offset = 0);
 
   static inline void store(HeapObject host, T value);
   static inline void store(HeapObject host, int offset, T value);
 
   static inline T Relaxed_Load(HeapObject host, int offset = 0);
-  static inline T Relaxed_Load(IsolateRoot isolate, HeapObject host,
+  static inline T Relaxed_Load(PtrComprCageBase cage_base, HeapObject host,
                                int offset = 0);
 
   static inline void Relaxed_Store(HeapObject host, T value);
   static inline void Relaxed_Store(HeapObject host, int offset, T value);
 
   static inline T Acquire_Load(HeapObject host, int offset = 0);
-  static inline T Acquire_Load(IsolateRoot isolate, HeapObject host,
+  static inline T Acquire_Load(PtrComprCageBase cage_base, HeapObject host,
                                int offset = 0);
 
   static inline void Release_Store(HeapObject host, T value);
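A hedged usage sketch of the two overload families after the rename; the offset constant and function name are invented for illustration, and the snippet assumes V8-internal headers:

// Hypothetical field offset; in real code this comes from the object layout.
constexpr int kFooOffset = 8;

Object LoadFooSketch(PtrComprCageBase cage_base, HeapObject host) {
  // Host-only overload: the base is derived from `host` internally.
  Object from_host = TaggedField<Object, kFooOffset>::load(host);
  // Cage-base overload: hot paths pass the base they already have in hand.
  Object from_cage = TaggedField<Object, kFooOffset>::load(cage_base, host);
  DCHECK_EQ(from_host, from_cage);
  return from_cage;
}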
@@ -45,13 +45,13 @@ RELEASE_ACQUIRE_ACCESSORS(FunctionTemplateInfo, call_code, HeapObject,
 
 // TODO(nicohartmann@, v8:11122): Let Torque generate this accessor.
 HeapObject FunctionTemplateInfo::rare_data(AcquireLoadTag) const {
-  IsolateRoot isolate = GetIsolateForPtrCompr(*this);
-  return rare_data(isolate, kAcquireLoad);
+  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
+  return rare_data(cage_base, kAcquireLoad);
 }
-HeapObject FunctionTemplateInfo::rare_data(IsolateRoot isolate,
+HeapObject FunctionTemplateInfo::rare_data(PtrComprCageBase cage_base,
                                            AcquireLoadTag) const {
   HeapObject value =
-      TaggedField<HeapObject>::Acquire_Load(isolate, *this, kRareDataOffset);
+      TaggedField<HeapObject>::Acquire_Load(cage_base, *this, kRareDataOffset);
   DCHECK(value.IsUndefined() || value.IsFunctionTemplateRareData());
   return value;
 }
@@ -75,8 +75,8 @@ FunctionTemplateRareData FunctionTemplateInfo::EnsureFunctionTemplateRareData(
 
 #define RARE_ACCESSORS(Name, CamelName, Type, Default) \
   DEF_GETTER(FunctionTemplateInfo, Get##CamelName, Type) { \
-    HeapObject extra = rare_data(isolate, kAcquireLoad); \
-    HeapObject undefined = GetReadOnlyRoots(isolate).undefined_value(); \
+    HeapObject extra = rare_data(cage_base, kAcquireLoad); \
+    HeapObject undefined = GetReadOnlyRoots(cage_base).undefined_value(); \
     return extra == undefined ? Default \
                               : FunctionTemplateRareData::cast(extra).Name(); \
   } \
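The `cage_base` name inside the macro body only resolves because DEF_GETTER supplies a parameter of that name. A hedged sketch of the accessor-macro shape this relies on; the real expansion lives in object-macros.h and may differ in detail:

// Assumed shape of DEF_GETTER after the rename: it defines both a
// parameterless getter and a cage-base-taking one, with the parameter spelled
// `cage_base` so macro bodies like RARE_ACCESSORS can refer to it.
#define DEF_GETTER_SKETCH(holder, name, type)                \
  type holder::name() const {                                \
    PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
    return holder::name(cage_base);                          \
  }                                                          \
  type holder::name(PtrComprCageBase cage_base) const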
@@ -92,7 +92,7 @@ class FunctionTemplateInfo
 
   // TODO(nicohartmann@, v8:11122): Let Torque generate the following accessor.
   inline HeapObject rare_data(AcquireLoadTag) const;
-  inline HeapObject rare_data(IsolateRoot isolate, AcquireLoadTag) const;
+  inline HeapObject rare_data(PtrComprCageBase cage_base, AcquireLoadTag) const;
   inline void set_rare_data(
       HeapObject value, ReleaseStoreTag,
       WriteBarrierMode mode = WriteBarrierMode::UPDATE_WRITE_BARRIER);
@@ -1508,10 +1508,10 @@ class RootsReferencesExtractor : public RootVisitor {
                          OffHeapObjectSlot start,
                          OffHeapObjectSlot end) override {
     DCHECK_EQ(root, Root::kStringTable);
-    IsolateRoot isolate = Isolate::FromHeap(explorer_->heap_);
+    PtrComprCageBase cage_base = Isolate::FromHeap(explorer_->heap_);
     for (OffHeapObjectSlot p = start; p < end; ++p) {
       explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
-                                       p.load(isolate));
+                                       p.load(cage_base));
     }
   }
 
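The initialization from Isolate::FromHeap() in this hunk only compiles if PtrComprCageBase, like IsolateRoot before it, is implicitly constructible from Isolate*. A hedged usage sketch built on that assumption; the string_table() accessor is also assumed:

void PrintStringTableSketch(Isolate* isolate) {
  // Assumed: Isolate* converts to PtrComprCageBase, as the hunk above implies.
  PtrComprCageBase cage_base(isolate);
  // Assumed accessor; under a per-Isolate cage the base is the Isolate's own.
  isolate->string_table()->Print(cage_base);
}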
@@ -386,10 +386,10 @@ void CCGenerator::EmitInstruction(const LoadReferenceInstruction& instruction,
   out() << " " << result_name << " = ";
   if (instruction.type->IsSubtypeOf(TypeOracle::GetTaggedType())) {
     // Currently, all of the tagged loads we emit are for smi values, so there
-    // is no point in providing an IsolateRoot. If at some point we start
+    // is no point in providing a PtrComprCageBase. If at some point we start
     // emitting loads for tagged fields which might be HeapObjects, then we
-    // should plumb an IsolateRoot through the generated functions that need
-    // it.
+    // should plumb a PtrComprCageBase through the generated functions that
+    // need it.
     if (!instruction.type->IsSubtypeOf(TypeOracle::GetSmiType())) {
       Error(
           "Not supported in C++ output: LoadReference on non-smi tagged "
@@ -4223,8 +4223,9 @@ void CppClassGenerator::GenerateFieldAccessors(
   hdr_ << " inline " << type_name << " " << name << "("
        << (indexed ? "int i" : "") << ") const;\n";
   if (can_contain_heap_objects) {
-    hdr_ << " inline " << type_name << " " << name << "(IsolateRoot isolate"
-         << (indexed ? ", int i" : "") << ") const;\n";
+    hdr_ << " inline " << type_name << " " << name
+         << "(PtrComprCageBase cage_base" << (indexed ? ", int i" : "")
+         << ") const;\n";
   }
   hdr_ << " inline void set_" << name << "(" << (indexed ? "int i, " : "")
        << type_name << " value"
@@ -4233,14 +4234,14 @@ void CppClassGenerator::GenerateFieldAccessors(
                 : "")
        << ");\n\n";
 
-  // For tagged data, generate the extra getter that derives an IsolateRoot from
-  // the current object's pointer.
+  // For tagged data, generate the extra getter that derives a PtrComprCageBase
+  // from the current object's pointer.
   if (can_contain_heap_objects) {
     inl_ << "template <class D, class P>\n";
     inl_ << type_name << " " << gen_name_ << "<D, P>::" << name << "("
          << (indexed ? "int i" : "") << ") const {\n";
-    inl_ << " IsolateRoot isolate = GetIsolateForPtrCompr(*this);\n";
-    inl_ << " return " << gen_name_ << "::" << name << "(isolate"
+    inl_ << " PtrComprCageBase cage_base = GetPtrComprCageBase(*this);\n";
+    inl_ << " return " << gen_name_ << "::" << name << "(cage_base"
          << (indexed ? ", i" : "") << ");\n";
     inl_ << "}\n";
   }
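Put together, the inl_ stream above emits a getter of roughly the following shape for a tagged field; the class and field names here (TorqueGeneratedBar, foo) are invented for illustration:

// Illustrative output of the generator above (names are hypothetical).
template <class D, class P>
Object TorqueGeneratedBar<D, P>::foo() const {
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return TorqueGeneratedBar::foo(cage_base);
}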
@@ -4248,7 +4249,7 @@ void CppClassGenerator::GenerateFieldAccessors(
   // Generate the getter implementation.
   inl_ << "template <class D, class P>\n";
   inl_ << type_name << " " << gen_name_ << "<D, P>::" << name << "(";
-  if (can_contain_heap_objects) inl_ << "IsolateRoot isolate";
+  if (can_contain_heap_objects) inl_ << "PtrComprCageBase cage_base";
   if (can_contain_heap_objects && indexed) inl_ << ", ";
   if (indexed) inl_ << "int i";
   inl_ << ") const {\n";
@@ -4361,10 +4362,11 @@ void CppClassGenerator::EmitLoadFieldStatement(
     bool is_smi = field_type->IsSubtypeOf(TypeOracle::GetSmiType());
     const std::string load_type = is_smi ? "Smi" : type_name;
     const char* postfix = is_smi ? ".value()" : "";
-    const char* optional_isolate = is_smi ? "" : "isolate, ";
+    const char* optional_cage_base = is_smi ? "" : "cage_base, ";
 
     inl_ << "TaggedField<" << load_type << ">::" << load << "("
-         << optional_isolate << "*this, " << offset << ")" << postfix << ";\n";
+         << optional_cage_base << "*this, " << offset << ")" << postfix
+         << ";\n";
   }
 
   if (CanContainHeapObjects(field_type)) {
@@ -59,13 +59,13 @@ CAST_ACCESSOR(WasmTypeInfo)
 CAST_ACCESSOR(WasmStruct)
 CAST_ACCESSOR(WasmArray)
 
 #define OPTIONAL_ACCESSORS(holder, name, type, offset) \
   DEF_GETTER(holder, has_##name, bool) { \
-    Object value = TaggedField<Object, offset>::load(isolate, *this); \
-    return !value.IsUndefined(GetReadOnlyRoots(isolate)); \
+    Object value = TaggedField<Object, offset>::load(cage_base, *this); \
+    return !value.IsUndefined(GetReadOnlyRoots(cage_base)); \
   } \
   ACCESSORS_CHECKED2(holder, name, type, offset, \
-                     !value.IsUndefined(GetReadOnlyRoots(isolate)), true)
+                     !value.IsUndefined(GetReadOnlyRoots(cage_base)), true)
 
 #define PRIMITIVE_ACCESSORS(holder, name, type, offset) \
   type holder::name() const { \
@@ -460,6 +460,12 @@ int WasmArray::GcSafeSizeFor(Map map, int length) {
 
 void WasmTypeInfo::clear_foreign_address(Isolate* isolate) {
 #ifdef V8_HEAP_SANDBOX
+
+  // TODO(syg): V8_HEAP_SANDBOX doesn't work with pointer cage
+#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
+#error "V8_HEAP_SANDBOX requires per-Isolate pointer compression cage"
+#endif
+
   // Due to the type-specific pointer tags for external pointers, we need to
   // allocate an entry in the table here even though it will just store nullptr.
   AllocateExternalPointerEntries(isolate);
@@ -14,7 +14,7 @@ namespace debug_helper_internal {
 
 bool IsPointerCompressed(uintptr_t address) {
 #if COMPRESS_POINTERS_BOOL
-  return address < i::kPtrComprHeapReservationSize;
+  return address < i::kPtrComprCageReservationSize;
 #else
   return false;
 #endif
@@ -348,7 +348,7 @@ class ReadStringVisitor : public TqObjectVisitor {
         GetOrFinish(object->GetResourceDataValue(accessor_));
 #ifdef V8_COMPRESS_POINTERS
     uintptr_t data_address = static_cast<uintptr_t>(
-        DecodeExternalPointer(GetIsolateForPtrComprFromOnHeapAddress(
+        DecodeExternalPointer(GetPtrComprCageBaseFromOnHeapAddress(
                                   heap_addresses_.any_heap_pointer),
                               resource_data, kExternalStringResourceDataTag));
 #else