[ptr-cmpr] Change const Isolate* to IsolateRoot

Introduce an IsolateRoot class, which encapsulates the root address
needed for pointer decompression. This class is implicitly constructible
from both Isolate* and LocalIsolate*, allowing us to avoid templating
methods that can take both, or awkwardly creating a `const Isolate*`
from a `LocalIsolate*` just for getters.

Change-Id: I6d4b9492409fc7d5b375162e381192cb48c8ba01
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2440605
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Auto-Submit: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/master@{#70365}
This commit is contained in:
Leszek Swirski 2020-10-06 17:41:41 +02:00 committed by Commit Bot
parent ec76fb0f6f
commit 9e26f70529
62 changed files with 294 additions and 301 deletions

View File

@ -3663,7 +3663,7 @@ MaybeLocal<Uint32> Value::ToUint32(Local<Context> context) const {
RETURN_ESCAPED(result);
}
i::Address i::DecodeExternalPointerImpl(const Isolate* isolate,
i::Address i::DecodeExternalPointerImpl(const i::Isolate* isolate,
i::ExternalPointer_t encoded_pointer) {
return i::DecodeExternalPointer(isolate, encoded_pointer);
}

View File

@ -12,7 +12,7 @@
namespace v8 {
namespace internal {
V8_INLINE Address DecodeExternalPointer(const Isolate* isolate,
V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate,
ExternalPointer_t encoded_pointer) {
STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize);
#ifdef V8_HEAP_SANDBOX
@ -59,7 +59,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate,
}
V8_INLINE Address ReadExternalPointerField(Address field_address,
const Isolate* isolate) {
IsolateRoot isolate) {
// Pointer compression causes types larger than kTaggedSize to be unaligned.
constexpr bool v8_pointer_compression_unaligned =
kExternalPointerSize > kTaggedSize;

View File

@ -12,7 +12,7 @@ namespace internal {
// Convert external pointer from on-V8-heap representation to an actual external
// pointer value.
V8_INLINE Address DecodeExternalPointer(const Isolate* isolate,
V8_INLINE Address DecodeExternalPointer(IsolateRoot isolate,
ExternalPointer_t encoded_pointer);
constexpr ExternalPointer_t kNullExternalPointer = 0;
@ -33,7 +33,7 @@ V8_INLINE void InitExternalPointerField(Address field_address, Isolate* isolate,
// Reads external pointer for the field, and decodes it if the sandbox is
// enabled.
V8_INLINE Address ReadExternalPointerField(Address field_address,
const Isolate* isolate);
IsolateRoot isolate);
// Encodes value if the sandbox is enabled and writes it into the field.
V8_INLINE void WriteExternalPointerField(Address field_address,

View File

@ -1685,6 +1685,31 @@ enum class DynamicMapChecksStatus : uint8_t {
kDeopt = 2
};
#ifdef V8_COMPRESS_POINTERS
// Lightweight handle encapsulating the isolate root address needed for
// pointer decompression. It is implicitly constructible from both Isolate*
// and LocalIsolate*, so methods that accept either need not be templated.
class IsolateRoot {
public:
// Wraps a raw isolate-root address directly.
explicit constexpr IsolateRoot(Address address) : address_(address) {}
// Implicit by design so Isolate*/LocalIsolate* convert transparently;
// NOLINTNEXTLINE
inline IsolateRoot(const Isolate* isolate);
// NOLINTNEXTLINE
inline IsolateRoot(const LocalIsolate* isolate);
// Returns the wrapped root address (see the definition: it re-asserts
// kPtrComprIsolateRootAlignment alignment as an optimizer hint).
inline Address address() const;
private:
Address address_;
};
#else
// Without pointer compression no root address is needed: this stateless stub
// keeps the same implicit-conversion interface so call sites are unchanged.
class IsolateRoot {
public:
IsolateRoot() = default;
// NOLINTNEXTLINE
IsolateRoot(const Isolate* isolate) {}
// NOLINTNEXTLINE
IsolateRoot(const LocalIsolate* isolate) {}
};
#endif
} // namespace internal
// Tag dispatching support for acquire loads and release stores.

View File

@ -8,32 +8,37 @@
#include "include/v8-internal.h"
#include "src/common/ptr-compr.h"
#include "src/execution/isolate.h"
#include "src/execution/local-isolate-inl.h"
namespace v8 {
namespace internal {
#ifdef V8_COMPRESS_POINTERS
// Both isolate flavours expose the same isolate_root() accessor; capturing
// that address here is what lets IsolateRoot stand in for either type.
IsolateRoot::IsolateRoot(const Isolate* isolate)
: address_(isolate->isolate_root()) {}
IsolateRoot::IsolateRoot(const LocalIsolate* isolate)
: address_(isolate->isolate_root()) {}
// Returns the stored root address, re-asserting its alignment to
// kPtrComprIsolateRootAlignment via V8_ASSUME_ALIGNED as an optimizer hint.
Address IsolateRoot::address() const {
Address ret = address_;
ret = reinterpret_cast<Address>(V8_ASSUME_ALIGNED(
reinterpret_cast<void*>(ret), kPtrComprIsolateRootAlignment));
return ret;
}
// Compresses full-pointer representation of a tagged value to on-heap
// representation.
V8_INLINE Tagged_t CompressTagged(Address tagged) {
// Compression is plain truncation to the low 32 bits of the full pointer.
return static_cast<Tagged_t>(static_cast<uint32_t>(tagged));
}
V8_INLINE Address GetIsolateRoot(Address on_heap_addr) {
// We subtract 1 here in order to let the compiler generate addition of 32-bit
// signed constant instead of 64-bit constant (the problem is that 2Gb looks
// like a negative 32-bit value). It's correct because we will never use
// leftmost address of V8 heap as |on_heap_addr|.
// Recovers the isolate root from an arbitrary on-heap address by rounding
// down to the root alignment — presumes the heap lies entirely within one
// kPtrComprIsolateRootAlignment-aligned region.
V8_INLINE constexpr Address GetIsolateRootAddress(Address on_heap_addr) {
return RoundDown<kPtrComprIsolateRootAlignment>(on_heap_addr);
}
V8_INLINE Address GetIsolateRoot(const Isolate* isolate) {
Address isolate_root = isolate->isolate_root();
#ifdef V8_COMPRESS_POINTERS
isolate_root = reinterpret_cast<Address>(V8_ASSUME_ALIGNED(
reinterpret_cast<void*>(isolate_root), kPtrComprIsolateRootAlignment));
#endif
return isolate_root;
// Overload for callers that already hold an IsolateRoot: just unwrap it.
V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) {
return isolate.address();
}
// Decompresses smi value.
@ -47,7 +52,7 @@ V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) {
template <typename TOnHeapAddress>
V8_INLINE Address DecompressTaggedPointer(TOnHeapAddress on_heap_addr,
Tagged_t raw_value) {
return GetIsolateRoot(on_heap_addr) + static_cast<Address>(raw_value);
return GetIsolateRootAddress(on_heap_addr) + static_cast<Address>(raw_value);
}
// Decompresses any tagged value, preserving both weak- and smi- tags.
@ -66,9 +71,9 @@ STATIC_ASSERT(kPtrComprIsolateRootAlignment ==
V8_INLINE Tagged_t CompressTagged(Address tagged) { UNREACHABLE(); }
V8_INLINE Address GetIsolateRoot(Address on_heap_addr) { UNREACHABLE(); }
V8_INLINE Address GetIsolateRootAddress(Address on_heap_addr) { UNREACHABLE(); }
V8_INLINE Address GetIsolateRoot(const Isolate* isolate) { UNREACHABLE(); }
V8_INLINE Address GetIsolateRootAddress(IsolateRoot isolate) { UNREACHABLE(); }
V8_INLINE Address DecompressTaggedSigned(Tagged_t raw_value) { UNREACHABLE(); }

View File

@ -306,7 +306,7 @@ void BytecodeArray::BytecodeArrayVerify(Isolate* isolate) {
USE_TORQUE_VERIFIER(JSReceiver)
bool JSObject::ElementsAreSafeToExamine(const Isolate* isolate) const {
bool JSObject::ElementsAreSafeToExamine(IsolateRoot isolate) const {
// If a GC was caused while constructing this object, the elements
// pointer may point to a one pointer filler map.
return elements(isolate) !=

View File

@ -468,7 +468,7 @@ void PrintSloppyArgumentElements(std::ostream& os, ElementsKind kind,
}
}
void PrintEmbedderData(const Isolate* isolate, std::ostream& os,
void PrintEmbedderData(IsolateRoot isolate, std::ostream& os,
EmbedderDataSlot slot) {
DisallowHeapAllocation no_gc;
Object value = slot.load_tagged();
@ -578,7 +578,7 @@ static void JSObjectPrintBody(std::ostream& os,
}
int embedder_fields = obj.GetEmbedderFieldCount();
if (embedder_fields > 0) {
const Isolate* isolate = GetIsolateForPtrCompr(obj);
IsolateRoot isolate = GetIsolateForPtrCompr(obj);
os << " - embedder fields = {";
for (int i = 0; i < embedder_fields; i++) {
os << "\n ";
@ -772,7 +772,7 @@ void PrintWeakArrayElements(std::ostream& os, T* array) {
} // namespace
void EmbedderDataArray::EmbedderDataArrayPrint(std::ostream& os) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
PrintHeader(os, "EmbedderDataArray");
os << "\n - length: " << length();
EmbedderDataSlot start(*this, 0);

View File

@ -13,34 +13,19 @@
namespace v8 {
namespace internal {
inline const Isolate* GetIsolateForPtrComprFromOnHeapAddress(Address address) {
inline constexpr IsolateRoot GetIsolateForPtrComprFromOnHeapAddress(
Address address) {
#ifdef V8_COMPRESS_POINTERS
return Isolate::FromRoot(GetIsolateRoot(address));
return IsolateRoot(GetIsolateRootAddress(address));
#else
return nullptr;
return IsolateRoot();
#endif // V8_COMPRESS_POINTERS
}
inline const Isolate* GetIsolateForPtrCompr(HeapObject object) {
inline IsolateRoot GetIsolateForPtrCompr(HeapObject object) {
return GetIsolateForPtrComprFromOnHeapAddress(object.ptr());
}
inline const Isolate* GetIsolateForPtrCompr(const Isolate* isolate) {
#ifdef V8_COMPRESS_POINTERS
return isolate;
#else
return nullptr;
#endif // V8_COMPRESS_POINTERS
}
inline const Isolate* GetIsolateForPtrCompr(const LocalIsolate* isolate) {
#ifdef V8_COMPRESS_POINTERS
return isolate->GetIsolateForPtrCompr();
#else
return nullptr;
#endif // V8_COMPRESS_POINTERS
}
V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
// Avoid using the below GetIsolateFromWritableObject because we want to be
// able to get the heap, but not the isolate, for off-thread objects.
@ -48,7 +33,8 @@ V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object) {
#if defined V8_ENABLE_THIRD_PARTY_HEAP
return Heap::GetIsolateFromWritableObject(object)->heap();
#elif defined V8_COMPRESS_POINTERS
Isolate* isolate = Isolate::FromRoot(GetIsolateRoot(object.ptr()));
Isolate* isolate =
Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()));
DCHECK_NOT_NULL(isolate);
return isolate->heap();
#else
@ -62,7 +48,8 @@ V8_INLINE Isolate* GetIsolateFromWritableObject(HeapObject object) {
#ifdef V8_ENABLE_THIRD_PARTY_HEAP
return Heap::GetIsolateFromWritableObject(object);
#elif defined V8_COMPRESS_POINTERS
Isolate* isolate = Isolate::FromRoot(GetIsolateRoot(object.ptr()));
Isolate* isolate =
Isolate::FromRootAddress(GetIsolateRootAddress(object.ptr()));
DCHECK_NOT_NULL(isolate);
return isolate;
#else

View File

@ -14,7 +14,7 @@ namespace internal {
// value is intended to be used only as a hoisted computation of isolate root
inside trivial accessors for optimizing value decompression.
When pointer compression is disabled this function returns a default-constructed (empty) IsolateRoot.
V8_INLINE const Isolate* GetIsolateForPtrCompr(HeapObject object);
V8_INLINE IsolateRoot GetIsolateForPtrCompr(HeapObject object);
V8_INLINE Heap* GetHeapFromWritableObject(HeapObject object);

View File

@ -955,7 +955,7 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
static size_t isolate_root_bias() {
return OFFSET_OF(Isolate, isolate_data_) + IsolateData::kIsolateRootBias;
}
static Isolate* FromRoot(Address isolate_root) {
static Isolate* FromRootAddress(Address isolate_root) {
return reinterpret_cast<Isolate*>(isolate_root - isolate_root_bias());
}

View File

@ -53,8 +53,6 @@ class V8_EXPORT_PRIVATE LocalIsolate final : private HiddenLocalFactory {
StringTable* string_table() { return isolate_->string_table(); }
const Isolate* GetIsolateForPtrCompr() const { return isolate_; }
v8::internal::LocalFactory* factory() {
// Upcast to the privately inherited base-class using c-style casts to avoid
// undefined behavior (as static_cast cannot cast across private bases).

View File

@ -381,7 +381,7 @@ namespace {
void ExtractInternalFields(JSObject jsobject, void** embedder_fields, int len) {
int field_count = jsobject.GetEmbedderFieldCount();
const Isolate* isolate = GetIsolateForPtrCompr(jsobject);
IsolateRoot isolate = GetIsolateForPtrCompr(jsobject);
for (int i = 0; i < len; ++i) {
if (field_count == i) break;
void* pointer;

View File

@ -2677,8 +2677,7 @@ static inline SlotCallbackResult UpdateSlot(TSlot slot,
}
template <AccessMode access_mode, typename TSlot>
static inline SlotCallbackResult UpdateSlot(const Isolate* isolate,
TSlot slot) {
static inline SlotCallbackResult UpdateSlot(IsolateRoot isolate, TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
HeapObject heap_obj;
if (TSlot::kCanBeWeak && obj->GetHeapObjectIfWeak(&heap_obj)) {
@ -2691,7 +2690,7 @@ static inline SlotCallbackResult UpdateSlot(const Isolate* isolate,
}
template <AccessMode access_mode, typename TSlot>
static inline SlotCallbackResult UpdateStrongSlot(const Isolate* isolate,
static inline SlotCallbackResult UpdateStrongSlot(IsolateRoot isolate,
TSlot slot) {
typename TSlot::TObject obj = slot.Relaxed_Load(isolate);
DCHECK(!HAS_WEAK_HEAP_OBJECT_TAG(obj.ptr()));
@ -2709,8 +2708,7 @@ static inline SlotCallbackResult UpdateStrongSlot(const Isolate* isolate,
// It does not expect to encounter pointers to dead objects.
class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
public:
explicit PointersUpdatingVisitor(const Isolate* isolate)
: isolate_(isolate) {}
explicit PointersUpdatingVisitor(IsolateRoot isolate) : isolate_(isolate) {}
void VisitPointer(HeapObject host, ObjectSlot p) override {
UpdateStrongSlotInternal(isolate_, p);
@ -2765,32 +2763,32 @@ class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
}
private:
static inline SlotCallbackResult UpdateRootSlotInternal(
const Isolate* isolate, FullObjectSlot slot) {
static inline SlotCallbackResult UpdateRootSlotInternal(IsolateRoot isolate,
FullObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
static inline SlotCallbackResult UpdateRootSlotInternal(
const Isolate* isolate, OffHeapObjectSlot slot) {
IsolateRoot isolate, OffHeapObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
static inline SlotCallbackResult UpdateStrongMaybeObjectSlotInternal(
const Isolate* isolate, MaybeObjectSlot slot) {
IsolateRoot isolate, MaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
static inline SlotCallbackResult UpdateStrongSlotInternal(
const Isolate* isolate, ObjectSlot slot) {
static inline SlotCallbackResult UpdateStrongSlotInternal(IsolateRoot isolate,
ObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
static inline SlotCallbackResult UpdateSlotInternal(const Isolate* isolate,
static inline SlotCallbackResult UpdateSlotInternal(IsolateRoot isolate,
MaybeObjectSlot slot) {
return UpdateSlot<AccessMode::NON_ATOMIC>(isolate, slot);
}
const Isolate* isolate_;
IsolateRoot isolate_;
};
static String UpdateReferenceInExternalStringTableEntry(Heap* heap,
@ -3723,7 +3721,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
if ((updating_mode_ == RememberedSetUpdatingMode::ALL) &&
(chunk_->slot_set<OLD_TO_OLD, AccessMode::NON_ATOMIC>() != nullptr)) {
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(chunk_);
const Isolate* isolate = heap_->isolate();
IsolateRoot isolate = heap_->isolate();
RememberedSet<OLD_TO_OLD>::Iterate(
chunk_,
[&filter, isolate](MaybeObjectSlot slot) {
@ -3763,7 +3761,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
Address slot) {
// Using UpdateStrongSlot is OK here, because there are no weak
// typed slots.
const Isolate* isolate = heap_->isolate();
IsolateRoot isolate = heap_->isolate();
return UpdateTypedSlotHelper::UpdateTypedSlot(
heap_, slot_type, slot, [isolate](FullMaybeObjectSlot slot) {
return UpdateStrongSlot<AccessMode::NON_ATOMIC>(isolate, slot);

View File

@ -15,8 +15,8 @@ namespace internal {
// static
ReadOnlyRoots ReadOnlyHeap::GetReadOnlyRoots(HeapObject object) {
#ifdef V8_COMPRESS_POINTERS
const Isolate* isolate = GetIsolateForPtrCompr(object);
return ReadOnlyRoots(const_cast<Isolate*>(isolate));
IsolateRoot isolate = GetIsolateForPtrCompr(object);
return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address()));
#else
#ifdef V8_SHARED_RO_HEAP
// This fails if we are creating heap objects and the roots haven't yet been

View File

@ -28,7 +28,7 @@ namespace v8 {
namespace internal {
void CopyAndRebaseRoots(Address* src, Address* dst, Address new_base) {
Address src_base = GetIsolateRoot(src[0]);
Address src_base = GetIsolateRootAddress(src[0]);
for (size_t i = 0; i < ReadOnlyHeap::kEntriesCount; ++i) {
dst[i] = src[i] - src_base + new_base;
}
@ -113,7 +113,7 @@ void PointerCompressedReadOnlyArtifacts::InitializeRootsIn(Isolate* isolate) {
auto isolate_ro_roots =
isolate->roots_table().read_only_roots_begin().location();
CopyAndRebaseRoots(read_only_roots_, isolate_ro_roots,
GetIsolateRoot(isolate));
isolate->isolate_root());
}
SharedReadOnlySpace* PointerCompressedReadOnlyArtifacts::CreateReadOnlySpace(
@ -123,7 +123,7 @@ SharedReadOnlySpace* PointerCompressedReadOnlyArtifacts::CreateReadOnlySpace(
std::vector<std::unique_ptr<v8::PageAllocator::SharedMemoryMapping>> mappings;
std::vector<ReadOnlyPage*> pages;
Address isolate_root = GetIsolateRoot(isolate);
Address isolate_root = isolate->isolate_root();
for (size_t i = 0; i < pages_.size(); ++i) {
const ReadOnlyPage* page = pages_[i];
const Tagged_t offset = OffsetForPage(i);
@ -167,7 +167,7 @@ ReadOnlyHeap* PointerCompressedReadOnlyArtifacts::GetReadOnlyHeapForIsolate(
// ReadOnlyArtifacts and be decompressed on the fly.
auto original_cache = read_only_heap_->read_only_object_cache_;
auto& cache = read_only_heap->read_only_object_cache_;
Address isolate_root = GetIsolateRoot(isolate);
Address isolate_root = isolate->isolate_root();
for (Object original_object : original_cache) {
Address original_address = original_object.ptr();
Address new_address = isolate_root + CompressTagged(original_address);

View File

@ -288,7 +288,7 @@ int Code::SizeIncludingMetadata() const {
}
ByteArray Code::unchecked_relocation_info() const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return ByteArray::unchecked_cast(
TaggedField<HeapObject, kRelocationInfoOffset>::load(isolate, *this));
}

View File

@ -33,7 +33,7 @@ Object CompressedObjectSlot::operator*() const {
return Object(DecompressTaggedAny(address(), value));
}
Object CompressedObjectSlot::load(const Isolate* isolate) const {
Object CompressedObjectSlot::load(IsolateRoot isolate) const {
Tagged_t value = *location();
return Object(DecompressTaggedAny(isolate, value));
}
@ -52,7 +52,7 @@ Object CompressedObjectSlot::Relaxed_Load() const {
return Object(DecompressTaggedAny(address(), value));
}
Object CompressedObjectSlot::Relaxed_Load(const Isolate* isolate) const {
Object CompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return Object(DecompressTaggedAny(isolate, value));
}
@ -85,7 +85,7 @@ MaybeObject CompressedMaybeObjectSlot::operator*() const {
return MaybeObject(DecompressTaggedAny(address(), value));
}
MaybeObject CompressedMaybeObjectSlot::load(const Isolate* isolate) const {
MaybeObject CompressedMaybeObjectSlot::load(IsolateRoot isolate) const {
Tagged_t value = *location();
return MaybeObject(DecompressTaggedAny(isolate, value));
}
@ -99,8 +99,7 @@ MaybeObject CompressedMaybeObjectSlot::Relaxed_Load() const {
return MaybeObject(DecompressTaggedAny(address(), value));
}
MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(
const Isolate* isolate) const {
MaybeObject CompressedMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return MaybeObject(DecompressTaggedAny(isolate, value));
}
@ -126,8 +125,7 @@ HeapObjectReference CompressedHeapObjectSlot::operator*() const {
return HeapObjectReference(DecompressTaggedPointer(address(), value));
}
HeapObjectReference CompressedHeapObjectSlot::load(
const Isolate* isolate) const {
HeapObjectReference CompressedHeapObjectSlot::load(IsolateRoot isolate) const {
Tagged_t value = *location();
return HeapObjectReference(DecompressTaggedPointer(isolate, value));
}
@ -150,7 +148,7 @@ void CompressedHeapObjectSlot::StoreHeapObject(HeapObject value) const {
// OffHeapCompressedObjectSlot implementation.
//
Object OffHeapCompressedObjectSlot::load(const Isolate* isolate) const {
Object OffHeapCompressedObjectSlot::load(IsolateRoot isolate) const {
Tagged_t value = *location();
return Object(DecompressTaggedAny(isolate, value));
}
@ -159,12 +157,12 @@ void OffHeapCompressedObjectSlot::store(Object value) const {
*location() = CompressTagged(value.ptr());
}
Object OffHeapCompressedObjectSlot::Relaxed_Load(const Isolate* isolate) const {
Object OffHeapCompressedObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location());
return Object(DecompressTaggedAny(isolate, value));
}
Object OffHeapCompressedObjectSlot::Acquire_Load(const Isolate* isolate) const {
Object OffHeapCompressedObjectSlot::Acquire_Load(IsolateRoot isolate) const {
AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location());
return Object(DecompressTaggedAny(isolate, value));
}

View File

@ -41,12 +41,12 @@ class CompressedObjectSlot : public SlotBase<CompressedObjectSlot, Tagged_t> {
// TODO(leszeks): Consider deprecating the operator* load, and always pass the
// Isolate.
inline Object operator*() const;
inline Object load(const Isolate* isolate) const;
inline Object load(IsolateRoot isolate) const;
inline void store(Object value) const;
inline Object Acquire_Load() const;
inline Object Relaxed_Load() const;
inline Object Relaxed_Load(const Isolate* isolate) const;
inline Object Relaxed_Load(IsolateRoot isolate) const;
inline void Relaxed_Store(Object value) const;
inline void Release_Store(Object value) const;
inline Object Release_CompareAndSwap(Object old, Object target) const;
@ -77,11 +77,11 @@ class CompressedMaybeObjectSlot
: SlotBase(slot.address()) {}
inline MaybeObject operator*() const;
inline MaybeObject load(const Isolate* isolate) const;
inline MaybeObject load(IsolateRoot isolate) const;
inline void store(MaybeObject value) const;
inline MaybeObject Relaxed_Load() const;
inline MaybeObject Relaxed_Load(const Isolate* isolate) const;
inline MaybeObject Relaxed_Load(IsolateRoot isolate) const;
inline void Relaxed_Store(MaybeObject value) const;
inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
};
@ -105,7 +105,7 @@ class CompressedHeapObjectSlot
: SlotBase(slot.address()) {}
inline HeapObjectReference operator*() const;
inline HeapObjectReference load(const Isolate* isolate) const;
inline HeapObjectReference load(IsolateRoot isolate) const;
inline void store(HeapObjectReference value) const;
inline HeapObject ToHeapObject() const;
@ -131,11 +131,11 @@ class OffHeapCompressedObjectSlot
explicit OffHeapCompressedObjectSlot(const uint32_t* ptr)
: SlotBase(reinterpret_cast<Address>(ptr)) {}
inline Object load(const Isolate* isolate) const;
inline Object load(IsolateRoot isolate) const;
inline void store(Object value) const;
inline Object Relaxed_Load(const Isolate* isolate) const;
inline Object Acquire_Load(const Isolate* isolate) const;
inline Object Relaxed_Load(IsolateRoot isolate) const;
inline Object Acquire_Load(IsolateRoot isolate) const;
inline void Relaxed_Store(Object value) const;
inline void Release_Store(Object value) const;
inline void Release_CompareAndSwap(Object old, Object target) const;

View File

@ -56,11 +56,11 @@ SMI_ACCESSORS(Context, length, kLengthOffset)
CAST_ACCESSOR(NativeContext)
Object Context::get(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
}
Object Context::get(const Isolate* isolate, int index) const {
Object Context::get(IsolateRoot isolate, int index) const {
DCHECK_LT(static_cast<unsigned>(index),
static_cast<unsigned>(this->length()));
return TaggedField<Object>::Relaxed_Load(isolate, *this,
@ -88,11 +88,11 @@ void Context::set_scope_info(ScopeInfo scope_info) {
}
Object Context::synchronized_get(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return synchronized_get(isolate, index);
}
Object Context::synchronized_get(const Isolate* isolate, int index) const {
Object Context::synchronized_get(IsolateRoot isolate, int index) const {
DCHECK_LT(static_cast<unsigned int>(index),
static_cast<unsigned int>(this->length()));
return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));

View File

@ -438,13 +438,13 @@ class Context : public HeapObject {
// Setter and getter for elements.
V8_INLINE Object get(int index) const;
V8_INLINE Object get(const Isolate* isolate, int index) const;
V8_INLINE Object get(IsolateRoot isolate, int index) const;
V8_INLINE void set(int index, Object value);
// Setter with explicit barrier mode.
V8_INLINE void set(int index, Object value, WriteBarrierMode mode);
// Setter and getter with synchronization semantics.
V8_INLINE Object synchronized_get(int index) const;
V8_INLINE Object synchronized_get(const Isolate* isolate, int index) const;
V8_INLINE Object synchronized_get(IsolateRoot isolate, int index) const;
V8_INLINE void synchronized_set(int index, Object value);
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,

View File

@ -104,11 +104,11 @@ ObjectSlot DescriptorArray::GetDescriptorSlot(int descriptor) {
}
Name DescriptorArray::GetKey(InternalIndex descriptor_number) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetKey(isolate, descriptor_number);
}
Name DescriptorArray::GetKey(const Isolate* isolate,
Name DescriptorArray::GetKey(IsolateRoot isolate,
InternalIndex descriptor_number) const {
DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
@ -127,12 +127,11 @@ int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
}
Name DescriptorArray::GetSortedKey(int descriptor_number) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetSortedKey(isolate, descriptor_number);
}
Name DescriptorArray::GetSortedKey(const Isolate* isolate,
int descriptor_number) {
Name DescriptorArray::GetSortedKey(IsolateRoot isolate, int descriptor_number) {
return GetKey(isolate, InternalIndex(GetSortedKeyIndex(descriptor_number)));
}
@ -142,11 +141,11 @@ void DescriptorArray::SetSortedKey(int descriptor_number, int pointer) {
}
Object DescriptorArray::GetStrongValue(InternalIndex descriptor_number) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetStrongValue(isolate, descriptor_number);
}
Object DescriptorArray::GetStrongValue(const Isolate* isolate,
Object DescriptorArray::GetStrongValue(IsolateRoot isolate,
InternalIndex descriptor_number) {
return GetValue(isolate, descriptor_number).cast<Object>();
}
@ -160,11 +159,11 @@ void DescriptorArray::SetValue(InternalIndex descriptor_number,
}
MaybeObject DescriptorArray::GetValue(InternalIndex descriptor_number) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetValue(isolate, descriptor_number);
}
MaybeObject DescriptorArray::GetValue(const Isolate* isolate,
MaybeObject DescriptorArray::GetValue(IsolateRoot isolate,
InternalIndex descriptor_number) {
DCHECK_LT(descriptor_number.as_int(), number_of_descriptors());
int entry_offset = OffsetOfDescriptorAt(descriptor_number.as_int());
@ -191,11 +190,11 @@ int DescriptorArray::GetFieldIndex(InternalIndex descriptor_number) {
}
FieldType DescriptorArray::GetFieldType(InternalIndex descriptor_number) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return GetFieldType(isolate, descriptor_number);
}
FieldType DescriptorArray::GetFieldType(const Isolate* isolate,
FieldType DescriptorArray::GetFieldType(IsolateRoot isolate,
InternalIndex descriptor_number) {
DCHECK_EQ(GetDetails(descriptor_number).location(), kField);
MaybeObject wrapped_type = GetValue(isolate, descriptor_number);

View File

@ -67,22 +67,22 @@ class DescriptorArray
// Accessors for fetching instance descriptor at descriptor number.
inline Name GetKey(InternalIndex descriptor_number) const;
inline Name GetKey(const Isolate* isolate,
inline Name GetKey(IsolateRoot isolate,
InternalIndex descriptor_number) const;
inline Object GetStrongValue(InternalIndex descriptor_number);
inline Object GetStrongValue(const Isolate* isolate,
inline Object GetStrongValue(IsolateRoot isolate,
InternalIndex descriptor_number);
inline MaybeObject GetValue(InternalIndex descriptor_number);
inline MaybeObject GetValue(const Isolate* isolate,
inline MaybeObject GetValue(IsolateRoot isolate,
InternalIndex descriptor_number);
inline PropertyDetails GetDetails(InternalIndex descriptor_number);
inline int GetFieldIndex(InternalIndex descriptor_number);
inline FieldType GetFieldType(InternalIndex descriptor_number);
inline FieldType GetFieldType(const Isolate* isolate,
inline FieldType GetFieldType(IsolateRoot isolate,
InternalIndex descriptor_number);
inline Name GetSortedKey(int descriptor_number);
inline Name GetSortedKey(const Isolate* isolate, int descriptor_number);
inline Name GetSortedKey(IsolateRoot isolate, int descriptor_number);
inline int GetSortedKeyIndex(int descriptor_number);
// Accessor for complete descriptor.

View File

@ -30,12 +30,12 @@ Dictionary<Derived, Shape>::Dictionary(Address ptr)
template <typename Derived, typename Shape>
Object Dictionary<Derived, Shape>::ValueAt(InternalIndex entry) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return ValueAt(isolate, entry);
}
template <typename Derived, typename Shape>
Object Dictionary<Derived, Shape>::ValueAt(const Isolate* isolate,
Object Dictionary<Derived, Shape>::ValueAt(IsolateRoot isolate,
InternalIndex entry) {
return this->get(isolate, DerivedHashTable::EntryToIndex(entry) +
Derived::kEntryValueIndex);
@ -181,11 +181,11 @@ Handle<Map> GlobalDictionary::GetMap(ReadOnlyRoots roots) {
}
Name NameDictionary::NameAt(InternalIndex entry) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return NameAt(isolate, entry);
}
Name NameDictionary::NameAt(const Isolate* isolate, InternalIndex entry) {
Name NameDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) {
return Name::cast(KeyAt(isolate, entry));
}
@ -194,31 +194,31 @@ Handle<Map> NameDictionary::GetMap(ReadOnlyRoots roots) {
}
PropertyCell GlobalDictionary::CellAt(InternalIndex entry) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return CellAt(isolate, entry);
}
PropertyCell GlobalDictionary::CellAt(const Isolate* isolate,
PropertyCell GlobalDictionary::CellAt(IsolateRoot isolate,
InternalIndex entry) {
DCHECK(KeyAt(isolate, entry).IsPropertyCell(isolate));
return PropertyCell::cast(KeyAt(isolate, entry));
}
Name GlobalDictionary::NameAt(InternalIndex entry) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return NameAt(isolate, entry);
}
Name GlobalDictionary::NameAt(const Isolate* isolate, InternalIndex entry) {
Name GlobalDictionary::NameAt(IsolateRoot isolate, InternalIndex entry) {
return CellAt(isolate, entry).name(isolate);
}
Object GlobalDictionary::ValueAt(InternalIndex entry) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return ValueAt(isolate, entry);
}
Object GlobalDictionary::ValueAt(const Isolate* isolate, InternalIndex entry) {
Object GlobalDictionary::ValueAt(IsolateRoot isolate, InternalIndex entry) {
return CellAt(isolate, entry).value(isolate);
}

View File

@ -32,7 +32,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) Dictionary
using Key = typename Shape::Key;
// Returns the value at entry.
inline Object ValueAt(InternalIndex entry);
inline Object ValueAt(const Isolate* isolate, InternalIndex entry);
inline Object ValueAt(IsolateRoot isolate, InternalIndex entry);
// Set the value for entry.
inline void ValueAtPut(InternalIndex entry, Object value);
@ -197,7 +197,7 @@ class V8_EXPORT_PRIVATE NameDictionary
static const int kInitialCapacity = 2;
inline Name NameAt(InternalIndex entry);
inline Name NameAt(const Isolate* isolate, InternalIndex entry);
inline Name NameAt(IsolateRoot isolate, InternalIndex entry);
inline void set_hash(int hash);
inline int hash() const;
@ -234,14 +234,14 @@ class V8_EXPORT_PRIVATE GlobalDictionary
DECL_CAST(GlobalDictionary)
inline Object ValueAt(InternalIndex entry);
inline Object ValueAt(const Isolate* isolate, InternalIndex entry);
inline Object ValueAt(IsolateRoot isolate, InternalIndex entry);
inline PropertyCell CellAt(InternalIndex entry);
inline PropertyCell CellAt(const Isolate* isolate, InternalIndex entry);
inline PropertyCell CellAt(IsolateRoot isolate, InternalIndex entry);
inline void SetEntry(InternalIndex entry, Object key, Object value,
PropertyDetails details);
inline void ClearEntry(InternalIndex entry);
inline Name NameAt(InternalIndex entry);
inline Name NameAt(const Isolate* isolate, InternalIndex entry);
inline Name NameAt(IsolateRoot isolate, InternalIndex entry);
inline void ValueAtPut(InternalIndex entry, Object value);
OBJECT_CONSTRUCTORS(

View File

@ -1423,7 +1423,7 @@ class DictionaryElementsAccessor
DisallowHeapAllocation no_gc;
NumberDictionary dict = NumberDictionary::cast(backing_store);
if (!dict.requires_slow_elements()) return false;
const Isolate* isolate = GetIsolateForPtrCompr(holder);
IsolateRoot isolate = GetIsolateForPtrCompr(holder);
ReadOnlyRoots roots = holder.GetReadOnlyRoots(isolate);
for (InternalIndex i : dict.IterateEntries()) {
Object key = dict.KeyAt(isolate, i);

View File

@ -81,7 +81,7 @@ void EmbedderDataSlot::store_tagged(JSObject object, int embedder_field_index,
#endif
}
bool EmbedderDataSlot::ToAlignedPointer(const Isolate* isolate,
bool EmbedderDataSlot::ToAlignedPointer(IsolateRoot isolate,
void** out_pointer) const {
// We don't care about atomicity of access here because embedder slots
// are accessed this way only from the main thread via API during "mutator"
@ -106,7 +106,7 @@ bool EmbedderDataSlot::ToAlignedPointer(const Isolate* isolate,
return HAS_SMI_TAG(raw_value);
}
bool EmbedderDataSlot::ToAlignedPointerSafe(const Isolate* isolate,
bool EmbedderDataSlot::ToAlignedPointerSafe(IsolateRoot isolate,
void** out_pointer) const {
#ifdef V8_HEAP_SANDBOX
uint32_t index = base::Memory<uint32_t>(address() + kRawPayloadOffset);

View File

@ -75,8 +75,7 @@ class EmbedderDataSlot
// When V8 heap sandbox is enabled, calling this method when the raw part of
// the slot does not contain valid external pointer table index is undefined
// behaviour and most likely result in crashes.
V8_INLINE bool ToAlignedPointer(const Isolate* isolate,
void** out_result) const;
V8_INLINE bool ToAlignedPointer(IsolateRoot isolate, void** out_result) const;
// Same as ToAlignedPointer() but with a workaround for V8 heap sandbox.
// When V8 heap sandbox is enabled, this method doesn't crash when the raw
@ -87,7 +86,7 @@ class EmbedderDataSlot
//
// Call this function if you are not sure whether the slot contains valid
// external pointer or not.
V8_INLINE bool ToAlignedPointerSafe(const Isolate* isolate,
V8_INLINE bool ToAlignedPointerSafe(IsolateRoot isolate,
void** out_result) const;
// Returns true if the pointer was successfully stored or false it the pointer

View File

@ -143,8 +143,7 @@ MaybeObject FeedbackVector::Get(FeedbackSlot slot) const {
return raw_feedback_slots(GetIndex(slot));
}
MaybeObject FeedbackVector::Get(const Isolate* isolate,
FeedbackSlot slot) const {
MaybeObject FeedbackVector::Get(IsolateRoot isolate, FeedbackSlot slot) const {
return raw_feedback_slots(isolate, GetIndex(slot));
}

View File

@ -218,7 +218,7 @@ class FeedbackVector
// Conversion from an integer index to the underlying array to a slot.
static inline FeedbackSlot ToSlot(intptr_t index);
inline MaybeObject Get(FeedbackSlot slot) const;
inline MaybeObject Get(const Isolate* isolate, FeedbackSlot slot) const;
inline MaybeObject Get(IsolateRoot isolate, FeedbackSlot slot) const;
inline void Set(FeedbackSlot slot, MaybeObject value,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline void Set(FeedbackSlot slot, Object value,

View File

@ -61,11 +61,11 @@ int FieldIndex::GetLoadByFieldIndex() const {
}
FieldIndex FieldIndex::ForDescriptor(Map map, InternalIndex descriptor_index) {
const Isolate* isolate = GetIsolateForPtrCompr(map);
IsolateRoot isolate = GetIsolateForPtrCompr(map);
return ForDescriptor(isolate, map, descriptor_index);
}
FieldIndex FieldIndex::ForDescriptor(const Isolate* isolate, Map map,
FieldIndex FieldIndex::ForDescriptor(IsolateRoot isolate, Map map,
InternalIndex descriptor_index) {
PropertyDetails details = map.instance_descriptors(isolate, kRelaxedLoad)
.GetDetails(descriptor_index);

View File

@ -31,7 +31,7 @@ class FieldIndex final {
static inline FieldIndex ForInObjectOffset(int offset, Encoding encoding);
static inline FieldIndex ForDescriptor(Map map,
InternalIndex descriptor_index);
static inline FieldIndex ForDescriptor(const Isolate* isolate, Map map,
static inline FieldIndex ForDescriptor(IsolateRoot isolate, Map map,
InternalIndex descriptor_index);
inline int GetLoadByFieldIndex() const;

View File

@ -69,11 +69,11 @@ bool FixedArray::ContainsOnlySmisOrHoles() {
}
Object FixedArray::get(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
}
Object FixedArray::get(const Isolate* isolate, int index) const {
Object FixedArray::get(IsolateRoot isolate, int index) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return TaggedField<Object>::Relaxed_Load(isolate, *this,
OffsetOfElementAt(index));
@ -123,11 +123,11 @@ void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) {
}
Object FixedArray::synchronized_get(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return synchronized_get(isolate, index);
}
Object FixedArray::synchronized_get(const Isolate* isolate, int index) const {
Object FixedArray::synchronized_get(IsolateRoot isolate, int index) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
}
@ -406,11 +406,11 @@ void FixedDoubleArray::FillWithHoles(int from, int to) {
}
MaybeObject WeakFixedArray::Get(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return Get(isolate, index);
}
MaybeObject WeakFixedArray::Get(const Isolate* isolate, int index) const {
MaybeObject WeakFixedArray::Get(IsolateRoot isolate, int index) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
return objects(isolate, index);
}
@ -441,11 +441,11 @@ void WeakFixedArray::CopyElements(Isolate* isolate, int dst_index,
}
MaybeObject WeakArrayList::Get(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return Get(isolate, index);
}
MaybeObject WeakArrayList::Get(const Isolate* isolate, int index) const {
MaybeObject WeakArrayList::Get(IsolateRoot isolate, int index) const {
DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(capacity()));
return objects(isolate, index);
}
@ -496,7 +496,7 @@ Object ArrayList::Get(int index) const {
return FixedArray::cast(*this).get(kFirstIndex + index);
}
Object ArrayList::Get(const Isolate* isolate, int index) const {
Object ArrayList::Get(IsolateRoot isolate, int index) const {
return FixedArray::cast(*this).get(isolate, kFirstIndex + index);
}
@ -621,7 +621,7 @@ Object TemplateList::get(int index) const {
return FixedArray::cast(*this).get(kFirstElementIndex + index);
}
Object TemplateList::get(const Isolate* isolate, int index) const {
Object TemplateList::get(IsolateRoot isolate, int index) const {
return FixedArray::cast(*this).get(isolate, kFirstElementIndex + index);
}

View File

@ -100,7 +100,7 @@ class FixedArray
public:
// Setter and getter for elements.
inline Object get(int index) const;
inline Object get(const Isolate* isolate, int index) const;
inline Object get(IsolateRoot isolate, int index) const;
static inline Handle<Object> get(FixedArray array, int index,
Isolate* isolate);
@ -112,7 +112,7 @@ class FixedArray
// Synchronized setters and getters.
inline Object synchronized_get(int index) const;
inline Object synchronized_get(const Isolate* isolate, int index) const;
inline Object synchronized_get(IsolateRoot isolate, int index) const;
// Currently only Smis are written with release semantics, hence we can avoid
// a write barrier.
inline void synchronized_set(int index, Smi value);
@ -267,7 +267,7 @@ class WeakFixedArray
: public TorqueGeneratedWeakFixedArray<WeakFixedArray, HeapObject> {
public:
inline MaybeObject Get(int index) const;
inline MaybeObject Get(const Isolate* isolate, int index) const;
inline MaybeObject Get(IsolateRoot isolate, int index) const;
inline void Set(
int index, MaybeObject value,
@ -342,7 +342,7 @@ class WeakArrayList
V8_EXPORT_PRIVATE void Compact(Isolate* isolate);
inline MaybeObject Get(int index) const;
inline MaybeObject Get(const Isolate* isolate, int index) const;
inline MaybeObject Get(IsolateRoot isolate, int index) const;
// Set the element at index to obj. The underlying array must be large enough.
// If you need to grow the WeakArrayList, use the static AddToEnd() method
@ -443,7 +443,7 @@ class ArrayList : public TorqueGeneratedArrayList<ArrayList, FixedArray> {
// storage capacity, i.e., length().
inline void SetLength(int length);
inline Object Get(int index) const;
inline Object Get(const Isolate* isolate, int index) const;
inline Object Get(IsolateRoot isolate, int index) const;
inline ObjectSlot Slot(int index);
// Set the element at index to obj. The underlying array must be large enough.
@ -589,7 +589,7 @@ class TemplateList
static Handle<TemplateList> New(Isolate* isolate, int size);
inline int length() const;
inline Object get(int index) const;
inline Object get(const Isolate* isolate, int index) const;
inline Object get(IsolateRoot isolate, int index) const;
inline void set(int index, Object value);
static Handle<TemplateList> Add(Isolate* isolate, Handle<TemplateList> list,
Handle<Object> value);

View File

@ -142,8 +142,7 @@ InternalIndex HashTable<Derived, Shape>::FindEntry(LocalIsolate* isolate,
// Find entry for key otherwise return kNotFound.
template <typename Derived, typename Shape>
template <typename LocalIsolate>
InternalIndex HashTable<Derived, Shape>::FindEntry(const LocalIsolate* isolate,
InternalIndex HashTable<Derived, Shape>::FindEntry(IsolateRoot isolate,
ReadOnlyRoots roots, Key key,
int32_t hash) {
uint32_t capacity = Capacity();
@ -180,8 +179,8 @@ bool HashTable<Derived, Shape>::ToKey(ReadOnlyRoots roots, InternalIndex entry,
}
template <typename Derived, typename Shape>
bool HashTable<Derived, Shape>::ToKey(const Isolate* isolate,
InternalIndex entry, Object* out_k) {
bool HashTable<Derived, Shape>::ToKey(IsolateRoot isolate, InternalIndex entry,
Object* out_k) {
Object k = KeyAt(isolate, entry);
if (!IsKey(GetReadOnlyRoots(isolate), k)) return false;
*out_k = Shape::Unwrap(k);
@ -190,16 +189,14 @@ bool HashTable<Derived, Shape>::ToKey(const Isolate* isolate,
template <typename Derived, typename Shape>
Object HashTable<Derived, Shape>::KeyAt(InternalIndex entry) {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return KeyAt(isolate, entry);
}
template <typename Derived, typename Shape>
template <typename LocalIsolate>
Object HashTable<Derived, Shape>::KeyAt(const LocalIsolate* isolate,
Object HashTable<Derived, Shape>::KeyAt(IsolateRoot isolate,
InternalIndex entry) {
return get(GetIsolateForPtrCompr(isolate),
EntryToIndex(entry) + kEntryKeyIndex);
return get(isolate, EntryToIndex(entry) + kEntryKeyIndex);
}
template <typename Derived, typename Shape>

View File

@ -138,26 +138,24 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
void IterateElements(ObjectVisitor* visitor);
// Find entry for key otherwise return kNotFound.
template <typename LocalIsolate>
inline InternalIndex FindEntry(const LocalIsolate* isolate,
ReadOnlyRoots roots, Key key, int32_t hash);
inline InternalIndex FindEntry(IsolateRoot isolate, ReadOnlyRoots roots,
Key key, int32_t hash);
template <typename LocalIsolate>
inline InternalIndex FindEntry(LocalIsolate* isolate, Key key);
// Rehashes the table in-place.
void Rehash(const Isolate* isolate);
void Rehash(IsolateRoot isolate);
// Returns whether k is a real key. The hole and undefined are not allowed as
// keys and can be used to indicate missing or deleted elements.
static inline bool IsKey(ReadOnlyRoots roots, Object k);
inline bool ToKey(ReadOnlyRoots roots, InternalIndex entry, Object* out_k);
inline bool ToKey(const Isolate* isolate, InternalIndex entry, Object* out_k);
inline bool ToKey(IsolateRoot isolate, InternalIndex entry, Object* out_k);
// Returns the key at entry.
inline Object KeyAt(InternalIndex entry);
template <typename LocalIsolate>
inline Object KeyAt(const LocalIsolate* isolate, InternalIndex entry);
inline Object KeyAt(IsolateRoot isolate, InternalIndex entry);
static const int kElementsStartIndex = kPrefixStartIndex + Shape::kPrefixSize;
static const int kEntrySize = Shape::kEntrySize;
@ -219,7 +217,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
// Find the entry at which to insert element with the given key that
// has the given hash value.
InternalIndex FindInsertionEntry(const Isolate* isolate, ReadOnlyRoots roots,
InternalIndex FindInsertionEntry(IsolateRoot isolate, ReadOnlyRoots roots,
uint32_t hash);
InternalIndex FindInsertionEntry(Isolate* isolate, uint32_t hash);
@ -233,7 +231,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
Isolate* isolate, Handle<Derived> table, int additionalCapacity = 0);
// Rehashes this hash-table into the new table.
void Rehash(const Isolate* isolate, Derived new_table);
void Rehash(IsolateRoot isolate, Derived new_table);
inline void set_key(int index, Object value);
inline void set_key(int index, Object value, WriteBarrierMode mode);
@ -324,7 +322,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) ObjectHashTableBase
// returned in case the key is not present.
Object Lookup(Handle<Object> key);
Object Lookup(Handle<Object> key, int32_t hash);
Object Lookup(const Isolate* isolate, Handle<Object> key, int32_t hash);
Object Lookup(IsolateRoot isolate, Handle<Object> key, int32_t hash);
// Returns the value at entry.
Object ValueAt(InternalIndex entry);

View File

@ -68,11 +68,11 @@ class HeapObject : public Object {
inline ReadOnlyRoots GetReadOnlyRoots() const;
// This version is intended to be used for the isolate values produced by
// i::GetIsolateForPtrCompr(HeapObject) function which may return nullptr.
inline ReadOnlyRoots GetReadOnlyRoots(const Isolate* isolate) const;
inline ReadOnlyRoots GetReadOnlyRoots(IsolateRoot isolate) const;
#define IS_TYPE_FUNCTION_DECL(Type) \
V8_INLINE bool Is##Type() const; \
V8_INLINE bool Is##Type(const Isolate* isolate) const;
V8_INLINE bool Is##Type(IsolateRoot isolate) const;
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
IS_TYPE_FUNCTION_DECL(HashTableBase)
IS_TYPE_FUNCTION_DECL(SmallOrderedHashTable)
@ -93,7 +93,7 @@ class HeapObject : public Object {
#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
V8_INLINE bool Is##Name() const; \
V8_INLINE bool Is##Name(const Isolate* isolate) const;
V8_INLINE bool Is##Name(IsolateRoot isolate) const;
STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE

View File

@ -204,9 +204,9 @@ void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) {
}
Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
const Isolate* isolate) {
IsolateRoot isolate) {
#ifdef V8_COMPRESS_POINTERS
return GetIsolateRoot(isolate);
return isolate.address();
#else
return 0;
#endif

View File

@ -290,7 +290,7 @@ class JSTypedArray
// as Tagged_t value and an |external_pointer| value.
// For full-pointer mode the compensation value is zero.
static inline Address ExternalPointerCompensationForOnHeapArray(
const Isolate* isolate);
IsolateRoot isolate);
//
// Serializer/deserializer support.

View File

@ -300,11 +300,11 @@ void JSObject::SetEmbedderField(int index, Smi value) {
}
bool JSObject::IsUnboxedDoubleField(FieldIndex index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return IsUnboxedDoubleField(isolate, index);
}
bool JSObject::IsUnboxedDoubleField(const Isolate* isolate,
bool JSObject::IsUnboxedDoubleField(IsolateRoot isolate,
FieldIndex index) const {
if (!FLAG_unbox_double_fields) return false;
return map(isolate).IsUnboxedDoubleField(isolate, index);
@ -314,11 +314,11 @@ bool JSObject::IsUnboxedDoubleField(const Isolate* isolate,
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object JSObject::RawFastPropertyAt(FieldIndex index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return RawFastPropertyAt(isolate, index);
}
Object JSObject::RawFastPropertyAt(const Isolate* isolate,
Object JSObject::RawFastPropertyAt(IsolateRoot isolate,
FieldIndex index) const {
DCHECK(!IsUnboxedDoubleField(isolate, index));
if (index.is_inobject()) {

View File

@ -621,16 +621,14 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
const char* reason);
inline bool IsUnboxedDoubleField(FieldIndex index) const;
inline bool IsUnboxedDoubleField(const Isolate* isolate,
FieldIndex index) const;
inline bool IsUnboxedDoubleField(IsolateRoot isolate, FieldIndex index) const;
// Access fast-case object properties at index.
static Handle<Object> FastPropertyAt(Handle<JSObject> object,
Representation representation,
FieldIndex index);
inline Object RawFastPropertyAt(FieldIndex index) const;
inline Object RawFastPropertyAt(const Isolate* isolate,
FieldIndex index) const;
inline Object RawFastPropertyAt(IsolateRoot isolate, FieldIndex index) const;
inline double RawFastDoublePropertyAt(FieldIndex index) const;
inline uint64_t RawFastDoublePropertyAsBitsAt(FieldIndex index) const;
@ -725,7 +723,7 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
// If a GC was caused while constructing this object, the elements pointer
// may point to a one pointer filler map. The object won't be rooted, but
// our heap verification code could stumble across it.
V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(const Isolate* isolate) const;
V8_EXPORT_PRIVATE bool ElementsAreSafeToExamine(IsolateRoot isolate) const;
#endif
Object SlowReverseLookup(Object value);

View File

@ -27,11 +27,11 @@ SMI_ACCESSORS(ObjectBoilerplateDescription, flags,
FixedArray::OffsetOfElementAt(kLiteralTypeOffset))
Object ObjectBoilerplateDescription::name(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return name(isolate, index);
}
Object ObjectBoilerplateDescription::name(const Isolate* isolate,
Object ObjectBoilerplateDescription::name(IsolateRoot isolate,
int index) const {
// get() already checks for out of bounds access, but we do not want to allow
// access to the last element, if it is the number of properties.
@ -40,11 +40,11 @@ Object ObjectBoilerplateDescription::name(const Isolate* isolate,
}
Object ObjectBoilerplateDescription::value(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return value(isolate, index);
}
Object ObjectBoilerplateDescription::value(const Isolate* isolate,
Object ObjectBoilerplateDescription::value(IsolateRoot isolate,
int index) const {
return get(isolate, 2 * index + 1 + kDescriptionStartIndex);
}

View File

@ -26,10 +26,10 @@ class ClassLiteral;
class ObjectBoilerplateDescription : public FixedArray {
public:
inline Object name(int index) const;
inline Object name(const Isolate* isolate, int index) const;
inline Object name(IsolateRoot isolate, int index) const;
inline Object value(int index) const;
inline Object value(const Isolate* isolate, int index) const;
inline Object value(IsolateRoot isolate, int index) const;
inline void set_key_value(int index, Object key, Object value);

View File

@ -154,11 +154,11 @@ bool Map::EquivalentToForNormalization(const Map other,
}
bool Map::IsUnboxedDoubleField(FieldIndex index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return IsUnboxedDoubleField(isolate, index);
}
bool Map::IsUnboxedDoubleField(const Isolate* isolate, FieldIndex index) const {
bool Map::IsUnboxedDoubleField(IsolateRoot isolate, FieldIndex index) const {
if (!FLAG_unbox_double_fields) return false;
if (!index.is_inobject()) return false;
return !layout_descriptor(isolate, kAcquireLoad)

View File

@ -862,8 +862,7 @@ class Map : public HeapObject {
// Returns true if given field is unboxed double.
inline bool IsUnboxedDoubleField(FieldIndex index) const;
inline bool IsUnboxedDoubleField(const Isolate* isolate,
FieldIndex index) const;
inline bool IsUnboxedDoubleField(IsolateRoot isolate, FieldIndex index) const;
void PrintMapDetails(std::ostream& os);
@ -1003,7 +1002,7 @@ class NormalizedMapCache : public WeakFixedArray {
DECL_VERIFIER(NormalizedMapCache)
private:
friend bool HeapObject::IsNormalizedMapCache(const Isolate* isolate) const;
friend bool HeapObject::IsNormalizedMapCache(IsolateRoot isolate) const;
static const int kEntries = 64;

View File

@ -69,7 +69,7 @@ HeapObjectReference HeapObjectReference::From(Object object,
}
// static
HeapObjectReference HeapObjectReference::ClearedValue(const Isolate* isolate) {
HeapObjectReference HeapObjectReference::ClearedValue(IsolateRoot isolate) {
// Construct cleared weak ref value.
#ifdef V8_COMPRESS_POINTERS
// This is necessary to make pointer decompression computation also

View File

@ -50,7 +50,7 @@ class HeapObjectReference : public MaybeObject {
V8_INLINE static HeapObjectReference From(Object object,
HeapObjectReferenceType type);
V8_INLINE static HeapObjectReference ClearedValue(const Isolate* isolate);
V8_INLINE static HeapObjectReference ClearedValue(IsolateRoot isolate);
template <typename THeapObjectSlot>
V8_INLINE static void Update(THeapObjectSlot slot, HeapObject value);

View File

@ -82,14 +82,14 @@
// parameter.
#define DECL_GETTER(name, type) \
inline type name() const; \
inline type name(const Isolate* isolate) const;
inline type name(IsolateRoot isolate) const;
#define DEF_GETTER(holder, name, type) \
type holder::name() const { \
const Isolate* isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate); \
} \
type holder::name(const Isolate* isolate) const
#define DEF_GETTER(holder, name, type) \
type holder::name() const { \
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate); \
} \
type holder::name(IsolateRoot isolate) const
#define DECL_ACCESSORS(name, type) \
DECL_GETTER(name, type) \
@ -98,7 +98,7 @@
#define DECL_ACCESSORS_LOAD_TAG(name, type, tag_type) \
inline type name(tag_type tag) const; \
inline type name(const Isolate* isolate, tag_type) const;
inline type name(IsolateRoot isolate, tag_type) const;
#define DECL_ACCESSORS_STORE_TAG(name, type, tag_type) \
inline void set_##name(type value, tag_type, \
@ -175,10 +175,10 @@
#define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
set_condition) \
type holder::name(RelaxedLoadTag tag) const { \
const Isolate* isolate = GetIsolateForPtrCompr(*this); \
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate, tag); \
} \
type holder::name(const Isolate* isolate, RelaxedLoadTag) const { \
type holder::name(IsolateRoot isolate, RelaxedLoadTag) const { \
type value = TaggedField<type, offset>::load(isolate, *this); \
DCHECK(get_condition); \
return value; \
@ -199,10 +199,10 @@
#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \
get_condition, set_condition) \
type holder::name(AcquireLoadTag tag) const { \
const Isolate* isolate = GetIsolateForPtrCompr(*this); \
IsolateRoot isolate = GetIsolateForPtrCompr(*this); \
return holder::name(isolate, tag); \
} \
type holder::name(const Isolate* isolate, AcquireLoadTag) const { \
type holder::name(IsolateRoot isolate, AcquireLoadTag) const { \
type value = TaggedField<type, offset>::Acquire_Load(isolate, *this); \
DCHECK(get_condition); \
return value; \

View File

@ -79,7 +79,7 @@ bool Object::IsTaggedIndex() const {
bool Object::Is##type_() const { \
return IsHeapObject() && HeapObject::cast(*this).Is##type_(); \
} \
bool Object::Is##type_(const Isolate* isolate) const { \
bool Object::Is##type_(IsolateRoot isolate) const { \
return IsHeapObject() && HeapObject::cast(*this).Is##type_(isolate); \
}
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
@ -233,23 +233,23 @@ DEF_GETTER(HeapObject, IsExternalTwoByteString, bool) {
bool Object::IsNumber() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
const Isolate* isolate = GetIsolateForPtrCompr(this_heap_object);
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
return this_heap_object.IsHeapNumber(isolate);
}
bool Object::IsNumber(const Isolate* isolate) const {
bool Object::IsNumber(IsolateRoot isolate) const {
return IsSmi() || IsHeapNumber(isolate);
}
bool Object::IsNumeric() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
const Isolate* isolate = GetIsolateForPtrCompr(this_heap_object);
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
return this_heap_object.IsHeapNumber(isolate) ||
this_heap_object.IsBigInt(isolate);
}
bool Object::IsNumeric(const Isolate* isolate) const {
bool Object::IsNumeric(IsolateRoot isolate) const {
return IsNumber(isolate) || IsBigInt(isolate);
}
@ -277,11 +277,11 @@ DEF_GETTER(HeapObject, IsRegExpMatchInfo, bool) {
bool Object::IsLayoutDescriptor() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
const Isolate* isolate = GetIsolateForPtrCompr(this_heap_object);
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
return this_heap_object.IsByteArray(isolate);
}
bool Object::IsLayoutDescriptor(const Isolate* isolate) const {
bool Object::IsLayoutDescriptor(IsolateRoot isolate) const {
return IsSmi() || IsByteArray(isolate);
}
@ -386,11 +386,11 @@ DEF_GETTER(HeapObject, IsWasmExceptionPackage, bool) {
bool Object::IsPrimitive() const {
if (IsSmi()) return true;
HeapObject this_heap_object = HeapObject::cast(*this);
const Isolate* isolate = GetIsolateForPtrCompr(this_heap_object);
IsolateRoot isolate = GetIsolateForPtrCompr(this_heap_object);
return this_heap_object.map(isolate).IsPrimitiveMap();
}
bool Object::IsPrimitive(const Isolate* isolate) const {
bool Object::IsPrimitive(IsolateRoot isolate) const {
return IsSmi() || HeapObject::cast(*this).map(isolate).IsPrimitiveMap();
}
@ -420,7 +420,7 @@ DEF_GETTER(HeapObject, IsAccessCheckNeeded, bool) {
bool Object::Is##Name() const { \
return IsHeapObject() && HeapObject::cast(*this).Is##Name(); \
} \
bool Object::Is##Name(const Isolate* isolate) const { \
bool Object::Is##Name(IsolateRoot isolate) const { \
return IsHeapObject() && HeapObject::cast(*this).Is##Name(isolate); \
}
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
@ -486,7 +486,7 @@ bool Object::FilterKey(PropertyFilter filter) {
return false;
}
Representation Object::OptimalRepresentation(const Isolate* isolate) const {
Representation Object::OptimalRepresentation(IsolateRoot isolate) const {
if (!FLAG_track_fields) return Representation::Tagged();
if (IsSmi()) {
return Representation::Smi();
@ -505,7 +505,7 @@ Representation Object::OptimalRepresentation(const Isolate* isolate) const {
}
}
ElementsKind Object::OptimalElementsKind(const Isolate* isolate) const {
ElementsKind Object::OptimalElementsKind(IsolateRoot isolate) const {
if (IsSmi()) return PACKED_SMI_ELEMENTS;
if (IsNumber(isolate)) return PACKED_DOUBLE_ELEMENTS;
return PACKED_ELEMENTS;
@ -651,7 +651,7 @@ void Object::InitExternalPointerField(size_t offset, Isolate* isolate,
}
Address Object::ReadExternalPointerField(size_t offset,
const Isolate* isolate) const {
IsolateRoot isolate) const {
return i::ReadExternalPointerField(field_address(offset), isolate);
}
@ -706,10 +706,10 @@ ReadOnlyRoots HeapObject::GetReadOnlyRoots() const {
return ReadOnlyHeap::GetReadOnlyRoots(*this);
}
ReadOnlyRoots HeapObject::GetReadOnlyRoots(const Isolate* isolate) const {
ReadOnlyRoots HeapObject::GetReadOnlyRoots(IsolateRoot isolate) const {
#ifdef V8_COMPRESS_POINTERS
DCHECK_NOT_NULL(isolate);
return ReadOnlyRoots(const_cast<Isolate*>(isolate));
DCHECK_NE(isolate.address(), 0);
return ReadOnlyRoots(Isolate::FromRootAddress(isolate.address()));
#else
return GetReadOnlyRoots();
#endif

View File

@ -5146,7 +5146,7 @@ bool JSArray::WouldChangeReadOnlyLength(Handle<JSArray> array, uint32_t index) {
template <typename Derived, typename Shape>
void Dictionary<Derived, Shape>::Print(std::ostream& os) {
DisallowHeapAllocation no_gc;
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate);
Derived dictionary = Derived::cast(*this);
for (InternalIndex i : dictionary.IterateEntries()) {
@ -5652,8 +5652,7 @@ Handle<Derived> HashTable<Derived, Shape>::NewInternal(
}
template <typename Derived, typename Shape>
void HashTable<Derived, Shape>::Rehash(const Isolate* isolate,
Derived new_table) {
void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate, Derived new_table) {
DisallowHeapAllocation no_gc;
WriteBarrierMode mode = new_table.GetWriteBarrierMode(no_gc);
@ -5717,7 +5716,7 @@ void HashTable<Derived, Shape>::Swap(InternalIndex entry1, InternalIndex entry2,
}
template <typename Derived, typename Shape>
void HashTable<Derived, Shape>::Rehash(const Isolate* isolate) {
void HashTable<Derived, Shape>::Rehash(IsolateRoot isolate) {
DisallowHeapAllocation no_gc;
WriteBarrierMode mode = GetWriteBarrierMode(no_gc);
ReadOnlyRoots roots = GetReadOnlyRoots(isolate);
@ -5784,7 +5783,7 @@ Handle<Derived> HashTable<Derived, Shape>::EnsureCapacity(
isolate, new_nof,
should_pretenure ? AllocationType::kOld : AllocationType::kYoung);
table->Rehash(GetIsolateForPtrCompr(isolate), *new_table);
table->Rehash(isolate, *new_table);
return new_table;
}
@ -5850,8 +5849,9 @@ Handle<Derived> HashTable<Derived, Shape>::Shrink(Isolate* isolate,
}
template <typename Derived, typename Shape>
InternalIndex HashTable<Derived, Shape>::FindInsertionEntry(
const Isolate* isolate, ReadOnlyRoots roots, uint32_t hash) {
InternalIndex HashTable<Derived, Shape>::FindInsertionEntry(IsolateRoot isolate,
ReadOnlyRoots roots,
uint32_t hash) {
uint32_t capacity = Capacity();
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
@ -6342,8 +6342,7 @@ Handle<Derived> Dictionary<Derived, Shape>::Add(LocalIsolate* isolate,
// Compute the key object.
Handle<Object> k = Shape::AsHandle(isolate, key);
InternalIndex entry = dictionary->FindInsertionEntry(
GetIsolateForPtrCompr(isolate), roots, hash);
InternalIndex entry = dictionary->FindInsertionEntry(isolate, roots, hash);
dictionary->SetEntry(entry, *k, *value, details);
DCHECK(dictionary->KeyAt(isolate, entry).IsNumber() ||
Shape::Unwrap(dictionary->KeyAt(isolate, entry)).IsUniqueName());
@ -6611,7 +6610,7 @@ void ObjectHashTableBase<Derived, Shape>::FillEntriesWithHoles(
}
template <typename Derived, typename Shape>
Object ObjectHashTableBase<Derived, Shape>::Lookup(const Isolate* isolate,
Object ObjectHashTableBase<Derived, Shape>::Lookup(IsolateRoot isolate,
Handle<Object> key,
int32_t hash) {
DisallowHeapAllocation no_gc;
@ -6627,7 +6626,7 @@ template <typename Derived, typename Shape>
Object ObjectHashTableBase<Derived, Shape>::Lookup(Handle<Object> key) {
DisallowHeapAllocation no_gc;
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
ReadOnlyRoots roots = this->GetReadOnlyRoots(isolate);
DCHECK(this->IsKey(roots, *key));

View File

@ -281,7 +281,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
#define IS_TYPE_FUNCTION_DECL(Type) \
V8_INLINE bool Is##Type() const; \
V8_INLINE bool Is##Type(const Isolate* isolate) const;
V8_INLINE bool Is##Type(IsolateRoot isolate) const;
OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DECL)
IS_TYPE_FUNCTION_DECL(HashTableBase)
@ -309,7 +309,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
#define DECL_STRUCT_PREDICATE(NAME, Name, name) \
V8_INLINE bool Is##Name() const; \
V8_INLINE bool Is##Name(const Isolate* isolate) const;
V8_INLINE bool Is##Name(IsolateRoot isolate) const;
STRUCT_LIST(DECL_STRUCT_PREDICATE)
#undef DECL_STRUCT_PREDICATE
@ -324,9 +324,9 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
V8_EXPORT_PRIVATE bool ToInt32(int32_t* value);
inline bool ToUint32(uint32_t* value) const;
inline Representation OptimalRepresentation(const Isolate* isolate) const;
inline Representation OptimalRepresentation(IsolateRoot isolate) const;
inline ElementsKind OptimalElementsKind(const Isolate* isolate) const;
inline ElementsKind OptimalElementsKind(IsolateRoot isolate) const;
inline bool FitsRepresentation(Representation representation);
@ -673,7 +673,7 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
inline void InitExternalPointerField(size_t offset, Isolate* isolate,
Address value);
inline Address ReadExternalPointerField(size_t offset,
const Isolate* isolate) const;
IsolateRoot isolate) const;
inline void WriteExternalPointerField(size_t offset, Isolate* isolate,
Address value);

View File

@ -25,11 +25,11 @@ SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset)
SYNCHRONIZED_SMI_ACCESSORS(PropertyArray, length_and_hash, kLengthAndHashOffset)
Object PropertyArray::get(int index) const {
const Isolate* isolate = GetIsolateForPtrCompr(*this);
IsolateRoot isolate = GetIsolateForPtrCompr(*this);
return get(isolate, index);
}
Object PropertyArray::get(const Isolate* isolate, int index) const {
Object PropertyArray::get(IsolateRoot isolate, int index) const {
DCHECK_LT(static_cast<unsigned>(index),
static_cast<unsigned>(this->length()));
return TaggedField<Object>::Relaxed_Load(isolate, *this,

View File

@ -30,7 +30,7 @@ class PropertyArray : public HeapObject {
inline int Hash() const;
inline Object get(int index) const;
inline Object get(const Isolate* isolate, int index) const;
inline Object get(IsolateRoot isolate, int index) const;
inline void set(int index, Object value);
// Setter with explicit barrier mode.

View File

@ -75,7 +75,7 @@ Descriptor Descriptor::DataField(Handle<Name> key, int field_index,
Descriptor Descriptor::DataConstant(Handle<Name> key, Handle<Object> value,
PropertyAttributes attributes) {
const Isolate* isolate = GetIsolateForPtrCompr(*key);
IsolateRoot isolate = GetIsolateForPtrCompr(*key);
return Descriptor(key, MaybeObjectHandle(value), kData, attributes,
kDescriptor, PropertyConstness::kConst,
value->OptimalRepresentation(isolate), 0);

View File

@ -31,7 +31,7 @@ bool FullObjectSlot::contains_value(Address raw_value) const {
Object FullObjectSlot::operator*() const { return Object(*location()); }
Object FullObjectSlot::load(const Isolate* isolate) const { return **this; }
Object FullObjectSlot::load(IsolateRoot isolate) const { return **this; }
void FullObjectSlot::store(Object value) const { *location() = value.ptr(); }
@ -39,7 +39,7 @@ Object FullObjectSlot::Acquire_Load() const {
return Object(base::AsAtomicPointer::Acquire_Load(location()));
}
Object FullObjectSlot::Acquire_Load(const Isolate* isolate) const {
Object FullObjectSlot::Acquire_Load(IsolateRoot isolate) const {
return Acquire_Load();
}
@ -47,7 +47,7 @@ Object FullObjectSlot::Relaxed_Load() const {
return Object(base::AsAtomicPointer::Relaxed_Load(location()));
}
Object FullObjectSlot::Relaxed_Load(const Isolate* isolate) const {
Object FullObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
return Relaxed_Load();
}
@ -79,7 +79,7 @@ MaybeObject FullMaybeObjectSlot::operator*() const {
return MaybeObject(*location());
}
MaybeObject FullMaybeObjectSlot::load(const Isolate* isolate) const {
MaybeObject FullMaybeObjectSlot::load(IsolateRoot isolate) const {
return **this;
}
@ -91,7 +91,7 @@ MaybeObject FullMaybeObjectSlot::Relaxed_Load() const {
return MaybeObject(base::AsAtomicPointer::Relaxed_Load(location()));
}
MaybeObject FullMaybeObjectSlot::Relaxed_Load(const Isolate* isolate) const {
MaybeObject FullMaybeObjectSlot::Relaxed_Load(IsolateRoot isolate) const {
return Relaxed_Load();
}
@ -113,7 +113,7 @@ HeapObjectReference FullHeapObjectSlot::operator*() const {
return HeapObjectReference(*location());
}
HeapObjectReference FullHeapObjectSlot::load(const Isolate* isolate) const {
HeapObjectReference FullHeapObjectSlot::load(IsolateRoot isolate) const {
return **this;
}

View File

@ -110,13 +110,13 @@ class FullObjectSlot : public SlotBase<FullObjectSlot, Address> {
inline bool contains_value(Address raw_value) const;
inline Object operator*() const;
inline Object load(const Isolate* isolate) const;
inline Object load(IsolateRoot isolate) const;
inline void store(Object value) const;
inline Object Acquire_Load() const;
inline Object Acquire_Load(const Isolate* isolate) const;
inline Object Acquire_Load(IsolateRoot isolate) const;
inline Object Relaxed_Load() const;
inline Object Relaxed_Load(const Isolate* isolate) const;
inline Object Relaxed_Load(IsolateRoot isolate) const;
inline void Relaxed_Store(Object value) const;
inline void Release_Store(Object value) const;
inline Object Relaxed_CompareAndSwap(Object old, Object target) const;
@ -147,11 +147,11 @@ class FullMaybeObjectSlot
: SlotBase(slot.address()) {}
inline MaybeObject operator*() const;
inline MaybeObject load(const Isolate* isolate) const;
inline MaybeObject load(IsolateRoot isolate) const;
inline void store(MaybeObject value) const;
inline MaybeObject Relaxed_Load() const;
inline MaybeObject Relaxed_Load(const Isolate* isolate) const;
inline MaybeObject Relaxed_Load(IsolateRoot isolate) const;
inline void Relaxed_Store(MaybeObject value) const;
inline void Release_CompareAndSwap(MaybeObject old, MaybeObject target) const;
};
@ -174,7 +174,7 @@ class FullHeapObjectSlot : public SlotBase<FullHeapObjectSlot, Address> {
: SlotBase(slot.address()) {}
inline HeapObjectReference operator*() const;
inline HeapObjectReference load(const Isolate* isolate) const;
inline HeapObjectReference load(IsolateRoot isolate) const;
inline void store(HeapObjectReference value) const;
inline HeapObject ToHeapObject() const;

View File

@ -91,14 +91,14 @@ bool KeyIsMatch(StringTableKey* key, String string) {
class StringTable::Data {
public:
static std::unique_ptr<Data> New(int capacity);
static std::unique_ptr<Data> Resize(const Isolate* isolate,
static std::unique_ptr<Data> Resize(IsolateRoot isolate,
std::unique_ptr<Data> data, int capacity);
OffHeapObjectSlot slot(InternalIndex index) const {
return OffHeapObjectSlot(&elements_[index.as_uint32()]);
}
Object Get(const Isolate* isolate, InternalIndex index) const {
Object Get(IsolateRoot isolate, InternalIndex index) const {
return slot(index).Acquire_Load(isolate);
}
@ -136,13 +136,13 @@ class StringTable::Data {
int number_of_deleted_elements() const { return number_of_deleted_elements_; }
template <typename StringTableKey>
InternalIndex FindEntry(const Isolate* isolate, StringTableKey* key,
InternalIndex FindEntry(IsolateRoot isolate, StringTableKey* key,
uint32_t hash) const;
InternalIndex FindInsertionEntry(const Isolate* isolate, uint32_t hash) const;
InternalIndex FindInsertionEntry(IsolateRoot isolate, uint32_t hash) const;
template <typename StringTableKey>
InternalIndex FindEntryOrInsertionEntry(const Isolate* isolate,
InternalIndex FindEntryOrInsertionEntry(IsolateRoot isolate,
StringTableKey* key,
uint32_t hash) const;
@ -157,7 +157,7 @@ class StringTable::Data {
Data* PreviousData() { return previous_data_.get(); }
void DropPreviousData() { previous_data_.reset(); }
void Print(const Isolate* isolate) const;
void Print(IsolateRoot isolate) const;
size_t GetCurrentMemoryUsage() const;
private:
@ -224,7 +224,7 @@ std::unique_ptr<StringTable::Data> StringTable::Data::New(int capacity) {
}
std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
const Isolate* isolate, std::unique_ptr<Data> data, int capacity) {
IsolateRoot isolate, std::unique_ptr<Data> data, int capacity) {
std::unique_ptr<Data> new_data(new (capacity) Data(capacity));
DCHECK_LT(data->number_of_elements(), new_data->capacity());
@ -248,7 +248,7 @@ std::unique_ptr<StringTable::Data> StringTable::Data::Resize(
}
template <typename StringTableKey>
InternalIndex StringTable::Data::FindEntry(const Isolate* isolate,
InternalIndex StringTable::Data::FindEntry(IsolateRoot isolate,
StringTableKey* key,
uint32_t hash) const {
uint32_t count = 1;
@ -266,7 +266,7 @@ InternalIndex StringTable::Data::FindEntry(const Isolate* isolate,
}
}
InternalIndex StringTable::Data::FindInsertionEntry(const Isolate* isolate,
InternalIndex StringTable::Data::FindInsertionEntry(IsolateRoot isolate,
uint32_t hash) const {
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
@ -283,7 +283,7 @@ InternalIndex StringTable::Data::FindInsertionEntry(const Isolate* isolate,
template <typename StringTableKey>
InternalIndex StringTable::Data::FindEntryOrInsertionEntry(
const Isolate* isolate, StringTableKey* key, uint32_t hash) const {
IsolateRoot isolate, StringTableKey* key, uint32_t hash) const {
InternalIndex insertion_entry = InternalIndex::NotFound();
uint32_t count = 1;
// EnsureCapacity will guarantee the hash table is never full.
@ -317,7 +317,7 @@ void StringTable::Data::IterateElements(RootVisitor* visitor) {
visitor->VisitRootPointers(Root::kStringTable, nullptr, first_slot, end_slot);
}
void StringTable::Data::Print(const Isolate* isolate) const {
void StringTable::Data::Print(IsolateRoot isolate) const {
OFStream os(stdout);
os << "StringTable {" << std::endl;
for (InternalIndex i : InternalIndex::Range(capacity_)) {
@ -461,8 +461,6 @@ Handle<String> StringTable::LookupKey(LocalIsolate* isolate,
// allocation if another write also did an allocation. This assumes that
// writes are rarer than reads.
const Isolate* ptr_cmp_isolate = GetIsolateForPtrCompr(isolate);
Handle<String> new_string;
while (true) {
// Load the current string table data, in case another thread updates the
@ -474,9 +472,9 @@ Handle<String> StringTable::LookupKey(LocalIsolate* isolate,
// because the new table won't delete it's corresponding entry until the
// string is dead, in which case it will die in this table too and worst
// case we'll have a false miss.
InternalIndex entry = data->FindEntry(ptr_cmp_isolate, key, key->hash());
InternalIndex entry = data->FindEntry(isolate, key, key->hash());
if (entry.is_found()) {
return handle(String::cast(data->Get(ptr_cmp_isolate, entry)), isolate);
return handle(String::cast(data->Get(isolate, entry)), isolate);
}
// No entry found, so adding new string.
@ -490,14 +488,14 @@ Handle<String> StringTable::LookupKey(LocalIsolate* isolate,
{
base::MutexGuard table_write_guard(&write_mutex_);
Data* data = EnsureCapacity(ptr_cmp_isolate, 1);
Data* data = EnsureCapacity(isolate, 1);
// Check one last time if the key is present in the table, in case it was
// added after the check.
InternalIndex entry =
data->FindEntryOrInsertionEntry(ptr_cmp_isolate, key, key->hash());
data->FindEntryOrInsertionEntry(isolate, key, key->hash());
Object element = data->Get(ptr_cmp_isolate, entry);
Object element = data->Get(isolate, entry);
if (element == empty_element()) {
// This entry is empty, so write it and register that we added an
// element.
@ -539,7 +537,7 @@ template Handle<String> StringTable::LookupKey(LocalIsolate* isolate,
template Handle<String> StringTable::LookupKey(Isolate* isolate,
StringTableInsertionKey* key);
StringTable::Data* StringTable::EnsureCapacity(const Isolate* isolate,
StringTable::Data* StringTable::EnsureCapacity(IsolateRoot isolate,
int additional_elements) {
// This call is only allowed while the write mutex is held.
write_mutex_.AssertHeld();
@ -677,7 +675,7 @@ Address StringTable::TryStringToIndexOrLookupExisting(Isolate* isolate,
isolate, string, source, start);
}
void StringTable::Print(const Isolate* isolate) const {
void StringTable::Print(IsolateRoot isolate) const {
data_.load(std::memory_order_acquire)->Print(isolate);
}

View File

@ -77,7 +77,7 @@ class V8_EXPORT_PRIVATE StringTable {
static Address TryStringToIndexOrLookupExisting(Isolate* isolate,
Address raw_string);
void Print(const Isolate* isolate) const;
void Print(IsolateRoot isolate) const;
size_t GetCurrentMemoryUsage() const;
// The following methods must be called either while holding the write lock,
@ -89,7 +89,7 @@ class V8_EXPORT_PRIVATE StringTable {
private:
class Data;
Data* EnsureCapacity(const Isolate* isolate, int additional_elements);
Data* EnsureCapacity(IsolateRoot isolate, int additional_elements);
std::atomic<Data*> data_;
// Write mutex is mutable so that readers of concurrently mutated values (e.g.

View File

@ -61,7 +61,7 @@ T TaggedField<T, kFieldOffset>::load(HeapObject host, int offset) {
// static
template <typename T, int kFieldOffset>
T TaggedField<T, kFieldOffset>::load(const Isolate* isolate, HeapObject host,
T TaggedField<T, kFieldOffset>::load(IsolateRoot isolate, HeapObject host,
int offset) {
Tagged_t value = *location(host, offset);
return T(tagged_to_full(isolate, value));
@ -96,8 +96,7 @@ T TaggedField<T, kFieldOffset>::Relaxed_Load(HeapObject host, int offset) {
// static
template <typename T, int kFieldOffset>
template <typename LocalIsolate>
T TaggedField<T, kFieldOffset>::Relaxed_Load(const LocalIsolate* isolate,
T TaggedField<T, kFieldOffset>::Relaxed_Load(IsolateRoot isolate,
HeapObject host, int offset) {
AtomicTagged_t value = AsAtomicTagged::Relaxed_Load(location(host, offset));
return T(tagged_to_full(isolate, value));
@ -126,8 +125,7 @@ T TaggedField<T, kFieldOffset>::Acquire_Load(HeapObject host, int offset) {
// static
template <typename T, int kFieldOffset>
template <typename LocalIsolate>
T TaggedField<T, kFieldOffset>::Acquire_Load(const LocalIsolate* isolate,
T TaggedField<T, kFieldOffset>::Acquire_Load(IsolateRoot isolate,
HeapObject host, int offset) {
AtomicTagged_t value = AsAtomicTagged::Acquire_Load(location(host, offset));
return T(tagged_to_full(isolate, value));

View File

@ -38,22 +38,20 @@ class TaggedField : public AllStatic {
static inline Address address(HeapObject host, int offset = 0);
static inline T load(HeapObject host, int offset = 0);
static inline T load(const Isolate* isolate, HeapObject host, int offset = 0);
static inline T load(IsolateRoot isolate, HeapObject host, int offset = 0);
static inline void store(HeapObject host, T value);
static inline void store(HeapObject host, int offset, T value);
static inline T Relaxed_Load(HeapObject host, int offset = 0);
template <typename LocalIsolate>
static inline T Relaxed_Load(const LocalIsolate* isolate, HeapObject host,
static inline T Relaxed_Load(IsolateRoot isolate, HeapObject host,
int offset = 0);
static inline void Relaxed_Store(HeapObject host, T value);
static inline void Relaxed_Store(HeapObject host, int offset, T value);
static inline T Acquire_Load(HeapObject host, int offset = 0);
template <typename LocalIsolate>
static inline T Acquire_Load(const LocalIsolate* isolate, HeapObject host,
static inline T Acquire_Load(IsolateRoot isolate, HeapObject host,
int offset = 0);
static inline void Release_Store(HeapObject host, T value);

View File

@ -1479,7 +1479,7 @@ class RootsReferencesExtractor : public RootVisitor {
OffHeapObjectSlot start,
OffHeapObjectSlot end) override {
DCHECK_EQ(root, Root::kStringTable);
const Isolate* isolate = Isolate::FromHeap(explorer_->heap_);
IsolateRoot isolate = Isolate::FromHeap(explorer_->heap_);
for (OffHeapObjectSlot p = start; p < end; ++p) {
explorer_->SetGcSubrootReference(root, description, visiting_weak_roots_,
p.load(isolate));

View File

@ -4036,7 +4036,7 @@ void CppClassGenerator::GenerateFieldAccessorForTagged(const Field& f) {
hdr_ << " inline " << type << " " << name << "(" << (f.index ? "int i" : "")
<< ") const;\n";
hdr_ << " inline " << type << " " << name << "(const Isolate* isolates"
hdr_ << " inline " << type << " " << name << "(IsolateRoot isolates"
<< (f.index ? ", int i" : "") << ") const;\n";
hdr_ << " inline void set_" << name << "(" << (f.index ? "int i, " : "")
<< type << " value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);\n\n";
@ -4047,15 +4047,14 @@ void CppClassGenerator::GenerateFieldAccessorForTagged(const Field& f) {
inl_ << "template <class D, class P>\n";
inl_ << type << " " << gen_name_ << "<D, P>::" << name << "("
<< (f.index ? "int i" : "") << ") const {\n";
inl_ << " const Isolate* isolate = GetIsolateForPtrCompr(*this);\n";
inl_ << " IsolateRoot isolate = GetIsolateForPtrCompr(*this);\n";
inl_ << " return " << gen_name_ << "::" << name << "(isolate"
<< (f.index ? ", i" : "") << ");\n";
inl_ << "}\n";
inl_ << "template <class D, class P>\n";
inl_ << type << " " << gen_name_ << "<D, P>::" << name
<< "(const Isolate* isolate" << (f.index ? ", int i" : "")
<< ") const {\n";
<< "(IsolateRoot isolate" << (f.index ? ", int i" : "") << ") const {\n";
// TODO(tebbi): The distinction between relaxed and non-relaxed accesses here
// is pretty arbitrary and just tries to preserve what was there before.

View File

@ -330,7 +330,8 @@ class ReadStringVisitor : public TqObjectVisitor {
GetOrFinish(object->GetResourceDataValue(accessor_));
#ifdef V8_COMPRESS_POINTERS
uintptr_t data_address = static_cast<uintptr_t>(DecodeExternalPointer(
Isolate::FromRoot(GetIsolateRoot(heap_addresses_.any_heap_pointer)),
Isolate::FromRootAddress(
GetIsolateRootAddress(heap_addresses_.any_heap_pointer)),
resource_data));
#else
uintptr_t data_address = static_cast<uintptr_t>(resource_data);