Use CagedPointers for ArrayBuffer backing stores

This CL turns references to ArrayBuffer backing stores from
JSArrayBuffers, JSTypedArrays, and JSDataViews into CagedPointers
when those are enabled.

CagedPointers cannot generally represent nullptr, since the null address
usually lies outside the cage. nullptr backing stores are therefore
replaced with a special empty backing store value which, in the current
implementation, points to the end of the cage, right in front of the
trailing guard regions. As a consequence, it is no longer correct to
compare a backing store pointer against nullptr.
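
A minimal sketch of the resulting pattern (illustrative only; the stub
below stands in for the EmptyBackingStoreBuffer() helper introduced in
this CL):

    #include <cstdint>

    // Stand-in for the real helper added by this CL, which returns the last
    // in-cage byte when caged pointers are enabled and nullptr otherwise.
    static void* EmptyBackingStoreBuffer() {
      static uint8_t placeholder;  // illustrative placeholder address
      return &placeholder;
    }

    // With caged pointers, "no backing store" is signalled by the special
    // empty value, so emptiness is tested against it rather than nullptr.
    static bool HasRealBackingStore(void* backing_store) {
      return backing_store != EmptyBackingStoreBuffer();
    }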

Bug: chromium:1218005
Change-Id: I4a6c7a82aabb4debcb6bb2babe4035ba2da8e79f
Cq-Include-Trybots: luci.v8.try:v8_linux64_heap_sandbox_dbg_ng,v8_linux_arm64_sim_heap_sandbox_dbg_ng
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3244419
Commit-Queue: Samuel Groß <saelo@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78218}
Samuel Groß 2021-12-02 10:45:48 +01:00 committed by V8 LUCI CQ
parent 4f34cd833b
commit 42ed4928cd
24 changed files with 203 additions and 141 deletions

View File

@ -494,6 +494,11 @@ constexpr bool VirtualMemoryCageIsEnabled() {
#endif
}
// CagedPointers are guaranteed to point into the virtual memory cage. This is
// achieved for example by storing them as offset from the cage base rather
// than as raw pointers.
using CagedPointer_t = Address;
#ifdef V8_VIRTUAL_MEMORY_CAGE_IS_AVAILABLE
#define GB (1ULL << 30)
@ -511,17 +516,11 @@ constexpr size_t kVirtualMemoryCageSize = 1ULL << kVirtualMemoryCageSizeLog2;
constexpr size_t kVirtualMemoryCageAlignment =
Internals::kPtrComprCageBaseAlignment;
#ifdef V8_CAGED_POINTERS
// CagedPointers are guaranteed to point into the virtual memory cage. This is
// achieved by storing them as offset from the cage base rather than as raw
// pointers.
using CagedPointer_t = Address;
// For efficiency, the offset is stored shifted to the left, so that
// it is guaranteed that the offset is smaller than the cage size after
// shifting it to the right again. This constant specifies the shift amount.
// Caged pointers are stored inside the heap as offset from the cage base
// shifted to the left. This way, it is guaranteed that the offset is smaller
// than the cage size after shifting it to the right again. This constant
// specifies the shift amount.
constexpr uint64_t kCagedPointerShift = 64 - kVirtualMemoryCageSizeLog2;
#endif
// Size of the guard regions surrounding the virtual memory cage. This assumes a
// worst-case scenario of a 32-bit unsigned index being used to access an array
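
A minimal standalone sketch of this encoding (the real logic lives in
ReadCagedPointerField()/WriteCagedPointerField() further down in this CL;
the 1 TB cage size is purely illustrative):

    #include <cassert>
    #include <cstdint>

    using Address = uint64_t;

    // Illustrative stand-ins for kVirtualMemoryCageSizeLog2 / kCagedPointerShift.
    constexpr uint64_t kCageSizeLog2 = 40;  // assume a 1 TB cage
    constexpr uint64_t kShift = 64 - kCageSizeLog2;

    // Store: keep only the offset from the cage base, shifted into the top bits.
    Address Encode(Address pointer, Address cage_base) {
      return (pointer - cage_base) << kShift;
    }

    // Load: shifting right again guarantees the offset is smaller than the
    // cage size, so the decoded pointer always lands inside the cage.
    Address Decode(Address caged_pointer, Address cage_base) {
      Address offset = caged_pointer >> kShift;
      assert(offset < (uint64_t{1} << kCageSizeLog2));
      return cage_base + offset;
    }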

View File

@ -65,8 +65,8 @@ TNode<JSArrayBuffer> TypedArrayBuiltinsAssembler::AllocateEmptyOnHeapBuffer(
StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kByteLengthOffset,
UintPtrConstant(0));
StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kBackingStoreOffset,
PointerConstant(nullptr));
StoreCagedPointerToObject(buffer, JSArrayBuffer::kBackingStoreOffset,
EmptyBackingStoreBufferConstant());
StoreObjectFieldNoWriteBarrier(buffer, JSArrayBuffer::kExtensionOffset,
IntPtrConstant(0));
for (int offset = JSArrayBuffer::kHeaderSize;
@ -437,10 +437,10 @@ void TypedArrayBuiltinsAssembler::SetJSTypedArrayOnHeapDataPtr(
TNode<IntPtrT> ptr_compr_cage_base =
IntPtrSub(full_base, Signed(ChangeUint32ToWord(compressed_base)));
// Add JSTypedArray::ExternalPointerCompensationForOnHeapArray() to offset.
// See JSTypedArray::AddExternalPointerCompensationForDeserialization().
DCHECK_EQ(
isolate()->cage_base(),
JSTypedArray::ExternalPointerCompensationForOnHeapArray(isolate()));
// See JSTypedArray::SetOnHeapDataPtr() for details.
offset = Unsigned(IntPtrAdd(offset, ptr_compr_cage_base));
}

View File

@ -1539,16 +1539,21 @@ void CodeStubAssembler::BranchIfToBooleanIsTrue(TNode<Object> value,
}
}
#ifdef V8_CAGED_POINTERS
TNode<CagedPtrT> CodeStubAssembler::LoadCagedPointerFromObject(
TNode<RawPtrT> CodeStubAssembler::LoadCagedPointerFromObject(
TNode<HeapObject> object, TNode<IntPtrT> field_offset) {
return LoadObjectField<CagedPtrT>(object, field_offset);
#ifdef V8_CAGED_POINTERS
return ReinterpretCast<RawPtrT>(
LoadObjectField<CagedPtrT>(object, field_offset));
#else
return LoadObjectField<RawPtrT>(object, field_offset);
#endif // V8_CAGED_POINTERS
}
void CodeStubAssembler::StoreCagedPointerToObject(TNode<HeapObject> object,
TNode<IntPtrT> offset,
TNode<CagedPtrT> pointer) {
TNode<RawPtrT> pointer) {
#ifdef V8_CAGED_POINTERS
TNode<CagedPtrT> caged_pointer = ReinterpretCast<CagedPtrT>(pointer);
#ifdef DEBUG
// Verify pointer points into the cage.
TNode<ExternalReference> cage_base_address =
@ -1557,13 +1562,26 @@ void CodeStubAssembler::StoreCagedPointerToObject(TNode<HeapObject> object,
ExternalConstant(ExternalReference::virtual_memory_cage_end_address());
TNode<UintPtrT> cage_base = Load<UintPtrT>(cage_base_address);
TNode<UintPtrT> cage_end = Load<UintPtrT>(cage_end_address);
CSA_CHECK(this, UintPtrGreaterThanOrEqual(pointer, cage_base));
CSA_CHECK(this, UintPtrLessThan(pointer, cage_end));
#endif
StoreObjectFieldNoWriteBarrier<CagedPtrT>(object, offset, pointer);
CSA_DCHECK(this, UintPtrGreaterThanOrEqual(caged_pointer, cage_base));
CSA_DCHECK(this, UintPtrLessThan(caged_pointer, cage_end));
#endif // DEBUG
StoreObjectFieldNoWriteBarrier<CagedPtrT>(object, offset, caged_pointer);
#else
StoreObjectFieldNoWriteBarrier<RawPtrT>(object, offset, pointer);
#endif // V8_CAGED_POINTERS
}
TNode<RawPtrT> CodeStubAssembler::EmptyBackingStoreBufferConstant() {
#ifdef V8_CAGED_POINTERS
// TODO(chromium:1218005) consider creating a LoadCagedPointerConstant() if
// more of these constants are required later on.
TNode<ExternalReference> empty_backing_store_buffer =
ExternalConstant(ExternalReference::empty_backing_store_buffer());
return Load<RawPtrT>(empty_backing_store_buffer);
#else
return ReinterpretCast<RawPtrT>(IntPtrConstant(0));
#endif // V8_CAGED_POINTERS
}
TNode<ExternalPointerT> CodeStubAssembler::ChangeUint32ToExternalPointer(
TNode<Uint32T> value) {
@ -13860,8 +13878,8 @@ void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStorePtr(
TNode<JSArrayBuffer> array_buffer) {
return LoadObjectField<RawPtrT>(array_buffer,
JSArrayBuffer::kBackingStoreOffset);
return LoadCagedPointerFromObject(array_buffer,
JSArrayBuffer::kBackingStoreOffset);
}
TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(

View File

@ -1043,32 +1043,29 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// Works only with V8_ENABLE_FORCE_SLOW_PATH compile time flag. Nop otherwise.
void GotoIfForceSlowPath(Label* if_true);
#ifdef V8_CAGED_POINTERS
//
// Caged pointer related functionality.
//
// Load a caged pointer value from an object.
TNode<CagedPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
int offset) {
TNode<RawPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
int offset) {
return LoadCagedPointerFromObject(object, IntPtrConstant(offset));
}
TNode<CagedPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
TNode<IntPtrT> offset);
TNode<RawPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
TNode<IntPtrT> offset);
// Store a caged pointer value to an object.
void StoreCagedPointerToObject(TNode<HeapObject> object, int offset,
TNode<CagedPtrT> pointer) {
TNode<RawPtrT> pointer) {
StoreCagedPointerToObject(object, IntPtrConstant(offset), pointer);
}
void StoreCagedPointerToObject(TNode<HeapObject> object,
TNode<IntPtrT> offset,
TNode<CagedPtrT> pointer);
TNode<IntPtrT> offset, TNode<RawPtrT> pointer);
#endif // V8_CAGED_POINTERS
TNode<RawPtrT> EmptyBackingStoreBufferConstant();
//
// ExternalPointerT-related functionality.
@ -1148,14 +1145,14 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
TNode<RawPtrT> LoadJSTypedArrayExternalPointerPtr(
TNode<JSTypedArray> holder) {
return LoadObjectField<RawPtrT>(holder,
JSTypedArray::kExternalPointerOffset);
return LoadCagedPointerFromObject(holder,
JSTypedArray::kExternalPointerOffset);
}
void StoreJSTypedArrayExternalPointerPtr(TNode<JSTypedArray> holder,
TNode<RawPtrT> value) {
StoreObjectFieldNoWriteBarrier<RawPtrT>(
holder, JSTypedArray::kExternalPointerOffset, value);
StoreCagedPointerToObject(holder, JSTypedArray::kExternalPointerOffset,
value);
}
// Load value from current parent frame by given offset in bytes.

View File

@ -226,7 +226,7 @@ ExternalReference ExternalReference::handle_scope_implementer_address(
return ExternalReference(isolate->handle_scope_implementer_address());
}
#ifdef V8_VIRTUAL_MEMORY_CAGE
#ifdef V8_CAGED_POINTERS
ExternalReference ExternalReference::virtual_memory_cage_base_address() {
return ExternalReference(GetProcessWideVirtualMemoryCage()->base_address());
}
@ -234,7 +234,13 @@ ExternalReference ExternalReference::virtual_memory_cage_base_address() {
ExternalReference ExternalReference::virtual_memory_cage_end_address() {
return ExternalReference(GetProcessWideVirtualMemoryCage()->end_address());
}
#endif
ExternalReference ExternalReference::empty_backing_store_buffer() {
return ExternalReference(GetProcessWideVirtualMemoryCage()
->constants()
.empty_backing_store_buffer_address());
}
#endif // V8_CAGED_POINTERS
#ifdef V8_HEAP_SANDBOX
ExternalReference ExternalReference::external_pointer_table_address(

View File

@ -318,13 +318,14 @@ class StatsCounter;
#define EXTERNAL_REFERENCE_LIST_INTL(V)
#endif // V8_INTL_SUPPORT
#ifdef V8_VIRTUAL_MEMORY_CAGE
#ifdef V8_CAGED_POINTERS
#define EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V) \
V(virtual_memory_cage_base_address, "V8VirtualMemoryCage::base()") \
V(virtual_memory_cage_end_address, "V8VirtualMemoryCage::end()")
V(virtual_memory_cage_end_address, "V8VirtualMemoryCage::end()") \
V(empty_backing_store_buffer, "EmptyBackingStoreBuffer()")
#else
#define EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V)
#endif // V8_VIRTUAL_MEMORY_CAGE
#endif // V8_CAGED_POINTERS
#ifdef V8_HEAP_SANDBOX
#define EXTERNAL_REFERENCE_LIST_HEAP_SANDBOX(V) \

View File

@ -40,7 +40,9 @@ enum class MachineRepresentation : uint8_t {
kTagged, // (uncompressed) Object (Smi or HeapObject)
kCompressedPointer, // (compressed) HeapObject
kCompressed, // (compressed) Object (Smi or HeapObject)
kCagedPointer, // Guaranteed to point into the virtual memory cage.
// A 64-bit pointer encoded in a way (e.g. as an offset) that guarantees it
// will point into the virtual memory cage.
kCagedPointer,
// FP and SIMD representations must be last, and in order of increasing size.
kFloat32,
kFloat64,

View File

@ -421,8 +421,13 @@ FieldAccess AccessBuilder::ForJSTypedArrayExternalPointer() {
JSTypedArray::kExternalPointerOffset,
MaybeHandle<Name>(),
MaybeHandle<Map>(),
#ifdef V8_CAGED_POINTERS
Type::CagedPointer(),
MachineType::CagedPointer(),
#else
Type::ExternalPointer(),
MachineType::Pointer(),
#endif
kNoWriteBarrier,
ConstFieldInfo::None(),
false,
@ -437,8 +442,13 @@ FieldAccess AccessBuilder::ForJSDataViewDataPointer() {
JSDataView::kDataPointerOffset,
MaybeHandle<Name>(),
MaybeHandle<Map>(),
#ifdef V8_CAGED_POINTERS
Type::CagedPointer(),
MachineType::CagedPointer(),
#else
Type::ExternalPointer(),
MachineType::Pointer(),
#endif
kNoWriteBarrier,
ConstFieldInfo::None(),
false,

View File

@ -840,13 +840,9 @@ void InstructionSelector::VisitLoad(Node* node) {
immediate_mode = kLoadStoreImm64;
break;
case MachineRepresentation::kCagedPointer:
#ifdef V8_CAGED_POINTERS
opcode = kArm64LdrDecodeCagedPointer;
immediate_mode = kLoadStoreImm64;
break;
#else
UNREACHABLE();
#endif
case MachineRepresentation::kSimd128:
opcode = kArm64LdrQ;
immediate_mode = kNoImmediate;
@ -948,13 +944,9 @@ void InstructionSelector::VisitStore(Node* node) {
COMPRESS_POINTERS_BOOL ? kLoadStoreImm32 : kLoadStoreImm64;
break;
case MachineRepresentation::kCagedPointer:
#ifdef V8_CAGED_POINTERS
opcode = kArm64StrEncodeCagedPointer;
immediate_mode = kLoadStoreImm64;
break;
#else
UNREACHABLE();
#endif
case MachineRepresentation::kWord64:
opcode = kArm64Str;
immediate_mode = kLoadStoreImm64;

View File

@ -298,12 +298,8 @@ ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
opcode = kX64Movq;
break;
case MachineRepresentation::kCagedPointer:
#ifdef V8_CAGED_POINTERS
opcode = kX64MovqDecodeCagedPointer;
break;
#else
UNREACHABLE();
#endif
case MachineRepresentation::kSimd128:
opcode = kX64Movdqu;
break;
@ -341,11 +337,7 @@ ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) {
case MachineRepresentation::kWord64:
return kX64Movq;
case MachineRepresentation::kCagedPointer:
#ifdef V8_CAGED_POINTERS
return kX64MovqEncodeCagedPointer;
#else
UNREACHABLE();
#endif
case MachineRepresentation::kSimd128:
return kX64Movdqu;
case MachineRepresentation::kNone: // Fall through.

View File

@ -181,12 +181,6 @@ struct MaybeBoolFlag {
#define V8_VIRTUAL_MEMORY_CAGE_BOOL false
#endif
#ifdef V8_CAGED_POINTERS
#define V8_CAGED_POINTERS_BOOL true
#else
#define V8_CAGED_POINTERS_BOOL false
#endif
// D8's MultiMappedAllocator is only available on Linux, and only if the virtual
// memory cage is not enabled.
#if V8_OS_LINUX && !V8_VIRTUAL_MEMORY_CAGE_BOOL

View File

@ -194,6 +194,8 @@ BackingStore::BackingStore(void* buffer_start, size_t byte_length,
DCHECK_IMPLIES(is_resizable_, free_on_destruct_);
DCHECK_IMPLIES(!is_wasm_memory && !is_resizable_,
byte_length_ == max_byte_length_);
DCHECK_GE(max_byte_length_, byte_length_);
DCHECK_GE(byte_capacity_, max_byte_length_);
}
BackingStore::~BackingStore() {
@ -323,10 +325,9 @@ std::unique_ptr<BackingStore> BackingStore::Allocate(
counters->array_buffer_new_size_failures()->AddSample(mb_length);
return {};
}
DCHECK(IsValidBackingStorePointer(buffer_start));
}
DCHECK(IsValidBackingStorePointer(buffer_start));
auto result = new BackingStore(buffer_start, // start
byte_length, // length
byte_length, // max length

View File

@ -99,8 +99,8 @@ class V8_EXPORT_PRIVATE BackingStore : public BackingStoreBase {
bool free_on_destruct() const { return free_on_destruct_; }
bool IsEmpty() const {
DCHECK_GE(max_byte_length_, byte_length_);
return max_byte_length_ == 0;
DCHECK_GE(byte_capacity_, byte_length_);
return byte_capacity_ == 0;
}
enum ResizeOrGrowResult { kSuccess, kFailure, kRace };

View File

@ -36,12 +36,14 @@ void JSArrayBuffer::set_byte_length(size_t value) {
}
DEF_GETTER(JSArrayBuffer, backing_store, void*) {
return reinterpret_cast<void*>(ReadField<Address>(kBackingStoreOffset));
Address value = ReadCagedPointerField(kBackingStoreOffset, cage_base);
return reinterpret_cast<void*>(value);
}
void JSArrayBuffer::set_backing_store(void* value) {
void JSArrayBuffer::set_backing_store(Isolate* isolate, void* value) {
DCHECK(IsValidBackingStorePointer(value));
WriteField<Address>(kBackingStoreOffset, reinterpret_cast<Address>(value));
Address addr = reinterpret_cast<Address>(value);
WriteCagedPointerField(kBackingStoreOffset, isolate, addr);
}
std::shared_ptr<BackingStore> JSArrayBuffer::GetBackingStore() const {
@ -249,16 +251,12 @@ void JSTypedArray::set_length(size_t value) {
}
DEF_GETTER(JSTypedArray, external_pointer, Address) {
return ReadField<Address>(kExternalPointerOffset);
}
DEF_GETTER(JSTypedArray, external_pointer_raw, Address) {
return ReadField<Address>(kExternalPointerOffset);
return ReadCagedPointerField(kExternalPointerOffset, cage_base);
}
void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) {
DCHECK(IsValidBackingStorePointer(reinterpret_cast<void*>(value)));
WriteField<Address>(kExternalPointerOffset, value);
WriteCagedPointerField(kExternalPointerOffset, isolate, value);
}
Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
@ -283,19 +281,17 @@ void JSTypedArray::SetExternalBackingStoreRefForSerialization(uint32_t ref) {
void JSTypedArray::RemoveExternalPointerCompensationForSerialization(
Isolate* isolate) {
DCHECK(is_on_heap());
// TODO(v8:10391): once we have an external table, avoid the need for
// compensation by replacing external_pointer and base_pointer fields
// with one data_pointer field which can point to either external data
// backing store or into on-heap backing store.
Address offset =
external_pointer() - ExternalPointerCompensationForOnHeapArray(isolate);
#ifdef V8_HEAP_SANDBOX
// Write decompensated offset directly to the external pointer field, thus
// allowing the offset to be propagated through serialization-deserialization.
WriteField<ExternalPointer_t>(kExternalPointerOffset, offset);
#else
set_external_pointer(isolate, offset);
#endif
WriteField<Address>(kExternalPointerOffset, offset);
}
void JSTypedArray::AddExternalPointerCompensationForDeserialization(
Isolate* isolate) {
DCHECK(is_on_heap());
Address pointer = ReadField<Address>(kExternalPointerOffset) +
ExternalPointerCompensationForOnHeapArray(isolate);
set_external_pointer(isolate, pointer);
}
void* JSTypedArray::DataPtr() {
@ -322,14 +318,6 @@ void JSTypedArray::SetOffHeapDataPtr(Isolate* isolate, void* base,
DCHECK_EQ(address, reinterpret_cast<Address>(DataPtr()));
}
void JSTypedArray::SetOnHeapDataPtr(Isolate* isolate, HeapObject base,
Address offset) {
set_base_pointer(base);
set_external_pointer(
isolate, offset + ExternalPointerCompensationForOnHeapArray(isolate));
DCHECK_EQ(base.ptr() + offset, reinterpret_cast<Address>(DataPtr()));
}
bool JSTypedArray::is_on_heap() const {
// Keep synced with `is_on_heap(AcquireLoadTag)`.
DisallowGarbageCollection no_gc;
@ -378,12 +366,14 @@ MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
}
DEF_GETTER(JSDataView, data_pointer, void*) {
return reinterpret_cast<void*>(ReadField<Address>(kDataPointerOffset));
Address value = ReadCagedPointerField(kDataPointerOffset, cage_base);
return reinterpret_cast<void*>(value);
}
void JSDataView::set_data_pointer(Isolate* isolate, void* value) {
DCHECK(IsValidBackingStorePointer(value));
WriteField<Address>(kDataPointerOffset, reinterpret_cast<Address>(value));
void JSDataView::set_data_pointer(Isolate* isolate, void* ptr) {
DCHECK(IsValidBackingStorePointer(ptr));
Address value = reinterpret_cast<Address>(ptr);
WriteCagedPointerField(kDataPointerOffset, isolate, value);
}
} // namespace internal

View File

@ -56,7 +56,7 @@ void JSArrayBuffer::Setup(SharedFlag shared, ResizableFlag resizable,
}
set_extension(nullptr);
if (!backing_store) {
set_backing_store(nullptr);
set_backing_store(GetIsolate(), EmptyBackingStoreBuffer());
set_byte_length(0);
set_max_byte_length(0);
} else {
@ -76,7 +76,16 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
!backing_store->is_wasm_memory() && !backing_store->is_resizable(),
backing_store->byte_length() == backing_store->max_byte_length());
DCHECK(!was_detached());
set_backing_store(backing_store->buffer_start());
DCHECK(IsValidBackingStorePointer(backing_store->buffer_start()));
Isolate* isolate = GetIsolate();
if (backing_store->IsEmpty()) {
set_backing_store(isolate, EmptyBackingStoreBuffer());
} else {
DCHECK_NE(nullptr, backing_store->buffer_start());
set_backing_store(isolate, backing_store->buffer_start());
}
if (is_shared() && is_resizable()) {
// GSABs need to read their byte_length from the BackingStore. Maintain the
// invariant that their byte_length field is always 0.
@ -91,7 +100,7 @@ void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
size_t bytes = backing_store->PerIsolateAccountingLength();
extension->set_accounting_length(bytes);
extension->set_backing_store(std::move(backing_store));
GetIsolate()->heap()->AppendArrayBufferExtension(*this, extension);
isolate->heap()->AppendArrayBufferExtension(*this, extension);
}
void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
@ -120,7 +129,7 @@ void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
DCHECK(!is_shared());
DCHECK(!is_asmjs_memory());
set_backing_store(nullptr);
set_backing_store(isolate, EmptyBackingStoreBuffer());
set_byte_length(0);
set_was_detached(true);
}

View File

@ -39,7 +39,7 @@ class JSArrayBuffer
// [backing_store]: backing memory for this array
// It should not be assumed that this will be nullptr for empty ArrayBuffers.
DECL_GETTER(backing_store, void*)
inline void set_backing_store(void* value);
inline void set_backing_store(Isolate* isolate, void* value);
// [extension]: extension object used for GC
DECL_PRIMITIVE_ACCESSORS(extension, ArrayBufferExtension*)
@ -289,8 +289,6 @@ class JSTypedArray
inline void* DataPtr();
inline void SetOffHeapDataPtr(Isolate* isolate, void* base, Address offset);
inline void SetOnHeapDataPtr(Isolate* isolate, HeapObject base,
Address offset);
// Whether the buffer's backing store is on-heap or off-heap.
inline bool is_on_heap() const;
@ -329,6 +327,9 @@ class JSTypedArray
// Subtracts external pointer compensation from the external pointer value.
inline void RemoveExternalPointerCompensationForSerialization(
Isolate* isolate);
// Adds external pointer compensation to the external pointer value.
inline void AddExternalPointerCompensationForDeserialization(
Isolate* isolate);
static inline MaybeHandle<JSTypedArray> Validate(Isolate* isolate,
Handle<Object> receiver,
@ -365,7 +366,6 @@ class JSTypedArray
inline size_t LengthUnchecked() const;
DECL_GETTER(external_pointer, Address)
DECL_GETTER(external_pointer_raw, ExternalPointer_t)
DECL_SETTER(base_pointer, Object)
DECL_RELEASE_SETTER(base_pointer, Object)

View File

@ -630,7 +630,6 @@ MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
return value;
}
#ifdef V8_CAGED_POINTERS
Address Object::ReadCagedPointerField(size_t offset,
PtrComprCageBase cage_base) const {
return i::ReadCagedPointerField(field_address(offset), cage_base);
@ -646,7 +645,6 @@ void Object::WriteCagedPointerField(size_t offset, Isolate* isolate,
i::WriteCagedPointerField(field_address(offset), PtrComprCageBase(isolate),
value);
}
#endif // V8_CAGED_POINTERS
void Object::InitExternalPointerField(size_t offset, Isolate* isolate) {
i::InitExternalPointerField(field_address(offset), isolate);

View File

@ -700,16 +700,14 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
}
//
// CagedPointer field accessors.
// CagedPointer_t field accessors.
//
#ifdef V8_CAGED_POINTERS
inline Address ReadCagedPointerField(size_t offset,
PtrComprCageBase cage_base) const;
inline void WriteCagedPointerField(size_t offset, PtrComprCageBase cage_base,
Address value);
inline void WriteCagedPointerField(size_t offset, Isolate* isolate,
Address value);
#endif // V8_CAGED_POINTERS
//
// ExternalPointer_t field accessors.

View File

@ -12,23 +12,27 @@
namespace v8 {
namespace internal {
V8_INLINE Address ReadCagedPointerField(Address field_address,
PtrComprCageBase cage_base) {
#ifdef V8_CAGED_POINTERS
V8_INLINE CagedPointer_t ReadCagedPointerField(Address field_address,
PtrComprCageBase cage_base) {
// Caged pointers are currently only used if the sandbox is enabled.
DCHECK(V8_HEAP_SANDBOX_BOOL);
Address caged_pointer = base::ReadUnalignedValue<Address>(field_address);
CagedPointer_t caged_pointer =
base::ReadUnalignedValue<CagedPointer_t>(field_address);
Address offset = caged_pointer >> kCagedPointerShift;
Address pointer = cage_base.address() + offset;
return pointer;
#else
return base::ReadUnalignedValue<Address>(field_address);
#endif
}
V8_INLINE void WriteCagedPointerField(Address field_address,
PtrComprCageBase cage_base,
CagedPointer_t pointer) {
Address pointer) {
#ifdef V8_CAGED_POINTERS
// Caged pointers are currently only used if the sandbox is enabled.
DCHECK(V8_HEAP_SANDBOX_BOOL);
@ -36,12 +40,13 @@ V8_INLINE void WriteCagedPointerField(Address field_address,
DCHECK(GetProcessWideVirtualMemoryCage()->Contains(pointer));
Address offset = pointer - cage_base.address();
Address caged_pointer = offset << kCagedPointerShift;
base::WriteUnalignedValue<Address>(field_address, caged_pointer);
CagedPointer_t caged_pointer = offset << kCagedPointerShift;
base::WriteUnalignedValue<CagedPointer_t>(field_address, caged_pointer);
#else
base::WriteUnalignedValue<Address>(field_address, pointer);
#endif
}
#endif // V8_CAGED_POINTERS
} // namespace internal
} // namespace v8

View File

@ -10,16 +10,12 @@
namespace v8 {
namespace internal {
#ifdef V8_CAGED_POINTERS
V8_INLINE CagedPointer_t ReadCagedPointerField(Address field_address,
PtrComprCageBase cage_base);
V8_INLINE Address ReadCagedPointerField(Address field_address,
PtrComprCageBase cage_base);
V8_INLINE void WriteCagedPointerField(Address field_address,
PtrComprCageBase cage_base,
CagedPointer_t value);
#endif // V8_CAGED_POINTERS
Address value);
} // namespace internal
} // namespace v8

View File

@ -11,6 +11,7 @@
#include "src/base/lazy-instance.h"
#include "src/base/utils/random-number-generator.h"
#include "src/flags/flags.h"
#include "src/security/caged-pointer.h"
#include "src/utils/allocation.h"
#if defined(V8_OS_WIN)
@ -338,6 +339,8 @@ bool V8VirtualMemoryCage::Initialize(v8::PageAllocator* page_allocator,
initialized_ = true;
is_fake_cage_ = false;
InitializeConstants();
return true;
}
@ -400,9 +403,19 @@ bool V8VirtualMemoryCage::InitializeAsFakeCage(
cage_page_allocator_ = std::make_unique<FakeBoundedPageAllocator>(
page_allocator_, base_, size_, reservation_size_);
InitializeConstants();
return true;
}
void V8VirtualMemoryCage::InitializeConstants() {
#ifdef V8_CAGED_POINTERS
// Place the empty backing store buffer at the end of the cage, so that any
// accidental access to it will most likely hit a guard page.
constants_.set_empty_backing_store_buffer(base_ + size_ - 1);
#endif
}
void V8VirtualMemoryCage::TearDown() {
if (initialized_) {
cage_page_allocator_.reset();
@ -416,6 +429,9 @@ void V8VirtualMemoryCage::TearDown() {
initialized_ = false;
is_fake_cage_ = false;
page_allocator_ = nullptr;
#ifdef V8_CAGED_POINTERS
constants_.Reset();
#endif
}
disabled_ = false;
}
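
To illustrate the placement chosen in InitializeConstants() above, a
hypothetical self-check (not part of this CL) could assert that the empty
backing store buffer is the last in-cage byte, so that any multi-byte
access through it spills into the trailing guard regions and faults:

    #include <cassert>
    #include <cstdint>

    using Address = uint64_t;

    // Hypothetical check mirroring InitializeConstants() above: the constant
    // is base_ + size_ - 1, so even a 2-byte access through it would touch
    // base_ + size_, i.e. the first byte of the trailing guard region.
    void CheckEmptyBackingStorePlacement(Address cage_base, uint64_t cage_size,
                                         Address empty_backing_store_buffer) {
      assert(empty_backing_store_buffer == cage_base + cage_size - 1);
      assert(empty_backing_store_buffer + 1 == cage_base + cage_size);
    }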

View File

@ -92,6 +92,27 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage {
return Contains(reinterpret_cast<Address>(ptr));
}
#ifdef V8_CAGED_POINTERS
class CagedPointerConstants final {
public:
Address empty_backing_store_buffer() const {
return empty_backing_store_buffer_;
}
Address empty_backing_store_buffer_address() const {
return reinterpret_cast<Address>(&empty_backing_store_buffer_);
}
void set_empty_backing_store_buffer(Address value) {
empty_backing_store_buffer_ = value;
}
void Reset() { empty_backing_store_buffer_ = 0; }
private:
Address empty_backing_store_buffer_ = 0;
};
const CagedPointerConstants& constants() const { return constants_; }
#endif
private:
// The SequentialUnmapperTest calls the private Initialize method to create a
// cage without guard regions, which would otherwise consume too much memory.
@ -114,6 +135,10 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage {
bool InitializeAsFakeCage(v8::PageAllocator* page_allocator, size_t size,
size_t size_to_reserve);
// Initialize the caged pointer constants for this cage. Called by the
// Initialize methods above.
void InitializeConstants();
Address base_ = kNullAddress;
Address end_ = kNullAddress;
size_t size_ = 0;
@ -132,6 +157,11 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage {
v8::PageAllocator* page_allocator_ = nullptr;
// The allocator to allocate pages inside the cage.
std::unique_ptr<v8::PageAllocator> cage_page_allocator_;
#ifdef V8_CAGED_POINTERS
// CagedPointer constants inside this cage.
CagedPointerConstants constants_;
#endif
};
#endif // V8_VIRTUAL_MEMORY_CAGE_IS_AVAILABLE
@ -151,6 +181,16 @@ V8_INLINE bool IsValidBackingStorePointer(void* ptr) {
#endif
}
V8_INLINE void* EmptyBackingStoreBuffer() {
#ifdef V8_CAGED_POINTERS
return reinterpret_cast<void*>(GetProcessWideVirtualMemoryCage()
->constants()
.empty_backing_store_buffer());
#else
return nullptr;
#endif
}
} // namespace internal
} // namespace v8

View File

@ -486,7 +486,7 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
} else if (InstanceTypeChecker::IsJSDataView(instance_type)) {
Handle<JSDataView> data_view = Handle<JSDataView>::cast(obj);
JSArrayBuffer buffer = JSArrayBuffer::cast(data_view->buffer());
void* backing_store = nullptr;
void* backing_store = EmptyBackingStoreBuffer();
uint32_t store_index = buffer.GetBackingStoreRefForDeserialization();
if (store_index != kEmptyBackingStoreRefSentinel) {
// The backing store of the JSArrayBuffer has not been correctly restored
@ -501,18 +501,15 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
Handle<JSTypedArray> typed_array = Handle<JSTypedArray>::cast(obj);
// Fixup typed array pointers.
if (typed_array->is_on_heap()) {
Address raw_external_pointer = typed_array->external_pointer_raw();
typed_array->SetOnHeapDataPtr(
main_thread_isolate(), HeapObject::cast(typed_array->base_pointer()),
raw_external_pointer);
typed_array->AddExternalPointerCompensationForDeserialization(
main_thread_isolate());
} else {
// Serializer writes backing store ref as a DataPtr() value.
uint32_t store_index =
typed_array->GetExternalBackingStoreRefForDeserialization();
auto backing_store = backing_stores_[store_index];
auto start = backing_store
? reinterpret_cast<byte*>(backing_store->buffer_start())
: nullptr;
void* start = backing_store ? backing_store->buffer_start()
: EmptyBackingStoreBuffer();
typed_array->SetOffHeapDataPtr(main_thread_isolate(), start,
typed_array->byte_offset());
}
@ -523,7 +520,8 @@ void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
kEmptyBackingStoreRefSentinel) {
new_off_heap_array_buffers_.push_back(buffer);
} else {
buffer->set_backing_store(nullptr);
buffer->set_backing_store(main_thread_isolate(),
EmptyBackingStoreBuffer());
}
} else if (InstanceTypeChecker::IsBytecodeArray(instance_type)) {
// TODO(mythria): Remove these once we store the default values for these

View File

@ -545,7 +545,7 @@ void Serializer::ObjectSerializer::SerializeJSArrayBuffer() {
SerializeObject();
buffer->set_backing_store(backing_store);
buffer->set_backing_store(isolate(), backing_store);
buffer->set_extension(extension);
}