Introduce CagedPointer

A CagedPointer is guaranteed to point into the Virtual Memory Cage and
will for example be used for ArrayBuffer backing stores when the heap
sandbox is enabled. In the current implementation, CagedPointers are
stored as offsets from the cage base, shifted to the left. Because the
cage base address is usually available in a register, accessing a
CagedPointer is very efficient, requiring only an additional shift and
add operation.

Bug: chromium:1218005
Change-Id: Ifc8c088e3862400672051a8c52840514dee2911f
Cq-Include-Trybots: luci.v8.try:v8_linux64_heap_sandbox_dbg_ng,v8_linux_arm64_sim_heap_sandbox_dbg_ng
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3123417
Reviewed-by: Toon Verwaest <verwaest@chromium.org>
Reviewed-by: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Samuel Groß <saelo@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77614}
This commit is contained in:
Samuel Groß 2021-10-29 14:51:58 +02:00 committed by V8 LUCI CQ
parent f4b4bfdcd3
commit afd1554963
44 changed files with 467 additions and 24 deletions

View File

@ -1881,6 +1881,8 @@ filegroup(
"src/runtime/runtime.h",
"src/security/external-pointer-table.cc",
"src/security/vm-cage.cc",
"src/security/caged-pointer-inl.h",
"src/security/caged-pointer.h",
"src/security/external-pointer-inl.h",
"src/security/external-pointer-table.h",
"src/security/external-pointer.h",

View File

@ -3325,6 +3325,8 @@ v8_header_set("v8_internal_headers") {
"src/roots/roots.h",
"src/runtime/runtime-utils.h",
"src/runtime/runtime.h",
"src/security/caged-pointer-inl.h",
"src/security/caged-pointer.h",
"src/security/external-pointer-inl.h",
"src/security/external-pointer-table.h",
"src/security/external-pointer.h",

View File

@ -500,7 +500,8 @@ constexpr bool VirtualMemoryCageIsEnabled() {
#define TB (1ULL << 40)
// Size of the virtual memory cage, excluding the guard regions surrounding it.
constexpr size_t kVirtualMemoryCageSize = 1ULL * TB;
constexpr size_t kVirtualMemoryCageSizeLog2 = 40; // 1 TB
constexpr size_t kVirtualMemoryCageSize = 1ULL << kVirtualMemoryCageSizeLog2;
// Required alignment of the virtual memory cage. For simplicity, we require the
// size of the guard regions to be a multiple of this, so that this specifies
@ -510,6 +511,18 @@ constexpr size_t kVirtualMemoryCageSize = 1ULL * TB;
constexpr size_t kVirtualMemoryCageAlignment =
Internals::kPtrComprCageBaseAlignment;
#ifdef V8_CAGED_POINTERS
// CagedPointers are guaranteed to point into the virtual memory cage. This is
// achieved by storing them as an offset from the cage base rather than as raw
// pointers.
using CagedPointer_t = Address;
// For efficiency, the offset is stored shifted to the left, so that
// it is guaranteed that the offset is smaller than the cage size after
// shifting it to the right again. This constant specifies the shift amount.
constexpr uint64_t kCagedPointerShift = 64 - kVirtualMemoryCageSizeLog2;
#endif
// Size of the guard regions surrounding the virtual memory cage. This assumes a
// worst-case scenario of a 32-bit unsigned index being used to access an array
// of 64-bit values.

View File

@ -560,6 +560,12 @@ V8 shared library set USING_V8_SHARED.
#define V8_VIRTUAL_MEMORY_CAGE_IS_AVAILABLE
#endif
// CagedPointers are currently only used if the heap sandbox is enabled.
// In the future, they will be enabled when the virtual memory cage is enabled.
#if defined(V8_HEAP_SANDBOX)
#define V8_CAGED_POINTERS
#endif
// clang-format on
#undef V8_HAS_CPP_ATTRIBUTE

View File

@ -3072,6 +3072,43 @@ void MacroAssembler::RecordWriteField(Register object, int offset,
Bind(&done);
}
// Convert a raw pointer (which must point into the virtual memory cage) into
// its encoded on-heap form: (pointer - cage_base) << kCagedPointerShift.
// The transformation is performed in place on |value|.
void TurboAssembler::EncodeCagedPointer(const Register& value) {
ASM_CODE_COMMENT(this);
#ifdef V8_CAGED_POINTERS
// offset = pointer - cage_base
Sub(value, value, kPtrComprCageBaseRegister);
// encoded = offset << kCagedPointerShift
Mov(value, Operand(value, LSL, kCagedPointerShift));
#else
UNREACHABLE();
#endif
}
// Inverse of EncodeCagedPointer: turn the encoded (shifted-offset) on-heap
// form back into a raw pointer by shifting right and re-adding the cage base.
// The transformation is performed in place on |value|.
void TurboAssembler::DecodeCagedPointer(const Register& value) {
ASM_CODE_COMMENT(this);
#ifdef V8_CAGED_POINTERS
// pointer = cage_base + (encoded >> kCagedPointerShift)
Add(value, kPtrComprCageBaseRegister,
Operand(value, LSR, kCagedPointerShift));
#else
UNREACHABLE();
#endif
}
// Load an encoded CagedPointer from |field_operand| and decode it into a raw
// pointer in |destination|.
void TurboAssembler::LoadCagedPointerField(const Register& destination,
const MemOperand& field_operand) {
ASM_CODE_COMMENT(this);
Ldr(destination, field_operand);
DecodeCagedPointer(destination);
}
// Encode |value| as a CagedPointer and store it to |dst_field_operand|.
// The encoding is done on a scratch register so that |value| itself is left
// unmodified for the caller.
void TurboAssembler::StoreCagedPointerField(
const Register& value, const MemOperand& dst_field_operand) {
ASM_CODE_COMMENT(this);
UseScratchRegisterScope temps(this);
Register scratch = temps.AcquireX();
Mov(scratch, value);
EncodeCagedPointer(scratch);
Str(scratch, dst_field_operand);
}
void TurboAssembler::LoadExternalPointerField(Register destination,
MemOperand field_operand,
ExternalPointerTag tag,

View File

@ -1441,6 +1441,19 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
// ---------------------------------------------------------------------------
// V8 Heap sandbox support
// Transform a CagedPointer from/to its encoded form, which is used when the
// pointer is stored on the heap and ensures that the pointer will always
// point into the virtual memory cage.
void EncodeCagedPointer(const Register& value);
void DecodeCagedPointer(const Register& value);
// Load and decode a CagedPointer from the heap.
void LoadCagedPointerField(const Register& destination,
const MemOperand& field_operand);
// Encode and store a CagedPointer to the heap.
void StoreCagedPointerField(const Register& value,
const MemOperand& dst_field_operand);
// Loads a field containing off-heap pointer and does necessary decoding
// if V8 heap sandbox is enabled.
void LoadExternalPointerField(Register destination, MemOperand field_operand,

View File

@ -1539,6 +1539,32 @@ void CodeStubAssembler::BranchIfToBooleanIsTrue(TNode<Object> value,
}
}
#ifdef V8_CAGED_POINTERS
// Load a caged pointer field from |object| at |field_offset|. The decode from
// the encoded (shifted-offset) form is handled by the machine-level load for
// the kCagedPointer representation, not here.
TNode<CagedPtrT> CodeStubAssembler::LoadCagedPointerFromObject(
TNode<HeapObject> object, TNode<IntPtrT> field_offset) {
return LoadObjectField<CagedPtrT>(object, field_offset);
}
// Store |pointer| into |object| at |offset| as a caged pointer. In debug
// builds, first verify that the raw pointer lies within the half-open range
// [cage_base, cage_end). The store itself uses no write barrier since the
// stored value is not a tagged pointer.
void CodeStubAssembler::StoreCagedPointerToObject(TNode<HeapObject> object,
TNode<IntPtrT> offset,
TNode<CagedPtrT> pointer) {
#ifdef DEBUG
// Verify pointer points into the cage.
TNode<ExternalReference> cage_base_address =
ExternalConstant(ExternalReference::virtual_memory_cage_base_address());
TNode<ExternalReference> cage_end_address =
ExternalConstant(ExternalReference::virtual_memory_cage_end_address());
TNode<UintPtrT> cage_base = Load<UintPtrT>(cage_base_address);
TNode<UintPtrT> cage_end = Load<UintPtrT>(cage_end_address);
CSA_CHECK(this, UintPtrGreaterThanOrEqual(pointer, cage_base));
CSA_CHECK(this, UintPtrLessThan(pointer, cage_end));
#endif
StoreObjectFieldNoWriteBarrier<CagedPtrT>(object, offset, pointer);
}
#endif // V8_CAGED_POINTERS
TNode<ExternalPointerT> CodeStubAssembler::ChangeUint32ToExternalPointer(
TNode<Uint32T> value) {
STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize);

View File

@ -1042,6 +1042,33 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
// Works only with V8_ENABLE_FORCE_SLOW_PATH compile time flag. Nop otherwise.
void GotoIfForceSlowPath(Label* if_true);
#ifdef V8_CAGED_POINTERS
//
// Caged pointer related functionality.
//
// Load a caged pointer value from an object.
TNode<CagedPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
int offset) {
return LoadCagedPointerFromObject(object, IntPtrConstant(offset));
}
TNode<CagedPtrT> LoadCagedPointerFromObject(TNode<HeapObject> object,
TNode<IntPtrT> offset);
// Store a caged pointer value to an object.
void StoreCagedPointerToObject(TNode<HeapObject> object, int offset,
TNode<CagedPtrT> pointer) {
StoreCagedPointerToObject(object, IntPtrConstant(offset), pointer);
}
void StoreCagedPointerToObject(TNode<HeapObject> object,
TNode<IntPtrT> offset,
TNode<CagedPtrT> pointer);
#endif // V8_CAGED_POINTERS
//
// ExternalPointerT-related functionality.
//

View File

@ -207,6 +207,16 @@ ExternalReference ExternalReference::handle_scope_implementer_address(
return ExternalReference(isolate->handle_scope_implementer_address());
}
#ifdef V8_VIRTUAL_MEMORY_CAGE
// External references to the process-wide virtual memory cage bounds. These
// are read by generated code (e.g. the CSA debug checks in
// StoreCagedPointerToObject) to verify that caged pointers lie in the cage.
ExternalReference ExternalReference::virtual_memory_cage_base_address() {
return ExternalReference(GetProcessWideVirtualMemoryCage()->base_address());
}
ExternalReference ExternalReference::virtual_memory_cage_end_address() {
return ExternalReference(GetProcessWideVirtualMemoryCage()->end_address());
}
#endif
#ifdef V8_HEAP_SANDBOX
ExternalReference ExternalReference::external_pointer_table_address(
Isolate* isolate) {

View File

@ -298,6 +298,7 @@ class StatsCounter;
V(re_experimental_match_for_call_from_js, \
"ExperimentalRegExp::MatchForCallFromJs") \
EXTERNAL_REFERENCE_LIST_INTL(V) \
EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V) \
EXTERNAL_REFERENCE_LIST_HEAP_SANDBOX(V)
#ifdef V8_INTL_SUPPORT
#define EXTERNAL_REFERENCE_LIST_INTL(V) \
@ -307,6 +308,14 @@ class StatsCounter;
#define EXTERNAL_REFERENCE_LIST_INTL(V)
#endif // V8_INTL_SUPPORT
#ifdef V8_VIRTUAL_MEMORY_CAGE
#define EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V) \
V(virtual_memory_cage_base_address, "V8VirtualMemoryCage::base()") \
V(virtual_memory_cage_end_address, "V8VirtualMemoryCage::end()")
#else
#define EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V)
#endif // V8_VIRTUAL_MEMORY_CAGE
#ifdef V8_HEAP_SANDBOX
#define EXTERNAL_REFERENCE_LIST_HEAP_SANDBOX(V) \
V(external_pointer_table_grow_table_function, \

View File

@ -57,6 +57,8 @@ const char* MachineReprToString(MachineRepresentation rep) {
return "kRepCompressed";
case MachineRepresentation::kMapWord:
return "kRepMapWord";
case MachineRepresentation::kCagedPointer:
return "kRepCagedPointer";
}
UNREACHABLE();
}

View File

@ -40,6 +40,7 @@ enum class MachineRepresentation : uint8_t {
kTagged, // (uncompressed) Object (Smi or HeapObject)
kCompressedPointer, // (compressed) HeapObject
kCompressed, // (compressed) Object (Smi or HeapObject)
kCagedPointer, // Guaranteed to point into the virtual memory cage.
// FP and SIMD representations must be last, and in order of increasing size.
kFloat32,
kFloat64,
@ -222,6 +223,10 @@ class MachineType {
return MachineType(MachineRepresentation::kCompressed,
MachineSemantic::kAny);
}
constexpr static MachineType CagedPointer() {
return MachineType(MachineRepresentation::kCagedPointer,
MachineSemantic::kNone);
}
constexpr static MachineType Bool() {
return MachineType(MachineRepresentation::kBit, MachineSemantic::kBool);
}
@ -260,6 +265,8 @@ class MachineType {
return MachineType::AnyCompressed();
case MachineRepresentation::kCompressedPointer:
return MachineType::CompressedPointer();
case MachineRepresentation::kCagedPointer:
return MachineType::CagedPointer();
default:
UNREACHABLE();
}
@ -353,6 +360,8 @@ V8_EXPORT_PRIVATE inline constexpr int ElementSizeLog2Of(
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
return kTaggedSizeLog2;
case MachineRepresentation::kCagedPointer:
return kSystemPointerSizeLog2;
default:
UNREACHABLE();
}

View File

@ -35,6 +35,11 @@ struct RawPtrT : WordT {
static constexpr MachineType kMachineType = MachineType::Pointer();
};
// A RawPtrT that is guaranteed to point into the virtual memory cage.
struct CagedPtrT : WordT {
static constexpr MachineType kMachineType = MachineType::CagedPointer();
};
template <class To>
struct RawPtr : RawPtrT {};

View File

@ -376,6 +376,43 @@ void MacroAssembler::RecordWriteField(Register object, int offset,
}
}
// Convert a raw pointer (which must point into the virtual memory cage) into
// its encoded on-heap form: (pointer - cage_base) << kCagedPointerShift.
// The transformation is performed in place on |value|.
void TurboAssembler::EncodeCagedPointer(Register value) {
ASM_CODE_COMMENT(this);
#ifdef V8_CAGED_POINTERS
// offset = pointer - cage_base
subq(value, kPtrComprCageBaseRegister);
// encoded = offset << kCagedPointerShift
shlq(value, Immediate(kCagedPointerShift));
#else
UNREACHABLE();
#endif
}
// Inverse of EncodeCagedPointer: turn the encoded (shifted-offset) on-heap
// form back into a raw pointer by shifting right and re-adding the cage base.
// The transformation is performed in place on |value|.
void TurboAssembler::DecodeCagedPointer(Register value) {
ASM_CODE_COMMENT(this);
#ifdef V8_CAGED_POINTERS
// pointer = cage_base + (encoded >> kCagedPointerShift)
shrq(value, Immediate(kCagedPointerShift));
addq(value, kPtrComprCageBaseRegister);
#else
UNREACHABLE();
#endif
}
// Load an encoded CagedPointer from |field_operand| and decode it into a raw
// pointer in |destination|.
void TurboAssembler::LoadCagedPointerField(Register destination,
Operand field_operand) {
ASM_CODE_COMMENT(this);
movq(destination, field_operand);
DecodeCagedPointer(destination);
}
// Encode |value| as a CagedPointer and store it to |dst_field_operand|.
// kScratchRegister is used for the encoding so that |value| stays unmodified;
// the DCHECKs guard against |value| or the destination address aliasing it.
void TurboAssembler::StoreCagedPointerField(Operand dst_field_operand,
Register value) {
ASM_CODE_COMMENT(this);
DCHECK(!AreAliased(value, kScratchRegister));
DCHECK(!dst_field_operand.AddressUsesRegister(kScratchRegister));
movq(kScratchRegister, value);
EncodeCagedPointer(kScratchRegister);
movq(dst_field_operand, kScratchRegister);
}
void TurboAssembler::LoadExternalPointerField(
Register destination, Operand field_operand, ExternalPointerTag tag,
Register scratch, IsolateRootLocation isolateRootLocation) {

View File

@ -604,6 +604,17 @@ class V8_EXPORT_PRIVATE TurboAssembler
// ---------------------------------------------------------------------------
// V8 Heap sandbox support
// Transform a CagedPointer from/to its encoded form, which is used when the
// pointer is stored on the heap and ensures that the pointer will always
// point into the virtual memory cage.
void EncodeCagedPointer(Register value);
void DecodeCagedPointer(Register value);
// Load and decode a CagedPointer from the heap.
void LoadCagedPointerField(Register destination, Operand field_operand);
// Encode and store a CagedPointer to the heap.
void StoreCagedPointerField(Operand dst_field_operand, Register value);
enum class IsolateRootLocation { kInScratchRegister, kInRootRegister };
// Loads a field containing off-heap pointer and does necessary decoding
// if V8 heap sandbox is enabled.

View File

@ -626,6 +626,7 @@ void InstructionSelector::VisitLoad(Node* node) {
break;
case MachineRepresentation::kCompressedPointer: // Fall through.
case MachineRepresentation::kCompressed: // Fall through.
case MachineRepresentation::kCagedPointer: // Fall through.
case MachineRepresentation::kWord64: // Fall through.
case MachineRepresentation::kMapWord: // Fall through.
case MachineRepresentation::kNone:
@ -663,6 +664,7 @@ ArchOpcode GetStoreOpcode(MachineRepresentation rep) {
return kArmVst1S128;
case MachineRepresentation::kCompressedPointer: // Fall through.
case MachineRepresentation::kCompressed: // Fall through.
case MachineRepresentation::kCagedPointer: // Fall through.
case MachineRepresentation::kWord64: // Fall through.
case MachineRepresentation::kMapWord: // Fall through.
case MachineRepresentation::kNone:

View File

@ -1894,6 +1894,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ AtomicDecompressAnyTagged(i.OutputRegister(), i.InputRegister(0),
i.InputRegister(1), i.TempRegister(0));
break;
case kArm64LdrDecodeCagedPointer:
__ LoadCagedPointerField(i.OutputRegister(), i.MemoryOperand());
break;
case kArm64Str:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
__ Str(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
@ -1907,6 +1910,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ AtomicStoreTaggedField(i.InputRegister(2), i.InputRegister(0),
i.InputRegister(1), i.TempRegister(0));
break;
case kArm64StrEncodeCagedPointer:
__ StoreCagedPointerField(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
break;
case kArm64LdrS:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
__ Ldr(i.OutputDoubleRegister().S(), i.MemoryOperand());

View File

@ -201,6 +201,8 @@ namespace compiler {
V(Arm64LdarDecompressAnyTagged) \
V(Arm64StrCompressTagged) \
V(Arm64StlrCompressTagged) \
V(Arm64LdrDecodeCagedPointer) \
V(Arm64StrEncodeCagedPointer) \
V(Arm64DmbIsh) \
V(Arm64DsbIsb) \
V(Arm64Sxtl) \

View File

@ -317,6 +317,7 @@ int InstructionScheduler::GetTargetInstructionFlags(
case kArm64LdarDecompressTaggedSigned:
case kArm64LdarDecompressTaggedPointer:
case kArm64LdarDecompressAnyTagged:
case kArm64LdrDecodeCagedPointer:
case kArm64Peek:
case kArm64LoadSplat:
case kArm64LoadLane:
@ -340,6 +341,7 @@ int InstructionScheduler::GetTargetInstructionFlags(
case kArm64Str:
case kArm64StrCompressTagged:
case kArm64StlrCompressTagged:
case kArm64StrEncodeCagedPointer:
case kArm64DmbIsh:
case kArm64DsbIsb:
case kArm64StoreLane:

View File

@ -839,6 +839,14 @@ void InstructionSelector::VisitLoad(Node* node) {
opcode = kArm64Ldr;
immediate_mode = kLoadStoreImm64;
break;
case MachineRepresentation::kCagedPointer:
#ifdef V8_CAGED_POINTERS
opcode = kArm64LdrDecodeCagedPointer;
immediate_mode = kLoadStoreImm64;
break;
#else
UNREACHABLE();
#endif
case MachineRepresentation::kSimd128:
opcode = kArm64LdrQ;
immediate_mode = kNoImmediate;
@ -939,6 +947,14 @@ void InstructionSelector::VisitStore(Node* node) {
immediate_mode =
COMPRESS_POINTERS_BOOL ? kLoadStoreImm32 : kLoadStoreImm64;
break;
case MachineRepresentation::kCagedPointer:
#ifdef V8_CAGED_POINTERS
opcode = kArm64StrEncodeCagedPointer;
immediate_mode = kLoadStoreImm64;
break;
#else
UNREACHABLE();
#endif
case MachineRepresentation::kWord64:
opcode = kArm64Str;
immediate_mode = kLoadStoreImm64;

View File

@ -276,6 +276,7 @@ ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
break;
case MachineRepresentation::kCompressedPointer: // Fall through.
case MachineRepresentation::kCompressed: // Fall through.
case MachineRepresentation::kCagedPointer: // Fall through.
case MachineRepresentation::kWord64: // Fall through.
case MachineRepresentation::kMapWord: // Fall through.
case MachineRepresentation::kNone:
@ -619,6 +620,7 @@ ArchOpcode GetStoreOpcode(MachineRepresentation rep) {
return kIA32Movdqu;
case MachineRepresentation::kCompressedPointer: // Fall through.
case MachineRepresentation::kCompressed: // Fall through.
case MachineRepresentation::kCagedPointer: // Fall through.
case MachineRepresentation::kWord64: // Fall through.
case MachineRepresentation::kMapWord: // Fall through.
case MachineRepresentation::kNone:

View File

@ -259,6 +259,9 @@ std::ostream& operator<<(std::ostream& os, const InstructionOperand& op) {
case MachineRepresentation::kCompressed:
os << "|c";
break;
case MachineRepresentation::kCagedPointer:
os << "|cg";
break;
case MachineRepresentation::kMapWord:
UNREACHABLE();
}
@ -928,6 +931,7 @@ static MachineRepresentation FilterRepresentation(MachineRepresentation rep) {
case MachineRepresentation::kSimd128:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
case MachineRepresentation::kCagedPointer:
return rep;
case MachineRepresentation::kNone:
case MachineRepresentation::kMapWord:

View File

@ -553,6 +553,7 @@ class LocationOperand : public InstructionOperand {
case MachineRepresentation::kTagged:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
case MachineRepresentation::kCagedPointer:
return true;
case MachineRepresentation::kBit:
case MachineRepresentation::kWord8:

View File

@ -51,6 +51,7 @@ inline int ByteWidthForStackSlot(MachineRepresentation rep) {
case MachineRepresentation::kWord16:
case MachineRepresentation::kWord32:
case MachineRepresentation::kFloat32:
case MachineRepresentation::kCagedPointer:
return kSystemPointerSize;
case MachineRepresentation::kTaggedSigned:
case MachineRepresentation::kTaggedPointer:

View File

@ -344,6 +344,9 @@ void EmitStore(TurboAssembler* tasm, Operand operand, Register value,
case MachineRepresentation::kTagged:
tasm->StoreTaggedField(operand, value);
break;
case MachineRepresentation::kCagedPointer:
tasm->StoreCagedPointerField(operand, value);
break;
default:
UNREACHABLE();
}
@ -509,19 +512,33 @@ void EmitTSANStoreOOL(Zone* zone, CodeGenerator* codegen, TurboAssembler* tasm,
template <std::memory_order order>
Register GetTSANValueRegister(TurboAssembler* tasm, Register value,
X64OperandConverter& i) {
X64OperandConverter& i,
MachineRepresentation rep) {
if (rep == MachineRepresentation::kCagedPointer) {
// CagedPointers need to be encoded.
Register value_reg = i.TempRegister(1);
tasm->movq(value_reg, value);
tasm->EncodeCagedPointer(value_reg);
return value_reg;
}
return value;
}
template <std::memory_order order>
Register GetTSANValueRegister(TurboAssembler* tasm, Immediate value,
X64OperandConverter& i);
X64OperandConverter& i,
MachineRepresentation rep);
template <>
Register GetTSANValueRegister<std::memory_order_relaxed>(
TurboAssembler* tasm, Immediate value, X64OperandConverter& i) {
TurboAssembler* tasm, Immediate value, X64OperandConverter& i,
MachineRepresentation rep) {
Register value_reg = i.TempRegister(1);
tasm->movq(value_reg, value);
if (rep == MachineRepresentation::kCagedPointer) {
// CagedPointers need to be encoded.
tasm->EncodeCagedPointer(value_reg);
}
return value_reg;
}
@ -539,7 +556,7 @@ void EmitTSANAwareStore(Zone* zone, CodeGenerator* codegen,
int size = ElementSizeInBytes(rep);
EmitMemoryProbeForTrapHandlerIfNeeded(tasm, i.TempRegister(0), operand,
stub_call_mode, size);
Register value_reg = GetTSANValueRegister<order>(tasm, value, i);
Register value_reg = GetTSANValueRegister<order>(tasm, value, i, rep);
EmitTSANStoreOOL(zone, codegen, tasm, operand, value_reg, i, stub_call_mode,
size, order);
} else {
@ -2369,6 +2386,28 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
}
break;
}
case kX64MovqDecodeCagedPointer: {
CHECK(instr->HasOutput());
Operand address(i.MemoryOperand());
Register dst = i.OutputRegister();
__ movq(dst, address);
__ DecodeCagedPointer(dst);
EmitTSANRelaxedLoadOOLIfNeeded(zone(), this, tasm(), address, i,
DetermineStubCallMode(),
kSystemPointerSize);
break;
}
case kX64MovqEncodeCagedPointer: {
CHECK(!instr->HasOutput());
size_t index = 0;
Operand operand = i.MemoryOperand(&index);
CHECK(!HasImmediateInput(instr, index));
Register value(i.InputRegister(index));
EmitTSANAwareStore<std::memory_order_relaxed>(
zone(), this, tasm(), operand, value, i, DetermineStubCallMode(),
MachineRepresentation::kCagedPointer);
break;
}
case kX64Movq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset());
if (instr->HasOutput()) {

View File

@ -163,6 +163,8 @@ namespace compiler {
V(X64MovqDecompressTaggedPointer) \
V(X64MovqDecompressAnyTagged) \
V(X64MovqCompressTagged) \
V(X64MovqEncodeCagedPointer) \
V(X64MovqDecodeCagedPointer) \
V(X64BitcastFI) \
V(X64BitcastDL) \
V(X64BitcastIF) \

View File

@ -396,6 +396,8 @@ int InstructionScheduler::GetTargetInstructionFlags(
case kX64MovqDecompressTaggedPointer:
case kX64MovqDecompressAnyTagged:
case kX64MovqCompressTagged:
case kX64MovqDecodeCagedPointer:
case kX64MovqEncodeCagedPointer:
case kX64Movq:
case kX64Movsd:
case kX64Movss:

View File

@ -297,6 +297,13 @@ ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
case MachineRepresentation::kWord64:
opcode = kX64Movq;
break;
case MachineRepresentation::kCagedPointer:
#ifdef V8_CAGED_POINTERS
opcode = kX64MovqDecodeCagedPointer;
break;
#else
UNREACHABLE();
#endif
case MachineRepresentation::kSimd128:
opcode = kX64Movdqu;
break;
@ -333,6 +340,12 @@ ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) {
return kX64MovqCompressTagged;
case MachineRepresentation::kWord64:
return kX64Movq;
case MachineRepresentation::kCagedPointer:
#ifdef V8_CAGED_POINTERS
return kX64MovqEncodeCagedPointer;
#else
UNREACHABLE();
#endif
case MachineRepresentation::kSimd128:
return kX64Movdqu;
case MachineRepresentation::kNone: // Fall through.

View File

@ -2556,6 +2556,7 @@ JSNativeContextSpecialization::BuildPropertyStore(
case MachineRepresentation::kBit:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
case MachineRepresentation::kCagedPointer:
case MachineRepresentation::kWord8:
case MachineRepresentation::kWord16:
case MachineRepresentation::kWord32:

View File

@ -1069,6 +1069,7 @@ Reduction LoadElimination::ReduceLoadElement(Node* node) {
case MachineRepresentation::kFloat32:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
case MachineRepresentation::kCagedPointer:
// TODO(turbofan): Add support for doing the truncations.
break;
case MachineRepresentation::kFloat64:
@ -1125,6 +1126,7 @@ Reduction LoadElimination::ReduceStoreElement(Node* node) {
case MachineRepresentation::kFloat32:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
case MachineRepresentation::kCagedPointer:
// TODO(turbofan): Add support for doing the truncations.
break;
case MachineRepresentation::kFloat64:
@ -1429,6 +1431,7 @@ LoadElimination::IndexRange LoadElimination::FieldIndexOf(
case MachineRepresentation::kMapWord:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
case MachineRepresentation::kCagedPointer:
break;
}
int representation_size = ElementSizeInBytes(rep);

View File

@ -996,6 +996,7 @@ class MachineRepresentationChecker {
// happens in dead code.
return IsAnyTagged(actual);
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCagedPointer:
case MachineRepresentation::kFloat32:
case MachineRepresentation::kFloat64:
case MachineRepresentation::kSimd128:

View File

@ -667,6 +667,7 @@ std::ostream& operator<<(std::ostream& os, TruncateKind kind) {
V(MapInHeader) \
V(AnyTagged) \
V(CompressedPointer) \
V(CagedPointer) \
V(AnyCompressed)
#define MACHINE_REPRESENTATION_LIST(V) \
@ -682,6 +683,7 @@ std::ostream& operator<<(std::ostream& os, TruncateKind kind) {
V(kTaggedPointer) \
V(kTagged) \
V(kCompressedPointer) \
V(kCagedPointer) \
V(kCompressed)
#define LOAD_TRANSFORM_LIST(V) \

View File

@ -534,6 +534,8 @@ Reduction MemoryLowering::ReduceStoreField(Node* node,
DCHECK_IMPLIES(V8_HEAP_SANDBOX_BOOL,
!access.type.Is(Type::ExternalPointer()) &&
!access.type.Is(Type::SandboxedExternalPointer()));
// CagedPointers are not currently stored by optimized code.
DCHECK(!access.type.Is(Type::CagedPointer()));
MachineType machine_type = access.machine_type;
Node* object = node->InputAt(0);
Node* value = node->InputAt(1);

View File

@ -242,6 +242,7 @@ Node* RepresentationChanger::GetRepresentationFor(
return node;
case MachineRepresentation::kCompressed:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCagedPointer:
case MachineRepresentation::kMapWord:
UNREACHABLE();
}
@ -1246,6 +1247,13 @@ Node* RepresentationChanger::GetWord64RepresentationFor(
jsgraph()->common()->DeadValue(MachineRepresentation::kWord64),
unreachable);
}
} else if (output_rep == MachineRepresentation::kCagedPointer) {
if (output_type.Is(Type::CagedPointer())) {
return node;
} else {
return TypeError(node, output_rep, output_type,
MachineRepresentation::kWord64);
}
} else {
return TypeError(node, output_rep, output_type,
MachineRepresentation::kWord64);

View File

@ -160,6 +160,7 @@ UseInfo TruncatingUseInfoFromRepresentation(MachineRepresentation rep) {
return UseInfo::Bool();
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
case MachineRepresentation::kCagedPointer:
case MachineRepresentation::kSimd128:
case MachineRepresentation::kNone:
break;
@ -1073,7 +1074,8 @@ class RepresentationSelector {
} else if (type.Is(Type::BigInt()) && use.IsUsedAsWord64()) {
return MachineRepresentation::kWord64;
} else if (type.Is(Type::ExternalPointer()) ||
type.Is(Type::SandboxedExternalPointer())) {
type.Is(Type::SandboxedExternalPointer()) ||
type.Is(Type::CagedPointer())) {
return MachineType::PointerRepresentation();
}
return MachineRepresentation::kTagged;

View File

@ -112,24 +112,25 @@ namespace compiler {
V(NaN, 1u << 12) \
V(Symbol, 1u << 13) \
V(InternalizedString, 1u << 14) \
V(OtherCallable, 1u << 16) \
V(OtherObject, 1u << 17) \
V(OtherUndetectable, 1u << 18) \
V(CallableProxy, 1u << 19) \
V(OtherProxy, 1u << 20) \
V(Function, 1u << 21) \
V(BoundFunction, 1u << 22) \
V(Hole, 1u << 23) \
V(OtherInternal, 1u << 24) \
V(ExternalPointer, 1u << 25) \
V(Array, 1u << 26) \
V(UnsignedBigInt63, 1u << 27) \
V(OtherUnsignedBigInt64, 1u << 28) \
V(NegativeBigInt63, 1u << 29) \
V(OtherBigInt, 1u << 30) \
V(OtherCallable, 1u << 15) \
V(OtherObject, 1u << 16) \
V(OtherUndetectable, 1u << 17) \
V(CallableProxy, 1u << 18) \
V(OtherProxy, 1u << 19) \
V(Function, 1u << 20) \
V(BoundFunction, 1u << 21) \
V(Hole, 1u << 22) \
V(OtherInternal, 1u << 23) \
V(ExternalPointer, 1u << 24) \
V(Array, 1u << 25) \
V(UnsignedBigInt63, 1u << 26) \
V(OtherUnsignedBigInt64, 1u << 27) \
V(NegativeBigInt63, 1u << 28) \
V(OtherBigInt, 1u << 29) \
/* TODO(v8:10391): Remove this type once all ExternalPointer usages are */ \
/* sandbox-ready. */ \
V(SandboxedExternalPointer, 1u << 31) \
V(SandboxedExternalPointer, 1u << 30) \
V(CagedPointer, 1u << 31) \
#define PROPER_BITSET_TYPE_LIST(V) \
V(None, 0u) \
@ -201,7 +202,8 @@ namespace compiler {
V(Unique, kBoolean | kUniqueName | kNull | \
kUndefined | kHole | kReceiver) \
V(Internal, kHole | kExternalPointer | \
kSandboxedExternalPointer | kOtherInternal) \
kSandboxedExternalPointer | kCagedPointer | \
kOtherInternal) \
V(NonInternal, kPrimitive | kReceiver) \
V(NonBigInt, kNonBigIntPrimitive | kReceiver) \
V(NonNumber, kBigInt | kUnique | kString | kInternal) \

View File

@ -181,6 +181,12 @@ struct MaybeBoolFlag {
#define V8_VIRTUAL_MEMORY_CAGE_BOOL false
#endif
#ifdef V8_CAGED_POINTERS
#define V8_CAGED_POINTERS_BOOL true
#else
#define V8_CAGED_POINTERS_BOOL false
#endif
// D8's MultiMappedAllocator is only available on Linux, and only if the virtual
// memory cage is not enabled.
#if V8_OS_LINUX && !V8_VIRTUAL_MEMORY_CAGE_BOOL

View File

@ -42,6 +42,7 @@
#include "src/objects/tagged-impl-inl.h"
#include "src/objects/tagged-index.h"
#include "src/objects/templates.h"
#include "src/security/caged-pointer-inl.h"
#include "src/security/external-pointer-inl.h"
// Has to be the last include (doesn't have include guards):
@ -634,6 +635,24 @@ MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
return value;
}
#ifdef V8_CAGED_POINTERS
// Read a CagedPointer field at |offset|, decoding it relative to |cage_base|.
// Only compiled when V8_CAGED_POINTERS is enabled.
Address Object::ReadCagedPointerField(size_t offset,
PtrComprCageBase cage_base) const {
return i::ReadCagedPointerField(field_address(offset), cage_base);
}
// Encode |value| relative to |cage_base| and write it to the field at
// |offset|.
void Object::WriteCagedPointerField(size_t offset, PtrComprCageBase cage_base,
Address value) {
i::WriteCagedPointerField(field_address(offset), cage_base, value);
}
// Convenience overload that derives the cage base from |isolate|.
void Object::WriteCagedPointerField(size_t offset, Isolate* isolate,
Address value) {
i::WriteCagedPointerField(field_address(offset), PtrComprCageBase(isolate),
value);
}
#endif // V8_CAGED_POINTERS
void Object::InitExternalPointerField(size_t offset, Isolate* isolate) {
i::InitExternalPointerField(field_address(offset), isolate);
}

View File

@ -698,6 +698,18 @@ class Object : public TaggedImpl<HeapObjectReferenceType::STRONG, Address> {
}
}
//
// CagedPointer field accessors.
//
#ifdef V8_CAGED_POINTERS
inline Address ReadCagedPointerField(size_t offset,
PtrComprCageBase cage_base) const;
inline void WriteCagedPointerField(size_t offset, PtrComprCageBase cage_base,
Address value);
inline void WriteCagedPointerField(size_t offset, Isolate* isolate,
Address value);
#endif // V8_CAGED_POINTERS
//
// ExternalPointer_t field accessors.
//

View File

@ -25,7 +25,6 @@ bitfield struct TurbofanTypeBits extends uint32 {
naN: bool: 1 bit;
symbol: bool: 1 bit;
internalized_string: bool: 1 bit;
_unused_padding_field_2: bool: 1 bit;
other_callable: bool: 1 bit;
other_object: bool: 1 bit;
other_undetectable: bool: 1 bit;
@ -42,6 +41,7 @@ bitfield struct TurbofanTypeBits extends uint32 {
negative_big_int_63: bool: 1 bit;
other_big_int: bool: 1 bit;
sandboxed_external_pointer: bool: 1 bit;
caged_pointer: bool: 1 bit;
}
@export

View File

@ -0,0 +1,48 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_SECURITY_CAGED_POINTER_INL_H_
#define V8_SECURITY_CAGED_POINTER_INL_H_
#include "include/v8-internal.h"
#include "src/execution/isolate.h"
#include "src/security/caged-pointer.h"
namespace v8 {
namespace internal {
#ifdef V8_CAGED_POINTERS
// Read and decode a caged pointer stored at |field_address|: the field holds
// the offset from the cage base, shifted left by kCagedPointerShift. Undo the
// shift and rebase against |cage_base| to recover the raw pointer.
V8_INLINE CagedPointer_t ReadCagedPointerField(Address field_address,
                                               PtrComprCageBase cage_base) {
  // Caged pointers are currently only used if the sandbox is enabled.
  DCHECK(V8_HEAP_SANDBOX_BOOL);
  const Address encoded = base::ReadUnalignedValue<Address>(field_address);
  return cage_base.address() + (encoded >> kCagedPointerShift);
}
// Encode |pointer| and store it at |field_address|: the value is written as
// its cage-relative offset, shifted left by kCagedPointerShift so that the
// offset recovered on decode is always smaller than the cage size.
V8_INLINE void WriteCagedPointerField(Address field_address,
                                      PtrComprCageBase cage_base,
                                      CagedPointer_t pointer) {
  // Caged pointers are currently only used if the sandbox is enabled.
  DCHECK(V8_HEAP_SANDBOX_BOOL);
  // The pointer must point into the virtual memory cage.
  DCHECK(GetProcessWideVirtualMemoryCage()->Contains(pointer));
  const Address cage_offset = pointer - cage_base.address();
  base::WriteUnalignedValue<Address>(field_address,
                                     cage_offset << kCagedPointerShift);
}
#endif // V8_CAGED_POINTERS
} // namespace internal
} // namespace v8
#endif // V8_SECURITY_CAGED_POINTER_INL_H_

View File

@ -0,0 +1,27 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_SECURITY_CAGED_POINTER_H_
#define V8_SECURITY_CAGED_POINTER_H_
#include "src/common/globals.h"
namespace v8 {
namespace internal {
#ifdef V8_CAGED_POINTERS
V8_INLINE CagedPointer_t ReadCagedPointerField(Address field_address,
PtrComprCageBase cage_base);
V8_INLINE void WriteCagedPointerField(Address field_address,
PtrComprCageBase cage_base,
CagedPointer_t value);
#endif // V8_CAGED_POINTERS
} // namespace internal
} // namespace v8
#endif // V8_SECURITY_CAGED_POINTER_H_

View File

@ -328,6 +328,7 @@ bool V8VirtualMemoryCage::Initialize(v8::PageAllocator* page_allocator,
page_allocator_ = page_allocator;
size_ = size;
end_ = base_ + size_;
reservation_size_ = reservation_size;
cage_page_allocator_ = std::make_unique<base::BoundedPageAllocator>(
@ -391,6 +392,7 @@ bool V8VirtualMemoryCage::InitializeAsFakeCage(
base_ = reservation_base_;
size_ = size;
end_ = base_ + size_;
reservation_size_ = size_to_reserve;
initialized_ = true;
is_fake_cage_ = true;
@ -407,6 +409,7 @@ void V8VirtualMemoryCage::TearDown() {
CHECK(page_allocator_->FreePages(reinterpret_cast<void*>(reservation_base_),
reservation_size_));
base_ = kNullAddress;
end_ = kNullAddress;
size_ = 0;
reservation_base_ = kNullAddress;
reservation_size_ = 0;

View File

@ -73,8 +73,13 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage {
bool is_fake_cage() const { return is_fake_cage_; }
Address base() const { return base_; }
Address end() const { return end_; }
size_t size() const { return size_; }
Address base_address() const { return reinterpret_cast<Address>(&base_); }
Address end_address() const { return reinterpret_cast<Address>(&end_); }
Address size_address() const { return reinterpret_cast<Address>(&size_); }
v8::PageAllocator* page_allocator() const {
return cage_page_allocator_.get();
}
@ -110,6 +115,7 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage {
size_t size_to_reserve);
Address base_ = kNullAddress;
Address end_ = kNullAddress;
size_t size_ = 0;
// Base and size of the virtual memory reservation backing this cage. These