From afd1554963d2bcb5de8adaf3a5bda10a8223bb07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Samuel=20Gro=C3=9F?= Date: Fri, 29 Oct 2021 14:51:58 +0200 Subject: [PATCH] Introduce CagedPointer MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit A CagedPointer is guaranteed to point into the Virtual Memory Cage and will for example be used for ArrayBuffer backing stores when the heap sandbox is enabled. In the current implementation, CagedPointers are stored as offsets from the cage base, shifted to the left. Because the cage base address is usually available in a register, accessing a CagedPointer is very efficient, requiring only an additional shift and add operation. Bug: chromium:1218005 Change-Id: Ifc8c088e3862400672051a8c52840514dee2911f Cq-Include-Trybots: luci.v8.try:v8_linux64_heap_sandbox_dbg_ng,v8_linux_arm64_sim_heap_sandbox_dbg_ng Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3123417 Reviewed-by: Toon Verwaest Reviewed-by: Igor Sheludko Reviewed-by: Jakob Gruber Commit-Queue: Samuel Groß Cr-Commit-Position: refs/heads/main@{#77614} --- BUILD.bazel | 2 + BUILD.gn | 2 + include/v8-internal.h | 15 +++++- include/v8config.h | 6 +++ src/codegen/arm64/macro-assembler-arm64.cc | 37 ++++++++++++++ src/codegen/arm64/macro-assembler-arm64.h | 13 +++++ src/codegen/code-stub-assembler.cc | 26 ++++++++++ src/codegen/code-stub-assembler.h | 27 +++++++++++ src/codegen/external-reference.cc | 10 ++++ src/codegen/external-reference.h | 9 ++++ src/codegen/machine-type.cc | 2 + src/codegen/machine-type.h | 9 ++++ src/codegen/tnode.h | 5 ++ src/codegen/x64/macro-assembler-x64.cc | 37 ++++++++++++++ src/codegen/x64/macro-assembler-x64.h | 11 +++++ .../backend/arm/instruction-selector-arm.cc | 2 + .../backend/arm64/code-generator-arm64.cc | 6 +++ .../backend/arm64/instruction-codes-arm64.h | 2 + .../arm64/instruction-scheduler-arm64.cc | 2 + .../arm64/instruction-selector-arm64.cc | 16 +++++++ 
.../backend/ia32/instruction-selector-ia32.cc | 2 + src/compiler/backend/instruction.cc | 4 ++ src/compiler/backend/instruction.h | 1 + src/compiler/backend/register-allocation.h | 1 + .../backend/x64/code-generator-x64.cc | 47 ++++++++++++++++-- .../backend/x64/instruction-codes-x64.h | 2 + .../backend/x64/instruction-scheduler-x64.cc | 2 + .../backend/x64/instruction-selector-x64.cc | 13 +++++ .../js-native-context-specialization.cc | 1 + src/compiler/load-elimination.cc | 3 ++ src/compiler/machine-graph-verifier.cc | 1 + src/compiler/machine-operator.cc | 2 + src/compiler/memory-lowering.cc | 2 + src/compiler/representation-change.cc | 8 ++++ src/compiler/simplified-lowering.cc | 4 +- src/compiler/types.h | 36 +++++++------- src/flags/flag-definitions.h | 6 +++ src/objects/objects-inl.h | 19 ++++++++ src/objects/objects.h | 12 +++++ src/objects/turbofan-types.tq | 2 +- src/security/caged-pointer-inl.h | 48 +++++++++++++++++++ src/security/caged-pointer.h | 27 +++++++++++ src/security/vm-cage.cc | 3 ++ src/security/vm-cage.h | 6 +++ 44 files changed, 467 insertions(+), 24 deletions(-) create mode 100644 src/security/caged-pointer-inl.h create mode 100644 src/security/caged-pointer.h diff --git a/BUILD.bazel b/BUILD.bazel index 8c58459221..f7e773cee1 100644 --- a/BUILD.bazel +++ b/BUILD.bazel @@ -1881,6 +1881,8 @@ filegroup( "src/runtime/runtime.h", "src/security/external-pointer-table.cc", "src/security/vm-cage.cc", + "src/security/caged-pointer-inl.h", + "src/security/caged-pointer.h", "src/security/external-pointer-inl.h", "src/security/external-pointer-table.h", "src/security/external-pointer.h", diff --git a/BUILD.gn b/BUILD.gn index c9f37a9551..e6f5bb2e51 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -3325,6 +3325,8 @@ v8_header_set("v8_internal_headers") { "src/roots/roots.h", "src/runtime/runtime-utils.h", "src/runtime/runtime.h", + "src/security/caged-pointer-inl.h", + "src/security/caged-pointer.h", "src/security/external-pointer-inl.h", 
"src/security/external-pointer-table.h", "src/security/external-pointer.h", diff --git a/include/v8-internal.h b/include/v8-internal.h index c5b2d4cfdc..f0531bcff6 100644 --- a/include/v8-internal.h +++ b/include/v8-internal.h @@ -500,7 +500,8 @@ constexpr bool VirtualMemoryCageIsEnabled() { #define TB (1ULL << 40) // Size of the virtual memory cage, excluding the guard regions surrounding it. -constexpr size_t kVirtualMemoryCageSize = 1ULL * TB; +constexpr size_t kVirtualMemoryCageSizeLog2 = 40; // 1 TB +constexpr size_t kVirtualMemoryCageSize = 1ULL << kVirtualMemoryCageSizeLog2; // Required alignment of the virtual memory cage. For simplicity, we require the // size of the guard regions to be a multiple of this, so that this specifies @@ -510,6 +511,18 @@ constexpr size_t kVirtualMemoryCageSize = 1ULL * TB; constexpr size_t kVirtualMemoryCageAlignment = Internals::kPtrComprCageBaseAlignment; +#ifdef V8_CAGED_POINTERS +// CagedPointers are guaranteed to point into the virtual memory cage. This is +// achieved by storing them as offset from the cage base rather than as raw +// pointers. +using CagedPointer_t = Address; + +// For efficiency, the offset is stored shifted to the left, so that +// it is guaranteed that the offset is smaller than the cage size after +// shifting it to the right again. This constant specifies the shift amount. +constexpr uint64_t kCagedPointerShift = 64 - kVirtualMemoryCageSizeLog2; +#endif + // Size of the guard regions surrounding the virtual memory cage. This assumes a // worst-case scenario of a 32-bit unsigned index being used to access an array // of 64-bit values. diff --git a/include/v8config.h b/include/v8config.h index 4923183825..ecb992822c 100644 --- a/include/v8config.h +++ b/include/v8config.h @@ -560,6 +560,12 @@ V8 shared library set USING_V8_SHARED. #define V8_VIRTUAL_MEMORY_CAGE_IS_AVAILABLE #endif +// CagedPointers are currently only used if the heap sandbox is enabled. 
+// In the future, they will be enabled when the virtual memory cage is enabled. +#if defined(V8_HEAP_SANDBOX) +#define V8_CAGED_POINTERS +#endif + // clang-format on #undef V8_HAS_CPP_ATTRIBUTE diff --git a/src/codegen/arm64/macro-assembler-arm64.cc b/src/codegen/arm64/macro-assembler-arm64.cc index e8bb573e42..bcf2e4574a 100644 --- a/src/codegen/arm64/macro-assembler-arm64.cc +++ b/src/codegen/arm64/macro-assembler-arm64.cc @@ -3072,6 +3072,43 @@ void MacroAssembler::RecordWriteField(Register object, int offset, Bind(&done); } +void TurboAssembler::EncodeCagedPointer(const Register& value) { + ASM_CODE_COMMENT(this); +#ifdef V8_CAGED_POINTERS + Sub(value, value, kPtrComprCageBaseRegister); + Mov(value, Operand(value, LSL, kCagedPointerShift)); +#else + UNREACHABLE(); +#endif +} + +void TurboAssembler::DecodeCagedPointer(const Register& value) { + ASM_CODE_COMMENT(this); +#ifdef V8_CAGED_POINTERS + Add(value, kPtrComprCageBaseRegister, + Operand(value, LSR, kCagedPointerShift)); +#else + UNREACHABLE(); +#endif +} + +void TurboAssembler::LoadCagedPointerField(const Register& destination, + const MemOperand& field_operand) { + ASM_CODE_COMMENT(this); + Ldr(destination, field_operand); + DecodeCagedPointer(destination); +} + +void TurboAssembler::StoreCagedPointerField( + const Register& value, const MemOperand& dst_field_operand) { + ASM_CODE_COMMENT(this); + UseScratchRegisterScope temps(this); + Register scratch = temps.AcquireX(); + Mov(scratch, value); + EncodeCagedPointer(scratch); + Str(scratch, dst_field_operand); +} + void TurboAssembler::LoadExternalPointerField(Register destination, MemOperand field_operand, ExternalPointerTag tag, diff --git a/src/codegen/arm64/macro-assembler-arm64.h b/src/codegen/arm64/macro-assembler-arm64.h index 893791fbdb..165d702c31 100644 --- a/src/codegen/arm64/macro-assembler-arm64.h +++ b/src/codegen/arm64/macro-assembler-arm64.h @@ -1441,6 +1441,19 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase { // 
--------------------------------------------------------------------------- // V8 Heap sandbox support + // Transform a CagedPointer from/to its encoded form, which is used when the + // pointer is stored on the heap and ensures that the pointer will always + // point into the virtual memory cage. + void EncodeCagedPointer(const Register& value); + void DecodeCagedPointer(const Register& value); + + // Load and decode a CagedPointer from the heap. + void LoadCagedPointerField(const Register& destination, + const MemOperand& field_operand); + // Encode and store a CagedPointer to the heap. + void StoreCagedPointerField(const Register& value, + const MemOperand& dst_field_operand); + // Loads a field containing off-heap pointer and does necessary decoding // if V8 heap sandbox is enabled. void LoadExternalPointerField(Register destination, MemOperand field_operand, diff --git a/src/codegen/code-stub-assembler.cc b/src/codegen/code-stub-assembler.cc index fd9ed040e5..4a9c06bdd8 100644 --- a/src/codegen/code-stub-assembler.cc +++ b/src/codegen/code-stub-assembler.cc @@ -1539,6 +1539,32 @@ void CodeStubAssembler::BranchIfToBooleanIsTrue(TNode value, } } +#ifdef V8_CAGED_POINTERS + +TNode CodeStubAssembler::LoadCagedPointerFromObject( + TNode object, TNode field_offset) { + return LoadObjectField(object, field_offset); +} + +void CodeStubAssembler::StoreCagedPointerToObject(TNode object, + TNode offset, + TNode pointer) { +#ifdef DEBUG + // Verify pointer points into the cage. 
+ TNode cage_base_address = + ExternalConstant(ExternalReference::virtual_memory_cage_base_address()); + TNode cage_end_address = + ExternalConstant(ExternalReference::virtual_memory_cage_end_address()); + TNode cage_base = Load(cage_base_address); + TNode cage_end = Load(cage_end_address); + CSA_CHECK(this, UintPtrGreaterThanOrEqual(pointer, cage_base)); + CSA_CHECK(this, UintPtrLessThan(pointer, cage_end)); +#endif + StoreObjectFieldNoWriteBarrier(object, offset, pointer); +} + +#endif // V8_CAGED_POINTERS + TNode CodeStubAssembler::ChangeUint32ToExternalPointer( TNode value) { STATIC_ASSERT(kExternalPointerSize == kSystemPointerSize); diff --git a/src/codegen/code-stub-assembler.h b/src/codegen/code-stub-assembler.h index 8ddea007ab..4d16af8a3d 100644 --- a/src/codegen/code-stub-assembler.h +++ b/src/codegen/code-stub-assembler.h @@ -1042,6 +1042,33 @@ class V8_EXPORT_PRIVATE CodeStubAssembler // Works only with V8_ENABLE_FORCE_SLOW_PATH compile time flag. Nop otherwise. void GotoIfForceSlowPath(Label* if_true); +#ifdef V8_CAGED_POINTERS + + // + // Caged pointer related functionality. + // + + // Load a caged pointer value from an object. + TNode LoadCagedPointerFromObject(TNode object, + int offset) { + return LoadCagedPointerFromObject(object, IntPtrConstant(offset)); + } + + TNode LoadCagedPointerFromObject(TNode object, + TNode offset); + + // Store a caged pointer value to an object. + void StoreCagedPointerToObject(TNode object, int offset, + TNode pointer) { + StoreCagedPointerToObject(object, IntPtrConstant(offset), pointer); + } + + void StoreCagedPointerToObject(TNode object, + TNode offset, + TNode pointer); + +#endif // V8_CAGED_POINTERS + // // ExternalPointerT-related functionality.
// diff --git a/src/codegen/external-reference.cc b/src/codegen/external-reference.cc index 46882693f1..1981e29911 100644 --- a/src/codegen/external-reference.cc +++ b/src/codegen/external-reference.cc @@ -207,6 +207,16 @@ ExternalReference ExternalReference::handle_scope_implementer_address( return ExternalReference(isolate->handle_scope_implementer_address()); } +#ifdef V8_VIRTUAL_MEMORY_CAGE +ExternalReference ExternalReference::virtual_memory_cage_base_address() { + return ExternalReference(GetProcessWideVirtualMemoryCage()->base_address()); +} + +ExternalReference ExternalReference::virtual_memory_cage_end_address() { + return ExternalReference(GetProcessWideVirtualMemoryCage()->end_address()); +} +#endif + #ifdef V8_HEAP_SANDBOX ExternalReference ExternalReference::external_pointer_table_address( Isolate* isolate) { diff --git a/src/codegen/external-reference.h b/src/codegen/external-reference.h index 1497b7342d..d7cffa966b 100644 --- a/src/codegen/external-reference.h +++ b/src/codegen/external-reference.h @@ -298,6 +298,7 @@ class StatsCounter; V(re_experimental_match_for_call_from_js, \ "ExperimentalRegExp::MatchForCallFromJs") \ EXTERNAL_REFERENCE_LIST_INTL(V) \ + EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V) \ EXTERNAL_REFERENCE_LIST_HEAP_SANDBOX(V) #ifdef V8_INTL_SUPPORT #define EXTERNAL_REFERENCE_LIST_INTL(V) \ @@ -307,6 +308,14 @@ class StatsCounter; #define EXTERNAL_REFERENCE_LIST_INTL(V) #endif // V8_INTL_SUPPORT +#ifdef V8_VIRTUAL_MEMORY_CAGE +#define EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V) \ + V(virtual_memory_cage_base_address, "V8VirtualMemoryCage::base()") \ + V(virtual_memory_cage_end_address, "V8VirtualMemoryCage::end()") +#else +#define EXTERNAL_REFERENCE_LIST_VIRTUAL_MEMORY_CAGE(V) +#endif // V8_VIRTUAL_MEMORY_CAGE + #ifdef V8_HEAP_SANDBOX #define EXTERNAL_REFERENCE_LIST_HEAP_SANDBOX(V) \ V(external_pointer_table_grow_table_function, \ diff --git a/src/codegen/machine-type.cc b/src/codegen/machine-type.cc index 
064e17d2f0..5679563bd1 100644 --- a/src/codegen/machine-type.cc +++ b/src/codegen/machine-type.cc @@ -57,6 +57,8 @@ const char* MachineReprToString(MachineRepresentation rep) { return "kRepCompressed"; case MachineRepresentation::kMapWord: return "kRepMapWord"; + case MachineRepresentation::kCagedPointer: + return "kRepCagedPointer"; } UNREACHABLE(); } diff --git a/src/codegen/machine-type.h b/src/codegen/machine-type.h index 19ec3e86b1..b3f8ef56b5 100644 --- a/src/codegen/machine-type.h +++ b/src/codegen/machine-type.h @@ -40,6 +40,7 @@ enum class MachineRepresentation : uint8_t { kTagged, // (uncompressed) Object (Smi or HeapObject) kCompressedPointer, // (compressed) HeapObject kCompressed, // (compressed) Object (Smi or HeapObject) + kCagedPointer, // Guaranteed to point into the virtual memory cage. // FP and SIMD representations must be last, and in order of increasing size. kFloat32, kFloat64, @@ -222,6 +223,10 @@ class MachineType { return MachineType(MachineRepresentation::kCompressed, MachineSemantic::kAny); } + constexpr static MachineType CagedPointer() { + return MachineType(MachineRepresentation::kCagedPointer, + MachineSemantic::kNone); + } constexpr static MachineType Bool() { return MachineType(MachineRepresentation::kBit, MachineSemantic::kBool); } @@ -260,6 +265,8 @@ class MachineType { return MachineType::AnyCompressed(); case MachineRepresentation::kCompressedPointer: return MachineType::CompressedPointer(); + case MachineRepresentation::kCagedPointer: + return MachineType::CagedPointer(); default: UNREACHABLE(); } @@ -353,6 +360,8 @@ V8_EXPORT_PRIVATE inline constexpr int ElementSizeLog2Of( case MachineRepresentation::kCompressedPointer: case MachineRepresentation::kCompressed: return kTaggedSizeLog2; + case MachineRepresentation::kCagedPointer: + return kSystemPointerSizeLog2; default: UNREACHABLE(); } diff --git a/src/codegen/tnode.h b/src/codegen/tnode.h index 0687bb638d..4dfb9a1741 100644 --- a/src/codegen/tnode.h +++ b/src/codegen/tnode.h 
@@ -35,6 +35,11 @@ struct RawPtrT : WordT { static constexpr MachineType kMachineType = MachineType::Pointer(); }; +// A RawPtrT that is guaranteed to point into the virtual memory cage. +struct CagedPtrT : WordT { + static constexpr MachineType kMachineType = MachineType::CagedPointer(); +}; + template struct RawPtr : RawPtrT {}; diff --git a/src/codegen/x64/macro-assembler-x64.cc b/src/codegen/x64/macro-assembler-x64.cc index ebbd859b91..c8c5903410 100644 --- a/src/codegen/x64/macro-assembler-x64.cc +++ b/src/codegen/x64/macro-assembler-x64.cc @@ -376,6 +376,43 @@ void MacroAssembler::RecordWriteField(Register object, int offset, } } +void TurboAssembler::EncodeCagedPointer(Register value) { + ASM_CODE_COMMENT(this); +#ifdef V8_CAGED_POINTERS + subq(value, kPtrComprCageBaseRegister); + shlq(value, Immediate(kCagedPointerShift)); +#else + UNREACHABLE(); +#endif +} + +void TurboAssembler::DecodeCagedPointer(Register value) { + ASM_CODE_COMMENT(this); +#ifdef V8_CAGED_POINTERS + shrq(value, Immediate(kCagedPointerShift)); + addq(value, kPtrComprCageBaseRegister); +#else + UNREACHABLE(); +#endif +} + +void TurboAssembler::LoadCagedPointerField(Register destination, + Operand field_operand) { + ASM_CODE_COMMENT(this); + movq(destination, field_operand); + DecodeCagedPointer(destination); +} + +void TurboAssembler::StoreCagedPointerField(Operand dst_field_operand, + Register value) { + ASM_CODE_COMMENT(this); + DCHECK(!AreAliased(value, kScratchRegister)); + DCHECK(!dst_field_operand.AddressUsesRegister(kScratchRegister)); + movq(kScratchRegister, value); + EncodeCagedPointer(kScratchRegister); + movq(dst_field_operand, kScratchRegister); +} + void TurboAssembler::LoadExternalPointerField( Register destination, Operand field_operand, ExternalPointerTag tag, Register scratch, IsolateRootLocation isolateRootLocation) { diff --git a/src/codegen/x64/macro-assembler-x64.h b/src/codegen/x64/macro-assembler-x64.h index c2e0626aa7..da57d4629a 100644 --- 
a/src/codegen/x64/macro-assembler-x64.h +++ b/src/codegen/x64/macro-assembler-x64.h @@ -604,6 +604,17 @@ class V8_EXPORT_PRIVATE TurboAssembler // --------------------------------------------------------------------------- // V8 Heap sandbox support + // Transform a CagedPointer from/to its encoded form, which is used when the + // pointer is stored on the heap and ensures that the pointer will always + // point into the virtual memory cage. + void EncodeCagedPointer(Register value); + void DecodeCagedPointer(Register value); + + // Load and decode a CagedPointer from the heap. + void LoadCagedPointerField(Register destination, Operand field_operand); + // Encode and store a CagedPointer to the heap. + void StoreCagedPointerField(Operand dst_field_operand, Register value); + enum class IsolateRootLocation { kInScratchRegister, kInRootRegister }; // Loads a field containing off-heap pointer and does necessary decoding // if V8 heap sandbox is enabled. diff --git a/src/compiler/backend/arm/instruction-selector-arm.cc b/src/compiler/backend/arm/instruction-selector-arm.cc index d0511ae62b..3ad4e720c4 100644 --- a/src/compiler/backend/arm/instruction-selector-arm.cc +++ b/src/compiler/backend/arm/instruction-selector-arm.cc @@ -626,6 +626,7 @@ void InstructionSelector::VisitLoad(Node* node) { break; case MachineRepresentation::kCompressedPointer: // Fall through. case MachineRepresentation::kCompressed: // Fall through. + case MachineRepresentation::kCagedPointer: // Fall through. case MachineRepresentation::kWord64: // Fall through. case MachineRepresentation::kMapWord: // Fall through. case MachineRepresentation::kNone: @@ -663,6 +664,7 @@ ArchOpcode GetStoreOpcode(MachineRepresentation rep) { return kArmVst1S128; case MachineRepresentation::kCompressedPointer: // Fall through. case MachineRepresentation::kCompressed: // Fall through. + case MachineRepresentation::kCagedPointer: // Fall through. case MachineRepresentation::kWord64: // Fall through. 
case MachineRepresentation::kMapWord: // Fall through. case MachineRepresentation::kNone: diff --git a/src/compiler/backend/arm64/code-generator-arm64.cc b/src/compiler/backend/arm64/code-generator-arm64.cc index 894116a35f..008ad5d82e 100644 --- a/src/compiler/backend/arm64/code-generator-arm64.cc +++ b/src/compiler/backend/arm64/code-generator-arm64.cc @@ -1894,6 +1894,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ AtomicDecompressAnyTagged(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1), i.TempRegister(0)); break; + case kArm64LdrDecodeCagedPointer: + __ LoadCagedPointerField(i.OutputRegister(), i.MemoryOperand()); + break; case kArm64Str: EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset()); __ Str(i.InputOrZeroRegister64(0), i.MemoryOperand(1)); @@ -1907,6 +1910,9 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( __ AtomicStoreTaggedField(i.InputRegister(2), i.InputRegister(0), i.InputRegister(1), i.TempRegister(0)); break; + case kArm64StrEncodeCagedPointer: + __ StoreCagedPointerField(i.InputOrZeroRegister64(0), i.MemoryOperand(1)); + break; case kArm64LdrS: EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset()); __ Ldr(i.OutputDoubleRegister().S(), i.MemoryOperand()); diff --git a/src/compiler/backend/arm64/instruction-codes-arm64.h b/src/compiler/backend/arm64/instruction-codes-arm64.h index 36d5d848e8..46d5314f4f 100644 --- a/src/compiler/backend/arm64/instruction-codes-arm64.h +++ b/src/compiler/backend/arm64/instruction-codes-arm64.h @@ -201,6 +201,8 @@ namespace compiler { V(Arm64LdarDecompressAnyTagged) \ V(Arm64StrCompressTagged) \ V(Arm64StlrCompressTagged) \ + V(Arm64LdrDecodeCagedPointer) \ + V(Arm64StrEncodeCagedPointer) \ V(Arm64DmbIsh) \ V(Arm64DsbIsb) \ V(Arm64Sxtl) \ diff --git a/src/compiler/backend/arm64/instruction-scheduler-arm64.cc b/src/compiler/backend/arm64/instruction-scheduler-arm64.cc index 5425722984..3cffa51e90 100644 --- 
a/src/compiler/backend/arm64/instruction-scheduler-arm64.cc +++ b/src/compiler/backend/arm64/instruction-scheduler-arm64.cc @@ -317,6 +317,7 @@ int InstructionScheduler::GetTargetInstructionFlags( case kArm64LdarDecompressTaggedSigned: case kArm64LdarDecompressTaggedPointer: case kArm64LdarDecompressAnyTagged: + case kArm64LdrDecodeCagedPointer: case kArm64Peek: case kArm64LoadSplat: case kArm64LoadLane: @@ -340,6 +341,7 @@ int InstructionScheduler::GetTargetInstructionFlags( case kArm64Str: case kArm64StrCompressTagged: case kArm64StlrCompressTagged: + case kArm64StrEncodeCagedPointer: case kArm64DmbIsh: case kArm64DsbIsb: case kArm64StoreLane: diff --git a/src/compiler/backend/arm64/instruction-selector-arm64.cc b/src/compiler/backend/arm64/instruction-selector-arm64.cc index 9e4ca57b1f..31aafbcb97 100644 --- a/src/compiler/backend/arm64/instruction-selector-arm64.cc +++ b/src/compiler/backend/arm64/instruction-selector-arm64.cc @@ -839,6 +839,14 @@ void InstructionSelector::VisitLoad(Node* node) { opcode = kArm64Ldr; immediate_mode = kLoadStoreImm64; break; + case MachineRepresentation::kCagedPointer: +#ifdef V8_CAGED_POINTERS + opcode = kArm64LdrDecodeCagedPointer; + immediate_mode = kLoadStoreImm64; + break; +#else + UNREACHABLE(); +#endif case MachineRepresentation::kSimd128: opcode = kArm64LdrQ; immediate_mode = kNoImmediate; @@ -939,6 +947,14 @@ void InstructionSelector::VisitStore(Node* node) { immediate_mode = COMPRESS_POINTERS_BOOL ? 
kLoadStoreImm32 : kLoadStoreImm64; break; + case MachineRepresentation::kCagedPointer: +#ifdef V8_CAGED_POINTERS + opcode = kArm64StrEncodeCagedPointer; + immediate_mode = kLoadStoreImm64; + break; +#else + UNREACHABLE(); +#endif case MachineRepresentation::kWord64: opcode = kArm64Str; immediate_mode = kLoadStoreImm64; diff --git a/src/compiler/backend/ia32/instruction-selector-ia32.cc b/src/compiler/backend/ia32/instruction-selector-ia32.cc index 8c2b58564a..72a95ccf28 100644 --- a/src/compiler/backend/ia32/instruction-selector-ia32.cc +++ b/src/compiler/backend/ia32/instruction-selector-ia32.cc @@ -276,6 +276,7 @@ ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) { break; case MachineRepresentation::kCompressedPointer: // Fall through. case MachineRepresentation::kCompressed: // Fall through. + case MachineRepresentation::kCagedPointer: // Fall through. case MachineRepresentation::kWord64: // Fall through. case MachineRepresentation::kMapWord: // Fall through. case MachineRepresentation::kNone: @@ -619,6 +620,7 @@ ArchOpcode GetStoreOpcode(MachineRepresentation rep) { return kIA32Movdqu; case MachineRepresentation::kCompressedPointer: // Fall through. case MachineRepresentation::kCompressed: // Fall through. + case MachineRepresentation::kCagedPointer: // Fall through. case MachineRepresentation::kWord64: // Fall through. case MachineRepresentation::kMapWord: // Fall through. 
case MachineRepresentation::kNone: diff --git a/src/compiler/backend/instruction.cc b/src/compiler/backend/instruction.cc index 7af0bc223f..7167ef75eb 100644 --- a/src/compiler/backend/instruction.cc +++ b/src/compiler/backend/instruction.cc @@ -259,6 +259,9 @@ std::ostream& operator<<(std::ostream& os, const InstructionOperand& op) { case MachineRepresentation::kCompressed: os << "|c"; break; + case MachineRepresentation::kCagedPointer: + os << "|cg"; + break; case MachineRepresentation::kMapWord: UNREACHABLE(); } @@ -928,6 +931,7 @@ static MachineRepresentation FilterRepresentation(MachineRepresentation rep) { case MachineRepresentation::kSimd128: case MachineRepresentation::kCompressedPointer: case MachineRepresentation::kCompressed: + case MachineRepresentation::kCagedPointer: return rep; case MachineRepresentation::kNone: case MachineRepresentation::kMapWord: diff --git a/src/compiler/backend/instruction.h b/src/compiler/backend/instruction.h index 7372a5160d..7000469549 100644 --- a/src/compiler/backend/instruction.h +++ b/src/compiler/backend/instruction.h @@ -553,6 +553,7 @@ class LocationOperand : public InstructionOperand { case MachineRepresentation::kTagged: case MachineRepresentation::kCompressedPointer: case MachineRepresentation::kCompressed: + case MachineRepresentation::kCagedPointer: return true; case MachineRepresentation::kBit: case MachineRepresentation::kWord8: diff --git a/src/compiler/backend/register-allocation.h b/src/compiler/backend/register-allocation.h index 37173da1fc..11a4a5b964 100644 --- a/src/compiler/backend/register-allocation.h +++ b/src/compiler/backend/register-allocation.h @@ -51,6 +51,7 @@ inline int ByteWidthForStackSlot(MachineRepresentation rep) { case MachineRepresentation::kWord16: case MachineRepresentation::kWord32: case MachineRepresentation::kFloat32: + case MachineRepresentation::kCagedPointer: return kSystemPointerSize; case MachineRepresentation::kTaggedSigned: case MachineRepresentation::kTaggedPointer: diff 
--git a/src/compiler/backend/x64/code-generator-x64.cc b/src/compiler/backend/x64/code-generator-x64.cc index b3e729c2a9..ffbc37806e 100644 --- a/src/compiler/backend/x64/code-generator-x64.cc +++ b/src/compiler/backend/x64/code-generator-x64.cc @@ -344,6 +344,9 @@ void EmitStore(TurboAssembler* tasm, Operand operand, Register value, case MachineRepresentation::kTagged: tasm->StoreTaggedField(operand, value); break; + case MachineRepresentation::kCagedPointer: + tasm->StoreCagedPointerField(operand, value); + break; default: UNREACHABLE(); } @@ -509,19 +512,33 @@ void EmitTSANStoreOOL(Zone* zone, CodeGenerator* codegen, TurboAssembler* tasm, template Register GetTSANValueRegister(TurboAssembler* tasm, Register value, - X64OperandConverter& i) { + X64OperandConverter& i, + MachineRepresentation rep) { + if (rep == MachineRepresentation::kCagedPointer) { + // CagedPointers need to be encoded. + Register value_reg = i.TempRegister(1); + tasm->movq(value_reg, value); + tasm->EncodeCagedPointer(value_reg); + return value_reg; + } return value; } template Register GetTSANValueRegister(TurboAssembler* tasm, Immediate value, - X64OperandConverter& i); + X64OperandConverter& i, + MachineRepresentation rep); template <> Register GetTSANValueRegister( - TurboAssembler* tasm, Immediate value, X64OperandConverter& i) { + TurboAssembler* tasm, Immediate value, X64OperandConverter& i, + MachineRepresentation rep) { Register value_reg = i.TempRegister(1); tasm->movq(value_reg, value); + if (rep == MachineRepresentation::kCagedPointer) { + // CagedPointers need to be encoded. 
+ tasm->EncodeCagedPointer(value_reg); + } return value_reg; } @@ -539,7 +556,7 @@ void EmitTSANAwareStore(Zone* zone, CodeGenerator* codegen, int size = ElementSizeInBytes(rep); EmitMemoryProbeForTrapHandlerIfNeeded(tasm, i.TempRegister(0), operand, stub_call_mode, size); - Register value_reg = GetTSANValueRegister(tasm, value, i); + Register value_reg = GetTSANValueRegister(tasm, value, i, rep); EmitTSANStoreOOL(zone, codegen, tasm, operand, value_reg, i, stub_call_mode, size, order); } else { @@ -2369,6 +2386,28 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( } break; } + case kX64MovqDecodeCagedPointer: { + CHECK(instr->HasOutput()); + Operand address(i.MemoryOperand()); + Register dst = i.OutputRegister(); + __ movq(dst, address); + __ DecodeCagedPointer(dst); + EmitTSANRelaxedLoadOOLIfNeeded(zone(), this, tasm(), address, i, + DetermineStubCallMode(), + kSystemPointerSize); + break; + } + case kX64MovqEncodeCagedPointer: { + CHECK(!instr->HasOutput()); + size_t index = 0; + Operand operand = i.MemoryOperand(&index); + CHECK(!HasImmediateInput(instr, index)); + Register value(i.InputRegister(index)); + EmitTSANAwareStore( + zone(), this, tasm(), operand, value, i, DetermineStubCallMode(), + MachineRepresentation::kCagedPointer); + break; + } case kX64Movq: EmitOOLTrapIfNeeded(zone(), this, opcode, instr, __ pc_offset()); if (instr->HasOutput()) { diff --git a/src/compiler/backend/x64/instruction-codes-x64.h b/src/compiler/backend/x64/instruction-codes-x64.h index bee9e92ed7..bf2c9b00c7 100644 --- a/src/compiler/backend/x64/instruction-codes-x64.h +++ b/src/compiler/backend/x64/instruction-codes-x64.h @@ -163,6 +163,8 @@ namespace compiler { V(X64MovqDecompressTaggedPointer) \ V(X64MovqDecompressAnyTagged) \ V(X64MovqCompressTagged) \ + V(X64MovqEncodeCagedPointer) \ + V(X64MovqDecodeCagedPointer) \ V(X64BitcastFI) \ V(X64BitcastDL) \ V(X64BitcastIF) \ diff --git a/src/compiler/backend/x64/instruction-scheduler-x64.cc 
b/src/compiler/backend/x64/instruction-scheduler-x64.cc index 87fea7ea01..a0a972b4e5 100644 --- a/src/compiler/backend/x64/instruction-scheduler-x64.cc +++ b/src/compiler/backend/x64/instruction-scheduler-x64.cc @@ -396,6 +396,8 @@ int InstructionScheduler::GetTargetInstructionFlags( case kX64MovqDecompressTaggedPointer: case kX64MovqDecompressAnyTagged: case kX64MovqCompressTagged: + case kX64MovqDecodeCagedPointer: + case kX64MovqEncodeCagedPointer: case kX64Movq: case kX64Movsd: case kX64Movss: diff --git a/src/compiler/backend/x64/instruction-selector-x64.cc b/src/compiler/backend/x64/instruction-selector-x64.cc index 672122bb64..fae33735cc 100644 --- a/src/compiler/backend/x64/instruction-selector-x64.cc +++ b/src/compiler/backend/x64/instruction-selector-x64.cc @@ -297,6 +297,13 @@ ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) { case MachineRepresentation::kWord64: opcode = kX64Movq; break; + case MachineRepresentation::kCagedPointer: +#ifdef V8_CAGED_POINTERS + opcode = kX64MovqDecodeCagedPointer; + break; +#else + UNREACHABLE(); +#endif case MachineRepresentation::kSimd128: opcode = kX64Movdqu; break; @@ -333,6 +340,12 @@ ArchOpcode GetStoreOpcode(StoreRepresentation store_rep) { return kX64MovqCompressTagged; case MachineRepresentation::kWord64: return kX64Movq; + case MachineRepresentation::kCagedPointer: +#ifdef V8_CAGED_POINTERS + return kX64MovqEncodeCagedPointer; +#else + UNREACHABLE(); +#endif case MachineRepresentation::kSimd128: return kX64Movdqu; case MachineRepresentation::kNone: // Fall through. 
diff --git a/src/compiler/js-native-context-specialization.cc b/src/compiler/js-native-context-specialization.cc index a61d2028dd..0c9f057995 100644 --- a/src/compiler/js-native-context-specialization.cc +++ b/src/compiler/js-native-context-specialization.cc @@ -2556,6 +2556,7 @@ JSNativeContextSpecialization::BuildPropertyStore( case MachineRepresentation::kBit: case MachineRepresentation::kCompressedPointer: case MachineRepresentation::kCompressed: + case MachineRepresentation::kCagedPointer: case MachineRepresentation::kWord8: case MachineRepresentation::kWord16: case MachineRepresentation::kWord32: diff --git a/src/compiler/load-elimination.cc b/src/compiler/load-elimination.cc index 377654e421..202f28de7d 100644 --- a/src/compiler/load-elimination.cc +++ b/src/compiler/load-elimination.cc @@ -1069,6 +1069,7 @@ Reduction LoadElimination::ReduceLoadElement(Node* node) { case MachineRepresentation::kFloat32: case MachineRepresentation::kCompressedPointer: case MachineRepresentation::kCompressed: + case MachineRepresentation::kCagedPointer: // TODO(turbofan): Add support for doing the truncations. break; case MachineRepresentation::kFloat64: @@ -1125,6 +1126,7 @@ Reduction LoadElimination::ReduceStoreElement(Node* node) { case MachineRepresentation::kFloat32: case MachineRepresentation::kCompressedPointer: case MachineRepresentation::kCompressed: + case MachineRepresentation::kCagedPointer: // TODO(turbofan): Add support for doing the truncations. 
break; case MachineRepresentation::kFloat64: @@ -1429,6 +1431,7 @@ LoadElimination::IndexRange LoadElimination::FieldIndexOf( case MachineRepresentation::kMapWord: case MachineRepresentation::kCompressedPointer: case MachineRepresentation::kCompressed: + case MachineRepresentation::kCagedPointer: break; } int representation_size = ElementSizeInBytes(rep); diff --git a/src/compiler/machine-graph-verifier.cc b/src/compiler/machine-graph-verifier.cc index 31f0526679..9c61a0cc76 100644 --- a/src/compiler/machine-graph-verifier.cc +++ b/src/compiler/machine-graph-verifier.cc @@ -996,6 +996,7 @@ class MachineRepresentationChecker { // happens in dead code. return IsAnyTagged(actual); case MachineRepresentation::kCompressedPointer: + case MachineRepresentation::kCagedPointer: case MachineRepresentation::kFloat32: case MachineRepresentation::kFloat64: case MachineRepresentation::kSimd128: diff --git a/src/compiler/machine-operator.cc b/src/compiler/machine-operator.cc index df82fe048f..a8830ad7d6 100644 --- a/src/compiler/machine-operator.cc +++ b/src/compiler/machine-operator.cc @@ -667,6 +667,7 @@ std::ostream& operator<<(std::ostream& os, TruncateKind kind) { V(MapInHeader) \ V(AnyTagged) \ V(CompressedPointer) \ + V(CagedPointer) \ V(AnyCompressed) #define MACHINE_REPRESENTATION_LIST(V) \ @@ -682,6 +683,7 @@ std::ostream& operator<<(std::ostream& os, TruncateKind kind) { V(kTaggedPointer) \ V(kTagged) \ V(kCompressedPointer) \ + V(kCagedPointer) \ V(kCompressed) #define LOAD_TRANSFORM_LIST(V) \ diff --git a/src/compiler/memory-lowering.cc b/src/compiler/memory-lowering.cc index 47c0a582cc..96b9ccf974 100644 --- a/src/compiler/memory-lowering.cc +++ b/src/compiler/memory-lowering.cc @@ -534,6 +534,8 @@ Reduction MemoryLowering::ReduceStoreField(Node* node, DCHECK_IMPLIES(V8_HEAP_SANDBOX_BOOL, !access.type.Is(Type::ExternalPointer()) && !access.type.Is(Type::SandboxedExternalPointer())); + // CagedPointers are not currently stored by optimized code. 
+ DCHECK(!access.type.Is(Type::CagedPointer())); MachineType machine_type = access.machine_type; Node* object = node->InputAt(0); Node* value = node->InputAt(1); diff --git a/src/compiler/representation-change.cc b/src/compiler/representation-change.cc index 32be3a5c19..f5bddd4510 100644 --- a/src/compiler/representation-change.cc +++ b/src/compiler/representation-change.cc @@ -242,6 +242,7 @@ Node* RepresentationChanger::GetRepresentationFor( return node; case MachineRepresentation::kCompressed: case MachineRepresentation::kCompressedPointer: + case MachineRepresentation::kCagedPointer: case MachineRepresentation::kMapWord: UNREACHABLE(); } @@ -1246,6 +1247,13 @@ Node* RepresentationChanger::GetWord64RepresentationFor( jsgraph()->common()->DeadValue(MachineRepresentation::kWord64), unreachable); } + } else if (output_rep == MachineRepresentation::kCagedPointer) { + if (output_type.Is(Type::CagedPointer())) { + return node; + } else { + return TypeError(node, output_rep, output_type, + MachineRepresentation::kWord64); + } } else { return TypeError(node, output_rep, output_type, MachineRepresentation::kWord64); diff --git a/src/compiler/simplified-lowering.cc b/src/compiler/simplified-lowering.cc index 3486223018..dfb4a2bf2f 100644 --- a/src/compiler/simplified-lowering.cc +++ b/src/compiler/simplified-lowering.cc @@ -160,6 +160,7 @@ UseInfo TruncatingUseInfoFromRepresentation(MachineRepresentation rep) { return UseInfo::Bool(); case MachineRepresentation::kCompressedPointer: case MachineRepresentation::kCompressed: + case MachineRepresentation::kCagedPointer: case MachineRepresentation::kSimd128: case MachineRepresentation::kNone: break; @@ -1073,7 +1074,8 @@ class RepresentationSelector { } else if (type.Is(Type::BigInt()) && use.IsUsedAsWord64()) { return MachineRepresentation::kWord64; } else if (type.Is(Type::ExternalPointer()) || - type.Is(Type::SandboxedExternalPointer())) { + type.Is(Type::SandboxedExternalPointer()) || + type.Is(Type::CagedPointer())) { 
return MachineType::PointerRepresentation(); } return MachineRepresentation::kTagged; diff --git a/src/compiler/types.h b/src/compiler/types.h index 46129680c0..9feab08f70 100644 --- a/src/compiler/types.h +++ b/src/compiler/types.h @@ -112,24 +112,25 @@ namespace compiler { V(NaN, 1u << 12) \ V(Symbol, 1u << 13) \ V(InternalizedString, 1u << 14) \ - V(OtherCallable, 1u << 16) \ - V(OtherObject, 1u << 17) \ - V(OtherUndetectable, 1u << 18) \ - V(CallableProxy, 1u << 19) \ - V(OtherProxy, 1u << 20) \ - V(Function, 1u << 21) \ - V(BoundFunction, 1u << 22) \ - V(Hole, 1u << 23) \ - V(OtherInternal, 1u << 24) \ - V(ExternalPointer, 1u << 25) \ - V(Array, 1u << 26) \ - V(UnsignedBigInt63, 1u << 27) \ - V(OtherUnsignedBigInt64, 1u << 28) \ - V(NegativeBigInt63, 1u << 29) \ - V(OtherBigInt, 1u << 30) \ + V(OtherCallable, 1u << 15) \ + V(OtherObject, 1u << 16) \ + V(OtherUndetectable, 1u << 17) \ + V(CallableProxy, 1u << 18) \ + V(OtherProxy, 1u << 19) \ + V(Function, 1u << 20) \ + V(BoundFunction, 1u << 21) \ + V(Hole, 1u << 22) \ + V(OtherInternal, 1u << 23) \ + V(ExternalPointer, 1u << 24) \ + V(Array, 1u << 25) \ + V(UnsignedBigInt63, 1u << 26) \ + V(OtherUnsignedBigInt64, 1u << 27) \ + V(NegativeBigInt63, 1u << 28) \ + V(OtherBigInt, 1u << 29) \ /* TODO(v8:10391): Remove this type once all ExternalPointer usages are */ \ /* sandbox-ready. 
*/ \ - V(SandboxedExternalPointer, 1u << 31) \ + V(SandboxedExternalPointer, 1u << 30) \ + V(CagedPointer, 1u << 31) \ #define PROPER_BITSET_TYPE_LIST(V) \ V(None, 0u) \ @@ -201,7 +202,8 @@ namespace compiler { V(Unique, kBoolean | kUniqueName | kNull | \ kUndefined | kHole | kReceiver) \ V(Internal, kHole | kExternalPointer | \ - kSandboxedExternalPointer | kOtherInternal) \ + kSandboxedExternalPointer | kCagedPointer | \ + kOtherInternal) \ V(NonInternal, kPrimitive | kReceiver) \ V(NonBigInt, kNonBigIntPrimitive | kReceiver) \ V(NonNumber, kBigInt | kUnique | kString | kInternal) \ diff --git a/src/flags/flag-definitions.h b/src/flags/flag-definitions.h index e682abf7e4..cb5623dc41 100644 --- a/src/flags/flag-definitions.h +++ b/src/flags/flag-definitions.h @@ -181,6 +181,12 @@ struct MaybeBoolFlag { #define V8_VIRTUAL_MEMORY_CAGE_BOOL false #endif +#ifdef V8_CAGED_POINTERS +#define V8_CAGED_POINTERS_BOOL true +#else +#define V8_CAGED_POINTERS_BOOL false +#endif + // D8's MultiMappedAllocator is only available on Linux, and only if the virtual // memory cage is not enabled. 
#if V8_OS_LINUX && !V8_VIRTUAL_MEMORY_CAGE_BOOL diff --git a/src/objects/objects-inl.h b/src/objects/objects-inl.h index 3ce906c8da..e45ac5255b 100644 --- a/src/objects/objects-inl.h +++ b/src/objects/objects-inl.h @@ -42,6 +42,7 @@ #include "src/objects/tagged-impl-inl.h" #include "src/objects/tagged-index.h" #include "src/objects/templates.h" +#include "src/security/caged-pointer-inl.h" #include "src/security/external-pointer-inl.h" // Has to be the last include (doesn't have include guards): @@ -634,6 +635,24 @@ MaybeHandle Object::SetElement(Isolate* isolate, Handle object, return value; } +#ifdef V8_CAGED_POINTERS +Address Object::ReadCagedPointerField(size_t offset, + PtrComprCageBase cage_base) const { + return i::ReadCagedPointerField(field_address(offset), cage_base); +} + +void Object::WriteCagedPointerField(size_t offset, PtrComprCageBase cage_base, + Address value) { + i::WriteCagedPointerField(field_address(offset), cage_base, value); +} + +void Object::WriteCagedPointerField(size_t offset, Isolate* isolate, + Address value) { + i::WriteCagedPointerField(field_address(offset), PtrComprCageBase(isolate), + value); +} +#endif // V8_CAGED_POINTERS + void Object::InitExternalPointerField(size_t offset, Isolate* isolate) { i::InitExternalPointerField(field_address(offset), isolate); } diff --git a/src/objects/objects.h b/src/objects/objects.h index cbcf4e5360..4daaafead3 100644 --- a/src/objects/objects.h +++ b/src/objects/objects.h @@ -698,6 +698,18 @@ class Object : public TaggedImpl { } } + // + // CagedPointer field accessors. + // +#ifdef V8_CAGED_POINTERS + inline Address ReadCagedPointerField(size_t offset, + PtrComprCageBase cage_base) const; + inline void WriteCagedPointerField(size_t offset, PtrComprCageBase cage_base, + Address value); + inline void WriteCagedPointerField(size_t offset, Isolate* isolate, + Address value); +#endif // V8_CAGED_POINTERS + // // ExternalPointer_t field accessors. 
// diff --git a/src/objects/turbofan-types.tq b/src/objects/turbofan-types.tq index d1e65854e6..035b6f8829 100644 --- a/src/objects/turbofan-types.tq +++ b/src/objects/turbofan-types.tq @@ -25,7 +25,6 @@ bitfield struct TurbofanTypeBits extends uint32 { naN: bool: 1 bit; symbol: bool: 1 bit; internalized_string: bool: 1 bit; - _unused_padding_field_2: bool: 1 bit; other_callable: bool: 1 bit; other_object: bool: 1 bit; other_undetectable: bool: 1 bit; @@ -42,6 +41,7 @@ bitfield struct TurbofanTypeBits extends uint32 { negative_big_int_63: bool: 1 bit; other_big_int: bool: 1 bit; sandboxed_external_pointer: bool: 1 bit; + caged_pointer: bool: 1 bit; } @export diff --git a/src/security/caged-pointer-inl.h b/src/security/caged-pointer-inl.h new file mode 100644 index 0000000000..5c0959db25 --- /dev/null +++ b/src/security/caged-pointer-inl.h @@ -0,0 +1,48 @@ +// Copyright 2021 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_SECURITY_CAGED_POINTER_INL_H_ +#define V8_SECURITY_CAGED_POINTER_INL_H_ + +#include "include/v8-internal.h" +#include "src/execution/isolate.h" +#include "src/security/caged-pointer.h" + +namespace v8 { +namespace internal { + +#ifdef V8_CAGED_POINTERS + +V8_INLINE CagedPointer_t ReadCagedPointerField(Address field_address, + PtrComprCageBase cage_base) { + // Caged pointers are currently only used if the sandbox is enabled. + DCHECK(V8_HEAP_SANDBOX_BOOL); + + Address caged_pointer = base::ReadUnalignedValue
(field_address); + + Address offset = caged_pointer >> kCagedPointerShift; + Address pointer = cage_base.address() + offset; + return pointer; +} + +V8_INLINE void WriteCagedPointerField(Address field_address, + PtrComprCageBase cage_base, + CagedPointer_t pointer) { + // Caged pointers are currently only used if the sandbox is enabled. + DCHECK(V8_HEAP_SANDBOX_BOOL); + + // The pointer must point into the virtual memory cage. + DCHECK(GetProcessWideVirtualMemoryCage()->Contains(pointer)); + + Address offset = pointer - cage_base.address(); + Address caged_pointer = offset << kCagedPointerShift; + base::WriteUnalignedValue
(field_address, caged_pointer); +} + +#endif // V8_CAGED_POINTERS + +} // namespace internal +} // namespace v8 + +#endif // V8_SECURITY_CAGED_POINTER_INL_H_ diff --git a/src/security/caged-pointer.h b/src/security/caged-pointer.h new file mode 100644 index 0000000000..5b15f63844 --- /dev/null +++ b/src/security/caged-pointer.h @@ -0,0 +1,27 @@ +// Copyright 2021 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +#ifndef V8_SECURITY_CAGED_POINTER_H_ +#define V8_SECURITY_CAGED_POINTER_H_ + +#include "src/common/globals.h" + +namespace v8 { +namespace internal { + +#ifdef V8_CAGED_POINTERS + +V8_INLINE CagedPointer_t ReadCagedPointerField(Address field_address, + PtrComprCageBase cage_base); + +V8_INLINE void WriteCagedPointerField(Address field_address, + PtrComprCageBase cage_base, + CagedPointer_t value); + +#endif // V8_CAGED_POINTERS + +} // namespace internal +} // namespace v8 + +#endif // V8_SECURITY_CAGED_POINTER_H_ diff --git a/src/security/vm-cage.cc b/src/security/vm-cage.cc index fd4fd3b7ef..38067bf86c 100644 --- a/src/security/vm-cage.cc +++ b/src/security/vm-cage.cc @@ -328,6 +328,7 @@ bool V8VirtualMemoryCage::Initialize(v8::PageAllocator* page_allocator, page_allocator_ = page_allocator; size_ = size; + end_ = base_ + size_; reservation_size_ = reservation_size; cage_page_allocator_ = std::make_unique( @@ -391,6 +392,7 @@ bool V8VirtualMemoryCage::InitializeAsFakeCage( base_ = reservation_base_; size_ = size; + end_ = base_ + size_; reservation_size_ = size_to_reserve; initialized_ = true; is_fake_cage_ = true; @@ -407,6 +409,7 @@ void V8VirtualMemoryCage::TearDown() { CHECK(page_allocator_->FreePages(reinterpret_cast(reservation_base_), reservation_size_)); base_ = kNullAddress; + end_ = kNullAddress; size_ = 0; reservation_base_ = kNullAddress; reservation_size_ = 0; diff --git a/src/security/vm-cage.h b/src/security/vm-cage.h index 
2b49ff6168..26aa2c8f37 100644 --- a/src/security/vm-cage.h +++ b/src/security/vm-cage.h @@ -73,8 +73,13 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage { bool is_fake_cage() const { return is_fake_cage_; } Address base() const { return base_; } + Address end() const { return end_; } size_t size() const { return size_; } + Address base_address() const { return reinterpret_cast
(&base_); } + Address end_address() const { return reinterpret_cast
(&end_); } + Address size_address() const { return reinterpret_cast
(&size_); } + v8::PageAllocator* page_allocator() const { return cage_page_allocator_.get(); } @@ -110,6 +115,7 @@ class V8_EXPORT_PRIVATE V8VirtualMemoryCage { size_t size_to_reserve); Address base_ = kNullAddress; + Address end_ = kNullAddress; size_t size_ = 0; // Base and size of the virtual memory reservation backing this cage. These