From 708be823a16c3f1fbe30c8099d4bb8c4037513fe Mon Sep 17 00:00:00 2001
From: Jakob Gruber
Date: Wed, 17 Oct 2018 15:12:53 +0200
Subject: [PATCH] [root] Refactor root offset accessors in TurboAssembler

Some confusion has crept in over time, specifically around the
distinction between an offset to an external reference's address and an
offset to its entry in the external reference table. This CL unifies
naming and interfaces.

Drive-by: Fix formatting in macro-assembler-x64.

Bug: v8:6666
Change-Id: Iade98ca28a7304aba0254b92b553343826a08e41
Reviewed-on: https://chromium-review.googlesource.com/c/1286674
Commit-Queue: Jakob Gruber
Reviewed-by: Sigurd Schneider
Cr-Commit-Position: refs/heads/master@{#56741}
---
 src/arm/macro-assembler-arm.cc               |  3 +-
 src/arm64/macro-assembler-arm64.cc           |  3 +-
 src/compiler/x64/instruction-selector-x64.cc |  6 +-
 src/ia32/macro-assembler-ia32.cc             | 54 +++++--------
 src/mips/macro-assembler-mips.cc             |  6 +-
 src/mips64/macro-assembler-mips64.cc         |  4 +-
 src/ppc/macro-assembler-ppc.cc               |  3 +-
 src/s390/macro-assembler-s390.cc             |  5 +-
 src/turbo-assembler.cc                       | 43 +++++-----
 src/turbo-assembler.h                        | 14 +++-
 src/x64/macro-assembler-x64.cc               | 82 +++++++++-----
 11 files changed, 110 insertions(+), 113 deletions(-)

diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index e643aa3dde..fdb59d1e8d 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -528,7 +528,8 @@ void MacroAssembler::Store(Register src,
 
 void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                               Condition cond) {
-  ldr(destination, MemOperand(kRootRegister, RootRegisterOffset(index)), cond);
+  ldr(destination,
+      MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)), cond);
 }
 
 
diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc
index 9a5254a6c2..d9ef420030 100644
--- a/src/arm64/macro-assembler-arm64.cc
+++ b/src/arm64/macro-assembler-arm64.cc
@@ -1519,7 +1519,8 @@ void TurboAssembler::CanonicalizeNaN(const VRegister& dst,
 void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
   // TODO(jbramley): Most root values are constants, and can be synthesized
   // without a load. Refer to the ARM back end for details.
-  Ldr(destination, MemOperand(kRootRegister, RootRegisterOffset(index)));
+  Ldr(destination,
+      MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 }
 
diff --git a/src/compiler/x64/instruction-selector-x64.cc b/src/compiler/x64/instruction-selector-x64.cc
index 28153989db..6184a208fb 100644
--- a/src/compiler/x64/instruction-selector-x64.cc
+++ b/src/compiler/x64/instruction-selector-x64.cc
@@ -1731,7 +1731,8 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
           kX64Cmp | AddressingModeField::encode(kMode_Root);
       return VisitCompare(
           selector, opcode,
-          g.TempImmediate(TurboAssemblerBase::RootRegisterOffset(root_index)),
+          g.TempImmediate(
+              TurboAssemblerBase::RootRegisterOffsetForRootIndex(root_index)),
           g.UseRegister(m.left().node()), cont);
     } else if (m.left().HasValue() &&
                roots_table.IsRootHandle(m.left().Value(), &root_index)) {
@@ -1739,7 +1740,8 @@ void VisitWord64Compare(InstructionSelector* selector, Node* node,
           kX64Cmp | AddressingModeField::encode(kMode_Root);
       return VisitCompare(
           selector, opcode,
-          g.TempImmediate(TurboAssemblerBase::RootRegisterOffset(root_index)),
+          g.TempImmediate(
+              TurboAssemblerBase::RootRegisterOffsetForRootIndex(root_index)),
           g.UseRegister(m.right().node()), cont);
     }
   }
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 438087c290..195abb950d 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -83,7 +83,8 @@ void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
 #ifdef V8_EMBEDDED_BUILTINS
   if (root_array_available()) {
     Assembler::AllowExplicitEbxAccessScope read_only_access(this);
-    mov(destination, Operand(kRootRegister, RootRegisterOffset(index)));
+    mov(destination,
+        Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
     return;
   }
 #endif  // V8_EMBEDDED_BUILTINS
@@ -125,7 +126,7 @@ void TurboAssembler::CompareRoot(Register with, RootIndex index) {
 #ifdef V8_EMBEDDED_BUILTINS
   if (root_array_available()) {
     Assembler::AllowExplicitEbxAccessScope read_only_access(this);
-    cmp(with, Operand(kRootRegister, RootRegisterOffset(index)));
+    cmp(with, Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
     return;
   }
 #endif  // V8_EMBEDDED_BUILTINS
@@ -168,7 +169,7 @@ void MacroAssembler::PushRoot(RootIndex index) {
   if (root_array_available()) {
     DCHECK(RootsTable::IsImmortalImmovable(index));
     Assembler::AllowExplicitEbxAccessScope read_only_access(this);
-    push(Operand(kRootRegister, RootRegisterOffset(index)));
+    push(Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
     return;
   }
 #endif  // V8_EMBEDDED_BUILTINS
@@ -195,16 +196,9 @@ Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
     return Operand(kRootRegister, offset);
   } else {
     // Otherwise, do a memory load from the external reference table.
-
-    // Encode as an index into the external reference table stored on the
-    // isolate.
-    ExternalReferenceEncoder encoder(isolate());
-    ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
-    CHECK(!v.is_from_api());
-
-    mov(scratch,
-        Operand(kRootRegister,
-                RootRegisterOffsetForExternalReferenceIndex(v.index())));
+    mov(scratch, Operand(kRootRegister,
+                         RootRegisterOffsetForExternalReferenceTableEntry(
+                             isolate(), reference)));
     return Operand(scratch, 0);
   }
 }
@@ -212,6 +206,19 @@ Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
   return Operand(scratch, 0);
 }
 
+// TODO(v8:6666): If possible, refactor into a platform-independent function in
+// TurboAssembler.
+Operand TurboAssembler::ExternalReferenceAddressAsOperand(
+    ExternalReference reference) {
+  DCHECK(FLAG_embedded_builtins);
+  DCHECK(root_array_available());
+  DCHECK(ShouldGenerateIsolateIndependentCode());
+  Assembler::AllowExplicitEbxAccessScope read_only_access(this);
+  return Operand(
+      kRootRegister,
+      RootRegisterOffsetForExternalReferenceTableEntry(isolate(), reference));
+}
+
 // TODO(v8:6666): If possible, refactor into a platform-independent function in
 // TurboAssembler.
 Operand TurboAssembler::HeapObjectAsOperand(Handle<HeapObject> object) {
@@ -222,7 +229,7 @@ Operand TurboAssembler::HeapObjectAsOperand(Handle<HeapObject> object) {
   int builtin_index;
   RootIndex root_index;
   if (isolate()->roots_table().IsRootHandle(object, &root_index)) {
-    return Operand(kRootRegister, RootRegisterOffset(root_index));
+    return Operand(kRootRegister, RootRegisterOffsetForRootIndex(root_index));
   } else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) {
     return Operand(kRootRegister,
                    RootRegisterOffsetForBuiltinIndex(builtin_index));
@@ -237,25 +244,6 @@ Operand TurboAssembler::HeapObjectAsOperand(Handle<HeapObject> object) {
   }
 }
 
-// TODO(v8:6666): If possible, refactor into a platform-independent function in
-// TurboAssembler.
-Operand TurboAssembler::ExternalReferenceAddressAsOperand(
-    ExternalReference reference) {
-  DCHECK(FLAG_embedded_builtins);
-  DCHECK(root_array_available());
-  DCHECK(ShouldGenerateIsolateIndependentCode());
-  Assembler::AllowExplicitEbxAccessScope read_only_access(this);
-
-  // Encode as an index into the external reference table stored on the
-  // isolate.
-  ExternalReferenceEncoder encoder(isolate());
-  ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
-  CHECK(!v.is_from_api());
-
-  return Operand(kRootRegister,
-                 RootRegisterOffsetForExternalReferenceIndex(v.index()));
-}
-
 void TurboAssembler::LoadFromConstantsTable(Register destination,
                                             int constant_index) {
   DCHECK(!is_ebx_addressable_);
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 3314c8ce33..bf222221f0 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -128,14 +128,16 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
 }
 
 void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
-  lw(destination, MemOperand(kRootRegister, RootRegisterOffset(index)));
+  lw(destination,
+     MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 }
 
 void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                               Condition cond, Register src1,
                               const Operand& src2) {
   Branch(2, NegateCondition(cond), src1, src2);
-  lw(destination, MemOperand(kRootRegister, RootRegisterOffset(index)));
+  lw(destination,
+     MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 }
 
 
diff --git a/src/mips64/macro-assembler-mips64.cc b/src/mips64/macro-assembler-mips64.cc
index 876d02ed3e..ef0e67d39c 100644
--- a/src/mips64/macro-assembler-mips64.cc
+++ b/src/mips64/macro-assembler-mips64.cc
@@ -128,14 +128,14 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
 }
 
 void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
-  Ld(destination, MemOperand(s6, RootRegisterOffset(index)));
+  Ld(destination, MemOperand(s6, RootRegisterOffsetForRootIndex(index)));
 }
 
 void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                               Condition cond, Register src1,
                               const Operand& src2) {
   Branch(2, NegateCondition(cond), src1, src2);
-  Ld(destination, MemOperand(s6, RootRegisterOffset(index)));
+  Ld(destination, MemOperand(s6, RootRegisterOffsetForRootIndex(index)));
 }
 
 
diff --git a/src/ppc/macro-assembler-ppc.cc b/src/ppc/macro-assembler-ppc.cc
index 3b2ce0e98e..381ba13403 100644
--- a/src/ppc/macro-assembler-ppc.cc
+++ b/src/ppc/macro-assembler-ppc.cc
@@ -397,7 +397,8 @@ void TurboAssembler::MultiPopDoubles(RegList dregs, Register location) {
 
 void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                               Condition cond) {
   DCHECK(cond == al);
-  LoadP(destination, MemOperand(kRootRegister, RootRegisterOffset(index)), r0);
+  LoadP(destination,
+        MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)), r0);
 }
 
 void MacroAssembler::RecordWriteField(Register object, int offset,
diff --git a/src/s390/macro-assembler-s390.cc b/src/s390/macro-assembler-s390.cc
index 80385b1a38..8fcb595c2b 100644
--- a/src/s390/macro-assembler-s390.cc
+++ b/src/s390/macro-assembler-s390.cc
@@ -430,7 +430,8 @@ void TurboAssembler::MultiPopDoubles(RegList dregs, Register location) {
 
 void TurboAssembler::LoadRoot(Register destination, RootIndex index,
                               Condition) {
-  LoadP(destination, MemOperand(kRootRegister, RootRegisterOffset(index)), r0);
+  LoadP(destination,
+        MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)), r0);
 }
 
 void MacroAssembler::RecordWriteField(Register object, int offset,
@@ -1512,7 +1513,7 @@ void MacroAssembler::CompareInstanceType(Register map, Register type_reg,
 }
 
 void MacroAssembler::CompareRoot(Register obj, RootIndex index) {
-  CmpP(obj, MemOperand(kRootRegister, RootRegisterOffset(index)));
+  CmpP(obj,
+       MemOperand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 }
 
 void MacroAssembler::CallStub(CodeStub* stub, Condition cond) {
diff --git a/src/turbo-assembler.cc b/src/turbo-assembler.cc
index 422303e43f..449f62730f 100644
--- a/src/turbo-assembler.cc
+++ b/src/turbo-assembler.cc
@@ -72,29 +72,24 @@ void TurboAssemblerBase::IndirectLoadExternalReference(
     LoadRootRegisterOffset(destination, offset);
   } else {
     // Otherwise, do a memory load from the external reference table.
-
-    // Encode as an index into the external reference table stored on the
-    // isolate.
-    ExternalReferenceEncoder encoder(isolate());
-    ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
-    CHECK(!v.is_from_api());
-
-    LoadRootRelative(destination,
-                     RootRegisterOffsetForExternalReferenceIndex(v.index()));
+    LoadRootRelative(
+        destination,
+        RootRegisterOffsetForExternalReferenceTableEntry(isolate(), reference));
   }
 }
 
 // static
-int32_t TurboAssemblerBase::RootRegisterOffset(RootIndex root_index) {
+int32_t TurboAssemblerBase::RootRegisterOffsetForRootIndex(
+    RootIndex root_index) {
   return (static_cast<int32_t>(root_index) << kPointerSizeLog2) -
          kRootRegisterBias;
 }
 
 // static
-int32_t TurboAssemblerBase::RootRegisterOffsetForExternalReferenceIndex(
-    int reference_index) {
-  return IsolateData::kExternalReferenceTableOffset - kRootRegisterBias +
-         ExternalReferenceTable::OffsetOfEntry(reference_index);
+int32_t TurboAssemblerBase::RootRegisterOffsetForBuiltinIndex(
+    int builtin_index) {
+  return IsolateData::kBuiltinsTableOffset - kRootRegisterBias +
+         builtin_index * kPointerSize;
 }
 
 // static
@@ -104,6 +99,19 @@ intptr_t TurboAssemblerBase::RootRegisterOffsetForExternalReference(
          reinterpret_cast<intptr_t>(isolate->roots_array_start());
 }
 
+// static
+int32_t TurboAssemblerBase::RootRegisterOffsetForExternalReferenceTableEntry(
+    Isolate* isolate, const ExternalReference& reference) {
+  // Encode as an index into the external reference table stored on the
+  // isolate.
+  ExternalReferenceEncoder encoder(isolate);
+  ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
+  CHECK(!v.is_from_api());
+
+  return IsolateData::kExternalReferenceTableOffset - kRootRegisterBias +
+         ExternalReferenceTable::OffsetOfEntry(v.index());
+}
+
 // static
 bool TurboAssemblerBase::IsAddressableThroughRootRegister(
@@ -111,13 +119,6 @@ bool TurboAssemblerBase::IsAddressableThroughRootRegister(
   return isolate->root_register_addressable_region().contains(address);
 }
 
-// static
-int32_t TurboAssemblerBase::RootRegisterOffsetForBuiltinIndex(
-    int builtin_index) {
-  return IsolateData::kBuiltinsTableOffset - kRootRegisterBias +
-         builtin_index * kPointerSize;
-}
-
 void TurboAssemblerBase::RecordCommentForOffHeapTrampoline(int builtin_index) {
   if (!FLAG_code_comments) return;
   size_t len = strlen("-- Inlined Trampoline to --") +
diff --git a/src/turbo-assembler.h b/src/turbo-assembler.h
index 70048962dd..a0199cd97d 100644
--- a/src/turbo-assembler.h
+++ b/src/turbo-assembler.h
@@ -48,21 +48,27 @@ class V8_EXPORT_PRIVATE TurboAssemblerBase : public Assembler {
   virtual void LoadFromConstantsTable(Register destination,
                                       int constant_index) = 0;
 
+  // Corresponds to: destination = kRootRegister + offset.
   virtual void LoadRootRegisterOffset(Register destination,
                                       intptr_t offset) = 0;
+
+  // Corresponds to: destination = [kRootRegister + offset].
   virtual void LoadRootRelative(Register destination, int32_t offset) = 0;
 
   virtual void LoadRoot(Register destination, RootIndex index) = 0;
 
-  static int32_t RootRegisterOffset(RootIndex root_index);
-  static int32_t RootRegisterOffsetForExternalReferenceIndex(
-      int reference_index);
-
+  static int32_t RootRegisterOffsetForRootIndex(RootIndex root_index);
   static int32_t RootRegisterOffsetForBuiltinIndex(int builtin_index);
 
+  // Returns the root-relative offset to reference.address().
   static intptr_t RootRegisterOffsetForExternalReference(
       Isolate* isolate, const ExternalReference& reference);
 
+  // Returns the root-relative offset to the external reference table entry,
+  // which itself contains reference.address().
+  static int32_t RootRegisterOffsetForExternalReferenceTableEntry(
+      Isolate* isolate, const ExternalReference& reference);
+
   // An address is addressable through kRootRegister if it is located within
   // isolate->root_register_addressable_region().
   static bool IsAddressableThroughRootRegister(
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 83cf2538cc..701fd0f231 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -185,16 +185,9 @@ Operand TurboAssembler::ExternalReferenceAsOperand(ExternalReference reference,
     return Operand(kRootRegister, static_cast<int32_t>(offset));
   } else {
     // Otherwise, do a memory load from the external reference table.
-
-    // Encode as an index into the external reference table stored on the
-    // isolate.
-    ExternalReferenceEncoder encoder(isolate());
-    ExternalReferenceEncoder::Value v = encoder.Encode(reference.address());
-    CHECK(!v.is_from_api());
-
-    movp(scratch,
-         Operand(kRootRegister,
-                 RootRegisterOffsetForExternalReferenceIndex(v.index())));
+    movp(scratch, Operand(kRootRegister,
+                          RootRegisterOffsetForExternalReferenceTableEntry(
+                              isolate(), reference)));
     return Operand(scratch, 0);
   }
 }
@@ -209,17 +202,18 @@ void MacroAssembler::PushAddress(ExternalReference source) {
 
 void TurboAssembler::LoadRoot(Register destination, RootIndex index) {
   DCHECK(root_array_available_);
-  movp(destination, Operand(kRootRegister, RootRegisterOffset(index)));
+  movp(destination,
+       Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 }
 
 void MacroAssembler::PushRoot(RootIndex index) {
   DCHECK(root_array_available_);
-  Push(Operand(kRootRegister, RootRegisterOffset(index)));
+  Push(Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 }
 
 void TurboAssembler::CompareRoot(Register with, RootIndex index) {
   DCHECK(root_array_available_);
-  cmpp(with, Operand(kRootRegister, RootRegisterOffset(index)));
+  cmpp(with, Operand(kRootRegister, RootRegisterOffsetForRootIndex(index)));
 }
 
 void TurboAssembler::CompareRoot(Operand with, RootIndex index) {
@@ -1487,41 +1481,41 @@ void TurboAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
 
 void TurboAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode,
                           Condition cc) {
-// TODO(X64): Inline this
-if (FLAG_embedded_builtins) {
-  if (root_array_available_ && options().isolate_independent_code &&
-      !Builtins::IsIsolateIndependentBuiltin(*code_object)) {
-    // Calls to embedded targets are initially generated as standard
-    // pc-relative calls below. When creating the embedded blob, call offsets
-    // are patched up to point directly to the off-heap instruction start.
-    // Note: It is safe to dereference code_object above since code generation
-    // for builtins and code stubs happens on the main thread.
-    Label skip;
-    if (cc != always) {
-      if (cc == never) return;
-      j(NegateCondition(cc), &skip, Label::kNear);
-    }
-    IndirectLoadConstant(kScratchRegister, code_object);
-    leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
-    jmp(kScratchRegister);
-    bind(&skip);
-    return;
-  } else if (options().inline_offheap_trampolines) {
-    int builtin_index = Builtins::kNoBuiltinId;
-    if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
-        Builtins::IsIsolateIndependent(builtin_index)) {
-      // Inline the trampoline.
-      RecordCommentForOffHeapTrampoline(builtin_index);
-      CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
-      EmbeddedData d = EmbeddedData::FromBlob();
-      Address entry = d.InstructionStartOfBuiltin(builtin_index);
-      Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
+  // TODO(X64): Inline this
+  if (FLAG_embedded_builtins) {
+    if (root_array_available_ && options().isolate_independent_code &&
+        !Builtins::IsIsolateIndependentBuiltin(*code_object)) {
+      // Calls to embedded targets are initially generated as standard
+      // pc-relative calls below. When creating the embedded blob, call offsets
+      // are patched up to point directly to the off-heap instruction start.
+      // Note: It is safe to dereference code_object above since code generation
+      // for builtins and code stubs happens on the main thread.
+      Label skip;
+      if (cc != always) {
+        if (cc == never) return;
+        j(NegateCondition(cc), &skip, Label::kNear);
+      }
+      IndirectLoadConstant(kScratchRegister, code_object);
+      leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
       jmp(kScratchRegister);
+      bind(&skip);
       return;
+    } else if (options().inline_offheap_trampolines) {
+      int builtin_index = Builtins::kNoBuiltinId;
+      if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) &&
+          Builtins::IsIsolateIndependent(builtin_index)) {
+        // Inline the trampoline.
+        RecordCommentForOffHeapTrampoline(builtin_index);
+        CHECK_NE(builtin_index, Builtins::kNoBuiltinId);
+        EmbeddedData d = EmbeddedData::FromBlob();
+        Address entry = d.InstructionStartOfBuiltin(builtin_index);
+        Move(kScratchRegister, entry, RelocInfo::OFF_HEAP_TARGET);
+        jmp(kScratchRegister);
+        return;
+      }
     }
   }
-}
-j(cc, code_object, rmode);
+  j(cc, code_object, rmode);
 }
 
 void MacroAssembler::JumpToInstructionStream(Address entry) {
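
For reference, the two offsets this CL distinguishes can be summarized as follows: RootRegisterOffsetForExternalReference(isolate, reference) is the root-relative offset of reference.address() itself and only applies when that address lies inside isolate->root_register_addressable_region(), while RootRegisterOffsetForExternalReferenceTableEntry(isolate, reference) is the root-relative offset of the external reference table slot that in turn holds reference.address(). The sketch below restates the selection logic of TurboAssemblerBase::IndirectLoadExternalReference from the patch as a free function. It is illustrative only and not part of the patch: the function name is made up, it assumes V8-internal headers and include paths, and it assumes the TurboAssemblerBase members used here are accessible from the call site.

  #include "src/turbo-assembler.h"  // assumption: V8-internal include path

  namespace v8 {
  namespace internal {

  // Illustrative sketch (hypothetical helper, not V8 API): choose the right
  // root-relative offset for an external reference, mirroring
  // TurboAssemblerBase::IndirectLoadExternalReference in the patch above.
  void SketchLoadExternalReference(TurboAssemblerBase* tasm, Isolate* isolate,
                                   Register destination,
                                   const ExternalReference& reference) {
    if (TurboAssemblerBase::IsAddressableThroughRootRegister(isolate,
                                                             reference)) {
      // reference.address() itself is reachable from kRootRegister:
      // destination = kRootRegister + offset, where offset points directly
      // at the address.
      intptr_t offset =
          TurboAssemblerBase::RootRegisterOffsetForExternalReference(isolate,
                                                                     reference);
      tasm->LoadRootRegisterOffset(destination, offset);
    } else {
      // Otherwise go through the external reference table: offset points at
      // the table entry, and the entry holds reference.address(), i.e.
      // destination = [kRootRegister + offset].
      int32_t offset =
          TurboAssemblerBase::RootRegisterOffsetForExternalReferenceTableEntry(
              isolate, reference);
      tasm->LoadRootRelative(destination, offset);
    }
  }

  }  // namespace internal
  }  // namespace v8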