From 4252d53f57e6ef4c5c16146886c2c1f69ab37f81 Mon Sep 17 00:00:00 2001 From: "Ben L. Titzer" Date: Wed, 20 Jun 2018 15:47:30 +0200 Subject: [PATCH] [asm] Rework Assembler::IsolateData into Assembler::Options This CL attempts to simplify the Assembler's dependency on the isolate, in particular on a global "serializer_enabled" mode contained therein. The "serializer_enabled" condition enabled and disabled a number of things in both the assemblers and macro assemblers. To make these dependencies explicit, the Assembler::IsolateData is refactored to be a proper Assembler::Options struct that controls specific assembler behaviors, with default settings easily computable from the isolate. This also helps make the contract for compiling WASM code more explicit: since WASM code needs to have reloc info recorded for external references, we can explicitly enable this recording without trying to "trick" the assembler using "serializer_enabled". R=jgruber@chromium.org CC=mstarzinger@chromium.org, herhut@chromium.org Change-Id: I7a8ba49df7b75b292d73ec2aa6e507c27a3d99c8 Reviewed-on: https://chromium-review.googlesource.com/1105982 Commit-Queue: Ben Titzer Reviewed-by: Jakob Gruber Cr-Commit-Position: refs/heads/master@{#53890} --- src/arm/assembler-arm.cc | 26 ++++----- src/arm/assembler-arm.h | 6 +- src/arm/macro-assembler-arm.cc | 17 +++--- src/arm/macro-assembler-arm.h | 16 +++--- src/arm64/assembler-arm64-inl.h | 2 +- src/arm64/assembler-arm64.cc | 29 +++++----- src/arm64/assembler-arm64.h | 8 +-- src/arm64/instructions-arm64.cc | 14 ++--- src/arm64/instructions-arm64.h | 8 +-- src/arm64/macro-assembler-arm64.cc | 20 +++--- src/arm64/macro-assembler-arm64.h | 18 +++--- src/assembler.cc | 32 +++++++---- src/assembler.h | 55 +++++++++---------- src/code-stubs.cc | 7 ++- src/compiler/code-generator.cc | 16 ++++-- src/ia32/assembler-ia32.cc | 6 +- src/ia32/assembler-ia32.h | 4 +- src/ia32/macro-assembler-ia32.cc | 5 +- src/ia32/macro-assembler-ia32.h | 16 +++--- 
src/mips/assembler-mips.cc | 7 +-- src/mips/assembler-mips.h | 4 +- src/mips/macro-assembler-mips.cc | 17 +++--- src/mips/macro-assembler-mips.h | 16 +++--- src/mips64/assembler-mips64.cc | 6 +- src/mips64/assembler-mips64.h | 4 +- src/mips64/macro-assembler-mips64.cc | 17 +++--- src/mips64/macro-assembler-mips64.h | 16 +++--- src/ppc/assembler-ppc.cc | 22 ++++---- src/ppc/assembler-ppc.h | 12 ++-- src/ppc/macro-assembler-ppc.cc | 17 +++--- src/ppc/macro-assembler-ppc.h | 15 +++-- src/s390/assembler-s390.cc | 8 +-- src/s390/assembler-s390.h | 4 +- src/s390/macro-assembler-s390.cc | 17 +++--- src/s390/macro-assembler-s390.h | 15 +++-- src/turbo-assembler.cc | 10 +--- src/turbo-assembler.h | 5 +- .../baseline/arm64/liftoff-assembler-arm64.h | 2 +- src/wasm/baseline/liftoff-assembler.cc | 3 +- .../baseline/mips/liftoff-assembler-mips.h | 5 +- .../mips64/liftoff-assembler-mips64.h | 5 +- src/wasm/jump-table-assembler.h | 21 +++---- src/x64/assembler-x64-inl.h | 4 +- src/x64/assembler-x64.cc | 7 +-- src/x64/assembler-x64.h | 4 +- src/x64/macro-assembler-x64.cc | 39 ++++++------- src/x64/macro-assembler-x64.h | 17 +++--- 47 files changed, 322 insertions(+), 302 deletions(-) diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc index 0950e85150..b386fb2fc6 100644 --- a/src/arm/assembler-arm.cc +++ b/src/arm/assembler-arm.cc @@ -538,8 +538,8 @@ const Instr kLdrRegFpNegOffsetPattern = const Instr kStrRegFpNegOffsetPattern = al | B26 | NegOffset | fp.code() * B16; const Instr kLdrStrInstrTypeMask = 0xFFFF0000; -Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) - : AssemblerBase(isolate_data, buffer, buffer_size), +Assembler::Assembler(const Options& options, void* buffer, int buffer_size) + : AssemblerBase(options, buffer, buffer_size), pending_32_bit_constants_(), pending_64_bit_constants_(), scratch_register_list_(ip.bit()) { @@ -882,7 +882,7 @@ void Assembler::target_at_put(int pos, int target_pos) { if (is_uint8(target24)) { // If 
the target fits in a byte then only patch with a mov // instruction. - PatchingAssembler patcher(isolate_data(), + PatchingAssembler patcher(options(), reinterpret_cast(buffer_ + pos), 1); patcher.mov(dst, Operand(target24)); } else { @@ -891,12 +891,12 @@ void Assembler::target_at_put(int pos, int target_pos) { if (CpuFeatures::IsSupported(ARMv7)) { // Patch with movw/movt. if (target16_1 == 0) { - PatchingAssembler patcher(isolate_data(), + PatchingAssembler patcher(options(), reinterpret_cast(buffer_ + pos), 1); CpuFeatureScope scope(&patcher, ARMv7); patcher.movw(dst, target16_0); } else { - PatchingAssembler patcher(isolate_data(), + PatchingAssembler patcher(options(), reinterpret_cast(buffer_ + pos), 2); CpuFeatureScope scope(&patcher, ARMv7); patcher.movw(dst, target16_0); @@ -908,12 +908,12 @@ void Assembler::target_at_put(int pos, int target_pos) { uint8_t target8_1 = target16_0 >> 8; uint8_t target8_2 = target16_1 & kImm8Mask; if (target8_2 == 0) { - PatchingAssembler patcher(isolate_data(), + PatchingAssembler patcher(options(), reinterpret_cast(buffer_ + pos), 2); patcher.mov(dst, Operand(target8_0)); patcher.orr(dst, dst, Operand(target8_1 << 8)); } else { - PatchingAssembler patcher(isolate_data(), + PatchingAssembler patcher(options(), reinterpret_cast(buffer_ + pos), 3); patcher.mov(dst, Operand(target8_0)); patcher.orr(dst, dst, Operand(target8_1 << 8)); @@ -1098,8 +1098,8 @@ bool FitsShifter(uint32_t imm32, uint32_t* rotate_imm, uint32_t* immed_8, // encoded. 
bool MustOutputRelocInfo(RelocInfo::Mode rmode, const Assembler* assembler) { if (rmode == RelocInfo::EXTERNAL_REFERENCE) { - if (assembler != nullptr && assembler->predictable_code_size()) return true; - return assembler->serializer_enabled(); + if (assembler->predictable_code_size()) return true; + return assembler->options().record_reloc_info_for_exrefs; } else if (RelocInfo::IsNone(rmode)) { return false; } @@ -5137,8 +5137,8 @@ void Assembler::dq(uint64_t value) { void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { if (RelocInfo::IsNone(rmode) || // Don't record external references unless the heap will be serialized. - (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() && - !emit_debug_code())) { + (rmode == RelocInfo::EXTERNAL_REFERENCE && + !options().record_reloc_info_for_exrefs && !emit_debug_code())) { return; } DCHECK_GE(buffer_space(), kMaxRelocSize); // too late to grow buffer here @@ -5447,9 +5447,9 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) { next_buffer_check_ = pc_offset() + kCheckPoolInterval; } -PatchingAssembler::PatchingAssembler(IsolateData isolate_data, byte* address, +PatchingAssembler::PatchingAssembler(const Options& options, byte* address, int instructions) - : Assembler(isolate_data, address, instructions * kInstrSize + kGap) { + : Assembler(options, address, instructions * kInstrSize + kGap) { DCHECK_EQ(reloc_info_writer.pos(), buffer_ + buffer_size_); } diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h index a7299dba8d..459c10a184 100644 --- a/src/arm/assembler-arm.h +++ b/src/arm/assembler-arm.h @@ -629,8 +629,8 @@ class Assembler : public AssemblerBase { // buffer is too small, a fatal error occurs. No deallocation of the buffer is // done upon destruction of the assembler. 
Assembler(Isolate* isolate, void* buffer, int buffer_size) - : Assembler(IsolateData(isolate), buffer, buffer_size) {} - Assembler(IsolateData isolate_data, void* buffer, int buffer_size); + : Assembler(DefaultOptions(isolate), buffer, buffer_size) {} + Assembler(const Options& options, void* buffer, int buffer_size); virtual ~Assembler(); // GetCode emits any pending (non-emitted) code and fills the descriptor @@ -1722,7 +1722,7 @@ class EnsureSpace BASE_EMBEDDED { class PatchingAssembler : public Assembler { public: - PatchingAssembler(IsolateData isolate_data, byte* address, int instructions); + PatchingAssembler(const Options& options, byte* address, int instructions); ~PatchingAssembler(); void Emit(Address addr); diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc index 5e5290533e..3a9757699e 100644 --- a/src/arm/macro-assembler-arm.cc +++ b/src/arm/macro-assembler-arm.cc @@ -30,9 +30,10 @@ namespace v8 { namespace internal { -MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size, +MacroAssembler::MacroAssembler(Isolate* isolate, const Options& options, + void* buffer, int size, CodeObjectRequired create_code_object) - : TurboAssembler(isolate, buffer, size, create_code_object) { + : TurboAssembler(isolate, options, buffer, size, create_code_object) { if (create_code_object == CodeObjectRequired::kYes) { // Unlike TurboAssembler, which can be used off the main thread and may not // allocate, macro assembler creates its own copy of the self-reference @@ -202,14 +203,14 @@ void TurboAssembler::Jump(Handle code, RelocInfo::Mode rmode, Condition cond) { DCHECK(RelocInfo::IsCodeTarget(rmode)); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { UseScratchRegisterScope temps(this); Register scratch = temps.Acquire(); IndirectLoadConstant(scratch, code); add(scratch, scratch, Operand(Code::kHeaderSize 
- kHeapObjectTag)); Jump(scratch, cond); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -310,14 +311,14 @@ void TurboAssembler::Call(Handle code, RelocInfo::Mode rmode, bool check_constant_pool) { DCHECK(RelocInfo::IsCodeTarget(rmode)); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { // Use ip directly instead of using UseScratchRegisterScope, as we do not // preserve scratch registers across calls. IndirectLoadConstant(ip, code); add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); Call(ip, cond); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -377,7 +378,7 @@ void TurboAssembler::Move(Register dst, Smi* smi) { mov(dst, Operand(smi)); } void TurboAssembler::Move(Register dst, Handle value) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(dst, value); return; } @@ -387,7 +388,7 @@ void TurboAssembler::Move(Register dst, Handle value) { void TurboAssembler::Move(Register dst, ExternalReference reference) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadExternalReference(dst, reference); return; } diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h index 7e70f92a13..dcc6c930e5 100644 
--- a/src/arm/macro-assembler-arm.h +++ b/src/arm/macro-assembler-arm.h @@ -92,12 +92,10 @@ enum TargetAddressStorageMode { class TurboAssembler : public TurboAssemblerBase { public: - TurboAssembler(Isolate* isolate, void* buffer, int buffer_size, - CodeObjectRequired create_code_object) - : TurboAssemblerBase(isolate, buffer, buffer_size, create_code_object) {} - - TurboAssembler(IsolateData isolate_data, void* buffer, int buffer_size) - : TurboAssemblerBase(isolate_data, buffer, buffer_size) {} + TurboAssembler(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object) + : TurboAssemblerBase(isolate, options, buffer, buffer_size, + create_code_object) {} // Activation support. void EnterFrame(StackFrame::Type type, @@ -602,7 +600,11 @@ class TurboAssembler : public TurboAssemblerBase { class MacroAssembler : public TurboAssembler { public: MacroAssembler(Isolate* isolate, void* buffer, int size, - CodeObjectRequired create_code_object); + CodeObjectRequired create_code_object) + : MacroAssembler(isolate, Assembler::DefaultOptions(isolate), buffer, + size, create_code_object) {} + MacroAssembler(Isolate* isolate, const Options& options, void* buffer, + int size, CodeObjectRequired create_code_object); void Mls(Register dst, Register src1, Register src2, Register srcA, Condition cond = al); diff --git a/src/arm64/assembler-arm64-inl.h b/src/arm64/assembler-arm64-inl.h index 70925af542..3d49ce0c91 100644 --- a/src/arm64/assembler-arm64-inl.h +++ b/src/arm64/assembler-arm64-inl.h @@ -563,7 +563,7 @@ Address Assembler::runtime_entry_at(Address pc) { return Assembler::target_address_at(pc, 0 /* unused */); } else { DCHECK(instr->IsBranchAndLink() || instr->IsUnconditionalBranch()); - return instr->ImmPCOffset() + isolate_data().code_range_start; + return instr->ImmPCOffset() + options().code_range_start; } } diff --git a/src/arm64/assembler-arm64.cc b/src/arm64/assembler-arm64.cc index 822dc4b90e..7842f7422f 
100644 --- a/src/arm64/assembler-arm64.cc +++ b/src/arm64/assembler-arm64.cc @@ -312,7 +312,7 @@ bool Operand::NeedsRelocation(const Assembler* assembler) const { RelocInfo::Mode rmode = immediate_.rmode(); if (rmode == RelocInfo::EXTERNAL_REFERENCE) { - return assembler->serializer_enabled(); + return assembler->options().record_reloc_info_for_exrefs; } return !RelocInfo::IsNone(rmode); @@ -542,7 +542,7 @@ void ConstPool::EmitEntries() { // Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0. DCHECK(instr->IsLdrLiteral() && instr->ImmLLiteral() == 0); - instr->SetImmPCOffsetTarget(assm_->isolate_data(), assm_->pc()); + instr->SetImmPCOffsetTarget(assm_->options(), assm_->pc()); } assm_->dc64(entry.first); @@ -552,8 +552,8 @@ void ConstPool::EmitEntries() { // Assembler -Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) - : AssemblerBase(isolate_data, buffer, buffer_size), +Assembler::Assembler(const Options& options, void* buffer, int buffer_size) + : AssemblerBase(options, buffer, buffer_size), constpool_(this), unresolved_branches_() { const_pool_blocked_nesting_ = 0; @@ -699,22 +699,22 @@ void Assembler::RemoveBranchFromLabelLinkChain(Instruction* branch, } else if (branch == next_link) { // The branch is the last (but not also the first) instruction in the chain. - prev_link->SetImmPCOffsetTarget(isolate_data(), prev_link); + prev_link->SetImmPCOffsetTarget(options(), prev_link); } else { // The branch is in the middle of the chain. if (prev_link->IsTargetInImmPCOffsetRange(next_link)) { - prev_link->SetImmPCOffsetTarget(isolate_data(), next_link); + prev_link->SetImmPCOffsetTarget(options(), next_link); } else if (label_veneer != nullptr) { // Use the veneer for all previous links in the chain. 
- prev_link->SetImmPCOffsetTarget(isolate_data(), prev_link); + prev_link->SetImmPCOffsetTarget(options(), prev_link); end_of_chain = false; link = next_link; while (!end_of_chain) { next_link = link->ImmPCOffsetTarget(); end_of_chain = (link == next_link); - link->SetImmPCOffsetTarget(isolate_data(), label_veneer); + link->SetImmPCOffsetTarget(options(), label_veneer); link = next_link; } } else { @@ -785,11 +785,10 @@ void Assembler::bind(Label* label) { // Internal references do not get patched to an instruction but directly // to an address. internal_reference_positions_.push_back(linkoffset); - PatchingAssembler patcher(isolate_data(), reinterpret_cast(link), - 2); + PatchingAssembler patcher(options(), reinterpret_cast(link), 2); patcher.dc64(reinterpret_cast(pc_)); } else { - link->SetImmPCOffsetTarget(isolate_data(), + link->SetImmPCOffsetTarget(options(), reinterpret_cast(pc_)); } @@ -4084,9 +4083,7 @@ void Assembler::EmitStringData(const char* string) { void Assembler::debug(const char* message, uint32_t code, Instr params) { #ifdef USE_SIMULATOR - // Don't generate simulator specific code if we are building a snapshot, which - // might be run on real hardware. - if (!serializer_enabled()) { + if (options().enable_simulator_code) { // The arguments to the debug marker need to be contiguous in memory, so // make sure we don't try to emit pools. BlockPoolsScope scope(this); @@ -4788,7 +4785,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data, if (!RelocInfo::IsNone(rmode) && write_reloc_info) { // Don't record external references unless the heap will be serialized. if (rmode == RelocInfo::EXTERNAL_REFERENCE && - !serializer_enabled() && !emit_debug_code()) { + !options().record_reloc_info_for_exrefs && !emit_debug_code()) { return; } DCHECK_GE(buffer_space(), kMaxRelocSize); // too late to grow buffer here @@ -4947,7 +4944,7 @@ void Assembler::EmitVeneers(bool force_emit, bool need_protection, int margin) { // to the label. 
Instruction* veneer = reinterpret_cast(pc_); RemoveBranchFromLabelLinkChain(branch, label, veneer); - branch->SetImmPCOffsetTarget(isolate_data(), veneer); + branch->SetImmPCOffsetTarget(options(), veneer); b(label); #ifdef DEBUG DCHECK(SizeOfCodeGeneratedSince(&veneer_size_check) <= diff --git a/src/arm64/assembler-arm64.h b/src/arm64/assembler-arm64.h index f0bbdc1bfc..7f11864ef2 100644 --- a/src/arm64/assembler-arm64.h +++ b/src/arm64/assembler-arm64.h @@ -899,8 +899,8 @@ class Assembler : public AssemblerBase { // buffer is too small, a fatal error occurs. No deallocation of the buffer is // done upon destruction of the assembler. Assembler(Isolate* isolate, void* buffer, int buffer_size) - : Assembler(IsolateData(isolate), buffer, buffer_size) {} - Assembler(IsolateData isolate_data, void* buffer, int buffer_size); + : Assembler(DefaultOptions(isolate), buffer, buffer_size) {} + Assembler(const Options& options, void* buffer, int buffer_size); virtual ~Assembler(); @@ -3626,8 +3626,8 @@ class PatchingAssembler : public Assembler { // relocation information takes space in the buffer, the PatchingAssembler // will crash trying to grow the buffer. // Note that the instruction cache will not be flushed. - PatchingAssembler(IsolateData isolate_data, byte* start, unsigned count) - : Assembler(isolate_data, start, count * kInstructionSize + kGap) { + PatchingAssembler(const Options& options, byte* start, unsigned count) + : Assembler(options, start, count * kInstructionSize + kGap) { // Block constant pool emission. 
StartBlockPools(); } diff --git a/src/arm64/instructions-arm64.cc b/src/arm64/instructions-arm64.cc index 272948a819..3afaf8afc7 100644 --- a/src/arm64/instructions-arm64.cc +++ b/src/arm64/instructions-arm64.cc @@ -227,21 +227,21 @@ bool Instruction::IsTargetInImmPCOffsetRange(Instruction* target) { return IsValidImmPCOffset(BranchType(), DistanceTo(target)); } -void Instruction::SetImmPCOffsetTarget(Assembler::IsolateData isolate_data, +void Instruction::SetImmPCOffsetTarget(const Assembler::Options& options, Instruction* target) { if (IsPCRelAddressing()) { - SetPCRelImmTarget(isolate_data, target); + SetPCRelImmTarget(options, target); } else if (BranchType() != UnknownBranchType) { SetBranchImmTarget(target); } else if (IsUnresolvedInternalReference()) { - SetUnresolvedInternalReferenceImmTarget(isolate_data, target); + SetUnresolvedInternalReferenceImmTarget(options, target); } else { // Load literal (offset from PC). SetImmLLiteral(target); } } -void Instruction::SetPCRelImmTarget(Assembler::IsolateData isolate_data, +void Instruction::SetPCRelImmTarget(const Assembler::Options& options, Instruction* target) { // ADRP is not supported, so 'this' must point to an ADR instruction. 
DCHECK(IsAdr()); @@ -252,7 +252,7 @@ void Instruction::SetPCRelImmTarget(Assembler::IsolateData isolate_data, imm = Assembler::ImmPCRelAddress(static_cast(target_offset)); SetInstructionBits(Mask(~ImmPCRel_mask) | imm); } else { - PatchingAssembler patcher(isolate_data, reinterpret_cast(this), + PatchingAssembler patcher(options, reinterpret_cast(this), PatchingAssembler::kAdrFarPatchableNInstrs); patcher.PatchAdrFar(target_offset); } @@ -293,7 +293,7 @@ void Instruction::SetBranchImmTarget(Instruction* target) { } void Instruction::SetUnresolvedInternalReferenceImmTarget( - Assembler::IsolateData isolate_data, Instruction* target) { + const Assembler::Options& options, Instruction* target) { DCHECK(IsUnresolvedInternalReference()); DCHECK(IsAligned(DistanceTo(target), kInstructionSize)); DCHECK(is_int32(DistanceTo(target) >> kInstructionSizeLog2)); @@ -302,7 +302,7 @@ void Instruction::SetUnresolvedInternalReferenceImmTarget( uint32_t high16 = unsigned_bitextract_32(31, 16, target_offset); uint32_t low16 = unsigned_bitextract_32(15, 0, target_offset); - PatchingAssembler patcher(isolate_data, reinterpret_cast(this), 2); + PatchingAssembler patcher(options, reinterpret_cast(this), 2); patcher.brk(high16); patcher.brk(low16); } diff --git a/src/arm64/instructions-arm64.h b/src/arm64/instructions-arm64.h index b1c488eb65..3f86b47fd1 100644 --- a/src/arm64/instructions-arm64.h +++ b/src/arm64/instructions-arm64.h @@ -402,10 +402,10 @@ class Instruction { bool IsTargetInImmPCOffsetRange(Instruction* target); // Patch a PC-relative offset to refer to 'target'. 'this' may be a branch or // a PC-relative addressing instruction. 
- void SetImmPCOffsetTarget(AssemblerBase::IsolateData isolate_data, + void SetImmPCOffsetTarget(const AssemblerBase::Options& options, Instruction* target); - void SetUnresolvedInternalReferenceImmTarget(AssemblerBase::IsolateData, - Instruction* target); + void SetUnresolvedInternalReferenceImmTarget( + const AssemblerBase::Options& options, Instruction* target); // Patch a literal load instruction to load from 'source'. void SetImmLLiteral(Instruction* source); @@ -441,7 +441,7 @@ class Instruction { static const int ImmPCRelRangeBitwidth = 21; static bool IsValidPCRelOffset(ptrdiff_t offset) { return is_int21(offset); } - void SetPCRelImmTarget(AssemblerBase::IsolateData isolate_data, + void SetPCRelImmTarget(const AssemblerBase::Options& options, Instruction* target); void SetBranchImmTarget(Instruction* target); }; diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc index d663b6330e..3c93336687 100644 --- a/src/arm64/macro-assembler-arm64.cc +++ b/src/arm64/macro-assembler-arm64.cc @@ -27,10 +27,10 @@ namespace v8 { namespace internal { -MacroAssembler::MacroAssembler(Isolate* isolate, byte* buffer, - unsigned buffer_size, +MacroAssembler::MacroAssembler(Isolate* isolate, const Options& options, + void* buffer, int size, CodeObjectRequired create_code_object) - : TurboAssembler(isolate, buffer, buffer_size, create_code_object) { + : TurboAssembler(isolate, options, buffer, size, create_code_object) { if (create_code_object == CodeObjectRequired::kYes) { // Unlike TurboAssembler, which can be used off the main thread and may not // allocate, macro assembler creates its own copy of the self-reference @@ -349,7 +349,7 @@ void TurboAssembler::Mov(const Register& rd, const Operand& operand, void TurboAssembler::Mov(const Register& rd, ExternalReference reference) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && 
options().isolate_independent_code) { IndirectLoadExternalReference(rd, reference); return; } @@ -1572,7 +1572,7 @@ void TurboAssembler::Move(Register dst, Register src) { Mov(dst, src); } void TurboAssembler::Move(Register dst, Handle value) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(dst, value); return; } @@ -1957,14 +1957,14 @@ void TurboAssembler::Jump(Handle code, RelocInfo::Mode rmode, Condition cond) { DCHECK(RelocInfo::IsCodeTarget(rmode)); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { UseScratchRegisterScope temps(this); Register scratch = temps.AcquireX(); IndirectLoadConstant(scratch, code); Add(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag)); Jump(scratch, cond); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -2033,14 +2033,14 @@ void TurboAssembler::Call(Handle code, RelocInfo::Mode rmode) { #endif #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { UseScratchRegisterScope temps(this); Register scratch = temps.AcquireX(); IndirectLoadConstant(scratch, code); Add(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag)); Call(scratch); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ 
-2109,7 +2109,7 @@ void TurboAssembler::CallForDeoptimization(Address target, int deopt_id, DCHECK(is_uint16(deopt_id)); movz(temp, deopt_id); int64_t offset = static_cast(target) - - static_cast(isolate_data().code_range_start); + static_cast(options().code_range_start); DCHECK_EQ(offset % kInstructionSize, 0); offset = offset / static_cast(kInstructionSize); DCHECK(IsNearCallOffset(offset)); diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h index c8a1e1d6e9..990973154e 100644 --- a/src/arm64/macro-assembler-arm64.h +++ b/src/arm64/macro-assembler-arm64.h @@ -179,12 +179,10 @@ enum PreShiftImmMode { class TurboAssembler : public TurboAssemblerBase { public: - TurboAssembler(Isolate* isolate, void* buffer, int buffer_size, - CodeObjectRequired create_code_object) - : TurboAssemblerBase(isolate, buffer, buffer_size, create_code_object) {} - - TurboAssembler(IsolateData isolate_data, void* buffer, int buffer_size) - : TurboAssemblerBase(isolate_data, buffer, buffer_size) {} + TurboAssembler(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object) + : TurboAssemblerBase(isolate, options, buffer, buffer_size, + create_code_object) {} // The Abort method should call a V8 runtime function, but the CallRuntime // mechanism depends on CEntry. 
If use_real_aborts is false, Abort will @@ -1308,8 +1306,12 @@ class TurboAssembler : public TurboAssemblerBase { class MacroAssembler : public TurboAssembler { public: - MacroAssembler(Isolate* isolate, byte* buffer, unsigned buffer_size, - CodeObjectRequired create_code_object); + MacroAssembler(Isolate* isolate, void* buffer, int size, + CodeObjectRequired create_code_object) + : MacroAssembler(isolate, Assembler::DefaultOptions(isolate), buffer, + size, create_code_object) {} + MacroAssembler(Isolate* isolate, const Options& options, void* buffer, + int size, CodeObjectRequired create_code_object); // Instruction set functions ------------------------------------------------ // Logical macros. diff --git a/src/assembler.cc b/src/assembler.cc index 12078fba0c..609951fd9d 100644 --- a/src/assembler.cc +++ b/src/assembler.cc @@ -53,20 +53,30 @@ const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING"; // ----------------------------------------------------------------------------- // Implementation of AssemblerBase -#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64 -AssemblerBase::IsolateData::IsolateData(Isolate* isolate) - : IsolateData(isolate->serializer_enabled() ? kSerializerEnabled - : kSerializerDisabled, - isolate->heap()->memory_allocator()->code_range()->start()) {} -#else -AssemblerBase::IsolateData::IsolateData(Isolate* isolate) - : IsolateData(isolate->serializer_enabled() ? kSerializerEnabled - : kSerializerDisabled) {} +AssemblerBase::Options AssemblerBase::DefaultOptions( + Isolate* isolate, bool explicitly_support_serialization) { + Options options; + bool serializer = + isolate->serializer_enabled() || explicitly_support_serialization; + options.record_reloc_info_for_exrefs = serializer; + options.enable_root_array_delta_access = !serializer; +#ifdef USE_SIMULATOR + // Don't generate simulator specific code if we are building a snapshot, which + // might be run on real hardware. 
+ options.enable_simulator_code = !serializer; #endif + options.isolate_independent_code = isolate->ShouldLoadConstantsFromRootList(); + options.inline_offheap_trampolines = !serializer; +#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64 + options.code_range_start = + isolate->heap()->memory_allocator()->code_range()->start(); +#endif + return options; +} -AssemblerBase::AssemblerBase(IsolateData isolate_data, void* buffer, +AssemblerBase::AssemblerBase(const Options& options, void* buffer, int buffer_size) - : isolate_data_(isolate_data), + : options_(options), enabled_cpu_features_(0), emit_debug_code_(FLAG_debug_code), predictable_code_size_(false), diff --git a/src/assembler.h b/src/assembler.h index 3e86d85495..07fa64514d 100644 --- a/src/assembler.h +++ b/src/assembler.h @@ -139,38 +139,35 @@ enum class CodeObjectRequired { kNo, kYes }; class AssemblerBase : public Malloced { public: - enum SerializerEnabled : bool { - kSerializerEnabled = true, - kSerializerDisabled = false - }; - struct IsolateData { - explicit IsolateData(Isolate* isolate); - -#if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_X64 - constexpr IsolateData(SerializerEnabled serializer_enabled, - Address code_range_start) - : serializer_enabled(serializer_enabled), - code_range_start(code_range_start) {} -#else - explicit constexpr IsolateData(SerializerEnabled serializer_enabled) - : serializer_enabled(serializer_enabled) {} -#endif - - SerializerEnabled serializer_enabled; -#if V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_X64 - Address code_range_start; -#endif + struct Options { + // Recording reloc info for external references is needed whenever + // code is serialized, e.g. into the snapshot or as a WASM module. + // It can be disabled for code that will not survive process destruction. + bool record_reloc_info_for_exrefs = true; + // Enables access to exrefs by computing a delta from the root array. + // Only valid if code will not survive the process. 
+ bool enable_root_array_delta_access = false; + // Enables specific assembler sequences only used for the simulator. + bool enable_simulator_code = false; + // Enables use of isolate-independent constants, indirected through the + // root array. + // (macro assembler feature). + bool isolate_independent_code = false; + // Enables the use of isolate-independent builtins through an off-heap + // trampoline. (macro assembler feature). + bool inline_offheap_trampolines = false; + // On some platforms, all code is within a given range in the process, + // and the start of this range is configured here. + Address code_range_start = 0; }; - AssemblerBase(IsolateData isolate_data, void* buffer, int buffer_size); + static Options DefaultOptions(Isolate* isolate, + bool explicitly_support_serialization = false); + + AssemblerBase(const Options& options, void* buffer, int buffer_size); virtual ~AssemblerBase(); - IsolateData isolate_data() const { return isolate_data_; } - - bool serializer_enabled() const { return isolate_data_.serializer_enabled; } - void enable_serializer() { - isolate_data_.serializer_enabled = kSerializerEnabled; - } + const Options& options() const { return options_; } bool emit_debug_code() const { return emit_debug_code_; } void set_emit_debug_code(bool value) { emit_debug_code_ = value; } @@ -258,7 +255,7 @@ class AssemblerBase : public Malloced { void RequestHeapObject(HeapObjectRequest request); private: - IsolateData isolate_data_; + const Options options_; uint64_t enabled_cpu_features_; bool emit_debug_code_; bool predictable_code_size_; diff --git a/src/code-stubs.cc b/src/code-stubs.cc index 528a480e28..8f82f43da3 100644 --- a/src/code-stubs.cc +++ b/src/code-stubs.cc @@ -106,15 +106,16 @@ Handle PlatformCodeStub::GenerateCode() { Factory* factory = isolate()->factory(); // Generate the new code. - MacroAssembler masm(isolate(), nullptr, 256, CodeObjectRequired::kYes); + // TODO(yangguo): remove this once we can serialize IC stubs. 
+ Assembler::Options options = Assembler::DefaultOptions(isolate(), true); + MacroAssembler masm(isolate(), options, nullptr, 256, + CodeObjectRequired::kYes); { // Update the static counter each time a new code stub is generated. isolate()->counters()->code_stubs()->Increment(); // Generate the code for the stub. - // TODO(yangguo): remove this once we can serialize IC stubs. - masm.enable_serializer(); NoCurrentFrameScope scope(&masm); Generate(&masm); } diff --git a/src/compiler/code-generator.cc b/src/compiler/code-generator.cc index fa1025599c..cc787107f7 100644 --- a/src/compiler/code-generator.cc +++ b/src/compiler/code-generator.cc @@ -38,6 +38,15 @@ class CodeGenerator::JumpTable final : public ZoneObject { size_t const target_count_; }; +Assembler::Options AssemblerOptions(Isolate* isolate, Code::Kind kind) { + Assembler::Options options = Assembler::DefaultOptions(isolate); + if (kind == Code::JS_TO_WASM_FUNCTION || kind == Code::WASM_FUNCTION) { + options.record_reloc_info_for_exrefs = true; + options.enable_root_array_delta_access = false; + } + return options; +} + CodeGenerator::CodeGenerator(Zone* codegen_zone, Frame* frame, Linkage* linkage, InstructionSequence* code, OptimizedCompilationInfo* info, Isolate* isolate, @@ -57,7 +66,8 @@ CodeGenerator::CodeGenerator(Zone* codegen_zone, Frame* frame, Linkage* linkage, current_block_(RpoNumber::Invalid()), start_source_position_(start_source_position), current_source_position_(SourcePosition::Unknown()), - tasm_(isolate, nullptr, 0, CodeObjectRequired::kNo), + tasm_(isolate, AssemblerOptions(isolate, info->code_kind()), nullptr, 0, + CodeObjectRequired::kNo), resolver_(this), safepoints_(zone()), handlers_(zone()), @@ -88,10 +98,6 @@ CodeGenerator::CodeGenerator(Zone* codegen_zone, Frame* frame, Linkage* linkage, CHECK_EQ(info->is_osr(), osr_helper_.has_value()); tasm_.set_jump_optimization_info(jump_opt); Code::Kind code_kind = info_->code_kind(); - if (code_kind == Code::JS_TO_WASM_FUNCTION || - 
code_kind == Code::WASM_FUNCTION) { - tasm_.enable_serializer(); - } if (code_kind == Code::WASM_FUNCTION || code_kind == Code::WASM_TO_JS_FUNCTION || code_kind == Code::WASM_INTERPRETER_ENTRY) { diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc index a06b46f6a2..6881dd720b 100644 --- a/src/ia32/assembler-ia32.cc +++ b/src/ia32/assembler-ia32.cc @@ -323,8 +323,8 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) { #define EMIT(x) \ *pc_++ = (x) -Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) - : AssemblerBase(isolate_data, buffer, buffer_size) { +Assembler::Assembler(const Options& options, void* buffer, int buffer_size) + : AssemblerBase(options, buffer, buffer_size) { // Clear the buffer in debug mode unless it was provided by the // caller in which case we can't be sure it's okay to overwrite // existing code in it. @@ -3330,7 +3330,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { DCHECK(!RelocInfo::IsNone(rmode)); // Don't record external references unless the heap will be serialized. if (rmode == RelocInfo::EXTERNAL_REFERENCE && - !serializer_enabled() && !emit_debug_code()) { + !options().record_reloc_info_for_exrefs && !emit_debug_code()) { return; } RelocInfo rinfo(reinterpret_cast
(pc_), rmode, data, nullptr); diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h index 40cadbcda6..f282825125 100644 --- a/src/ia32/assembler-ia32.h +++ b/src/ia32/assembler-ia32.h @@ -501,8 +501,8 @@ class Assembler : public AssemblerBase { // buffer is too small, a fatal error occurs. No deallocation of the buffer is // done upon destruction of the assembler. Assembler(Isolate* isolate, void* buffer, int buffer_size) - : Assembler(IsolateData(isolate), buffer, buffer_size) {} - Assembler(IsolateData isolate_data, void* buffer, int buffer_size); + : Assembler(DefaultOptions(isolate), buffer, buffer_size) {} + Assembler(const Options& options, void* buffer, int buffer_size); virtual ~Assembler() {} // GetCode emits any pending (non-emitted) code and fills the descriptor diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc index 7ca1194c8d..ae27e5c27b 100644 --- a/src/ia32/macro-assembler-ia32.cc +++ b/src/ia32/macro-assembler-ia32.cc @@ -27,9 +27,10 @@ namespace internal { // ------------------------------------------------------------------------- // MacroAssembler implementation. 
-MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size, +MacroAssembler::MacroAssembler(Isolate* isolate, const Options& options, + void* buffer, int size, CodeObjectRequired create_code_object) - : TurboAssembler(isolate, buffer, size, create_code_object) { + : TurboAssembler(isolate, options, buffer, size, create_code_object) { if (create_code_object == CodeObjectRequired::kYes) { // Unlike TurboAssembler, which can be used off the main thread and may not // allocate, macro assembler creates its own copy of the self-reference diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h index 9d948d3f4f..ae5d4007f5 100644 --- a/src/ia32/macro-assembler-ia32.h +++ b/src/ia32/macro-assembler-ia32.h @@ -56,12 +56,10 @@ bool AreAliased(Register reg1, Register reg2, Register reg3 = no_reg, class TurboAssembler : public TurboAssemblerBase { public: - TurboAssembler(Isolate* isolate, void* buffer, int buffer_size, - CodeObjectRequired create_code_object) - : TurboAssemblerBase(isolate, buffer, buffer_size, create_code_object) {} - - TurboAssembler(IsolateData isolate_data, void* buffer, int buffer_size) - : TurboAssemblerBase(isolate_data, buffer, buffer_size) {} + TurboAssembler(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object) + : TurboAssemblerBase(isolate, options, buffer, buffer_size, + create_code_object) {} void CheckPageFlag(Register object, Register scratch, int mask, Condition cc, Label* condition_met, @@ -385,7 +383,11 @@ class TurboAssembler : public TurboAssemblerBase { class MacroAssembler : public TurboAssembler { public: MacroAssembler(Isolate* isolate, void* buffer, int size, - CodeObjectRequired create_code_object); + CodeObjectRequired create_code_object) + : MacroAssembler(isolate, Assembler::DefaultOptions(isolate), buffer, + size, create_code_object) {} + MacroAssembler(Isolate* isolate, const Options& options, void* buffer, + int size, 
CodeObjectRequired create_code_object); // Load a register with a long value as efficiently as possible. void Set(Register dst, int32_t x) { diff --git a/src/mips/assembler-mips.cc b/src/mips/assembler-mips.cc index efffe80dc3..ff9a07a350 100644 --- a/src/mips/assembler-mips.cc +++ b/src/mips/assembler-mips.cc @@ -312,8 +312,8 @@ const Instr kLwSwInstrTypeMask = 0xFFE00000; const Instr kLwSwInstrArgumentMask = ~kLwSwInstrTypeMask; const Instr kLwSwOffsetMask = kImm16Mask; -Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) - : AssemblerBase(isolate_data, buffer, buffer_size), +Assembler::Assembler(const Options& options, void* buffer, int buffer_size) + : AssemblerBase(options, buffer, buffer_size), scratch_register_list_(at.bit()) { reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); @@ -3740,9 +3740,8 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { // We do not try to reuse pool constants. RelocInfo rinfo(reinterpret_cast
(pc_), rmode, data, nullptr); if (!RelocInfo::IsNone(rinfo.rmode())) { - // Don't record external references unless the heap will be serialized. if (rmode == RelocInfo::EXTERNAL_REFERENCE && - !serializer_enabled() && !emit_debug_code()) { + !options().record_reloc_info_for_exrefs && !emit_debug_code()) { return; } DCHECK_GE(buffer_space(), kMaxRelocSize); // Too late to grow buffer here. diff --git a/src/mips/assembler-mips.h b/src/mips/assembler-mips.h index 6f73e497c0..278404986c 100644 --- a/src/mips/assembler-mips.h +++ b/src/mips/assembler-mips.h @@ -492,8 +492,8 @@ class Assembler : public AssemblerBase { // buffer is too small, a fatal error occurs. No deallocation of the buffer is // done upon destruction of the assembler. Assembler(Isolate* isolate, void* buffer, int buffer_size) - : Assembler(IsolateData(isolate), buffer, buffer_size) {} - Assembler(IsolateData isolate_data, void* buffer, int buffer_size); + : Assembler(DefaultOptions(isolate), buffer, buffer_size) {} + Assembler(const Options& options, void* buffer, int buffer_size); virtual ~Assembler() { } // GetCode emits any pending (non-emitted) code and fills the descriptor diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc index b76d1f6940..055620449d 100644 --- a/src/mips/macro-assembler-mips.cc +++ b/src/mips/macro-assembler-mips.cc @@ -25,9 +25,10 @@ namespace v8 { namespace internal { -MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size, +MacroAssembler::MacroAssembler(Isolate* isolate, const Options& options, + void* buffer, int size, CodeObjectRequired create_code_object) - : TurboAssembler(isolate, buffer, size, create_code_object) { + : TurboAssembler(isolate, options, buffer, size, create_code_object) { if (create_code_object == CodeObjectRequired::kYes) { // Unlike TurboAssembler, which can be used off the main thread and may not // allocate, macro assembler creates its own copy of the self-reference @@ -1322,7 +1323,7 @@ void 
TurboAssembler::Sc(Register rd, const MemOperand& rs) { void TurboAssembler::li(Register dst, Handle value, LiFlags mode) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(dst, value); return; } @@ -1332,7 +1333,7 @@ void TurboAssembler::li(Register dst, Handle value, LiFlags mode) { void TurboAssembler::li(Register dst, ExternalReference value, LiFlags mode) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadExternalReference(dst, value); return; } @@ -3767,11 +3768,11 @@ void TurboAssembler::Jump(Handle code, RelocInfo::Mode rmode, BranchDelaySlot bd) { DCHECK(RelocInfo::IsCodeTarget(rmode)); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(t9, code); Jump(t9, Code::kHeaderSize - kHeapObjectTag, cond, rs, rt, bd); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -3940,11 +3941,11 @@ void TurboAssembler::Call(Handle code, RelocInfo::Mode rmode, BranchDelaySlot bd) { BlockTrampolinePoolScope block_trampoline_pool(this); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(t9, code); Call(t9, Code::kHeaderSize - kHeapObjectTag, cond, rs, rt, bd); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = 
Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h index a94fd91edd..1b7ea06248 100644 --- a/src/mips/macro-assembler-mips.h +++ b/src/mips/macro-assembler-mips.h @@ -133,12 +133,10 @@ inline MemOperand CFunctionArgumentOperand(int index) { class TurboAssembler : public TurboAssemblerBase { public: - TurboAssembler(Isolate* isolate, void* buffer, int buffer_size, - CodeObjectRequired create_code_object) - : TurboAssemblerBase(isolate, buffer, buffer_size, create_code_object) {} - - TurboAssembler(IsolateData isolate_data, void* buffer, int buffer_size) - : TurboAssemblerBase(isolate_data, buffer, buffer_size) {} + TurboAssembler(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object) + : TurboAssemblerBase(isolate, options, buffer, buffer_size, + create_code_object) {} // Activation support. void EnterFrame(StackFrame::Type type); @@ -926,7 +924,11 @@ class TurboAssembler : public TurboAssemblerBase { class MacroAssembler : public TurboAssembler { public: MacroAssembler(Isolate* isolate, void* buffer, int size, - CodeObjectRequired create_code_object); + CodeObjectRequired create_code_object) + : MacroAssembler(isolate, Assembler::DefaultOptions(isolate), buffer, + size, create_code_object) {} + MacroAssembler(Isolate* isolate, const Options& options, void* buffer, + int size, CodeObjectRequired create_code_object); // Swap two registers. If the scratch register is omitted then a slightly // less efficient form using xor instead of mov is emitted. 
diff --git a/src/mips64/assembler-mips64.cc b/src/mips64/assembler-mips64.cc index 7a61989059..29b02ef106 100644 --- a/src/mips64/assembler-mips64.cc +++ b/src/mips64/assembler-mips64.cc @@ -290,8 +290,8 @@ const Instr kLwSwInstrTypeMask = 0xFFE00000; const Instr kLwSwInstrArgumentMask = ~kLwSwInstrTypeMask; const Instr kLwSwOffsetMask = kImm16Mask; -Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) - : AssemblerBase(isolate_data, buffer, buffer_size), +Assembler::Assembler(const Options& options, void* buffer, int buffer_size) + : AssemblerBase(options, buffer, buffer_size), scratch_register_list_(at.bit()) { reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); @@ -4093,7 +4093,7 @@ void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { if (!RelocInfo::IsNone(rinfo.rmode())) { // Don't record external references unless the heap will be serialized. if (rmode == RelocInfo::EXTERNAL_REFERENCE && - !serializer_enabled() && !emit_debug_code()) { + !options().record_reloc_info_for_exrefs && !emit_debug_code()) { return; } DCHECK_GE(buffer_space(), kMaxRelocSize); // Too late to grow buffer here. diff --git a/src/mips64/assembler-mips64.h b/src/mips64/assembler-mips64.h index cf8526cf09..50b10fa7b3 100644 --- a/src/mips64/assembler-mips64.h +++ b/src/mips64/assembler-mips64.h @@ -499,8 +499,8 @@ class Assembler : public AssemblerBase { // buffer is too small, a fatal error occurs. No deallocation of the buffer is // done upon destruction of the assembler. 
Assembler(Isolate* isolate, void* buffer, int buffer_size) - : Assembler(IsolateData(isolate), buffer, buffer_size) {} - Assembler(IsolateData isolate_data, void* buffer, int buffer_size); + : Assembler(DefaultOptions(isolate), buffer, buffer_size) {} + Assembler(const Options& options, void* buffer, int buffer_size); virtual ~Assembler() { } // GetCode emits any pending (non-emitted) code and fills the descriptor diff --git a/src/mips64/macro-assembler-mips64.cc b/src/mips64/macro-assembler-mips64.cc index 35a96a673d..dce55c6d9c 100644 --- a/src/mips64/macro-assembler-mips64.cc +++ b/src/mips64/macro-assembler-mips64.cc @@ -25,9 +25,10 @@ namespace v8 { namespace internal { -MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size, +MacroAssembler::MacroAssembler(Isolate* isolate, const Options& options, + void* buffer, int size, CodeObjectRequired create_code_object) - : TurboAssembler(isolate, buffer, size, create_code_object) { + : TurboAssembler(isolate, options, buffer, size, create_code_object) { if (create_code_object == CodeObjectRequired::kYes) { // Unlike TurboAssembler, which can be used off the main thread and may not // allocate, macro assembler creates its own copy of the self-reference @@ -1561,7 +1562,7 @@ void TurboAssembler::Scd(Register rd, const MemOperand& rs) { void TurboAssembler::li(Register dst, Handle value, LiFlags mode) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(dst, value); return; } @@ -1571,7 +1572,7 @@ void TurboAssembler::li(Register dst, Handle value, LiFlags mode) { void TurboAssembler::li(Register dst, ExternalReference value, LiFlags mode) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadExternalReference(dst, value); 
return; } @@ -4197,12 +4198,12 @@ void TurboAssembler::Jump(Handle code, RelocInfo::Mode rmode, BranchDelaySlot bd) { DCHECK(RelocInfo::IsCodeTarget(rmode)); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(t9, code); Daddu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag)); Jump(t9, cond, rs, rt, bd); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -4303,12 +4304,12 @@ void TurboAssembler::Call(Handle code, RelocInfo::Mode rmode, BranchDelaySlot bd) { BlockTrampolinePoolScope block_trampoline_pool(this); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(t9, code); Daddu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag)); Call(t9, cond, rs, rt, bd); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { diff --git a/src/mips64/macro-assembler-mips64.h b/src/mips64/macro-assembler-mips64.h index 81ac441367..96e2451fbc 100644 --- a/src/mips64/macro-assembler-mips64.h +++ b/src/mips64/macro-assembler-mips64.h @@ -150,12 +150,10 @@ inline MemOperand CFunctionArgumentOperand(int index) { class TurboAssembler : public TurboAssemblerBase { public: - TurboAssembler(Isolate* isolate, void* buffer, int buffer_size, - CodeObjectRequired create_code_object) - : TurboAssemblerBase(isolate, buffer, buffer_size, create_code_object) {} - - 
TurboAssembler(IsolateData isolate_data, void* buffer, int buffer_size) - : TurboAssemblerBase(isolate_data, buffer, buffer_size) {} + TurboAssembler(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object) + : TurboAssemblerBase(isolate, options, buffer, buffer_size, + create_code_object) {} // Activation support. void EnterFrame(StackFrame::Type type); @@ -939,7 +937,11 @@ class TurboAssembler : public TurboAssemblerBase { class MacroAssembler : public TurboAssembler { public: MacroAssembler(Isolate* isolate, void* buffer, int size, - CodeObjectRequired create_code_object); + CodeObjectRequired create_code_object) + : MacroAssembler(isolate, Assembler::DefaultOptions(isolate), buffer, + size, create_code_object) {} + MacroAssembler(Isolate* isolate, const Options& options, void* buffer, + int size, CodeObjectRequired create_code_object); bool IsNear(Label* L, Condition cond, int rs_reg); diff --git a/src/ppc/assembler-ppc.cc b/src/ppc/assembler-ppc.cc index 42e05bcbe0..7dfda2d43c 100644 --- a/src/ppc/assembler-ppc.cc +++ b/src/ppc/assembler-ppc.cc @@ -241,8 +241,8 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) { // ----------------------------------------------------------------------------- // Specific instructions, constants, and masks. -Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) - : AssemblerBase(isolate_data, buffer, buffer_size), +Assembler::Assembler(const Options& options, void* buffer, int buffer_size) + : AssemblerBase(options, buffer, buffer_size), constant_pool_builder_(kLoadPtrMaxReachBits, kLoadDoubleMaxReachBits) { reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); @@ -498,7 +498,7 @@ void Assembler::target_at_put(int pos, int target_pos, bool* is_branch) { // pointer in a register. 
Register dst = Register::from_code(instr_at(pos + kInstrSize)); int32_t offset = target_pos + (Code::kHeaderSize - kHeapObjectTag); - PatchingAssembler patcher(isolate_data(), + PatchingAssembler patcher(options(), reinterpret_cast(buffer_ + pos), 2); patcher.bitwise_mov32(dst, offset); break; @@ -514,7 +514,7 @@ void Assembler::target_at_put(int pos, int target_pos, bool* is_branch) { : (SIGN_EXT_IMM22(operands & kImm22Mask)); int32_t offset = target_pos + delta; PatchingAssembler patcher( - isolate_data(), reinterpret_cast(buffer_ + pos), + options(), reinterpret_cast(buffer_ + pos), 2 + static_cast(opcode == kUnboundAddLabelLongOffsetOpcode)); patcher.bitwise_add32(dst, base, offset); if (opcode == kUnboundAddLabelLongOffsetOpcode) patcher.nop(); @@ -523,7 +523,7 @@ void Assembler::target_at_put(int pos, int target_pos, bool* is_branch) { case kUnboundMovLabelAddrOpcode: { // Load the address of the label in a register. Register dst = Register::from_code(instr_at(pos + kInstrSize)); - PatchingAssembler patcher(isolate_data(), + PatchingAssembler patcher(options(), reinterpret_cast(buffer_ + pos), kMovInstructionsNoConstantPool); // Keep internal references relative until EmitRelocations. @@ -531,7 +531,7 @@ void Assembler::target_at_put(int pos, int target_pos, bool* is_branch) { break; } case kUnboundJumpTableEntryOpcode: { - PatchingAssembler patcher(isolate_data(), + PatchingAssembler patcher(options(), reinterpret_cast(buffer_ + pos), kPointerSize / kInstrSize); // Keep internal references relative until EmitRelocations. 
@@ -1299,7 +1299,7 @@ void Assembler::EnsureSpaceFor(int space_needed) { bool Operand::must_output_reloc_info(const Assembler* assembler) const { if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { if (assembler != nullptr && assembler->predictable_code_size()) return true; - return assembler->serializer_enabled(); + return assembler->options().record_reloc_info_for_exrefs; } else if (RelocInfo::IsNone(rmode_)) { return false; } @@ -2071,8 +2071,8 @@ void Assembler::dp(uintptr_t data) { void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { if (RelocInfo::IsNone(rmode) || // Don't record external references unless the heap will be serialized. - (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() && - !emit_debug_code())) { + (rmode == RelocInfo::EXTERNAL_REFERENCE && + !options().record_reloc_info_for_exrefs && !emit_debug_code())) { return; } DeferredRelocInfo rinfo(pc_offset(), rmode, data); @@ -2142,9 +2142,9 @@ void Assembler::CheckTrampolinePool() { } } -PatchingAssembler::PatchingAssembler(IsolateData isolate_data, byte* address, +PatchingAssembler::PatchingAssembler(const Options& options, byte* address, int instructions) - : Assembler(isolate_data, address, instructions * kInstrSize + kGap) { + : Assembler(options, address, instructions * kInstrSize + kGap) { DCHECK_EQ(reloc_info_writer.pos(), buffer_ + buffer_size_); } diff --git a/src/ppc/assembler-ppc.h b/src/ppc/assembler-ppc.h index aa68532e02..1b70888ac6 100644 --- a/src/ppc/assembler-ppc.h +++ b/src/ppc/assembler-ppc.h @@ -505,8 +505,8 @@ class Assembler : public AssemblerBase { // buffer is too small, a fatal error occurs. No deallocation of the buffer is // done upon destruction of the assembler. 
Assembler(Isolate* isolate, void* buffer, int buffer_size) - : Assembler(IsolateData(isolate), buffer, buffer_size) {} - Assembler(IsolateData isolate_data, void* buffer, int buffer_size); + : Assembler(DefaultOptions(isolate), buffer, buffer_size) {} + Assembler(const Options& options, void* buffer, int buffer_size); virtual ~Assembler() {} // GetCode emits any pending (non-emitted) code and fills the descriptor @@ -1455,10 +1455,10 @@ class Assembler : public AssemblerBase { void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0); ConstantPoolEntry::Access ConstantPoolAddEntry(RelocInfo::Mode rmode, intptr_t value) { + // TODO(ppc): does this condition depend on serialization somehow? bool sharing_ok = - RelocInfo::IsNone(rmode) || - (!serializer_enabled() && RelocInfo::IsShareableRelocMode(rmode) && - !is_constant_pool_entry_sharing_blocked()); + RelocInfo::IsNone(rmode) || (RelocInfo::IsShareableRelocMode(rmode) && + !is_constant_pool_entry_sharing_blocked()); return constant_pool_builder_.AddEntry(pc_offset(), value, sharing_ok); } ConstantPoolEntry::Access ConstantPoolAddEntry(Double value) { @@ -1647,7 +1647,7 @@ class EnsureSpace BASE_EMBEDDED { class PatchingAssembler : public Assembler { public: - PatchingAssembler(IsolateData isolate_data, byte* address, int instructions); + PatchingAssembler(const Options& options, byte* address, int instructions); ~PatchingAssembler(); }; diff --git a/src/ppc/macro-assembler-ppc.cc b/src/ppc/macro-assembler-ppc.cc index ecff702cdd..afce99c757 100644 --- a/src/ppc/macro-assembler-ppc.cc +++ b/src/ppc/macro-assembler-ppc.cc @@ -26,9 +26,10 @@ namespace v8 { namespace internal { -MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size, +MacroAssembler::MacroAssembler(Isolate* isolate, const Options& options, + void* buffer, int size, CodeObjectRequired create_code_object) - : TurboAssembler(isolate, buffer, size, create_code_object) { + : TurboAssembler(isolate, options, buffer, size, 
create_code_object) { if (create_code_object == CodeObjectRequired::kYes) { // Unlike TurboAssembler, which can be used off the main thread and may not // allocate, macro assembler creates its own copy of the self-reference @@ -198,7 +199,7 @@ void TurboAssembler::Jump(Handle code, RelocInfo::Mode rmode, DCHECK(RelocInfo::IsCodeTarget(rmode)); // 'code' is always generated ppc code, never THUMB code #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { Register scratch = ip; IndirectLoadConstant(scratch, code); addi(scratch, scratch, Operand(Code::kHeaderSize - kHeapObjectTag)); @@ -207,7 +208,7 @@ void TurboAssembler::Jump(Handle code, RelocInfo::Mode rmode, Jump(scratch); bind(&skip); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -298,7 +299,7 @@ void TurboAssembler::Call(Handle code, RelocInfo::Mode rmode, DCHECK(RelocInfo::IsCodeTarget(rmode)); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { // Use ip directly instead of using UseScratchRegisterScope, as we do not // preserve scratch registers across calls. 
IndirectLoadConstant(ip, code); @@ -308,7 +309,7 @@ void TurboAssembler::Call(Handle code, RelocInfo::Mode rmode, Call(ip); bind(&skip); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -358,7 +359,7 @@ void TurboAssembler::Push(Smi* smi) { void TurboAssembler::Move(Register dst, Handle value) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(dst, value); return; } @@ -368,7 +369,7 @@ void TurboAssembler::Move(Register dst, Handle value) { void TurboAssembler::Move(Register dst, ExternalReference reference) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadExternalReference(dst, reference); return; } diff --git a/src/ppc/macro-assembler-ppc.h b/src/ppc/macro-assembler-ppc.h index 405190ffa4..91dd051c96 100644 --- a/src/ppc/macro-assembler-ppc.h +++ b/src/ppc/macro-assembler-ppc.h @@ -117,11 +117,10 @@ bool AreAliased(DoubleRegister reg1, DoubleRegister reg2, class TurboAssembler : public TurboAssemblerBase { public: - TurboAssembler(Isolate* isolate, void* buffer, int buffer_size, - CodeObjectRequired create_code_object) - : TurboAssemblerBase(isolate, buffer, buffer_size, create_code_object) {} - TurboAssembler(IsolateData isolate_data, void* buffer, int buffer_size) - : TurboAssemblerBase(isolate_data, buffer, buffer_size) {} + TurboAssembler(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object) + : TurboAssemblerBase(isolate, options, buffer, buffer_size, + create_code_object) {} // Converts the 
integer (untagged smi) in |src| to a double, storing // the result to |dst| @@ -702,7 +701,11 @@ class TurboAssembler : public TurboAssemblerBase { class MacroAssembler : public TurboAssembler { public: MacroAssembler(Isolate* isolate, void* buffer, int size, - CodeObjectRequired create_code_object); + CodeObjectRequired create_code_object) + : MacroAssembler(isolate, Assembler::DefaultOptions(isolate), buffer, + size, create_code_object) {} + MacroAssembler(Isolate* isolate, const Options& options, void* buffer, + int size, CodeObjectRequired create_code_object); // --------------------------------------------------------------------------- // GC Support diff --git a/src/s390/assembler-s390.cc b/src/s390/assembler-s390.cc index bd94709f76..366a1ddd6b 100644 --- a/src/s390/assembler-s390.cc +++ b/src/s390/assembler-s390.cc @@ -347,8 +347,8 @@ void Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) { // ----------------------------------------------------------------------------- // Specific instructions, constants, and masks. -Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) - : AssemblerBase(isolate_data, buffer, buffer_size) { +Assembler::Assembler(const Options& options, void* buffer, int buffer_size) + : AssemblerBase(options, buffer, buffer_size) { reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); code_targets_.reserve(100); @@ -792,8 +792,8 @@ void Assembler::dp(uintptr_t data) { void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { if (RelocInfo::IsNone(rmode) || // Don't record external references unless the heap will be serialized. 
- (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() && - !emit_debug_code())) { + (rmode == RelocInfo::EXTERNAL_REFERENCE && + !options().record_reloc_info_for_exrefs && !emit_debug_code())) { return; } DeferredRelocInfo rinfo(pc_offset(), rmode, data); diff --git a/src/s390/assembler-s390.h b/src/s390/assembler-s390.h index 132870997b..e29fb25945 100644 --- a/src/s390/assembler-s390.h +++ b/src/s390/assembler-s390.h @@ -489,8 +489,8 @@ class Assembler : public AssemblerBase { // buffer is too small, a fatal error occurs. No deallocation of the buffer is // done upon destruction of the assembler. Assembler(Isolate* isolate, void* buffer, int buffer_size) - : Assembler(IsolateData(isolate), buffer, buffer_size) {} - Assembler(IsolateData isolate_data, void* buffer, int buffer_size); + : Assembler(DefaultOptions(isolate), buffer, buffer_size) {} + Assembler(const Options& options, void* buffer, int buffer_size); virtual ~Assembler() {} // GetCode emits any pending (non-emitted) code and fills the descriptor diff --git a/src/s390/macro-assembler-s390.cc b/src/s390/macro-assembler-s390.cc index 0a429fc2c3..fd0cfbf20d 100644 --- a/src/s390/macro-assembler-s390.cc +++ b/src/s390/macro-assembler-s390.cc @@ -26,9 +26,10 @@ namespace v8 { namespace internal { -MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size, +MacroAssembler::MacroAssembler(Isolate* isolate, const Options& options, + void* buffer, int size, CodeObjectRequired create_code_object) - : TurboAssembler(isolate, buffer, size, create_code_object) { + : TurboAssembler(isolate, options, buffer, size, create_code_object) { if (create_code_object == CodeObjectRequired::kYes) { // Unlike TurboAssembler, which can be used off the main thread and may not // allocate, macro assembler creates its own copy of the self-reference @@ -196,13 +197,13 @@ void TurboAssembler::Jump(Handle code, RelocInfo::Mode rmode, Condition cond) { DCHECK(RelocInfo::IsCodeTarget(rmode)); #ifdef 
V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { Register scratch = r1; IndirectLoadConstant(scratch, code); la(scratch, MemOperand(scratch, Code::kHeaderSize - kHeapObjectTag)); b(cond, scratch); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -293,14 +294,14 @@ void TurboAssembler::Call(Handle code, RelocInfo::Mode rmode, DCHECK(RelocInfo::IsCodeTarget(rmode) && cond == al); #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { // Use ip directly instead of using UseScratchRegisterScope, as we do not // preserve scratch registers across calls. 
IndirectLoadConstant(ip, code); la(ip, MemOperand(ip, Code::kHeaderSize - kHeapObjectTag)); Call(ip); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -354,7 +355,7 @@ void TurboAssembler::Push(Smi* smi) { void TurboAssembler::Move(Register dst, Handle value) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(dst, value); return; } @@ -364,7 +365,7 @@ void TurboAssembler::Move(Register dst, Handle value) { void TurboAssembler::Move(Register dst, ExternalReference reference) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadExternalReference(dst, reference); return; } diff --git a/src/s390/macro-assembler-s390.h b/src/s390/macro-assembler-s390.h index 6d12b9149c..715fd0e7ff 100644 --- a/src/s390/macro-assembler-s390.h +++ b/src/s390/macro-assembler-s390.h @@ -174,11 +174,10 @@ bool AreAliased(DoubleRegister reg1, DoubleRegister reg2, class TurboAssembler : public TurboAssemblerBase { public: - TurboAssembler(Isolate* isolate, void* buffer, int buffer_size, - CodeObjectRequired create_code_object) - : TurboAssemblerBase(isolate, buffer, buffer_size, create_code_object) {} - TurboAssembler(IsolateData isolate_data, void* buffer, int buffer_size) - : TurboAssemblerBase(isolate_data, buffer, buffer_size) {} + TurboAssembler(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object) + : TurboAssemblerBase(isolate, options, buffer, buffer_size, + create_code_object) {} #ifdef V8_EMBEDDED_BUILTINS void 
LoadFromConstantsTable(Register destination, @@ -1064,7 +1063,11 @@ class TurboAssembler : public TurboAssemblerBase { class MacroAssembler : public TurboAssembler { public: MacroAssembler(Isolate* isolate, void* buffer, int size, - CodeObjectRequired create_code_object); + CodeObjectRequired create_code_object) + : MacroAssembler(isolate, Assembler::DefaultOptions(isolate), buffer, + size, create_code_object) {} + MacroAssembler(Isolate* isolate, const Options& options, void* buffer, + int size, CodeObjectRequired create_code_object); // Call a code stub. void TailCallStub(CodeStub* stub, Condition cond = al); diff --git a/src/turbo-assembler.cc b/src/turbo-assembler.cc index 2edc43d8cf..e04bfc0264 100644 --- a/src/turbo-assembler.cc +++ b/src/turbo-assembler.cc @@ -12,20 +12,16 @@ namespace v8 { namespace internal { -TurboAssemblerBase::TurboAssemblerBase(Isolate* isolate, void* buffer, - int buffer_size, +TurboAssemblerBase::TurboAssemblerBase(Isolate* isolate, const Options& options, + void* buffer, int buffer_size, CodeObjectRequired create_code_object) - : Assembler(isolate, buffer, buffer_size), isolate_(isolate) { + : Assembler(options, buffer, buffer_size), isolate_(isolate) { if (create_code_object == CodeObjectRequired::kYes) { code_object_ = Handle::New( isolate->heap()->self_reference_marker(), isolate); } } -TurboAssemblerBase::TurboAssemblerBase(IsolateData isolate_data, void* buffer, - int buffer_size) - : Assembler(isolate_data, buffer, buffer_size) {} - #ifdef V8_EMBEDDED_BUILTINS void TurboAssemblerBase::IndirectLoadConstant(Register destination, Handle object) { diff --git a/src/turbo-assembler.h b/src/turbo-assembler.h index bd8e1a72a0..e550a82919 100644 --- a/src/turbo-assembler.h +++ b/src/turbo-assembler.h @@ -52,9 +52,8 @@ class TurboAssemblerBase : public Assembler { virtual void LoadRoot(Register destination, Heap::RootListIndex index) = 0; protected: - TurboAssemblerBase(Isolate* isolate, void* buffer, int buffer_size, - 
CodeObjectRequired create_code_object); - TurboAssemblerBase(IsolateData isolate_data, void* buffer, int buffer_size); + TurboAssemblerBase(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object); Isolate* const isolate_ = nullptr; diff --git a/src/wasm/baseline/arm64/liftoff-assembler-arm64.h b/src/wasm/baseline/arm64/liftoff-assembler-arm64.h index 0fee429568..2d3d3e4140 100644 --- a/src/wasm/baseline/arm64/liftoff-assembler-arm64.h +++ b/src/wasm/baseline/arm64/liftoff-assembler-arm64.h @@ -148,7 +148,7 @@ void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset, return; } #endif - PatchingAssembler patching_assembler(IsolateData(isolate()), buffer_ + offset, + PatchingAssembler patching_assembler(Assembler::Options{}, buffer_ + offset, 1); patching_assembler.PatchSubSp(bytes); } diff --git a/src/wasm/baseline/liftoff-assembler.cc b/src/wasm/baseline/liftoff-assembler.cc index aff98b2824..51c7305d8f 100644 --- a/src/wasm/baseline/liftoff-assembler.cc +++ b/src/wasm/baseline/liftoff-assembler.cc @@ -337,7 +337,8 @@ void LiftoffAssembler::CacheState::Split(const CacheState& source) { // TODO(clemensh): Provide a reasonably sized buffer, based on wasm function // size. LiftoffAssembler::LiftoffAssembler(Isolate* isolate) - : TurboAssembler(isolate, nullptr, 0, CodeObjectRequired::kNo) { + : TurboAssembler(isolate, Assembler::DefaultOptions(isolate), nullptr, 0, + CodeObjectRequired::kNo) { set_trap_on_abort(true); // Avoid calls to Abort. } diff --git a/src/wasm/baseline/mips/liftoff-assembler-mips.h b/src/wasm/baseline/mips/liftoff-assembler-mips.h index e09fa69541..50e134116e 100644 --- a/src/wasm/baseline/mips/liftoff-assembler-mips.h +++ b/src/wasm/baseline/mips/liftoff-assembler-mips.h @@ -118,8 +118,9 @@ void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset, // We can't run out of space, just pass anything big enough to not cause the // assembler to try to grow the buffer. 
constexpr int kAvailableSpace = 256; - TurboAssembler patching_assembler(isolate(), buffer_ + offset, - kAvailableSpace, CodeObjectRequired::kNo); + TurboAssembler patching_assembler(nullptr, Assembler::Options{}, + buffer_ + offset, kAvailableSpace, + CodeObjectRequired::kNo); // If bytes can be represented as 16bit, addiu will be generated and two // nops will stay untouched. Otherwise, lui-ori sequence will load it to // register and, as third instruction, addu will be generated. diff --git a/src/wasm/baseline/mips64/liftoff-assembler-mips64.h b/src/wasm/baseline/mips64/liftoff-assembler-mips64.h index 5ade1622ac..5a6e14c59d 100644 --- a/src/wasm/baseline/mips64/liftoff-assembler-mips64.h +++ b/src/wasm/baseline/mips64/liftoff-assembler-mips64.h @@ -108,8 +108,9 @@ void LiftoffAssembler::PatchPrepareStackFrame(uint32_t offset, // We can't run out of space, just pass anything big enough to not cause the // assembler to try to grow the buffer. constexpr int kAvailableSpace = 256; - TurboAssembler patching_assembler(isolate(), buffer_ + offset, - kAvailableSpace, CodeObjectRequired::kNo); + TurboAssembler patching_assembler(nullptr, Assembler::Options{}, + buffer_ + offset, kAvailableSpace, + CodeObjectRequired::kNo); // If bytes can be represented as 16bit, daddiu will be generated and two // nops will stay untouched. Otherwise, lui-ori sequence will load it to // register and, as third instruction, daddu will be generated. diff --git a/src/wasm/jump-table-assembler.h b/src/wasm/jump-table-assembler.h index 32add32d9c..00dca41e11 100644 --- a/src/wasm/jump-table-assembler.h +++ b/src/wasm/jump-table-assembler.h @@ -13,24 +13,19 @@ namespace internal { namespace wasm { class JumpTableAssembler : public TurboAssembler { + public: // {JumpTableAssembler} is never used during snapshot generation, and its code // must be independent of the code range of any isolate anyway. So just use - // this default {IsolateData} for each {JumpTableAssembler}. 
- static constexpr IsolateData GetDefaultIsolateData() { -#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64 - return IsolateData(kSerializerDisabled, kNullAddress); -#else - return IsolateData(kSerializerDisabled); -#endif - } - - public: - JumpTableAssembler() : TurboAssembler(GetDefaultIsolateData(), nullptr, 0) {} + // this default {Options} for each {JumpTableAssembler}. + JumpTableAssembler() + : TurboAssembler(nullptr, Assembler::Options{}, nullptr, 0, + CodeObjectRequired::kNo) {} // Instantiate a {JumpTableAssembler} for patching. explicit JumpTableAssembler(Address slot_addr, int size = 256) - : TurboAssembler(GetDefaultIsolateData(), - reinterpret_cast(slot_addr), size) {} + : TurboAssembler(nullptr, Assembler::Options{}, + reinterpret_cast(slot_addr), size, + CodeObjectRequired::kNo) {} // Emit a trampoline to a possibly far away code target. void EmitJumpTrampoline(Address target); diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h index 561e59e98f..ad95791869 100644 --- a/src/x64/assembler-x64-inl.h +++ b/src/x64/assembler-x64-inl.h @@ -66,7 +66,7 @@ void Assembler::emit_code_target(Handle target, RelocInfo::Mode rmode) { void Assembler::emit_runtime_entry(Address entry, RelocInfo::Mode rmode) { DCHECK(RelocInfo::IsRuntimeEntry(rmode)); RecordRelocInfo(rmode); - emitl(static_cast(entry - isolate_data().code_range_start)); + emitl(static_cast(entry - options().code_range_start)); } void Assembler::emit(Immediate x) { @@ -282,7 +282,7 @@ Handle Assembler::code_target_object_handle_at(Address pc) { } Address Assembler::runtime_entry_at(Address pc) { - return Memory::int32_at(pc) + isolate_data().code_range_start; + return Memory::int32_at(pc) + options().code_range_start; } // ----------------------------------------------------------------------------- diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc index c4d3b5b984..2ccdd7d6b9 100644 --- a/src/x64/assembler-x64.cc +++ b/src/x64/assembler-x64.cc @@ -356,8 +356,8 @@ void 
Assembler::AllocateAndInstallRequestedHeapObjects(Isolate* isolate) { // ----------------------------------------------------------------------------- // Implementation of Assembler. -Assembler::Assembler(IsolateData isolate_data, void* buffer, int buffer_size) - : AssemblerBase(isolate_data, buffer, buffer_size) { +Assembler::Assembler(const Options& options, void* buffer, int buffer_size) + : AssemblerBase(options, buffer, buffer_size) { // Clear the buffer in debug mode unless it was provided by the // caller in which case we can't be sure it's okay to overwrite // existing code in it. @@ -4831,9 +4831,8 @@ void Assembler::dq(Label* label) { void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { DCHECK(!RelocInfo::IsNone(rmode)); - // Don't record external references unless the heap will be serialized. if (rmode == RelocInfo::EXTERNAL_REFERENCE && - !serializer_enabled() && !emit_debug_code()) { + !options().record_reloc_info_for_exrefs && !emit_debug_code()) { return; } RelocInfo rinfo(reinterpret_cast
(pc_), rmode, data, nullptr); diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h index 44cad56849..709c244d66 100644 --- a/src/x64/assembler-x64.h +++ b/src/x64/assembler-x64.h @@ -450,8 +450,8 @@ class Assembler : public AssemblerBase { // buffer is too small, a fatal error occurs. No deallocation of the buffer is // done upon destruction of the assembler. Assembler(Isolate* isolate, void* buffer, int buffer_size) - : Assembler(IsolateData(isolate), buffer, buffer_size) {} - Assembler(IsolateData isolate_data, void* buffer, int buffer_size); + : Assembler(DefaultOptions(isolate), buffer, buffer_size) {} + Assembler(const Options& options, void* buffer, int buffer_size); virtual ~Assembler() {} // GetCode emits any pending (non-emitted) code and fills the descriptor diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc index b6a2722d25..93d98ce3f7 100644 --- a/src/x64/macro-assembler-x64.cc +++ b/src/x64/macro-assembler-x64.cc @@ -63,9 +63,10 @@ StackArgumentsAccessor::StackArgumentsAccessor( extra_displacement_to_last_argument_( extra_displacement_to_last_argument) {} -MacroAssembler::MacroAssembler(Isolate* isolate, void* buffer, int size, +MacroAssembler::MacroAssembler(Isolate* isolate, const Options& options, + void* buffer, int size, CodeObjectRequired create_code_object) - : TurboAssembler(isolate, buffer, size, create_code_object) { + : TurboAssembler(isolate, options, buffer, size, create_code_object) { if (create_code_object == CodeObjectRequired::kYes) { // Unlike TurboAssembler, which can be used off the main thread and may not // allocate, macro assembler creates its own copy of the self-reference @@ -102,7 +103,7 @@ int64_t TurboAssembler::RootRegisterDelta(ExternalReference other) { } void MacroAssembler::Load(Register destination, ExternalReference source) { - if (root_array_available_ && !serializer_enabled()) { + if (root_array_available_ && options().enable_root_array_delta_access) { int64_t delta = 
RootRegisterDelta(source); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { movp(destination, Operand(kRootRegister, static_cast(delta))); @@ -111,7 +112,7 @@ void MacroAssembler::Load(Register destination, ExternalReference source) { } // Safe code. #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadExternalReference(kScratchRegister, source); movp(destination, Operand(kScratchRegister, 0)); return; @@ -127,7 +128,7 @@ void MacroAssembler::Load(Register destination, ExternalReference source) { void MacroAssembler::Store(ExternalReference destination, Register source) { - if (root_array_available_ && !serializer_enabled()) { + if (root_array_available_ && options().enable_root_array_delta_access) { int64_t delta = RootRegisterDelta(destination); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { movp(Operand(kRootRegister, static_cast(delta)), source); @@ -186,7 +187,7 @@ void TurboAssembler::LoadRootRegisterOffset(Register destination, void TurboAssembler::LoadAddress(Register destination, ExternalReference source) { - if (root_array_available_ && !serializer_enabled()) { + if (root_array_available_ && options().enable_root_array_delta_access) { int64_t delta = RootRegisterDelta(source); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { leap(destination, Operand(kRootRegister, static_cast(delta))); @@ -195,7 +196,7 @@ void TurboAssembler::LoadAddress(Register destination, } // Safe code. 
#ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadExternalReference(destination, source); return; } @@ -205,7 +206,7 @@ void TurboAssembler::LoadAddress(Register destination, Operand TurboAssembler::ExternalOperand(ExternalReference target, Register scratch) { - if (root_array_available_ && !serializer_enabled()) { + if (root_array_available_ && options().enable_root_array_delta_access) { int64_t delta = RootRegisterDelta(target); if (delta != kInvalidRootRegisterDelta && is_int32(delta)) { return Operand(kRootRegister, static_cast(delta)); @@ -216,7 +217,7 @@ Operand TurboAssembler::ExternalOperand(ExternalReference target, } int TurboAssembler::LoadAddressSize(ExternalReference source) { - if (root_array_available_ && !serializer_enabled()) { + if (root_array_available_ && options().enable_root_array_delta_access) { // This calculation depends on the internals of LoadAddress. // It's correctness is ensured by the asserts in the Call // instruction below. 
@@ -237,14 +238,6 @@ int TurboAssembler::LoadAddressSize(ExternalReference source) { void MacroAssembler::PushAddress(ExternalReference source) { - Address address = source.address(); - if (is_int32(address) && !serializer_enabled()) { - if (emit_debug_code()) { - Move(kScratchRegister, kZapValue, RelocInfo::NONE); - } - Push(Immediate(static_cast(address))); - return; - } LoadAddress(kScratchRegister, source); Push(kScratchRegister); } @@ -1069,7 +1062,7 @@ void TurboAssembler::Move(Register dst, Smi* source) { void TurboAssembler::Move(Register dst, ExternalReference ext) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadExternalReference(dst, ext); return; } @@ -1378,7 +1371,7 @@ void TurboAssembler::Push(Handle source) { void TurboAssembler::Move(Register result, Handle object, RelocInfo::Mode rmode) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { + if (root_array_available_ && options().isolate_independent_code) { IndirectLoadConstant(result, object); return; } @@ -1521,7 +1514,7 @@ void TurboAssembler::Jump(Handle code_object, RelocInfo::Mode rmode, Condition cc) { // TODO(X64): Inline this #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() && + if (root_array_available_ && options().isolate_independent_code && !Builtins::IsEmbeddedBuiltin(*code_object)) { // Calls to embedded targets are initially generated as standard // pc-relative calls below. 
When creating the embedded blob, call offsets @@ -1538,7 +1531,7 @@ void TurboAssembler::Jump(Handle code_object, RelocInfo::Mode rmode, jmp(kScratchRegister); bind(&skip); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { @@ -1598,7 +1591,7 @@ void TurboAssembler::Call(Address destination, RelocInfo::Mode rmode) { void TurboAssembler::Call(Handle code_object, RelocInfo::Mode rmode) { #ifdef V8_EMBEDDED_BUILTINS - if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList() && + if (root_array_available_ && options().isolate_independent_code && !Builtins::IsEmbeddedBuiltin(*code_object)) { // Calls to embedded targets are initially generated as standard // pc-relative calls below. When creating the embedded blob, call offsets @@ -1609,7 +1602,7 @@ void TurboAssembler::Call(Handle code_object, RelocInfo::Mode rmode) { leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize)); call(kScratchRegister); return; - } else if (!isolate()->serializer_enabled()) { + } else if (options().inline_offheap_trampolines) { int builtin_index = Builtins::kNoBuiltinId; if (isolate()->builtins()->IsBuiltinHandle(code_object, &builtin_index) && Builtins::IsIsolateIndependent(builtin_index)) { diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h index a0d6c72b98..0526c59ad0 100644 --- a/src/x64/macro-assembler-x64.h +++ b/src/x64/macro-assembler-x64.h @@ -129,12 +129,10 @@ class StackArgumentsAccessor BASE_EMBEDDED { class TurboAssembler : public TurboAssemblerBase { public: - TurboAssembler(Isolate* isolate, void* buffer, int buffer_size, - CodeObjectRequired create_code_object) - : TurboAssemblerBase(isolate, buffer, buffer_size, create_code_object) {} - - TurboAssembler(IsolateData isolate_data, void* buffer, 
int buffer_size) - : TurboAssemblerBase(isolate_data, buffer, buffer_size) {} + TurboAssembler(Isolate* isolate, const Options& options, void* buffer, + int buffer_size, CodeObjectRequired create_code_object) + : TurboAssemblerBase(isolate, options, buffer, buffer_size, + create_code_object) {} template struct AvxHelper { @@ -557,8 +555,13 @@ class TurboAssembler : public TurboAssemblerBase { // MacroAssembler implements a collection of frequently used macros. class MacroAssembler : public TurboAssembler { public: + // TODO(titzer): inline this utility constructor. MacroAssembler(Isolate* isolate, void* buffer, int size, - CodeObjectRequired create_code_object); + CodeObjectRequired create_code_object) + : MacroAssembler(isolate, Assembler::DefaultOptions(isolate), buffer, + size, create_code_object) {} + MacroAssembler(Isolate* isolate, const Options& options, void* buffer, + int size, CodeObjectRequired create_code_object); // Loads and stores the value of an external reference. // Special case code for load and store to take advantage of