From e3e3480b9aa2ab45c0675dd7fe6cd5a44aab98eb Mon Sep 17 00:00:00 2001
From: jgruber
Date: Tue, 5 Jun 2018 17:51:00 +0200
Subject: [PATCH] [builtins] Move builtins table to the Heap class

The builtins table is an array of pointers to builtin code objects. It
used to be located within the Builtins class, which itself was part of
the Isolate.

To enable faster isolate-independent access to builtin code objects,
this CL moves the builtins table into the heap, at a constant known
offset from the roots table.

With this change, builtins can be accessed through the root pointer
with a single instruction:

mov reg, [kRootPointer, <offset>]

TurboAssembler::LookupConstant is also extended in this CL to
potentially shortcut the slow-ish constants table lookup: root
constants are loaded through the root list, and builtin constants
through the builtins table.

Bug: v8:6666
Change-Id: I8e83c2a8783c01ebece89483274f42ab4c8872f2
Reviewed-on: https://chromium-review.googlesource.com/1075275
Commit-Queue: Jakob Gruber
Reviewed-by: Michael Starzinger
Reviewed-by: Ulan Degenbaev
Cr-Commit-Position: refs/heads/master@{#53528}
---
 src/arm/macro-assembler-arm.cc          | 44 ++++++++++++++++++------
 src/arm/macro-assembler-arm.h           |  8 ++++-
 src/arm64/macro-assembler-arm64.cc      | 43 ++++++++++++++++++-----
 src/arm64/macro-assembler-arm64.h       | 10 ++++--
 src/builtins/builtins.cc                | 41 ++++++++--------------
 src/builtins/builtins.h                 | 37 ++++----------------
 src/builtins/setup-builtins-internal.cc | 20 +++++------
 src/external-reference-table.cc         |  3 +-
 src/external-reference-table.h          | 15 ++++++---
 src/external-reference.cc               |  2 +-
 src/heap/heap.cc                        | 28 ++++++++++++++-
 src/heap/heap.h                         | 22 ++++++++++++
 src/ia32/macro-assembler-ia32.h         |  5 +++
 src/isolate.cc                          |  3 ++
 src/mips/macro-assembler-mips.cc        | 37 +++++++++++++++++---
 src/mips/macro-assembler-mips.h         |  8 ++++-
 src/mips64/macro-assembler-mips64.cc    | 37 +++++++++++++++++---
 src/mips64/macro-assembler-mips64.h     |  8 ++++-
 src/x64/macro-assembler-x64.cc          | 45 +++++++++++++++++++------
 src/x64/macro-assembler-x64.h           |  8 ++++-
 20 files changed, 302 insertions(+), 122 deletions(-)

diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index c8db9c2637..be6842a46b 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -138,20 +138,38 @@ int TurboAssembler::PopCallerSaved(SaveFPRegsMode fp_mode, Register exclusion1,
 
 #ifdef V8_EMBEDDED_BUILTINS
 void TurboAssembler::LookupConstant(Register destination,
-                                    Handle<Object> object) {
+                                    Handle<HeapObject> object) {
   CHECK(isolate()->ShouldLoadConstantsFromRootList());
   CHECK(root_array_available_);
 
+  // Before falling back to the (fairly slow) lookup from the constants table,
+  // check if any of the fast paths can be applied.
+  {
+    int builtin_index;
+    Heap::RootListIndex root_index;
+    if (isolate()->heap()->IsRootHandle(object, &root_index)) {
+      // Roots are loaded relative to the root register.
+      LoadRoot(destination, root_index);
+      return;
+    } else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) {
+      // Similar to roots, builtins may be loaded from the builtins table.
+      LoadBuiltin(destination, builtin_index);
+      return;
+    } else if (object.is_identical_to(code_object_) &&
+               Builtins::IsBuiltinId(maybe_builtin_index_)) {
+      // The self-reference loaded through CodeObject() may also be a builtin
+      // and thus viable for a fast load.
+      LoadBuiltin(destination, maybe_builtin_index_);
+      return;
+    }
+  }
+
   // Ensure the given object is in the builtins constants table and fetch its
   // index.
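Taken together, the commit message and the hunk above describe a four-way dispatch: root-list load, builtins-table load, self-reference load, and the constants-table fallback. As a reading aid, here is a minimal stand-alone sketch of that ordering in plain C++; the enum and parameters are illustrative stand-ins (V8 derives them from IsRootHandle, IsBuiltinHandle and code_object_), not the actual V8 API.

// Stand-alone sketch of the dispatch order in the new LookupConstant
// (stand-in parameters; not V8 code).
enum class LoadKind { kRootList, kBuiltinsTable, kSelfReference, kConstantsTable };

LoadKind ClassifyConstantLoad(bool is_root, bool is_builtin, bool is_self_reference,
                              int maybe_builtin_index /* -1 if unknown */) {
  if (is_root) return LoadKind::kRootList;          // one load relative to the root register
  if (is_builtin) return LoadKind::kBuiltinsTable;  // one load from the builtins table
  if (is_self_reference && maybe_builtin_index >= 0) {
    return LoadKind::kSelfReference;                // also served by the builtins table
  }
  return LoadKind::kConstantsTable;                 // slow path: constants table lookup
}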
BuiltinsConstantsTableBuilder* builder = isolate()->builtins_constants_table_builder(); uint32_t index = builder->AddObject(object); - // TODO(jgruber): Load builtins from the builtins table. - // TODO(jgruber): Ensure that code generation can recognize constant targets - // in kArchCallCodeObject. - DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant( Heap::kBuiltinsConstantsTableRootIndex)); @@ -202,6 +220,15 @@ void TurboAssembler::LookupExternalReference(Register destination, ldr(destination, MemOperand(kRootRegister, roots_to_external_reference_offset)); } + +void TurboAssembler::LoadBuiltin(Register destination, int builtin_index) { + DCHECK(Builtins::IsBuiltinId(builtin_index)); + + int32_t roots_to_builtins_offset = + Heap::roots_to_builtins_offset() + builtin_index * kPointerSize; + + ldr(destination, MemOperand(kRootRegister, roots_to_builtins_offset)); +} #endif // V8_EMBEDDED_BUILTINS void TurboAssembler::Jump(Register target, Condition cond) { bx(target, cond); } @@ -392,12 +419,7 @@ void TurboAssembler::Move(Register dst, Smi* smi) { mov(dst, Operand(smi)); } void TurboAssembler::Move(Register dst, Handle value) { #ifdef V8_EMBEDDED_BUILTINS if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { - Heap::RootListIndex root_index; - if (!isolate()->heap()->IsRootHandle(value, &root_index)) { - LookupConstant(dst, value); - } else { - LoadRoot(dst, root_index); - } + LookupConstant(dst, value); return; } #endif // V8_EMBEDDED_BUILTINS diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h index 51ef552a92..d335909c25 100644 --- a/src/arm/macro-assembler-arm.h +++ b/src/arm/macro-assembler-arm.h @@ -322,9 +322,10 @@ class TurboAssembler : public Assembler { Register src_high, uint32_t shift); #ifdef V8_EMBEDDED_BUILTINS - void LookupConstant(Register destination, Handle object); + void LookupConstant(Register destination, Handle object); void LookupExternalReference(Register destination, ExternalReference reference); + void LoadBuiltin(Register destination, int builtin_index); #endif // V8_EMBEDDED_BUILTINS // Returns the size of a call in instructions. Note, the value returned is @@ -552,11 +553,16 @@ class TurboAssembler : public Assembler { bool root_array_available() const { return root_array_available_; } void set_root_array_available(bool v) { root_array_available_ = v; } + void set_builtin_index(int builtin_index) { + maybe_builtin_index_ = builtin_index; + } + protected: // This handle will be patched with the code object on installation. Handle code_object_; private: + int maybe_builtin_index_ = -1; // May be set while generating builtins. 
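The new LoadBuiltin above is nothing more than offset arithmetic followed by a single ldr off kRootRegister. A minimal sketch of that arithmetic, with an invented value standing in for the real Heap::roots_to_builtins_offset():

// Offset arithmetic behind LoadBuiltin (illustrative constants, not V8's real layout).
#include <cstdint>

constexpr int kPointerSize = 8;                 // assumption: 64-bit target
constexpr int kRootsToBuiltinsOffset = 0x2400;  // hypothetical Heap::roots_to_builtins_offset()

constexpr int32_t BuiltinSlotOffset(int builtin_index) {
  // ldr destination, [kRootRegister, #offset]  -- a single instruction.
  return kRootsToBuiltinsOffset + builtin_index * kPointerSize;
}

static_assert(BuiltinSlotOffset(0) == 0x2400, "builtin 0 sits at the table base");
static_assert(BuiltinSlotOffset(3) == 0x2418, "each slot is one pointer wide");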
bool has_frame_ = false; bool root_array_available_ = true; Isolate* const isolate_; diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc index 38e1760e87..8b73425caf 100644 --- a/src/arm64/macro-assembler-arm64.cc +++ b/src/arm64/macro-assembler-arm64.cc @@ -1589,14 +1589,14 @@ void MacroAssembler::LoadObject(Register result, Handle object) { void TurboAssembler::Move(Register dst, Register src) { Mov(dst, src); } -void TurboAssembler::Move(Register dst, Handle x) { +void TurboAssembler::Move(Register dst, Handle value) { #ifdef V8_EMBEDDED_BUILTINS if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { - LookupConstant(dst, x); + LookupConstant(dst, value); return; } #endif // V8_EMBEDDED_BUILTINS - Mov(dst, x); + Mov(dst, value); } void TurboAssembler::Move(Register dst, Smi* src) { Mov(dst, src); } @@ -1886,20 +1886,38 @@ void TurboAssembler::CallCFunction(Register function, int num_of_reg_args, #ifdef V8_EMBEDDED_BUILTINS void TurboAssembler::LookupConstant(Register destination, - Handle object) { + Handle object) { CHECK(isolate()->ShouldLoadConstantsFromRootList()); CHECK(root_array_available_); + // Before falling back to the (fairly slow) lookup from the constants table, + // check if any of the fast paths can be applied. + { + int builtin_index; + Heap::RootListIndex root_index; + if (isolate()->heap()->IsRootHandle(object, &root_index)) { + // Roots are loaded relative to the root register. + LoadRoot(destination, root_index); + return; + } else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) { + // Similar to roots, builtins may be loaded from the builtins table. + LoadBuiltin(destination, builtin_index); + return; + } else if (object.is_identical_to(code_object_) && + Builtins::IsBuiltinId(maybe_builtin_index_)) { + // The self-reference loaded through Codevalue() may also be a builtin + // and thus viable for a fast load. + LoadBuiltin(destination, maybe_builtin_index_); + return; + } + } + // Ensure the given object is in the builtins constants table and fetch its // index. BuiltinsConstantsTableBuilder* builder = isolate()->builtins_constants_table_builder(); uint32_t index = builder->AddObject(object); - // TODO(jgruber): Load builtins from the builtins table. - // TODO(jgruber): Ensure that code generation can recognize constant targets - // in kArchCallCodeObject. - DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant( Heap::kBuiltinsConstantsTableRootIndex)); @@ -1931,6 +1949,15 @@ void TurboAssembler::LookupExternalReference(Register destination, Ldr(destination, MemOperand(kRootRegister, roots_to_external_reference_offset)); } + +void TurboAssembler::LoadBuiltin(Register destination, int builtin_index) { + DCHECK(Builtins::IsBuiltinId(builtin_index)); + + int32_t roots_to_builtins_offset = + Heap::roots_to_builtins_offset() + builtin_index * kPointerSize; + + Ldr(destination, MemOperand(kRootRegister, roots_to_builtins_offset)); +} #endif // V8_EMBEDDED_BUILTINS void TurboAssembler::Jump(Register target, Condition cond) { diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h index 97826b4387..9cc6405c36 100644 --- a/src/arm64/macro-assembler-arm64.h +++ b/src/arm64/macro-assembler-arm64.h @@ -259,7 +259,7 @@ class TurboAssembler : public Assembler { // This is required for compatibility with architecture independent code. // Remove if not needed. 
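The arm64 hunks repeat the same fast paths; the one piece of new state they depend on is maybe_builtin_index_. While a builtin is being generated, its own Code object is not yet installed in the builtins table, so the self-reference held in code_object_ cannot be mapped to an index by inspection; set_builtin_index() records the index out of band instead. A small sketch of that pattern (illustrative names, not the real TurboAssembler interface):

// Sketch: recording the builtin index out of band so self-references can use
// the builtins-table fast path (illustrative names, not the real assembler).
struct SketchAssembler {
  int maybe_builtin_index_ = -1;  // -1: not currently generating a builtin.

  void set_builtin_index(int index) { maybe_builtin_index_ = index; }

  // Index to load through the builtins table when the requested constant is
  // the code object under construction; -1 means "take the slow path".
  int SelfReferenceIndex(bool refers_to_self) const {
    return refers_to_self ? maybe_builtin_index_ : -1;
  }
};

// Usage mirrors setup-builtins-internal.cc below: the index is set on the
// assembler before the builtin generator runs.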
void Move(Register dst, Register src); - void Move(Register dst, Handle x); + void Move(Register dst, Handle value); void Move(Register dst, Smi* src); // Register swap. Note that the register operands should be distinct. @@ -879,9 +879,10 @@ class TurboAssembler : public Assembler { void Movi(const VRegister& vd, uint64_t hi, uint64_t lo); #ifdef V8_EMBEDDED_BUILTINS - void LookupConstant(Register destination, Handle object); + void LookupConstant(Register destination, Handle object); void LookupExternalReference(Register destination, ExternalReference reference); + void LoadBuiltin(Register destination, int builtin_index); #endif // V8_EMBEDDED_BUILTINS void Jump(Register target, Condition cond = al); @@ -1230,6 +1231,10 @@ class TurboAssembler : public Assembler { bool root_array_available() const { return root_array_available_; } void set_root_array_available(bool v) { root_array_available_ = v; } + void set_builtin_index(int builtin_index) { + maybe_builtin_index_ = builtin_index; + } + protected: // The actual Push and Pop implementations. These don't generate any code // other than that required for the push or pop. This allows @@ -1266,6 +1271,7 @@ class TurboAssembler : public Assembler { Handle code_object_; private: + int maybe_builtin_index_ = -1; // May be set while generating builtins. bool has_frame_ = false; bool root_array_available_ = true; Isolate* const isolate_; diff --git a/src/builtins/builtins.cc b/src/builtins/builtins.cc index 2ca0623f95..c9c3f04525 100644 --- a/src/builtins/builtins.cc +++ b/src/builtins/builtins.cc @@ -65,12 +65,6 @@ const BuiltinMetadata builtin_metadata[] = { } // namespace -Builtins::Builtins() : initialized_(false) { - memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count); -} - -Builtins::~Builtins() {} - BailoutId Builtins::GetContinuationBailoutId(Name name) { DCHECK(Builtins::KindOf(name) == TFJ || Builtins::KindOf(name) == TFC); return BailoutId(BailoutId::kFirstBuiltinContinuationId + name); @@ -85,18 +79,11 @@ Builtins::Name Builtins::GetBuiltinFromBailoutId(BailoutId id) { void Builtins::TearDown() { initialized_ = false; } -void Builtins::IterateBuiltins(RootVisitor* v) { - for (int i = 0; i < builtin_count; i++) { - v->VisitRootPointer(Root::kBuiltins, name(i), &builtins_[i]); - } -} - const char* Builtins::Lookup(Address pc) { // may be called during initialization (disassembler!) if (initialized_) { for (int i = 0; i < builtin_count; i++) { - Code* entry = Code::cast(builtins_[i]); - if (entry->contains(pc)) return name(i); + if (isolate_->heap()->builtin(i)->contains(pc)) return name(i); } } return nullptr; @@ -137,16 +124,15 @@ Handle Builtins::OrdinaryToPrimitive(OrdinaryToPrimitiveHint hint) { } void Builtins::set_builtin(int index, HeapObject* builtin) { - DCHECK(Builtins::IsBuiltinId(index)); - DCHECK(Internals::HasHeapObjectTag(builtin)); - // The given builtin may be completely uninitialized thus we cannot check its - // type here. 
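With the storage moved into the heap, Builtins::Lookup(pc) above now walks the heap-owned table and asks each code object whether it contains pc. The same scan, written against stand-in types so it is self-contained:

// Sketch of Builtins::Lookup(pc) over a heap-owned table (stand-in types, not V8's Code).
#include <cstdint>

struct FakeCode {
  uintptr_t start;
  uintptr_t size;
  const char* name;
  bool contains(uintptr_t pc) const { return pc >= start && pc < start + size; }
};

const char* LookupBuiltinName(const FakeCode* table, int count, uintptr_t pc) {
  for (int i = 0; i < count; i++) {
    if (table[i].contains(pc)) return table[i].name;  // mirrors heap->builtin(i)->contains(pc)
  }
  return nullptr;  // pc is not inside any builtin
}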
- builtins_[index] = builtin; + isolate_->heap()->set_builtin(index, builtin); } +Code* Builtins::builtin(int index) { return isolate_->heap()->builtin(index); } + Handle Builtins::builtin_handle(int index) { DCHECK(IsBuiltinId(index)); - return Handle(reinterpret_cast(builtin_address(index))); + return Handle( + reinterpret_cast(isolate_->heap()->builtin_address(index))); } // static @@ -157,8 +143,7 @@ int Builtins::GetStackParameterCount(Name name) { // static Callable Builtins::CallableFor(Isolate* isolate, Name name) { - Handle code( - reinterpret_cast(isolate->builtins()->builtin_address(name))); + Handle code = isolate->builtins()->builtin_handle(name); CallDescriptors::Key key; switch (name) { // This macro is deliberately crafted so as to emit very little code, @@ -199,13 +184,15 @@ bool Builtins::IsBuiltin(const Code* code) { return Builtins::IsBuiltinId(code->builtin_index()); } -bool Builtins::IsBuiltinHandle(Handle code, int* index) const { - Object** const handle_location = bit_cast(code.address()); - Object* const* start = &builtins_[0]; - Object* const* end = &builtins_[Builtins::builtin_count]; +bool Builtins::IsBuiltinHandle(Handle maybe_code, + int* index) const { + Heap* heap = isolate_->heap(); + Address handle_location = maybe_code.address(); + Address start = heap->builtin_address(0); + Address end = heap->builtin_address(Builtins::builtin_count); if (handle_location >= end) return false; if (handle_location < start) return false; - *index = static_cast(handle_location - start); + *index = static_cast(handle_location - start) >> kPointerSizeLog2; DCHECK(Builtins::IsBuiltinId(*index)); return true; } diff --git a/src/builtins/builtins.h b/src/builtins/builtins.h index 605c627c93..891e5f3064 100644 --- a/src/builtins/builtins.h +++ b/src/builtins/builtins.h @@ -31,13 +31,10 @@ class CodeAssemblerState; class Builtins { public: - ~Builtins(); + explicit Builtins(Isolate* isolate) : isolate_(isolate) {} void TearDown(); - // Garbage collection support. - void IterateBuiltins(RootVisitor* v); - // Disassembler support. const char* Lookup(Address pc); @@ -75,24 +72,9 @@ class Builtins { // Used by BuiltinDeserializer and CreateOffHeapTrampolines in isolate.cc. void set_builtin(int index, HeapObject* builtin); - Code* builtin(int index) { - DCHECK(IsBuiltinId(index)); - // Code::cast cannot be used here since we access builtins - // during the marking phase of mark sweep. See IC::Clear. - return reinterpret_cast(builtins_[index]); - } - - Address builtin_address(int index) { - DCHECK(IsBuiltinId(index)); - return reinterpret_cast
(&builtins_[index]); - } - + Code* builtin(int index); V8_EXPORT_PRIVATE Handle builtin_handle(int index); - // Used by lazy deserialization to determine whether a given builtin has been - // deserialized. See the DeserializeLazy builtin. - Object** builtins_table_address() { return &builtins_[0]; } - V8_EXPORT_PRIVATE static Callable CallableFor(Isolate* isolate, Name name); static int GetStackParameterCount(Name name); @@ -115,7 +97,7 @@ class Builtins { // As above, but safe to access off the main thread since the check is done // by handle location. Similar to Heap::IsRootHandle. - bool IsBuiltinHandle(Handle code, int* index) const; + bool IsBuiltinHandle(Handle maybe_code, int* index) const; // True, iff the given code object is a builtin with off-heap embedded code. static bool IsEmbeddedBuiltin(const Code* code); @@ -154,9 +136,6 @@ class Builtins { static bool AllowDynamicFunction(Isolate* isolate, Handle target, Handle target_global_proxy); - private: - Builtins(); - #ifdef V8_EMBEDDED_BUILTINS // Creates a trampoline code object that jumps to the given off-heap entry. // The result should not be used directly, but only from the related Factory @@ -165,6 +144,7 @@ class Builtins { Address off_heap_entry); #endif + private: static void Generate_CallFunction(MacroAssembler* masm, ConvertReceiverMode mode); @@ -197,14 +177,9 @@ class Builtins { #undef DECLARE_ASM #undef DECLARE_TF - // Note: These are always Code objects, but to conform with - // IterateBuiltins() above which assumes Object**'s for the callback - // function f, we use an Object* array here. - Object* builtins_[builtin_count]; - bool initialized_; + Isolate* isolate_; + bool initialized_ = false; - friend class Factory; // For GenerateOffHeapTrampolineFor. - friend class Isolate; friend class SetupIsolateDelegate; DISALLOW_COPY_AND_ASSIGN(Builtins); diff --git a/src/builtins/setup-builtins-internal.cc b/src/builtins/setup-builtins-internal.cc index e334b286ab..2e8dde6a48 100644 --- a/src/builtins/setup-builtins-internal.cc +++ b/src/builtins/setup-builtins-internal.cc @@ -72,6 +72,7 @@ Code* BuildWithMacroAssembler(Isolate* isolate, int32_t builtin_index, const size_t buffer_size = 32 * KB; byte buffer[buffer_size]; // NOLINT(runtime/arrays) MacroAssembler masm(isolate, buffer, buffer_size, CodeObjectRequired::kYes); + masm.set_builtin_index(builtin_index); DCHECK(!masm.has_frame()); generator(&masm); CodeDesc desc; @@ -92,6 +93,7 @@ Code* BuildAdaptor(Isolate* isolate, int32_t builtin_index, const size_t buffer_size = 32 * KB; byte buffer[buffer_size]; // NOLINT(runtime/arrays) MacroAssembler masm(isolate, buffer, buffer_size, CodeObjectRequired::kYes); + masm.set_builtin_index(builtin_index); DCHECK(!masm.has_frame()); Builtins::Generate_Adaptor(&masm, builtin_address, exit_frame_type); CodeDesc desc; @@ -157,7 +159,7 @@ Code* BuildWithCodeStubAssemblerCS(Isolate* isolate, int32_t builtin_index, void SetupIsolateDelegate::AddBuiltin(Builtins* builtins, int index, Code* code) { DCHECK_EQ(index, code->builtin_index()); - builtins->builtins_[index] = code; + builtins->set_builtin(index, code); } void SetupIsolateDelegate::PopulateWithPlaceholders(Isolate* isolate) { @@ -189,8 +191,7 @@ void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) { if (RelocInfo::IsCodeTarget(rinfo->rmode())) { Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); if (!target->is_builtin()) continue; - Code* new_target = - Code::cast(builtins->builtins_[target->builtin_index()]); + Code* new_target = 
builtins->builtin(target->builtin_index()); rinfo->set_target_address(new_target->raw_instruction_start(), UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH); } else { @@ -199,8 +200,7 @@ void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) { if (!object->IsCode()) continue; Code* target = Code::cast(object); if (!target->is_builtin()) continue; - Code* new_target = - Code::cast(builtins->builtins_[target->builtin_index()]); + Code* new_target = builtins->builtin(target->builtin_index()); rinfo->set_target_object(isolate->heap(), new_target, UPDATE_WRITE_BARRIER, SKIP_ICACHE_FLUSH); } @@ -274,16 +274,14 @@ void SetupIsolateDelegate::SetupBuiltinsInternal(Isolate* isolate) { ReplacePlaceholders(isolate); -#define SET_PROMISE_REJECTION_PREDICTION(Name) \ - Code::cast(builtins->builtins_[Builtins::k##Name]) \ - ->set_is_promise_rejection(true); +#define SET_PROMISE_REJECTION_PREDICTION(Name) \ + builtins->builtin(Builtins::k##Name)->set_is_promise_rejection(true); BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(SET_PROMISE_REJECTION_PREDICTION) #undef SET_PROMISE_REJECTION_PREDICTION -#define SET_EXCEPTION_CAUGHT_PREDICTION(Name) \ - Code::cast(builtins->builtins_[Builtins::k##Name]) \ - ->set_is_exception_caught(true); +#define SET_EXCEPTION_CAUGHT_PREDICTION(Name) \ + builtins->builtin(Builtins::k##Name)->set_is_exception_caught(true); BUILTIN_EXCEPTION_CAUGHT_PREDICTION_LIST(SET_EXCEPTION_CAUGHT_PREDICTION) #undef SET_EXCEPTION_CAUGHT_PREDICTION diff --git a/src/external-reference-table.cc b/src/external-reference-table.cc index 47d3cd2d53..e9c4221f9a 100644 --- a/src/external-reference-table.cc +++ b/src/external-reference-table.cc @@ -35,7 +35,8 @@ void ExternalReferenceTable::Init(Isolate* isolate) { AddIsolateAddresses(isolate, &index); AddAccessors(isolate, &index); AddStubCache(isolate, &index); - is_initialized_ = true; + is_initialized_ = static_cast(true); + USE(unused_padding_); CHECK_EQ(kSize, index); } diff --git a/src/external-reference-table.h b/src/external-reference-table.h index 8cb7d95eed..ab11660579 100644 --- a/src/external-reference-table.h +++ b/src/external-reference-table.h @@ -43,20 +43,26 @@ class ExternalReferenceTable { kIsolateAddressReferenceCount + kAccessorReferenceCount + kStubCacheReferenceCount; - uint32_t size() const { return static_cast(kSize); } + static constexpr uint32_t size() { return static_cast(kSize); } Address address(uint32_t i) { return refs_[i].address; } const char* name(uint32_t i) { return refs_[i].name; } - bool is_initialized() const { return is_initialized_; } + bool is_initialized() const { return is_initialized_ != 0; } static const char* ResolveSymbol(void* address); - static uint32_t OffsetOfEntry(uint32_t i) { + static constexpr uint32_t OffsetOfEntry(uint32_t i) { // Used in CodeAssembler::LookupExternalReference. STATIC_ASSERT(offsetof(ExternalReferenceEntry, address) == 0); return i * sizeof(ExternalReferenceEntry); } + static constexpr uint32_t SizeInBytes() { + STATIC_ASSERT(OffsetOfEntry(size()) + 2 * kUInt32Size == + sizeof(ExternalReferenceTable)); + return OffsetOfEntry(size()) + 2 * kUInt32Size; + } + ExternalReferenceTable() {} void Init(Isolate* isolate); @@ -80,7 +86,8 @@ class ExternalReferenceTable { void AddStubCache(Isolate* isolate, int* index); ExternalReferenceEntry refs_[kSize]; - bool is_initialized_ = false; + uint32_t is_initialized_ = 0; // Not bool to guarantee deterministic size. + uint32_t unused_padding_ = 0; // For alignment. 
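The external-reference-table changes above exist because kRootsBuiltinsOffset is computed from sizeof(ExternalReferenceTable) at compile time: a trailing bool whose padding the compiler chooses would make that size toolchain-dependent, so the flag becomes a fixed-width uint32_t with explicit padding, and a STATIC_ASSERT ties the formula to the real size. A self-contained sketch of the guaranteed layout (the field widths are the point; names and entry count are illustrative):

// Why is_initialized_ became uint32_t with explicit padding: the struct's size
// feeds a compile-time offset, so it must not depend on compiler-chosen padding.
#include <cstdint>

struct Entry {
  const void* address;
  const char* name;
};

struct DeterministicTable {
  static constexpr int kSize = 4;  // illustrative entry count
  Entry refs_[kSize];
  uint32_t is_initialized_ = 0;    // not bool: guarantees a 4-byte field
  uint32_t unused_padding_ = 0;    // rounds the tail up to pointer alignment
};

static_assert(sizeof(DeterministicTable) ==
                  sizeof(Entry) * DeterministicTable::kSize + 2 * sizeof(uint32_t),
              "no compiler-inserted tail padding");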
DISALLOW_COPY_AND_ASSIGN(ExternalReferenceTable); }; diff --git a/src/external-reference.cc b/src/external-reference.cc index 8b09c67ccc..68e420c8cb 100644 --- a/src/external-reference.cc +++ b/src/external-reference.cc @@ -128,7 +128,7 @@ ExternalReference ExternalReference::isolate_address(Isolate* isolate) { } ExternalReference ExternalReference::builtins_address(Isolate* isolate) { - return ExternalReference(isolate->builtins()->builtins_table_address()); + return ExternalReference(isolate->heap()->builtin_address(0)); } ExternalReference ExternalReference::handle_scope_implementer_address( diff --git a/src/heap/heap.cc b/src/heap/heap.cc index d272b1283b..d5e9765a40 100644 --- a/src/heap/heap.cc +++ b/src/heap/heap.cc @@ -3867,6 +3867,26 @@ void Heap::ZapCodeObject(Address start_address, int size_in_bytes) { #endif } +Code* Heap::builtin(int index) { + DCHECK(Builtins::IsBuiltinId(index)); + // Code::cast cannot be used here since we access builtins + // during the marking phase of mark sweep. See IC::Clear. + return reinterpret_cast(builtins_[index]); +} + +Address Heap::builtin_address(int index) { + DCHECK(Builtins::IsBuiltinId(index) || index == Builtins::builtin_count); + return reinterpret_cast
(&builtins_[index]); +} + +void Heap::set_builtin(int index, HeapObject* builtin) { + DCHECK(Builtins::IsBuiltinId(index)); + DCHECK(Internals::HasHeapObjectTag(builtin)); + // The given builtin may be completely uninitialized thus we cannot check its + // type here. + builtins_[index] = builtin; +} + void Heap::IterateRoots(RootVisitor* v, VisitMode mode) { IterateStrongRoots(v, mode); IterateWeakRoots(v, mode); @@ -3982,7 +4002,7 @@ void Heap::IterateStrongRoots(RootVisitor* v, VisitMode mode) { // heap. Note that it is not necessary to iterate over code objects // on scavenge collections. if (!isMinorGC) { - isolate_->builtins()->IterateBuiltins(v); + IterateBuiltins(v); v->Synchronize(VisitorSynchronization::kBuiltins); isolate_->interpreter()->IterateDispatchTable(v); v->Synchronize(VisitorSynchronization::kDispatchTable); @@ -4046,6 +4066,12 @@ void Heap::IterateWeakGlobalHandles(RootVisitor* v) { isolate_->global_handles()->IterateWeakRoots(v); } +void Heap::IterateBuiltins(RootVisitor* v) { + for (int i = 0; i < Builtins::builtin_count; i++) { + v->VisitRootPointer(Root::kBuiltins, Builtins::name(i), &builtins_[i]); + } +} + // TODO(1236194): Since the heap size is configurable on the command line // and through the API, we should gracefully handle the case that the heap // size is not big enough to fit all the initial objects. diff --git a/src/heap/heap.h b/src/heap/heap.h index 84128f8aa8..376bd4b20a 100644 --- a/src/heap/heap.h +++ b/src/heap/heap.h @@ -1120,6 +1120,10 @@ class Heap { return kRootsExternalReferenceTableOffset; } + static constexpr int roots_to_builtins_offset() { + return kRootsBuiltinsOffset; + } + // Sets the stub_cache_ (only used when expanding the dictionary). void SetRootCodeStubs(SimpleNumberDictionary* value); @@ -1216,6 +1220,14 @@ class Heap { // Invoked when GC was requested via the stack guard. void HandleGCRequest(); + // =========================================================================== + // Builtins. ================================================================= + // =========================================================================== + + Code* builtin(int index); + Address builtin_address(int index); + void set_builtin(int index, HeapObject* builtin); + // =========================================================================== // Iterators. ================================================================ // =========================================================================== @@ -1229,6 +1241,8 @@ class Heap { void IterateWeakRoots(RootVisitor* v, VisitMode mode); // Iterates over weak global handles. void IterateWeakGlobalHandles(RootVisitor* v); + // Iterates over builtins. + void IterateBuiltins(RootVisitor* v); // =========================================================================== // Store buffer API. ========================================================= @@ -2235,6 +2249,14 @@ class Heap { kRootListLength * kPointerSize; ExternalReferenceTable external_reference_table_; + // As external references above, builtins are accessed through an offset from + // the roots register. Its offset from roots_ must remain static. This is + // verified in Isolate::Init() using runtime checks. 
+ static constexpr int kRootsBuiltinsOffset = + kRootsExternalReferenceTableOffset + + ExternalReferenceTable::SizeInBytes(); + Object* builtins_[Builtins::builtin_count]; + size_t code_range_size_; size_t max_semi_space_size_; size_t initial_semispace_size_; diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h index 34a8517f16..6a88433989 100644 --- a/src/ia32/macro-assembler-ia32.h +++ b/src/ia32/macro-assembler-ia32.h @@ -370,11 +370,16 @@ class TurboAssembler : public Assembler { bool root_array_available() const { return root_array_available_; } void set_root_array_available(bool v) { root_array_available_ = v; } + void set_builtin_index(int builtin_index) { + maybe_builtin_index_ = builtin_index; + } + protected: // This handle will be patched with the code object on installation. Handle code_object_; private: + int maybe_builtin_index_ = -1; // May be set while generating builtins. bool has_frame_ = false; bool root_array_available_ = false; Isolate* const isolate_; diff --git a/src/isolate.cc b/src/isolate.cc index 821200eaf3..c93e754376 100644 --- a/src/isolate.cc +++ b/src/isolate.cc @@ -2503,6 +2503,7 @@ Isolate::Isolate() global_handles_(nullptr), eternal_handles_(nullptr), thread_manager_(nullptr), + builtins_(this), setup_delegate_(nullptr), regexp_stack_(nullptr), date_cache_(nullptr), @@ -3113,6 +3114,8 @@ bool Isolate::Init(StartupDeserializer* des) { static_cast(OFFSET_OF(Isolate, heap_.external_reference_table_)), Internals::kIsolateRootsOffset + Heap::kRootsExternalReferenceTableOffset); + CHECK_EQ(static_cast(OFFSET_OF(Isolate, heap_.builtins_)), + Internals::kIsolateRootsOffset + Heap::kRootsBuiltinsOffset); { HandleScope scope(this); diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc index cd4acb696e..b5136609c7 100644 --- a/src/mips/macro-assembler-mips.cc +++ b/src/mips/macro-assembler-mips.cc @@ -3612,20 +3612,38 @@ bool TurboAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L, #ifdef V8_EMBEDDED_BUILTINS void TurboAssembler::LookupConstant(Register destination, - Handle object) { + Handle object) { CHECK(isolate()->ShouldLoadConstantsFromRootList()); CHECK(root_array_available_); + // Before falling back to the (fairly slow) lookup from the constants table, + // check if any of the fast paths can be applied. + { + int builtin_index; + Heap::RootListIndex root_index; + if (isolate()->heap()->IsRootHandle(object, &root_index)) { + // Roots are loaded relative to the root register. + LoadRoot(destination, root_index); + return; + } else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) { + // Similar to roots, builtins may be loaded from the builtins table. + LoadBuiltin(destination, builtin_index); + return; + } else if (object.is_identical_to(code_object_) && + Builtins::IsBuiltinId(maybe_builtin_index_)) { + // The self-reference loaded through Codevalue() may also be a builtin + // and thus viable for a fast load. + LoadBuiltin(destination, maybe_builtin_index_); + return; + } + } + // Ensure the given object is in the builtins constants table and fetch its // index. BuiltinsConstantsTableBuilder* builder = isolate()->builtins_constants_table_builder(); uint32_t index = builder->AddObject(object); - // TODO(jgruber): Load builtins from the builtins table. - // TODO(jgruber): Ensure that code generation can recognize constant targets - // in kArchCallCodeObject. 
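The CHECK_EQ added to Isolate::Init above is the runtime half of the layout contract: kRootsBuiltinsOffset is derived by formula from the members that precede builtins_, and the check confirms the compiler laid the Heap out accordingly. A self-contained sketch of that kind of invariant (layout and counts are illustrative, and the real code checks at runtime via OFFSET_OF rather than static_assert):

// Sketch of the layout invariant behind kRootsBuiltinsOffset (illustrative layout).
#include <cstddef>
#include <cstdint>

struct FakeExternalReferenceTable {
  const void* refs_[6];
  uint32_t is_initialized_;
  uint32_t unused_padding_;
};

struct FakeHeap {
  void* roots_[4];                                  // root list
  FakeExternalReferenceTable external_reference_table_;
  void* builtins_[3];                               // builtins table
};

// Offsets are computed by formula from the sizes of the preceding members...
constexpr size_t kRootsExternalReferenceTableOffset = 4 * sizeof(void*);
constexpr size_t kRootsBuiltinsOffset =
    kRootsExternalReferenceTableOffset + sizeof(FakeExternalReferenceTable);

// ...and then verified against the actual layout, so any compiler-inserted
// padding between the tables is caught immediately.
static_assert(offsetof(FakeHeap, external_reference_table_) ==
                  kRootsExternalReferenceTableOffset,
              "external reference table follows the roots");
static_assert(offsetof(FakeHeap, builtins_) == kRootsBuiltinsOffset,
              "builtins table follows the external reference table");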
- DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant( Heap::kBuiltinsConstantsTableRootIndex)); @@ -3657,6 +3675,15 @@ void TurboAssembler::LookupExternalReference(Register destination, lw(destination, MemOperand(kRootRegister, roots_to_external_reference_offset)); } + +void TurboAssembler::LoadBuiltin(Register destination, int builtin_index) { + DCHECK(Builtins::IsBuiltinId(builtin_index)); + + int32_t roots_to_builtins_offset = + Heap::roots_to_builtins_offset() + builtin_index * kPointerSize; + + lw(destination, MemOperand(kRootRegister, roots_to_builtins_offset)); +} #endif // V8_EMBEDDED_BUILTINS void TurboAssembler::Jump(Register target, int16_t offset, Condition cond, diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h index 8d7f20079b..4f476129e5 100644 --- a/src/mips/macro-assembler-mips.h +++ b/src/mips/macro-assembler-mips.h @@ -251,9 +251,10 @@ class TurboAssembler : public Assembler { void li(Register dst, ExternalReference value, LiFlags mode = OPTIMIZE_SIZE); #ifdef V8_EMBEDDED_BUILTINS - void LookupConstant(Register destination, Handle object); + void LookupConstant(Register destination, Handle object); void LookupExternalReference(Register destination, ExternalReference reference); + void LoadBuiltin(Register destination, int builtin_index); #endif // V8_EMBEDDED_BUILTINS // Jump, Call, and Ret pseudo instructions implementing inter-working. @@ -849,6 +850,10 @@ class TurboAssembler : public Assembler { bool root_array_available() const { return root_array_available_; } void set_root_array_available(bool v) { root_array_available_ = v; } + void set_builtin_index(int builtin_index) { + maybe_builtin_index_ = builtin_index; + } + protected: void BranchLong(Label* L, BranchDelaySlot bdslot); @@ -860,6 +865,7 @@ class TurboAssembler : public Assembler { Handle code_object_; private: + int maybe_builtin_index_ = -1; // May be set while generating builtins. bool has_frame_ = false; bool root_array_available_ = true; Isolate* const isolate_; diff --git a/src/mips64/macro-assembler-mips64.cc b/src/mips64/macro-assembler-mips64.cc index 314c70e8a1..477f9cd6bc 100644 --- a/src/mips64/macro-assembler-mips64.cc +++ b/src/mips64/macro-assembler-mips64.cc @@ -4121,20 +4121,38 @@ bool TurboAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L, #ifdef V8_EMBEDDED_BUILTINS void TurboAssembler::LookupConstant(Register destination, - Handle object) { + Handle object) { CHECK(isolate()->ShouldLoadConstantsFromRootList()); CHECK(root_array_available_); + // Before falling back to the (fairly slow) lookup from the constants table, + // check if any of the fast paths can be applied. + { + int builtin_index; + Heap::RootListIndex root_index; + if (isolate()->heap()->IsRootHandle(object, &root_index)) { + // Roots are loaded relative to the root register. + LoadRoot(destination, root_index); + return; + } else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) { + // Similar to roots, builtins may be loaded from the builtins table. + LoadBuiltin(destination, builtin_index); + return; + } else if (object.is_identical_to(code_object_) && + Builtins::IsBuiltinId(maybe_builtin_index_)) { + // The self-reference loaded through Codevalue() may also be a builtin + // and thus viable for a fast load. + LoadBuiltin(destination, maybe_builtin_index_); + return; + } + } + // Ensure the given object is in the builtins constants table and fetch its // index. 
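IsBuiltinHandle, which the per-architecture fast paths above call, never dereferences the handle: it only checks whether the handle's location lies inside the builtins table and, if so, converts the slot address back into an index. A self-contained sketch of that address arithmetic (the table and sizes are stand-ins):

// Sketch of IsBuiltinHandle's address arithmetic (stand-in table, not V8's heap).
#include <cstdint>

constexpr int kBuiltinCount = 16;    // illustrative
constexpr int kPointerSizeLog2 = 3;  // assumption: 64-bit pointers

struct Object;
Object* g_builtins[kBuiltinCount];   // stands in for Heap::builtins_

bool IsBuiltinHandle(Object** handle_location, int* index) {
  uintptr_t location = reinterpret_cast<uintptr_t>(handle_location);
  uintptr_t start = reinterpret_cast<uintptr_t>(&g_builtins[0]);
  uintptr_t end = reinterpret_cast<uintptr_t>(&g_builtins[kBuiltinCount]);
  if (location < start || location >= end) return false;
  // The handle points directly at a table slot; the byte distance from the
  // table base, divided by the pointer size, is the builtin index.
  *index = static_cast<int>(location - start) >> kPointerSizeLog2;
  return true;
}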
BuiltinsConstantsTableBuilder* builder = isolate()->builtins_constants_table_builder(); uint32_t index = builder->AddObject(object); - // TODO(jgruber): Load builtins from the builtins table. - // TODO(jgruber): Ensure that code generation can recognize constant targets - // in kArchCallCodeObject. - DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant( Heap::kBuiltinsConstantsTableRootIndex)); @@ -4166,6 +4184,15 @@ void TurboAssembler::LookupExternalReference(Register destination, Ld(destination, MemOperand(kRootRegister, roots_to_external_reference_offset)); } + +void TurboAssembler::LoadBuiltin(Register destination, int builtin_index) { + DCHECK(Builtins::IsBuiltinId(builtin_index)); + + int32_t roots_to_builtins_offset = + Heap::roots_to_builtins_offset() + builtin_index * kPointerSize; + + Ld(destination, MemOperand(kRootRegister, roots_to_builtins_offset)); +} #endif // V8_EMBEDDED_BUILTINS void TurboAssembler::Jump(Register target, Condition cond, Register rs, diff --git a/src/mips64/macro-assembler-mips64.h b/src/mips64/macro-assembler-mips64.h index d4c8dff193..315f8b596a 100644 --- a/src/mips64/macro-assembler-mips64.h +++ b/src/mips64/macro-assembler-mips64.h @@ -271,9 +271,10 @@ class TurboAssembler : public Assembler { void li(Register dst, ExternalReference value, LiFlags mode = OPTIMIZE_SIZE); #ifdef V8_EMBEDDED_BUILTINS - void LookupConstant(Register destination, Handle object); + void LookupConstant(Register destination, Handle object); void LookupExternalReference(Register destination, ExternalReference reference); + void LoadBuiltin(Register destination, int builtin_index); #endif // V8_EMBEDDED_BUILTINS // Jump, Call, and Ret pseudo instructions implementing inter-working. @@ -864,6 +865,10 @@ class TurboAssembler : public Assembler { bool root_array_available() const { return root_array_available_; } void set_root_array_available(bool v) { root_array_available_ = v; } + void set_builtin_index(int builtin_index) { + maybe_builtin_index_ = builtin_index; + } + protected: inline Register GetRtAsRegisterHelper(const Operand& rt, Register scratch); inline int32_t GetOffset(int32_t offset, Label* L, OffsetSize bits); @@ -872,6 +877,7 @@ class TurboAssembler : public Assembler { Handle code_object_; private: + int maybe_builtin_index_ = -1; // May be set while generating builtins. bool has_frame_ = false; bool root_array_available_ = true; Isolate* const isolate_; diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc index 14b3a97d2e..7549486e04 100644 --- a/src/x64/macro-assembler-x64.cc +++ b/src/x64/macro-assembler-x64.cc @@ -156,20 +156,38 @@ void MacroAssembler::Store(ExternalReference destination, Register source) { #ifdef V8_EMBEDDED_BUILTINS void TurboAssembler::LookupConstant(Register destination, - Handle object) { + Handle object) { CHECK(isolate()->ShouldLoadConstantsFromRootList()); CHECK(root_array_available_); + // Before falling back to the (fairly slow) lookup from the constants table, + // check if any of the fast paths can be applied. + { + int builtin_index; + Heap::RootListIndex root_index; + if (isolate()->heap()->IsRootHandle(object, &root_index)) { + // Roots are loaded relative to the root register. + LoadRoot(destination, root_index); + return; + } else if (isolate()->builtins()->IsBuiltinHandle(object, &builtin_index)) { + // Similar to roots, builtins may be loaded from the builtins table. 
+ LoadBuiltin(destination, builtin_index); + return; + } else if (object.is_identical_to(code_object_) && + Builtins::IsBuiltinId(maybe_builtin_index_)) { + // The self-reference loaded through Codevalue() may also be a builtin + // and thus viable for a fast load. + LoadBuiltin(destination, maybe_builtin_index_); + return; + } + } + // Ensure the given object is in the builtins constants table and fetch its // index. BuiltinsConstantsTableBuilder* builder = isolate()->builtins_constants_table_builder(); uint32_t index = builder->AddObject(object); - // TODO(jgruber): Load builtins from the builtins table. - // TODO(jgruber): Ensure that code generation can recognize constant targets - // in kArchCallCodeObject. - DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant( Heap::kBuiltinsConstantsTableRootIndex)); @@ -200,6 +218,16 @@ void TurboAssembler::LookupExternalReference(Register destination, movp(destination, Operand(kRootRegister, roots_to_external_reference_offset)); } + +void TurboAssembler::LoadBuiltin(Register destination, int builtin_index) { + DCHECK(Builtins::IsBuiltinId(builtin_index)); + + int32_t roots_to_builtins_offset = Heap::roots_to_builtins_offset() - + kRootRegisterBias + + builtin_index * kPointerSize; + + movp(destination, Operand(kRootRegister, roots_to_builtins_offset)); +} #endif // V8_EMBEDDED_BUILTINS void TurboAssembler::LoadAddress(Register destination, @@ -1401,12 +1429,7 @@ void TurboAssembler::Move(Register result, Handle object, RelocInfo::Mode rmode) { #ifdef V8_EMBEDDED_BUILTINS if (root_array_available_ && isolate()->ShouldLoadConstantsFromRootList()) { - Heap::RootListIndex root_index; - if (!isolate()->heap()->IsRootHandle(object, &root_index)) { - LookupConstant(result, object); - } else { - LoadRoot(result, root_index); - } + LookupConstant(result, object); return; } #endif // V8_EMBEDDED_BUILTINS diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h index f59a5d593b..47e1ed2006 100644 --- a/src/x64/macro-assembler-x64.h +++ b/src/x64/macro-assembler-x64.h @@ -366,9 +366,10 @@ class TurboAssembler : public Assembler { void LoadAddress(Register destination, ExternalReference source); #ifdef V8_EMBEDDED_BUILTINS - void LookupConstant(Register destination, Handle object); + void LookupConstant(Register destination, Handle object); void LookupExternalReference(Register destination, ExternalReference reference); + void LoadBuiltin(Register destination, int builtin_index); #endif // V8_EMBEDDED_BUILTINS // Operand pointing to an external reference. @@ -529,6 +530,10 @@ class TurboAssembler : public Assembler { void ResetSpeculationPoisonRegister(); + void set_builtin_index(int builtin_index) { + maybe_builtin_index_ = builtin_index; + } + protected: static const int kSmiShift = kSmiTagSize + kSmiShiftSize; int smi_count = 0; @@ -546,6 +551,7 @@ class TurboAssembler : public Assembler { Handle code_object_; private: + int maybe_builtin_index_ = -1; // May be set while generating builtins. bool has_frame_ = false; Isolate* const isolate_; };
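x64 is the one backend whose LoadBuiltin subtracts kRootRegisterBias: on that port kRootRegister holds the roots address plus a fixed bias, so the bias has to be folded back out of the displacement before the single movp. A sketch of that adjustment with illustrative numbers (the real bias and table offset differ):

// Sketch of the x64 displacement computation in LoadBuiltin (illustrative values).
#include <cstdint>

constexpr int kPointerSize = 8;
constexpr int kRootRegisterBias = 128;          // assumption: root register points past roots_
constexpr int kRootsToBuiltinsOffset = 0x2400;  // hypothetical Heap::roots_to_builtins_offset()

constexpr int32_t BuiltinDisplacement(int builtin_index) {
  // movp destination, [kRootRegister + disp]; the bias baked into the register
  // must be subtracted from the offset that was measured from roots_.
  return kRootsToBuiltinsOffset - kRootRegisterBias + builtin_index * kPointerSize;
}

static_assert(BuiltinDisplacement(0) == 0x2400 - 128, "slot 0, bias folded out");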