Remove remnant code for read-only builtin trampolines

Off-heap trampolines and the read-only, non-executable builtin code path no
longer exist. Remove the leftover support code: OffHeapTrampolineGenerator,
Builtins::GenerateOffHeapTrampolineFor, Builtins::CodeObjectIsExecutable,
CodeBuilder::set_is_executable, and the off_heap_trampoline_relocation_info
read-only root.

Bug: v8:13654
Change-Id: I76468164125d148bfdf29f6178fa5e0e608b949f
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4177094
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Commit-Queue: Jakob Linke <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/main@{#85383}

commit 21ab5ea94c (parent 1009874faa)
Author: Jakob Linke, 2023-01-18 15:14:39 +01:00, committed by V8 LUCI CQ
15 changed files with 9 additions and 222 deletions
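
For context on what the deleted code used to do: the removed OffHeapTrampolineGenerator emitted tiny on-heap stubs whose only job was to forward control to the corresponding builtin in the embedded (off-heap) blob, either as a jump (kJump) or as a trap (kAbort). The standalone C++ sketch below is purely illustrative and not V8 code; EmbeddedBuiltin, JumpTrampoline, and AbortTrampoline are invented names standing in for the real machinery.

// Illustrative sketch only -- not part of V8 or of this commit.
#include <cstdio>
#include <cstdlib>

// Stand-in for builtin code living in the embedded (off-heap) blob.
static int EmbeddedBuiltin(int x) { return x * 2; }

// The kJump flavour: a stub that does nothing but forward to the off-heap
// entry point (the real generated stub tail-called it).
static int JumpTrampoline(int x) { return EmbeddedBuiltin(x); }

// The kAbort flavour: a stub that traps if it is ever executed.
[[maybe_unused]] static int AbortTrampoline(int) { std::abort(); }

int main() {
  std::printf("%d\n", JumpTrampoline(21));  // prints 42
  return 0;
}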

@@ -359,86 +359,6 @@ void Builtins::EmitCodeCreateEvents(Isolate* isolate) {
}
}
namespace {
enum TrampolineType { kAbort, kJump };
class OffHeapTrampolineGenerator {
public:
explicit OffHeapTrampolineGenerator(Isolate* isolate)
: isolate_(isolate),
masm_(isolate, AssemblerOptions::DefaultForOffHeapTrampoline(isolate),
CodeObjectRequired::kYes,
ExternalAssemblerBuffer(buffer_, kBufferSize)) {}
CodeDesc Generate(Address off_heap_entry, TrampolineType type) {
// Generate replacement code that simply tail-calls the off-heap code.
DCHECK(!masm_.has_frame());
{
FrameScope scope(&masm_, StackFrame::NO_FRAME_TYPE);
if (type == TrampolineType::kJump) {
masm_.CodeEntry();
masm_.JumpToOffHeapInstructionStream(off_heap_entry);
} else {
DCHECK_EQ(type, TrampolineType::kAbort);
masm_.Trap();
}
}
CodeDesc desc;
masm_.GetCode(isolate_, &desc);
return desc;
}
Handle<HeapObject> CodeObject() { return masm_.CodeObject(); }
private:
Isolate* isolate_;
// Enough to fit the single jmp.
static constexpr int kBufferSize = 256;
byte buffer_[kBufferSize];
MacroAssembler masm_;
};
constexpr int OffHeapTrampolineGenerator::kBufferSize;
} // namespace
// static
Handle<Code> Builtins::GenerateOffHeapTrampolineFor(
Isolate* isolate, Address off_heap_entry, int32_t kind_specific_flags,
bool generate_jump_to_instruction_stream) {
DCHECK_NOT_NULL(isolate->embedded_blob_code());
DCHECK_NE(0, isolate->embedded_blob_code_size());
OffHeapTrampolineGenerator generator(isolate);
CodeDesc desc =
generator.Generate(off_heap_entry, generate_jump_to_instruction_stream
? TrampolineType::kJump
: TrampolineType::kAbort);
return Factory::CodeBuilder(isolate, desc, CodeKind::BUILTIN)
.set_kind_specific_flags(kind_specific_flags)
.set_self_reference(generator.CodeObject())
.set_is_executable(generate_jump_to_instruction_stream)
.Build();
}
// static
Handle<ByteArray> Builtins::GenerateOffHeapTrampolineRelocInfo(
Isolate* isolate) {
OffHeapTrampolineGenerator generator(isolate);
// Generate a jump to a dummy address as we're not actually interested in the
// generated instruction stream.
CodeDesc desc = generator.Generate(kNullAddress, TrampolineType::kJump);
Handle<ByteArray> reloc_info = isolate->factory()->NewByteArray(
desc.reloc_size, AllocationType::kReadOnly);
InstructionStream::CopyRelocInfoToByteArray(*reloc_info, desc);
return reloc_info;
}
// static
Handle<Code> Builtins::CreateInterpreterEntryTrampolineForProfiling(
Isolate* isolate) {
@@ -482,7 +402,6 @@ Handle<Code> Builtins::CreateInterpreterEntryTrampolineForProfiling(
.set_kind_specific_flags(kind_specific_flags)
// Mimic the InterpreterEntryTrampoline.
.set_builtin(Builtin::kInterpreterEntryTrampoline)
.set_is_executable(true)
.Build();
}
@@ -527,65 +446,6 @@ bool Builtins::AllowDynamicFunction(Isolate* isolate, Handle<JSFunction> target,
return isolate->MayAccess(responsible_context, target_global_proxy);
}
// static
bool Builtins::CodeObjectIsExecutable(Builtin builtin) {
// If the runtime/optimized code always knows when executing a given builtin
// that it is a builtin, then that builtin does not need an executable
// InstructionStream object. Such InstructionStream objects can go in
// read_only_space (and can even be smaller with no branch instruction), thus
// saving memory.
// Builtins with JS linkage will always have executable InstructionStream
// objects since they can be called directly from jitted code with no way of
// determining that they are builtins at generation time. E.g.
// f = Array.of;
// f(1, 2, 3);
// TODO(delphick): This is probably too loose but for now Wasm can call any JS
// linkage builtin via its InstructionStream object. Once Wasm is fixed this
// can either be tightened or removed completely.
if (Builtins::KindOf(builtin) != BCH && HasJSLinkage(builtin)) {
return true;
}
// There are some other non-TF builtins that also have JS linkage like
// InterpreterEntryTrampoline which are explicitly allow-listed below.
// TODO(delphick): Some of these builtins do not fit with the above, but
// currently cause problems if they're not executable. This list should be
// pared down as much as possible.
switch (builtin) {
case Builtin::kInterpreterEntryTrampoline:
case Builtin::kCompileLazy:
case Builtin::kCompileLazyDeoptimizedCode:
case Builtin::kCallFunction_ReceiverIsNullOrUndefined:
case Builtin::kCallFunction_ReceiverIsNotNullOrUndefined:
case Builtin::kCallFunction_ReceiverIsAny:
case Builtin::kCallBoundFunction:
case Builtin::kCall_ReceiverIsNullOrUndefined:
case Builtin::kCall_ReceiverIsNotNullOrUndefined:
case Builtin::kCall_ReceiverIsAny:
case Builtin::kHandleApiCall:
case Builtin::kInstantiateAsmJs:
#if V8_ENABLE_WEBASSEMBLY
case Builtin::kGenericJSToWasmWrapper:
case Builtin::kWasmReturnPromiseOnSuspend:
#endif // V8_ENABLE_WEBASSEMBLY
// TODO(delphick): Remove this when calls to it have the trampoline inlined
// or are converted to use kCallBuiltinPointer.
case Builtin::kCEntry_Return1_ArgvOnStack_NoBuiltinExit:
return true;
default:
#if V8_TARGET_ARCH_MIPS64
// TODO(Loongson): Moving non-JS-linkage builtins' code objects into RO_SPACE
// caused crashes on MIPS, and we need some time to handle it. For now,
// disable this change on the MIPS platform.
return true;
#else
return false;
#endif // V8_TARGET_ARCH_MIPS64
}
}
Builtin ExampleBuiltinForTorqueFunctionPointerType(
size_t function_pointer_type_id) {
switch (function_pointer_type_id) {

@@ -220,28 +220,10 @@ class Builtins {
static bool AllowDynamicFunction(Isolate* isolate, Handle<JSFunction> target,
Handle<JSObject> target_global_proxy);
// Creates a trampoline code object that jumps to the given off-heap entry.
// The result should not be used directly, but only from the related Factory
// function.
// TODO(delphick): Come up with a better name since it may not generate an
// executable trampoline.
static Handle<Code> GenerateOffHeapTrampolineFor(
Isolate* isolate, Address off_heap_entry, int32_t kind_specific_flags,
bool generate_jump_to_instruction_stream);
// Generate the RelocInfo ByteArray that would be generated for an offheap
// trampoline.
static Handle<ByteArray> GenerateOffHeapTrampolineRelocInfo(Isolate* isolate);
// Creates a copy of InterpreterEntryTrampolineForProfiling in the code space.
static Handle<Code> CreateInterpreterEntryTrampolineForProfiling(
Isolate* isolate);
// Only builtins with JS linkage should ever need to be called via their
// trampoline InstructionStream object. The remaining builtins have
// non-executable InstructionStream objects.
static bool CodeObjectIsExecutable(Builtin builtin);
static bool IsJSEntryVariant(Builtin builtin) {
switch (builtin) {
case Builtin::kJSEntry:

@@ -2213,7 +2213,6 @@ void TurboAssembler::Call(Handle<Code> code, RelocInfo::Mode rmode) {
return;
}
DCHECK(FromCode(*code).IsExecutable());
DCHECK(RelocInfo::IsCodeTarget(rmode));
if (CanUseNearCallOrJump(rmode)) {

@@ -85,15 +85,6 @@ AssemblerOptions AssemblerOptions::Default(Isolate* isolate) {
return options;
}
AssemblerOptions AssemblerOptions::DefaultForOffHeapTrampoline(
Isolate* isolate) {
AssemblerOptions options = AssemblerOptions::Default(isolate);
// Off-heap trampolines may not contain any metadata since their metadata
// offsets refer to the off-heap metadata area.
options.emit_code_comments = false;
return options;
}
namespace {
class DefaultAssemblerBuffer : public AssemblerBuffer {

@@ -193,7 +193,6 @@ struct V8_EXPORT_PRIVATE AssemblerOptions {
bool emit_code_comments = v8_flags.code_comments;
static AssemblerOptions Default(Isolate* isolate);
static AssemblerOptions DefaultForOffHeapTrampoline(Isolate* isolate);
};
class AssemblerBuffer {

@@ -1026,7 +1026,6 @@ void Assembler::call(Label* L) {
void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
DCHECK(FromCode(*target).IsExecutable());
EnsureSpace ensure_space(this);
// 1110 1000 #32-bit disp.
emit(0xE8);

@@ -1151,11 +1151,6 @@ void InstructionStream::InstructionStreamVerify(Isolate* isolate) {
CHECK_IMPLIES(!ReadOnlyHeap::Contains(*this),
IsAligned(raw_instruction_start(), kCodeAlignment));
CHECK_EQ(*this, code(kAcquireLoad).instruction_stream());
// TODO(delphick): Refactor Factory::CodeBuilder::BuildInternal, so that the
// following CHECK works for builtin trampolines. It currently fails because
// InstructionStreamVerify is called halfway through constructing the
// trampoline and so not everything is set up.
// CHECK_EQ(ReadOnlyHeap::Contains(*this), !IsExecutable());
relocation_info().ObjectVerify(isolate);
CHECK(V8_ENABLE_THIRD_PARTY_HEAP_BOOL ||
CodeSize() <= MemoryChunkLayout::MaxRegularCodeObjectSize() ||

@@ -114,11 +114,7 @@ MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
if (CompiledWithConcurrentBaseline()) {
code = local_isolate_->factory()->NewCode(0, AllocationType::kOld);
} else {
AllocationType allocation_type =
V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
? AllocationType::kOld
: AllocationType::kReadOnly;
code = factory->NewCode(0, allocation_type);
code = factory->NewCode(0, AllocationType::kOld);
}
static constexpr bool kIsNotOffHeapTrampoline = false;
@@ -264,9 +260,7 @@ MaybeHandle<InstructionStream> Factory::CodeBuilder::AllocateInstructionStream(
Heap* heap = isolate_->heap();
HeapAllocator* allocator = heap->allocator();
HeapObject result;
AllocationType allocation_type = V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
? AllocationType::kCode
: AllocationType::kReadOnly;
const AllocationType allocation_type = AllocationType::kCode;
const int object_size = InstructionStream::SizeFor(code_desc_.body_size());
if (retry_allocation_or_fail) {
result = allocator->AllocateRawWith<HeapAllocator::kRetryOrFail>(
@@ -285,12 +279,10 @@ MaybeHandle<InstructionStream> Factory::CodeBuilder::AllocateInstructionStream(
*isolate_->factory()->instruction_stream_map(), SKIP_WRITE_BARRIER);
Handle<InstructionStream> code =
handle(InstructionStream::cast(result), isolate_);
if (is_executable_) {
DCHECK(IsAligned(code->address(), kCodeAlignment));
DCHECK_IMPLIES(
!V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
heap->code_region().contains(code->address()));
}
DCHECK(IsAligned(code->address(), kCodeAlignment));
DCHECK_IMPLIES(
!V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
heap->code_region().contains(code->address()));
return code;
}
@@ -298,12 +290,9 @@ MaybeHandle<InstructionStream>
Factory::CodeBuilder::AllocateConcurrentSparkplugInstructionStream(
bool retry_allocation_or_fail) {
LocalHeap* heap = local_isolate_->heap();
AllocationType allocation_type = V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
? AllocationType::kCode
: AllocationType::kReadOnly;
const int object_size = InstructionStream::SizeFor(code_desc_.body_size());
HeapObject result;
if (!heap->AllocateRaw(object_size, allocation_type).To(&result)) {
if (!heap->AllocateRaw(object_size, AllocationType::kCode).To(&result)) {
return MaybeHandle<InstructionStream>();
}
CHECK(!result.is_null());
@@ -315,7 +304,7 @@ Factory::CodeBuilder::AllocateConcurrentSparkplugInstructionStream(
*local_isolate_->factory()->instruction_stream_map(), SKIP_WRITE_BARRIER);
Handle<InstructionStream> code =
handle(InstructionStream::cast(result), local_isolate_);
DCHECK_IMPLIES(is_executable_, IsAligned(code->address(), kCodeAlignment));
DCHECK(IsAligned(code->address(), kCodeAlignment));
return code;
}

@@ -1007,12 +1007,6 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> {
return *this;
}
CodeBuilder& set_is_executable(bool executable) {
DCHECK_EQ(kind_, CodeKind::BUILTIN);
is_executable_ = executable;
return *this;
}
CodeBuilder& set_kind_specific_flags(int32_t flags) {
kind_specific_flags_ = flags;
return *this;
@@ -1054,7 +1048,6 @@ class V8_EXPORT_PRIVATE Factory : public FactoryBase<Factory> {
DeoptimizationData::Empty(isolate_);
Handle<HeapObject> interpreter_data_;
BasicBlockProfilerData* profiler_data_ = nullptr;
bool is_executable_ = true;
bool is_turbofanned_ = false;
int stack_slots_ = 0;
};

@@ -982,11 +982,6 @@ void Heap::CreateInitialReadOnlyObjects() {
Handle<ScopeInfo> shadow_realm_scope_info =
ScopeInfo::CreateForShadowRealmNativeContext(isolate());
set_shadow_realm_scope_info(*shadow_realm_scope_info);
// Canonical off-heap trampoline data.
auto reloc_info = Builtins::GenerateOffHeapTrampolineRelocInfo(isolate_);
set_off_heap_trampoline_relocation_info(*reloc_info);
StaticRootsEnsureAllocatedSize(*reloc_info, 4 * kTaggedSize);
}
void Heap::CreateInitialMutableObjects() {

@@ -1274,11 +1274,6 @@ bool InstructionStream::IsWeakObjectInDeoptimizationLiteralArray(
HeapObject::cast(object));
}
bool InstructionStream::IsExecutable() {
return !Builtins::IsBuiltinId(builtin_id()) || !is_off_heap_trampoline() ||
Builtins::CodeObjectIsExecutable(builtin_id());
}
// This field has to have relaxed atomic accessors because it is accessed in the
// concurrent marker.
static_assert(FIELD_SIZE(Code::kKindSpecificFlagsOffset) == kInt32Size);

@@ -704,10 +704,6 @@ class InstructionStream : public HeapObject {
static inline bool IsWeakObjectInDeoptimizationLiteralArray(Object object);
// Returns false if this is an embedded builtin InstructionStream object
// that's in read_only_space and hence doesn't have execute permissions.
inline bool IsExecutable();
// Returns true if the function is inlined in the code.
bool Inlines(SharedFunctionInfo sfi);

@@ -218,9 +218,6 @@ class Symbol;
V(HeapObject, self_reference_marker, SelfReferenceMarker) \
/* Marker for basic-block usage counters array during code-generation */ \
V(Oddball, basic_block_counters_marker, BasicBlockCountersMarker) \
/* Canonical off-heap trampoline data */ \
V(ByteArray, off_heap_trampoline_relocation_info, \
OffHeapTrampolineRelocationInfo) \
/* Canonical scope infos */ \
V(ScopeInfo, global_this_binding_scope_info, GlobalThisBindingScopeInfo) \
V(ScopeInfo, empty_function_scope_info, EmptyFunctionScopeInfo) \

@@ -175,7 +175,6 @@ struct kStaticReadOnlyRoot {
static constexpr Tagged_t single_character_string_table = 0x4035;
static constexpr Tagged_t self_reference_marker = 0x7c55;
static constexpr Tagged_t basic_block_counters_marker = 0x7c95;
static constexpr Tagged_t off_heap_trampoline_relocation_info = 0x831d;
static constexpr Tagged_t global_this_binding_scope_info = 0x82a9;
static constexpr Tagged_t empty_function_scope_info = 0x82c9;
static constexpr Tagged_t native_scope_info = 0x82ed;
@@ -767,7 +766,7 @@ struct kStaticReadOnlyRoot {
static constexpr Tagged_t store_handler3_map = 0x2a71;
};
static constexpr std::array<Tagged_t, 737> StaticReadOnlyRootsPointerTable = {
static constexpr std::array<Tagged_t, 736> StaticReadOnlyRootsPointerTable = {
kStaticReadOnlyRoot::free_space_map,
kStaticReadOnlyRoot::one_pointer_filler_map,
kStaticReadOnlyRoot::two_pointer_filler_map,
@@ -918,7 +917,6 @@ static constexpr std::array<Tagged_t, 737> StaticReadOnlyRootsPointerTable = {
kStaticReadOnlyRoot::single_character_string_table,
kStaticReadOnlyRoot::self_reference_marker,
kStaticReadOnlyRoot::basic_block_counters_marker,
kStaticReadOnlyRoot::off_heap_trampoline_relocation_info,
kStaticReadOnlyRoot::global_this_binding_scope_info,
kStaticReadOnlyRoot::empty_function_scope_info,
kStaticReadOnlyRoot::native_scope_info,

@@ -538,7 +538,6 @@ KNOWN_OBJECTS = {
("read_only_space", 0x082c9): "EmptyFunctionScopeInfo",
("read_only_space", 0x082ed): "NativeScopeInfo",
("read_only_space", 0x08305): "ShadowRealmScopeInfo",
("read_only_space", 0x0831d): "OffHeapTrampolineRelocationInfo",
("old_space", 0x0426d): "ArgumentsIteratorAccessor",
("old_space", 0x04285): "ArrayLengthAccessor",
("old_space", 0x0429d): "BoundFunctionLengthAccessor",