[wasm] Allocate one far jump table per code space
This moves the code to allocate the far jump table from {SetRuntimeStubs} to
{AddCodeSpace}, so that one such table is allocated per code space. The
{runtime_stub_table_} and {runtime_stub_entries_} fields no longer make sense
and are replaced by calls to {GetNearRuntimeStubEntry} and {GetRuntimeStubId}.

R=mstarzinger@chromium.org

Bug: v8:9477
Change-Id: Ie1f5c9d4eb282270337a684c34f097d8077fdfbb
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1795348
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#63700}
parent 5b5a360857
commit 3d2159462c
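For orientation, here is a small standalone C++ sketch of the lookup scheme the commit message describes: each code space carries its own far jump table, and a runtime stub entry is resolved by finding the code space that contains the caller and indexing that space's table by stub id. This is not part of the CL; the simplified struct, the slot-size constant, and the free function are assumptions made purely for illustration (the real logic is in NativeModule::GetNearRuntimeStubEntry in the diff below).

// Illustrative sketch only -- not part of this CL. Simplified stand-ins for
// the per-code-space far jump table lookup; names mirror the CL loosely.
#include <cstddef>
#include <cstdint>
#include <vector>

struct CodeSpaceData {
  uintptr_t region_start;          // start address of the code space
  size_t region_size;              // size of the code space in bytes
  uintptr_t far_jump_table_start;  // start of this space's far jump table
};

constexpr uint32_t kFarJumpTableSlotSize = 16;  // assumed slot size

// Resolve a runtime stub entry that is near-call reachable from {near_to}:
// pick the code space containing {near_to}, then index its far jump table.
uintptr_t GetNearRuntimeStubEntry(const std::vector<CodeSpaceData>& spaces,
                                  uint32_t stub_id, uintptr_t near_to) {
  for (const CodeSpaceData& space : spaces) {
    if (near_to >= space.region_start &&
        near_to < space.region_start + space.region_size) {
      return space.far_jump_table_start + stub_id * kFarJumpTableSlotSize;
    }
  }
  return 0;  // {near_to} is not inside any code space
}

The reverse mapping ({GetRuntimeStubId} in the CL) works the same way in the other direction: find the far jump table containing the target address and divide the offset by the slot size.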
@@ -290,6 +290,7 @@ class LockGuard final {
 };
 
 using MutexGuard = LockGuard<Mutex>;
+using RecursiveMutexGuard = LockGuard<RecursiveMutex>;
 
 enum MutexSharedType : bool { kShared = true, kExclusive = false };
 
@@ -85,6 +85,12 @@ class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
     return slot_index * kFarJumpTableSlotSize;
   }
 
+  // Translate a far jump table offset to the index into the table.
+  static uint32_t FarJumpSlotOffsetToIndex(uint32_t offset) {
+    DCHECK_EQ(0, offset % kFarJumpTableSlotSize);
+    return offset / kFarJumpTableSlotSize;
+  }
+
   // Determine the size of a far jump table containing the given number of
   // slots.
   static constexpr uint32_t SizeForNumberOfFarJumpSlots(
@@ -1370,7 +1370,6 @@ std::shared_ptr<NativeModule> CompileToNativeModule(
   auto native_module = isolate->wasm_engine()->NewNativeModule(
       isolate, enabled, std::move(module));
   native_module->SetWireBytes(std::move(wire_bytes_copy));
-  native_module->SetRuntimeStubs(isolate);
 
   CompileNativeModule(isolate, thrower, wasm_module, native_module.get());
   if (thrower->error()) return {};
@@ -1509,7 +1508,6 @@ void AsyncCompileJob::CreateNativeModule(
   native_module_ = isolate_->wasm_engine()->NewNativeModule(
       isolate_, enabled_features_, std::move(module));
   native_module_->SetWireBytes({std::move(bytes_copy_), wire_bytes_.length()});
-  native_module_->SetRuntimeStubs(isolate_);
 
   if (stream_) stream_->NotifyNativeModuleCreated(native_module_);
 }
@@ -244,7 +244,6 @@ void WasmCode::Validate() const {
         CHECK_NOT_NULL(code);
 #ifdef V8_EMBEDDED_BUILTINS
         CHECK_EQ(WasmCode::kJumpTable, code->kind());
-        CHECK_EQ(native_module()->runtime_stub_table_, code);
         CHECK(code->contains(target));
 #else
         CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
@@ -539,7 +538,7 @@ Vector<byte> WasmCodeAllocator::AllocateForCode(NativeModule* native_module,
 
 Vector<byte> WasmCodeAllocator::AllocateForCodeInRegion(
     NativeModule* native_module, size_t size, base::AddressRegion region) {
-  base::MutexGuard lock(&mutex_);
+  base::RecursiveMutexGuard lock(&mutex_);
   DCHECK_EQ(code_manager_, native_module->engine()->code_manager());
   DCHECK_LT(0, size);
   v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
@@ -614,7 +613,7 @@ Vector<byte> WasmCodeAllocator::AllocateForCodeInRegion(
 }
 
 bool WasmCodeAllocator::SetExecutable(bool executable) {
-  base::MutexGuard lock(&mutex_);
+  base::RecursiveMutexGuard lock(&mutex_);
   if (is_executable_ == executable) return true;
   TRACE_HEAP("Setting module %p as executable: %d.\n", this, executable);
 
@@ -677,7 +676,7 @@ void WasmCodeAllocator::FreeCode(Vector<WasmCode* const> codes) {
   freed_code_size_.fetch_add(code_size);
 
   // Merge {freed_regions} into {freed_code_space_} and discard full pages.
-  base::MutexGuard guard(&mutex_);
+  base::RecursiveMutexGuard guard(&mutex_);
   PageAllocator* allocator = GetPlatformPageAllocator();
   size_t commit_page_size = allocator->CommitPageSize();
   for (auto region : freed_regions.regions()) {
@@ -701,7 +700,7 @@ void WasmCodeAllocator::FreeCode(Vector<WasmCode* const> codes) {
 }
 
 base::AddressRegion WasmCodeAllocator::GetSingleCodeRegion() const {
-  base::MutexGuard lock(&mutex_);
+  base::RecursiveMutexGuard lock(&mutex_);
   DCHECK_EQ(1, owned_code_space_.size());
   return owned_code_space_[0].region();
 }
@@ -822,8 +821,8 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
                    code->InstructionStart();
   int mode_mask = RelocInfo::kApplyMask |
                   RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
-  Address constant_pool_start =
-      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
+  Address dst_code_addr = reinterpret_cast<Address>(dst_code_bytes.begin());
+  Address constant_pool_start = dst_code_addr + constant_pool_offset;
   RelocIterator orig_it(*code, mode_mask);
   for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                         constant_pool_start, mode_mask);
@@ -832,8 +831,8 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
     if (RelocInfo::IsWasmStubCall(mode)) {
       uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
       DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
-      Address entry = runtime_stub_entry(
-          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
+      Address entry = GetNearRuntimeStubEntry(
+          static_cast<WasmCode::RuntimeStubId>(stub_call_tag), dst_code_addr);
       it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
     } else {
       it.rinfo()->apply(delta);
@@ -885,13 +884,13 @@ void NativeModule::UseLazyStub(uint32_t func_index) {
     JumpTableAssembler::GenerateLazyCompileTable(
         lazy_compile_table_->instruction_start(), num_slots,
         module_->num_imported_functions,
-        runtime_stub_entry(WasmCode::kWasmCompileLazy));
+        GetNearRuntimeStubEntry(WasmCode::kWasmCompileLazy,
+                                lazy_compile_table_->instruction_start()));
   }
 
   // Add jump table entry for jump to the lazy compile stub.
   uint32_t slot_index = func_index - module_->num_imported_functions;
   DCHECK_NULL(code_table_[slot_index]);
-  DCHECK_NE(runtime_stub_entry(WasmCode::kWasmCompileLazy), kNullAddress);
   Address lazy_compile_target =
       lazy_compile_table_->instruction_start() +
       JumpTableAssembler::LazyCompileSlotIndexToOffset(slot_index);
@@ -899,56 +898,6 @@ void NativeModule::UseLazyStub(uint32_t func_index) {
   PatchJumpTablesLocked(func_index, lazy_compile_target);
 }
 
-// TODO(mstarzinger): Remove {Isolate} parameter once {V8_EMBEDDED_BUILTINS}
-// was removed and embedded builtins are no longer optional.
-void NativeModule::SetRuntimeStubs(Isolate* isolate) {
-#ifndef V8_EMBEDDED_BUILTINS
-  FATAL(
-      "WebAssembly is not supported in no-embed builds. no-embed builds are "
-      "deprecated. See\n"
-      " - https://groups.google.com/d/msg/v8-users/9F53xqBjpkI/9WmKSbcWBAAJ\n"
-      " - https://crbug.com/v8/8519\n"
-      " - https://crbug.com/v8/8531\n");
-#endif  // V8_EMBEDDED_BUILTINS
-  DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]);  // Only called once.
-  WasmCodeRefScope code_ref_scope;
-  base::AddressRegion single_code_space_region;
-  {
-    base::MutexGuard guard(&allocation_mutex_);
-    DCHECK_EQ(1, code_space_data_.size());
-    single_code_space_region = code_space_data_[0].region;
-  }
-  int num_function_slots =
-      NumWasmFunctionsInFarJumpTable(module_->num_declared_functions);
-  WasmCode* jump_table = CreateEmptyJumpTableInRegion(
-      JumpTableAssembler::SizeForNumberOfFarJumpSlots(
-          WasmCode::kRuntimeStubCount, num_function_slots),
-      single_code_space_region);
-  Address base = jump_table->instruction_start();
-  EmbeddedData embedded_data = EmbeddedData::FromBlob();
-#define RUNTIME_STUB(Name) Builtins::k##Name,
-#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
-  Builtins::Name wasm_runtime_stubs[WasmCode::kRuntimeStubCount] = {
-      WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
-#undef RUNTIME_STUB
-#undef RUNTIME_STUB_TRAP
-  Address builtin_address[WasmCode::kRuntimeStubCount];
-  for (int i = 0; i < WasmCode::kRuntimeStubCount; ++i) {
-    Builtins::Name builtin = wasm_runtime_stubs[i];
-    CHECK(embedded_data.ContainsBuiltin(builtin));
-    builtin_address[i] = embedded_data.InstructionStartOfBuiltin(builtin);
-    runtime_stub_entries_[i] =
-        base + JumpTableAssembler::FarJumpSlotIndexToOffset(i);
-  }
-  JumpTableAssembler::GenerateFarJumpTable(
-      base, builtin_address, WasmCode::kRuntimeStubCount, num_function_slots);
-  DCHECK_NULL(runtime_stub_table_);
-  // TODO(clemensh): Store this as "far jump table" (instead of "runtime stub
-  // table") per code space.
-  runtime_stub_table_ = jump_table;
-  DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
-}
-
 std::unique_ptr<WasmCode> NativeModule::AddCode(
     uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
     uint32_t tagged_parameter_slots,
@@ -995,8 +944,8 @@ std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
   int mode_mask = RelocInfo::kApplyMask |
                   RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
                   RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
-  Address constant_pool_start =
-      reinterpret_cast<Address>(dst_code_bytes.begin()) + constant_pool_offset;
+  Address code_start = reinterpret_cast<Address>(dst_code_bytes.begin());
+  Address constant_pool_start = code_start + constant_pool_offset;
   for (RelocIterator it(dst_code_bytes, reloc_info.as_vector(),
                         constant_pool_start, mode_mask);
        !it.done(); it.next()) {
@@ -1008,8 +957,8 @@ std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
     } else if (RelocInfo::IsWasmStubCall(mode)) {
       uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
       DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
-      Address entry = runtime_stub_entry(
-          static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
+      Address entry = GetNearRuntimeStubEntry(
+          static_cast<WasmCode::RuntimeStubId>(stub_call_tag), code_start);
       it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
     } else {
       it.rinfo()->apply(delta);
@@ -1204,6 +1153,17 @@ void NativeModule::PatchJumpTablesLocked(uint32_t func_index, Address target) {
 }
 
 void NativeModule::AddCodeSpace(base::AddressRegion region) {
+#ifndef V8_EMBEDDED_BUILTINS
+  // The far jump table contains far jumps to the embedded builtins. This
+  // requires a build with embedded builtins enabled.
+  FATAL(
+      "WebAssembly is not supported in no-embed builds. no-embed builds are "
+      "deprecated. See\n"
+      " - https://groups.google.com/d/msg/v8-users/9F53xqBjpkI/9WmKSbcWBAAJ\n"
+      " - https://crbug.com/v8/8519\n"
+      " - https://crbug.com/v8/8531\n");
+#endif  // V8_EMBEDDED_BUILTINS
+
   // Each code space must be at least twice as large as the overhead per code
   // space. Otherwise, we are wasting too much memory.
   DCHECK_GE(region.size(),
@@ -1230,15 +1190,17 @@ void NativeModule::AddCodeSpace(base::AddressRegion region) {
 
   WasmCodeRefScope code_ref_scope;
   WasmCode* jump_table = nullptr;
+  WasmCode* far_jump_table = nullptr;
   const uint32_t num_wasm_functions = module_->num_declared_functions;
   const bool has_functions = num_wasm_functions > 0;
-  bool is_first_code_space;
-  {
-    base::MutexGuard guard(&allocation_mutex_);
-    is_first_code_space = code_space_data_.empty();
-  }
+  const bool is_first_code_space = code_space_data_.empty();
+  // TODO(clemensh): Avoid additional jump table if the code space is close
+  // enough to another existing code space.
   const bool needs_jump_table =
-      has_functions && is_first_code_space && !implicit_alloc_disabled;
+      has_functions &&
+      (kNeedsFarJumpsBetweenCodeSpaces || is_first_code_space) &&
+      !implicit_alloc_disabled;
+  const bool needs_far_jump_table = !implicit_alloc_disabled;
 
   if (needs_jump_table) {
     jump_table = CreateEmptyJumpTableInRegion(
@@ -1246,10 +1208,35 @@ void NativeModule::AddCodeSpace(base::AddressRegion region) {
     CHECK(region.contains(jump_table->instruction_start()));
   }
 
+  if (needs_far_jump_table) {
+    int num_function_slots = NumWasmFunctionsInFarJumpTable(num_wasm_functions);
+    far_jump_table = CreateEmptyJumpTableInRegion(
+        JumpTableAssembler::SizeForNumberOfFarJumpSlots(
+            WasmCode::kRuntimeStubCount, num_function_slots),
+        region);
+    CHECK(region.contains(far_jump_table->instruction_start()));
+    EmbeddedData embedded_data = EmbeddedData::FromBlob();
+#define RUNTIME_STUB(Name) Builtins::k##Name,
+#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
+    Builtins::Name stub_names[WasmCode::kRuntimeStubCount] = {
+        WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
+#undef RUNTIME_STUB
+#undef RUNTIME_STUB_TRAP
+    Address builtin_addresses[WasmCode::kRuntimeStubCount];
+    for (int i = 0; i < WasmCode::kRuntimeStubCount; ++i) {
+      Builtins::Name builtin = stub_names[i];
+      CHECK(embedded_data.ContainsBuiltin(builtin));
+      builtin_addresses[i] = embedded_data.InstructionStartOfBuiltin(builtin);
+    }
+    JumpTableAssembler::GenerateFarJumpTable(
+        far_jump_table->instruction_start(), builtin_addresses,
+        WasmCode::kRuntimeStubCount, num_function_slots);
+  }
+
   if (is_first_code_space) main_jump_table_ = jump_table;
 
   base::MutexGuard guard(&allocation_mutex_);
-  code_space_data_.push_back(CodeSpaceData{region, jump_table});
+  code_space_data_.push_back(CodeSpaceData{region, jump_table, far_jump_table});
 }
 
 namespace {
@@ -1305,6 +1292,19 @@ Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
   return main_jump_table_->instruction_start() + slot_offset;
 }
 
+Address NativeModule::GetNearRuntimeStubEntry(WasmCode::RuntimeStubId index,
+                                              Address near_to) const {
+  base::MutexGuard guard(&allocation_mutex_);
+  for (auto& code_space_data : code_space_data_) {
+    if (code_space_data.region.contains(near_to)) {
+      auto offset = JumpTableAssembler::FarJumpSlotIndexToOffset(index);
+      DCHECK_GT(code_space_data.far_jump_table->instructions().size(), offset);
+      return code_space_data.far_jump_table->instruction_start() + offset;
+    }
+  }
+  FATAL("near_to is not part of a code space");
+}
+
 uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
     Address slot_address) const {
   DCHECK(is_jump_table_slot(slot_address));
@@ -1315,16 +1315,40 @@ uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
   return module_->num_imported_functions + slot_idx;
 }
 
-const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
-#define RETURN_NAME(Name)                                               \
-  if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
-    return #Name;                                                       \
+WasmCode::RuntimeStubId NativeModule::GetRuntimeStubId(Address target) const {
+  base::MutexGuard guard(&allocation_mutex_);
+
+  for (auto& code_space_data : code_space_data_) {
+    if (code_space_data.far_jump_table->contains(target)) {
+      uint32_t offset = static_cast<uint32_t>(
+          target - code_space_data.far_jump_table->instruction_start());
+      uint32_t index = JumpTableAssembler::FarJumpSlotOffsetToIndex(offset);
+      if (index >= WasmCode::kRuntimeStubCount) continue;
+      if (JumpTableAssembler::FarJumpSlotIndexToOffset(index) != offset) {
+        continue;
+      }
+      return static_cast<WasmCode::RuntimeStubId>(index);
+    }
   }
-#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
-  WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
-#undef RETURN_NAME_TRAP
-#undef RETURN_NAME
-  return "<unknown>";
+
+  // Invalid address.
+  return WasmCode::kRuntimeStubCount;
+}
+
+const char* NativeModule::GetRuntimeStubName(Address target) const {
+  WasmCode::RuntimeStubId stub_id = GetRuntimeStubId(target);
+
+#define RUNTIME_STUB_NAME(Name) #Name,
+#define RUNTIME_STUB_NAME_TRAP(Name) "ThrowWasm" #Name,
+  constexpr const char* runtime_stub_names[] = {WASM_RUNTIME_STUB_LIST(
+      RUNTIME_STUB_NAME, RUNTIME_STUB_NAME_TRAP) "<unknown>"};
+#undef RUNTIME_STUB_NAME
+#undef RUNTIME_STUB_NAME_TRAP
+  STATIC_ASSERT(arraysize(runtime_stub_names) ==
+                WasmCode::kRuntimeStubCount + 1);
+
+  DCHECK_GT(arraysize(runtime_stub_names), stub_id);
+  return runtime_stub_names[stub_id];
 }
 
 NativeModule::~NativeModule() {
@@ -318,7 +318,10 @@ class WasmCodeAllocator {
   // The engine-wide wasm code manager.
   WasmCodeManager* const code_manager_;
 
-  mutable base::Mutex mutex_;
+  // TODO(clemensh): Try to make this non-recursive again. It's recursive
+  // currently because {AllocateForCodeInRegion} might create a new code space,
+  // which recursively calls {AllocateForCodeInRegion} for the jump table.
+  mutable base::RecursiveMutex mutex_;
 
   //////////////////////////////////////////////////////////////////////////////
   // Protected by {mutex_}:
@@ -392,11 +395,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
   // table with trampolines accordingly.
   void UseLazyStub(uint32_t func_index);
 
-  // Initializes all runtime stubs by setting up entry addresses in the runtime
-  // stub table. It must be called exactly once per native module before adding
-  // other WasmCode so that runtime stub ids can be resolved during relocation.
-  void SetRuntimeStubs(Isolate* isolate);
-
   // Creates a snapshot of the current state of the code table. This is useful
   // to get a consistent view of the table (e.g. used by the serializer).
   std::vector<WasmCode*> SnapshotCodeTable() const;
@@ -407,13 +405,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
   void SetWasmSourceMap(std::unique_ptr<WasmModuleSourceMap> source_map);
   WasmModuleSourceMap* GetWasmSourceMap() const;
 
-  Address runtime_stub_entry(WasmCode::RuntimeStubId index) const {
-    DCHECK_LT(index, WasmCode::kRuntimeStubCount);
-    Address entry_address = runtime_stub_entries_[index];
-    DCHECK_NE(kNullAddress, entry_address);
-    return entry_address;
-  }
-
   Address jump_table_start() const {
     return main_jump_table_ ? main_jump_table_->instruction_start()
                             : kNullAddress;
@@ -429,6 +420,12 @@ class V8_EXPORT_PRIVATE NativeModule final {
   // the first jump table).
   Address GetCallTargetForFunction(uint32_t func_index) const;
 
+  // Get a runtime stub entry (which is a far jump table slot) within near-call
+  // distance to {near_to}. Fails if {near_to} is not part of any code space of
+  // this module.
+  Address GetNearRuntimeStubEntry(WasmCode::RuntimeStubId index,
+                                  Address near_to) const;
+
   // Reverse lookup from a given call target (i.e. a jump table slot as the
   // above {GetCallTargetForFunction} returns) to a function index.
   uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;
@@ -479,7 +476,11 @@ class V8_EXPORT_PRIVATE NativeModule final {
 
   const WasmFeatures& enabled_features() const { return enabled_features_; }
 
-  const char* GetRuntimeStubName(Address runtime_stub_entry) const;
+  // Returns the runtime stub id that corresponds to the given address (which
+  // must be a far jump table slot). Returns {kRuntimeStubCount} on failure.
+  WasmCode::RuntimeStubId GetRuntimeStubId(Address runtime_stub_target) const;
+
+  const char* GetRuntimeStubName(Address runtime_stub_target) const;
 
   // Sample the current code size of this modules to the given counters.
   enum CodeSamplingTime : int8_t { kAfterBaseline, kAfterTopTier, kSampling };
|
||||
struct CodeSpaceData {
|
||||
base::AddressRegion region;
|
||||
WasmCode* jump_table;
|
||||
WasmCode* far_jump_table;
|
||||
};
|
||||
|
||||
// Private constructor, called via {WasmCodeManager::NewNativeModule()}.
|
||||
@@ -577,12 +579,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
   // {WireBytesStorage}, held by background compile tasks.
   std::shared_ptr<OwnedVector<const uint8_t>> wire_bytes_;
 
-  // Contains entry points for runtime stub calls via {WASM_STUB_CALL}.
-  Address runtime_stub_entries_[WasmCode::kRuntimeStubCount] = {kNullAddress};
-
-  // Jump table used for runtime stubs (i.e. trampolines to embedded builtins).
-  WasmCode* runtime_stub_table_ = nullptr;
-
   // Jump table used by external calls (from JS). Wasm calls use one of the jump
   // tables stored in {code_space_data_}.
   WasmCode* main_jump_table_ = nullptr;
@@ -289,9 +289,6 @@ class V8_EXPORT_PRIVATE NativeModuleSerializer {
   Vector<WasmCode* const> code_table_;
   bool write_called_;
 
-  // Reverse lookup tables for embedded addresses.
-  std::map<Address, uint32_t> wasm_stub_targets_lookup_;
-
   DISALLOW_COPY_AND_ASSIGN(NativeModuleSerializer);
 };
 
@@ -301,11 +298,6 @@ NativeModuleSerializer::NativeModuleSerializer(
   DCHECK_NOT_NULL(native_module_);
   // TODO(mtrofin): persist the export wrappers. Ideally, we'd only persist
   // the unique ones, i.e. the cache.
-  for (uint32_t i = 0; i < WasmCode::kRuntimeStubCount; ++i) {
-    Address addr = native_module_->runtime_stub_entry(
-        static_cast<WasmCode::RuntimeStubId>(i));
-    wasm_stub_targets_lookup_.insert(std::make_pair(addr, i));
-  }
 }
 
 size_t NativeModuleSerializer::MeasureCode(const WasmCode* code) const {
@@ -400,10 +392,9 @@ void NativeModuleSerializer::WriteCode(const WasmCode* code, Writer* writer) {
         SetWasmCalleeTag(iter.rinfo(), tag);
       } break;
       case RelocInfo::WASM_STUB_CALL: {
-        Address orig_target = orig_iter.rinfo()->wasm_stub_call_address();
-        auto stub_iter = wasm_stub_targets_lookup_.find(orig_target);
-        DCHECK(stub_iter != wasm_stub_targets_lookup_.end());
-        uint32_t tag = stub_iter->second;
+        Address target = orig_iter.rinfo()->wasm_stub_call_address();
+        uint32_t tag = native_module_->GetRuntimeStubId(target);
+        DCHECK_GT(WasmCode::kRuntimeStubCount, tag);
         SetWasmCalleeTag(iter.rinfo(), tag);
       } break;
       case RelocInfo::EXTERNAL_REFERENCE: {
@@ -564,8 +555,9 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
       case RelocInfo::WASM_STUB_CALL: {
        uint32_t tag = GetWasmCalleeTag(iter.rinfo());
        DCHECK_LT(tag, WasmCode::kRuntimeStubCount);
-        Address target = native_module_->runtime_stub_entry(
-            static_cast<WasmCode::RuntimeStubId>(tag));
+        Address target = native_module_->GetNearRuntimeStubEntry(
+            static_cast<WasmCode::RuntimeStubId>(tag),
+            code->instruction_start());
        iter.rinfo()->set_wasm_stub_call_address(target, SKIP_ICACHE_FLUSH);
        break;
       }
@@ -628,7 +620,6 @@ MaybeHandle<WasmModuleObject> DeserializeNativeModule(
   auto shared_native_module = isolate->wasm_engine()->NewNativeModule(
       isolate, enabled_features, std::move(decode_result.value()));
   shared_native_module->SetWireBytes(OwnedVector<uint8_t>::Of(wire_bytes_vec));
-  shared_native_module->SetRuntimeStubs(isolate);
 
   Handle<FixedArray> export_wrappers;
   CompileJsToWasmWrappers(isolate, shared_native_module->module(),
@@ -22,10 +22,8 @@ std::shared_ptr<NativeModule> NewModule(Isolate* isolate) {
   std::shared_ptr<WasmModule> module(new WasmModule);
   bool can_request_more = false;
   size_t size = 16384;
-  auto native_module = isolate->wasm_engine()->NewNativeModule(
+  return isolate->wasm_engine()->NewNativeModule(
       isolate, kAllWasmFeatures, size, can_request_more, std::move(module));
-  native_module->SetRuntimeStubs(isolate);
-  return native_module;
 }
 
 TEST(CacheHit) {
@@ -321,7 +321,6 @@ Handle<WasmInstanceObject> TestingModuleBuilder::InitInstanceObject() {
   auto native_module = isolate_->wasm_engine()->NewNativeModule(
       isolate_, enabled_features_, test_module_);
   native_module->SetWireBytes(OwnedVector<const uint8_t>());
-  native_module->SetRuntimeStubs(isolate_);
 
   Handle<WasmModuleObject> module_object =
       WasmModuleObject::New(isolate_, std::move(native_module), script);