[wasm] Fix regression caused by multiple code spaces

The {GetNearRuntimeStubEntry} and {GetNearCallTargetForFunction}
functions need to find the code space that contains the current
function. This lookup requires a lock and is non-trivial. The repeated
lookup caused severe regressions.

This CL introduces a {JumpTablesRef} struct which holds information
about the jump tables to use. It can be looked up once and then used
for a whole function or even several functions within the same code
space (in {NativeModule::AddCompiledCode}, which adds a whole vector
of compilation results).

This fixes the regressions.

R=ahaas@chromium.org

Bug: chromium:1004262, v8:9477
Change-Id: I50bd8327a131e3bee79d86b6d7e867a506959312
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1840153
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/master@{#64129}
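
For illustration, here is a minimal, self-contained sketch of the pattern this
CL introduces. The types, member names, and slot sizes below are simplified
stand-ins rather than the real V8 declarations: the point is that the locked
code-space scan happens once in {FindJumpTablesForCode}, after which
{GetNearCallTargetForFunction} and {GetNearRuntimeStubEntry} are plain
arithmetic on the returned {JumpTablesRef}.

#include <cstddef>
#include <cstdint>
#include <mutex>
#include <vector>

using Address = uintptr_t;

// Simplified stand-in for one code space and its jump tables.
struct CodeSpaceData {
  Address region_start = 0;
  size_t region_size = 0;
  Address jump_table_start = 0;      // near jump table (null if no functions)
  Address far_jump_table_start = 0;  // far jump table with runtime stub slots
};

struct JumpTablesRef {
  Address jump_table_start;
  Address far_jump_table_start;
};

class NativeModuleSketch {
 public:
  // The expensive part: takes the allocation lock and scans all code spaces.
  // With this CL it runs once per code object (or once per batch).
  JumpTablesRef FindJumpTablesForCode(Address code_addr) const {
    std::lock_guard<std::mutex> guard(allocation_mutex_);
    for (const CodeSpaceData& data : code_spaces_) {
      bool contains = code_addr >= data.region_start &&
                      code_addr < data.region_start + data.region_size;
      if (contains) return {data.jump_table_start, data.far_jump_table_start};
    }
    return {0, 0};  // the real code FATALs here; a sketch just returns null
  }

  // The cheap parts: pure arithmetic on the previously found tables, no lock.
  Address GetNearCallTargetForFunction(uint32_t func_index,
                                       const JumpTablesRef& ref) const {
    return ref.jump_table_start + SlotOffset(func_index);
  }
  Address GetNearRuntimeStubEntry(uint32_t stub_id,
                                  const JumpTablesRef& ref) const {
    return ref.far_jump_table_start + SlotOffset(stub_id);
  }

 private:
  static Address SlotOffset(uint32_t index) {
    return static_cast<Address>(index) * 16;  // placeholder slot size
  }

  mutable std::mutex allocation_mutex_;
  std::vector<CodeSpaceData> code_spaces_;
};

In the actual change the same ref is also reused across all compilation
results added in {NativeModule::AddCompiledCode}, so a whole batch of
functions pays for the code-space lookup only once.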

src/wasm/wasm-code-manager.cc

@@ -833,6 +833,8 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
code->InstructionStart();
int mode_mask = RelocInfo::kApplyMask |
RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
auto jump_tables_ref =
FindJumpTablesForCode(reinterpret_cast<Address>(dst_code_bytes.begin()));
Address dst_code_addr = reinterpret_cast<Address>(dst_code_bytes.begin());
Address constant_pool_start = dst_code_addr + constant_pool_offset;
RelocIterator orig_it(*code, mode_mask);
@@ -844,7 +846,7 @@ WasmCode* NativeModule::AddCodeForTesting(Handle<Code> code) {
uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
Address entry = GetNearRuntimeStubEntry(
static_cast<WasmCode::RuntimeStubId>(stub_call_tag), dst_code_addr);
static_cast<WasmCode::RuntimeStubId>(stub_call_tag), jump_tables_ref);
it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
} else {
it.rinfo()->apply(delta);
@@ -896,8 +898,9 @@ void NativeModule::UseLazyStub(uint32_t func_index) {
JumpTableAssembler::GenerateLazyCompileTable(
lazy_compile_table_->instruction_start(), num_slots,
module_->num_imported_functions,
GetNearRuntimeStubEntry(WasmCode::kWasmCompileLazy,
lazy_compile_table_->instruction_start()));
GetNearRuntimeStubEntry(
WasmCode::kWasmCompileLazy,
FindJumpTablesForCode(lazy_compile_table_->instruction_start())));
}
// Add jump table entry for jump to the lazy compile stub.
@@ -916,10 +919,14 @@ std::unique_ptr<WasmCode> NativeModule::AddCode(
OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
ExecutionTier tier) {
return AddCodeWithCodeSpace(
index, desc, stack_slots, tagged_parameter_slots,
std::move(protected_instructions), std::move(source_position_table), kind,
tier, code_allocator_.AllocateForCode(this, desc.instr_size));
Vector<byte> code_space =
code_allocator_.AllocateForCode(this, desc.instr_size);
auto jump_table_ref =
FindJumpTablesForCode(reinterpret_cast<Address>(code_space.begin()));
return AddCodeWithCodeSpace(index, desc, stack_slots, tagged_parameter_slots,
std::move(protected_instructions),
std::move(source_position_table), kind, tier,
code_space, jump_table_ref);
}
std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
@@ -927,7 +934,8 @@ std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
uint32_t tagged_parameter_slots,
OwnedVector<ProtectedInstructionData> protected_instructions,
OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
ExecutionTier tier, Vector<uint8_t> dst_code_bytes) {
ExecutionTier tier, Vector<uint8_t> dst_code_bytes,
const JumpTablesRef& jump_tables_ref) {
OwnedVector<byte> reloc_info;
if (desc.reloc_size > 0) {
reloc_info = OwnedVector<byte>::New(desc.reloc_size);
@@ -964,13 +972,13 @@ std::unique_ptr<WasmCode> NativeModule::AddCodeWithCodeSpace(
RelocInfo::Mode mode = it.rinfo()->rmode();
if (RelocInfo::IsWasmCall(mode)) {
uint32_t call_tag = it.rinfo()->wasm_call_tag();
Address target = GetNearCallTargetForFunction(call_tag, code_start);
Address target = GetNearCallTargetForFunction(call_tag, jump_tables_ref);
it.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
} else if (RelocInfo::IsWasmStubCall(mode)) {
uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
Address entry = GetNearRuntimeStubEntry(
static_cast<WasmCode::RuntimeStubId>(stub_call_tag), code_start);
static_cast<WasmCode::RuntimeStubId>(stub_call_tag), jump_tables_ref);
it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
} else {
it.rinfo()->apply(delta);
@@ -1333,32 +1341,34 @@ Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
return main_jump_table_->instruction_start() + slot_offset;
}
Address NativeModule::GetNearCallTargetForFunction(uint32_t func_index,
Address near_to) const {
uint32_t slot_offset = GetJumpTableOffset(func_index);
NativeModule::JumpTablesRef NativeModule::FindJumpTablesForCode(
Address code_addr) const {
base::MutexGuard guard(&allocation_mutex_);
for (auto& code_space_data : code_space_data_) {
const bool jump_table_reachable = !kNeedsFarJumpsBetweenCodeSpaces ||
code_space_data.region.contains(near_to);
if (jump_table_reachable && code_space_data.jump_table) {
DCHECK_LT(slot_offset, code_space_data.jump_table->instructions().size());
return code_space_data.jump_table->instruction_start() + slot_offset;
const bool jump_table_reachable =
!kNeedsFarJumpsBetweenCodeSpaces ||
code_space_data.region.contains(code_addr);
if (jump_table_reachable && code_space_data.far_jump_table) {
// We might not have a jump table if we have no functions.
return {code_space_data.jump_table
? code_space_data.jump_table->instruction_start()
: kNullAddress,
code_space_data.far_jump_table->instruction_start()};
}
}
FATAL("near_to is not part of a code space");
FATAL("code_addr is not part of a code space");
}
Address NativeModule::GetNearRuntimeStubEntry(WasmCode::RuntimeStubId index,
Address near_to) const {
base::MutexGuard guard(&allocation_mutex_);
for (auto& code_space_data : code_space_data_) {
if (code_space_data.region.contains(near_to)) {
auto offset = JumpTableAssembler::FarJumpSlotIndexToOffset(index);
DCHECK_GT(code_space_data.far_jump_table->instructions().size(), offset);
return code_space_data.far_jump_table->instruction_start() + offset;
}
}
FATAL("near_to is not part of a code space");
Address NativeModule::GetNearCallTargetForFunction(
uint32_t func_index, const JumpTablesRef& jump_tables) const {
uint32_t slot_offset = GetJumpTableOffset(func_index);
return jump_tables.jump_table_start + slot_offset;
}
Address NativeModule::GetNearRuntimeStubEntry(
WasmCode::RuntimeStubId index, const JumpTablesRef& jump_tables) const {
auto offset = JumpTableAssembler::FarJumpSlotIndexToOffset(index);
return jump_tables.far_jump_table_start + offset;
}
uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
@@ -1661,6 +1671,9 @@ std::vector<WasmCode*> NativeModule::AddCompiledCode(
}
Vector<byte> code_space =
code_allocator_.AllocateForCode(this, total_code_space);
// Lookup the jump tables to use once, then use for all code objects.
auto jump_tables_ref =
FindJumpTablesForCode(reinterpret_cast<Address>(code_space.begin()));
std::vector<std::unique_ptr<WasmCode>> generated_code;
generated_code.reserve(results.size());
@@ -1675,7 +1688,7 @@ std::vector<WasmCode*> NativeModule::AddCompiledCode(
result.func_index, result.code_desc, result.frame_slot_count,
result.tagged_parameter_slots, std::move(result.protected_instructions),
std::move(result.source_positions), GetCodeKind(result),
result.result_tier, this_code_space));
result.result_tier, this_code_space, jump_tables_ref));
}
DCHECK_EQ(0, code_space.size());

src/wasm/wasm-code-manager.h

@@ -421,16 +421,26 @@ class V8_EXPORT_PRIVATE NativeModule final {
// the first jump table).
Address GetCallTargetForFunction(uint32_t func_index) const;
// Similarly to {GetCallTargetForFunction}, but ensures that the returned
// address is near to the {near_to} address by finding the closest jump table.
Address GetNearCallTargetForFunction(uint32_t func_index,
Address near_to) const;
struct JumpTablesRef {
const Address jump_table_start;
const Address far_jump_table_start;
};
// Get a runtime stub entry (which is a far jump table slot) within near-call
// distance to {near_to}. Fails if {near_to} is not part of any code space of
// this module.
// Finds the jump tables that should be used for the code at {code_addr}. This
// information is then passed to {GetNearCallTargetForFunction} and
// {GetNearRuntimeStubEntry} to avoid the overhead of looking this information
// up there.
JumpTablesRef FindJumpTablesForCode(Address code_addr) const;
// Similarly to {GetCallTargetForFunction}, but uses the jump table previously
// looked up via {FindJumpTablesForCode}.
Address GetNearCallTargetForFunction(uint32_t func_index,
const JumpTablesRef&) const;
// Get a runtime stub entry (which is a far jump table slot) in the jump table
// previously looked up via {FindJumpTablesForCode}.
Address GetNearRuntimeStubEntry(WasmCode::RuntimeStubId index,
Address near_to) const;
const JumpTablesRef&) const;
// Reverse lookup from a given call target (which must be a jump table slot)
// to a function index.
@@ -534,7 +544,8 @@ class V8_EXPORT_PRIVATE NativeModule final {
OwnedVector<trap_handler::ProtectedInstructionData>
protected_instructions,
OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
ExecutionTier tier, Vector<uint8_t> code_space);
ExecutionTier tier, Vector<uint8_t> code_space,
const JumpTablesRef& jump_tables_ref);
WasmCode* CreateEmptyJumpTableInRegion(uint32_t jump_table_size,
base::AddressRegion);
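
As a usage sketch (a hypothetical caller that continues the simplified types
from the sketch above, mirroring what {AddCode}, {AddCompiledCode}, and the
deserializer's {ReadCode} do in this CL): the jump tables are looked up once
for the allocated code region, and the resulting ref is reused for every
relocation that needs patching.

void PatchRelocations(NativeModuleSketch* module, Address code_start,
                      const std::vector<uint32_t>& wasm_call_tags,
                      const std::vector<uint32_t>& stub_call_tags) {
  // One locked lookup for the whole code object (or batch of objects).
  JumpTablesRef ref = module->FindJumpTablesForCode(code_start);

  // Every relocation now resolves without taking the allocation lock.
  for (uint32_t tag : wasm_call_tags) {
    Address target = module->GetNearCallTargetForFunction(tag, ref);
    (void)target;  // the real code writes this into the relocation
  }
  for (uint32_t id : stub_call_tags) {
    Address entry = module->GetNearRuntimeStubEntry(id, ref);
    (void)entry;  // the real code writes this into the relocation
  }
}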

src/wasm/wasm-serialization.cc

@@ -541,6 +541,8 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED);
auto jump_tables_ref =
native_module_->FindJumpTablesForCode(code->instruction_start());
for (RelocIterator iter(code->instructions(), code->reloc_info(),
code->constant_pool(), mask);
!iter.done(); iter.next()) {
@@ -548,8 +550,8 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
switch (mode) {
case RelocInfo::WASM_CALL: {
uint32_t tag = GetWasmCalleeTag(iter.rinfo());
Address target = native_module_->GetNearCallTargetForFunction(
tag, code->instruction_start());
Address target =
native_module_->GetNearCallTargetForFunction(tag, jump_tables_ref);
iter.rinfo()->set_wasm_call_address(target, SKIP_ICACHE_FLUSH);
break;
}
@@ -557,8 +559,7 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
uint32_t tag = GetWasmCalleeTag(iter.rinfo());
DCHECK_LT(tag, WasmCode::kRuntimeStubCount);
Address target = native_module_->GetNearRuntimeStubEntry(
static_cast<WasmCode::RuntimeStubId>(tag),
code->instruction_start());
static_cast<WasmCode::RuntimeStubId>(tag), jump_tables_ref);
iter.rinfo()->set_wasm_stub_call_address(target, SKIP_ICACHE_FLUSH);
break;
}