Reland "[wasm] Introduce jump table"
This is a reland of 733b7c8258.
The arm64 bug was fixed in https://crrev.com/c/1105051.
Original change's description:
> [wasm] Introduce jump table
>
> This introduces the concept of a jump table for WebAssembly, which is
> used for every direct and indirect call to any WebAssembly function.
> For lazy compilation, it will initially contain code to call the
> WasmCompileLazy builtin, where it passes the function index to be
> called.
> For non-lazy-compilation, it will contain a jump to the actual code.
> The jump table allows us to easily redirect functions for lazy
> compilation, tier-up, debugging and (in the future) code aging. After
> this CL, we will not need to patch existing code any more for any of
> these operations.
>
> R=mstarzinger@chromium.org, titzer@chromium.org
>
> Bug: v8:7758
> Change-Id: I45f9983c2b06ae81bf5ce9847f4542fb48844a4f
> Reviewed-on: https://chromium-review.googlesource.com/1097075
> Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
> Reviewed-by: Ben Titzer <titzer@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#53805}
TBR=titzer@chromium.org,mstarzinger@chromium.org
Bug: v8:7758
Change-Id: I68555230c6db97e70f0b8fef784188f55ee04794
Reviewed-on: https://chromium-review.googlesource.com/1105158
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53829}
Parent: 58339dfe39
Commit: 5f56641b41
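
The mechanism in one picture: the NativeModule allocates a jump table with one fixed-size slot per non-imported function, and every direct or indirect call targets the slot rather than the callee's code, so redirecting a function means patching exactly one slot. A minimal sketch of the addressing scheme, assuming the x64 slot size from this change (the helper name is illustrative, not V8 API):

  // Sketch only; mirrors NativeModule::GetCallTargetForFunction and
  // NativeModule::PatchJumpTable in the diff below.
  constexpr uint32_t kJumpTableSlotSize = 18;  // x64 slot size in this CL

  uintptr_t CallTargetFor(uintptr_t jump_table_start, uint32_t num_imported,
                          uint32_t func_index) {
    uint32_t slot_idx = func_index - num_imported;  // imports have no slot
    return jump_table_start + slot_idx * kJumpTableSlotSize;
  }
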
@@ -1153,6 +1153,7 @@ int Operand::InstructionsRequired(const Assembler* assembler,
void Assembler::Move32BitImmediate(Register rd, const Operand& x,
                                   Condition cond) {
  if (UseMovImmediateLoad(x, this)) {
    CpuFeatureScope scope(this, ARMv7);
    // UseMovImmediateLoad should return false when we need to output
    // relocation info, since we prefer the constant pool for values that
    // can be patched.
@@ -1160,12 +1161,9 @@ void Assembler::Move32BitImmediate(Register rd, const Operand& x,
    UseScratchRegisterScope temps(this);
    // Re-use the destination register as a scratch if possible.
    Register target = rd != pc ? rd : temps.Acquire();
    if (CpuFeatures::IsSupported(ARMv7)) {
      uint32_t imm32 = static_cast<uint32_t>(x.immediate());
      CpuFeatureScope scope(this, ARMv7);
      movw(target, imm32 & 0xFFFF, cond);
      movt(target, imm32 >> 16, cond);
    }
    if (target.code() != rd.code()) {
      mov(rd, target, LeaveCC, cond);
    }
@@ -1549,6 +1549,9 @@ class Assembler : public AssemblerBase {
    UNREACHABLE();
  }

  // Move a 32-bit immediate into a register, potentially via the constant pool.
  void Move32BitImmediate(Register rd, const Operand& x, Condition cond = al);

 protected:
  int buffer_space() const { return reloc_info_writer.pos() - pc_; }

@@ -1680,9 +1683,6 @@ class Assembler : public AssemblerBase {
  inline void CheckBuffer();
  void GrowBuffer();

  // 32-bit immediate values
  void Move32BitImmediate(Register rd, const Operand& x, Condition cond = al);

  // Instruction generation
  void AddrMode1(Instr instr, Register rd, Register rn, const Operand& x);
  // Attempt to encode operand |x| for instruction |instr| and return true on
@@ -2294,6 +2294,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in r4 by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(r4, r4);
  {
    TrapOnAbortScope trap_on_abort_scope(masm);  // Avoid calls to Abort.
    FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
@@ -2308,8 +2311,10 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
    __ stm(db_w, sp, gp_regs);
    __ vstm(db_w, sp, lowest_fp_reg, highest_fp_reg);

    // Pass the WASM instance as an explicit argument to WasmCompileLazy.
    // Pass instance and function index as explicit arguments to the runtime
    // function.
    __ push(kWasmInstanceRegister);
    __ push(r4);
    // Load the correct CEntry builtin from the instance object.
    __ ldr(r2, FieldMemOperand(kWasmInstanceRegister,
                               WasmInstanceObject::kCEntryStubOffset));
@@ -2746,6 +2746,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in w8 by the jump table trampoline.
  // Sign extend and convert to Smi for the runtime call.
  __ sxtw(x8, w8);
  __ SmiTag(x8, x8);
  {
    TrapOnAbortScope trap_on_abort_scope(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
@@ -2760,8 +2764,9 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
    __ PushXRegList(gp_regs);
    __ PushDRegList(fp_regs);

    // Pass the WASM instance as an explicit argument to WasmCompileLazy.
    __ PushArgument(kWasmInstanceRegister);
    // Pass instance and function index as explicit arguments to the runtime
    // function.
    __ Push(kWasmInstanceRegister, x8);
    // Load the correct CEntry builtin from the instance object.
    __ Ldr(x2, FieldMemOperand(kWasmInstanceRegister,
                               WasmInstanceObject::kCEntryStubOffset));
@@ -2481,6 +2481,9 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in edi by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(edi);
  {
    TrapOnAbortScope trap_on_abort_scope(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
@@ -2504,8 +2507,10 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
      offset += kSimd128Size;
    }

    // Pass the WASM instance as an explicit argument to WasmCompileLazy.
    // Push the WASM instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(edi);
    // Load the correct CEntry builtin from the instance object.
    __ mov(ecx, FieldOperand(kWasmInstanceRegister,
                             WasmInstanceObject::kCEntryStubOffset));
@@ -2423,6 +2423,10 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
}

void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was pushed to the stack by the caller as int32.
  __ Pop(r11);
  // Convert to Smi for the runtime call.
  __ SmiTag(r11, r11);
  {
    TrapOnAbortScope trap_on_abort_scope(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
@@ -2446,8 +2450,10 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
      offset += kSimd128Size;
    }

    // Pass the WASM instance as an explicit argument to WasmCompileLazy.
    // Push the WASM instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(r11);
    // Load the correct CEntry builtin from the instance object.
    __ movp(rcx, FieldOperand(kWasmInstanceRegister,
                              WasmInstanceObject::kCEntryStubOffset));
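
All four ports follow the same calling convention; a condensed sketch of the x64 flow (the real builtin also saves and restores all wasm parameter registers, as the hunks above show):

  // Jump-table slot in the lazy state:
  //   push func_index
  //   jmp  WasmCompileLazy
  // WasmCompileLazy builtin (x64):
  //   pop r11; SmiTag(r11)                    // recover the function index
  //   Push(kWasmInstanceRegister); Push(r11)  // the two runtime arguments
  //   call Runtime_WasmCompileLazy through the CEntry stub from the instance
  //   jmp to the returned entrypoint
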
@@ -3216,6 +3216,12 @@ void Assembler::GrowBuffer() {
    *p += pc_delta;
  }

  // Relocate js-to-wasm calls (which are encoded pc-relative).
  for (RelocIterator it(desc, RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL));
       !it.done(); it.next()) {
    it.rinfo()->apply(pc_delta);
  }

  DCHECK(!buffer_overflow());
}

@@ -1739,10 +1739,6 @@ void WasmInstanceObject::WasmInstanceObjectPrint(std::ostream& os) {  // NOLINT
    os << "\n - managed_native_allocations: "
       << Brief(managed_native_allocations());
  }
  if (has_managed_indirect_patcher()) {
    os << "\n - managed_indirect_patcher: "
       << Brief(managed_indirect_patcher());
  }
  os << "\n - memory_start: " << static_cast<void*>(memory_start());
  os << "\n - memory_size: " << memory_size();
  os << "\n - memory_mask: " << AsHex(memory_mask());
@@ -291,8 +291,9 @@ RUNTIME_FUNCTION(Runtime_WasmStackGuard) {

RUNTIME_FUNCTION(Runtime_WasmCompileLazy) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  DCHECK_EQ(2, args.length());
  CONVERT_ARG_HANDLE_CHECKED(WasmInstanceObject, instance, 0);
  CONVERT_SMI_ARG_CHECKED(func_index, 1);

  ClearThreadInWasmScope wasm_flag(true);

@@ -306,7 +307,8 @@ RUNTIME_FUNCTION(Runtime_WasmCompileLazy) {
  DCHECK_EQ(*instance, WasmCompileLazyFrame::cast(it.frame())->wasm_instance());
#endif

  Address entrypoint = wasm::CompileLazy(isolate, instance);
  Address entrypoint = wasm::CompileLazy(
      isolate, instance->compiled_module()->GetNativeModule(), func_index);
  return reinterpret_cast<Object*>(entrypoint);
}

@@ -581,7 +581,7 @@ namespace internal {
  F(WasmThrow, 0, 1)        \
  F(WasmThrowCreate, 2, 1)  \
  F(WasmThrowTypeError, 0, 1) \
  F(WasmCompileLazy, 1, 1)
  F(WasmCompileLazy, 2, 1)

#define FOR_EACH_INTRINSIC_RETURN_PAIR(F) \
  F(DebugBreakOnBytecode, 1, 2)           \
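
In these intrinsic lists, F(Name, nargs, nresults) declares the runtime call signature, so WasmCompileLazy goes from one argument to two. A sketch of the new contract, matching the Runtime_WasmCompileLazy hunk above:

  // args[0] : WasmInstanceObject  (pushed by the WasmCompileLazy builtin)
  // args[1] : Smi                 (function index, tagged by the builtin)
  // result  : entrypoint of the now-compiled function, encoded as an Object*
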
@@ -4,6 +4,7 @@

#include "src/wasm/jump-table-assembler.h"

#include "src/assembler-inl.h"
#include "src/macro-assembler-inl.h"

namespace v8 {
@@ -27,6 +28,122 @@ void JumpTableAssembler::EmitJumpTrampoline(Address target) {
#endif
}

// The implementation is compact enough to implement it inline here. If it gets
// much bigger, we might want to split it in a separate file per architecture.
#if V8_TARGET_ARCH_X64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // TODO(clemensh): Try more efficient sequences.
  // Alternative 1:
  // [header]: mov r10, [lazy_compile_target]
  //           jmp r10
  // [slot 0]: push [0]
  //           jmp [header]  // pc-relative --> slot size: 10 bytes
  //
  // Alternative 2:
  // [header]: lea r10, [rip - [header]]
  //           shr r10, 3  // compute index from offset
  //           push r10
  //           mov r10, [lazy_compile_target]
  //           jmp r10
  // [slot 0]: call [header]
  //           ret  // -> slot size: 5 bytes

  // Use a push, because mov to an extended register takes 6 bytes.
  pushq(Immediate(func_index));                           // max 5 bytes
  movq(kScratchRegister, uint64_t{lazy_compile_target});  // max 10 bytes
  jmp(kScratchRegister);                                  // 3 bytes
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  movq(kScratchRegister, static_cast<uint64_t>(target));
  jmp(kScratchRegister);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  Nop(bytes);
}

#elif V8_TARGET_ARCH_IA32
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  mov(edi, func_index);                       // 5 bytes
  jmp(lazy_compile_target, RelocInfo::NONE);  // 5 bytes
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  jmp(target, RelocInfo::NONE);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  Nop(bytes);
}

#elif V8_TARGET_ARCH_ARM
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  // Load function index to r4.
  // This generates <= 3 instructions: ldr, const pool start, constant
  Move32BitImmediate(r4, Operand(func_index));
  // Jump to {lazy_compile_target}.
  int offset =
      lazy_compile_target - reinterpret_cast<Address>(pc_) - kPcLoadDelta;
  DCHECK_EQ(0, offset % kInstrSize);
  DCHECK(is_int26(offset));     // 26 bit imm
  b(offset);                    // 1 instr
  CheckConstPool(true, false);  // force emit of const pool
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  int offset = target - reinterpret_cast<Address>(pc_) - kPcLoadDelta;
  DCHECK_EQ(0, offset % kInstrSize);
  DCHECK(is_int26(offset));  // 26 bit imm
  b(offset);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  for (; bytes > 0; bytes -= kInstrSize) {
    nop();
  }
}

#elif V8_TARGET_ARCH_ARM64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  Mov(w8, func_index);                         // max. 2 instr
  Jump(lazy_compile_target, RelocInfo::NONE);  // 1 instr
}

void JumpTableAssembler::EmitJumpSlot(Address target) {
  Jump(target, RelocInfo::NONE);
}

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstructionSize);
  for (; bytes > 0; bytes -= kInstructionSize) {
    nop();
  }
}

#else
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  UNIMPLEMENTED();
}

void JumpTableAssembler::EmitJumpSlot(Address target) { UNIMPLEMENTED(); }

void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  UNIMPLEMENTED();
}
#endif

}  // namespace wasm
}  // namespace internal
}  // namespace v8
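
On x64 the emitted lazy-compile slot must fit the 18-byte kJumpTableSlotSize declared in the header below; the per-instruction bounds in the comments above add up exactly. A hypothetical compile-time check (not part of the CL):

  static_assert(5 /* pushq imm32 */ + 10 /* movq imm64 */ + 3 /* jmp reg */ == 18,
                "x64 lazy-compile sequence fills the jump table slot");
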
@@ -6,6 +6,7 @@
#define V8_WASM_JUMP_TABLE_ASSEMBLER_H_

#include "src/macro-assembler.h"
#include "src/wasm/wasm-code-manager.h"

namespace v8 {
namespace internal {
@@ -26,8 +27,42 @@ class JumpTableAssembler : public TurboAssembler {
 public:
  JumpTableAssembler() : TurboAssembler(GetDefaultIsolateData(), nullptr, 0) {}

  // Instantiate a {JumpTableAssembler} for patching.
  explicit JumpTableAssembler(Address slot_addr, int size = 256)
      : TurboAssembler(GetDefaultIsolateData(),
                       reinterpret_cast<void*>(slot_addr), size) {}

  // Emit a trampoline to a possibly far away code target.
  void EmitJumpTrampoline(Address target);

#if V8_TARGET_ARCH_X64
  static constexpr int kJumpTableSlotSize = 18;
#elif V8_TARGET_ARCH_IA32
  static constexpr int kJumpTableSlotSize = 10;
#elif V8_TARGET_ARCH_ARM
  static constexpr int kJumpTableSlotSize = 4 * kInstrSize;
#elif V8_TARGET_ARCH_ARM64
  static constexpr int kJumpTableSlotSize = 3 * kInstructionSize;
#else
  static constexpr int kJumpTableSlotSize = 1;
#endif

  void EmitLazyCompileJumpSlot(uint32_t func_index,
                               Address lazy_compile_target);

  void EmitJumpSlot(Address target);

  void NopBytes(int bytes);

  static void PatchJumpTableSlot(Address slot, Address new_target,
                                 WasmCode::FlushICache flush_i_cache) {
    JumpTableAssembler jsasm(slot);
    jsasm.EmitJumpSlot(new_target);
    jsasm.NopBytes(kJumpTableSlotSize - jsasm.pc_offset());
    if (flush_i_cache) {
      Assembler::FlushICache(slot, kJumpTableSlotSize);
    }
  }
};

}  // namespace wasm
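
With this header in place, redirecting a function after lazy compilation or tier-up reduces to one call; a minimal usage sketch, assuming slot_address was computed as in NativeModule::PatchJumpTable later in this diff (both names below are placeholders for illustration):

  JumpTableAssembler::PatchJumpTableSlot(slot_address, new_code_start,
                                         WasmCode::kFlushICache);
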
@@ -378,156 +378,37 @@ MaybeHandle<WasmInstanceObject> InstantiateToInstanceObject(
  return {};
}

// A helper class to prevent pathological patching behavior for indirect
// references to code which must be updated after lazy compiles.
// Utilizes a reverse mapping to prevent O(n^2) behavior.
class IndirectPatcher {
 public:
  void Patch(Handle<WasmInstanceObject> caller_instance,
             Handle<WasmInstanceObject> target_instance, int func_index,
             Address old_target, Address new_target) {
    TRACE_LAZY(
        "IndirectPatcher::Patch(caller=%p, target=%p, func_index=%i, "
        "old_target=%" PRIuPTR ", new_target=%" PRIuPTR ")\n",
        *caller_instance, *target_instance, func_index, old_target, new_target);
    if (mapping_.size() == 0 || misses_ >= kMaxMisses) {
      BuildMapping(caller_instance);
    }
    // Patch entries for the given function index.
    WasmCodeManager* code_manager =
        caller_instance->GetIsolate()->wasm_engine()->code_manager();
    USE(code_manager);
    auto& entries = mapping_[func_index];
    int patched = 0;
    for (auto index : entries) {
      if (index < 0) {
        // Imported function entry.
        int i = -1 - index;
        ImportedFunctionEntry entry(caller_instance, i);
        if (entry.target() == old_target) {
          DCHECK_EQ(
              func_index,
              code_manager->GetCodeFromStartAddress(entry.target())->index());
          entry.set_wasm_to_wasm(*target_instance, new_target);
          patched++;
        }
      } else {
        // Indirect function table entry.
        int i = index;
        IndirectFunctionTableEntry entry(caller_instance, i);
        if (entry.target() == old_target) {
          DCHECK_EQ(
              func_index,
              code_manager->GetCodeFromStartAddress(entry.target())->index());
          entry.set(entry.sig_id(), *target_instance, new_target);
          patched++;
        }
      }
    }
    if (patched == 0) misses_++;
  }

 private:
  void BuildMapping(Handle<WasmInstanceObject> caller_instance) {
    mapping_.clear();
    misses_ = 0;
    TRACE_LAZY("BuildMapping for (caller=%p)...\n", *caller_instance);
    Isolate* isolate = caller_instance->GetIsolate();
    WasmCodeManager* code_manager = isolate->wasm_engine()->code_manager();
    uint32_t num_imported_functions =
        caller_instance->module()->num_imported_functions;
    // Process the imported function entries.
    for (unsigned i = 0; i < num_imported_functions; i++) {
      ImportedFunctionEntry entry(caller_instance, i);
      WasmCode* code = code_manager->GetCodeFromStartAddress(entry.target());
      if (code->kind() != WasmCode::kLazyStub) continue;
      TRACE_LAZY("  +import[%u] -> #%d (%p)\n", i, code->index(),
                 code->instructions().start());
      DCHECK(!entry.is_js_receiver_entry());
      WasmInstanceObject* target_instance = entry.instance();
      WasmCode* new_code =
          target_instance->compiled_module()->GetNativeModule()->code(
              code->index());
      if (new_code->kind() != WasmCode::kLazyStub) {
        // Patch an imported function entry which is already compiled.
        entry.set_wasm_to_wasm(target_instance, new_code->instruction_start());
      } else {
        int key = code->index();
        int index = -1 - i;
        mapping_[key].push_back(index);
      }
    }
    // Process the indirect function table entries.
    size_t ift_size = caller_instance->indirect_function_table_size();
    for (unsigned i = 0; i < ift_size; i++) {
      IndirectFunctionTableEntry entry(caller_instance, i);
      if (entry.target() == kNullAddress) continue;  // null IFT entry
      WasmCode* code = code_manager->GetCodeFromStartAddress(entry.target());
      if (code->kind() != WasmCode::kLazyStub) continue;
      TRACE_LAZY("  +indirect[%u] -> #%d (lazy:%p)\n", i, code->index(),
                 code->instructions().start());
      WasmInstanceObject* target_instance = entry.instance();
      WasmCode* new_code =
          target_instance->compiled_module()->GetNativeModule()->code(
              code->index());
      if (new_code->kind() != WasmCode::kLazyStub) {
        // Patch an indirect function table entry which is already compiled.
        entry.set(entry.sig_id(), target_instance,
                  new_code->instruction_start());
      } else {
        int key = code->index();
        int index = i;
        mapping_[key].push_back(index);
      }
    }
  }

  static constexpr int kMaxMisses = 5;  // maximum misses before rebuilding
  std::unordered_map<int, std::vector<int>> mapping_;
  int misses_ = 0;
};

ModuleEnv CreateModuleEnvFromModuleObject(
    Isolate* isolate, Handle<WasmModuleObject> module_object) {
  WasmModule* module = module_object->module();
ModuleEnv CreateModuleEnvFromNativeModule(NativeModule* native_module) {
  WasmModule* module = native_module->module_object()->module();
  wasm::UseTrapHandler use_trap_handler =
      module_object->compiled_module()->GetNativeModule()->use_trap_handler()
          ? kUseTrapHandler
          : kNoTrapHandler;
      native_module->use_trap_handler() ? kUseTrapHandler : kNoTrapHandler;
  return ModuleEnv(module, use_trap_handler, wasm::kRuntimeExceptionSupport);
}

const wasm::WasmCode* LazyCompileFunction(
    Isolate* isolate, Handle<WasmModuleObject> module_object, int func_index) {
wasm::WasmCode* LazyCompileFunction(Isolate* isolate,
                                    NativeModule* native_module,
                                    int func_index) {
  base::ElapsedTimer compilation_timer;
  NativeModule* native_module =
      module_object->compiled_module()->GetNativeModule();
  wasm::WasmCode* existing_code =
      native_module->code(static_cast<uint32_t>(func_index));
  if (existing_code != nullptr &&
      existing_code->kind() == wasm::WasmCode::kFunction) {
    TRACE_LAZY("Function %d already compiled.\n", func_index);
    return existing_code;
  }
  DCHECK(!native_module->has_code(static_cast<uint32_t>(func_index)));

  compilation_timer.Start();
  // TODO(wasm): Refactor this to only get the name if it is really needed for
  // tracing / debugging.
  std::string func_name;
  {
    WasmName name =
        Vector<const char>::cast(module_object->GetRawFunctionName(func_index));
    WasmName name = Vector<const char>::cast(
        native_module->module_object()->GetRawFunctionName(func_index));
    // Copy to std::string, because the underlying string object might move on
    // the heap.
    func_name.assign(name.start(), static_cast<size_t>(name.length()));
  }

  TRACE_LAZY("Compiling function %s, %d.\n", func_name.c_str(), func_index);
  TRACE_LAZY("Compiling function '%s' (#%d).\n", func_name.c_str(), func_index);

  ModuleEnv module_env =
      CreateModuleEnvFromModuleObject(isolate, module_object);
  ModuleEnv module_env = CreateModuleEnvFromNativeModule(native_module);

  const uint8_t* module_start = module_object->module_bytes()->GetChars();
  const uint8_t* module_start =
      native_module->module_object()->module_bytes()->GetChars();

  const WasmFunction* func = &module_env.module->functions[func_index];
  FunctionBody body{func->sig, func->code.offset(),
@@ -574,292 +455,19 @@ const wasm::WasmCode* LazyCompileFunction(
  return wasm_code;
}

namespace {

int AdvanceSourcePositionTableIterator(SourcePositionTableIterator& iterator,
                                       int offset) {
  DCHECK(!iterator.done());
  int byte_pos;
  do {
    byte_pos = iterator.source_position().ScriptOffset();
    iterator.Advance();
  } while (!iterator.done() && iterator.code_offset() <= offset);
  return byte_pos;
}

const wasm::WasmCode* LazyCompileFromJsToWasm(
    Isolate* isolate, Handle<WasmInstanceObject> instance,
    Handle<Code> js_to_wasm_caller, uint32_t callee_func_index) {
  Decoder decoder(nullptr, nullptr);
  Handle<WasmModuleObject> module_object(instance->module_object());
  NativeModule* native_module = instance->compiled_module()->GetNativeModule();

  TRACE_LAZY(
      "Starting lazy compilation (func %u, js_to_wasm: true, patch caller: "
      "true). \n",
      callee_func_index);
  LazyCompileFunction(isolate, module_object, callee_func_index);
  {
    DisallowHeapAllocation no_gc;
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    RelocIterator it(*js_to_wasm_caller,
                     RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL));
    DCHECK(!it.done());
    const wasm::WasmCode* callee_compiled =
        native_module->code(callee_func_index);
    DCHECK_NOT_NULL(callee_compiled);
    DCHECK_EQ(WasmCode::kLazyStub,
              isolate->wasm_engine()
                  ->code_manager()
                  ->GetCodeFromStartAddress(it.rinfo()->js_to_wasm_address())
                  ->kind());
    it.rinfo()->set_js_to_wasm_address(callee_compiled->instruction_start());
    TRACE_LAZY("Patched 1 location in js-to-wasm %p.\n", *js_to_wasm_caller);

#ifdef DEBUG
    it.next();
    DCHECK(it.done());
#endif
  }

  wasm::WasmCode* ret = native_module->code(callee_func_index);
  DCHECK_NOT_NULL(ret);
  DCHECK_EQ(wasm::WasmCode::kFunction, ret->kind());
  return ret;
}

const wasm::WasmCode* LazyCompileIndirectCall(
    Isolate* isolate, Handle<WasmInstanceObject> instance,
Address CompileLazy(Isolate* isolate, NativeModule* native_module,
                    uint32_t func_index) {
  TRACE_LAZY(
      "Starting lazy compilation (func %u, js_to_wasm: false, patch caller: "
      "false). \n",
      func_index);
  Handle<WasmModuleObject> module_object(instance->module_object());
  return LazyCompileFunction(isolate, module_object, func_index);
}

const wasm::WasmCode* LazyCompileDirectCall(Isolate* isolate,
                                            Handle<WasmInstanceObject> instance,
                                            const wasm::WasmCode* wasm_caller,
                                            int32_t caller_ret_offset) {
  DCHECK_LE(0, caller_ret_offset);

  Decoder decoder(nullptr, nullptr);

  // Gather all the targets of direct calls inside the code of {wasm_caller}
  // and place their function indexes in {direct_callees}.
  std::vector<int32_t> direct_callees;
  // The last one before {caller_ret_offset} must be the call that triggered
  // this lazy compilation.
  int callee_pos = -1;
  uint32_t num_non_compiled_callees = 0;  // For stats.
  {
    DisallowHeapAllocation no_gc;
    WasmModuleObject* module_object = instance->module_object();
    SeqOneByteString* module_bytes = module_object->module_bytes();
    uint32_t caller_func_index = wasm_caller->index();
    SourcePositionTableIterator source_pos_iterator(
        wasm_caller->source_positions());

    const byte* func_bytes =
        module_bytes->GetChars() +
        module_object->module()->functions[caller_func_index].code.offset();
    for (RelocIterator it(wasm_caller->instructions(),
                          wasm_caller->reloc_info(),
                          wasm_caller->constant_pool(),
                          RelocInfo::ModeMask(RelocInfo::WASM_CALL));
         !it.done(); it.next()) {
      // TODO(clemensh): Introduce safe_cast<T, bool> which (D)CHECKS
      // (depending on the bool) against limits of T and then static_casts.
      size_t offset_l = it.rinfo()->pc() - wasm_caller->instruction_start();
      DCHECK_GE(kMaxInt, offset_l);
      int offset = static_cast<int>(offset_l);
      int byte_pos =
          AdvanceSourcePositionTableIterator(source_pos_iterator, offset);

      WasmCode* callee = isolate->wasm_engine()->code_manager()->LookupCode(
          it.rinfo()->target_address());
      if (callee->kind() == WasmCode::kLazyStub) {
        // The callee has not been compiled.
        ++num_non_compiled_callees;
        int32_t callee_func_index =
            ExtractDirectCallIndex(decoder, func_bytes + byte_pos);
        DCHECK_LT(callee_func_index,
                  wasm_caller->native_module()->num_functions());
        // {caller_ret_offset} points to one instruction after the call.
        // Remember the last called function before that offset.
        if (offset < caller_ret_offset) {
          callee_pos = static_cast<int>(direct_callees.size());
        }
        direct_callees.push_back(callee_func_index);
      } else {
        // If the callee is not the lazy compile stub, assume this callee
        // has already been compiled.
        direct_callees.push_back(-1);
        continue;
      }
    }

    TRACE_LAZY("Found %d non-compiled callees in function=%p.\n",
               num_non_compiled_callees, wasm_caller);
    USE(num_non_compiled_callees);
  }
  CHECK_LE(0, callee_pos);

  // TODO(wasm): compile all functions in non_compiled_callees in
  // background, wait for direct_callees[callee_pos].
  auto callee_func_index = direct_callees[callee_pos];
  TRACE_LAZY(
      "Starting lazy compilation (function=%p retaddr=+%d direct_callees[%d] "
      "-> %d).\n",
      wasm_caller, caller_ret_offset, callee_pos, callee_func_index);

  Handle<WasmModuleObject> module_object(instance->module_object());
  NativeModule* native_module = instance->compiled_module()->GetNativeModule();
  const WasmCode* ret =
      LazyCompileFunction(isolate, module_object, callee_func_index);
  DCHECK_NOT_NULL(ret);

  int patched = 0;
  {
    // Now patch the code in {wasm_caller} with all functions which are now
    // compiled. This will pick up any other compiled functions, not only {ret}.
    size_t pos = 0;
    for (RelocIterator
             it(wasm_caller->instructions(), wasm_caller->reloc_info(),
                wasm_caller->constant_pool(),
                RelocInfo::ModeMask(RelocInfo::WASM_CALL));
         !it.done(); it.next(), ++pos) {
      auto callee_index = direct_callees[pos];
      if (callee_index < 0) continue;  // callee already compiled.
      const WasmCode* callee_compiled = native_module->code(callee_index);
      if (callee_compiled->kind() != WasmCode::kFunction) continue;
      DCHECK_EQ(WasmCode::kLazyStub,
                isolate->wasm_engine()
                    ->code_manager()
                    ->GetCodeFromStartAddress(it.rinfo()->wasm_call_address())
                    ->kind());
      it.rinfo()->set_wasm_call_address(callee_compiled->instruction_start());
      ++patched;
    }
    DCHECK_EQ(direct_callees.size(), pos);
  }

  DCHECK_LT(0, patched);
  TRACE_LAZY("Patched %d calls(s) in %p.\n", patched, wasm_caller);
  USE(patched);

  return ret;
}

}  // namespace

Address CompileLazy(Isolate* isolate,
                    Handle<WasmInstanceObject> target_instance) {
  HistogramTimerScope lazy_time_scope(
      isolate->counters()->wasm_lazy_compilation_time());

  //==========================================================================
  // Begin stack walk.
  //==========================================================================
  StackFrameIterator it(isolate);

  //==========================================================================
  // First frame: C entry stub.
  //==========================================================================
  DCHECK(!it.done());
  DCHECK_EQ(StackFrame::EXIT, it.frame()->type());
  it.Advance();

  //==========================================================================
  // Second frame: WasmCompileLazy builtin.
  //==========================================================================
  DCHECK(!it.done());
  int target_func_index = -1;
  bool indirectly_called = false;
  const wasm::WasmCode* lazy_stub =
      isolate->wasm_engine()->code_manager()->LookupCode(it.frame()->pc());
  CHECK_EQ(wasm::WasmCode::kLazyStub, lazy_stub->kind());
  if (!lazy_stub->IsAnonymous()) {
    // If the lazy stub is not "anonymous", then its copy encodes the target
    // function index. Used for import and indirect calls.
    target_func_index = lazy_stub->index();
    indirectly_called = true;
  }
  it.Advance();

  //==========================================================================
  // Third frame: The calling wasm code (direct or indirect), or js-to-wasm
  // wrapper.
  //==========================================================================
  DCHECK(!it.done());
  DCHECK(it.frame()->is_js_to_wasm() || it.frame()->is_wasm_compiled());
  Handle<Code> js_to_wasm_caller_code;
  Handle<WasmInstanceObject> caller_instance;
  const WasmCode* wasm_caller_code = nullptr;
  int32_t caller_ret_offset = -1;
  if (it.frame()->is_js_to_wasm()) {
    js_to_wasm_caller_code = handle(it.frame()->LookupCode(), isolate);
    // This wasn't actually an indirect call, but a JS->wasm call.
    indirectly_called = false;
  } else {
    caller_instance =
        handle(WasmCompiledFrame::cast(it.frame())->wasm_instance(), isolate);
    wasm_caller_code =
        isolate->wasm_engine()->code_manager()->LookupCode(it.frame()->pc());
    auto offset = it.frame()->pc() - wasm_caller_code->instruction_start();
    caller_ret_offset = static_cast<int32_t>(offset);
    DCHECK_EQ(offset, caller_ret_offset);
  }

  //==========================================================================
  // Begin compilation.
  //==========================================================================
  Handle<WasmCompiledModule> compiled_module(
      target_instance->compiled_module());

  NativeModule* native_module = compiled_module->GetNativeModule();
  DCHECK(!native_module->lazy_compile_frozen());

  NativeModuleModificationScope native_module_modification_scope(native_module);

  const wasm::WasmCode* result = nullptr;

  if (!js_to_wasm_caller_code.is_null()) {
    result = LazyCompileFromJsToWasm(isolate, target_instance,
                                     js_to_wasm_caller_code, target_func_index);
  wasm::WasmCode* result =
      LazyCompileFunction(isolate, native_module, func_index);
  DCHECK_NOT_NULL(result);
    DCHECK_EQ(target_func_index, result->index());
  } else {
    DCHECK_NOT_NULL(wasm_caller_code);
    if (target_func_index < 0) {
      result = LazyCompileDirectCall(isolate, target_instance, wasm_caller_code,
                                     caller_ret_offset);
      DCHECK_NOT_NULL(result);
    } else {
      result =
          LazyCompileIndirectCall(isolate, target_instance, target_func_index);
      DCHECK_NOT_NULL(result);
    }
  }

  //==========================================================================
  // Update import and indirect function tables in the caller.
  //==========================================================================
  if (indirectly_called) {
    DCHECK(!caller_instance.is_null());
    if (!caller_instance->has_managed_indirect_patcher()) {
      auto patcher = Managed<IndirectPatcher>::Allocate(isolate, 0);
      caller_instance->set_managed_indirect_patcher(*patcher);
    }
    IndirectPatcher* patcher = Managed<IndirectPatcher>::cast(
                                   caller_instance->managed_indirect_patcher())
                                   ->raw();
    Address old_target = lazy_stub->instruction_start();
    patcher->Patch(caller_instance, target_instance, target_func_index,
                   old_target, result->instruction_start());
  }
  DCHECK_EQ(func_index, result->index());

  return result->instruction_start();
}
@@ -881,15 +489,6 @@ void FlushICache(const wasm::NativeModule* native_module) {
  }
}

void FlushICache(Handle<FixedArray> functions) {
  for (int i = 0, e = functions->length(); i < e; ++i) {
    if (!functions->get(i)->IsCode()) continue;
    Code* code = Code::cast(functions->get(i));
    Assembler::FlushICache(code->raw_instruction_start(),
                           code->raw_instruction_size());
  }
}

byte* raw_buffer_ptr(MaybeHandle<JSArrayBuffer> buffer, int offset) {
  return static_cast<byte*>(buffer.ToHandleChecked()->backing_store()) + offset;
}
@@ -1089,24 +688,6 @@ void FinishCompilationUnits(CompilationState* compilation_state,
  }
}

void UpdateAllCompiledModulesWithTopTierCode(
    Handle<WasmModuleObject> module_object) {
  WasmModule* module = module_object->module();
  DCHECK_GT(module->functions.size() - module->num_imported_functions, 0);
  USE(module);

  CodeSpaceMemoryModificationScope modification_scope(
      module_object->GetIsolate()->heap());

  NativeModule* native_module =
      module_object->compiled_module()->GetNativeModule();

  // Link.
  CodeSpecialization code_specialization;
  code_specialization.RelocateDirectCalls(native_module);
  code_specialization.ApplyToWholeModule(native_module, module_object);
}

void CompileInParallel(Isolate* isolate, NativeModule* native_module,
                       const ModuleWireBytes& wire_bytes, ModuleEnv* module_env,
                       Handle<WasmModuleObject> module_object,
@@ -1147,53 +728,6 @@ void CompileInParallel(Isolate* isolate, NativeModule* native_module,
  compilation_state->SetNumberOfFunctionsToCompile(functions_count);
  compilation_state->SetWireBytes(wire_bytes);

  DeferredHandles* deferred_handles = nullptr;
  Handle<WasmModuleObject> module_object_deferred;
  if (compilation_state->compile_mode() == CompileMode::kTiering) {
    // Open a deferred handle scope for the module_object, in order to allow
    // for background tiering compilation.
    DeferredHandleScope deferred(isolate);
    module_object_deferred = handle(*module_object, isolate);
    deferred_handles = deferred.Detach();
  }
  compilation_state->AddCallback(
      [module_object_deferred, deferred_handles](
          // Callback is called from a foreground thread.
          CompilationEvent event, ErrorThrower* thrower) mutable {
        switch (event) {
          case CompilationEvent::kFinishedBaselineCompilation:
            // Nothing to do, since we are finishing baseline compilation
            // in this foreground thread.
            return;
          case CompilationEvent::kFinishedTopTierCompilation:
            UpdateAllCompiledModulesWithTopTierCode(module_object_deferred);
            // TODO(wasm): Currently compilation has to finish before the
            // {deferred_handles} can be removed. We need to make sure that
            // we can clean it up at a time when the native module
            // should die (but currently cannot, since it's kept alive
            // through the {deferred_handles} themselves).
            delete deferred_handles;
            deferred_handles = nullptr;
            return;
          case CompilationEvent::kFailedCompilation:
            // If baseline compilation failed, we will reflect this without
            // a callback, in this thread through {thrower}.
            // Tier-up compilation should not fail if baseline compilation
            // did not fail.
            DCHECK(!module_object_deferred->compiled_module()
                        ->GetNativeModule()
                        ->compilation_state()
                        ->baseline_compilation_finished());
            delete deferred_handles;
            deferred_handles = nullptr;
            return;
          case CompilationEvent::kDestroyed:
            if (deferred_handles) delete deferred_handles;
            return;
        }
        UNREACHABLE();
      });

  // 1) The main thread allocates a compilation unit for each wasm function
  //    and stores them in the vector {compilation_units} within the
  //    {compilation_state}. By adding units to the {compilation_state}, new
@@ -1813,10 +1347,8 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
  //--------------------------------------------------------------------------
  CodeSpecialization code_specialization;
  code_specialization.RelocateDirectCalls(native_module);
  code_specialization.ApplyToWholeModule(native_module, module_object_,
                                         SKIP_ICACHE_FLUSH);
  code_specialization.ApplyToWholeModule(native_module, SKIP_ICACHE_FLUSH);
  FlushICache(native_module);
  FlushICache(handle(module_object_->export_wrappers(), isolate_));

  //--------------------------------------------------------------------------
  // Insert the compiled module into the weak list of compiled modules.
@@ -1857,7 +1389,6 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
  //--------------------------------------------------------------------------
  if (module_->start_function_index >= 0) {
    int start_index = module_->start_function_index;
    Handle<WasmInstanceObject> start_function_instance = instance;
    Address start_call_address =
        static_cast<uint32_t>(start_index) < module_->num_imported_functions
            ? kNullAddress
@@ -1868,7 +1399,7 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
    // TODO(clemensh): Don't generate an exported function for the start
    // function. Use CWasmEntry instead.
    start_function_ = WasmExportedFunction::New(
        isolate_, start_function_instance, MaybeHandle<String>(), start_index,
        isolate_, instance, MaybeHandle<String>(), start_index,
        static_cast<int>(sig->parameter_count()), wrapper_code);
  }

@@ -2121,15 +1652,14 @@ int InstanceBuilder::ProcessImports(Handle<WasmInstanceObject> instance) {
  int num_imported_mutable_globals = 0;

  DCHECK_EQ(module_->import_table.size(), sanitized_imports_.size());
  for (int index = 0; index < static_cast<int>(module_->import_table.size());
       ++index) {
  int num_imports = static_cast<int>(module_->import_table.size());
  NativeModule* native_module = instance->compiled_module()->GetNativeModule();
  for (int index = 0; index < num_imports; ++index) {
    WasmImport& import = module_->import_table[index];

    Handle<String> module_name = sanitized_imports_[index].module_name;
    Handle<String> import_name = sanitized_imports_[index].import_name;
    Handle<Object> value = sanitized_imports_[index].value;
    NativeModule* native_module =
        instance->compiled_module()->GetNativeModule();

    switch (import.kind) {
      case kExternalFunction: {
@@ -2159,8 +1689,8 @@ int InstanceBuilder::ProcessImports(Handle<WasmInstanceObject> instance) {
          return -1;
        }
        // The import reference is the instance object itself.
        ImportedFunctionEntry entry(instance, func_index);
        Address imported_target = imported_function->GetWasmCallTarget();
        ImportedFunctionEntry entry(instance, func_index);
        entry.set_wasm_to_wasm(*imported_instance, imported_target);
      } else {
        // The imported function is a callable.
@@ -3130,17 +2660,17 @@ class AsyncCompileJob::PrepareAndStartCompile : public CompileStep {
              }
              return;
            case CompilationEvent::kFinishedTopTierCompilation:
              // It is only safe to schedule the UpdateToTopTierCompiledCode
              // step if no foreground task is currently pending, and no
              // finisher is outstanding (streaming compilation).
              // It is only safe to remove the AsyncCompileJob if no
              // foreground task is currently pending, and no finisher is
              // outstanding (streaming compilation).
              if (job->num_pending_foreground_tasks_ == 0 &&
                  job->outstanding_finishers_.Value() == 0) {
                job->DoSync<UpdateToTopTierCompiledCode>();
              }
                job->isolate_->wasm_engine()->RemoveCompileJob(job);
              } else {
                // If a foreground task was pending or a finisher was pending,
                // we will rely on FinishModule to switch the step to
                // UpdateToTopTierCompiledCode.
                // we will rely on FinishModule to remove the job.
                job->tiering_completed_ = true;
              }
              return;
            case CompilationEvent::kFailedCompilation: {
              // Tier-up compilation should not fail if baseline compilation
@@ -3242,21 +2772,9 @@ class AsyncCompileJob::FinishModule : public CompileStep {
                  ->compilation_state()
                  ->compile_mode());
    if (job_->tiering_completed_) {
      job_->DoSync<UpdateToTopTierCompiledCode>();
    }
  }
};

//==========================================================================
// Step 7 (sync): Update with top tier code.
//==========================================================================
class AsyncCompileJob::UpdateToTopTierCompiledCode : public CompileStep {
  void RunInForeground() override {
    TRACE_COMPILE("(7) Update native module to use optimized code...\n");

    UpdateAllCompiledModulesWithTopTierCode(job_->module_object_);
    job_->isolate_->wasm_engine()->RemoveCompileJob(job_);
  }
}
};

class AsyncCompileJob::AbortCompilation : public CompileStep {
@@ -65,15 +65,8 @@ V8_EXPORT_PRIVATE Handle<Script> CreateWasmScript(
    Isolate* isolate, const ModuleWireBytes& wire_bytes);

// Triggered by the WasmCompileLazy builtin.
// Walks the stack (top three frames) to determine the wasm instance involved
// and which function to compile.
// Then triggers WasmCompiledModule::CompileLazy, taking care of correctly
// patching the call site or indirect function tables.
// Returns either the Code object that has been lazily compiled, or Illegal if
// an error occurred. In the latter case, a pending exception has been set,
// which will be triggered when returning from the runtime function, i.e. the
// Illegal builtin will never be called.
Address CompileLazy(Isolate* isolate, Handle<WasmInstanceObject> instance);
// Returns the instruction start of the compiled code object.
Address CompileLazy(Isolate*, NativeModule*, uint32_t func_index);

// Encapsulates all the state and steps of an asynchronous compilation.
// An asynchronous compile job consists of a number of tasks that are executed
@@ -51,20 +51,6 @@ constexpr bool kModuleCanAllocateMoreMemory = true;

constexpr bool kNeedsTrampoline = !kModuleCanAllocateMoreMemory;

void RelocateCode(WasmCode* code, const WasmCode* orig,
                  WasmCode::FlushICache flush_icache) {
  intptr_t delta = code->instruction_start() - orig->instruction_start();
  for (RelocIterator it(code->instructions(), code->reloc_info(),
                        code->constant_pool(), RelocInfo::kApplyMask);
       !it.done(); it.next()) {
    it.rinfo()->apply(delta);
  }
  if (flush_icache) {
    Assembler::FlushICache(code->instructions().start(),
                           code->instructions().size());
  }
}

}  // namespace

void DisjointAllocationPool::Merge(AddressRange range) {
@@ -296,6 +282,8 @@ const char* GetWasmCodeKindAsString(WasmCode::Kind kind) {
      return "interpreter entry";
    case WasmCode::kTrampoline:
      return "trampoline";
    case WasmCode::kJumpTable:
      return "jump table";
  }
  return "unknown kind";
}
@@ -316,26 +304,30 @@ WasmCode::~WasmCode() {
base::AtomicNumber<size_t> NativeModule::next_id_;

NativeModule::NativeModule(Isolate* isolate, uint32_t num_functions,
                           uint32_t num_imports, bool can_request_more,
                           VirtualMemory* code_space,
                           uint32_t num_imported_functions,
                           bool can_request_more, VirtualMemory* code_space,
                           WasmCodeManager* code_manager, ModuleEnv& env)
    : instance_id(next_id_.Increment(1)),
      num_functions_(num_functions),
      num_imported_functions_(num_imports),
      num_imported_functions_(num_imported_functions),
      compilation_state_(NewCompilationState(isolate, env)),
      free_code_space_({code_space->address(), code_space->end()}),
      wasm_code_manager_(code_manager),
      can_request_more_memory_(can_request_more),
      use_trap_handler_(env.use_trap_handler) {
  if (num_functions > 0) {
    uint32_t num_wasm_functions = num_functions - num_imports;
    code_table_.reset(new WasmCode*[num_wasm_functions]);
    memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
  }
  VirtualMemory my_mem;
  owned_code_space_.push_back(my_mem);
  owned_code_space_.back().TakeControl(code_space);
  owned_code_.reserve(num_functions);

  DCHECK_LE(num_imported_functions, num_functions);
  uint32_t num_wasm_functions = num_functions - num_imported_functions;
  if (num_wasm_functions > 0) {
    code_table_.reset(new WasmCode*[num_wasm_functions]);
    memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));

    jump_table_ = CreateEmptyJumpTable(num_wasm_functions);
  }
}

void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
@@ -346,6 +338,9 @@ void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
  memset(new_table, 0, max_wasm * sizeof(*new_table));
  memcpy(new_table, code_table_.get(), num_wasm * sizeof(*new_table));
  code_table_.reset(new_table);

  // Re-allocate jump table.
  jump_table_ = CreateEmptyJumpTable(max_wasm);
}

void NativeModule::SetNumFunctionsForTesting(uint32_t num_functions) {
@@ -402,6 +397,7 @@ WasmCode* NativeModule::AddOwnedCode(
      std::upper_bound(owned_code_.begin(), owned_code_.end(),
                       ret->instruction_start(), WasmCodeUniquePtrComparator());
  owned_code_.insert(insert_before, std::move(code));

  if (flush_icache) {
    Assembler::FlushICache(ret->instructions().start(),
                           ret->instructions().size());
@@ -422,14 +418,29 @@ WasmCode* NativeModule::AddCodeCopy(Handle<Code> code, WasmCode::Kind kind,
WasmCode* NativeModule::AddInterpreterEntry(Handle<Code> code, uint32_t index) {
  WasmCode* ret = AddAnonymousCode(code, WasmCode::kInterpreterEntry);
  ret->index_ = Just(index);
  PatchJumpTable(index, ret->instruction_start(), WasmCode::kFlushICache);
  return ret;
}

void NativeModule::SetLazyBuiltin(Handle<Code> code) {
  uint32_t num_wasm_functions = num_functions_ - num_imported_functions_;
  if (num_wasm_functions == 0) return;
  WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub);
  for (WasmCode*& code_table_entry : code_table()) {
    code_table_entry = lazy_builtin;
  // Fill the jump table with jumps to the lazy compile stub.
  Address lazy_compile_target = lazy_builtin->instruction_start();
  JumpTableAssembler jtasm(
      jump_table_->instruction_start(),
      static_cast<int>(jump_table_->instructions().size()) + 256);
  for (uint32_t i = 0; i < num_wasm_functions; ++i) {
    // Check that the offset in the jump table increases as expected.
    DCHECK_EQ(i * JumpTableAssembler::kJumpTableSlotSize, jtasm.pc_offset());
    jtasm.EmitLazyCompileJumpSlot(i + num_imported_functions_,
                                  lazy_compile_target);
    jtasm.NopBytes((i + 1) * JumpTableAssembler::kJumpTableSlotSize -
                   jtasm.pc_offset());
  }
  Assembler::FlushICache(jump_table_->instructions().start(),
                         jump_table_->instructions().size());
}

void NativeModule::SetRuntimeStubs(Isolate* isolate) {
@ -591,10 +602,11 @@ WasmCode* NativeModule::AddCode(
|
||||
}
|
||||
}
|
||||
|
||||
set_code(index, ret);
|
||||
if (use_trap_handler_) {
|
||||
ret->RegisterTrapHandlerData();
|
||||
}
|
||||
set_code(index, ret);
|
||||
PatchJumpTable(index, ret->instruction_start(), WasmCode::kFlushICache);
|
||||
|
||||
// Flush the i-cache here instead of in AddOwnedCode, to include the changes
|
||||
// made while iterating over the RelocInfo above.
|
||||
@ -639,6 +651,38 @@ Address NativeModule::CreateTrampolineTo(Handle<Code> code) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t num_wasm_functions) {
|
||||
// Only call this if we really need a jump table.
|
||||
DCHECK_LT(0, num_wasm_functions);
|
||||
size_t jump_table_size =
|
||||
num_wasm_functions * JumpTableAssembler::kJumpTableSlotSize;
|
||||
std::unique_ptr<byte[]> instructions(new byte[jump_table_size]);
|
||||
memset(instructions.get(), 0, jump_table_size);
|
||||
return AddOwnedCode({instructions.get(), jump_table_size}, // instructions
|
||||
nullptr, // reloc_info
|
||||
0, // reloc_size
|
||||
nullptr, // source_pos
|
||||
0, // source_pos_size
|
||||
Nothing<uint32_t>(), // index
|
||||
WasmCode::kJumpTable, // kind
|
||||
0, // constant_pool_offset
|
||||
0, // stack_slots
|
||||
0, // safepoint_table_offset
|
||||
0, // handler_table_offset
|
||||
{}, // protected_instructions
|
||||
WasmCode::kOther, // tier
|
||||
WasmCode::kNoFlushICache); // flush_icache
|
||||
}
|
||||
|
||||
void NativeModule::PatchJumpTable(uint32_t func_index, Address target,
|
||||
WasmCode::FlushICache flush_icache) {
|
||||
DCHECK_LE(num_imported_functions_, func_index);
|
||||
uint32_t slot_idx = func_index - num_imported_functions_;
|
||||
Address jump_table_slot = jump_table_->instruction_start() +
|
||||
slot_idx * JumpTableAssembler::kJumpTableSlotSize;
|
||||
JumpTableAssembler::PatchJumpTableSlot(jump_table_slot, target, flush_icache);
|
||||
}
|
||||
|
||||
Address NativeModule::GetLocalAddressFor(Handle<Code> code) {
|
||||
DCHECK(Heap::IsImmovable(*code));
|
||||
|
||||
@ -733,7 +777,7 @@ Address NativeModule::AllocateForCode(size_t size) {
|
||||
return mem.start;
|
||||
}
|
||||
|
||||
WasmCode* NativeModule::Lookup(Address pc) {
|
||||
WasmCode* NativeModule::Lookup(Address pc) const {
|
||||
if (owned_code_.empty()) return nullptr;
|
||||
auto iter = std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
|
||||
WasmCodeUniquePtrComparator());
|
||||
@ -744,66 +788,27 @@ WasmCode* NativeModule::Lookup(Address pc) {
|
||||
return candidate->contains(pc) ? candidate : nullptr;
|
||||
}
|
||||
|
||||
Address NativeModule::GetCallTargetForFunction(uint32_t func_index) {
  // TODO(clemensh): Introduce a jump table and return a slot of it here.
  WasmCode* wasm_code = code(func_index);
  if (!wasm_code) return kNullAddress;
  if (wasm_code->kind() != WasmCode::kLazyStub) {
    return wasm_code->instruction_start();
  }
Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
  // TODO(clemensh): Measure performance win of returning instruction start
  // directly if we have turbofan code. Downside: Redirecting functions (e.g.
  // for debugging) gets much harder.

  DCHECK_IMPLIES(func_index < num_imported_functions_,
                 !wasm_code->IsAnonymous());
  if (!wasm_code->IsAnonymous()) {
    // If the function wasn't imported, its index should match.
    DCHECK_IMPLIES(func_index >= num_imported_functions_,
                   func_index == wasm_code->index());
    return wasm_code->instruction_start();
  }
  if (lazy_compile_stubs_ == nullptr) {
    lazy_compile_stubs_.reset(new WasmCode*[num_functions_]);
    memset(lazy_compile_stubs_.get(), 0, num_functions_ * sizeof(WasmCode*));
  }
  WasmCode* cloned_code = lazy_compile_stubs_[func_index];
  if (cloned_code == nullptr) {
    cloned_code = CloneCode(wasm_code, WasmCode::kNoFlushICache);
    RelocateCode(cloned_code, wasm_code, WasmCode::kFlushICache);
    cloned_code->index_ = Just(func_index);
    lazy_compile_stubs_[func_index] = cloned_code;
  }
  DCHECK_EQ(func_index, cloned_code->index());
  return cloned_code->instruction_start();
  // Return the jump table slot for that function index.
  DCHECK_NOT_NULL(jump_table_);
  uint32_t slot_idx = func_index - num_imported_functions_;
  DCHECK_LT(slot_idx, jump_table_->instructions().size() /
                          JumpTableAssembler::kJumpTableSlotSize);
  return jump_table_->instruction_start() +
         slot_idx * JumpTableAssembler::kJumpTableSlotSize;
}

WasmCode* NativeModule::CloneCode(const WasmCode* original_code,
                                  WasmCode::FlushICache flush_icache) {
  std::unique_ptr<byte[]> reloc_info;
  if (original_code->reloc_info().size() > 0) {
    reloc_info.reset(new byte[original_code->reloc_info().size()]);
    memcpy(reloc_info.get(), original_code->reloc_info().start(),
           original_code->reloc_info().size());
  }
  std::unique_ptr<byte[]> source_pos;
  if (original_code->source_positions().size() > 0) {
    source_pos.reset(new byte[original_code->source_positions().size()]);
    memcpy(source_pos.get(), original_code->source_positions().start(),
           original_code->source_positions().size());
  }
  DCHECK_EQ(0, original_code->protected_instructions().size());
  std::unique_ptr<ProtectedInstructions> protected_instructions(
      new ProtectedInstructions(0));
  WasmCode* ret = AddOwnedCode(
      original_code->instructions(), std::move(reloc_info),
      original_code->reloc_info().size(), std::move(source_pos),
      original_code->source_positions().size(), original_code->index_,
      original_code->kind(), original_code->constant_pool_offset_,
      original_code->stack_slots(), original_code->safepoint_table_offset_,
      original_code->handler_table_offset_, std::move(protected_instructions),
      original_code->tier(), flush_icache);
  if (!ret->IsAnonymous()) {
    set_code(ret->index(), ret);
  }
  return ret;
uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(Address slot_address) {
  DCHECK(is_jump_table_slot(slot_address));
  uint32_t offset =
      static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
  uint32_t slot_idx = offset / JumpTableAssembler::kJumpTableSlotSize;
  DCHECK_LT(slot_idx, num_functions_ - num_imported_functions_);
  return num_imported_functions_ + slot_idx;
}

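GetFunctionIndexFromJumpTableSlot is the exact inverse of the slot computation in GetCallTargetForFunction. A sketch of the inverse, reusing the assumed kSlotSize and SlotAddress from the sketch above:

uint32_t FunctionIndexFromSlot(Address jump_table_start, Address slot,
                               uint32_t num_imported_functions) {
  uint32_t offset = static_cast<uint32_t>(slot - jump_table_start);
  return num_imported_functions + offset / kSlotSize;
}
// Round trip: FunctionIndexFromSlot(start, SlotAddress(start, i, n), n) == i
// for any valid wasm function index i >= n.
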
void NativeModule::DisableTrapHandler() {
@ -888,16 +893,22 @@ void WasmCodeManager::TryAllocate(size_t size, VirtualMemory* ret, void* hint) {
               reinterpret_cast<void*>(ret->end()), ret->size());
}

// static
size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) {
  constexpr size_t kCodeSizeMultiplier = 4;
  constexpr size_t kImportSize = 32 * kPointerSize;

  uint32_t num_functions = static_cast<uint32_t>(module->functions.size());
  uint32_t num_wasm_functions = num_functions - module->num_imported_functions;

  size_t estimate =
      AllocatePageSize() /* TODO(titzer): 1 page spot bonus */ +
      sizeof(NativeModule) +
      (sizeof(WasmCode*) * module->functions.size() /* code table size */) +
      (sizeof(WasmCode) * module->functions.size() /* code object size */) +
      (kImportSize * module->num_imported_functions /* import size */);
      (sizeof(WasmCode*) * num_wasm_functions /* code table size */) +
      (sizeof(WasmCode) * num_wasm_functions /* code object size */) +
      (kImportSize * module->num_imported_functions /* import size */) +
      (JumpTableAssembler::kJumpTableSlotSize *
       num_wasm_functions /* jump table size */);

  for (auto& function : module->functions) {
    estimate += kCodeSizeMultiplier * function.code.length();
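To make the estimate concrete, here is one worked instance under stated assumptions: a hypothetical module with 100 functions of which 10 are imported, kPointerSize = 8, and an assumed 16-byte jump table slot; sizeof(NativeModule) and sizeof(WasmCode) are implementation-defined and left symbolic.

//   num_wasm_functions = 100 - 10 = 90
//   page bonus  : AllocatePageSize()              (e.g. 4096)
//   code table  : sizeof(WasmCode*) * 90 = 8 * 90 =  720
//   code objects: sizeof(WasmCode)  * 90           (symbolic)
//   imports     : 32 * 8 * 10                     = 2560
//   jump table  : 16 * 90                         = 1440  (slot size assumed)
// plus 4 * function.code.length() summed over all function bodies.
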
@ -1032,7 +1043,7 @@ WasmCode* WasmCodeManager::GetCodeFromStartAddress(Address pc) const {
  return code;
}

WasmCode* WasmCodeManager::LookupCode(Address pc) const {
NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
  if (lookup_map_.empty()) return nullptr;

  auto iter = lookup_map_.upper_bound(pc);
@ -1043,8 +1054,12 @@ WasmCode* WasmCodeManager::LookupCode(Address pc) const {
  NativeModule* candidate = iter->second.second;

  DCHECK_NOT_NULL(candidate);
  if (range_start <= pc && pc < range_end) return candidate->Lookup(pc);
  return nullptr;
  return range_start <= pc && pc < range_end ? candidate : nullptr;
}

WasmCode* WasmCodeManager::LookupCode(Address pc) const {
  NativeModule* candidate = LookupNativeModule(pc);
  return candidate ? candidate->Lookup(pc) : nullptr;
}

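The lookup is now layered: LookupNativeModule finds the module whose reserved code region contains pc, and NativeModule::Lookup resolves the WasmCode within it. The region search relies on lookup_map_ being keyed by region start, so upper_bound(pc) overshoots by exactly one entry. A self-contained sketch of that idiom, with Module standing in for NativeModule:

#include <cstdint>
#include <map>
#include <utility>

using Address = uintptr_t;
struct Module;  // stand-in for NativeModule

Module* LookupRegion(const std::map<Address, std::pair<Address, Module*>>& map,
                     Address pc) {
  // upper_bound returns the first region starting strictly after pc, so the
  // only possible hit is its predecessor.
  auto it = map.upper_bound(pc);
  if (it == map.begin()) return nullptr;  // pc precedes every region
  --it;
  Address start = it->first, end = it->second.first;
  return (start <= pc && pc < end) ? it->second.second : nullptr;
}
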
void WasmCodeManager::Free(VirtualMemory* mem) {

@ -94,7 +94,8 @@ class V8_EXPORT_PRIVATE WasmCode final {
    kLazyStub,
    kRuntimeStub,
    kInterpreterEntry,
    kTrampoline
    kTrampoline,
    kJumpTable
  };

  // Each runtime stub is identified by an id. This id is used to reference the
@ -251,10 +252,8 @@ class V8_EXPORT_PRIVATE NativeModule final {
  WasmCode* AddInterpreterEntry(Handle<Code> code, uint32_t index);

  // When starting lazy compilation, provide the WasmLazyCompile builtin by
  // calling SetLazyBuiltin. It will initialize the code table with it. Copies
  // of it might be cloned from them later when creating entries for exported
  // functions and indirect callable functions, so that they may be identified
  // by the runtime.
  // calling SetLazyBuiltin. It will be copied into this NativeModule and the
  // jump table will be populated with that copy.
  void SetLazyBuiltin(Handle<Code> code);

  // Initializes all runtime stubs by copying them over from the JS-allocated
@ -282,6 +281,12 @@ class V8_EXPORT_PRIVATE NativeModule final {
    return code;
  }

  bool is_jump_table_slot(Address address) const {
    return jump_table_->contains(address);
  }

  uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address);

  // Transition this module from code relying on trap handlers (i.e. without
  // explicit memory bounds checks) to code that does not require trap handlers
  // (i.e. code with explicit bounds checks).
@ -290,11 +295,9 @@ class V8_EXPORT_PRIVATE NativeModule final {
  // after calling this method.
  void DisableTrapHandler();

  // Returns the instruction start of code suitable for indirect or import calls
  // for the given function index. If the code at the given index is the lazy
  // compile stub, it will clone a non-anonymous lazy compile stub for the
  // purpose. This will soon change to always return a jump table slot.
  Address GetCallTargetForFunction(uint32_t index);
  // Returns the target to call for the given function (returns a jump table
  // slot within {jump_table_}).
  Address GetCallTargetForFunction(uint32_t func_index) const;

  bool SetExecutable(bool executable);

@ -322,6 +325,8 @@ class V8_EXPORT_PRIVATE NativeModule final {
  void set_lazy_compile_frozen(bool frozen) { lazy_compile_frozen_ = frozen; }
  bool lazy_compile_frozen() const { return lazy_compile_frozen_; }

  WasmCode* Lookup(Address) const;

  const size_t instance_id = 0;
  ~NativeModule();

@ -333,9 +338,10 @@ class V8_EXPORT_PRIVATE NativeModule final {
  friend class NativeModuleModificationScope;

  static base::AtomicNumber<size_t> next_id_;
  NativeModule(Isolate* isolate, uint32_t num_functions, uint32_t num_imports,
               bool can_request_more, VirtualMemory* code_space,
               WasmCodeManager* code_manager, ModuleEnv& env);
  NativeModule(Isolate* isolate, uint32_t num_functions,
               uint32_t num_imported_functions, bool can_request_more,
               VirtualMemory* code_space, WasmCodeManager* code_manager,
               ModuleEnv& env);

  WasmCode* AddAnonymousCode(Handle<Code>, WasmCode::Kind kind);
  Address AllocateForCode(size_t size);
@ -354,13 +360,16 @@ class V8_EXPORT_PRIVATE NativeModule final {
                         size_t handler_table_offset,
                         std::unique_ptr<ProtectedInstructions>, WasmCode::Tier,
                         WasmCode::FlushICache);
  WasmCode* CloneCode(const WasmCode*, WasmCode::FlushICache);
  WasmCode* Lookup(Address);
  Address GetLocalAddressFor(Handle<Code>);
  Address CreateTrampolineTo(Handle<Code>);
  // TODO(7424): Only used for debugging in {WasmCode::Validate}. Remove.
  Code* ReverseTrampolineLookup(Address target);

  WasmCode* CreateEmptyJumpTable(uint32_t num_wasm_functions);

  void PatchJumpTable(uint32_t func_index, Address target,
                      WasmCode::FlushICache);

  void set_code(uint32_t index, WasmCode* code) {
    DCHECK_LT(index, num_functions_);
    DCHECK_LE(num_imported_functions_, index);
@ -375,7 +384,6 @@ class V8_EXPORT_PRIVATE NativeModule final {
  uint32_t num_functions_;
  uint32_t num_imported_functions_;
  std::unique_ptr<WasmCode* []> code_table_;
  std::unique_ptr<WasmCode* []> lazy_compile_stubs_;

  WasmCode* runtime_stub_table_[WasmCode::kRuntimeStubCount] = {nullptr};

@ -383,6 +391,9 @@ class V8_EXPORT_PRIVATE NativeModule final {
  // start of the trampoline.
  std::unordered_map<Address, Address> trampolines_;

  // Jump table used to easily redirect wasm function calls.
  WasmCode* jump_table_ = nullptr;

  std::unique_ptr<CompilationState, CompilationStateDeleter> compilation_state_;

  // A phantom reference to the {WasmModuleObject}. It is intentionally not
@ -423,6 +434,7 @@ class V8_EXPORT_PRIVATE WasmCodeManager final {
      Isolate* isolate, size_t memory_estimate, uint32_t num_functions,
      uint32_t num_imported_functions, bool can_request_more, ModuleEnv& env);

  NativeModule* LookupNativeModule(Address pc) const;
  WasmCode* LookupCode(Address pc) const;
  WasmCode* GetCodeFromStartAddress(Address pc) const;
  size_t remaining_uncommitted_code_space() const;

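Taken together, {jump_table_} plus PatchJumpTable is what removes the need to patch call sites: every direct and indirect call goes through the function's slot, so publishing new code (lazy compilation finishing, tier-up, a debug redirect) is a single slot write. A hedged sketch of that flow; InstallCode is a hypothetical helper, and in the real class set_code and PatchJumpTable are private, reachable only by friends such as the deserializer:

// Hypothetical publish step after compiling func_index.
void InstallCode(NativeModule* module, uint32_t func_index, WasmCode* code) {
  module->set_code(func_index, code);
  // One patched slot redirects every caller; no caller code is touched.
  module->PatchJumpTable(func_index, code->instruction_start(),
                         WasmCode::kFlushICache);
}
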
@ -68,62 +68,19 @@ void CodeSpecialization::RelocateDirectCalls(NativeModule* native_module) {
  relocate_direct_calls_module_ = native_module;
}

bool CodeSpecialization::ApplyToWholeModule(
    NativeModule* native_module, Handle<WasmModuleObject> module_object,
bool CodeSpecialization::ApplyToWholeModule(NativeModule* native_module,
                                            ICacheFlushMode icache_flush_mode) {
  DisallowHeapAllocation no_gc;
  WasmModule* module = module_object->module();
  std::vector<WasmFunction>* wasm_functions =
      &module_object->module()->functions;
  FixedArray* export_wrappers = module_object->export_wrappers();
  DCHECK_EQ(export_wrappers->length(), module->num_exported_functions);

  bool changed = false;
  int func_index = module->num_imported_functions;

  // Patch all wasm functions.
  for (int num_wasm_functions = static_cast<int>(wasm_functions->size());
       func_index < num_wasm_functions; ++func_index) {
    WasmCode* wasm_function = native_module->code(func_index);
    // TODO(clemensh): Get rid of this nullptr check
    if (wasm_function == nullptr ||
        wasm_function->kind() != WasmCode::kFunction) {
      continue;
    }
    changed |= ApplyToWasmCode(wasm_function, icache_flush_mode);
  for (WasmCode* wasm_code : native_module->code_table()) {
    if (wasm_code == nullptr) continue;
    if (wasm_code->kind() != WasmCode::kFunction) continue;
    changed |= ApplyToWasmCode(wasm_code, icache_flush_mode);
  }

  // Patch all exported functions (JS_TO_WASM_FUNCTION).
  int reloc_mode = 0;
  // Patch CODE_TARGET if we shall relocate direct calls. If we patch direct
  // calls, the instance registered for that (relocate_direct_calls_module_)
  // should match the instance we currently patch (instance).
  if (relocate_direct_calls_module_ != nullptr) {
    DCHECK_EQ(native_module, relocate_direct_calls_module_);
    reloc_mode |= RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL);
  }
  if (!reloc_mode) return changed;
  int wrapper_index = 0;
  for (auto exp : module->export_table) {
    if (exp.kind != kExternalFunction) continue;
    Code* export_wrapper = Code::cast(export_wrappers->get(wrapper_index++));
    if (export_wrapper->kind() != Code::JS_TO_WASM_FUNCTION) continue;
    for (RelocIterator it(export_wrapper, reloc_mode); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      switch (mode) {
        case RelocInfo::JS_TO_WASM_CALL: {
          changed = true;
          Address new_target =
              native_module->GetCallTargetForFunction(exp.index);
          it.rinfo()->set_js_to_wasm_address(new_target, icache_flush_mode);
        } break;
        default:
          UNREACHABLE();
      }
    }
  }
  DCHECK_EQ(module->functions.size(), func_index);
  DCHECK_EQ(export_wrappers->length(), wrapper_index);
  return changed;
}

@ -167,9 +124,9 @@ bool CodeSpecialization::ApplyToWasmCode(wasm::WasmCode* code,
        uint32_t called_func_index = ExtractDirectCallIndex(
            patch_direct_calls_helper->decoder,
            patch_direct_calls_helper->func_bytes + byte_pos);
        const WasmCode* new_code = native_module->code(called_func_index);
        it.rinfo()->set_wasm_call_address(new_code->instruction_start(),
                                          icache_flush_mode);
        Address new_target =
            native_module->GetCallTargetForFunction(called_func_index);
        it.rinfo()->set_wasm_call_address(new_target, icache_flush_mode);
        changed = true;
      } break;

@ -29,9 +29,8 @@ class CodeSpecialization {

  // Update all direct call sites based on the code table in the given module.
  void RelocateDirectCalls(NativeModule* module);
  // Apply all relocations and patching to all code in the module (i.e. wasm
  // code and exported function wrapper code).
  bool ApplyToWholeModule(NativeModule*, Handle<WasmModuleObject>,
  // Apply all relocations and patching to all code in the module.
  bool ApplyToWholeModule(NativeModule*,
                          ICacheFlushMode = FLUSH_ICACHE_IF_NEEDED);
  // Apply all relocations and patching to one wasm code object.
  bool ApplyToWasmCode(wasm::WasmCode*,

@ -568,56 +568,6 @@ Handle<FixedArray> GetOrCreateInterpretedFunctions(
  return new_arr;
}

using CodeRelocationMap = std::map<Address, Address>;

void RedirectCallsitesInCode(Isolate* isolate, const wasm::WasmCode* code,
                             CodeRelocationMap* map) {
  DisallowHeapAllocation no_gc;
  for (RelocIterator it(code->instructions(), code->reloc_info(),
                        code->constant_pool(),
                        RelocInfo::ModeMask(RelocInfo::WASM_CALL));
       !it.done(); it.next()) {
    Address target = it.rinfo()->target_address();
    auto new_target = map->find(target);
    if (new_target == map->end()) continue;
    it.rinfo()->set_wasm_call_address(new_target->second);
  }
}

void RedirectCallsitesInJSWrapperCode(Isolate* isolate, Code* code,
                                      CodeRelocationMap* map) {
  DisallowHeapAllocation no_gc;
  for (RelocIterator it(code, RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL));
       !it.done(); it.next()) {
    Address target = it.rinfo()->js_to_wasm_address();
    auto new_target = map->find(target);
    if (new_target == map->end()) continue;
    it.rinfo()->set_js_to_wasm_address(new_target->second);
  }
}

void RedirectCallsitesInInstance(Isolate* isolate, WasmInstanceObject* instance,
                                 CodeRelocationMap* map) {
  DisallowHeapAllocation no_gc;
  // Redirect all calls in wasm functions.
  wasm::NativeModule* native_module =
      instance->compiled_module()->GetNativeModule();
  for (uint32_t i = native_module->num_imported_functions(),
                e = native_module->num_functions();
       i < e; ++i) {
    wasm::WasmCode* code = native_module->code(i);
    RedirectCallsitesInCode(isolate, code, map);
  }
  // TODO(6668): Find instances that imported our code and also patch those.

  // Redirect all calls in exported functions.
  FixedArray* export_wrapper = instance->module_object()->export_wrappers();
  for (int i = 0, e = export_wrapper->length(); i != e; ++i) {
    Code* code = Code::cast(export_wrapper->get(i));
    RedirectCallsitesInJSWrapperCode(isolate, code, map);
  }
}

}  // namespace

Handle<WasmDebugInfo> WasmDebugInfo::New(Handle<WasmInstanceObject> instance) {
@ -663,7 +613,6 @@ void WasmDebugInfo::RedirectToInterpreter(Handle<WasmDebugInfo> debug_info,
  wasm::NativeModule* native_module =
      instance->compiled_module()->GetNativeModule();
  wasm::WasmModule* module = instance->module();
  CodeRelocationMap code_to_relocate;

  // We may modify js wrappers, as well as wasm functions. Hence the 2
  // modification scopes.
@ -680,16 +629,10 @@ void WasmDebugInfo::RedirectToInterpreter(Handle<WasmDebugInfo> debug_info,
        isolate, func_index, module->functions[func_index].sig);
    const wasm::WasmCode* wasm_new_code = native_module->AddInterpreterEntry(
        new_code.ToHandleChecked(), func_index);
    const wasm::WasmCode* old_code =
        native_module->code(static_cast<uint32_t>(func_index));
    Handle<Foreign> foreign_holder = isolate->factory()->NewForeign(
        wasm_new_code->instruction_start(), TENURED);
    interpreted_functions->set(func_index, *foreign_holder);
    DCHECK_EQ(0, code_to_relocate.count(old_code->instruction_start()));
    code_to_relocate.insert(std::make_pair(old_code->instruction_start(),
                                           wasm_new_code->instruction_start()));
  }
  RedirectCallsitesInInstance(isolate, *instance, &code_to_relocate);
}

void WasmDebugInfo::PrepareStep(StepAction step_action) {

@ -2676,18 +2676,23 @@ class ThreadImpl {
      return {ExternalCallResult::INVALID_FUNC};
    }

    WasmCode* code;
    Handle<WasmInstanceObject> instance;
    {
    IndirectFunctionTableEntry entry(instance_object_, entry_index);
    // Signature check.
    if (entry.sig_id() != static_cast<int32_t>(expected_sig_id)) {
      return {ExternalCallResult::SIGNATURE_MISMATCH};
    }

      instance = handle(entry.instance(), isolate);
      code = isolate->wasm_engine()->code_manager()->GetCodeFromStartAddress(
          entry.target());
    Handle<WasmInstanceObject> instance = handle(entry.instance(), isolate);
    Address target = entry.target();
    NativeModule* native_module =
        isolate->wasm_engine()->code_manager()->LookupNativeModule(target);
    WasmCode* code;
    if (native_module->is_jump_table_slot(target)) {
      uint32_t func_index =
          native_module->GetFunctionIndexFromJumpTableSlot(target);
      code = native_module->code(func_index);
    } else {
      code = native_module->Lookup(target);
    }

    // Call either an internal or external WASM function.

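The interpreter change above is the consumer side of the jump table: entry.target() may now be a slot address rather than a code entry point, so GetCodeFromStartAddress no longer suffices. Restated as a standalone helper (a sketch mirroring the logic in the hunk, not an API that exists under this name):

WasmCode* ResolveIndirectTarget(WasmCodeManager* manager, Address target) {
  NativeModule* native_module = manager->LookupNativeModule(target);
  if (native_module->is_jump_table_slot(target)) {
    // Jump table slot: translate back to the function it dispatches to.
    uint32_t func_index =
        native_module->GetFunctionIndexFromJumpTableSlot(target);
    return native_module->code(func_index);
  }
  // Otherwise the target points directly into some owned code object.
  return native_module->Lookup(target);
}
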
@ -169,8 +169,6 @@ OPTIONAL_ACCESSORS(WasmInstanceObject, indirect_function_table_instances,
                   FixedArray, kIndirectFunctionTableInstancesOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, managed_native_allocations, Foreign,
                   kManagedNativeAllocationsOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, managed_indirect_patcher, Foreign,
                   kManagedIndirectPatcherOffset)
ACCESSORS(WasmInstanceObject, undefined_value, Oddball, kUndefinedValueOffset)
ACCESSORS(WasmInstanceObject, null_value, Oddball, kNullValueOffset)
ACCESSORS(WasmInstanceObject, centry_stub, Code, kCEntryStubOffset)

@ -78,7 +78,7 @@ class IndirectFunctionTableEntry {
//  - target = pointer to wasm-to-js wrapper code entrypoint
//  - an imported wasm function from another instance, which has fields
//    - instance = target instance
//    - target = entrypoint to wasm code of the function
//    - target = entrypoint for the function
class ImportedFunctionEntry {
 public:
  inline ImportedFunctionEntry(Handle<WasmInstanceObject>, int index);
@ -388,7 +388,6 @@ class WasmInstanceObject : public JSObject {
  DECL_ACCESSORS(imported_function_callables, FixedArray)
  DECL_OPTIONAL_ACCESSORS(indirect_function_table_instances, FixedArray)
  DECL_OPTIONAL_ACCESSORS(managed_native_allocations, Foreign)
  DECL_OPTIONAL_ACCESSORS(managed_indirect_patcher, Foreign)
  DECL_ACCESSORS(undefined_value, Oddball)
  DECL_ACCESSORS(null_value, Oddball)
  DECL_ACCESSORS(centry_stub, Code)
@ -423,7 +422,6 @@ class WasmInstanceObject : public JSObject {
  V(kImportedFunctionCallablesOffset, kPointerSize)      \
  V(kIndirectFunctionTableInstancesOffset, kPointerSize) \
  V(kManagedNativeAllocationsOffset, kPointerSize)       \
  V(kManagedIndirectPatcherOffset, kPointerSize)         \
  V(kUndefinedValueOffset, kPointerSize)                 \
  V(kNullValueOffset, kPointerSize)                      \
  V(kCEntryStubOffset, kPointerSize)                     \

@ -268,7 +268,7 @@ NativeModuleSerializer::NativeModuleSerializer(Isolate* isolate,
}

size_t NativeModuleSerializer::MeasureCode(const WasmCode* code) const {
  if (code->kind() == WasmCode::kLazyStub) return sizeof(size_t);
  if (code == nullptr) return sizeof(size_t);
  DCHECK_EQ(WasmCode::kFunction, code->kind());
  return kCodeHeaderSize + code->instructions().size() +
         code->reloc_info().size() + code->source_positions().size() +
@ -290,7 +290,7 @@ void NativeModuleSerializer::WriteHeader(Writer* writer) {
}

void NativeModuleSerializer::WriteCode(const WasmCode* code, Writer* writer) {
  if (code->kind() == WasmCode::kLazyStub) {
  if (code == nullptr) {
    writer->Write(size_t{0});
    return;
  }
@ -500,6 +500,8 @@ bool NativeModuleDeserializer::ReadCode(uint32_t fn_index, Reader* reader) {
      handler_table_offset, std::move(protected_instructions), tier,
      WasmCode::kNoFlushICache);
  native_module_->set_code(fn_index, ret);
  native_module_->PatchJumpTable(fn_index, ret->instruction_start(),
                                 WasmCode::kFlushICache);

  // Relocate the code.
  int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |

@ -119,9 +119,9 @@ uint32_t TestingModuleBuilder::AddFunction(FunctionSig* sig, const char* name) {
Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) {
  // Wrap the code so it can be called as a JS function.
  Link();
  wasm::WasmCode* code = native_module_->code(index);
  Address target = native_module_->GetCallTargetForFunction(index);
  MaybeHandle<Code> maybe_ret_code = compiler::CompileJSToWasmWrapper(
      isolate_, test_module_ptr_, code->instruction_start(), index,
      isolate_, test_module_ptr_, target, index,
      trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler : kNoTrapHandler);
  Handle<Code> ret_code = maybe_ret_code.ToHandleChecked();
  Handle<JSFunction> ret = WasmExportedFunction::New(
@ -167,9 +167,9 @@ void TestingModuleBuilder::PopulateIndirectFunctionTable() {
    for (int j = 0; j < table_size; j++) {
      WasmFunction& function = test_module_->functions[table.values[j]];
      int sig_id = test_module_->signature_map.Find(function.sig);
      auto wasm_code = native_module_->code(function.func_index);
      IndirectFunctionTableEntry(instance, j)
          .set(sig_id, *instance, wasm_code->instruction_start());
      auto target =
          native_module_->GetCallTargetForFunction(function.func_index);
      IndirectFunctionTableEntry(instance, j).set(sig_id, *instance, target);
    }
  }
}

@ -210,15 +210,13 @@ class TestingModuleBuilder {
    return reinterpret_cast<Address>(globals_data_);
  }
  void Link() {
    if (!linked_) {
      Handle<WasmModuleObject> module(instance_object()->module_object());
    if (linked_) return;
    CodeSpecialization code_specialization;
    code_specialization.RelocateDirectCalls(native_module_);
    code_specialization.ApplyToWholeModule(native_module_, module);
    code_specialization.ApplyToWholeModule(native_module_);
    linked_ = true;
    native_module_->SetExecutable(true);
    }
  }

  ModuleEnv CreateModuleEnv();

@ -6,6 +6,7 @@
#include "testing/gmock/include/gmock/gmock.h"

#include "src/wasm/function-compiler.h"
#include "src/wasm/jump-table-assembler.h"
#include "src/wasm/wasm-code-manager.h"

namespace v8 {
@ -143,6 +144,10 @@ enum ModuleStyle : int { Fixed = 0, Growable = 1 };
class WasmCodeManagerTest : public TestWithContext,
                            public ::testing::WithParamInterface<ModuleStyle> {
 public:
  static constexpr uint32_t kNumFunctions = 10;
  static constexpr uint32_t kJumpTableSize = RoundUp<kCodeAlignment>(
      kNumFunctions * JumpTableAssembler::kJumpTableSlotSize);

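kJumpTableSize rounds the raw table size up to kCodeAlignment because the allocator hands out aligned code regions; the tests below subtract it so their space accounting matches what the jump table already consumed. For power-of-two alignments RoundUp is the usual mask trick; a sketch with assumed values kCodeAlignment = 32 and kJumpTableSlotSize = 16:

constexpr uint32_t RoundUpTo(uint32_t alignment, uint32_t value) {
  // alignment must be a power of two.
  return (value + alignment - 1) & ~(alignment - 1);
}
static_assert(RoundUpTo(32, 10 * 16) == 160, "already a multiple of 32");
static_assert(RoundUpTo(32, 170) == 192, "rounded up to the next multiple");
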
  using NativeModulePtr = std::unique_ptr<NativeModule>;

  NativeModulePtr AllocModule(WasmCodeManager* manager, size_t size,
@ -150,8 +155,8 @@ class WasmCodeManagerTest : public TestWithContext,
    bool can_request_more = style == Growable;
    wasm::ModuleEnv env(nullptr, UseTrapHandler::kNoTrapHandler,
                        RuntimeExceptionSupport::kNoRuntimeExceptionSupport);
    return manager->NewNativeModule(i_isolate(), size, 10, 0, can_request_more,
                                    env);
    return manager->NewNativeModule(i_isolate(), size, kNumFunctions, 0,
                                    can_request_more, env);
  }

  WasmCode* AddCode(NativeModule* native_module, uint32_t index, size_t size) {
@ -175,9 +180,7 @@ TEST_P(WasmCodeManagerTest, EmptyCase) {
  WasmCodeManager manager(0 * page());
  CHECK_EQ(0, manager.remaining_uncommitted_code_space());

  NativeModulePtr native_module = AllocModule(&manager, 1 * page(), GetParam());
  CHECK(native_module);
  ASSERT_DEATH_IF_SUPPORTED(AddCode(native_module.get(), 0, 10),
  ASSERT_DEATH_IF_SUPPORTED(AllocModule(&manager, 1 * page(), GetParam()),
                            "OOM in NativeModule::AddOwnedCode");
}

@ -186,7 +189,7 @@ TEST_P(WasmCodeManagerTest, AllocateAndGoOverLimit) {
  CHECK_EQ(1 * page(), manager.remaining_uncommitted_code_space());
  NativeModulePtr native_module = AllocModule(&manager, 1 * page(), GetParam());
  CHECK(native_module);
  CHECK_EQ(1 * page(), manager.remaining_uncommitted_code_space());
  CHECK_EQ(0, manager.remaining_uncommitted_code_space());
  uint32_t index = 0;
  WasmCode* code = AddCode(native_module.get(), index++, 1 * kCodeAlignment);
  CHECK_NOT_NULL(code);
@ -196,7 +199,8 @@ TEST_P(WasmCodeManagerTest, AllocateAndGoOverLimit) {
  CHECK_NOT_NULL(code);
  CHECK_EQ(0, manager.remaining_uncommitted_code_space());

  code = AddCode(native_module.get(), index++, page() - 4 * kCodeAlignment);
  code = AddCode(native_module.get(), index++,
                 page() - 4 * kCodeAlignment - kJumpTableSize);
  CHECK_NOT_NULL(code);
  CHECK_EQ(0, manager.remaining_uncommitted_code_space());

@ -206,14 +210,14 @@ TEST_P(WasmCodeManagerTest, AllocateAndGoOverLimit) {
}

TEST_P(WasmCodeManagerTest, TotalLimitIrrespectiveOfModuleCount) {
  WasmCodeManager manager(1 * page());
  NativeModulePtr nm1 = AllocModule(&manager, 1 * page(), GetParam());
  NativeModulePtr nm2 = AllocModule(&manager, 1 * page(), GetParam());
  WasmCodeManager manager(3 * page());
  NativeModulePtr nm1 = AllocModule(&manager, 2 * page(), GetParam());
  NativeModulePtr nm2 = AllocModule(&manager, 2 * page(), GetParam());
  CHECK(nm1);
  CHECK(nm2);
  WasmCode* code = AddCode(nm1.get(), 0, 1 * page());
  WasmCode* code = AddCode(nm1.get(), 0, 2 * page() - kJumpTableSize);
  CHECK_NOT_NULL(code);
  ASSERT_DEATH_IF_SUPPORTED(AddCode(nm2.get(), 0, 1 * page()),
  ASSERT_DEATH_IF_SUPPORTED(AddCode(nm2.get(), 0, 2 * page() - kJumpTableSize),
                            "OOM in NativeModule::AddOwnedCode");
}

@ -224,10 +228,10 @@ TEST_P(WasmCodeManagerTest, DifferentHeapsApplyLimitsIndependently) {
  NativeModulePtr nm2 = AllocModule(&manager2, 1 * page(), GetParam());
  CHECK(nm1);
  CHECK(nm2);
  WasmCode* code = AddCode(nm1.get(), 0, 1 * page());
  WasmCode* code = AddCode(nm1.get(), 0, 1 * page() - kJumpTableSize);
  CHECK_NOT_NULL(code);
  CHECK_EQ(0, manager1.remaining_uncommitted_code_space());
  code = AddCode(nm2.get(), 0, 1 * page());
  code = AddCode(nm2.get(), 0, 1 * page() - kJumpTableSize);
  CHECK_NOT_NULL(code);
}

@ -252,7 +256,7 @@ TEST_P(WasmCodeManagerTest, CommitIncrements) {
  code = AddCode(nm.get(), 1, 2 * page());
  CHECK_NOT_NULL(code);
  CHECK_EQ(manager.remaining_uncommitted_code_space(), 7 * page());
  code = AddCode(nm.get(), 2, page() - kCodeAlignment);
  code = AddCode(nm.get(), 2, page() - kCodeAlignment - kJumpTableSize);
  CHECK_NOT_NULL(code);
  CHECK_EQ(manager.remaining_uncommitted_code_space(), 7 * page());
}