[wasm] Remove deprecated --wasm-jit-to-native flag.

The feature in question has been enabled by default for a while, and we
no longer need to maintain a configuration with it disabled. Note that
this change only removes the mechanical pieces; further cleanup enabled
by it will be done in follow-ups.
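
For readers outside this CL, the "mechanical pieces" are the per-call-site
checks of the flag that selected between the legacy GC-heap code objects and
the native-heap wasm code; removing the flag lets every site keep only the
branch that was already the default. Below is a minimal, illustrative C++
sketch of that collapse. The names FLAG_feature_x, DoWorkOld, and DoWorkNew
are placeholders and not part of the V8 sources.

// Illustrative sketch only; placeholder names, not the actual V8 code.
bool FLAG_feature_x = true;  // stand-in for the old --wasm-jit-to-native flag

// Before the cleanup: every call site branched on the runtime flag.
int DoWorkOld() {
  if (FLAG_feature_x) {
    return 1;  // native-heap path, already the default
  } else {
    return 0;  // legacy GC-heap path, kept only for the flag
  }
}

// After the cleanup: the flag and the dead branch are deleted outright.
int DoWorkNew() {
  return 1;  // the former flag-enabled path, now unconditional
}

int main() { return DoWorkOld() == DoWorkNew() ? 0 : 1; }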

R=clemensh@chromium.org
BUG=v8:7549

Change-Id: I90e5bcddabe74a18a4d2a88132e8dc93317bcff4
Reviewed-on: https://chromium-review.googlesource.com/958424
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Reviewed-by: Michael Hablich <hablich@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51883}
Authored by Michael Starzinger on 2018-03-12 13:49:18 +01:00; committed by Commit Bot
parent 388579bdca
commit 0fc6c35c3a
31 changed files with 323 additions and 1314 deletions

@ -992,23 +992,18 @@ size_t PipelineWasmCompilationJob::AllocatedMemory() const {
PipelineWasmCompilationJob::Status PipelineWasmCompilationJob::FinalizeJobImpl(
Isolate* isolate) {
if (!FLAG_wasm_jit_to_native) {
pipeline_.FinalizeCode();
ValidateImmovableEmbeddedObjects();
} else {
CodeGenerator* code_generator = pipeline_.data_->code_generator();
CompilationInfo::WasmCodeDesc* wasm_code_desc =
compilation_info()->wasm_code_desc();
code_generator->tasm()->GetCode(isolate, &wasm_code_desc->code_desc);
wasm_code_desc->safepoint_table_offset =
code_generator->GetSafepointTableOffset();
wasm_code_desc->handler_table_offset =
code_generator->GetHandlerTableOffset();
wasm_code_desc->frame_slot_count =
code_generator->frame()->GetTotalFrameSlotCount();
wasm_code_desc->source_positions_table =
code_generator->GetSourcePositionTable();
}
CodeGenerator* code_generator = pipeline_.data_->code_generator();
CompilationInfo::WasmCodeDesc* wasm_code_desc =
compilation_info()->wasm_code_desc();
code_generator->tasm()->GetCode(isolate, &wasm_code_desc->code_desc);
wasm_code_desc->safepoint_table_offset =
code_generator->GetSafepointTableOffset();
wasm_code_desc->handler_table_offset =
code_generator->GetHandlerTableOffset();
wasm_code_desc->frame_slot_count =
code_generator->frame()->GetTotalFrameSlotCount();
wasm_code_desc->source_positions_table =
code_generator->GetSourcePositionTable();
return SUCCEEDED;
}

@ -2579,20 +2579,11 @@ Node* WasmGraphBuilder::CallDirect(uint32_t index, Node** args, Node*** rets,
wasm::WasmCodePosition position) {
DCHECK_NULL(args[0]);
wasm::FunctionSig* sig = env_->module->functions[index].sig;
if (FLAG_wasm_jit_to_native) {
// Just encode the function index. This will be patched at instantiation.
Address code = reinterpret_cast<Address>(index);
args[0] = jsgraph()->RelocatableIntPtrConstant(
reinterpret_cast<intptr_t>(code), RelocInfo::WASM_CALL);
} else {
// Add code object as constant.
Handle<Code> code = index < env_->function_code.size()
? env_->function_code[index]
: env_->default_function_code;
DCHECK(!code.is_null());
args[0] = HeapConstant(code);
}
// Just encode the function index. This will be patched at instantiation.
Address code = reinterpret_cast<Address>(index);
args[0] = jsgraph()->RelocatableIntPtrConstant(
reinterpret_cast<intptr_t>(code), RelocInfo::WASM_CALL);
return BuildWasmCall(sig, args, rets, position);
}
@ -3411,16 +3402,12 @@ void WasmGraphBuilder::BuildCWasmEntry() {
// Create parameter nodes (offset by 1 for the receiver parameter).
Node* code_obj = nullptr;
if (FLAG_wasm_jit_to_native) {
Node* foreign_code_obj = Param(CWasmEntryParameters::kCodeObject + 1);
MachineOperatorBuilder* machine = jsgraph()->machine();
code_obj = graph()->NewNode(
machine->Load(MachineType::Pointer()), foreign_code_obj,
Int32Constant(Foreign::kForeignAddressOffset - kHeapObjectTag),
*effect_, *control_);
} else {
code_obj = Param(CWasmEntryParameters::kCodeObject + 1);
}
Node* foreign_code_obj = Param(CWasmEntryParameters::kCodeObject + 1);
MachineOperatorBuilder* machine = jsgraph()->machine();
code_obj = graph()->NewNode(
machine->Load(MachineType::Pointer()), foreign_code_obj,
Int32Constant(Foreign::kForeignAddressOffset - kHeapObjectTag), *effect_,
*control_);
Node* wasm_context = Param(CWasmEntryParameters::kWasmContext + 1);
Node* arg_buffer = Param(CWasmEntryParameters::kArgumentsBuffer + 1);
@ -4728,8 +4715,8 @@ Handle<Code> CompileJSToWasmWrapper(Isolate* isolate, wasm::WasmModule* module,
// TODO(titzer): compile JS to WASM wrappers without a {ModuleEnv}.
ModuleEnv env(module,
// TODO(mtrofin): remove the Illegal builtin when we don't need
// FLAG_wasm_jit_to_native
// TODO(mstarzinger): remove the Illegal builtin when we don't
// need FLAG_wasm_jit_to_native
BUILTIN_CODE(isolate, Illegal), // default_function_code
use_trap_handler);
@ -5047,13 +5034,6 @@ Handle<Code> CompileWasmInterpreterEntry(Isolate* isolate, uint32_t func_index,
}
}
if (!FLAG_wasm_jit_to_native) {
Handle<FixedArray> deopt_data =
isolate->factory()->NewFixedArray(1, TENURED);
Handle<WeakCell> weak_instance = isolate->factory()->NewWeakCell(instance);
deopt_data->set(0, *weak_instance);
code->set_deoptimization_data(*deopt_data);
}
return code;
}
@ -5377,88 +5357,55 @@ WasmCodeWrapper WasmCompilationUnit::FinishTurbofanCompilation(
if (tf_.job_->FinalizeJob(isolate_) != CompilationJob::SUCCEEDED) {
return {};
}
if (!FLAG_wasm_jit_to_native) {
Handle<Code> code = tf_.info_->code();
DCHECK(!code.is_null());
if (FLAG_trace_wasm_decode_time) {
double codegen_ms = codegen_timer.Elapsed().InMillisecondsF();
PrintF("wasm-code-generation ok: %u bytes, %0.3f ms code generation\n",
static_cast<unsigned>(func_body_.end - func_body_.start),
codegen_ms);
}
PackProtectedInstructions(code);
// TODO(mtrofin): when we crystalize a design in lieu of WasmCodeDesc, that
// works for both wasm and non-wasm, we can simplify AddCode to just take
// that as a parameter.
const CodeDesc& desc =
tf_.job_->compilation_info()->wasm_code_desc()->code_desc;
wasm::WasmCode* code = native_module_->AddCode(
desc, tf_.job_->compilation_info()->wasm_code_desc()->frame_slot_count,
func_index_,
tf_.job_->compilation_info()->wasm_code_desc()->safepoint_table_offset,
tf_.job_->compilation_info()->wasm_code_desc()->handler_table_offset,
std::move(protected_instructions_), wasm::WasmCode::kTurbofan);
if (!code) {
return WasmCodeWrapper(code);
} else {
// TODO(mtrofin): when we crystalize a design in lieu of WasmCodeDesc, that
// works for both wasm and non-wasm, we can simplify AddCode to just take
// that as a parameter.
const CodeDesc& desc =
tf_.job_->compilation_info()->wasm_code_desc()->code_desc;
wasm::WasmCode* code = native_module_->AddCode(
desc, tf_.job_->compilation_info()->wasm_code_desc()->frame_slot_count,
func_index_,
tf_.job_->compilation_info()->wasm_code_desc()->safepoint_table_offset,
tf_.job_->compilation_info()->wasm_code_desc()->handler_table_offset,
std::move(protected_instructions_), wasm::WasmCode::kTurbofan);
if (!code) {
return WasmCodeWrapper(code);
}
if (FLAG_trace_wasm_decode_time) {
double codegen_ms = codegen_timer.Elapsed().InMillisecondsF();
PrintF("wasm-code-generation ok: %u bytes, %0.3f ms code generation\n",
static_cast<unsigned>(func_body_.end - func_body_.start),
codegen_ms);
}
}
if (FLAG_trace_wasm_decode_time) {
double codegen_ms = codegen_timer.Elapsed().InMillisecondsF();
PrintF("wasm-code-generation ok: %u bytes, %0.3f ms code generation\n",
static_cast<unsigned>(func_body_.end - func_body_.start),
codegen_ms);
}
PROFILE(isolate_,
CodeCreateEvent(CodeEventListener::FUNCTION_TAG, code, func_name_));
PROFILE(isolate_,
CodeCreateEvent(CodeEventListener::FUNCTION_TAG, code, func_name_));
Handle<ByteArray> source_positions =
tf_.job_->compilation_info()->wasm_code_desc()->source_positions_table;
Handle<ByteArray> source_positions =
tf_.job_->compilation_info()->wasm_code_desc()->source_positions_table;
native_module_->compiled_module()->source_positions()->set(
func_index_, *source_positions);
native_module_->compiled_module()->source_positions()->set(func_index_,
*source_positions);
#ifdef ENABLE_DISASSEMBLER
// Note: only do this after setting source positions, as this will be
// accessed and printed here.
if (FLAG_print_code || FLAG_print_wasm_code) {
// TODO(wasm): Use proper log files, here and elsewhere.
PrintF("--- Native Wasm code ---\n");
code->Print(isolate_);
PrintF("--- End code ---\n");
}
// Note: only do this after setting source positions, as this will be
// accessed and printed here.
if (FLAG_print_code || FLAG_print_wasm_code) {
// TODO(wasm): Use proper log files, here and elsewhere.
PrintF("--- Native Wasm code ---\n");
code->Print(isolate_);
PrintF("--- End code ---\n");
}
#endif
// TODO(mtrofin): this should probably move up in the common caller,
// once liftoff has source positions. Until then, we'd need to handle
// undefined values, which is complicating the code.
LOG_CODE_EVENT(isolate_,
CodeLinePosInfoRecordEvent(code->instructions().start(),
*source_positions));
return WasmCodeWrapper(code);
}
}
// TODO(mtrofin): remove when FLAG_wasm_jit_to_native is not needed
void WasmCompilationUnit::PackProtectedInstructions(Handle<Code> code) const {
if (protected_instructions_->empty()) return;
DCHECK_LT(protected_instructions_->size(), std::numeric_limits<int>::max());
const int num_instructions =
static_cast<int>(protected_instructions_->size());
Handle<FixedArray> fn_protected = isolate_->factory()->NewFixedArray(
num_instructions * Code::kTrapDataSize, TENURED);
for (int i = 0; i < num_instructions; ++i) {
const trap_handler::ProtectedInstructionData& instruction =
protected_instructions_->at(i);
fn_protected->set(Code::kTrapDataSize * i + Code::kTrapCodeOffset,
Smi::FromInt(instruction.instr_offset));
fn_protected->set(Code::kTrapDataSize * i + Code::kTrapLandingOffset,
Smi::FromInt(instruction.landing_offset));
}
code->set_protected_instructions(*fn_protected);
// TODO(mtrofin): this should probably move up in the common caller,
// once liftoff has source positions. Until then, we'd need to handle
// undefined values, which is complicating the code.
LOG_CODE_EVENT(isolate_,
CodeLinePosInfoRecordEvent(code->instructions().start(),
*source_positions));
return WasmCodeWrapper(code);
}
WasmCodeWrapper WasmCompilationUnit::FinishLiftoffCompilation(
@ -5469,36 +5416,16 @@ WasmCodeWrapper WasmCompilationUnit::FinishLiftoffCompilation(
Handle<ByteArray> source_positions =
liftoff_.source_position_table_builder_.ToSourcePositionTable(isolate_);
WasmCodeWrapper ret;
if (!FLAG_wasm_jit_to_native) {
Handle<Code> code;
code = isolate_->factory()->NewCode(
desc, Code::WASM_FUNCTION, code, Builtins::kNoBuiltinId,
source_positions, MaybeHandle<DeoptimizationData>(), kMovable,
0, // stub_key
false, // is_turbofanned
liftoff_.asm_.GetTotalFrameSlotCount(), // stack_slots
liftoff_.safepoint_table_offset_);
if (isolate_->logger()->is_logging_code_events() ||
isolate_->is_profiling()) {
RecordFunctionCompilation(CodeEventListener::FUNCTION_TAG, isolate_, code,
"wasm#%d-liftoff", func_index_);
}
PackProtectedInstructions(code);
ret = WasmCodeWrapper(code);
} else {
// TODO(herhut) Consider lifting it to FinishCompilation.
native_module_->compiled_module()->source_positions()->set(
func_index_, *source_positions);
wasm::WasmCode* code = native_module_->AddCode(
desc, liftoff_.asm_.GetTotalFrameSlotCount(), func_index_,
liftoff_.safepoint_table_offset_, 0, std::move(protected_instructions_),
wasm::WasmCode::kLiftoff);
PROFILE(isolate_,
CodeCreateEvent(CodeEventListener::FUNCTION_TAG, code, func_name_));
ret = WasmCodeWrapper(code);
}
// TODO(herhut) Consider lifting it to FinishCompilation.
native_module_->compiled_module()->source_positions()->set(func_index_,
*source_positions);
wasm::WasmCode* code = native_module_->AddCode(
desc, liftoff_.asm_.GetTotalFrameSlotCount(), func_index_,
liftoff_.safepoint_table_offset_, 0, std::move(protected_instructions_),
wasm::WasmCode::kLiftoff);
PROFILE(isolate_,
CodeCreateEvent(CodeEventListener::FUNCTION_TAG, code, func_name_));
WasmCodeWrapper ret = WasmCodeWrapper(code);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_code || FLAG_print_wasm_code) {
// TODO(wasm): Use proper log files, here and elsewhere.

@ -72,7 +72,8 @@ struct ModuleEnv {
// FixedArray.
const std::vector<Address> function_tables;
// TODO(mtrofin): remove these 2 once we don't need FLAG_wasm_jit_to_native
// TODO(mstarzinger): remove these 2 once we don't need
// FLAG_wasm_jit_to_native
// Contains the code objects to call for each direct call.
// (the same length as module.functions)
const std::vector<Handle<Code>> function_code;
@ -138,8 +139,6 @@ class WasmCompilationUnit final {
size_t memory_cost() const { return memory_cost_; }
private:
void PackProtectedInstructions(Handle<Code> code) const;
struct LiftoffData {
wasm::LiftoffAssembler asm_;
int safepoint_table_offset_;

@ -257,13 +257,10 @@ CallDescriptor* GetWasmCallDescriptor(Zone* zone, wasm::FunctionSig* fsig,
const RegList kCalleeSaveFPRegisters = 0;
// The target for wasm calls is always a code object.
MachineType target_type = FLAG_wasm_jit_to_native ? MachineType::Pointer()
: MachineType::AnyTagged();
MachineType target_type = MachineType::Pointer();
LinkageLocation target_loc = LinkageLocation::ForAnyRegister(target_type);
CallDescriptor::Kind kind = FLAG_wasm_jit_to_native
? CallDescriptor::kCallWasmFunction
: CallDescriptor::kCallCodeObject;
CallDescriptor::Kind kind = CallDescriptor::kCallWasmFunction;
return new (zone) CallDescriptor( // --
kind, // kind

@ -522,11 +522,8 @@ DEFINE_INT(wasm_num_compilation_tasks, 10,
"number of parallel compilation tasks for wasm")
DEFINE_DEBUG_BOOL(wasm_trace_native_heap, false,
"trace wasm native heap events")
DEFINE_BOOL(wasm_jit_to_native, true,
"JIT wasm code to native (not JS GC) memory")
DEFINE_BOOL(wasm_write_protect_code_memory, false,
"write protect code memory on the wasm native heap")
DEFINE_IMPLICATION(future, wasm_jit_to_native)
DEFINE_BOOL(wasm_trace_serialization, false,
"trace serialization/deserialization")
DEFINE_BOOL(wasm_async_compilation, true,

@ -192,12 +192,8 @@ DISABLE_ASAN Address ReadMemoryAt(Address address) {
WasmInstanceObject* LookupWasmInstanceObjectFromStandardFrame(
const StandardFrame* frame) {
// TODO(titzer): WASM instances cannot be found from the code in the future.
WasmInstanceObject* ret =
FLAG_wasm_jit_to_native
? WasmInstanceObject::GetOwningInstance(
frame->isolate()->wasm_engine()->code_manager()->LookupCode(
frame->pc()))
: WasmInstanceObject::GetOwningInstanceGC(frame->LookupCode());
WasmInstanceObject* ret = WasmInstanceObject::GetOwningInstance(
frame->isolate()->wasm_engine()->code_manager()->LookupCode(frame->pc()));
// This is a live stack frame, there must be a live wasm instance available.
DCHECK_NOT_NULL(ret);
return ret;
@ -819,9 +815,7 @@ void StandardFrame::IterateCompiledFrame(RootVisitor* v) const {
// Find the code and compute the safepoint information.
Address inner_pointer = pc();
const wasm::WasmCode* wasm_code =
FLAG_wasm_jit_to_native
? isolate()->wasm_engine()->code_manager()->LookupCode(inner_pointer)
: nullptr;
isolate()->wasm_engine()->code_manager()->LookupCode(inner_pointer);
SafepointEntry safepoint_entry;
uint32_t stack_slots;
Code* code = nullptr;
@ -1738,14 +1732,12 @@ void WasmCompiledFrame::Print(StringStream* accumulator, PrintMode mode,
accumulator->Add("WASM [");
Script* script = this->script();
accumulator->PrintName(script->name());
Address instruction_start = FLAG_wasm_jit_to_native
? isolate()
->wasm_engine()
->code_manager()
->LookupCode(pc())
->instructions()
.start()
: LookupCode()->instruction_start();
Address instruction_start = isolate()
->wasm_engine()
->code_manager()
->LookupCode(pc())
->instructions()
.start();
int pc = static_cast<int>(this->pc() - instruction_start);
Vector<const uint8_t> raw_func_name =
shared()->GetRawFunctionName(this->function_index());
@ -1772,10 +1764,8 @@ Address WasmCompiledFrame::GetCallerStackPointer() const {
}
WasmCodeWrapper WasmCompiledFrame::wasm_code() const {
return FLAG_wasm_jit_to_native
? WasmCodeWrapper(
isolate()->wasm_engine()->code_manager()->LookupCode(pc()))
: WasmCodeWrapper(Handle<Code>(LookupCode(), isolate()));
return WasmCodeWrapper(
isolate()->wasm_engine()->code_manager()->LookupCode(pc()));
}
WasmInstanceObject* WasmCompiledFrame::wasm_instance() const {
@ -1818,21 +1808,14 @@ bool WasmCompiledFrame::at_to_number_conversion() const {
// ToNumber conversion call.
Address callee_pc = reinterpret_cast<Address>(this->callee_pc());
int pos = -1;
if (FLAG_wasm_jit_to_native) {
wasm::WasmCode* code =
callee_pc
? isolate()->wasm_engine()->code_manager()->LookupCode(callee_pc)
: nullptr;
if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
int offset = static_cast<int>(callee_pc - code->instructions().start());
pos = FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(code,
offset);
} else {
Code* code = callee_pc ? isolate()->FindCodeObject(callee_pc) : nullptr;
if (!code || code->kind() != Code::WASM_TO_JS_FUNCTION) return false;
int offset = static_cast<int>(callee_pc - code->instruction_start());
pos = AbstractCode::cast(code)->SourcePosition(offset);
}
wasm::WasmCode* code =
callee_pc
? isolate()->wasm_engine()->code_manager()->LookupCode(callee_pc)
: nullptr;
if (!code || code->kind() != wasm::WasmCode::kWasmToJsWrapper) return false;
int offset = static_cast<int>(callee_pc - code->instructions().start());
pos = FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(code,
offset);
DCHECK(pos == 0 || pos == 1);
// The imported call has position 0, ToNumber has position 1.
return !!pos;
@ -1840,13 +1823,6 @@ bool WasmCompiledFrame::at_to_number_conversion() const {
int WasmCompiledFrame::LookupExceptionHandlerInTable(int* stack_slots) {
DCHECK_NOT_NULL(stack_slots);
if (!FLAG_wasm_jit_to_native) {
Code* code = LookupCode();
HandlerTable table(code);
int pc_offset = static_cast<int>(pc() - code->entry());
*stack_slots = code->stack_slots();
return table.LookupReturn(pc_offset);
}
wasm::WasmCode* code =
isolate()->wasm_engine()->code_manager()->LookupCode(pc());
if (!code->IsAnonymous() && code->handler_table_offset() > 0) {
@ -1887,13 +1863,7 @@ void WasmInterpreterEntryFrame::Summarize(
}
}
Code* WasmInterpreterEntryFrame::unchecked_code() const {
if (FLAG_wasm_jit_to_native) {
UNIMPLEMENTED();
} else {
return isolate()->FindCodeObject(pc());
}
}
Code* WasmInterpreterEntryFrame::unchecked_code() const { UNREACHABLE(); }
// TODO(titzer): deprecate this method.
WasmInstanceObject* WasmInterpreterEntryFrame::wasm_instance() const {
@ -2132,9 +2102,7 @@ void JavaScriptFrame::Iterate(RootVisitor* v) const {
void InternalFrame::Iterate(RootVisitor* v) const {
wasm::WasmCode* wasm_code =
FLAG_wasm_jit_to_native
? isolate()->wasm_engine()->code_manager()->LookupCode(pc())
: nullptr;
isolate()->wasm_engine()->code_manager()->LookupCode(pc());
if (wasm_code != nullptr) {
DCHECK(wasm_code->kind() == wasm::WasmCode::kLazyStub);
} else {

@ -1305,17 +1305,10 @@ Object* Isolate::UnwindAndFindHandler() {
trap_handler::SetThreadInWasm();
set_wasm_caught_exception(exception);
if (FLAG_wasm_jit_to_native) {
wasm::WasmCode* wasm_code =
wasm_engine()->code_manager()->LookupCode(frame->pc());
return FoundHandler(nullptr, wasm_code->instructions().start(),
offset, wasm_code->constant_pool(), return_sp,
frame->fp());
} else {
Code* code = frame->LookupCode();
return FoundHandler(nullptr, code->instruction_start(), offset,
code->constant_pool(), return_sp, frame->fp());
}
wasm::WasmCode* wasm_code =
wasm_engine()->code_manager()->LookupCode(frame->pc());
return FoundHandler(nullptr, wasm_code->instructions().start(), offset,
wasm_code->constant_pool(), return_sp, frame->fp());
}
case StackFrame::OPTIMIZED: {
@ -1696,11 +1689,9 @@ bool Isolate::ComputeLocationFromStackTrace(MessageLocation* target,
// TODO(titzer): store a reference to the code object in FrameArray;
// a second lookup here could lead to inconsistency.
int byte_offset =
FLAG_wasm_jit_to_native
? FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
compiled_module->GetNativeModule()->GetCode(func_index),
code_offset)
: elements->Code(i)->SourcePosition(code_offset);
FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
compiled_module->GetNativeModule()->GetCode(func_index),
code_offset);
bool is_at_number_conversion =
elements->IsAsmJsWasmFrame(i) &&

@ -650,16 +650,9 @@ void WasmStackFrame::FromFrameArray(Isolate* isolate, Handle<FrameArray> array,
if (array->IsWasmInterpretedFrame(frame_ix)) {
code_ = {};
} else {
code_ =
FLAG_wasm_jit_to_native
? WasmCodeWrapper(
wasm_instance_->compiled_module()->GetNativeModule()->GetCode(
wasm_func_index_))
: WasmCodeWrapper(handle(
Code::cast(
wasm_instance_->compiled_module()->code_table()->get(
wasm_func_index_)),
isolate));
code_ = WasmCodeWrapper(
wasm_instance_->compiled_module()->GetNativeModule()->GetCode(
wasm_func_index_));
}
offset_ = array->Offset(frame_ix)->value();
}

@ -1490,7 +1490,6 @@ void WasmCompiledModule::WasmCompiledModuleVerify() {
VerifyObjectField(kCodeTableOffset);
VerifyObjectField(kFunctionTablesOffset);
VerifyObjectField(kEmptyFunctionTablesOffset);
VerifyObjectField(kInstanceIdOffset);
}
void WasmDebugInfo::WasmDebugInfoVerify() {

@ -80,7 +80,7 @@ class Code : public HeapObject {
DECL_ACCESSORS(source_position_table, Object)
inline ByteArray* SourcePositionTable() const;
// TODO(mtrofin): remove when we don't need FLAG_wasm_jit_to_native
// TODO(mstarzinger): remove when we don't need FLAG_wasm_jit_to_native
// [protected instructions]: Array containing list of protected
// instructions and corresponding landing pad offset.
DECL_ACCESSORS(protected_instructions, FixedArray)

@ -31,14 +31,8 @@ WasmInstanceObject* GetWasmInstanceOnStackTop(Isolate* isolate) {
const Address entry = Isolate::c_entry_fp(isolate->thread_local_top());
Address pc =
Memory::Address_at(entry + StandardFrameConstants::kCallerPCOffset);
WasmInstanceObject* owning_instance = nullptr;
if (FLAG_wasm_jit_to_native) {
owning_instance = WasmInstanceObject::GetOwningInstance(
isolate->wasm_engine()->code_manager()->LookupCode(pc));
} else {
owning_instance = WasmInstanceObject::GetOwningInstanceGC(
isolate->inner_pointer_to_code_cache()->GetCacheEntry(pc)->code);
}
WasmInstanceObject* owning_instance = WasmInstanceObject::GetOwningInstance(
isolate->wasm_engine()->code_manager()->LookupCode(pc));
CHECK_NOT_NULL(owning_instance);
return owning_instance;
}
@ -296,15 +290,11 @@ RUNTIME_FUNCTION(Runtime_WasmCompileLazy) {
DCHECK_EQ(0, args.length());
HandleScope scope(isolate);
if (FLAG_wasm_jit_to_native) {
Address new_func = wasm::CompileLazy(isolate);
// The alternative to this is having 2 lazy compile builtins. The builtins
// are part of the snapshot, so the flag has no impact on the codegen there.
return reinterpret_cast<Object*>(new_func - Code::kHeaderSize +
kHeapObjectTag);
} else {
return *wasm::CompileLazyOnGCHeap(isolate);
}
Address new_func = wasm::CompileLazy(isolate);
// The alternative to this is having 2 lazy compile builtins. The builtins
// are part of the snapshot, so the flag has no impact on the codegen there.
return reinterpret_cast<Object*>(new_func - Code::kHeaderSize +
kHeapObjectTag);
}
} // namespace internal

@ -1236,16 +1236,9 @@ class LiftoffCompiler {
source_position_table_builder_->AddPosition(
__ pc_offset(), SourcePosition(decoder->position()), false);
if (FLAG_wasm_jit_to_native) {
// Just encode the function index. This will be patched at instantiation.
Address addr = reinterpret_cast<Address>(operand.index);
__ CallNativeWasmCode(addr);
} else {
Handle<Code> target = operand.index < env_->function_code.size()
? env_->function_code[operand.index]
: env_->default_function_code;
__ Call(target, RelocInfo::CODE_TARGET);
}
// Just encode the function index. This will be patched at instantiation.
Address addr = reinterpret_cast<Address>(operand.index);
__ CallNativeWasmCode(addr);
safepoint_table_builder_.DefineSafepoint(asm_, Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);

@ -466,95 +466,6 @@ MaybeHandle<WasmInstanceObject> InstantiateToInstanceObject(
return {};
}
Handle<Code> CompileLazyOnGCHeap(Isolate* isolate) {
HistogramTimerScope lazy_time_scope(
isolate->counters()->wasm_lazy_compilation_time());
// Find the wasm frame which triggered the lazy compile, to get the wasm
// instance.
StackFrameIterator it(isolate);
// First frame: C entry stub.
DCHECK(!it.done());
DCHECK_EQ(StackFrame::EXIT, it.frame()->type());
it.Advance();
// Second frame: WasmCompileLazy builtin.
DCHECK(!it.done());
Handle<Code> lazy_compile_code(it.frame()->LookupCode(), isolate);
DCHECK_EQ(Builtins::kWasmCompileLazy, lazy_compile_code->builtin_index());
Handle<WasmInstanceObject> instance;
Handle<FixedArray> exp_deopt_data;
int func_index = -1;
// If the lazy compile stub has deopt data, use that to determine the
// instance and function index. Otherwise this must be a wasm->wasm call
// within one instance, so extract the information from the caller.
if (lazy_compile_code->deoptimization_data()->length() > 0) {
exp_deopt_data = handle(lazy_compile_code->deoptimization_data(), isolate);
auto func_info = GetWasmFunctionInfo(isolate, lazy_compile_code);
instance = func_info.instance.ToHandleChecked();
func_index = func_info.func_index;
}
it.Advance();
// Third frame: The calling wasm code or js-to-wasm wrapper.
DCHECK(!it.done());
DCHECK(it.frame()->is_js_to_wasm() || it.frame()->is_wasm_compiled());
Handle<Code> caller_code = handle(it.frame()->LookupCode(), isolate);
if (it.frame()->is_js_to_wasm()) {
DCHECK(!instance.is_null());
} else if (instance.is_null()) {
// Then this is a direct call (otherwise we would have attached the instance
// via deopt data to the lazy compile stub). Just use the instance of the
// caller.
instance =
handle(WasmInstanceObject::GetOwningInstanceGC(*caller_code), isolate);
}
int offset =
static_cast<int>(it.frame()->pc() - caller_code->instruction_start());
// Only patch the caller code if this is *no* indirect call.
// exp_deopt_data will be null if the called function is not exported at all,
// and its length will be <= 2 if all entries in tables were already patched.
// Note that this check is conservative: If the first call to an exported
// function is direct, we will just patch the export tables, and only on the
// second call we will patch the caller.
bool patch_caller = caller_code->kind() == Code::JS_TO_WASM_FUNCTION ||
exp_deopt_data.is_null() || exp_deopt_data->length() <= 2;
wasm::LazyCompilationOrchestrator* orchestrator =
Managed<wasm::LazyCompilationOrchestrator>::cast(
instance->compiled_module()
->shared()
->lazy_compilation_orchestrator())
->get();
DCHECK(!orchestrator->IsFrozenForTesting());
Handle<Code> compiled_code = orchestrator->CompileLazyOnGCHeap(
isolate, instance, caller_code, offset, func_index, patch_caller);
if (!exp_deopt_data.is_null() && exp_deopt_data->length() > 2) {
TRACE_LAZY("Patching %d position(s) in function tables.\n",
(exp_deopt_data->length() - 2) / 2);
// See EnsureExportedLazyDeoptData: exp_deopt_data[2...(len-1)] are pairs of
// <export_table, index> followed by undefined values.
// Use this information here to patch all export tables.
DCHECK_EQ(0, exp_deopt_data->length() % 2);
for (int idx = 2, end = exp_deopt_data->length(); idx < end; idx += 2) {
if (exp_deopt_data->get(idx)->IsUndefined(isolate)) break;
FixedArray* exp_table = FixedArray::cast(exp_deopt_data->get(idx));
int exp_index = Smi::ToInt(exp_deopt_data->get(idx + 1));
int table_index = compiler::FunctionTableCodeOffset(exp_index);
DCHECK(exp_table->get(table_index) == *lazy_compile_code);
exp_table->set(table_index, *compiled_code);
}
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// After processing, remove the list of exported entries, such that we don't
// do the patching redundantly.
Handle<FixedArray> new_deopt_data =
isolate->factory()->CopyFixedArrayUpTo(exp_deopt_data, 2, TENURED);
lazy_compile_code->set_deoptimization_data(*new_deopt_data);
}
return compiled_code;
}
Address CompileLazy(Isolate* isolate) {
HistogramTimerScope lazy_time_scope(
isolate->counters()->wasm_lazy_compilation_time());
@ -695,25 +606,7 @@ compiler::ModuleEnv CreateModuleEnvFromCompiledModule(
Isolate* isolate, Handle<WasmCompiledModule> compiled_module) {
DisallowHeapAllocation no_gc;
WasmModule* module = compiled_module->shared()->module();
if (FLAG_wasm_jit_to_native) {
compiler::ModuleEnv result(module, std::vector<Address>{},
std::vector<Handle<Code>>{},
BUILTIN_CODE(isolate, WasmCompileLazy),
compiled_module->use_trap_handler());
return result;
}
std::vector<GlobalHandleAddress> function_tables;
int num_function_tables = static_cast<int>(module->function_tables.size());
FixedArray* ft =
num_function_tables == 0 ? nullptr : compiled_module->function_tables();
for (int i = 0; i < num_function_tables; ++i) {
// TODO(clemensh): defer these handles for concurrent compilation.
function_tables.push_back(WasmCompiledModule::GetTableValue(ft, i));
}
compiler::ModuleEnv result(module, std::move(function_tables),
compiler::ModuleEnv result(module, std::vector<Address>{},
std::vector<Handle<Code>>{},
BUILTIN_CODE(isolate, WasmCompileLazy),
compiled_module->use_trap_handler());
@ -740,20 +633,12 @@ const wasm::WasmCode* LazyCompilationOrchestrator::CompileFunction(
TRACE_LAZY("Compiling function %s, %d.\n", func_name.c_str(), func_index);
if (FLAG_wasm_jit_to_native) {
wasm::WasmCode* existing_code = compiled_module->GetNativeModule()->GetCode(
static_cast<uint32_t>(func_index));
if (existing_code != nullptr &&
existing_code->kind() == wasm::WasmCode::kFunction) {
TRACE_LAZY("Function %d already compiled.\n", func_index);
return existing_code;
}
} else {
if (Code::cast(compiled_module->code_table()->get(func_index))->kind() ==
Code::WASM_FUNCTION) {
TRACE_LAZY("Function %d already compiled.\n", func_index);
return nullptr;
}
wasm::WasmCode* existing_code = compiled_module->GetNativeModule()->GetCode(
static_cast<uint32_t>(func_index));
if (existing_code != nullptr &&
existing_code->kind() == wasm::WasmCode::kFunction) {
TRACE_LAZY("Function %d already compiled.\n", func_index);
return existing_code;
}
compiler::ModuleEnv module_env =
@ -783,19 +668,8 @@ const wasm::WasmCode* LazyCompilationOrchestrator::CompileFunction(
// TODO(clemensh): According to the spec, we can actually skip validation at
// module creation time, and return a function that always traps here.
CHECK(!thrower.error());
// Now specialize the generated code for this instance.
// {code} is used only when !FLAG_wasm_jit_to_native, so it may be removed
// when that flag is removed.
Handle<Code> code;
if (code_wrapper.IsCodeObject()) {
code = code_wrapper.GetCode();
AttachWasmFunctionInfo(isolate, code, instance, func_index);
DCHECK_EQ(Builtins::kWasmCompileLazy,
Code::cast(compiled_module->code_table()->get(func_index))
->builtin_index());
compiled_module->code_table()->set(func_index, *code);
}
// Now specialize the generated code for this instance.
Zone specialization_zone(isolate->allocator(), ZONE_NAME);
CodeSpecialization code_specialization(isolate, &specialization_zone);
code_specialization.RelocateDirectCalls(instance);
@ -807,24 +681,18 @@ const wasm::WasmCode* LazyCompilationOrchestrator::CompileFunction(
auto counters = isolate->counters();
counters->wasm_lazily_compiled_functions()->Increment();
if (!code_wrapper.IsCodeObject()) {
const wasm::WasmCode* wasm_code = code_wrapper.GetWasmCode();
Assembler::FlushICache(wasm_code->instructions().start(),
wasm_code->instructions().size());
counters->wasm_generated_code_size()->Increment(
static_cast<int>(wasm_code->instructions().size()));
counters->wasm_reloc_size()->Increment(
static_cast<int>(wasm_code->reloc_info().size()));
const wasm::WasmCode* wasm_code = code_wrapper.GetWasmCode();
Assembler::FlushICache(wasm_code->instructions().start(),
wasm_code->instructions().size());
counters->wasm_generated_code_size()->Increment(
static_cast<int>(wasm_code->instructions().size()));
counters->wasm_reloc_size()->Increment(
static_cast<int>(wasm_code->reloc_info().size()));
} else {
Assembler::FlushICache(code->instruction_start(), code->instruction_size());
counters->wasm_generated_code_size()->Increment(code->body_size());
counters->wasm_reloc_size()->Increment(code->relocation_info()->length());
}
counters->wasm_lazy_compilation_throughput()->AddSample(
compilation_time != 0 ? static_cast<int>(func_size / compilation_time)
: 0);
return !code_wrapper.IsCodeObject() ? code_wrapper.GetWasmCode() : nullptr;
return code_wrapper.GetWasmCode();
}
namespace {
@ -1360,7 +1228,7 @@ size_t ModuleCompiler::InitializeCompilationUnits(
Vector<const uint8_t> bytes(wire_bytes.start() + func->code.offset(),
func->code.end_offset() - func->code.offset());
WasmName name = wire_bytes.GetName(func);
DCHECK_IMPLIES(FLAG_wasm_jit_to_native, native_module_ != nullptr);
DCHECK_NOT_NULL(native_module_);
builder.AddUnit(module_env, native_module_, func, buffer_offset, bytes,
name);
}
@ -1533,7 +1401,7 @@ MaybeHandle<WasmModuleObject> CompileToModuleObject(
const ModuleWireBytes& wire_bytes, Handle<Script> asm_js_script,
Vector<const byte> asm_js_offset_table_bytes) {
Handle<Code> centry_stub = CEntryStub(isolate, 1).GetCode();
// TODO(mtrofin): the wasm::NativeModule parameter to the ModuleCompiler
// TODO(mstarzinger): the wasm::NativeModule parameter to the ModuleCompiler
// constructor is null here, and initialized in CompileToModuleObjectInternal.
// This is a point-in-time, until we remove the FLAG_wasm_jit_to_native flag,
// and stop needing a FixedArray for code for the non-native case. Otherwise,
@ -1611,152 +1479,74 @@ void RecordStats(const wasm::NativeModule* native_module, Counters* counters) {
// deoptimization data attached. This is needed for lazy compile stubs which are
// called from JS_TO_WASM functions or via exported function tables. The deopt
// data is used to determine which function this lazy compile stub belongs to.
// TODO(mtrofin): remove the instance and code_table members once we remove the
// FLAG_wasm_jit_to_native
// TODO(mstarzinger): remove the instance and code_table members once we remove
// the FLAG_wasm_jit_to_native
WasmCodeWrapper EnsureExportedLazyDeoptData(Isolate* isolate,
Handle<WasmInstanceObject> instance,
Handle<FixedArray> code_table,
wasm::NativeModule* native_module,
uint32_t func_index) {
if (!FLAG_wasm_jit_to_native) {
Handle<Code> code(Code::cast(code_table->get(func_index)), isolate);
if (code->builtin_index() != Builtins::kWasmCompileLazy) {
// No special deopt data needed for compiled functions, and imported
// functions, which map to Illegal at this point (they get compiled at
// instantiation time).
DCHECK(code->kind() == Code::WASM_FUNCTION ||
code->kind() == Code::WASM_TO_JS_FUNCTION ||
code->kind() == Code::WASM_TO_WASM_FUNCTION ||
code->builtin_index() == Builtins::kIllegal);
return WasmCodeWrapper(code);
}
// deopt_data:
// #0: weak instance
// #1: func_index
// might be extended later for table exports (see
// EnsureTableExportLazyDeoptData).
Handle<FixedArray> deopt_data(code->deoptimization_data());
DCHECK_EQ(0, deopt_data->length() % 2);
if (deopt_data->length() == 0) {
code = isolate->factory()->CopyCode(code);
code_table->set(func_index, *code);
AttachWasmFunctionInfo(isolate, code, instance, func_index);
}
#ifdef DEBUG
auto func_info = GetWasmFunctionInfo(isolate, code);
DCHECK_IMPLIES(!instance.is_null(),
*func_info.instance.ToHandleChecked() == *instance);
DCHECK_EQ(func_index, func_info.func_index);
#endif
wasm::WasmCode* code = native_module->GetCode(func_index);
// {code} will be nullptr when exporting imports.
if (code == nullptr || code->kind() != wasm::WasmCode::kLazyStub ||
!code->IsAnonymous()) {
return WasmCodeWrapper(code);
} else {
wasm::WasmCode* code = native_module->GetCode(func_index);
// {code} will be nullptr when exporting imports.
if (code == nullptr || code->kind() != wasm::WasmCode::kLazyStub ||
!code->IsAnonymous()) {
return WasmCodeWrapper(code);
}
// Clone the lazy builtin into the native module.
return WasmCodeWrapper(
native_module->CloneLazyBuiltinInto(code, func_index));
}
// Clone the lazy builtin into the native module.
return WasmCodeWrapper(native_module->CloneLazyBuiltinInto(code, func_index));
}
// Ensure that the code object in <code_table> at offset <func_index> has
// deoptimization data attached. This is needed for lazy compile stubs which are
// called from JS_TO_WASM functions or via exported function tables. The deopt
// data is used to determine which function this lazy compile stub belongs to.
// TODO(mtrofin): remove the instance and code_table members once we remove the
// FLAG_wasm_jit_to_native
// TODO(mstarzinger): remove the instance and code_table members once we remove
// the FLAG_wasm_jit_to_native
WasmCodeWrapper EnsureTableExportLazyDeoptData(
Isolate* isolate, Handle<WasmInstanceObject> instance,
Handle<FixedArray> code_table, wasm::NativeModule* native_module,
uint32_t func_index, Handle<FixedArray> export_table, int export_index,
std::unordered_map<uint32_t, uint32_t>* num_table_exports) {
if (!FLAG_wasm_jit_to_native) {
Handle<Code> code =
EnsureExportedLazyDeoptData(isolate, instance, code_table,
native_module, func_index)
.GetCode();
if (code->builtin_index() != Builtins::kWasmCompileLazy)
return WasmCodeWrapper(code);
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// deopt_data:
// #0: weak instance
// #1: func_index
// [#2: export table
// #3: export table index]
// [#4: export table
// #5: export table index]
// ...
// num_table_exports counts down and determines the index for the new
// export table entry.
auto table_export_entry = num_table_exports->find(func_index);
DCHECK(table_export_entry != num_table_exports->end());
DCHECK_LT(0, table_export_entry->second);
uint32_t this_idx = 2 * table_export_entry->second;
--table_export_entry->second;
Handle<FixedArray> deopt_data(code->deoptimization_data());
DCHECK_EQ(0, deopt_data->length() % 2);
if (deopt_data->length() == 2) {
// Then only the "header" (#0 and #1) exists. Extend for the export table
// entries (make space for this_idx + 2 elements).
deopt_data = isolate->factory()->CopyFixedArrayAndGrow(deopt_data,
this_idx, TENURED);
code->set_deoptimization_data(*deopt_data);
}
DCHECK_LE(this_idx + 2, deopt_data->length());
DCHECK(deopt_data->get(this_idx)->IsUndefined(isolate));
DCHECK(deopt_data->get(this_idx + 1)->IsUndefined(isolate));
deopt_data->set(this_idx, *export_table);
deopt_data->set(this_idx + 1, Smi::FromInt(export_index));
const wasm::WasmCode* code =
EnsureExportedLazyDeoptData(isolate, instance, code_table, native_module,
func_index)
.GetWasmCode();
if (code == nullptr || code->kind() != wasm::WasmCode::kLazyStub)
return WasmCodeWrapper(code);
// deopt_data:
// [#0: export table
// #1: export table index]
// [#2: export table
// #3: export table index]
// ...
// num_table_exports counts down and determines the index for the new
// export table entry.
auto table_export_entry = num_table_exports->find(func_index);
DCHECK(table_export_entry != num_table_exports->end());
DCHECK_LT(0, table_export_entry->second);
--table_export_entry->second;
uint32_t this_idx = 2 * table_export_entry->second;
int int_func_index = static_cast<int>(func_index);
Object* deopt_entry =
native_module->compiled_module()->lazy_compile_data()->get(
int_func_index);
FixedArray* deopt_data = nullptr;
if (!deopt_entry->IsFixedArray()) {
// we count indices down, so we enter here first for the
// largest index.
deopt_data = *isolate->factory()->NewFixedArray(this_idx + 2, TENURED);
native_module->compiled_module()->lazy_compile_data()->set(int_func_index,
deopt_data);
} else {
const wasm::WasmCode* code =
EnsureExportedLazyDeoptData(isolate, instance, code_table,
native_module, func_index)
.GetWasmCode();
if (code == nullptr || code->kind() != wasm::WasmCode::kLazyStub)
return WasmCodeWrapper(code);
// deopt_data:
// [#0: export table
// #1: export table index]
// [#2: export table
// #3: export table index]
// ...
// num_table_exports counts down and determines the index for the new
// export table entry.
auto table_export_entry = num_table_exports->find(func_index);
DCHECK(table_export_entry != num_table_exports->end());
DCHECK_LT(0, table_export_entry->second);
--table_export_entry->second;
uint32_t this_idx = 2 * table_export_entry->second;
int int_func_index = static_cast<int>(func_index);
Object* deopt_entry =
native_module->compiled_module()->lazy_compile_data()->get(
int_func_index);
FixedArray* deopt_data = nullptr;
if (!deopt_entry->IsFixedArray()) {
// we count indices down, so we enter here first for the
// largest index.
deopt_data = *isolate->factory()->NewFixedArray(this_idx + 2, TENURED);
native_module->compiled_module()->lazy_compile_data()->set(int_func_index,
deopt_data);
} else {
deopt_data = FixedArray::cast(deopt_entry);
DCHECK_LE(this_idx + 2, deopt_data->length());
}
DCHECK(deopt_data->get(this_idx)->IsUndefined(isolate));
DCHECK(deopt_data->get(this_idx + 1)->IsUndefined(isolate));
deopt_data->set(this_idx, *export_table);
deopt_data->set(this_idx + 1, Smi::FromInt(export_index));
return WasmCodeWrapper(code);
deopt_data = FixedArray::cast(deopt_entry);
DCHECK_LE(this_idx + 2, deopt_data->length());
}
DCHECK(deopt_data->get(this_idx)->IsUndefined(isolate));
DCHECK(deopt_data->get(this_idx + 1)->IsUndefined(isolate));
deopt_data->set(this_idx, *export_table);
deopt_data->set(this_idx + 1, Smi::FromInt(export_index));
return WasmCodeWrapper(code);
}
bool in_bounds(uint32_t offset, uint32_t size, uint32_t upper) {
@ -1783,24 +1573,12 @@ WasmCodeWrapper MakeWasmToWasmWrapper(
.sig;
if (expected_sig && !expected_sig->Equals(*sig)) return {};
if (!FLAG_wasm_jit_to_native) {
Handle<Code> wrapper_code = compiler::CompileWasmToWasmWrapper(
isolate, imported_function->GetWasmCode(), *sig,
new_wasm_context_address);
// Set the deoptimization data for the WasmToWasm wrapper. This is
// needed by the interpreter to find the imported instance for
// a cross-instance call.
AttachWasmFunctionInfo(isolate, wrapper_code, imported_instance,
imported_function->function_index());
return WasmCodeWrapper(wrapper_code);
} else {
Handle<Code> code = compiler::CompileWasmToWasmWrapper(
isolate, imported_function->GetWasmCode(), *sig,
new_wasm_context_address);
return WasmCodeWrapper(
instance->compiled_module()->GetNativeModule()->AddCodeCopy(
code, wasm::WasmCode::kWasmToWasmWrapper, index));
}
Handle<Code> code = compiler::CompileWasmToWasmWrapper(
isolate, imported_function->GetWasmCode(), *sig,
new_wasm_context_address);
return WasmCodeWrapper(
instance->compiled_module()->GetNativeModule()->AddCodeCopy(
code, wasm::WasmCode::kWasmToWasmWrapper, index));
}
WasmCodeWrapper UnwrapExportOrCompileImportWrapper(
@ -1816,18 +1594,12 @@ WasmCodeWrapper UnwrapExportOrCompileImportWrapper(
}
// No wasm function or being debugged. Compile a new wrapper for the new
// signature.
if (FLAG_wasm_jit_to_native) {
Handle<Code> temp_code = compiler::CompileWasmToJSWrapper(
isolate, target, sig, import_index, origin,
instance->compiled_module()->use_trap_handler(), js_imports_table);
return WasmCodeWrapper(
instance->compiled_module()->GetNativeModule()->AddCodeCopy(
temp_code, wasm::WasmCode::kWasmToJsWrapper, import_index));
} else {
return WasmCodeWrapper(compiler::CompileWasmToJSWrapper(
isolate, target, sig, import_index, origin,
instance->compiled_module()->use_trap_handler(), js_imports_table));
}
Handle<Code> temp_code = compiler::CompileWasmToJSWrapper(
isolate, target, sig, import_index, origin,
instance->compiled_module()->use_trap_handler(), js_imports_table);
return WasmCodeWrapper(
instance->compiled_module()->GetNativeModule()->AddCodeCopy(
temp_code, wasm::WasmCode::kWasmToJsWrapper, import_index));
}
double MonotonicallyIncreasingTimeInMs() {
@ -1860,7 +1632,8 @@ std::unique_ptr<compiler::ModuleEnv> CreateDefaultModuleEnv(
use_trap_handler);
}
// TODO(mtrofin): remove code_table when we don't need FLAG_wasm_jit_to_native
// TODO(mstarzinger): remove code_table when we don't need
// FLAG_wasm_jit_to_native
Handle<WasmCompiledModule> NewCompiledModule(Isolate* isolate,
WasmModule* module,
Handle<FixedArray> code_table,
@ -1943,7 +1716,7 @@ MaybeHandle<WasmModuleObject> ModuleCompiler::CompileToModuleObjectInternal(
? BUILTIN_CODE(isolate_, WasmCompileLazy)
: BUILTIN_CODE(isolate_, Illegal);
// TODO(mtrofin): remove code_table and code_table_size when we don't
// TODO(mstarzinger): remove code_table and code_table_size when we don't
// need FLAG_wasm_jit_to_native anymore. Keep export_wrappers.
int code_table_size = static_cast<int>(module_->functions.size());
int export_wrappers_size = static_cast<int>(module_->num_exported_functions);
@ -1969,7 +1742,7 @@ MaybeHandle<WasmModuleObject> ModuleCompiler::CompileToModuleObjectInternal(
isolate_, shared->module(), code_table, export_wrappers, env.get());
native_module_ = compiled_module->GetNativeModule();
compiled_module->OnWasmModuleDecodingComplete(shared);
if (lazy_compile && FLAG_wasm_jit_to_native) {
if (lazy_compile) {
Handle<FixedArray> lazy_compile_data = isolate_->factory()->NewFixedArray(
static_cast<int>(module_->functions.size()), TENURED);
compiled_module->set_lazy_compile_data(*lazy_compile_data);
@ -1983,8 +1756,7 @@ MaybeHandle<WasmModuleObject> ModuleCompiler::CompileToModuleObjectInternal(
funcs_to_compile > 1 &&
V8::GetCurrentPlatform()->NumberOfWorkerThreads() > 0;
// Avoid a race condition by collecting results into a second vector.
std::vector<Handle<Code>> results(
FLAG_wasm_jit_to_native ? 0 : env->module->functions.size());
std::vector<Handle<Code>> results(0);
if (compile_parallel) {
CompileInParallel(wire_bytes, env.get(), results, thrower);
@ -1993,18 +1765,7 @@ MaybeHandle<WasmModuleObject> ModuleCompiler::CompileToModuleObjectInternal(
}
if (thrower->error()) return {};
if (!FLAG_wasm_jit_to_native) {
// At this point, compilation has completed. Update the code table.
for (size_t i =
module_->num_imported_functions + FLAG_skip_compiling_wasm_funcs;
i < results.size(); ++i) {
Code* code = *results[i];
code_table->set(static_cast<int>(i), code);
RecordStats(code, counters());
}
} else {
RecordStats(native_module_, counters());
}
RecordStats(native_module_, counters());
} else {
if (module_->is_wasm()) {
// Validate wasm modules for lazy compilation. Don't validate asm.js
@ -2015,9 +1776,7 @@ MaybeHandle<WasmModuleObject> ModuleCompiler::CompileToModuleObjectInternal(
// (lazy) compilation time.
ValidateSequentially(wire_bytes, env.get(), thrower);
}
if (FLAG_wasm_jit_to_native) {
native_module_->SetLazyBuiltin(BUILTIN_CODE(isolate_, WasmCompileLazy));
}
native_module_->SetLazyBuiltin(BUILTIN_CODE(isolate_, WasmCompileLazy));
}
if (thrower->error()) return {};
@ -2080,7 +1839,7 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
//--------------------------------------------------------------------------
// Reuse the compiled module (if no owner), otherwise clone.
//--------------------------------------------------------------------------
// TODO(mtrofin): remove code_table
// TODO(mstarzinger): remove code_table
// when FLAG_wasm_jit_to_native is not needed
Handle<FixedArray> code_table;
Handle<FixedArray> wrapper_table;
@ -2115,76 +1874,26 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
// the owner + original state used for cloning and patching
// won't be mutated by possible finalizer runs.
DCHECK(!owner.is_null());
if (FLAG_wasm_jit_to_native) {
TRACE("Cloning from %zu\n", original->GetNativeModule()->instance_id);
compiled_module_ = WasmCompiledModule::Clone(isolate_, original);
native_module = compiled_module_->GetNativeModule();
wrapper_table = handle(compiled_module_->export_wrappers(), isolate_);
} else {
TRACE("Cloning from %d\n", original->instance_id());
compiled_module_ = WasmCompiledModule::Clone(isolate_, original);
code_table = handle(compiled_module_->code_table(), isolate_);
wrapper_table = handle(compiled_module_->export_wrappers(), isolate_);
// Avoid creating too many handles in the outer scope.
HandleScope scope(isolate_);
// Clone the code for wasm functions and exports.
for (int i = 0; i < code_table->length(); ++i) {
Handle<Code> orig_code(Code::cast(code_table->get(i)), isolate_);
switch (orig_code->kind()) {
case Code::WASM_TO_JS_FUNCTION:
case Code::WASM_TO_WASM_FUNCTION:
// Imports will be overwritten with newly compiled wrappers.
break;
case Code::BUILTIN:
DCHECK_EQ(Builtins::kWasmCompileLazy, orig_code->builtin_index());
// If this code object has deoptimization data, then we need a
// unique copy to attach updated deoptimization data.
if (orig_code->deoptimization_data()->length() > 0) {
Handle<Code> code = factory->CopyCode(orig_code);
AttachWasmFunctionInfo(isolate_, code,
Handle<WasmInstanceObject>(), i);
code_table->set(i, *code);
}
break;
case Code::WASM_FUNCTION: {
Handle<Code> code = factory->CopyCode(orig_code);
AttachWasmFunctionInfo(isolate_, code,
Handle<WasmInstanceObject>(), i);
code_table->set(i, *code);
break;
}
default:
UNREACHABLE();
}
}
}
TRACE("Cloning from %zu\n", original->GetNativeModule()->instance_id);
compiled_module_ = WasmCompiledModule::Clone(isolate_, original);
native_module = compiled_module_->GetNativeModule();
wrapper_table = handle(compiled_module_->export_wrappers(), isolate_);
for (int i = 0; i < wrapper_table->length(); ++i) {
Handle<Code> orig_code(Code::cast(wrapper_table->get(i)), isolate_);
DCHECK_EQ(orig_code->kind(), Code::JS_TO_WASM_FUNCTION);
Handle<Code> code = factory->CopyCode(orig_code);
wrapper_table->set(i, *code);
}
if (FLAG_wasm_jit_to_native) {
RecordStats(native_module, counters());
} else {
RecordStats(code_table, counters());
}
RecordStats(native_module, counters());
RecordStats(wrapper_table, counters());
} else {
// There was no owner, so we can reuse the original.
compiled_module_ = original;
wrapper_table = handle(compiled_module_->export_wrappers(), isolate_);
if (FLAG_wasm_jit_to_native) {
old_module = compiled_module_->GetNativeModule();
native_module = old_module;
TRACE("Reusing existing instance %zu\n",
compiled_module_->GetNativeModule()->instance_id);
} else {
code_table = handle(compiled_module_->code_table(), isolate_);
TRACE("Reusing existing instance %d\n",
compiled_module_->instance_id());
}
old_module = compiled_module_->GetNativeModule();
native_module = old_module;
TRACE("Reusing existing instance %zu\n",
compiled_module_->GetNativeModule()->instance_id);
}
Handle<WeakCell> weak_native_context =
isolate_->factory()->NewWeakCell(isolate_->native_context());
@ -2332,34 +2041,6 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
code_specialization.RelocateWasmContextReferences(wasm_context_address);
js_to_wasm_cache_.SetContextAddress(wasm_context_address);
if (!FLAG_wasm_jit_to_native) {
//--------------------------------------------------------------------------
// Set up the runtime support for the new instance.
//--------------------------------------------------------------------------
Handle<WeakCell> weak_link = factory->NewWeakCell(instance);
for (int i = num_imported_functions + FLAG_skip_compiling_wasm_funcs,
num_functions = static_cast<int>(module_->functions.size());
i < num_functions; ++i) {
Handle<Code> code = handle(Code::cast(code_table->get(i)), isolate_);
if (code->kind() == Code::WASM_FUNCTION) {
AttachWasmFunctionInfo(isolate_, code, weak_link, i);
continue;
}
DCHECK_EQ(Builtins::kWasmCompileLazy, code->builtin_index());
int deopt_len = code->deoptimization_data()->length();
if (deopt_len == 0) continue;
DCHECK_LE(2, deopt_len);
DCHECK_EQ(i, Smi::ToInt(code->deoptimization_data()->get(1)));
code->deoptimization_data()->set(0, *weak_link);
// Entries [2, deopt_len) encode information about table exports of this
// function. This is rebuilt in {LoadTableSegments}, so reset it here.
for (int i = 2; i < deopt_len; ++i) {
code->deoptimization_data()->set_undefined(isolate_, i);
}
}
}
//--------------------------------------------------------------------------
// Set up the exports object for the new instance.
//--------------------------------------------------------------------------
@ -2384,22 +2065,14 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
code_specialization.RelocateDirectCalls(instance);
code_specialization.ApplyToWholeInstance(*instance, SKIP_ICACHE_FLUSH);
if (FLAG_wasm_jit_to_native) {
FlushICache(native_module);
} else {
FlushICache(code_table);
}
FlushICache(native_module);
FlushICache(wrapper_table);
//--------------------------------------------------------------------------
// Unpack and notify signal handler of protected instructions.
//--------------------------------------------------------------------------
if (use_trap_handler()) {
if (FLAG_wasm_jit_to_native) {
UnpackAndRegisterProtectedInstructions(isolate_, native_module);
} else {
UnpackAndRegisterProtectedInstructionsGC(isolate_, code_table);
}
UnpackAndRegisterProtectedInstructions(isolate_, native_module);
}
//--------------------------------------------------------------------------
@ -2456,12 +2129,8 @@ MaybeHandle<WasmInstanceObject> InstanceBuilder::Build() {
}
DCHECK(!isolate_->has_pending_exception());
if (FLAG_wasm_jit_to_native) {
TRACE("Successfully built instance %zu\n",
compiled_module_->GetNativeModule()->instance_id);
} else {
TRACE("Finishing instance %d\n", compiled_module_->instance_id());
}
TRACE("Successfully built instance %zu\n",
compiled_module_->GetNativeModule()->instance_id);
TRACE_CHAIN(module_object_->compiled_module());
return instance;
}
@ -2710,9 +2379,6 @@ int InstanceBuilder::ProcessImports(Handle<FixedArray> code_table,
index, module_name, import_name);
return -1;
}
if (!FLAG_wasm_jit_to_native) {
code_table->set(num_imported_functions, *import_code.GetCode());
}
RecordStats(import_code, counters());
num_imported_functions++;
break;
@ -3175,18 +2841,8 @@ void InstanceBuilder::InitializeTables(
CodeSpecialization* code_specialization) {
size_t function_table_count = module_->function_tables.size();
Handle<FixedArray> old_function_tables_gc =
FLAG_wasm_jit_to_native
? Handle<FixedArray>::null()
: handle(compiled_module_->function_tables(), isolate_);
// function_table_count is 0 or 1, so we just create these objects even if not
// needed for native wasm.
// TODO(mtrofin): remove the {..}_gc variables when we don't need
// FLAG_wasm_jit_to_native
Handle<FixedArray> new_function_tables_gc =
isolate_->factory()->NewFixedArray(static_cast<int>(function_table_count),
TENURED);
// These go on the instance.
Handle<FixedArray> rooted_function_tables =
@ -3195,10 +2851,6 @@ void InstanceBuilder::InitializeTables(
instance->set_function_tables(*rooted_function_tables);
if (!FLAG_wasm_jit_to_native) {
DCHECK_EQ(old_function_tables_gc->length(),
new_function_tables_gc->length());
}
for (size_t index = 0; index < function_table_count; ++index) {
WasmIndirectFunctionTable& table = module_->function_tables[index];
TableInstance& table_instance = table_instances_[index];
@ -3250,22 +2902,6 @@ void InstanceBuilder::InitializeTables(
v8::WeakCallbackType::kFinalizer);
rooted_function_tables->set(int_index, *global_func_table);
GlobalHandleAddress new_func_table_addr = global_func_table.address();
GlobalHandleAddress old_func_table_addr;
if (!WASM_CONTEXT_TABLES) {
WasmCompiledModule::SetTableValue(isolate_, new_function_tables_gc,
int_index, new_func_table_addr);
old_func_table_addr =
WasmCompiledModule::GetTableValue(*old_function_tables_gc, int_index);
code_specialization->RelocatePointer(old_func_table_addr,
new_func_table_addr);
}
}
if (!WASM_CONTEXT_TABLES) {
compiled_module_->set_function_tables(*new_function_tables_gc);
}
}
@ -3282,27 +2918,14 @@ void InstanceBuilder::LoadTableSegments(Handle<FixedArray> code_table,
if (compile_lazy(module_)) {
for (auto& table_init : module_->table_inits) {
for (uint32_t func_index : table_init.entries) {
if (!FLAG_wasm_jit_to_native) {
Code* code =
Code::cast(code_table->get(static_cast<int>(func_index)));
// Only increase the counter for lazy compile builtins (it's not
// needed otherwise).
if (code->builtin_index() != Builtins::kWasmCompileLazy) {
DCHECK(code->kind() == Code::WASM_FUNCTION ||
code->kind() == Code::WASM_TO_JS_FUNCTION ||
code->kind() == Code::WASM_TO_WASM_FUNCTION);
continue;
}
} else {
const wasm::WasmCode* code = native_module->GetCode(func_index);
// Only increase the counter for lazy compile builtins (it's not
// needed otherwise).
if (code->kind() != wasm::WasmCode::kLazyStub) {
DCHECK(code->kind() == wasm::WasmCode::kFunction ||
code->kind() == wasm::WasmCode::kWasmToJsWrapper ||
code->kind() == wasm::WasmCode::kWasmToWasmWrapper);
continue;
}
const wasm::WasmCode* code = native_module->GetCode(func_index);
// Only increase the counter for lazy compile builtins (it's not
// needed otherwise).
if (code->kind() != wasm::WasmCode::kLazyStub) {
DCHECK(code->kind() == wasm::WasmCode::kFunction ||
code->kind() == wasm::WasmCode::kWasmToJsWrapper ||
code->kind() == wasm::WasmCode::kWasmToWasmWrapper);
continue;
}
++num_table_exports[func_index];
}
@ -3670,23 +3293,9 @@ class AsyncCompileJob::PrepareAndStartCompile : public CompileStep {
Factory* factory = isolate->factory();
Handle<Code> illegal_builtin = BUILTIN_CODE(isolate, Illegal);
if (!FLAG_wasm_jit_to_native) {
// The {code_table} array contains import wrappers and functions (which
// are both included in {functions.size()}.
// The results of compilation will be written into it.
// Initialize {code_table_} with the illegal builtin. All call sites
// will be patched at instantiation.
int code_table_size = static_cast<int>(module_->functions.size());
job_->code_table_ = factory->NewFixedArray(code_table_size, TENURED);
for (int i = 0, e = module_->num_imported_functions; i < e; ++i) {
job_->code_table_->set(i, *illegal_builtin);
}
} else {
// Just makes it easier to deal with code that wants code_table, while
// we have FLAG_wasm_jit_to_native around.
job_->code_table_ = factory->NewFixedArray(0, TENURED);
}
// Just makes it easier to deal with code that wants code_table, while
// we have FLAG_wasm_jit_to_native around.
job_->code_table_ = factory->NewFixedArray(0, TENURED);
job_->module_env_ =
CreateDefaultModuleEnv(isolate, module_, illegal_builtin);
@ -3871,17 +3480,7 @@ class AsyncCompileJob::ExecuteAndFinishCompilationUnits : public CompileStep {
class AsyncCompileJob::FinishCompile : public CompileStep {
void RunInForeground() override {
TRACE_COMPILE("(5b) Finish compile...\n");
if (FLAG_wasm_jit_to_native) {
RecordStats(job_->compiled_module_->GetNativeModule(), job_->counters());
} else {
// At this point, compilation has completed. Update the code table.
for (int i = FLAG_skip_compiling_wasm_funcs,
e = job_->code_table_->length();
i < e; ++i) {
Object* val = job_->code_table_->get(i);
if (val->IsCode()) RecordStats(Code::cast(val), job_->counters());
}
}
RecordStats(job_->compiled_module_->GetNativeModule(), job_->counters());
// Create heap objects for script and module bytes to be stored in the
// shared module data. Asm.js is not compiled asynchronously.

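The lazy-compilation path above bumps num_table_exports only for table entries whose code is still the kLazyStub; already-compiled functions need no patch-up after lazy compilation. A self-contained sketch of that counting pattern follows; CodeKind, NativeModuleView and CountLazyTableExports are illustrative stand-ins, not V8 API.

#include <cassert>
#include <cstdint>
#include <map>
#include <vector>

// Hypothetical stand-in for the wasm code kinds referenced above.
enum class CodeKind { kFunction, kWasmToJsWrapper, kLazyStub };

// Hypothetical module view: function index -> current code kind.
using NativeModuleView = std::map<uint32_t, CodeKind>;

// Count, per function, how many table entries still point at a lazy stub.
std::map<uint32_t, int> CountLazyTableExports(
    const NativeModuleView& module,
    const std::vector<std::vector<uint32_t>>& table_inits) {
  std::map<uint32_t, int> num_table_exports;
  for (const auto& entries : table_inits) {
    for (uint32_t func_index : entries) {
      CodeKind kind = module.at(func_index);
      // Only count entries that still point at the lazy-compile stub;
      // already-compiled functions need no later patching.
      if (kind != CodeKind::kLazyStub) continue;
      ++num_table_exports[func_index];
    }
  }
  return num_table_exports;
}

int main() {
  NativeModuleView module = {{0, CodeKind::kFunction}, {1, CodeKind::kLazyStub}};
  auto counts = CountLazyTableExports(module, {{0, 1, 1}});
  assert(counts.count(0) == 0);
  assert(counts[1] == 2);
  return 0;
}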

@ -51,7 +51,6 @@ V8_EXPORT_PRIVATE Handle<Script> CreateWasmScript(
// which will be triggered when returning from the runtime function, i.e. the
// Illegal builtin will never be called.
Address CompileLazy(Isolate* isolate);
Handle<Code> CompileLazyOnGCHeap(Isolate* isolate);
// This class orchestrates the lazy compilation of wasm functions. It is
// triggered by the WasmCompileLazy builtin.


@ -70,17 +70,6 @@ class PatchDirectCallsHelper {
const byte* func_bytes;
};
bool IsAtWasmDirectCallTarget(RelocIterator& it) {
DCHECK(RelocInfo::IsCodeTarget(it.rinfo()->rmode()));
Code* code = Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
return code->kind() == Code::WASM_FUNCTION ||
code->kind() == Code::WASM_TO_JS_FUNCTION ||
code->kind() == Code::WASM_TO_WASM_FUNCTION ||
code->kind() == Code::WASM_INTERPRETER_ENTRY ||
code->builtin_index() == Builtins::kIllegal ||
code->builtin_index() == Builtins::kWasmCompileLazy;
}
} // namespace
CodeSpecialization::CodeSpecialization(Isolate* isolate, Zone* zone) {}
@ -117,7 +106,6 @@ bool CodeSpecialization::ApplyToWholeInstance(
DisallowHeapAllocation no_gc;
WasmCompiledModule* compiled_module = instance->compiled_module();
NativeModule* native_module = compiled_module->GetNativeModule();
FixedArray* code_table = compiled_module->code_table();
WasmSharedModuleData* shared = compiled_module->shared();
WasmModule* module = shared->module();
std::vector<WasmFunction>* wasm_functions = &shared->module()->functions;
@ -130,18 +118,11 @@ bool CodeSpecialization::ApplyToWholeInstance(
// Patch all wasm functions.
for (int num_wasm_functions = static_cast<int>(wasm_functions->size());
func_index < num_wasm_functions; ++func_index) {
WasmCodeWrapper wrapper;
if (FLAG_wasm_jit_to_native) {
const WasmCode* wasm_function = native_module->GetCode(func_index);
if (wasm_function->kind() != WasmCode::kFunction) {
continue;
}
wrapper = WasmCodeWrapper(wasm_function);
} else {
Code* wasm_function = Code::cast(code_table->get(func_index));
if (wasm_function->kind() != Code::WASM_FUNCTION) continue;
wrapper = WasmCodeWrapper(handle(wasm_function));
const WasmCode* wasm_function = native_module->GetCode(func_index);
if (wasm_function->kind() != WasmCode::kFunction) {
continue;
}
WasmCodeWrapper wrapper = WasmCodeWrapper(wasm_function);
changed |= ApplyToWasmCode(wrapper, icache_flush_mode);
}
@ -156,9 +137,7 @@ bool CodeSpecialization::ApplyToWholeInstance(
// should match the instance we currently patch (instance).
if (!relocate_direct_calls_instance_.is_null()) {
DCHECK_EQ(instance, *relocate_direct_calls_instance_);
reloc_mode |=
RelocInfo::ModeMask(FLAG_wasm_jit_to_native ? RelocInfo::JS_TO_WASM_CALL
: RelocInfo::CODE_TARGET);
reloc_mode |= RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL);
}
if (!reloc_mode) return changed;
int wrapper_index = 0;
@ -175,20 +154,10 @@ bool CodeSpecialization::ApplyToWholeInstance(
icache_flush_mode);
break;
case RelocInfo::JS_TO_WASM_CALL: {
DCHECK(FLAG_wasm_jit_to_native);
const WasmCode* new_code = native_module->GetCode(exp.index);
it.rinfo()->set_js_to_wasm_address(new_code->instructions().start(),
SKIP_ICACHE_FLUSH);
} break;
case RelocInfo::CODE_TARGET: {
DCHECK(!FLAG_wasm_jit_to_native);
// Ignore calls to other builtins like ToNumber.
if (!IsAtWasmDirectCallTarget(it)) continue;
Code* new_code = Code::cast(code_table->get(exp.index));
it.rinfo()->set_target_address(new_code->instruction_start(),
UPDATE_WRITE_BARRIER,
SKIP_ICACHE_FLUSH);
} break;
default:
UNREACHABLE();
}
@ -241,35 +210,7 @@ bool CodeSpecialization::ApplyToWasmCode(WasmCodeWrapper code,
for (; !it.done(); it.next()) {
RelocInfo::Mode mode = it.rinfo()->rmode();
switch (mode) {
case RelocInfo::CODE_TARGET: {
DCHECK(!FLAG_wasm_jit_to_native);
DCHECK(reloc_direct_calls);
// Skip everything which is not a wasm call (stack checks, traps, ...).
if (!IsAtWasmDirectCallTarget(it)) continue;
// Iterate simultaneously over the relocation information and the source
// position table. For each call in the reloc info, move the source
// position iterator forward to that position to find the byte offset of
// the respective call. Then extract the call index from the module wire
// bytes to find the new compiled function.
size_t offset = it.rinfo()->pc() - code.GetCode()->instruction_start();
if (!patch_direct_calls_helper) {
patch_direct_calls_helper.emplace(*relocate_direct_calls_instance_,
*code.GetCode());
}
int byte_pos = AdvanceSourcePositionTableIterator(
patch_direct_calls_helper->source_pos_it, offset);
int called_func_index = ExtractDirectCallIndex(
patch_direct_calls_helper->decoder,
patch_direct_calls_helper->func_bytes + byte_pos);
FixedArray* code_table =
relocate_direct_calls_instance_->compiled_module()->code_table();
Code* new_code = Code::cast(code_table->get(called_func_index));
it.rinfo()->set_target_address(new_code->instruction_start(),
UPDATE_WRITE_BARRIER, icache_flush_mode);
changed = true;
} break;
case RelocInfo::WASM_CALL: {
DCHECK(FLAG_wasm_jit_to_native);
DCHECK(reloc_direct_calls);
// Iterate simultaneously over the relocation information and the source
// position table. For each call in the reloc info, move the source

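ApplyToWholeInstance above walks the JS_TO_WASM_CALL relocation entries of the export wrappers and retargets each one at the newly compiled wasm code. A minimal sketch of that retargeting over a plain relocation table; RelocEntry and PatchJsToWasmCalls are invented names, and a real implementation would also flush the instruction cache.

#include <cassert>
#include <cstdint>
#include <vector>

using Address = uintptr_t;

// Hypothetical reloc entry: which export index a call site refers to,
// and where the encoded call target currently lives.
struct RelocEntry {
  uint32_t export_index;
  Address* slot;  // location holding the call target
};

// Patch every recorded call site to the new code returned by the lookup.
// Returns true if anything changed.
bool PatchJsToWasmCalls(const std::vector<RelocEntry>& reloc,
                        const std::vector<Address>& new_targets) {
  bool changed = false;
  for (const RelocEntry& entry : reloc) {
    Address new_target = new_targets[entry.export_index];
    if (*entry.slot == new_target) continue;
    *entry.slot = new_target;  // real code would also flush the icache here
    changed = true;
  }
  return changed;
}

int main() {
  Address slot0 = 0x1000, slot1 = 0x2000;
  std::vector<RelocEntry> reloc = {{0, &slot0}, {1, &slot1}};
  std::vector<Address> new_targets = {0x3000, 0x2000};
  assert(PatchJsToWasmCalls(reloc, new_targets));
  assert(slot0 == 0x3000 && slot1 == 0x2000);
  return 0;
}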

@ -15,12 +15,11 @@ namespace internal {
// When constructing, we check the flag. After that, we just
// check using the member.
WasmCodeWrapper::WasmCodeWrapper(Handle<Code> code) {
DCHECK(!FLAG_wasm_jit_to_native);
code_ptr_.code_handle_ = code.location();
UNREACHABLE(); // TODO(mstarzinger): This whole class will be deprecated!
}
WasmCodeWrapper::WasmCodeWrapper(const wasm::WasmCode* code) {
DCHECK(FLAG_wasm_jit_to_native);
code_ptr_.wasm_code_ = code;
}
@ -34,7 +33,7 @@ const wasm::WasmCode* WasmCodeWrapper::GetWasmCode() const {
return code_ptr_.wasm_code_;
}
bool WasmCodeWrapper::IsCodeObject() const { return !FLAG_wasm_jit_to_native; }
bool WasmCodeWrapper::IsCodeObject() const { return false; }
#ifdef ENABLE_DISASSEMBLER
void WasmCodeWrapper::Disassemble(const char* name, Isolate* isolate,


@ -16,7 +16,7 @@ class Code;
struct WasmContext;
class WasmInstanceObject;
// TODO(mtrofin): remove once we remove FLAG_wasm_jit_to_native
// TODO(mstarzinger): remove once we remove FLAG_wasm_jit_to_native
class WasmCodeWrapper {
public:
WasmCodeWrapper() {}


@ -574,20 +574,6 @@ Handle<FixedArray> GetOrCreateInterpretedFunctions(
}
using CodeRelocationMap = std::map<Address, Address>;
using CodeRelocationMapGC =
IdentityMap<Handle<Code>, FreeStoreAllocationPolicy>;
void RedirectCallsitesInCodeGC(Code* code, CodeRelocationMapGC& map) {
DisallowHeapAllocation no_gc;
for (RelocIterator it(code, RelocInfo::kCodeTargetMask); !it.done();
it.next()) {
DCHECK(RelocInfo::IsCodeTarget(it.rinfo()->rmode()));
Code* target = Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
Handle<Code>* new_target = map.Find(target);
if (!new_target) continue;
it.rinfo()->set_target_address((*new_target)->instruction_start());
}
}
void RedirectCallsitesInCode(Isolate* isolate, const wasm::WasmCode* code,
CodeRelocationMap* map) {
@ -615,28 +601,6 @@ void RedirectCallsitesInJSWrapperCode(Isolate* isolate, Code* code,
}
}
void RedirectCallsitesInInstanceGC(Isolate* isolate,
WasmInstanceObject* instance,
CodeRelocationMapGC& map) {
DisallowHeapAllocation no_gc;
// Redirect all calls in wasm functions.
FixedArray* code_table = instance->compiled_module()->code_table();
for (int i = 0, e = GetNumFunctions(instance); i < e; ++i) {
RedirectCallsitesInCodeGC(Code::cast(code_table->get(i)), map);
}
// TODO(6668): Find instances that imported our code and also patch those.
// Redirect all calls in exported functions.
FixedArray* weak_exported_functions =
instance->compiled_module()->weak_exported_functions();
for (int i = 0, e = weak_exported_functions->length(); i != e; ++i) {
WeakCell* weak_function = WeakCell::cast(weak_exported_functions->get(i));
if (weak_function->cleared()) continue;
Code* code = JSFunction::cast(weak_function->value())->code();
RedirectCallsitesInCodeGC(code, map);
}
}
void RedirectCallsitesInInstance(Isolate* isolate, WasmInstanceObject* instance,
CodeRelocationMap* map) {
DisallowHeapAllocation no_gc;
@ -707,7 +671,6 @@ void WasmDebugInfo::RedirectToInterpreter(Handle<WasmDebugInfo> debug_info,
Handle<FixedArray> code_table(instance->compiled_module()->code_table(),
isolate);
CodeRelocationMapGC code_to_relocate_gc(isolate->heap());
// We may modify js wrappers, as well as wasm functions. Hence the 2
// modification scopes.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
@ -721,30 +684,19 @@ void WasmDebugInfo::RedirectToInterpreter(Handle<WasmDebugInfo> debug_info,
Handle<Code> new_code = compiler::CompileWasmInterpreterEntry(
isolate, func_index, module->functions[func_index].sig, instance);
if (FLAG_wasm_jit_to_native) {
const wasm::WasmCode* wasm_new_code =
native_module->AddInterpreterWrapper(new_code, func_index);
const wasm::WasmCode* old_code =
native_module->GetCode(static_cast<uint32_t>(func_index));
Handle<Foreign> foreign_holder = isolate->factory()->NewForeign(
wasm_new_code->instructions().start(), TENURED);
interpreted_functions->set(func_index, *foreign_holder);
DCHECK_EQ(0, code_to_relocate.count(old_code->instructions().start()));
code_to_relocate.insert(
std::make_pair(old_code->instructions().start(),
wasm_new_code->instructions().start()));
} else {
Code* old_code = Code::cast(code_table->get(func_index));
interpreted_functions->set(func_index, *new_code);
DCHECK_NULL(code_to_relocate_gc.Find(old_code));
code_to_relocate_gc.Set(old_code, new_code);
}
}
if (FLAG_wasm_jit_to_native) {
RedirectCallsitesInInstance(isolate, *instance, &code_to_relocate);
} else {
RedirectCallsitesInInstanceGC(isolate, *instance, code_to_relocate_gc);
const wasm::WasmCode* wasm_new_code =
native_module->AddInterpreterWrapper(new_code, func_index);
const wasm::WasmCode* old_code =
native_module->GetCode(static_cast<uint32_t>(func_index));
Handle<Foreign> foreign_holder = isolate->factory()->NewForeign(
wasm_new_code->instructions().start(), TENURED);
interpreted_functions->set(func_index, *foreign_holder);
DCHECK_EQ(0, code_to_relocate.count(old_code->instructions().start()));
code_to_relocate.insert(
std::make_pair(old_code->instructions().start(),
wasm_new_code->instructions().start()));
}
RedirectCallsitesInInstance(isolate, *instance, &code_to_relocate);
}
void WasmDebugInfo::PrepareStep(StepAction step_action) {

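RedirectToInterpreter above works in two phases: it records a map from each function's old instruction start to its freshly compiled interpreter-entry wrapper, then rewrites every call site whose target appears in that map. A standalone sketch of the pattern; CallSite, RecordRedirect and RedirectCallSites are hypothetical names.

#include <cassert>
#include <cstdint>
#include <map>
#include <vector>

using Address = uintptr_t;
using CodeRelocationMap = std::map<Address, Address>;

struct CallSite {
  Address target;  // current call target (an instruction start)
};

// Phase 1: record "old entry point -> interpreter entry point".
void RecordRedirect(CodeRelocationMap* map, Address old_start,
                    Address new_start) {
  assert(map->count(old_start) == 0);  // each function is redirected once
  map->emplace(old_start, new_start);
}

// Phase 2: rewrite every call site whose target is in the map.
void RedirectCallSites(std::vector<CallSite>* sites,
                       const CodeRelocationMap& map) {
  for (CallSite& site : *sites) {
    auto it = map.find(site.target);
    if (it != map.end()) site.target = it->second;
  }
}

int main() {
  CodeRelocationMap map;
  RecordRedirect(&map, 0x100, 0x900);
  std::vector<CallSite> sites = {{0x100}, {0x200}};
  RedirectCallSites(&sites, map);
  assert(sites[0].target == 0x900);
  assert(sites[1].target == 0x200);
  return 0;
}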

@ -2587,17 +2587,10 @@ class ThreadImpl {
Isolate* isolate = codemap()->instance()->GetIsolate();
HandleScope handle_scope(isolate);
if (FLAG_wasm_jit_to_native) {
const wasm::WasmCode* target =
codemap()->GetImportedFunction(function_index);
return CallWasmCode(isolate, target,
codemap()->module()->functions[function_index].sig);
} else {
Handle<Code> target(codemap()->GetImportedFunctionGC(function_index),
isolate);
return CallCodeObject(isolate, target,
codemap()->module()->functions[function_index].sig);
}
const wasm::WasmCode* target =
codemap()->GetImportedFunction(function_index);
return CallWasmCode(isolate, target,
codemap()->module()->functions[function_index].sig);
}
ExternalCallResult CallIndirectFunction(uint32_t table_index,
@ -2627,7 +2620,6 @@ class ThreadImpl {
Isolate* isolate = compiled_module->GetIsolate();
const wasm::WasmCode* target = nullptr;
Code* target_gc = nullptr;
{
DisallowHeapAllocation no_gc;
// Get function to be called directly from the live instance to see latest
@ -2660,10 +2652,6 @@ class ThreadImpl {
if (static_cast<uint32_t>(found_sig) != canonical_sig_index) {
return {ExternalCallResult::SIGNATURE_MISMATCH};
}
// Get code object.
target_gc = Code::cast(fun_table->get(
compiler::FunctionTableCodeOffset(static_cast<int>(entry_index))));
} else {
// The function table is stored in the wasm context.
// TODO(wasm): the wasm interpreter currently supports only one table.
@ -2692,11 +2680,7 @@ class ThreadImpl {
// accumulating handles in the outer scope.
HandleScope handle_scope(isolate);
FunctionSig* signature = module()->signatures[sig_index];
if (FLAG_wasm_jit_to_native) {
return CallWasmCode(isolate, target, signature);
} else {
return CallCodeObject(isolate, handle(target_gc, isolate), signature);
}
return CallWasmCode(isolate, target, signature);
}
inline Activation current_activation() {

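CallIndirectFunction above resolves the entry from the context-held function table and bails out with SIGNATURE_MISMATCH when the canonical signature index does not match, before any call is made. A small sketch of that check with a flat entry struct; IndirectEntry and CallIndirect are illustrative, not the interpreter's real layout.

#include <cassert>
#include <cstdint>
#include <vector>

enum class CallResult { kOk, kInvalidFunc, kSignatureMismatch };

// Hypothetical indirect-function-table entry: canonical signature id plus
// the address of the code to call.
struct IndirectEntry {
  uint32_t canonical_sig_index;
  uintptr_t target;
};

CallResult CallIndirect(const std::vector<IndirectEntry>& table,
                        uint32_t entry_index, uint32_t expected_sig,
                        uintptr_t* out_target) {
  if (entry_index >= table.size()) return CallResult::kInvalidFunc;
  const IndirectEntry& entry = table[entry_index];
  // The signature check happens before dispatching to the target.
  if (entry.canonical_sig_index != expected_sig)
    return CallResult::kSignatureMismatch;
  *out_target = entry.target;  // real code would now call into the wasm code
  return CallResult::kOk;
}

int main() {
  std::vector<IndirectEntry> table = {{7, 0x1000}, {9, 0x2000}};
  uintptr_t target = 0;
  assert(CallIndirect(table, 0, 7, &target) == CallResult::kOk);
  assert(target == 0x1000);
  assert(CallIndirect(table, 1, 7, &target) == CallResult::kSignatureMismatch);
  assert(CallIndirect(table, 5, 7, &target) == CallResult::kInvalidFunc);
  return 0;
}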

@ -39,60 +39,6 @@ constexpr const char* WasmException::kRuntimeIdStr;
// static
constexpr const char* WasmException::kRuntimeValuesStr;
void UnpackAndRegisterProtectedInstructionsGC(Isolate* isolate,
Handle<FixedArray> code_table) {
DisallowHeapAllocation no_gc;
std::vector<trap_handler::ProtectedInstructionData> unpacked;
for (int i = 0; i < code_table->length(); ++i) {
Object* maybe_code = code_table->get(i);
// This is sometimes undefined when we're called from cctests.
if (maybe_code->IsUndefined(isolate)) continue;
Code* code = Code::cast(maybe_code);
if (code->kind() != Code::WASM_FUNCTION) {
continue;
}
if (code->trap_handler_index()->value() != trap_handler::kInvalidIndex) {
// This function has already been registered.
continue;
}
byte* base = code->entry();
FixedArray* protected_instructions = code->protected_instructions();
DCHECK(protected_instructions != nullptr);
for (int i = 0; i < protected_instructions->length();
i += Code::kTrapDataSize) {
trap_handler::ProtectedInstructionData data;
data.instr_offset =
protected_instructions
->GetValueChecked<Smi>(isolate, i + Code::kTrapCodeOffset)
->value();
data.landing_offset =
protected_instructions
->GetValueChecked<Smi>(isolate, i + Code::kTrapLandingOffset)
->value();
unpacked.emplace_back(data);
}
if (unpacked.empty()) continue;
const int index = RegisterHandlerData(base, code->instruction_size(),
unpacked.size(), &unpacked[0]);
unpacked.clear();
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
// TODO(eholk): if index is negative, fail.
DCHECK_LE(0, index);
code->set_trap_handler_index(Smi::FromInt(index));
}
}
void UnpackAndRegisterProtectedInstructions(
Isolate* isolate, const wasm::NativeModule* native_module) {
DisallowHeapAllocation no_gc;

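The surviving UnpackAndRegisterProtectedInstructions variant gathers each function's {instruction offset, landing offset} pairs and registers them with the trap handler in one batch, skipping functions that have none. A sketch of that batching; RegisterHandlerDataStub stands in for the real trap-handler registration call.

#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

// Hypothetical mirror of trap_handler::ProtectedInstructionData.
struct ProtectedInstructionData {
  uint32_t instr_offset;    // offset of the potentially faulting instruction
  uint32_t landing_offset;  // offset of its out-of-line landing pad
};

// Stub for the trap-handler registration call; returns a handler index.
static std::vector<std::vector<ProtectedInstructionData>> g_registered;
int RegisterHandlerDataStub(const std::vector<ProtectedInstructionData>& data) {
  g_registered.push_back(data);
  return static_cast<int>(g_registered.size()) - 1;
}

// Register one function's protected instructions, skipping functions that
// have none (mirroring the "if (unpacked.empty()) continue" path above).
int RegisterProtectedInstructions(
    const std::vector<std::pair<uint32_t, uint32_t>>& raw_pairs) {
  std::vector<ProtectedInstructionData> unpacked;
  for (const auto& pair : raw_pairs) {
    unpacked.push_back({pair.first, pair.second});
  }
  if (unpacked.empty()) return -1;  // nothing to protect
  return RegisterHandlerDataStub(unpacked);
}

int main() {
  assert(RegisterProtectedInstructions({}) == -1);
  int index = RegisterProtectedInstructions({{0x10, 0x40}, {0x24, 0x60}});
  assert(index == 0);
  assert(g_registered[index].size() == 2);
  return 0;
}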

@ -277,9 +277,6 @@ Handle<Object> GetOrCreateIndirectCallWrapper(
Isolate* isolate, Handle<WasmInstanceObject> owning_instance,
WasmCodeWrapper wasm_code, uint32_t func_index, FunctionSig* sig);
void UnpackAndRegisterProtectedInstructionsGC(Isolate* isolate,
Handle<FixedArray> code_table);
void UnpackAndRegisterProtectedInstructions(
Isolate* isolate, const wasm::NativeModule* native_module);


@ -143,9 +143,6 @@ WCM_OBJECT(FixedArray, function_tables, kFunctionTablesOffset)
WCM_OBJECT(FixedArray, empty_function_tables, kEmptyFunctionTablesOffset)
ACCESSORS(WasmCompiledModule, raw_next_instance, Object, kNextInstanceOffset);
ACCESSORS(WasmCompiledModule, raw_prev_instance, Object, kPrevInstanceOffset);
#ifdef DEBUG
WCM_SMALL_CONST_NUMBER(uint32_t, instance_id, kInstanceIdOffset);
#endif
#undef WCM_OBJECT_OR_WEAK
#undef WCM_OBJECT


@ -671,20 +671,6 @@ WasmInstanceObject* WasmInstanceObject::GetOwningInstance(
return WasmInstanceObject::cast(cell->value());
}
WasmInstanceObject* WasmInstanceObject::GetOwningInstanceGC(Code* code) {
DisallowHeapAllocation no_gc;
DCHECK(code->kind() == Code::WASM_FUNCTION ||
code->kind() == Code::WASM_INTERPRETER_ENTRY);
FixedArray* deopt_data = code->deoptimization_data();
DCHECK_EQ(code->kind() == Code::WASM_INTERPRETER_ENTRY ? 1 : 2,
deopt_data->length());
Object* weak_link = deopt_data->get(0);
DCHECK(weak_link->IsWeakCell());
WeakCell* cell = WeakCell::cast(weak_link);
if (cell->cleared()) return nullptr;
return WasmInstanceObject::cast(cell->value());
}
void WasmInstanceObject::ValidateInstancesChainForTesting(
Isolate* isolate, Handle<WasmModuleObject> module_obj, int instance_count) {
CHECK_GE(instance_count, 0);
@ -729,30 +715,10 @@ void InstanceFinalizer(const v8::WeakCallbackInfo<void>& data) {
// instances before the instance is destroyed.
WasmCompiledModule* compiled_module = owner->compiled_module();
wasm::NativeModule* native_module = compiled_module->GetNativeModule();
if (FLAG_wasm_jit_to_native) {
if (native_module) {
TRACE("Finalizing %zu {\n", native_module->instance_id);
} else {
TRACE("Finalized already cleaned up compiled module\n");
}
if (native_module) {
TRACE("Finalizing %zu {\n", native_module->instance_id);
} else {
TRACE("Finalizing %d {\n", compiled_module->instance_id());
if (compiled_module->use_trap_handler()) {
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
DisallowHeapAllocation no_gc;
FixedArray* code_table = compiled_module->code_table();
for (int i = 0; i < code_table->length(); ++i) {
Code* code = Code::cast(code_table->get(i));
int index = code->trap_handler_index()->value();
if (index >= 0) {
trap_handler::ReleaseHandlerData(index);
code->set_trap_handler_index(
Smi::FromInt(trap_handler::kInvalidIndex));
}
}
}
TRACE("Finalized already cleaned up compiled module\n");
}
WeakCell* weak_wasm_module = compiled_module->weak_wasm_module();
@ -880,44 +846,18 @@ WasmCodeWrapper WasmExportedFunction::GetWasmCode() {
DisallowHeapAllocation no_gc;
Handle<Code> export_wrapper_code = handle(this->code());
DCHECK_EQ(export_wrapper_code->kind(), Code::JS_TO_WASM_FUNCTION);
int mask =
RelocInfo::ModeMask(FLAG_wasm_jit_to_native ? RelocInfo::JS_TO_WASM_CALL
: RelocInfo::CODE_TARGET);
auto IsWasmFunctionCode = [](Code* code) {
return code->kind() == Code::WASM_FUNCTION ||
code->kind() == Code::WASM_TO_JS_FUNCTION ||
code->kind() == Code::WASM_TO_WASM_FUNCTION ||
code->kind() == Code::WASM_INTERPRETER_ENTRY ||
code->builtin_index() == Builtins::kWasmCompileLazy;
};
for (RelocIterator it(*export_wrapper_code, mask);; it.next()) {
DCHECK(!it.done());
WasmCodeWrapper target;
if (FLAG_wasm_jit_to_native) {
target = WasmCodeWrapper(
GetIsolate()->wasm_engine()->code_manager()->LookupCode(
it.rinfo()->js_to_wasm_address()));
} else {
Code* code = Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
if (!IsWasmFunctionCode(code)) continue;
target = WasmCodeWrapper(handle(code));
}
int mask = RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL);
RelocIterator it(*export_wrapper_code, mask);
DCHECK(!it.done());
WasmCodeWrapper target =
WasmCodeWrapper(GetIsolate()->wasm_engine()->code_manager()->LookupCode(
it.rinfo()->js_to_wasm_address()));
// There should only be this one call to wasm code.
#ifdef DEBUG
for (it.next(); !it.done(); it.next()) {
if (FLAG_wasm_jit_to_native) {
UNREACHABLE();
} else {
Code* code =
Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
DCHECK(!IsWasmFunctionCode(code));
}
}
it.next();
#endif
return target;
}
UNREACHABLE();
DCHECK(it.done());
return target;
}
WasmModule* WasmSharedModuleData::module() const {
@ -956,46 +896,6 @@ bool WasmSharedModuleData::is_asm_js() {
return asm_js;
}
void WasmSharedModuleData::ReinitializeAfterDeserialization(
Isolate* isolate, Handle<WasmSharedModuleData> shared) {
DCHECK(shared->module_wrapper()->IsUndefined(isolate));
#ifdef DEBUG
// No BreakpointInfo objects should survive deserialization.
if (shared->has_breakpoint_infos()) {
for (int i = 0, e = shared->breakpoint_infos()->length(); i < e; ++i) {
DCHECK(shared->breakpoint_infos()->get(i)->IsUndefined(isolate));
}
}
#endif
shared->reset_breakpoint_infos();
WasmModule* module = nullptr;
{
// We parse the module again directly from the module bytes, so
// the underlying storage must not be moved meanwhile.
DisallowHeapAllocation no_allocation;
SeqOneByteString* module_bytes = shared->module_bytes();
const byte* start =
reinterpret_cast<const byte*>(module_bytes->GetCharsAddress());
const byte* end = start + module_bytes->length();
// TODO(titzer): remember the module origin in the compiled_module
// For now, we assume serialized modules did not originate from asm.js.
wasm::ModuleResult result =
SyncDecodeWasmModule(isolate, start, end, false, wasm::kWasmOrigin);
CHECK(result.ok());
CHECK_NOT_NULL(result.val);
// Take ownership of the WasmModule and immediately transfer it to the
// WasmModuleWrapper below.
module = result.val.release();
}
Handle<wasm::WasmModuleWrapper> module_wrapper =
wasm::WasmModuleWrapper::From(isolate, module);
shared->set_module_wrapper(*module_wrapper);
}
namespace {
int GetBreakpointPos(Isolate* isolate, Object* break_point_info_or_undef) {
@ -1371,54 +1271,34 @@ Handle<WasmCompiledModule> WasmCompiledModule::New(
isolate->factory()->NewWeakCell(isolate->native_context());
compiled_module->set_weak_native_context(*weak_native_context);
compiled_module->set_use_trap_handler(use_trap_handler);
if (!FLAG_wasm_jit_to_native) {
compiled_module->InitId();
compiled_module->set_code_table(*code_table);
if (!export_wrappers.is_null()) {
compiled_module->set_export_wrappers(*export_wrappers);
int num_function_tables = static_cast<int>(function_tables.size());
if (num_function_tables > 0) {
Handle<FixedArray> ft =
isolate->factory()->NewFixedArray(num_function_tables, TENURED);
for (int i = 0; i < num_function_tables; ++i) {
SetTableValue(isolate, ft, i, function_tables[i]);
}
// TODO(wasm): setting the empty tables here this way is OK under the
// assumption that we compile and then instantiate. It needs rework if we
// do direct instantiation. The empty tables are used as a default when
// resetting the compiled module.
compiled_module->set_function_tables(*ft);
compiled_module->set_empty_function_tables(*ft);
}
} else {
if (!export_wrappers.is_null()) {
compiled_module->set_export_wrappers(*export_wrappers);
}
wasm::NativeModule* native_module = nullptr;
{
std::unique_ptr<wasm::NativeModule> native_module_ptr =
isolate->wasm_engine()->code_manager()->NewNativeModule(*module);
native_module = native_module_ptr.release();
Handle<Foreign> native_module_wrapper =
Managed<wasm::NativeModule>::From(isolate, native_module);
compiled_module->set_native_module(*native_module_wrapper);
Handle<WasmCompiledModule> weak_link =
isolate->global_handles()->Create(*compiled_module);
GlobalHandles::MakeWeak(Handle<Object>::cast(weak_link).location(),
Handle<Object>::cast(weak_link).location(),
&CompiledModuleFinalizer,
v8::WeakCallbackType::kFinalizer);
compiled_module->GetNativeModule()->SetCompiledModule(weak_link);
}
// This is here just because it's easier for APIs that need to work with
// either code_table or native_module. Otherwise we need to check if
// has_code_table and pass undefined.
compiled_module->set_code_table(*code_table);
int function_count = static_cast<int>(module->functions.size());
Handle<FixedArray> source_positions =
isolate->factory()->NewFixedArray(function_count, TENURED);
compiled_module->set_source_positions(*source_positions);
}
wasm::NativeModule* native_module = nullptr;
{
std::unique_ptr<wasm::NativeModule> native_module_ptr =
isolate->wasm_engine()->code_manager()->NewNativeModule(*module);
native_module = native_module_ptr.release();
Handle<Foreign> native_module_wrapper =
Managed<wasm::NativeModule>::From(isolate, native_module);
compiled_module->set_native_module(*native_module_wrapper);
Handle<WasmCompiledModule> weak_link =
isolate->global_handles()->Create(*compiled_module);
GlobalHandles::MakeWeak(Handle<Object>::cast(weak_link).location(),
Handle<Object>::cast(weak_link).location(),
&CompiledModuleFinalizer,
v8::WeakCallbackType::kFinalizer);
compiled_module->GetNativeModule()->SetCompiledModule(weak_link);
}
// This is here just because it's easier for APIs that need to work with
// either code_table or native_module. Otherwise we need to check if
// has_code_table and pass undefined.
compiled_module->set_code_table(*code_table);
int function_count = static_cast<int>(module->functions.size());
Handle<FixedArray> source_positions =
isolate->factory()->NewFixedArray(function_count, TENURED);
compiled_module->set_source_positions(*source_positions);
// TODO(mtrofin): copy the rest of the specialization parameters over.
// We're currently OK because we're only using defaults.
return compiled_module;
@ -1427,20 +1307,14 @@ Handle<WasmCompiledModule> WasmCompiledModule::New(
Handle<WasmCompiledModule> WasmCompiledModule::Clone(
Isolate* isolate, Handle<WasmCompiledModule> module) {
Handle<FixedArray> code_copy;
if (!FLAG_wasm_jit_to_native) {
code_copy = isolate->factory()->CopyFixedArray(
handle(module->code_table(), isolate));
}
Handle<WasmCompiledModule> ret = Handle<WasmCompiledModule>::cast(
isolate->factory()->NewStruct(WASM_COMPILED_MODULE_TYPE, TENURED));
ret->set_shared(module->shared());
ret->set_weak_native_context(module->weak_native_context());
ret->set_export_wrappers(module->export_wrappers());
ret->set_weak_wasm_module(module->weak_wasm_module());
if (FLAG_wasm_jit_to_native) {
ret->set_source_positions(module->source_positions());
ret->set_native_module(module->native_module());
}
ret->set_source_positions(module->source_positions());
ret->set_native_module(module->native_module());
if (module->has_lazy_compile_data()) {
ret->set_lazy_compile_data(module->lazy_compile_data());
}
@ -1452,11 +1326,6 @@ Handle<WasmCompiledModule> WasmCompiledModule::Clone(
if (module->has_empty_function_tables()) {
ret->set_empty_function_tables(module->empty_function_tables());
}
if (!FLAG_wasm_jit_to_native) {
ret->InitId();
ret->set_code_table(*code_copy);
return ret;
}
Handle<FixedArray> export_copy = isolate->factory()->CopyFixedArray(
handle(module->export_wrappers(), isolate));
@ -1511,14 +1380,6 @@ wasm::NativeModule* WasmCompiledModule::GetNativeModule() const {
return Managed<wasm::NativeModule>::cast(native_module())->get();
}
void WasmCompiledModule::InitId() {
#if DEBUG
static uint32_t instance_id_counter = 0;
set_instance_id(instance_id_counter++);
TRACE("New compiled module id: %d\n", instance_id());
#endif
}
void WasmCompiledModule::Reset(Isolate* isolate,
WasmCompiledModule* compiled_module) {
DisallowHeapAllocation no_gc;
@ -1602,11 +1463,7 @@ void WasmCompiledModule::PrintInstancesChain() {
#if DEBUG
if (!FLAG_trace_wasm_instances) return;
for (WasmCompiledModule* current = this; current != nullptr;) {
if (FLAG_wasm_jit_to_native) {
PrintF("->%zu", current->GetNativeModule()->instance_id);
} else {
PrintF("->%d", current->instance_id());
}
PrintF("->%zu", current->GetNativeModule()->instance_id);
if (!current->has_next_instance()) break;
current = current->next_instance();
}
@ -1648,9 +1505,6 @@ void WasmCompiledModule::ReinitializeAfterDeserialization(
// At this point, no module wrapper exists, so the shared module data is
// incomplete.
Handle<WasmSharedModuleData> shared(compiled_module->shared(), isolate);
if (!FLAG_wasm_jit_to_native) {
WasmSharedModuleData::ReinitializeAfterDeserialization(isolate, shared);
}
size_t function_table_count =
compiled_module->shared()->module()->function_tables.size();

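GetWasmCode above relies on a JS-to-wasm wrapper containing exactly one JS_TO_WASM_CALL site: it takes the first match and, in debug builds, verifies no second one exists. A sketch of that unique-target lookup over a plain relocation list; Reloc and GetWasmCallTarget are hypothetical.

#include <cassert>
#include <cstdint>
#include <vector>

enum class RelocMode { kJsToWasmCall, kOther };

struct Reloc {
  RelocMode mode;
  uintptr_t target;
};

// Return the unique wasm call target embedded in an export wrapper.
uintptr_t GetWasmCallTarget(const std::vector<Reloc>& relocs) {
  const Reloc* found = nullptr;
  for (const Reloc& r : relocs) {
    if (r.mode != RelocMode::kJsToWasmCall) continue;
    assert(found == nullptr);  // there should be exactly one such call
    found = &r;
  }
  assert(found != nullptr);
  return found->target;
}

int main() {
  std::vector<Reloc> relocs = {{RelocMode::kOther, 0},
                               {RelocMode::kJsToWasmCall, 0xABC0}};
  assert(GetWasmCallTarget(relocs) == 0xABC0);
  return 0;
}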

@ -38,7 +38,8 @@ class WasmCompiledModule;
class WasmDebugInfo;
class WasmInstanceObject;
#define WASM_CONTEXT_TABLES FLAG_wasm_jit_to_native
// TODO(mstarzinger): Remove this macro!
#define WASM_CONTEXT_TABLES true
#define DECL_OPTIONAL_ACCESSORS(name, type) \
INLINE(bool has_##name()); \
@ -244,7 +245,6 @@ class WasmInstanceObject : public JSObject {
// instance. Intended to be called from runtime functions. Returns nullptr on
// failing to get owning instance.
static WasmInstanceObject* GetOwningInstance(const wasm::WasmCode* code);
static WasmInstanceObject* GetOwningInstanceGC(Code* code);
static void ValidateInstancesChainForTesting(
Isolate* isolate, Handle<WasmModuleObject> module_obj,
@ -309,9 +309,6 @@ class WasmSharedModuleData : public Struct {
// Check whether this module was generated from asm.js source.
bool is_asm_js();
static void ReinitializeAfterDeserialization(Isolate*,
Handle<WasmSharedModuleData>);
static void AddBreakpoint(Handle<WasmSharedModuleData>, int position,
Handle<BreakPoint> break_point);
@ -443,7 +440,6 @@ class WasmCompiledModule : public Struct {
V(kCodeTableOffset, kPointerSize) \
V(kFunctionTablesOffset, kPointerSize) \
V(kEmptyFunctionTablesOffset, kPointerSize) \
V(kInstanceIdOffset, kPointerSize) \
V(kSize, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize,
@ -498,15 +494,6 @@ class WasmCompiledModule : public Struct {
WCM_OBJECT(FixedArray, function_tables)
WCM_CONST_OBJECT(FixedArray, empty_function_tables)
public:
// TODO(mtrofin): this is unnecessary when we stop needing
// FLAG_wasm_jit_to_native, because we have instance_id on NativeModule.
#if DEBUG
WCM_SMALL_CONST_NUMBER(uint32_t, instance_id)
#else
uint32_t instance_id() const { return static_cast<uint32_t>(-1); }
#endif
public:
static Handle<WasmCompiledModule> New(
Isolate* isolate, wasm::WasmModule* module, Handle<FixedArray> code_table,
@ -543,7 +530,7 @@ class WasmCompiledModule : public Struct {
inline void ReplaceCodeTableForTesting(
std::vector<wasm::WasmCode*>&& testing_table);
// TODO(mtrofin): following 4 unnecessary after we're done with
// TODO(mstarzinger): following 4 unnecessary after we're done with
// FLAG_wasm_jit_to_native
static void SetTableValue(Isolate* isolate, Handle<FixedArray> table,
int index, Address value);
@ -554,8 +541,6 @@ class WasmCompiledModule : public Struct {
void LogWasmCodes(Isolate* isolate);
private:
void InitId();
DISALLOW_IMPLICIT_CONSTRUCTORS(WasmCompiledModule);
};


@ -460,14 +460,6 @@ size_t NativeModuleSerializer::Write(Vector<byte> dest) {
// static
std::pair<std::unique_ptr<const byte[]>, size_t> SerializeNativeModule(
Isolate* isolate, Handle<WasmCompiledModule> compiled_module) {
if (!FLAG_wasm_jit_to_native) {
std::unique_ptr<ScriptData> script_data =
WasmCompiledModuleSerializer::SerializeWasmModule(isolate,
compiled_module);
script_data->ReleaseDataOwnership();
size_t size = static_cast<size_t>(script_data->length());
return {std::unique_ptr<const byte[]>(script_data->data()), size};
}
NativeModule* native_module = compiled_module->GetNativeModule();
NativeModuleSerializer serializer(isolate, native_module);
size_t version_size = kVersionSize;
@ -630,11 +622,6 @@ Address NativeModuleDeserializer::GetTrampolineOrStubFromTag(uint32_t tag) {
MaybeHandle<WasmCompiledModule> DeserializeNativeModule(
Isolate* isolate, Vector<const byte> data, Vector<const byte> wire_bytes) {
if (!FLAG_wasm_jit_to_native) {
ScriptData script_data(data.start(), data.length());
return WasmCompiledModuleSerializer::DeserializeWasmModule(
isolate, &script_data, wire_bytes);
}
if (!IsWasmCodegenAllowed(isolate, isolate->native_context())) {
return {};
}

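Serialization now always goes through the native-module serializer, which frames the payload behind a fixed-size version header and rejects mismatching versions on deserialization. A toy sketch of that framing; the single uint32_t version header here is invented for illustration and far simpler than the real header.

#include <cassert>
#include <cstdint>
#include <cstring>
#include <vector>

constexpr uint32_t kVersion = 1;
constexpr size_t kVersionSize = sizeof(uint32_t);

std::vector<uint8_t> Serialize(const std::vector<uint8_t>& payload) {
  std::vector<uint8_t> out(kVersionSize + payload.size());
  std::memcpy(out.data(), &kVersion, kVersionSize);
  std::memcpy(out.data() + kVersionSize, payload.data(), payload.size());
  return out;
}

// Returns false (and leaves *payload empty) on a version mismatch,
// mirroring the "return {}" failure path of deserialization.
bool Deserialize(const std::vector<uint8_t>& data,
                 std::vector<uint8_t>* payload) {
  payload->clear();
  if (data.size() < kVersionSize) return false;
  uint32_t version;
  std::memcpy(&version, data.data(), kVersionSize);
  if (version != kVersion) return false;
  payload->assign(data.begin() + kVersionSize, data.end());
  return true;
}

int main() {
  std::vector<uint8_t> payload = {1, 2, 3};
  std::vector<uint8_t> bytes = Serialize(payload);
  std::vector<uint8_t> round_trip;
  assert(Deserialize(bytes, &round_trip) && round_trip == payload);
  bytes[0] ^= 0xFF;  // corrupt the version header
  assert(!Deserialize(bytes, &round_trip));
  return 0;
}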

@ -63,18 +63,14 @@ class CWasmEntryArgTester {
isolate_);
CHECK(!buffer_obj->IsHeapObject());
Handle<Object> call_args[]{
(FLAG_wasm_jit_to_native
? Handle<Object>::cast(isolate_->factory()->NewForeign(
wasm_code_.GetWasmCode()->instructions().start(), TENURED))
: Handle<Object>::cast(wasm_code_.GetCode())),
Handle<Object>::cast(isolate_->factory()->NewForeign(
wasm_code_.GetWasmCode()->instructions().start(), TENURED)),
handle(reinterpret_cast<Object*>(wasm_code_.wasm_context()), isolate_),
buffer_obj};
static_assert(
arraysize(call_args) == compiler::CWasmEntryParameters::kNumParameters,
"adapt this test");
if (FLAG_wasm_jit_to_native) {
wasm_code_.GetWasmCode()->owner()->SetExecutable(true);
}
wasm_code_.GetWasmCode()->owner()->SetExecutable(true);
MaybeHandle<Object> return_obj = Execution::Call(
isolate_, c_wasm_entry_fn_, receiver, arraysize(call_args), call_args);
CHECK(!return_obj.is_null());


@ -258,7 +258,7 @@ class WasmSerializationTest {
uint32_t* slot = reinterpret_cast<uint32_t*>(
const_cast<uint8_t*>(serialized_bytes_.first) +
SerializedCodeData::kPayloadLengthOffset);
*slot = FLAG_wasm_jit_to_native ? 0u : 0xFEFEFEFEu;
*slot = 0u;
}
v8::MaybeLocal<v8::WasmCompiledModule> Deserialize() {


@ -72,9 +72,7 @@ uint32_t TestingModuleBuilder::AddFunction(FunctionSig* sig, const char* name) {
test_module_.functions.reserve(kMaxFunctions);
}
uint32_t index = static_cast<uint32_t>(test_module_.functions.size());
if (FLAG_wasm_jit_to_native) {
native_module_->ResizeCodeTableForTest(index);
}
native_module_->ResizeCodeTableForTest(index);
test_module_.functions.push_back(
{sig, index, 0, {0, 0}, {0, 0}, false, false});
if (name) {
@ -101,21 +99,15 @@ uint32_t TestingModuleBuilder::AddJsFunction(
Handle<Code> code = compiler::CompileWasmToJSWrapper(
isolate_, jsfunc, sig, index, test_module_.origin(),
trap_handler::IsTrapHandlerEnabled(), js_imports_table);
if (FLAG_wasm_jit_to_native) {
native_module_->ResizeCodeTableForTest(index);
native_module_->AddCodeCopy(code, wasm::WasmCode::kWasmToJsWrapper, index);
} else {
function_code_[index] = code;
}
native_module_->ResizeCodeTableForTest(index);
native_module_->AddCodeCopy(code, wasm::WasmCode::kWasmToJsWrapper, index);
return index;
}
Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) {
// Wrap the code so it can be called as a JS function.
Link();
WasmCodeWrapper code = FLAG_wasm_jit_to_native
? WasmCodeWrapper(native_module_->GetCode(index))
: WasmCodeWrapper(function_code_[index]);
WasmCodeWrapper code = WasmCodeWrapper(native_module_->GetCode(index));
byte* context_address =
test_module_.has_memory
? reinterpret_cast<byte*>(instance_object_->wasm_context()->get())
@ -444,9 +436,7 @@ void WasmFunctionCompiler::Build(const byte* start, const byte* end) {
Handle<WasmCompiledModule> compiled_module(
builder_->instance_object()->compiled_module(), isolate());
NativeModule* native_module = compiled_module->GetNativeModule();
if (FLAG_wasm_jit_to_native) {
native_module->ResizeCodeTableForTest(function_->func_index);
}
native_module->ResizeCodeTableForTest(function_->func_index);
Handle<SeqOneByteString> wire_bytes(compiled_module->shared()->module_bytes(),
isolate());
@ -472,47 +462,10 @@ void WasmFunctionCompiler::Build(const byte* start, const byte* end) {
isolate()->counters(), builder_->runtime_exception_support(),
builder_->lower_simd());
unit.ExecuteCompilation();
WasmCodeWrapper code_wrapper = unit.FinishCompilation(&thrower);
unit.FinishCompilation(&thrower);
CHECK(!thrower.error());
if (!FLAG_wasm_jit_to_native) {
Handle<Code> code = code_wrapper.GetCode();
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate()->heap());
// Manually add the deoptimization info that would otherwise be added
// during instantiation. Deopt data holds <WeakCell<wasm_instance>,
// func_index>.
DCHECK_EQ(0, code->deoptimization_data()->length());
Handle<FixedArray> deopt_data =
isolate()->factory()->NewFixedArray(2, TENURED);
Handle<Object> weak_instance =
isolate()->factory()->NewWeakCell(builder_->instance_object());
deopt_data->set(0, *weak_instance);
deopt_data->set(1, Smi::FromInt(static_cast<int>(function_index())));
code->set_deoptimization_data(*deopt_data);
// Build the TurboFan graph.
builder_->SetFunctionCode(function_index(), code);
// Add to code table.
Handle<FixedArray> code_table(compiled_module->code_table(), isolate());
if (static_cast<int>(function_index()) >= code_table->length()) {
Handle<FixedArray> new_arr = isolate()->factory()->NewFixedArray(
static_cast<int>(function_index()) + 1);
code_table->CopyTo(0, *new_arr, 0, code_table->length());
code_table = new_arr;
compiled_module->ReplaceCodeTableForTesting(code_table);
}
DCHECK(code_table->get(static_cast<int>(function_index()))
->IsUndefined(isolate()));
code_table->set(static_cast<int>(function_index()), *code);
if (trap_handler::IsTrapHandlerEnabled()) {
UnpackAndRegisterProtectedInstructionsGC(isolate(), code_table);
}
} else {
if (trap_handler::IsTrapHandlerEnabled()) {
UnpackAndRegisterProtectedInstructions(isolate(), native_module);
}
if (trap_handler::IsTrapHandlerEnabled()) {
UnpackAndRegisterProtectedInstructions(isolate(), native_module);
}
}
@ -533,18 +486,7 @@ WasmFunctionCompiler::WasmFunctionCompiler(Zone* zone, FunctionSig* sig,
function_ = builder_->GetFunctionAt(index);
}
WasmFunctionCompiler::~WasmFunctionCompiler() {
if (!FLAG_wasm_jit_to_native) {
if (trap_handler::IsTrapHandlerEnabled() &&
!builder_->GetFunctionCode(function_index()).is_null()) {
const int handler_index = builder_->GetFunctionCode(function_index())
.GetCode()
->trap_handler_index()
->value();
trap_handler::ReleaseHandlerData(handler_index);
}
}
}
WasmFunctionCompiler::~WasmFunctionCompiler() {}
FunctionSig* WasmRunnerBase::CreateSig(MachineType return_type,
Vector<MachineType> param_types) {


@ -91,13 +91,7 @@ class TestingModuleBuilder {
byte* AddMemory(uint32_t size);
size_t CodeTableLength() const {
if (FLAG_wasm_jit_to_native) {
return native_module_->FunctionCount();
} else {
return function_code_.size();
}
}
size_t CodeTableLength() const { return native_module_->FunctionCount(); }
template <typename T>
T* AddMemoryElems(uint32_t count) {
@ -208,18 +202,13 @@ class TestingModuleBuilder {
Isolate* isolate() { return isolate_; }
Handle<WasmInstanceObject> instance_object() { return instance_object_; }
WasmCodeWrapper GetFunctionCode(uint32_t index) {
if (FLAG_wasm_jit_to_native) {
return WasmCodeWrapper(native_module_->GetCode(index));
} else {
return WasmCodeWrapper(function_code_[index]);
}
return WasmCodeWrapper(native_module_->GetCode(index));
}
void SetFunctionCode(int index, Handle<Code> code) {
function_code_[index] = code;
}
Address globals_start() { return reinterpret_cast<Address>(globals_data_); }
void Link() {
if (!FLAG_wasm_jit_to_native) return;
if (!linked_) {
native_module_->LinkAll();
linked_ = true;
@ -281,20 +270,14 @@ class WasmFunctionWrapper : private compiler::GraphAndBuilders {
}
void SetInnerCode(WasmCodeWrapper code) {
if (FLAG_wasm_jit_to_native) {
intptr_t address = reinterpret_cast<intptr_t>(
code.GetWasmCode()->instructions().start());
compiler::NodeProperties::ChangeOp(
inner_code_node_,
kPointerSize == 8
? common()->RelocatableInt64Constant(address,
RelocInfo::WASM_CALL)
: common()->RelocatableInt32Constant(static_cast<int>(address),
RelocInfo::WASM_CALL));
} else {
compiler::NodeProperties::ChangeOp(
inner_code_node_, common()->HeapConstant(code.GetCode()));
}
intptr_t address =
reinterpret_cast<intptr_t>(code.GetWasmCode()->instructions().start());
compiler::NodeProperties::ChangeOp(
inner_code_node_,
kPointerSize == 8
? common()->RelocatableInt64Constant(address, RelocInfo::WASM_CALL)
: common()->RelocatableInt32Constant(static_cast<int>(address),
RelocInfo::WASM_CALL));
}
const compiler::Operator* IntPtrConstant(intptr_t value) {

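SetInnerCode above embeds the callee's instruction start as a relocatable constant whose width matches the target pointer size (a 64-bit constant on 64-bit targets, 32-bit otherwise). The selection boils down to a pointer-size check like the following sketch; ConstKind and EmbedCallTarget are placeholders for the real operator builders.

#include <cassert>
#include <cstdint>

enum class ConstKind { kRelocatableInt32, kRelocatableInt64 };

struct EmbeddedTarget {
  ConstKind kind;
  int64_t value;
};

// Encode a code address as a relocatable constant of pointer width.
EmbeddedTarget EmbedCallTarget(const void* instruction_start) {
  intptr_t address = reinterpret_cast<intptr_t>(instruction_start);
  if (sizeof(void*) == 8) {
    return {ConstKind::kRelocatableInt64, static_cast<int64_t>(address)};
  }
  return {ConstKind::kRelocatableInt32, static_cast<int32_t>(address)};
}

int main() {
  int dummy = 0;
  EmbeddedTarget t = EmbedCallTarget(&dummy);
  assert((sizeof(void*) == 8) == (t.kind == ConstKind::kRelocatableInt64));
  return 0;
}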

@ -24,7 +24,6 @@ ALL_VARIANT_FLAGS = {
"stress_sampling": [["--stress-sampling-allocation-profiler=16384"]],
"trusted": [["--no-untrusted-code-mitigations"]],
"wasm_traps": [["--wasm-trap-handler", "--invoke-weak-callbacks"]],
"wasm_no_native": [["--no-wasm-jit-to-native"]],
}
SLOW_VARIANTS = set([


@ -44,7 +44,7 @@ VARIANT_ALIASES = {
# Shortcut for the two above ("more" first - it has the longer running tests).
"exhaustive": MORE_VARIANTS + VARIANTS,
# Additional variants, run on a subset of bots.
"extra": ["future", "liftoff", "trusted", "wasm_no_native"],
"extra": ["future", "liftoff", "trusted"],
}
GC_STRESS_FLAGS = ["--gc-interval=500", "--stress-compaction",