[wasm] Merge the WasmContext into WasmInstanceObject

This change makes lifetime management of WasmCode much simpler.
By using the WasmInstanceObject as the context for WASM code execution,
including the pointer to the memory base and indirect function tables,
this keeps the instance alive when WASM code is on the stack, since
the instance object is passed as a parameter and spilled onto the stack.
This is in preparation for sharing the code between instances and
isolates.

Bug: v8:7424

Change-Id: Ic2e4b7bcc2feb20001d0553a615a8a9dff36317e
Reviewed-on: https://chromium-review.googlesource.com/958520
Commit-Queue: Ben Titzer <titzer@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#52361}
This commit is contained in:
Ben L. Titzer 2018-04-04 17:14:01 +02:00 committed by Commit Bot
parent f81f301f7e
commit 57bf0bfefb
54 changed files with 1443 additions and 2502 deletions

View File

@ -213,12 +213,6 @@ bool RelocInfo::OffHeapTargetIsCodedSpecially() {
#endif
}
// Patches the embedded WasmContext address at this relocation entry.
// Only valid for WASM_CONTEXT_REFERENCE reloc entries (checked below).
void RelocInfo::set_wasm_context_reference(Address address,
ICacheFlushMode icache_flush_mode) {
DCHECK(IsWasmContextReference(rmode_));
set_embedded_address(address, icache_flush_mode);
}
void RelocInfo::set_global_handle(Address address,
ICacheFlushMode icache_flush_mode) {
DCHECK_EQ(rmode_, WASM_GLOBAL_HANDLE);
@ -242,11 +236,6 @@ Address RelocInfo::global_handle() const {
return embedded_address();
}
// Returns the WasmContext address embedded at this relocation entry.
// Only valid for WASM_CONTEXT_REFERENCE reloc entries (checked below).
Address RelocInfo::wasm_context_reference() const {
DCHECK(IsWasmContextReference(rmode_));
return embedded_address();
}
void RelocInfo::set_target_address(Address target,
WriteBarrierMode write_barrier_mode,
ICacheFlushMode icache_flush_mode) {
@ -546,8 +535,6 @@ const char* RelocInfo::RelocModeName(RelocInfo::Mode rmode) {
return "constant pool";
case VENEER_POOL:
return "veneer pool";
case WASM_CONTEXT_REFERENCE:
return "wasm context reference";
case WASM_GLOBAL_HANDLE:
return "global handle";
case WASM_CALL:
@ -650,7 +637,6 @@ void RelocInfo::Verify(Isolate* isolate) {
case DEOPT_ID:
case CONST_POOL:
case VENEER_POOL:
case WASM_CONTEXT_REFERENCE:
case WASM_GLOBAL_HANDLE:
case WASM_CALL:
case JS_TO_WASM_CALL:

View File

@ -366,10 +366,6 @@ class RelocInfo {
// Please note the order is important (see IsCodeTarget, IsGCRelocMode).
CODE_TARGET,
EMBEDDED_OBJECT,
// Wasm entries are to relocate pointers into the wasm memory embedded in
// wasm code. Everything after WASM_CONTEXT_REFERENCE (inclusive) is not
// GC'ed.
WASM_CONTEXT_REFERENCE,
WASM_GLOBAL_HANDLE,
WASM_CALL,
JS_TO_WASM_CALL,
@ -466,15 +462,12 @@ class RelocInfo {
return mode == OFF_HEAP_TARGET;
}
static inline bool IsNone(Mode mode) { return mode == NONE; }
// True iff {mode} marks an embedded WasmContext pointer
// (see WASM_CONTEXT_REFERENCE in the Mode enum above).
static inline bool IsWasmContextReference(Mode mode) {
return mode == WASM_CONTEXT_REFERENCE;
}
// True iff {mode} is any wasm-related relocation; currently a pure
// alias of IsWasmPtrReference since all wasm references are pointers.
static inline bool IsWasmReference(Mode mode) {
return IsWasmPtrReference(mode);
}
static inline bool IsWasmPtrReference(Mode mode) {
return mode == WASM_CONTEXT_REFERENCE || mode == WASM_GLOBAL_HANDLE ||
mode == WASM_CALL || mode == JS_TO_WASM_CALL;
return mode == WASM_GLOBAL_HANDLE || mode == WASM_CALL ||
mode == JS_TO_WASM_CALL;
}
static constexpr int ModeMask(Mode mode) { return 1 << mode; }
@ -509,14 +502,10 @@ class RelocInfo {
// constant pool, otherwise the pointer is embedded in the instruction stream.
bool IsInConstantPool();
Address wasm_context_reference() const;
Address global_handle() const;
Address js_to_wasm_address() const;
Address wasm_call_address() const;
void set_wasm_context_reference(
Address address,
ICacheFlushMode icache_flush_mode = FLUSH_ICACHE_IF_NEEDED);
void set_target_address(
Address target,
WriteBarrierMode write_barrier_mode = UPDATE_WRITE_BARRIER,

View File

@ -281,15 +281,15 @@ void Int64Lowering::LowerNode(Node* node) {
static_cast<int>(signature()->parameter_count())) {
int old_index = ParameterIndexOf(node->op());
// TODO(wasm): Make this part not wasm specific.
// Prevent special lowering of the WasmContext parameter.
if (old_index == kWasmContextParameterIndex) {
// Prevent special lowering of the instance parameter.
if (old_index == kWasmInstanceParameterIndex) {
DefaultLowering(node);
break;
}
// Adjust old_index to be compliant with the signature.
--old_index;
int new_index = GetParameterIndexAfterLowering(signature(), old_index);
// Adjust new_index to consider the WasmContext parameter.
// Adjust new_index to consider the instance parameter.
++new_index;
NodeProperties::ChangeOp(node, common()->Parameter(new_index));

View File

@ -58,6 +58,21 @@ namespace compiler {
FATAL("Unsupported opcode 0x%x:%s", (opcode), \
wasm::WasmOpcodes::OpcodeName(opcode));
// Untagged byte offset of field k{name}Offset in a WasmInstanceObject.
#define WASM_INSTANCE_OBJECT_OFFSET(name) \
(WasmInstanceObject::k##name##Offset - kHeapObjectTag)
// Emits a Load of instance field {name} (machine type {type}) from
// instance_node_, threaded through the current effect and control.
#define LOAD_INSTANCE_FIELD(name, type) \
graph()->NewNode( \
jsgraph()->machine()->Load(type), instance_node_.get(), \
jsgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(name)), *effect_, \
*control_)
// Emits a tagged Load of FixedArray element {index} from {array_node}.
#define LOAD_FIXED_ARRAY_SLOT(array_node, index) \
graph()->NewNode(jsgraph()->machine()->Load(MachineType::TaggedPointer()), \
array_node, \
jsgraph()->Int32Constant(FixedArrayOffsetMinusTag(index)), \
*effect_, *control_);
namespace {
constexpr uint32_t kBytesPerExceptionValuesArrayElement = 2;
@ -89,7 +104,6 @@ WasmGraphBuilder::WasmGraphBuilder(
jsgraph_(jsgraph),
centry_stub_node_(jsgraph_->HeapConstant(centry_stub)),
env_(env),
function_tables_(zone),
cur_buffer_(def_buffer_),
cur_bufsize_(kDefaultBufferSize),
has_simd_(ContainsSimd(sig)),
@ -2532,22 +2546,22 @@ Node* WasmGraphBuilder::BuildCCall(MachineSignature* sig, Node* function,
Node* WasmGraphBuilder::BuildWasmCall(wasm::FunctionSig* sig, Node** args,
Node*** rets,
wasm::WasmCodePosition position,
Node* wasm_context, bool use_retpoline) {
if (wasm_context == nullptr) {
DCHECK_NOT_NULL(wasm_context_);
wasm_context = wasm_context_.get();
Node* instance_node, bool use_retpoline) {
if (instance_node == nullptr) {
DCHECK_NOT_NULL(instance_node_);
instance_node = instance_node_.get();
}
SetNeedsStackCheck();
const size_t params = sig->parameter_count();
const size_t extra = 3; // wasm_context, effect, and control.
const size_t extra = 3; // instance_node, effect, and control.
const size_t count = 1 + params + extra;
// Reallocate the buffer to make space for extra inputs.
args = Realloc(args, 1 + params, count);
// Make room for the wasm_context parameter at index 1, just after code.
// Make room for the instance_node parameter at index 1, just after code.
memmove(&args[2], &args[1], params * sizeof(Node*));
args[1] = wasm_context;
args[1] = instance_node;
// Add effect and control inputs.
args[params + 2] = *effect_;
@ -2582,12 +2596,32 @@ Node* WasmGraphBuilder::CallDirect(uint32_t index, Node** args, Node*** rets,
DCHECK_NULL(args[0]);
wasm::FunctionSig* sig = env_->module->functions[index].sig;
// Just encode the function index. This will be patched at instantiation.
Address code = reinterpret_cast<Address>(index);
args[0] = jsgraph()->RelocatableIntPtrConstant(
reinterpret_cast<intptr_t>(code), RelocInfo::WASM_CALL);
if (env_ && index < env_->module->num_imported_functions) {
// A call to an imported function.
// Load the instance from the imported_instances array at a known offset.
Node* imported_instances = LOAD_INSTANCE_FIELD(
ImportedFunctionInstances, MachineType::TaggedPointer());
Node* instance_node = LOAD_FIXED_ARRAY_SLOT(imported_instances, index);
return BuildWasmCall(sig, args, rets, position);
// Load the target from the imported_targets array at a known offset.
Node* imported_targets =
LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer());
Node* target_node = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::Pointer()), imported_targets,
jsgraph()->Int32Constant(index * sizeof(Address)),
jsgraph()->graph()->start(), jsgraph()->graph()->start());
args[0] = target_node;
return BuildWasmCall(sig, args, rets, position, instance_node);
} else {
// A call to a function in this module.
// Just encode the function index. This will be patched at instantiation.
Address code = reinterpret_cast<Address>(index);
args[0] = jsgraph()->RelocatableIntPtrConstant(
reinterpret_cast<intptr_t>(code), RelocInfo::WASM_CALL);
return BuildWasmCall(sig, args, rets, position);
}
}
Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
@ -2597,18 +2631,16 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
DCHECK_NOT_NULL(env_);
// Assume only one table for now.
uint32_t table_index = 0;
wasm::FunctionSig* sig = env_->module->signatures[sig_index];
Node* table = nullptr;
Node* table_size = nullptr;
GetFunctionTableNodes(table_index, &table, &table_size);
Node* ift_size =
LOAD_INSTANCE_FIELD(IndirectFunctionTableSize, MachineType::Uint32());
MachineOperatorBuilder* machine = jsgraph()->machine();
Node* key = args[0];
// Bounds check against the table size.
Node* in_bounds =
graph()->NewNode(machine->Uint32LessThan(), key, table_size);
Node* in_bounds = graph()->NewNode(machine->Uint32LessThan(), key, ift_size);
TrapIfFalse(wasm::kTrapFuncInvalid, in_bounds, position);
// Mask the key to prevent SSCA.
@ -2618,55 +2650,55 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
graph()->NewNode(machine->Word32Xor(), key, Int32Constant(-1));
Node* masked_diff = graph()->NewNode(
machine->Word32And(),
graph()->NewNode(machine->Int32Sub(), key, table_size), neg_key);
graph()->NewNode(machine->Int32Sub(), key, ift_size), neg_key);
Node* mask =
graph()->NewNode(machine->Word32Sar(), masked_diff, Int32Constant(31));
key = graph()->NewNode(machine->Word32And(), key, mask);
}
// Load signature from the table and check.
// The table is a FixedArray; signatures are encoded as SMIs.
// [sig1, code1, sig2, code2, sig3, code3, ...]
static_assert(compiler::kFunctionTableEntrySize == 2, "consistency");
static_assert(compiler::kFunctionTableSignatureOffset == 0, "consistency");
static_assert(compiler::kFunctionTableCodeOffset == 1, "consistency");
Node* ift_sig_ids =
LOAD_INSTANCE_FIELD(IndirectFunctionTableSigIds, MachineType::Pointer());
int32_t canonical_sig_num = env_->module->signature_ids[sig_index];
// The table entries are {IndirectFunctionTableEntry} structs.
int32_t expected_sig_id = env_->module->signature_ids[sig_index];
Node* scaled_key =
graph()->NewNode(machine->Int32Mul(), key,
Int32Constant(sizeof(IndirectFunctionTableEntry)));
graph()->NewNode(machine->Word32Shl(), key, Int32Constant(2));
const Operator* add = nullptr;
if (machine->Is64()) {
scaled_key = graph()->NewNode(machine->ChangeInt32ToInt64(), scaled_key);
scaled_key = graph()->NewNode(machine->ChangeUint32ToUint64(), scaled_key);
add = machine->Int64Add();
} else {
add = machine->Int32Add();
}
Node* entry_address = graph()->NewNode(add, table, scaled_key);
Node* loaded_sig = graph()->NewNode(
machine->Load(MachineType::Int32()), entry_address,
Int32Constant(offsetof(IndirectFunctionTableEntry, sig_id)), *effect_,
*control_);
Node* loaded_sig =
graph()->NewNode(machine->Load(MachineType::Int32()), ift_sig_ids,
scaled_key, *effect_, *control_);
Node* sig_match = graph()->NewNode(machine->WordEqual(), loaded_sig,
Int32Constant(canonical_sig_num));
Int32Constant(expected_sig_id));
TrapIfFalse(wasm::kTrapFuncSigMismatch, sig_match, position);
Node* target = graph()->NewNode(
machine->Load(MachineType::Pointer()), entry_address,
Int32Constant(offsetof(IndirectFunctionTableEntry, target)), *effect_,
*control_);
Node* ift_targets =
LOAD_INSTANCE_FIELD(IndirectFunctionTableTargets, MachineType::Pointer());
Node* ift_instances = LOAD_INSTANCE_FIELD(IndirectFunctionTableInstances,
MachineType::TaggedPointer());
Node* loaded_context = graph()->NewNode(
machine->Load(MachineType::Pointer()), entry_address,
Int32Constant(offsetof(IndirectFunctionTableEntry, context)), *effect_,
*control_);
scaled_key = graph()->NewNode(machine->Word32Shl(), key,
Int32Constant(kPointerSizeLog2));
Node* target = graph()->NewNode(machine->Load(MachineType::Pointer()),
ift_targets, scaled_key, *effect_, *control_);
auto access = AccessBuilder::ForFixedArrayElement();
Node* target_instance = graph()->NewNode(
machine->Load(MachineType::TaggedPointer()),
graph()->NewNode(add, ift_instances, scaled_key),
Int32Constant(access.header_size - access.tag()), *effect_, *control_);
args[0] = target;
return BuildWasmCall(sig, args, rets, position, loaded_context);
return BuildWasmCall(sig, args, rets, position, target_instance);
}
Node* WasmGraphBuilder::BuildI32Rol(Node* left, Node* right) {
@ -2996,11 +3028,11 @@ Node* WasmGraphBuilder::BuildHeapNumberValueIndexConstant() {
return jsgraph()->IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag);
}
void WasmGraphBuilder::BuildJSToWasmWrapper(wasm::WasmCode* wasm_code,
Address wasm_context_address) {
void WasmGraphBuilder::BuildJSToWasmWrapper(Handle<WeakCell> weak_instance,
wasm::WasmCode* wasm_code) {
const int wasm_count = static_cast<int>(sig_->parameter_count());
const int count =
wasm_count + 4; // wasm_code, wasm_context, effect, and control.
wasm_count + 4; // wasm_code, instance_node, effect, and control.
Node** args = Buffer(count);
// Build the start and the JS parameter nodes.
@ -3014,13 +3046,19 @@ void WasmGraphBuilder::BuildJSToWasmWrapper(wasm::WasmCode* wasm_code,
Linkage::GetJSCallContextParamIndex(wasm_count + 1), "%context"),
graph()->start());
// Create the wasm_context node to pass as parameter. This must be a
// RelocatableIntPtrConstant because JSToWasm wrappers are compiled at module
// compile time and patched at instance build time.
DCHECK_NULL(wasm_context_);
wasm_context_ = jsgraph()->RelocatableIntPtrConstant(
reinterpret_cast<uintptr_t>(wasm_context_address),
RelocInfo::WASM_CONTEXT_REFERENCE);
// Create the instance_node node to pass as parameter. This is either
// an actual reference to an instance or a placeholder reference,
// since JSToWasm wrappers can be compiled at module compile time and
// patched at instance build time.
DCHECK_NULL(instance_node_);
// TODO(titzer): JSToWasmWrappers should load the instance from the
// incoming JSFunction, but this is currently too slow/too complex because
// we use a regular JS property with a private symbol.
instance_node_ = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::TaggedPointer()),
jsgraph()->HeapConstant(weak_instance),
jsgraph()->Int32Constant(WeakCell::kValueOffset - kHeapObjectTag),
*effect_, *control_);
Address instr_start =
wasm_code == nullptr ? nullptr : wasm_code->instructions().start();
@ -3038,7 +3076,7 @@ void WasmGraphBuilder::BuildJSToWasmWrapper(wasm::WasmCode* wasm_code,
// the wasm function could not be re-imported into another wasm module.
int pos = 0;
args[pos++] = wasm_code_node;
args[pos++] = wasm_context_.get();
args[pos++] = instance_node_.get();
args[pos++] = *effect_;
args[pos++] = *control_;
@ -3054,7 +3092,7 @@ void WasmGraphBuilder::BuildJSToWasmWrapper(wasm::WasmCode* wasm_code,
int pos = 0;
args[pos++] = wasm_code_node;
args[pos++] = wasm_context_.get();
args[pos++] = instance_node_.get();
// Convert JS parameters to wasm numbers.
for (int i = 0; i < wasm_count; ++i) {
@ -3089,42 +3127,15 @@ int WasmGraphBuilder::AddParameterNodes(Node** args, int pos, int param_count,
wasm::FunctionSig* sig) {
// Convert wasm numbers to JS values.
for (int i = 0; i < param_count; ++i) {
Node* param = Param(i + 1); // Start from index 1 to drop the wasm_context.
Node* param =
Param(i + 1); // Start from index 1 to drop the instance_node.
args[pos++] = ToJS(param, sig->GetParam(i));
}
return pos;
}
// Loads the tagged value at element index {offset} of the js-imports
// {table} (a FixedArray) and chains the load into the effect chain.
Node* WasmGraphBuilder::LoadImportDataAtOffset(int offset, Node* table) {
// Convert the element index into an untagged byte offset.
offset = FixedArray::OffsetOfElementAt(offset) - kHeapObjectTag;
Node* offset_node = jsgraph()->Int32Constant(offset);
Node* import_data = graph()->NewNode(
jsgraph()->machine()->Load(LoadRepresentation::TaggedPointer()), table,
offset_node, *effect_, *control_);
*effect_ = import_data;
return import_data;
}
// Loads the isolate's native context from slot 0 of the js-imports table.
Node* WasmGraphBuilder::LoadNativeContext(Node* table) {
// The js_imports_table is set up so that index 0 has isolate->native_context
return LoadImportDataAtOffset(0, table);
}
// Computes the js_imports_table element index for import {index} and the
// requested {type} of import data (callable, global proxy, or context).
int OffsetForImportData(int index, WasmGraphBuilder::ImportDataType type) {
// The js_imports_table is set up so that index 0 has isolate->native_context
// and for every index, 3*index+1 has the JSReceiver, 3*index+2 has function's
// global proxy and 3*index+3 has function's context.
return 3 * index + type;
}
// Loads one piece of import data for import {index} from the js-imports
// {table}; {type} selects callable, global proxy, or function context.
Node* WasmGraphBuilder::LoadImportData(int index, ImportDataType type,
Node* table) {
return LoadImportDataAtOffset(OffsetForImportData(index, type), table);
}
bool WasmGraphBuilder::BuildWasmToJSWrapper(
Handle<JSReceiver> target, Handle<FixedArray> global_js_imports_table,
int index) {
bool WasmGraphBuilder::BuildWasmToJSWrapper(Handle<JSReceiver> target,
int index) {
DCHECK(target->IsCallable());
int wasm_count = static_cast<int>(sig_->parameter_count());
@ -3136,22 +3147,30 @@ bool WasmGraphBuilder::BuildWasmToJSWrapper(
*effect_ = start;
*control_ = start;
// We add the target function to a table and look it up during runtime. This
// ensures that if the GC kicks in, it doesn't need to patch the code for the
// JS function.
// js_imports_table is fixed array with global handle scope whose lifetime is
// tied to the instance.
// TODO(aseemgarg): explore using per-import global handle instead of a table
Node* table_ptr = jsgraph()->IntPtrConstant(
reinterpret_cast<intptr_t>(global_js_imports_table.location()));
Node* table = graph()->NewNode(
jsgraph()->machine()->Load(LoadRepresentation::TaggedPointer()),
table_ptr, jsgraph()->IntPtrConstant(0), *effect_, *control_);
*effect_ = table;
instance_node_.set(Param(compiler::kWasmInstanceParameterIndex));
Node* callables_node = LOAD_INSTANCE_FIELD(ImportedFunctionCallables,
MachineType::TaggedPointer());
Node* callable_node = LOAD_FIXED_ARRAY_SLOT(callables_node, index);
Node* undefined_node =
jsgraph()->Constant(handle(isolate->heap()->undefined_value(), isolate));
Node* compiled_module =
LOAD_INSTANCE_FIELD(CompiledModule, MachineType::TaggedPointer());
// TODO(wasm): native context is only weak because of recycling compiled
// modules.
Node* weak_native_context = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::TaggedPointer()), compiled_module,
jsgraph()->Int32Constant(WasmCompiledModule::kNativeContextOffset -
kHeapObjectTag),
*effect_, *control_);
Node* native_context = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::TaggedPointer()),
weak_native_context,
jsgraph()->Int32Constant(WeakCell::kValueOffset - kHeapObjectTag),
*effect_, *control_);
if (!wasm::IsJSCompatibleSignature(sig_)) {
// Throw a TypeError.
Node* native_context = LoadNativeContext(table);
BuildCallToRuntimeWithContext(Runtime::kWasmThrowTypeError, native_context,
nullptr, 0);
// We don't need to return a value here, as the runtime call will not return
@ -3161,25 +3180,30 @@ bool WasmGraphBuilder::BuildWasmToJSWrapper(
}
Node** args = Buffer(wasm_count + 9);
Node* call = nullptr;
BuildModifyThreadInWasmFlag(false);
if (target->IsJSFunction()) {
Handle<JSFunction> function = Handle<JSFunction>::cast(target);
FieldAccess field_access = AccessBuilder::ForJSFunctionContext();
Node* function_context = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::TaggedPointer()), callable_node,
jsgraph()->Int32Constant(field_access.offset - field_access.tag()),
*effect_, *control_);
if (!IsClassConstructor(function->shared()->kind())) {
if (function->shared()->internal_formal_parameter_count() == wasm_count) {
int pos = 0;
args[pos++] =
LoadImportData(index, kFunction, table); // target callable.
args[pos++] = callable_node; // target callable.
// Receiver.
if (is_sloppy(function->shared()->language_mode()) &&
!function->shared()->native()) {
args[pos++] = LoadImportData(index, kGlobalProxy, table);
Node* global_proxy = LOAD_FIXED_ARRAY_SLOT(
native_context, Context::GLOBAL_PROXY_INDEX);
args[pos++] = global_proxy;
} else {
args[pos++] = jsgraph()->Constant(
handle(isolate->heap()->undefined_value(), isolate));
args[pos++] = undefined_node;
}
call_descriptor = Linkage::GetJSCallDescriptor(
@ -3188,9 +3212,9 @@ bool WasmGraphBuilder::BuildWasmToJSWrapper(
// Convert wasm numbers to JS values.
pos = AddParameterNodes(args, pos, wasm_count, sig_);
args[pos++] = jsgraph()->UndefinedConstant(); // new target
args[pos++] = undefined_node; // new target
args[pos++] = jsgraph()->Int32Constant(wasm_count); // argument count
args[pos++] = LoadImportData(index, kFunctionContext, table);
args[pos++] = function_context;
args[pos++] = *effect_;
args[pos++] = *control_;
@ -3200,24 +3224,24 @@ bool WasmGraphBuilder::BuildWasmToJSWrapper(
Callable callable = CodeFactory::ArgumentAdaptor(isolate);
int pos = 0;
args[pos++] = jsgraph()->HeapConstant(callable.code());
args[pos++] =
LoadImportData(index, kFunction, table); // target callable
args[pos++] = jsgraph()->UndefinedConstant(); // new target
args[pos++] = callable_node; // target callable
args[pos++] = undefined_node; // new target
args[pos++] = jsgraph()->Int32Constant(wasm_count); // argument count
args[pos++] = jsgraph()->Int32Constant(
function->shared()->internal_formal_parameter_count());
// Receiver.
if (is_sloppy(function->shared()->language_mode()) &&
!function->shared()->native()) {
args[pos++] = LoadImportData(index, kGlobalProxy, table);
Node* global_proxy = LOAD_FIXED_ARRAY_SLOT(
native_context, Context::GLOBAL_PROXY_INDEX);
args[pos++] = global_proxy;
} else {
args[pos++] = jsgraph()->Constant(
handle(isolate->heap()->undefined_value(), isolate));
args[pos++] = undefined_node;
}
// Convert wasm numbers to JS values.
pos = AddParameterNodes(args, pos, wasm_count, sig_);
args[pos++] = LoadImportData(index, kFunctionContext, table);
args[pos++] = function_context;
args[pos++] = *effect_;
args[pos++] = *control_;
call = graph()->NewNode(
@ -3229,16 +3253,15 @@ bool WasmGraphBuilder::BuildWasmToJSWrapper(
}
}
Node* native_context = nullptr;
// We cannot call the target directly, we have to use the Call builtin.
if (!call) {
int pos = 0;
// We cannot call the target directly, we have to use the Call builtin.
Callable callable = CodeFactory::Call(isolate);
args[pos++] = jsgraph()->HeapConstant(callable.code());
args[pos++] = LoadImportData(index, kFunction, table); // target callable.
args[pos++] = jsgraph()->Int32Constant(wasm_count); // argument count
args[pos++] = jsgraph()->Constant(
handle(isolate->heap()->undefined_value(), isolate)); // receiver
args[pos++] = callable_node;
args[pos++] = jsgraph()->Int32Constant(wasm_count); // argument count
args[pos++] = undefined_node; // receiver
call_descriptor = Linkage::GetStubCallDescriptor(
isolate, graph()->zone(), callable.descriptor(), wasm_count + 1,
@ -3252,7 +3275,6 @@ bool WasmGraphBuilder::BuildWasmToJSWrapper(
// is only needed if the target is a constructor to throw a TypeError, if
// the target is a native function, or if the target is a callable JSObject,
// which can only be constructed by the runtime.
native_context = LoadNativeContext(table);
args[pos++] = native_context;
args[pos++] = *effect_;
args[pos++] = *control_;
@ -3264,15 +3286,13 @@ bool WasmGraphBuilder::BuildWasmToJSWrapper(
*effect_ = call;
SetSourcePosition(call, 0);
BuildModifyThreadInWasmFlag(true);
// Convert the return value back.
Node* val = sig_->return_count() == 0
? jsgraph()->Int32Constant(0)
: FromJS(call,
native_context != nullptr ? native_context
: LoadNativeContext(table),
sig_->GetReturn());
: FromJS(call, native_context, sig_->GetReturn());
BuildModifyThreadInWasmFlag(true);
Return(val);
return true;
}
@ -3286,41 +3306,6 @@ bool HasInt64ParamOrReturn(wasm::FunctionSig* sig) {
}
} // namespace
// Builds a wrapper that tail-calls {wasm_code} belonging to another
// instance, swapping in that instance's context ({new_context_address})
// as the implicit first parameter. {wasm_code} may be nullptr, in which
// case a null target is embedded and presumably patched later — TODO
// confirm against callers.
void WasmGraphBuilder::BuildWasmToWasmWrapper(wasm::WasmCode* wasm_code,
Address new_context_address) {
int wasm_count = static_cast<int>(sig_->parameter_count());
int count = wasm_count + 4; // wasm_code, wasm_context, effect, and control.
Node** args = Buffer(count);
// Build the start node.
Node* start = Start(count + 1);
*control_ = start;
*effect_ = start;
int pos = 0;
// Add the wasm code target.
Address instr_start =
wasm_code == nullptr ? nullptr : wasm_code->instructions().start();
args[pos++] = jsgraph()->RelocatableIntPtrConstant(
reinterpret_cast<intptr_t>(instr_start), RelocInfo::JS_TO_WASM_CALL);
// Add the wasm_context of the other instance.
args[pos++] = jsgraph()->IntPtrConstant(
reinterpret_cast<uintptr_t>(new_context_address));
// Add the parameters starting from index 1 since the parameter with index 0
// is the old wasm_context.
for (int i = 0; i < wasm_count; ++i) {
args[pos++] = Param(i + 1);
}
args[pos++] = *effect_;
args[pos++] = *control_;
// Tail-call the wasm code.
auto call_descriptor = GetWasmCallDescriptor(jsgraph()->zone(), sig_);
Node* tail_call = graph()->NewNode(
jsgraph()->common()->TailCall(call_descriptor), count, args);
MergeControlToEnd(jsgraph(), tail_call);
}
void WasmGraphBuilder::BuildWasmInterpreterEntry(uint32_t func_index) {
int param_count = static_cast<int>(sig_->parameter_count());
@ -3354,7 +3339,7 @@ void WasmGraphBuilder::BuildWasmInterpreterEntry(uint32_t func_index) {
for (int i = 0; i < param_count; ++i) {
wasm::ValueType type = sig_->GetParam(i);
// Start from the parameter with index 1 to drop the wasm_context.
// Start from the parameter with index 1 to drop the instance_node.
*effect_ = graph()->NewNode(GetSafeStoreOperator(offset, type), arg_buffer,
Int32Constant(offset), Param(i + 1), *effect_,
*control_);
@ -3401,16 +3386,16 @@ void WasmGraphBuilder::BuildCWasmEntry() {
machine->Load(MachineType::Pointer()), foreign_code_obj,
Int32Constant(Foreign::kForeignAddressOffset - kHeapObjectTag), *effect_,
*control_);
Node* wasm_context = Param(CWasmEntryParameters::kWasmContext + 1);
Node* instance_node = Param(CWasmEntryParameters::kWasmInstance + 1);
Node* arg_buffer = Param(CWasmEntryParameters::kArgumentsBuffer + 1);
int wasm_arg_count = static_cast<int>(sig_->parameter_count());
int arg_count = wasm_arg_count + 4; // code, wasm_context, control, effect
int arg_count = wasm_arg_count + 4; // code, instance_node, control, effect
Node** args = Buffer(arg_count);
int pos = 0;
args[pos++] = code_obj;
args[pos++] = wasm_context;
args[pos++] = instance_node;
int offset = 0;
for (wasm::ValueType type : sig_->parameters()) {
@ -3458,48 +3443,46 @@ void WasmGraphBuilder::BuildCWasmEntry() {
}
}
void WasmGraphBuilder::InitContextCache(WasmContextCacheNodes* context_cache) {
DCHECK_NOT_NULL(wasm_context_);
void WasmGraphBuilder::InitInstanceCache(
WasmInstanceCacheNodes* instance_cache) {
DCHECK_NOT_NULL(instance_node_);
DCHECK_NOT_NULL(*control_);
DCHECK_NOT_NULL(*effect_);
// Load the memory start.
Node* mem_start = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::UintPtr()), wasm_context_.get(),
jsgraph()->Int32Constant(
static_cast<int32_t>(offsetof(WasmContext, mem_start))),
jsgraph()->machine()->Load(MachineType::UintPtr()), instance_node_.get(),
jsgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(MemoryStart)),
*effect_, *control_);
*effect_ = mem_start;
context_cache->mem_start = mem_start;
instance_cache->mem_start = mem_start;
// Load the memory size.
Node* mem_size = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::Uint32()), wasm_context_.get(),
jsgraph()->Int32Constant(
static_cast<int32_t>(offsetof(WasmContext, mem_size))),
jsgraph()->machine()->Load(MachineType::Uint32()), instance_node_.get(),
jsgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(MemorySize)),
*effect_, *control_);
*effect_ = mem_size;
context_cache->mem_size = mem_size;
instance_cache->mem_size = mem_size;
if (untrusted_code_mitigations_) {
// Load the memory mask.
Node* mem_mask = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::Uint32()), wasm_context_.get(),
jsgraph()->Int32Constant(
static_cast<int32_t>(offsetof(WasmContext, mem_mask))),
jsgraph()->machine()->Load(MachineType::Uint32()), instance_node_.get(),
jsgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(MemoryMask)),
*effect_, *control_);
*effect_ = mem_mask;
context_cache->mem_mask = mem_mask;
instance_cache->mem_mask = mem_mask;
} else {
// Explicitly set to nullptr to ensure a SEGV when we try to use it.
context_cache->mem_mask = nullptr;
instance_cache->mem_mask = nullptr;
}
}
void WasmGraphBuilder::PrepareContextCacheForLoop(
WasmContextCacheNodes* context_cache, Node* control) {
void WasmGraphBuilder::PrepareInstanceCacheForLoop(
WasmInstanceCacheNodes* instance_cache, Node* control) {
#define INTRODUCE_PHI(field, rep) \
context_cache->field = Phi(rep, 1, &context_cache->field, control);
instance_cache->field = Phi(rep, 1, &instance_cache->field, control);
INTRODUCE_PHI(mem_start, MachineType::PointerRepresentation());
INTRODUCE_PHI(mem_size, MachineRepresentation::kWord32);
@ -3510,9 +3493,9 @@ void WasmGraphBuilder::PrepareContextCacheForLoop(
#undef INTRODUCE_PHI
}
void WasmGraphBuilder::NewContextCacheMerge(WasmContextCacheNodes* to,
WasmContextCacheNodes* from,
Node* merge) {
void WasmGraphBuilder::NewInstanceCacheMerge(WasmInstanceCacheNodes* to,
WasmInstanceCacheNodes* from,
Node* merge) {
#define INTRODUCE_PHI(field, rep) \
if (to->field != from->field) { \
Node* vals[] = {to->field, from->field}; \
@ -3528,9 +3511,9 @@ void WasmGraphBuilder::NewContextCacheMerge(WasmContextCacheNodes* to,
#undef INTRODUCE_PHI
}
void WasmGraphBuilder::MergeContextCacheInto(WasmContextCacheNodes* to,
WasmContextCacheNodes* from,
Node* merge) {
void WasmGraphBuilder::MergeInstanceCacheInto(WasmInstanceCacheNodes* to,
WasmInstanceCacheNodes* from,
Node* merge) {
to->mem_size = CreateOrMergeIntoPhi(MachineRepresentation::kWord32, merge,
to->mem_size, from->mem_size);
to->mem_start = CreateOrMergeIntoPhi(MachineType::PointerRepresentation(),
@ -3574,21 +3557,21 @@ Node* WasmGraphBuilder::CreateOrMergeIntoEffectPhi(Node* merge, Node* tnode,
void WasmGraphBuilder::GetGlobalBaseAndOffset(MachineType mem_type,
uint32_t offset, Node** base_node,
Node** offset_node) {
DCHECK_NOT_NULL(wasm_context_);
DCHECK_NOT_NULL(instance_node_);
if (globals_start_ == nullptr) {
// Load globals_start from the WasmContext at runtime.
// Load globals_start from the instance object at runtime.
// TODO(wasm): we currently generate only one load of the {globals_start}
// start per graph, which means it can be placed anywhere by the scheduler.
// This is legal because the globals_start should never change.
// However, in some cases (e.g. if the WasmContext is already in a
// However, in some cases (e.g. if the instance object is already in a
// register), it is slightly more efficient to reload this value from the
// WasmContext. Since this depends on register allocation, it is not
// instance object. Since this depends on register allocation, it is not
// possible to express in the graph, and would essentially constitute a
// "mem2reg" optimization in TurboFan.
globals_start_ = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::UintPtr()), wasm_context_.get(),
jsgraph()->Int32Constant(
static_cast<int32_t>(offsetof(WasmContext, globals_start))),
jsgraph()->machine()->Load(MachineType::UintPtr()),
instance_node_.get(),
jsgraph()->Int32Constant(WASM_INSTANCE_OBJECT_OFFSET(GlobalsStart)),
graph()->start(), graph()->start());
}
*base_node = globals_start_.get();
@ -3605,8 +3588,8 @@ void WasmGraphBuilder::GetGlobalBaseAndOffset(MachineType mem_type,
}
Node* WasmGraphBuilder::MemBuffer(uint32_t offset) {
DCHECK_NOT_NULL(context_cache_);
Node* mem_start = context_cache_->mem_start;
DCHECK_NOT_NULL(instance_cache_);
Node* mem_start = instance_cache_->mem_start;
DCHECK_NOT_NULL(mem_start);
if (offset == 0) return mem_start;
return graph()->NewNode(jsgraph()->machine()->IntAdd(), mem_start,
@ -3616,8 +3599,8 @@ Node* WasmGraphBuilder::MemBuffer(uint32_t offset) {
Node* WasmGraphBuilder::CurrentMemoryPages() {
// CurrentMemoryPages can not be called from asm.js.
DCHECK_EQ(wasm::kWasmOrigin, env_->module->origin());
DCHECK_NOT_NULL(context_cache_);
Node* mem_size = context_cache_->mem_size;
DCHECK_NOT_NULL(instance_cache_);
Node* mem_size = instance_cache_->mem_size;
DCHECK_NOT_NULL(mem_size);
if (jsgraph()->machine()->Is64()) {
mem_size = graph()->NewNode(jsgraph()->machine()->TruncateInt64ToInt32(),
@ -3628,23 +3611,6 @@ Node* WasmGraphBuilder::CurrentMemoryPages() {
jsgraph()->Int32Constant(WhichPowerOf2(wasm::kWasmPageSize)));
}
void WasmGraphBuilder::GetFunctionTableNodes(uint32_t table_index, Node** table,
Node** table_size) {
// The table address and size are stored in the WasmContext.
// Don't bother caching them, since they are only used in indirect calls,
// which would cause them to be spilled on the stack anyway.
*table = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::UintPtr()), wasm_context_.get(),
jsgraph()->Int32Constant(
static_cast<int32_t>(offsetof(WasmContext, table))),
*effect_, *control_);
*table_size = graph()->NewNode(
jsgraph()->machine()->Load(MachineType::Uint32()), wasm_context_.get(),
jsgraph()->Int32Constant(
static_cast<int32_t>(offsetof(WasmContext, table_size))),
*effect_, *control_);
}
Node* WasmGraphBuilder::BuildModifyThreadInWasmFlag(bool new_value) {
// TODO(eholk): generate code to modify the thread-local storage directly,
// rather than calling the runtime.
@ -3741,8 +3707,8 @@ Node* WasmGraphBuilder::BoundsCheckMem(uint8_t access_size, Node* index,
wasm::WasmCodePosition position,
EnforceBoundsCheck enforce_check) {
if (FLAG_wasm_no_bounds_checks) return Uint32ToUintptr(index);
DCHECK_NOT_NULL(context_cache_);
Node* mem_size = context_cache_->mem_size;
DCHECK_NOT_NULL(instance_cache_);
Node* mem_size = instance_cache_->mem_size;
DCHECK_NOT_NULL(mem_size);
auto m = jsgraph()->machine();
@ -3810,7 +3776,7 @@ Node* WasmGraphBuilder::BoundsCheckMem(uint8_t access_size, Node* index,
if (untrusted_code_mitigations_) {
// In the fallthrough case, condition the index with the memory mask.
Node* mem_mask = context_cache_->mem_mask;
Node* mem_mask = instance_cache_->mem_mask;
DCHECK_NOT_NULL(mem_mask);
index = graph()->NewNode(m->Word32And(), index, mem_mask);
}
@ -3987,9 +3953,9 @@ Node* GetAsmJsOOBValue(MachineRepresentation rep, JSGraph* jsgraph) {
} // namespace
Node* WasmGraphBuilder::BuildAsmjsLoadMem(MachineType type, Node* index) {
DCHECK_NOT_NULL(context_cache_);
Node* mem_start = context_cache_->mem_start;
Node* mem_size = context_cache_->mem_size;
DCHECK_NOT_NULL(instance_cache_);
Node* mem_start = instance_cache_->mem_start;
Node* mem_size = instance_cache_->mem_size;
DCHECK_NOT_NULL(mem_start);
DCHECK_NOT_NULL(mem_size);
@ -4007,7 +3973,7 @@ Node* WasmGraphBuilder::BuildAsmjsLoadMem(MachineType type, Node* index) {
if (untrusted_code_mitigations_) {
// Condition the index with the memory mask.
Node* mem_mask = context_cache_->mem_mask;
Node* mem_mask = instance_cache_->mem_mask;
DCHECK_NOT_NULL(mem_mask);
index =
graph()->NewNode(jsgraph()->machine()->Word32And(), index, mem_mask);
@ -4033,9 +3999,9 @@ Node* WasmGraphBuilder::Uint32ToUintptr(Node* node) {
Node* WasmGraphBuilder::BuildAsmjsStoreMem(MachineType type, Node* index,
Node* val) {
DCHECK_NOT_NULL(context_cache_);
Node* mem_start = context_cache_->mem_start;
Node* mem_size = context_cache_->mem_size;
DCHECK_NOT_NULL(instance_cache_);
Node* mem_start = instance_cache_->mem_start;
Node* mem_size = instance_cache_->mem_size;
DCHECK_NOT_NULL(mem_start);
DCHECK_NOT_NULL(mem_size);
@ -4051,7 +4017,7 @@ Node* WasmGraphBuilder::BuildAsmjsStoreMem(MachineType type, Node* index,
if (untrusted_code_mitigations_) {
// Condition the index with the memory mask.
Node* mem_mask = context_cache_->mem_mask;
Node* mem_mask = instance_cache_->mem_mask;
DCHECK_NOT_NULL(mem_mask);
index =
graph()->NewNode(jsgraph()->machine()->Word32And(), index, mem_mask);
@ -4666,8 +4632,8 @@ void RecordFunctionCompilation(CodeEventListener::LogEventsAndTags tag,
} // namespace
Handle<Code> CompileJSToWasmWrapper(Isolate* isolate, wasm::WasmModule* module,
Handle<WeakCell> weak_instance,
wasm::WasmCode* wasm_code, uint32_t index,
Address wasm_context_address,
bool use_trap_handler) {
const wasm::WasmFunction* func = &module->functions[index];
@ -4693,7 +4659,7 @@ Handle<Code> CompileJSToWasmWrapper(Isolate* isolate, wasm::WasmModule* module,
CEntryStub(isolate, 1).GetCode(), func->sig);
builder.set_control_ptr(&control);
builder.set_effect_ptr(&effect);
builder.BuildJSToWasmWrapper(wasm_code, wasm_context_address);
builder.BuildJSToWasmWrapper(weak_instance, wasm_code);
//----------------------------------------------------------------------------
// Run the compilation pipeline.
@ -4778,10 +4744,10 @@ void ValidateImportWrapperReferencesImmovables(Handle<Code> wrapper) {
} // namespace
Handle<Code> CompileWasmToJSWrapper(
Isolate* isolate, Handle<JSReceiver> target, wasm::FunctionSig* sig,
uint32_t index, wasm::ModuleOrigin origin, bool use_trap_handler,
Handle<FixedArray> global_js_imports_table) {
Handle<Code> CompileWasmToJSWrapper(Isolate* isolate, Handle<JSReceiver> target,
wasm::FunctionSig* sig, uint32_t index,
wasm::ModuleOrigin origin,
bool use_trap_handler) {
//----------------------------------------------------------------------------
// Create the Graph
//----------------------------------------------------------------------------
@ -4807,19 +4773,8 @@ Handle<Code> CompileWasmToJSWrapper(
source_position_table);
builder.set_control_ptr(&control);
builder.set_effect_ptr(&effect);
if (builder.BuildWasmToJSWrapper(target, global_js_imports_table, index)) {
global_js_imports_table->set(
OffsetForImportData(index, WasmGraphBuilder::kFunction), *target);
if (target->IsJSFunction()) {
Handle<JSFunction> function = Handle<JSFunction>::cast(target);
global_js_imports_table->set(
OffsetForImportData(index, WasmGraphBuilder::kFunctionContext),
function->context());
global_js_imports_table->set(
OffsetForImportData(index, WasmGraphBuilder::kGlobalProxy),
function->context()->global_proxy());
}
}
builder.BuildWasmToJSWrapper(target, index);
if (FLAG_trace_turbo_graph) { // Simple textual RPO.
OFStream os(stdout);
os << "-- Graph after change lowering -- " << std::endl;
@ -4844,14 +4799,7 @@ Handle<Code> CompileWasmToJSWrapper(
Handle<Code> code = Pipeline::GenerateCodeForTesting(
&info, isolate, incoming, &graph, nullptr, source_position_table);
ValidateImportWrapperReferencesImmovables(code);
Handle<FixedArray> deopt_data = isolate->factory()->NewFixedArray(2, TENURED);
intptr_t loc = reinterpret_cast<intptr_t>(global_js_imports_table.location());
Handle<Object> loc_handle = isolate->factory()->NewHeapNumberFromBits(loc);
deopt_data->set(0, *loc_handle);
Handle<Object> index_handle = isolate->factory()->NewNumberFromInt(
OffsetForImportData(index, WasmGraphBuilder::kFunction));
deopt_data->set(1, *index_handle);
code->set_deoptimization_data(*deopt_data);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_opt_code && !code.is_null()) {
CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
@ -4868,81 +4816,8 @@ Handle<Code> CompileWasmToJSWrapper(
return code;
}
Handle<Code> CompileWasmToWasmWrapper(Isolate* isolate, wasm::WasmCode* target,
wasm::FunctionSig* sig,
Address new_wasm_context_address) {
//----------------------------------------------------------------------------
// Create the Graph
//----------------------------------------------------------------------------
Zone zone(isolate->allocator(), ZONE_NAME);
Graph graph(&zone);
CommonOperatorBuilder common(&zone);
MachineOperatorBuilder machine(
&zone, MachineType::PointerRepresentation(),
InstructionSelector::SupportedMachineOperatorFlags(),
InstructionSelector::AlignmentRequirements());
JSGraph jsgraph(isolate, &graph, &common, nullptr, nullptr, &machine);
Node* control = nullptr;
Node* effect = nullptr;
ModuleEnv env(nullptr, target->HasTrapHandlerIndex());
WasmGraphBuilder builder(&env, &zone, &jsgraph, Handle<Code>(), sig);
builder.set_control_ptr(&control);
builder.set_effect_ptr(&effect);
builder.BuildWasmToWasmWrapper(target, new_wasm_context_address);
if (HasInt64ParamOrReturn(sig)) builder.LowerInt64();
if (FLAG_trace_turbo_graph) { // Simple textual RPO.
OFStream os(stdout);
os << "-- Graph after change lowering -- " << std::endl;
os << AsRPO(graph);
}
// Schedule and compile to machine code.
CallDescriptor* incoming = GetWasmCallDescriptor(&zone, sig);
if (machine.Is32()) {
incoming = GetI32WasmCallDescriptor(&zone, incoming);
}
bool debugging =
#if DEBUG
true;
#else
FLAG_print_opt_code || FLAG_trace_turbo || FLAG_trace_turbo_graph;
#endif
Vector<const char> func_name = ArrayVector("wasm-to-wasm");
static unsigned id = 0;
Vector<char> buffer;
if (debugging) {
buffer = Vector<char>::New(128);
int chars = SNPrintF(buffer, "wasm-to-wasm#%d", id);
func_name = Vector<const char>::cast(buffer.SubVector(0, chars));
}
CompilationInfo info(func_name, &zone, Code::WASM_TO_WASM_FUNCTION);
Handle<Code> code =
Pipeline::GenerateCodeForTesting(&info, isolate, incoming, &graph);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_opt_code && !code.is_null()) {
CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
OFStream os(tracing_scope.file());
code->Disassemble(buffer.start(), os);
}
#endif
if (debugging) {
buffer.Dispose();
}
if (isolate->logger()->is_logging_code_events() || isolate->is_profiling()) {
RecordFunctionCompilation(CodeEventListener::STUB_TAG, isolate, code,
"wasm-to-wasm");
}
return code;
}
Handle<Code> CompileWasmInterpreterEntry(Isolate* isolate, uint32_t func_index,
wasm::FunctionSig* sig,
Handle<WasmInstanceObject> instance) {
wasm::FunctionSig* sig) {
//----------------------------------------------------------------------------
// Create the Graph
//----------------------------------------------------------------------------
@ -5066,6 +4941,11 @@ Handle<Code> CompileCWasmEntry(Isolate* isolate, wasm::FunctionSig* sig) {
return code;
}
int FixedArrayOffsetMinusTag(uint32_t index) {
auto access = AccessBuilder::ForFixedArraySlot(index);
return access.offset - access.tag();
}
SourcePositionTable* WasmCompilationUnit::BuildGraphForWasmFunction(
double* decode_ms) {
base::ElapsedTimer decode_timer;
@ -5381,6 +5261,9 @@ wasm::WasmCode* WasmCompilationUnit::CompileWasmFunction(
#undef WASM_64
#undef FATAL_UNSUPPORTED_OPCODE
#undef WASM_INSTANCE_OBJECT_OFFSET
#undef LOAD_INSTANCE_FIELD
#undef LOAD_FIXED_ARRAY_SLOT
} // namespace compiler
} // namespace internal

View File

@ -43,34 +43,12 @@ class WasmCode;
namespace compiler {
// Indirect function tables contain a <smi(sig), code> pair for each entry.
enum FunctionTableEntries : int {
kFunctionTableSignatureOffset = 0,
kFunctionTableCodeOffset = 1,
kFunctionTableEntrySize = 2
};
constexpr inline int FunctionTableSigOffset(int i) {
return kFunctionTableEntrySize * i + kFunctionTableSignatureOffset;
}
constexpr inline int FunctionTableCodeOffset(int i) {
return kFunctionTableEntrySize * i + kFunctionTableCodeOffset;
}
// The {ModuleEnv} encapsulates the module data that is used by the
// {WasmGraphBuilder} during graph building. It represents the parameters to
// which the compiled code should be specialized, including which code to call
// for direct calls {function_code}, which tables to use for indirect calls
// {function_tables}, memory start address and size {mem_start, mem_size},
// as well as the module itself {module}.
// {WasmGraphBuilder} during graph building.
// ModuleEnvs are shareable across multiple compilations.
struct ModuleEnv {
// A pointer to the decoded module's static representation.
const wasm::WasmModule* module;
// The function tables are FixedArrays of <smi, code> pairs used to signature
// check and dispatch indirect calls. It has the same length as
// module.function_tables. We use the address to a global handle to the
// FixedArray.
const std::vector<Address> function_tables;
// True if trap handling should be used in compiled code, rather than
// compiling in bounds checks for each memory access.
@ -78,12 +56,6 @@ struct ModuleEnv {
ModuleEnv(const wasm::WasmModule* module, bool use_trap_handler)
: module(module), use_trap_handler(use_trap_handler) {}
ModuleEnv(const wasm::WasmModule* module,
std::vector<Address> function_tables, bool use_trap_handler)
: module(module),
function_tables(std::move(function_tables)),
use_trap_handler(use_trap_handler) {}
};
enum RuntimeExceptionSupport : bool {
@ -183,35 +155,28 @@ class WasmCompilationUnit final {
};
// Wraps a JS function, producing a code object that can be called from wasm.
// The global_js_imports_table is a global handle to a fixed array of target
// JSReceiver with the lifetime tied to the module. We store it's location (non
// GCable) in the generated code so that it can reside outside of GCed heap.
Handle<Code> CompileWasmToJSWrapper(Isolate* isolate, Handle<JSReceiver> target,
wasm::FunctionSig* sig, uint32_t index,
wasm::ModuleOrigin origin,
bool use_trap_handler,
Handle<FixedArray> global_js_imports_table);
bool use_trap_handler);
// Wraps a given wasm code object, producing a code object.
V8_EXPORT_PRIVATE Handle<Code> CompileJSToWasmWrapper(
Isolate* isolate, wasm::WasmModule* module, wasm::WasmCode* wasm_code,
uint32_t index, Address wasm_context_address, bool use_trap_handler);
// Wraps a wasm function, producing a code object that can be called from other
// wasm instances (the WasmContext address must be changed).
Handle<Code> CompileWasmToWasmWrapper(Isolate* isolate, wasm::WasmCode* target,
wasm::FunctionSig* sig,
Address new_wasm_context_address);
Isolate* isolate, wasm::WasmModule* module, Handle<WeakCell> weak_instance,
wasm::WasmCode* wasm_code, uint32_t index, bool use_trap_handler);
// Compiles a stub that redirects a call to a wasm function to the wasm
// interpreter. It's ABI compatible with the compiled wasm function.
Handle<Code> CompileWasmInterpreterEntry(Isolate* isolate, uint32_t func_index,
wasm::FunctionSig* sig,
Handle<WasmInstanceObject> instance);
wasm::FunctionSig* sig);
// Helper function to get the offset into a fixed array for a given {index}.
// TODO(titzer): access-builder.h is not accessible outside compiler. Move?
int FixedArrayOffsetMinusTag(uint32_t index);
enum CWasmEntryParameters {
kCodeObject,
kWasmContext,
kWasmInstance,
kArgumentsBuffer,
// marker:
kNumParameters
@ -222,12 +187,11 @@ enum CWasmEntryParameters {
// buffer and calls the wasm function given as first parameter.
Handle<Code> CompileCWasmEntry(Isolate* isolate, wasm::FunctionSig* sig);
// Values from the {WasmContext} are cached between WASM-level function calls.
// Values from the instance object are cached between WASM-level function calls.
// This struct allows the SSA environment handling this cache to be defined
// and manipulated in wasm-compiler.{h,cc} instead of inside the WASM decoder.
// (Note that currently, the globals base is immutable in a context, so not
// cached here.)
struct WasmContextCacheNodes {
// (Note that currently, the globals base is immutable, so not cached here.)
struct WasmInstanceCacheNodes {
Node* mem_start;
Node* mem_size;
Node* mem_mask;
@ -335,29 +299,16 @@ class WasmGraphBuilder {
Node* CallIndirect(uint32_t index, Node** args, Node*** rets,
wasm::WasmCodePosition position);
void BuildJSToWasmWrapper(wasm::WasmCode* wasm_code_start,
Address wasm_context_address);
enum ImportDataType {
kFunction = 1,
kGlobalProxy = 2,
kFunctionContext = 3,
};
Node* LoadImportDataAtOffset(int offset, Node* table);
Node* LoadNativeContext(Node* table);
Node* LoadImportData(int index, ImportDataType type, Node* table);
void BuildJSToWasmWrapper(Handle<WeakCell> weak_instance,
wasm::WasmCode* wasm_code);
bool BuildWasmToJSWrapper(Handle<JSReceiver> target,
Handle<FixedArray> global_js_imports_table,
int index);
void BuildWasmToWasmWrapper(wasm::WasmCode* wasm_code_start,
Address new_wasm_context_address);
void BuildWasmInterpreterEntry(uint32_t func_index);
void BuildCWasmEntry();
Node* ToJS(Node* node, wasm::ValueType type);
Node* FromJS(Node* node, Node* js_context, wasm::ValueType type);
Node* Invert(Node* node);
void GetFunctionTableNodes(uint32_t table_index, Node** table,
Node** table_size);
//-----------------------------------------------------------------------
// Operations that concern the linear memory.
@ -375,8 +326,8 @@ class WasmGraphBuilder {
wasm::ValueType type);
static void PrintDebugName(Node* node);
void set_wasm_context(Node* wasm_context) {
this->wasm_context_ = wasm_context;
void set_instance_node(Node* instance_node) {
this->instance_node_ = instance_node;
}
Node* Control() { return *control_; }
@ -389,17 +340,17 @@ class WasmGraphBuilder {
void GetGlobalBaseAndOffset(MachineType mem_type, uint32_t offset,
Node** base_node, Node** offset_node);
// Utilities to manipulate sets of context cache nodes.
void InitContextCache(WasmContextCacheNodes* context_cache);
void PrepareContextCacheForLoop(WasmContextCacheNodes* context_cache,
Node* control);
void NewContextCacheMerge(WasmContextCacheNodes* to,
WasmContextCacheNodes* from, Node* merge);
void MergeContextCacheInto(WasmContextCacheNodes* to,
WasmContextCacheNodes* from, Node* merge);
// Utilities to manipulate sets of instance cache nodes.
void InitInstanceCache(WasmInstanceCacheNodes* instance_cache);
void PrepareInstanceCacheForLoop(WasmInstanceCacheNodes* instance_cache,
Node* control);
void NewInstanceCacheMerge(WasmInstanceCacheNodes* to,
WasmInstanceCacheNodes* from, Node* merge);
void MergeInstanceCacheInto(WasmInstanceCacheNodes* to,
WasmInstanceCacheNodes* from, Node* merge);
void set_context_cache(WasmContextCacheNodes* context_cache) {
this->context_cache_ = context_cache;
void set_instance_cache(WasmInstanceCacheNodes* instance_cache) {
this->instance_cache_ = instance_cache;
}
wasm::FunctionSig* GetFunctionSignature() { return sig_; }
@ -446,15 +397,14 @@ class WasmGraphBuilder {
// env_ == nullptr means we're not compiling Wasm functions, such as for
// wrappers or interpreter stubs.
ModuleEnv* const env_ = nullptr;
SetOncePointer<Node> wasm_context_;
SetOncePointer<Node> instance_node_;
struct FunctionTableNodes {
Node* table_addr;
Node* size;
};
ZoneVector<FunctionTableNodes> function_tables_;
Node** control_ = nullptr;
Node** effect_ = nullptr;
WasmContextCacheNodes* context_cache_ = nullptr;
WasmInstanceCacheNodes* instance_cache_ = nullptr;
SetOncePointer<Node> globals_start_;
Node** cur_buffer_;
size_t cur_bufsize_;
@ -492,7 +442,8 @@ class WasmGraphBuilder {
Node* BuildCCall(MachineSignature* sig, Node* function, Args... args);
Node* BuildWasmCall(wasm::FunctionSig* sig, Node** args, Node*** rets,
wasm::WasmCodePosition position,
Node* wasm_context = nullptr, bool use_retpoline = false);
Node* instance_node = nullptr,
bool use_retpoline = false);
Node* BuildF32CopySign(Node* left, Node* right);
Node* BuildF64CopySign(Node* left, Node* right);
@ -610,9 +561,9 @@ class WasmGraphBuilder {
Builtins::Name GetBuiltinIdForTrap(wasm::TrapReason reason);
};
// The parameter index where the wasm_context paramter should be placed in wasm
// The parameter index where the instance parameter should be placed in wasm
// call descriptors. This is used by the Int64Lowering::LowerNode method.
constexpr int kWasmContextParameterIndex = 0;
constexpr int kWasmInstanceParameterIndex = 0;
V8_EXPORT_PRIVATE CallDescriptor* GetWasmCallDescriptor(
Zone* zone, wasm::FunctionSig* signature, bool use_retpoline = false);

View File

@ -32,6 +32,8 @@ MachineType MachineTypeFor(ValueType type) {
return MachineType::Float32();
case wasm::kWasmS128:
return MachineType::Simd128();
case wasm::kWasmAnyRef:
return MachineType::TaggedPointer();
default:
UNREACHABLE();
}
@ -225,15 +227,15 @@ static constexpr Allocator parameter_registers(kGPParamRegisters,
// General code uses the above configuration data.
CallDescriptor* GetWasmCallDescriptor(Zone* zone, wasm::FunctionSig* fsig,
bool use_retpoline) {
// The '+ 1' here is to accomodate the wasm_context as first parameter.
// The '+ 1' here is to accomodate the instance object as first parameter.
LocationSignature::Builder locations(zone, fsig->return_count(),
fsig->parameter_count() + 1);
// Add register and/or stack parameter(s).
Allocator params = parameter_registers;
// The wasm_context.
locations.AddParam(params.Next(MachineType::PointerRepresentation()));
// The instance object.
locations.AddParam(params.Next(MachineRepresentation::kTaggedPointer));
const int parameter_count = static_cast<int>(fsig->parameter_count());
for (int i = 0; i < parameter_count; i++) {

View File

@ -137,8 +137,8 @@ class ElementsAccessor {
virtual uint32_t Push(Handle<JSArray> receiver, Arguments* args,
uint32_t push_size) = 0;
virtual uint32_t Unshift(Handle<JSArray> receiver,
Arguments* args, uint32_t unshift_size) = 0;
virtual uint32_t Unshift(Handle<JSArray> receiver, Arguments* args,
uint32_t unshift_size) = 0;
virtual Handle<JSObject> Slice(Handle<JSObject> receiver, uint32_t start,
uint32_t end) = 0;

View File

@ -1140,8 +1140,6 @@ void JSFunction::JSFunctionPrint(std::ostream& os) { // NOLINT
WasmExportedFunction* function = WasmExportedFunction::cast(this);
os << "\n - WASM instance "
<< reinterpret_cast<void*>(function->instance());
os << "\n context "
<< reinterpret_cast<void*>(function->instance()->wasm_context()->get());
os << "\n - WASM function index " << function->function_index();
}
shared()->PrintSourceCode(os);

View File

@ -14226,7 +14226,6 @@ bool Code::IsProcessIndependent() {
mode_mask ==
(RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::WASM_CONTEXT_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::WASM_GLOBAL_HANDLE) |
RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL) |

View File

@ -37,6 +37,7 @@ WasmInstanceObject* GetWasmInstanceOnStackTop(Isolate* isolate) {
return owning_instance;
}
// TODO(titzer): rename to GetNativeContextFromWasmInstanceOnStackTop()
Context* GetWasmContextOnStackTop(Isolate* isolate) {
return GetWasmInstanceOnStackTop(isolate)
->compiled_module()

View File

@ -28,17 +28,17 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
BAILOUT("LoadConstant");
}
void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
int size) {
BAILOUT("LoadFromContext");
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
int size) {
BAILOUT("LoadFromInstance");
}
void LiftoffAssembler::SpillContext(Register context) {
BAILOUT("SpillContext");
void LiftoffAssembler::SpillInstance(Register instance) {
BAILOUT("SpillInstance");
}
void LiftoffAssembler::FillContextInto(Register dst) {
BAILOUT("FillContextInto");
void LiftoffAssembler::FillInstanceInto(Register dst) {
BAILOUT("FillInstanceInto");
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,

View File

@ -28,17 +28,17 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
BAILOUT("LoadConstant");
}
void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
int size) {
BAILOUT("LoadFromContext");
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
int size) {
BAILOUT("LoadFromInstance");
}
void LiftoffAssembler::SpillContext(Register context) {
BAILOUT("SpillContext");
void LiftoffAssembler::SpillInstance(Register instance) {
BAILOUT("SpillInstance");
}
void LiftoffAssembler::FillContextInto(Register dst) {
BAILOUT("FillContextInto");
void LiftoffAssembler::FillInstanceInto(Register dst) {
BAILOUT("FillInstanceInto");
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,

View File

@ -16,8 +16,8 @@ namespace wasm {
namespace liftoff {
// ebp-8 holds the stack marker, ebp-16 is the wasm context, first stack slot
// is located at ebp-24.
// ebp-8 holds the stack marker, ebp-16 is the instance parameter, first stack
// slot is located at ebp-24.
constexpr int32_t kConstantStackSpace = 16;
constexpr int32_t kFirstStackSlotOffset =
kConstantStackSpace + LiftoffAssembler::kStackSlotSize;
@ -33,7 +33,7 @@ inline Operand GetHalfStackSlot(uint32_t half_index) {
}
// TODO(clemensh): Make this a constexpr variable once Operand is constexpr.
inline Operand GetContextOperand() { return Operand(ebp, -16); }
inline Operand GetInstanceOperand() { return Operand(ebp, -16); }
static constexpr LiftoffRegList kByteRegs =
LiftoffRegList::FromBits<Register::ListOf<eax, ecx, edx, ebx>()>();
@ -133,20 +133,20 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
}
}
void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
int size) {
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
int size) {
DCHECK_LE(offset, kMaxInt);
mov(dst, liftoff::GetContextOperand());
mov(dst, liftoff::GetInstanceOperand());
DCHECK_EQ(4, size);
mov(dst, Operand(dst, offset));
}
void LiftoffAssembler::SpillContext(Register context) {
mov(liftoff::GetContextOperand(), context);
void LiftoffAssembler::SpillInstance(Register instance) {
mov(liftoff::GetInstanceOperand(), instance);
}
void LiftoffAssembler::FillContextInto(Register dst) {
mov(dst, liftoff::GetContextOperand());
void LiftoffAssembler::FillInstanceInto(Register dst) {
mov(dst, liftoff::GetInstanceOperand());
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
@ -1182,7 +1182,7 @@ void LiftoffAssembler::CallNativeWasmCode(Address addr) {
}
void LiftoffAssembler::CallRuntime(Zone* zone, Runtime::FunctionId fid) {
// Set context to zero.
// Set instance to zero.
xor_(esi, esi);
CallRuntimeDelayed(zone, fid);
}

View File

@ -437,7 +437,7 @@ void LiftoffAssembler::SpillAllRegisters() {
void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
compiler::CallDescriptor* call_descriptor,
Register* target,
LiftoffRegister* explicit_context) {
LiftoffRegister* target_instance) {
uint32_t num_params = static_cast<uint32_t>(sig->parameter_count());
// Input 0 is the call target.
constexpr size_t kInputShift = 1;
@ -455,14 +455,14 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
StackTransferRecipe stack_transfers(this);
LiftoffRegList param_regs;
// Move the explicit context (if any) into the correct context register.
compiler::LinkageLocation context_loc =
// Move the target instance (if supplied) into the correct instance register.
compiler::LinkageLocation instance_loc =
call_descriptor->GetInputLocation(kInputShift);
DCHECK(context_loc.IsRegister() && !context_loc.IsAnyRegister());
LiftoffRegister context_reg(Register::from_code(context_loc.AsRegister()));
param_regs.set(context_reg);
if (explicit_context && *explicit_context != context_reg) {
stack_transfers.MoveRegister(context_reg, *explicit_context, kWasmIntPtr);
DCHECK(instance_loc.IsRegister() && !instance_loc.IsAnyRegister());
LiftoffRegister instance_reg(Register::from_code(instance_loc.AsRegister()));
param_regs.set(instance_reg);
if (target_instance && *target_instance != instance_reg) {
stack_transfers.MoveRegister(instance_reg, *target_instance, kWasmIntPtr);
}
// Now move all parameter values into the right slot for the call.
@ -504,7 +504,7 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
}
}
}
// {call_desc_input_idx} should point after the context parameter now.
// {call_desc_input_idx} should point after the instance parameter now.
DCHECK_EQ(call_desc_input_idx, kInputShift + 1);
// If the target register overlaps with a parameter register, then move the
@ -523,7 +523,7 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
}
}
// Execute the stack transfers before filling the context register.
// Execute the stack transfers before filling the instance register.
stack_transfers.Execute();
// Pop parameters from the value stack.
@ -533,9 +533,9 @@ void LiftoffAssembler::PrepareCall(wasm::FunctionSig* sig,
// Reset register use counters.
cache_state_.reset_used_registers();
// Reload the context from the stack.
if (!explicit_context) {
FillContextInto(context_reg.gp());
// Reload the instance from the stack.
if (!target_instance) {
FillInstanceInto(instance_reg.gp());
}
}

View File

@ -322,7 +322,7 @@ class LiftoffAssembler : public TurboAssembler {
// register, or {no_reg} if target was spilled to the stack.
void PrepareCall(wasm::FunctionSig*, compiler::CallDescriptor*,
Register* target = nullptr,
LiftoffRegister* explicit_context = nullptr);
LiftoffRegister* target_instance = nullptr);
// Process return values of the call.
void FinishCall(wasm::FunctionSig*, compiler::CallDescriptor*);
@ -352,9 +352,9 @@ class LiftoffAssembler : public TurboAssembler {
inline void LoadConstant(LiftoffRegister, WasmValue,
RelocInfo::Mode rmode = RelocInfo::NONE);
inline void LoadFromContext(Register dst, uint32_t offset, int size);
inline void SpillContext(Register context);
inline void FillContextInto(Register dst);
inline void LoadFromInstance(Register dst, uint32_t offset, int size);
inline void SpillInstance(Register instance);
inline void FillInstanceInto(Register dst);
inline void Load(LiftoffRegister dst, Register src_addr, Register offset_reg,
uint32_t offset_imm, LoadType type, LiftoffRegList pinned,
uint32_t* protected_load_pc = nullptr);

View File

@ -32,6 +32,18 @@ namespace {
if (FLAG_trace_liftoff) PrintF("[liftoff] " __VA_ARGS__); \
} while (false)
#define WASM_INSTANCE_OBJECT_OFFSET(name) \
(WasmInstanceObject::k##name##Offset - kHeapObjectTag)
#define LOAD_INSTANCE_FIELD(dst, name, type) \
__ LoadFromInstance(dst.gp(), WASM_INSTANCE_OBJECT_OFFSET(name), \
LoadType(type).size());
#define FIXED_ARRAY_HEADER_SIZE (FixedArray::kHeaderSize - kHeapObjectTag)
constexpr LoadType::LoadTypeValue kPointerLoadType =
kPointerSize == 8 ? LoadType::kI64Load : LoadType::kI32Load;
#if V8_TARGET_ARCH_ARM64
// On ARM64, the Assembler keeps track of pointers to Labels to resolve
// branches to distant targets. Moving labels would confuse the Assembler,
@ -270,24 +282,24 @@ class LiftoffCompiler {
// finish compilation without errors even if we hit unimplemented
// LiftoffAssembler methods.
if (DidAssemblerBailout(decoder)) return;
// Parameter 0 is the wasm context.
// Parameter 0 is the instance parameter.
uint32_t num_params =
static_cast<uint32_t>(decoder->sig_->parameter_count());
for (uint32_t i = 0; i < __ num_locals(); ++i) {
if (!CheckSupportedType(decoder, kTypes_ilfd, __ local_type(i), "param"))
return;
}
// Input 0 is the call target, the context is at 1.
constexpr int kContextParameterIndex = 1;
// Store the context parameter to a special stack slot.
compiler::LinkageLocation context_loc =
descriptor_->GetInputLocation(kContextParameterIndex);
DCHECK(context_loc.IsRegister());
DCHECK(!context_loc.IsAnyRegister());
Register context_reg = Register::from_code(context_loc.AsRegister());
__ SpillContext(context_reg);
// Input 0 is the code target, 1 is the context. First parameter at 2.
uint32_t input_idx = kContextParameterIndex + 1;
// Input 0 is the call target, the instance is at 1.
constexpr int kInstanceParameterIndex = 1;
// Store the instance parameter to a special stack slot.
compiler::LinkageLocation instance_loc =
descriptor_->GetInputLocation(kInstanceParameterIndex);
DCHECK(instance_loc.IsRegister());
DCHECK(!instance_loc.IsAnyRegister());
Register instance_reg = Register::from_code(instance_loc.AsRegister());
__ SpillInstance(instance_reg);
// Input 0 is the code target, 1 is the instance. First parameter at 2.
uint32_t input_idx = kInstanceParameterIndex + 1;
for (uint32_t param_idx = 0; param_idx < num_params; ++param_idx) {
input_idx += ProcessParameter(__ local_type(param_idx), input_idx);
}
@ -940,13 +952,12 @@ class LiftoffCompiler {
if (!CheckSupportedType(decoder, kTypes_ilfd, global->type, "global"))
return;
LiftoffRegList pinned;
Register addr = pinned.set(__ GetUnusedRegister(kGpReg)).gp();
__ LoadFromContext(addr, offsetof(WasmContext, globals_start),
kPointerSize);
LiftoffRegister addr = pinned.set(__ GetUnusedRegister(kGpReg));
LOAD_INSTANCE_FIELD(addr, GlobalsStart, kPointerLoadType);
LiftoffRegister value =
pinned.set(__ GetUnusedRegister(reg_class_for(global->type), pinned));
LoadType type = LoadType::ForValueType(global->type);
__ Load(value, addr, no_reg, global->offset, type, pinned);
__ Load(value, addr.gp(), no_reg, global->offset, type, pinned);
__ PushRegister(global->type, value);
}
@ -956,12 +967,11 @@ class LiftoffCompiler {
if (!CheckSupportedType(decoder, kTypes_ilfd, global->type, "global"))
return;
LiftoffRegList pinned;
Register addr = pinned.set(__ GetUnusedRegister(kGpReg)).gp();
__ LoadFromContext(addr, offsetof(WasmContext, globals_start),
kPointerSize);
LiftoffRegister addr = pinned.set(__ GetUnusedRegister(kGpReg));
LOAD_INSTANCE_FIELD(addr, GlobalsStart, kPointerLoadType);
LiftoffRegister reg = pinned.set(__ PopToRegister(pinned));
StoreType type = StoreType::ForValueType(global->type);
__ Store(addr, no_reg, global->offset, reg, type, pinned);
__ Store(addr.gp(), no_reg, global->offset, reg, type, pinned);
}
void Unreachable(Decoder* decoder) { unsupported(decoder, "unreachable"); }
@ -1116,7 +1126,7 @@ class LiftoffCompiler {
LiftoffRegister end_offset_reg =
pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LiftoffRegister mem_size = __ GetUnusedRegister(kGpReg, pinned);
__ LoadFromContext(mem_size.gp(), offsetof(WasmContext, mem_size), 4);
LOAD_INSTANCE_FIELD(mem_size, MemorySize, LoadType::kI32Load);
__ LoadConstant(end_offset_reg, WasmValue(end_offset));
if (end_offset >= min_size_) {
__ emit_cond_jump(kUnsignedGreaterEqual, trap_label, kWasmI32,
@ -1207,12 +1217,12 @@ class LiftoffCompiler {
if (BoundsCheckMem(decoder, type.size(), operand.offset, index, pinned)) {
return;
}
Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
__ LoadFromContext(addr, offsetof(WasmContext, mem_start), kPointerSize);
LiftoffRegister addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LOAD_INSTANCE_FIELD(addr, MemoryStart, kPointerLoadType);
RegClass rc = reg_class_for(value_type);
LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned));
uint32_t protected_load_pc = 0;
__ Load(value, addr, index, operand.offset, type, pinned,
__ Load(value, addr.gp(), index, operand.offset, type, pinned,
&protected_load_pc);
if (env_->use_trap_handler) {
AddOutOfLineTrap(decoder->position(),
@ -1238,10 +1248,10 @@ class LiftoffCompiler {
if (BoundsCheckMem(decoder, type.size(), operand.offset, index, pinned)) {
return;
}
Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp();
__ LoadFromContext(addr, offsetof(WasmContext, mem_start), kPointerSize);
LiftoffRegister addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LOAD_INSTANCE_FIELD(addr, MemoryStart, kPointerLoadType);
uint32_t protected_store_pc = 0;
__ Store(addr, index, operand.offset, value, type, pinned,
__ Store(addr.gp(), index, operand.offset, value, type, pinned,
&protected_store_pc);
if (env_->use_trap_handler) {
AddOutOfLineTrap(decoder->position(),
@ -1276,19 +1286,55 @@ class LiftoffCompiler {
call_descriptor =
GetLoweredCallDescriptor(compilation_zone_, call_descriptor);
__ PrepareCall(operand.sig, call_descriptor);
if (operand.index < env_->module->num_imported_functions) {
// A direct call to an imported function.
LiftoffRegList pinned;
LiftoffRegister tmp = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LiftoffRegister target = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
source_position_table_builder_->AddPosition(
__ pc_offset(), SourcePosition(decoder->position()), false);
LiftoffRegister imported_targets = tmp;
LOAD_INSTANCE_FIELD(imported_targets, ImportedFunctionTargets,
kPointerLoadType);
__ Load(target, imported_targets.gp(), no_reg,
operand.index * sizeof(Address), kPointerLoadType, pinned);
// Just encode the function index. This will be patched at instantiation.
Address addr = reinterpret_cast<Address>(operand.index);
__ CallNativeWasmCode(addr);
LiftoffRegister imported_instances = tmp;
LOAD_INSTANCE_FIELD(imported_instances, ImportedFunctionInstances,
kPointerLoadType);
LiftoffRegister target_instance = tmp;
__ Load(target_instance, imported_instances.gp(), no_reg,
compiler::FixedArrayOffsetMinusTag(operand.index),
kPointerLoadType, pinned);
safepoint_table_builder_.DefineSafepoint(asm_, Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
LiftoffRegister* explicit_instance = &target_instance;
Register target_reg = target.gp();
__ PrepareCall(operand.sig, call_descriptor, &target_reg,
explicit_instance);
source_position_table_builder_->AddPosition(
__ pc_offset(), SourcePosition(decoder->position()), false);
__ FinishCall(operand.sig, call_descriptor);
__ CallIndirect(operand.sig, call_descriptor, target_reg);
safepoint_table_builder_.DefineSafepoint(asm_, Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
__ FinishCall(operand.sig, call_descriptor);
} else {
// A direct call within this module just gets the current instance.
__ PrepareCall(operand.sig, call_descriptor);
source_position_table_builder_->AddPosition(
__ pc_offset(), SourcePosition(decoder->position()), false);
// Just encode the function index. This will be patched at instantiation.
Address addr = reinterpret_cast<Address>(operand.index);
__ CallNativeWasmCode(addr);
safepoint_table_builder_.DefineSafepoint(asm_, Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt);
__ FinishCall(operand.sig, call_descriptor);
}
}
void CallIndirect(Decoder* decoder, const Value& index_val,
@ -1321,37 +1367,31 @@ class LiftoffCompiler {
pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LiftoffRegister scratch = pinned.set(__ GetUnusedRegister(kGpReg, pinned));
LiftoffRegister* explicit_context = nullptr;
// Bounds check against the table size.
Label* invalid_func_label = AddOutOfLineTrap(
decoder->position(), Builtins::kThrowWasmTrapFuncInvalid);
static constexpr LoadType kPointerLoadType =
kPointerSize == 8 ? LoadType::kI64Load : LoadType::kI32Load;
uint32_t canonical_sig_num = env_->module->signature_ids[operand.sig_index];
DCHECK_GE(canonical_sig_num, 0);
DCHECK_GE(kMaxInt, canonical_sig_num);
// Compare against table size stored in {wasm_context->table_size}.
__ LoadFromContext(tmp_const.gp(), offsetof(WasmContext, table_size),
sizeof(uint32_t));
// Compare against table size stored in
// {instance->indirect_function_table_size}.
LOAD_INSTANCE_FIELD(tmp_const, IndirectFunctionTableSize,
LoadType::kI32Load);
__ emit_cond_jump(kUnsignedGreaterEqual, invalid_func_label, kWasmI32,
index.gp(), tmp_const.gp());
// Load the table from {wasm_context->table}
__ LoadFromContext(table.gp(), offsetof(WasmContext, table), kPointerSize);
// Load the signature from {wasm_context->table[$index].sig_id}
// == wasm_context.table + $index * #sizeof(IndirectionFunctionTableEntry)
// + #offsetof(sig_id)
__ LoadConstant(
tmp_const,
WasmValue(static_cast<uint32_t>(sizeof(IndirectFunctionTableEntry))));
__ emit_i32_mul(index.gp(), index.gp(), tmp_const.gp());
__ Load(scratch, table.gp(), index.gp(),
offsetof(IndirectFunctionTableEntry, sig_id), LoadType::kI32Load,
pinned);
// Load the signature from {instance->ift_sig_ids[key]}
LOAD_INSTANCE_FIELD(table, IndirectFunctionTableSigIds, kPointerLoadType);
__ LoadConstant(tmp_const,
WasmValue(static_cast<uint32_t>(sizeof(uint32_t))));
// TODO(wasm): use a emit_i32_shli() instead of a multiply.
// (currently cannot use shl on ia32/x64 because it clobbers %rcx).
__ emit_i32_mul(index.gp(), index.gp(), tmp_const.gp());
__ Load(scratch, table.gp(), index.gp(), 0, LoadType::kI32Load, pinned);
// Compare against expected signature.
__ LoadConstant(tmp_const, WasmValue(canonical_sig_num));
Label* sig_mismatch_label = AddOutOfLineTrap(
@ -1360,18 +1400,22 @@ class LiftoffCompiler {
LiftoffAssembler::kWasmIntPtr, scratch.gp(),
tmp_const.gp());
// Load the target address from {wasm_context->table[$index].target}
__ Load(scratch, table.gp(), index.gp(),
offsetof(IndirectFunctionTableEntry, target), kPointerLoadType,
pinned);
if (kPointerSize == 8) {
// {index} has already been multiplied by 4. Multiply by another 2.
__ LoadConstant(tmp_const, WasmValue(2));
__ emit_i32_mul(index.gp(), index.gp(), tmp_const.gp());
}
// Load the context from {wasm_context->table[$index].context}
// TODO(wasm): directly allocate the correct context register to avoid
// any potential moves.
__ Load(tmp_const, table.gp(), index.gp(),
offsetof(IndirectFunctionTableEntry, context), kPointerLoadType,
pinned);
explicit_context = &tmp_const;
// Load the target from {instance->ift_targets[key]}
LOAD_INSTANCE_FIELD(table, IndirectFunctionTableTargets, kPointerLoadType);
__ Load(scratch, table.gp(), index.gp(), 0, kPointerLoadType, pinned);
// Load the instance from {instance->ift_instances[key]}
LOAD_INSTANCE_FIELD(table, IndirectFunctionTableInstances,
kPointerLoadType);
__ Load(tmp_const, table.gp(), index.gp(), FIXED_ARRAY_HEADER_SIZE,
kPointerLoadType, pinned);
LiftoffRegister* explicit_instance = &tmp_const;
source_position_table_builder_->AddPosition(
__ pc_offset(), SourcePosition(decoder->position()), false);
@ -1382,7 +1426,7 @@ class LiftoffCompiler {
GetLoweredCallDescriptor(compilation_zone_, call_descriptor);
Register target = scratch.gp();
__ PrepareCall(operand.sig, call_descriptor, &target, explicit_context);
__ PrepareCall(operand.sig, call_descriptor, &target, explicit_instance);
__ CallIndirect(operand.sig, call_descriptor, target);
safepoint_table_builder_.DefineSafepoint(asm_, Safepoint::kSimple, 0,
@ -1518,6 +1562,9 @@ bool compiler::WasmCompilationUnit::ExecuteLiftoffCompilation() {
#undef __
#undef TRACE
#undef WASM_INSTANCE_OBJECT_OFFSET
#undef LOAD_INSTANCE_FIELD
#undef FIXED_ARRAY_HEADER_SIZE
} // namespace internal
} // namespace v8

View File

@ -15,8 +15,8 @@ namespace wasm {
namespace liftoff {
// fp-8 holds the stack marker, fp-16 is the wasm context, first stack slot
// is located at fp-24.
// fp-8 holds the stack marker, fp-16 is the instance parameter, first stack
// slot is located at fp-24.
constexpr int32_t kConstantStackSpace = 16;
constexpr int32_t kFirstStackSlotOffset =
kConstantStackSpace + LiftoffAssembler::kStackSlotSize;
@ -31,7 +31,7 @@ inline MemOperand GetHalfStackSlot(uint32_t half_index) {
return MemOperand(fp, -kFirstStackSlotOffset - offset);
}
inline MemOperand GetContextOperand() { return MemOperand(fp, -16); }
inline MemOperand GetInstanceOperand() { return MemOperand(fp, -16); }
// Use this register to store the address of the last argument pushed on the
// stack for a call to C. This register must be callee saved according to the c
@ -129,20 +129,20 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
}
}
void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
int size) {
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
int size) {
DCHECK_LE(offset, kMaxInt);
lw(dst, liftoff::GetContextOperand());
lw(dst, liftoff::GetInstanceOperand());
DCHECK_EQ(4, size);
lw(dst, MemOperand(dst, offset));
}
void LiftoffAssembler::SpillContext(Register context) {
sw(context, liftoff::GetContextOperand());
void LiftoffAssembler::SpillInstance(Register instance) {
sw(instance, liftoff::GetInstanceOperand());
}
void LiftoffAssembler::FillContextInto(Register dst) {
lw(dst, liftoff::GetContextOperand());
void LiftoffAssembler::FillInstanceInto(Register dst) {
lw(dst, liftoff::GetInstanceOperand());
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
@ -880,7 +880,7 @@ void LiftoffAssembler::CallNativeWasmCode(Address addr) {
}
void LiftoffAssembler::CallRuntime(Zone* zone, Runtime::FunctionId fid) {
// Set context to zero.
// Set instance to zero.
TurboAssembler::Move(cp, zero_reg);
CallRuntimeDelayed(zone, fid);
}

View File

@ -15,8 +15,8 @@ namespace wasm {
namespace liftoff {
// fp-8 holds the stack marker, fp-16 is the wasm context, first stack slot
// is located at fp-24.
// fp-8 holds the stack marker, fp-16 is the instance parameter, first stack
// slot is located at fp-24.
constexpr int32_t kConstantStackSpace = 16;
constexpr int32_t kFirstStackSlotOffset =
kConstantStackSpace + LiftoffAssembler::kStackSlotSize;
@ -26,7 +26,7 @@ inline MemOperand GetStackSlot(uint32_t index) {
return MemOperand(fp, -kFirstStackSlotOffset - offset);
}
inline MemOperand GetContextOperand() { return MemOperand(fp, -16); }
inline MemOperand GetInstanceOperand() { return MemOperand(fp, -16); }
// Use this register to store the address of the last argument pushed on the
// stack for a call to C. This register must be callee saved according to the c
@ -120,10 +120,10 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
}
}
void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
int size) {
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
int size) {
DCHECK_LE(offset, kMaxInt);
ld(dst, liftoff::GetContextOperand());
ld(dst, liftoff::GetInstanceOperand());
DCHECK(size == 4 || size == 8);
if (size == 4) {
lw(dst, MemOperand(dst, offset));
@ -132,12 +132,12 @@ void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
}
}
void LiftoffAssembler::SpillContext(Register context) {
sd(context, liftoff::GetContextOperand());
void LiftoffAssembler::SpillInstance(Register instance) {
sd(instance, liftoff::GetInstanceOperand());
}
void LiftoffAssembler::FillContextInto(Register dst) {
ld(dst, liftoff::GetContextOperand());
void LiftoffAssembler::FillInstanceInto(Register dst) {
ld(dst, liftoff::GetInstanceOperand());
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
@ -707,7 +707,7 @@ void LiftoffAssembler::CallNativeWasmCode(Address addr) {
}
void LiftoffAssembler::CallRuntime(Zone* zone, Runtime::FunctionId fid) {
// Set context to zero.
// Set instance to zero.
TurboAssembler::Move(cp, zero_reg);
CallRuntimeDelayed(zone, fid);
}

View File

@ -28,17 +28,17 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
BAILOUT("LoadConstant");
}
void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
int size) {
BAILOUT("LoadFromContext");
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
int size) {
BAILOUT("LoadFromInstance");
}
void LiftoffAssembler::SpillContext(Register context) {
BAILOUT("SpillContext");
void LiftoffAssembler::SpillInstance(Register instance) {
BAILOUT("SpillInstance");
}
void LiftoffAssembler::FillContextInto(Register dst) {
BAILOUT("FillContextInto");
void LiftoffAssembler::FillInstanceInto(Register dst) {
BAILOUT("FillInstanceInto");
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,

View File

@ -28,17 +28,17 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
BAILOUT("LoadConstant");
}
void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
int size) {
BAILOUT("LoadFromContext");
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
int size) {
BAILOUT("LoadFromInstance");
}
void LiftoffAssembler::SpillContext(Register context) {
BAILOUT("SpillContext");
void LiftoffAssembler::SpillInstance(Register instance) {
BAILOUT("SpillInstance");
}
void LiftoffAssembler::FillContextInto(Register dst) {
BAILOUT("FillContextInto");
void LiftoffAssembler::FillInstanceInto(Register dst) {
BAILOUT("FillInstanceInto");
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,

View File

@ -16,8 +16,8 @@ namespace wasm {
namespace liftoff {
// rbp-8 holds the stack marker, rbp-16 is the wasm context, first stack slot
// is located at rbp-24.
// rbp-8 holds the stack marker, rbp-16 is the instance parameter, first stack
// slot is located at rbp-24.
constexpr int32_t kConstantStackSpace = 16;
constexpr int32_t kFirstStackSlotOffset =
kConstantStackSpace + LiftoffAssembler::kStackSlotSize;
@ -28,7 +28,7 @@ inline Operand GetStackSlot(uint32_t index) {
}
// TODO(clemensh): Make this a constexpr variable once Operand is constexpr.
inline Operand GetContextOperand() { return Operand(rbp, -16); }
inline Operand GetInstanceOperand() { return Operand(rbp, -16); }
// Use this register to store the address of the last argument pushed on the
// stack for a call to C. This register must be callee saved according to the c
@ -131,10 +131,10 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
}
}
void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
int size) {
void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
int size) {
DCHECK_LE(offset, kMaxInt);
movp(dst, liftoff::GetContextOperand());
movp(dst, liftoff::GetInstanceOperand());
DCHECK(size == 4 || size == 8);
if (size == 4) {
movl(dst, Operand(dst, offset));
@ -143,12 +143,12 @@ void LiftoffAssembler::LoadFromContext(Register dst, uint32_t offset,
}
}
void LiftoffAssembler::SpillContext(Register context) {
movp(liftoff::GetContextOperand(), context);
void LiftoffAssembler::SpillInstance(Register instance) {
movp(liftoff::GetInstanceOperand(), instance);
}
void LiftoffAssembler::FillContextInto(Register dst) {
movp(dst, liftoff::GetContextOperand());
void LiftoffAssembler::FillInstanceInto(Register dst) {
movp(dst, liftoff::GetInstanceOperand());
}
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
@ -972,7 +972,7 @@ void LiftoffAssembler::CallNativeWasmCode(Address addr) {
}
void LiftoffAssembler::CallRuntime(Zone* zone, Runtime::FunctionId fid) {
// Set context to zero.
// Set instance to zero.
xorp(rsi, rsi);
CallRuntimeDelayed(zone, fid);
}

View File

@ -774,7 +774,7 @@ class WasmDecoder : public Decoder {
case kExprGrowMemory:
case kExprCallFunction:
case kExprCallIndirect:
// Add context cache nodes to the assigned set.
// Add instance cache nodes to the assigned set.
// TODO(titzer): make this more clear.
assigned->Add(locals_count - 1);
length = OpcodeLength(decoder, pc);

View File

@ -37,7 +37,7 @@ struct SsaEnv {
State state;
TFNode* control;
TFNode* effect;
compiler::WasmContextCacheNodes context_cache;
compiler::WasmInstanceCacheNodes instance_cache;
TFNode** locals;
bool go() { return state >= kReached; }
@ -46,7 +46,7 @@ struct SsaEnv {
locals = nullptr;
control = nullptr;
effect = nullptr;
context_cache = {};
instance_cache = {};
}
void SetNotMerged() {
if (state == kMerged) state = kReached;
@ -100,14 +100,14 @@ class WasmGraphBuildingInterface {
: nullptr;
// The first '+ 1' is needed by TF Start node, the second '+ 1' is for the
// wasm_context parameter.
// instance parameter.
TFNode* start = builder_->Start(
static_cast<int>(decoder->sig_->parameter_count() + 1 + 1));
// Initialize the wasm_context (the paramater at index 0).
builder_->set_wasm_context(
builder_->Param(compiler::kWasmContextParameterIndex));
// Initialize the instance parameter (index 0).
builder_->set_instance_node(
builder_->Param(compiler::kWasmInstanceParameterIndex));
// Initialize local variables. Parameters are shifted by 1 because of the
// the wasm_context.
// the instance parameter.
uint32_t index = 0;
for (; index < decoder->sig_->parameter_count(); ++index) {
ssa_env->locals[index] = builder_->Param(index + 1);
@ -129,11 +129,10 @@ class WasmGraphBuildingInterface {
SetEnv(ssa_env);
}
// Reload the wasm context variables from the WasmContext structure attached
// to the memory object into the Ssa Environment.
// Reload the instance cache entries into the Ssa Environment.
void LoadContextIntoSsa(SsaEnv* ssa_env) {
if (!ssa_env || !ssa_env->go()) return;
builder_->InitContextCache(&ssa_env->context_cache);
builder_->InitInstanceCache(&ssa_env->instance_cache);
}
void StartFunctionBody(Decoder* decoder, Control* block) {
@ -366,7 +365,7 @@ class WasmGraphBuildingInterface {
void GrowMemory(Decoder* decoder, const Value& value, Value* result) {
result->node = BUILD(GrowMemory, value.node);
// Always reload the context cache after growing memory.
// Always reload the instance cache after growing memory.
LoadContextIntoSsa(ssa_env_);
}
@ -549,10 +548,10 @@ class WasmGraphBuildingInterface {
}
#endif
ssa_env_ = env;
// TODO(wasm): combine the control and effect pointers with context cache.
// TODO(wasm): combine the control and effect pointers with instance cache.
builder_->set_control_ptr(&env->control);
builder_->set_effect_ptr(&env->effect);
builder_->set_context_cache(&env->context_cache);
builder_->set_instance_cache(&env->instance_cache);
}
TFNode* CheckForException(Decoder* decoder, TFNode* node) {
@ -638,7 +637,7 @@ class WasmGraphBuildingInterface {
to->locals = from->locals;
to->control = from->control;
to->effect = from->effect;
to->context_cache = from->context_cache;
to->instance_cache = from->instance_cache;
break;
}
case SsaEnv::kReached: { // Create a new merge.
@ -662,9 +661,9 @@ class WasmGraphBuildingInterface {
builder_->Phi(decoder->GetLocalType(i), 2, vals, merge);
}
}
// Start a new merge from the context cache.
builder_->NewContextCacheMerge(&to->context_cache, &from->context_cache,
merge);
// Start a new merge from the instance cache.
builder_->NewInstanceCacheMerge(&to->instance_cache,
&from->instance_cache, merge);
break;
}
case SsaEnv::kMerged: {
@ -679,9 +678,9 @@ class WasmGraphBuildingInterface {
to->locals[i] = builder_->CreateOrMergeIntoPhi(
decoder->GetLocalType(i), merge, to->locals[i], from->locals[i]);
}
// Merge the context caches.
builder_->MergeContextCacheInto(&to->context_cache,
&from->context_cache, merge);
// Merge the instance caches.
builder_->MergeInstanceCacheInto(&to->instance_cache,
&from->instance_cache, merge);
break;
}
default:
@ -697,21 +696,22 @@ class WasmGraphBuildingInterface {
env->control = builder_->Loop(env->control);
env->effect = builder_->EffectPhi(1, &env->effect, env->control);
builder_->Terminate(env->effect, env->control);
// The '+ 1' here is to be able to set the context cache as assigned.
// The '+ 1' here is to be able to set the instance cache as assigned.
BitVector* assigned = WasmDecoder<validate>::AnalyzeLoopAssignment(
decoder, decoder->pc(), decoder->total_locals() + 1, decoder->zone());
if (decoder->failed()) return env;
if (assigned != nullptr) {
// Only introduce phis for variables assigned in this loop.
int context_cache_index = decoder->total_locals();
int instance_cache_index = decoder->total_locals();
for (int i = decoder->NumLocals() - 1; i >= 0; i--) {
if (!assigned->Contains(i)) continue;
env->locals[i] = builder_->Phi(decoder->GetLocalType(i), 1,
&env->locals[i], env->control);
}
// Introduce phis for context cache pointers if necessary.
if (assigned->Contains(context_cache_index)) {
builder_->PrepareContextCacheForLoop(&env->context_cache, env->control);
// Introduce phis for instance cache pointers if necessary.
if (assigned->Contains(instance_cache_index)) {
builder_->PrepareInstanceCacheForLoop(&env->instance_cache,
env->control);
}
SsaEnv* loop_body_env = Split(decoder, env);
@ -726,8 +726,8 @@ class WasmGraphBuildingInterface {
&env->locals[i], env->control);
}
// Conservatively introduce phis for context cache.
builder_->PrepareContextCacheForLoop(&env->context_cache, env->control);
// Conservatively introduce phis for instance cache.
builder_->PrepareInstanceCacheForLoop(&env->instance_cache, env->control);
SsaEnv* loop_body_env = Split(decoder, env);
builder_->StackCheck(decoder->position(), &loop_body_env->effect,
@ -750,11 +750,11 @@ class WasmGraphBuildingInterface {
size > 0 ? reinterpret_cast<TFNode**>(decoder->zone()->New(size))
: nullptr;
memcpy(result->locals, from->locals, size);
result->context_cache = from->context_cache;
result->instance_cache = from->instance_cache;
} else {
result->state = SsaEnv::kUnreachable;
result->locals = nullptr;
result->context_cache = {};
result->instance_cache = {};
}
return result;
@ -770,7 +770,7 @@ class WasmGraphBuildingInterface {
result->locals = from->locals;
result->control = from->control;
result->effect = from->effect;
result->context_cache = from->context_cache;
result->instance_cache = from->instance_cache;
from->Kill(SsaEnv::kUnreachable);
return result;
}
@ -782,7 +782,7 @@ class WasmGraphBuildingInterface {
result->control = nullptr;
result->effect = nullptr;
result->locals = nullptr;
result->context_cache = {};
result->instance_cache = {};
return result;
}

File diff suppressed because it is too large Load Diff

View File

@ -70,7 +70,7 @@ Address CompileLazy(Isolate* isolate);
// logic to actually orchestrate parallel execution of wasm compilation jobs.
// TODO(clemensh): Implement concurrent lazy compilation.
class LazyCompilationOrchestrator {
const WasmCode* CompileFunction(Isolate*, Handle<WasmInstanceObject>,
const WasmCode* CompileFunction(Isolate*, Handle<WasmCompiledModule>,
int func_index);
public:
@ -79,9 +79,8 @@ class LazyCompilationOrchestrator {
Handle<Code> caller,
uint32_t exported_func_index);
const wasm::WasmCode* CompileDirectCall(Isolate*, Handle<WasmInstanceObject>,
Maybe<uint32_t>,
const WasmCode* caller,
int call_offset);
int caller_ret_offset);
const wasm::WasmCode* CompileIndirectCall(Isolate*,
Handle<WasmInstanceObject>,
uint32_t func_index);

View File

@ -449,9 +449,15 @@ void NativeModule::ResizeCodeTableForTest(size_t last_index) {
}
WasmCode* NativeModule::GetCode(uint32_t index) const {
DCHECK_LT(index, FunctionCount());
return code_table_[index];
}
void NativeModule::SetCode(uint32_t index, WasmCode* wasm_code) {
DCHECK_LT(index, FunctionCount());
code_table_[index] = wasm_code;
}
uint32_t NativeModule::FunctionCount() const {
DCHECK_LE(code_table_.size(), std::numeric_limits<uint32_t>::max());
return static_cast<uint32_t>(code_table_.size());
@ -594,6 +600,10 @@ WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code,
// made while iterating over the RelocInfo above.
Assembler::FlushICache(ret->instructions().start(),
ret->instructions().size());
if (FLAG_print_wasm_code) {
// TODO(mstarzinger): don't need the isolate here.
ret->Print(code->GetIsolate());
}
return ret;
}

View File

@ -258,6 +258,7 @@ class V8_EXPORT_PRIVATE NativeModule final {
// FunctionCount is WasmModule::functions.size().
uint32_t FunctionCount() const;
WasmCode* GetCode(uint32_t index) const;
void SetCode(uint32_t index, WasmCode* wasm_code);
// We special-case lazy cloning because we currently rely on making copies
// of the lazy builtin, to be able to identify, in the runtime, which function

View File

@ -63,10 +63,12 @@ CodeSpecialization::CodeSpecialization(Isolate* isolate, Zone* zone) {}
CodeSpecialization::~CodeSpecialization() {}
void CodeSpecialization::RelocateWasmContextReferences(Address new_context) {
DCHECK_NOT_NULL(new_context);
DCHECK_NULL(new_wasm_context_address_);
new_wasm_context_address_ = new_context;
void CodeSpecialization::UpdateInstanceReferences(
Handle<WeakCell> old_weak_instance, Handle<WeakCell> new_weak_instance) {
DCHECK(!old_weak_instance.is_null());
DCHECK(!new_weak_instance.is_null());
old_weak_instance_ = old_weak_instance;
new_weak_instance_ = new_weak_instance;
}
void CodeSpecialization::RelocateDirectCalls(NativeModule* native_module) {
@ -100,12 +102,11 @@ bool CodeSpecialization::ApplyToWholeModule(NativeModule* native_module,
changed |= ApplyToWasmCode(wasm_function, icache_flush_mode);
}
bool patch_wasm_weak_instances =
!old_weak_instance_.is_identical_to(new_weak_instance_);
// Patch all exported functions (JS_TO_WASM_FUNCTION).
int reloc_mode = 0;
// We need to patch WASM_CONTEXT_REFERENCE to put the correct address.
if (new_wasm_context_address_) {
reloc_mode |= RelocInfo::ModeMask(RelocInfo::WASM_CONTEXT_REFERENCE);
}
// Patch CODE_TARGET if we shall relocate direct calls. If we patch direct
// calls, the instance registered for that (relocate_direct_calls_module_)
// should match the instance we currently patch (instance).
@ -113,6 +114,10 @@ bool CodeSpecialization::ApplyToWholeModule(NativeModule* native_module,
DCHECK_EQ(native_module, relocate_direct_calls_module_);
reloc_mode |= RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL);
}
// Instance references are simply embedded objects.
if (patch_wasm_weak_instances) {
reloc_mode |= RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
}
if (!reloc_mode) return changed;
int wrapper_index = 0;
for (auto exp : module->export_table) {
@ -123,20 +128,25 @@ bool CodeSpecialization::ApplyToWholeModule(NativeModule* native_module,
for (RelocIterator it(export_wrapper, reloc_mode); !it.done(); it.next()) {
RelocInfo::Mode mode = it.rinfo()->rmode();
switch (mode) {
case RelocInfo::WASM_CONTEXT_REFERENCE:
it.rinfo()->set_wasm_context_reference(new_wasm_context_address_,
icache_flush_mode);
break;
case RelocInfo::JS_TO_WASM_CALL: {
changed = true;
const WasmCode* new_code = native_module->GetCode(exp.index);
it.rinfo()->set_js_to_wasm_address(new_code->instructions().start(),
icache_flush_mode);
} break;
case RelocInfo::EMBEDDED_OBJECT: {
changed = true;
const HeapObject* old = it.rinfo()->target_object();
if (*old_weak_instance_ == old) {
it.rinfo()->set_target_object(
*new_weak_instance_, WriteBarrierMode::UPDATE_WRITE_BARRIER,
icache_flush_mode);
}
} break;
default:
UNREACHABLE();
}
}
changed = true;
}
DCHECK_EQ(module->functions.size(), func_index);
DCHECK_EQ(compiled_module->export_wrappers()->length(), wrapper_index);

View File

@ -28,8 +28,10 @@ class CodeSpecialization {
CodeSpecialization(Isolate*, Zone*);
~CodeSpecialization();
// Update WasmContext references.
void RelocateWasmContextReferences(Address new_context);
// Update instance references in code. Instance references should only
// appear in export wrappers.
void UpdateInstanceReferences(Handle<WeakCell> old_weak_instance,
Handle<WeakCell> new_weak_instance);
// Update all direct call sites based on the code table in the given instance.
void RelocateDirectCalls(NativeModule* module);
// Apply all relocations and patching to all code in the instance (wasm code
@ -41,8 +43,8 @@ class CodeSpecialization {
ICacheFlushMode = FLUSH_ICACHE_IF_NEEDED);
private:
Address new_wasm_context_address_ = 0;
Handle<WeakCell> old_weak_instance_;
Handle<WeakCell> new_weak_instance_;
NativeModule* relocate_direct_calls_module_ = nullptr;
};

View File

@ -140,12 +140,13 @@ class InterpreterHandle {
}
public:
// TODO(wasm): properly handlify this constructor.
InterpreterHandle(Isolate* isolate, WasmDebugInfo* debug_info)
: isolate_(isolate),
module_(
debug_info->wasm_instance()->compiled_module()->shared()->module()),
interpreter_(isolate, module_, GetBytes(debug_info),
debug_info->wasm_instance()->wasm_context()->get()) {}
handle(debug_info->wasm_instance())) {}
~InterpreterHandle() { DCHECK_EQ(0, activations_.size()); }
@ -197,8 +198,6 @@ class InterpreterHandle {
uint32_t activation_id = StartActivation(frame_pointer);
WasmInterpreter::HeapObjectsScope heap_objects_scope(&interpreter_,
instance_object);
WasmInterpreter::Thread* thread = interpreter_.GetThread(0);
thread->InitFrame(&module()->functions[func_index], wasm_args.start());
bool finished = false;
@ -681,7 +680,7 @@ void WasmDebugInfo::RedirectToInterpreter(Handle<WasmDebugInfo> debug_info,
if (!interpreted_functions->get(func_index)->IsUndefined(isolate)) continue;
Handle<Code> new_code = compiler::CompileWasmInterpreterEntry(
isolate, func_index, module->functions[func_index].sig, instance);
isolate, func_index, module->functions[func_index].sig);
const wasm::WasmCode* wasm_new_code =
native_module->AddInterpreterWrapper(new_code, func_index);
const wasm::WasmCode* old_code =

View File

@ -634,29 +634,6 @@ const char* OpcodeName(uint32_t val) {
return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(val));
}
// Unwrap a wasm to js wrapper, return the callable heap object.
// If the wrapper would throw a TypeError, return a null handle.
Handle<HeapObject> UnwrapWasmToJSWrapper(Isolate* isolate,
const wasm::WasmCode* wasm_code) {
Handle<FixedArray> js_imports_table;
int index = 0;
DCHECK_EQ(wasm::WasmCode::kWasmToJsWrapper, wasm_code->kind());
js_imports_table = Handle<FixedArray>(wasm_code->native_module()
->compiled_module()
->owning_instance()
->js_imports_table());
index = 1 + 3 * static_cast<int>(wasm_code->index());
Handle<Object> obj(js_imports_table->get(index), isolate);
if (obj->IsCallable()) {
return Handle<HeapObject>::cast(obj);
} else {
// If we did not find a callable object, this is an illegal JS import and
// obj must be undefined.
DCHECK(obj->IsUndefined(isolate));
return Handle<HeapObject>::null();
}
}
class SideTable;
// Code and metadata needed to execute a function.
@ -951,9 +928,6 @@ class CodeMap {
Zone* zone_;
const WasmModule* module_;
ZoneVector<InterpreterCode> interpreter_code_;
// This handle is set and reset by the SetInstanceObject() /
// ClearInstanceObject() method, which is used by the HeapObjectsScope.
Handle<WasmInstanceObject> instance_;
// TODO(wasm): Remove this testing wart. It is needed because interpreter
// entry stubs are not generated in testing the interpreter in cctests.
bool call_indirect_through_module_ = false;
@ -981,27 +955,7 @@ class CodeMap {
call_indirect_through_module_ = val;
}
void SetInstanceObject(Handle<WasmInstanceObject> instance) {
DCHECK(instance_.is_null());
instance_ = instance;
}
void ClearInstanceObject() { instance_ = Handle<WasmInstanceObject>::null(); }
const WasmModule* module() const { return module_; }
bool has_instance() const { return !instance_.is_null(); }
WasmInstanceObject* instance() const {
DCHECK(has_instance());
return *instance_;
}
const wasm::WasmCode* GetImportedFunction(uint32_t function_index) {
DCHECK(has_instance());
DCHECK_GT(module_->num_imported_functions, function_index);
const wasm::NativeModule* native_module =
instance()->compiled_module()->GetNativeModule();
return native_module->GetCode(function_index);
}
InterpreterCode* GetCode(const WasmFunction* function) {
InterpreterCode* code = GetCode(function->func_index);
@ -1080,60 +1034,6 @@ class CodeMap {
}
};
Handle<Object> WasmValueToNumber(Factory* factory, WasmValue val,
wasm::ValueType type) {
switch (type) {
case kWasmI32:
return factory->NewNumberFromInt(val.to<int32_t>());
case kWasmI64:
// wasm->js and js->wasm is illegal for i64 type.
UNREACHABLE();
case kWasmF32:
return factory->NewNumber(val.to<float>());
case kWasmF64:
return factory->NewNumber(val.to<double>());
default:
// TODO(wasm): Implement simd.
UNIMPLEMENTED();
return Handle<Object>::null();
}
}
// Convert JS value to WebAssembly, spec here:
// https://github.com/WebAssembly/design/blob/master/JS.md#towebassemblyvalue
// Return WasmValue() (i.e. of type kWasmStmt) on failure. In that case, an
// exception will be pending on the isolate.
WasmValue ToWebAssemblyValue(Isolate* isolate, Handle<Object> value,
wasm::ValueType type) {
switch (type) {
case kWasmI32: {
MaybeHandle<Object> maybe_i32 = Object::ToInt32(isolate, value);
if (maybe_i32.is_null()) return {};
int32_t value;
CHECK(maybe_i32.ToHandleChecked()->ToInt32(&value));
return WasmValue(value);
}
case kWasmI64:
// If the signature contains i64, a type error was thrown before.
UNREACHABLE();
case kWasmF32: {
MaybeHandle<Object> maybe_number = Object::ToNumber(value);
if (maybe_number.is_null()) return {};
return WasmValue(
static_cast<float>(maybe_number.ToHandleChecked()->Number()));
}
case kWasmF64: {
MaybeHandle<Object> maybe_number = Object::ToNumber(value);
if (maybe_number.is_null()) return {};
return WasmValue(maybe_number.ToHandleChecked()->Number());
}
default:
// TODO(wasm): Handle simd.
UNIMPLEMENTED();
return WasmValue();
}
}
// Like a static_cast from src to dst, but specialized for boxed floats.
template <typename dst, typename src>
struct converter {
@ -1179,9 +1079,10 @@ class ThreadImpl {
};
public:
ThreadImpl(Zone* zone, CodeMap* codemap, WasmContext* wasm_context)
ThreadImpl(Zone* zone, CodeMap* codemap,
Handle<WasmInstanceObject> instance_object)
: codemap_(codemap),
wasm_context_(wasm_context),
instance_object_(instance_object),
zone_(zone),
frames_(zone),
activations_(zone) {}
@ -1341,7 +1242,7 @@ class ThreadImpl {
friend class InterpretedFrameImpl;
CodeMap* codemap_;
WasmContext* wasm_context_;
Handle<WasmInstanceObject> instance_object_;
Zone* zone_;
WasmValue* stack_start_ = nullptr; // Start of allocated stack space.
WasmValue* stack_limit_ = nullptr; // End of allocated stack space.
@ -1498,14 +1399,14 @@ class ThreadImpl {
template <typename mtype>
inline byte* BoundsCheckMem(uint32_t offset, uint32_t index) {
uint32_t mem_size = wasm_context_->mem_size;
size_t mem_size = instance_object_->memory_size();
if (sizeof(mtype) > mem_size) return nullptr;
if (offset > (mem_size - sizeof(mtype))) return nullptr;
if (index > (mem_size - sizeof(mtype) - offset)) return nullptr;
// Compute the effective address of the access, making sure to condition
// the index even in the in-bounds case.
return wasm_context_->mem_start + offset +
(index & wasm_context_->mem_mask);
return instance_object_->memory_start() + offset +
(index & instance_object_->memory_mask());
}
template <typename ctype, typename mtype>
@ -1529,7 +1430,7 @@ class ThreadImpl {
wasm::MemoryTracingInfo info(operand.offset + index, false, rep);
TraceMemoryOperation(ExecutionEngine::kInterpreter, &info,
code->function->func_index, static_cast<int>(pc),
wasm_context_->mem_start);
instance_object_->memory_start());
}
return true;
@ -1555,7 +1456,7 @@ class ThreadImpl {
wasm::MemoryTracingInfo info(operand.offset + index, true, rep);
TraceMemoryOperation(ExecutionEngine::kInterpreter, &info,
code->function->func_index, static_cast<int>(pc),
wasm_context_->mem_start);
instance_object_->memory_start());
}
return true;
@ -1710,14 +1611,10 @@ class ThreadImpl {
if (V8_LIKELY(current_stack_size <= stack_size_limit)) {
return true;
}
if (!codemap()->has_instance()) {
// In test mode: Just abort.
FATAL("wasm interpreter: stack overflow");
}
// The pc of the top frame is initialized to the first instruction. We reset
// it to 0 here such that we report the same position as in compiled code.
frames_.back().pc = 0;
Isolate* isolate = codemap()->instance()->GetIsolate();
Isolate* isolate = instance_object_->GetIsolate();
HandleScope handle_scope(isolate);
isolate->StackOverflow();
return HandleException(isolate) == WasmInterpreter::Thread::HANDLED;
@ -1999,7 +1896,7 @@ class ThreadImpl {
GlobalIndexOperand<Decoder::kNoValidate> operand(&decoder,
code->at(pc));
const WasmGlobal* global = &module()->globals[operand.index];
byte* ptr = wasm_context_->globals_start + global->offset;
byte* ptr = instance_object_->globals_start() + global->offset;
WasmValue val;
switch (global->type) {
#define CASE_TYPE(wasm, ctype) \
@ -2019,7 +1916,7 @@ class ThreadImpl {
GlobalIndexOperand<Decoder::kNoValidate> operand(&decoder,
code->at(pc));
const WasmGlobal* global = &module()->globals[operand.index];
byte* ptr = wasm_context_->globals_start + global->offset;
byte* ptr = instance_object_->globals_start() + global->offset;
WasmValue val = Pop();
switch (global->type) {
#define CASE_TYPE(wasm, ctype) \
@ -2125,10 +2022,7 @@ class ThreadImpl {
MemoryIndexOperand<Decoder::kNoValidate> operand(&decoder,
code->at(pc));
uint32_t delta_pages = Pop().to<uint32_t>();
Handle<WasmMemoryObject> memory(
codemap()->instance()->memory_object());
DCHECK_EQ(wasm_context_,
codemap()->instance()->wasm_context()->get());
Handle<WasmMemoryObject> memory(instance_object_->memory_object());
Isolate* isolate = memory->GetIsolate();
int32_t result = WasmMemoryObject::Grow(isolate, memory, delta_pages);
Push(WasmValue(result));
@ -2141,8 +2035,8 @@ class ThreadImpl {
case kExprMemorySize: {
MemoryIndexOperand<Decoder::kNoValidate> operand(&decoder,
code->at(pc));
Push(WasmValue(
static_cast<uint32_t>(wasm_context_->mem_size / kWasmPageSize)));
Push(WasmValue(static_cast<uint32_t>(instance_object_->memory_size() /
kWasmPageSize)));
len = 1 + operand.length;
break;
}
@ -2341,66 +2235,17 @@ class ThreadImpl {
return {ExternalCallResult::EXTERNAL_RETURNED};
}
ExternalCallResult CallExternalJSFunction(Isolate* isolate,
const wasm::WasmCode* code,
FunctionSig* signature) {
Handle<HeapObject> target = UnwrapWasmToJSWrapper(isolate, code);
if (target.is_null()) {
ExternalCallResult CallExternalWasmFunction(
Isolate* isolate, Handle<WasmInstanceObject> instance,
const wasm::WasmCode* code, FunctionSig* sig) {
if (code->kind() == wasm::WasmCode::kWasmToJsWrapper &&
!IsJSCompatibleSignature(sig)) {
isolate->Throw(*isolate->factory()->NewTypeError(
MessageTemplate::kWasmTrapTypeError));
return TryHandleException(isolate);
}
#if DEBUG
std::ostringstream oss;
target->HeapObjectShortPrint(oss);
TRACE(" => Calling imported function %s\n", oss.str().c_str());
#endif
int num_args = static_cast<int>(signature->parameter_count());
// Get all arguments as JS values.
std::vector<Handle<Object>> args;
args.reserve(num_args);
WasmValue* wasm_args = sp_ - num_args;
for (int i = 0; i < num_args; ++i) {
args.push_back(WasmValueToNumber(isolate->factory(), wasm_args[i],
signature->GetParam(i)));
}
// The receiver is the global proxy if in sloppy mode (default), undefined
// if in strict mode.
Handle<Object> receiver = isolate->global_proxy();
if (target->IsJSFunction() &&
is_strict(JSFunction::cast(*target)->shared()->language_mode())) {
receiver = isolate->factory()->undefined_value();
}
MaybeHandle<Object> maybe_retval =
Execution::Call(isolate, target, receiver, num_args, args.data());
if (maybe_retval.is_null()) return TryHandleException(isolate);
Handle<Object> retval = maybe_retval.ToHandleChecked();
// Pop arguments off the stack.
sp_ -= num_args;
// Push return values.
if (signature->return_count() > 0) {
// TODO(wasm): Handle multiple returns.
DCHECK_EQ(1, signature->return_count());
WasmValue value =
ToWebAssemblyValue(isolate, retval, signature->GetReturn());
if (value.type() == kWasmStmt) return TryHandleException(isolate);
Push(value);
}
return {ExternalCallResult::EXTERNAL_RETURNED};
}
ExternalCallResult CallExternalWasmFunction(Isolate* isolate,
const wasm::WasmCode* code,
FunctionSig* sig) {
Handle<WasmDebugInfo> debug_info(codemap()->instance()->debug_info(),
isolate);
Handle<WasmDebugInfo> debug_info(instance_object_->debug_info(), isolate);
Handle<JSFunction> wasm_entry =
WasmDebugInfo::GetCWasmEntry(debug_info, sig);
@ -2451,7 +2296,7 @@ class ThreadImpl {
arg_buffer.resize(return_size);
}
// Wrap the arg_buffer data pointer and the WasmContext* in a handle. As
// Wrap the arg_buffer data pointer in a handle. As
// this is an aligned pointer, to the GC it will look like a Smi.
Handle<Object> arg_buffer_obj(reinterpret_cast<Object*>(arg_buffer.data()),
isolate);
@ -2460,16 +2305,9 @@ class ThreadImpl {
static_assert(compiler::CWasmEntryParameters::kNumParameters == 3,
"code below needs adaption");
Handle<Object> args[compiler::CWasmEntryParameters::kNumParameters];
WasmContext* context = code->native_module()
->compiled_module()
->owning_instance()
->wasm_context()
->get();
Handle<Object> context_obj(reinterpret_cast<Object*>(context), isolate);
DCHECK(!context_obj->IsHeapObject());
args[compiler::CWasmEntryParameters::kCodeObject] = Handle<Object>::cast(
isolate->factory()->NewForeign(code->instructions().start(), TENURED));
args[compiler::CWasmEntryParameters::kWasmContext] = context_obj;
args[compiler::CWasmEntryParameters::kWasmInstance] = instance;
args[compiler::CWasmEntryParameters::kArgumentsBuffer] = arg_buffer_obj;
Handle<Object> receiver = isolate->factory()->undefined_value();
@ -2512,39 +2350,19 @@ class ThreadImpl {
return {ExternalCallResult::EXTERNAL_RETURNED};
}
ExternalCallResult CallWasmCode(Isolate* isolate, const wasm::WasmCode* code,
FunctionSig* signature) {
DCHECK(AllowHandleAllocation::IsAllowed());
DCHECK(AllowHeapAllocation::IsAllowed());
if (code->kind() == wasm::WasmCode::kFunction) {
if (code->native_module()->compiled_module()->owning_instance() !=
codemap()->instance()) {
return CallExternalWasmFunction(isolate, code, signature);
}
return {ExternalCallResult::INTERNAL, codemap()->GetCode(code->index())};
}
if (code->kind() == wasm::WasmCode::kWasmToJsWrapper) {
return CallExternalJSFunction(isolate, code, signature);
}
if (code->kind() == wasm::WasmCode::kWasmToWasmWrapper ||
code->kind() == wasm::WasmCode::kInterpreterStub) {
return CallExternalWasmFunction(isolate, code, signature);
}
return {ExternalCallResult::INVALID_FUNC};
}
ExternalCallResult CallImportedFunction(uint32_t function_index) {
// Use a new HandleScope to avoid leaking / accumulating handles in the
// outer scope.
Isolate* isolate = codemap()->instance()->GetIsolate();
Isolate* isolate = instance_object_->GetIsolate();
HandleScope handle_scope(isolate);
const wasm::WasmCode* target =
codemap()->GetImportedFunction(function_index);
return CallWasmCode(isolate, target,
codemap()->module()->functions[function_index].sig);
DCHECK_GT(module()->num_imported_functions, function_index);
auto entry = instance_object_->imported_function_entry_at(function_index);
Handle<WasmInstanceObject> instance(entry.instance(), isolate);
FunctionSig* sig = codemap()->module()->functions[function_index].sig;
auto code = isolate->wasm_engine()->code_manager()->GetCodeFromStartAddress(
entry.target());
return CallExternalWasmFunction(isolate, instance, code, sig);
}
ExternalCallResult CallIndirectFunction(uint32_t table_index,
@ -2569,47 +2387,48 @@ class ThreadImpl {
return {ExternalCallResult::INTERNAL, code};
}
WasmCompiledModule* compiled_module =
codemap()->instance()->compiled_module();
Isolate* isolate = compiled_module->GetIsolate();
Isolate* isolate = instance_object_->GetIsolate();
uint32_t expected_sig_id = module()->signature_ids[sig_index];
DCHECK_EQ(expected_sig_id,
module()->signature_map.Find(module()->signatures[sig_index]));
const wasm::WasmCode* target = nullptr;
{
DisallowHeapAllocation no_gc;
// Get function to be called directly from the live instance to see latest
// changes to the tables.
// The function table is stored in the instance.
// TODO(wasm): the wasm interpreter currently supports only one table.
CHECK_EQ(0, table_index);
// Bounds check against table size.
if (entry_index >= instance_object_->indirect_function_table_size()) {
return {ExternalCallResult::INVALID_FUNC};
}
auto entry =
instance_object_->indirect_function_table_entry_at(entry_index);
// Canonicalize signature index.
uint32_t canonical_sig_index = module()->signature_ids[sig_index];
DCHECK_EQ(canonical_sig_index,
module()->signature_map.Find(module()->signatures[sig_index]));
// The function table is stored in the wasm context.
// TODO(wasm): the wasm interpreter currently supports only one table.
CHECK_EQ(0, table_index);
// Bounds check against table size.
if (entry_index >= wasm_context_->table_size) {
return {ExternalCallResult::INVALID_FUNC};
}
// Signature check.
int32_t entry_sig = wasm_context_->table[entry_index].sig_id;
if (entry_sig != static_cast<int32_t>(canonical_sig_index)) {
return {ExternalCallResult::SIGNATURE_MISMATCH};
}
// Load the target address (first instruction of code).
Address first_instr = wasm_context_->table[entry_index].target;
// TODO(titzer): load the wasm context instead of relying on the
// target code being specialized to the target instance.
// Get code object.
target = isolate->wasm_engine()->code_manager()->GetCodeFromStartAddress(
first_instr);
// Signature check.
if (entry.sig_id() != static_cast<int32_t>(expected_sig_id)) {
return {ExternalCallResult::SIGNATURE_MISMATCH};
}
// Call the code object. Use a new HandleScope to avoid leaking /
// accumulating handles in the outer scope.
HandleScope handle_scope(isolate);
// Call either an internal or external WASM function.
HandleScope scope(isolate);
FunctionSig* signature = module()->signatures[sig_index];
return CallWasmCode(isolate, target, signature);
Handle<WasmInstanceObject> instance(entry.instance(), isolate);
// Lookup code object from entry address.
auto code = isolate->wasm_engine()->code_manager()->GetCodeFromStartAddress(
entry.target());
if (code->kind() == wasm::WasmCode::kFunction) {
if (!instance_object_.is_identical_to(instance)) {
// Cross instance call.
return CallExternalWasmFunction(isolate, instance, code, signature);
}
return {ExternalCallResult::INTERNAL, codemap()->GetCode(code->index())};
}
// Call to external function.
if (code->kind() == wasm::WasmCode::kInterpreterStub ||
code->kind() == wasm::WasmCode::kWasmToJsWrapper) {
return CallExternalWasmFunction(isolate, instance, code, signature);
}
return {ExternalCallResult::INVALID_FUNC};
}
inline Activation current_activation() {
@ -2698,37 +2517,6 @@ const InterpretedFrameImpl* ToImpl(const InterpretedFrame* frame) {
return reinterpret_cast<const InterpretedFrameImpl*>(frame);
}
//============================================================================
// Implementation details of the heap objects scope.
//============================================================================
class HeapObjectsScopeImpl {
public:
HeapObjectsScopeImpl(CodeMap* codemap, Handle<WasmInstanceObject> instance)
: codemap_(codemap), needs_reset(!codemap_->has_instance()) {
if (needs_reset) {
instance_ = handle(*instance);
codemap_->SetInstanceObject(instance_);
} else {
DCHECK_EQ(*instance, codemap_->instance());
return;
}
}
~HeapObjectsScopeImpl() {
if (!needs_reset) return;
DCHECK_EQ(*instance_, codemap_->instance());
codemap_->ClearInstanceObject();
// Clear the handle, such that anyone who accidentally copied them will
// notice.
*instance_.location() = nullptr;
}
private:
CodeMap* codemap_;
Handle<WasmInstanceObject> instance_;
bool needs_reset;
};
} // namespace
//============================================================================
@ -2809,23 +2597,42 @@ class WasmInterpreterInternals : public ZoneObject {
WasmInterpreterInternals(Isolate* isolate, Zone* zone,
const WasmModule* module,
const ModuleWireBytes& wire_bytes,
WasmContext* wasm_context)
Handle<WasmInstanceObject> instance_object)
: module_bytes_(wire_bytes.start(), wire_bytes.end(), zone),
codemap_(isolate, module, module_bytes_.data(), zone),
threads_(zone) {
threads_.emplace_back(zone, &codemap_, wasm_context);
threads_.emplace_back(zone, &codemap_, instance_object);
}
};
namespace {
// TODO(wasm): a finalizer is only required to delete the global handle.
void GlobalHandleDeleter(const v8::WeakCallbackInfo<void>& data) {
GlobalHandles::Destroy(reinterpret_cast<Object**>(
reinterpret_cast<JSObject**>(data.GetParameter())));
}
Handle<WasmInstanceObject> MakeWeak(
Isolate* isolate, Handle<WasmInstanceObject> instance_object) {
Handle<Object> handle = isolate->global_handles()->Create(*instance_object);
// TODO(wasm): use a phantom handle in the WasmInterpreter.
GlobalHandles::MakeWeak(handle.location(), handle.location(),
&GlobalHandleDeleter,
v8::WeakCallbackType::kFinalizer);
return Handle<WasmInstanceObject>::cast(handle);
}
} // namespace
//============================================================================
// Implementation of the public interface of the interpreter.
//============================================================================
WasmInterpreter::WasmInterpreter(Isolate* isolate, const WasmModule* module,
const ModuleWireBytes& wire_bytes,
WasmContext* wasm_context)
Handle<WasmInstanceObject> instance_object)
: zone_(isolate->allocator(), ZONE_NAME),
internals_(new (&zone_) WasmInterpreterInternals(
isolate, &zone_, module, wire_bytes, wasm_context)) {}
isolate, &zone_, module, wire_bytes,
MakeWeak(isolate, instance_object))) {}
WasmInterpreter::~WasmInterpreter() { internals_->~WasmInterpreterInternals(); }
@ -2931,19 +2738,6 @@ void InterpretedFrame::Deleter::operator()(InterpretedFrame* ptr) {
delete ToImpl(ptr);
}
//============================================================================
// Public API of the heap objects scope.
//============================================================================
WasmInterpreter::HeapObjectsScope::HeapObjectsScope(
WasmInterpreter* interpreter, Handle<WasmInstanceObject> instance) {
static_assert(sizeof(data) == sizeof(HeapObjectsScopeImpl), "Size mismatch");
new (data) HeapObjectsScopeImpl(&interpreter->internals_->codemap_, instance);
}
WasmInterpreter::HeapObjectsScope::~HeapObjectsScope() {
reinterpret_cast<HeapObjectsScopeImpl*>(data)->~HeapObjectsScopeImpl();
}
#undef TRACE
#undef FOREACH_INTERNAL_OPCODE
#undef WASM_CTYPES

View File

@ -16,7 +16,6 @@ class AccountingAllocator;
namespace internal {
class WasmInstanceObject;
struct WasmContext;
namespace wasm {
@ -88,19 +87,6 @@ class InterpretedFrame {
// An interpreter capable of executing WebAssembly.
class V8_EXPORT_PRIVATE WasmInterpreter {
public:
// Open a HeapObjectsScope before running any code in the interpreter which
// needs access to the instance object or needs to call to JS functions.
class V8_EXPORT_PRIVATE HeapObjectsScope {
public:
HeapObjectsScope(WasmInterpreter* interpreter,
Handle<WasmInstanceObject> instance);
~HeapObjectsScope();
private:
char data[3 * sizeof(void*)]; // must match sizeof(HeapObjectsScopeImpl).
DISALLOW_COPY_AND_ASSIGN(HeapObjectsScope);
};
// State machine for a Thread:
// +---------Run()/Step()--------+
// V |
@ -181,7 +167,8 @@ class V8_EXPORT_PRIVATE WasmInterpreter {
};
WasmInterpreter(Isolate* isolate, const WasmModule* module,
const ModuleWireBytes& wire_bytes, WasmContext* wasm_context);
const ModuleWireBytes& wire_bytes,
Handle<WasmInstanceObject> instance);
~WasmInterpreter();
//==========================================================================

View File

@ -65,19 +65,21 @@ SMI_ACCESSORS(WasmGlobalObject, offset, kOffsetOffset)
SMI_ACCESSORS(WasmGlobalObject, is_mutable, kIsMutableOffset)
// WasmInstanceObject
ACCESSORS(WasmInstanceObject, wasm_context, Managed<WasmContext>,
kWasmContextOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, memory_start, byte*, kMemoryStartOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, memory_size, uintptr_t,
kMemorySizeOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, memory_mask, uintptr_t,
kMemoryMaskOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, imported_function_targets, Address*,
kImportedFunctionTargetsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, globals_start, byte*,
kGlobalsStartOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table,
IndirectFunctionTableEntry*, kIndirectFunctionTableOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_size, uintptr_t,
kIndirectFunctionTableSizeOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_sig_ids,
uint32_t*, kIndirectFunctionTableSigIdsOffset)
PRIMITIVE_ACCESSORS(WasmInstanceObject, indirect_function_table_targets,
Address*, kIndirectFunctionTableTargetsOffset)
ACCESSORS(WasmInstanceObject, compiled_module, WasmCompiledModule,
kCompiledModuleOffset)
@ -90,12 +92,18 @@ OPTIONAL_ACCESSORS(WasmInstanceObject, debug_info, WasmDebugInfo,
kDebugInfoOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, table_object, WasmTableObject,
kTableObjectOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, function_tables, FixedArray,
kFunctionTablesOffset)
ACCESSORS(WasmInstanceObject, directly_called_instances, FixedArray,
kDirectlyCalledInstancesOffset)
ACCESSORS(WasmInstanceObject, js_imports_table, FixedArray,
kJsImportsTableOffset)
ACCESSORS(WasmInstanceObject, imported_function_instances, FixedArray,
kImportedFunctionInstancesOffset)
ACCESSORS(WasmInstanceObject, imported_function_callables, FixedArray,
kImportedFunctionCallablesOffset)
OPTIONAL_ACCESSORS(WasmInstanceObject, indirect_function_table_instances,
FixedArray, kIndirectFunctionTableInstancesOffset)
ACCESSORS(WasmInstanceObject, managed_native_allocations, Foreign,
kManagedNativeAllocationsOffset)
inline bool WasmInstanceObject::has_indirect_function_table() {
return indirect_function_table_sig_ids() != nullptr;
}
// WasmSharedModuleData
ACCESSORS(WasmSharedModuleData, module_wrapper, Object, kModuleWrapperOffset)
@ -168,7 +176,6 @@ WCM_OBJECT(WasmCompiledModule, prev_instance, kPrevInstanceOffset)
WCM_WEAK_LINK(WasmInstanceObject, owning_instance, kOwningInstanceOffset)
WCM_WEAK_LINK(WasmModuleObject, wasm_module, kWasmModuleOffset)
WCM_OBJECT(Foreign, native_module, kNativeModuleOffset)
WCM_OBJECT(FixedArray, lazy_compile_data, kLazyCompileDataOffset)
WCM_SMALL_CONST_NUMBER(bool, use_trap_handler, kUseTrapHandlerOffset)
ACCESSORS(WasmCompiledModule, raw_next_instance, Object, kNextInstanceOffset);
ACCESSORS(WasmCompiledModule, raw_prev_instance, Object, kPrevInstanceOffset);
@ -185,6 +192,10 @@ uint32_t WasmTableObject::current_length() { return functions()->length(); }
bool WasmMemoryObject::has_maximum_pages() { return maximum_pages() >= 0; }
inline bool WasmCompiledModule::has_instance() const {
return !weak_owning_instance()->cleared();
}
#include "src/objects/object-macros-undef.h"
} // namespace internal

View File

@ -27,6 +27,8 @@
if (FLAG_trace_wasm_instances) PrintF(__VA_ARGS__); \
} while (false)
#define TRACE_IFT(...)
namespace v8 {
namespace internal {
@ -37,6 +39,93 @@ using WasmModule = wasm::WasmModule;
namespace {
// Manages the natively-allocated memory for a WasmInstanceObject. Since
// an instance finalizer is not guaranteed to run upon isolate shutdown,
// we must use a Managed<WasmInstanceNativeAllocations> to guarantee
// it is freed.
// Native allocations are the signature ids and targets for indirect call
// targets, as well as the call targets for imported functions.
class WasmInstanceNativeAllocations {
public:
// Helper macro to set an internal field and the corresponding field
// on an instance.
#define SET(instance, field, value) \
{ \
auto v = value; \
this->field##_ = v; \
instance->set_##field(v); \
}
// Allocates initial native storage for a given instance.
WasmInstanceNativeAllocations(Handle<WasmInstanceObject> instance,
size_t num_imported_functions) {
SET(instance, imported_function_targets,
reinterpret_cast<Address*>(
calloc(num_imported_functions, sizeof(Address))));
}
~WasmInstanceNativeAllocations() { free(); }
// Frees natively-allocated storage.
void free() {
::free(indirect_function_table_sig_ids_);
::free(indirect_function_table_targets_);
::free(imported_function_targets_);
indirect_function_table_sig_ids_ = nullptr;
indirect_function_table_targets_ = nullptr;
imported_function_targets_ = nullptr;
}
// Resizes the indirect function table.
void resize_indirect_function_table(Isolate* isolate,
Handle<WasmInstanceObject> instance,
size_t new_size) {
size_t old_size = instance->indirect_function_table_size();
void* new_sig_ids = nullptr;
void* new_targets = nullptr;
Handle<FixedArray> new_instances;
if (indirect_function_table_sig_ids_) {
// Reallocate the old storage.
new_sig_ids = realloc(indirect_function_table_sig_ids_,
new_size * sizeof(uint32_t));
new_targets =
realloc(indirect_function_table_targets_, new_size * sizeof(Address));
Handle<FixedArray> old(instance->indirect_function_table_instances(),
isolate);
new_instances = isolate->factory()->CopyFixedArrayAndGrow(
old, static_cast<int>(new_size - old_size));
} else {
// Allocate new storage.
new_sig_ids = malloc(new_size * sizeof(uint32_t));
new_targets = malloc(new_size * sizeof(Address));
new_instances =
isolate->factory()->NewFixedArray(static_cast<int>(new_size));
}
// Initialize new entries.
instance->set_indirect_function_table_size(new_size);
SET(instance, indirect_function_table_sig_ids,
reinterpret_cast<uint32_t*>(new_sig_ids));
SET(instance, indirect_function_table_targets,
reinterpret_cast<Address*>(new_targets));
instance->set_indirect_function_table_instances(*new_instances);
for (size_t j = old_size; j < new_size; j++) {
auto entry =
instance->indirect_function_table_entry_at(static_cast<int>(j));
entry.clear();
}
}
uint32_t* indirect_function_table_sig_ids_ = nullptr;
Address* indirect_function_table_targets_ = nullptr;
Address* imported_function_targets_ = nullptr;
#undef SET
};
WasmInstanceNativeAllocations* GetNativeAllocations(
WasmInstanceObject* instance) {
return reinterpret_cast<Managed<WasmInstanceNativeAllocations>*>(
instance->managed_native_allocations())
->get();
}
// An iterator that returns first the module itself, then all modules linked via
// next, then all linked via prev.
class CompiledModulesIterator
@ -120,9 +209,7 @@ class CompiledModuleInstancesIterator
private:
bool NeedToAdvance() {
return !it.current_.is_null() &&
(!it.current_->has_weak_owning_instance() ||
it.current_->weak_owning_instance()->cleared());
return !it.current_.is_null() && !it.current_->has_instance();
}
CompiledModulesIterator it;
};
@ -188,7 +275,7 @@ void WasmModuleObject::ValidateStateForTesting(
CHECK_EQ(compiled_module->weak_wasm_module()->value(), *module_obj);
CHECK(!compiled_module->has_prev_instance());
CHECK(!compiled_module->has_next_instance());
CHECK(!compiled_module->has_weak_owning_instance());
CHECK(!compiled_module->has_instance());
}
Handle<WasmTableObject> WasmTableObject::New(Isolate* isolate, uint32_t initial,
@ -216,9 +303,7 @@ Handle<WasmTableObject> WasmTableObject::New(Isolate* isolate, uint32_t initial,
void WasmTableObject::AddDispatchTable(Isolate* isolate,
Handle<WasmTableObject> table_obj,
Handle<WasmInstanceObject> instance,
int table_index,
Handle<FixedArray> function_table) {
DCHECK_EQ(0, function_table->length() % compiler::kFunctionTableEntrySize);
int table_index) {
Handle<FixedArray> dispatch_tables(table_obj->dispatch_tables());
int old_length = dispatch_tables->length();
DCHECK_EQ(0, old_length % kDispatchTableNumElements);
@ -235,8 +320,6 @@ void WasmTableObject::AddDispatchTable(Isolate* isolate,
*instance);
new_dispatch_tables->set(old_length + kDispatchTableIndexOffset,
Smi::FromInt(table_index));
new_dispatch_tables->set(old_length + kDispatchTableFunctionTableOffset,
*function_table);
table_obj->set_dispatch_tables(*new_dispatch_tables);
}
@ -247,31 +330,21 @@ void WasmTableObject::Grow(Isolate* isolate, uint32_t count) {
Handle<FixedArray> dispatch_tables(this->dispatch_tables());
DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
uint32_t old_size = functions()->length();
constexpr int kInvalidSigIndex = -1;
// The tables are stored in the WASM context, no code patching is
// necessary. We simply have to grow the raw tables in the WasmContext
// for each instance that has imported this table.
// Tables are stored in the instance object, no code patching is
// necessary. We simply have to grow the raw tables in each instance
// that has imported this table.
// TODO(titzer): replace the dispatch table with a weak list of all
// the instances that import a given table.
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
// TODO(titzer): potentially racy update of WasmContext::table
WasmContext* wasm_context =
WasmInstanceObject::cast(dispatch_tables->get(i))
->wasm_context()
->get();
DCHECK_EQ(old_size, wasm_context->table_size);
Handle<WasmInstanceObject> instance(
WasmInstanceObject::cast(dispatch_tables->get(i)), isolate);
DCHECK_EQ(old_size, instance->indirect_function_table_size());
uint32_t new_size = old_size + count;
wasm_context->table = reinterpret_cast<IndirectFunctionTableEntry*>(realloc(
wasm_context->table, new_size * sizeof(IndirectFunctionTableEntry)));
for (uint32_t j = old_size; j < new_size; j++) {
wasm_context->table[j].sig_id = kInvalidSigIndex;
wasm_context->table[j].context = nullptr;
wasm_context->table[j].target = nullptr;
}
wasm_context->table_size = new_size;
WasmInstanceObject::EnsureIndirectFunctionTableWithMinimumSize(instance,
new_size);
}
}
@ -291,17 +364,16 @@ void WasmTableObject::Set(Isolate* isolate, Handle<WasmTableObject> table,
DCHECK_NOT_NULL(wasm_function->sig);
wasm::WasmCode* wasm_code = exported_function->GetWasmCode();
UpdateDispatchTables(isolate, table, table_index, wasm_function->sig,
handle(exported_function->instance()), wasm_code,
exported_function->function_index());
handle(exported_function->instance()), wasm_code);
array->set(table_index, *function);
}
void WasmTableObject::UpdateDispatchTables(
Isolate* isolate, Handle<WasmTableObject> table, int table_index,
wasm::FunctionSig* sig, Handle<WasmInstanceObject> from_instance,
wasm::WasmCode* wasm_code, int func_index) {
// We simply need to update the WASM contexts for each instance
// that imports this table.
wasm::WasmCode* wasm_code) {
// We simply need to update the IFTs for each instance that imports
// this table.
DisallowHeapAllocation no_gc;
FixedArray* dispatch_tables = table->dispatch_tables();
DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
@ -313,10 +385,8 @@ void WasmTableObject::UpdateDispatchTables(
WasmInstanceObject* to_instance = WasmInstanceObject::cast(
dispatch_tables->get(i + kDispatchTableInstanceOffset));
auto sig_id = to_instance->module()->signature_map.Find(sig);
auto& entry = to_instance->wasm_context()->get()->table[table_index];
entry.sig_id = sig_id;
entry.context = from_instance->wasm_context()->get();
entry.target = wasm_code->instructions().start();
auto entry = to_instance->indirect_function_table_entry_at(table_index);
entry.set(sig_id, from_instance, wasm_code);
}
}
@ -327,14 +397,11 @@ void WasmTableObject::ClearDispatchTables(Handle<WasmTableObject> table,
DCHECK_EQ(0, dispatch_tables->length() % kDispatchTableNumElements);
for (int i = 0; i < dispatch_tables->length();
i += kDispatchTableNumElements) {
constexpr int kInvalidSigIndex = -1; // TODO(titzer): move to header.
WasmInstanceObject* to_instance = WasmInstanceObject::cast(
WasmInstanceObject* target_instance = WasmInstanceObject::cast(
dispatch_tables->get(i + kDispatchTableInstanceOffset));
DCHECK_LT(index, to_instance->wasm_context()->get()->table_size);
auto& entry = to_instance->wasm_context()->get()->table[index];
entry.sig_id = kInvalidSigIndex;
entry.context = nullptr;
entry.target = nullptr;
DCHECK_LT(index, target_instance->indirect_function_table_size());
auto entry = target_instance->indirect_function_table_entry_at(index);
entry.clear();
}
}
@ -411,16 +478,17 @@ MaybeHandle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
// May GC, because SetSpecializationMemInfoFrom may GC
void SetInstanceMemory(Isolate* isolate, Handle<WasmInstanceObject> instance,
Handle<JSArrayBuffer> buffer) {
auto wasm_context = instance->wasm_context()->get();
wasm_context->SetRawMemory(reinterpret_cast<byte*>(buffer->backing_store()),
buffer->byte_length()->Number());
instance->SetRawMemory(reinterpret_cast<byte*>(buffer->backing_store()),
buffer->byte_length()->Number());
#if DEBUG
// To flush out bugs earlier, in DEBUG mode, check that all pages of the
// memory are accessible by reading and writing one byte on each page.
for (uint32_t offset = 0; offset < wasm_context->mem_size;
offset += wasm::kWasmPageSize) {
byte val = wasm_context->mem_start[offset];
wasm_context->mem_start[offset] = val;
byte* mem_start = instance->memory_start();
uintptr_t mem_size = instance->memory_size();
for (uint32_t offset = 0; offset < mem_size; offset += wasm::kWasmPageSize) {
byte val = mem_start[offset];
USE(val);
mem_start[offset] = val;
}
#endif
}
@ -552,41 +620,106 @@ Handle<WasmGlobalObject> WasmGlobalObject::New(
return global_obj;
}
void IndirectFunctionTableEntry::clear() {
instance_->indirect_function_table_sig_ids()[index_] = -1;
instance_->indirect_function_table_targets()[index_] = 0;
instance_->indirect_function_table_instances()->set(
index_, instance_->GetIsolate()->heap()->undefined_value());
}
void IndirectFunctionTableEntry::set(int sig_id,
Handle<WasmInstanceObject> instance,
const wasm::WasmCode* wasm_code) {
TRACE_IFT("IFT entry %p[%d] = {sig_id=%d, instance=%p, target=%p}\n",
*instance_, index_, sig_id, *instance,
wasm_code->instructions().start());
instance_->indirect_function_table_sig_ids()[index_] = sig_id;
instance_->indirect_function_table_targets()[index_] =
wasm_code->instructions().start();
instance_->indirect_function_table_instances()->set(index_, *instance);
}
WasmInstanceObject* IndirectFunctionTableEntry::instance() {
return WasmInstanceObject::cast(
instance_->indirect_function_table_instances()->get(index_));
}
int IndirectFunctionTableEntry::sig_id() {
return instance_->indirect_function_table_sig_ids()[index_];
}
Address IndirectFunctionTableEntry::target() {
return instance_->indirect_function_table_targets()[index_];
}
void ImportedFunctionEntry::set(Handle<JSReceiver> callable,
const wasm::WasmCode* wasm_to_js_wrapper) {
TRACE_IFT("Import callable %p[%d] = {callable=%p, target=%p}\n", *instance_,
index_, *callable, wasm_to_js_wrapper->instructions().start());
DCHECK_EQ(wasm::WasmCode::kWasmToJsWrapper, wasm_to_js_wrapper->kind());
instance_->imported_function_instances()->set(index_, *instance_);
instance_->imported_function_callables()->set(index_, *callable);
instance_->imported_function_targets()[index_] =
wasm_to_js_wrapper->instructions().start();
}
void ImportedFunctionEntry::set(Handle<WasmInstanceObject> instance,
const wasm::WasmCode* wasm_code) {
TRACE_IFT("Import WASM %p[%d] = {instance=%p, target=%p}\n", *instance_,
index_, *instance, wasm_code->instructions().start());
instance_->imported_function_instances()->set(index_, *instance);
instance_->imported_function_callables()->set(
index_, instance_->GetHeap()->undefined_value());
instance_->imported_function_targets()[index_] =
wasm_code->instructions().start();
}
WasmInstanceObject* ImportedFunctionEntry::instance() {
return WasmInstanceObject::cast(
instance_->imported_function_instances()->get(index_));
}
JSReceiver* ImportedFunctionEntry::callable() {
return JSReceiver::cast(
instance_->imported_function_callables()->get(index_));
}
Address ImportedFunctionEntry::target() {
return instance_->imported_function_targets()[index_];
}
bool ImportedFunctionEntry::is_js_receiver_entry() {
return instance_->imported_function_callables()->get(index_)->IsJSReceiver();
}
bool WasmInstanceObject::EnsureIndirectFunctionTableWithMinimumSize(
size_t minimum_size) {
constexpr int kInvalidSigIndex = -1;
uintptr_t old_size = indirect_function_table_size();
Handle<WasmInstanceObject> instance, size_t minimum_size) {
uintptr_t old_size = instance->indirect_function_table_size();
if (old_size >= minimum_size) return false; // Nothing to do.
size_t new_size = minimum_size;
IndirectFunctionTableEntry* new_storage;
if (indirect_function_table()) {
// Reallocate the old storage.
new_storage = reinterpret_cast<IndirectFunctionTableEntry*>(
realloc(indirect_function_table(),
new_size * sizeof(IndirectFunctionTableEntry)));
} else {
// Allocate new storage.
new_storage = reinterpret_cast<IndirectFunctionTableEntry*>(
calloc(new_size, sizeof(IndirectFunctionTableEntry)));
}
// Initialize new entries.
for (size_t j = old_size; j < new_size; j++) {
auto entry = indirect_function_table_entry_at(static_cast<int>(j));
entry->sig_id = kInvalidSigIndex;
entry->context = nullptr;
entry->target = nullptr;
}
set_indirect_function_table_size(new_size);
set_indirect_function_table(new_storage);
Isolate* isolate = instance->GetIsolate();
HandleScope scope(isolate);
auto native_allocations = GetNativeAllocations(*instance);
native_allocations->resize_indirect_function_table(isolate, instance,
minimum_size);
return true;
}
IndirectFunctionTableEntry*
WasmInstanceObject::indirect_function_table_entry_at(int i) {
IndirectFunctionTableEntry WasmInstanceObject::indirect_function_table_entry_at(
int i) {
DCHECK_GE(i, 0);
DCHECK_LT(i, indirect_function_table_size());
return &indirect_function_table()[i];
Handle<WasmInstanceObject> handle(this, GetIsolate());
IndirectFunctionTableEntry entry(handle, i);
return entry;
}
ImportedFunctionEntry WasmInstanceObject::imported_function_entry_at(int i) {
DCHECK_GE(i, 0);
DCHECK_LT(i, compiled_module()->shared()->module()->num_imported_functions);
Handle<WasmInstanceObject> handle(this, GetIsolate());
ImportedFunctionEntry entry(handle, i);
return entry;
}
void WasmInstanceObject::SetRawMemory(byte* mem_start, size_t mem_size) {
@ -625,24 +758,29 @@ Handle<WasmInstanceObject> WasmInstanceObject::New(
Handle<WasmInstanceObject> instance(
reinterpret_cast<WasmInstanceObject*>(*instance_object), isolate);
auto wasm_context = Managed<WasmContext>::Allocate(isolate);
wasm_context->get()->SetRawMemory(nullptr, 0);
wasm_context->get()->globals_start = nullptr;
instance->set_wasm_context(*wasm_context);
// Initialize the imported function arrays.
auto num_imported_functions =
compiled_module->shared()->module()->num_imported_functions;
auto native_allocations = Managed<WasmInstanceNativeAllocations>::Allocate(
isolate, instance, num_imported_functions);
instance->set_managed_native_allocations(*native_allocations);
Handle<FixedArray> imported_function_instances =
isolate->factory()->NewFixedArray(num_imported_functions);
instance->set_imported_function_instances(*imported_function_instances);
Handle<FixedArray> imported_function_callables =
isolate->factory()->NewFixedArray(num_imported_functions);
instance->set_imported_function_callables(*imported_function_callables);
instance->SetRawMemory(nullptr, 0);
instance->set_globals_start(nullptr);
instance->set_indirect_function_table_size(0);
instance->set_indirect_function_table_sig_ids(nullptr);
instance->set_indirect_function_table_targets(nullptr);
instance->set_compiled_module(*compiled_module);
// TODO(titzer): ensure that untagged fields are not visited by the GC.
// (if they are, the GC will crash).
uintptr_t invalid = static_cast<uintptr_t>(kHeapObjectTag);
instance->set_memory_start(reinterpret_cast<byte*>(invalid));
instance->set_memory_size(invalid);
instance->set_memory_mask(invalid);
instance->set_globals_start(reinterpret_cast<byte*>(invalid));
instance->set_indirect_function_table(
reinterpret_cast<IndirectFunctionTableEntry*>(invalid));
instance->set_indirect_function_table_size(invalid);
return instance;
}
@ -667,7 +805,7 @@ void WasmInstanceObject::ValidateInstancesChainForTesting(
CHECK_EQ(JSObject::cast(compiled_module->weak_wasm_module()->value()),
*module_obj);
Object* prev = nullptr;
int found_instances = compiled_module->has_weak_owning_instance() ? 1 : 0;
int found_instances = compiled_module->has_instance() ? 1 : 0;
WasmCompiledModule* current_instance = compiled_module;
while (current_instance->has_next_instance()) {
CHECK((prev == nullptr && !current_instance->has_prev_instance()) ||
@ -741,18 +879,11 @@ void InstanceFinalizer(const v8::WeakCallbackInfo<void>& data) {
}
}
void* invalid =
reinterpret_cast<void*>(static_cast<uintptr_t>(kHeapObjectTag));
if (instance->indirect_function_table() &&
instance->indirect_function_table() != invalid) {
// The indirect function table is C++ memory and needs to be explicitly
// freed.
free(instance->indirect_function_table());
}
// Free raw C++ memory associated with the instance.
GetNativeAllocations(instance)->free();
compiled_module->RemoveFromChain();
compiled_module->reset_weak_owning_instance();
GlobalHandles::Destroy(reinterpret_cast<Object**>(p));
TRACE("}\n");
}
@ -1259,7 +1390,6 @@ MaybeHandle<FixedArray> WasmSharedModuleData::CheckBreakPoints(
Handle<WasmCompiledModule> WasmCompiledModule::New(
Isolate* isolate, WasmModule* module, Handle<FixedArray> export_wrappers,
const std::vector<GlobalHandleAddress>& function_tables,
bool use_trap_handler) {
Handle<WasmCompiledModule> compiled_module = Handle<WasmCompiledModule>::cast(
isolate->factory()->NewStruct(WASM_COMPILED_MODULE_TYPE, TENURED));
@ -1270,6 +1400,7 @@ Handle<WasmCompiledModule> WasmCompiledModule::New(
if (!export_wrappers.is_null()) {
compiled_module->set_export_wrappers(*export_wrappers);
}
compiled_module->set_weak_owning_instance(isolate->heap()->empty_weak_cell());
wasm::NativeModule* native_module = nullptr;
{
std::unique_ptr<wasm::NativeModule> native_module_ptr =
@ -1295,10 +1426,8 @@ Handle<WasmCompiledModule> WasmCompiledModule::Clone(
ret->set_weak_native_context(module->weak_native_context());
ret->set_export_wrappers(module->export_wrappers());
ret->set_weak_wasm_module(module->weak_wasm_module());
ret->set_weak_owning_instance(isolate->heap()->empty_weak_cell());
ret->set_native_module(module->native_module());
if (module->has_lazy_compile_data()) {
ret->set_lazy_compile_data(module->lazy_compile_data());
}
ret->set_use_trap_handler(module->use_trap_handler());
Handle<FixedArray> export_copy = isolate->factory()->CopyFixedArray(
@ -1314,11 +1443,6 @@ Handle<WasmCompiledModule> WasmCompiledModule::Clone(
ret->set_native_module(*native_module_wrapper);
ret->GetNativeModule()->SetCompiledModule(ret);
if (module->has_lazy_compile_data()) {
Handle<FixedArray> lazy_comp_data = isolate->factory()->NewFixedArray(
module->lazy_compile_data()->length(), TENURED);
ret->set_lazy_compile_data(*lazy_comp_data);
}
return ret;
}
@ -1357,14 +1481,6 @@ void WasmCompiledModule::Reset(Isolate* isolate,
Zone specialization_zone(isolate->allocator(), ZONE_NAME);
wasm::CodeSpecialization code_specialization(isolate, &specialization_zone);
if (compiled_module->has_lazy_compile_data()) {
for (int i = 0, e = compiled_module->lazy_compile_data()->length(); i < e;
++i) {
compiled_module->lazy_compile_data()->set(
i, isolate->heap()->undefined_value());
}
}
for (uint32_t i = native_module->num_imported_functions(),
end = native_module->FunctionCount();
i < end; ++i) {
@ -1576,6 +1692,6 @@ void WasmCompiledModule::LogWasmCodes(Isolate* isolate) {
}
#undef TRACE
#undef TRACE_IFT
} // namespace internal
} // namespace v8

View File

@ -1,5 +1,5 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// Copyright 2016 the V8 project authors. All rights reserved. Use of
// this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_WASM_WASM_OBJECTS_H_
@ -42,48 +42,62 @@ class WasmInstanceObject;
INLINE(bool has_##name()); \
DECL_ACCESSORS(name, type)
// An entry in an indirect dispatch table.
struct IndirectFunctionTableEntry {
int32_t sig_id = 0;
WasmContext* context = nullptr;
Address target = nullptr;
// An entry in an indirect function table (IFT).
// Each entry in the IFT has the following fields:
// - instance = target instance
// - sig_id = signature id of function
// - target = entrypoint to wasm code for the function, or wasm-to-js wrapper
class IndirectFunctionTableEntry {
public:
void clear();
void set(int sig_id, Handle<WasmInstanceObject> instance,
const wasm::WasmCode* wasm_code);
MOVE_ONLY_WITH_DEFAULT_CONSTRUCTORS(IndirectFunctionTableEntry)
WasmInstanceObject* instance();
int sig_id();
Address target();
private:
// These entries are only constructed by the WasmInstanceObject.
friend class WasmInstanceObject;
IndirectFunctionTableEntry(Handle<WasmInstanceObject> instance, int index)
: instance_(instance), index_(index) {}
Handle<WasmInstanceObject> instance_;
int index_;
};
// Wasm context used to store the mem_size and mem_start address of the linear
// memory. These variables can be accessed at C++ level at graph build time
// (e.g., initialized during instance building / changed at runtime by
// grow_memory). The address of the WasmContext is provided to the wasm entry
// functions using a RelocatableIntPtrConstant, then the address is passed as
// parameter to the other wasm functions.
// Note that generated code can directly read from instances of this struct.
struct WasmContext {
byte* mem_start = nullptr;
uint32_t mem_size = 0; // TODO(titzer): uintptr_t?
uint32_t mem_mask = 0; // TODO(titzer): uintptr_t?
byte* globals_start = nullptr;
// TODO(wasm): pad these entries to a power of two.
IndirectFunctionTableEntry* table = nullptr;
uint32_t table_size = 0;
// An entry for an imported function.
// (note this is not called a "table" since it is not dynamically indexed).
// The imported function entries are used to call imported functions.
// For each imported function there is an entry which is either:
// - an imported JSReceiver, which has fields
// - instance = importing instance
// - receiver = JSReceiver, either a JS function or other callable
// - target = pointer to wasm-to-js wrapper code entrypoint
// - an imported wasm function from another instance, which has fields
// - instance = target instance
// - target = entrypoint to wasm code of the function
class ImportedFunctionEntry {
public:
// Initialize this entry as a {JSReceiver} call.
void set(Handle<JSReceiver> callable,
const wasm::WasmCode* wasm_to_js_wrapper);
// Initialize this entry as a WASM to WASM call.
void set(Handle<WasmInstanceObject> target_instance,
const wasm::WasmCode* wasm_function);
void SetRawMemory(void* mem_start, size_t mem_size) {
DCHECK_LE(mem_size, wasm::kV8MaxWasmMemoryPages * wasm::kWasmPageSize);
this->mem_start = static_cast<byte*>(mem_start);
this->mem_size = static_cast<uint32_t>(mem_size);
this->mem_mask = base::bits::RoundUpToPowerOfTwo32(this->mem_size) - 1;
DCHECK_LE(mem_size, this->mem_mask + 1);
}
WasmInstanceObject* instance();
JSReceiver* callable();
Address target();
bool is_js_receiver_entry();
~WasmContext() {
if (table) free(table);
mem_start = nullptr;
mem_size = 0;
mem_mask = 0;
globals_start = nullptr;
table = nullptr;
table_size = 0;
}
private:
// These entries are only constructed by the WasmInstanceObject.
friend class WasmInstanceObject;
ImportedFunctionEntry(Handle<WasmInstanceObject> instance, int index)
: instance_(instance), index_(index) {}
Handle<WasmInstanceObject> instance_;
int index_;
};
// Representation of a WebAssembly.Module JavaScript-level object.
@ -138,8 +152,7 @@ class WasmTableObject : public JSObject {
Handle<FixedArray>* js_functions);
static void AddDispatchTable(Isolate* isolate, Handle<WasmTableObject> table,
Handle<WasmInstanceObject> instance,
int table_index,
Handle<FixedArray> function_table);
int table_index);
static void Set(Isolate* isolate, Handle<WasmTableObject> table,
int32_t index, Handle<JSFunction> function);
@ -148,7 +161,7 @@ class WasmTableObject : public JSObject {
Handle<WasmTableObject> table,
int table_index, wasm::FunctionSig* sig,
Handle<WasmInstanceObject> from_instance,
wasm::WasmCode* wasm_code, int func_index);
wasm::WasmCode* wasm_code);
static void ClearDispatchTables(Handle<WasmTableObject> table, int index);
};
@ -161,14 +174,12 @@ class WasmMemoryObject : public JSObject {
DECL_ACCESSORS(array_buffer, JSArrayBuffer)
DECL_INT_ACCESSORS(maximum_pages)
DECL_OPTIONAL_ACCESSORS(instances, FixedArrayOfWeakCells)
DECL_ACCESSORS(wasm_context, Managed<WasmContext>)
// Layout description.
#define WASM_MEMORY_OBJECT_FIELDS(V) \
V(kArrayBufferOffset, kPointerSize) \
V(kMaximumPagesOffset, kPointerSize) \
V(kInstancesOffset, kPointerSize) \
V(kWasmContextOffset, kPointerSize) \
V(kSize, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
@ -222,44 +233,47 @@ class WasmInstanceObject : public JSObject {
public:
DECL_CAST(WasmInstanceObject)
DECL_ACCESSORS(wasm_context, Managed<WasmContext>)
DECL_ACCESSORS(compiled_module, WasmCompiledModule)
DECL_ACCESSORS(exports_object, JSObject)
DECL_OPTIONAL_ACCESSORS(memory_object, WasmMemoryObject)
DECL_OPTIONAL_ACCESSORS(globals_buffer, JSArrayBuffer)
DECL_OPTIONAL_ACCESSORS(debug_info, WasmDebugInfo)
DECL_OPTIONAL_ACCESSORS(table_object, WasmTableObject)
DECL_OPTIONAL_ACCESSORS(function_tables, FixedArray)
DECL_ACCESSORS(imported_function_instances, FixedArray)
DECL_ACCESSORS(imported_function_callables, FixedArray)
DECL_OPTIONAL_ACCESSORS(indirect_function_table_instances, FixedArray)
DECL_OPTIONAL_ACCESSORS(managed_native_allocations, Foreign)
DECL_PRIMITIVE_ACCESSORS(memory_start, byte*)
DECL_PRIMITIVE_ACCESSORS(memory_size, uintptr_t)
DECL_PRIMITIVE_ACCESSORS(memory_mask, uintptr_t)
DECL_PRIMITIVE_ACCESSORS(imported_function_targets, Address*)
DECL_PRIMITIVE_ACCESSORS(globals_start, byte*)
DECL_PRIMITIVE_ACCESSORS(indirect_function_table, IndirectFunctionTableEntry*)
DECL_PRIMITIVE_ACCESSORS(indirect_function_table_size, uintptr_t)
// FixedArray of all instances whose code was imported
DECL_ACCESSORS(directly_called_instances, FixedArray)
DECL_ACCESSORS(js_imports_table, FixedArray)
DECL_PRIMITIVE_ACCESSORS(indirect_function_table_sig_ids, uint32_t*)
DECL_PRIMITIVE_ACCESSORS(indirect_function_table_targets, Address*)
// Layout description.
#define WASM_INSTANCE_OBJECT_FIELDS(V) \
V(kWasmContextOffset, kPointerSize) \
V(kCompiledModuleOffset, kPointerSize) \
V(kExportsObjectOffset, kPointerSize) \
V(kMemoryObjectOffset, kPointerSize) \
V(kGlobalsBufferOffset, kPointerSize) \
V(kDebugInfoOffset, kPointerSize) \
V(kTableObjectOffset, kPointerSize) \
V(kFunctionTablesOffset, kPointerSize) \
V(kDirectlyCalledInstancesOffset, kPointerSize) \
V(kJsImportsTableOffset, kPointerSize) \
V(kFirstUntaggedOffset, 0) /* marker */ \
V(kMemoryStartOffset, kPointerSize) /* untagged */ \
V(kMemorySizeOffset, kPointerSize) /* untagged */ \
V(kMemoryMaskOffset, kPointerSize) /* untagged */ \
V(kGlobalsStartOffset, kPointerSize) /* untagged */ \
V(kIndirectFunctionTableOffset, kPointerSize) /* untagged */ \
V(kIndirectFunctionTableSizeOffset, kPointerSize) /* untagged */ \
#define WASM_INSTANCE_OBJECT_FIELDS(V) \
V(kCompiledModuleOffset, kPointerSize) \
V(kExportsObjectOffset, kPointerSize) \
V(kMemoryObjectOffset, kPointerSize) \
V(kGlobalsBufferOffset, kPointerSize) \
V(kDebugInfoOffset, kPointerSize) \
V(kTableObjectOffset, kPointerSize) \
V(kFunctionTablesOffset, kPointerSize) \
V(kImportedFunctionInstancesOffset, kPointerSize) \
V(kImportedFunctionCallablesOffset, kPointerSize) \
V(kIndirectFunctionTableInstancesOffset, kPointerSize) \
V(kManagedNativeAllocationsOffset, kPointerSize) \
V(kFirstUntaggedOffset, 0) /* marker */ \
V(kMemoryStartOffset, kPointerSize) /* untagged */ \
V(kMemorySizeOffset, kPointerSize) /* untagged */ \
V(kMemoryMaskOffset, kPointerSize) /* untagged */ \
V(kImportedFunctionTargetsOffset, kPointerSize) /* untagged */ \
V(kGlobalsStartOffset, kPointerSize) /* untagged */ \
V(kIndirectFunctionTableSizeOffset, kPointerSize) /* untagged */ \
V(kIndirectFunctionTableSigIdsOffset, kPointerSize) /* untagged */ \
V(kIndirectFunctionTableTargetsOffset, kPointerSize) /* untagged */ \
V(kSize, 0)
DEFINE_FIELD_OFFSET_CONSTANTS(JSObject::kHeaderSize,
@ -269,9 +283,13 @@ class WasmInstanceObject : public JSObject {
WasmModuleObject* module_object();
V8_EXPORT_PRIVATE wasm::WasmModule* module();
bool EnsureIndirectFunctionTableWithMinimumSize(size_t minimum_size);
static bool EnsureIndirectFunctionTableWithMinimumSize(
Handle<WasmInstanceObject> instance, size_t minimum_size);
IndirectFunctionTableEntry* indirect_function_table_entry_at(int index);
IndirectFunctionTableEntry indirect_function_table_entry_at(int index);
ImportedFunctionEntry imported_function_entry_at(int index);
bool has_indirect_function_table();
void SetRawMemory(byte* mem_start, size_t mem_size);
@ -443,23 +461,6 @@ class WasmSharedModuleData : public Struct {
// This represents the set of wasm compiled functions, together
// with all the information necessary for re-specializing them.
//
// We specialize wasm functions to their instance by embedding:
// - raw pointer to the wasm_context, that contains the size of the
// memory and the pointer to the backing store of the array buffer
// used as memory of a particular WebAssembly.Instance object. This
// information are then used at runtime to access memory / verify bounds
// check limits.
// - the objects representing the function tables and signature tables
//
// Even without instantiating, we need values for all of these parameters.
// We need to track these values to be able to create new instances and
// to be able to serialize/deserialize.
// The design decisions for how we track these values is not too immediate,
// and it deserves a summary. The "tricky" ones are: memory, globals, and
// the tables (signature and functions).
// For tables, we need to hold a reference to the JS Heap object, because
// we embed them as objects, and they may move.
class WasmCompiledModule : public Struct {
public:
DECL_CAST(WasmCompiledModule)
@ -527,7 +528,6 @@ class WasmCompiledModule : public Struct {
WCM_WEAK_LINK(WasmInstanceObject, owning_instance)
WCM_WEAK_LINK(WasmModuleObject, wasm_module)
WCM_OBJECT(Foreign, native_module)
WCM_OBJECT(FixedArray, lazy_compile_data)
// TODO(mstarzinger): Make {use_trap_handler} smaller.
WCM_SMALL_CONST_NUMBER(bool, use_trap_handler)
@ -535,13 +535,14 @@ class WasmCompiledModule : public Struct {
static Handle<WasmCompiledModule> New(
Isolate* isolate, wasm::WasmModule* module,
Handle<FixedArray> export_wrappers,
const std::vector<wasm::GlobalHandleAddress>& function_tables,
bool use_trap_hander);
static Handle<WasmCompiledModule> Clone(Isolate* isolate,
Handle<WasmCompiledModule> module);
static void Reset(Isolate* isolate, WasmCompiledModule* module);
bool has_instance() const;
wasm::NativeModule* GetNativeModule() const;
void InsertInChain(WasmModuleObject*);
void RemoveFromChain();

View File

@ -23,8 +23,8 @@ constexpr ValueType kWasmI32 = MachineRepresentation::kWord32;
constexpr ValueType kWasmI64 = MachineRepresentation::kWord64;
constexpr ValueType kWasmF32 = MachineRepresentation::kFloat32;
constexpr ValueType kWasmF64 = MachineRepresentation::kFloat64;
constexpr ValueType kWasmAnyRef = MachineRepresentation::kTaggedPointer;
constexpr ValueType kWasmS128 = MachineRepresentation::kSimd128;
constexpr ValueType kWasmAnyRef = MachineRepresentation::kTaggedPointer;
constexpr ValueType kWasmVar = MachineRepresentation::kTagged;
using FunctionSig = Signature<ValueType>;

View File

@ -682,7 +682,6 @@ MaybeHandle<WasmCompiledModule> DeserializeNativeModule(
Handle<WasmCompiledModule> compiled_module =
WasmCompiledModule::New(isolate, shared->module(), export_wrappers,
std::vector<wasm::GlobalHandleAddress>(),
trap_handler::IsTrapHandlerEnabled());
compiled_module->set_shared(*shared);
script->set_wasm_compiled_module(*compiled_module);

View File

@ -100,7 +100,6 @@ v8_source_set("cctest_sources") {
"compiler/test-run-tail-calls.cc",
"compiler/test-run-unwinding-info.cc",
"compiler/test-run-variables.cc",
"compiler/test-run-wasm-machops.cc",
"compiler/value-helper.cc",
"compiler/value-helper.h",
"expression-type-collector-macros.h",
@ -240,7 +239,6 @@ v8_source_set("cctest_sources") {
"wasm/test-run-wasm-interpreter.cc",
"wasm/test-run-wasm-js.cc",
"wasm/test-run-wasm-module.cc",
"wasm/test-run-wasm-relocation.cc",
"wasm/test-run-wasm-sign-extension.cc",
"wasm/test-run-wasm-simd.cc",
"wasm/test-run-wasm.cc",
@ -277,7 +275,6 @@ v8_source_set("cctest_sources") {
"test-code-stubs.h",
"test-disasm-arm.cc",
"test-macro-assembler-arm.cc",
"test-run-wasm-relocation-arm.cc",
"test-sync-primitives-arm.cc",
]
} else if (v8_current_cpu == "arm64") {
@ -290,7 +287,6 @@ v8_source_set("cctest_sources") {
"test-fuzz-arm64.cc",
"test-javascript-arm64.cc",
"test-js-arm64-variables.cc",
"test-run-wasm-relocation-arm64.cc",
"test-sync-primitives-arm64.cc",
"test-utils-arm64.cc",
"test-utils-arm64.h",
@ -303,7 +299,6 @@ v8_source_set("cctest_sources") {
"test-code-stubs.h",
"test-disasm-ia32.cc",
"test-log-stack-tracer.cc",
"test-run-wasm-relocation-ia32.cc",
]
} else if (v8_current_cpu == "mips") {
sources += [ ### gcmole(arch:mips) ###
@ -350,7 +345,6 @@ v8_source_set("cctest_sources") {
"test-disasm-x64.cc",
"test-log-stack-tracer.cc",
"test-macro-assembler-x64.cc",
"test-run-wasm-relocation-x64.cc",
"wasm/test-run-wasm-atomics64.cc",
]
} else if (v8_current_cpu == "ppc" || v8_current_cpu == "ppc64") {

View File

@ -1,156 +0,0 @@
// Copyright 2016 the V8 project authors. All rights reserved. Use of this
// source code is governed by a BSD-style license that can be found in the
// LICENSE file.
#include <cmath>
#include <functional>
#include <limits>
#include "src/base/bits.h"
#include "src/base/utils/random-number-generator.h"
#include "src/codegen.h"
#include "src/objects-inl.h"
#include "src/wasm/wasm-objects.h"
#include "test/cctest/cctest.h"
#include "test/cctest/compiler/codegen-tester.h"
#include "test/cctest/compiler/graph-builder-tester.h"
#include "test/cctest/compiler/value-helper.h"
namespace v8 {
namespace internal {
namespace compiler {
template <typename CType>
static void RunLoadStoreRelocation(MachineType rep) {
const int kNumElems = 2;
CType buffer[kNumElems];
CType new_buffer[kNumElems];
byte* raw = reinterpret_cast<byte*>(buffer);
byte* new_raw = reinterpret_cast<byte*>(new_buffer);
WasmContext wasm_context;
wasm_context.SetRawMemory(raw, sizeof(buffer));
for (size_t i = 0; i < sizeof(buffer); i++) {
raw[i] = static_cast<byte>((i + sizeof(CType)) ^ 0xAA);
new_raw[i] = static_cast<byte>((i + sizeof(CType)) ^ 0xAA);
}
uint32_t OK = 0x29000;
RawMachineAssemblerTester<uint32_t> m;
Node* wasm_context_node =
m.RelocatableIntPtrConstant(reinterpret_cast<uintptr_t>(&wasm_context),
RelocInfo::WASM_CONTEXT_REFERENCE);
Node* offset = m.Int32Constant(offsetof(WasmContext, mem_start));
Node* base = m.Load(MachineType::UintPtr(), wasm_context_node, offset);
Node* base1 = m.IntPtrAdd(base, m.Int32Constant(sizeof(CType)));
Node* index = m.Int32Constant(0);
Node* load = m.Load(rep, base, index);
m.Store(rep.representation(), base1, index, load, kNoWriteBarrier);
m.Return(m.Int32Constant(OK));
CHECK(buffer[0] != buffer[1]);
CHECK_EQ(OK, m.Call());
CHECK(buffer[0] == buffer[1]);
wasm_context.SetRawMemory(new_raw, sizeof(new_buffer));
CHECK(new_buffer[0] != new_buffer[1]);
CHECK_EQ(OK, m.Call());
CHECK(new_buffer[0] == new_buffer[1]);
}
TEST(RunLoadStoreRelocation) {
RunLoadStoreRelocation<int8_t>(MachineType::Int8());
RunLoadStoreRelocation<uint8_t>(MachineType::Uint8());
RunLoadStoreRelocation<int16_t>(MachineType::Int16());
RunLoadStoreRelocation<uint16_t>(MachineType::Uint16());
RunLoadStoreRelocation<int32_t>(MachineType::Int32());
RunLoadStoreRelocation<uint32_t>(MachineType::Uint32());
RunLoadStoreRelocation<void*>(MachineType::AnyTagged());
RunLoadStoreRelocation<float>(MachineType::Float32());
RunLoadStoreRelocation<double>(MachineType::Float64());
}
template <typename CType>
static void RunLoadStoreRelocationOffset(MachineType rep) {
RawMachineAssemblerTester<int32_t> r(MachineType::Int32());
const int kNumElems = 4;
CType buffer[kNumElems];
CType new_buffer[kNumElems + 1];
WasmContext wasm_context;
for (int32_t x = 0; x < kNumElems; x++) {
int32_t y = kNumElems - x - 1;
// initialize the buffer with raw data.
byte* raw = reinterpret_cast<byte*>(buffer);
wasm_context.SetRawMemory(raw, sizeof(buffer));
for (size_t i = 0; i < sizeof(buffer); i++) {
raw[i] = static_cast<byte>((i + sizeof(buffer)) ^ 0xAA);
}
RawMachineAssemblerTester<int32_t> m;
int32_t OK = 0x29000 + x;
Node* wasm_context_node =
m.RelocatableIntPtrConstant(reinterpret_cast<uintptr_t>(&wasm_context),
RelocInfo::WASM_CONTEXT_REFERENCE);
Node* offset = m.Int32Constant(offsetof(WasmContext, mem_start));
Node* base = m.Load(MachineType::UintPtr(), wasm_context_node, offset);
Node* index0 = m.IntPtrConstant(x * sizeof(buffer[0]));
Node* load = m.Load(rep, base, index0);
Node* index1 = m.IntPtrConstant(y * sizeof(buffer[0]));
m.Store(rep.representation(), base, index1, load, kNoWriteBarrier);
m.Return(m.Int32Constant(OK));
CHECK(buffer[x] != buffer[y]);
CHECK_EQ(OK, m.Call());
CHECK(buffer[x] == buffer[y]);
// Initialize new buffer and set old_buffer to 0
byte* new_raw = reinterpret_cast<byte*>(new_buffer);
for (size_t i = 0; i < sizeof(buffer); i++) {
raw[i] = 0;
new_raw[i] = static_cast<byte>((i + sizeof(buffer)) ^ 0xAA);
}
wasm_context.SetRawMemory(new_raw, sizeof(new_buffer));
CHECK(new_buffer[x] != new_buffer[y]);
CHECK_EQ(OK, m.Call());
CHECK(new_buffer[x] == new_buffer[y]);
}
}
TEST(RunLoadStoreRelocationOffset) {
RunLoadStoreRelocationOffset<int8_t>(MachineType::Int8());
RunLoadStoreRelocationOffset<uint8_t>(MachineType::Uint8());
RunLoadStoreRelocationOffset<int16_t>(MachineType::Int16());
RunLoadStoreRelocationOffset<uint16_t>(MachineType::Uint16());
RunLoadStoreRelocationOffset<int32_t>(MachineType::Int32());
RunLoadStoreRelocationOffset<uint32_t>(MachineType::Uint32());
RunLoadStoreRelocationOffset<void*>(MachineType::AnyTagged());
RunLoadStoreRelocationOffset<float>(MachineType::Float32());
RunLoadStoreRelocationOffset<double>(MachineType::Float64());
}
// Builds a graph that bounds-checks a constant index against the memory size
// loaded from a WasmContext, then verifies that growing the context's memory
// flips the check from out-of-bounds to in-bounds on the next call.
TEST(Uint32LessThanMemoryRelocation) {
  RawMachineAssemblerTester<uint32_t> m;
  RawMachineLabel in_bounds_label, oob_label;
  WasmContext wasm_context;
  // Pretend there is a 0x200-byte memory at a dummy address.
  wasm_context.SetRawMemory(reinterpret_cast<void*>(1234), 0x200);

  // Build: return (0x200 < wasm_context.mem_size) ? 0xACED : 0xDEADBEEF.
  Node* probe_index = m.Int32Constant(0x200);
  Node* context_node =
      m.RelocatableIntPtrConstant(reinterpret_cast<uintptr_t>(&wasm_context),
                                  RelocInfo::WASM_CONTEXT_REFERENCE);
  Node* size_offset = m.Int32Constant(offsetof(WasmContext, mem_size));
  Node* mem_size = m.Load(MachineType::Uint32(), context_node, size_offset);
  Node* in_bounds =
      m.AddNode(m.machine()->Uint32LessThan(), probe_index, mem_size);
  m.Branch(in_bounds, &in_bounds_label, &oob_label);
  m.Bind(&in_bounds_label);
  m.Return(m.Int32Constant(0xACED));
  m.Bind(&oob_label);
  m.Return(m.Int32Constant(0xDEADBEEF));

  // With mem_size == 0x200, index 0x200 is out of bounds.
  CHECK_EQ(0xDEADBEEF, m.Call());
  // After growing the memory, the same index is within bounds.
  wasm_context.SetRawMemory(wasm_context.mem_start, 0x400);
  CHECK_EQ(0xACEDu, m.Call());
}
} // namespace compiler
} // namespace internal
} // namespace v8

View File

@ -61,7 +61,6 @@ UNINITIALIZED_TEST(VerifyBuiltinsIsolateIndependence) {
mode_mask ==
(RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::WASM_CONTEXT_REFERENCE) |
RelocInfo::ModeMask(RelocInfo::WASM_GLOBAL_HANDLE) |
RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
RelocInfo::ModeMask(RelocInfo::JS_TO_WASM_CALL) |

View File

@ -1,79 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <iostream> // NOLINT(readability/streams)
#include "src/v8.h"
#include "test/cctest/cctest.h"
#include "src/arm/assembler-arm-inl.h"
#include "src/arm/simulator-arm.h"
#include "src/disassembler.h"
#include "src/factory.h"
#include "src/ostreams.h"
#include "test/cctest/compiler/c-signature.h"
#include "test/cctest/compiler/call-tester.h"
namespace v8 {
namespace internal {
namespace wasm {
#define __ assm.
// NOTE(review): called once with nullptr by the test below; the argument and
// return value are ignored — appears to exist only so a static function
// symbol is referenced. Confirm before removing.
static int32_t DummyStaticFunction(Object* result) { return 1; }
// Tests that a WASM_CONTEXT_REFERENCE immediate embedded in hand-assembled
// ARM code can be found via RelocIterator and patched in place: the patched
// code must return the immediate shifted by the applied offset.
TEST(WasmRelocationArmContextReference) {
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::internal::byte buffer[4096];
  DummyStaticFunction(nullptr);
  int32_t imm = 1234567;
  Assembler assm(isolate, buffer, sizeof buffer);

  // Emit: return the relocatable immediate in r0.
  __ mov(r0, Operand(imm, RelocInfo::WASM_CONTEXT_REFERENCE));
  __ mov(pc, Operand(lr));
  CodeDesc desc;
  assm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  compiler::CSignatureOf<int32_t> csig;
  compiler::CodeRunner<int32_t> runnable(isolate, code, &csig);
  // Before patching, the code returns the original immediate.
  int32_t ret_value = runnable.Call();
  CHECK_EQ(ret_value, imm);
#ifdef DEBUG
  OFStream os(stdout);
  code->Print(os);
  ::printf("f() = %d\n\n", ret_value);
#endif
  int offset = 1234;
  // Relocating references by offset
  int mode_mask = (1 << RelocInfo::WASM_CONTEXT_REFERENCE);
  for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
    // TODO(6792): No longer needed once WebAssembly code is off heap.
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    DCHECK(RelocInfo::IsWasmContextReference(it.rinfo()->rmode()));
    it.rinfo()->set_wasm_context_reference(
        it.rinfo()->wasm_context_reference() + offset, SKIP_ICACHE_FLUSH);
  }
  // Call into relocated code object
  ret_value = runnable.Call();
  CHECK_EQ((imm + offset), ret_value);
#ifdef DEBUG
  code->Print(os);
  ::printf("f() = %d\n\n", ret_value);
#endif
}
#undef __
} // namespace wasm
} // namespace internal
} // namespace v8

View File

@ -1,84 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <iostream> // NOLINT(readability/streams)
#include "src/v8.h"
#include "test/cctest/cctest.h"
#include "src/arm64/assembler-arm64-inl.h"
#include "src/arm64/macro-assembler-arm64-inl.h"
#include "src/arm64/simulator-arm64.h"
#include "src/arm64/utils-arm64.h"
#include "src/disassembler.h"
#include "src/factory.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
#include "test/cctest/compiler/c-signature.h"
#include "test/cctest/compiler/call-tester.h"
namespace v8 {
namespace internal {
namespace wasm {
#define __ masm.
// NOTE(review): called once with nullptr by the test below; the argument and
// return value are ignored — appears to exist only so a static function
// symbol is referenced. Confirm before removing.
static int64_t DummyStaticFunction(Object* result) { return 1; }
// ARM64 variant: a WASM_CONTEXT_REFERENCE immediate emitted via the macro
// assembler is located with RelocIterator and patched by a fixed offset; the
// patched code must return the shifted immediate.
TEST(WasmRelocationArm64ContextReference) {
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::internal::byte buffer[4096];
  DummyStaticFunction(nullptr);
  int64_t imm = 1234567;
  MacroAssembler masm(isolate, buffer, sizeof buffer,
                      v8::internal::CodeObjectRequired::kYes);

  // Emit: return the relocatable immediate in x0.
  __ Mov(x0, Immediate(imm, RelocInfo::WASM_CONTEXT_REFERENCE));
  __ Ret();
  CodeDesc desc;
  masm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  compiler::CSignatureOf<int64_t> csig;
  compiler::CodeRunner<int64_t> runnable(isolate, code, &csig);
  // Before patching, the code returns the original immediate.
  int64_t ret_value = runnable.Call();
  CHECK_EQ(ret_value, imm);
#ifdef DEBUG
  OFStream os(stdout);
  code->Print(os);
  ::printf("f() = %" PRIx64 "\n\n", ret_value);
#endif
  int offset = 1234;
  // Relocating reference by offset
  int mode_mask = (1 << RelocInfo::WASM_CONTEXT_REFERENCE);
  for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
    // TODO(6792): No longer needed once WebAssembly code is off heap.
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    DCHECK(RelocInfo::IsWasmContextReference(it.rinfo()->rmode()));
    it.rinfo()->set_wasm_context_reference(
        it.rinfo()->wasm_context_reference() + offset, SKIP_ICACHE_FLUSH);
  }
  // Call into relocated code object
  ret_value = runnable.Call();
  CHECK_EQ((imm + offset), ret_value);
#ifdef DEBUG
  code->Print(os);
  ::printf("f() = %" PRIx64 "\n\n", ret_value);
#endif
}
#undef __
} // namespace wasm
} // namespace internal
} // namespace v8

View File

@ -1,90 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stdlib.h>
#include "src/v8.h"
#include "src/debug/debug.h"
#include "src/disasm.h"
#include "src/disassembler.h"
#include "src/frame-constants.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
#include "test/cctest/cctest.h"
#include "test/cctest/compiler/c-signature.h"
#include "test/cctest/compiler/call-tester.h"
namespace v8 {
namespace internal {
namespace wasm {
#define __ assm.
// NOTE(review): called once with nullptr by the test below; the argument and
// return value are ignored — appears to exist only so a static function
// symbol is referenced. Confirm before removing.
static int32_t DummyStaticFunction(Object* result) { return 1; }
// IA32 variant: a WASM_CONTEXT_REFERENCE immediate moved into eax is located
// with RelocIterator and patched by a fixed offset; the patched code must
// return the shifted immediate.
TEST(WasmRelocationIa32ContextReference) {
  Isolate* isolate = CcTest::i_isolate();
  Zone zone(isolate->allocator(), ZONE_NAME);
  HandleScope scope(isolate);
  v8::internal::byte buffer[4096];
  Assembler assm(isolate, buffer, sizeof buffer);
  DummyStaticFunction(nullptr);
  int32_t imm = 1234567;

  // Emit: return the relocatable immediate in eax.
  __ mov(eax, Immediate(reinterpret_cast<Address>(imm),
                        RelocInfo::WASM_CONTEXT_REFERENCE));
  __ nop();
  __ ret(0);
  compiler::CSignatureOf<int32_t> csig;
  CodeDesc desc;
  assm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  USE(code);
  compiler::CodeRunner<int32_t> runnable(isolate, code, &csig);
  // Before patching, the code returns the original immediate.
  int32_t ret_value = runnable.Call();
  CHECK_EQ(ret_value, imm);
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
  byte* begin = code->instruction_start();
  byte* end = begin + code->instruction_size();
  disasm::Disassembler::Disassemble(stdout, begin, end);
#endif
  int offset = 1234;
  // Relocating references by offset
  int mode_mask = (1 << RelocInfo::WASM_CONTEXT_REFERENCE);
  for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
    // TODO(6792): No longer needed once WebAssembly code is off heap.
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    DCHECK(RelocInfo::IsWasmContextReference(it.rinfo()->rmode()));
    it.rinfo()->set_wasm_context_reference(
        it.rinfo()->wasm_context_reference() + offset, SKIP_ICACHE_FLUSH);
  }
  // Check if immediate is updated correctly
  ret_value = runnable.Call();
  CHECK_EQ(ret_value, imm + offset);
#ifdef OBJECT_PRINT
  code->Print(os);
  begin = code->instruction_start();
  end = begin + code->instruction_size();
  disasm::Disassembler::Disassemble(stdout, begin, end);
#endif
}
#undef __
} // namespace wasm
} // namespace internal
} // namespace v8

View File

@ -1,86 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stdlib.h>
#include "src/v8.h"
#include "src/debug/debug.h"
#include "src/disasm.h"
#include "src/disassembler.h"
#include "src/ic/ic.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
#include "test/cctest/cctest.h"
#include "test/cctest/compiler/c-signature.h"
#include "test/cctest/compiler/call-tester.h"
namespace v8 {
namespace internal {
namespace compiler {
#define __ assm.
// NOTE(review): called once with nullptr by the test below; the argument and
// return value are ignored — appears to exist only so a static function
// symbol is referenced. Confirm before removing.
static int32_t DummyStaticFunction(Object* result) { return 1; }
// X64 variant: emits code returning a WASM_CONTEXT_REFERENCE immediate,
// patches that reference in place via RelocIterator, and checks the patched
// code returns the shifted value.
TEST(WasmRelocationX64ContextReference) {
  Isolate* isolate = CcTest::i_isolate();
  HandleScope scope(isolate);
  v8::internal::byte buffer[4096];
  Assembler assm(isolate, buffer, sizeof buffer);
  DummyStaticFunction(nullptr);
  int64_t initial_value = 1234567;

  // Emit: return the relocatable immediate in rax.
  __ movq(rax, initial_value, RelocInfo::WASM_CONTEXT_REFERENCE);
  __ nop();
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(isolate, &desc);
  Handle<Code> code =
      isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
  USE(code);
  CSignatureOf<int64_t> csig;
  CodeRunner<int64_t> runnable(isolate, code, &csig);

  // Before patching, the code returns the original immediate.
  int64_t result = runnable.Call();
  CHECK_EQ(result, initial_value);
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
  byte* begin = code->instruction_start();
  byte* end = begin + code->instruction_size();
  disasm::Disassembler::Disassemble(stdout, begin, end);
#endif

  // Shift every WASM_CONTEXT_REFERENCE in the generated code by |delta|.
  int delta = 1234;
  int context_ref_mask = (1 << RelocInfo::WASM_CONTEXT_REFERENCE);
  for (RelocIterator iter(*code, context_ref_mask); !iter.done();
       iter.next()) {
    // TODO(6792): No longer needed once WebAssembly code is off heap.
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    DCHECK(RelocInfo::IsWasmContextReference(iter.rinfo()->rmode()));
    iter.rinfo()->set_wasm_context_reference(
        iter.rinfo()->wasm_context_reference() + delta, SKIP_ICACHE_FLUSH);
  }

  // The relocated code must now return the shifted immediate.
  result = runnable.Call();
  CHECK_EQ(result, initial_value + delta);
#ifdef OBJECT_PRINT
  code->Print(os);
  begin = code->instruction_start();
  end = begin + code->instruction_size();
  disasm::Disassembler::Disassemble(stdout, begin, end);
#endif
}
#undef __
} // namespace compiler
} // namespace internal
} // namespace v8

View File

@ -62,15 +62,13 @@ class CWasmEntryArgTester {
Handle<Object> buffer_obj(reinterpret_cast<Object*>(arg_buffer.data()),
isolate_);
CHECK(!buffer_obj->IsHeapObject());
WasmContext* wasm_context = wasm_code_->native_module()
->compiled_module()
->owning_instance()
->wasm_context()
->get();
Handle<Object> call_args[]{
Handle<Object>::cast(isolate_->factory()->NewForeign(
wasm_code_->instructions().start(), TENURED)),
handle(reinterpret_cast<Object*>(wasm_context), isolate_), buffer_obj};
handle(
wasm_code_->native_module()->compiled_module()->owning_instance(),
isolate_),
buffer_obj};
static_assert(
arraysize(call_args) == compiler::CWasmEntryParameters::kNumParameters,
"adapt this test");

View File

@ -7,6 +7,7 @@
#include <stdlib.h>
#include <string.h>
#include "src/api.h"
#include "src/assembler-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/compiler/value-helper.h"
@ -39,8 +40,7 @@ class PredictableInputValues {
}
};
uint32_t AddJSSelector(TestingModuleBuilder* builder, FunctionSig* sig,
int which, Handle<FixedArray> js_imports_table) {
ManuallyImportedJSFunction CreateJSSelector(FunctionSig* sig, int which) {
const int kMaxParams = 11;
static const char* formals[kMaxParams] = {"",
"a",
@ -61,7 +61,12 @@ uint32_t AddJSSelector(TestingModuleBuilder* builder, FunctionSig* sig,
SNPrintF(source, "(function(%s) { return %c; })",
formals[sig->parameter_count()], param);
return builder->AddJsFunction(sig, source.start(), js_imports_table);
Handle<JSFunction> js_function =
Handle<JSFunction>::cast(v8::Utils::OpenHandle(
*v8::Local<v8::Function>::Cast(CompileRun(source.start()))));
ManuallyImportedJSFunction import = {sig, js_function};
return import;
}
void EXPECT_CALL(double expected, Handle<JSFunction> jsfunc,
@ -128,15 +133,20 @@ WASM_EXEC_TEST(Run_I32Popcount_jswrapped) {
}
WASM_EXEC_TEST(Run_CallJS_Add_jswrapped) {
WasmRunner<int, int> r(execution_mode);
TestSignatures sigs;
Handle<FixedArray> js_imports_table =
r.main_isolate()->factory()->NewFixedArray(2 * 3 + 1, TENURED);
uint32_t js_index = r.builder().AddJsFunction(
sigs.i_i(), "(function(a) { return a + 99; })", js_imports_table);
BUILD(r, WASM_CALL_FUNCTION(js_index, WASM_GET_LOCAL(0)));
HandleScope scope(CcTest::InitIsolateOnce());
const char* source = "(function(a) { return a + 99; })";
Handle<JSFunction> js_function =
Handle<JSFunction>::cast(v8::Utils::OpenHandle(
*v8::Local<v8::Function>::Cast(CompileRun(source))));
ManuallyImportedJSFunction import = {sigs.i_i(), js_function};
WasmRunner<int, int> r(execution_mode, &import);
uint32_t js_index = 0;
Handle<JSFunction> jsfunc = r.builder().WrapCode(r.function()->func_index);
WasmFunctionCompiler& t = r.NewFunction(sigs.i_i());
BUILD(t, WASM_CALL_FUNCTION(js_index, WASM_GET_LOCAL(0)));
Handle<JSFunction> jsfunc = r.builder().WrapCode(t.function_index());
EXPECT_CALL(101, jsfunc, 2, -8);
EXPECT_CALL(199, jsfunc, 100, -1);
@ -153,11 +163,9 @@ void RunJSSelectTest(WasmExecutionMode mode, int which) {
HandleScope scope(CcTest::InitIsolateOnce());
FunctionSig sig(1, num_params, types);
WasmRunner<void> r(mode);
Handle<FixedArray> js_imports_table =
scope.isolate()->factory()->NewFixedArray(2 * 3 + 1, TENURED);
uint32_t js_index =
AddJSSelector(&r.builder(), &sig, which, js_imports_table);
ManuallyImportedJSFunction import = CreateJSSelector(&sig, which);
WasmRunner<void> r(mode, &import);
uint32_t js_index = 0;
WasmFunctionCompiler& t = r.NewFunction(&sig);
@ -409,20 +417,17 @@ void RunJSSelectAlignTest(WasmExecutionMode mode, int num_args,
ADD_CODE(code, WASM_GET_LOCAL(i));
}
uint8_t predicted_js_index = 1;
ADD_CODE(code, kExprCallFunction, predicted_js_index);
uint8_t imported_js_index = 0;
ADD_CODE(code, kExprCallFunction, imported_js_index);
size_t end = code.size();
code.push_back(0);
// Call different select JS functions.
for (int which = 0; which < num_params; which++) {
WasmRunner<void> r(mode);
Handle<FixedArray> js_imports_table =
factory->NewFixedArray(2 * 3 + 1, TENURED);
uint32_t js_index =
AddJSSelector(&r.builder(), &sig, which, js_imports_table);
CHECK_EQ(predicted_js_index, js_index);
HandleScope scope(isolate);
ManuallyImportedJSFunction import = CreateJSSelector(&sig, which);
WasmRunner<void> r(mode, &import);
WasmFunctionCompiler& t = r.NewFunction(&sig);
t.Build(&code[0], &code[end]);

View File

@ -1,68 +0,0 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stdlib.h>
#include "src/assembler-inl.h"
#include "src/objects-inl.h"
#include "src/v8.h"
#include "test/cctest/cctest.h"
#include "test/cctest/compiler/c-signature.h"
#include "test/cctest/wasm/wasm-run-utils.h"
#include "test/common/wasm/wasm-macro-gen.h"
namespace v8 {
namespace internal {
namespace wasm {
namespace test_run_wasm_relocation {
// Tests that the WasmContext pointer embedded in a JS-to-wasm wrapper can be
// patched to point at a different context: after patching, the wasm globals
// live in |new_global_data| and the call must read/write there.
WASM_COMPILED_EXEC_TEST(RunPatchWasmContext) {
  WasmRunner<uint32_t, uint32_t> r(execution_mode);
  Isolate* isolate = CcTest::i_isolate();
  r.builder().AddGlobal<uint32_t>();
  r.builder().AddGlobal<uint32_t>();
  // main(a) { global[0] = a; return global[0]; }
  BUILD(r, WASM_SET_GLOBAL(0, WASM_GET_LOCAL(0)), WASM_GET_GLOBAL(0));
  CHECK_EQ(1, r.builder().CodeTableLength());
  // Run with the old global data.
  CHECK_EQ(113, r.Call(113));
  WasmContext* old_wasm_context =
      r.builder().instance_object()->wasm_context()->get();
  Address old_wasm_context_address =
      reinterpret_cast<Address>(old_wasm_context);
  uint32_t new_global_data[3] = {0, 0, 0};
  WasmContext new_wasm_context;
  new_wasm_context.globals_start = reinterpret_cast<byte*>(new_global_data);
  {
    // TODO(6792): No longer needed once WebAssembly code is off heap.
    CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
    // Patch in a new WasmContext that points to the new global data.
    int filter = 1 << RelocInfo::WASM_CONTEXT_REFERENCE;
    bool patched = false;
    Handle<Code> code = r.GetWrapperCode();
    for (RelocIterator it(*code, filter); !it.done(); it.next()) {
      // Every context reference in the wrapper must still point at the old
      // context before we redirect it.
      CHECK_EQ(old_wasm_context_address, it.rinfo()->wasm_context_reference());
      it.rinfo()->set_wasm_context_reference(
          reinterpret_cast<Address>(&new_wasm_context));
      patched = true;
    }
    // The wrapper must contain at least one context reference.
    CHECK(patched);
    Assembler::FlushICache(code->instruction_start(), code->instruction_size());
  }
  // Run with the new global data.
  CHECK_EQ(115, r.Call(115));
  CHECK_EQ(115, new_global_data[0]);
}
} // namespace test_run_wasm_relocation
} // namespace wasm
} // namespace internal
} // namespace v8

View File

@ -102,15 +102,16 @@ void CheckComputeLocation(v8::internal::Isolate* i_isolate, Handle<Object> exc,
// Call from JS to wasm to JS and throw an Error from JS.
WASM_EXEC_TEST(CollectDetailedWasmStack_ExplicitThrowFromJs) {
WasmRunner<void> r(execution_mode);
TestSignatures sigs;
Handle<FixedArray> js_imports_table =
r.main_isolate()->factory()->NewFixedArray(2 * 3 + 1, TENURED);
uint32_t js_throwing_index = r.builder().AddJsFunction(
sigs.v_v(),
"(function js() {\n function a() {\n throw new Error(); };\n a(); })",
js_imports_table);
HandleScope scope(CcTest::InitIsolateOnce());
const char* source =
"(function js() {\n function a() {\n throw new Error(); };\n a(); })";
Handle<JSFunction> js_function =
Handle<JSFunction>::cast(v8::Utils::OpenHandle(
*v8::Local<v8::Function>::Cast(CompileRun(source))));
ManuallyImportedJSFunction import = {sigs.v_v(), js_function};
uint32_t js_throwing_index = 0;
WasmRunner<void> r(execution_mode, &import);
// Add a nop such that we don't always get position 1.
BUILD(r, WASM_NOP, WASM_CALL_FUNCTION0(js_throwing_index));
@ -156,7 +157,7 @@ WASM_EXEC_TEST(CollectDetailedWasmStack_WasmError) {
int unreachable_pos = 1 << (8 * pos_shift);
TestSignatures sigs;
// Create a WasmRunner with stack checks and traps enabled.
WasmRunner<int> r(execution_mode, "main",
WasmRunner<int> r(execution_mode, 0, "main",
compiler::kRuntimeExceptionSupport);
std::vector<byte> code(unreachable_pos + 1, kExprNop);

View File

@ -69,7 +69,7 @@ void CheckExceptionInfos(v8::internal::Isolate* i_isolate, Handle<Object> exc,
// Trigger a trap for executing unreachable.
WASM_EXEC_TEST(Unreachable) {
// Create a WasmRunner with stack checks and traps enabled.
WasmRunner<void> r(execution_mode, "main",
WasmRunner<void> r(execution_mode, 0, "main",
compiler::kRuntimeExceptionSupport);
TestSignatures sigs;
@ -104,7 +104,7 @@ WASM_EXEC_TEST(Unreachable) {
// Trigger a trap for loading from out-of-bounds.
WASM_EXEC_TEST(IllegalLoad) {
WasmRunner<void> r(execution_mode, "main",
WasmRunner<void> r(execution_mode, 0, "main",
compiler::kRuntimeExceptionSupport);
TestSignatures sigs;

View File

@ -4,7 +4,6 @@
#include "test/cctest/wasm/wasm-run-utils.h"
#include "src/api.h"
#include "src/assembler-inl.h"
#include "src/wasm/wasm-memory.h"
#include "src/wasm/wasm-objects-inl.h"
@ -14,21 +13,47 @@ namespace internal {
namespace wasm {
TestingModuleBuilder::TestingModuleBuilder(
Zone* zone, WasmExecutionMode mode,
compiler::RuntimeExceptionSupport exception_support, LowerSimd lower_simd)
Zone* zone, ManuallyImportedJSFunction* maybe_import,
WasmExecutionMode mode, compiler::RuntimeExceptionSupport exception_support,
LowerSimd lower_simd)
: test_module_ptr_(&test_module_),
isolate_(CcTest::InitIsolateOnce()),
global_offset(0),
mem_start_(nullptr),
mem_size_(0),
interpreter_(nullptr),
execution_mode_(mode),
runtime_exception_support_(exception_support),
lower_simd_(lower_simd) {
WasmJs::Install(isolate_, true);
test_module_.globals_size = kMaxGlobalsSize;
memset(globals_data_, 0, sizeof(globals_data_));
uint32_t maybe_import_index = 0;
if (maybe_import) {
// Manually add an imported function before any other functions.
// This must happen before the instance object is created, since the
// instance object allocates import entries.
maybe_import_index = AddFunction(maybe_import->sig, nullptr);
DCHECK_EQ(0, maybe_import_index);
test_module_.num_imported_functions = 1;
test_module_.functions[0].imported = true;
}
instance_object_ = InitInstanceObject();
if (maybe_import) {
// Manually compile a wasm to JS wrapper and insert it into the instance.
CodeSpaceMemoryModificationScope modification_scope(isolate_->heap());
Handle<Code> code = compiler::CompileWasmToJSWrapper(
isolate_, maybe_import->js_function, maybe_import->sig,
maybe_import_index, test_module_.origin(),
trap_handler::IsTrapHandlerEnabled());
native_module_->ResizeCodeTableForTest(maybe_import_index);
auto wasm_to_js_wrapper = native_module_->AddCodeCopy(
code, wasm::WasmCode::kWasmToJsWrapper, maybe_import_index);
auto entry =
instance_object()->imported_function_entry_at(maybe_import_index);
entry.set(maybe_import->js_function, wasm_to_js_wrapper);
}
if (mode == kExecuteInterpreter) {
interpreter_ = WasmDebugInfo::SetupForTesting(instance_object_);
}
@ -62,7 +87,7 @@ byte* TestingModuleBuilder::AddMemory(uint32_t size) {
// TODO(wasm): Delete the following two lines when test-run-wasm will use a
// multiple of kPageSize as memory size. At the moment, the effect of these
// two lines is used to shrink the memory for testing purposes.
instance_object_->wasm_context()->get()->SetRawMemory(mem_start_, mem_size_);
instance_object_->SetRawMemory(mem_start_, mem_size_);
return mem_start_;
}
@ -73,7 +98,9 @@ uint32_t TestingModuleBuilder::AddFunction(FunctionSig* sig, const char* name) {
test_module_.functions.reserve(kMaxFunctions);
}
uint32_t index = static_cast<uint32_t>(test_module_.functions.size());
native_module_->ResizeCodeTableForTest(index);
if (native_module_) {
native_module_->ResizeCodeTableForTest(index);
}
test_module_.functions.push_back({sig, index, 0, {0, 0}, false, false});
if (name) {
Vector<const byte> name_vec = Vector<const byte>::cast(CStrVector(name));
@ -87,32 +114,17 @@ uint32_t TestingModuleBuilder::AddFunction(FunctionSig* sig, const char* name) {
return index;
}
uint32_t TestingModuleBuilder::AddJsFunction(
FunctionSig* sig, const char* source, Handle<FixedArray> js_imports_table) {
Handle<JSFunction> jsfunc = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
*v8::Local<v8::Function>::Cast(CompileRun(source))));
uint32_t index = AddFunction(sig, nullptr);
js_imports_table->set(0, *isolate_->native_context());
// TODO(6792): No longer needed once WebAssembly code is off heap.
CodeSpaceMemoryModificationScope modification_scope(isolate_->heap());
Handle<Code> code = compiler::CompileWasmToJSWrapper(
isolate_, jsfunc, sig, index, test_module_.origin(),
trap_handler::IsTrapHandlerEnabled(), js_imports_table);
native_module_->ResizeCodeTableForTest(index);
native_module_->AddCodeCopy(code, wasm::WasmCode::kWasmToJsWrapper, index);
return index;
}
Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) {
// Wrap the code so it can be called as a JS function.
Link();
wasm::WasmCode* code = native_module_->GetCode(index);
byte* context_address =
test_module_.has_memory
? reinterpret_cast<byte*>(instance_object_->wasm_context()->get())
: nullptr;
Handle<WasmCompiledModule> compiled_module(
instance_object()->compiled_module(), isolate_);
Handle<WeakCell> weak_instance(compiled_module->weak_owning_instance(),
isolate_);
Handle<Code> ret_code = compiler::CompileJSToWasmWrapper(
isolate_, &test_module_, code, index, context_address,
isolate_, &test_module_, weak_instance, code, index,
trap_handler::IsTrapHandlerEnabled());
Handle<JSFunction> ret = WasmExportedFunction::New(
isolate_, instance_object(), MaybeHandle<String>(),
@ -121,8 +133,6 @@ Handle<JSFunction> TestingModuleBuilder::WrapCode(uint32_t index) {
ret_code);
// Add weak reference to exported functions.
Handle<WasmCompiledModule> compiled_module(
instance_object()->compiled_module(), isolate_);
Handle<FixedArray> old_arr(compiled_module->weak_exported_functions(),
isolate_);
Handle<FixedArray> new_arr =
@ -145,40 +155,23 @@ void TestingModuleBuilder::AddIndirectFunctionTable(
for (uint32_t i = 0; i < table_size; ++i) {
table.values.push_back(function_indexes[i]);
}
FixedArray* func_table = *isolate_->factory()->NewFixedArray(
table_size * compiler::kFunctionTableEntrySize);
function_tables_.push_back(
isolate_->global_handles()->Create(func_table).address());
WasmContext* wasm_context = instance_object()->wasm_context()->get();
wasm_context->table = reinterpret_cast<IndirectFunctionTableEntry*>(
calloc(table_size, sizeof(IndirectFunctionTableEntry)));
wasm_context->table_size = table_size;
for (uint32_t i = 0; i < table_size; i++) {
wasm_context->table[i].sig_id = -1;
}
WasmInstanceObject::EnsureIndirectFunctionTableWithMinimumSize(
instance_object(), table_size);
}
void TestingModuleBuilder::PopulateIndirectFunctionTable() {
if (interpret()) return;
// Initialize the fixed arrays in instance->function_tables.
WasmContext* wasm_context = instance_object()->wasm_context()->get();
for (uint32_t i = 0; i < function_tables_.size(); i++) {
auto instance = instance_object();
uint32_t num_tables = 1; // TODO(titzer): multiple tables.
for (uint32_t i = 0; i < num_tables; i++) {
WasmIndirectFunctionTable& table = test_module_.function_tables[i];
Handle<FixedArray> function_table(
reinterpret_cast<FixedArray**>(function_tables_[i]));
int table_size = static_cast<int>(table.values.size());
int table_size = static_cast<int>(instance->indirect_function_table_size());
for (int j = 0; j < table_size; j++) {
WasmFunction& function = test_module_.functions[table.values[j]];
int sig_id = test_module_.signature_map.Find(function.sig);
function_table->set(compiler::FunctionTableSigOffset(j),
Smi::FromInt(sig_id));
auto start =
native_module_->GetCode(function.func_index)->instructions().start();
wasm_context->table[j].context = wasm_context;
wasm_context->table[j].sig_id = sig_id;
wasm_context->table[j].target = start;
auto wasm_code = native_module_->GetCode(function.func_index);
auto entry = instance->indirect_function_table_entry_at(j);
entry.set(sig_id, instance, wasm_code);
}
}
}
@ -201,8 +194,7 @@ uint32_t TestingModuleBuilder::AddBytes(Vector<const byte> bytes) {
}
compiler::ModuleEnv TestingModuleBuilder::CreateModuleEnv() {
return {&test_module_, function_tables_,
trap_handler::IsTrapHandlerEnabled()};
return {&test_module_, trap_handler::IsTrapHandlerEnabled()};
}
const WasmGlobal* TestingModuleBuilder::AddGlobal(ValueType type) {
@ -230,9 +222,9 @@ Handle<WasmInstanceObject> TestingModuleBuilder::InitInstanceObject() {
WasmSharedModuleData::New(isolate_, module_wrapper, empty_string, script,
Handle<ByteArray>::null());
Handle<FixedArray> export_wrappers = isolate_->factory()->NewFixedArray(0);
Handle<WasmCompiledModule> compiled_module = WasmCompiledModule::New(
isolate_, test_module_ptr_, export_wrappers, function_tables_,
trap_handler::IsTrapHandlerEnabled());
Handle<WasmCompiledModule> compiled_module =
WasmCompiledModule::New(isolate_, test_module_ptr_, export_wrappers,
trap_handler::IsTrapHandlerEnabled());
compiled_module->set_shared(*shared_module_data);
// This method is called when we initialize TestEnvironment. We don't
// have a memory yet, so we won't create it here. We'll update the
@ -244,7 +236,7 @@ Handle<WasmInstanceObject> TestingModuleBuilder::InitInstanceObject() {
DCHECK(compiled_module->IsWasmCompiledModule());
script->set_wasm_compiled_module(*compiled_module);
auto instance = WasmInstanceObject::New(isolate_, compiled_module);
instance->wasm_context()->get()->globals_start = globals_data_;
instance->set_globals_start(globals_data_);
Handle<WeakCell> weak_instance = isolate()->factory()->NewWeakCell(instance);
compiled_module->set_weak_owning_instance(*weak_instance);
return instance;

View File

@ -79,13 +79,19 @@ using compiler::Node;
r.Build(code, code + arraysize(code)); \
} while (false)
// For tests that must manually import a JSFunction with source code.
struct ManuallyImportedJSFunction {
FunctionSig* sig;
Handle<JSFunction> js_function;
};
// A Wasm module builder. Globals are pre-set, however, memory and code may be
// progressively added by a test. In turn, we piecemeal update the runtime
// objects, i.e. {WasmInstanceObject}, {WasmCompiledModule} and, if necessary,
// the interpreter.
class TestingModuleBuilder {
public:
TestingModuleBuilder(Zone*, WasmExecutionMode,
TestingModuleBuilder(Zone*, ManuallyImportedJSFunction*, WasmExecutionMode,
compiler::RuntimeExceptionSupport, LowerSimd);
void ChangeOriginToAsmjs() { test_module_.set_origin(kAsmJsOrigin); }
@ -177,9 +183,6 @@ class TestingModuleBuilder {
uint32_t AddFunction(FunctionSig* sig, const char* name);
uint32_t AddJsFunction(FunctionSig* sig, const char* source,
Handle<FixedArray> js_imports_table);
Handle<JSFunction> WrapCode(uint32_t index);
void AddIndirectFunctionTable(const uint16_t* function_indexes,
@ -225,15 +228,14 @@ class TestingModuleBuilder {
WasmModule test_module_;
WasmModule* test_module_ptr_;
Isolate* isolate_;
uint32_t global_offset;
byte* mem_start_;
uint32_t mem_size_;
std::vector<GlobalHandleAddress> function_tables_;
uint32_t global_offset = 0;
byte* mem_start_ = nullptr;
uint32_t mem_size_ = 0;
V8_ALIGNED(16) byte globals_data_[kMaxGlobalsSize];
WasmInterpreter* interpreter_;
WasmInterpreter* interpreter_ = nullptr;
WasmExecutionMode execution_mode_;
Handle<WasmInstanceObject> instance_object_;
NativeModule* native_module_;
NativeModule* native_module_ = nullptr;
bool linked_ = false;
compiler::RuntimeExceptionSupport runtime_exception_support_;
LowerSimd lower_simd_;
@ -281,13 +283,9 @@ class WasmFunctionWrapper : private compiler::GraphAndBuilders {
: common()->Int64Constant(static_cast<int64_t>(value));
}
void SetContextAddress(uintptr_t value) {
auto rmode = RelocInfo::WASM_CONTEXT_REFERENCE;
auto op = kPointerSize == 8 ? common()->RelocatableInt64Constant(
static_cast<int64_t>(value), rmode)
: common()->RelocatableInt32Constant(
static_cast<int32_t>(value), rmode);
compiler::NodeProperties::ChangeOp(context_address_, op);
void SetInstance(Handle<WasmInstanceObject> instance) {
compiler::NodeProperties::ChangeOp(context_address_,
common()->HeapConstant(instance));
}
Handle<Code> GetWrapperCode();
@ -349,11 +347,13 @@ class WasmFunctionCompiler : public compiler::GraphAndBuilders {
// code, and run that code.
class WasmRunnerBase : public HandleAndZoneScope {
public:
WasmRunnerBase(WasmExecutionMode execution_mode, int num_params,
WasmRunnerBase(ManuallyImportedJSFunction* maybe_import,
WasmExecutionMode execution_mode, int num_params,
compiler::RuntimeExceptionSupport runtime_exception_support,
LowerSimd lower_simd)
: zone_(&allocator_, ZONE_NAME),
builder_(&zone_, execution_mode, runtime_exception_support, lower_simd),
builder_(&zone_, maybe_import, execution_mode,
runtime_exception_support, lower_simd),
wrapper_(&zone_, num_params) {}
// Builds a graph from the given Wasm code and generates the machine
@ -430,11 +430,12 @@ template <typename ReturnType, typename... ParamTypes>
class WasmRunner : public WasmRunnerBase {
public:
WasmRunner(WasmExecutionMode execution_mode,
ManuallyImportedJSFunction* maybe_import = nullptr,
const char* main_fn_name = "main",
compiler::RuntimeExceptionSupport runtime_exception_support =
compiler::kNoRuntimeExceptionSupport,
LowerSimd lower_simd = kNoLowerSimd)
: WasmRunnerBase(execution_mode, sizeof...(ParamTypes),
: WasmRunnerBase(maybe_import, execution_mode, sizeof...(ParamTypes),
runtime_exception_support, lower_simd) {
NewFunction<ReturnType, ParamTypes...>(main_fn_name);
if (!interpret()) {
@ -443,8 +444,8 @@ class WasmRunner : public WasmRunnerBase {
}
WasmRunner(WasmExecutionMode execution_mode, LowerSimd lower_simd)
: WasmRunner(execution_mode, "main", compiler::kNoRuntimeExceptionSupport,
lower_simd) {}
: WasmRunner(execution_mode, nullptr, "main",
compiler::kNoRuntimeExceptionSupport, lower_simd) {}
ReturnType Call(ParamTypes... p) {
DCHECK(compiled_);
@ -459,10 +460,8 @@ class WasmRunner : public WasmRunnerBase {
set_trap_callback_for_testing(trap_callback);
wrapper_.SetInnerCode(builder_.GetFunctionCode(0));
WasmContext* wasm_context =
builder().instance_object()->wasm_context()->get();
wrapper_.SetContextAddress(reinterpret_cast<uintptr_t>(wasm_context));
builder().Link();
wrapper_.SetInstance(builder_.instance_object());
builder_.Link();
Handle<Code> wrapper_code = wrapper_.GetWrapperCode();
compiler::CodeRunner<int32_t> runner(CcTest::InitIsolateOnce(),
wrapper_code, wrapper_.signature());
@ -479,8 +478,6 @@ class WasmRunner : public WasmRunnerBase {
thread->Reset();
std::array<WasmValue, sizeof...(p)> args{{WasmValue(p)...}};
thread->InitFrame(function(), args.data());
WasmInterpreter::HeapObjectsScope heap_objects_scope(
interpreter(), builder().instance_object());
if (thread->Run() == WasmInterpreter::FINISHED) {
WasmValue val = thread->GetReturnValue();
possible_nondeterminism_ |= thread->PossibleNondeterminism();

View File

@ -97,7 +97,6 @@ bool InterpretWasmModuleForTesting(Isolate* isolate,
Zone zone(isolate->allocator(), ZONE_NAME);
WasmInterpreter* interpreter = WasmDebugInfo::SetupForTesting(instance);
WasmInterpreter::HeapObjectsScope heap_objects_scope(interpreter, instance);
WasmInterpreter::Thread* thread = interpreter->GetThread(0);
thread->Reset();
@ -169,7 +168,6 @@ int32_t InterpretWasmModule(Isolate* isolate,
v8::internal::HandleScope scope(isolate);
WasmInterpreter* interpreter = WasmDebugInfo::SetupForTesting(instance);
WasmInterpreter::HeapObjectsScope heap_objects_scope(interpreter, instance);
WasmInterpreter::Thread* thread = interpreter->GetThread(0);
thread->Reset();

View File

@ -27,13 +27,22 @@ var instance4;
]).exportFunc();
module = new WebAssembly.Module(builder.toBuffer());
print("Initial module");
%ValidateWasmModuleState(module);
print("Initial instances=0");
%ValidateWasmInstancesChain(module, 0);
instance1 = new WebAssembly.Instance(module, {"": {getValue: () => 1}});
print("Initial instances=1");
%ValidateWasmInstancesChain(module, 1);
instance2 = new WebAssembly.Instance(module, {"": {getValue: () => 2}});
print("Initial instances=2");
%ValidateWasmInstancesChain(module, 2);
instance3 = new WebAssembly.Instance(module, {"": {getValue: () => 3}});
print("Initial instances=3");
%ValidateWasmInstancesChain(module, 3);
})();
@ -43,6 +52,7 @@ var instance4;
})();
gc();
print("After gc instances=2");
%ValidateWasmInstancesChain(module, 2);
(function CompiledModuleInstancesClear3() {
@ -51,6 +61,7 @@ gc();
})();
gc();
print("After gc instances=1");
%ValidateWasmInstancesChain(module, 1);
(function CompiledModuleInstancesClear2() {
@ -58,7 +69,11 @@ gc();
instance2 = null;
})();
// Note that two GC's are required because weak cells are not cleared
// in the same cycle that the instance finalizer is run.
gc();
gc();
print("After gc module state");
%ValidateWasmModuleState(module);
(function CompiledModuleInstancesInitialize4AndClearModule() {

View File

@ -195,13 +195,20 @@ let id = (() => { // identity exported function
exp_add: exp_add, exp_inc: exp_inc, exp_ten: exp_ten}});
let table = instance.exports.table;
exp_a = table.get(0);
exp_i = table.get(1);
exp_t = table.get(2);
assertEquals(exp_a(1, 4), 5);
assertEquals(exp_i(8), 9);
assertEquals(exp_t(0), 10);
print(" initial check");
function checkTableFunc(index, expected, ...args) {
let f = table.get(index);
print(" table[" + index + "] = " + f);
result = f(...args);
print(" -> expect " + expected + ", got " + result);
assertEquals(expected, result);
}
checkTableFunc(0, 5, 1, 4);
checkTableFunc(1, 9, 8);
checkTableFunc(2, 10, 0);
let builder1 = new WasmModuleBuilder();
let g = builder1.addImportedGlobal("q", "base", kWasmI32);
@ -213,23 +220,22 @@ let id = (() => { // identity exported function
let module1 = new WebAssembly.Module(builder1.toBuffer());
function verifyTableFuncs(base) {
assertEquals(exp_a(1, 4), 5);
assertEquals(exp_i(8), 9);
assertEquals(exp_t(0), 10);
print(" base = " + base);
checkTableFunc(0, 5, 1, 4);
checkTableFunc(1, 9, 8);
checkTableFunc(2, 10, 0);
mul = table.get(base);
add = table.get(base + 1);
sub = table.get(base + 2);
assertEquals(20, mul(10, 2));
assertEquals(12, add(10, 2));
assertEquals(8, sub(10, 2));
checkTableFunc(base+0, 20, 10, 2); // mul
checkTableFunc(base+1, 12, 10, 2); // add
checkTableFunc(base+2, 8, 10, 2); // sub
}
for (let i = 3; i < 10; i++) {
let instance1 = new WebAssembly.Instance(module1, {q: {base: i, table: table}});
verifyTableFuncs(i);
assertEquals(table.length, table.grow(3));
var prev = table.length;
assertEquals(prev, table.grow(3));
assertEquals(prev + 3, table.length);
verifyTableFuncs(i);
assertThrows(() => table.set(table.length, id), RangeError);