Reland: [wasm][anyref] Add support for call_indirect with multiple tables

The reason for the revert was that Liftoff did not bail out on indirect
calls to tables other than table 0. Whenever the Liftoff code was
executed, the test failed.

Original message:
With this CL it is possible to use any anyfunc table in call_indirect,
not just the first table.
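For illustration only (not part of the CL): a minimal sketch of the new capability, written against the mjsunit WasmModuleBuilder helpers used by the test added below; function and variable names are illustrative.

// Minimal sketch, assuming test/mjsunit/wasm/wasm-module-builder.js and
// the --experimental-wasm-anyref flag.
load("test/mjsunit/wasm/wasm-module-builder.js");

const builder = new WasmModuleBuilder();
builder.addTable(kWasmAnyFunc, 1);                       // table 0 stays empty
const table1 = builder.addTable(kWasmAnyFunc, 1).index;  // call target lives here
const sig_i_v = builder.addType(kSig_i_v);
const f42 = builder.addFunction('f42', sig_i_v)
    .addBody([kExprI32Const, 42]).index;
builder.addElementSegment(table1, 0, false, [f42], false);
builder.addFunction('dispatch', kSig_i_i)
    .addBody([kExprGetLocal, 0,                          // entry index
              kExprCallIndirect, sig_i_v, table1])       // table operand != 0
    .exportAs('dispatch');
assertEquals(42, builder.instantiate().exports.dispatch(0));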

The current implementation is based on runtime calls. This is just an
initial implementation which should eventually be replaced by a
dispatch-table-based one. However, this implementation allows us to move
forward with the anyref proposal implementation.
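As a consequence of the runtime-call-based check, calling an uninitialized entry of a non-zero table traps with a signature mismatch (the runtime check mirrors the signature check that generated code performs for table 0). A minimal sketch, under the same assumptions and with the same illustrative naming as the sketch above:

// Table 1 has two slots; only slot 1 is initialized.
const builder2 = new WasmModuleBuilder();
builder2.addTable(kWasmAnyFunc, 1);                        // unused table 0
const t1 = builder2.addTable(kWasmAnyFunc, 2).index;
const sig = builder2.addType(kSig_i_v);
const f7 = builder2.addFunction('f7', sig)
    .addBody([kExprI32Const, 7]).index;
builder2.addElementSegment(t1, 1, false, [f7], false);     // slot 0 stays null
builder2.addFunction('call1', kSig_i_i)
    .addBody([kExprGetLocal, 0, kExprCallIndirect, sig, t1])
    .exportAs('call1');
const instance = builder2.instantiate();
assertEquals(7, instance.exports.call1(1));
// The null entry is reported as a signature mismatch, in line with the
// runtime function added in this CL and with the new test below.
assertTraps(kTrapFuncSigMismatch, () => instance.exports.call1(0));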

R=mstarzinger@chromium.org

Bug: v8:7581
Change-Id: Iedd56ee7acb281441bca32ffd3dc7157203ee1ac
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1532072
Commit-Queue: Andreas Haas <ahaas@chromium.org>
Auto-Submit: Andreas Haas <ahaas@chromium.org>
Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60382}
Andreas Haas 2019-03-21 09:07:46 +01:00 committed by Commit Bot
parent 21a471f2d8
commit 2d9ec0a420
13 changed files with 502 additions and 81 deletions


@ -2770,10 +2770,14 @@ Node* WasmGraphBuilder::CallDirect(uint32_t index, Node** args, Node*** rets,
return BuildWasmCall(sig, args, rets, position, nullptr, kNoRetpoline);
}
Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
Node*** rets,
Node* WasmGraphBuilder::CallIndirect(uint32_t table_index, uint32_t sig_index,
Node** args, Node*** rets,
wasm::WasmCodePosition position) {
return BuildIndirectCall(sig_index, args, rets, position, kCallContinues);
if (table_index == 0) {
return BuildIndirectCall(sig_index, args, rets, position, kCallContinues);
}
return BuildIndirectCall(table_index, sig_index, args, rets, position,
kCallContinues);
}
Node* WasmGraphBuilder::BuildIndirectCall(uint32_t sig_index, Node** args,
@ -2872,6 +2876,48 @@ Node* WasmGraphBuilder::BuildIndirectCall(uint32_t sig_index, Node** args,
}
}
Node* WasmGraphBuilder::BuildIndirectCall(uint32_t table_index,
uint32_t sig_index, Node** args,
Node*** rets,
wasm::WasmCodePosition position,
IsReturnCall continuation) {
DCHECK_NOT_NULL(args[0]);
Node* entry_index = args[0];
DCHECK_NOT_NULL(env_);
BoundsCheckTable(table_index, entry_index, position, wasm::kTrapFuncInvalid,
nullptr);
DCHECK(Smi::IsValid(table_index));
DCHECK(Smi::IsValid(sig_index));
Node* runtime_args[]{
graph()->NewNode(mcgraph()->common()->NumberConstant(table_index)),
BuildChangeUint31ToSmi(entry_index),
graph()->NewNode(mcgraph()->common()->NumberConstant(sig_index))};
Node* target_instance = BuildCallToRuntime(
Runtime::kWasmIndirectCallCheckSignatureAndGetTargetInstance,
runtime_args, arraysize(runtime_args));
// We reuse the runtime_args array here, even though we only need the first
// two arguments.
Node* call_target = BuildCallToRuntime(
Runtime::kWasmIndirectCallGetTargetAddress, runtime_args, 2);
wasm::FunctionSig* sig = env_->module->signatures[sig_index];
args[0] = call_target;
const UseRetpoline use_retpoline =
untrusted_code_mitigations_ ? kRetpoline : kNoRetpoline;
switch (continuation) {
case kCallContinues:
return BuildWasmCall(sig, args, rets, position, target_instance,
use_retpoline);
case kReturnCall:
return BuildWasmReturnCall(sig, args, position, target_instance,
use_retpoline);
}
}
Node* WasmGraphBuilder::ReturnCall(uint32_t index, Node** args,
wasm::WasmCodePosition position) {
DCHECK_NULL(args[0]);
@ -2891,9 +2937,14 @@ Node* WasmGraphBuilder::ReturnCall(uint32_t index, Node** args,
return BuildWasmReturnCall(sig, args, position, nullptr, kNoRetpoline);
}
Node* WasmGraphBuilder::ReturnCallIndirect(uint32_t sig_index, Node** args,
Node* WasmGraphBuilder::ReturnCallIndirect(uint32_t table_index,
uint32_t sig_index, Node** args,
wasm::WasmCodePosition position) {
return BuildIndirectCall(sig_index, args, nullptr, position, kReturnCall);
if (table_index == 0) {
return BuildIndirectCall(sig_index, args, nullptr, position, kReturnCall);
}
return BuildIndirectCall(table_index, sig_index, args, nullptr, position,
kReturnCall);
}
Node* WasmGraphBuilder::BuildI32Rol(Node* left, Node* right) {
@ -3315,10 +3366,10 @@ Node* WasmGraphBuilder::SetGlobal(uint32_t index, Node* val) {
graph()->NewNode(op, base, offset, val, Effect(), Control()));
}
void WasmGraphBuilder::GetTableBaseAndOffset(uint32_t table_index, Node* index,
wasm::WasmCodePosition position,
Node** base_node,
Node** offset_node) {
void WasmGraphBuilder::BoundsCheckTable(uint32_t table_index, Node* entry_index,
wasm::WasmCodePosition position,
wasm::TrapReason trap_reason,
Node** base_node) {
Node* tables = LOAD_INSTANCE_FIELD(Tables, MachineType::TaggedPointer());
Node* table = LOAD_FIXED_ARRAY_SLOT_ANY(tables, table_index);
@ -3337,22 +3388,32 @@ void WasmGraphBuilder::GetTableBaseAndOffset(uint32_t table_index, Node* index,
storage_size = BuildChangeSmiToInt32(storage_size);
// Bounds check against the table size.
Node* in_bounds = graph()->NewNode(mcgraph()->machine()->Uint32LessThan(),
index, storage_size);
TrapIfFalse(wasm::kTrapTableOutOfBounds, in_bounds, position);
entry_index, storage_size);
TrapIfFalse(trap_reason, in_bounds, position);
if (base_node) {
*base_node = storage;
}
}
void WasmGraphBuilder::GetTableBaseAndOffset(uint32_t table_index,
Node* entry_index,
wasm::WasmCodePosition position,
Node** base_node,
Node** offset_node) {
BoundsCheckTable(table_index, entry_index, position,
wasm::kTrapTableOutOfBounds, base_node);
// From the index, calculate the actual offset in the FixedArray. This
// is kHeaderSize + (index * kTaggedSize). kHeaderSize can be acquired with
// wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0).
Node* index_times_tagged_size =
graph()->NewNode(mcgraph()->machine()->IntMul(), Uint32ToUintptr(index),
mcgraph()->Int32Constant(kTaggedSize));
Node* index_times_tagged_size = graph()->NewNode(
mcgraph()->machine()->IntMul(), Uint32ToUintptr(entry_index),
mcgraph()->Int32Constant(kTaggedSize));
*offset_node = graph()->NewNode(
mcgraph()->machine()->IntAdd(), index_times_tagged_size,
mcgraph()->IntPtrConstant(
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0)));
*base_node = storage;
}
Node* WasmGraphBuilder::GetTable(uint32_t table_index, Node* index,


@ -281,13 +281,13 @@ class WasmGraphBuilder {
Node* CallDirect(uint32_t index, Node** args, Node*** rets,
wasm::WasmCodePosition position);
Node* CallIndirect(uint32_t index, Node** args, Node*** rets,
wasm::WasmCodePosition position);
Node* CallIndirect(uint32_t table_index, uint32_t sig_index, Node** args,
Node*** rets, wasm::WasmCodePosition position);
Node* ReturnCall(uint32_t index, Node** args,
wasm::WasmCodePosition position);
Node* ReturnCallIndirect(uint32_t index, Node** args,
wasm::WasmCodePosition position);
Node* ReturnCallIndirect(uint32_t table_index, uint32_t sig_index,
Node** args, wasm::WasmCodePosition position);
Node* Invert(Node* node);
@ -344,6 +344,10 @@ class WasmGraphBuilder {
void GetBaseAndOffsetForImportedMutableAnyRefGlobal(
const wasm::WasmGlobal& global, Node** base, Node** offset);
void BoundsCheckTable(uint32_t table_index, Node* index,
wasm::WasmCodePosition position,
wasm::TrapReason trap_reason, Node** base_node);
void GetTableBaseAndOffset(uint32_t table_index, Node* index,
wasm::WasmCodePosition position, Node** base_node,
Node** offset_node);
@ -491,9 +495,13 @@ class WasmGraphBuilder {
Node* BuildCallNode(wasm::FunctionSig* sig, Node** args,
wasm::WasmCodePosition position, Node* instance_node,
const Operator* op);
// Special implementation for CallIndirect for table 0.
Node* BuildIndirectCall(uint32_t sig_index, Node** args, Node*** rets,
wasm::WasmCodePosition position,
IsReturnCall continuation);
Node* BuildIndirectCall(uint32_t table_index, uint32_t sig_index, Node** args,
Node*** rets, wasm::WasmCodePosition position,
IsReturnCall continuation);
Node* BuildWasmCall(wasm::FunctionSig* sig, Node** args, Node*** rets,
wasm::WasmCodePosition position, Node* instance_node,
UseRetpoline use_retpoline);


@ -373,6 +373,116 @@ RUNTIME_FUNCTION(Runtime_WasmFunctionTableSet) {
return ReadOnlyRoots(isolate).undefined_value();
}
RUNTIME_FUNCTION(Runtime_WasmIndirectCallCheckSignatureAndGetTargetInstance) {
HandleScope scope(isolate);
DCHECK_EQ(3, args.length());
auto instance =
Handle<WasmInstanceObject>(GetWasmInstanceOnStackTop(isolate), isolate);
CONVERT_UINT32_ARG_CHECKED(table_index, 0);
CONVERT_UINT32_ARG_CHECKED(entry_index, 1);
CONVERT_UINT32_ARG_CHECKED(sig_index, 2);
DCHECK(isolate->context().is_null());
isolate->set_context(instance->native_context());
DCHECK_LT(table_index, instance->tables()->length());
auto table_obj = handle(
WasmTableObject::cast(instance->tables()->get(table_index)), isolate);
// This check is already done in generated code.
DCHECK(WasmTableObject::IsInBounds(isolate, table_obj, entry_index));
bool is_valid;
bool is_null;
MaybeHandle<WasmInstanceObject> maybe_target_instance;
int function_index;
WasmTableObject::GetFunctionTableEntry(
isolate, table_obj, entry_index, &is_valid, &is_null,
&maybe_target_instance, &function_index);
CHECK(is_valid);
if (is_null) {
// We throw a signature-mismatch trap to stay in sync with the generated
// code, which does a signature check instead of a null check. Trap
// reasons are not defined in the spec; otherwise a null check would have
// to be performed before the signature check.
return ThrowWasmError(isolate, MessageTemplate::kWasmTrapFuncSigMismatch);
}
// Now we do the signature check.
Handle<WasmInstanceObject> target_instance =
maybe_target_instance.ToHandleChecked();
const wasm::WasmModule* target_module =
target_instance->module_object()->native_module()->module();
wasm::FunctionSig* target_sig = target_module->functions[function_index].sig;
auto target_sig_id = instance->module()->signature_map.Find(*target_sig);
uint32_t expected_sig_id = instance->module()->signature_ids[sig_index];
if (expected_sig_id != static_cast<uint32_t>(target_sig_id)) {
return ThrowWasmError(isolate, MessageTemplate::kWasmTrapFuncSigMismatch);
}
if (function_index <
static_cast<int>(target_instance->module()->num_imported_functions)) {
// The function in the target instance was imported. Use its imports table,
// which contains a tuple needed by the import wrapper.
ImportedFunctionEntry entry(target_instance, function_index);
return entry.object_ref();
}
return *target_instance;
}
RUNTIME_FUNCTION(Runtime_WasmIndirectCallGetTargetAddress) {
HandleScope scope(isolate);
DCHECK_EQ(2, args.length());
auto instance =
Handle<WasmInstanceObject>(GetWasmInstanceOnStackTop(isolate), isolate);
CONVERT_UINT32_ARG_CHECKED(table_index, 0);
CONVERT_UINT32_ARG_CHECKED(entry_index, 1);
DCHECK_LT(table_index, instance->tables()->length());
auto table_obj = handle(
WasmTableObject::cast(instance->tables()->get(table_index)), isolate);
DCHECK(WasmTableObject::IsInBounds(isolate, table_obj, entry_index));
bool is_valid;
bool is_null;
MaybeHandle<WasmInstanceObject> maybe_target_instance;
int function_index;
WasmTableObject::GetFunctionTableEntry(
isolate, table_obj, entry_index, &is_valid, &is_null,
&maybe_target_instance, &function_index);
CHECK(is_valid);
// The null-check should already have been done in
// Runtime_WasmIndirectCallCheckSignatureAndGetTargetInstance. That runtime
// function should always be called first.
CHECK(!is_null);
Handle<WasmInstanceObject> target_instance =
maybe_target_instance.ToHandleChecked();
Address call_target = 0;
if (function_index <
static_cast<int>(target_instance->module()->num_imported_functions)) {
// The function in the target instance was imported. Use its imports table,
// which contains a tuple needed by the import wrapper.
ImportedFunctionEntry entry(target_instance, function_index);
call_target = entry.target();
} else {
// The function in the target instance was not imported.
call_target = target_instance->GetCallTarget(function_index);
}
// The return value is an address and not a SMI. However, the address is
// always aligned, and a SMI uses the same space as {Address}.
CHECK(HAS_SMI_TAG(call_target));
return Smi(call_target);
}
RUNTIME_FUNCTION(Runtime_WasmTableInit) {
HandleScope scope(isolate);
DCHECK_EQ(5, args.length());


@ -527,24 +527,26 @@ namespace internal {
F(TypedArraySet, 2, 1) \
F(TypedArraySortFast, 1, 1)
#define FOR_EACH_INTRINSIC_WASM(F, I) \
F(ThrowWasmError, 1, 1) \
F(ThrowWasmStackOverflow, 0, 1) \
F(WasmI32AtomicWait, 4, 1) \
F(WasmI64AtomicWait, 5, 1) \
F(WasmAtomicNotify, 3, 1) \
F(WasmExceptionGetValues, 1, 1) \
F(WasmExceptionGetTag, 1, 1) \
F(WasmMemoryGrow, 2, 1) \
F(WasmRunInterpreter, 2, 1) \
F(WasmStackGuard, 0, 1) \
F(WasmThrowCreate, 2, 1) \
F(WasmThrowTypeError, 0, 1) \
F(WasmFunctionTableGet, 3, 1) \
F(WasmFunctionTableSet, 4, 1) \
F(WasmTableInit, 5, 1) \
F(WasmTableCopy, 5, 1) \
F(WasmIsValidAnyFuncValue, 1, 1) \
#define FOR_EACH_INTRINSIC_WASM(F, I) \
F(ThrowWasmError, 1, 1) \
F(ThrowWasmStackOverflow, 0, 1) \
F(WasmI32AtomicWait, 4, 1) \
F(WasmI64AtomicWait, 5, 1) \
F(WasmAtomicNotify, 3, 1) \
F(WasmExceptionGetValues, 1, 1) \
F(WasmExceptionGetTag, 1, 1) \
F(WasmMemoryGrow, 2, 1) \
F(WasmRunInterpreter, 2, 1) \
F(WasmStackGuard, 0, 1) \
F(WasmThrowCreate, 2, 1) \
F(WasmThrowTypeError, 0, 1) \
F(WasmFunctionTableGet, 3, 1) \
F(WasmFunctionTableSet, 4, 1) \
F(WasmTableInit, 5, 1) \
F(WasmTableCopy, 5, 1) \
F(WasmIndirectCallCheckSignatureAndGetTargetInstance, 3, 1) \
F(WasmIndirectCallGetTargetAddress, 2, 1) \
F(WasmIsValidAnyFuncValue, 1, 1) \
F(WasmCompileLazy, 2, 1)
#define FOR_EACH_INTRINSIC_RETURN_PAIR_IMPL(F, I) \


@ -1736,6 +1736,9 @@ class LiftoffCompiler {
if (imm.sig->return_count() > 1) {
return unsupported(decoder, "multi-return");
}
if (imm.table_index != 0) {
return unsupported(decoder, "table index != 0");
}
if (imm.sig->return_count() == 1 &&
!CheckSupportedType(decoder, kSupportedTypes, imm.sig->GetReturn(0),
"return")) {


@ -302,11 +302,12 @@ struct CallIndirectImmediate {
uint32_t sig_index;
FunctionSig* sig = nullptr;
uint32_t length = 0;
inline CallIndirectImmediate(Decoder* decoder, const byte* pc) {
inline CallIndirectImmediate(const WasmFeatures enabled, Decoder* decoder,
const byte* pc) {
uint32_t len = 0;
sig_index = decoder->read_u32v<validate>(pc + 1, &len, "signature index");
table_index = decoder->read_u8<validate>(pc + 1 + len, "table index");
if (!VALIDATE(table_index == 0)) {
if (!VALIDATE(table_index == 0 || enabled.anyref)) {
decoder->errorf(pc + 1 + len, "expected table index 0, found %u",
table_index);
}
@ -952,10 +953,16 @@ class WasmDecoder : public Decoder {
}
inline bool Validate(const byte* pc, CallIndirectImmediate<validate>& imm) {
if (!VALIDATE(module_ != nullptr && !module_->tables.empty())) {
if (!VALIDATE(module_ != nullptr &&
imm.table_index < module_->tables.size())) {
error("function table has to exist to execute call_indirect");
return false;
}
if (!VALIDATE(module_ != nullptr &&
module_->tables[imm.table_index].type == kWasmAnyFunc)) {
error("table of call_indirect must be of type anyfunc");
return false;
}
if (!Complete(pc, imm)) {
errorf(pc + 1, "invalid signature index: #%u", imm.sig_index);
return false;
@ -1178,7 +1185,7 @@ class WasmDecoder : public Decoder {
}
case kExprCallIndirect:
case kExprReturnCallIndirect: {
CallIndirectImmediate<validate> imm(decoder, pc);
CallIndirectImmediate<validate> imm(kAllWasmFeatures, decoder, pc);
return 1 + imm.length;
}
@ -1373,7 +1380,7 @@ class WasmDecoder : public Decoder {
return {imm.sig->parameter_count(), imm.sig->return_count()};
}
case kExprCallIndirect: {
CallIndirectImmediate<validate> imm(this, pc);
CallIndirectImmediate<validate> imm(this->enabled_, this, pc);
CHECK(Complete(pc, imm));
// Indirect calls pop an additional argument for the table index.
return {imm.sig->parameter_count() + 1,
@ -2147,7 +2154,7 @@ class WasmFullDecoder : public WasmDecoder<validate> {
break;
}
case kExprCallIndirect: {
CallIndirectImmediate<validate> imm(this, this->pc_);
CallIndirectImmediate<validate> imm(this->enabled_, this, this->pc_);
len = 1 + imm.length;
if (!this->Validate(this->pc_, imm)) break;
auto index = Pop(0, kWasmI32);
@ -2176,7 +2183,7 @@ class WasmFullDecoder : public WasmDecoder<validate> {
}
case kExprReturnCallIndirect: {
CHECK_PROTOTYPE_OPCODE(return_call);
CallIndirectImmediate<validate> imm(this, this->pc_);
CallIndirectImmediate<validate> imm(this->enabled_, this, this->pc_);
len = 1 + imm.length;
if (!this->Validate(this->pc_, imm)) break;
if (!this->CanReturnCall(imm.sig)) {


@ -234,7 +234,8 @@ bool PrintRawWasmCode(AccountingAllocator* allocator, const FunctionBody& body,
break;
}
case kExprCallIndirect: {
CallIndirectImmediate<Decoder::kNoValidate> imm(&i, i.pc());
CallIndirectImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
i.pc());
os << " // sig #" << imm.sig_index;
if (decoder.Complete(i.pc(), imm)) {
os << ": " << *imm.sig;


@ -402,25 +402,27 @@ class WasmGraphBuildingInterface {
void CallDirect(FullDecoder* decoder,
const CallFunctionImmediate<validate>& imm,
const Value args[], Value returns[]) {
DoCall(decoder, nullptr, imm.sig, imm.index, args, returns);
DoCall(decoder, 0, nullptr, imm.sig, imm.index, args, returns);
}
void ReturnCall(FullDecoder* decoder,
const CallFunctionImmediate<validate>& imm,
const Value args[]) {
DoReturnCall(decoder, nullptr, imm.sig, imm.index, args);
DoReturnCall(decoder, 0, nullptr, imm.sig, imm.index, args);
}
void CallIndirect(FullDecoder* decoder, const Value& index,
const CallIndirectImmediate<validate>& imm,
const Value args[], Value returns[]) {
DoCall(decoder, index.node, imm.sig, imm.sig_index, args, returns);
DoCall(decoder, imm.table_index, index.node, imm.sig, imm.sig_index, args,
returns);
}
void ReturnCallIndirect(FullDecoder* decoder, const Value& index,
const CallIndirectImmediate<validate>& imm,
const Value args[]) {
DoReturnCall(decoder, index.node, imm.sig, imm.sig_index, args);
DoReturnCall(decoder, imm.table_index, index.node, imm.sig, imm.sig_index,
args);
}
void SimdOp(FullDecoder* decoder, WasmOpcode opcode, Vector<Value> args,
@ -850,8 +852,9 @@ class WasmGraphBuildingInterface {
return result;
}
void DoCall(FullDecoder* decoder, TFNode* index_node, FunctionSig* sig,
uint32_t index, const Value args[], Value returns[]) {
void DoCall(FullDecoder* decoder, uint32_t table_index, TFNode* index_node,
FunctionSig* sig, uint32_t sig_index, const Value args[],
Value returns[]) {
int param_count = static_cast<int>(sig->parameter_count());
TFNode** arg_nodes = builder_->Buffer(param_count + 1);
TFNode** return_nodes = nullptr;
@ -860,9 +863,11 @@ class WasmGraphBuildingInterface {
arg_nodes[i + 1] = args[i].node;
}
if (index_node) {
BUILD(CallIndirect, index, arg_nodes, &return_nodes, decoder->position());
BUILD(CallIndirect, table_index, sig_index, arg_nodes, &return_nodes,
decoder->position());
} else {
BUILD(CallDirect, index, arg_nodes, &return_nodes, decoder->position());
BUILD(CallDirect, sig_index, arg_nodes, &return_nodes,
decoder->position());
}
int return_count = static_cast<int>(sig->return_count());
for (int i = 0; i < return_count; ++i) {
@ -873,18 +878,20 @@ class WasmGraphBuildingInterface {
LoadContextIntoSsa(ssa_env_);
}
void DoReturnCall(FullDecoder* decoder, TFNode* index_node, FunctionSig* sig,
uint32_t index, const Value args[]) {
int param_count = static_cast<int>(sig->parameter_count());
TFNode** arg_nodes = builder_->Buffer(param_count + 1);
void DoReturnCall(FullDecoder* decoder, uint32_t table_index,
TFNode* index_node, FunctionSig* sig, uint32_t sig_index,
const Value args[]) {
int arg_count = static_cast<int>(sig->parameter_count());
TFNode** arg_nodes = builder_->Buffer(arg_count + 1);
arg_nodes[0] = index_node;
for (int i = 0; i < param_count; ++i) {
for (int i = 0; i < arg_count; ++i) {
arg_nodes[i + 1] = args[i].node;
}
if (index_node) {
BUILD(ReturnCallIndirect, index, arg_nodes, decoder->position());
BUILD(ReturnCallIndirect, table_index, sig_index, arg_nodes,
decoder->position());
} else {
BUILD(ReturnCall, index, arg_nodes, decoder->position());
BUILD(ReturnCall, sig_index, arg_nodes, decoder->position());
}
}
};
@ -892,8 +899,7 @@ class WasmGraphBuildingInterface {
} // namespace
DecodeResult BuildTFGraph(AccountingAllocator* allocator,
const WasmFeatures& enabled,
const wasm::WasmModule* module,
const WasmFeatures& enabled, const WasmModule* module,
compiler::WasmGraphBuilder* builder,
WasmFeatures* detected, const FunctionBody& body,
compiler::NodeOriginTable* node_origins) {


@ -1406,7 +1406,8 @@ class ThreadImpl {
return pc + 1 + imm.length;
}
case kExprCallIndirect: {
CallIndirectImmediate<Decoder::kNoValidate> imm(decoder, code->at(pc));
CallIndirectImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures,
decoder, code->at(pc));
return pc + 1 + imm.length;
}
default:
@ -2351,32 +2352,32 @@ class ThreadImpl {
// Encode the exception values on the operand stack into the exception
// package allocated above. This encoding has to be in sync with other
// backends so that exceptions can be passed between them.
const wasm::WasmExceptionSig* sig = exception->sig;
const WasmExceptionSig* sig = exception->sig;
uint32_t encoded_index = 0;
for (size_t i = 0; i < sig->parameter_count(); ++i) {
WasmValue value = sp_[i - sig->parameter_count()];
switch (sig->GetParam(i)) {
case wasm::kWasmI32: {
case kWasmI32: {
uint32_t u32 = value.to_u32();
EncodeI32ExceptionValue(encoded_values, &encoded_index, u32);
break;
}
case wasm::kWasmF32: {
case kWasmF32: {
uint32_t f32 = value.to_f32_boxed().get_bits();
EncodeI32ExceptionValue(encoded_values, &encoded_index, f32);
break;
}
case wasm::kWasmI64: {
case kWasmI64: {
uint64_t u64 = value.to_u64();
EncodeI64ExceptionValue(encoded_values, &encoded_index, u64);
break;
}
case wasm::kWasmF64: {
case kWasmF64: {
uint64_t f64 = value.to_f64_boxed().get_bits();
EncodeI64ExceptionValue(encoded_values, &encoded_index, f64);
break;
}
case wasm::kWasmAnyRef:
case kWasmAnyRef:
UNIMPLEMENTED();
break;
default:
@ -2658,8 +2659,8 @@ class ThreadImpl {
} break;
case kExprCallIndirect: {
CallIndirectImmediate<Decoder::kNoValidate> imm(&decoder,
code->at(pc));
CallIndirectImmediate<Decoder::kNoValidate> imm(
kAllWasmFeatures, &decoder, code->at(pc));
uint32_t entry_index = Pop().to<uint32_t>();
// Assume only one table for now.
DCHECK_LE(module()->tables.size(), 1u);
@ -2730,8 +2731,8 @@ class ThreadImpl {
} break;
case kExprReturnCallIndirect: {
CallIndirectImmediate<Decoder::kNoValidate> imm(&decoder,
code->at(pc));
CallIndirectImmediate<Decoder::kNoValidate> imm(
kAllWasmFeatures, &decoder, code->at(pc));
uint32_t entry_index = Pop().to<uint32_t>();
// Assume only one table for now.
DCHECK_LE(module()->tables.size(), 1u);
@ -3147,8 +3148,7 @@ class ThreadImpl {
const WasmCode* code,
FunctionSig* sig) {
int num_args = static_cast<int>(sig->parameter_count());
wasm::WasmFeatures enabled_features =
wasm::WasmFeaturesFromIsolate(isolate);
WasmFeatures enabled_features = WasmFeaturesFromIsolate(isolate);
if (code->kind() == WasmCode::kWasmToJsWrapper &&
!IsJSCompatibleSignature(sig, enabled_features.bigint)) {


@ -143,7 +143,8 @@ void PrintWasmText(const WasmModule* module, const ModuleWireBytes& wire_bytes,
break;
}
case kExprCallIndirect: {
CallIndirectImmediate<Decoder::kNoValidate> imm(&i, i.pc());
CallIndirectImmediate<Decoder::kNoValidate> imm(kAllWasmFeatures, &i,
i.pc());
DCHECK_EQ(0, imm.table_index);
os << "call_indirect " << imm.sig_index;
break;


@ -166,6 +166,7 @@ void TestingModuleBuilder::AddIndirectFunctionTable(
table.initial_size = table_size;
table.maximum_size = table_size;
table.has_maximum_size = true;
table.type = kWasmAnyFunc;
WasmInstanceObject::EnsureIndirectFunctionTableWithMinimumSize(
instance_object(), table_size);
for (uint32_t i = 0; i < table_size; ++i) {


@ -0,0 +1,202 @@
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --expose-wasm --experimental-wasm-anyref --experimental-wasm-return-call
load("test/mjsunit/wasm/wasm-module-builder.js");
(function IndirectCallToNonZeroTable() {
print(arguments.callee.name);
const builder = new WasmModuleBuilder();
const placeholder = builder.addTable(kWasmAnyFunc, 3).index;
const table1 = builder.addTable(kWasmAnyFunc, 3).index;
const table2 = builder.addTable(kWasmAnyFunc, 5).index;
const sig_index = builder.addType(kSig_i_v);
const other_sig = builder.addType(kSig_i_i);
const v1 = 16;
const v2 = 26;
const v3 = 36;
const v4 = 46;
const v5 = 56;
const f_unreachable = builder.addFunction('unreachable', sig_index)
.addBody([kExprUnreachable]).index;
const f1 = builder.addFunction('f1', sig_index)
.addBody([kExprI32Const, v1])
.index;
const f2 = builder.addFunction('f2', sig_index)
.addBody([kExprI32Const, v2])
.index;
const f3 = builder.addFunction('f3', sig_index)
.addBody([kExprI32Const, v3])
.index;
const f4 = builder.addFunction('f4', sig_index)
.addBody([kExprI32Const, v4])
.index;
const f5 = builder.addFunction('f5', sig_index)
.addBody([kExprI32Const, v5])
.index;
builder.addFunction('call1', kSig_i_i)
.addBody([kExprGetLocal, 0, // function index
kExprCallIndirect, sig_index, table1])
.exportAs('call1');
builder.addFunction('return_call1', kSig_i_i)
.addBody([kExprGetLocal, 0, // function index
kExprReturnCallIndirect, sig_index, table1])
.exportAs('return_call1');
builder.addFunction('call2', kSig_i_i)
.addBody([kExprGetLocal, 0, // function index
kExprCallIndirect, sig_index, table2])
.exportAs('call2');
builder.addFunction('return_call2', kSig_i_i)
.addBody([kExprGetLocal, 0, // function index
kExprReturnCallIndirect, sig_index, table2])
.exportAs('return_call2');
builder.addFunction('call_invalid_sig', kSig_i_i)
.addBody([kExprGetLocal, 0, kExprGetLocal, 0, // function index + param
kExprCallIndirect, other_sig, table2])
.exportAs('call_invalid_sig');
builder.addFunction('return_call_invalid_sig', kSig_i_i)
.addBody([kExprGetLocal, 0, kExprGetLocal, 0, // function index + param
kExprReturnCallIndirect, other_sig, table2])
.exportAs('return_call_invalid_sig');
// A call that wrongly goes through table 0 hits an unreachable function and traps.
builder.addElementSegment(placeholder, 0, false,
[f_unreachable, f_unreachable, f_unreachable], false);
builder.addElementSegment(table1, 0, false, [f1, f2, f3], false);
// Keep one slot in table2 uninitialized. We should trap if we call it.
builder.addElementSegment(table2, 1, false,
[f_unreachable, f_unreachable, f4, f5], false);
const instance = builder.instantiate();
assertEquals(v1, instance.exports.call1(0));
assertEquals(v2, instance.exports.call1(1));
assertEquals(v3, instance.exports.call1(2));
assertTraps(kTrapFuncInvalid, () => instance.exports.call1(3));
assertEquals(v1, instance.exports.return_call1(0));
assertEquals(v2, instance.exports.return_call1(1));
assertEquals(v3, instance.exports.return_call1(2));
assertTraps(kTrapFuncInvalid, () => instance.exports.return_call1(3));
// Try to call through the uninitialized table entry.
assertTraps(kTrapFuncSigMismatch, () => instance.exports.call2(0));
assertEquals(v4, instance.exports.call2(3));
assertEquals(v5, instance.exports.call2(4));
assertTraps(kTrapFuncSigMismatch,
() => instance.exports.call_invalid_sig(4));
assertTraps(kTrapFuncSigMismatch, () => instance.exports.return_call2(0));
assertEquals(v4, instance.exports.return_call2(3));
assertEquals(v5, instance.exports.return_call2(4));
assertTraps(kTrapFuncSigMismatch,
() => instance.exports.return_call_invalid_sig(4));
})();
(function IndirectCallToImportedNonZeroTable() {
print(arguments.callee.name);
const table_size = 10;
const placeholder = new WebAssembly.Table(
{ initial: table_size, maximum: table_size, element: "anyfunc" });
const table = new WebAssembly.Table(
{ initial: table_size, maximum: table_size, element: "anyfunc" });
const builder = new WasmModuleBuilder();
builder.addImportedTable("m", "placeholder", table_size, table_size);
const t1 = builder.addImportedTable("m", "table", table_size, table_size);
// We instantiate the module twice and put the function f1 into the table at
// the index defined by {g}, so each instance initializes a different slot of
// the shared table. f1 returns a value derived from {g} so that we can
// observe that different functions actually get called.
const g = builder.addImportedGlobal("m", "base", kWasmI32);
const sig_index = builder.addType(kSig_i_v);
const f1 = builder.addFunction("foo", sig_index)
.addBody([kExprGetGlobal, g, kExprI32Const, 12, kExprI32Add]);
builder.addFunction('call', kSig_i_i)
.addBody([kExprGetLocal, 0, // function index
kExprCallIndirect, sig_index, t1])
.exportAs('call');
builder.addElementSegment(t1, g, true, [f1.index], true);
const base1 = 3;
const base2 = 5;
const instance1 = builder.instantiate({
m: {
placeholder: placeholder,
table: table,
base: base1
}
});
const instance2 = builder.instantiate({
m: {
placeholder: placeholder,
table: table,
base: base2
}
});
assertEquals(base1 + 12, instance1.exports.call(base1));
assertEquals(base2 + 12, instance1.exports.call(base2));
assertEquals(base1 + 12, instance2.exports.call(base1));
assertEquals(base2 + 12, instance2.exports.call(base2));
})();
function js_div(a, b) { return (a / b) | 0; }
(function CallImportedFunction() {
let kTableSize = 10;
print(arguments.callee.name);
var builder = new WasmModuleBuilder();
let div = builder.addImport("q", "js_div", kSig_i_ii);
builder.addImportedTable("q", "placeholder", kTableSize, kTableSize);
let table_index = builder.addImportedTable("q", "table", kTableSize, kTableSize);
let g = builder.addImportedGlobal("q", "base", kWasmI32);
let sig_index = builder.addType(kSig_i_ii);
builder.addFunction("placeholder", sig_index)
.addBody([kExprGetLocal, 0]);
builder.addElementSegment(table_index, g, true, [div]);
builder.addFunction("main", kSig_i_ii)
.addBody([
kExprI32Const, 55, // --
kExprGetLocal, 0, // --
kExprGetLocal, 1, // --
kExprCallIndirect, 0, table_index]) // --
.exportAs("main");
let m = new WebAssembly.Module(builder.toBuffer());
let table = new WebAssembly.Table({
element: "anyfunc",
initial: kTableSize,
maximum: kTableSize
});
let placeholder = new WebAssembly.Table({
element: "anyfunc",
initial: kTableSize,
maximum: kTableSize
});
let instance = new WebAssembly.Instance(m, {
q: {
base: 0, table: table, placeholder: placeholder,
js_div: js_div
}
});
assertEquals(13, instance.exports.main(4, 0));
})();


@ -1626,7 +1626,7 @@ TEST_F(FunctionBodyDecoderTest, SimpleIndirectReturnCalls) {
FunctionSig* sig = sigs.i_i();
TestModuleBuilder builder;
builder.InitializeTable();
builder.AddTable(kWasmAnyFunc, 20, true, 30);
module = builder.module();
byte f0 = builder.AddSignature(sigs.i_v());
@ -1645,7 +1645,7 @@ TEST_F(FunctionBodyDecoderTest, IndirectReturnCallsOutOfBounds) {
FunctionSig* sig = sigs.i_i();
TestModuleBuilder builder;
builder.InitializeTable();
builder.AddTable(kWasmAnyFunc, 20, false, 20);
module = builder.module();
ExpectFailure(sig, {WASM_RETURN_CALL_INDIRECT0(0, WASM_ZERO)});
@ -1768,7 +1768,7 @@ TEST_F(FunctionBodyDecoderTest, MultiReturnType) {
TEST_F(FunctionBodyDecoderTest, SimpleIndirectCalls) {
FunctionSig* sig = sigs.i_i();
TestModuleBuilder builder;
builder.InitializeTable();
builder.AddTable(kWasmAnyFunc, 20, false, 20);
module = builder.module();
byte f0 = builder.AddSignature(sigs.i_v());
@ -1784,7 +1784,7 @@ TEST_F(FunctionBodyDecoderTest, SimpleIndirectCalls) {
TEST_F(FunctionBodyDecoderTest, IndirectCallsOutOfBounds) {
FunctionSig* sig = sigs.i_i();
TestModuleBuilder builder;
builder.InitializeTable();
builder.AddTable(kWasmAnyFunc, 20, false, 20);
module = builder.module();
ExpectFailure(sig, {WASM_CALL_INDIRECT0(0, WASM_ZERO)});
@ -2113,6 +2113,25 @@ TEST_F(FunctionBodyDecoderTest, GetTable) {
WASM_GET_TABLE(oob_tab, WASM_I32V(3)))});
}
TEST_F(FunctionBodyDecoderTest, MultiTableCallIndirect) {
WASM_FEATURE_SCOPE(anyref);
TestModuleBuilder builder;
module = builder.module();
byte tab_ref = builder.AddTable(kWasmAnyRef, 10, true, 20);
byte tab_func = builder.AddTable(kWasmAnyFunc, 20, true, 30);
ValueType sig_types[]{kWasmAnyRef, kWasmAnyFunc, kWasmI32};
FunctionSig sig(0, 3, sig_types);
byte sig_index = builder.AddSignature(sigs.i_v());
// We can store anyfunc values as anyref, but not the other way around.
ExpectValidates(sigs.i_v(),
{kExprI32Const, 0, kExprCallIndirect, sig_index, tab_func});
ExpectFailure(sigs.i_v(),
{kExprI32Const, 0, kExprCallIndirect, sig_index, tab_ref});
}
TEST_F(FunctionBodyDecoderTest, WasmMemoryGrow) {
TestModuleBuilder builder;
module = builder.module();