[wasm] Turn ValueType from an enum to a class

In preparation for adding reference types, which need an additional parameter to indicate the referenced type.

Bug: v8:7748
Change-Id: If4023f3d9c7f42ed603b69c43356d2e8b81a0daa
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2091471
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66687}
parent 4253a00ec8
commit f3b4167f8b
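For orientation before reading the diff: the change turns `wasm::ValueType` from a plain enum into a small class wrapping a `Kind` enum, so call sites switch on `type.kind()` and use instance methods such as `type.element_size_bytes()` and `type.machine_representation()` instead of the static `wasm::ValueTypes::*` helpers. The sketch below is illustrative only, not the actual V8 header; the kind list, sizes, and method set are assumptions read off this diff.

```cpp
// Illustrative sketch only (not the real src/wasm/value-type.h): shows the
// shape of the enum-to-class migration that the diff below applies at call
// sites. Sizes and the kind list are assumptions taken from this CL's diff.
#include <cstddef>
#include <cstdint>

namespace wasm {

class ValueType {
 public:
  enum Kind : uint8_t {
    kStmt, kI32, kI64, kF32, kF64, kS128,
    kAnyRef, kFuncRef, kNullRef, kExnRef, kBottom
  };

  constexpr explicit ValueType(Kind kind) : kind_(kind) {}

  constexpr Kind kind() const { return kind_; }

  // Replaces wasm::ValueTypes::ElementSizeInBytes(type) at call sites.
  constexpr int element_size_bytes() const {
    switch (kind_) {
      case kI32:
      case kF32:
        return 4;
      case kI64:
      case kF64:
        return 8;
      case kS128:
        return 16;
      default:
        // Reference types are pointer-sized tagged values in this sketch.
        return static_cast<int>(sizeof(void*));
    }
  }

 private:
  Kind kind_;  // The referenced type for reference types is added in a later CL.
};

// Old call sites:  switch (type) { case wasm::kWasmI32: ... }
// New call sites:  switch (type.kind()) { case wasm::ValueType::kI32: ... }

}  // namespace wasm
```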
@@ -268,10 +268,14 @@ class MachineType {
}
}

bool LessThanOrEqualPointerSize() {
constexpr bool LessThanOrEqualPointerSize() const {
return ElementSizeLog2Of(this->representation()) <= kSystemPointerSizeLog2;
}

constexpr byte MemSize() const {
return 1 << i::ElementSizeLog2Of(this->representation());
}

private:
MachineRepresentation representation_;
MachineSemantic semantic_;
@@ -249,9 +249,8 @@ Node* WasmGraphBuilder::Phi(wasm::ValueType type, unsigned count,
Node** vals_and_control) {
DCHECK(IrOpcode::IsMergeOpcode(vals_and_control[count]->opcode()));
return graph()->NewNode(
mcgraph()->common()->Phi(wasm::ValueTypes::MachineRepresentationFor(type),
count),
count + 1, vals_and_control);
mcgraph()->common()->Phi(type.machine_representation(), count), count + 1,
vals_and_control);
}

Node* WasmGraphBuilder::EffectPhi(unsigned count, Node** effects_and_control) {
@@ -1135,26 +1134,26 @@ Node* WasmGraphBuilder::BuildChangeEndiannessStore(
Node* result;
Node* value = node;
MachineOperatorBuilder* m = mcgraph()->machine();
int valueSizeInBytes = wasm::ValueTypes::ElementSizeInBytes(wasmtype);
int valueSizeInBytes = wasmtype.element_size_bytes();
int valueSizeInBits = 8 * valueSizeInBytes;
bool isFloat = false;

switch (wasmtype) {
case wasm::kWasmF64:
switch (wasmtype.kind()) {
case wasm::ValueType::kF64:
value = graph()->NewNode(m->BitcastFloat64ToInt64(), node);
isFloat = true;
V8_FALLTHROUGH;
case wasm::kWasmI64:
case wasm::ValueType::kI64:
result = mcgraph()->Int64Constant(0);
break;
case wasm::kWasmF32:
case wasm::ValueType::kF32:
value = graph()->NewNode(m->BitcastFloat32ToInt32(), node);
isFloat = true;
V8_FALLTHROUGH;
case wasm::kWasmI32:
case wasm::ValueType::kI32:
result = mcgraph()->Int32Constant(0);
break;
case wasm::kWasmS128:
case wasm::ValueType::kS128:
DCHECK(ReverseBytesSupported(m, valueSizeInBytes));
break;
default:
@@ -1169,7 +1168,7 @@ Node* WasmGraphBuilder::BuildChangeEndiannessStore(
// In case we store lower part of WasmI64 expression, we can truncate
// upper 32bits
value = graph()->NewNode(m->TruncateInt64ToInt32(), value);
valueSizeInBytes = wasm::ValueTypes::ElementSizeInBytes(wasm::kWasmI32);
valueSizeInBytes = wasm::kWasmI32.element_size_bytes();
valueSizeInBits = 8 * valueSizeInBytes;
if (mem_rep == MachineRepresentation::kWord16) {
value =
@@ -1243,11 +1242,11 @@ Node* WasmGraphBuilder::BuildChangeEndiannessStore(
}

if (isFloat) {
switch (wasmtype) {
case wasm::kWasmF64:
switch (wasmtype.kind()) {
case wasm::ValueType::kF64:
result = graph()->NewNode(m->BitcastInt64ToFloat64(), result);
break;
case wasm::kWasmF32:
case wasm::ValueType::kF32:
result = graph()->NewNode(m->BitcastInt32ToFloat32(), result);
break;
default:
@@ -2036,17 +2035,17 @@ Node* WasmGraphBuilder::Throw(uint32_t exception_index,
MachineOperatorBuilder* m = mcgraph()->machine();
for (size_t i = 0; i < sig->parameter_count(); ++i) {
Node* value = values[i];
switch (sig->GetParam(i)) {
case wasm::kWasmF32:
switch (sig->GetParam(i).kind()) {
case wasm::ValueType::kF32:
value = graph()->NewNode(m->BitcastFloat32ToInt32(), value);
V8_FALLTHROUGH;
case wasm::kWasmI32:
case wasm::ValueType::kI32:
BuildEncodeException32BitValue(values_array, &index, value);
break;
case wasm::kWasmF64:
case wasm::ValueType::kF64:
value = graph()->NewNode(m->BitcastFloat64ToInt64(), value);
V8_FALLTHROUGH;
case wasm::kWasmI64: {
case wasm::ValueType::kI64: {
Node* upper32 = graph()->NewNode(
m->TruncateInt64ToInt32(),
Binop(wasm::kExprI64ShrU, value, Int64Constant(32)));
@@ -2055,7 +2054,7 @@ Node* WasmGraphBuilder::Throw(uint32_t exception_index,
BuildEncodeException32BitValue(values_array, &index, lower32);
break;
}
case wasm::kWasmS128:
case wasm::ValueType::kS128:
BuildEncodeException32BitValue(
values_array, &index,
graph()->NewNode(m->I32x4ExtractLane(0), value));
@@ -2069,14 +2068,15 @@ Node* WasmGraphBuilder::Throw(uint32_t exception_index,
values_array, &index,
graph()->NewNode(m->I32x4ExtractLane(3), value));
break;
case wasm::kWasmAnyRef:
case wasm::kWasmFuncRef:
case wasm::kWasmNullRef:
case wasm::kWasmExnRef:
case wasm::ValueType::kAnyRef:
case wasm::ValueType::kFuncRef:
case wasm::ValueType::kNullRef:
case wasm::ValueType::kExnRef:
STORE_FIXED_ARRAY_SLOT_ANY(values_array, index, value);
++index;
break;
default:
case wasm::ValueType::kStmt:
case wasm::ValueType::kBottom:
UNREACHABLE();
}
}
@@ -2179,24 +2179,24 @@ Node* WasmGraphBuilder::GetExceptionValues(Node* except_obj,
DCHECK_EQ(sig->parameter_count(), values.size());
for (size_t i = 0; i < sig->parameter_count(); ++i) {
Node* value;
switch (sig->GetParam(i)) {
case wasm::kWasmI32:
switch (sig->GetParam(i).kind()) {
case wasm::ValueType::kI32:
value = BuildDecodeException32BitValue(values_array, &index);
break;
case wasm::kWasmI64:
case wasm::ValueType::kI64:
value = BuildDecodeException64BitValue(values_array, &index);
break;
case wasm::kWasmF32: {
case wasm::ValueType::kF32: {
value = Unop(wasm::kExprF32ReinterpretI32,
BuildDecodeException32BitValue(values_array, &index));
break;
}
case wasm::kWasmF64: {
case wasm::ValueType::kF64: {
value = Unop(wasm::kExprF64ReinterpretI64,
BuildDecodeException64BitValue(values_array, &index));
break;
}
case wasm::kWasmS128:
case wasm::ValueType::kS128:
value = graph()->NewNode(
mcgraph()->machine()->I32x4Splat(),
BuildDecodeException32BitValue(values_array, &index));
@@ -2210,14 +2210,15 @@ Node* WasmGraphBuilder::GetExceptionValues(Node* except_obj,
mcgraph()->machine()->I32x4ReplaceLane(3), value,
BuildDecodeException32BitValue(values_array, &index));
break;
case wasm::kWasmAnyRef:
case wasm::kWasmFuncRef:
case wasm::kWasmNullRef:
case wasm::kWasmExnRef:
case wasm::ValueType::kAnyRef:
case wasm::ValueType::kFuncRef:
case wasm::ValueType::kNullRef:
case wasm::ValueType::kExnRef:
value = LOAD_FIXED_ARRAY_SLOT_ANY(values_array, index);
++index;
break;
default:
case wasm::ValueType::kStmt:
case wasm::ValueType::kBottom:
UNREACHABLE();
}
values[i] = value;
@@ -3294,7 +3295,7 @@ Node* WasmGraphBuilder::BuildCallToRuntime(Runtime::FunctionId f,

Node* WasmGraphBuilder::GlobalGet(uint32_t index) {
const wasm::WasmGlobal& global = env_->module->globals[index];
if (wasm::ValueTypes::IsReferenceType(global.type)) {
if (global.type.IsReferenceType()) {
if (global.mutability && global.imported) {
Node* base = nullptr;
Node* offset = nullptr;
@@ -3306,27 +3307,24 @@ Node* WasmGraphBuilder::GlobalGet(uint32_t index) {
return LOAD_FIXED_ARRAY_SLOT_ANY(globals_buffer, global.offset);
}

MachineType mem_type =
wasm::ValueTypes::MachineTypeFor(env_->module->globals[index].type);
MachineType mem_type = global.type.machine_type();
if (mem_type.representation() == MachineRepresentation::kSimd128) {
has_simd_ = true;
}
Node* base = nullptr;
Node* offset = nullptr;
GetGlobalBaseAndOffset(mem_type, env_->module->globals[index], &base,
&offset);
GetGlobalBaseAndOffset(mem_type, global, &base, &offset);
Node* result = SetEffect(graph()->NewNode(
mcgraph()->machine()->Load(mem_type), base, offset, effect(), control()));
#if defined(V8_TARGET_BIG_ENDIAN)
result = BuildChangeEndiannessLoad(result, mem_type,
env_->module->globals[index].type);
result = BuildChangeEndiannessLoad(result, mem_type, global.type);
#endif
return result;
}

Node* WasmGraphBuilder::GlobalSet(uint32_t index, Node* val) {
const wasm::WasmGlobal& global = env_->module->globals[index];
if (wasm::ValueTypes::IsReferenceType(global.type)) {
if (global.type.IsReferenceType()) {
if (global.mutability && global.imported) {
Node* base = nullptr;
Node* offset = nullptr;
@@ -3337,24 +3335,20 @@ Node* WasmGraphBuilder::GlobalSet(uint32_t index, Node* val) {
}
Node* globals_buffer =
LOAD_INSTANCE_FIELD(TaggedGlobalsBuffer, MachineType::TaggedPointer());
return STORE_FIXED_ARRAY_SLOT_ANY(globals_buffer,
env_->module->globals[index].offset, val);
return STORE_FIXED_ARRAY_SLOT_ANY(globals_buffer, global.offset, val);
}

MachineType mem_type =
wasm::ValueTypes::MachineTypeFor(env_->module->globals[index].type);
MachineType mem_type = global.type.machine_type();
if (mem_type.representation() == MachineRepresentation::kSimd128) {
has_simd_ = true;
}
Node* base = nullptr;
Node* offset = nullptr;
GetGlobalBaseAndOffset(mem_type, env_->module->globals[index], &base,
&offset);
GetGlobalBaseAndOffset(mem_type, global, &base, &offset);
const Operator* op = mcgraph()->machine()->Store(
StoreRepresentation(mem_type.representation(), kNoWriteBarrier));
#if defined(V8_TARGET_BIG_ENDIAN)
val = BuildChangeEndiannessStore(val, mem_type.representation(),
env_->module->globals[index].type);
val = BuildChangeEndiannessStore(val, mem_type.representation(), global.type);
#endif
return SetEffect(
graph()->NewNode(op, base, offset, val, effect(), control()));
@@ -3607,15 +3601,15 @@ Node* WasmGraphBuilder::BoundsCheckMemRange(Node** start, Node** size,

const Operator* WasmGraphBuilder::GetSafeLoadOperator(int offset,
wasm::ValueType type) {
int alignment = offset % (wasm::ValueTypes::ElementSizeInBytes(type));
MachineType mach_type = wasm::ValueTypes::MachineTypeFor(type);
int alignment = offset % type.element_size_bytes();
MachineType mach_type = type.machine_type();
if (COMPRESS_POINTERS_BOOL && mach_type.IsTagged()) {
// We are loading tagged value from off-heap location, so we need to load
// it as a full word otherwise we will not be able to decompress it.
mach_type = MachineType::Pointer();
}
if (alignment == 0 || mcgraph()->machine()->UnalignedLoadSupported(
wasm::ValueTypes::MachineRepresentationFor(type))) {
type.machine_representation())) {
return mcgraph()->machine()->Load(mach_type);
}
return mcgraph()->machine()->UnalignedLoad(mach_type);
@@ -3623,8 +3617,8 @@ const Operator* WasmGraphBuilder::GetSafeLoadOperator(int offset,

const Operator* WasmGraphBuilder::GetSafeStoreOperator(int offset,
wasm::ValueType type) {
int alignment = offset % (wasm::ValueTypes::ElementSizeInBytes(type));
MachineRepresentation rep = wasm::ValueTypes::MachineRepresentationFor(type);
int alignment = offset % type.element_size_bytes();
MachineRepresentation rep = type.machine_representation();
if (COMPRESS_POINTERS_BOOL && IsAnyTagged(rep)) {
// We are storing tagged value to off-heap location, so we need to store
// it as a full word otherwise we will not be able to decompress it.
@@ -3777,8 +3771,8 @@ Node* WasmGraphBuilder::LoadTransform(wasm::ValueType type, MachineType memtype,
#else
// Wasm semantics throw on OOB. Introduce explicit bounds check and
// conditioning when not using the trap handler.
index = BoundsCheckMem(wasm::ValueTypes::MemSize(memtype), index, offset,
position, kCanOmitBoundsCheck);
index = BoundsCheckMem(memtype.MemSize(), index, offset, position,
kCanOmitBoundsCheck);

LoadTransformation transformation = GetLoadTransformation(memtype, transform);
LoadKind load_kind = GetLoadKind(mcgraph(), memtype, use_trap_handler());
@@ -3811,8 +3805,8 @@ Node* WasmGraphBuilder::LoadMem(wasm::ValueType type, MachineType memtype,

// Wasm semantics throw on OOB. Introduce explicit bounds check and
// conditioning when not using the trap handler.
index = BoundsCheckMem(wasm::ValueTypes::MemSize(memtype), index, offset,
position, kCanOmitBoundsCheck);
index = BoundsCheckMem(memtype.MemSize(), index, offset, position,
kCanOmitBoundsCheck);

if (memtype.representation() == MachineRepresentation::kWord8 ||
mcgraph()->machine()->UnalignedLoadSupported(memtype.representation())) {
@@ -4021,7 +4015,7 @@ Signature<MachineRepresentation>* CreateMachineSignature(
if (origin == WasmGraphBuilder::kCalledFromJS) {
builder.AddReturn(MachineRepresentation::kTagged);
} else {
builder.AddReturn(wasm::ValueTypes::MachineRepresentationFor(ret));
builder.AddReturn(ret.machine_representation());
}
}

@@ -4032,7 +4026,7 @@ Signature<MachineRepresentation>* CreateMachineSignature(
// provided by JavaScript, and not two 32-bit parameters.
builder.AddParam(MachineRepresentation::kTagged);
} else {
builder.AddParam(wasm::ValueTypes::MachineRepresentationFor(param));
builder.AddParam(param.machine_representation());
}
}
return builder.Build();
@@ -4754,9 +4748,8 @@ Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,
switch (opcode) {
#define BUILD_ATOMIC_BINOP(Name, Operation, Type, Prefix) \
case wasm::kExpr##Name: { \
Node* index = CheckBoundsAndAlignment( \
wasm::ValueTypes::MemSize(MachineType::Type()), inputs[0], offset, \
position); \
Node* index = CheckBoundsAndAlignment(MachineType::Type().MemSize(), \
inputs[0], offset, position); \
node = graph()->NewNode( \
mcgraph()->machine()->Prefix##Atomic##Operation(MachineType::Type()), \
MemBuffer(offset), index, inputs[1], effect(), control()); \
@@ -4767,9 +4760,8 @@ Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,

#define BUILD_ATOMIC_CMP_EXCHG(Name, Type, Prefix) \
case wasm::kExpr##Name: { \
Node* index = CheckBoundsAndAlignment( \
wasm::ValueTypes::MemSize(MachineType::Type()), inputs[0], offset, \
position); \
Node* index = CheckBoundsAndAlignment(MachineType::Type().MemSize(), \
inputs[0], offset, position); \
node = graph()->NewNode( \
mcgraph()->machine()->Prefix##AtomicCompareExchange( \
MachineType::Type()), \
@@ -4779,24 +4771,22 @@ Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,
ATOMIC_CMP_EXCHG_LIST(BUILD_ATOMIC_CMP_EXCHG)
#undef BUILD_ATOMIC_CMP_EXCHG

#define BUILD_ATOMIC_LOAD_OP(Name, Type, Prefix) \
case wasm::kExpr##Name: { \
Node* index = CheckBoundsAndAlignment( \
wasm::ValueTypes::MemSize(MachineType::Type()), inputs[0], offset, \
position); \
node = graph()->NewNode( \
mcgraph()->machine()->Prefix##AtomicLoad(MachineType::Type()), \
MemBuffer(offset), index, effect(), control()); \
break; \
#define BUILD_ATOMIC_LOAD_OP(Name, Type, Prefix) \
case wasm::kExpr##Name: { \
Node* index = CheckBoundsAndAlignment(MachineType::Type().MemSize(), \
inputs[0], offset, position); \
node = graph()->NewNode( \
mcgraph()->machine()->Prefix##AtomicLoad(MachineType::Type()), \
MemBuffer(offset), index, effect(), control()); \
break; \
}
ATOMIC_LOAD_LIST(BUILD_ATOMIC_LOAD_OP)
#undef BUILD_ATOMIC_LOAD_OP

#define BUILD_ATOMIC_STORE_OP(Name, Type, Rep, Prefix) \
case wasm::kExpr##Name: { \
Node* index = CheckBoundsAndAlignment( \
wasm::ValueTypes::MemSize(MachineType::Type()), inputs[0], offset, \
position); \
Node* index = CheckBoundsAndAlignment(MachineType::Type().MemSize(), \
inputs[0], offset, position); \
node = graph()->NewNode( \
mcgraph()->machine()->Prefix##AtomicStore(MachineRepresentation::Rep), \
MemBuffer(offset), index, inputs[1], effect(), control()); \
@@ -4805,9 +4795,8 @@ Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,
ATOMIC_STORE_LIST(BUILD_ATOMIC_STORE_OP)
#undef BUILD_ATOMIC_STORE_OP
case wasm::kExprAtomicNotify: {
Node* index = CheckBoundsAndAlignment(
wasm::ValueTypes::MemSize(MachineType::Uint32()), inputs[0], offset,
position);
Node* index = CheckBoundsAndAlignment(MachineType::Uint32().MemSize(),
inputs[0], offset, position);
// Now that we've bounds-checked, compute the effective address.
Node* address = graph()->NewNode(mcgraph()->machine()->Int32Add(),
Uint32Constant(offset), index);
@@ -4826,9 +4815,8 @@ Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,
}

case wasm::kExprI32AtomicWait: {
Node* index = CheckBoundsAndAlignment(
wasm::ValueTypes::MemSize(MachineType::Uint32()), inputs[0], offset,
position);
Node* index = CheckBoundsAndAlignment(MachineType::Uint32().MemSize(),
inputs[0], offset, position);
// Now that we've bounds-checked, compute the effective address.
Node* address = graph()->NewNode(mcgraph()->machine()->Int32Add(),
Uint32Constant(offset), index);
@@ -4848,9 +4836,8 @@ Node* WasmGraphBuilder::AtomicOp(wasm::WasmOpcode opcode, Node* const* inputs,
}

case wasm::kExprI64AtomicWait: {
Node* index = CheckBoundsAndAlignment(
wasm::ValueTypes::MemSize(MachineType::Uint64()), inputs[0], offset,
position);
Node* index = CheckBoundsAndAlignment(MachineType::Uint64().MemSize(),
inputs[0], offset, position);
// Now that we've bounds-checked, compute the effective address.
Node* address = graph()->NewNode(mcgraph()->machine()->Int32Add(),
Uint32Constant(offset), index);
@@ -5325,27 +5312,28 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
}

Node* ToJS(Node* node, wasm::ValueType type) {
switch (type) {
case wasm::kWasmI32:
switch (type.kind()) {
case wasm::ValueType::kI32:
return BuildChangeInt32ToTagged(node);
case wasm::kWasmS128:
case wasm::ValueType::kS128:
UNREACHABLE();
case wasm::kWasmI64: {
case wasm::ValueType::kI64: {
DCHECK(enabled_features_.has_bigint());
return BuildChangeInt64ToBigInt(node);
}
case wasm::kWasmF32:
case wasm::ValueType::kF32:
node = graph()->NewNode(mcgraph()->machine()->ChangeFloat32ToFloat64(),
node);
return BuildChangeFloat64ToTagged(node);
case wasm::kWasmF64:
case wasm::ValueType::kF64:
return BuildChangeFloat64ToTagged(node);
case wasm::kWasmAnyRef:
case wasm::kWasmFuncRef:
case wasm::kWasmNullRef:
case wasm::kWasmExnRef:
case wasm::ValueType::kAnyRef:
case wasm::ValueType::kFuncRef:
case wasm::ValueType::kNullRef:
case wasm::ValueType::kExnRef:
return node;
default:
case wasm::ValueType::kStmt:
case wasm::ValueType::kBottom:
UNREACHABLE();
}
}
@@ -5400,14 +5388,14 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
}

Node* BuildFloat64ToWasm(Node* value, wasm::ValueType type) {
switch (type) {
case wasm::kWasmI32:
switch (type.kind()) {
case wasm::ValueType::kI32:
return graph()->NewNode(mcgraph()->machine()->TruncateFloat64ToWord32(),
value);
case wasm::kWasmF32:
case wasm::ValueType::kF32:
return graph()->NewNode(
mcgraph()->machine()->TruncateFloat64ToFloat32(), value);
case wasm::kWasmF64:
case wasm::ValueType::kF64:
return value;
default:
UNREACHABLE();
@@ -5415,12 +5403,12 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
}

Node* BuildSmiToWasm(Node* smi, wasm::ValueType type) {
switch (type) {
case wasm::kWasmI32:
switch (type.kind()) {
case wasm::ValueType::kI32:
return BuildChangeSmiToInt32(smi);
case wasm::kWasmF32:
case wasm::ValueType::kF32:
return BuildChangeSmiToFloat32(smi);
case wasm::kWasmF64:
case wasm::ValueType::kF64:
return BuildChangeSmiToFloat64(smi);
default:
UNREACHABLE();
@@ -5428,12 +5416,12 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
}

Node* FromJS(Node* input, Node* js_context, wasm::ValueType type) {
switch (type) {
case wasm::kWasmAnyRef:
case wasm::kWasmExnRef:
switch (type.kind()) {
case wasm::ValueType::kAnyRef:
case wasm::ValueType::kExnRef:
return input;

case wasm::kWasmNullRef: {
case wasm::ValueType::kNullRef: {
Node* check = graph()->NewNode(mcgraph()->machine()->WordEqual(), input,
RefNull());

@@ -5452,7 +5440,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
return input;
}

case wasm::kWasmFuncRef: {
case wasm::ValueType::kFuncRef: {
Node* check =
BuildChangeSmiToInt32(SetEffect(BuildCallToRuntimeWithContext(
Runtime::kWasmIsValidFuncRefValue, js_context, &input, 1)));
@@ -5472,7 +5460,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
return input;
}

case wasm::kWasmI64:
case wasm::ValueType::kI64:
// i64 values can only come from BigInt.
DCHECK(enabled_features_.has_bigint());
return BuildChangeBigIntToInt64(input, js_context);
@@ -5497,8 +5485,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
auto smi_to_wasm = gasm_->MakeLabel(MachineRepresentation::kTaggedSigned);
auto call_to_number =
gasm_->MakeDeferredLabel(MachineRepresentation::kTaggedPointer);
auto done =
gasm_->MakeLabel(wasm::ValueTypes::MachineRepresentationFor(type));
auto done = gasm_->MakeLabel(type.machine_representation());

// Branch to smi conversion or the ToNumber call.
gasm_->Branch(BuildTestSmi(input), &smi_to_wasm, &call_to_number, input);
@@ -5927,11 +5914,11 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
// Store arguments on our stack, then align the stack for calling to C.
int param_bytes = 0;
for (wasm::ValueType type : sig_->parameters()) {
param_bytes += wasm::ValueTypes::MemSize(type);
param_bytes += type.element_size_bytes();
}
int return_bytes = 0;
for (wasm::ValueType type : sig_->returns()) {
return_bytes += wasm::ValueTypes::MemSize(type);
return_bytes += type.element_size_bytes();
}

int stack_slot_bytes = std::max(param_bytes, return_bytes);
@@ -5950,7 +5937,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
SetEffect(graph()->NewNode(GetSafeStoreOperator(offset, type), values,
Int32Constant(offset), Param(i + 1), effect(),
control()));
offset += wasm::ValueTypes::ElementSizeInBytes(type);
offset += type.element_size_bytes();
}
// The function is passed as the last parameter, after WASM arguments.
Node* function_node = Param(param_count + 1);
@@ -6019,7 +6006,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
graph()->NewNode(GetSafeLoadOperator(offset, type), values,
Int32Constant(offset), effect(), control()));
returns[i] = val;
offset += wasm::ValueTypes::ElementSizeInBytes(type);
offset += type.element_size_bytes();
}
Return(VectorOf(returns));
}
@@ -6039,13 +6026,13 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
// Compute size for the argument buffer.
int args_size_bytes = 0;
for (wasm::ValueType type : sig_->parameters()) {
args_size_bytes += wasm::ValueTypes::ElementSizeInBytes(type);
args_size_bytes += type.element_size_bytes();
}

// The return value is also passed via this buffer:
int return_size_bytes = 0;
for (wasm::ValueType type : sig_->returns()) {
return_size_bytes += wasm::ValueTypes::ElementSizeInBytes(type);
return_size_bytes += type.element_size_bytes();
}

// Get a stack slot for the arguments.
@@ -6064,7 +6051,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
SetEffect(graph()->NewNode(GetSafeStoreOperator(offset, type), arg_buffer,
Int32Constant(offset), Param(i + 1), effect(),
control()));
offset += wasm::ValueTypes::ElementSizeInBytes(type);
offset += type.element_size_bytes();
}
DCHECK_EQ(args_size_bytes, offset);

@@ -6091,7 +6078,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
graph()->NewNode(GetSafeLoadOperator(offset, type), arg_buffer,
Int32Constant(offset), effect(), control()));
returns[i] = val;
offset += wasm::ValueTypes::ElementSizeInBytes(type);
offset += type.element_size_bytes();
}
Return(VectorOf(returns));
}
@@ -6215,7 +6202,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
graph()->NewNode(GetSafeLoadOperator(offset, type), arg_buffer,
Int32Constant(offset), effect(), control()));
args[pos++] = arg_load;
offset += wasm::ValueTypes::ElementSizeInBytes(type);
offset += type.element_size_bytes();
}

args[pos++] = effect();
@@ -6248,7 +6235,7 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
SetEffect(graph()->NewNode(GetSafeStoreOperator(offset, type), arg_buffer,
Int32Constant(offset), value, effect(),
control()));
offset += wasm::ValueTypes::ElementSizeInBytes(type);
offset += type.element_size_bytes();
pos++;
}
@@ -6985,16 +6972,14 @@ CallDescriptor* GetWasmCallDescriptor(
// during frame iteration.
const size_t parameter_count = fsig->parameter_count();
for (size_t i = 0; i < parameter_count; i++) {
MachineRepresentation param =
wasm::ValueTypes::MachineRepresentationFor(fsig->GetParam(i));
MachineRepresentation param = fsig->GetParam(i).machine_representation();
// Skip tagged parameters (e.g. any-ref).
if (IsAnyTagged(param)) continue;
auto l = params.Next(param);
locations.AddParamAt(i + param_offset, l);
}
for (size_t i = 0; i < parameter_count; i++) {
MachineRepresentation param =
wasm::ValueTypes::MachineRepresentationFor(fsig->GetParam(i));
MachineRepresentation param = fsig->GetParam(i).machine_representation();
// Skip untagged parameters.
if (!IsAnyTagged(param)) continue;
auto l = params.Next(param);
@@ -7019,8 +7004,7 @@ CallDescriptor* GetWasmCallDescriptor(

const int return_count = static_cast<int>(locations.return_count_);
for (int i = 0; i < return_count; i++) {
MachineRepresentation ret =
wasm::ValueTypes::MachineRepresentationFor(fsig->GetReturn(i));
MachineRepresentation ret = fsig->GetReturn(i).machine_representation();
auto l = rets.Next(ret);
locations.AddReturn(l);
}
@@ -1786,7 +1786,7 @@ void WasmGlobalObject::WasmGlobalObjectPrint(std::ostream& os) { // NOLINT
os << "\n - tagged_buffer: " << Brief(tagged_buffer());
os << "\n - offset: " << offset();
os << "\n - flags: " << flags();
os << "\n - type: " << type();
os << "\n - type: " << type().kind();
os << "\n - is_mutable: " << is_mutable();
os << "\n";
}
@@ -234,33 +234,33 @@ RUNTIME_FUNCTION(Runtime_WasmRunInterpreter) {
Address arg_buf_ptr = arg_buffer;
for (int i = 0; i < num_params; ++i) {
#define CASE_ARG_TYPE(type, ctype) \
case wasm::type: \
DCHECK_EQ(wasm::ValueTypes::ElementSizeInBytes(sig->GetParam(i)), \
sizeof(ctype)); \
case wasm::ValueType::type: \
DCHECK_EQ(sig->GetParam(i).element_size_bytes(), sizeof(ctype)); \
wasm_args[i] = \
wasm::WasmValue(base::ReadUnalignedValue<ctype>(arg_buf_ptr)); \
arg_buf_ptr += sizeof(ctype); \
break;
switch (sig->GetParam(i)) {
CASE_ARG_TYPE(kWasmI32, uint32_t)
CASE_ARG_TYPE(kWasmI64, uint64_t)
CASE_ARG_TYPE(kWasmF32, float)
CASE_ARG_TYPE(kWasmF64, double)
switch (sig->GetParam(i).kind()) {
CASE_ARG_TYPE(kI32, uint32_t)
CASE_ARG_TYPE(kI64, uint64_t)
CASE_ARG_TYPE(kF32, float)
CASE_ARG_TYPE(kF64, double)
#undef CASE_ARG_TYPE
case wasm::kWasmAnyRef:
case wasm::kWasmFuncRef:
case wasm::kWasmNullRef:
case wasm::kWasmExnRef: {
DCHECK_EQ(wasm::ValueTypes::ElementSizeInBytes(sig->GetParam(i)),
kSystemPointerSize);
Handle<Object> ref(base::ReadUnalignedValue<Object>(arg_buf_ptr),
isolate);
case wasm::ValueType::kAnyRef:
case wasm::ValueType::kFuncRef:
case wasm::ValueType::kNullRef:
case wasm::ValueType::kExnRef: {
DCHECK_EQ(sig->GetParam(i).element_size_bytes(), kSystemPointerSize);
Handle<Object> ref(
Object(base::ReadUnalignedValue<Address>(arg_buf_ptr)), isolate);
DCHECK_IMPLIES(sig->GetParam(i) == wasm::kWasmNullRef, ref->IsNull());
wasm_args[i] = wasm::WasmValue(ref);
arg_buf_ptr += kSystemPointerSize;
break;
}
default:
case wasm::ValueType::kStmt:
case wasm::ValueType::kS128:
case wasm::ValueType::kBottom:
UNREACHABLE();
}
}
@@ -288,24 +288,22 @@ RUNTIME_FUNCTION(Runtime_WasmRunInterpreter) {
arg_buf_ptr = arg_buffer;
for (int i = 0; i < num_returns; ++i) {
#define CASE_RET_TYPE(type, ctype) \
case wasm::type: \
DCHECK_EQ(wasm::ValueTypes::ElementSizeInBytes(sig->GetReturn(i)), \
sizeof(ctype)); \
case wasm::ValueType::type: \
DCHECK_EQ(sig->GetReturn(i).element_size_bytes(), sizeof(ctype)); \
base::WriteUnalignedValue<ctype>(arg_buf_ptr, wasm_rets[i].to<ctype>()); \
arg_buf_ptr += sizeof(ctype); \
break;
switch (sig->GetReturn(i)) {
CASE_RET_TYPE(kWasmI32, uint32_t)
CASE_RET_TYPE(kWasmI64, uint64_t)
CASE_RET_TYPE(kWasmF32, float)
CASE_RET_TYPE(kWasmF64, double)
switch (sig->GetReturn(i).kind()) {
CASE_RET_TYPE(kI32, uint32_t)
CASE_RET_TYPE(kI64, uint64_t)
CASE_RET_TYPE(kF32, float)
CASE_RET_TYPE(kF64, double)
#undef CASE_RET_TYPE
case wasm::kWasmAnyRef:
case wasm::kWasmFuncRef:
case wasm::kWasmNullRef:
case wasm::kWasmExnRef: {
DCHECK_EQ(wasm::ValueTypes::ElementSizeInBytes(sig->GetReturn(i)),
kSystemPointerSize);
case wasm::ValueType::kAnyRef:
case wasm::ValueType::kFuncRef:
case wasm::ValueType::kNullRef:
case wasm::ValueType::kExnRef: {
DCHECK_EQ(sig->GetReturn(i).element_size_bytes(), kSystemPointerSize);
DCHECK_IMPLIES(sig->GetReturn(i) == wasm::kWasmNullRef,
wasm_rets[i].to_anyref()->IsNull());
base::WriteUnalignedValue<Object>(arg_buf_ptr,
@@ -287,17 +287,17 @@ constexpr int LiftoffAssembler::StaticStackFrameSize() {
}

int LiftoffAssembler::SlotSizeForType(ValueType type) {
switch (type) {
case kWasmS128:
return ValueTypes::ElementSizeInBytes(type);
switch (type.kind()) {
case ValueType::kS128:
return type.element_size_bytes();
default:
return kStackSlotSize;
}
}

bool LiftoffAssembler::NeedsAlignment(ValueType type) {
switch (type) {
case kWasmS128:
switch (type.kind()) {
case ValueType::kS128:
return true;
default:
// No alignment because all other types are kStackSlotSize.
@@ -307,11 +307,11 @@ bool LiftoffAssembler::NeedsAlignment(ValueType type) {

void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
RelocInfo::Mode rmode) {
switch (value.type()) {
case kWasmI32:
switch (value.type().kind()) {
case ValueType::kI32:
TurboAssembler::Move(reg.gp(), Operand(value.to_i32(), rmode));
break;
case kWasmI64: {
case ValueType::kI64: {
DCHECK(RelocInfo::IsNone(rmode));
int32_t low_word = value.to_i64();
int32_t high_word = value.to_i64() >> 32;
@@ -319,10 +319,10 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
TurboAssembler::Move(reg.high_gp(), Operand(high_word));
break;
}
case kWasmF32:
case ValueType::kF32:
vmov(liftoff::GetFloatRegister(reg.fp()), value.to_f32_boxed());
break;
case kWasmF64: {
case ValueType::kF64: {
Register extra_scratch = GetUnusedRegister(kGpReg).gp();
vmov(reg.fp(), Double(value.to_f64_boxed().get_scalar()), extra_scratch);
break;
@@ -602,21 +602,21 @@ void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
ValueType type) {
int32_t offset = (caller_slot_idx + 1) * kSystemPointerSize;
MemOperand src(fp, offset);
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
ldr(dst.gp(), src);
break;
case kWasmI64:
case ValueType::kI64:
ldr(dst.low_gp(), src);
ldr(dst.high_gp(), MemOperand(fp, offset + kSystemPointerSize));
break;
case kWasmF32:
case ValueType::kF32:
vldr(liftoff::GetFloatRegister(dst.fp()), src);
break;
case kWasmF64:
case ValueType::kF64:
vldr(dst.fp(), src);
break;
case kWasmS128: {
case ValueType::kS128: {
UseScratchRegisterScope temps(this);
Register addr = liftoff::CalculateActualAddress(this, &temps, src.rn(),
no_reg, src.offset());
@@ -658,21 +658,21 @@ void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
RecordUsedSpillOffset(offset);
MemOperand dst = liftoff::GetStackSlot(offset);
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
str(reg.gp(), dst);
break;
case kWasmI64:
case ValueType::kI64:
str(reg.low_gp(), liftoff::GetHalfStackSlot(offset, kLowWord));
str(reg.high_gp(), liftoff::GetHalfStackSlot(offset, kHighWord));
break;
case kWasmF32:
case ValueType::kF32:
vstr(liftoff::GetFloatRegister(reg.fp()), dst);
break;
case kWasmF64:
case ValueType::kF64:
vstr(reg.fp(), dst);
break;
case kWasmS128: {
case ValueType::kS128: {
UseScratchRegisterScope temps(this);
Register addr = liftoff::CalculateActualAddress(this, &temps, dst.rn(),
no_reg, dst.offset());
@@ -696,12 +696,12 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
} else {
src = temps.Acquire();
}
switch (value.type()) {
case kWasmI32:
switch (value.type().kind()) {
case ValueType::kI32:
mov(src, Operand(value.to_i32()));
str(src, dst);
break;
case kWasmI64: {
case ValueType::kI64: {
int32_t low_word = value.to_i64();
mov(src, Operand(low_word));
str(src, liftoff::GetHalfStackSlot(offset, kLowWord));
@@ -717,21 +717,21 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
}

void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueType type) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
ldr(reg.gp(), liftoff::GetStackSlot(offset));
break;
case kWasmI64:
case ValueType::kI64:
ldr(reg.low_gp(), liftoff::GetHalfStackSlot(offset, kLowWord));
ldr(reg.high_gp(), liftoff::GetHalfStackSlot(offset, kHighWord));
break;
case kWasmF32:
case ValueType::kF32:
vldr(liftoff::GetFloatRegister(reg.fp()), liftoff::GetStackSlot(offset));
break;
case kWasmF64:
case ValueType::kF64:
vldr(reg.fp(), liftoff::GetStackSlot(offset));
break;
case kWasmS128: {
case ValueType::kS128: {
// Get memory address of slot to fill from.
MemOperand slot = liftoff::GetStackSlot(offset);
UseScratchRegisterScope temps(this);
@@ -1703,25 +1703,25 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,

int arg_bytes = 0;
for (ValueType param_type : sig->parameters()) {
switch (param_type) {
case kWasmI32:
switch (param_type.kind()) {
case ValueType::kI32:
str(args->gp(), MemOperand(sp, arg_bytes));
break;
case kWasmI64:
case ValueType::kI64:
str(args->low_gp(), MemOperand(sp, arg_bytes));
str(args->high_gp(), MemOperand(sp, arg_bytes + kSystemPointerSize));
break;
case kWasmF32:
case ValueType::kF32:
vstr(liftoff::GetFloatRegister(args->fp()), MemOperand(sp, arg_bytes));
break;
case kWasmF64:
case ValueType::kF64:
vstr(args->fp(), MemOperand(sp, arg_bytes));
break;
default:
UNREACHABLE();
}
args++;
arg_bytes += ValueTypes::MemSize(param_type);
arg_bytes += param_type.element_size_bytes();
}
DCHECK_LE(arg_bytes, stack_bytes);

@@ -1746,18 +1746,18 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,

// Load potential output value from the buffer on the stack.
if (out_argument_type != kWasmStmt) {
switch (out_argument_type) {
case kWasmI32:
switch (out_argument_type.kind()) {
case ValueType::kI32:
ldr(result_reg->gp(), MemOperand(sp));
break;
case kWasmI64:
case ValueType::kI64:
ldr(result_reg->low_gp(), MemOperand(sp));
ldr(result_reg->high_gp(), MemOperand(sp, kSystemPointerSize));
break;
case kWasmF32:
case ValueType::kF32:
vldr(liftoff::GetFloatRegister(result_reg->fp()), MemOperand(sp));
break;
case kWasmF64:
case ValueType::kF64:
vldr(result_reg->fp(), MemOperand(sp));
break;
default:
@@ -1798,25 +1798,25 @@ void LiftoffStackSlots::Construct() {
const LiftoffAssembler::VarState& src = slot.src_;
switch (src.loc()) {
case LiftoffAssembler::VarState::kStack: {
switch (src.type()) {
switch (src.type().kind()) {
// i32 and i64 can be treated as similar cases, i64 being previously
// split into two i32 registers
case kWasmI32:
case kWasmI64:
case kWasmF32: {
case ValueType::kI32:
case ValueType::kI64:
case ValueType::kF32: {
UseScratchRegisterScope temps(asm_);
Register scratch = temps.Acquire();
asm_->ldr(scratch,
liftoff::GetHalfStackSlot(slot.src_offset_, slot.half_));
asm_->Push(scratch);
} break;
case kWasmF64: {
case ValueType::kF64: {
UseScratchRegisterScope temps(asm_);
DwVfpRegister scratch = temps.AcquireD();
asm_->vldr(scratch, liftoff::GetStackSlot(slot.src_offset_));
asm_->vpush(scratch);
} break;
case kWasmS128: {
case ValueType::kS128: {
MemOperand mem_op = liftoff::GetStackSlot(slot.src_offset_);
UseScratchRegisterScope temps(asm_);
Register addr = liftoff::CalculateActualAddress(
@@ -1832,22 +1832,22 @@ void LiftoffStackSlots::Construct() {
break;
}
case LiftoffAssembler::VarState::kRegister:
switch (src.type()) {
case kWasmI64: {
switch (src.type().kind()) {
case ValueType::kI64: {
LiftoffRegister reg =
slot.half_ == kLowWord ? src.reg().low() : src.reg().high();
asm_->push(reg.gp());
} break;
case kWasmI32:
case ValueType::kI32:
asm_->push(src.reg().gp());
break;
case kWasmF32:
case ValueType::kF32:
asm_->vpush(liftoff::GetFloatRegister(src.reg().fp()));
break;
case kWasmF64:
case ValueType::kF64:
asm_->vpush(src.reg().fp());
break;
case kWasmS128:
case ValueType::kS128:
asm_->vpush(liftoff::GetSimd128Register(src.reg().low_fp()));
break;
default:
@@ -46,16 +46,16 @@ inline MemOperand GetStackSlot(int offset) { return MemOperand(fp, -offset); }
inline MemOperand GetInstanceOperand() { return GetStackSlot(kInstanceOffset); }

inline CPURegister GetRegFromType(const LiftoffRegister& reg, ValueType type) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
return reg.gp().W();
case kWasmI64:
case ValueType::kI64:
return reg.gp().X();
case kWasmF32:
case ValueType::kF32:
return reg.fp().S();
case kWasmF64:
case ValueType::kF64:
return reg.fp().D();
case kWasmS128:
case ValueType::kS128:
return reg.fp().Q();
default:
UNREACHABLE();
@@ -74,14 +74,14 @@ inline CPURegList PadVRegList(RegList list) {

inline CPURegister AcquireByType(UseScratchRegisterScope* temps,
ValueType type) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
return temps->AcquireW();
case kWasmI64:
case ValueType::kI64:
return temps->AcquireX();
case kWasmF32:
case ValueType::kF32:
return temps->AcquireS();
case kWasmF64:
case ValueType::kF64:
return temps->AcquireD();
default:
UNREACHABLE();
@@ -174,17 +174,17 @@ constexpr int LiftoffAssembler::StaticStackFrameSize() {
int LiftoffAssembler::SlotSizeForType(ValueType type) {
// TODO(zhin): Unaligned access typically take additional cycles, we should do
// some performance testing to see how big an effect it will take.
switch (type) {
case kWasmS128:
return ValueTypes::ElementSizeInBytes(type);
switch (type.kind()) {
case ValueType::kS128:
return type.element_size_bytes();
default:
return kStackSlotSize;
}
}

bool LiftoffAssembler::NeedsAlignment(ValueType type) {
switch (type) {
case kWasmS128:
switch (type.kind()) {
case ValueType::kS128:
return true;
default:
// No alignment because all other types are kStackSlotSize.
@@ -194,17 +194,17 @@ bool LiftoffAssembler::NeedsAlignment(ValueType type) {

void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
RelocInfo::Mode rmode) {
switch (value.type()) {
case kWasmI32:
switch (value.type().kind()) {
case ValueType::kI32:
Mov(reg.gp().W(), Immediate(value.to_i32(), rmode));
break;
case kWasmI64:
case ValueType::kI64:
Mov(reg.gp().X(), Immediate(value.to_i64(), rmode));
break;
case kWasmF32:
case ValueType::kF32:
Fmov(reg.fp().S(), value.to_f32_boxed().get_scalar());
break;
case kWasmF64:
case ValueType::kF64:
Fmov(reg.fp().D(), value.to_f64_boxed().get_scalar());
break;
default:
@@ -444,8 +444,8 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
MemOperand dst = liftoff::GetStackSlot(offset);
UseScratchRegisterScope temps(this);
CPURegister src = CPURegister::no_reg();
switch (value.type()) {
case kWasmI32:
switch (value.type().kind()) {
case ValueType::kI32:
if (value.to_i32() == 0) {
src = wzr;
} else {
@@ -453,7 +453,7 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
Mov(src.W(), value.to_i32());
}
break;
case kWasmI64:
case ValueType::kI64:
if (value.to_i64() == 0) {
src = xzr;
} else {
@@ -1018,15 +1018,15 @@ void LiftoffAssembler::emit_jump(Register target) { Br(target); }
void LiftoffAssembler::emit_cond_jump(Condition cond, Label* label,
ValueType type, Register lhs,
Register rhs) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
if (rhs.is_valid()) {
Cmp(lhs.W(), rhs.W());
} else {
Cmp(lhs.W(), wzr);
}
break;
case kWasmI64:
case ValueType::kI64:
if (rhs.is_valid()) {
Cmp(lhs.X(), rhs.X());
} else {
@@ -1186,7 +1186,7 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
int arg_bytes = 0;
for (ValueType param_type : sig->parameters()) {
Poke(liftoff::GetRegFromType(*args++, param_type), arg_bytes);
arg_bytes += ValueTypes::MemSize(param_type);
arg_bytes += param_type.element_size_bytes();
}
DCHECK_LE(arg_bytes, stack_bytes);
@@ -40,21 +40,21 @@ static constexpr LiftoffRegList kByteRegs =
inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
int32_t offset, ValueType type) {
Operand src(base, offset);
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
assm->mov(dst.gp(), src);
break;
case kWasmI64:
case ValueType::kI64:
assm->mov(dst.low_gp(), src);
assm->mov(dst.high_gp(), Operand(base, offset + 4));
break;
case kWasmF32:
case ValueType::kF32:
assm->movss(dst.fp(), src);
break;
case kWasmF64:
case ValueType::kF64:
assm->movsd(dst.fp(), src);
break;
case kWasmS128:
case ValueType::kS128:
assm->movdqu(dst.fp(), src);
break;
default:
@@ -65,18 +65,18 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
LiftoffRegister src, ValueType type) {
Operand dst(base, offset);
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
assm->mov(dst, src.gp());
break;
case kWasmI64:
case ValueType::kI64:
assm->mov(dst, src.low_gp());
assm->mov(Operand(base, offset + 4), src.high_gp());
break;
case kWasmF32:
case ValueType::kF32:
assm->movss(dst, src.fp());
break;
case kWasmF64:
case ValueType::kF64:
assm->movsd(dst, src.fp());
break;
default:
@@ -85,23 +85,23 @@ inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
}

inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueType type) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
assm->push(reg.gp());
break;
case kWasmI64:
case ValueType::kI64:
assm->push(reg.high_gp());
assm->push(reg.low_gp());
break;
case kWasmF32:
case ValueType::kF32:
assm->AllocateStackSpace(sizeof(float));
assm->movss(Operand(esp, 0), reg.fp());
break;
case kWasmF64:
case ValueType::kF64:
assm->AllocateStackSpace(sizeof(double));
assm->movsd(Operand(esp, 0), reg.fp());
break;
case kWasmS128:
case ValueType::kS128:
assm->AllocateStackSpace(sizeof(double) * 2);
assm->movdqu(Operand(esp, 0), reg.fp());
break;
@@ -202,18 +202,18 @@ constexpr int LiftoffAssembler::StaticStackFrameSize() {
}

int LiftoffAssembler::SlotSizeForType(ValueType type) {
return ValueTypes::ElementSizeInBytes(type);
return type.element_size_bytes();
}

bool LiftoffAssembler::NeedsAlignment(ValueType type) { return false; }

void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
RelocInfo::Mode rmode) {
switch (value.type()) {
case kWasmI32:
switch (value.type().kind()) {
case ValueType::kI32:
TurboAssembler::Move(reg.gp(), Immediate(value.to_i32(), rmode));
break;
case kWasmI64: {
case ValueType::kI64: {
DCHECK(RelocInfo::IsNone(rmode));
int32_t low_word = value.to_i64();
int32_t high_word = value.to_i64() >> 32;
@@ -221,10 +221,10 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
TurboAssembler::Move(reg.high_gp(), Immediate(high_word));
break;
}
case kWasmF32:
case ValueType::kF32:
TurboAssembler::Move(reg.fp(), value.to_f32_boxed().get_bits());
break;
case kWasmF64:
case ValueType::kF64:
TurboAssembler::Move(reg.fp(), value.to_f64_boxed().get_bits());
break;
default:
@@ -578,21 +578,21 @@ void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
RecordUsedSpillOffset(offset);
Operand dst = liftoff::GetStackSlot(offset);
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
mov(dst, reg.gp());
break;
case kWasmI64:
case ValueType::kI64:
mov(liftoff::GetHalfStackSlot(offset, kLowWord), reg.low_gp());
mov(liftoff::GetHalfStackSlot(offset, kHighWord), reg.high_gp());
break;
case kWasmF32:
case ValueType::kF32:
movss(dst, reg.fp());
break;
case kWasmF64:
case ValueType::kF64:
movsd(dst, reg.fp());
break;
case kWasmS128:
case ValueType::kS128:
movdqu(dst, reg.fp());
break;
default:
@@ -603,11 +603,11 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
void LiftoffAssembler::Spill(int offset, WasmValue value) {
RecordUsedSpillOffset(offset);
Operand dst = liftoff::GetStackSlot(offset);
switch (value.type()) {
case kWasmI32:
switch (value.type().kind()) {
case ValueType::kI32:
mov(dst, Immediate(value.to_i32()));
break;
case kWasmI64: {
case ValueType::kI64: {
int32_t low_word = value.to_i64();
int32_t high_word = value.to_i64() >> 32;
mov(liftoff::GetHalfStackSlot(offset, kLowWord), Immediate(low_word));
@@ -622,21 +622,21 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {

void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueType type) {
Operand src = liftoff::GetStackSlot(offset);
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
mov(reg.gp(), src);
break;
case kWasmI64:
case ValueType::kI64:
mov(reg.low_gp(), liftoff::GetHalfStackSlot(offset, kLowWord));
mov(reg.high_gp(), liftoff::GetHalfStackSlot(offset, kHighWord));
break;
case kWasmF32:
case ValueType::kF32:
movss(reg.fp(), src);
break;
case kWasmF64:
case ValueType::kF64:
movsd(reg.fp(), src);
break;
case kWasmS128:
case ValueType::kS128:
movdqu(reg.fp(), src);
break;
default:
@@ -1783,8 +1783,8 @@ void LiftoffAssembler::emit_cond_jump(Condition cond, Label* label,
ValueType type, Register lhs,
Register rhs) {
if (rhs != no_reg) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
cmp(lhs, rhs);
break;
default:
@@ -2118,7 +2118,7 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
int arg_bytes = 0;
for (ValueType param_type : sig->parameters()) {
liftoff::Store(this, esp, arg_bytes, *args++, param_type);
arg_bytes += ValueTypes::MemSize(param_type);
arg_bytes += param_type.element_size_bytes();
}
DCHECK_LE(arg_bytes, stack_bytes);
@@ -22,6 +22,8 @@ namespace wasm {

using VarState = LiftoffAssembler::VarState;

constexpr ValueType LiftoffAssembler::kWasmIntPtr;

namespace {

class StackTransferRecipe {
@@ -922,7 +924,7 @@ void LiftoffAssembler::set_num_locals(uint32_t num_locals) {
}

std::ostream& operator<<(std::ostream& os, VarState slot) {
os << ValueTypes::TypeName(slot.type()) << ":";
os << slot.type().type_name() << ":";
switch (slot.loc()) {
case VarState::kStack:
return os << "s";
@ -351,16 +351,16 @@ class LiftoffCompiler {
|
||||
}
|
||||
|
||||
LiftoffBailoutReason BailoutReasonForType(ValueType type) {
|
||||
switch (type) {
|
||||
case kWasmS128:
|
||||
switch (type.kind()) {
|
||||
case ValueType::kS128:
|
||||
return kSimd;
|
||||
case kWasmAnyRef:
|
||||
case kWasmFuncRef:
|
||||
case kWasmNullRef:
|
||||
case ValueType::kAnyRef:
|
||||
case ValueType::kFuncRef:
|
||||
case ValueType::kNullRef:
|
||||
return kAnyRef;
|
||||
case kWasmExnRef:
|
||||
case ValueType::kExnRef:
|
||||
return kExceptionHandling;
|
||||
case kWasmBottom:
|
||||
case ValueType::kBottom:
|
||||
return kMultiValue;
|
||||
default:
|
||||
return kOtherReason;
|
||||
@ -381,7 +381,7 @@ class LiftoffCompiler {
|
||||
}
|
||||
LiftoffBailoutReason bailout_reason = BailoutReasonForType(type);
|
||||
EmbeddedVector<char, 128> buffer;
|
||||
SNPrintF(buffer, "%s %s", ValueTypes::TypeName(type), context);
|
||||
SNPrintF(buffer, "%s %s", type.type_name(), context);
|
||||
unsupported(decoder, bailout_reason, buffer.begin());
|
||||
return false;
|
||||
}
|
||||
@ -815,17 +815,17 @@ class LiftoffCompiler {
// Store arguments on our stack, then align the stack for calling to C.
int param_bytes = 0;
for (ValueType param_type : sig->parameters()) {
param_bytes += ValueTypes::MemSize(param_type);
param_bytes += param_type.element_size_bytes();
}
int out_arg_bytes = out_argument_type == kWasmStmt
? 0
: ValueTypes::MemSize(out_argument_type);
: out_argument_type.element_size_bytes();
int stack_bytes = std::max(param_bytes, out_arg_bytes);
__ CallC(sig, arg_regs, result_regs, out_argument_type, stack_bytes,
ext_ref);
}

template <ValueType src_type, ValueType result_type, class EmitFn>
template <ValueType::Kind src_type, ValueType::Kind result_type, class EmitFn>
void EmitUnOp(EmitFn fn) {
constexpr RegClass src_rc = reg_class_for(src_type);
constexpr RegClass result_rc = reg_class_for(result_type);
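For reference, the sizing logic in GenerateCCall above boils down to summing the parameter footprints and reserving an out-argument slot only when the signature actually produces a value (kWasmStmt stands for "no value" here). A minimal standalone sketch of that arithmetic, with illustrative names:

#include <algorithm>
#include <initializer_list>

// param_sizes would come from param_type.element_size_bytes(); out_arg_bytes
// is 0 when the out-argument type is the statement ("no value") type.
int StackBytesForCCall(std::initializer_list<int> param_sizes, int out_arg_bytes) {
  int param_bytes = 0;
  for (int size : param_sizes) param_bytes += size;
  return std::max(param_bytes, out_arg_bytes);
}

// e.g. StackBytesForCCall({8, 4}, 8) == 12: two params of 8 and 4 bytes,
// one 8-byte out argument.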
@ -834,25 +834,25 @@ class LiftoffCompiler {
? __ GetUnusedRegister(result_rc, {src})
: __ GetUnusedRegister(result_rc);
fn(dst, src);
__ PushRegister(result_type, dst);
__ PushRegister(ValueType(result_type), dst);
}

template <ValueType type>
template <ValueType::Kind type>
void EmitFloatUnOpWithCFallback(
bool (LiftoffAssembler::*emit_fn)(DoubleRegister, DoubleRegister),
ExternalReference (*fallback_fn)()) {
auto emit_with_c_fallback = [=](LiftoffRegister dst, LiftoffRegister src) {
if ((asm_.*emit_fn)(dst.fp(), src.fp())) return;
ExternalReference ext_ref = fallback_fn();
ValueType sig_reps[] = {type};
ValueType sig_reps[] = {ValueType(type)};
FunctionSig sig(0, 1, sig_reps);
GenerateCCall(&dst, &sig, type, &src, ext_ref);
GenerateCCall(&dst, &sig, ValueType(type), &src, ext_ref);
};
EmitUnOp<type, type>(emit_with_c_fallback);
}

enum TypeConversionTrapping : bool { kCanTrap = true, kNoTrap = false };
template <ValueType dst_type, ValueType src_type,
template <ValueType::Kind dst_type, ValueType::Kind src_type,
TypeConversionTrapping can_trap>
void EmitTypeConversion(WasmOpcode opcode, ExternalReference (*fallback_fn)(),
WasmCodePosition trap_position) {
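The other recurring pattern in this file: the templated emit helpers now take a ValueType::Kind instead of a ValueType. A non-type template parameter cannot have class type until C++20, so once ValueType becomes a class it can no longer appear as a template argument; the templates carry the nested enum and re-wrap it with ValueType(kind) wherever a full ValueType is needed (PushRegister, signature arrays, GenerateCCall). A standalone illustration of the restriction, using stand-in names:

#include <cstdint>

class WrappedType {                    // stand-in for a class-style value type
 public:
  enum Kind : uint8_t { kI32, kI64 };
  constexpr explicit WrappedType(Kind kind) : kind_(kind) {}
  constexpr Kind kind() const { return kind_; }
 private:
  Kind kind_;
};

// template <WrappedType t> ...        // ill-formed before C++20
template <WrappedType::Kind t>         // fine: enum non-type parameter
constexpr WrappedType Rebuild() {
  return WrappedType(t);               // re-wrap, as the emitters above do
}

static_assert(Rebuild<WrappedType::kI64>().kind() == WrappedType::kI64,
              "the Kind round-trips through the wrapper");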
@ -871,54 +871,55 @@ class LiftoffCompiler {
|
||||
ExternalReference ext_ref = fallback_fn();
|
||||
if (can_trap) {
|
||||
// External references for potentially trapping conversions return int.
|
||||
ValueType sig_reps[] = {kWasmI32, src_type};
|
||||
ValueType sig_reps[] = {kWasmI32, ValueType(src_type)};
|
||||
FunctionSig sig(1, 1, sig_reps);
|
||||
LiftoffRegister ret_reg =
|
||||
__ GetUnusedRegister(kGpReg, LiftoffRegList::ForRegs(dst));
|
||||
LiftoffRegister dst_regs[] = {ret_reg, dst};
|
||||
GenerateCCall(dst_regs, &sig, dst_type, &src, ext_ref);
|
||||
GenerateCCall(dst_regs, &sig, ValueType(dst_type), &src, ext_ref);
|
||||
__ emit_cond_jump(kEqual, trap, kWasmI32, ret_reg.gp());
|
||||
} else {
|
||||
ValueType sig_reps[] = {src_type};
|
||||
ValueType sig_reps[] = {ValueType(src_type)};
|
||||
FunctionSig sig(0, 1, sig_reps);
|
||||
GenerateCCall(&dst, &sig, dst_type, &src, ext_ref);
|
||||
GenerateCCall(&dst, &sig, ValueType(dst_type), &src, ext_ref);
|
||||
}
|
||||
}
|
||||
__ PushRegister(dst_type, dst);
|
||||
__ PushRegister(ValueType(dst_type), dst);
|
||||
}
|
||||
|
||||
void UnOp(FullDecoder* decoder, WasmOpcode opcode, const Value& value,
|
||||
Value* result) {
|
||||
#define CASE_I32_UNOP(opcode, fn) \
|
||||
case kExpr##opcode: \
|
||||
EmitUnOp<kWasmI32, kWasmI32>( \
|
||||
EmitUnOp<ValueType::kI32, ValueType::kI32>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) { \
|
||||
__ emit_##fn(dst.gp(), src.gp()); \
|
||||
}); \
|
||||
break;
|
||||
#define CASE_I64_UNOP(opcode, fn) \
|
||||
case kExpr##opcode: \
|
||||
EmitUnOp<kWasmI64, kWasmI64>( \
|
||||
EmitUnOp<ValueType::kI64, ValueType::kI64>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) { \
|
||||
__ emit_##fn(dst, src); \
|
||||
}); \
|
||||
break;
|
||||
#define CASE_FLOAT_UNOP(opcode, type, fn) \
|
||||
case kExpr##opcode: \
|
||||
EmitUnOp<kWasm##type, kWasm##type>( \
|
||||
EmitUnOp<ValueType::k##type, ValueType::k##type>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) { \
|
||||
__ emit_##fn(dst.fp(), src.fp()); \
|
||||
}); \
|
||||
break;
|
||||
#define CASE_FLOAT_UNOP_WITH_CFALLBACK(opcode, type, fn) \
|
||||
case kExpr##opcode: \
|
||||
EmitFloatUnOpWithCFallback<kWasm##type>(&LiftoffAssembler::emit_##fn, \
|
||||
&ExternalReference::wasm_##fn); \
|
||||
#define CASE_FLOAT_UNOP_WITH_CFALLBACK(opcode, type, fn) \
|
||||
case kExpr##opcode: \
|
||||
EmitFloatUnOpWithCFallback<ValueType::k##type>( \
|
||||
&LiftoffAssembler::emit_##fn, &ExternalReference::wasm_##fn); \
|
||||
break;
|
||||
#define CASE_TYPE_CONVERSION(opcode, dst_type, src_type, ext_ref, can_trap) \
|
||||
case kExpr##opcode: \
|
||||
EmitTypeConversion<kWasm##dst_type, kWasm##src_type, can_trap>( \
|
||||
kExpr##opcode, ext_ref, can_trap ? decoder->position() : 0); \
|
||||
EmitTypeConversion<ValueType::k##dst_type, ValueType::k##src_type, \
|
||||
can_trap>(kExpr##opcode, ext_ref, \
|
||||
can_trap ? decoder->position() : 0); \
|
||||
break;
|
||||
switch (opcode) {
|
||||
CASE_I32_UNOP(I32Clz, i32_clz)
|
||||
@ -984,19 +985,19 @@ class LiftoffCompiler {
|
||||
outstanding_op_ = kExprI32Eqz;
|
||||
break;
|
||||
}
|
||||
EmitUnOp<kWasmI32, kWasmI32>(
|
||||
EmitUnOp<ValueType::kI32, ValueType::kI32>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
__ emit_i32_eqz(dst.gp(), src.gp());
|
||||
});
|
||||
break;
|
||||
case kExprI64Eqz:
|
||||
EmitUnOp<kWasmI64, kWasmI32>(
|
||||
EmitUnOp<ValueType::kI64, ValueType::kI32>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
__ emit_i64_eqz(dst.gp(), src);
|
||||
});
|
||||
break;
|
||||
case kExprI32Popcnt:
|
||||
EmitUnOp<kWasmI32, kWasmI32>(
|
||||
EmitUnOp<ValueType::kI32, ValueType::kI32>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
if (__ emit_i32_popcnt(dst.gp(), src.gp())) return;
|
||||
ValueType sig_i_i_reps[] = {kWasmI32, kWasmI32};
|
||||
@ -1006,7 +1007,7 @@ class LiftoffCompiler {
|
||||
});
|
||||
break;
|
||||
case kExprI64Popcnt:
|
||||
EmitUnOp<kWasmI64, kWasmI64>(
|
||||
EmitUnOp<ValueType::kI64, ValueType::kI64>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
if (__ emit_i64_popcnt(dst, src)) return;
|
||||
// The c function returns i32. We will zero-extend later.
|
||||
@ -1040,8 +1041,8 @@ class LiftoffCompiler {
|
||||
#undef CASE_TYPE_CONVERSION
|
||||
}
|
||||
|
||||
template <ValueType src_type, ValueType result_type, typename EmitFn,
|
||||
typename EmitFnImm>
|
||||
template <ValueType::Kind src_type, ValueType::Kind result_type,
|
||||
typename EmitFn, typename EmitFnImm>
|
||||
void EmitBinOpImm(EmitFn fn, EmitFnImm fnImm) {
|
||||
static constexpr RegClass src_rc = reg_class_for(src_type);
|
||||
static constexpr RegClass result_rc = reg_class_for(result_type);
|
||||
@ -1058,7 +1059,7 @@ class LiftoffCompiler {
|
||||
: __ GetUnusedRegister(result_rc);
|
||||
|
||||
fnImm(dst, lhs, imm);
|
||||
__ PushRegister(result_type, dst);
|
||||
__ PushRegister(ValueType(result_type), dst);
|
||||
} else {
|
||||
// The RHS was not an immediate.
|
||||
LiftoffRegister rhs = __ PopToRegister();
|
||||
@ -1067,11 +1068,12 @@ class LiftoffCompiler {
|
||||
? __ GetUnusedRegister(result_rc, {lhs, rhs})
|
||||
: __ GetUnusedRegister(result_rc);
|
||||
fn(dst, lhs, rhs);
|
||||
__ PushRegister(result_type, dst);
|
||||
__ PushRegister(ValueType(result_type), dst);
|
||||
}
|
||||
}
|
||||
|
||||
template <ValueType src_type, ValueType result_type, typename EmitFn>
|
||||
template <ValueType::Kind src_type, ValueType::Kind result_type,
|
||||
typename EmitFn>
|
||||
void EmitBinOp(EmitFn fn) {
|
||||
static constexpr RegClass src_rc = reg_class_for(src_type);
|
||||
static constexpr RegClass result_rc = reg_class_for(result_type);
|
||||
@ -1081,7 +1083,7 @@ class LiftoffCompiler {
|
||||
? __ GetUnusedRegister(result_rc, {lhs, rhs})
|
||||
: __ GetUnusedRegister(result_rc);
|
||||
fn(dst, lhs, rhs);
|
||||
__ PushRegister(result_type, dst);
|
||||
__ PushRegister(ValueType(result_type), dst);
|
||||
}
|
||||
|
||||
void EmitDivOrRem64CCall(LiftoffRegister dst, LiftoffRegister lhs,
|
||||
@ -1112,13 +1114,13 @@ class LiftoffCompiler {
|
||||
const Value& rhs, Value* result) {
|
||||
#define CASE_I32_BINOP(opcode, fn) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOp<kWasmI32, kWasmI32>( \
|
||||
return EmitBinOp<ValueType::kI32, ValueType::kI32>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
__ emit_##fn(dst.gp(), lhs.gp(), rhs.gp()); \
|
||||
});
|
||||
#define CASE_I32_BINOPI(opcode, fn) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOpImm<kWasmI32, kWasmI32>( \
|
||||
return EmitBinOpImm<ValueType::kI32, ValueType::kI32>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
__ emit_##fn(dst.gp(), lhs.gp(), rhs.gp()); \
|
||||
}, \
|
||||
@ -1127,13 +1129,13 @@ class LiftoffCompiler {
|
||||
});
|
||||
#define CASE_I64_BINOP(opcode, fn) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOp<kWasmI64, kWasmI64>( \
|
||||
return EmitBinOp<ValueType::kI64, ValueType::kI64>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
__ emit_##fn(dst, lhs, rhs); \
|
||||
});
|
||||
#define CASE_I64_BINOPI(opcode, fn) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOpImm<kWasmI64, kWasmI64>( \
|
||||
return EmitBinOpImm<ValueType::kI64, ValueType::kI64>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
__ emit_##fn(dst, lhs, rhs); \
|
||||
}, \
|
||||
@ -1142,7 +1144,7 @@ class LiftoffCompiler {
|
||||
});
|
||||
#define CASE_FLOAT_BINOP(opcode, type, fn) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOp<kWasm##type, kWasm##type>( \
|
||||
return EmitBinOp<ValueType::k##type, ValueType::k##type>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
__ emit_##fn(dst.fp(), lhs.fp(), rhs.fp()); \
|
||||
});
|
||||
@ -1154,32 +1156,32 @@ class LiftoffCompiler {
|
||||
outstanding_op_ = kExpr##opcode; \
|
||||
break; \
|
||||
} \
|
||||
return EmitBinOp<kWasmI32, kWasmI32>( \
|
||||
return EmitBinOp<ValueType::kI32, ValueType::kI32>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
constexpr Condition cond = GetCompareCondition(kExpr##opcode); \
|
||||
__ emit_i32_set_cond(cond, dst.gp(), lhs.gp(), rhs.gp()); \
|
||||
});
|
||||
#define CASE_I64_CMPOP(opcode, cond) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOp<kWasmI64, kWasmI32>( \
|
||||
return EmitBinOp<ValueType::kI64, ValueType::kI32>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
__ emit_i64_set_cond(cond, dst.gp(), lhs, rhs); \
|
||||
});
|
||||
#define CASE_F32_CMPOP(opcode, cond) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOp<kWasmF32, kWasmI32>( \
|
||||
return EmitBinOp<ValueType::kF32, ValueType::kI32>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
__ emit_f32_set_cond(cond, dst.gp(), lhs.fp(), rhs.fp()); \
|
||||
});
|
||||
#define CASE_F64_CMPOP(opcode, cond) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOp<kWasmF64, kWasmI32>( \
|
||||
return EmitBinOp<ValueType::kF64, ValueType::kI32>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
__ emit_f64_set_cond(cond, dst.gp(), lhs.fp(), rhs.fp()); \
|
||||
});
|
||||
#define CASE_I64_SHIFTOP(opcode, fn) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOpImm<kWasmI64, kWasmI64>( \
|
||||
return EmitBinOpImm<ValueType::kI64, ValueType::kI64>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister src, \
|
||||
LiftoffRegister amount) { \
|
||||
__ emit_##fn(dst, src, \
|
||||
@ -1190,7 +1192,7 @@ class LiftoffCompiler {
|
||||
});
|
||||
#define CASE_CCALL_BINOP(opcode, type, ext_ref_fn) \
|
||||
case kExpr##opcode: \
|
||||
return EmitBinOp<kWasm##type, kWasm##type>( \
|
||||
return EmitBinOp<ValueType::k##type, ValueType::k##type>( \
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
|
||||
LiftoffRegister args[] = {lhs, rhs}; \
|
||||
auto ext_ref = ExternalReference::ext_ref_fn(); \
|
||||
@ -1270,9 +1272,10 @@ class LiftoffCompiler {
|
||||
CASE_FLOAT_BINOP(F64Max, F64, f64_max)
|
||||
CASE_FLOAT_BINOP(F64CopySign, F64, f64_copysign)
|
||||
case kExprI32DivS:
|
||||
EmitBinOp<kWasmI32, kWasmI32>([this, decoder](LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
EmitBinOp<ValueType::kI32, ValueType::kI32>([this, decoder](
|
||||
LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
WasmCodePosition position = decoder->position();
|
||||
AddOutOfLineTrap(position, WasmCode::kThrowWasmTrapDivByZero);
|
||||
// Adding the second trap might invalidate the pointer returned for
|
||||
@ -1286,36 +1289,37 @@ class LiftoffCompiler {
|
||||
});
|
||||
break;
|
||||
case kExprI32DivU:
|
||||
EmitBinOp<kWasmI32, kWasmI32>([this, decoder](LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* div_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapDivByZero);
|
||||
__ emit_i32_divu(dst.gp(), lhs.gp(), rhs.gp(), div_by_zero);
|
||||
});
|
||||
EmitBinOp<ValueType::kI32, ValueType::kI32>(
|
||||
[this, decoder](LiftoffRegister dst, LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* div_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapDivByZero);
|
||||
__ emit_i32_divu(dst.gp(), lhs.gp(), rhs.gp(), div_by_zero);
|
||||
});
|
||||
break;
|
||||
case kExprI32RemS:
|
||||
EmitBinOp<kWasmI32, kWasmI32>([this, decoder](LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* rem_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
|
||||
__ emit_i32_rems(dst.gp(), lhs.gp(), rhs.gp(), rem_by_zero);
|
||||
});
|
||||
EmitBinOp<ValueType::kI32, ValueType::kI32>(
|
||||
[this, decoder](LiftoffRegister dst, LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* rem_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
|
||||
__ emit_i32_rems(dst.gp(), lhs.gp(), rhs.gp(), rem_by_zero);
|
||||
});
|
||||
break;
|
||||
case kExprI32RemU:
|
||||
EmitBinOp<kWasmI32, kWasmI32>([this, decoder](LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* rem_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
|
||||
__ emit_i32_remu(dst.gp(), lhs.gp(), rhs.gp(), rem_by_zero);
|
||||
});
|
||||
EmitBinOp<ValueType::kI32, ValueType::kI32>(
|
||||
[this, decoder](LiftoffRegister dst, LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* rem_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
|
||||
__ emit_i32_remu(dst.gp(), lhs.gp(), rhs.gp(), rem_by_zero);
|
||||
});
|
||||
break;
|
||||
case kExprI64DivS:
|
||||
EmitBinOp<kWasmI64, kWasmI64>([this, decoder](LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
EmitBinOp<ValueType::kI64, ValueType::kI64>([this, decoder](
|
||||
LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
WasmCodePosition position = decoder->position();
|
||||
AddOutOfLineTrap(position, WasmCode::kThrowWasmTrapDivByZero);
|
||||
// Adding the second trap might invalidate the pointer returned for
|
||||
@ -1333,9 +1337,10 @@ class LiftoffCompiler {
|
||||
});
|
||||
break;
|
||||
case kExprI64DivU:
|
||||
EmitBinOp<kWasmI64, kWasmI64>([this, decoder](LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
EmitBinOp<ValueType::kI64, ValueType::kI64>([this, decoder](
|
||||
LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* div_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapDivByZero);
|
||||
if (!__ emit_i64_divu(dst, lhs, rhs, div_by_zero)) {
|
||||
@ -1345,21 +1350,22 @@ class LiftoffCompiler {
|
||||
});
|
||||
break;
|
||||
case kExprI64RemS:
|
||||
EmitBinOp<kWasmI64, kWasmI64>([this, decoder](LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* rem_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
|
||||
if (!__ emit_i64_rems(dst, lhs, rhs, rem_by_zero)) {
|
||||
ExternalReference ext_ref = ExternalReference::wasm_int64_mod();
|
||||
EmitDivOrRem64CCall(dst, lhs, rhs, ext_ref, rem_by_zero);
|
||||
}
|
||||
});
|
||||
EmitBinOp<ValueType::kI64, ValueType::kI64>(
|
||||
[this, decoder](LiftoffRegister dst, LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* rem_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
|
||||
if (!__ emit_i64_rems(dst, lhs, rhs, rem_by_zero)) {
|
||||
ExternalReference ext_ref = ExternalReference::wasm_int64_mod();
|
||||
EmitDivOrRem64CCall(dst, lhs, rhs, ext_ref, rem_by_zero);
|
||||
}
|
||||
});
|
||||
break;
|
||||
case kExprI64RemU:
|
||||
EmitBinOp<kWasmI64, kWasmI64>([this, decoder](LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
EmitBinOp<ValueType::kI64, ValueType::kI64>([this, decoder](
|
||||
LiftoffRegister dst,
|
||||
LiftoffRegister lhs,
|
||||
LiftoffRegister rhs) {
|
||||
Label* rem_by_zero = AddOutOfLineTrap(
|
||||
decoder->position(), WasmCode::kThrowWasmTrapRemByZero);
|
||||
if (!__ emit_i64_remu(dst, lhs, rhs, rem_by_zero)) {
|
||||
@ -1993,8 +1999,7 @@ class LiftoffCompiler {
|
||||
WasmMemoryGrowDescriptor descriptor;
|
||||
DCHECK_EQ(0, descriptor.GetStackParameterCount());
|
||||
DCHECK_EQ(1, descriptor.GetRegisterParameterCount());
|
||||
DCHECK_EQ(ValueTypes::MachineTypeFor(kWasmI32),
|
||||
descriptor.GetParameterType(0));
|
||||
DCHECK_EQ(kWasmI32.machine_type(), descriptor.GetParameterType(0));
|
||||
|
||||
Register param_reg = descriptor.GetRegisterParameter(0);
|
||||
if (input.gp() != param_reg) __ Move(param_reg, input.gp(), kWasmI32);
|
||||
@ -2245,73 +2250,73 @@ class LiftoffCompiler {
|
||||
}
|
||||
switch (opcode) {
|
||||
case wasm::kExprF64x2Splat:
|
||||
EmitUnOp<kWasmF64, kWasmS128>(
|
||||
EmitUnOp<ValueType::kF64, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
__ emit_f64x2_splat(dst, src);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprF64x2Add:
|
||||
EmitBinOp<kWasmS128, kWasmS128>(
|
||||
EmitBinOp<ValueType::kS128, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) {
|
||||
__ emit_f64x2_add(dst, lhs, rhs);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprF32x4Splat:
|
||||
EmitUnOp<kWasmF32, kWasmS128>(
|
||||
EmitUnOp<ValueType::kF32, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
__ emit_f32x4_splat(dst, src);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprF32x4Add:
|
||||
EmitBinOp<kWasmS128, kWasmS128>(
|
||||
EmitBinOp<ValueType::kS128, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) {
|
||||
__ emit_f32x4_add(dst, lhs, rhs);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprI64x2Splat:
|
||||
EmitUnOp<kWasmI64, kWasmS128>(
|
||||
EmitUnOp<ValueType::kI64, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
__ emit_i64x2_splat(dst, src);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprI64x2Add:
|
||||
EmitBinOp<kWasmS128, kWasmS128>(
|
||||
EmitBinOp<ValueType::kS128, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) {
|
||||
__ emit_i64x2_add(dst, lhs, rhs);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprI32x4Splat:
|
||||
EmitUnOp<kWasmI32, kWasmS128>(
|
||||
EmitUnOp<ValueType::kI32, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
__ emit_i32x4_splat(dst, src);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprI32x4Add:
|
||||
EmitBinOp<kWasmS128, kWasmS128>(
|
||||
EmitBinOp<ValueType::kS128, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) {
|
||||
__ emit_i32x4_add(dst, lhs, rhs);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprI16x8Splat:
|
||||
EmitUnOp<kWasmI32, kWasmS128>(
|
||||
EmitUnOp<ValueType::kI32, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
__ emit_i16x8_splat(dst, src);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprI16x8Add:
|
||||
EmitBinOp<kWasmS128, kWasmS128>(
|
||||
EmitBinOp<ValueType::kS128, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) {
|
||||
__ emit_i16x8_add(dst, lhs, rhs);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprI8x16Splat:
|
||||
EmitUnOp<kWasmI32, kWasmS128>(
|
||||
EmitUnOp<ValueType::kI32, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister src) {
|
||||
__ emit_i8x16_splat(dst, src);
|
||||
});
|
||||
break;
|
||||
case wasm::kExprI8x16Add:
|
||||
EmitBinOp<kWasmS128, kWasmS128>(
|
||||
EmitBinOp<ValueType::kS128, ValueType::kS128>(
|
||||
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) {
|
||||
__ emit_i8x16_add(dst, lhs, rhs);
|
||||
});
|
||||
@ -2473,12 +2478,12 @@ class LiftoffCompiler {
|
||||
LiftoffRegister timeout = pinned.set(__ PopToRegister(pinned));
|
||||
LiftoffRegister expected_value = pinned.set(__ PopToRegister(pinned));
|
||||
Register index = pinned.set(__ PopToRegister(pinned)).gp();
|
||||
if (BoundsCheckMem(decoder, ValueTypes::ElementSizeInBytes(type),
|
||||
imm.offset, index, pinned, kDoForceCheck)) {
|
||||
if (BoundsCheckMem(decoder, type.element_size_bytes(), imm.offset, index,
|
||||
pinned, kDoForceCheck)) {
|
||||
return;
|
||||
}
|
||||
AlignmentCheckMem(decoder, ValueTypes::ElementSizeInBytes(type), imm.offset,
|
||||
index, pinned);
|
||||
AlignmentCheckMem(decoder, type.element_size_bytes(), imm.offset, index,
|
||||
pinned);
|
||||
|
||||
uint32_t offset = imm.offset;
|
||||
index = AddMemoryMasking(index, &offset, &pinned);
|
||||
@ -2525,12 +2530,12 @@ class LiftoffCompiler {
|
||||
LiftoffRegList pinned;
|
||||
LiftoffRegister count = pinned.set(__ PopToRegister());
|
||||
Register index = pinned.set(__ PopToRegister(pinned)).gp();
|
||||
if (BoundsCheckMem(decoder, ValueTypes::ElementSizeInBytes(kWasmI32),
|
||||
imm.offset, index, pinned, kDoForceCheck)) {
|
||||
if (BoundsCheckMem(decoder, kWasmI32.element_size_bytes(), imm.offset,
|
||||
index, pinned, kDoForceCheck)) {
|
||||
return;
|
||||
}
|
||||
AlignmentCheckMem(decoder, ValueTypes::ElementSizeInBytes(kWasmI32),
|
||||
imm.offset, index, pinned);
|
||||
AlignmentCheckMem(decoder, kWasmI32.element_size_bytes(), imm.offset, index,
|
||||
pinned);
|
||||
|
||||
uint32_t offset = imm.offset;
|
||||
index = AddMemoryMasking(index, &offset, &pinned);
|
||||
|
@ -53,22 +53,26 @@ static inline constexpr bool needs_fp_reg_pair(ValueType type) {
return kNeedS128RegPair && type == kWasmS128;
}

static inline constexpr RegClass reg_class_for(ValueType type) {
switch (type) {
case kWasmF32:
case kWasmF64:
static inline constexpr RegClass reg_class_for(ValueType::Kind kind) {
switch (kind) {
case ValueType::kF32:
case ValueType::kF64:
return kFpReg;
case kWasmI32:
case ValueType::kI32:
return kGpReg;
case kWasmI64:
case ValueType::kI64:
return kNeedI64RegPair ? kGpRegPair : kGpReg;
case kWasmS128:
case ValueType::kS128:
return kNeedS128RegPair ? kFpRegPair : kFpReg;
default:
return kNoReg;  // unsupported type
}
}

static inline constexpr RegClass reg_class_for(ValueType type) {
return reg_class_for(type.kind());
}

// Description of LiftoffRegister code encoding.
// This example uses the ARM architecture, which as of writing has:
// - 9 GP registers, requiring 4 bits
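reg_class_for is now a pair of constexpr overloads: the Kind overload serves the templated emitters in liftoff-compiler.cc, whose template parameters are already Kinds, while the thin ValueType overload keeps existing call sites unchanged by forwarding through kind(). A standalone sketch of the same forwarding pattern (stand-in names, not the real register classes):

#include <cstdint>

enum RegClassSketch : uint8_t { kGpRegSketch, kFpRegSketch };

class TypeSketch {
 public:
  enum Kind : uint8_t { kI32, kF32 };
  constexpr explicit TypeSketch(Kind kind) : kind_(kind) {}
  constexpr Kind kind() const { return kind_; }
 private:
  Kind kind_;
};

constexpr RegClassSketch RegClassFor(TypeSketch::Kind kind) {
  return kind == TypeSketch::kF32 ? kFpRegSketch : kGpRegSketch;
}
constexpr RegClassSketch RegClassFor(TypeSketch type) {
  return RegClassFor(type.kind());  // forwards to the Kind overload
}

static_assert(RegClassFor(TypeSketch(TypeSketch::kF32)) == kFpRegSketch,
              "both overloads stay usable in constant expressions");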
@ -58,20 +58,20 @@ inline Operand GetMemOp(LiftoffAssembler* assm, Register addr, Register offset,
|
||||
|
||||
inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Operand src,
|
||||
ValueType type) {
|
||||
switch (type) {
|
||||
case kWasmI32:
|
||||
switch (type.kind()) {
|
||||
case ValueType::kI32:
|
||||
assm->movl(dst.gp(), src);
|
||||
break;
|
||||
case kWasmI64:
|
||||
case ValueType::kI64:
|
||||
assm->movq(dst.gp(), src);
|
||||
break;
|
||||
case kWasmF32:
|
||||
case ValueType::kF32:
|
||||
assm->Movss(dst.fp(), src);
|
||||
break;
|
||||
case kWasmF64:
|
||||
case ValueType::kF64:
|
||||
assm->Movsd(dst.fp(), src);
|
||||
break;
|
||||
case kWasmS128:
|
||||
case ValueType::kS128:
|
||||
assm->Movdqu(dst.fp(), src);
|
||||
break;
|
||||
default:
|
||||
@ -81,17 +81,17 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Operand src,
|
||||
|
||||
inline void Store(LiftoffAssembler* assm, Operand dst, LiftoffRegister src,
|
||||
ValueType type) {
|
||||
switch (type) {
|
||||
case kWasmI32:
|
||||
switch (type.kind()) {
|
||||
case ValueType::kI32:
|
||||
assm->movl(dst, src.gp());
|
||||
break;
|
||||
case kWasmI64:
|
||||
case ValueType::kI64:
|
||||
assm->movq(dst, src.gp());
|
||||
break;
|
||||
case kWasmF32:
|
||||
case ValueType::kF32:
|
||||
assm->Movss(dst, src.fp());
|
||||
break;
|
||||
case kWasmF64:
|
||||
case ValueType::kF64:
|
||||
assm->Movsd(dst, src.fp());
|
||||
break;
|
||||
default:
|
||||
@ -100,20 +100,20 @@ inline void Store(LiftoffAssembler* assm, Operand dst, LiftoffRegister src,
|
||||
}
|
||||
|
||||
inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueType type) {
|
||||
switch (type) {
|
||||
case kWasmI32:
|
||||
case kWasmI64:
|
||||
switch (type.kind()) {
|
||||
case ValueType::kI32:
|
||||
case ValueType::kI64:
|
||||
assm->pushq(reg.gp());
|
||||
break;
|
||||
case kWasmF32:
|
||||
case ValueType::kF32:
|
||||
assm->AllocateStackSpace(kSystemPointerSize);
|
||||
assm->Movss(Operand(rsp, 0), reg.fp());
|
||||
break;
|
||||
case kWasmF64:
|
||||
case ValueType::kF64:
|
||||
assm->AllocateStackSpace(kSystemPointerSize);
|
||||
assm->Movsd(Operand(rsp, 0), reg.fp());
|
||||
break;
|
||||
case kWasmS128:
|
||||
case ValueType::kS128:
|
||||
assm->AllocateStackSpace(kSystemPointerSize * 2);
|
||||
assm->Movdqu(Operand(rsp, 0), reg.fp());
|
||||
break;
|
||||
@ -185,32 +185,32 @@ constexpr int LiftoffAssembler::StaticStackFrameSize() {
|
||||
}
|
||||
|
||||
int LiftoffAssembler::SlotSizeForType(ValueType type) {
|
||||
return ValueTypes::ElementSizeInBytes(type);
|
||||
return type.element_size_bytes();
|
||||
}
|
||||
|
||||
bool LiftoffAssembler::NeedsAlignment(ValueType type) { return false; }
|
||||
|
||||
void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
|
||||
RelocInfo::Mode rmode) {
|
||||
switch (value.type()) {
|
||||
case kWasmI32:
|
||||
switch (value.type().kind()) {
|
||||
case ValueType::kI32:
|
||||
if (value.to_i32() == 0 && RelocInfo::IsNone(rmode)) {
|
||||
xorl(reg.gp(), reg.gp());
|
||||
} else {
|
||||
movl(reg.gp(), Immediate(value.to_i32(), rmode));
|
||||
}
|
||||
break;
|
||||
case kWasmI64:
|
||||
case ValueType::kI64:
|
||||
if (RelocInfo::IsNone(rmode)) {
|
||||
TurboAssembler::Set(reg.gp(), value.to_i64());
|
||||
} else {
|
||||
movq(reg.gp(), Immediate64(value.to_i64(), rmode));
|
||||
}
|
||||
break;
|
||||
case kWasmF32:
|
||||
case ValueType::kF32:
|
||||
TurboAssembler::Move(reg.fp(), value.to_f32_boxed().get_bits());
|
||||
break;
|
||||
case kWasmF64:
|
||||
case ValueType::kF64:
|
||||
TurboAssembler::Move(reg.fp(), value.to_f64_boxed().get_bits());
|
||||
break;
|
||||
default:
|
||||
@ -687,11 +687,11 @@ void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
|
||||
DCHECK_NE(dst_offset, src_offset);
|
||||
Operand dst = liftoff::GetStackSlot(dst_offset);
|
||||
Operand src = liftoff::GetStackSlot(src_offset);
|
||||
if (ValueTypes::ElementSizeLog2Of(type) == 2) {
|
||||
if (type.element_size_log2() == 2) {
|
||||
movl(kScratchRegister, src);
|
||||
movl(dst, kScratchRegister);
|
||||
} else {
|
||||
DCHECK_EQ(3, ValueTypes::ElementSizeLog2Of(type));
|
||||
DCHECK_EQ(3, type.element_size_log2());
|
||||
movq(kScratchRegister, src);
|
||||
movq(dst, kScratchRegister);
|
||||
}
|
||||
@ -723,20 +723,20 @@ void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
|
||||
void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
|
||||
RecordUsedSpillOffset(offset);
|
||||
Operand dst = liftoff::GetStackSlot(offset);
|
||||
switch (type) {
|
||||
case kWasmI32:
|
||||
switch (type.kind()) {
|
||||
case ValueType::kI32:
|
||||
movl(dst, reg.gp());
|
||||
break;
|
||||
case kWasmI64:
|
||||
case ValueType::kI64:
|
||||
movq(dst, reg.gp());
|
||||
break;
|
||||
case kWasmF32:
|
||||
case ValueType::kF32:
|
||||
Movss(dst, reg.fp());
|
||||
break;
|
||||
case kWasmF64:
|
||||
case ValueType::kF64:
|
||||
Movsd(dst, reg.fp());
|
||||
break;
|
||||
case kWasmS128:
|
||||
case ValueType::kS128:
|
||||
Movdqu(dst, reg.fp());
|
||||
break;
|
||||
default:
|
||||
@ -747,11 +747,11 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
|
||||
void LiftoffAssembler::Spill(int offset, WasmValue value) {
|
||||
RecordUsedSpillOffset(offset);
|
||||
Operand dst = liftoff::GetStackSlot(offset);
|
||||
switch (value.type()) {
|
||||
case kWasmI32:
|
||||
switch (value.type().kind()) {
|
||||
case ValueType::kI32:
|
||||
movl(dst, Immediate(value.to_i32()));
|
||||
break;
|
||||
case kWasmI64: {
|
||||
case ValueType::kI64: {
|
||||
if (is_int32(value.to_i64())) {
|
||||
// Sign extend low word.
|
||||
movq(dst, Immediate(static_cast<int32_t>(value.to_i64())));
|
||||
@ -773,20 +773,20 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
|
||||
|
||||
void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueType type) {
|
||||
Operand src = liftoff::GetStackSlot(offset);
|
||||
switch (type) {
|
||||
case kWasmI32:
|
||||
switch (type.kind()) {
|
||||
case ValueType::kI32:
|
||||
movl(reg.gp(), src);
|
||||
break;
|
||||
case kWasmI64:
|
||||
case ValueType::kI64:
|
||||
movq(reg.gp(), src);
|
||||
break;
|
||||
case kWasmF32:
|
||||
case ValueType::kF32:
|
||||
Movss(reg.fp(), src);
|
||||
break;
|
||||
case kWasmF64:
|
||||
case ValueType::kF64:
|
||||
Movsd(reg.fp(), src);
|
||||
break;
|
||||
case kWasmS128:
|
||||
case ValueType::kS128:
|
||||
Movdqu(reg.fp(), src);
|
||||
break;
|
||||
default:
|
||||
@ -1032,16 +1032,16 @@ void LiftoffAssembler::emit_i32_xor(Register dst, Register lhs, int32_t imm) {
|
||||
}
|
||||
|
||||
namespace liftoff {
|
||||
template <ValueType type>
|
||||
template <ValueType::Kind type>
|
||||
inline void EmitShiftOperation(LiftoffAssembler* assm, Register dst,
|
||||
Register src, Register amount,
|
||||
void (Assembler::*emit_shift)(Register)) {
|
||||
// If dst is rcx, compute into the scratch register first, then move to rcx.
|
||||
if (dst == rcx) {
|
||||
assm->Move(kScratchRegister, src, type);
|
||||
if (amount != rcx) assm->Move(rcx, amount, type);
|
||||
assm->Move(kScratchRegister, src, ValueType(type));
|
||||
if (amount != rcx) assm->Move(rcx, amount, ValueType(type));
|
||||
(assm->*emit_shift)(kScratchRegister);
|
||||
assm->Move(rcx, kScratchRegister, type);
|
||||
assm->Move(rcx, kScratchRegister, ValueType(type));
|
||||
return;
|
||||
}
|
||||
|
||||
@ -1053,11 +1053,11 @@ inline void EmitShiftOperation(LiftoffAssembler* assm, Register dst,
|
||||
src == rcx || assm->cache_state()->is_used(LiftoffRegister(rcx));
|
||||
if (use_scratch) assm->movq(kScratchRegister, rcx);
|
||||
if (src == rcx) src = kScratchRegister;
|
||||
assm->Move(rcx, amount, type);
|
||||
assm->Move(rcx, amount, ValueType(type));
|
||||
}
|
||||
|
||||
// Do the actual shift.
|
||||
if (dst != src) assm->Move(dst, src, type);
|
||||
if (dst != src) assm->Move(dst, src, ValueType(type));
|
||||
(assm->*emit_shift)(dst);
|
||||
|
||||
// Restore rcx if needed.
|
||||
@ -1067,8 +1067,8 @@ inline void EmitShiftOperation(LiftoffAssembler* assm, Register dst,
|
||||
|
||||
void LiftoffAssembler::emit_i32_shl(Register dst, Register src,
|
||||
Register amount) {
|
||||
liftoff::EmitShiftOperation<kWasmI32>(this, dst, src, amount,
|
||||
&Assembler::shll_cl);
|
||||
liftoff::EmitShiftOperation<ValueType::kI32>(this, dst, src, amount,
|
||||
&Assembler::shll_cl);
|
||||
}
|
||||
|
||||
void LiftoffAssembler::emit_i32_shl(Register dst, Register src,
|
||||
@ -1079,8 +1079,8 @@ void LiftoffAssembler::emit_i32_shl(Register dst, Register src,
|
||||
|
||||
void LiftoffAssembler::emit_i32_sar(Register dst, Register src,
|
||||
Register amount) {
|
||||
liftoff::EmitShiftOperation<kWasmI32>(this, dst, src, amount,
|
||||
&Assembler::sarl_cl);
|
||||
liftoff::EmitShiftOperation<ValueType::kI32>(this, dst, src, amount,
|
||||
&Assembler::sarl_cl);
|
||||
}
|
||||
|
||||
void LiftoffAssembler::emit_i32_sar(Register dst, Register src,
|
||||
@ -1091,8 +1091,8 @@ void LiftoffAssembler::emit_i32_sar(Register dst, Register src,
|
||||
|
||||
void LiftoffAssembler::emit_i32_shr(Register dst, Register src,
|
||||
Register amount) {
|
||||
liftoff::EmitShiftOperation<kWasmI32>(this, dst, src, amount,
|
||||
&Assembler::shrl_cl);
|
||||
liftoff::EmitShiftOperation<ValueType::kI32>(this, dst, src, amount,
|
||||
&Assembler::shrl_cl);
|
||||
}
|
||||
|
||||
void LiftoffAssembler::emit_i32_shr(Register dst, Register src,
|
||||
@ -1223,8 +1223,8 @@ void LiftoffAssembler::emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs,
|
||||
|
||||
void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
|
||||
Register amount) {
|
||||
liftoff::EmitShiftOperation<kWasmI64>(this, dst.gp(), src.gp(), amount,
|
||||
&Assembler::shlq_cl);
|
||||
liftoff::EmitShiftOperation<ValueType::kI64>(this, dst.gp(), src.gp(), amount,
|
||||
&Assembler::shlq_cl);
|
||||
}
|
||||
|
||||
void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
|
||||
@ -1235,8 +1235,8 @@ void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
|
||||
|
||||
void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
|
||||
Register amount) {
|
||||
liftoff::EmitShiftOperation<kWasmI64>(this, dst.gp(), src.gp(), amount,
|
||||
&Assembler::sarq_cl);
|
||||
liftoff::EmitShiftOperation<ValueType::kI64>(this, dst.gp(), src.gp(), amount,
|
||||
&Assembler::sarq_cl);
|
||||
}
|
||||
|
||||
void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
|
||||
@ -1247,8 +1247,8 @@ void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
|
||||
|
||||
void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
|
||||
Register amount) {
|
||||
liftoff::EmitShiftOperation<kWasmI64>(this, dst.gp(), src.gp(), amount,
|
||||
&Assembler::shrq_cl);
|
||||
liftoff::EmitShiftOperation<ValueType::kI64>(this, dst.gp(), src.gp(), amount,
|
||||
&Assembler::shrq_cl);
|
||||
}
|
||||
|
||||
void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
|
||||
@ -1790,11 +1790,11 @@ void LiftoffAssembler::emit_cond_jump(Condition cond, Label* label,
|
||||
ValueType type, Register lhs,
|
||||
Register rhs) {
|
||||
if (rhs != no_reg) {
|
||||
switch (type) {
|
||||
case kWasmI32:
|
||||
switch (type.kind()) {
|
||||
case ValueType::kI32:
|
||||
cmpl(lhs, rhs);
|
||||
break;
|
||||
case kWasmI64:
|
||||
case ValueType::kI64:
|
||||
cmpq(lhs, rhs);
|
||||
break;
|
||||
default:
|
||||
@ -2061,7 +2061,7 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
|
||||
int arg_bytes = 0;
|
||||
for (ValueType param_type : sig->parameters()) {
|
||||
liftoff::Store(this, Operand(rsp, arg_bytes), *args++, param_type);
|
||||
arg_bytes += ValueTypes::MemSize(param_type);
|
||||
arg_bytes += param_type.element_size_bytes();
|
||||
}
|
||||
DCHECK_LE(arg_bytes, stack_bytes);
|
||||
|
||||
|
@ -62,18 +62,18 @@ auto ReadLebU64(const byte_t** pos) -> uint64_t {
|
||||
}
|
||||
|
||||
ValKind V8ValueTypeToWasm(i::wasm::ValueType v8_valtype) {
|
||||
switch (v8_valtype) {
|
||||
case i::wasm::kWasmI32:
|
||||
switch (v8_valtype.kind()) {
|
||||
case i::wasm::ValueType::kI32:
|
||||
return I32;
|
||||
case i::wasm::kWasmI64:
|
||||
case i::wasm::ValueType::kI64:
|
||||
return I64;
|
||||
case i::wasm::kWasmF32:
|
||||
case i::wasm::ValueType::kF32:
|
||||
return F32;
|
||||
case i::wasm::kWasmF64:
|
||||
case i::wasm::ValueType::kF64:
|
||||
return F64;
|
||||
case i::wasm::kWasmFuncRef:
|
||||
case i::wasm::ValueType::kFuncRef:
|
||||
return FUNCREF;
|
||||
case i::wasm::kWasmAnyRef:
|
||||
case i::wasm::ValueType::kAnyRef:
|
||||
return ANYREF;
|
||||
default:
|
||||
// TODO(wasm+): support new value types
|
||||
@ -1212,7 +1212,7 @@ namespace {
|
||||
class SignatureHelper : public i::AllStatic {
|
||||
public:
|
||||
// Use an invalid type as a marker separating params and results.
|
||||
static const i::wasm::ValueType kMarker = i::wasm::kWasmStmt;
|
||||
static constexpr i::wasm::ValueType kMarker = i::wasm::kWasmStmt;
|
||||
|
||||
static i::Handle<i::PodArray<i::wasm::ValueType>> Serialize(
|
||||
i::Isolate* isolate, FuncType* type) {
|
||||
@ -1397,25 +1397,25 @@ void PushArgs(const i::wasm::FunctionSig* sig, const Val args[],
|
||||
i::wasm::CWasmArgumentsPacker* packer, StoreImpl* store) {
|
||||
for (size_t i = 0; i < sig->parameter_count(); i++) {
|
||||
i::wasm::ValueType type = sig->GetParam(i);
|
||||
switch (type) {
|
||||
case i::wasm::kWasmI32:
|
||||
switch (type.kind()) {
|
||||
case i::wasm::ValueType::kI32:
|
||||
packer->Push(args[i].i32());
|
||||
break;
|
||||
case i::wasm::kWasmI64:
|
||||
case i::wasm::ValueType::kI64:
|
||||
packer->Push(args[i].i64());
|
||||
break;
|
||||
case i::wasm::kWasmF32:
|
||||
case i::wasm::ValueType::kF32:
|
||||
packer->Push(args[i].f32());
|
||||
break;
|
||||
case i::wasm::kWasmF64:
|
||||
case i::wasm::ValueType::kF64:
|
||||
packer->Push(args[i].f64());
|
||||
break;
|
||||
case i::wasm::kWasmAnyRef:
|
||||
case i::wasm::kWasmFuncRef:
|
||||
case i::wasm::kWasmNullRef:
|
||||
case i::wasm::ValueType::kAnyRef:
|
||||
case i::wasm::ValueType::kFuncRef:
|
||||
case i::wasm::ValueType::kNullRef:
|
||||
packer->Push(WasmRefToV8(store->i_isolate(), args[i].ref())->ptr());
|
||||
break;
|
||||
case i::wasm::kWasmExnRef:
|
||||
case i::wasm::ValueType::kExnRef:
|
||||
// TODO(jkummerow): Implement these.
|
||||
UNIMPLEMENTED();
|
||||
break;
|
||||
@ -1430,29 +1430,29 @@ void PopArgs(const i::wasm::FunctionSig* sig, Val results[],
|
||||
packer->Reset();
|
||||
for (size_t i = 0; i < sig->return_count(); i++) {
|
||||
i::wasm::ValueType type = sig->GetReturn(i);
|
||||
switch (type) {
|
||||
case i::wasm::kWasmI32:
|
||||
switch (type.kind()) {
|
||||
case i::wasm::ValueType::kI32:
|
||||
results[i] = Val(packer->Pop<int32_t>());
|
||||
break;
|
||||
case i::wasm::kWasmI64:
|
||||
case i::wasm::ValueType::kI64:
|
||||
results[i] = Val(packer->Pop<int64_t>());
|
||||
break;
|
||||
case i::wasm::kWasmF32:
|
||||
case i::wasm::ValueType::kF32:
|
||||
results[i] = Val(packer->Pop<float>());
|
||||
break;
|
||||
case i::wasm::kWasmF64:
|
||||
case i::wasm::ValueType::kF64:
|
||||
results[i] = Val(packer->Pop<double>());
|
||||
break;
|
||||
case i::wasm::kWasmAnyRef:
|
||||
case i::wasm::kWasmFuncRef:
|
||||
case i::wasm::kWasmNullRef: {
|
||||
case i::wasm::ValueType::kAnyRef:
|
||||
case i::wasm::ValueType::kFuncRef:
|
||||
case i::wasm::ValueType::kNullRef: {
|
||||
i::Address raw = packer->Pop<i::Address>();
|
||||
i::Handle<i::Object> obj(i::Object(raw), store->i_isolate());
|
||||
DCHECK_IMPLIES(type == i::wasm::kWasmNullRef, obj->IsNull());
|
||||
results[i] = Val(V8RefValueToWasm(store, obj));
|
||||
break;
|
||||
}
|
||||
case i::wasm::kWasmExnRef:
|
||||
case i::wasm::ValueType::kExnRef:
|
||||
// TODO(jkummerow): Implement these.
|
||||
UNIMPLEMENTED();
|
||||
break;
|
||||
@ -1694,17 +1694,17 @@ auto Global::type() const -> own<GlobalType> {
|
||||
|
||||
auto Global::get() const -> Val {
|
||||
i::Handle<i::WasmGlobalObject> v8_global = impl(this)->v8_object();
|
||||
switch (v8_global->type()) {
|
||||
case i::wasm::kWasmI32:
|
||||
switch (v8_global->type().kind()) {
|
||||
case i::wasm::ValueType::kI32:
|
||||
return Val(v8_global->GetI32());
|
||||
case i::wasm::kWasmI64:
|
||||
case i::wasm::ValueType::kI64:
|
||||
return Val(v8_global->GetI64());
|
||||
case i::wasm::kWasmF32:
|
||||
case i::wasm::ValueType::kF32:
|
||||
return Val(v8_global->GetF32());
|
||||
case i::wasm::kWasmF64:
|
||||
case i::wasm::ValueType::kF64:
|
||||
return Val(v8_global->GetF64());
|
||||
case i::wasm::kWasmAnyRef:
|
||||
case i::wasm::kWasmFuncRef: {
|
||||
case i::wasm::ValueType::kAnyRef:
|
||||
case i::wasm::ValueType::kFuncRef: {
|
||||
StoreImpl* store = impl(this)->store();
|
||||
i::HandleScope scope(store->i_isolate());
|
||||
return Val(V8RefValueToWasm(store, v8_global->GetRef()));
|
||||
@ -1811,11 +1811,11 @@ auto Table::type() const -> own<TableType> {
|
||||
uint32_t max;
|
||||
if (!table->maximum_length().ToUint32(&max)) max = 0xFFFFFFFFu;
|
||||
ValKind kind;
|
||||
switch (table->type()) {
|
||||
case i::wasm::kWasmFuncRef:
|
||||
switch (table->type().kind()) {
|
||||
case i::wasm::ValueType::kFuncRef:
|
||||
kind = FUNCREF;
|
||||
break;
|
||||
case i::wasm::kWasmAnyRef:
|
||||
case i::wasm::ValueType::kAnyRef:
|
||||
kind = ANYREF;
|
||||
break;
|
||||
default:
|
||||
|
@ -1195,14 +1195,14 @@ class WasmDecoder : public Decoder {
|
||||
imm.elem_segment_index);
|
||||
return false;
|
||||
}
|
||||
if (!Validate(pc_ + imm.length - imm.table.length - 1, imm.table))
|
||||
if (!Validate(pc_ + imm.length - imm.table.length - 1, imm.table)) {
|
||||
return false;
|
||||
if (!VALIDATE(ValueTypes::IsSubType(
|
||||
module_->elem_segments[imm.elem_segment_index].type,
|
||||
module_->tables[imm.table.index].type))) {
|
||||
}
|
||||
ValueType elem_type = module_->elem_segments[imm.elem_segment_index].type;
|
||||
if (!VALIDATE(
|
||||
elem_type.IsSubTypeOf(module_->tables[imm.table.index].type))) {
|
||||
errorf(pc_ + 2, "table %u is not a super-type of %s", imm.table.index,
|
||||
ValueTypes::TypeName(
|
||||
module_->elem_segments[imm.elem_segment_index].type));
|
||||
elem_type.type_name());
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
@ -1220,11 +1220,11 @@ class WasmDecoder : public Decoder {
|
||||
inline bool Validate(TableCopyImmediate<validate>& imm) {
|
||||
if (!Validate(pc_ + 1, imm.table_src)) return false;
|
||||
if (!Validate(pc_ + 2, imm.table_dst)) return false;
|
||||
ValueType src_type = module_->tables[imm.table_src.index].type;
|
||||
if (!VALIDATE(
|
||||
ValueTypes::IsSubType(module_->tables[imm.table_src.index].type,
|
||||
module_->tables[imm.table_dst.index].type))) {
|
||||
src_type.IsSubTypeOf(module_->tables[imm.table_dst.index].type))) {
|
||||
errorf(pc_ + 2, "table %u is not a super-type of %s", imm.table_dst.index,
|
||||
ValueTypes::TypeName(module_->tables[imm.table_src.index].type));
|
||||
src_type.type_name());
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
@ -1946,7 +1946,7 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
auto fval = Pop();
|
||||
auto tval = Pop(0, fval.type);
|
||||
ValueType type = tval.type == kWasmBottom ? fval.type : tval.type;
|
||||
if (ValueTypes::IsSubType(type, kWasmAnyRef)) {
|
||||
if (type.IsSubTypeOf(kWasmAnyRef)) {
|
||||
this->error(
|
||||
"select without type is only valid for value type inputs");
|
||||
break;
|
||||
@ -2428,7 +2428,7 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
if (WasmOpcodes::IsPrefixOpcode(opcode)) {
|
||||
opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1));
|
||||
}
|
||||
TRACE_PART(" %c@%d:%s", ValueTypes::ShortNameOf(val.type),
|
||||
TRACE_PART(" %c@%d:%s", val.type.short_name(),
|
||||
static_cast<int>(val.pc - this->start_),
|
||||
WasmOpcodes::OpcodeName(opcode));
|
||||
// If the decoder failed, don't try to decode the immediates, as this
|
||||
@ -2550,7 +2550,7 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
if (!CheckHasMemory()) return 0;
|
||||
MemoryAccessImmediate<validate> imm(this, this->pc_ + 1, type.size_log_2());
|
||||
auto index = Pop(0, kWasmI32);
|
||||
auto* result = Push(ValueType::kWasmS128);
|
||||
auto* result = Push(kWasmS128);
|
||||
CALL_INTERFACE_IF_REACHABLE(LoadTransform, type, transform, imm, index,
|
||||
result);
|
||||
return imm.length;
|
||||
@ -2602,15 +2602,15 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
if (this->enabled_.has_anyref()) {
|
||||
// The expected type is the biggest common sub type of all targets.
|
||||
(*result_types)[i] =
|
||||
ValueTypes::CommonSubType((*result_types)[i], (*merge)[i].type);
|
||||
ValueType::CommonSubType((*result_types)[i], (*merge)[i].type);
|
||||
} else {
|
||||
// All target must have the same signature.
|
||||
if ((*result_types)[i] != (*merge)[i].type) {
|
||||
this->errorf(pos,
|
||||
"inconsistent type in br_table target %u (previous "
|
||||
"was %s, this one is %s)",
|
||||
index, ValueTypes::TypeName((*result_types)[i]),
|
||||
ValueTypes::TypeName((*merge)[i].type));
|
||||
index, (*result_types)[i].type_name(),
|
||||
(*merge)[i].type.type_name());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -2635,11 +2635,10 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
// Type-check the topmost br_arity values on the stack.
|
||||
for (int i = 0; i < br_arity; ++i) {
|
||||
Value& val = stack_values[i];
|
||||
if (!ValueTypes::IsSubType(val.type, result_types[i])) {
|
||||
if (!val.type.IsSubTypeOf(result_types[i])) {
|
||||
this->errorf(this->pc_,
|
||||
"type error in merge[%u] (expected %s, got %s)", i,
|
||||
ValueTypes::TypeName(result_types[i]),
|
||||
ValueTypes::TypeName(val.type));
|
||||
result_types[i].type_name(), val.type.type_name());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -3004,12 +3003,11 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
|
||||
V8_INLINE Value Pop(int index, ValueType expected) {
|
||||
auto val = Pop();
|
||||
if (!VALIDATE(ValueTypes::IsSubType(val.type, expected) ||
|
||||
val.type == kWasmBottom || expected == kWasmBottom)) {
|
||||
if (!VALIDATE(val.type.IsSubTypeOf(expected) || val.type == kWasmBottom ||
|
||||
expected == kWasmBottom)) {
|
||||
this->errorf(val.pc, "%s[%d] expected type %s, found %s of type %s",
|
||||
SafeOpcodeNameAt(this->pc_), index,
|
||||
ValueTypes::TypeName(expected), SafeOpcodeNameAt(val.pc),
|
||||
ValueTypes::TypeName(val.type));
|
||||
SafeOpcodeNameAt(this->pc_), index, expected.type_name(),
|
||||
SafeOpcodeNameAt(val.pc), val.type.type_name());
|
||||
}
|
||||
return val;
|
||||
}
|
||||
@ -3069,10 +3067,9 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
for (uint32_t i = 0; i < merge->arity; ++i) {
|
||||
Value& val = stack_values[i];
|
||||
Value& old = (*merge)[i];
|
||||
if (!ValueTypes::IsSubType(val.type, old.type)) {
|
||||
if (!val.type.IsSubTypeOf(old.type)) {
|
||||
this->errorf(this->pc_, "type error in merge[%u] (expected %s, got %s)",
|
||||
i, ValueTypes::TypeName(old.type),
|
||||
ValueTypes::TypeName(val.type));
|
||||
i, old.type.type_name(), val.type.type_name());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -3087,10 +3084,9 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
for (uint32_t i = 0; i < c->start_merge.arity; ++i) {
|
||||
Value& start = c->start_merge[i];
|
||||
Value& end = c->end_merge[i];
|
||||
if (!ValueTypes::IsSubType(start.type, end.type)) {
|
||||
if (!start.type.IsSubTypeOf(end.type)) {
|
||||
this->errorf(this->pc_, "type error in merge[%u] (expected %s, got %s)",
|
||||
i, ValueTypes::TypeName(end.type),
|
||||
ValueTypes::TypeName(start.type));
|
||||
i, end.type.type_name(), start.type.type_name());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@ -3192,11 +3188,10 @@ class WasmFullDecoder : public WasmDecoder<validate> {
|
||||
for (int i = 0; i < num_returns; ++i) {
|
||||
auto& val = stack_values[i];
|
||||
ValueType expected_type = this->sig_->GetReturn(i);
|
||||
if (!ValueTypes::IsSubType(val.type, expected_type)) {
|
||||
if (!val.type.IsSubTypeOf(expected_type)) {
|
||||
this->errorf(this->pc_,
|
||||
"type error in return[%u] (expected %s, got %s)", i,
|
||||
ValueTypes::TypeName(expected_type),
|
||||
ValueTypes::TypeName(val.type));
|
||||
expected_type.type_name(), val.type.type_name());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -135,12 +135,12 @@ bool PrintRawWasmCode(AccountingAllocator* allocator, const FunctionBody& body,
|
||||
if (decls.type_list[pos] == type) {
|
||||
++count;
|
||||
} else {
|
||||
os << " " << count << " " << ValueTypes::TypeName(type);
|
||||
os << " " << count << " " << type.type_name();
|
||||
type = decls.type_list[pos];
|
||||
count = 1;
|
||||
}
|
||||
}
|
||||
os << " " << count << " " << ValueTypes::TypeName(type);
|
||||
os << " " << count << " " << type.type_name();
|
||||
}
|
||||
os << std::endl;
|
||||
if (line_numbers) line_numbers->push_back(kNoByteCode);
|
||||
@ -227,7 +227,7 @@ bool PrintRawWasmCode(AccountingAllocator* allocator, const FunctionBody& body,
|
||||
os << " @" << i.pc_offset();
|
||||
if (decoder.Complete(imm)) {
|
||||
for (uint32_t i = 0; i < imm.out_arity(); i++) {
|
||||
os << " " << ValueTypes::TypeName(imm.out_type(i));
|
||||
os << " " << imm.out_type(i).type_name();
|
||||
}
|
||||
}
|
||||
control_depth++;
|
||||
|
@ -695,21 +695,21 @@ class WasmGraphBuildingInterface {
|
||||
}
|
||||
|
||||
TFNode* DefaultValue(ValueType type) {
|
||||
switch (type) {
|
||||
case kWasmI32:
|
||||
switch (type.kind()) {
|
||||
case ValueType::kI32:
|
||||
return builder_->Int32Constant(0);
|
||||
case kWasmI64:
|
||||
case ValueType::kI64:
|
||||
return builder_->Int64Constant(0);
|
||||
case kWasmF32:
|
||||
case ValueType::kF32:
|
||||
return builder_->Float32Constant(0);
|
||||
case kWasmF64:
|
||||
case ValueType::kF64:
|
||||
return builder_->Float64Constant(0);
|
||||
case kWasmS128:
|
||||
case ValueType::kS128:
|
||||
return builder_->S128Zero();
|
||||
case kWasmAnyRef:
|
||||
case kWasmFuncRef:
|
||||
case kWasmNullRef:
|
||||
case kWasmExnRef:
|
||||
case ValueType::kAnyRef:
|
||||
case ValueType::kFuncRef:
|
||||
case ValueType::kNullRef:
|
||||
case ValueType::kExnRef:
|
||||
return builder_->RefNull();
|
||||
default:
|
||||
UNREACHABLE();
|
||||
@ -730,13 +730,12 @@ class WasmGraphBuildingInterface {
|
||||
Value& val = values[i];
|
||||
Value& old = (*merge)[i];
|
||||
DCHECK_NOT_NULL(val.node);
|
||||
DCHECK(val.type == kWasmBottom ||
|
||||
ValueTypes::MachineRepresentationFor(val.type) ==
|
||||
ValueTypes::MachineRepresentationFor(old.type));
|
||||
DCHECK(val.type == kWasmBottom || val.type.machine_representation() ==
|
||||
old.type.machine_representation());
|
||||
old.node = first ? val.node
|
||||
: builder_->CreateOrMergeIntoPhi(
|
||||
ValueTypes::MachineRepresentationFor(old.type),
|
||||
target->control, old.node, val.node);
|
||||
old.type.machine_representation(), target->control,
|
||||
old.node, val.node);
|
||||
}
|
||||
}
|
||||
|
||||
@ -798,8 +797,8 @@ class WasmGraphBuildingInterface {
|
||||
// Merge locals.
|
||||
for (int i = decoder->num_locals() - 1; i >= 0; i--) {
|
||||
to->locals[i] = builder_->CreateOrMergeIntoPhi(
|
||||
ValueTypes::MachineRepresentationFor(decoder->GetLocalType(i)),
|
||||
merge, to->locals[i], ssa_env_->locals[i]);
|
||||
decoder->GetLocalType(i).machine_representation(), merge,
|
||||
to->locals[i], ssa_env_->locals[i]);
|
||||
}
|
||||
// Merge the instance caches.
|
||||
builder_->MergeInstanceCacheInto(&to->instance_cache,
|
||||
|
@ -29,7 +29,7 @@ size_t LocalDeclEncoder::Emit(byte* buffer) const {
|
||||
LEBHelper::write_u32v(&pos, static_cast<uint32_t>(local_decls.size()));
|
||||
for (auto& local_decl : local_decls) {
|
||||
LEBHelper::write_u32v(&pos, local_decl.first);
|
||||
*pos = ValueTypes::ValueTypeCodeFor(local_decl.second);
|
||||
*pos = local_decl.second.value_type_code();
|
||||
++pos;
|
||||
}
|
||||
DCHECK_EQ(Size(), pos - buffer);
|
||||
|
@ -868,10 +868,10 @@ class ModuleDecoderImpl : public Decoder {
|
||||
errorf(pos, "out of bounds table index %u", table_index);
|
||||
break;
|
||||
}
|
||||
if (!ValueTypes::IsSubType(type, module_->tables[table_index].type)) {
|
||||
if (!type.IsSubTypeOf(module_->tables[table_index].type)) {
|
||||
errorf(pos,
|
||||
"Invalid element segment. Table %u is not a super-type of %s",
|
||||
table_index, ValueTypes::TypeName(type));
|
||||
table_index, type.type_name());
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -1359,14 +1359,14 @@ class ModuleDecoderImpl : public Decoder {
|
||||
errorf(pos,
|
||||
"type mismatch in global initialization "
|
||||
"(from global #%u), expected %s, got %s",
|
||||
other_index, ValueTypes::TypeName(global->type),
|
||||
ValueTypes::TypeName(module->globals[other_index].type));
|
||||
other_index, global->type.type_name(),
|
||||
module->globals[other_index].type.type_name());
|
||||
}
|
||||
} else {
|
||||
if (!ValueTypes::IsSubType(TypeOf(module, global->init), global->type)) {
|
||||
if (!TypeOf(module, global->init).IsSubTypeOf(global->type)) {
|
||||
errorf(pos, "type error in global initialization, expected %s, got %s",
|
||||
ValueTypes::TypeName(global->type),
|
||||
ValueTypes::TypeName(TypeOf(module, global->init)));
|
||||
global->type.type_name(),
|
||||
TypeOf(module, global->init).type_name());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1379,13 +1379,12 @@ class ModuleDecoderImpl : public Decoder {
|
||||
for (WasmGlobal& global : module->globals) {
|
||||
if (global.mutability && global.imported) {
|
||||
global.index = num_imported_mutable_globals++;
|
||||
} else if (ValueTypes::IsReferenceType(global.type)) {
|
||||
} else if (global.type.IsReferenceType()) {
|
||||
global.offset = tagged_offset;
|
||||
// All entries in the tagged_globals_buffer have size 1.
|
||||
tagged_offset++;
|
||||
} else {
|
||||
byte size =
|
||||
ValueTypes::MemSize(ValueTypes::MachineTypeFor(global.type));
|
||||
int size = global.type.element_size_bytes();
|
||||
untagged_offset = (untagged_offset + size - 1) & ~(size - 1); // align
|
||||
global.offset = untagged_offset;
|
||||
untagged_offset += size;
|
||||
@ -1657,8 +1656,7 @@ class ModuleDecoderImpl : public Decoder {
|
||||
}
|
||||
if (expected != kWasmStmt && TypeOf(module, expr) != kWasmI32) {
|
||||
errorf(pos, "type error in init expression, expected %s, got %s",
|
||||
ValueTypes::TypeName(expected),
|
||||
ValueTypes::TypeName(TypeOf(module, expr)));
|
||||
expected.type_name(), TypeOf(module, expr).type_name());
|
||||
}
|
||||
return expr;
|
||||
}
|
||||
|
@ -688,22 +688,22 @@ void InstanceBuilder::LoadDataSegments(Handle<WasmInstanceObject> instance) {
void InstanceBuilder::WriteGlobalValue(const WasmGlobal& global, double num) {
TRACE("init [globals_start=%p + %u] = %lf, type = %s\n",
raw_buffer_ptr(untagged_globals_, 0), global.offset, num,
ValueTypes::TypeName(global.type));
switch (global.type) {
case kWasmI32:
global.type.type_name());
switch (global.type.kind()) {
case ValueType::kI32:
WriteLittleEndianValue<int32_t>(GetRawGlobalPtr<int32_t>(global),
DoubleToInt32(num));
break;
case kWasmI64:
case ValueType::kI64:
// The Wasm-BigInt proposal currently says that i64 globals may
// only be initialized with BigInts. See:
// https://github.com/WebAssembly/JS-BigInt-integration/issues/12
UNREACHABLE();
case kWasmF32:
case ValueType::kF32:
WriteLittleEndianValue<float>(GetRawGlobalPtr<float>(global),
DoubleToFloat32(num));
break;
case kWasmF64:
case ValueType::kF64:
WriteLittleEndianValue<double>(GetRawGlobalPtr<double>(global), num);
break;
default:
@ -714,7 +714,7 @@ void InstanceBuilder::WriteGlobalValue(const WasmGlobal& global, double num) {
void InstanceBuilder::WriteGlobalValue(const WasmGlobal& global, int64_t num) {
TRACE("init [globals_start=%p + %u] = %" PRId64 ", type = %s\n",
raw_buffer_ptr(untagged_globals_, 0), global.offset, num,
ValueTypes::TypeName(global.type));
global.type.type_name());
DCHECK_EQ(kWasmI64, global.type);
WriteLittleEndianValue<int64_t>(GetRawGlobalPtr<int64_t>(global), num);
}
@ -723,44 +723,45 @@ void InstanceBuilder::WriteGlobalValue(const WasmGlobal& global,
Handle<WasmGlobalObject> value) {
TRACE("init [globals_start=%p + %u] = ", raw_buffer_ptr(untagged_globals_, 0),
global.offset);
switch (global.type) {
case kWasmI32: {
switch (global.type.kind()) {
case ValueType::kI32: {
int32_t num = value->GetI32();
WriteLittleEndianValue<int32_t>(GetRawGlobalPtr<int32_t>(global), num);
TRACE("%d", num);
break;
}
case kWasmI64: {
case ValueType::kI64: {
int64_t num = value->GetI64();
WriteLittleEndianValue<int64_t>(GetRawGlobalPtr<int64_t>(global), num);
TRACE("%" PRId64, num);
break;
}
case kWasmF32: {
case ValueType::kF32: {
float num = value->GetF32();
WriteLittleEndianValue<float>(GetRawGlobalPtr<float>(global), num);
TRACE("%f", num);
break;
}
case kWasmF64: {
case ValueType::kF64: {
double num = value->GetF64();
WriteLittleEndianValue<double>(GetRawGlobalPtr<double>(global), num);
TRACE("%lf", num);
break;
}
case kWasmAnyRef:
case kWasmFuncRef:
case kWasmNullRef:
case kWasmExnRef: {
case ValueType::kAnyRef:
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef: {
DCHECK_IMPLIES(global.type == kWasmNullRef, value->GetRef()->IsNull());
tagged_globals_->set(global.offset, *value->GetRef());
break;
}
default:
case ValueType::kStmt:
case ValueType::kS128:
case ValueType::kBottom:
UNREACHABLE();
}
TRACE(", type = %s (from WebAssembly.Global)\n",
ValueTypes::TypeName(global.type));
TRACE(", type = %s (from WebAssembly.Global)\n", global.type.type_name());
}

void InstanceBuilder::WriteGlobalAnyRef(const WasmGlobal& global,
@ -1062,7 +1063,7 @@ bool InstanceBuilder::ProcessImportedWasmGlobalObject(
return false;
}

bool is_sub_type = ValueTypes::IsSubType(global_object->type(), global.type);
bool is_sub_type = global_object->type().IsSubTypeOf(global.type);
bool is_same_type = global_object->type() == global.type;
bool valid_type = global.mutability ? is_same_type : is_sub_type;

@ -1075,7 +1076,7 @@ bool InstanceBuilder::ProcessImportedWasmGlobalObject(
DCHECK_LT(global.index, module_->num_imported_mutable_globals);
Handle<Object> buffer;
Address address_or_offset;
if (ValueTypes::IsReferenceType(global.type)) {
if (global.type.IsReferenceType()) {
static_assert(sizeof(global_object->offset()) <= sizeof(Address),
"The offset into the globals buffer does not fit into "
"the imported_mutable_globals array");
@ -1153,8 +1154,8 @@ bool InstanceBuilder::ProcessImportedGlobal(Handle<WasmInstanceObject> instance,
return false;
}

if (ValueTypes::IsReferenceType(global.type)) {
if (global.type == ValueType::kWasmFuncRef) {
if (global.type.IsReferenceType()) {
if (global.type == kWasmFuncRef) {
if (!value->IsNull(isolate_) &&
!WasmExportedFunction::IsWasmExportedFunction(*value)) {
ReportLinkError(
@ -1162,7 +1163,7 @@ bool InstanceBuilder::ProcessImportedGlobal(Handle<WasmInstanceObject> instance,
import_index, module_name, import_name);
return false;
}
} else if (global.type == ValueType::kWasmNullRef) {
} else if (global.type == kWasmNullRef) {
if (!value->IsNull(isolate_)) {
ReportLinkError("imported nullref global must be null", import_index,
module_name, import_name);
@ -1375,7 +1376,7 @@ void InstanceBuilder::InitGlobals(Handle<WasmInstanceObject> instance) {
uint32_t old_offset =
module_->globals[global.init.val.global_index].offset;
TRACE("init [globals+%u] = [globals+%d]\n", global.offset, old_offset);
if (ValueTypes::IsReferenceType(global.type)) {
if (global.type.IsReferenceType()) {
DCHECK(enabled_.has_anyref() || enabled_.has_eh());
tagged_globals_->set(new_offset, tagged_globals_->get(old_offset));
} else {
@ -1514,7 +1515,7 @@ void InstanceBuilder::ProcessExports(Handle<WasmInstanceObject> instance) {
if (global.mutability && global.imported) {
Handle<FixedArray> buffers_array(
instance->imported_mutable_globals_buffers(), isolate_);
if (ValueTypes::IsReferenceType(global.type)) {
if (global.type.IsReferenceType()) {
tagged_buffer = handle(
FixedArray::cast(buffers_array->get(global.index)), isolate_);
// For anyref globals we store the relative offset in the
@ -1538,7 +1539,7 @@ void InstanceBuilder::ProcessExports(Handle<WasmInstanceObject> instance) {
offset = static_cast<uint32_t>(global_addr - backing_store);
}
} else {
if (ValueTypes::IsReferenceType(global.type)) {
if (global.type.IsReferenceType()) {
tagged_buffer = handle(instance->tagged_globals_buffer(), isolate_);
} else {
untagged_buffer =

@ -29,40 +29,188 @@ class Simd128;
// I32 I64 F32 F64 NullRef
// \ \ \ \ /
// ------------ Bottom
#define FOREACH_VALUE_TYPE(V) \
V(Stmt, -1, -1, Void, None, 'v', "<stmt>") \
V(I32, 4, 2, I32, Int32, 'i', "i32") \
V(I64, 8, 3, I64, Int64, 'l', "i64") \
V(F32, 4, 2, F32, Float32, 'f', "f32") \
V(F64, 8, 3, F64, Float64, 'd', "f64") \
V(S128, 16, 4, S128, Simd128, 's', "s128") \
V(AnyRef, kSystemPointerSize, kSystemPointerSizeLog2, AnyRef, TaggedPointer, \
'r', "anyref") \
V(FuncRef, kSystemPointerSize, kSystemPointerSizeLog2, FuncRef, \
TaggedPointer, 'a', "funcref") \
V(NullRef, kSystemPointerSize, kSystemPointerSizeLog2, NullRef, \
TaggedPointer, 'n', "nullref") \
V(ExnRef, kSystemPointerSize, kSystemPointerSizeLog2, ExnRef, TaggedPointer, \
'e', "exn") \
V(Bottom, -1, -1, Void, None, '*', "<bot>")
// Format: kind, log2Size, code, machineType, shortName, typeName
#define FOREACH_VALUE_TYPE(V) \
V(Stmt, -1, Void, None, 'v', "<stmt>") \
V(I32, 2, I32, Int32, 'i', "i32") \
V(I64, 3, I64, Int64, 'l', "i64") \
V(F32, 2, F32, Float32, 'f', "f32") \
V(F64, 3, F64, Float64, 'd', "f64") \
V(S128, 4, S128, Simd128, 's', "s128") \
V(AnyRef, kSystemPointerSizeLog2, AnyRef, TaggedPointer, 'r', "anyref") \
V(FuncRef, kSystemPointerSizeLog2, FuncRef, TaggedPointer, 'a', "funcref") \
V(NullRef, kSystemPointerSizeLog2, NullRef, TaggedPointer, 'n', "nullref") \
V(ExnRef, kSystemPointerSizeLog2, ExnRef, TaggedPointer, 'e', "exn") \
V(Bottom, -1, Void, None, '*', "<bot>")

enum ValueType : uint8_t {
#define DEF_ENUM(type, ...) kWasm##type,
FOREACH_VALUE_TYPE(DEF_ENUM)
class ValueType {
public:
enum Kind : uint8_t {
#define DEF_ENUM(kind, ...) k##kind,
FOREACH_VALUE_TYPE(DEF_ENUM)
#undef DEF_ENUM
};

constexpr ValueType() : kind_(kStmt) {}
explicit constexpr ValueType(Kind kind) : kind_(kind) {}

constexpr Kind kind() const { return kind_; }

constexpr int element_size_log2() const {
#if V8_HAS_CXX14_CONSTEXPR
DCHECK_NE(kStmt, kind_);
DCHECK_NE(kBottom, kind_);
#endif

constexpr int kElementSizeLog2[] = {
#define ELEM_SIZE_LOG2(kind, log2Size, ...) log2Size,
FOREACH_VALUE_TYPE(ELEM_SIZE_LOG2)
#undef ELEM_SIZE_LOG2
};

return kElementSizeLog2[kind_];
}

constexpr int element_size_bytes() const { return 1 << element_size_log2(); }

constexpr bool operator==(ValueType other) const {
return kind_ == other.kind_;
}
constexpr bool operator!=(ValueType other) const {
return kind_ != other.kind_;
}

bool IsSubTypeOf(ValueType other) const {
return (*this == other) || (kind_ == kNullRef && other.kind_ == kAnyRef) ||
(kind_ == kFuncRef && other.kind_ == kAnyRef) ||
(kind_ == kExnRef && other.kind_ == kAnyRef) ||
(kind_ == kNullRef && other.kind_ == kFuncRef) ||
(kind_ == kNullRef && other.kind_ == kExnRef);
}

bool IsReferenceType() const {
return kind_ == kAnyRef || kind_ == kFuncRef || kind_ == kNullRef ||
kind_ == kExnRef;
}

static ValueType CommonSubType(ValueType a, ValueType b) {
if (a.kind() == b.kind()) return a;
// The only sub type of any value type is {bot}.
if (!a.IsReferenceType() || !b.IsReferenceType()) {
return ValueType(kBottom);
}
if (a.IsSubTypeOf(b)) return a;
if (b.IsSubTypeOf(a)) return b;
// {a} and {b} are not each other's subtype. The biggest sub-type of all
// reference types is {kWasmNullRef}.
return ValueType(kNullRef);
}

ValueTypeCode value_type_code() const {
DCHECK_NE(kBottom, kind_);

constexpr ValueTypeCode kValueTypeCode[] = {
#define TYPE_CODE(kind, log2Size, code, ...) kLocal##code,
FOREACH_VALUE_TYPE(TYPE_CODE)
#undef TYPE_CODE
};

return kValueTypeCode[kind_];
}

MachineType machine_type() const {
DCHECK_NE(kBottom, kind_);

constexpr MachineType kMachineType[] = {
#define MACH_TYPE(kind, log2Size, code, machineType, ...) \
MachineType::machineType(),
FOREACH_VALUE_TYPE(MACH_TYPE)
#undef MACH_TYPE
};

return kMachineType[kind_];
}

MachineRepresentation machine_representation() {
return machine_type().representation();
}

static ValueType For(MachineType type) {
switch (type.representation()) {
case MachineRepresentation::kWord8:
case MachineRepresentation::kWord16:
case MachineRepresentation::kWord32:
return ValueType(kI32);
case MachineRepresentation::kWord64:
return ValueType(kI64);
case MachineRepresentation::kFloat32:
return ValueType(kF32);
case MachineRepresentation::kFloat64:
return ValueType(kF64);
case MachineRepresentation::kTaggedPointer:
return ValueType(kAnyRef);
case MachineRepresentation::kSimd128:
return ValueType(kS128);
default:
UNREACHABLE();
}
}

constexpr char short_name() const {
constexpr char kShortName[] = {
#define SHORT_NAME(kind, log2Size, code, machineType, shortName, ...) shortName,
FOREACH_VALUE_TYPE(SHORT_NAME)
#undef SHORT_NAME
};

return kShortName[kind_];
}

constexpr const char* type_name() const {
constexpr const char* kTypeName[] = {
#define TYPE_NAME(kind, log2Size, code, machineType, shortName, typeName, ...) \
typeName,
FOREACH_VALUE_TYPE(TYPE_NAME)
#undef TYPE_NAME
};

return kTypeName[kind_];
}

private:
Kind kind_ : 8;
// TODO(jkummerow): Add and use the following for reference types:
// uint32_t ref_index_ : 24;
};

static_assert(sizeof(ValueType) <= kUInt32Size,
"ValueType is small and can be passed by value");

inline size_t hash_value(ValueType type) {
return static_cast<size_t>(type.kind());
}

constexpr ValueType kWasmI32 = ValueType(ValueType::kI32);
constexpr ValueType kWasmI64 = ValueType(ValueType::kI64);
constexpr ValueType kWasmF32 = ValueType(ValueType::kF32);
constexpr ValueType kWasmF64 = ValueType(ValueType::kF64);
constexpr ValueType kWasmAnyRef = ValueType(ValueType::kAnyRef);
constexpr ValueType kWasmExnRef = ValueType(ValueType::kExnRef);
constexpr ValueType kWasmFuncRef = ValueType(ValueType::kFuncRef);
constexpr ValueType kWasmNullRef = ValueType(ValueType::kNullRef);
constexpr ValueType kWasmS128 = ValueType(ValueType::kS128);
constexpr ValueType kWasmStmt = ValueType(ValueType::kStmt);
constexpr ValueType kWasmBottom = ValueType(ValueType::kBottom);

#define FOREACH_WASMVALUE_CTYPES(V) \
V(kWasmI32, int32_t) \
V(kWasmI64, int64_t) \
V(kWasmF32, float) \
V(kWasmF64, double) \
V(kWasmS128, Simd128)
V(kI32, int32_t) \
V(kI64, int64_t) \
V(kF32, float) \
V(kF64, double) \
V(kS128, Simd128)

using FunctionSig = Signature<ValueType>;

inline size_t hash_value(ValueType type) { return static_cast<size_t>(type); }

#define FOREACH_LOAD_TYPE(V) \
V(I32, , Int32) \
V(I32, 8S, Int8) \
@ -88,7 +236,7 @@ class LoadType {
#undef DEF_ENUM
};

// Allow implicit convertion of the enum value to this wrapper.
// Allow implicit conversion of the enum value to this wrapper.
constexpr LoadType(LoadTypeValue val) // NOLINT(runtime/explicit)
: val_(val) {}

@ -99,16 +247,16 @@ class LoadType {
constexpr MachineType mem_type() const { return kMemType[val_]; }

static LoadType ForValueType(ValueType type) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
return kI32Load;
case kWasmI64:
case ValueType::kI64:
return kI64Load;
case kWasmF32:
case ValueType::kF32:
return kF32Load;
case kWasmF64:
case ValueType::kF64:
return kF64Load;
case kWasmS128:
case ValueType::kS128:
return kS128Load;
default:
UNREACHABLE();
@ -128,7 +276,7 @@ class LoadType {
};

static constexpr ValueType kValueType[] = {
#define VALUE_TYPE(type, ...) kWasm##type,
#define VALUE_TYPE(type, ...) ValueType(ValueType::k##type),
FOREACH_LOAD_TYPE(VALUE_TYPE)
#undef VALUE_TYPE
};
@ -171,16 +319,16 @@ class StoreType {
constexpr MachineRepresentation mem_rep() const { return kMemRep[val_]; }

static StoreType ForValueType(ValueType type) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
return kI32Store;
case kWasmI64:
case ValueType::kI64:
return kI64Store;
case kWasmF32:
case ValueType::kF32:
return kF32Store;
case kWasmF64:
case ValueType::kF64:
return kF64Store;
case kWasmS128:
case ValueType::kS128:
return kS128Store;
default:
UNREACHABLE();
@ -199,7 +347,7 @@ class StoreType {
};

static constexpr ValueType kValueType[] = {
#define VALUE_TYPE(type, ...) kWasm##type,
#define VALUE_TYPE(type, ...) ValueType(ValueType::k##type),
FOREACH_STORE_TYPE(VALUE_TYPE)
#undef VALUE_TYPE
};
@ -211,143 +359,6 @@ class StoreType {
};
};

// A collection of ValueType-related static methods.
class V8_EXPORT_PRIVATE ValueTypes {
public:
static inline bool IsSubType(ValueType actual, ValueType expected) {
return (expected == actual) ||
(expected == kWasmAnyRef && actual == kWasmNullRef) ||
(expected == kWasmAnyRef && actual == kWasmFuncRef) ||
(expected == kWasmAnyRef && actual == kWasmExnRef) ||
(expected == kWasmFuncRef && actual == kWasmNullRef) ||
(expected == kWasmExnRef && actual == kWasmNullRef);
}

static inline bool IsReferenceType(ValueType type) {
return type == kWasmAnyRef || type == kWasmFuncRef ||
type == kWasmNullRef || type == kWasmExnRef;
}

static inline ValueType CommonSubType(ValueType a, ValueType b) {
if (a == b) return a;
// The only sub type of any value type is {bot}.
if (!IsReferenceType(a) || !IsReferenceType(b)) return kWasmBottom;
if (IsSubType(a, b)) return a;
if (IsSubType(b, a)) return b;
// {a} and {b} are not each other's subtype. The biggest sub-type of all
// reference types is {kWasmNullRef}.
return kWasmNullRef;
}

static byte MemSize(MachineType type) {
return 1 << i::ElementSizeLog2Of(type.representation());
}

static int ElementSizeInBytes(ValueType type) {
DCHECK_NE(kWasmStmt, type);
DCHECK_NE(kWasmBottom, type);

constexpr int kElementSizeInBytes[] = {
#define ELEM_SIZE(type, size, ...) size,
FOREACH_VALUE_TYPE(ELEM_SIZE)
#undef ELEM_SIZE
};

return kElementSizeInBytes[type];
}

static int ElementSizeLog2Of(ValueType type) {
DCHECK_NE(kWasmStmt, type);
DCHECK_NE(kWasmBottom, type);

constexpr int kElementSizeLog2[] = {
#define ELEM_SIZE_LOG2(type, size, log2Size, ...) log2Size,
FOREACH_VALUE_TYPE(ELEM_SIZE_LOG2)
#undef ELEM_SIZE_LOG2
};

return kElementSizeLog2[type];
}

static byte MemSize(ValueType type) { return 1 << ElementSizeLog2Of(type); }

static ValueTypeCode ValueTypeCodeFor(ValueType type) {
DCHECK_NE(kWasmBottom, type);

constexpr ValueTypeCode kValueTypeCode[] = {
#define TYPE_CODE(type, size, log2Size, code, ...) kLocal##code,
FOREACH_VALUE_TYPE(TYPE_CODE)
#undef TYPE_CODE
};

return kValueTypeCode[type];
}

static MachineType MachineTypeFor(ValueType type) {
DCHECK_NE(kWasmBottom, type);

constexpr MachineType kMachineType[] = {
#define MACH_TYPE(type, size, log2Size, code, machineType, ...) \
MachineType::machineType(),
FOREACH_VALUE_TYPE(MACH_TYPE)
#undef MACH_TYPE
};

return kMachineType[type];
}

static MachineRepresentation MachineRepresentationFor(ValueType type) {
return MachineTypeFor(type).representation();
}

static ValueType ValueTypeFor(MachineType type) {
switch (type.representation()) {
case MachineRepresentation::kWord8:
case MachineRepresentation::kWord16:
case MachineRepresentation::kWord32:
return kWasmI32;
case MachineRepresentation::kWord64:
return kWasmI64;
case MachineRepresentation::kFloat32:
return kWasmF32;
case MachineRepresentation::kFloat64:
return kWasmF64;
case MachineRepresentation::kTaggedPointer:
return kWasmAnyRef;
case MachineRepresentation::kSimd128:
return kWasmS128;
default:
UNREACHABLE();
}
}

static constexpr char ShortNameOf(ValueType type) {
constexpr char kShortName[] = {
#define SHORT_NAME(type, size, log2Size, code, machineType, shortName, ...) \
shortName,
FOREACH_VALUE_TYPE(SHORT_NAME)
#undef SHORT_NAME
};

return kShortName[type];
}

static constexpr const char* TypeName(ValueType type) {
constexpr const char* kTypeName[] = {
#define TYPE_NAME(type, size, log2Size, code, machineType, shortName, \
typeName, ...) \
typeName,
FOREACH_VALUE_TYPE(TYPE_NAME)
#undef TYPE_NAME
};

return kTypeName[type];
}

private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ValueTypes);
};

} // namespace wasm
} // namespace internal
} // namespace v8

@ -48,11 +48,11 @@ class CWasmArgumentsPacker {
static int TotalSize(const FunctionSig* sig) {
int return_size = 0;
for (ValueType t : sig->returns()) {
return_size += ValueTypes::ElementSizeInBytes(t);
return_size += t.element_size_bytes();
}
int param_size = 0;
for (ValueType t : sig->parameters()) {
param_size += ValueTypes::ElementSizeInBytes(t);
param_size += t.element_size_bytes();
}
return std::max(return_size, param_size);
}

@ -48,23 +48,23 @@ Handle<String> PrintFToOneByteString(Isolate* isolate, const char* format,
}

Handle<Object> WasmValueToValueObject(Isolate* isolate, WasmValue value) {
switch (value.type()) {
case kWasmI32:
switch (value.type().kind()) {
case ValueType::kI32:
if (Smi::IsValid(value.to<int32_t>()))
return handle(Smi::FromInt(value.to<int32_t>()), isolate);
return PrintFToOneByteString<false>(isolate, "%d", value.to<int32_t>());
case kWasmI64: {
case ValueType::kI64: {
int64_t i64 = value.to<int64_t>();
int32_t i32 = static_cast<int32_t>(i64);
if (i32 == i64 && Smi::IsValid(i32))
return handle(Smi::FromIntptr(i32), isolate);
return PrintFToOneByteString<false>(isolate, "%" PRId64, i64);
}
case kWasmF32:
case ValueType::kF32:
return isolate->factory()->NewNumber(value.to<float>());
case kWasmF64:
case ValueType::kF64:
return isolate->factory()->NewNumber(value.to<double>());
case kWasmAnyRef:
case ValueType::kAnyRef:
return value.to_anyref();
default:
UNIMPLEMENTED();
@ -702,14 +702,14 @@ class DebugInfoImpl {
if (debug_side_table_entry->is_register(index)) {
// TODO(clemensb): Implement by loading from the frame of the
// WasmDebugBreak builtin. The current values are just placeholders.
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
return WasmValue(int32_t{-11});
case kWasmI64:
case ValueType::kI64:
return WasmValue(int64_t{-11});
case kWasmF32:
case ValueType::kF32:
return WasmValue(float{-11});
case kWasmF64:
case ValueType::kF64:
return WasmValue(double{-11});
default:
UNIMPLEMENTED();
@ -719,14 +719,14 @@ class DebugInfoImpl {
// Otherwise load the value from the stack.
Address stack_address =
stack_frame_base - debug_side_table_entry->stack_offset(index);
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
return WasmValue(ReadUnalignedValue<int32_t>(stack_address));
case kWasmI64:
case ValueType::kI64:
return WasmValue(ReadUnalignedValue<int64_t>(stack_address));
case kWasmF32:
case ValueType::kF32:
return WasmValue(ReadUnalignedValue<float>(stack_address));
case kWasmF64:
case ValueType::kF64:
return WasmValue(ReadUnalignedValue<double>(stack_address));
default:
UNIMPLEMENTED();

@ -1453,23 +1453,24 @@ class ThreadImpl {
}

pc_t InitLocals(InterpreterCode* code) {
for (auto p : code->locals.type_list) {
for (ValueType p : code->locals.type_list) {
WasmValue val;
switch (p) {
switch (p.kind()) {
#define CASE_TYPE(valuetype, ctype) \
case valuetype: \
case ValueType::valuetype: \
val = WasmValue(ctype{}); \
break;
FOREACH_WASMVALUE_CTYPES(CASE_TYPE)
#undef CASE_TYPE
case kWasmAnyRef:
case kWasmFuncRef:
case kWasmNullRef:
case kWasmExnRef: {
case ValueType::kAnyRef:
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef: {
val = WasmValue(isolate_->factory()->null_value());
break;
}
default:
case ValueType::kStmt:
case ValueType::kBottom:
UNREACHABLE();
break;
}
@ -2828,28 +2829,28 @@ class ThreadImpl {
sp_t base_index = StackHeight() - sig->parameter_count();
for (size_t i = 0; i < sig->parameter_count(); ++i) {
WasmValue value = GetStackValue(base_index + i);
switch (sig->GetParam(i)) {
case kWasmI32: {
switch (sig->GetParam(i).kind()) {
case ValueType::kI32: {
uint32_t u32 = value.to_u32();
EncodeI32ExceptionValue(encoded_values, &encoded_index, u32);
break;
}
case kWasmF32: {
case ValueType::kF32: {
uint32_t f32 = value.to_f32_boxed().get_bits();
EncodeI32ExceptionValue(encoded_values, &encoded_index, f32);
break;
}
case kWasmI64: {
case ValueType::kI64: {
uint64_t u64 = value.to_u64();
EncodeI64ExceptionValue(encoded_values, &encoded_index, u64);
break;
}
case kWasmF64: {
case ValueType::kF64: {
uint64_t f64 = value.to_f64_boxed().get_bits();
EncodeI64ExceptionValue(encoded_values, &encoded_index, f64);
break;
}
case kWasmS128: {
case ValueType::kS128: {
int4 s128 = value.to_s128().to_i32x4();
EncodeI32ExceptionValue(encoded_values, &encoded_index, s128.val[0]);
EncodeI32ExceptionValue(encoded_values, &encoded_index, s128.val[1]);
@ -2857,16 +2858,17 @@ class ThreadImpl {
EncodeI32ExceptionValue(encoded_values, &encoded_index, s128.val[3]);
break;
}
case kWasmAnyRef:
case kWasmFuncRef:
case kWasmNullRef:
case kWasmExnRef: {
case ValueType::kAnyRef:
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef: {
Handle<Object> anyref = value.to_anyref();
DCHECK_IMPLIES(sig->GetParam(i) == kWasmNullRef, anyref->IsNull());
encoded_values->set(encoded_index++, *anyref);
break;
}
default:
case ValueType::kStmt:
case ValueType::kBottom:
UNREACHABLE();
}
}
@ -2926,32 +2928,32 @@ class ThreadImpl {
uint32_t encoded_index = 0;
for (size_t i = 0; i < sig->parameter_count(); ++i) {
WasmValue value;
switch (sig->GetParam(i)) {
case kWasmI32: {
switch (sig->GetParam(i).kind()) {
case ValueType::kI32: {
uint32_t u32 = 0;
DecodeI32ExceptionValue(encoded_values, &encoded_index, &u32);
value = WasmValue(u32);
break;
}
case kWasmF32: {
case ValueType::kF32: {
uint32_t f32_bits = 0;
DecodeI32ExceptionValue(encoded_values, &encoded_index, &f32_bits);
value = WasmValue(Float32::FromBits(f32_bits));
break;
}
case kWasmI64: {
case ValueType::kI64: {
uint64_t u64 = 0;
DecodeI64ExceptionValue(encoded_values, &encoded_index, &u64);
value = WasmValue(u64);
break;
}
case kWasmF64: {
case ValueType::kF64: {
uint64_t f64_bits = 0;
DecodeI64ExceptionValue(encoded_values, &encoded_index, &f64_bits);
value = WasmValue(Float64::FromBits(f64_bits));
break;
}
case kWasmS128: {
case ValueType::kS128: {
int4 s128 = {0, 0, 0, 0};
uint32_t* vals = reinterpret_cast<uint32_t*>(s128.val);
DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[0]);
@ -2961,16 +2963,17 @@ class ThreadImpl {
value = WasmValue(Simd128(s128));
break;
}
case kWasmAnyRef:
case kWasmFuncRef:
case kWasmNullRef:
case kWasmExnRef: {
case ValueType::kAnyRef:
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef: {
Handle<Object> anyref(encoded_values->get(encoded_index++), isolate_);
DCHECK_IMPLIES(sig->GetParam(i) == kWasmNullRef, anyref->IsNull());
value = WasmValue(anyref);
break;
}
default:
case ValueType::kStmt:
case ValueType::kBottom:
UNREACHABLE();
}
Push(value);
@ -3409,9 +3412,9 @@ class ThreadImpl {
GlobalIndexImmediate<Decoder::kNoValidate> imm(&decoder,
code->at(pc));
auto& global = module()->globals[imm.index];
switch (global.type) {
switch (global.type.kind()) {
#define CASE_TYPE(valuetype, ctype) \
case valuetype: { \
case ValueType::valuetype: { \
uint8_t* ptr = \
WasmInstanceObject::GetGlobalStorage(instance_object_, global); \
WriteLittleEndianValue<ctype>(reinterpret_cast<Address>(ptr), \
@ -3420,10 +3423,10 @@ class ThreadImpl {
}
FOREACH_WASMVALUE_CTYPES(CASE_TYPE)
#undef CASE_TYPE
case kWasmAnyRef:
case kWasmFuncRef:
case kWasmNullRef:
case kWasmExnRef: {
case ValueType::kAnyRef:
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef: {
HandleScope handle_scope(isolate_); // Avoid leaking handles.
Handle<FixedArray> global_buffer; // The buffer of the global.
uint32_t global_index; // The index into the buffer.
@ -3435,7 +3438,8 @@ class ThreadImpl {
global_buffer->set(global_index, *ref);
break;
}
default:
case ValueType::kStmt:
case ValueType::kBottom:
UNREACHABLE();
}
len = 1 + imm.length;
@ -3792,27 +3796,28 @@ class ThreadImpl {
sp_t plimit = top ? top->plimit() : 0;
sp_t llimit = top ? top->llimit() : 0;
for (size_t i = sp; i < StackHeight(); ++i) {
if (i < plimit)
if (i < plimit) {
PrintF(" p%zu:", i);
else if (i < llimit)
} else if (i < llimit) {
PrintF(" l%zu:", i);
else
} else {
PrintF(" s%zu:", i);
}
WasmValue val = GetStackValue(i);
switch (val.type()) {
case kWasmI32:
switch (val.type().kind()) {
case ValueType::kI32:
PrintF("i32:%d", val.to<int32_t>());
break;
case kWasmI64:
case ValueType::kI64:
PrintF("i64:%" PRId64 "", val.to<int64_t>());
break;
case kWasmF32:
case ValueType::kF32:
PrintF("f32:%f", val.to<float>());
break;
case kWasmF64:
case ValueType::kF64:
PrintF("f64:%lf", val.to<double>());
break;
case kWasmS128: {
case ValueType::kS128: {
// This defaults to tracing all S128 values as i32x4 values for now,
// when there is more state to know what type of values are on the
// stack, the right format should be printed here.
@ -3820,7 +3825,7 @@ class ThreadImpl {
PrintF("i32x4:%d,%d,%d,%d", s.val[0], s.val[1], s.val[2], s.val[3]);
break;
}
case kWasmAnyRef: {
case ValueType::kAnyRef: {
Handle<Object> ref = val.to_anyref();
if (ref->IsNull()) {
PrintF("ref:null");
@ -3829,10 +3834,15 @@ class ThreadImpl {
}
break;
}
case kWasmStmt:
case ValueType::kStmt:
PrintF("void");
break;
default:
case ValueType::kFuncRef:
case ValueType::kExnRef:
case ValueType::kNullRef:
PrintF("(func|null|exn)ref:unimplemented");
break;
case ValueType::kBottom:
UNREACHABLE();
break;
}
@ -3873,23 +3883,23 @@ class ThreadImpl {
sp_t base_index = StackHeight() - num_args;
for (int i = 0; i < num_args; ++i) {
WasmValue arg = GetStackValue(base_index + i);
switch (sig->GetParam(i)) {
case kWasmI32:
switch (sig->GetParam(i).kind()) {
case ValueType::kI32:
packer.Push(arg.to<uint32_t>());
break;
case kWasmI64:
case ValueType::kI64:
packer.Push(arg.to<uint64_t>());
break;
case kWasmF32:
case ValueType::kF32:
packer.Push(arg.to<float>());
break;
case kWasmF64:
case ValueType::kF64:
packer.Push(arg.to<double>());
break;
case kWasmAnyRef:
case kWasmFuncRef:
case kWasmNullRef:
case kWasmExnRef:
case ValueType::kAnyRef:
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef:
DCHECK_IMPLIES(sig->GetParam(i) == kWasmNullRef,
arg.to_anyref()->IsNull());
packer.Push(arg.to_anyref()->ptr());
@ -3915,23 +3925,23 @@ class ThreadImpl {
// Push return values.
packer.Reset();
for (size_t i = 0; i < sig->return_count(); i++) {
switch (sig->GetReturn(i)) {
case kWasmI32:
switch (sig->GetReturn(i).kind()) {
case ValueType::kI32:
Push(WasmValue(packer.Pop<uint32_t>()));
break;
case kWasmI64:
case ValueType::kI64:
Push(WasmValue(packer.Pop<uint64_t>()));
break;
case kWasmF32:
case ValueType::kF32:
Push(WasmValue(packer.Pop<float>()));
break;
case kWasmF64:
case ValueType::kF64:
Push(WasmValue(packer.Pop<double>()));
break;
case kWasmAnyRef:
case kWasmFuncRef:
case kWasmNullRef:
case kWasmExnRef: {
case ValueType::kAnyRef:
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef: {
Handle<Object> ref(Object(packer.Pop<Address>()), isolate);
DCHECK_IMPLIES(sig->GetReturn(i) == kWasmNullRef, ref->IsNull());
Push(WasmValue(ref));

@ -24,6 +24,7 @@
#include "src/tasks/task-utils.h"
#include "src/trap-handler/trap-handler.h"
#include "src/wasm/streaming-decoder.h"
#include "src/wasm/value-type.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-limits.h"
#include "src/wasm/wasm-objects-inl.h"
@ -1306,8 +1307,8 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {

// Convert value to a WebAssembly value, the default value is 0.
Local<v8::Value> value = Local<Value>::Cast(args[1]);
switch (type) {
case i::wasm::kWasmI32: {
switch (type.kind()) {
case i::wasm::ValueType::kI32: {
int32_t i32_value = 0;
if (!value->IsUndefined()) {
v8::Local<v8::Int32> int32_value;
@ -1317,7 +1318,7 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {
global_obj->SetI32(i32_value);
break;
}
case i::wasm::kWasmI64: {
case i::wasm::ValueType::kI64: {
int64_t i64_value = 0;
if (!value->IsUndefined()) {
if (!enabled_features.has_bigint()) {
@ -1332,7 +1333,7 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {
global_obj->SetI64(i64_value);
break;
}
case i::wasm::kWasmF32: {
case i::wasm::ValueType::kF32: {
float f32_value = 0;
if (!value->IsUndefined()) {
double f64_value = 0;
@ -1344,7 +1345,7 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {
global_obj->SetF32(f32_value);
break;
}
case i::wasm::kWasmF64: {
case i::wasm::ValueType::kF64: {
double f64_value = 0;
if (!value->IsUndefined()) {
v8::Local<v8::Number> number_value;
@ -1354,8 +1355,8 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {
global_obj->SetF64(f64_value);
break;
}
case i::wasm::kWasmAnyRef:
case i::wasm::kWasmExnRef: {
case i::wasm::ValueType::kAnyRef:
case i::wasm::ValueType::kExnRef: {
if (args.Length() < 2) {
// When no initial value is provided, we have to use the WebAssembly
// default value 'null', and not the JS default value 'undefined'.
@ -1365,7 +1366,7 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {
global_obj->SetAnyRef(Utils::OpenHandle(*value));
break;
}
case i::wasm::kWasmNullRef:
case i::wasm::ValueType::kNullRef:
if (args.Length() < 2) {
// When no initial value is provided, we have to use the WebAssembly
// default value 'null', and not the JS default value 'undefined'.
@ -1376,7 +1377,7 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {
thrower.TypeError("The value of nullref globals must be null");
}
break;
case i::wasm::kWasmFuncRef: {
case i::wasm::ValueType::kFuncRef: {
if (args.Length() < 2) {
// When no initial value is provided, we have to use the WebAssembly
// default value 'null', and not the JS default value 'undefined'.
@ -1391,7 +1392,9 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {
}
break;
}
default:
case i::wasm::ValueType::kStmt:
case i::wasm::ValueType::kS128:
case i::wasm::ValueType::kBottom:
UNREACHABLE();
}

@ -1808,11 +1811,11 @@ void WebAssemblyGlobalGetValueCommon(

v8::ReturnValue<v8::Value> return_value = args.GetReturnValue();

switch (receiver->type()) {
case i::wasm::kWasmI32:
switch (receiver->type().kind()) {
case i::wasm::ValueType::kI32:
return_value.Set(receiver->GetI32());
break;
case i::wasm::kWasmI64: {
case i::wasm::ValueType::kI64: {
auto enabled_features = i::wasm::WasmFeatures::FromIsolate(i_isolate);
if (enabled_features.has_bigint()) {
Local<BigInt> value = BigInt::New(isolate, receiver->GetI64());
@ -1823,21 +1826,23 @@ void WebAssemblyGlobalGetValueCommon(
}
break;
}
case i::wasm::kWasmF32:
case i::wasm::ValueType::kF32:
return_value.Set(receiver->GetF32());
break;
case i::wasm::kWasmF64:
case i::wasm::ValueType::kF64:
return_value.Set(receiver->GetF64());
break;
case i::wasm::kWasmAnyRef:
case i::wasm::kWasmFuncRef:
case i::wasm::kWasmNullRef:
case i::wasm::kWasmExnRef:
case i::wasm::ValueType::kAnyRef:
case i::wasm::ValueType::kFuncRef:
case i::wasm::ValueType::kNullRef:
case i::wasm::ValueType::kExnRef:
DCHECK_IMPLIES(receiver->type() == i::wasm::kWasmNullRef,
receiver->GetRef()->IsNull());
return_value.Set(Utils::ToLocal(receiver->GetRef()));
break;
default:
case i::wasm::ValueType::kBottom:
case i::wasm::ValueType::kStmt:
case i::wasm::ValueType::kS128:
UNREACHABLE();
}
}
@ -1872,14 +1877,14 @@ void WebAssemblyGlobalSetValue(
return;
}

switch (receiver->type()) {
case i::wasm::kWasmI32: {
switch (receiver->type().kind()) {
case i::wasm::ValueType::kI32: {
int32_t i32_value = 0;
if (!args[0]->Int32Value(context).To(&i32_value)) return;
receiver->SetI32(i32_value);
break;
}
case i::wasm::kWasmI64: {
case i::wasm::ValueType::kI64: {
auto enabled_features = i::wasm::WasmFeatures::FromIsolate(i_isolate);
if (enabled_features.has_bigint()) {
v8::Local<v8::BigInt> bigint_value;
@ -1890,29 +1895,29 @@ void WebAssemblyGlobalSetValue(
}
break;
}
case i::wasm::kWasmF32: {
case i::wasm::ValueType::kF32: {
double f64_value = 0;
if (!args[0]->NumberValue(context).To(&f64_value)) return;
receiver->SetF32(i::DoubleToFloat32(f64_value));
break;
}
case i::wasm::kWasmF64: {
case i::wasm::ValueType::kF64: {
double f64_value = 0;
if (!args[0]->NumberValue(context).To(&f64_value)) return;
receiver->SetF64(f64_value);
break;
}
case i::wasm::kWasmAnyRef:
case i::wasm::kWasmExnRef: {
case i::wasm::ValueType::kAnyRef:
case i::wasm::ValueType::kExnRef: {
receiver->SetAnyRef(Utils::OpenHandle(*args[0]));
break;
}
case i::wasm::kWasmNullRef:
case i::wasm::ValueType::kNullRef:
if (!receiver->SetNullRef(Utils::OpenHandle(*args[0]))) {
thrower.TypeError("The value of nullref must be null");
}
break;
case i::wasm::kWasmFuncRef: {
case i::wasm::ValueType::kFuncRef: {
if (!receiver->SetFuncRef(i_isolate, Utils::OpenHandle(*args[0]))) {
thrower.TypeError(
"value of an anyfunc reference must be either null or an "
@ -1920,7 +1925,9 @@ void WebAssemblyGlobalSetValue(
}
break;
}
default:
case i::wasm::ValueType::kBottom:
case i::wasm::ValueType::kStmt:
case i::wasm::ValueType::kS128:
UNREACHABLE();
}
}

@ -343,8 +343,7 @@ uint32_t WasmModuleBuilder::AddImport(Vector<const char> name,

uint32_t WasmModuleBuilder::AddGlobalImport(Vector<const char> name,
ValueType type, bool mutability) {
global_imports_.push_back(
{name, ValueTypes::ValueTypeCodeFor(type), mutability});
global_imports_.push_back({name, type.value_type_code(), mutability});
return static_cast<uint32_t>(global_imports_.size() - 1);
}

@ -408,11 +407,11 @@ void WasmModuleBuilder::WriteTo(ZoneBuffer* buffer) const {
buffer->write_u8(kWasmFunctionTypeCode);
buffer->write_size(sig->parameter_count());
for (auto param : sig->parameters()) {
buffer->write_u8(ValueTypes::ValueTypeCodeFor(param));
buffer->write_u8(param.value_type_code());
}
buffer->write_size(sig->return_count());
for (auto ret : sig->returns()) {
buffer->write_u8(ValueTypes::ValueTypeCodeFor(ret));
buffer->write_u8(ret.value_type_code());
}
}
FixupSection(buffer, start);
@ -455,7 +454,7 @@ void WasmModuleBuilder::WriteTo(ZoneBuffer* buffer) const {
size_t start = EmitSection(kTableSectionCode, buffer);
buffer->write_size(tables_.size());
for (const WasmTable& table : tables_) {
buffer->write_u8(ValueTypes::ValueTypeCodeFor(table.type));
buffer->write_u8(table.type.value_type_code());
buffer->write_u8(table.has_maximum ? kHasMaximumFlag : kNoMaximumFlag);
buffer->write_size(table.min_size);
if (table.has_maximum) buffer->write_size(table.max_size);
@ -486,8 +485,8 @@ void WasmModuleBuilder::WriteTo(ZoneBuffer* buffer) const {
size_t start = EmitSection(kGlobalSectionCode, buffer);
buffer->write_size(globals_.size());

for (auto global : globals_) {
buffer->write_u8(ValueTypes::ValueTypeCodeFor(global.type));
for (const WasmGlobal& global : globals_) {
buffer->write_u8(global.type.value_type_code());
buffer->write_u8(global.mutability ? 1 : 0);
switch (global.init.kind) {
case WasmInitExpr::kI32Const:
@ -522,22 +521,22 @@ void WasmModuleBuilder::WriteTo(ZoneBuffer* buffer) const {
break;
case WasmInitExpr::kNone: {
// No initializer, emit a default value.
switch (global.type) {
case kWasmI32:
switch (global.type.kind()) {
case ValueType::kI32:
buffer->write_u8(kExprI32Const);
// LEB encoding of 0.
buffer->write_u8(0);
break;
case kWasmI64:
case ValueType::kI64:
buffer->write_u8(kExprI64Const);
// LEB encoding of 0.
buffer->write_u8(0);
break;
case kWasmF32:
case ValueType::kF32:
buffer->write_u8(kExprF32Const);
buffer->write_f32(0.f);
break;
case kWasmF64:
case ValueType::kF64:
buffer->write_u8(kExprF64Const);
buffer->write_f64(0.);
break;

@ -218,38 +218,41 @@ namespace {
// Converts the given {type} into a string representation that can be used in
// reflective functions. Should be kept in sync with the {GetValueType} helper.
Handle<String> ToValueTypeString(Isolate* isolate, ValueType type) {
// TODO(ahaas/jkummerow): This could be as simple as:
// return isolate->factory()->InternalizeUtf8String(type.type_name());
// if we clean up all occurrences of "anyfunc" in favor of "funcref".
Factory* factory = isolate->factory();
Handle<String> string;
switch (type) {
case i::wasm::kWasmI32: {
switch (type.kind()) {
case i::wasm::ValueType::kI32: {
string = factory->InternalizeUtf8String("i32");
break;
}
case i::wasm::kWasmI64: {
case i::wasm::ValueType::kI64: {
string = factory->InternalizeUtf8String("i64");
break;
}
case i::wasm::kWasmF32: {
case i::wasm::ValueType::kF32: {
string = factory->InternalizeUtf8String("f32");
break;
}
case i::wasm::kWasmF64: {
case i::wasm::ValueType::kF64: {
string = factory->InternalizeUtf8String("f64");
break;
}
case i::wasm::kWasmAnyRef: {
case i::wasm::ValueType::kAnyRef: {
string = factory->InternalizeUtf8String("anyref");
break;
}
case i::wasm::kWasmFuncRef: {
case i::wasm::ValueType::kFuncRef: {
string = factory->InternalizeUtf8String("anyfunc");
break;
}
case i::wasm::kWasmNullRef: {
case i::wasm::ValueType::kNullRef: {
string = factory->InternalizeUtf8String("nullref");
break;
}
case i::wasm::kWasmExnRef: {
case i::wasm::ValueType::kExnRef: {
string = factory->InternalizeUtf8String("exnref");
break;
}
@ -333,13 +336,12 @@ Handle<JSObject> GetTypeForTable(Isolate* isolate, ValueType type,
Factory* factory = isolate->factory();

Handle<String> element;
if (type == ValueType::kWasmFuncRef) {
if (type == kWasmFuncRef) {
// TODO(wasm): We should define the "anyfunc" string in one central place
// and then use that constant everywhere.
element = factory->InternalizeUtf8String("anyfunc");
} else {
DCHECK(WasmFeatures::FromFlags().has_anyref() &&
type == ValueType::kWasmAnyRef);
DCHECK(WasmFeatures::FromFlags().has_anyref() && type == kWasmAnyRef);
element = factory->InternalizeUtf8String("anyref");
}

@ -650,11 +652,11 @@ size_t PrintSignature(Vector<char> buffer, const wasm::FunctionSig* sig) {
buffer += 1;
};
for (wasm::ValueType t : sig->parameters()) {
append_char(wasm::ValueTypes::ShortNameOf(t));
append_char(t.short_name());
}
append_char(':');
for (wasm::ValueType t : sig->returns()) {
append_char(wasm::ValueTypes::ShortNameOf(t));
append_char(t.short_name());
}
buffer[0] = '\0';
return old_size - buffer.size();

@ -123,13 +123,16 @@ ACCESSORS(WasmGlobalObject, untagged_buffer, JSArrayBuffer,
ACCESSORS(WasmGlobalObject, tagged_buffer, FixedArray, kTaggedBufferOffset)
SMI_ACCESSORS(WasmGlobalObject, offset, kOffsetOffset)
SMI_ACCESSORS(WasmGlobalObject, flags, kFlagsOffset)
BIT_FIELD_ACCESSORS(WasmGlobalObject, flags, type, WasmGlobalObject::TypeBits)
wasm::ValueType WasmGlobalObject::type() const {
return wasm::ValueType(TypeBits::decode(flags()));
}
void WasmGlobalObject::set_type(wasm::ValueType value) {
set_flags(TypeBits::update(flags(), value.kind()));
}
BIT_FIELD_ACCESSORS(WasmGlobalObject, flags, is_mutable,
WasmGlobalObject::IsMutableBit)

int WasmGlobalObject::type_size() const {
return wasm::ValueTypes::ElementSizeInBytes(type());
}
int WasmGlobalObject::type_size() const { return type().element_size_bytes(); }

Address WasmGlobalObject::address() const {
DCHECK_NE(type(), wasm::kWasmAnyRef);
@ -155,7 +158,7 @@ double WasmGlobalObject::GetF64() {

Handle<Object> WasmGlobalObject::GetRef() {
// We use this getter for anyref, funcref, and exnref.
DCHECK(wasm::ValueTypes::IsReferenceType(type()));
DCHECK(type().IsReferenceType());
return handle(tagged_buffer().get(offset()), GetIsolate());
}

@ -396,7 +399,7 @@ OPTIONAL_ACCESSORS(WasmDebugInfo, c_wasm_entry_map, Managed<wasm::SignatureMap>,
#undef PRIMITIVE_ACCESSORS

wasm::ValueType WasmTableObject::type() {
return static_cast<wasm::ValueType>(raw_type());
return wasm::ValueType(static_cast<wasm::ValueType::Kind>(raw_type()));
}

bool WasmMemoryObject::has_maximum_pages() { return maximum_pages() >= 0; }

@ -300,7 +300,7 @@ Handle<WasmTableObject> WasmTableObject::New(Isolate* isolate,
table_obj->set_entries(*backing_store);
table_obj->set_current_length(initial);
table_obj->set_maximum_length(*max);
table_obj->set_raw_type(static_cast<int>(type));
table_obj->set_raw_type(static_cast<int>(type.kind()));

table_obj->set_dispatch_tables(ReadOnlyRoots(isolate).empty_fixed_array());
if (entries != nullptr) {
@ -961,7 +961,7 @@ MaybeHandle<WasmGlobalObject> WasmGlobalObject::New(
global_obj->set_is_mutable(is_mutable);
}

if (wasm::ValueTypes::IsReferenceType(type)) {
if (type.IsReferenceType()) {
DCHECK(maybe_untagged_buffer.is_null());
Handle<FixedArray> tagged_buffer;
if (!maybe_tagged_buffer.ToHandle(&tagged_buffer)) {
@ -973,7 +973,7 @@ MaybeHandle<WasmGlobalObject> WasmGlobalObject::New(
global_obj->set_tagged_buffer(*tagged_buffer);
} else {
DCHECK(maybe_tagged_buffer.is_null());
uint32_t type_size = wasm::ValueTypes::ElementSizeInBytes(type);
uint32_t type_size = type.element_size_bytes();

Handle<JSArrayBuffer> untagged_buffer;
if (!maybe_untagged_buffer.ToHandle(&untagged_buffer)) {
@ -1484,7 +1484,7 @@ void WasmInstanceObject::ImportWasmJSFunctionIntoTable(
// static
uint8_t* WasmInstanceObject::GetGlobalStorage(
Handle<WasmInstanceObject> instance, const wasm::WasmGlobal& global) {
DCHECK(!wasm::ValueTypes::IsReferenceType(global.type));
DCHECK(!global.type.IsReferenceType());
if (global.mutability && global.imported) {
return reinterpret_cast<byte*>(
instance->imported_mutable_globals()[global.index]);
@ -1497,7 +1497,7 @@ uint8_t* WasmInstanceObject::GetGlobalStorage(
std::pair<Handle<FixedArray>, uint32_t>
WasmInstanceObject::GetGlobalBufferAndIndex(Handle<WasmInstanceObject> instance,
const wasm::WasmGlobal& global) {
DCHECK(wasm::ValueTypes::IsReferenceType(global.type));
DCHECK(global.type.IsReferenceType());
Isolate* isolate = instance->GetIsolate();
if (global.mutability && global.imported) {
Handle<FixedArray> buffer(
@ -1515,7 +1515,7 @@ WasmInstanceObject::GetGlobalBufferAndIndex(Handle<WasmInstanceObject> instance,
wasm::WasmValue WasmInstanceObject::GetGlobalValue(
Handle<WasmInstanceObject> instance, const wasm::WasmGlobal& global) {
Isolate* isolate = instance->GetIsolate();
if (wasm::ValueTypes::IsReferenceType(global.type)) {
if (global.type.IsReferenceType()) {
Handle<FixedArray> global_buffer; // The buffer of the global.
uint32_t global_index = 0; // The index into the buffer.
std::tie(global_buffer, global_index) =
@ -1524,9 +1524,9 @@ wasm::WasmValue WasmInstanceObject::GetGlobalValue(
}
Address ptr = reinterpret_cast<Address>(GetGlobalStorage(instance, global));
using wasm::Simd128;
switch (global.type) {
switch (global.type.kind()) {
#define CASE_TYPE(valuetype, ctype) \
case wasm::valuetype: \
case wasm::ValueType::valuetype: \
return wasm::WasmValue(base::ReadLittleEndianValue<ctype>(ptr));
FOREACH_WASMVALUE_CTYPES(CASE_TYPE)
#undef CASE_TYPE
@ -1649,8 +1649,7 @@ namespace {
constexpr uint32_t kBytesPerExceptionValuesArrayElement = 2;

size_t ComputeEncodedElementSize(wasm::ValueType type) {
size_t byte_size =
static_cast<size_t>(wasm::ValueTypes::ElementSizeInBytes(type));
size_t byte_size = type.element_size_bytes();
DCHECK_EQ(byte_size % kBytesPerExceptionValuesArrayElement, 0);
DCHECK_LE(1, byte_size / kBytesPerExceptionValuesArrayElement);
return byte_size / kBytesPerExceptionValuesArrayElement;
@ -1666,28 +1665,29 @@ uint32_t WasmExceptionPackage::GetEncodedSize(
const wasm::WasmExceptionSig* sig = exception->sig;
uint32_t encoded_size = 0;
for (size_t i = 0; i < sig->parameter_count(); ++i) {
switch (sig->GetParam(i)) {
case wasm::kWasmI32:
case wasm::kWasmF32:
switch (sig->GetParam(i).kind()) {
case wasm::ValueType::kI32:
case wasm::ValueType::kF32:
DCHECK_EQ(2, ComputeEncodedElementSize(sig->GetParam(i)));
encoded_size += 2;
break;
case wasm::kWasmI64:
case wasm::kWasmF64:
case wasm::ValueType::kI64:
case wasm::ValueType::kF64:
DCHECK_EQ(4, ComputeEncodedElementSize(sig->GetParam(i)));
encoded_size += 4;
break;
case wasm::kWasmS128:
case wasm::ValueType::kS128:
DCHECK_EQ(8, ComputeEncodedElementSize(sig->GetParam(i)));
encoded_size += 8;
break;
case wasm::kWasmAnyRef:
case wasm::kWasmFuncRef:
case wasm::kWasmNullRef:
case wasm::kWasmExnRef:
case wasm::ValueType::kAnyRef:
case wasm::ValueType::kFuncRef:
case wasm::ValueType::kNullRef:
case wasm::ValueType::kExnRef:
encoded_size += 1;
break;
default:
case wasm::ValueType::kStmt:
case wasm::ValueType::kBottom:
UNREACHABLE();
}
}

@ -74,7 +74,7 @@ extern class WasmMemoryObject extends JSObject {
instances: WeakArrayList|Undefined;
}

type WasmValueType extends uint8 constexpr 'wasm::ValueType';
type WasmValueType extends uint8 constexpr 'wasm::ValueType::Kind';
bitfield struct WasmGlobalObjectFlags extends uint31 {
Type: WasmValueType: 8 bit; // "type" is a reserved word.
is_mutable: bool: 1 bit;

@ -443,12 +443,12 @@ bool WasmOpcodes::IsSimdPostMvpOpcode(WasmOpcode opcode) {
std::ostream& operator<<(std::ostream& os, const FunctionSig& sig) {
if (sig.return_count() == 0) os << "v";
for (auto ret : sig.returns()) {
os << ValueTypes::ShortNameOf(ret);
os << ret.short_name();
}
os << "_";
if (sig.parameter_count() == 0) os << "v";
for (auto param : sig.parameters()) {
os << ValueTypes::ShortNameOf(param);
os << param.short_name();
}
return os;
}

@ -34,11 +34,11 @@ CallDescriptor* CreateCallDescriptor(Zone* zone, int return_count,
wasm::FunctionSig::Builder builder(zone, return_count, param_count);

for (int i = 0; i < param_count; i++) {
builder.AddParam(wasm::ValueTypes::ValueTypeFor(type));
builder.AddParam(wasm::ValueType::For(type));
}

for (int i = 0; i < return_count; i++) {
builder.AddReturn(wasm::ValueTypes::ValueTypeFor(type));
builder.AddReturn(wasm::ValueType::For(type));
}
return compiler::GetWasmCallDescriptor(zone, builder.Build());
}

@ -204,7 +204,7 @@ std::ostream& operator<<(std::ostream& out, const DebugSideTableEntry& entry) {
out << "{";
const char* comma = "";
for (auto& v : entry.values) {
out << comma << ValueTypes::TypeName(v.type) << " ";
out << comma << v.type.type_name() << " ";
switch (v.kind) {
case DebugSideTable::Entry::kConstant:
out << "const:" << v.i32_const;

@ -1419,7 +1419,7 @@ WASM_EXEC_TEST(StoreMem_offset_oob_i64) {
|
||||
WASM_LOAD_MEM(machineTypes[m], WASM_ZERO)),
|
||||
WASM_ZERO);
|
||||
|
||||
byte memsize = ValueTypes::MemSize(machineTypes[m]);
|
||||
byte memsize = machineTypes[m].MemSize();
|
||||
uint32_t boundary = num_bytes - 8 - memsize;
|
||||
CHECK_EQ(0, r.Call(boundary)); // in bounds.
|
||||
CHECK_EQ(0, memcmp(&memory[0], &memory[8 + boundary], memsize));
|
||||
@ -1536,9 +1536,9 @@ static void Run_WasmMixedCall_N(ExecutionTier execution_tier, int start) {
|
||||
// Build the selector function.
|
||||
// =========================================================================
|
||||
FunctionSig::Builder b(&zone, 1, num_params);
|
||||
b.AddReturn(ValueTypes::ValueTypeFor(result));
|
||||
b.AddReturn(ValueType::For(result));
|
||||
for (int i = 0; i < num_params; i++) {
|
||||
b.AddParam(ValueTypes::ValueTypeFor(memtypes[i]));
|
||||
b.AddParam(ValueType::For(memtypes[i]));
|
||||
}
|
||||
WasmFunctionCompiler& t = r.NewFunction(b.Build());
|
||||
BUILD(t, WASM_GET_LOCAL(which));
|
||||
@ -1558,7 +1558,7 @@ static void Run_WasmMixedCall_N(ExecutionTier execution_tier, int start) {
ADD_CODE(code, WASM_CALL_FUNCTION0(t.function_index()));

// Store the result in a local.
byte local_index = r.AllocateLocal(ValueTypes::ValueTypeFor(result));
byte local_index = r.AllocateLocal(ValueType::For(result));
ADD_CODE(code, kExprLocalSet, local_index);

// Store the result in memory.
@ -1575,7 +1575,7 @@ static void Run_WasmMixedCall_N(ExecutionTier execution_tier, int start) {
r.builder().RandomizeMemory();
CHECK_EQ(kExpected, r.Call());

int size = ValueTypes::MemSize(result);
int size = result.MemSize();
for (int i = 0; i < size; i++) {
int base = (which + 1) * kElemSize;
byte expected = r.builder().raw_mem_at<byte>(base + i);

@ -1571,8 +1571,7 @@ WASM_EXEC_TEST(LoadMem_offset_oob) {
r.builder().RandomizeMemory(1116 + static_cast<int>(m));

constexpr byte offset = 8;
uint32_t boundary =
num_bytes - offset - ValueTypes::MemSize(machineTypes[m]);
uint32_t boundary = num_bytes - offset - machineTypes[m].MemSize();

BUILD(r, WASM_LOAD_MEM_OFFSET(machineTypes[m], offset, WASM_GET_LOCAL(0)),
WASM_DROP, WASM_ZERO);
@ -1718,7 +1717,7 @@ WASM_EXEC_TEST(StoreMem_offset_oob) {
WASM_LOAD_MEM(machineTypes[m], WASM_ZERO)),
WASM_ZERO);

byte memsize = ValueTypes::MemSize(machineTypes[m]);
byte memsize = machineTypes[m].MemSize();
uint32_t boundary = num_bytes - 8 - memsize;
CHECK_EQ(0, r.Call(boundary)); // in bounds.
CHECK_EQ(0, memcmp(&memory[0], &memory[8 + boundary], memsize));
@ -2643,9 +2642,9 @@ static void Run_WasmMixedCall_N(ExecutionTier execution_tier, int start) {
// Build the selector function.
// =========================================================================
FunctionSig::Builder b(&zone, 1, num_params);
b.AddReturn(ValueTypes::ValueTypeFor(result));
b.AddReturn(ValueType::For(result));
for (int i = 0; i < num_params; ++i) {
b.AddParam(ValueTypes::ValueTypeFor(memtypes[i]));
b.AddParam(ValueType::For(memtypes[i]));
}
WasmFunctionCompiler& t = r.NewFunction(b.Build());
BUILD(t, WASM_GET_LOCAL(which));
@ -2665,7 +2664,7 @@ static void Run_WasmMixedCall_N(ExecutionTier execution_tier, int start) {
ADD_CODE(code, WASM_CALL_FUNCTION0(t.function_index()));

// Store the result in a local.
byte local_index = r.AllocateLocal(ValueTypes::ValueTypeFor(result));
byte local_index = r.AllocateLocal(ValueType::For(result));
ADD_CODE(code, kExprLocalSet, local_index);

// Store the result in memory.
@ -2682,7 +2681,7 @@ static void Run_WasmMixedCall_N(ExecutionTier execution_tier, int start) {
r.builder().RandomizeMemory();
CHECK_EQ(kExpected, r.Call());

int size = ValueTypes::MemSize(result);
int size = result.MemSize();
for (int i = 0; i < size; ++i) {
int base = (which + 1) * kElemSize;
byte expected = r.builder().raw_mem_at<byte>(base + i);
@ -2740,7 +2739,7 @@ WASM_EXEC_TEST(MultiReturnSub) {
template <typename T>
void RunMultiReturnSelect(ExecutionTier execution_tier, const T* inputs) {
EXPERIMENTAL_FLAG_SCOPE(mv);
ValueType type = ValueTypes::ValueTypeFor(MachineTypeForC<T>());
ValueType type = ValueType::For(MachineTypeForC<T>());
ValueType storage[] = {type, type, type, type, type, type};
const size_t kNumReturns = 2;
const size_t kNumParams = arraysize(storage) - kNumReturns;
@ -3498,7 +3497,7 @@ void BinOpOnDifferentRegisters(
for (int i = 0; i < num_locals; ++i) {
ADD_CODE(
init_locals_code,
WASM_SET_LOCAL(i, WASM_LOAD_MEM(ValueTypes::MachineTypeFor(type),
WASM_SET_LOCAL(i, WASM_LOAD_MEM(type.machine_type(),
WASM_I32V_2(sizeof(ctype) * i))));
}
// {write_locals_code} is shared by all code generated in the loop below.
@ -3506,7 +3505,7 @@ void BinOpOnDifferentRegisters(
// Write locals back into memory, shifted by one element to the right.
for (int i = 0; i < num_locals; ++i) {
ADD_CODE(write_locals_code,
WASM_STORE_MEM(ValueTypes::MachineTypeFor(type),
WASM_STORE_MEM(type.machine_type(),
WASM_I32V_2(sizeof(ctype) * (i + 1)),
WASM_GET_LOCAL(i)));
}
@ -3521,7 +3520,7 @@ void BinOpOnDifferentRegisters(
std::vector<byte> code(init_locals_code);
ADD_CODE(code,
// Store the result of the binary operation at memory[0].
WASM_STORE_MEM(ValueTypes::MachineTypeFor(type), WASM_ZERO,
WASM_STORE_MEM(type.machine_type(), WASM_ZERO,
WASM_BINOP(opcode, WASM_GET_LOCAL(lhs),
WASM_GET_LOCAL(rhs))),
// Return 0.

@ -190,17 +190,17 @@ struct WasmValWrapper {
// Only needed in debug builds. Avoid unused warning otherwise.
#ifdef DEBUG
std::ostream& operator<<(std::ostream& out, const WasmValWrapper& wrapper) {
switch (wrapper.val.type()) {
case kWasmI32:
switch (wrapper.val.type().kind()) {
case ValueType::kI32:
out << "i32: " << wrapper.val.to<int32_t>();
break;
case kWasmI64:
case ValueType::kI64:
out << "i64: " << wrapper.val.to<int64_t>();
break;
case kWasmF32:
case ValueType::kF32:
out << "f32: " << wrapper.val.to<float>();
break;
case kWasmF64:
case ValueType::kF64:
out << "f64: " << wrapper.val.to<double>();
break;
default:

@ -322,7 +322,7 @@ CompilationEnv TestingModuleBuilder::CreateCompilationEnv(
}

const WasmGlobal* TestingModuleBuilder::AddGlobal(ValueType type) {
byte size = ValueTypes::MemSize(ValueTypes::MachineTypeFor(type));
byte size = type.element_size_bytes();
global_offset = (global_offset + size - 1) & ~(size - 1); // align
test_module_->globals.push_back(
{type, true, WasmInitExpr(), {global_offset}, false, false});
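
In the AddGlobal hunk above, the global's size now comes directly from type.element_size_bytes(), and global_offset is then rounded up to a multiple of that (power-of-two) size with the usual align-up expression. A quick worked example with illustrative numbers:

// aligned = (offset + size - 1) & ~(size - 1), assuming size is a power of two
// e.g. offset = 5, size = 8 (an i64/f64 global):
//      (5 + 7) & ~7  ==  12 & ~7  ==  8, so the global lands at offset 8
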
@ -590,10 +590,10 @@ const FunctionSig* WasmRunnerBase::CreateSig(MachineType return_type,
// Convert machine types to local types, and check that there are no
// MachineType::None()'s in the parameters.
int idx = 0;
if (return_count) sig_types[idx++] = ValueTypes::ValueTypeFor(return_type);
if (return_count) sig_types[idx++] = ValueType::For(return_type);
for (MachineType param : param_types) {
CHECK_NE(MachineType::None(), param);
sig_types[idx++] = ValueTypes::ValueTypeFor(param);
sig_types[idx++] = ValueType::For(param);
}
return new (&zone_) FunctionSig(return_count, param_count, sig_types);
}

@ -104,8 +104,7 @@ class TestingModuleBuilder {
}

template <typename T>
T* AddGlobal(
ValueType type = ValueTypes::ValueTypeFor(MachineTypeForC<T>())) {
T* AddGlobal(ValueType type = ValueType::For(MachineTypeForC<T>())) {
const WasmGlobal* global = AddGlobal(type);
return reinterpret_cast<T*>(globals_data_ + global->offset);
}

@ -89,9 +89,8 @@
#define WASM_BLOCK_F(...) kExprBlock, kLocalF32, __VA_ARGS__, kExprEnd
#define WASM_BLOCK_D(...) kExprBlock, kLocalF64, __VA_ARGS__, kExprEnd

#define WASM_BLOCK_T(t, ...) \
kExprBlock, static_cast<byte>(ValueTypes::ValueTypeCodeFor(t)), __VA_ARGS__, \
kExprEnd
#define WASM_BLOCK_T(t, ...) \
kExprBlock, static_cast<byte>((t).value_type_code()), __VA_ARGS__, kExprEnd

#define WASM_BLOCK_X(index, ...) \
kExprBlock, static_cast<byte>(index), __VA_ARGS__, kExprEnd
@ -104,18 +103,16 @@
#define WASM_LOOP_F(...) kExprLoop, kLocalF32, __VA_ARGS__, kExprEnd
#define WASM_LOOP_D(...) kExprLoop, kLocalF64, __VA_ARGS__, kExprEnd

#define WASM_LOOP_T(t, ...) \
kExprLoop, static_cast<byte>(ValueTypes::ValueTypeCodeFor(t)), __VA_ARGS__, \
kExprEnd
#define WASM_LOOP_T(t, ...) \
kExprLoop, static_cast<byte>((t).value_type_code()), __VA_ARGS__, kExprEnd

#define WASM_LOOP_X(index, ...) \
kExprLoop, static_cast<byte>(index), __VA_ARGS__, kExprEnd

#define WASM_IF(cond, ...) cond, kExprIf, kLocalVoid, __VA_ARGS__, kExprEnd

#define WASM_IF_T(t, cond, ...) \
cond, kExprIf, static_cast<byte>(ValueTypes::ValueTypeCodeFor(t)), \
__VA_ARGS__, kExprEnd
#define WASM_IF_T(t, cond, ...) \
cond, kExprIf, static_cast<byte>((t).value_type_code()), __VA_ARGS__, kExprEnd

#define WASM_IF_X(index, cond, ...) \
cond, kExprIf, static_cast<byte>(index), __VA_ARGS__, kExprEnd
@ -132,16 +129,16 @@
#define WASM_IF_ELSE_D(cond, tstmt, fstmt) \
cond, kExprIf, kLocalF64, tstmt, kExprElse, fstmt, kExprEnd

#define WASM_IF_ELSE_T(t, cond, tstmt, fstmt) \
cond, kExprIf, static_cast<byte>(ValueTypes::ValueTypeCodeFor(t)), tstmt, \
kExprElse, fstmt, kExprEnd
#define WASM_IF_ELSE_T(t, cond, tstmt, fstmt) \
cond, kExprIf, static_cast<byte>((t).value_type_code()), tstmt, kExprElse, \
fstmt, kExprEnd

#define WASM_IF_ELSE_X(index, cond, tstmt, fstmt) \
cond, kExprIf, static_cast<byte>(index), tstmt, kExprElse, fstmt, kExprEnd

#define WASM_TRY_CATCH_T(t, trystmt, catchstmt) \
kExprTry, static_cast<byte>(ValueTypes::ValueTypeCodeFor(t)), trystmt, \
kExprCatch, catchstmt, kExprEnd
#define WASM_TRY_CATCH_T(t, trystmt, catchstmt) \
kExprTry, static_cast<byte>((t).value_type_code()), trystmt, kExprCatch, \
catchstmt, kExprEnd

#define WASM_SELECT(tval, fval, cond) tval, fval, cond, kExprSelect
#define WASM_SELECT_I(tval, fval, cond) \

@ -89,29 +89,29 @@ bool InterpretWasmModuleForTesting(Isolate* isolate,

// Fill the parameters up with default values.
for (size_t i = argc; i < param_count; ++i) {
switch (signature->GetParam(i)) {
case kWasmI32:
switch (signature->GetParam(i).kind()) {
case ValueType::kI32:
arguments[i] = WasmValue(int32_t{0});
break;
case kWasmI64:
case ValueType::kI64:
arguments[i] = WasmValue(int64_t{0});
break;
case kWasmF32:
case ValueType::kF32:
arguments[i] = WasmValue(0.0f);
break;
case kWasmF64:
case ValueType::kF64:
arguments[i] = WasmValue(0.0);
break;
case kWasmAnyRef:
case kWasmFuncRef:
case kWasmNullRef:
case kWasmExnRef:
case ValueType::kAnyRef:
case ValueType::kFuncRef:
case ValueType::kNullRef:
case ValueType::kExnRef:
arguments[i] =
WasmValue(Handle<Object>::cast(isolate->factory()->null_value()));
break;
case kWasmStmt:
case kWasmBottom:
case kWasmS128:
case ValueType::kStmt:
case ValueType::kBottom:
case ValueType::kS128:
UNREACHABLE();
}
}

@ -121,14 +121,14 @@ CallDescriptor* CreateRandomCallDescriptor(Zone* zone, size_t return_count,
wasm::FunctionSig::Builder builder(zone, return_count, param_count);
for (size_t i = 0; i < param_count; i++) {
MachineType type = RandomType(input);
builder.AddParam(wasm::ValueTypes::ValueTypeFor(type));
builder.AddParam(wasm::ValueType::For(type));
}
// Read the end byte of the parameters.
input->NextInt8(1);

for (size_t i = 0; i < return_count; i++) {
MachineType type = RandomType(input);
builder.AddReturn(wasm::ValueTypes::ValueTypeFor(type));
builder.AddReturn(wasm::ValueType::For(type));
}

return compiler::GetWasmCallDescriptor(zone, builder.Build());

@ -97,7 +97,7 @@ ValueType GetValueType(DataRange* data) {
}

class WasmGenerator {
template <WasmOpcode Op, ValueType... Args>
template <WasmOpcode Op, ValueType::Kind... Args>
void op(DataRange* data) {
Generate<Args...>(data);
builder_->Emit(Op);
@ -109,8 +109,7 @@ class WasmGenerator {
ValueType br_type)
: gen_(gen) {
gen->blocks_.push_back(br_type);
gen->builder_->EmitWithU8(block_type,
ValueTypes::ValueTypeCodeFor(result_type));
gen->builder_->EmitWithU8(block_type, result_type.value_type_code());
}

~BlockScope() {
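
The fuzzer hunks here and below all make the same substitution: template parameters change from ValueType to ValueType::Kind, and ValueType(T) is wrapped back in at the use sites. The apparent reason (an inference from the code, not stated in the change) is that non-type template parameters must be integral, enum, or pointer types before C++20; the old enum ValueType qualified, but the new class does not, so the nested Kind enum keeps the template machinery compiling. A minimal stand-alone sketch of that constraint, with hypothetical names:

// Sketch with hypothetical names, not v8 code.
#include <cstdint>

enum class Kind : uint8_t { kI32, kI64 };

class Type {
 public:
  constexpr explicit Type(Kind k) : kind_(k) {}
  constexpr Kind kind() const { return kind_; }

 private:
  Kind kind_;
};

template <Kind K>  // fine: an enum is a valid non-type template parameter
void emit() {
  Type t(K);  // rebuild the class value inside, as the fuzzer now does
  (void)t;
}

// template <Type T> void emit();  // ill-formed before C++20 (class-type NTTP)
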
@ -122,28 +121,28 @@ class WasmGenerator {
|
||||
WasmGenerator* const gen_;
|
||||
};
|
||||
|
||||
template <ValueType T>
|
||||
template <ValueType::Kind T>
|
||||
void block(DataRange* data) {
|
||||
BlockScope block_scope(this, kExprBlock, T, T);
|
||||
BlockScope block_scope(this, kExprBlock, ValueType(T), ValueType(T));
|
||||
Generate<T>(data);
|
||||
}
|
||||
|
||||
template <ValueType T>
|
||||
template <ValueType::Kind T>
|
||||
void loop(DataRange* data) {
|
||||
// When breaking to a loop header, don't provide any input value (hence
|
||||
// kWasmStmt).
|
||||
BlockScope block_scope(this, kExprLoop, T, kWasmStmt);
|
||||
BlockScope block_scope(this, kExprLoop, ValueType(T), kWasmStmt);
|
||||
Generate<T>(data);
|
||||
}
|
||||
|
||||
enum IfType { kIf, kIfElse };
|
||||
|
||||
template <ValueType T, IfType type>
|
||||
template <ValueType::Kind T, IfType type>
|
||||
void if_(DataRange* data) {
|
||||
static_assert(T == kWasmStmt || type == kIfElse,
|
||||
static_assert(T == ValueType::kStmt || type == kIfElse,
|
||||
"if without else cannot produce a value");
|
||||
Generate<kWasmI32>(data);
|
||||
BlockScope block_scope(this, kExprIf, T, T);
|
||||
Generate<ValueType::kI32>(data);
|
||||
BlockScope block_scope(this, kExprIf, ValueType(T), ValueType(T));
|
||||
Generate<T>(data);
|
||||
if (type == kIfElse) {
|
||||
builder_->Emit(kExprElse);
|
||||
@ -162,7 +161,7 @@ class WasmGenerator {
|
||||
kExprBr, static_cast<uint32_t>(blocks_.size()) - 1 - target_block);
|
||||
}
|
||||
|
||||
template <ValueType wanted_type>
|
||||
template <ValueType::Kind wanted_type>
|
||||
void br_if(DataRange* data) {
|
||||
// There is always at least the block representing the function body.
|
||||
DCHECK(!blocks_.empty());
|
||||
@ -173,7 +172,7 @@ class WasmGenerator {
|
||||
Generate(kWasmI32, data);
|
||||
builder_->EmitWithI32V(
|
||||
kExprBrIf, static_cast<uint32_t>(blocks_.size()) - 1 - target_block);
|
||||
ConvertOrGenerate(break_type, wanted_type, data);
|
||||
ConvertOrGenerate(break_type, ValueType(wanted_type), data);
|
||||
}
|
||||
|
||||
// TODO(eholk): make this function constexpr once gcc supports it
|
||||
@ -274,13 +273,13 @@ class WasmGenerator {
|
||||
}
|
||||
}
|
||||
|
||||
template <WasmOpcode memory_op, ValueType... arg_types>
|
||||
template <WasmOpcode memory_op, ValueType::Kind... arg_types>
|
||||
void memop(DataRange* data) {
|
||||
const uint8_t align = data->get<uint8_t>() % (max_alignment(memory_op) + 1);
|
||||
const uint32_t offset = data->get<uint32_t>();
|
||||
|
||||
// Generate the index and the arguments, if any.
|
||||
Generate<kWasmI32, arg_types...>(data);
|
||||
Generate<ValueType::kI32, arg_types...>(data);
|
||||
|
||||
if (WasmOpcodes::IsPrefixOpcode(static_cast<WasmOpcode>(memory_op >> 8))) {
|
||||
DCHECK(memory_op >> 8 == kAtomicPrefix || memory_op >> 8 == kSimdPrefix);
|
||||
@ -292,7 +291,7 @@ class WasmGenerator {
|
||||
builder_->EmitU32V(offset);
|
||||
}
|
||||
|
||||
template <WasmOpcode Op, ValueType... Args>
|
||||
template <WasmOpcode Op, ValueType::Kind... Args>
|
||||
void atomic_op(DataRange* data) {
|
||||
const uint8_t align = data->get<uint8_t>() % (max_alignment(Op) + 1);
|
||||
const uint32_t offset = data->get<uint32_t>();
|
||||
@ -304,7 +303,7 @@ class WasmGenerator {
|
||||
builder_->EmitU32V(offset);
|
||||
}
|
||||
|
||||
template <WasmOpcode Op, ValueType... Args>
|
||||
template <WasmOpcode Op, ValueType::Kind... Args>
|
||||
void simd_op(DataRange* data) {
|
||||
Generate<Args...>(data);
|
||||
builder_->EmitWithPrefix(Op);
|
||||
@ -315,21 +314,21 @@ class WasmGenerator {
|
||||
builder_->Emit(kExprDrop);
|
||||
}
|
||||
|
||||
template <ValueType wanted_type>
|
||||
template <ValueType::Kind wanted_type>
|
||||
void call(DataRange* data) {
|
||||
call(data, wanted_type);
|
||||
call(data, ValueType(wanted_type));
|
||||
}
|
||||
|
||||
void Convert(ValueType src, ValueType dst) {
|
||||
auto idx = [](ValueType t) -> int {
|
||||
switch (t) {
|
||||
case kWasmI32:
|
||||
switch (t.kind()) {
|
||||
case ValueType::kI32:
|
||||
return 0;
|
||||
case kWasmI64:
|
||||
case ValueType::kI64:
|
||||
return 1;
|
||||
case kWasmF32:
|
||||
case ValueType::kF32:
|
||||
return 2;
|
||||
case kWasmF64:
|
||||
case ValueType::kF64:
|
||||
return 3;
|
||||
default:
|
||||
UNREACHABLE();
|
||||
@ -402,32 +401,34 @@ class WasmGenerator {
|
||||
return {index, type};
|
||||
}
|
||||
|
||||
template <ValueType wanted_type>
|
||||
template <ValueType::Kind wanted_type>
|
||||
void local_op(DataRange* data, WasmOpcode opcode) {
|
||||
Var local = GetRandomLocal(data);
|
||||
// If there are no locals and no parameters, just generate any value (if a
|
||||
// value is needed), or do nothing.
|
||||
if (!local.is_valid()) {
|
||||
if (wanted_type == kWasmStmt) return;
|
||||
if (wanted_type == ValueType::kStmt) return;
|
||||
return Generate<wanted_type>(data);
|
||||
}
|
||||
|
||||
if (opcode != kExprLocalGet) Generate(local.type, data);
|
||||
builder_->EmitWithU32V(opcode, local.index);
|
||||
if (wanted_type != kWasmStmt && local.type != wanted_type) {
|
||||
Convert(local.type, wanted_type);
|
||||
if (wanted_type != ValueType::kStmt && local.type.kind() != wanted_type) {
|
||||
Convert(local.type, ValueType(wanted_type));
|
||||
}
|
||||
}
|
||||
|
||||
template <ValueType wanted_type>
|
||||
template <ValueType::Kind wanted_type>
|
||||
void get_local(DataRange* data) {
|
||||
static_assert(wanted_type != kWasmStmt, "illegal type");
|
||||
static_assert(wanted_type != ValueType::kStmt, "illegal type");
|
||||
local_op<wanted_type>(data, kExprLocalGet);
|
||||
}
|
||||
|
||||
void set_local(DataRange* data) { local_op<kWasmStmt>(data, kExprLocalSet); }
|
||||
void set_local(DataRange* data) {
|
||||
local_op<ValueType::kStmt>(data, kExprLocalSet);
|
||||
}
|
||||
|
||||
template <ValueType wanted_type>
|
||||
template <ValueType::Kind wanted_type>
|
||||
void tee_local(DataRange* data) {
|
||||
local_op<wanted_type>(data, kExprLocalTee);
|
||||
}
|
||||
@ -455,44 +456,44 @@ class WasmGenerator {
|
||||
return {index, type};
|
||||
}
|
||||
|
||||
template <ValueType wanted_type>
|
||||
template <ValueType::Kind wanted_type>
|
||||
void global_op(DataRange* data) {
|
||||
constexpr bool is_set = wanted_type == kWasmStmt;
|
||||
constexpr bool is_set = wanted_type == ValueType::kStmt;
|
||||
Var global = GetRandomGlobal(data, is_set);
|
||||
// If there are no globals, just generate any value (if a value is needed),
|
||||
// or do nothing.
|
||||
if (!global.is_valid()) {
|
||||
if (wanted_type == kWasmStmt) return;
|
||||
if (wanted_type == ValueType::kStmt) return;
|
||||
return Generate<wanted_type>(data);
|
||||
}
|
||||
|
||||
if (is_set) Generate(global.type, data);
|
||||
builder_->EmitWithU32V(is_set ? kExprGlobalSet : kExprGlobalGet,
|
||||
global.index);
|
||||
if (!is_set && global.type != wanted_type) {
|
||||
Convert(global.type, wanted_type);
|
||||
if (!is_set && global.type.kind() != wanted_type) {
|
||||
Convert(global.type, ValueType(wanted_type));
|
||||
}
|
||||
}
|
||||
|
||||
template <ValueType wanted_type>
|
||||
template <ValueType::Kind wanted_type>
|
||||
void get_global(DataRange* data) {
|
||||
static_assert(wanted_type != kWasmStmt, "illegal type");
|
||||
static_assert(wanted_type != ValueType::kStmt, "illegal type");
|
||||
global_op<wanted_type>(data);
|
||||
}
|
||||
|
||||
template <ValueType select_type>
|
||||
template <ValueType::Kind select_type>
|
||||
void select_with_type(DataRange* data) {
|
||||
static_assert(select_type != kWasmStmt, "illegal type for select");
|
||||
Generate<select_type, select_type, kWasmI32>(data);
|
||||
static_assert(select_type != ValueType::kStmt, "illegal type for select");
|
||||
Generate<select_type, select_type, ValueType::kI32>(data);
|
||||
// num_types is always 1.
|
||||
uint8_t num_types = 1;
|
||||
builder_->EmitWithU8U8(kExprSelectWithType, num_types,
|
||||
ValueTypes::ValueTypeCodeFor(select_type));
|
||||
ValueType(select_type).value_type_code());
|
||||
}
|
||||
|
||||
void set_global(DataRange* data) { global_op<kWasmStmt>(data); }
|
||||
void set_global(DataRange* data) { global_op<ValueType::kStmt>(data); }
|
||||
|
||||
template <ValueType... Types>
|
||||
template <ValueType::Kind... Types>
|
||||
void sequence(DataRange* data) {
|
||||
Generate<Types...>(data);
|
||||
}
|
||||
@ -550,10 +551,10 @@ class WasmGenerator {
|
||||
|
||||
void Generate(ValueType type, DataRange* data);
|
||||
|
||||
template <ValueType T>
|
||||
template <ValueType::Kind T>
|
||||
void Generate(DataRange* data);
|
||||
|
||||
template <ValueType T1, ValueType T2, ValueType... Ts>
|
||||
template <ValueType::Kind T1, ValueType::Kind T2, ValueType::Kind... Ts>
|
||||
void Generate(DataRange* data) {
|
||||
// TODO(clemensb): Implement a more even split.
|
||||
auto first_data = data->split();
|
||||
@ -578,42 +579,45 @@ class WasmGenerator {
|
||||
};
|
||||
|
||||
template <>
|
||||
void WasmGenerator::Generate<kWasmStmt>(DataRange* data) {
|
||||
void WasmGenerator::Generate<ValueType::kStmt>(DataRange* data) {
|
||||
GeneratorRecursionScope rec_scope(this);
|
||||
if (recursion_limit_reached() || data->size() == 0) return;
|
||||
|
||||
constexpr GenerateFn alternatives[] = {
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmStmt>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmStmt, kWasmStmt, kWasmStmt>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmStmt, kWasmStmt, kWasmStmt,
|
||||
kWasmStmt, kWasmStmt, kWasmStmt, kWasmStmt>,
|
||||
&WasmGenerator::block<kWasmStmt>,
|
||||
&WasmGenerator::loop<kWasmStmt>,
|
||||
&WasmGenerator::if_<kWasmStmt, kIf>,
|
||||
&WasmGenerator::if_<kWasmStmt, kIfElse>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kStmt,
|
||||
ValueType::kStmt, ValueType::kStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kStmt,
|
||||
ValueType::kStmt, ValueType::kStmt,
|
||||
ValueType::kStmt, ValueType::kStmt,
|
||||
ValueType::kStmt, ValueType::kStmt>,
|
||||
&WasmGenerator::block<ValueType::kStmt>,
|
||||
&WasmGenerator::loop<ValueType::kStmt>,
|
||||
&WasmGenerator::if_<ValueType::kStmt, kIf>,
|
||||
&WasmGenerator::if_<ValueType::kStmt, kIfElse>,
|
||||
&WasmGenerator::br,
|
||||
&WasmGenerator::br_if<kWasmStmt>,
|
||||
&WasmGenerator::br_if<ValueType::kStmt>,
|
||||
|
||||
&WasmGenerator::memop<kExprI32StoreMem, kWasmI32>,
|
||||
&WasmGenerator::memop<kExprI32StoreMem8, kWasmI32>,
|
||||
&WasmGenerator::memop<kExprI32StoreMem16, kWasmI32>,
|
||||
&WasmGenerator::memop<kExprI64StoreMem, kWasmI64>,
|
||||
&WasmGenerator::memop<kExprI64StoreMem8, kWasmI64>,
|
||||
&WasmGenerator::memop<kExprI64StoreMem16, kWasmI64>,
|
||||
&WasmGenerator::memop<kExprI64StoreMem32, kWasmI64>,
|
||||
&WasmGenerator::memop<kExprF32StoreMem, kWasmF32>,
|
||||
&WasmGenerator::memop<kExprF64StoreMem, kWasmF64>,
|
||||
&WasmGenerator::memop<kExprI32AtomicStore, kWasmI32>,
|
||||
&WasmGenerator::memop<kExprI32AtomicStore8U, kWasmI32>,
|
||||
&WasmGenerator::memop<kExprI32AtomicStore16U, kWasmI32>,
|
||||
&WasmGenerator::memop<kExprI64AtomicStore, kWasmI64>,
|
||||
&WasmGenerator::memop<kExprI64AtomicStore8U, kWasmI64>,
|
||||
&WasmGenerator::memop<kExprI64AtomicStore16U, kWasmI64>,
|
||||
&WasmGenerator::memop<kExprI64AtomicStore32U, kWasmI64>,
|
||||
&WasmGenerator::memop<kExprI32StoreMem, ValueType::kI32>,
|
||||
&WasmGenerator::memop<kExprI32StoreMem8, ValueType::kI32>,
|
||||
&WasmGenerator::memop<kExprI32StoreMem16, ValueType::kI32>,
|
||||
&WasmGenerator::memop<kExprI64StoreMem, ValueType::kI64>,
|
||||
&WasmGenerator::memop<kExprI64StoreMem8, ValueType::kI64>,
|
||||
&WasmGenerator::memop<kExprI64StoreMem16, ValueType::kI64>,
|
||||
&WasmGenerator::memop<kExprI64StoreMem32, ValueType::kI64>,
|
||||
&WasmGenerator::memop<kExprF32StoreMem, ValueType::kF32>,
|
||||
&WasmGenerator::memop<kExprF64StoreMem, ValueType::kF64>,
|
||||
&WasmGenerator::memop<kExprI32AtomicStore, ValueType::kI32>,
|
||||
&WasmGenerator::memop<kExprI32AtomicStore8U, ValueType::kI32>,
|
||||
&WasmGenerator::memop<kExprI32AtomicStore16U, ValueType::kI32>,
|
||||
&WasmGenerator::memop<kExprI64AtomicStore, ValueType::kI64>,
|
||||
&WasmGenerator::memop<kExprI64AtomicStore8U, ValueType::kI64>,
|
||||
&WasmGenerator::memop<kExprI64AtomicStore16U, ValueType::kI64>,
|
||||
&WasmGenerator::memop<kExprI64AtomicStore32U, ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::drop,
|
||||
|
||||
&WasmGenerator::call<kWasmStmt>,
|
||||
&WasmGenerator::call<ValueType::kStmt>,
|
||||
|
||||
&WasmGenerator::set_local,
|
||||
&WasmGenerator::set_global};
|
||||
@ -622,7 +626,7 @@ void WasmGenerator::Generate<kWasmStmt>(DataRange* data) {
|
||||
}
|
||||
|
||||
template <>
|
||||
void WasmGenerator::Generate<kWasmI32>(DataRange* data) {
|
||||
void WasmGenerator::Generate<ValueType::kI32>(DataRange* data) {
|
||||
GeneratorRecursionScope rec_scope(this);
|
||||
if (recursion_limit_reached() || data->size() <= 1) {
|
||||
builder_->EmitI32Const(data->get<uint32_t>());
|
||||
@ -635,69 +639,70 @@ void WasmGenerator::Generate<kWasmI32>(DataRange* data) {
|
||||
&WasmGenerator::i32_const<3>,
|
||||
&WasmGenerator::i32_const<4>,
|
||||
|
||||
&WasmGenerator::sequence<kWasmI32, kWasmStmt>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmI32>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmI32, kWasmStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kI32, ValueType::kStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kI32>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kI32,
|
||||
ValueType::kStmt>,
|
||||
|
||||
&WasmGenerator::op<kExprI32Eqz, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Eq, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Ne, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32LtS, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32LtU, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32GeS, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32GeU, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Eqz, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Eq, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Ne, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32LtS, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32LtU, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32GeS, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32GeU, ValueType::kI32, ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::op<kExprI64Eqz, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Eq, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Ne, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64LtS, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64LtU, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64GeS, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64GeU, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Eqz, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Eq, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Ne, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64LtS, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64LtU, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64GeS, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64GeU, ValueType::kI64, ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::op<kExprF32Eq, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Ne, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Lt, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Ge, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Eq, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Ne, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Lt, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Ge, ValueType::kF32, ValueType::kF32>,
|
||||
|
||||
&WasmGenerator::op<kExprF64Eq, kWasmF64, kWasmF64>,
|
||||
&WasmGenerator::op<kExprF64Ne, kWasmF64, kWasmF64>,
|
||||
&WasmGenerator::op<kExprF64Lt, kWasmF64, kWasmF64>,
|
||||
&WasmGenerator::op<kExprF64Ge, kWasmF64, kWasmF64>,
|
||||
&WasmGenerator::op<kExprF64Eq, ValueType::kF64, ValueType::kF64>,
|
||||
&WasmGenerator::op<kExprF64Ne, ValueType::kF64, ValueType::kF64>,
|
||||
&WasmGenerator::op<kExprF64Lt, ValueType::kF64, ValueType::kF64>,
|
||||
&WasmGenerator::op<kExprF64Ge, ValueType::kF64, ValueType::kF64>,
|
||||
|
||||
&WasmGenerator::op<kExprI32Add, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Sub, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Mul, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Add, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Sub, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Mul, ValueType::kI32, ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::op<kExprI32DivS, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32DivU, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32RemS, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32RemU, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32DivS, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32DivU, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32RemS, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32RemU, ValueType::kI32, ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::op<kExprI32And, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Ior, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Xor, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Shl, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32ShrU, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32ShrS, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Ror, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Rol, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32And, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Ior, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Xor, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Shl, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32ShrU, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32ShrS, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Ror, ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Rol, ValueType::kI32, ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::op<kExprI32Clz, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Ctz, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Popcnt, kWasmI32>,
|
||||
&WasmGenerator::op<kExprI32Clz, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Ctz, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprI32Popcnt, ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::op<kExprI32ConvertI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI32SConvertF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprI32UConvertF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprI32SConvertF64, kWasmF64>,
|
||||
&WasmGenerator::op<kExprI32UConvertF64, kWasmF64>,
|
||||
&WasmGenerator::op<kExprI32ReinterpretF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprI32ConvertI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI32SConvertF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprI32UConvertF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprI32SConvertF64, ValueType::kF64>,
|
||||
&WasmGenerator::op<kExprI32UConvertF64, ValueType::kF64>,
|
||||
&WasmGenerator::op<kExprI32ReinterpretF32, ValueType::kF32>,
|
||||
|
||||
&WasmGenerator::block<kWasmI32>,
|
||||
&WasmGenerator::loop<kWasmI32>,
|
||||
&WasmGenerator::if_<kWasmI32, kIfElse>,
|
||||
&WasmGenerator::br_if<kWasmI32>,
|
||||
&WasmGenerator::block<ValueType::kI32>,
|
||||
&WasmGenerator::loop<ValueType::kI32>,
|
||||
&WasmGenerator::if_<ValueType::kI32, kIfElse>,
|
||||
&WasmGenerator::br_if<ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::memop<kExprI32LoadMem>,
|
||||
&WasmGenerator::memop<kExprI32LoadMem8S>,
|
||||
@ -708,51 +713,72 @@ void WasmGenerator::Generate<kWasmI32>(DataRange* data) {
|
||||
&WasmGenerator::memop<kExprI32AtomicLoad8U>,
|
||||
&WasmGenerator::memop<kExprI32AtomicLoad16U>,
|
||||
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAdd, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicSub, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAnd, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicOr, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicXor, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicExchange, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicCompareExchange, kWasmI32,
|
||||
kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAdd8U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicSub8U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAnd8U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicOr8U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicXor8U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicExchange8U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicCompareExchange8U, kWasmI32,
|
||||
kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAdd16U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicSub16U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAnd16U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicOr16U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicXor16U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicExchange16U, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicCompareExchange16U, kWasmI32,
|
||||
kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAdd, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicSub, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAnd, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicOr, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicXor, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicExchange, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicCompareExchange, ValueType::kI32,
|
||||
ValueType::kI32, ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAdd8U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicSub8U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAnd8U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicOr8U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicXor8U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicExchange8U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicCompareExchange8U,
|
||||
ValueType::kI32, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAdd16U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicSub16U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicAnd16U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicOr16U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicXor16U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicExchange16U, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::atomic_op<kExprI32AtomicCompareExchange16U,
|
||||
ValueType::kI32, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::simd_op<kExprS1x16AnyTrue, kWasmS128>,
|
||||
&WasmGenerator::simd_op<kExprS1x8AnyTrue, kWasmS128>,
|
||||
&WasmGenerator::simd_op<kExprS1x4AnyTrue, kWasmS128>,
|
||||
&WasmGenerator::simd_op<kExprS1x16AnyTrue, ValueType::kS128>,
|
||||
&WasmGenerator::simd_op<kExprS1x8AnyTrue, ValueType::kS128>,
|
||||
&WasmGenerator::simd_op<kExprS1x4AnyTrue, ValueType::kS128>,
|
||||
|
||||
&WasmGenerator::current_memory,
|
||||
&WasmGenerator::grow_memory,
|
||||
|
||||
&WasmGenerator::get_local<kWasmI32>,
|
||||
&WasmGenerator::tee_local<kWasmI32>,
|
||||
&WasmGenerator::get_global<kWasmI32>,
|
||||
&WasmGenerator::op<kExprSelect, kWasmI32, kWasmI32, kWasmI32>,
|
||||
&WasmGenerator::select_with_type<kWasmI32>,
|
||||
&WasmGenerator::get_local<ValueType::kI32>,
|
||||
&WasmGenerator::tee_local<ValueType::kI32>,
|
||||
&WasmGenerator::get_global<ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprSelect, ValueType::kI32, ValueType::kI32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::select_with_type<ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::call<kWasmI32>};
|
||||
&WasmGenerator::call<ValueType::kI32>};
|
||||
|
||||
GenerateOneOf(alternatives, data);
|
||||
}
|
||||
|
||||
template <>
|
||||
void WasmGenerator::Generate<kWasmI64>(DataRange* data) {
|
||||
void WasmGenerator::Generate<ValueType::kI64>(DataRange* data) {
|
||||
GeneratorRecursionScope rec_scope(this);
|
||||
if (recursion_limit_reached() || data->size() <= 1) {
|
||||
builder_->EmitI64Const(data->get<int64_t>());
|
||||
@ -769,36 +795,37 @@ void WasmGenerator::Generate<kWasmI64>(DataRange* data) {
|
||||
&WasmGenerator::i64_const<7>,
|
||||
&WasmGenerator::i64_const<8>,
|
||||
|
||||
&WasmGenerator::sequence<kWasmI64, kWasmStmt>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmI64>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmI64, kWasmStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kI64, ValueType::kStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kI64>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kI64,
|
||||
ValueType::kStmt>,
|
||||
|
||||
&WasmGenerator::op<kExprI64Add, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Sub, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Mul, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Add, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Sub, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Mul, ValueType::kI64, ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::op<kExprI64DivS, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64DivU, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64RemS, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64RemU, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64DivS, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64DivU, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64RemS, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64RemU, ValueType::kI64, ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::op<kExprI64And, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Ior, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Xor, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Shl, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64ShrU, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64ShrS, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Ror, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Rol, kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64And, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Ior, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Xor, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Shl, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64ShrU, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64ShrS, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Ror, ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Rol, ValueType::kI64, ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::op<kExprI64Clz, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Ctz, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Popcnt, kWasmI64>,
|
||||
&WasmGenerator::op<kExprI64Clz, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Ctz, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprI64Popcnt, ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::block<kWasmI64>,
|
||||
&WasmGenerator::loop<kWasmI64>,
|
||||
&WasmGenerator::if_<kWasmI64, kIfElse>,
|
||||
&WasmGenerator::br_if<kWasmI64>,
|
||||
&WasmGenerator::block<ValueType::kI64>,
|
||||
&WasmGenerator::loop<ValueType::kI64>,
|
||||
&WasmGenerator::if_<ValueType::kI64, kIfElse>,
|
||||
&WasmGenerator::br_if<ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::memop<kExprI64LoadMem>,
|
||||
&WasmGenerator::memop<kExprI64LoadMem8S>,
|
||||
@ -812,52 +839,80 @@ void WasmGenerator::Generate<kWasmI64>(DataRange* data) {
|
||||
&WasmGenerator::memop<kExprI64AtomicLoad16U>,
|
||||
&WasmGenerator::memop<kExprI64AtomicLoad32U>,
|
||||
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAdd, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicSub, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAnd, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicOr, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicXor, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicExchange, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicCompareExchange, kWasmI32,
|
||||
kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAdd8U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicSub8U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAnd8U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicOr8U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicXor8U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicExchange8U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicCompareExchange8U, kWasmI32,
|
||||
kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAdd16U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicSub16U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAnd16U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicOr16U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicXor16U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicExchange16U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicCompareExchange16U, kWasmI32,
|
||||
kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAdd32U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicSub32U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAnd32U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicOr32U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicXor32U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicExchange32U, kWasmI32, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicCompareExchange32U, kWasmI32,
|
||||
kWasmI64, kWasmI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAdd, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicSub, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAnd, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicOr, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicXor, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicExchange, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicCompareExchange, ValueType::kI32,
|
||||
ValueType::kI64, ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAdd8U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicSub8U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAnd8U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicOr8U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicXor8U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicExchange8U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicCompareExchange8U,
|
||||
ValueType::kI32, ValueType::kI64,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAdd16U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicSub16U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAnd16U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicOr16U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicXor16U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicExchange16U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicCompareExchange16U,
|
||||
ValueType::kI32, ValueType::kI64,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAdd32U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicSub32U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicAnd32U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicOr32U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicXor32U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicExchange32U, ValueType::kI32,
|
||||
ValueType::kI64>,
|
||||
&WasmGenerator::atomic_op<kExprI64AtomicCompareExchange32U,
|
||||
ValueType::kI32, ValueType::kI64,
|
||||
ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::get_local<kWasmI64>,
|
||||
&WasmGenerator::tee_local<kWasmI64>,
|
||||
&WasmGenerator::get_global<kWasmI64>,
|
||||
&WasmGenerator::op<kExprSelect, kWasmI64, kWasmI64, kWasmI32>,
|
||||
&WasmGenerator::select_with_type<kWasmI64>,
|
||||
&WasmGenerator::get_local<ValueType::kI64>,
|
||||
&WasmGenerator::tee_local<ValueType::kI64>,
|
||||
&WasmGenerator::get_global<ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprSelect, ValueType::kI64, ValueType::kI64,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::select_with_type<ValueType::kI64>,
|
||||
|
||||
&WasmGenerator::call<kWasmI64>};
|
||||
&WasmGenerator::call<ValueType::kI64>};
|
||||
|
||||
GenerateOneOf(alternatives, data);
|
||||
}
|
||||
|
||||
template <>
|
||||
void WasmGenerator::Generate<kWasmF32>(DataRange* data) {
|
||||
void WasmGenerator::Generate<ValueType::kF32>(DataRange* data) {
|
||||
GeneratorRecursionScope rec_scope(this);
|
||||
if (recursion_limit_reached() || data->size() <= sizeof(float)) {
|
||||
builder_->EmitF32Const(data->get<float>());
|
||||
@ -865,52 +920,54 @@ void WasmGenerator::Generate<kWasmF32>(DataRange* data) {
|
||||
}
|
||||
|
||||
constexpr GenerateFn alternatives[] = {
|
||||
&WasmGenerator::sequence<kWasmF32, kWasmStmt>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmF32>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmF32, kWasmStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kF32, ValueType::kStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kF32>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kF32,
|
||||
ValueType::kStmt>,
|
||||
|
||||
&WasmGenerator::op<kExprF32Abs, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Neg, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Ceil, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Floor, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Trunc, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32NearestInt, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Sqrt, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Add, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Sub, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Mul, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Div, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Min, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Max, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32CopySign, kWasmF32, kWasmF32>,
|
||||
&WasmGenerator::op<kExprF32Abs, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Neg, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Ceil, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Floor, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Trunc, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32NearestInt, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Sqrt, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Add, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Sub, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Mul, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Div, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Min, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32Max, ValueType::kF32, ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprF32CopySign, ValueType::kF32, ValueType::kF32>,
|
||||
|
||||
&WasmGenerator::op<kExprF32SConvertI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprF32UConvertI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprF32SConvertI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprF32UConvertI64, kWasmI64>,
|
||||
&WasmGenerator::op<kExprF32ConvertF64, kWasmF64>,
|
||||
&WasmGenerator::op<kExprF32ReinterpretI32, kWasmI32>,
|
||||
&WasmGenerator::op<kExprF32SConvertI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprF32UConvertI32, ValueType::kI32>,
|
||||
&WasmGenerator::op<kExprF32SConvertI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprF32UConvertI64, ValueType::kI64>,
|
||||
&WasmGenerator::op<kExprF32ConvertF64, ValueType::kF64>,
|
||||
&WasmGenerator::op<kExprF32ReinterpretI32, ValueType::kI32>,
|
||||
|
||||
&WasmGenerator::block<kWasmF32>,
|
||||
&WasmGenerator::loop<kWasmF32>,
|
||||
&WasmGenerator::if_<kWasmF32, kIfElse>,
|
||||
&WasmGenerator::br_if<kWasmF32>,
|
||||
&WasmGenerator::block<ValueType::kF32>,
|
||||
&WasmGenerator::loop<ValueType::kF32>,
|
||||
&WasmGenerator::if_<ValueType::kF32, kIfElse>,
|
||||
&WasmGenerator::br_if<ValueType::kF32>,
|
||||
|
||||
&WasmGenerator::memop<kExprF32LoadMem>,
|
||||
|
||||
&WasmGenerator::get_local<kWasmF32>,
|
||||
&WasmGenerator::tee_local<kWasmF32>,
|
||||
&WasmGenerator::get_global<kWasmF32>,
|
||||
&WasmGenerator::op<kExprSelect, kWasmF32, kWasmF32, kWasmI32>,
|
||||
&WasmGenerator::select_with_type<kWasmF32>,
|
||||
&WasmGenerator::get_local<ValueType::kF32>,
|
||||
&WasmGenerator::tee_local<ValueType::kF32>,
|
||||
&WasmGenerator::get_global<ValueType::kF32>,
|
||||
&WasmGenerator::op<kExprSelect, ValueType::kF32, ValueType::kF32,
|
||||
ValueType::kI32>,
|
||||
&WasmGenerator::select_with_type<ValueType::kF32>,
|
||||
|
||||
&WasmGenerator::call<kWasmF32>};
|
||||
&WasmGenerator::call<ValueType::kF32>};
|
||||
|
||||
GenerateOneOf(alternatives, data);
|
||||
}
|
||||
|
||||
template <>
|
||||
void WasmGenerator::Generate<kWasmF64>(DataRange* data) {
|
||||
void WasmGenerator::Generate<ValueType::kF64>(DataRange* data) {
|
||||
GeneratorRecursionScope rec_scope(this);
|
||||
if (recursion_limit_reached() || data->size() <= sizeof(double)) {
|
||||
builder_->EmitF64Const(data->get<double>());
|
||||
@ -918,52 +975,54 @@ void WasmGenerator::Generate<kWasmF64>(DataRange* data) {
|
||||
}
|
||||
|
||||
constexpr GenerateFn alternatives[] = {
|
||||
&WasmGenerator::sequence<kWasmF64, kWasmStmt>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmF64>,
|
||||
&WasmGenerator::sequence<kWasmStmt, kWasmF64, kWasmStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kF64, ValueType::kStmt>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kF64>,
|
||||
&WasmGenerator::sequence<ValueType::kStmt, ValueType::kF64,
ValueType::kStmt>,

&WasmGenerator::op<kExprF64Abs, kWasmF64>,
&WasmGenerator::op<kExprF64Neg, kWasmF64>,
&WasmGenerator::op<kExprF64Ceil, kWasmF64>,
&WasmGenerator::op<kExprF64Floor, kWasmF64>,
&WasmGenerator::op<kExprF64Trunc, kWasmF64>,
&WasmGenerator::op<kExprF64NearestInt, kWasmF64>,
&WasmGenerator::op<kExprF64Sqrt, kWasmF64>,
&WasmGenerator::op<kExprF64Add, kWasmF64, kWasmF64>,
&WasmGenerator::op<kExprF64Sub, kWasmF64, kWasmF64>,
&WasmGenerator::op<kExprF64Mul, kWasmF64, kWasmF64>,
&WasmGenerator::op<kExprF64Div, kWasmF64, kWasmF64>,
&WasmGenerator::op<kExprF64Min, kWasmF64, kWasmF64>,
&WasmGenerator::op<kExprF64Max, kWasmF64, kWasmF64>,
&WasmGenerator::op<kExprF64CopySign, kWasmF64, kWasmF64>,
&WasmGenerator::op<kExprF64Abs, ValueType::kF64>,
&WasmGenerator::op<kExprF64Neg, ValueType::kF64>,
&WasmGenerator::op<kExprF64Ceil, ValueType::kF64>,
&WasmGenerator::op<kExprF64Floor, ValueType::kF64>,
&WasmGenerator::op<kExprF64Trunc, ValueType::kF64>,
&WasmGenerator::op<kExprF64NearestInt, ValueType::kF64>,
&WasmGenerator::op<kExprF64Sqrt, ValueType::kF64>,
&WasmGenerator::op<kExprF64Add, ValueType::kF64, ValueType::kF64>,
&WasmGenerator::op<kExprF64Sub, ValueType::kF64, ValueType::kF64>,
&WasmGenerator::op<kExprF64Mul, ValueType::kF64, ValueType::kF64>,
&WasmGenerator::op<kExprF64Div, ValueType::kF64, ValueType::kF64>,
&WasmGenerator::op<kExprF64Min, ValueType::kF64, ValueType::kF64>,
&WasmGenerator::op<kExprF64Max, ValueType::kF64, ValueType::kF64>,
&WasmGenerator::op<kExprF64CopySign, ValueType::kF64, ValueType::kF64>,

&WasmGenerator::op<kExprF64SConvertI32, kWasmI32>,
&WasmGenerator::op<kExprF64UConvertI32, kWasmI32>,
&WasmGenerator::op<kExprF64SConvertI64, kWasmI64>,
&WasmGenerator::op<kExprF64UConvertI64, kWasmI64>,
&WasmGenerator::op<kExprF64ConvertF32, kWasmF32>,
&WasmGenerator::op<kExprF64ReinterpretI64, kWasmI64>,
&WasmGenerator::op<kExprF64SConvertI32, ValueType::kI32>,
&WasmGenerator::op<kExprF64UConvertI32, ValueType::kI32>,
&WasmGenerator::op<kExprF64SConvertI64, ValueType::kI64>,
&WasmGenerator::op<kExprF64UConvertI64, ValueType::kI64>,
&WasmGenerator::op<kExprF64ConvertF32, ValueType::kF32>,
&WasmGenerator::op<kExprF64ReinterpretI64, ValueType::kI64>,

&WasmGenerator::block<kWasmF64>,
&WasmGenerator::loop<kWasmF64>,
&WasmGenerator::if_<kWasmF64, kIfElse>,
&WasmGenerator::br_if<kWasmF64>,
&WasmGenerator::block<ValueType::kF64>,
&WasmGenerator::loop<ValueType::kF64>,
&WasmGenerator::if_<ValueType::kF64, kIfElse>,
&WasmGenerator::br_if<ValueType::kF64>,

&WasmGenerator::memop<kExprF64LoadMem>,

&WasmGenerator::get_local<kWasmF64>,
&WasmGenerator::tee_local<kWasmF64>,
&WasmGenerator::get_global<kWasmF64>,
&WasmGenerator::op<kExprSelect, kWasmF64, kWasmF64, kWasmI32>,
&WasmGenerator::select_with_type<kWasmF64>,
&WasmGenerator::get_local<ValueType::kF64>,
&WasmGenerator::tee_local<ValueType::kF64>,
&WasmGenerator::get_global<ValueType::kF64>,
&WasmGenerator::op<kExprSelect, ValueType::kF64, ValueType::kF64,
ValueType::kI32>,
&WasmGenerator::select_with_type<ValueType::kF64>,

&WasmGenerator::call<kWasmF64>};
&WasmGenerator::call<ValueType::kF64>};

GenerateOneOf(alternatives, data);
}

template <>
void WasmGenerator::Generate<kWasmS128>(DataRange* data) {
void WasmGenerator::Generate<ValueType::kS128>(DataRange* data) {
GeneratorRecursionScope rec_scope(this);
if (recursion_limit_reached() || data->size() <= sizeof(int32_t)) {
// TODO(v8:8460): v128.const is not implemented yet, and we need a way to

@ -974,45 +1033,51 @@ void WasmGenerator::Generate<kWasmS128>(DataRange* data) {
}

constexpr GenerateFn alternatives[] = {
&WasmGenerator::simd_op<kExprI8x16Splat, kWasmI32>,
&WasmGenerator::simd_op<kExprI16x8Splat, kWasmI32>,
&WasmGenerator::simd_op<kExprI32x4Splat, kWasmI32>,
&WasmGenerator::simd_op<kExprI64x2Splat, kWasmI64>,
&WasmGenerator::simd_op<kExprF32x4Splat, kWasmF32>,
&WasmGenerator::simd_op<kExprF64x2Splat, kWasmF64>,
&WasmGenerator::simd_op<kExprI8x16Splat, ValueType::kI32>,
&WasmGenerator::simd_op<kExprI16x8Splat, ValueType::kI32>,
&WasmGenerator::simd_op<kExprI32x4Splat, ValueType::kI32>,
&WasmGenerator::simd_op<kExprI64x2Splat, ValueType::kI64>,
&WasmGenerator::simd_op<kExprF32x4Splat, ValueType::kF32>,
&WasmGenerator::simd_op<kExprF64x2Splat, ValueType::kF64>,

&WasmGenerator::simd_op<kExprI8x16Add, kWasmS128, kWasmS128>,
&WasmGenerator::simd_op<kExprI16x8Add, kWasmS128, kWasmS128>,
&WasmGenerator::simd_op<kExprI32x4Add, kWasmS128, kWasmS128>,
&WasmGenerator::simd_op<kExprI64x2Add, kWasmS128, kWasmS128>,
&WasmGenerator::simd_op<kExprF32x4Add, kWasmS128, kWasmS128>,
&WasmGenerator::simd_op<kExprF64x2Add, kWasmS128, kWasmS128>,
&WasmGenerator::simd_op<kExprI8x16Add, ValueType::kS128,
ValueType::kS128>,
&WasmGenerator::simd_op<kExprI16x8Add, ValueType::kS128,
ValueType::kS128>,
&WasmGenerator::simd_op<kExprI32x4Add, ValueType::kS128,
ValueType::kS128>,
&WasmGenerator::simd_op<kExprI64x2Add, ValueType::kS128,
ValueType::kS128>,
&WasmGenerator::simd_op<kExprF32x4Add, ValueType::kS128,
ValueType::kS128>,
&WasmGenerator::simd_op<kExprF64x2Add, ValueType::kS128,
ValueType::kS128>,

&WasmGenerator::memop<kExprS128LoadMem>,
&WasmGenerator::memop<kExprS128StoreMem, kWasmS128>};
&WasmGenerator::memop<kExprS128StoreMem, ValueType::kS128>};

GenerateOneOf(alternatives, data);
}

void WasmGenerator::grow_memory(DataRange* data) {
Generate<kWasmI32>(data);
Generate<ValueType::kI32>(data);
builder_->EmitWithU8(kExprMemoryGrow, 0);
}

void WasmGenerator::Generate(ValueType type, DataRange* data) {
switch (type) {
case kWasmStmt:
return Generate<kWasmStmt>(data);
case kWasmI32:
return Generate<kWasmI32>(data);
case kWasmI64:
return Generate<kWasmI64>(data);
case kWasmF32:
return Generate<kWasmF32>(data);
case kWasmF64:
return Generate<kWasmF64>(data);
case kWasmS128:
return Generate<kWasmS128>(data);
switch (type.kind()) {
case ValueType::kStmt:
return Generate<ValueType::kStmt>(data);
case ValueType::kI32:
return Generate<ValueType::kI32>(data);
case ValueType::kI64:
return Generate<ValueType::kI64>(data);
case ValueType::kF32:
return Generate<ValueType::kF32>(data);
case ValueType::kF64:
return Generate<ValueType::kF64>(data);
case ValueType::kS128:
return Generate<ValueType::kS128>(data);
default:
UNREACHABLE();
}

@ -80,20 +80,20 @@ PrintSig PrintReturns(const FunctionSig* sig) {
return {sig->return_count(), [=](size_t i) { return sig->GetReturn(i); }};
}
const char* ValueTypeToConstantName(ValueType type) {
switch (type) {
case kWasmI32:
switch (type.kind()) {
case ValueType::kI32:
return "kWasmI32";
case kWasmI64:
case ValueType::kI64:
return "kWasmI64";
case kWasmF32:
case ValueType::kF32:
return "kWasmF32";
case kWasmF64:
case ValueType::kF64:
return "kWasmF64";
case kWasmAnyRef:
case ValueType::kAnyRef:
return "kWasmAnyRef";
case kWasmFuncRef:
case ValueType::kFuncRef:
return "kWasmFuncRef";
case kWasmExnRef:
case ValueType::kExnRef:
return "kWasmExnRef";
default:
UNREACHABLE();

@ -224,8 +224,7 @@ void GenerateTestCase(Isolate* isolate, ModuleWireBytes wire_bytes,
ValueType type = decls.type_list[pos];
while (pos + count < locals && decls.type_list[pos + count] == type)
++count;
os << ".addLocals({" << ValueTypes::TypeName(type)
<< "_count: " << count << "})";
os << ".addLocals({" << type.type_name() << "_count: " << count << "})";
}
os << "\n";
}

@ -1485,7 +1485,7 @@ TEST_F(FunctionBodyDecoderTest, AllLoadMemCombinations) {
MachineType mem_type = machineTypes[j];
byte code[] = {WASM_LOAD_MEM(mem_type, WASM_ZERO)};
FunctionSig sig(1, 0, &local_type);
Validate(local_type == ValueTypes::ValueTypeFor(mem_type), &sig, code);
Validate(local_type == ValueType::For(mem_type), &sig, code);
}
}
}

@ -1500,7 +1500,7 @@ TEST_F(FunctionBodyDecoderTest, AllStoreMemCombinations) {
MachineType mem_type = machineTypes[j];
byte code[] = {WASM_STORE_MEM(mem_type, WASM_ZERO, WASM_GET_LOCAL(0))};
FunctionSig sig(0, 1, &local_type);
Validate(local_type == ValueTypes::ValueTypeFor(mem_type), &sig, code);
Validate(local_type == ValueType::For(mem_type), &sig, code);
}
}
}

@ -1749,8 +1749,8 @@ TEST_F(FunctionBodyDecoderTest, MultiReturnType) {

ExpectValidates(&sig_cd_v, {WASM_CALL_FUNCTION0(0)});

if (ValueTypes::IsSubType(kValueTypes[c], kValueTypes[a]) &&
ValueTypes::IsSubType(kValueTypes[d], kValueTypes[b])) {
if (kValueTypes[c].IsSubTypeOf(kValueTypes[a]) &&
kValueTypes[d].IsSubTypeOf(kValueTypes[b])) {
ExpectValidates(&sig_ab_v, {WASM_CALL_FUNCTION0(0)});
} else {
ExpectFailure(&sig_ab_v, {WASM_CALL_FUNCTION0(0)});

@ -1996,8 +1996,7 @@ TEST_F(FunctionBodyDecoderTest, AllGetGlobalCombinations) {
TestModuleBuilder builder;
module = builder.module();
builder.AddGlobal(global_type);
Validate(ValueTypes::IsSubType(global_type, local_type), &sig,
{WASM_GET_GLOBAL(0)});
Validate(global_type.IsSubTypeOf(local_type), &sig, {WASM_GET_GLOBAL(0)});
}
}
}

@ -2011,7 +2010,7 @@ TEST_F(FunctionBodyDecoderTest, AllSetGlobalCombinations) {
TestModuleBuilder builder;
module = builder.module();
builder.AddGlobal(global_type);
Validate(ValueTypes::IsSubType(local_type, global_type), &sig,
Validate(local_type.IsSubTypeOf(global_type), &sig,
{WASM_SET_GLOBAL(0, WASM_GET_LOCAL(0))});
}
}

@ -2407,8 +2406,7 @@ TEST_F(FunctionBodyDecoderTest, Break_TypeCheckAll1) {
sig.GetReturn(), WASM_IF(WASM_ZERO, WASM_BRV(0, WASM_GET_LOCAL(0))),
WASM_GET_LOCAL(1))};

Validate(ValueTypes::IsSubType(kValueTypes[j], kValueTypes[i]), &sig,
code);
Validate(kValueTypes[j].IsSubTypeOf(kValueTypes[i]), &sig, code);
}
}
}

@ -2422,8 +2420,7 @@ TEST_F(FunctionBodyDecoderTest, Break_TypeCheckAll2) {
WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0)),
WASM_GET_LOCAL(1))};

Validate(ValueTypes::IsSubType(kValueTypes[j], kValueTypes[i]), &sig,
code);
Validate(kValueTypes[j].IsSubTypeOf(kValueTypes[i]), &sig, code);
}
}
}

@ -2437,8 +2434,7 @@ TEST_F(FunctionBodyDecoderTest, Break_TypeCheckAll3) {
WASM_GET_LOCAL(1),
WASM_BRV_IF_ZERO(0, WASM_GET_LOCAL(0)))};

Validate(ValueTypes::IsSubType(kValueTypes[j], kValueTypes[i]), &sig,
code);
Validate(kValueTypes[j].IsSubTypeOf(kValueTypes[i]), &sig, code);
}
}
}

@ -2482,8 +2478,7 @@ TEST_F(FunctionBodyDecoderTest, BreakIf_val_type) {
types[1], WASM_BRV_IF(0, WASM_GET_LOCAL(1), WASM_GET_LOCAL(2)),
WASM_DROP, WASM_GET_LOCAL(0))};

Validate(ValueTypes::IsSubType(kValueTypes[j], kValueTypes[i]), &sig,
code);
Validate(kValueTypes[j].IsSubTypeOf(kValueTypes[i]), &sig, code);
}
}
}

@ -3811,8 +3806,7 @@ TEST_F(LocalDeclDecoderTest, OneLocal) {
WASM_FEATURE_SCOPE(anyref);
for (size_t i = 0; i < arraysize(kValueTypes); i++) {
ValueType type = kValueTypes[i];
const byte data[] = {1, 1,
static_cast<byte>(ValueTypes::ValueTypeCodeFor(type))};
const byte data[] = {1, 1, static_cast<byte>(type.value_type_code())};
BodyLocalDecls decls(zone());
bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
EXPECT_TRUE(result);

@ -3827,8 +3821,7 @@ TEST_F(LocalDeclDecoderTest, FiveLocals) {
WASM_FEATURE_SCOPE(anyref);
for (size_t i = 0; i < arraysize(kValueTypes); i++) {
ValueType type = kValueTypes[i];
const byte data[] = {1, 5,
static_cast<byte>(ValueTypes::ValueTypeCodeFor(type))};
const byte data[] = {1, 5, static_cast<byte>(type.value_type_code())};
BodyLocalDecls decls(zone());
bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
EXPECT_TRUE(result);

@ -3893,8 +3886,7 @@ TEST_F(LocalDeclDecoderTest, UseEncoder) {
TEST_F(LocalDeclDecoderTest, ExnRef) {
WASM_FEATURE_SCOPE(eh);
ValueType type = kWasmExnRef;
const byte data[] = {1, 1,
static_cast<byte>(ValueTypes::ValueTypeCodeFor(type))};
const byte data[] = {1, 1, static_cast<byte>(type.value_type_code())};
BodyLocalDecls decls(zone());
bool result = DecodeLocalDecls(&decls, data, data + sizeof(data));
EXPECT_TRUE(result);

@ -21,9 +21,9 @@ TEST_F(WasmCallDescriptorTest, TestAnyRefIsGrouped) {
ValueType params[kMaxCount];

for (size_t i = 0; i < kMaxCount; i += 2) {
params[i] = ValueType::kWasmAnyRef;
params[i] = kWasmAnyRef;
CHECK_LT(i + 1, kMaxCount);
params[i + 1] = ValueType::kWasmI32;
params[i + 1] = kWasmI32;
}

for (size_t count = 1; count <= kMaxCount; ++count) {
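The call sites in the fuzzer and test hunks above all change the same way: static helpers such as ValueTypes::IsSubType, ValueTypes::ValueTypeCodeFor, ValueTypes::TypeName and ValueTypes::ValueTypeFor become member functions on the new ValueType class (IsSubTypeOf(), value_type_code(), type_name(), ValueType::For()), and switches over the enum constants become switches over type.kind(). A minimal standalone sketch of that shape follows; it is a toy class for illustration only, not the definition in src/wasm/value-type.h, and its kinds and subtyping rule are assumptions.

// Toy sketch of the enum-to-class call-site pattern; not V8 code.
#include <cassert>
#include <cstdint>

class ValueType {
 public:
  enum Kind : uint8_t { kStmt, kI32, kI64, kF32, kF64, kS128 };
  constexpr explicit ValueType(Kind kind) : kind_(kind) {}
  // Switching on kind() replaces switching on the old enum value directly.
  constexpr Kind kind() const { return kind_; }
  // Member call replaces a static helper of the form IsSubType(a, b).
  constexpr bool IsSubTypeOf(ValueType other) const {
    return kind_ == other.kind_;  // toy rule: reflexive subtyping only
  }
 private:
  Kind kind_;
};

constexpr ValueType kWasmI32{ValueType::kI32};
constexpr ValueType kWasmI64{ValueType::kI64};

int main() {
  switch (kWasmI64.kind()) {
    case ValueType::kI64:
      assert(kWasmI64.IsSubTypeOf(kWasmI64));
      break;
    default:
      break;
  }
  assert(!kWasmI32.IsSubTypeOf(kWasmI64));
  return 0;
}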