[wasm][gc] Implement struct.set and switch struct.get to optref

Work towards adding heap-allocated object support for wasm, according to
the gc proposal.

Changes:
- Implement subtyping for reference types (ref s) and (optref s),
  where 's' is a struct type (see the sketch after this list).
  This CL does *not* implement subtyping between struct and function
  types. Also, it does not handle i31refs and eqrefs.
- Implement struct.set.
- Change struct.get to accept an optref as argument, as required by the
  standard.
- Allow locals to store objects of ref and optref types.
- Add a test for struct.set and optref locals. Modify the test for
  struct.get accordingly.
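
For illustration only, here is a minimal standalone sketch of the
subtyping rules listed above. It uses a simplified, hypothetical
Kind/RefType model rather than V8's actual ValueType class; the real
implementation is in the value-type.h hunk further down.

  // Hypothetical, simplified model of the new subtyping rules (not V8 code).
  #include <cstdint>

  enum class Kind { kAnyRef, kFuncRef, kExnRef, kNullRef, kRef, kOptRef };

  struct RefType {
    Kind kind;
    uint32_t index;  // struct type index; only meaningful for kRef/kOptRef
  };

  bool IsSubTypeOf(RefType a, RefType b) {
    // Reflexivity; for (ref s)/(optref s) the struct index must also match.
    if (a.kind == b.kind &&
        ((a.kind != Kind::kRef && a.kind != Kind::kOptRef) ||
         a.index == b.index)) {
      return true;
    }
    // nullref is a subtype of every nullable reference type.
    if (a.kind == Kind::kNullRef &&
        (b.kind == Kind::kAnyRef || b.kind == Kind::kFuncRef ||
         b.kind == Kind::kExnRef || b.kind == Kind::kOptRef)) {
      return true;
    }
    // Every reference type is a subtype of anyref.
    if (b.kind == Kind::kAnyRef) return true;
    // (ref s) is a subtype of (optref s) for the same struct type s.
    return a.kind == Kind::kRef && b.kind == Kind::kOptRef &&
           a.index == b.index;
  }

Note that nullref remains a subtype of every nullable reference type,
while (ref s) relates only to (optref s) of the same struct type s.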

Reference: https://github.com/WebAssembly/gc

R=jkummerow@chromium.org
R=clemensb@chromium.org

Bug: v8:7748
Change-Id: I708626fa5f90a6e24e667d66eed1c7697f458a23
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2172089
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67562}
Authored by Manos Koukoutos on 2020-05-05 10:43:58 +00:00; committed by Commit Bot
parent 3ee4ead58c
commit 1ba5766f5e
13 changed files with 166 additions and 40 deletions


@@ -873,6 +873,7 @@ namespace internal {
   TFS(ThrowWasmTrapTableOutOfBounds)                                   \
   TFS(ThrowWasmTrapBrOnExnNullRef)                                     \
   TFS(ThrowWasmTrapRethrowNullRef)                                     \
+  TFS(ThrowWasmTrapNullDereference)                                    \
                                                                        \
   /* WeakMap */                                                        \
   TFJ(WeakMapConstructor, kDontAdaptArgumentsSentinel)                 \


@@ -1603,7 +1603,8 @@ enum class LoadSensitivity {
   V(TrapElemSegmentDropped)          \
   V(TrapTableOutOfBounds)            \
   V(TrapBrOnExnNullRef)              \
-  V(TrapRethrowNullRef)
+  V(TrapRethrowNullRef)              \
+  V(TrapNullDereference)
 
 enum KeyedAccessLoadMode {
   STANDARD_LOAD,


@@ -553,6 +553,7 @@ namespace internal {
   T(WasmTrapTableOutOfBounds, "table access out of bounds")           \
   T(WasmTrapBrOnExnNullRef, "br_on_exn on nullref value")             \
   T(WasmTrapRethrowNullRef, "rethrowing nullref value")               \
+  T(WasmTrapNullDereference, "dereferencing a null pointer")          \
   T(WasmExceptionError, "wasm exception")                             \
   /* Asm.js validation related */                                     \
   T(AsmJsInvalid, "Invalid asm.js: %")                                \


@@ -50,6 +50,7 @@
 #include "src/wasm/memory-tracing.h"
 #include "src/wasm/object-access.h"
 #include "src/wasm/wasm-code-manager.h"
+#include "src/wasm/wasm-constants.h"
 #include "src/wasm/wasm-limits.h"
 #include "src/wasm/wasm-linkage.h"
 #include "src/wasm/wasm-module.h"
@@ -5030,6 +5031,21 @@ Node* FieldOffset(MachineGraph* graph, const wasm::StructType* type,
   return graph->IntPtrConstant(offset);
 }
 
+// Set a field of a struct, without checking if the struct is null.
+// Helper method for StructNew and StructSet.
+Node* StoreStructFieldUnchecked(MachineGraph* graph, WasmGraphAssembler* gasm,
+                                Node* struct_object,
+                                const wasm::StructType* type,
+                                uint32_t field_index, Node* value) {
+  WriteBarrierKind write_barrier = type->field(field_index).IsReferenceType()
+                                       ? kPointerWriteBarrier
+                                       : kNoWriteBarrier;
+  StoreRepresentation rep(type->field(field_index).machine_representation(),
+                          write_barrier);
+  Node* offset = FieldOffset(graph, type, field_index);
+  return gasm->Store(rep, struct_object, offset, value);
+}
+
 Node* WasmGraphBuilder::StructNew(uint32_t struct_index,
                                   const wasm::StructType* type,
                                   Vector<Node*> fields) {
@@ -5040,25 +5056,32 @@ Node* WasmGraphBuilder::StructNew(uint32_t struct_index,
   Node* s = BuildCallToRuntime(Runtime::kWasmStructNew, runtime_args,
                                arraysize(runtime_args));
   for (uint32_t i = 0; i < type->field_count(); i++) {
-    wasm::ValueType field_type = type->field(i);
-    WriteBarrierKind write_barrier = type->field(i).IsReferenceType()
-                                         ? kPointerWriteBarrier
-                                         : kNoWriteBarrier;
-    StoreRepresentation rep(field_type.machine_representation(), write_barrier);
-    Node* offset = FieldOffset(mcgraph(), type, i);
-    gasm_->Store(rep, s, offset, fields[i]);
+    StoreStructFieldUnchecked(mcgraph(), gasm_.get(), s, type, i, fields[i]);
   }
   return s;
 }
 
 Node* WasmGraphBuilder::StructGet(Node* struct_object,
                                   const wasm::StructType* type,
-                                  uint32_t field_index) {
+                                  uint32_t field_index,
+                                  wasm::WasmCodePosition position) {
   MachineType machine_type = FieldType(type, field_index);
   Node* offset = FieldOffset(mcgraph(), type, field_index);
+  TrapIfTrue(wasm::kTrapNullDereference,
+             gasm_->WordEqual(struct_object, RefNull()), position);
   return gasm_->Load(machine_type, struct_object, offset);
 }
 
+Node* WasmGraphBuilder::StructSet(Node* struct_object,
+                                  const wasm::StructType* type,
+                                  uint32_t field_index, Node* field_value,
+                                  wasm::WasmCodePosition position) {
+  TrapIfTrue(wasm::kTrapNullDereference,
+             gasm_->WordEqual(struct_object, RefNull()), position);
+  return StoreStructFieldUnchecked(mcgraph(), gasm_.get(), struct_object, type,
+                                   field_index, field_value);
+}
+
 class WasmDecorator final : public GraphDecorator {
  public:
  explicit WasmDecorator(NodeOriginTable* origins, wasm::Decoder* decoder)


@@ -370,7 +370,10 @@ class WasmGraphBuilder {
   Node* StructNew(uint32_t struct_index, const wasm::StructType* type,
                   Vector<Node*> fields);
   Node* StructGet(Node* struct_object, const wasm::StructType* type,
-                  uint32_t field_index);
+                  uint32_t field_index, wasm::WasmCodePosition position);
+  Node* StructSet(Node* struct_object, const wasm::StructType* type,
+                  uint32_t field_index, Node* value,
+                  wasm::WasmCodePosition position);
 
   bool has_simd() const { return has_simd_; }


@@ -3398,6 +3398,12 @@ class LiftoffCompiler {
     // TODO(7748): Implement.
     unsupported(decoder, kGC, "struct.get");
   }
+  void StructSet(FullDecoder* decoder, const Value& struct_obj,
+                 const FieldIndexImmediate<validate>& field,
+                 const Value& field_value) {
+    // TODO(7748): Implement.
+    unsupported(decoder, kGC, "struct.set");
+  }
 
  private:
   // Emit additional source positions for return addresses. Used by debugging to


@@ -210,6 +210,7 @@ struct GlobalIndexImmediate {
 namespace function_body_decoder {
 // Decode a byte representing a local type. Return {false} if the encoded
 // byte was invalid or the start of a type index.
+// TODO(7748): Refactor this to handle (opt)ref types
 inline bool decode_local_type(uint8_t val, ValueType* result) {
   switch (static_cast<ValueTypeCode>(val)) {
     case kLocalVoid:
@@ -810,7 +811,9 @@ enum class LoadTransformationKind : uint8_t {
   F(StructNew, const StructIndexImmediate<validate>& imm, const Value args[], \
     Value* result)                                                            \
   F(StructGet, const Value& struct_object,                                    \
-    const FieldIndexImmediate<validate>& field, Value* result)
+    const FieldIndexImmediate<validate>& field, Value* result)                \
+  F(StructSet, const Value& struct_object,                                    \
+    const FieldIndexImmediate<validate>& field, const Value& field_value)
 
 // Generic Wasm bytecode decoder with utilities for decoding immediates,
 // lengths, etc.
@@ -915,6 +918,26 @@ class WasmDecoder : public Decoder {
                        "invalid local type 'exception ref', enable with "
                        "--experimental-wasm-eh");
         return false;
+      case kLocalRef:
+        if (enabled.has_gc()) {
+          uint32_t type_index = decoder->consume_u32v("type index");
+          type = ValueType(ValueType::kRef, type_index);
+          break;
+        }
+        decoder->error(decoder->pc() - 1,
+                       "invalid local type 'ref', enable with "
+                       "--experimental-wasm-gc");
+        return false;
+      case kLocalOptRef:
+        if (enabled.has_gc()) {
+          uint32_t type_index = decoder->consume_u32v("type index");
+          type = ValueType(ValueType::kOptRef, type_index);
+          break;
+        }
+        decoder->error(decoder->pc() - 1,
+                       "invalid local type 'optref', enable with "
+                       "--experimental-wasm-gc");
+        return false;
       case kLocalS128:
         if (enabled.has_simd()) {
           type = kWasmS128;
@@ -2903,16 +2926,25 @@ class WasmFullDecoder : public WasmDecoder<validate> {
         FieldIndexImmediate<validate> field(this, this->pc_ + len);
         if (!this->Validate(this->pc_ + len, field)) break;
         len += field.length;
-        // TODO(7748): This should take an optref, and perform a null-check.
         auto struct_obj =
-            Pop(0, ValueType(ValueType::kRef, field.struct_index.index));
+            Pop(0, ValueType(ValueType::kOptRef, field.struct_index.index));
         auto* value = Push(field.struct_index.struct_type->field(field.index));
+        // TODO(7748): Optimize this when struct type is null/ref
         CALL_INTERFACE_IF_REACHABLE(StructGet, struct_obj, field, value);
         break;
       }
-      case kExprStructSet:
-        UNIMPLEMENTED();  // TODO(7748): Implement.
+      case kExprStructSet: {
+        FieldIndexImmediate<validate> field(this, this->pc_ + len);
+        if (!this->Validate(this->pc_ + len, field)) break;
+        len += field.length;
+        auto field_value = Pop(
+            0, ValueType(field.struct_index.struct_type->field(field.index)));
+        auto struct_obj =
+            Pop(0, ValueType(ValueType::kOptRef, field.struct_index.index));
+        // TODO(7748): Optimize this when struct type is null/ref
+        CALL_INTERFACE_IF_REACHABLE(StructSet, struct_obj, field, field_value);
         break;
+      }
       case kExprArrayNew:
         UNIMPLEMENTED();  // TODO(7748): Implement.
         break;


@@ -614,8 +614,16 @@ class WasmGraphBuildingInterface {
 
   void StructGet(FullDecoder* decoder, const Value& struct_object,
                  const FieldIndexImmediate<validate>& field, Value* result) {
-    result->node = BUILD(StructGet, struct_object.node,
-                         field.struct_index.struct_type, field.index);
+    result->node =
+        BUILD(StructGet, struct_object.node, field.struct_index.struct_type,
+              field.index, decoder->position());
+  }
+
+  void StructSet(FullDecoder* decoder, const Value& struct_object,
+                 const FieldIndexImmediate<validate>& field,
+                 const Value& field_value) {
+    BUILD(StructSet, struct_object.node, field.struct_index.struct_type,
+          field.index, field_value.node, decoder->position());
   }
 
  private:
@@ -729,6 +737,8 @@ class WasmGraphBuildingInterface {
       case ValueType::kFuncRef:
       case ValueType::kNullRef:
       case ValueType::kExnRef:
+      case ValueType::kOptRef:
+      case ValueType::kEqRef:
         return builder_->RefNull();
       default:
         UNREACHABLE();


@@ -31,6 +31,9 @@ size_t LocalDeclEncoder::Emit(byte* buffer) const {
     LEBHelper::write_u32v(&pos, local_decl.first);
     *pos = local_decl.second.value_type_code();
     ++pos;
+    if (local_decl.second.has_immediate()) {
+      LEBHelper::write_u32v(&pos, local_decl.second.ref_index());
+    }
   }
   DCHECK_EQ(Size(), pos - buffer);
   return static_cast<size_t>(pos - buffer);
@@ -48,9 +51,17 @@ uint32_t LocalDeclEncoder::AddLocals(uint32_t count, ValueType type) {
   return result;
 }
 
+// Size = (size of locals count) +
+//        (for each local pair <reps, type>, (size of reps) + (size of type))
 size_t LocalDeclEncoder::Size() const {
   size_t size = LEBHelper::sizeof_u32v(local_decls.size());
-  for (auto p : local_decls) size += 1 + LEBHelper::sizeof_u32v(p.first);
+  for (auto p : local_decls) {
+    size +=
+        LEBHelper::sizeof_u32v(p.first) +  // number of locals
+        1 +                                // Opcode
+        (p.second.has_immediate() ? LEBHelper::sizeof_u32v(p.second.ref_index())
+                                  : 0);  // immediate
+  }
   return size;
 }


@@ -20,21 +20,26 @@ namespace wasm {
 // Type for holding simd values, defined in wasm-value.h.
 class Simd128;
 
-// Type lattice: For any two types connected by a line, the type at the bottom
-// is a subtype of the other type.
+// Type lattice: Given a fixed struct type S, the following lattice
+// defines the subtyping relation among types:
+// For every two types connected by a line, the top type is a
+// (direct) subtype of the bottom type.
 //
 //                           AnyRef
-//                           /    \
-//                     FuncRef    ExnRef
-//                           \    /
-//   I32  I64  F32  F64      NullRef
-//     \    \    \    \      /
-//      ------------  Bottom
+//                          /   |    \
+//                   FuncRef  ExnRef  OptRef(S)
+//                          \   |    /        \
+//   I32  I64  F32  F64      NullRef           Ref(S)
+//     \    \    \    \         |             /
+//      ---------------------- Bottom ---------
 // Format: kind, log2Size, code, machineType, shortName, typeName
 //
 // Some of these types are from proposals that are not standardized yet:
 // - "ref" types per https://github.com/WebAssembly/function-references
 // - "optref"/"eqref" per https://github.com/WebAssembly/gc
+//
+// TODO(7748): Extend this with eqref, struct and function subtyping.
+// Keep up to date with funcref vs. anyref subtyping.
 #define FOREACH_VALUE_TYPE(V)                    \
   V(Stmt, -1, Void, None, 'v', "<stmt>")         \
   V(I32, 2, I32, Int32, 'i', "i32")              \
@@ -59,20 +64,24 @@ class ValueType {
 #undef DEF_ENUM
   };
 
+  constexpr bool has_immediate() const {
+    return kind() == kRef || kind() == kOptRef;
+  }
+
   constexpr ValueType() : bit_field_(KindField::encode(kStmt)) {}
   explicit constexpr ValueType(Kind kind)
       : bit_field_(KindField::encode(kind)) {
-    DCHECK(kind != kRef && kind != kOptRef);
+    DCHECK(!has_immediate());
   }
   constexpr ValueType(Kind kind, uint32_t ref_index)
      : bit_field_(KindField::encode(kind) | RefIndexField::encode(ref_index)) {
-    DCHECK(kind == kRef || kind == kOptRef);
+    DCHECK(has_immediate());
   }
 
   constexpr Kind kind() const { return KindField::decode(bit_field_); }
   constexpr uint32_t ref_index() const {
 #if V8_HAS_CXX14_CONSTEXPR
-    DCHECK(kind() == kRef || kind() == kOptRef);
+    DCHECK(has_immediate());
 #endif
     return RefIndexField::decode(bit_field_);
   }
@@ -101,13 +110,18 @@ class ValueType {
     return bit_field_ != other.bit_field_;
   }
 
+  // TODO(7748): Extend this with eqref, struct and function subtyping.
+  // Keep up to date with funcref vs. anyref subtyping.
   bool IsSubTypeOf(ValueType other) const {
     return (*this == other) ||
-           (kind() == kNullRef && other.kind() == kAnyRef) ||
-           (kind() == kFuncRef && other.kind() == kAnyRef) ||
-           (kind() == kExnRef && other.kind() == kAnyRef) ||
-           (kind() == kNullRef && other.kind() == kFuncRef) ||
-           (kind() == kNullRef && other.kind() == kExnRef);
+           (kind() == kNullRef &&
+            (other.kind() == kAnyRef || other.kind() == kFuncRef ||
+             other.kind() == kExnRef || other.kind() == kOptRef)) ||
+           (other.kind() == kAnyRef &&
+            (kind() == kFuncRef || kind() == kExnRef || kind() == kOptRef ||
+             kind() == kRef)) ||
+           (kind() == kRef && other.kind() == kOptRef &&
+            ref_index() == other.ref_index());
   }
 
   bool IsReferenceType() const {
@@ -116,17 +130,21 @@ class ValueType {
            kind() == kEqRef;
   }
 
+  // TODO(7748): Extend this with eqref, struct and function subtyping.
+  // Keep up to date with funcref vs. anyref subtyping.
   static ValueType CommonSubType(ValueType a, ValueType b) {
-    if (a.kind() == b.kind()) return a;
+    if (a == b) return a;
     // The only sub type of any value type is {bot}.
     if (!a.IsReferenceType() || !b.IsReferenceType()) {
       return ValueType(kBottom);
     }
     if (a.IsSubTypeOf(b)) return a;
    if (b.IsSubTypeOf(a)) return b;
-    // {a} and {b} are not each other's subtype. The biggest sub-type of all
-    // reference types is {kWasmNullRef}.
-    return ValueType(kNullRef);
+    // {a} and {b} are not each other's subtype.
+    // If one of them is not nullable, their greatest subtype is bottom,
+    // otherwise null.
+    return (a.kind() == kRef || b.kind() == kRef) ? ValueType(kBottom)
+                                                  : ValueType(kNullRef);
   }
 
   ValueTypeCode value_type_code() const {


@@ -408,7 +408,7 @@ void WasmModuleBuilder::SetHasSharedMemory() { has_shared_memory_ = true; }
 namespace {
 void WriteValueType(ZoneBuffer* buffer, const ValueType& type) {
   buffer->write_u8(type.value_type_code());
-  if (type.kind() == ValueType::kRef || type.kind() == ValueType::kOptRef) {
+  if (type.has_immediate()) {
     buffer->write_u32v(type.ref_index());
   }
 }


@@ -41,6 +41,7 @@ WASM_EXEC_TEST(BasicStruct) {
   type_builder.AddField(kWasmI32);
   int32_t type_index = builder->AddStructType(type_builder.Build());
   ValueType kRefTypes[] = {ValueType(ValueType::kRef, type_index)};
+  ValueType kOptRefType = ValueType(ValueType::kOptRef, type_index);
   FunctionSig sig_q_v(1, 0, kRefTypes);
 
   WasmFunctionBuilder* f = builder->AddFunction(sigs.i_v());
@@ -65,6 +66,19 @@ WASM_EXEC_TEST(BasicStruct) {
                     kExprEnd};
   h->EmitCode(h_code, sizeof(h_code));
 
+  WasmFunctionBuilder* j = builder->AddFunction(sigs.i_v());
+  uint32_t local_index = j->AddLocal(kOptRefType);
+  uint32_t field_index = 0;
+  j->builder()->AddExport(CStrVector("j"), j);
+  byte i_code[] = {
+      WASM_SET_LOCAL(local_index,
+                     WASM_STRUCT_NEW(type_index, WASM_I32V(42), WASM_I32V(64))),
+      WASM_STRUCT_SET(type_index, field_index, WASM_GET_LOCAL(local_index),
+                      WASM_I32V(-99)),
+      WASM_STRUCT_GET(type_index, field_index, WASM_GET_LOCAL(local_index)),
+      kExprEnd};
+  j->EmitCode(i_code, sizeof(i_code));
+
   ZoneBuffer buffer(&zone);
   builder->WriteTo(&buffer);
@@ -92,6 +106,9 @@ WASM_EXEC_TEST(BasicStruct) {
       Execution::Call(isolate, h_export, undefined, 0, nullptr)
           .ToHandleChecked();
   CHECK(ref_result->IsWasmStruct());
+
+  CHECK_EQ(-99, testing::CallWasmFunctionForTesting(isolate, instance, &thrower,
+                                                    "j", 0, nullptr));
 }
 
 }  // namespace test_gc


@@ -415,6 +415,9 @@ inline WasmOpcode LoadStoreOpcodeOf(MachineType type, bool store) {
 #define WASM_STRUCT_GET(typeidx, fieldidx, ...)                        \
   __VA_ARGS__, WASM_GC_OP(kExprStructGet), static_cast<byte>(typeidx), \
       static_cast<byte>(fieldidx)
+#define WASM_STRUCT_SET(typeidx, fieldidx, ...)                        \
+  __VA_ARGS__, WASM_GC_OP(kExprStructSet), static_cast<byte>(typeidx), \
+      static_cast<byte>(fieldidx)
 
 // Pass: sig_index, ...args, func_index
 #define WASM_CALL_INDIRECT(sig_index, ...) \