[wasm-gc] Remove rtts with depth

Since the inheritance depth of every type is known in the isorecursive
hybrid type system, rtts with depth are removed. This simplifies type
checks in Liftoff and TurboFan, as well as the decoding of object
allocation instructions.

Bug: v8:7748
Change-Id: I6b52579b584191d92644de1c6e805d9f054641d3
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3422626
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Manos Koukoutos <manoskouk@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78860}
Author: Manos Koukoutos, 2022-01-28 14:53:45 +00:00 (committed by V8 LUCI CQ)
parent 0cfbf51efb
commit 8bb8bfdefc
42 changed files with 110 additions and 368 deletions
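
Before the per-file hunks, here is a minimal, self-contained sketch (not V8's
actual code) of the check this change enables: because the module records each
type's supertype, the subtyping depth can be recomputed at compile time (this
is what GetSubtypingDepth does in the diffs below), and the generated type
check then compares a single constant-index slot of the object's supertype
array. All type and function names in the sketch (Module, TypeDef,
RuntimeTypeInfo, SubtypingDepth, IsRttSubtype) are illustrative stand-ins for
the real V8 data structures.

#include <cstdint>
#include <vector>

constexpr uint32_t kNoSuperType = ~0u;

struct TypeDef {
  uint32_t supertype = kNoSuperType;  // direct supertype index, if any
};

struct Module {
  std::vector<TypeDef> types;
};

// The depth of a type in its inheritance chain (0 if it has no supertype).
// Because this is derivable from the module alone, carrying a depth inside
// the rtt value type is redundant.
uint32_t SubtypingDepth(const Module& module, uint32_t type_index) {
  uint32_t depth = 0;
  for (uint32_t t = module.types[type_index].supertype; t != kNoSuperType;
       t = module.types[t].supertype) {
    ++depth;
  }
  return depth;
}

// Per-object type metadata: supertypes[i] is the canonical rtt of the
// object's supertype at depth i.
struct RuntimeTypeInfo {
  std::vector<const void*> supertypes;
};

// The check Liftoff/TurboFan now emit, expressed in plain C++. In the
// generated code the bounds check is skipped when target_depth is below
// kMinimumSupertypeArraySize (supertype arrays are at least that long);
// this sketch always checks against the vector size for safety.
bool IsRttSubtype(const RuntimeTypeInfo& info, const void* target_rtt,
                  uint32_t target_depth) {
  if (target_depth >= info.supertypes.size()) return false;
  return info.supertypes[target_depth] == target_rtt;
}

The same shape appears in the wasm-compiler.cc TypeCheck hunk and the Liftoff
subtype-check hunk below; the removed slow path through the WasmSubtypeCheck
runtime stub is no longer needed because the depth is never unknown.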

View File

@ -2499,7 +2499,6 @@ Node* WasmGraphBuilder::Throw(uint32_t tag_index, const wasm::WasmTag* tag,
case wasm::kRef:
case wasm::kOptRef:
case wasm::kRtt:
case wasm::kRttWithDepth:
gasm_->StoreFixedArrayElementAny(values_array, index, value);
++index;
break;
@ -2631,7 +2630,6 @@ Node* WasmGraphBuilder::GetExceptionValues(Node* except_obj,
case wasm::kRef:
case wasm::kOptRef:
case wasm::kRtt:
case wasm::kRttWithDepth:
value = gasm_->LoadFixedArrayElementAny(values_array, index);
++index;
break;
@ -5745,16 +5743,12 @@ void WasmGraphBuilder::TypeCheck(
Node* type_info = gasm_->LoadWasmTypeInfo(map);
Node* supertypes = gasm_->LoadSupertypes(type_info);
Node* rtt_depth =
config.rtt_depth >= 0
? gasm_->IntPtrConstant(config.rtt_depth)
: BuildChangeSmiToIntPtr(gasm_->LoadFixedArrayLengthAsSmi(
gasm_->LoadSupertypes(gasm_->LoadWasmTypeInfo(rtt))));
Node* rtt_depth = gasm_->UintPtrConstant(config.rtt_depth);
// If the depth of the rtt is known to be less than the minimum supertype
// array length, we can access the supertype without bounds-checking the
// supertype array.
if (config.rtt_depth < 0 || static_cast<uint32_t>(config.rtt_depth) >=
wasm::kMinimumSupertypeArraySize) {
if (config.rtt_depth >= wasm::kMinimumSupertypeArraySize) {
Node* supertypes_length =
BuildChangeSmiToIntPtr(gasm_->LoadFixedArrayLengthAsSmi(supertypes));
callbacks.fail_if_not(gasm_->UintLessThan(rtt_depth, supertypes_length),
@ -6420,7 +6414,6 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
UNREACHABLE();
}
case wasm::kRtt:
case wasm::kRttWithDepth:
case wasm::kI8:
case wasm::kI16:
case wasm::kS128:
@ -6593,7 +6586,6 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
return BuildChangeBigIntToInt64(input, js_context, frame_state);
case wasm::kRtt:
case wasm::kRttWithDepth:
case wasm::kS128:
case wasm::kI8:
case wasm::kI16:
@ -6649,7 +6641,6 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
case wasm::kOptRef:
case wasm::kI64:
case wasm::kRtt:
case wasm::kRttWithDepth:
case wasm::kS128:
case wasm::kI8:
case wasm::kI16:
@ -6815,7 +6806,6 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
case wasm::kOptRef:
case wasm::kI64:
case wasm::kRtt:
case wasm::kRttWithDepth:
case wasm::kS128:
case wasm::kI8:
case wasm::kI16:
@ -6866,7 +6856,6 @@ class WasmWrapperGraphBuilder : public WasmGraphBuilder {
case wasm::kOptRef:
case wasm::kI64:
case wasm::kRtt:
case wasm::kRttWithDepth:
case wasm::kS128:
case wasm::kI8:
case wasm::kI16:

View File

@ -230,7 +230,7 @@ class WasmGraphBuilder {
struct ObjectReferenceKnowledge {
bool object_can_be_null;
ReferenceKind reference_kind;
int8_t rtt_depth;
uint8_t rtt_depth;
};
enum EnforceBoundsCheck : bool { // --
kNeedsBoundsCheck = true,

View File

@ -1044,8 +1044,7 @@ Handle<WasmValueObject> WasmValueObject::New(
}
break;
}
case wasm::kRtt:
case wasm::kRttWithDepth: {
case wasm::kRtt: {
// TODO(7748): Expose RTTs to DevTools.
t = isolate->factory()->InternalizeString(base::StaticCharVector("rtt"));
v = isolate->factory()->InternalizeString(

View File

@ -1810,8 +1810,7 @@ void WasmStruct::WasmStructPrint(std::ostream& os) {
break;
case wasm::kRef:
case wasm::kOptRef:
case wasm::kRtt:
case wasm::kRttWithDepth: {
case wasm::kRtt: {
Tagged_t raw = base::ReadUnalignedValue<Tagged_t>(field_address);
#if V8_COMPRESS_POINTERS
Address obj = DecompressTaggedPointer(address(), raw);
@ -1868,7 +1867,6 @@ void WasmArray::WasmArrayPrint(std::ostream& os) {
case wasm::kRef:
case wasm::kOptRef:
case wasm::kRtt:
case wasm::kRttWithDepth:
os << "\n Printing elements of this type is unimplemented, sorry";
// TODO(7748): Implement.
break;

View File

@ -883,7 +883,6 @@ inline WasmValueType GetWasmValueType(wasm::ValueType type) {
TYPE_CASE(OptRef)
case wasm::kRtt:
case wasm::kRttWithDepth:
// Rtt values are not supposed to be made available to JavaScript side.
UNREACHABLE();

View File

@ -310,7 +310,6 @@ inline void Store(LiftoffAssembler* assm, LiftoffRegister src, MemOperand dst,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->str(src.gp(), dst);
break;
case kI64:
@ -345,7 +344,6 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->ldr(dst.gp(), src);
break;
case kI64:

View File

@ -85,7 +85,6 @@ inline CPURegister GetRegFromType(const LiftoffRegister& reg, ValueKind kind) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
return reg.gp().X();
case kF32:
return reg.fp().S();
@ -1567,7 +1566,6 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
DCHECK(rhs.is_valid());
DCHECK(liftoff_cond == kEqual || liftoff_cond == kUnequal);
V8_FALLTHROUGH;

View File

@ -76,7 +76,6 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->mov(dst.gp(), src);
break;
case kI64:
@ -105,7 +104,6 @@ inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->mov(dst, src.gp());
break;
case kI64:
@ -136,7 +134,6 @@ inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
assm->AllocateStackSpace(padding);
assm->push(reg.gp());
break;
@ -1206,7 +1203,6 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
mov(dst, reg.gp());
break;
case kI64:
@ -2470,7 +2466,6 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
DCHECK(liftoff_cond == kEqual || liftoff_cond == kUnequal);
V8_FALLTHROUGH;
case kI32:

View File

@ -590,7 +590,6 @@ class LiftoffCompiler {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
case kI8:
case kI16:
bailout_reason = kGC;
@ -4262,8 +4261,7 @@ class LiftoffCompiler {
}
case wasm::kRef:
case wasm::kOptRef:
case wasm::kRtt:
case wasm::kRttWithDepth: {
case wasm::kRtt: {
--(*index_in_array);
__ StoreTaggedPointer(
values_array, no_reg,
@ -4321,8 +4319,7 @@ class LiftoffCompiler {
}
case wasm::kRef:
case wasm::kOptRef:
case wasm::kRtt:
case wasm::kRttWithDepth: {
case wasm::kRtt: {
__ LoadTaggedPointer(
value.gp(), values_array.gp(), no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(*index),
@ -5446,7 +5443,7 @@ class LiftoffCompiler {
__ LoadTaggedPointer(
rtt.gp(), rtt.gp(), no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(type_index), {});
__ PushRegister(kRttWithDepth, rtt);
__ PushRegister(kRtt, rtt);
}
enum NullSucceeds : bool { // --
@ -5507,37 +5504,21 @@ class LiftoffCompiler {
wasm::ObjectAccess::ToTagged(WasmTypeInfo::kSupertypesOffset);
__ LoadTaggedPointer(tmp1.gp(), tmp1.gp(), no_reg, kSuperTypesOffset,
pinned);
if (rtt.type.has_depth()) {
// Step 3: check the list's length if needed.
if (rtt.type.depth() >= kMinimumSupertypeArraySize) {
LiftoffRegister list_length = tmp2;
__ LoadFixedArrayLengthAsInt32(list_length, tmp1.gp(), pinned);
__ emit_i32_cond_jumpi(kUnsignedLessEqual, no_match, list_length.gp(),
rtt.type.depth());
}
// Step 4: load the candidate list slot into {tmp1}, and compare it.
__ LoadTaggedPointer(
tmp1.gp(), tmp1.gp(), no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(rtt.type.depth()),
pinned);
__ emit_cond_jump(kUnequal, no_match, rtt.type.kind(), tmp1.gp(),
rtt_reg.gp());
} else {
// Step 3: if rtt's depth is unknown, we invoke a builtin to compute the
// result, as we might not have enough available registers.
// Preserve {obj_reg} across the call.
LiftoffRegList saved_regs = LiftoffRegList::ForRegs(obj_reg);
__ PushRegisters(saved_regs);
LiftoffAssembler::VarState rtt_state(kPointerKind, rtt_reg, 0);
LiftoffAssembler::VarState tmp1_state(kPointerKind, tmp1, 0);
CallRuntimeStub(WasmCode::kWasmSubtypeCheck,
MakeSig::Returns(kI32).Params(kOptRef, rtt.type.kind()),
{tmp1_state, rtt_state}, decoder->position());
__ PopRegisters(saved_regs);
__ Move(tmp1.gp(), kReturnRegister0, kI32);
__ emit_i32_cond_jumpi(kEqual, no_match, tmp1.gp(), 0);
// Step 3: check the list's length if needed.
uint32_t rtt_depth =
GetSubtypingDepth(decoder->module_, rtt.type.ref_index());
if (rtt_depth >= kMinimumSupertypeArraySize) {
LiftoffRegister list_length = tmp2;
__ LoadFixedArrayLengthAsInt32(list_length, tmp1.gp(), pinned);
__ emit_i32_cond_jumpi(kUnsignedLessEqual, no_match, list_length.gp(),
rtt_depth);
}
// Step 4: load the candidate list slot into {tmp1}, and compare it.
__ LoadTaggedPointer(
tmp1.gp(), tmp1.gp(), no_reg,
wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(rtt_depth), pinned);
__ emit_cond_jump(kUnequal, no_match, rtt.type.kind(), tmp1.gp(),
rtt_reg.gp());
// Fall through to {match}.
__ bind(&match);
@ -6258,7 +6239,6 @@ class LiftoffCompiler {
case kOptRef:
return LoadNullValue(reg.gp(), pinned);
case kRtt:
case kRttWithDepth:
case kVoid:
case kBottom:
case kRef:
@ -6399,7 +6379,7 @@ class LiftoffCompiler {
// MVP:
kI32, kI64, kF32, kF64,
// Extern ref:
kRef, kOptRef, kRtt, kRttWithDepth, kI8, kI16};
kRef, kOptRef, kRtt, kI8, kI16};
LiftoffAssembler asm_;

View File

@ -69,7 +69,6 @@ static inline constexpr RegClass reg_class_for(ValueKind kind) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
return kGpReg;
default:
return kNoReg; // unsupported kind

View File

@ -106,7 +106,6 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
assm->Ld_d(dst.gp(), src);
break;
case kF32:
@ -134,7 +133,6 @@ inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->St_d(src.gp(), dst);
break;
case kF32:
@ -623,7 +621,6 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
St_d(reg.gp(), dst);
break;
case kF32:
@ -676,7 +673,6 @@ void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
case kOptRef:
// TODO(LOONG_dev): LOONG64 Check, MIPS64 doesn't need, ARM64/LOONG64 need?
case kRtt:
case kRttWithDepth:
Ld_d(reg.gp(), src);
break;
case kF32:

View File

@ -95,7 +95,6 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
assm->lw(dst.gp(), src);
break;
case kI64:
@ -123,7 +122,6 @@ inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->Usw(src.gp(), dst);
break;
case kI64:
@ -819,7 +817,6 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
sw(reg.gp(), dst);
break;
case kI64:

View File

@ -106,7 +106,6 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
assm->Ld(dst.gp(), src);
break;
case kF32:
@ -134,7 +133,6 @@ inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->Usd(src.gp(), dst);
break;
case kF32:
@ -750,7 +748,6 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
Sd(reg.gp(), dst);
break;
case kF32:

View File

@ -815,7 +815,6 @@ void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
case kRef:
case kRtt:
case kOptRef:
case kRttWithDepth:
case kI64: {
LoadU64(dst.gp(), MemOperand(fp, offset), r0);
break;
@ -890,7 +889,6 @@ void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
case kRef:
case kRtt:
case kOptRef:
case kRttWithDepth:
case kI64: {
LoadU64(dst.gp(), MemOperand(sp, offset), r0);
break;
@ -969,7 +967,6 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
StoreU64(reg.gp(), liftoff::GetStackSlot(offset), r0);
break;
case kF32:
@ -1018,7 +1015,6 @@ void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
LoadU64(reg.gp(), liftoff::GetStackSlot(offset), r0);
break;
case kF32:
@ -1602,7 +1598,6 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
DCHECK(liftoff_cond == kEqual || liftoff_cond == kUnequal);
V8_FALLTHROUGH;
case kI64:
@ -2997,7 +2992,6 @@ void LiftoffAssembler::CallC(const ValueKindSig* sig,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
LoadU64(result_reg->gp(), MemOperand(sp));
break;
case kF32:
@ -3077,7 +3071,6 @@ void LiftoffStackSlots::Construct(int param_slots) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
case kI64: {
asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
UseScratchRegisterScope temps(asm_);
@ -3120,7 +3113,6 @@ void LiftoffStackSlots::Construct(int param_slots) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
asm_->push(src.reg().gp());
break;
case kF32:

View File

@ -990,7 +990,6 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
Sd(reg.gp(), dst);
break;
case kF32:

View File

@ -1251,7 +1251,6 @@ void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
case kRef:
case kRtt:
case kOptRef:
case kRttWithDepth:
case kI64: {
LoadU64(dst.gp(), MemOperand(fp, offset));
break;
@ -1330,7 +1329,6 @@ void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
case kRef:
case kRtt:
case kOptRef:
case kRttWithDepth:
case kI64: {
LoadU64(dst.gp(), MemOperand(sp, offset));
break;
@ -1432,7 +1430,6 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
StoreU64(reg.gp(), liftoff::GetStackSlot(offset));
break;
case kF32:
@ -1483,7 +1480,6 @@ void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
LoadU64(reg.gp(), liftoff::GetStackSlot(offset));
break;
case kF32:
@ -2142,7 +2138,6 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
DCHECK(liftoff_cond == kEqual || liftoff_cond == kUnequal);
V8_FALLTHROUGH;
case kI64:
@ -2882,7 +2877,6 @@ void LiftoffAssembler::CallC(const ValueKindSig* sig,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
LoadU64(result_reg->gp(), MemOperand(sp));
break;
case kF32:
@ -2988,7 +2982,6 @@ void LiftoffStackSlots::Construct(int param_slots) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
case kI64: {
asm_->AllocateStackSpace(stack_decrement - kSystemPointerSize);
UseScratchRegisterScope temps(asm_);
@ -3036,7 +3029,6 @@ void LiftoffStackSlots::Construct(int param_slots) {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
asm_->push(src.reg().gp());
break;
case kF32:

View File

@ -99,7 +99,6 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Operand src,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->movq(dst.gp(), src);
break;
case kF32:
@ -128,7 +127,6 @@ inline void Store(LiftoffAssembler* assm, Operand dst, LiftoffRegister src,
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
assm->StoreTaggedField(dst, src.gp());
break;
case kF32:
@ -927,7 +925,6 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
movq(dst, reg.gp());
break;
case kF32:
@ -2145,7 +2142,6 @@ void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
DCHECK(liftoff_cond == kEqual || liftoff_cond == kUnequal);
V8_FALLTHROUGH;
case kI64:

View File

@ -1562,7 +1562,6 @@ void PushArgs(const i::wasm::FunctionSig* sig, const Val args[],
packer->Push(WasmRefToV8(store->i_isolate(), args[i].ref())->ptr());
break;
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
case i::wasm::kS128:
// TODO(7748): Implement.
UNIMPLEMENTED();
@ -1602,7 +1601,6 @@ void PopArgs(const i::wasm::FunctionSig* sig, Val results[],
break;
}
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
case i::wasm::kS128:
// TODO(7748): Implement.
UNIMPLEMENTED();
@ -1865,7 +1863,6 @@ auto Global::get() const -> Val {
return Val(V8RefValueToWasm(store, v8_global->GetRef()));
}
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
case i::wasm::kS128:
// TODO(7748): Implement these.
UNIMPLEMENTED();

View File

@ -313,44 +313,9 @@ ValueType read_value_type(Decoder* decoder, const byte* pc,
return heap_type.is_bottom() ? kWasmBottom
: ValueType::Ref(heap_type, nullability);
}
case kRttWithDepthCode: {
if (!VALIDATE(enabled.has_gc())) {
DecodeError<validate>(
decoder, pc,
"invalid value type 'rtt', enable with --experimental-wasm-gc");
return kWasmBottom;
}
uint32_t depth = decoder->read_u32v<validate>(pc + 1, length, "depth");
*length += 1;
if (!VALIDATE(depth <= kV8MaxRttSubtypingDepth)) {
DecodeError<validate>(
decoder, pc,
"subtyping depth %u is greater than the maximum depth "
"%u supported by V8",
depth, kV8MaxRttSubtypingDepth);
return kWasmBottom;
}
uint32_t type_index_length;
uint32_t type_index =
decoder->read_u32v<validate>(pc + *length, &type_index_length);
*length += type_index_length;
if (!VALIDATE(type_index < kV8MaxWasmTypes)) {
DecodeError<validate>(
decoder, pc,
"Type index %u is greater than the maximum number %zu "
"of type definitions supported by V8",
type_index, kV8MaxWasmTypes);
return kWasmBottom;
}
// We use capacity over size so this works mid-DecodeTypeSection.
if (!VALIDATE(module == nullptr ||
type_index < module->types.capacity())) {
DecodeError<validate>(decoder, pc, "Type index %u is out of bounds",
type_index);
return kWasmBottom;
}
return ValueType::Rtt(type_index, depth);
}
// TODO(7748): This is here only for backwards compatibility, and the parsed
// depth is ignored.
case kRttWithDepthCode:
case kRttCode: {
if (!VALIDATE(enabled.has_gc())) {
DecodeError<validate>(
@ -358,8 +323,22 @@ ValueType read_value_type(Decoder* decoder, const byte* pc,
"invalid value type 'rtt', enable with --experimental-wasm-gc");
return kWasmBottom;
}
uint32_t type_index = decoder->read_u32v<validate>(pc + 1, length);
*length += 1;
if (code == kRttWithDepthCode) {
uint32_t depth = decoder->read_u32v<validate>(pc + 1, length, "depth");
*length += 1;
if (!VALIDATE(depth <= kV8MaxRttSubtypingDepth)) {
DecodeError<validate>(
decoder, pc,
"subtyping depth %u is greater than the maximum depth "
"%u supported by V8",
depth, kV8MaxRttSubtypingDepth);
return kWasmBottom;
}
}
uint32_t type_index_length;
uint32_t type_index =
decoder->read_u32v<validate>(pc + *length, &type_index_length);
*length += type_index_length;
if (!VALIDATE(type_index < kV8MaxWasmTypes)) {
DecodeError<validate>(
decoder, pc,
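
For reference, a sketch of the two byte encodings the merged case above now
accepts. The opcode values are taken from the value-type code list later in
this change, operands are written as single-byte LEB128 values, and the depth
operand of the legacy form is validated and then ignored:

#include <cstdint>

// Canonical rtt for type index 2: kRttCode (0x68), then the type index.
const uint8_t kRttEncoding[] = {0x68, 0x02};

// Legacy rtt-with-depth for type index 2 at depth 1: kRttWithDepthCode
// (0x69), then the (now ignored) depth, then the type index.
const uint8_t kRttWithDepthEncoding[] = {0x69, 0x01, 0x02};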
@ -4051,28 +4030,13 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
case kExprStructNewWithRtt: {
StructIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
ValueType rtt_type = ValueType::Rtt(imm.index);
Value rtt = opcode == kExprStructNew
? CreateValue(ValueType::Rtt(imm.index))
: Peek(0, imm.struct_type->field_count());
? CreateValue(rtt_type)
: Peek(0, imm.struct_type->field_count(), rtt_type);
if (opcode == kExprStructNew) {
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
Push(rtt);
} else {
DCHECK_EQ(opcode, kExprStructNewWithRtt);
if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
PopTypeError(imm.struct_type->field_count(), rtt, "rtt");
return 0;
}
// TODO(7748): Drop this check if {imm} is dropped from the proposal
// à la https://github.com/WebAssembly/function-references/pull/31.
if (!VALIDATE(rtt.type.is_bottom() ||
(rtt.type.ref_index() == imm.index &&
rtt.type.has_depth()))) {
PopTypeError(
imm.struct_type->field_count(), rtt,
"rtt with depth for type " + std::to_string(imm.index));
return 0;
}
}
ArgVector args = PeekArgs(imm.struct_type, 1);
Value value = CreateValue(ValueType::Ref(imm.index, kNonNullable));
@ -4099,27 +4063,12 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
}
}
}
Value rtt = opcode == kExprStructNewDefault
? CreateValue(ValueType::Rtt(imm.index))
: Peek(0, 0);
ValueType rtt_type = ValueType::Rtt(imm.index);
Value rtt = opcode == kExprStructNewDefault ? CreateValue(rtt_type)
: Peek(0, 0, rtt_type);
if (opcode == kExprStructNewDefault) {
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
Push(rtt);
} else {
DCHECK_EQ(opcode, kExprStructNewDefaultWithRtt);
if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
PopTypeError(0, rtt, "rtt");
return 0;
}
// TODO(7748): Drop this check if {imm} is dropped from the proposal
// à la https://github.com/WebAssembly/function-references/pull/31.
if (!VALIDATE(rtt.type.is_bottom() ||
(rtt.type.ref_index() == imm.index &&
rtt.type.has_depth()))) {
PopTypeError(
0, rtt, "rtt with depth for type " + std::to_string(imm.index));
return 0;
}
}
Value value = CreateValue(ValueType::Ref(imm.index, kNonNullable));
CALL_INTERFACE_IF_OK_AND_REACHABLE(StructNewDefault, imm, rtt, &value);
@ -4198,27 +4147,12 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
NON_CONST_ONLY
ArrayIndexImmediate<validate> imm(this, this->pc_ + opcode_length);
if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
Value rtt = opcode == kExprArrayNew
? CreateValue(ValueType::Rtt(imm.index))
: Peek(0, 2);
ValueType rtt_type = ValueType::Rtt(imm.index);
Value rtt = opcode == kExprArrayNew ? CreateValue(rtt_type)
: Peek(0, 2, rtt_type);
if (opcode == kExprArrayNew) {
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
Push(rtt);
} else {
DCHECK_EQ(opcode, kExprArrayNewWithRtt);
if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
PopTypeError(2, rtt, "rtt");
return 0;
}
// TODO(7748): Drop this check if {imm} is dropped from the proposal
// à la https://github.com/WebAssembly/function-references/pull/31.
if (!VALIDATE(rtt.type.is_bottom() ||
(rtt.type.ref_index() == imm.index &&
rtt.type.has_depth()))) {
PopTypeError(
2, rtt, "rtt with depth for type " + std::to_string(imm.index));
return 0;
}
}
Value length = Peek(1, 1, kWasmI32);
Value initial_value =
@ -4242,27 +4176,12 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
imm.array_type->element_type().name().c_str());
return 0;
}
Value rtt = opcode == kExprArrayNewDefault
? CreateValue(ValueType::Rtt(imm.index))
: Peek(0, 1);
ValueType rtt_type = ValueType::Rtt(imm.index);
Value rtt = opcode == kExprArrayNewDefault ? CreateValue(rtt_type)
: Peek(0, 1, rtt_type);
if (opcode == kExprArrayNewDefault) {
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
Push(rtt);
} else {
DCHECK_EQ(opcode, kExprArrayNewDefaultWithRtt);
if (!VALIDATE(rtt.type.is_rtt() || rtt.type.is_bottom())) {
PopTypeError(1, rtt, "rtt");
return 0;
}
// TODO(7748): Drop this check if {imm} is dropped from the proposal
// à la https://github.com/WebAssembly/function-references/pull/31.
if (!VALIDATE(rtt.type.is_bottom() ||
(rtt.type.ref_index() == imm.index &&
rtt.type.has_depth()))) {
PopTypeError(
1, rtt, "rtt with depth for type " + std::to_string(imm.index));
return 0;
}
}
Value length = Peek(1, 0, kWasmI32);
Value value = CreateValue(ValueType::Ref(imm.index, kNonNullable));
@ -4487,8 +4406,7 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
IndexImmediate<validate> imm(this, this->pc_ + opcode_length,
"type index");
if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
Value value = CreateValue(ValueType::Rtt(
imm.index, GetSubtypingDepth(this->module_, imm.index)));
Value value = CreateValue(ValueType::Rtt(imm.index));
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &value);
Push(value);
return opcode_length + imm.length;
@ -4503,8 +4421,7 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
"type index");
if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
opcode_length += imm.length;
rtt = CreateValue(ValueType::Rtt(
imm.index, GetSubtypingDepth(this->module_, imm.index)));
rtt = CreateValue(ValueType::Rtt(imm.index));
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
Push(rtt);
} else {
@ -4560,8 +4477,7 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
"type index");
if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
opcode_length += imm.length;
rtt = CreateValue(ValueType::Rtt(
imm.index, GetSubtypingDepth(this->module_, imm.index)));
rtt = CreateValue(ValueType::Rtt(imm.index));
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
Push(rtt);
} else {
@ -4631,8 +4547,7 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
"type index");
if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
pc_offset += imm.length;
rtt = CreateValue(ValueType::Rtt(
imm.index, GetSubtypingDepth(this->module_, imm.index)));
rtt = CreateValue(ValueType::Rtt(imm.index));
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
Push(rtt);
} else {
@ -4713,8 +4628,7 @@ class WasmFullDecoder : public WasmDecoder<validate, decoding_mode> {
"type index");
if (!this->ValidateType(this->pc_ + opcode_length, imm)) return 0;
pc_offset += imm.length;
rtt = CreateValue(ValueType::Rtt(
imm.index, GetSubtypingDepth(this->module_, imm.index)));
rtt = CreateValue(ValueType::Rtt(imm.index));
CALL_INTERFACE_IF_OK_AND_REACHABLE(RttCanon, imm.index, &rtt);
Push(rtt);
} else {

View File

@ -1148,10 +1148,13 @@ class WasmGraphBuildingInterface {
DCHECK(object_type.is_object_reference()); // Checked by validation.
// In the bottom case, the result is irrelevant.
result.reference_kind =
rtt_type != kWasmBottom && module->has_signature(rtt_type.ref_index())
!rtt_type.is_bottom() && module->has_signature(rtt_type.ref_index())
? compiler::WasmGraphBuilder::kFunction
: compiler::WasmGraphBuilder::kArrayOrStruct;
result.rtt_depth = rtt_type.has_depth() ? rtt_type.depth() : -1;
result.rtt_depth = rtt_type.is_bottom()
? 0 /* unused */
: static_cast<uint8_t>(GetSubtypingDepth(
module, rtt_type.ref_index()));
return result;
}
@ -1460,7 +1463,6 @@ class WasmGraphBuildingInterface {
case kOptRef:
return builder_->RefNull();
case kRtt:
case kRttWithDepth:
case kVoid:
case kBottom:
case kRef:

View File

@ -117,7 +117,6 @@ WasmValue DefaultValueForType(ValueType type, Isolate* isolate) {
return WasmValue(isolate->factory()->null_value(), type);
case kVoid:
case kRtt:
case kRttWithDepth:
case kRef:
case kBottom:
UNREACHABLE();
@ -192,7 +191,7 @@ void InitExprInterface::RttCanon(FullDecoder* decoder, uint32_t type_index,
if (!generate_result()) return;
result->runtime_value = WasmValue(
handle(instance_->managed_object_maps().get(type_index), isolate_),
ValueType::Rtt(type_index, 0));
ValueType::Rtt(type_index));
}
void InitExprInterface::DoReturn(FullDecoder* decoder,

View File

@ -37,10 +37,6 @@ size_t LocalDeclEncoder::Emit(byte* buffer) const {
LEBHelper::write_u32v(&pos, locals_count);
*pos = locals_type.value_type_code();
++pos;
if (locals_type.has_depth()) {
*pos = locals_type.depth();
++pos;
}
if (locals_type.is_rtt()) {
LEBHelper::write_u32v(&pos, locals_type.ref_index());
}
@ -72,7 +68,6 @@ size_t LocalDeclEncoder::Size() const {
size +=
LEBHelper::sizeof_u32v(p.first) + // number of locals
1 + // Opcode
(p.second.has_depth() ? 1 : 0) + // Inheritance depth
(p.second.encoding_needs_heap_type()
? LEBHelper::sizeof_i32v(p.second.heap_type().code())
: 0) +

View File

@ -1421,7 +1421,6 @@ bool InstanceBuilder::ProcessImportedWasmGlobalObject(
value = WasmValue(global_object->GetF64());
break;
case kRtt:
case kRttWithDepth:
case kRef:
case kOptRef:
value = WasmValue(global_object->GetRef(), global_object->type());

View File

@ -41,13 +41,12 @@ class Simd128;
V(I8, 0, I8, Int8, 'b', "i8") \
V(I16, 1, I16, Int16, 'h', "i16")
#define FOREACH_VALUE_TYPE(V) \
V(Void, -1, Void, None, 'v', "<void>") \
FOREACH_NUMERIC_VALUE_TYPE(V) \
V(Rtt, kTaggedSizeLog2, Rtt, TaggedPointer, 't', "rtt") \
V(RttWithDepth, kTaggedSizeLog2, RttWithDepth, TaggedPointer, 'k', "rtt") \
V(Ref, kTaggedSizeLog2, Ref, AnyTagged, 'r', "ref") \
V(OptRef, kTaggedSizeLog2, OptRef, AnyTagged, 'n', "ref null") \
#define FOREACH_VALUE_TYPE(V) \
V(Void, -1, Void, None, 'v', "<void>") \
FOREACH_NUMERIC_VALUE_TYPE(V) \
V(Rtt, kTaggedSizeLog2, Rtt, TaggedPointer, 't', "rtt") \
V(Ref, kTaggedSizeLog2, Ref, AnyTagged, 'r', "ref") \
V(OptRef, kTaggedSizeLog2, OptRef, AnyTagged, 'n', "ref null") \
V(Bottom, -1, Void, None, '*', "<bot>")
constexpr int kMaxValueTypeSize = 16; // bytes
@ -208,8 +207,7 @@ constexpr bool is_numeric(ValueKind kind) {
}
constexpr bool is_reference(ValueKind kind) {
return kind == kRef || kind == kOptRef || kind == kRtt ||
kind == kRttWithDepth;
return kind == kRef || kind == kOptRef || kind == kRtt;
}
constexpr bool is_object_reference(ValueKind kind) {
@ -280,9 +278,7 @@ constexpr ValueKind unpacked(ValueKind kind) {
return is_packed(kind) ? kI32 : kind;
}
constexpr bool is_rtt(ValueKind kind) {
return kind == kRtt || kind == kRttWithDepth;
}
constexpr bool is_rtt(ValueKind kind) { return kind == kRtt; }
constexpr bool is_defaultable(ValueKind kind) {
DCHECK(kind != kBottom && kind != kVoid);
@ -319,14 +315,6 @@ class ValueType {
HeapTypeField::encode(type_index));
}
static constexpr ValueType Rtt(uint32_t type_index,
uint8_t inheritance_depth) {
DCHECK(HeapType(type_index).is_index());
return ValueType(KindField::encode(kRttWithDepth) |
HeapTypeField::encode(type_index) |
DepthField::encode(inheritance_depth));
}
// Useful when deserializing a type stored in a runtime object.
static constexpr ValueType FromRawBitField(uint32_t bit_field) {
return ValueType(bit_field);
@ -349,7 +337,6 @@ class ValueType {
}
constexpr bool is_rtt() const { return wasm::is_rtt(kind()); }
constexpr bool has_depth() const { return kind() == kRttWithDepth; }
constexpr bool has_index() const {
return is_rtt() || (is_object_reference() && heap_type().is_index());
@ -383,10 +370,6 @@ class ValueType {
DCHECK(is_object_reference());
return HeapType(heap_representation());
}
constexpr uint8_t depth() const {
DCHECK(has_depth());
return DepthField::decode(bit_field_);
}
constexpr uint32_t ref_index() const {
DCHECK(has_index());
return HeapTypeField::decode(bit_field_);
@ -488,8 +471,6 @@ class ValueType {
return kVoidCode;
case kRtt:
return kRttCode;
case kRttWithDepth:
return kRttWithDepthCode;
#define NUMERIC_TYPE_CASE(kind, ...) \
case k##kind: \
return k##kind##Code;
@ -519,7 +500,7 @@ class ValueType {
}
}
static constexpr int kLastUsedBit = 30;
static constexpr int kLastUsedBit = 24;
/****************************** Pretty-printing *****************************/
constexpr char short_name() const { return wasm::short_name(kind()); }
@ -536,10 +517,6 @@ class ValueType {
buf << heap_type().name() << "ref";
}
break;
case kRttWithDepth:
buf << "(rtt " << static_cast<uint32_t>(depth()) << " " << ref_index()
<< ")";
break;
case kRtt:
buf << "(rtt " << ref_index() << ")";
break;
@ -553,21 +530,16 @@ class ValueType {
// needed.
static constexpr int kKindBits = 5;
static constexpr int kHeapTypeBits = 20;
static constexpr int kDepthBits = 6;
private:
STATIC_ASSERT(kV8MaxWasmTypes < (1u << kHeapTypeBits));
// Note: we currently conservatively allow only 5 bits, but have room to
// store 6, so we can raise the limit if needed.
STATIC_ASSERT(kV8MaxRttSubtypingDepth < (1u << kDepthBits));
using KindField = base::BitField<ValueKind, 0, kKindBits>;
using HeapTypeField = KindField::Next<uint32_t, kHeapTypeBits>;
using DepthField = HeapTypeField::Next<uint8_t, kDepthBits>;
// This is implemented defensively against field order changes.
STATIC_ASSERT(kLastUsedBit == std::max(KindField::kLastUsedBit,
std::max(HeapTypeField::kLastUsedBit,
DepthField::kLastUsedBit)));
STATIC_ASSERT(kLastUsedBit ==
std::max(KindField::kLastUsedBit, HeapTypeField::kLastUsedBit));
constexpr explicit ValueType(uint32_t bit_field) : bit_field_(bit_field) {}
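
The kLastUsedBit change above is plain arithmetic on the remaining field
widths (5 kind bits + 20 heap-type bits, minus the removed 6 depth bits). A
minimal sketch, with BitField as a simplified stand-in for v8::base::BitField:

#include <cstdint>

template <typename T, int shift, int size>
struct BitField {
  static constexpr int kLastUsedBit = shift + size - 1;
  template <typename U, int next_size>
  using Next = BitField<U, shift + size, next_size>;
};

using KindField = BitField<int, 0, 5>;                // ValueKind, bits 0..4
using HeapTypeField = KindField::Next<uint32_t, 20>;  // type index, bits 5..24
// Removed: using DepthField = HeapTypeField::Next<uint8_t, 6>;  // bits 25..30

static_assert(HeapTypeField::kLastUsedBit == 24, "new kLastUsedBit");
// Old layout: DepthField::kLastUsedBit == 30, hence the previous value.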

View File

@ -44,6 +44,7 @@ enum ValueTypeCode : uint8_t {
kOptRefCode = 0x6c,
kRefCode = 0x6b,
kI31RefCode = 0x6a,
// TODO(7748): Only here for backwards compatibility, remove when able.
kRttWithDepthCode = 0x69,
kRttCode = 0x68,
kDataRefCode = 0x67,

View File

@ -657,8 +657,7 @@ class DebugInfoImpl {
return WasmValue(Simd128(ReadUnalignedValue<int16>(stack_address)));
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth: {
case kRtt: {
Handle<Object> obj(Object(ReadUnalignedValue<Address>(stack_address)),
isolate);
return WasmValue(obj, value->type);

View File

@ -46,7 +46,7 @@ ValueType WasmInitExpr::type(const WasmModule* module,
case kArrayInitStatic:
return ValueType::Ref(immediate().index, kNonNullable);
case kRttCanon:
return ValueType::Rtt(immediate().heap_type, 0);
return ValueType::Rtt(immediate().heap_type);
}
}

View File

@ -1536,7 +1536,6 @@ void WebAssemblyGlobal(const v8::FunctionCallbackInfo<v8::Value>& args) {
break;
}
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
// TODO(7748): Implement.
UNIMPLEMENTED();
case i::wasm::kI8:
@ -1731,7 +1730,6 @@ void EncodeExceptionValues(v8::Isolate* isolate,
}
break;
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
case i::wasm::kI8:
case i::wasm::kI16:
case i::wasm::kVoid:
@ -2283,7 +2281,6 @@ void WebAssemblyExceptionGetArg(
}
break;
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
case i::wasm::kI8:
case i::wasm::kI16:
case i::wasm::kVoid:
@ -2345,7 +2342,6 @@ void WebAssemblyExceptionGetArg(
}
break;
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
case i::wasm::kI8:
case i::wasm::kI16:
case i::wasm::kVoid:
@ -2437,7 +2433,6 @@ void WebAssemblyGlobalGetValueCommon(
}
break;
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
UNIMPLEMENTED(); // TODO(7748): Implement.
case i::wasm::kI8:
case i::wasm::kI16:
@ -2532,7 +2527,6 @@ void WebAssemblyGlobalSetValue(
}
break;
case i::wasm::kRtt:
case i::wasm::kRttWithDepth:
// TODO(7748): Implement.
UNIMPLEMENTED();
case i::wasm::kI8:

View File

@ -134,7 +134,6 @@ void WriteValueType(ZoneBuffer* buffer, const ValueType& type) {
buffer->write_i32v(type.heap_type().code());
}
if (type.is_rtt()) {
if (type.has_depth()) buffer->write_u32v(type.depth());
buffer->write_u32v(type.ref_index());
}
}
@ -520,7 +519,6 @@ void WriteInitializerExpressionWithEnd(ZoneBuffer* buffer,
case kBottom:
case kRef:
case kRtt:
case kRttWithDepth:
UNREACHABLE();
}
break;

View File

@ -386,7 +386,6 @@ Handle<Object> WasmObject::ReadValueAt(Isolate* isolate, Handle<HeapObject> obj,
}
case wasm::kRtt:
case wasm::kRttWithDepth:
// Rtt values are not supposed to be made available to JavaScript side.
UNREACHABLE();
@ -422,7 +421,6 @@ MaybeHandle<Object> WasmObject::ToWasmValue(Isolate* isolate,
UNREACHABLE();
case wasm::kRtt:
case wasm::kRttWithDepth:
// Rtt values are not supposed to be made available to JavaScript side.
UNREACHABLE();
@ -500,7 +498,6 @@ void WasmObject::WriteValueAt(Isolate* isolate, Handle<HeapObject> obj,
UNREACHABLE();
case wasm::kRtt:
case wasm::kRttWithDepth:
// Rtt values are not supposed to be made available to JavaScript side.
UNREACHABLE();

View File

@ -1562,7 +1562,6 @@ wasm::WasmValue WasmStruct::GetFieldValue(uint32_t index) {
return wasm::WasmValue(ref, field_type);
}
case wasm::kRtt:
case wasm::kRttWithDepth:
// TODO(7748): Expose RTTs to DevTools.
UNIMPLEMENTED();
case wasm::kVoid:
@ -1592,7 +1591,6 @@ wasm::WasmValue WasmArray::GetElement(uint32_t index) {
return wasm::WasmValue(ref, element_type);
}
case wasm::kRtt:
case wasm::kRttWithDepth:
// TODO(7748): Expose RTTs to DevTools.
UNIMPLEMENTED();
case wasm::kVoid:
@ -1834,7 +1832,6 @@ uint32_t WasmExceptionPackage::GetEncodedSize(const wasm::WasmTag* tag) {
encoded_size += 1;
break;
case wasm::kRtt:
case wasm::kRttWithDepth:
case wasm::kVoid:
case wasm::kBottom:
case wasm::kI8:
@ -2336,7 +2333,6 @@ bool TypecheckJSObject(Isolate* isolate, const WasmModule* module,
}
}
case kRtt:
case kRttWithDepth:
// TODO(7748): Implement when the JS API for rtts is decided on.
*error_message =
"passing rtts between Webassembly and Javascript is not supported "

View File

@ -149,16 +149,6 @@ V8_NOINLINE V8_EXPORT_PRIVATE bool IsSubtypeOfImpl(
return supertype.kind() == kRtt &&
EquivalentIndices(subtype.ref_index(), supertype.ref_index(),
sub_module, super_module);
case kRttWithDepth:
return (supertype.kind() == kRtt &&
((sub_module == super_module &&
subtype.ref_index() == supertype.ref_index()) ||
EquivalentIndices(subtype.ref_index(), supertype.ref_index(),
sub_module, super_module))) ||
(supertype.kind() == kRttWithDepth &&
supertype.depth() == subtype.depth() &&
EquivalentIndices(subtype.ref_index(), supertype.ref_index(),
sub_module, super_module));
case kRef:
case kOptRef:
break;
@ -250,9 +240,6 @@ V8_NOINLINE bool EquivalentTypes(ValueType type1, ValueType type2,
DCHECK(type1.has_index() && type2.has_index() &&
(type1 != type2 || module1 != module2));
DCHECK_IMPLIES(type1.has_depth(), type2.has_depth()); // Due to 'if' above.
if (type1.has_depth() && type1.depth() != type2.depth()) return false;
DCHECK(type1.has_index() && module1->has_type(type1.ref_index()) &&
type2.has_index() && module2->has_type(type2.ref_index()));

View File

@ -71,7 +71,7 @@ V8_INLINE bool IsSubtypeOf(ValueType subtype, ValueType supertype,
}
// We have this function call IsSubtypeOf instead of the opposite because type
// checks are much more common than heap type checks.}
// checks are much more common than heap type checks.
V8_INLINE bool IsHeapSubtypeOf(uint32_t subtype_index,
HeapType::Representation supertype,
const WasmModule* module) {

View File

@ -202,7 +202,6 @@ class WasmValue {
case kOptRef:
case kRef:
case kRtt:
case kRttWithDepth:
return "Handle [" + std::to_string(to_ref().address()) + "]";
case kVoid:
case kBottom:

View File

@ -561,7 +561,7 @@ WASM_COMPILED_EXEC_TEST(BrOnCast) {
const byte type_index = tester.DefineStruct({F(kWasmI32, true)});
const byte other_type_index = tester.DefineStruct({F(kWasmF32, true)});
const byte rtt_index =
tester.AddGlobal(ValueType::Rtt(type_index, 0), false,
tester.AddGlobal(ValueType::Rtt(type_index), false,
WasmInitExpr::RttCanon(
static_cast<HeapType::Representation>(type_index)));
const byte kTestStruct = tester.DefineFunction(
@ -1347,11 +1347,11 @@ WASM_COMPILED_EXEC_TEST(BasicRtt) {
const byte subtype_index = tester.DefineStruct(
{F(wasm::kWasmI32, true), F(wasm::kWasmI32, true)}, type_index);
ValueType kRttTypes[] = {ValueType::Rtt(type_index, 0)};
ValueType kRttTypes[] = {ValueType::Rtt(type_index)};
FunctionSig sig_t_v(1, 0, kRttTypes);
ValueType kRttSubtypes[] = {ValueType::Rtt(subtype_index, 1)};
ValueType kRttSubtypes[] = {ValueType::Rtt(subtype_index)};
FunctionSig sig_t2_v(1, 0, kRttSubtypes);
ValueType kRttTypesDeeper[] = {ValueType::Rtt(type_index, 1)};
ValueType kRttTypesDeeper[] = {ValueType::Rtt(type_index)};
FunctionSig sig_t3_v(1, 0, kRttTypesDeeper);
ValueType kRefTypes[] = {ref(type_index)};
FunctionSig sig_q_v(1, 0, kRefTypes);
@ -1643,7 +1643,7 @@ WASM_COMPILED_EXEC_TEST(ArrayNewMap) {
&sig, {},
{WASM_ARRAY_NEW(type_index, WASM_I32V(10), WASM_I32V(42)), kExprEnd});
ValueType rtt_type = ValueType::Rtt(type_index, 0);
ValueType rtt_type = ValueType::Rtt(type_index);
FunctionSig rtt_canon_sig(1, 0, &rtt_type);
const byte kRttCanon = tester.DefineFunction(
&rtt_canon_sig, {}, {WASM_RTT_CANON(type_index), kExprEnd});
@ -1677,7 +1677,7 @@ WASM_COMPILED_EXEC_TEST(FunctionRefs) {
ValueType func_type = ValueType::Ref(sig_index, kNullable);
FunctionSig sig_func(1, 0, &func_type);
ValueType rtt0 = ValueType::Rtt(sig_index, 0);
ValueType rtt0 = ValueType::Rtt(sig_index);
FunctionSig sig_rtt0(1, 0, &rtt0);
const byte rtt_canon = tester.DefineFunction(
&sig_rtt0, {}, {WASM_RTT_CANON(sig_index), kExprEnd});
@ -2030,12 +2030,12 @@ WASM_COMPILED_EXEC_TEST(CastsBenchmark) {
WasmInitExpr::RefNullConst(
static_cast<HeapType::Representation>(ListType)));
const byte RttSuper = tester.AddGlobal(
ValueType::Rtt(SuperType, 0), false,
ValueType::Rtt(SuperType), false,
WasmInitExpr::RttCanon(static_cast<HeapType::Representation>(SuperType)));
const byte RttSub = tester.AddGlobal(ValueType::Rtt(SubType, 1), false,
const byte RttSub = tester.AddGlobal(ValueType::Rtt(SubType), false,
WasmInitExpr::RttCanon(SubType));
const byte RttList = tester.AddGlobal(
ValueType::Rtt(ListType, 0), false,
ValueType::Rtt(ListType), false,
WasmInitExpr::RttCanon(static_cast<HeapType::Representation>(ListType)));
const uint32_t kListLength = 1024;

View File

@ -1442,7 +1442,6 @@ class WasmInterpreterInternals {
}
case kRef:
case kRtt:
case kRttWithDepth:
case kVoid:
case kBottom:
case kI8:
@ -3166,7 +3165,6 @@ class WasmInterpreterInternals {
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth:
encoded_values->set(encoded_index++, *value.to_ref());
break;
case kI8:
@ -3253,8 +3251,7 @@ class WasmInterpreterInternals {
}
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth: {
case kRtt: {
Handle<Object> ref(encoded_values->get(encoded_index++), isolate_);
value = WasmValue(ref, sig->GetParam(i));
break;
@ -3630,8 +3627,7 @@ class WasmInterpreterInternals {
#undef CASE_TYPE
case kRef:
case kOptRef:
case kRtt:
case kRttWithDepth: {
case kRtt: {
// TODO(7748): Type checks or DCHECKs for ref types?
HandleScope handle_scope(isolate_); // Avoid leaking handles.
Handle<FixedArray> global_buffer; // The buffer of the global.
@ -4050,7 +4046,6 @@ class WasmInterpreterInternals {
PrintF("ref:0x%" V8PRIxPTR, val.to_ref()->ptr());
break;
case kRtt:
case kRttWithDepth:
PrintF("rtt:0x%" V8PRIxPTR, val.to_ref()->ptr());
break;
case kI8:

View File

@ -584,8 +584,6 @@ inline WasmOpcode LoadStoreOpcodeOf(MachineType type, bool store) {
__VA_ARGS__, WASM_GC_OP(kExprArrayInitStatic), static_cast<byte>(index), \
static_cast<byte>(length)
#define WASM_RTT_WITH_DEPTH(depth, typeidx) \
kRttWithDepthCode, U32V_1(depth), U32V_1(typeidx)
#define WASM_RTT(typeidx) kRttCode, U32V_1(typeidx)
#define WASM_RTT_CANON(typeidx) \
WASM_GC_OP(kExprRttCanon), static_cast<byte>(typeidx)

View File

@ -73,7 +73,6 @@ base::OwnedVector<WasmValue> MakeDefaultInterpreterArguments(
break;
case kRef:
case kRtt:
case kRttWithDepth:
case kI8:
case kI16:
case kVoid:
@ -108,7 +107,6 @@ base::OwnedVector<Handle<Object>> MakeDefaultArguments(Isolate* isolate,
break;
case kRef:
case kRtt:
case kRttWithDepth:
case kI8:
case kI16:
case kVoid:

View File

@ -147,13 +147,8 @@ function wasmRefType(heap_type) {
return {opcode: kWasmRef, heap_type: heap_type};
}
let kWasmRttWithDepth = 0x69;
function wasmRtt(index, depth) {
if (index < 0) throw new Error("Expecting non-negative type index");
return {opcode: kWasmRttWithDepth, index: index, depth: depth};
}
let kWasmRtt = 0x68;
function wasmRttNoDepth(index) {
function wasmRtt(index) {
if (index < 0) throw new Error("Expecting non-negative type index");
return {opcode: kWasmRtt, index: index};
}

View File

@ -3984,8 +3984,8 @@ TEST_F(FunctionBodyDecoderTest, GCStruct) {
WASM_RTT_CANON(array_type_index)),
kExprDrop},
kAppendEnd,
"struct.new_with_rtt[1] expected rtt with depth for type 0, "
"found rtt.canon of type (rtt 0 1)");
"struct.new_with_rtt[1] expected type (rtt 0), found "
"rtt.canon of type (rtt 1)");
// Out-of-bounds index.
ExpectFailure(sigs.v_v(),
{WASM_STRUCT_NEW_WITH_RTT(42, WASM_I32V(0),
@ -4123,8 +4123,8 @@ TEST_F(FunctionBodyDecoderTest, GCArray) {
array_type_index, WASM_REF_NULL(kFuncRefCode), WASM_I32V(5),
WASM_RTT_CANON(struct_type_index))},
kAppendEnd,
"array.new_with_rtt[2] expected rtt with depth for type 0, "
"found rtt.canon of type (rtt 0 1)");
"array.new_with_rtt[2] expected type (rtt 0), found "
"rtt.canon of type (rtt 1)");
// Wrong type index.
ExpectFailure(
sigs.v_v(),
@ -4324,15 +4324,9 @@ TEST_F(FunctionBodyDecoderTest, RttCanon) {
uint8_t struct_type_index = builder.AddStruct({F(kWasmI64, true)});
for (uint32_t type_index : {array_type_index, struct_type_index}) {
ValueType rtt1 = ValueType::Rtt(type_index, 0);
ValueType rtt1 = ValueType::Rtt(type_index);
FunctionSig sig1(1, 0, &rtt1);
ExpectValidates(&sig1, {WASM_RTT_CANON(type_index)});
// rtt.canon should fail for incorrect depth.
ValueType rtt2 = ValueType::Rtt(type_index, 1);
FunctionSig sig2(1, 0, &rtt2);
ExpectFailure(&sig2, {WASM_RTT_CANON(type_index)}, kAppendEnd,
"type error in fallthru[0]");
}
}

View File

@ -789,8 +789,8 @@ TEST_F(WasmModuleVerifyTest, RttCanonGlobalStruct) {
static const byte data[] = {
SECTION(Type, ENTRY_COUNT(1),
WASM_STRUCT_DEF(FIELD_COUNT(1), STRUCT_FIELD(kI32Code, true))),
SECTION(Global, ENTRY_COUNT(1), WASM_RTT_WITH_DEPTH(0, 0), 0,
WASM_RTT_CANON(0), kExprEnd)};
SECTION(Global, ENTRY_COUNT(1), WASM_RTT(0), 0, WASM_RTT_CANON(0),
kExprEnd)};
ModuleResult result = DecodeModule(data, data + sizeof(data));
EXPECT_OK(result);
}
@ -799,14 +799,15 @@ TEST_F(WasmModuleVerifyTest, RttCanonGlobalTypeError) {
WASM_FEATURE_SCOPE(typed_funcref);
WASM_FEATURE_SCOPE(gc);
static const byte data[] = {
SECTION(Type, ENTRY_COUNT(1),
SECTION(Type, ENTRY_COUNT(2),
WASM_STRUCT_DEF(FIELD_COUNT(1), STRUCT_FIELD(kI32Code, true)),
WASM_STRUCT_DEF(FIELD_COUNT(1), STRUCT_FIELD(kI32Code, true))),
SECTION(Global, ENTRY_COUNT(1), WASM_RTT_WITH_DEPTH(1, 0), 1,
WASM_RTT_CANON(0), kExprEnd)};
SECTION(Global, ENTRY_COUNT(1), WASM_RTT(0), 1, WASM_RTT_CANON(1),
kExprEnd)};
ModuleResult result = DecodeModule(data, data + sizeof(data));
EXPECT_NOT_OK(
result,
"type error in init. expression[0] (expected (rtt 1 0), got (rtt 0 0))");
"type error in init. expression[0] (expected (rtt 0), got (rtt 1))");
}
TEST_F(WasmModuleVerifyTest, StructNewInitExpr) {
@ -827,7 +828,7 @@ TEST_F(WasmModuleVerifyTest, StructNewInitExpr) {
SECTION(Global, ENTRY_COUNT(3), // --
kI32Code, 0, // type, mutability
WASM_INIT_EXPR_I32V_1(10), // --
kRttWithDepthCode, 0, 0, 0, // type, mutability
kRttCode, 0, 0, // type, mutability
WASM_RTT_CANON(0), kExprEnd, // --
kRefCode, 0, 0, // type, mutability
WASM_INIT_EXPR_STRUCT_NEW(0, WASM_GLOBAL_GET(0),
@ -853,8 +854,8 @@ TEST_F(WasmModuleVerifyTest, StructNewInitExpr) {
kRefCode, 0, 0, // type, mutability
WASM_INIT_EXPR_STRUCT_NEW(0, WASM_I32V(42), WASM_RTT_CANON(1)))};
EXPECT_FAILURE_WITH_MSG(subexpr_type_error,
"struct.new_with_rtt[1] expected rtt with depth for "
"type 0, found rtt.canon of type (rtt 0 1)");
"struct.new_with_rtt[1] expected type (rtt 0), found "
"rtt.canon of type (rtt 1)");
}
TEST_F(WasmModuleVerifyTest, ArrayInitInitExpr) {
@ -2133,7 +2134,7 @@ TEST_F(WasmModuleVerifyTest, IllegalTableTypes) {
{kOptRefCode, 1},
{kOptRefCode, kI31RefCode},
{kI31RefCode},
{kRttWithDepthCode, 2, 0}};
{kRttCode, 0}};
for (Vec type : table_types) {
Vec data = {

View File

@ -182,26 +182,14 @@ TEST_F(WasmSubtypingTest, Subtyping) {
VALID_SUBTYPE(ref(9), ref(8));
// Identical rtts are subtypes of each other.
SUBTYPE(ValueType::Rtt(5, 3), ValueType::Rtt(5, 3));
SUBTYPE(ValueType::Rtt(5), ValueType::Rtt(5));
// Rtts of unrelated types are unrelated.
NOT_SUBTYPE(ValueType::Rtt(1, 1), ValueType::Rtt(2, 1));
NOT_SUBTYPE(ValueType::Rtt(1), ValueType::Rtt(2));
NOT_SUBTYPE(ValueType::Rtt(1, 0), ValueType::Rtt(2));
// Rtts of different depth are unrelated.
NOT_SUBTYPE(ValueType::Rtt(5, 1), ValueType::Rtt(5, 3));
NOT_SUBTYPE(ValueType::Rtt(5, 8), ValueType::Rtt(5, 3));
// Rtts of identical types are subtype-related.
// TODO(7748): Implement type canonicalization.
// SUBTYPE(ValueType::Rtt(8, 1), ValueType::Rtt(9, 1));
// SUBTYPE(ValueType::Rtt(8), ValueType::Rtt(9));
// Rtts of subtypes are not related.
NOT_SUBTYPE(ValueType::Rtt(1, 1), ValueType::Rtt(0, 1));
NOT_SUBTYPE(ValueType::Rtt(1), ValueType::Rtt(0));
// rtt(t, d) <: rtt(t)
for (uint8_t depth : {0, 1, 5}) {
SUBTYPE(ValueType::Rtt(1, depth), ValueType::Rtt(1));
}
// Function subtyping;
// Unrelated function types are unrelated.