[wasm][liftoff] Emit safepoints for externref values on the stack
With this CL we emit safepoint maps for externref values on the Liftoff
value stack. With that, there is support for externref parameters and locals
in Liftoff, as well as for intermediate values of type externref.

R=thibaudm@chromium.org

Bug: v8:7581
Change-Id: I2df0a8d00b2da33fe06ff474b039cca4c7be726d
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2387571
Commit-Queue: Andreas Haas <ahaas@chromium.org>
Reviewed-by: Thibaud Michaud <thibaudm@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69725}
parent 40ce9f70a5
commit 10348e8eb6
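For context, a safepoint records, for one code offset, which stack slots of
the current frame hold tagged references, so the GC visits exactly those
slots. A standalone sketch of the data involved (the struct and field names
below are illustrative, not V8's):

    #include <bitset>
    #include <cstdint>

    // One entry per call site: when a GC happens while the frame is stopped
    // at pc_offset, the collector visits only the spill slots whose bit is
    // set and treats their contents as tagged references.
    struct SafepointEntry {
      uint32_t pc_offset;            // code offset of the call instruction
      std::bitset<64> tagged_slots;  // bit i set => spill slot i holds a ref
    };

The diff below threads exactly this information through Liftoff: the
value-stack cache state knows which slots have reference types, and every
runtime call now records them.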
@@ -765,6 +765,8 @@ DEFINE_INT(trace_wasm_ast_start, 0,
 DEFINE_INT(trace_wasm_ast_end, 0, "end function for wasm AST trace (exclusive)")
 DEFINE_BOOL(liftoff, true,
             "enable Liftoff, the baseline compiler for WebAssembly")
+DEFINE_BOOL(liftoff_extern_ref, false,
+            "enable support for externref in Liftoff")
 // We can't tier up (from Liftoff to TurboFan) in single-threaded mode, hence
 // disable Liftoff in that configuration for now. The alternative is disabling
 // TurboFan, which would reduce peak performance considerably.

@@ -272,6 +272,8 @@ inline void Store(LiftoffAssembler* assm, LiftoffRegister src, MemOperand dst,
 #endif
   switch (type.kind()) {
     case ValueType::kI32:
+    case ValueType::kOptRef:
+    case ValueType::kRef:
       assm->str(src.gp(), dst);
       break;
     case ValueType::kI64:

@@ -303,6 +305,8 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
                  ValueType type) {
   switch (type.kind()) {
     case ValueType::kI32:
+    case ValueType::kOptRef:
+    case ValueType::kRef:
       assm->ldr(dst.gp(), src);
       break;
     case ValueType::kI64:

@@ -497,13 +501,7 @@ int LiftoffAssembler::SlotSizeForType(ValueType type) {
 }
 
 bool LiftoffAssembler::NeedsAlignment(ValueType type) {
-  switch (type.kind()) {
-    case ValueType::kS128:
-      return true;
-    default:
-      // No alignment because all other types are kStackSlotSize.
-      return false;
-  }
+  return (type.kind() == ValueType::kS128 || type.is_reference_type());
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,

@@ -1283,7 +1281,7 @@ void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
 
 void LiftoffAssembler::Move(Register dst, Register src, ValueType type) {
   DCHECK_NE(dst, src);
-  DCHECK_EQ(type, kWasmI32);
+  DCHECK(type == kWasmI32 || type.is_reference_type());
   TurboAssembler::Move(dst, src);
 }
 

@@ -50,6 +50,8 @@ inline CPURegister GetRegFromType(const LiftoffRegister& reg, ValueType type) {
     case ValueType::kI32:
       return reg.gp().W();
     case ValueType::kI64:
+    case ValueType::kRef:
+    case ValueType::kOptRef:
       return reg.gp().X();
     case ValueType::kF32:
       return reg.fp().S();

@@ -276,13 +278,7 @@ int LiftoffAssembler::SlotSizeForType(ValueType type) {
 }
 
 bool LiftoffAssembler::NeedsAlignment(ValueType type) {
-  switch (type.kind()) {
-    case ValueType::kS128:
-      return true;
-    default:
-      // No alignment because all other types are kStackSlotSize.
-      return false;
-  }
+  return type.kind() == ValueType::kS128 || type.is_reference_type();
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,

@@ -735,7 +731,7 @@ void LiftoffAssembler::Move(Register dst, Register src, ValueType type) {
   if (type == kWasmI32) {
     Mov(dst.W(), src.W());
   } else {
-    DCHECK_EQ(kWasmI64, type);
+    DCHECK(kWasmI64 == type || type.is_reference_type());
     Mov(dst.X(), src.X());
   }
 }

@@ -42,6 +42,8 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
   Operand src(base, offset);
   switch (type.kind()) {
     case ValueType::kI32:
+    case ValueType::kOptRef:
+    case ValueType::kRef:
       assm->mov(dst.gp(), src);
       break;
     case ValueType::kI64:

@@ -229,7 +231,9 @@ int LiftoffAssembler::SlotSizeForType(ValueType type) {
   return type.element_size_bytes();
 }
 
-bool LiftoffAssembler::NeedsAlignment(ValueType type) { return false; }
+bool LiftoffAssembler::NeedsAlignment(ValueType type) {
+  return type.is_reference_type();
+}
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                     RelocInfo::Mode rmode) {

@@ -1028,7 +1032,7 @@ void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
 
 void LiftoffAssembler::Move(Register dst, Register src, ValueType type) {
   DCHECK_NE(dst, src);
-  DCHECK_EQ(kWasmI32, type);
+  DCHECK(kWasmI32 == type || type.is_reference_type());
   mov(dst, src);
 }
 

@@ -1050,6 +1054,8 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
   Operand dst = liftoff::GetStackSlot(offset);
   switch (type.kind()) {
     case ValueType::kI32:
+    case ValueType::kOptRef:
+    case ValueType::kRef:
       mov(dst, reg.gp());
       break;
     case ValueType::kI64:

@@ -488,6 +488,21 @@ void LiftoffAssembler::CacheState::Split(const CacheState& source) {
   *this = source;
 }
 
+void LiftoffAssembler::CacheState::DefineSafepoint(Safepoint& safepoint) {
+  for (auto slot : stack_state) {
+    DCHECK(!slot.is_reg());
+
+    if (slot.type().is_reference_type()) {
+      // index = 0 is for the stack slot at 'fp - kSystemPointerSize', the
+      // location of the current stack slot is 'fp - slot.offset()'.
+      // The index we need is therefore '(fp - kSystemPointerSize) - (fp -
+      // slot.offset())' = 'slot.offset() - kSystemPointerSize'.
+      auto index = (slot.offset() - kSystemPointerSize) / kSystemPointerSize;
+      safepoint.DefinePointerSlot(index);
+    }
+  }
+}
+
 namespace {
 
 constexpr AssemblerOptions DefaultLiftoffOptions() {

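The offset-to-index conversion above can be checked in isolation. A minimal,
compilable sketch of the same arithmetic (kSystemPointerSize fixed to 8 for a
64-bit target; the function name is ours, not V8's):

    #include <cassert>

    constexpr int kSystemPointerSize = 8;

    // The slot at 'fp - kSystemPointerSize' gets index 0, the next one
    // index 1, and so on.
    int SafepointIndexForSlot(int slot_offset) {
      return (slot_offset - kSystemPointerSize) / kSystemPointerSize;
    }

    int main() {
      assert(SafepointIndexForSlot(8) == 0);   // first spill slot, fp - 8
      assert(SafepointIndexForSlot(24) == 2);  // third spill slot, fp - 24
      return 0;
    }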
@@ -135,6 +135,8 @@ class LiftoffAssembler : public TurboAssembler {
     // Disallow copy construction.
     CacheState(const CacheState&) = delete;
 
+    void DefineSafepoint(Safepoint& safepoint);
+
     base::SmallVector<VarState, 8> stack_state;
     LiftoffRegList used_registers;
    uint32_t register_use_count[kAfterMaxLiftoffRegCode] = {0};

@@ -1119,9 +1121,12 @@
   uint32_t num_locals() const { return num_locals_; }
   void set_num_locals(uint32_t num_locals);
 
-  int GetTotalFrameSlotCount() const {
-    // TODO(zhin): Temporary for migration from index to offset.
-    return ((max_used_spill_offset_ + kStackSlotSize - 1) / kStackSlotSize);
+  int GetTotalFrameSlotCountForGC() const {
+    // The GC does not care about the actual number of spill slots, just about
+    // the number of references that could be there in the spilling area. Note
+    // that the offset of the first spill slot is kSystemPointerSize and not
+    // '0'. Therefore we don't have to add '+1' here.
+    return max_used_spill_offset_ / kSystemPointerSize;
   }
 
   int GetTotalFrameSize() const { return max_used_spill_offset_; }

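The new accessor's arithmetic follows directly from that slot numbering: with
the first spill slot at offset kSystemPointerSize, a frame whose highest used
spill offset is N bytes has N / kSystemPointerSize candidate reference slots.
A compilable sketch of the same computation (names ours, not V8's):

    #include <cassert>

    constexpr int kSystemPointerSize = 8;

    // With spill slots at offsets 8, 16, ..., N, the number of slots the GC
    // may need to visit is N / kSystemPointerSize; no '+1', because offsets
    // start at kSystemPointerSize rather than 0.
    int TotalFrameSlotCountForGC(int max_used_spill_offset) {
      return max_used_spill_offset / kSystemPointerSize;
    }

    int main() {
      assert(TotalFrameSlotCountForGC(40) == 5);  // slots at 8..40 -> 5 slots
      return 0;
    }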
@@ -120,11 +120,17 @@ compiler::CallDescriptor* GetLoweredCallDescriptor(
              : call_desc;
 }
 
-constexpr ValueType kSupportedTypesArr[] = {kWasmI32, kWasmI64, kWasmF32,
-                                            kWasmF64, kWasmS128};
+constexpr ValueType kSupportedTypesArr[] = {
+    kWasmI32,  kWasmI64,       kWasmF32,    kWasmF64,
+    kWasmS128, kWasmExternRef, kWasmFuncRef};
 constexpr Vector<const ValueType> kSupportedTypes =
     ArrayVector(kSupportedTypesArr);
 
+constexpr ValueType kSupportedTypesWithoutRefsArr[] = {
+    kWasmI32, kWasmI64, kWasmF32, kWasmF64, kWasmS128};
+constexpr Vector<const ValueType> kSupportedTypesWithoutRefs =
+    ArrayVector(kSupportedTypesWithoutRefsArr);
+
 constexpr Condition GetCompareCondition(WasmOpcode opcode) {
   switch (opcode) {
     case kExprI32Eq:

@@ -370,8 +376,8 @@ class LiftoffCompiler {
         Vector<const uint8_t>::cast(VectorOf(protected_instructions_)));
   }
 
-  uint32_t GetTotalFrameSlotCount() const {
-    return __ GetTotalFrameSlotCount();
+  uint32_t GetTotalFrameSlotCountForGC() const {
+    return __ GetTotalFrameSlotCountForGC();
   }
 
   void unsupported(FullDecoder* decoder, LiftoffBailoutReason reason,

@@ -534,7 +540,7 @@
 
   void TierUpFunction(FullDecoder* decoder) {
     __ CallRuntimeStub(WasmCode::kWasmTriggerTierUp);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
+    DefineSafepoint();
   }
 
   void TraceFunctionEntry(FullDecoder* decoder) {

@@ -543,13 +549,16 @@
     source_position_table_builder_.AddPosition(
         __ pc_offset(), SourcePosition(decoder->position()), false);
     __ CallRuntimeStub(WasmCode::kWasmTraceEnter);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
+    DefineSafepoint();
   }
 
   void StartFunctionBody(FullDecoder* decoder, Control* block) {
     for (uint32_t i = 0; i < __ num_locals(); ++i) {
-      if (!CheckSupportedType(decoder, kSupportedTypes, __ local_type(i),
-                              "param"))
+      if (!CheckSupportedType(decoder,
+                              FLAG_liftoff_extern_ref
+                                  ? kSupportedTypes
+                                  : kSupportedTypesWithoutRefs,
+                              __ local_type(i), "param"))
         return;
     }
 

@@ -610,6 +619,25 @@
       }
     }
 
+    if (FLAG_liftoff_extern_ref) {
+      // Initialize all reference type locals with ref.null.
+      for (uint32_t param_idx = num_params; param_idx < __ num_locals();
+           ++param_idx) {
+        ValueType type = decoder->local_type(param_idx);
+        if (type.is_reference_type()) {
+          Register isolate_root = __ GetUnusedRegister(kGpReg, {}).gp();
+          // We can re-use the isolate_root register as result register.
+          Register result = isolate_root;
+
+          LOAD_INSTANCE_FIELD(isolate_root, IsolateRoot, kSystemPointerSize);
+          __ LoadTaggedPointer(
+              result, isolate_root, no_reg,
+              IsolateData::root_slot_offset(RootIndex::kNullValue), {});
+          __ Spill(__ cache_state()->stack_state.back().offset(),
+                   LiftoffRegister(result), type);
+        }
+      }
+    }
     DCHECK_EQ(__ num_locals(), __ cache_state()->stack_height());
 
     if (V8_UNLIKELY(debug_sidetable_builder_)) {

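The initialization loop above matters for GC correctness: the function's
first safepoint (for example the stack check at entry) can be reached before
the wasm code ever writes to a reference local, and the safepoint map now
tells the GC to visit those slots. A toy model of the invariant (plain C++,
not V8 code; nullptr stands in for the null root):

    #include <array>
    #include <cassert>

    int main() {
      // Every reference local must hold a valid value before the first
      // safepoint, because the GC will visit each slot the map marks.
      std::array<void*, 3> ref_locals;
      ref_locals.fill(nullptr);  // the "ref.null" initialization
      for (void* slot : ref_locals) assert(slot == nullptr);
      return 0;
    }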
@@ -727,11 +755,12 @@
     source_position_table_builder_.AddPosition(
         __ pc_offset(), SourcePosition(ool->position), true);
     __ CallRuntimeStub(ool->stub);
+    // TODO(ahaas): Define a proper safepoint here.
+    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
     DCHECK_EQ(!debug_sidetable_builder_, !ool->debug_sidetable_entry_builder);
     if (V8_UNLIKELY(ool->debug_sidetable_entry_builder)) {
       ool->debug_sidetable_entry_builder->set_pc_offset(__ pc_offset());
     }
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
     DCHECK_EQ(ool->continuation.get()->is_bound(), is_stack_check);
     if (!ool->regs_to_save.is_empty()) __ PopRegisters(ool->regs_to_save);
     if (is_stack_check) {

@@ -755,7 +784,7 @@
     __ PatchPrepareStackFrame(pc_offset_stack_frame_construction_,
                               __ GetTotalFrameSize());
     __ FinishCode();
-    safepoint_table_builder_.Emit(&asm_, __ GetTotalFrameSlotCount());
+    safepoint_table_builder_.Emit(&asm_, __ GetTotalFrameSlotCountForGC());
     __ MaybeEmitOutOfLineConstantPool();
     // The previous calls may have also generated a bailout.
     DidAssemblerBailout(decoder);

@@ -812,8 +841,9 @@
     source_position_table_builder_.AddPosition(
         __ pc_offset(), SourcePosition(decoder->position()), true);
     __ CallRuntimeStub(WasmCode::kWasmDebugBreak);
-    RegisterDebugSideTableEntry(DebugSideTableBuilder::kAllowRegisters);
+    // TODO(ahaas): Define a proper safepoint here.
     safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
+    RegisterDebugSideTableEntry(DebugSideTableBuilder::kAllowRegisters);
   }
 
   void Block(FullDecoder* decoder, Control* block) {}

@@ -1588,6 +1618,10 @@
   }
 
   void RefNull(FullDecoder* decoder, ValueType type, Value*) {
+    if (!FLAG_liftoff_extern_ref) {
+      unsupported(decoder, kRefTypes, "ref_null");
+      return;
+    }
     Register isolate_root = __ GetUnusedRegister(kGpReg, {}).gp();
     // We can re-use the isolate_root register as result register.
     Register result = isolate_root;

@@ -1645,7 +1679,7 @@
     source_position_table_builder_.AddPosition(
         __ pc_offset(), SourcePosition(decoder->position()), false);
     __ CallRuntimeStub(WasmCode::kWasmTraceExit);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
+    DefineSafepoint();
 
     __ DeallocateStackSlot(sizeof(int64_t));
   }

@@ -1766,8 +1800,10 @@
   void GlobalGet(FullDecoder* decoder, Value* result,
                  const GlobalIndexImmediate<validate>& imm) {
     const auto* global = &env_->module->globals[imm.index];
-    if (!CheckSupportedType(decoder, kSupportedTypes, global->type, "global"))
+    if (!CheckSupportedType(decoder, kSupportedTypesWithoutRefs, global->type,
+                            "global")) {
       return;
+    }
     LiftoffRegList pinned;
     uint32_t offset = 0;
     Register addr = GetGlobalBaseAndOffset(global, &pinned, &offset);

@@ -1781,7 +1817,8 @@
   void GlobalSet(FullDecoder* decoder, const Value& value,
                  const GlobalIndexImmediate<validate>& imm) {
     auto* global = &env_->module->globals[imm.index];
-    if (!CheckSupportedType(decoder, kSupportedTypes, global->type, "global"))
+    if (!CheckSupportedType(decoder, kSupportedTypesWithoutRefs, global->type,
+                            "global"))
       return;
     LiftoffRegList pinned;
     uint32_t offset = 0;

@@ -2122,7 +2159,7 @@
     source_position_table_builder_.AddPosition(__ pc_offset(),
                                                SourcePosition(position), false);
     __ CallRuntimeStub(WasmCode::kWasmTraceMemory);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
+    DefineSafepoint();
 
     __ DeallocateStackSlot(sizeof(MemoryTracingInfo));
   }

@@ -2298,8 +2335,8 @@
     if (input.gp() != param_reg) __ Move(param_reg, input.gp(), kWasmI32);
 
     __ CallRuntimeStub(WasmCode::kWasmMemoryGrow);
+    DefineSafepoint();
     RegisterDebugSideTableEntry(DebugSideTableBuilder::kDidSpill);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
 
     if (kReturnRegister0 != result.gp()) {
       __ Move(result.gp(), kReturnRegister0, kWasmI32);

@@ -3129,12 +3166,11 @@
     __ PrepareBuiltinCall(&sig, call_descriptor,
                           {index, expected_value, timeout});
     __ CallRuntimeStub(target);
-
+    DefineSafepoint();
     // Pop parameters from the value stack.
     __ cache_state()->stack_state.pop_back(3);
 
     RegisterDebugSideTableEntry(DebugSideTableBuilder::kDidSpill);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
 
     __ PushRegister(kWasmI32, LiftoffRegister(kReturnRegister0));
   }

@@ -3166,8 +3202,8 @@
                      {descriptor.GetRegisterParameter(1), count, kWasmI32}});
 
     __ CallRuntimeStub(WasmCode::kWasmAtomicNotify);
+    DefineSafepoint();
     RegisterDebugSideTableEntry(DebugSideTableBuilder::kDidSpill);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
 
     __ PushRegister(kWasmI32, LiftoffRegister(kReturnRegister0));
   }

@@ -3437,12 +3473,12 @@
     __ PrepareBuiltinCall(&sig, call_descriptor,
                           {dst, src, size, table_index, segment_index});
     __ CallRuntimeStub(target);
+    DefineSafepoint();
 
     // Pop parameters from the value stack.
     __ cache_state()->stack_state.pop_back(3);
 
     RegisterDebugSideTableEntry(DebugSideTableBuilder::kDidSpill);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
   }
 
   void ElemDrop(FullDecoder* decoder, const ElemDropImmediate<validate>& imm) {

@@ -3507,12 +3543,12 @@
     __ PrepareBuiltinCall(&sig, call_descriptor,
                           {dst, src, size, table_dst_index, table_src_index});
     __ CallRuntimeStub(target);
+    DefineSafepoint();
 
     // Pop parameters from the value stack.
     __ cache_state()->stack_state.pop_back(3);
 
     RegisterDebugSideTableEntry(DebugSideTableBuilder::kDidSpill);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
   }
 
   void TableGrow(FullDecoder* decoder, const TableIndexImmediate<validate>& imm,

@@ -3635,7 +3671,11 @@
                   const CallFunctionImmediate<validate>& imm,
                   const Value args[], Value returns[], CallKind call_kind) {
     for (ValueType ret : imm.sig->returns()) {
-      if (!CheckSupportedType(decoder, kSupportedTypes, ret, "return")) {
+      if (!CheckSupportedType(decoder,
+                              FLAG_liftoff_extern_ref
+                                  ? kSupportedTypes
+                                  : kSupportedTypesWithoutRefs,
+                              ret, "return")) {
         // TODO(7581): Remove this once reference-types are full supported.
         if (!ret.is_reference_type()) {
           return;

@@ -3700,8 +3740,8 @@
       }
     }
 
+    DefineSafepoint();
     RegisterDebugSideTableEntry(DebugSideTableBuilder::kDidSpill);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
 
     __ FinishCall(imm.sig, call_descriptor);
   }

@@ -3713,7 +3753,11 @@
       return unsupported(decoder, kRefTypes, "table index != 0");
     }
     for (ValueType ret : imm.sig->returns()) {
-      if (!CheckSupportedType(decoder, kSupportedTypes, ret, "return")) {
+      if (!CheckSupportedType(decoder,
+                              FLAG_liftoff_extern_ref
+                                  ? kSupportedTypes
+                                  : kSupportedTypesWithoutRefs,
+                              ret, "return")) {
         return;
       }
     }

@@ -3837,8 +3881,8 @@
       __ CallIndirect(imm.sig, call_descriptor, target);
     }
 
+    DefineSafepoint();
     RegisterDebugSideTableEntry(DebugSideTableBuilder::kDidSpill);
-    safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
 
     __ FinishCall(imm.sig, call_descriptor);
   }

@@ -3894,6 +3938,12 @@
     os << "\n";
   }
 
+  void DefineSafepoint() {
+    Safepoint safepoint = safepoint_table_builder_.DefineSafepoint(
+        &asm_, Safepoint::kNoLazyDeopt);
+    __ cache_state()->DefineSafepoint(safepoint);
+  }
+
   DISALLOW_IMPLICIT_CONSTRUCTORS(LiftoffCompiler);
 };
 

@@ -3960,7 +4010,8 @@ WasmCompilationResult ExecuteLiftoffCompilation(
   result.instr_buffer = instruction_buffer->ReleaseBuffer();
   result.source_positions = compiler->GetSourcePositionTable();
   result.protected_instructions_data = compiler->GetProtectedInstructionsData();
-  result.frame_slot_count = compiler->GetTotalFrameSlotCount();
+  result.frame_slot_count = compiler->GetTotalFrameSlotCountForGC() +
+                            StandardFrameConstants::kFixedSlotCountAboveFp;
   result.tagged_parameter_slots = call_descriptor->GetTaggedParameterSlots();
   result.func_index = func_index;
   result.result_tier = ExecutionTier::kLiftoff;

@@ -61,6 +61,8 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
   MemOperand src(base, offset);
   switch (type.kind()) {
     case ValueType::kI32:
+    case ValueType::kRef:
+    case ValueType::kOptRef:
       assm->lw(dst.gp(), src);
       break;
     case ValueType::kI64:

@@ -330,13 +332,7 @@ int LiftoffAssembler::SlotSizeForType(ValueType type) {
 }
 
 bool LiftoffAssembler::NeedsAlignment(ValueType type) {
-  switch (type.kind()) {
-    case ValueType::kS128:
-      return true;
-    default:
-      // No alignment because all other types are kStackSlotSize.
-      return false;
-  }
+  return type.kind() == ValueType::kS128 || type.is_reference_type();
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,

@@ -651,6 +647,8 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
   MemOperand dst = liftoff::GetStackSlot(offset);
   switch (type.kind()) {
     case ValueType::kI32:
+    case ValueType::kRef:
+    case ValueType::kOptRef:
       sw(reg.gp(), dst);
       break;
     case ValueType::kI64:

@@ -701,6 +699,8 @@ void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueType type) {
   MemOperand src = liftoff::GetStackSlot(offset);
   switch (type.kind()) {
     case ValueType::kI32:
+    case ValueType::kRef:
+    case ValueType::kOptRef:
       lw(reg.gp(), src);
       break;
     case ValueType::kI64:

@@ -53,6 +53,8 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
       assm->lw(dst.gp(), src);
       break;
     case ValueType::kI64:
+    case ValueType::kRef:
+    case ValueType::kOptRef:
       assm->ld(dst.gp(), src);
       break;
     case ValueType::kF32:

@@ -295,13 +297,7 @@ int LiftoffAssembler::SlotSizeForType(ValueType type) {
 }
 
 bool LiftoffAssembler::NeedsAlignment(ValueType type) {
-  switch (type.kind()) {
-    case ValueType::kS128:
-      return true;
-    default:
-      // No alignment because all other types are kStackSlotSize.
-      return false;
-  }
+  return type.kind() == ValueType::kS128 || type.is_reference_type();
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,

@@ -588,6 +584,8 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
       Sw(reg.gp(), dst);
       break;
     case ValueType::kI64:
+    case ValueType::kRef:
+    case ValueType::kOptRef:
       Sd(reg.gp(), dst);
       break;
     case ValueType::kF32:

@@ -614,7 +612,9 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
       sw(tmp.gp(), dst);
       break;
     }
-    case ValueType::kI64: {
+    case ValueType::kI64:
+    case ValueType::kRef:
+    case ValueType::kOptRef: {
       LiftoffRegister tmp = GetUnusedRegister(kGpReg, {});
       TurboAssembler::li(tmp.gp(), value.to_i64());
       sd(tmp.gp(), dst);

@@ -634,6 +634,8 @@ void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueType type) {
       Lw(reg.gp(), src);
       break;
     case ValueType::kI64:
+    case ValueType::kRef:
+    case ValueType::kOptRef:
       Ld(reg.gp(), src);
       break;
     case ValueType::kF32:

@@ -62,6 +62,8 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Operand src,
       assm->movl(dst.gp(), src);
       break;
     case ValueType::kI64:
+    case ValueType::kOptRef:
+    case ValueType::kRef:
       assm->movq(dst.gp(), src);
       break;
     case ValueType::kF32:

@@ -201,7 +203,9 @@ int LiftoffAssembler::SlotSizeForType(ValueType type) {
   return type.element_size_bytes();
 }
 
-bool LiftoffAssembler::NeedsAlignment(ValueType type) { return false; }
+bool LiftoffAssembler::NeedsAlignment(ValueType type) {
+  return type.is_reference_type();
+}
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
                                     RelocInfo::Mode rmode) {

@@ -746,7 +750,7 @@ void LiftoffAssembler::Move(Register dst, Register src, ValueType type) {
   if (type == kWasmI32) {
     movl(dst, src);
   } else {
-    DCHECK_EQ(kWasmI64, type);
+    DCHECK(kWasmI64 == type || type.is_reference_type());
     movq(dst, src);
   }
 }

@@ -772,6 +776,8 @@ void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
       movl(dst, reg.gp());
       break;
     case ValueType::kI64:
+    case ValueType::kOptRef:
+    case ValueType::kRef:
       movq(dst, reg.gp());
       break;
     case ValueType::kF32:

test/mjsunit/wasm/externref-liftoff.js (new file)
@@ -0,0 +1,8 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags: --expose-wasm --experimental-wasm-reftypes --expose-gc --liftoff
+// Flags: --no-wasm-tier-up --liftoff-extern-ref
+
+load("test/mjsunit/wasm/externref.js");