[mips][Liftoff] Use ValueKind instead of ValueType
Port: 96a0677a22
Bug: v8:11477
Change-Id: I438a5b833a9838ffa53d30785cf627f75997d7fc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2719402
Auto-Submit: Liu yu <liuyu@loongson.cn>
Reviewed-by: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Commit-Queue: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Cr-Commit-Position: refs/heads/master@{#73037}
This commit is contained in:
parent 47892136e0
commit d43377259f
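For context on the pattern being ported: the original CL (96a0677a22) switches the Liftoff helpers from taking a full ValueType to taking the plain ValueKind enum, so they switch on the kind directly and call free functions such as element_size_bytes(kind) and is_reference_type(kind) instead of ValueType member functions. The snippet below is a minimal standalone sketch of that shape, assuming simplified stand-in definitions for the enum and helpers; it is not V8's actual API (the real definitions live alongside ValueKind in V8's wasm value-type header).

// Standalone sketch of the ValueType -> ValueKind migration pattern.
// The enum values and helpers below are simplified stand-ins, not V8's real API.
#include <cstdio>

enum ValueKind { kI32, kI64, kF32, kF64, kS128, kRef, kOptRef, kStmt };

// Free functions on the kind replace ValueType member calls
// (type.element_size_bytes(), type.is_reference_type()).
constexpr int element_size_bytes(ValueKind kind) {
  switch (kind) {
    case kI32:
    case kF32:
      return 4;
    case kI64:
    case kF64:
      return 8;
    case kS128:
      return 16;
    case kRef:
    case kOptRef:
      return sizeof(void*);
    default:
      return 0;
  }
}

constexpr bool is_reference_type(ValueKind kind) {
  return kind == kRef || kind == kOptRef;
}

// Old shape:  int SlotSizeForType(ValueType type) { switch (type.kind()) ... }
// New shape:  the caller passes the kind itself.
int SlotSizeForType(ValueKind kind) {
  constexpr int kStackSlotSize = 8;  // stand-in constant for the sketch
  return kind == kS128 ? element_size_bytes(kind) : kStackSlotSize;
}

int main() {
  std::printf("kS128 slot: %d bytes\n", SlotSizeForType(kS128));    // 16
  std::printf("kI32 slot:  %d bytes\n", SlotSizeForType(kI32));     // 8
  std::printf("kRef is reference: %d\n", is_reference_type(kRef));  // 1
}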
src/wasm/baseline/mips/liftoff-assembler-mips.h
@@ -84,9 +84,9 @@ inline MemOperand GetHalfStackSlot(int offset, RegPairHalf half) {
 inline MemOperand GetInstanceOperand() { return GetStackSlot(kInstanceOffset); }
 
 inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
-                 int32_t offset, ValueType type) {
+                 int32_t offset, ValueKind kind) {
   MemOperand src(base, offset);
-  switch (type.kind()) {
+  switch (kind) {
     case kI32:
     case kRef:
     case kOptRef:
@@ -112,9 +112,9 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, Register base,
 }
 
 inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
-                  LiftoffRegister src, ValueType type) {
+                  LiftoffRegister src, ValueKind kind) {
   MemOperand dst(base, offset);
-  switch (type.kind()) {
+  switch (kind) {
     case kI32:
     case kOptRef:
     case kRef:
@@ -139,8 +139,8 @@ inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
   }
 }
 
-inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueType type) {
-  switch (type.kind()) {
+inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind) {
+  switch (kind) {
     case kI32:
     case kOptRef:
     case kRef:
@@ -365,17 +365,17 @@ constexpr int LiftoffAssembler::StaticStackFrameSize() {
   return liftoff::kInstanceOffset;
 }
 
-int LiftoffAssembler::SlotSizeForType(ValueType type) {
-  switch (type.kind()) {
+int LiftoffAssembler::SlotSizeForType(ValueKind kind) {
+  switch (kind) {
     case kS128:
-      return type.element_size_bytes();
+      return element_size_bytes(kind);
     default:
       return kStackSlotSize;
   }
 }
 
-bool LiftoffAssembler::NeedsAlignment(ValueType type) {
-  return type.kind() == kS128 || type.is_reference_type();
+bool LiftoffAssembler::NeedsAlignment(ValueKind kind) {
+  return kind == kS128 || is_reference_type(kind);
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
@@ -697,46 +697,46 @@ void LiftoffAssembler::AtomicFence() { sync(); }
 
 void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                            uint32_t caller_slot_idx,
-                                           ValueType type) {
+                                           ValueKind kind) {
   int32_t offset = kSystemPointerSize * (caller_slot_idx + 1);
-  liftoff::Load(this, dst, fp, offset, type);
+  liftoff::Load(this, dst, fp, offset, kind);
 }
 
 void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
                                             uint32_t caller_slot_idx,
-                                            ValueType type) {
+                                            ValueKind kind) {
   int32_t offset = kSystemPointerSize * (caller_slot_idx + 1);
-  liftoff::Store(this, fp, offset, src, type);
+  liftoff::Store(this, fp, offset, src, kind);
 }
 
 void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
-                                           ValueType type) {
-  liftoff::Load(this, dst, sp, offset, type);
+                                           ValueKind kind) {
+  liftoff::Load(this, dst, sp, offset, kind);
 }
 
 void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
-                                      ValueType type) {
+                                      ValueKind kind) {
   DCHECK_NE(dst_offset, src_offset);
-  LiftoffRegister reg = GetUnusedRegister(reg_class_for(type), {});
-  Fill(reg, src_offset, type);
-  Spill(dst_offset, reg, type);
+  LiftoffRegister reg = GetUnusedRegister(reg_class_for(kind), {});
+  Fill(reg, src_offset, kind);
+  Spill(dst_offset, reg, kind);
 }
 
-void LiftoffAssembler::Move(Register dst, Register src, ValueType type) {
+void LiftoffAssembler::Move(Register dst, Register src, ValueKind kind) {
   DCHECK_NE(dst, src);
   TurboAssembler::mov(dst, src);
 }
 
 void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
-                            ValueType type) {
+                            ValueKind kind) {
   DCHECK_NE(dst, src);
   TurboAssembler::Move(dst, src);
 }
 
-void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
+void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
   RecordUsedSpillOffset(offset);
   MemOperand dst = liftoff::GetStackSlot(offset);
-  switch (type.kind()) {
+  switch (kind) {
     case kI32:
     case kRef:
     case kOptRef:
@@ -790,9 +790,9 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
   }
 }
 
-void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueType type) {
+void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
   MemOperand src = liftoff::GetStackSlot(offset);
-  switch (type.kind()) {
+  switch (kind) {
     case kI32:
     case kRef:
     case kOptRef:
@@ -1510,15 +1510,15 @@ void LiftoffAssembler::emit_jump(Register target) {
 }
 
 void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
-                                      Label* label, ValueType type,
+                                      Label* label, ValueKind kind,
                                       Register lhs, Register rhs) {
   Condition cond = liftoff::ToCondition(liftoff_cond);
   if (rhs == no_reg) {
-    DCHECK_EQ(type, kWasmI32);
+    DCHECK_EQ(kind, kI32);
     TurboAssembler::Branch(label, cond, lhs, Operand(zero_reg));
   } else {
-    DCHECK(type == kWasmI32 ||
-           (type.is_reference_type() &&
+    DCHECK(kind == kI32 ||
+           (is_reference_type(kind) &&
             (liftoff_cond == kEqual || liftoff_cond == kUnequal)));
     TurboAssembler::Branch(label, cond, lhs, Operand(rhs));
   }
@@ -1713,7 +1713,7 @@ void LiftoffAssembler::emit_f64_set_cond(LiftoffCondition liftoff_cond,
 bool LiftoffAssembler::emit_select(LiftoffRegister dst, Register condition,
                                    LiftoffRegister true_value,
                                    LiftoffRegister false_value,
-                                   ValueType type) {
+                                   ValueKind kind) {
   return false;
 }
 
@@ -2912,17 +2912,17 @@ void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
   TurboAssembler::DropAndRet(static_cast<int>(num_stack_slots));
 }
 
-void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
+void LiftoffAssembler::CallC(const ValueKindSig* sig,
                              const LiftoffRegister* args,
                              const LiftoffRegister* rets,
-                             ValueType out_argument_type, int stack_bytes,
+                             ValueKind out_argument_kind, int stack_bytes,
                              ExternalReference ext_ref) {
   addiu(sp, sp, -stack_bytes);
 
   int arg_bytes = 0;
-  for (ValueType param_type : sig->parameters()) {
-    liftoff::Store(this, sp, arg_bytes, *args++, param_type);
-    arg_bytes += param_type.element_size_bytes();
+  for (ValueKind param_kind : sig->parameters()) {
+    liftoff::Store(this, sp, arg_bytes, *args++, param_kind);
+    arg_bytes += element_size_bytes(param_kind);
   }
   DCHECK_LE(arg_bytes, stack_bytes);
 
@@ -2948,8 +2948,8 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
   }
 
   // Load potential output value from the buffer on the stack.
-  if (out_argument_type != kWasmStmt) {
-    liftoff::Load(this, *next_result_reg, sp, 0, out_argument_type);
+  if (out_argument_kind != kStmt) {
+    liftoff::Load(this, *next_result_reg, sp, 0, out_argument_kind);
   }
 
   addiu(sp, sp, stack_bytes);
@@ -2963,7 +2963,7 @@ void LiftoffAssembler::TailCallNativeWasmCode(Address addr) {
   Jump(addr, RelocInfo::WASM_CALL);
 }
 
-void LiftoffAssembler::CallIndirect(const wasm::FunctionSig* sig,
+void LiftoffAssembler::CallIndirect(const ValueKindSig* sig,
                                     compiler::CallDescriptor* call_descriptor,
                                     Register target) {
   if (target == no_reg) {
@@ -3003,7 +3003,7 @@ void LiftoffStackSlots::Construct() {
     const LiftoffAssembler::VarState& src = slot.src_;
     switch (src.loc()) {
       case LiftoffAssembler::VarState::kStack: {
-        if (src.type().kind() == kF64) {
+        if (src.kind() == kF64) {
           DCHECK_EQ(kLowWord, slot.half_);
           asm_->lw(kScratchReg,
                    liftoff::GetHalfStackSlot(slot.src_offset_, kHighWord));
@@ -3015,12 +3015,12 @@ void LiftoffStackSlots::Construct() {
         break;
       }
       case LiftoffAssembler::VarState::kRegister:
-        if (src.type().kind() == kI64) {
+        if (src.kind() == kI64) {
           liftoff::push(
               asm_, slot.half_ == kLowWord ? src.reg().low() : src.reg().high(),
-              kWasmI32);
+              kI32);
         } else {
-          liftoff::push(asm_, src.reg(), src.type());
+          liftoff::push(asm_, src.reg(), src.kind());
         }
         break;
       case LiftoffAssembler::VarState::kIntConst: {

src/wasm/baseline/mips64/liftoff-assembler-mips64.h
@@ -93,8 +93,8 @@ inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,
 }
 
 inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
-                 ValueType type) {
-  switch (type.kind()) {
+                 ValueKind kind) {
+  switch (kind) {
     case kI32:
       assm->Lw(dst.gp(), src);
       break;
@@ -120,9 +120,9 @@ inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
 }
 
 inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
-                  LiftoffRegister src, ValueType type) {
+                  LiftoffRegister src, ValueKind kind) {
   MemOperand dst(base, offset);
-  switch (type.kind()) {
+  switch (kind) {
     case kI32:
       assm->Usw(src.gp(), dst);
       break;
@@ -147,8 +147,8 @@ inline void Store(LiftoffAssembler* assm, Register base, int32_t offset,
   }
 }
 
-inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueType type) {
-  switch (type.kind()) {
+inline void push(LiftoffAssembler* assm, LiftoffRegister reg, ValueKind kind) {
+  switch (kind) {
     case kI32:
       assm->daddiu(sp, sp, -kSystemPointerSize);
       assm->sw(reg.gp(), MemOperand(sp, 0));
@@ -352,17 +352,17 @@ constexpr int LiftoffAssembler::StaticStackFrameSize() {
   return liftoff::kInstanceOffset;
 }
 
-int LiftoffAssembler::SlotSizeForType(ValueType type) {
-  switch (type.kind()) {
+int LiftoffAssembler::SlotSizeForType(ValueKind kind) {
+  switch (kind) {
     case kS128:
-      return type.element_size_bytes();
+      return element_size_bytes(kind);
     default:
       return kStackSlotSize;
   }
 }
 
-bool LiftoffAssembler::NeedsAlignment(ValueType type) {
-  return type.kind() == kS128 || type.is_reference_type();
+bool LiftoffAssembler::NeedsAlignment(ValueKind kind) {
+  return kind == kS128 || is_reference_type(kind);
 }
 
 void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
@@ -626,51 +626,51 @@ void LiftoffAssembler::AtomicFence() { sync(); }
 
 void LiftoffAssembler::LoadCallerFrameSlot(LiftoffRegister dst,
                                            uint32_t caller_slot_idx,
-                                           ValueType type) {
+                                           ValueKind kind) {
   MemOperand src(fp, kSystemPointerSize * (caller_slot_idx + 1));
-  liftoff::Load(this, dst, src, type);
+  liftoff::Load(this, dst, src, kind);
 }
 
 void LiftoffAssembler::StoreCallerFrameSlot(LiftoffRegister src,
                                             uint32_t caller_slot_idx,
-                                            ValueType type) {
+                                            ValueKind kind) {
   int32_t offset = kSystemPointerSize * (caller_slot_idx + 1);
-  liftoff::Store(this, fp, offset, src, type);
+  liftoff::Store(this, fp, offset, src, kind);
 }
 
 void LiftoffAssembler::LoadReturnStackSlot(LiftoffRegister dst, int offset,
-                                           ValueType type) {
-  liftoff::Load(this, dst, MemOperand(sp, offset), type);
+                                           ValueKind kind) {
+  liftoff::Load(this, dst, MemOperand(sp, offset), kind);
 }
 
 void LiftoffAssembler::MoveStackValue(uint32_t dst_offset, uint32_t src_offset,
-                                      ValueType type) {
+                                      ValueKind kind) {
   DCHECK_NE(dst_offset, src_offset);
-  LiftoffRegister reg = GetUnusedRegister(reg_class_for(type), {});
-  Fill(reg, src_offset, type);
-  Spill(dst_offset, reg, type);
+  LiftoffRegister reg = GetUnusedRegister(reg_class_for(kind), {});
+  Fill(reg, src_offset, kind);
+  Spill(dst_offset, reg, kind);
 }
 
-void LiftoffAssembler::Move(Register dst, Register src, ValueType type) {
+void LiftoffAssembler::Move(Register dst, Register src, ValueKind kind) {
   DCHECK_NE(dst, src);
   // TODO(ksreten): Handle different sizes here.
   TurboAssembler::Move(dst, src);
 }
 
 void LiftoffAssembler::Move(DoubleRegister dst, DoubleRegister src,
-                            ValueType type) {
+                            ValueKind kind) {
   DCHECK_NE(dst, src);
-  if (type != kWasmS128) {
+  if (kind != kS128) {
     TurboAssembler::Move(dst, src);
   } else {
     TurboAssembler::move_v(dst.toW(), src.toW());
   }
 }
 
-void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueType type) {
+void LiftoffAssembler::Spill(int offset, LiftoffRegister reg, ValueKind kind) {
   RecordUsedSpillOffset(offset);
   MemOperand dst = liftoff::GetStackSlot(offset);
-  switch (type.kind()) {
+  switch (kind) {
     case kI32:
       Sw(reg.gp(), dst);
       break;
@@ -720,9 +720,9 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
   }
 }
 
-void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueType type) {
+void LiftoffAssembler::Fill(LiftoffRegister reg, int offset, ValueKind kind) {
   MemOperand src = liftoff::GetStackSlot(offset);
-  switch (type.kind()) {
+  switch (kind) {
     case kI32:
       Lw(reg.gp(), src);
       break;
@@ -1364,15 +1364,15 @@ void LiftoffAssembler::emit_jump(Register target) {
 }
 
 void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
-                                      Label* label, ValueType type,
+                                      Label* label, ValueKind kind,
                                       Register lhs, Register rhs) {
   Condition cond = liftoff::ToCondition(liftoff_cond);
   if (rhs == no_reg) {
-    DCHECK(type == kWasmI32 || type == kWasmI64);
+    DCHECK(kind == kI32 || kind == kI64);
     TurboAssembler::Branch(label, cond, lhs, Operand(zero_reg));
   } else {
-    DCHECK((type == kWasmI32 || type == kWasmI64) ||
-           (type.is_reference_type() &&
+    DCHECK((kind == kI32 || kind == kI64) ||
+           (is_reference_type(kind) &&
             (liftoff_cond == kEqual || liftoff_cond == kUnequal)));
     TurboAssembler::Branch(label, cond, lhs, Operand(rhs));
   }
@@ -1549,7 +1549,7 @@ void LiftoffAssembler::emit_f64_set_cond(LiftoffCondition liftoff_cond,
 bool LiftoffAssembler::emit_select(LiftoffRegister dst, Register condition,
                                    LiftoffRegister true_value,
                                    LiftoffRegister false_value,
-                                   ValueType type) {
+                                   ValueKind kind) {
   return false;
 }
 
@@ -3079,17 +3079,17 @@ void LiftoffAssembler::DropStackSlotsAndRet(uint32_t num_stack_slots) {
   TurboAssembler::DropAndRet(static_cast<int>(num_stack_slots));
 }
 
-void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
+void LiftoffAssembler::CallC(const ValueKindSig* sig,
                              const LiftoffRegister* args,
                              const LiftoffRegister* rets,
-                             ValueType out_argument_type, int stack_bytes,
+                             ValueKind out_argument_kind, int stack_bytes,
                              ExternalReference ext_ref) {
   Daddu(sp, sp, -stack_bytes);
 
   int arg_bytes = 0;
-  for (ValueType param_type : sig->parameters()) {
-    liftoff::Store(this, sp, arg_bytes, *args++, param_type);
-    arg_bytes += param_type.element_size_bytes();
+  for (ValueKind param_kind : sig->parameters()) {
+    liftoff::Store(this, sp, arg_bytes, *args++, param_kind);
+    arg_bytes += element_size_bytes(param_kind);
   }
   DCHECK_LE(arg_bytes, stack_bytes);
 
@@ -3115,8 +3115,8 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
   }
 
   // Load potential output value from the buffer on the stack.
-  if (out_argument_type != kWasmStmt) {
-    liftoff::Load(this, *next_result_reg, MemOperand(sp, 0), out_argument_type);
+  if (out_argument_kind != kStmt) {
+    liftoff::Load(this, *next_result_reg, MemOperand(sp, 0), out_argument_kind);
   }
 
   Daddu(sp, sp, stack_bytes);
@@ -3130,7 +3130,7 @@ void LiftoffAssembler::TailCallNativeWasmCode(Address addr) {
   Jump(addr, RelocInfo::WASM_CALL);
 }
 
-void LiftoffAssembler::CallIndirect(const wasm::FunctionSig* sig,
+void LiftoffAssembler::CallIndirect(const ValueKindSig* sig,
                                     compiler::CallDescriptor* call_descriptor,
                                     Register target) {
   if (target == no_reg) {
@@ -3170,7 +3170,7 @@ void LiftoffStackSlots::Construct() {
     const LiftoffAssembler::VarState& src = slot.src_;
     switch (src.loc()) {
      case LiftoffAssembler::VarState::kStack:
-        if (src.type() != kWasmS128) {
+        if (src.kind() != kS128) {
          asm_->Ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
          asm_->push(kScratchReg);
        } else {
@@ -3181,7 +3181,7 @@ void LiftoffStackSlots::Construct() {
        }
        break;
      case LiftoffAssembler::VarState::kRegister:
-        liftoff::push(asm_, src.reg(), src.type());
+        liftoff::push(asm_, src.reg(), src.kind());
        break;
      case LiftoffAssembler::VarState::kIntConst: {
        asm_->li(kScratchReg, Operand(src.i32_const()));
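A note on the CallC hunks above: the signature parameter changes from const wasm::FunctionSig* to const ValueKindSig*, and the argument-staging loop now sums element_size_bytes(param_kind) to place each argument in the on-stack buffer before the C call. Below is a small self-contained sketch of that bookkeeping under simplified assumptions; ValueKindSig here is just a thin vector wrapper standing in for the real signature type, and the sizes are stand-ins, so treat it as illustrative only.

// Sketch of the argument-staging accounting in CallC after the port.
// ValueKind, element_size_bytes and ValueKindSig below are simplified stand-ins.
#include <cassert>
#include <vector>

enum ValueKind { kI32, kI64, kF32, kF64 };

constexpr int element_size_bytes(ValueKind kind) {
  return (kind == kI64 || kind == kF64) ? 8 : 4;
}

// Minimal stand-in for the signature type: just exposes parameters().
struct ValueKindSig {
  std::vector<ValueKind> params;
  const std::vector<ValueKind>& parameters() const { return params; }
};

// Computes the byte offset of each C-call argument in the stack buffer,
// mirroring the loop that calls liftoff::Store(..., arg_bytes, ...) and then
// advances arg_bytes by element_size_bytes(param_kind).
std::vector<int> ArgOffsets(const ValueKindSig* sig, int stack_bytes) {
  std::vector<int> offsets;
  int arg_bytes = 0;
  for (ValueKind param_kind : sig->parameters()) {
    offsets.push_back(arg_bytes);  // where liftoff::Store would write this arg
    arg_bytes += element_size_bytes(param_kind);
  }
  assert(arg_bytes <= stack_bytes);  // mirrors DCHECK_LE(arg_bytes, stack_bytes)
  (void)stack_bytes;
  return offsets;
}

int main() {
  ValueKindSig sig{{kI32, kF64, kI32}};
  std::vector<int> offsets = ArgOffsets(&sig, 16);  // offsets: 0, 4, 12
  return offsets.size() == 3 ? 0 : 1;
}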