[mips64][liftoff] Fix some memory access operations
MemOperand's immediate offset is an int32_t value, but some offsets in Liftoff are uint32_t, so those offsets have to be loaded into registers first. In addition, raw assembly instructions like lw/sw/daddiu cannot handle operands with large immediate values, so we should use macro-assembler instructions like Lw/Sw/Daddu instead, unless we can make sure the operands are suitable for the raw instructions.

Bug: v8:10925
Change-Id: I122d35a6857461791999b603f0150311bfc6343e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2434985
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Commit-Queue: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Cr-Commit-Position: refs/heads/master@{#70151}
Parent: 905318c724
Commit: 9161924208
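Why the offsets need special handling, in a hedged sketch (the helper below is illustrative only and is not part of the CL; the real helper added by this change is liftoff::GetMemOp in the first hunk): MemOperand stores its displacement as an int32_t, and the raw lw/sw/daddiu instructions only encode a signed 16-bit immediate, so a uint32_t offset can exceed both limits.

// Hedged sketch, assuming a hypothetical EmitI32Load helper; it mirrors the
// pattern the CL introduces rather than V8's actual code.
void EmitI32Load(LiftoffAssembler* assm, Register dst, Register addr,
                 uint32_t offset_imm) {
  if (is_uint31(offset_imm)) {
    // Fits MemOperand's int32_t displacement. The macro-assembler Lw will
    // itself split the value if it does not fit lw's 16-bit field.
    assm->Lw(dst, MemOperand(addr, offset_imm));
  } else {
    // Does not fit int32_t: materialize the offset in the scratch register,
    // fold it into the base address, and load with a zero displacement.
    assm->li(kScratchReg, offset_imm);
    assm->daddu(kScratchReg, kScratchReg, addr);
    assm->Lw(dst, MemOperand(kScratchReg, 0));
  }
}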
@@ -46,19 +46,35 @@ inline MemOperand GetStackSlot(int offset) { return MemOperand(fp, -offset); }
 
 inline MemOperand GetInstanceOperand() { return GetStackSlot(kInstanceOffset); }
 
+inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,
+                           Register offset, uint32_t offset_imm) {
+  if (is_uint31(offset_imm)) {
+    if (offset == no_reg) return MemOperand(addr, offset_imm);
+    assm->daddu(kScratchReg, addr, offset);
+    return MemOperand(kScratchReg, offset_imm);
+  }
+  // Offset immediate does not fit in 31 bits.
+  assm->li(kScratchReg, offset_imm);
+  assm->daddu(kScratchReg, kScratchReg, addr);
+  if (offset != no_reg) {
+    assm->daddu(kScratchReg, kScratchReg, offset);
+  }
+  return MemOperand(kScratchReg, 0);
+}
+
 inline void Load(LiftoffAssembler* assm, LiftoffRegister dst, MemOperand src,
                  ValueType type) {
   switch (type.kind()) {
     case ValueType::kI32:
-      assm->lw(dst.gp(), src);
+      assm->Lw(dst.gp(), src);
       break;
     case ValueType::kI64:
     case ValueType::kRef:
     case ValueType::kOptRef:
-      assm->ld(dst.gp(), src);
+      assm->Ld(dst.gp(), src);
       break;
     case ValueType::kF32:
-      assm->lwc1(dst.fp(), src);
+      assm->Lwc1(dst.fp(), src);
       break;
     case ValueType::kF64:
       assm->Ldc1(dst.fp(), src);
@@ -326,12 +342,12 @@ void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
 void LiftoffAssembler::LoadFromInstance(Register dst, uint32_t offset,
                                         int size) {
   DCHECK_LE(offset, kMaxInt);
-  ld(dst, liftoff::GetInstanceOperand());
+  Ld(dst, liftoff::GetInstanceOperand());
   DCHECK(size == 4 || size == 8);
   if (size == 4) {
-    lw(dst, MemOperand(dst, offset));
+    Lw(dst, MemOperand(dst, offset));
   } else {
-    ld(dst, MemOperand(dst, offset));
+    Ld(dst, MemOperand(dst, offset));
   }
 }
 
@@ -341,11 +357,11 @@ void LiftoffAssembler::LoadTaggedPointerFromInstance(Register dst,
 }
 
 void LiftoffAssembler::SpillInstance(Register instance) {
-  sd(instance, liftoff::GetInstanceOperand());
+  Sd(instance, liftoff::GetInstanceOperand());
 }
 
 void LiftoffAssembler::FillInstanceInto(Register dst) {
-  ld(dst, liftoff::GetInstanceOperand());
+  Ld(dst, liftoff::GetInstanceOperand());
 }
 
 void LiftoffAssembler::LoadTaggedPointer(Register dst, Register src_addr,
@@ -369,23 +385,17 @@ void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
                             Register offset_reg, uint32_t offset_imm,
                             LoadType type, LiftoffRegList pinned,
                             uint32_t* protected_load_pc, bool is_load_mem) {
-  Register src = no_reg;
-  if (offset_reg != no_reg) {
-    src = GetUnusedRegister(kGpReg, pinned).gp();
-    emit_ptrsize_add(src, src_addr, offset_reg);
-  }
-  MemOperand src_op = (offset_reg != no_reg) ? MemOperand(src, offset_imm)
-                                             : MemOperand(src_addr, offset_imm);
+  MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm);
 
   if (protected_load_pc) *protected_load_pc = pc_offset();
   switch (type.value()) {
     case LoadType::kI32Load8U:
     case LoadType::kI64Load8U:
-      lbu(dst.gp(), src_op);
+      Lbu(dst.gp(), src_op);
       break;
     case LoadType::kI32Load8S:
    case LoadType::kI64Load8S:
-      lb(dst.gp(), src_op);
+      Lb(dst.gp(), src_op);
       break;
     case LoadType::kI32Load16U:
     case LoadType::kI64Load16U:
@@ -430,16 +440,7 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
                              uint32_t offset_imm, LiftoffRegister src,
                              StoreType type, LiftoffRegList pinned,
                              uint32_t* protected_store_pc, bool is_store_mem) {
-  Register dst = no_reg;
-  MemOperand dst_op = MemOperand(dst_addr, offset_imm);
-  if (offset_reg != no_reg) {
-    if (is_store_mem) {
-      pinned.set(src);
-    }
-    dst = GetUnusedRegister(kGpReg, pinned).gp();
-    emit_ptrsize_add(dst, dst_addr, offset_reg);
-    dst_op = MemOperand(dst, offset_imm);
-  }
+  MemOperand dst_op = liftoff::GetMemOp(this, dst_addr, offset_reg, offset_imm);
 
 #if defined(V8_TARGET_BIG_ENDIAN)
   if (is_store_mem) {
@@ -458,7 +459,7 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
   switch (type.value()) {
     case StoreType::kI32Store8:
     case StoreType::kI64Store8:
-      sb(src.gp(), dst_op);
+      Sb(src.gp(), dst_op);
       break;
     case StoreType::kI32Store16:
     case StoreType::kI64Store16:
@@ -619,7 +620,7 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
     case ValueType::kI32: {
       LiftoffRegister tmp = GetUnusedRegister(kGpReg, {});
       TurboAssembler::li(tmp.gp(), Operand(value.to_i32()));
-      sw(tmp.gp(), dst);
+      Sw(tmp.gp(), dst);
       break;
     }
     case ValueType::kI64:
@@ -627,7 +628,7 @@ void LiftoffAssembler::Spill(int offset, WasmValue value) {
     case ValueType::kOptRef: {
       LiftoffRegister tmp = GetUnusedRegister(kGpReg, {});
       TurboAssembler::li(tmp.gp(), value.to_i64());
-      sd(tmp.gp(), dst);
+      Sd(tmp.gp(), dst);
       break;
     }
     default:
@@ -2761,7 +2762,7 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
                              const LiftoffRegister* rets,
                              ValueType out_argument_type, int stack_bytes,
                              ExternalReference ext_ref) {
-  daddiu(sp, sp, -stack_bytes);
+  Daddu(sp, sp, -stack_bytes);
 
   int arg_bytes = 0;
   for (ValueType param_type : sig->parameters()) {
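A hedged note on the daddiu to Daddu switches in this and the following hunks: the raw daddiu instruction only encodes a signed 16-bit immediate, so a large stack_bytes or size value cannot be assembled directly. The TurboAssembler::Daddu macro is expected to fall back to a scratch register in that case, roughly as sketched below (simplified and assumed; the real implementation lives in V8's mips64 macro assembler, not in this CL).

// Simplified, assumed sketch of the macro fallback; not copied from V8.
void DadduSketch(TurboAssembler* tasm, Register rd, Register rs, int64_t imm) {
  if (is_int16(imm)) {
    // Immediate fits daddiu's signed 16-bit field: emit it directly.
    tasm->daddiu(rd, rs, static_cast<int32_t>(imm));
  } else {
    // Too large for the encoding: load it into the scratch register and
    // use the register-register add instead.
    tasm->li(kScratchReg, imm);
    tasm->daddu(rd, rs, kScratchReg);
  }
}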
@@ -2796,7 +2797,7 @@ void LiftoffAssembler::CallC(const wasm::FunctionSig* sig,
     liftoff::Load(this, *next_result_reg, MemOperand(sp, 0), out_argument_type);
   }
 
-  daddiu(sp, sp, stack_bytes);
+  Daddu(sp, sp, stack_bytes);
 }
 
 void LiftoffAssembler::CallNativeWasmCode(Address addr) {
@@ -2834,12 +2835,12 @@ void LiftoffAssembler::CallRuntimeStub(WasmCode::RuntimeStubId sid) {
 }
 
 void LiftoffAssembler::AllocateStackSlot(Register addr, uint32_t size) {
-  daddiu(sp, sp, -size);
+  Daddu(sp, sp, -size);
   TurboAssembler::Move(addr, sp);
 }
 
 void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
-  daddiu(sp, sp, size);
+  Daddu(sp, sp, size);
 }
 
 void LiftoffStackSlots::Construct() {
@@ -2848,12 +2849,12 @@ void LiftoffStackSlots::Construct() {
     switch (src.loc()) {
      case LiftoffAssembler::VarState::kStack:
        if (src.type() != kWasmS128) {
-          asm_->ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
+          asm_->Ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
          asm_->push(kScratchReg);
        } else {
-          asm_->ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_ - 8));
+          asm_->Ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_ - 8));
          asm_->push(kScratchReg);
-          asm_->ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
+          asm_->Ld(kScratchReg, liftoff::GetStackSlot(slot.src_offset_));
          asm_->push(kScratchReg);
        }
        break;