[liftoff] Remove lambdas for binops with immediate

This is the same cleanup as in the previous CL, but for {EmitBinOpImm}.
This requires a minor rename of the emit functions that take an immediate,
so that they are distinguishable from the emit functions that take two
registers: we simply append an "i" to the immediate variants.
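
For illustration, a minimal standalone sketch of the resulting pattern
(this is not the actual V8 code; {Reg}, {Asm} and the simplified
{EmitBinOpImm} below are stand-ins): once the immediate variant has its
own name, both variants can be passed as plain member-function pointers,
so the lambda wrappers that previously picked the right overload are no
longer needed.

#include <cstdint>
#include <cstdio>

struct Reg { int code; };

struct Asm {
  void emit_i32_add(Reg dst, Reg lhs, Reg rhs) {
    std::printf("add r%d, r%d, r%d\n", dst.code, lhs.code, rhs.code);
  }
  // The trailing "i" marks the immediate form, mirroring the rename above.
  void emit_i32_addi(Reg dst, Reg lhs, int32_t imm) {
    std::printf("add r%d, r%d, #%d\n", dst.code, lhs.code,
                static_cast<int>(imm));
  }
};

// Hypothetical helper in the spirit of {EmitBinOpImm}: it receives both
// emit functions as member-function pointers and dispatches directly.
template <typename RegFn, typename ImmFn>
void EmitBinOpImm(Asm* assm, RegFn fn, ImmFn fn_imm, Reg dst, Reg lhs,
                  Reg rhs, const int32_t* imm_or_null) {
  if (imm_or_null != nullptr) {
    (assm->*fn_imm)(dst, lhs, *imm_or_null);  // RHS is a constant
  } else {
    (assm->*fn)(dst, lhs, rhs);               // RHS is a register
  }
}

int main() {
  Asm assm;
  int32_t imm = 42;
  EmitBinOpImm(&assm, &Asm::emit_i32_add, &Asm::emit_i32_addi,
               Reg{0}, Reg{1}, Reg{2}, &imm);     // emits "add r0, r1, #42"
  EmitBinOpImm(&assm, &Asm::emit_i32_add, &Asm::emit_i32_addi,
               Reg{0}, Reg{1}, Reg{2}, nullptr);  // emits "add r0, r1, r2"
  return 0;
}

With a single overloaded name, &Asm::emit_i32_add could not be deduced as
a template argument without an explicit cast; that is the kind of
ambiguity the old lambdas in {CASE_I32_BINOPI} / {CASE_I64_BINOPI} worked
around.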

R=zhin@chromium.org

Bug: v8:10364
Change-Id: Ie3732a956698368a5f1fbe67334a13014cd2f354
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2129631
Commit-Queue: Clemens Backes <clemensb@chromium.org>
Reviewed-by: Zhi An Ng <zhin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#66941}
Clemens Backes 2020-03-31 14:26:57 +02:00 committed by Commit Bot
parent 4c414a2265
commit a1bd722799
9 changed files with 198 additions and 216 deletions

View File

@ -791,27 +791,27 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
Register rhs) { \
instruction(dst, lhs, rhs); \
}
#define I32_BINOP_I(name, instruction) \
I32_BINOP(name, instruction) \
void LiftoffAssembler::emit_##name(Register dst, Register lhs, \
int32_t imm) { \
instruction(dst, lhs, Operand(imm)); \
#define I32_BINOP_I(name, instruction) \
I32_BINOP(name, instruction) \
void LiftoffAssembler::emit_##name##i(Register dst, Register lhs, \
int32_t imm) { \
instruction(dst, lhs, Operand(imm)); \
}
#define I32_SHIFTOP(name, instruction) \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
Register amount) { \
UseScratchRegisterScope temps(this); \
Register scratch = temps.Acquire(); \
and_(scratch, amount, Operand(0x1f)); \
instruction(dst, src, Operand(scratch)); \
} \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
int32_t amount) { \
if (V8_LIKELY((amount & 31) != 0)) { \
instruction(dst, src, Operand(amount & 31)); \
} else if (dst != src) { \
mov(dst, src); \
} \
#define I32_SHIFTOP(name, instruction) \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
Register amount) { \
UseScratchRegisterScope temps(this); \
Register scratch = temps.Acquire(); \
and_(scratch, amount, Operand(0x1f)); \
instruction(dst, src, Operand(scratch)); \
} \
void LiftoffAssembler::emit_##name##i(Register dst, Register src, \
int32_t amount) { \
if (V8_LIKELY((amount & 31) != 0)) { \
instruction(dst, src, Operand(amount & 31)); \
} else if (dst != src) { \
mov(dst, src); \
} \
}
#define FP32_UNOP(name, instruction) \
void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister src) { \
@ -997,8 +997,8 @@ void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
liftoff::I64Binop<&Assembler::add, &Assembler::adc>(this, dst, lhs, rhs);
}
void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
void LiftoffAssembler::emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::I64BinopI<&Assembler::add, &Assembler::adc>(this, dst, lhs, imm);
}
@ -1057,8 +1057,8 @@ void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
liftoff::I64Shiftop<&TurboAssembler::LslPair, true>(this, dst, src, amount);
}
void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_shli(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
UseScratchRegisterScope temps(this);
// {src.low_gp()} will still be needed after writing {dst.high_gp()}.
Register src_low =
@ -1072,8 +1072,8 @@ void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
liftoff::I64Shiftop<&TurboAssembler::AsrPair, false>(this, dst, src, amount);
}
void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_sari(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
UseScratchRegisterScope temps(this);
// {src.high_gp()} will still be needed after writing {dst.low_gp()}.
Register src_high =
@ -1087,8 +1087,8 @@ void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
liftoff::I64Shiftop<&TurboAssembler::LsrPair, false>(this, dst, src, amount);
}
void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_shri(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
UseScratchRegisterScope temps(this);
// {src.high_gp()} will still be needed after writing {dst.low_gp()}.
Register src_high =

View File

@ -532,22 +532,22 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
Register rhs) { \
instruction(dst.W(), lhs.W(), rhs.W()); \
}
#define I32_BINOP_I(name, instruction) \
I32_BINOP(name, instruction) \
void LiftoffAssembler::emit_##name(Register dst, Register lhs, \
int32_t imm) { \
instruction(dst.W(), lhs.W(), Immediate(imm)); \
#define I32_BINOP_I(name, instruction) \
I32_BINOP(name, instruction) \
void LiftoffAssembler::emit_##name##i(Register dst, Register lhs, \
int32_t imm) { \
instruction(dst.W(), lhs.W(), Immediate(imm)); \
}
#define I64_BINOP(name, instruction) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \
LiftoffRegister rhs) { \
instruction(dst.gp().X(), lhs.gp().X(), rhs.gp().X()); \
}
#define I64_BINOP_I(name, instruction) \
I64_BINOP(name, instruction) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \
int32_t imm) { \
instruction(dst.gp().X(), lhs.gp().X(), imm); \
#define I64_BINOP_I(name, instruction) \
I64_BINOP(name, instruction) \
void LiftoffAssembler::emit_##name##i(LiftoffRegister dst, \
LiftoffRegister lhs, int32_t imm) { \
instruction(dst.gp().X(), lhs.gp().X(), imm); \
}
#define FP32_BINOP(name, instruction) \
void LiftoffAssembler::emit_##name(DoubleRegister dst, DoubleRegister lhs, \
@ -577,22 +577,22 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
instruction(dst.D(), src.D()); \
return true; \
}
#define I32_SHIFTOP(name, instruction) \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
Register amount) { \
instruction(dst.W(), src.W(), amount.W()); \
} \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
int32_t amount) { \
instruction(dst.W(), src.W(), amount & 31); \
#define I32_SHIFTOP(name, instruction) \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
Register amount) { \
instruction(dst.W(), src.W(), amount.W()); \
} \
void LiftoffAssembler::emit_##name##i(Register dst, Register src, \
int32_t amount) { \
instruction(dst.W(), src.W(), amount & 31); \
}
#define I64_SHIFTOP(name, instruction) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister src, \
Register amount) { \
instruction(dst.gp().X(), src.gp().X(), amount.X()); \
} \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister src, \
int32_t amount) { \
void LiftoffAssembler::emit_##name##i(LiftoffRegister dst, \
LiftoffRegister src, int32_t amount) { \
instruction(dst.gp().X(), src.gp().X(), amount & 63); \
}

View File

@ -686,7 +686,7 @@ void LiftoffAssembler::emit_i32_add(Register dst, Register lhs, Register rhs) {
}
}
void LiftoffAssembler::emit_i32_add(Register dst, Register lhs, int32_t imm) {
void LiftoffAssembler::emit_i32_addi(Register dst, Register lhs, int32_t imm) {
if (lhs != dst) {
lea(dst, Operand(lhs, imm));
} else {
@ -829,7 +829,7 @@ void LiftoffAssembler::emit_i32_and(Register dst, Register lhs, Register rhs) {
liftoff::EmitCommutativeBinOp<&Assembler::and_>(this, dst, lhs, rhs);
}
void LiftoffAssembler::emit_i32_and(Register dst, Register lhs, int32_t imm) {
void LiftoffAssembler::emit_i32_andi(Register dst, Register lhs, int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::and_>(this, dst, lhs, imm);
}
@ -837,7 +837,7 @@ void LiftoffAssembler::emit_i32_or(Register dst, Register lhs, Register rhs) {
liftoff::EmitCommutativeBinOp<&Assembler::or_>(this, dst, lhs, rhs);
}
void LiftoffAssembler::emit_i32_or(Register dst, Register lhs, int32_t imm) {
void LiftoffAssembler::emit_i32_ori(Register dst, Register lhs, int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::or_>(this, dst, lhs, imm);
}
@ -845,7 +845,7 @@ void LiftoffAssembler::emit_i32_xor(Register dst, Register lhs, Register rhs) {
liftoff::EmitCommutativeBinOp<&Assembler::xor_>(this, dst, lhs, rhs);
}
void LiftoffAssembler::emit_i32_xor(Register dst, Register lhs, int32_t imm) {
void LiftoffAssembler::emit_i32_xori(Register dst, Register lhs, int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::xor_>(this, dst, lhs, imm);
}
@ -891,8 +891,8 @@ void LiftoffAssembler::emit_i32_shl(Register dst, Register src,
liftoff::EmitShiftOperation(this, dst, src, amount, &Assembler::shl_cl);
}
void LiftoffAssembler::emit_i32_shl(Register dst, Register src,
int32_t amount) {
void LiftoffAssembler::emit_i32_shli(Register dst, Register src,
int32_t amount) {
if (dst != src) mov(dst, src);
shl(dst, amount & 31);
}
@ -902,8 +902,8 @@ void LiftoffAssembler::emit_i32_sar(Register dst, Register src,
liftoff::EmitShiftOperation(this, dst, src, amount, &Assembler::sar_cl);
}
void LiftoffAssembler::emit_i32_sar(Register dst, Register src,
int32_t amount) {
void LiftoffAssembler::emit_i32_sari(Register dst, Register src,
int32_t amount) {
if (dst != src) mov(dst, src);
sar(dst, amount & 31);
}
@ -913,8 +913,8 @@ void LiftoffAssembler::emit_i32_shr(Register dst, Register src,
liftoff::EmitShiftOperation(this, dst, src, amount, &Assembler::shr_cl);
}
void LiftoffAssembler::emit_i32_shr(Register dst, Register src,
int32_t amount) {
void LiftoffAssembler::emit_i32_shri(Register dst, Register src,
int32_t amount) {
if (dst != src) mov(dst, src);
shr(dst, amount & 31);
}
@ -1001,8 +1001,8 @@ void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
liftoff::OpWithCarry<&Assembler::add, &Assembler::adc>(this, dst, lhs, rhs);
}
void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
void LiftoffAssembler::emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::OpWithCarryI<&Assembler::add, &Assembler::adc>(this, dst, lhs, imm);
}
@ -1137,8 +1137,8 @@ void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
&TurboAssembler::ShlPair_cl);
}
void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_shli(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
amount &= 63;
if (amount >= 32) {
if (dst.high_gp() != src.low_gp()) mov(dst.high_gp(), src.low_gp());
@ -1156,8 +1156,8 @@ void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
&TurboAssembler::SarPair_cl);
}
void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_sari(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
amount &= 63;
if (amount >= 32) {
if (dst.low_gp() != src.high_gp()) mov(dst.low_gp(), src.high_gp());
@ -1175,8 +1175,8 @@ void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
&TurboAssembler::ShrPair_cl);
}
void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_shri(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
amount &= 63;
if (amount >= 32) {
if (dst.low_gp() != src.high_gp()) mov(dst.low_gp(), src.high_gp());

View File

@ -515,7 +515,7 @@ class LiftoffAssembler : public TurboAssembler {
// i32 binops.
inline void emit_i32_add(Register dst, Register lhs, Register rhs);
inline void emit_i32_add(Register dst, Register lhs, int32_t imm);
inline void emit_i32_addi(Register dst, Register lhs, int32_t imm);
inline void emit_i32_sub(Register dst, Register lhs, Register rhs);
inline void emit_i32_mul(Register dst, Register lhs, Register rhs);
inline void emit_i32_divs(Register dst, Register lhs, Register rhs,
@ -528,17 +528,17 @@ class LiftoffAssembler : public TurboAssembler {
inline void emit_i32_remu(Register dst, Register lhs, Register rhs,
Label* trap_rem_by_zero);
inline void emit_i32_and(Register dst, Register lhs, Register rhs);
inline void emit_i32_and(Register dst, Register lhs, int32_t imm);
inline void emit_i32_andi(Register dst, Register lhs, int32_t imm);
inline void emit_i32_or(Register dst, Register lhs, Register rhs);
inline void emit_i32_or(Register dst, Register lhs, int32_t imm);
inline void emit_i32_ori(Register dst, Register lhs, int32_t imm);
inline void emit_i32_xor(Register dst, Register lhs, Register rhs);
inline void emit_i32_xor(Register dst, Register lhs, int32_t imm);
inline void emit_i32_xori(Register dst, Register lhs, int32_t imm);
inline void emit_i32_shl(Register dst, Register src, Register amount);
inline void emit_i32_shl(Register dst, Register src, int32_t amount);
inline void emit_i32_shli(Register dst, Register src, int32_t amount);
inline void emit_i32_sar(Register dst, Register src, Register amount);
inline void emit_i32_sar(Register dst, Register src, int32_t amount);
inline void emit_i32_sari(Register dst, Register src, int32_t amount);
inline void emit_i32_shr(Register dst, Register src, Register amount);
inline void emit_i32_shr(Register dst, Register src, int32_t amount);
inline void emit_i32_shri(Register dst, Register src, int32_t amount);
// i32 unops.
inline void emit_i32_clz(Register dst, Register src);
@ -548,8 +548,8 @@ class LiftoffAssembler : public TurboAssembler {
// i64 binops.
inline void emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
LiftoffRegister rhs);
inline void emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm);
inline void emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm);
inline void emit_i64_sub(LiftoffRegister dst, LiftoffRegister lhs,
LiftoffRegister rhs);
inline void emit_i64_mul(LiftoffRegister dst, LiftoffRegister lhs,
@ -565,28 +565,28 @@ class LiftoffAssembler : public TurboAssembler {
LiftoffRegister rhs, Label* trap_rem_by_zero);
inline void emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs,
LiftoffRegister rhs);
inline void emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm);
inline void emit_i64_andi(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm);
inline void emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs,
LiftoffRegister rhs);
inline void emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm);
inline void emit_i64_ori(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm);
inline void emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs,
LiftoffRegister rhs);
inline void emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm);
inline void emit_i64_xori(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm);
inline void emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
Register amount);
inline void emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
int32_t amount);
inline void emit_i64_shli(LiftoffRegister dst, LiftoffRegister src,
int32_t amount);
inline void emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
Register amount);
inline void emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
int32_t amount);
inline void emit_i64_sari(LiftoffRegister dst, LiftoffRegister src,
int32_t amount);
inline void emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
Register amount);
inline void emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
int32_t amount);
inline void emit_i64_shri(LiftoffRegister dst, LiftoffRegister src,
int32_t amount);
// i64 unops.
inline void emit_i64_clz(LiftoffRegister dst, LiftoffRegister src);
@ -619,19 +619,19 @@ class LiftoffAssembler : public TurboAssembler {
emit_i32_and(dst, lhs, rhs);
}
}
inline void emit_ptrsize_shr(Register dst, Register src, int amount) {
inline void emit_ptrsize_shri(Register dst, Register src, int amount) {
if (kSystemPointerSize == 8) {
emit_i64_shr(LiftoffRegister(dst), LiftoffRegister(src), amount);
emit_i64_shri(LiftoffRegister(dst), LiftoffRegister(src), amount);
} else {
emit_i32_shr(dst, src, amount);
emit_i32_shri(dst, src, amount);
}
}
inline void emit_ptrsize_add(Register dst, Register lhs, int32_t imm) {
inline void emit_ptrsize_addi(Register dst, Register lhs, int32_t imm) {
if (kSystemPointerSize == 8) {
emit_i64_add(LiftoffRegister(dst), LiftoffRegister(lhs), imm);
emit_i64_addi(LiftoffRegister(dst), LiftoffRegister(lhs), imm);
} else {
emit_i32_add(dst, lhs, imm);
emit_i32_addi(dst, lhs, imm);
}
}
@ -957,9 +957,9 @@ void LiftoffAssembler::emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs,
this, dst, lhs, rhs);
}
void LiftoffAssembler::emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_and>(
void LiftoffAssembler::emit_i64_andi(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_andi>(
this, dst, lhs, imm);
}
@ -969,9 +969,9 @@ void LiftoffAssembler::emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs,
this, dst, lhs, rhs);
}
void LiftoffAssembler::emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_or>(
void LiftoffAssembler::emit_i64_ori(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_ori>(
this, dst, lhs, imm);
}
@ -981,9 +981,9 @@ void LiftoffAssembler::emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs,
this, dst, lhs, rhs);
}
void LiftoffAssembler::emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_xor>(
void LiftoffAssembler::emit_i64_xori(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitI64IndependentHalfOperationImm<&LiftoffAssembler::emit_i32_xori>(
this, dst, lhs, imm);
}

View File

@ -1129,17 +1129,11 @@ class LiftoffCompiler {
? __ GetUnusedRegister(result_rc, {lhs})
: __ GetUnusedRegister(result_rc);
fnImm(dst, lhs, imm);
CallEmitFn(fnImm, dst, lhs, imm);
__ PushRegister(ValueType(result_type), dst);
} else {
// The RHS was not an immediate.
LiftoffRegister rhs = __ PopToRegister();
LiftoffRegister lhs = __ PopToRegister(LiftoffRegList::ForRegs(rhs));
LiftoffRegister dst = src_rc == result_rc
? __ GetUnusedRegister(result_rc, {lhs, rhs})
: __ GetUnusedRegister(result_rc);
fn(dst, lhs, rhs);
__ PushRegister(ValueType(result_type), dst);
EmitBinOp<src_type, result_type>(fn);
}
}
@ -1187,28 +1181,18 @@ class LiftoffCompiler {
case kExpr##opcode: \
return EmitBinOp<ValueType::kI32, ValueType::kI32>( \
&LiftoffAssembler::emit_##fn);
#define CASE_I32_BINOPI(opcode, fn) \
case kExpr##opcode: \
return EmitBinOpImm<ValueType::kI32, ValueType::kI32>( \
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
__ emit_##fn(dst.gp(), lhs.gp(), rhs.gp()); \
}, \
[=](LiftoffRegister dst, LiftoffRegister lhs, int32_t imm) { \
__ emit_##fn(dst.gp(), lhs.gp(), imm); \
});
#define CASE_I32_BINOPI(opcode, fn) \
case kExpr##opcode: \
return EmitBinOpImm<ValueType::kI32, ValueType::kI32>( \
&LiftoffAssembler::emit_##fn, &LiftoffAssembler::emit_##fn##i);
#define CASE_I64_BINOP(opcode, fn) \
case kExpr##opcode: \
return EmitBinOp<ValueType::kI64, ValueType::kI64>( \
&LiftoffAssembler::emit_##fn);
#define CASE_I64_BINOPI(opcode, fn) \
case kExpr##opcode: \
return EmitBinOpImm<ValueType::kI64, ValueType::kI64>( \
[=](LiftoffRegister dst, LiftoffRegister lhs, LiftoffRegister rhs) { \
__ emit_##fn(dst, lhs, rhs); \
}, \
[=](LiftoffRegister dst, LiftoffRegister lhs, int32_t imm) { \
__ emit_##fn(dst, lhs, imm); \
});
#define CASE_I64_BINOPI(opcode, fn) \
case kExpr##opcode: \
return EmitBinOpImm<ValueType::kI64, ValueType::kI64>( \
&LiftoffAssembler::emit_##fn, &LiftoffAssembler::emit_##fn##i);
#define CASE_FLOAT_BINOP(opcode, type, fn) \
case kExpr##opcode: \
return EmitBinOp<ValueType::k##type, ValueType::k##type>( \
@ -1252,9 +1236,7 @@ class LiftoffCompiler {
__ emit_##fn(dst, src, \
amount.is_gp_pair() ? amount.low_gp() : amount.gp()); \
}, \
[=](LiftoffRegister dst, LiftoffRegister src, int32_t amount) { \
__ emit_##fn(dst, src, amount); \
});
&LiftoffAssembler::emit_##fn##i);
#define CASE_CCALL_BINOP(opcode, type, ext_ref_fn) \
case kExpr##opcode: \
return EmitBinOp<ValueType::k##type, ValueType::k##type>( \
@ -1894,12 +1876,12 @@ class LiftoffCompiler {
// AND of two operands. We could introduce a new variant of
// {emit_cond_jump} to use the "test" instruction without the "and" here.
// Then we can also avoid using the temp register here.
__ emit_i32_and(address, index, align_mask);
__ emit_i32_andi(address, index, align_mask);
__ emit_cond_jump(kUnequal, trap_label, kWasmI32, address);
return;
}
__ emit_i32_add(address, index, offset);
__ emit_i32_and(address, address, align_mask);
__ emit_i32_addi(address, index, offset);
__ emit_i32_andi(address, address, align_mask);
__ emit_cond_jump(kUnequal, trap_label, kWasmI32, address);
}
@ -1962,7 +1944,7 @@ class LiftoffCompiler {
if (index != old_index) __ Move(index, old_index, kWasmI32);
}
Register tmp = __ GetUnusedRegister(kGpReg, *pinned).gp();
__ emit_ptrsize_add(index, index, *offset);
__ emit_ptrsize_addi(index, index, *offset);
LOAD_INSTANCE_FIELD(tmp, MemoryMask, kSystemPointerSize);
__ emit_ptrsize_and(index, index, tmp);
*offset = 0;
@ -2046,7 +2028,7 @@ class LiftoffCompiler {
void CurrentMemoryPages(FullDecoder* decoder, Value* result) {
Register mem_size = __ GetUnusedRegister(kGpReg).gp();
LOAD_INSTANCE_FIELD(mem_size, MemorySize, kSystemPointerSize);
__ emit_ptrsize_shr(mem_size, mem_size, kWasmPageSizeLog2);
__ emit_ptrsize_shri(mem_size, mem_size, kWasmPageSizeLog2);
__ PushRegister(kWasmI32, LiftoffRegister(mem_size));
}
@ -2219,7 +2201,7 @@ class LiftoffCompiler {
// 3) mask = diff & neg_index
__ emit_i32_and(mask, diff, neg_index);
// 4) mask = mask >> 31
__ emit_i32_sar(mask, mask, 31);
__ emit_i32_sari(mask, mask, 31);
// Apply mask.
__ emit_i32_and(index, index, mask);
@ -2230,7 +2212,7 @@ class LiftoffCompiler {
LOAD_INSTANCE_FIELD(table, IndirectFunctionTableSigIds, kSystemPointerSize);
// Shift {index} by 2 (multiply by 4) to represent kInt32Size items.
STATIC_ASSERT((1 << 2) == kInt32Size);
__ emit_i32_shl(index, index, 2);
__ emit_i32_shli(index, index, 2);
__ Load(LiftoffRegister(scratch), table, index, 0, LoadType::kI32Load,
pinned);
@ -2652,7 +2634,7 @@ class LiftoffCompiler {
uint32_t offset = imm.offset;
index_reg = AddMemoryMasking(index_reg, &offset, &pinned);
if (offset != 0) __ emit_i32_add(index_reg, index_reg, offset);
if (offset != 0) __ emit_i32_addi(index_reg, index_reg, offset);
LiftoffAssembler::VarState timeout =
__ cache_state()->stack_state.end()[-1];
@ -2722,7 +2704,7 @@ class LiftoffCompiler {
uint32_t offset = imm.offset;
index = AddMemoryMasking(index, &offset, &pinned);
if (offset) __ emit_i32_add(index, index, offset);
if (offset) __ emit_i32_addi(index, index, offset);
// TODO(ahaas): Use PrepareCall to prepare parameters.
__ SpillAllRegisters();

View File

@ -768,10 +768,10 @@ I32_BINOP(xor, xor_)
#undef I32_BINOP
#define I32_BINOP_I(name, instruction) \
void LiftoffAssembler::emit_i32_##name(Register dst, Register lhs, \
int32_t imm) { \
instruction(dst, lhs, Operand(imm)); \
#define I32_BINOP_I(name, instruction) \
void LiftoffAssembler::emit_i32_##name##i(Register dst, Register lhs, \
int32_t imm) { \
instruction(dst, lhs, Operand(imm)); \
}
// clang-format off
@ -816,8 +816,8 @@ I32_SHIFTOP_I(shr, srl)
#undef I32_SHIFTOP
#undef I32_SHIFTOP_I
void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
void LiftoffAssembler::emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
TurboAssembler::AddPair(dst.low_gp(), dst.high_gp(), lhs.low_gp(),
lhs.high_gp(), imm,
kScratchReg, kScratchReg2);
@ -922,8 +922,8 @@ void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
&TurboAssembler::ShlPair);
}
void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_shli(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
UseScratchRegisterScope temps(this);
// {src.low_gp()} will still be needed after writing {dst.high_gp()} and
// {dst.low_gp()}.
@ -946,8 +946,8 @@ void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
&TurboAssembler::SarPair);
}
void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_sari(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
UseScratchRegisterScope temps(this);
// {src.high_gp()} will still be needed after writing {dst.high_gp()} and
// {dst.low_gp()}.
@ -965,8 +965,8 @@ void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
&TurboAssembler::ShrPair);
}
void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_shri(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
UseScratchRegisterScope temps(this);
// {src.high_gp()} will still be needed after writing {dst.high_gp()} and
// {dst.low_gp()}.

View File

@ -264,22 +264,22 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
Register rhs) { \
bailout(kUnsupportedArchitecture, "i32 binop:: " #name); \
}
#define UNIMPLEMENTED_I32_BINOP_I(name) \
UNIMPLEMENTED_I32_BINOP(name) \
void LiftoffAssembler::emit_##name(Register dst, Register lhs, \
int32_t imm) { \
bailout(kUnsupportedArchitecture, "i32 binop_i: " #name); \
#define UNIMPLEMENTED_I32_BINOP_I(name) \
UNIMPLEMENTED_I32_BINOP(name) \
void LiftoffAssembler::emit_##name##i(Register dst, Register lhs, \
int32_t imm) { \
bailout(kUnsupportedArchitecture, "i32 binop_i: " #name); \
}
#define UNIMPLEMENTED_I64_BINOP(name) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \
LiftoffRegister rhs) { \
bailout(kUnsupportedArchitecture, "i64 binop: " #name); \
}
#define UNIMPLEMENTED_I64_BINOP_I(name) \
UNIMPLEMENTED_I64_BINOP(name) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \
int32_t imm) { \
bailout(kUnsupportedArchitecture, "i64_i binop: " #name); \
#define UNIMPLEMENTED_I64_BINOP_I(name) \
UNIMPLEMENTED_I64_BINOP(name) \
void LiftoffAssembler::emit_##name##i(LiftoffRegister dst, \
LiftoffRegister lhs, int32_t imm) { \
bailout(kUnsupportedArchitecture, "i64_i binop: " #name); \
}
#define UNIMPLEMENTED_GP_UNOP(name) \
void LiftoffAssembler::emit_##name(Register dst, Register src) { \
@ -299,22 +299,22 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
bailout(kUnsupportedArchitecture, "fp unop: " #name); \
return true; \
}
#define UNIMPLEMENTED_I32_SHIFTOP(name) \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
Register amount) { \
bailout(kUnsupportedArchitecture, "i32 shiftop: " #name); \
} \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
int32_t amount) { \
bailout(kUnsupportedArchitecture, "i32 shiftop: " #name); \
#define UNIMPLEMENTED_I32_SHIFTOP(name) \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
Register amount) { \
bailout(kUnsupportedArchitecture, "i32 shiftop: " #name); \
} \
void LiftoffAssembler::emit_##name##i(Register dst, Register src, \
int32_t amount) { \
bailout(kUnsupportedArchitecture, "i32 shiftop: " #name); \
}
#define UNIMPLEMENTED_I64_SHIFTOP(name) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister src, \
Register amount) { \
bailout(kUnsupportedArchitecture, "i64 shiftop: " #name); \
} \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister src, \
int32_t amount) { \
void LiftoffAssembler::emit_##name##i(LiftoffRegister dst, \
LiftoffRegister src, int32_t amount) { \
bailout(kUnsupportedArchitecture, "i64 shiftop: " #name); \
}

View File

@ -268,22 +268,22 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
Register rhs) { \
bailout(kUnsupportedArchitecture, "i32 binop: " #name); \
}
#define UNIMPLEMENTED_I32_BINOP_I(name) \
UNIMPLEMENTED_I32_BINOP(name) \
void LiftoffAssembler::emit_##name(Register dst, Register lhs, \
int32_t imm) { \
bailout(kUnsupportedArchitecture, "i32 binop_i: " #name); \
#define UNIMPLEMENTED_I32_BINOP_I(name) \
UNIMPLEMENTED_I32_BINOP(name) \
void LiftoffAssembler::emit_##name##i(Register dst, Register lhs, \
int32_t imm) { \
bailout(kUnsupportedArchitecture, "i32 binop_i: " #name); \
}
#define UNIMPLEMENTED_I64_BINOP(name) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \
LiftoffRegister rhs) { \
bailout(kUnsupportedArchitecture, "i64 binop: " #name); \
}
#define UNIMPLEMENTED_I64_BINOP_I(name) \
UNIMPLEMENTED_I64_BINOP(name) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister lhs, \
int32_t imm) { \
bailout(kUnsupportedArchitecture, "i64 binop_i: " #name); \
#define UNIMPLEMENTED_I64_BINOP_I(name) \
UNIMPLEMENTED_I64_BINOP(name) \
void LiftoffAssembler::emit_##name##i(LiftoffRegister dst, \
LiftoffRegister lhs, int32_t imm) { \
bailout(kUnsupportedArchitecture, "i64 binop_i: " #name); \
}
#define UNIMPLEMENTED_GP_UNOP(name) \
void LiftoffAssembler::emit_##name(Register dst, Register src) { \
@ -303,22 +303,22 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
bailout(kUnsupportedArchitecture, "fp unop: " #name); \
return true; \
}
#define UNIMPLEMENTED_I32_SHIFTOP(name) \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
Register amount) { \
bailout(kUnsupportedArchitecture, "i32 shiftop: " #name); \
} \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
int32_t amount) { \
bailout(kUnsupportedArchitecture, "i32 shiftop: " #name); \
#define UNIMPLEMENTED_I32_SHIFTOP(name) \
void LiftoffAssembler::emit_##name(Register dst, Register src, \
Register amount) { \
bailout(kUnsupportedArchitecture, "i32 shiftop: " #name); \
} \
void LiftoffAssembler::emit_##name##i(Register dst, Register src, \
int32_t amount) { \
bailout(kUnsupportedArchitecture, "i32 shiftop: " #name); \
}
#define UNIMPLEMENTED_I64_SHIFTOP(name) \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister src, \
Register amount) { \
bailout(kUnsupportedArchitecture, "i64 shiftop: " #name); \
} \
void LiftoffAssembler::emit_##name(LiftoffRegister dst, LiftoffRegister src, \
int32_t amount) { \
void LiftoffAssembler::emit_##name##i(LiftoffRegister dst, \
LiftoffRegister src, int32_t amount) { \
bailout(kUnsupportedArchitecture, "i64 shiftop: " #name); \
}

View File

@ -840,7 +840,7 @@ void LiftoffAssembler::emit_i32_add(Register dst, Register lhs, Register rhs) {
}
}
void LiftoffAssembler::emit_i32_add(Register dst, Register lhs, int32_t imm) {
void LiftoffAssembler::emit_i32_addi(Register dst, Register lhs, int32_t imm) {
if (lhs != dst) {
leal(dst, Operand(lhs, imm));
} else {
@ -1006,7 +1006,7 @@ void LiftoffAssembler::emit_i32_and(Register dst, Register lhs, Register rhs) {
lhs, rhs);
}
void LiftoffAssembler::emit_i32_and(Register dst, Register lhs, int32_t imm) {
void LiftoffAssembler::emit_i32_andi(Register dst, Register lhs, int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::andl, &Assembler::movl>(
this, dst, lhs, imm);
}
@ -1016,7 +1016,7 @@ void LiftoffAssembler::emit_i32_or(Register dst, Register lhs, Register rhs) {
lhs, rhs);
}
void LiftoffAssembler::emit_i32_or(Register dst, Register lhs, int32_t imm) {
void LiftoffAssembler::emit_i32_ori(Register dst, Register lhs, int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::orl, &Assembler::movl>(this, dst,
lhs, imm);
}
@ -1026,7 +1026,7 @@ void LiftoffAssembler::emit_i32_xor(Register dst, Register lhs, Register rhs) {
lhs, rhs);
}
void LiftoffAssembler::emit_i32_xor(Register dst, Register lhs, int32_t imm) {
void LiftoffAssembler::emit_i32_xori(Register dst, Register lhs, int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::xorl, &Assembler::movl>(
this, dst, lhs, imm);
}
@ -1071,8 +1071,8 @@ void LiftoffAssembler::emit_i32_shl(Register dst, Register src,
&Assembler::shll_cl);
}
void LiftoffAssembler::emit_i32_shl(Register dst, Register src,
int32_t amount) {
void LiftoffAssembler::emit_i32_shli(Register dst, Register src,
int32_t amount) {
if (dst != src) movl(dst, src);
shll(dst, Immediate(amount & 31));
}
@ -1083,8 +1083,8 @@ void LiftoffAssembler::emit_i32_sar(Register dst, Register src,
&Assembler::sarl_cl);
}
void LiftoffAssembler::emit_i32_sar(Register dst, Register src,
int32_t amount) {
void LiftoffAssembler::emit_i32_sari(Register dst, Register src,
int32_t amount) {
if (dst != src) movl(dst, src);
sarl(dst, Immediate(amount & 31));
}
@ -1095,8 +1095,8 @@ void LiftoffAssembler::emit_i32_shr(Register dst, Register src,
&Assembler::shrl_cl);
}
void LiftoffAssembler::emit_i32_shr(Register dst, Register src,
int32_t amount) {
void LiftoffAssembler::emit_i32_shri(Register dst, Register src,
int32_t amount) {
if (dst != src) movl(dst, src);
shrl(dst, Immediate(amount & 31));
}
@ -1125,8 +1125,8 @@ void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
}
}
void LiftoffAssembler::emit_i64_add(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
void LiftoffAssembler::emit_i64_addi(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
if (lhs.gp() != dst.gp()) {
leaq(dst.gp(), Operand(lhs.gp(), imm));
} else {
@ -1191,8 +1191,8 @@ void LiftoffAssembler::emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs,
this, dst.gp(), lhs.gp(), rhs.gp());
}
void LiftoffAssembler::emit_i64_and(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
void LiftoffAssembler::emit_i64_andi(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::andq, &Assembler::movq>(
this, dst.gp(), lhs.gp(), imm);
}
@ -1203,8 +1203,8 @@ void LiftoffAssembler::emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs,
this, dst.gp(), lhs.gp(), rhs.gp());
}
void LiftoffAssembler::emit_i64_or(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
void LiftoffAssembler::emit_i64_ori(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::orq, &Assembler::movq>(
this, dst.gp(), lhs.gp(), imm);
}
@ -1215,8 +1215,8 @@ void LiftoffAssembler::emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs,
this, dst.gp(), lhs.gp(), rhs.gp());
}
void LiftoffAssembler::emit_i64_xor(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
void LiftoffAssembler::emit_i64_xori(LiftoffRegister dst, LiftoffRegister lhs,
int32_t imm) {
liftoff::EmitCommutativeBinOpImm<&Assembler::xorq, &Assembler::movq>(
this, dst.gp(), lhs.gp(), imm);
}
@ -1227,8 +1227,8 @@ void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
&Assembler::shlq_cl);
}
void LiftoffAssembler::emit_i64_shl(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_shli(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
if (dst.gp() != src.gp()) movq(dst.gp(), src.gp());
shlq(dst.gp(), Immediate(amount & 63));
}
@ -1239,8 +1239,8 @@ void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
&Assembler::sarq_cl);
}
void LiftoffAssembler::emit_i64_sar(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_sari(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
if (dst.gp() != src.gp()) movq(dst.gp(), src.gp());
sarq(dst.gp(), Immediate(amount & 63));
}
@ -1251,8 +1251,8 @@ void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
&Assembler::shrq_cl);
}
void LiftoffAssembler::emit_i64_shr(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
void LiftoffAssembler::emit_i64_shri(LiftoffRegister dst, LiftoffRegister src,
int32_t amount) {
if (dst != src) movq(dst.gp(), src.gp());
shrq(dst.gp(), Immediate(amount & 63));
}