ppc: [liftoff] implement SubS64 function

Drive-by: clean up SubS64/AddS64 macroassembler
Change-Id: I31a15b1f3f3825122f6857861845c8961ece3649
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3024152
Commit-Queue: Junliang Yan <junyan@redhat.com>
Reviewed-by: Milad Fa <mfarazma@redhat.com>
Cr-Commit-Position: refs/heads/master@{#75711}
Author: Junliang Yan
Date: 2021-07-13 11:52:15 -04:00
Committed by: V8 LUCI CQ
Commit: 5165e3f4a5 (parent: a1d6483023)
4 changed files with 58 additions and 47 deletions
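
Context for reviewers: the standalone C++ sketch below (not V8 code; the enum values mirror V8's OEBit/RCBit names, and the printed mnemonics are only illustrative) models the dispatch the new AddS64/SubS64 immediate overloads perform in the macro-assembler hunk further down: a signed 16-bit immediate with the default OE/RC bits folds into a single addi/subi, while anything else is materialized into a scratch register and uses the register form of add/sub.

// Sketch only: prints the instruction sequence the helper would emit.
#include <cstdint>
#include <cstdio>

enum OEBit { LeaveOE, SetOE };
enum RCBit { LeaveRC, SetRC };

static bool is_int16(int64_t value) {
  return value >= INT16_MIN && value <= INT16_MAX;
}

// Models TurboAssembler::SubS64(dst, src, Operand(imm), scratch, s, r).
static void SubS64(int64_t imm, OEBit s = LeaveOE, RCBit r = LeaveRC) {
  if (is_int16(imm) && s == LeaveOE && r == LeaveRC) {
    // Immediate fits and no OE/RC bits requested: one instruction.
    std::printf("subi    dst, src, %lld\n", static_cast<long long>(imm));
  } else {
    // Otherwise materialize the immediate, then use the register form,
    // which can carry the overflow (o) and record (.) bits.
    std::printf("mov     scratch, %lld\n", static_cast<long long>(imm));
    std::printf("sub%s%s dst, src, scratch\n", s == SetOE ? "o" : "",
                r == SetRC ? "." : "");
  }
}

int main() {
  SubS64(8);                  // fits in 16 bits, default bits -> single subi
  SubS64(0x12345);            // too wide for subi -> mov + sub
  SubS64(8, LeaveOE, SetRC);  // record form requested -> mov + sub.
  return 0;
}

With the helper in place, the Liftoff i64_sub entry simply forwards to SubS64 through the binop macro list in the last hunk, and the code generator drops its open-coded is_int16 checks.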


@@ -156,11 +156,8 @@ void TurboAssembler::LoadRootRegisterOffset(Register destination,
                                             intptr_t offset) {
   if (offset == 0) {
     mr(destination, kRootRegister);
-  } else if (is_int16(offset)) {
-    addi(destination, kRootRegister, Operand(offset));
   } else {
-    mov(destination, Operand(offset));
-    add(destination, kRootRegister, destination);
+    AddS64(destination, kRootRegister, Operand(offset), destination);
   }
 }
@@ -1299,7 +1296,7 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
     // since the sp slot and code slot were pushed after the fp.
   }
-  addi(sp, sp, Operand(-stack_space * kSystemPointerSize));
+  AddS64(sp, sp, Operand(-stack_space * kSystemPointerSize));
   // Allocate and align the frame preparing for calling the runtime
   // function.
@@ -1315,7 +1312,8 @@ void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space,
   // Set the exit frame sp value to point just before the return address
   // location.
-  addi(r8, sp, Operand((kStackFrameExtraParamSlot + 1) * kSystemPointerSize));
+  AddS64(r8, sp, Operand((kStackFrameExtraParamSlot + 1) * kSystemPointerSize),
+         r0);
   StoreU64(r8, MemOperand(fp, ExitFrameConstants::kSPOffset));
 }
@@ -1344,7 +1342,7 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles, Register argument_count,
     const int kNumRegs = kNumCallerSavedDoubles;
     const int offset =
         (ExitFrameConstants::kFixedFrameSizeFromFp + kNumRegs * kDoubleSize);
-    addi(r6, fp, Operand(-offset));
+    AddS64(r6, fp, Operand(-offset), r0);
     MultiPopDoubles(kCallerSavedDoubles, r6);
   }
@@ -1397,14 +1395,15 @@ void TurboAssembler::PrepareForTailCall(Register callee_args_count,
   Register dst_reg = scratch0;
   ShiftLeftImm(dst_reg, caller_args_count, Operand(kSystemPointerSizeLog2));
   add(dst_reg, fp, dst_reg);
-  addi(dst_reg, dst_reg,
-       Operand(StandardFrameConstants::kCallerSPOffset + kSystemPointerSize));
+  AddS64(dst_reg, dst_reg,
+         Operand(StandardFrameConstants::kCallerSPOffset + kSystemPointerSize),
+         scratch0);
   Register src_reg = caller_args_count;
   // Calculate the end of source area. +kSystemPointerSize is for the receiver.
   ShiftLeftImm(src_reg, callee_args_count, Operand(kSystemPointerSizeLog2));
   add(src_reg, sp, src_reg);
-  addi(src_reg, src_reg, Operand(kSystemPointerSize));
+  AddS64(src_reg, src_reg, Operand(kSystemPointerSize), scratch0);
   if (FLAG_debug_code) {
     CmpU64(src_reg, dst_reg);
@@ -2237,7 +2236,8 @@ void TurboAssembler::PrepareCallCFunction(int num_reg_arguments,
     // Make stack end at alignment and make room for stack arguments
     // -- preserving original value of sp.
     mr(scratch, sp);
-    addi(sp, sp, Operand(-(stack_passed_arguments + 1) * kSystemPointerSize));
+    AddS64(sp, sp, Operand(-(stack_passed_arguments + 1) * kSystemPointerSize),
+           scratch);
     DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
     ClearRightImm(sp, sp,
                   Operand(base::bits::WhichPowerOfTwo(frame_alignment)));
@@ -2378,9 +2378,9 @@ void TurboAssembler::CallCFunctionHelper(Register function,
       CalculateStackPassedWords(num_reg_arguments, num_double_arguments);
   int stack_space = kNumRequiredStackFrameSlots + stack_passed_arguments;
   if (ActivationFrameAlignment() > kSystemPointerSize) {
-    LoadU64(sp, MemOperand(sp, stack_space * kSystemPointerSize));
+    LoadU64(sp, MemOperand(sp, stack_space * kSystemPointerSize), r0);
   } else {
-    addi(sp, sp, Operand(stack_space * kSystemPointerSize));
+    AddS64(sp, sp, Operand(stack_space * kSystemPointerSize), r0);
   }
 }
@@ -2675,17 +2675,33 @@ void TurboAssembler::MovFloatToInt(Register dst, DoubleRegister src) {
   addi(sp, sp, Operand(kFloatSize));
 }
-void TurboAssembler::AddS64(Register dst, Register src, Register value) {
-  add(dst, src, value);
+void TurboAssembler::AddS64(Register dst, Register src, Register value, OEBit s,
+                            RCBit r) {
+  add(dst, src, value, s, r);
 }
 void TurboAssembler::AddS64(Register dst, Register src, const Operand& value,
-                            Register scratch) {
-  if (is_int16(value.immediate())) {
+                            Register scratch, OEBit s, RCBit r) {
+  if (is_int16(value.immediate()) && s == LeaveOE && r == LeaveRC) {
     addi(dst, src, value);
   } else {
     mov(scratch, value);
-    add(dst, src, scratch);
+    add(dst, src, scratch, s, r);
   }
 }
+void TurboAssembler::SubS64(Register dst, Register src, Register value, OEBit s,
+                            RCBit r) {
+  sub(dst, src, value, s, r);
+}
+void TurboAssembler::SubS64(Register dst, Register src, const Operand& value,
+                            Register scratch, OEBit s, RCBit r) {
+  if (is_int16(value.immediate()) && s == LeaveOE && r == LeaveRC) {
+    subi(dst, src, value);
+  } else {
+    mov(scratch, value);
+    sub(dst, src, scratch, s, r);
+  }
+}
@@ -3276,8 +3292,8 @@ void TurboAssembler::LoadEntryFromBuiltinIndex(Register builtin_index) {
     ShiftLeftImm(builtin_index, builtin_index,
                  Operand(kSystemPointerSizeLog2 - kSmiShift));
   }
-  addi(builtin_index, builtin_index,
-       Operand(IsolateData::builtin_entry_table_offset()));
+  AddS64(builtin_index, builtin_index,
+         Operand(IsolateData::builtin_entry_table_offset()));
   LoadU64(builtin_index, MemOperand(kRootRegister, builtin_index));
 }


@@ -184,8 +184,13 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
   void ResetRoundingMode();
   void AddS64(Register dst, Register src, const Operand& value,
-              Register scratch = r0);
-  void AddS64(Register dst, Register src, Register value);
+              Register scratch = r0, OEBit s = LeaveOE, RCBit r = LeaveRC);
+  void AddS64(Register dst, Register src, Register value, OEBit s = LeaveOE,
+              RCBit r = LeaveRC);
+  void SubS64(Register dst, Register src, const Operand& value,
+              Register scratch = r0, OEBit s = LeaveOE, RCBit r = LeaveRC);
+  void SubS64(Register dst, Register src, Register value, OEBit s = LeaveOE,
+              RCBit r = LeaveRC);
   void Push(Register src) { push(src); }
   // Push a handle.


@@ -1106,12 +1106,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       if (ShouldApplyOffsetToStackCheck(instr, &offset)) {
         lhs_register = i.TempRegister(0);
-        if (is_int16(offset)) {
-          __ subi(lhs_register, sp, Operand(offset));
-        } else {
-          __ mov(kScratchReg, Operand(offset));
-          __ sub(lhs_register, sp, kScratchReg);
-        }
+        __ SubS64(lhs_register, sp, Operand(offset), kScratchReg);
       }
       constexpr size_t kValueIndex = 0;
@@ -1165,8 +1160,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
     case kArchStackSlot: {
       FrameOffset offset =
           frame_access_state()->GetFrameOffset(i.InputInt32(0));
-      __ addi(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
-              Operand(offset.offset()));
+      __ AddS64(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
+                Operand(offset.offset()), r0);
       break;
     }
     case kArchWordPoisonOnSpeculation:
@@ -1380,8 +1375,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
           __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                  LeaveOE, i.OutputRCBit());
         } else {
-          __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
-          DCHECK_EQ(LeaveRC, i.OutputRCBit());
+          __ AddS64(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1),
+                    r0, LeaveOE, i.OutputRCBit());
         }
         __ extsw(i.OutputRegister(), i.OutputRegister());
 #if V8_TARGET_ARCH_PPC64
@@ -1397,8 +1392,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
           __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                  LeaveOE, i.OutputRCBit());
         } else {
-          __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
-          DCHECK_EQ(LeaveRC, i.OutputRCBit());
+          __ AddS64(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1),
+                    r0, LeaveOE, i.OutputRCBit());
         }
       }
       break;
@@ -1419,15 +1414,8 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
           __ sub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                  LeaveOE, i.OutputRCBit());
         } else {
-          if (is_int16(i.InputImmediate(1).immediate())) {
-            __ subi(i.OutputRegister(), i.InputRegister(0),
-                    i.InputImmediate(1));
-            DCHECK_EQ(LeaveRC, i.OutputRCBit());
-          } else {
-            __ mov(kScratchReg, i.InputImmediate(1));
-            __ sub(i.OutputRegister(), i.InputRegister(0), kScratchReg, LeaveOE,
-                   i.OutputRCBit());
-          }
+          __ SubS64(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1),
+                    r0, LeaveOE, i.OutputRCBit());
         }
 #if V8_TARGET_ARCH_PPC64
       }
@@ -4006,7 +3994,8 @@ void CodeGenerator::AssembleConstructFrame() {
       if (FLAG_enable_embedded_constant_pool) {
        __ Push(r0, fp, kConstantPoolRegister);
        // Adjust FP to point to saved FP.
-       __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
+       __ SubS64(fp, sp,
+                 Operand(StandardFrameConstants::kConstantPoolOffset), r0);
      } else {
        __ Push(r0, fp);
        __ mr(fp, sp);


@@ -707,8 +707,8 @@ void LiftoffAssembler::FillStackSlotsWithZero(int start, int size) {
     // Use r4 for start address (inclusive), r5 for end address (exclusive).
     push(r4);
     push(r5);
-    subi(r4, fp, Operand(start + size));
-    subi(r5, fp, Operand(start));
+    SubS64(r4, fp, Operand(start + size), r0);
+    SubS64(r5, fp, Operand(start), r0);
     Label loop;
     bind(&loop);
@@ -792,7 +792,6 @@ UNIMPLEMENTED_I32_BINOP_I(i32_xor)
 UNIMPLEMENTED_I32_SHIFTOP(i32_shl)
 UNIMPLEMENTED_I32_SHIFTOP(i32_sar)
 UNIMPLEMENTED_I32_SHIFTOP(i32_shr)
-UNIMPLEMENTED_I64_BINOP(i64_sub)
 UNIMPLEMENTED_I64_BINOP(i64_mul)
 #ifdef V8_TARGET_ARCH_PPC64
 UNIMPLEMENTED_I64_BINOP_I(i64_and)
@@ -880,6 +879,8 @@ UNOP_LIST(EMIT_UNOP_FUNCTION)
     USE, , void)                                                             \
   V(f64_max, MaxF64, DoubleRegister, DoubleRegister, DoubleRegister, , , ,   \
     USE, , void)                                                             \
+  V(i64_sub, SubS64, LiftoffRegister, LiftoffRegister, LiftoffRegister,      \
+    LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                         \
   V(i64_add, AddS64, LiftoffRegister, LiftoffRegister, LiftoffRegister,      \
     LFR_TO_REG, LFR_TO_REG, LFR_TO_REG, USE, , void)                         \
   V(i64_addi, AddS64, LiftoffRegister, LiftoffRegister, int64_t, LFR_TO_REG, \