[maglev][arm64] Share GapMove IR

Drive-by fix: wrong compare order in Return

Bug: v8:7700
Change-Id: Id5c2f70ad75ecbd295144e8bae442360e5b00656
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4075726
Commit-Queue: Victor Gomes <victorgomes@chromium.org>
Auto-Submit: Victor Gomes <victorgomes@chromium.org>
Reviewed-by: Leszek Swirski <leszeks@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84609}
This commit is contained in:
Victor Gomes 2022-12-02 11:07:45 +01:00 committed by V8 LUCI CQ
parent 0bd121f8e6
commit 765b3dac71
6 changed files with 91 additions and 55 deletions

View File

@@ -21,11 +21,10 @@ inline MemOperand MaglevAssembler::StackSlotOperand(StackSlot slot) {
return MemOperand(fp, slot.index);
}
// TODO(Victorgomes): Unify this to use StackSlot struct.
inline MemOperand MaglevAssembler::GetStackSlot(
const compiler::AllocatedOperand& operand) {
// TODO(v8:7700): Implement!
UNREACHABLE();
return MemOperand();
return MemOperand(fp, GetFramePointerOffsetForStackSlot(operand));
}
inline MemOperand MaglevAssembler::ToMemOperand(
@@ -104,6 +103,40 @@ inline void MaglevAssembler::MaterialiseValueNode(Register dst,
UNREACHABLE();
}
// Register-to-register move. A full-width Mov is correct for every
// representation this helper supports, so `repr` is intentionally unused
// in this specialization.
template <>
inline void MaglevAssembler::MoveRepr(MachineRepresentation repr, Register dst,
Register src) {
Mov(dst, src);
}
// Memory-to-register load, sized by `repr`: a 32-bit load for kWord32,
// a full-width load for tagged representations. Any other representation
// is a bug in the caller (UNREACHABLE).
template <>
inline void MaglevAssembler::MoveRepr(MachineRepresentation repr, Register dst,
MemOperand src) {
switch (repr) {
case MachineRepresentation::kWord32:
// dst.W() selects the 32-bit view of the destination register.
return Ldr(dst.W(), src);
case MachineRepresentation::kTagged:
case MachineRepresentation::kTaggedPointer:
case MachineRepresentation::kTaggedSigned:
return Ldr(dst, src);
default:
UNREACHABLE();
}
}
// Register-to-memory store, sized by `repr`: a 32-bit store for kWord32,
// a full-width store for tagged representations. Mirrors the load
// specialization above; other representations are UNREACHABLE.
template <>
inline void MaglevAssembler::MoveRepr(MachineRepresentation repr,
MemOperand dst, Register src) {
switch (repr) {
case MachineRepresentation::kWord32:
// src.W() selects the 32-bit view of the source register.
return Str(src.W(), dst);
case MachineRepresentation::kTagged:
case MachineRepresentation::kTaggedPointer:
case MachineRepresentation::kTaggedSigned:
return Str(src, dst);
default:
UNREACHABLE();
}
}
} // namespace maglev
} // namespace internal
} // namespace v8

View File

@@ -202,7 +202,6 @@ UNIMPLEMENTED_NODE(ToName)
UNIMPLEMENTED_NODE(ToNumberOrNumeric)
UNIMPLEMENTED_NODE(ToObject)
UNIMPLEMENTED_NODE(ToString)
UNIMPLEMENTED_NODE(GapMove)
UNIMPLEMENTED_NODE(AssertInt32, condition_, reason_)
UNIMPLEMENTED_NODE(CheckDynamicValue)
UNIMPLEMENTED_NODE(CheckInt32IsSmi)
@@ -341,7 +340,7 @@ void Return::GenerateCode(MaglevAssembler* masm, const ProcessingState& state) {
// If actual is bigger than formal, then we should use it to free up the stack
// arguments.
Label corrected_args_count;
__ CompareAndBranch(actual_params_size, params_size, ge,
__ CompareAndBranch(params_size, actual_params_size, ge,
&corrected_args_count);
__ Mov(params_size, actual_params_size);
__ bind(&corrected_args_count);

View File

@@ -61,18 +61,7 @@ class MaglevAssembler : public MacroAssembler {
}
template <typename Dest, typename Source>
void MoveRepr(MachineRepresentation repr, Dest dst, Source src) {
switch (repr) {
case MachineRepresentation::kWord32:
return movl(dst, src);
case MachineRepresentation::kTagged:
case MachineRepresentation::kTaggedPointer:
case MachineRepresentation::kTaggedSigned:
return movq(dst, src);
default:
UNREACHABLE();
}
}
inline void MoveRepr(MachineRepresentation repr, Dest dst, Source src);
void Allocate(RegisterSnapshot& register_snapshot, Register result,
int size_in_bytes,

View File

@@ -552,6 +552,44 @@ void ConstantGapMove::GenerateCode(MaglevAssembler* masm,
}
}
// GapMove nodes never go through virtual-register allocation —
// presumably they are inserted by the register allocator itself, after
// vregs are assigned (NOTE(review): confirm against the allocator).
void GapMove::AllocateVreg(MaglevVregAllocationState* vreg_state) {
UNREACHABLE();
}
// Emits one resolved gap move. Source and target must already share the
// same machine representation (DCHECK below); the emitted instruction is
// chosen by dispatching on the location kinds of the two operands:
// gp-reg -> reg/stack, double-reg -> reg/stack, stack -> reg/double/stack.
void GapMove::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
DCHECK_EQ(source().representation(), target().representation());
MachineRepresentation repr = source().representation();
if (source().IsRegister()) {
Register source_reg = ToRegister(source());
if (target().IsAnyRegister()) {
DCHECK(target().IsRegister());
__ MoveRepr(repr, ToRegister(target()), source_reg);
} else {
// General-purpose register into a stack slot.
__ MoveRepr(repr, masm->ToMemOperand(target()), source_reg);
}
} else if (source().IsDoubleRegister()) {
DoubleRegister source_reg = ToDoubleRegister(source());
if (target().IsAnyRegister()) {
DCHECK(target().IsDoubleRegister());
__ Move(ToDoubleRegister(target()), source_reg);
} else {
// Double register into a stack slot.
__ Move(masm->ToMemOperand(target()), source_reg);
}
} else {
DCHECK(source().IsAnyStackSlot());
MemOperand source_op = masm->ToMemOperand(source());
if (target().IsRegister()) {
__ MoveRepr(repr, ToRegister(target()), source_op);
} else if (target().IsDoubleRegister()) {
__ Move(ToDoubleRegister(target()), source_op);
} else {
DCHECK(target().IsAnyStackSlot());
// Stack-to-stack move: bounce the value through the scratch register.
__ MoveRepr(repr, kScratchRegister, source_op);
__ MoveRepr(repr, masm->ToMemOperand(target()), kScratchRegister);
}
}
}
// ---
// Arch agnostic control nodes
// ---

View File

@@ -325,6 +325,21 @@ inline void MaglevAssembler::AssertStackSizeCorrect() {
}
}
// x64: move `src` to `dst` with the operand width implied by `repr` —
// movl (32-bit) for kWord32, movq (64-bit) for tagged representations.
// Dest/Source may each be a register or a memory operand; other
// representations are UNREACHABLE.
template <typename Dest, typename Source>
inline void MaglevAssembler::MoveRepr(MachineRepresentation repr, Dest dst,
Source src) {
switch (repr) {
case MachineRepresentation::kWord32:
return movl(dst, src);
case MachineRepresentation::kTagged:
case MachineRepresentation::kTaggedPointer:
case MachineRepresentation::kTaggedSigned:
return movq(dst, src);
default:
UNREACHABLE();
}
}
} // namespace maglev
} // namespace internal
} // namespace v8

View File

@@ -2059,44 +2059,6 @@ void GetKeyedGeneric::GenerateCode(MaglevAssembler* masm,
masm->DefineExceptionHandlerAndLazyDeoptPoint(this);
}
// GapMove nodes never go through virtual-register allocation, so this
// hook must not be reached.
void GapMove::AllocateVreg(MaglevVregAllocationState* vreg_state) {
UNREACHABLE();
}
// x64 version of GapMove code generation. Source and target must share a
// machine representation; dispatch is on operand location kinds, with
// Movsd used for double moves and kScratchRegister as the intermediary
// for stack-to-stack moves.
void GapMove::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
DCHECK_EQ(source().representation(), target().representation());
MachineRepresentation repr = source().representation();
if (source().IsRegister()) {
Register source_reg = ToRegister(source());
if (target().IsAnyRegister()) {
DCHECK(target().IsRegister());
__ MoveRepr(repr, ToRegister(target()), source_reg);
} else {
// General-purpose register into a stack slot.
__ MoveRepr(repr, masm->ToMemOperand(target()), source_reg);
}
} else if (source().IsDoubleRegister()) {
DoubleRegister source_reg = ToDoubleRegister(source());
if (target().IsAnyRegister()) {
DCHECK(target().IsDoubleRegister());
__ Movsd(ToDoubleRegister(target()), source_reg);
} else {
// Double register into a stack slot.
__ Movsd(masm->ToMemOperand(target()), source_reg);
}
} else {
DCHECK(source().IsAnyStackSlot());
MemOperand source_op = masm->ToMemOperand(source());
if (target().IsRegister()) {
__ MoveRepr(repr, ToRegister(target()), source_op);
} else if (target().IsDoubleRegister()) {
__ Movsd(ToDoubleRegister(target()), source_op);
} else {
DCHECK(target().IsAnyStackSlot());
// Stack-to-stack move: bounce the value through the scratch register.
__ MoveRepr(repr, kScratchRegister, source_op);
__ MoveRepr(repr, masm->ToMemOperand(target()), kScratchRegister);
}
}
}
namespace {
constexpr Builtin BuiltinFor(Operation operation) {