[loong64][mips64] Cleanup some macros

Besides, fix an error in GetMemOp (the zero-extended offset was written to kScratchReg2 but then read from kScratchReg).

Port commit 247b33e921

Change-Id: I34cf0d22870f438fb6bfcd67ef50ec254fb92608
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3869758
Auto-Submit: Liu Yu <liuyu@loongson.cn>
Reviewed-by: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Commit-Queue: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Cr-Commit-Position: refs/heads/main@{#82928}
Liu Yu authored on 2022-09-02 14:43:20 +08:00; committed by V8 LUCI CQ
parent 2b5f239abe
commit 09aded5467
4 changed files with 468 additions and 413 deletions

src/compiler/backend/loong64/instruction-codes-loong64.h

@@ -12,363 +12,372 @@ namespace compiler {
// LOONG64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction. (The list is an X-macro,
// consumed as in the sketch after this hunk.)
#define TARGET_ARCH_OPCODE_LIST(V) \
V(Loong64Add_d) \
V(Loong64Add_w) \
V(Loong64AddOvf_d) \
V(Loong64Sub_d) \
V(Loong64Sub_w) \
V(Loong64SubOvf_d) \
V(Loong64Mul_d) \
V(Loong64MulOvf_w) \
V(Loong64Mulh_d) \
V(Loong64Mulh_w) \
V(Loong64Mulh_wu) \
V(Loong64Mul_w) \
V(Loong64Div_d) \
V(Loong64Div_w) \
V(Loong64Div_du) \
V(Loong64Div_wu) \
V(Loong64Mod_d) \
V(Loong64Mod_w) \
V(Loong64Mod_du) \
V(Loong64Mod_wu) \
V(Loong64And) \
V(Loong64And32) \
V(Loong64Or) \
V(Loong64Or32) \
V(Loong64Nor) \
V(Loong64Nor32) \
V(Loong64Xor) \
V(Loong64Xor32) \
V(Loong64Alsl_d) \
V(Loong64Alsl_w) \
V(Loong64Sll_d) \
V(Loong64Sll_w) \
V(Loong64Srl_d) \
V(Loong64Srl_w) \
V(Loong64Sra_d) \
V(Loong64Sra_w) \
V(Loong64Rotr_d) \
V(Loong64Rotr_w) \
V(Loong64Bstrpick_d) \
V(Loong64Bstrpick_w) \
V(Loong64Bstrins_d) \
V(Loong64Bstrins_w) \
V(Loong64ByteSwap64) \
V(Loong64ByteSwap32) \
V(Loong64Clz_d) \
V(Loong64Clz_w) \
V(Loong64Mov) \
V(Loong64Tst) \
V(Loong64Cmp) \
V(Loong64Float32Cmp) \
V(Loong64Float32Add) \
V(Loong64Float32Sub) \
V(Loong64Float32Mul) \
V(Loong64Float32Div) \
V(Loong64Float32Abs) \
V(Loong64Float32Neg) \
V(Loong64Float32Sqrt) \
V(Loong64Float32Max) \
V(Loong64Float32Min) \
V(Loong64Float32ToFloat64) \
V(Loong64Float32RoundDown) \
V(Loong64Float32RoundUp) \
V(Loong64Float32RoundTruncate) \
V(Loong64Float32RoundTiesEven) \
V(Loong64Float32ToInt32) \
V(Loong64Float32ToInt64) \
V(Loong64Float32ToUint32) \
V(Loong64Float32ToUint64) \
V(Loong64Float64Cmp) \
V(Loong64Float64Add) \
V(Loong64Float64Sub) \
V(Loong64Float64Mul) \
V(Loong64Float64Div) \
V(Loong64Float64Mod) \
V(Loong64Float64Abs) \
V(Loong64Float64Neg) \
V(Loong64Float64Sqrt) \
V(Loong64Float64Max) \
V(Loong64Float64Min) \
V(Loong64Float64ToFloat32) \
V(Loong64Float64RoundDown) \
V(Loong64Float64RoundUp) \
V(Loong64Float64RoundTruncate) \
V(Loong64Float64RoundTiesEven) \
V(Loong64Float64ToInt32) \
V(Loong64Float64ToInt64) \
V(Loong64Float64ToUint32) \
V(Loong64Float64ToUint64) \
V(Loong64Int32ToFloat32) \
V(Loong64Int32ToFloat64) \
V(Loong64Int64ToFloat32) \
V(Loong64Int64ToFloat64) \
V(Loong64Uint32ToFloat32) \
V(Loong64Uint32ToFloat64) \
V(Loong64Uint64ToFloat32) \
V(Loong64Uint64ToFloat64) \
V(Loong64Float64ExtractLowWord32) \
V(Loong64Float64ExtractHighWord32) \
V(Loong64Float64InsertLowWord32) \
V(Loong64Float64InsertHighWord32) \
V(Loong64BitcastDL) \
V(Loong64BitcastLD) \
V(Loong64Float64SilenceNaN) \
V(Loong64Ld_b) \
V(Loong64Ld_bu) \
V(Loong64St_b) \
V(Loong64Ld_h) \
V(Loong64Ld_hu) \
V(Loong64St_h) \
V(Loong64Ld_w) \
V(Loong64Ld_wu) \
V(Loong64St_w) \
V(Loong64Ld_d) \
V(Loong64St_d) \
V(Loong64Fld_s) \
V(Loong64Fst_s) \
V(Loong64Fld_d) \
V(Loong64Fst_d) \
V(Loong64Push) \
V(Loong64Peek) \
V(Loong64Poke) \
V(Loong64StackClaim) \
V(Loong64Ext_w_b) \
V(Loong64Ext_w_h) \
V(Loong64Dbar) \
V(Loong64S128Const) \
V(Loong64S128Zero) \
V(Loong64S128AllOnes) \
V(Loong64I32x4Splat) \
V(Loong64I32x4ExtractLane) \
V(Loong64I32x4ReplaceLane) \
V(Loong64I32x4Add) \
V(Loong64I32x4Sub) \
V(Loong64F64x2Abs) \
V(Loong64F64x2Neg) \
V(Loong64F32x4Splat) \
V(Loong64F32x4ExtractLane) \
V(Loong64F32x4ReplaceLane) \
V(Loong64F32x4SConvertI32x4) \
V(Loong64F32x4UConvertI32x4) \
V(Loong64I32x4Mul) \
V(Loong64I32x4MaxS) \
V(Loong64I32x4MinS) \
V(Loong64I32x4Eq) \
V(Loong64I32x4Ne) \
V(Loong64I32x4Shl) \
V(Loong64I32x4ShrS) \
V(Loong64I32x4ShrU) \
V(Loong64I32x4MaxU) \
V(Loong64I32x4MinU) \
V(Loong64F64x2Sqrt) \
V(Loong64F64x2Add) \
V(Loong64F64x2Sub) \
V(Loong64F64x2Mul) \
V(Loong64F64x2Div) \
V(Loong64F64x2Min) \
V(Loong64F64x2Max) \
V(Loong64F64x2Eq) \
V(Loong64F64x2Ne) \
V(Loong64F64x2Lt) \
V(Loong64F64x2Le) \
V(Loong64F64x2Splat) \
V(Loong64F64x2ExtractLane) \
V(Loong64F64x2ReplaceLane) \
V(Loong64F64x2Pmin) \
V(Loong64F64x2Pmax) \
V(Loong64F64x2Ceil) \
V(Loong64F64x2Floor) \
V(Loong64F64x2Trunc) \
V(Loong64F64x2NearestInt) \
V(Loong64F64x2ConvertLowI32x4S) \
V(Loong64F64x2ConvertLowI32x4U) \
V(Loong64F64x2PromoteLowF32x4) \
V(Loong64I64x2Splat) \
V(Loong64I64x2ExtractLane) \
V(Loong64I64x2ReplaceLane) \
V(Loong64I64x2Add) \
V(Loong64I64x2Sub) \
V(Loong64I64x2Mul) \
V(Loong64I64x2Neg) \
V(Loong64I64x2Shl) \
V(Loong64I64x2ShrS) \
V(Loong64I64x2ShrU) \
V(Loong64I64x2BitMask) \
V(Loong64I64x2Eq) \
V(Loong64I64x2Ne) \
V(Loong64I64x2GtS) \
V(Loong64I64x2GeS) \
V(Loong64I64x2Abs) \
V(Loong64I64x2SConvertI32x4Low) \
V(Loong64I64x2SConvertI32x4High) \
V(Loong64I64x2UConvertI32x4Low) \
V(Loong64I64x2UConvertI32x4High) \
V(Loong64ExtMulLow) \
V(Loong64ExtMulHigh) \
V(Loong64ExtAddPairwise) \
V(Loong64F32x4Abs) \
V(Loong64F32x4Neg) \
V(Loong64F32x4Sqrt) \
V(Loong64F32x4Add) \
V(Loong64F32x4Sub) \
V(Loong64F32x4Mul) \
V(Loong64F32x4Div) \
V(Loong64F32x4Max) \
V(Loong64F32x4Min) \
V(Loong64F32x4Eq) \
V(Loong64F32x4Ne) \
V(Loong64F32x4Lt) \
V(Loong64F32x4Le) \
V(Loong64F32x4Pmin) \
V(Loong64F32x4Pmax) \
V(Loong64F32x4Ceil) \
V(Loong64F32x4Floor) \
V(Loong64F32x4Trunc) \
V(Loong64F32x4NearestInt) \
V(Loong64F32x4DemoteF64x2Zero) \
V(Loong64I32x4SConvertF32x4) \
V(Loong64I32x4UConvertF32x4) \
V(Loong64I32x4Neg) \
V(Loong64I32x4GtS) \
V(Loong64I32x4GeS) \
V(Loong64I32x4GtU) \
V(Loong64I32x4GeU) \
V(Loong64I32x4Abs) \
V(Loong64I32x4BitMask) \
V(Loong64I32x4DotI16x8S) \
V(Loong64I32x4TruncSatF64x2SZero) \
V(Loong64I32x4TruncSatF64x2UZero) \
V(Loong64I16x8Splat) \
V(Loong64I16x8ExtractLaneU) \
V(Loong64I16x8ExtractLaneS) \
V(Loong64I16x8ReplaceLane) \
V(Loong64I16x8Neg) \
V(Loong64I16x8Shl) \
V(Loong64I16x8ShrS) \
V(Loong64I16x8ShrU) \
V(Loong64I16x8Add) \
V(Loong64I16x8AddSatS) \
V(Loong64I16x8Sub) \
V(Loong64I16x8SubSatS) \
V(Loong64I16x8Mul) \
V(Loong64I16x8MaxS) \
V(Loong64I16x8MinS) \
V(Loong64I16x8Eq) \
V(Loong64I16x8Ne) \
V(Loong64I16x8GtS) \
V(Loong64I16x8GeS) \
V(Loong64I16x8AddSatU) \
V(Loong64I16x8SubSatU) \
V(Loong64I16x8MaxU) \
V(Loong64I16x8MinU) \
V(Loong64I16x8GtU) \
V(Loong64I16x8GeU) \
V(Loong64I16x8RoundingAverageU) \
V(Loong64I16x8Abs) \
V(Loong64I16x8BitMask) \
V(Loong64I16x8Q15MulRSatS) \
V(Loong64I8x16Splat) \
V(Loong64I8x16ExtractLaneU) \
V(Loong64I8x16ExtractLaneS) \
V(Loong64I8x16ReplaceLane) \
V(Loong64I8x16Neg) \
V(Loong64I8x16Shl) \
V(Loong64I8x16ShrS) \
V(Loong64I8x16Add) \
V(Loong64I8x16AddSatS) \
V(Loong64I8x16Sub) \
V(Loong64I8x16SubSatS) \
V(Loong64I8x16MaxS) \
V(Loong64I8x16MinS) \
V(Loong64I8x16Eq) \
V(Loong64I8x16Ne) \
V(Loong64I8x16GtS) \
V(Loong64I8x16GeS) \
V(Loong64I8x16ShrU) \
V(Loong64I8x16AddSatU) \
V(Loong64I8x16SubSatU) \
V(Loong64I8x16MaxU) \
V(Loong64I8x16MinU) \
V(Loong64I8x16GtU) \
V(Loong64I8x16GeU) \
V(Loong64I8x16RoundingAverageU) \
V(Loong64I8x16Abs) \
V(Loong64I8x16Popcnt) \
V(Loong64I8x16BitMask) \
V(Loong64S128And) \
V(Loong64S128Or) \
V(Loong64S128Xor) \
V(Loong64S128Not) \
V(Loong64S128Select) \
V(Loong64S128AndNot) \
V(Loong64I64x2AllTrue) \
V(Loong64I32x4AllTrue) \
V(Loong64I16x8AllTrue) \
V(Loong64I8x16AllTrue) \
V(Loong64V128AnyTrue) \
V(Loong64S32x4InterleaveRight) \
V(Loong64S32x4InterleaveLeft) \
V(Loong64S32x4PackEven) \
V(Loong64S32x4PackOdd) \
V(Loong64S32x4InterleaveEven) \
V(Loong64S32x4InterleaveOdd) \
V(Loong64S32x4Shuffle) \
V(Loong64S16x8InterleaveRight) \
V(Loong64S16x8InterleaveLeft) \
V(Loong64S16x8PackEven) \
V(Loong64S16x8PackOdd) \
V(Loong64S16x8InterleaveEven) \
V(Loong64S16x8InterleaveOdd) \
V(Loong64S16x4Reverse) \
V(Loong64S16x2Reverse) \
V(Loong64S8x16InterleaveRight) \
V(Loong64S8x16InterleaveLeft) \
V(Loong64S8x16PackEven) \
V(Loong64S8x16PackOdd) \
V(Loong64S8x16InterleaveEven) \
V(Loong64S8x16InterleaveOdd) \
V(Loong64I8x16Shuffle) \
V(Loong64I8x16Swizzle) \
V(Loong64S8x16Concat) \
V(Loong64S8x8Reverse) \
V(Loong64S8x4Reverse) \
V(Loong64S8x2Reverse) \
V(Loong64S128LoadSplat) \
V(Loong64S128Load8x8S) \
V(Loong64S128Load8x8U) \
V(Loong64S128Load16x4S) \
V(Loong64S128Load16x4U) \
V(Loong64S128Load32x2S) \
V(Loong64S128Load32x2U) \
V(Loong64S128Load32Zero) \
V(Loong64S128Load64Zero) \
V(Loong64LoadLane) \
V(Loong64StoreLane) \
V(Loong64I32x4SConvertI16x8Low) \
V(Loong64I32x4SConvertI16x8High) \
V(Loong64I32x4UConvertI16x8Low) \
V(Loong64I32x4UConvertI16x8High) \
V(Loong64I16x8SConvertI8x16Low) \
V(Loong64I16x8SConvertI8x16High) \
V(Loong64I16x8SConvertI32x4) \
V(Loong64I16x8UConvertI32x4) \
V(Loong64I16x8UConvertI8x16Low) \
V(Loong64I16x8UConvertI8x16High) \
V(Loong64I8x16SConvertI16x8) \
V(Loong64I8x16UConvertI16x8) \
V(Loong64StoreCompressTagged) \
V(Loong64Word64AtomicLoadUint32) \
V(Loong64Word64AtomicLoadUint64) \
V(Loong64Word64AtomicStoreWord64) \
V(Loong64Word64AtomicAddUint64) \
V(Loong64Word64AtomicSubUint64) \
V(Loong64Word64AtomicAndUint64) \
V(Loong64Word64AtomicOrUint64) \
V(Loong64Word64AtomicXorUint64) \
V(Loong64Word64AtomicExchangeUint64) \
#define TARGET_ARCH_OPCODE_LIST(V) \
V(Loong64Add_d) \
V(Loong64Add_w) \
V(Loong64AddOvf_d) \
V(Loong64Sub_d) \
V(Loong64Sub_w) \
V(Loong64SubOvf_d) \
V(Loong64Mul_d) \
V(Loong64MulOvf_w) \
V(Loong64Mulh_d) \
V(Loong64Mulh_w) \
V(Loong64Mulh_wu) \
V(Loong64Mul_w) \
V(Loong64Div_d) \
V(Loong64Div_w) \
V(Loong64Div_du) \
V(Loong64Div_wu) \
V(Loong64Mod_d) \
V(Loong64Mod_w) \
V(Loong64Mod_du) \
V(Loong64Mod_wu) \
V(Loong64And) \
V(Loong64And32) \
V(Loong64Or) \
V(Loong64Or32) \
V(Loong64Nor) \
V(Loong64Nor32) \
V(Loong64Xor) \
V(Loong64Xor32) \
V(Loong64Alsl_d) \
V(Loong64Alsl_w) \
V(Loong64Sll_d) \
V(Loong64Sll_w) \
V(Loong64Srl_d) \
V(Loong64Srl_w) \
V(Loong64Sra_d) \
V(Loong64Sra_w) \
V(Loong64Rotr_d) \
V(Loong64Rotr_w) \
V(Loong64Bstrpick_d) \
V(Loong64Bstrpick_w) \
V(Loong64Bstrins_d) \
V(Loong64Bstrins_w) \
V(Loong64ByteSwap64) \
V(Loong64ByteSwap32) \
V(Loong64Clz_d) \
V(Loong64Clz_w) \
V(Loong64Mov) \
V(Loong64Tst) \
V(Loong64Cmp) \
V(Loong64Float32Cmp) \
V(Loong64Float32Add) \
V(Loong64Float32Sub) \
V(Loong64Float32Mul) \
V(Loong64Float32Div) \
V(Loong64Float32Abs) \
V(Loong64Float32Neg) \
V(Loong64Float32Sqrt) \
V(Loong64Float32Max) \
V(Loong64Float32Min) \
V(Loong64Float32ToFloat64) \
V(Loong64Float32RoundDown) \
V(Loong64Float32RoundUp) \
V(Loong64Float32RoundTruncate) \
V(Loong64Float32RoundTiesEven) \
V(Loong64Float32ToInt32) \
V(Loong64Float32ToInt64) \
V(Loong64Float32ToUint32) \
V(Loong64Float32ToUint64) \
V(Loong64Float64Cmp) \
V(Loong64Float64Add) \
V(Loong64Float64Sub) \
V(Loong64Float64Mul) \
V(Loong64Float64Div) \
V(Loong64Float64Mod) \
V(Loong64Float64Abs) \
V(Loong64Float64Neg) \
V(Loong64Float64Sqrt) \
V(Loong64Float64Max) \
V(Loong64Float64Min) \
V(Loong64Float64ToFloat32) \
V(Loong64Float64RoundDown) \
V(Loong64Float64RoundUp) \
V(Loong64Float64RoundTruncate) \
V(Loong64Float64RoundTiesEven) \
V(Loong64Float64ToInt32) \
V(Loong64Float64ToInt64) \
V(Loong64Float64ToUint32) \
V(Loong64Float64ToUint64) \
V(Loong64Int32ToFloat32) \
V(Loong64Int32ToFloat64) \
V(Loong64Int64ToFloat32) \
V(Loong64Int64ToFloat64) \
V(Loong64Uint32ToFloat32) \
V(Loong64Uint32ToFloat64) \
V(Loong64Uint64ToFloat32) \
V(Loong64Uint64ToFloat64) \
V(Loong64Float64ExtractLowWord32) \
V(Loong64Float64ExtractHighWord32) \
V(Loong64Float64InsertLowWord32) \
V(Loong64Float64InsertHighWord32) \
V(Loong64BitcastDL) \
V(Loong64BitcastLD) \
V(Loong64Float64SilenceNaN) \
V(Loong64Ld_b) \
V(Loong64Ld_bu) \
V(Loong64St_b) \
V(Loong64Ld_h) \
V(Loong64Ld_hu) \
V(Loong64St_h) \
V(Loong64Ld_w) \
V(Loong64Ld_wu) \
V(Loong64St_w) \
V(Loong64Ld_d) \
V(Loong64St_d) \
V(Loong64Fld_s) \
V(Loong64Fst_s) \
V(Loong64Fld_d) \
V(Loong64Fst_d) \
V(Loong64Push) \
V(Loong64Peek) \
V(Loong64Poke) \
V(Loong64StackClaim) \
V(Loong64Ext_w_b) \
V(Loong64Ext_w_h) \
V(Loong64Dbar) \
V(Loong64S128Const) \
V(Loong64S128Zero) \
V(Loong64S128AllOnes) \
V(Loong64I32x4Splat) \
V(Loong64I32x4ExtractLane) \
V(Loong64I32x4ReplaceLane) \
V(Loong64I32x4Add) \
V(Loong64I32x4Sub) \
V(Loong64F64x2Abs) \
V(Loong64F64x2Neg) \
V(Loong64F32x4Splat) \
V(Loong64F32x4ExtractLane) \
V(Loong64F32x4ReplaceLane) \
V(Loong64F32x4SConvertI32x4) \
V(Loong64F32x4UConvertI32x4) \
V(Loong64I32x4Mul) \
V(Loong64I32x4MaxS) \
V(Loong64I32x4MinS) \
V(Loong64I32x4Eq) \
V(Loong64I32x4Ne) \
V(Loong64I32x4Shl) \
V(Loong64I32x4ShrS) \
V(Loong64I32x4ShrU) \
V(Loong64I32x4MaxU) \
V(Loong64I32x4MinU) \
V(Loong64F64x2Sqrt) \
V(Loong64F64x2Add) \
V(Loong64F64x2Sub) \
V(Loong64F64x2Mul) \
V(Loong64F64x2Div) \
V(Loong64F64x2Min) \
V(Loong64F64x2Max) \
V(Loong64F64x2Eq) \
V(Loong64F64x2Ne) \
V(Loong64F64x2Lt) \
V(Loong64F64x2Le) \
V(Loong64F64x2Splat) \
V(Loong64F64x2ExtractLane) \
V(Loong64F64x2ReplaceLane) \
V(Loong64F64x2Pmin) \
V(Loong64F64x2Pmax) \
V(Loong64F64x2Ceil) \
V(Loong64F64x2Floor) \
V(Loong64F64x2Trunc) \
V(Loong64F64x2NearestInt) \
V(Loong64F64x2ConvertLowI32x4S) \
V(Loong64F64x2ConvertLowI32x4U) \
V(Loong64F64x2PromoteLowF32x4) \
V(Loong64F64x2RelaxedMin) \
V(Loong64F64x2RelaxedMax) \
V(Loong64I64x2Splat) \
V(Loong64I64x2ExtractLane) \
V(Loong64I64x2ReplaceLane) \
V(Loong64I64x2Add) \
V(Loong64I64x2Sub) \
V(Loong64I64x2Mul) \
V(Loong64I64x2Neg) \
V(Loong64I64x2Shl) \
V(Loong64I64x2ShrS) \
V(Loong64I64x2ShrU) \
V(Loong64I64x2BitMask) \
V(Loong64I64x2Eq) \
V(Loong64I64x2Ne) \
V(Loong64I64x2GtS) \
V(Loong64I64x2GeS) \
V(Loong64I64x2Abs) \
V(Loong64I64x2SConvertI32x4Low) \
V(Loong64I64x2SConvertI32x4High) \
V(Loong64I64x2UConvertI32x4Low) \
V(Loong64I64x2UConvertI32x4High) \
V(Loong64ExtMulLow) \
V(Loong64ExtMulHigh) \
V(Loong64ExtAddPairwise) \
V(Loong64F32x4Abs) \
V(Loong64F32x4Neg) \
V(Loong64F32x4Sqrt) \
V(Loong64F32x4Add) \
V(Loong64F32x4Sub) \
V(Loong64F32x4Mul) \
V(Loong64F32x4Div) \
V(Loong64F32x4Max) \
V(Loong64F32x4Min) \
V(Loong64F32x4Eq) \
V(Loong64F32x4Ne) \
V(Loong64F32x4Lt) \
V(Loong64F32x4Le) \
V(Loong64F32x4Pmin) \
V(Loong64F32x4Pmax) \
V(Loong64F32x4Ceil) \
V(Loong64F32x4Floor) \
V(Loong64F32x4Trunc) \
V(Loong64F32x4NearestInt) \
V(Loong64F32x4DemoteF64x2Zero) \
V(Loong64F32x4RelaxedMin) \
V(Loong64F32x4RelaxedMax) \
V(Loong64I32x4SConvertF32x4) \
V(Loong64I32x4UConvertF32x4) \
V(Loong64I32x4Neg) \
V(Loong64I32x4GtS) \
V(Loong64I32x4GeS) \
V(Loong64I32x4GtU) \
V(Loong64I32x4GeU) \
V(Loong64I32x4Abs) \
V(Loong64I32x4BitMask) \
V(Loong64I32x4DotI16x8S) \
V(Loong64I32x4TruncSatF64x2SZero) \
V(Loong64I32x4TruncSatF64x2UZero) \
V(Loong64I32x4RelaxedTruncF32x4S) \
V(Loong64I32x4RelaxedTruncF32x4U) \
V(Loong64I32x4RelaxedTruncF64x2SZero) \
V(Loong64I32x4RelaxedTruncF64x2UZero) \
V(Loong64I16x8Splat) \
V(Loong64I16x8ExtractLaneU) \
V(Loong64I16x8ExtractLaneS) \
V(Loong64I16x8ReplaceLane) \
V(Loong64I16x8Neg) \
V(Loong64I16x8Shl) \
V(Loong64I16x8ShrS) \
V(Loong64I16x8ShrU) \
V(Loong64I16x8Add) \
V(Loong64I16x8AddSatS) \
V(Loong64I16x8Sub) \
V(Loong64I16x8SubSatS) \
V(Loong64I16x8Mul) \
V(Loong64I16x8MaxS) \
V(Loong64I16x8MinS) \
V(Loong64I16x8Eq) \
V(Loong64I16x8Ne) \
V(Loong64I16x8GtS) \
V(Loong64I16x8GeS) \
V(Loong64I16x8AddSatU) \
V(Loong64I16x8SubSatU) \
V(Loong64I16x8MaxU) \
V(Loong64I16x8MinU) \
V(Loong64I16x8GtU) \
V(Loong64I16x8GeU) \
V(Loong64I16x8RoundingAverageU) \
V(Loong64I16x8Abs) \
V(Loong64I16x8BitMask) \
V(Loong64I16x8Q15MulRSatS) \
V(Loong64I16x8RelaxedQ15MulRS) \
V(Loong64I8x16Splat) \
V(Loong64I8x16ExtractLaneU) \
V(Loong64I8x16ExtractLaneS) \
V(Loong64I8x16ReplaceLane) \
V(Loong64I8x16Neg) \
V(Loong64I8x16Shl) \
V(Loong64I8x16ShrS) \
V(Loong64I8x16Add) \
V(Loong64I8x16AddSatS) \
V(Loong64I8x16Sub) \
V(Loong64I8x16SubSatS) \
V(Loong64I8x16MaxS) \
V(Loong64I8x16MinS) \
V(Loong64I8x16Eq) \
V(Loong64I8x16Ne) \
V(Loong64I8x16GtS) \
V(Loong64I8x16GeS) \
V(Loong64I8x16ShrU) \
V(Loong64I8x16AddSatU) \
V(Loong64I8x16SubSatU) \
V(Loong64I8x16MaxU) \
V(Loong64I8x16MinU) \
V(Loong64I8x16GtU) \
V(Loong64I8x16GeU) \
V(Loong64I8x16RoundingAverageU) \
V(Loong64I8x16Abs) \
V(Loong64I8x16Popcnt) \
V(Loong64I8x16BitMask) \
V(Loong64S128And) \
V(Loong64S128Or) \
V(Loong64S128Xor) \
V(Loong64S128Not) \
V(Loong64S128Select) \
V(Loong64S128AndNot) \
V(Loong64I64x2AllTrue) \
V(Loong64I32x4AllTrue) \
V(Loong64I16x8AllTrue) \
V(Loong64I8x16AllTrue) \
V(Loong64V128AnyTrue) \
V(Loong64S32x4InterleaveRight) \
V(Loong64S32x4InterleaveLeft) \
V(Loong64S32x4PackEven) \
V(Loong64S32x4PackOdd) \
V(Loong64S32x4InterleaveEven) \
V(Loong64S32x4InterleaveOdd) \
V(Loong64S32x4Shuffle) \
V(Loong64S16x8InterleaveRight) \
V(Loong64S16x8InterleaveLeft) \
V(Loong64S16x8PackEven) \
V(Loong64S16x8PackOdd) \
V(Loong64S16x8InterleaveEven) \
V(Loong64S16x8InterleaveOdd) \
V(Loong64S16x4Reverse) \
V(Loong64S16x2Reverse) \
V(Loong64S8x16InterleaveRight) \
V(Loong64S8x16InterleaveLeft) \
V(Loong64S8x16PackEven) \
V(Loong64S8x16PackOdd) \
V(Loong64S8x16InterleaveEven) \
V(Loong64S8x16InterleaveOdd) \
V(Loong64I8x16Shuffle) \
V(Loong64I8x16Swizzle) \
V(Loong64S8x16Concat) \
V(Loong64S8x8Reverse) \
V(Loong64S8x4Reverse) \
V(Loong64S8x2Reverse) \
V(Loong64S128LoadSplat) \
V(Loong64S128Load8x8S) \
V(Loong64S128Load8x8U) \
V(Loong64S128Load16x4S) \
V(Loong64S128Load16x4U) \
V(Loong64S128Load32x2S) \
V(Loong64S128Load32x2U) \
V(Loong64S128Load32Zero) \
V(Loong64S128Load64Zero) \
V(Loong64LoadLane) \
V(Loong64StoreLane) \
V(Loong64I32x4SConvertI16x8Low) \
V(Loong64I32x4SConvertI16x8High) \
V(Loong64I32x4UConvertI16x8Low) \
V(Loong64I32x4UConvertI16x8High) \
V(Loong64I16x8SConvertI8x16Low) \
V(Loong64I16x8SConvertI8x16High) \
V(Loong64I16x8SConvertI32x4) \
V(Loong64I16x8UConvertI32x4) \
V(Loong64I16x8UConvertI8x16Low) \
V(Loong64I16x8UConvertI8x16High) \
V(Loong64I8x16SConvertI16x8) \
V(Loong64I8x16UConvertI16x8) \
V(Loong64StoreCompressTagged) \
V(Loong64Word64AtomicLoadUint32) \
V(Loong64Word64AtomicLoadUint64) \
V(Loong64Word64AtomicStoreWord64) \
V(Loong64Word64AtomicAddUint64) \
V(Loong64Word64AtomicSubUint64) \
V(Loong64Word64AtomicAndUint64) \
V(Loong64Word64AtomicOrUint64) \
V(Loong64Word64AtomicXorUint64) \
V(Loong64Word64AtomicExchangeUint64) \
V(Loong64Word64AtomicCompareExchangeUint64)
// Addressing modes represent the "shape" of inputs to an instruction.
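For reference, TARGET_ARCH_OPCODE_LIST above is an X-macro: the backend names each opcode exactly once, and other code expands the list with different definitions of V. A minimal self-contained sketch of the idea (demo names, not V8's exact definitions):

#include <iostream>

#define DEMO_OPCODE_LIST(V) \
  V(Loong64Add_d)           \
  V(Loong64Sub_d)           \
  V(Loong64Mul_d)

// Expand once into an enum of opcode constants...
#define DECLARE_OPCODE(Name) k##Name,
enum DemoArchOpcode { DEMO_OPCODE_LIST(DECLARE_OPCODE) kDemoOpcodeCount };
#undef DECLARE_OPCODE

// ...and once more into a parallel table of printable names.
#define OPCODE_NAME(Name) #Name,
const char* const kDemoOpcodeNames[] = {DEMO_OPCODE_LIST(OPCODE_NAME)};
#undef OPCODE_NAME

int main() {
  std::cout << kDemoOpcodeNames[kLoong64Sub_d] << "\n";  // prints "Loong64Sub_d"
}

Keeping every opcode in one list is what makes a cleanup like this commit mostly mechanical: realigning the continuation backslashes and appending the new relaxed-SIMD entries touches only this macro.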

src/compiler/backend/loong64/instruction-selector-loong64.cc

@@ -2685,61 +2685,65 @@ void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
V(I16x8) \
V(I8x16)
#define SIMD_UNOP_LIST(V) \
V(F64x2Abs, kLoong64F64x2Abs) \
V(F64x2Neg, kLoong64F64x2Neg) \
V(F64x2Sqrt, kLoong64F64x2Sqrt) \
V(F64x2Ceil, kLoong64F64x2Ceil) \
V(F64x2Floor, kLoong64F64x2Floor) \
V(F64x2Trunc, kLoong64F64x2Trunc) \
V(F64x2NearestInt, kLoong64F64x2NearestInt) \
V(I64x2Neg, kLoong64I64x2Neg) \
V(I64x2BitMask, kLoong64I64x2BitMask) \
V(F64x2ConvertLowI32x4S, kLoong64F64x2ConvertLowI32x4S) \
V(F64x2ConvertLowI32x4U, kLoong64F64x2ConvertLowI32x4U) \
V(F64x2PromoteLowF32x4, kLoong64F64x2PromoteLowF32x4) \
V(F32x4SConvertI32x4, kLoong64F32x4SConvertI32x4) \
V(F32x4UConvertI32x4, kLoong64F32x4UConvertI32x4) \
V(F32x4Abs, kLoong64F32x4Abs) \
V(F32x4Neg, kLoong64F32x4Neg) \
V(F32x4Sqrt, kLoong64F32x4Sqrt) \
V(F32x4Ceil, kLoong64F32x4Ceil) \
V(F32x4Floor, kLoong64F32x4Floor) \
V(F32x4Trunc, kLoong64F32x4Trunc) \
V(F32x4NearestInt, kLoong64F32x4NearestInt) \
V(F32x4DemoteF64x2Zero, kLoong64F32x4DemoteF64x2Zero) \
V(I64x2Abs, kLoong64I64x2Abs) \
V(I64x2SConvertI32x4Low, kLoong64I64x2SConvertI32x4Low) \
V(I64x2SConvertI32x4High, kLoong64I64x2SConvertI32x4High) \
V(I64x2UConvertI32x4Low, kLoong64I64x2UConvertI32x4Low) \
V(I64x2UConvertI32x4High, kLoong64I64x2UConvertI32x4High) \
V(I32x4SConvertF32x4, kLoong64I32x4SConvertF32x4) \
V(I32x4UConvertF32x4, kLoong64I32x4UConvertF32x4) \
V(I32x4Neg, kLoong64I32x4Neg) \
V(I32x4SConvertI16x8Low, kLoong64I32x4SConvertI16x8Low) \
V(I32x4SConvertI16x8High, kLoong64I32x4SConvertI16x8High) \
V(I32x4UConvertI16x8Low, kLoong64I32x4UConvertI16x8Low) \
V(I32x4UConvertI16x8High, kLoong64I32x4UConvertI16x8High) \
V(I32x4Abs, kLoong64I32x4Abs) \
V(I32x4BitMask, kLoong64I32x4BitMask) \
V(I32x4TruncSatF64x2SZero, kLoong64I32x4TruncSatF64x2SZero) \
V(I32x4TruncSatF64x2UZero, kLoong64I32x4TruncSatF64x2UZero) \
V(I16x8Neg, kLoong64I16x8Neg) \
V(I16x8SConvertI8x16Low, kLoong64I16x8SConvertI8x16Low) \
V(I16x8SConvertI8x16High, kLoong64I16x8SConvertI8x16High) \
V(I16x8UConvertI8x16Low, kLoong64I16x8UConvertI8x16Low) \
V(I16x8UConvertI8x16High, kLoong64I16x8UConvertI8x16High) \
V(I16x8Abs, kLoong64I16x8Abs) \
V(I16x8BitMask, kLoong64I16x8BitMask) \
V(I8x16Neg, kLoong64I8x16Neg) \
V(I8x16Abs, kLoong64I8x16Abs) \
V(I8x16Popcnt, kLoong64I8x16Popcnt) \
V(I8x16BitMask, kLoong64I8x16BitMask) \
V(S128Not, kLoong64S128Not) \
V(I64x2AllTrue, kLoong64I64x2AllTrue) \
V(I32x4AllTrue, kLoong64I32x4AllTrue) \
V(I16x8AllTrue, kLoong64I16x8AllTrue) \
V(I8x16AllTrue, kLoong64I8x16AllTrue) \
#define SIMD_UNOP_LIST(V) \
V(F64x2Abs, kLoong64F64x2Abs) \
V(F64x2Neg, kLoong64F64x2Neg) \
V(F64x2Sqrt, kLoong64F64x2Sqrt) \
V(F64x2Ceil, kLoong64F64x2Ceil) \
V(F64x2Floor, kLoong64F64x2Floor) \
V(F64x2Trunc, kLoong64F64x2Trunc) \
V(F64x2NearestInt, kLoong64F64x2NearestInt) \
V(I64x2Neg, kLoong64I64x2Neg) \
V(I64x2BitMask, kLoong64I64x2BitMask) \
V(F64x2ConvertLowI32x4S, kLoong64F64x2ConvertLowI32x4S) \
V(F64x2ConvertLowI32x4U, kLoong64F64x2ConvertLowI32x4U) \
V(F64x2PromoteLowF32x4, kLoong64F64x2PromoteLowF32x4) \
V(F32x4SConvertI32x4, kLoong64F32x4SConvertI32x4) \
V(F32x4UConvertI32x4, kLoong64F32x4UConvertI32x4) \
V(F32x4Abs, kLoong64F32x4Abs) \
V(F32x4Neg, kLoong64F32x4Neg) \
V(F32x4Sqrt, kLoong64F32x4Sqrt) \
V(F32x4Ceil, kLoong64F32x4Ceil) \
V(F32x4Floor, kLoong64F32x4Floor) \
V(F32x4Trunc, kLoong64F32x4Trunc) \
V(F32x4NearestInt, kLoong64F32x4NearestInt) \
V(F32x4DemoteF64x2Zero, kLoong64F32x4DemoteF64x2Zero) \
V(I64x2Abs, kLoong64I64x2Abs) \
V(I64x2SConvertI32x4Low, kLoong64I64x2SConvertI32x4Low) \
V(I64x2SConvertI32x4High, kLoong64I64x2SConvertI32x4High) \
V(I64x2UConvertI32x4Low, kLoong64I64x2UConvertI32x4Low) \
V(I64x2UConvertI32x4High, kLoong64I64x2UConvertI32x4High) \
V(I32x4SConvertF32x4, kLoong64I32x4SConvertF32x4) \
V(I32x4UConvertF32x4, kLoong64I32x4UConvertF32x4) \
V(I32x4Neg, kLoong64I32x4Neg) \
V(I32x4SConvertI16x8Low, kLoong64I32x4SConvertI16x8Low) \
V(I32x4SConvertI16x8High, kLoong64I32x4SConvertI16x8High) \
V(I32x4UConvertI16x8Low, kLoong64I32x4UConvertI16x8Low) \
V(I32x4UConvertI16x8High, kLoong64I32x4UConvertI16x8High) \
V(I32x4Abs, kLoong64I32x4Abs) \
V(I32x4BitMask, kLoong64I32x4BitMask) \
V(I32x4TruncSatF64x2SZero, kLoong64I32x4TruncSatF64x2SZero) \
V(I32x4TruncSatF64x2UZero, kLoong64I32x4TruncSatF64x2UZero) \
V(I32x4RelaxedTruncF32x4S, kLoong64I32x4RelaxedTruncF32x4S) \
V(I32x4RelaxedTruncF32x4U, kLoong64I32x4RelaxedTruncF32x4U) \
V(I32x4RelaxedTruncF64x2SZero, kLoong64I32x4RelaxedTruncF64x2SZero) \
V(I32x4RelaxedTruncF64x2UZero, kLoong64I32x4RelaxedTruncF64x2UZero) \
V(I16x8Neg, kLoong64I16x8Neg) \
V(I16x8SConvertI8x16Low, kLoong64I16x8SConvertI8x16Low) \
V(I16x8SConvertI8x16High, kLoong64I16x8SConvertI8x16High) \
V(I16x8UConvertI8x16Low, kLoong64I16x8UConvertI8x16Low) \
V(I16x8UConvertI8x16High, kLoong64I16x8UConvertI8x16High) \
V(I16x8Abs, kLoong64I16x8Abs) \
V(I16x8BitMask, kLoong64I16x8BitMask) \
V(I8x16Neg, kLoong64I8x16Neg) \
V(I8x16Abs, kLoong64I8x16Abs) \
V(I8x16Popcnt, kLoong64I8x16Popcnt) \
V(I8x16BitMask, kLoong64I8x16BitMask) \
V(S128Not, kLoong64S128Not) \
V(I64x2AllTrue, kLoong64I64x2AllTrue) \
V(I32x4AllTrue, kLoong64I32x4AllTrue) \
V(I16x8AllTrue, kLoong64I16x8AllTrue) \
V(I8x16AllTrue, kLoong64I8x16AllTrue) \
V(V128AnyTrue, kLoong64V128AnyTrue)
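Each (node opcode, architecture opcode) pair in SIMD_UNOP_LIST is later stamped into a one-line visitor. The selector files use roughly this pattern (a sketch of the surrounding file's existing expansion, not new code):

// One Visit method per unary SIMD op; VisitRR emits an instruction with
// one result register and one input register.
#define SIMD_VISIT_UNOP(Name, instruction)            \
  void InstructionSelector::Visit##Name(Node* node) { \
    VisitRR(this, instruction, node);                 \
  }
SIMD_UNOP_LIST(SIMD_VISIT_UNOP)
#undef SIMD_VISIT_UNOP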
#define SIMD_SHIFT_OP_LIST(V) \
@@ -2767,6 +2771,8 @@ void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
V(F64x2Ne, kLoong64F64x2Ne) \
V(F64x2Lt, kLoong64F64x2Lt) \
V(F64x2Le, kLoong64F64x2Le) \
V(F64x2RelaxedMin, kLoong64F64x2RelaxedMin) \
V(F64x2RelaxedMax, kLoong64F64x2RelaxedMax) \
V(I64x2Eq, kLoong64I64x2Eq) \
V(I64x2Ne, kLoong64I64x2Ne) \
V(I64x2Add, kLoong64I64x2Add) \
@@ -2784,6 +2790,8 @@ void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
V(F32x4Ne, kLoong64F32x4Ne) \
V(F32x4Lt, kLoong64F32x4Lt) \
V(F32x4Le, kLoong64F32x4Le) \
V(F32x4RelaxedMin, kLoong64F32x4RelaxedMin) \
V(F32x4RelaxedMax, kLoong64F32x4RelaxedMax) \
V(I32x4Add, kLoong64I32x4Add) \
V(I32x4Sub, kLoong64I32x4Sub) \
V(I32x4Mul, kLoong64I32x4Mul) \
@@ -2819,6 +2827,7 @@ void InstructionSelector::VisitInt64AbsWithOverflow(Node* node) {
V(I16x8SConvertI32x4, kLoong64I16x8SConvertI32x4) \
V(I16x8UConvertI32x4, kLoong64I16x8UConvertI32x4) \
V(I16x8Q15MulRSatS, kLoong64I16x8Q15MulRSatS) \
V(I16x8RelaxedQ15MulRS, kLoong64I16x8RelaxedQ15MulRS) \
V(I8x16Add, kLoong64I8x16Add) \
V(I8x16AddSatS, kLoong64I8x16AddSatS) \
V(I8x16AddSatU, kLoong64I8x16AddSatU) \
@@ -2921,6 +2930,22 @@ void InstructionSelector::VisitS128Select(Node* node) {
VisitRRRR(this, kLoong64S128Select, node);
}
void InstructionSelector::VisitI8x16RelaxedLaneSelect(Node* node) {
VisitS128Select(node);
}
void InstructionSelector::VisitI16x8RelaxedLaneSelect(Node* node) {
VisitS128Select(node);
}
void InstructionSelector::VisitI32x4RelaxedLaneSelect(Node* node) {
VisitS128Select(node);
}
void InstructionSelector::VisitI64x2RelaxedLaneSelect(Node* node) {
VisitS128Select(node);
}
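Lowering the four relaxed lane selects to the plain bitwise select is sound: relaxed SIMD only pins down the result when every bit of a mask lane is all zeros or all ones, and bitwise selection is one of the behaviors permitted otherwise. A scalar model of the operation being reused (illustrative only):

#include <cstdint>

// Takes bits from if_set where mask bits are 1, else from if_clear.
uint64_t BitSelect(uint64_t mask, uint64_t if_set, uint64_t if_clear) {
  return (mask & if_set) | (~mask & if_clear);
}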
#if V8_ENABLE_WEBASSEMBLY
namespace {

src/compiler/backend/mips64/instruction-selector-mips64.cc

@@ -3161,6 +3161,26 @@ SIMD_SHIFT_OP_LIST(SIMD_VISIT_SHIFT_OP)
SIMD_BINOP_LIST(SIMD_VISIT_BINOP)
#undef SIMD_VISIT_BINOP
#define SIMD_RELAXED_OP_LIST(V) \
V(F64x2RelaxedMin) \
V(F64x2RelaxedMax) \
V(F32x4RelaxedMin) \
V(F32x4RelaxedMax) \
V(I32x4RelaxedTruncF32x4S) \
V(I32x4RelaxedTruncF32x4U) \
V(I32x4RelaxedTruncF64x2SZero) \
V(I32x4RelaxedTruncF64x2UZero) \
V(I16x8RelaxedQ15MulRS) \
V(I8x16RelaxedLaneSelect) \
V(I16x8RelaxedLaneSelect) \
V(I32x4RelaxedLaneSelect) \
V(I64x2RelaxedLaneSelect)
#define SIMD_VISIT_RELAXED_OP(Name) \
void InstructionSelector::Visit##Name(Node* node) { UNREACHABLE(); }
SIMD_RELAXED_OP_LIST(SIMD_VISIT_RELAXED_OP)
#undef SIMD_VISIT_RELAXED_OP
void InstructionSelector::VisitS128Select(Node* node) {
VisitRRRR(this, kMips64S128Select, node);
}
@@ -3409,6 +3429,7 @@ InstructionSelector::AlignmentRequirements() {
#undef SIMD_BINOP_LIST
#undef SIMD_SHIFT_OP_LIST
#undef SIMD_RELAXED_OP_LIST
#undef SIMD_UNOP_LIST
#undef SIMD_TYPE_LIST
#undef TRACE_UNIMPL

src/wasm/baseline/mips64/liftoff-assembler-mips64.h

@@ -83,7 +83,7 @@ inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,
                           bool i64_offset = false, unsigned shift_amount = 0) {
  if (offset != no_reg) {
    if (!i64_offset) {
-     assm->Dext(kScratchReg2, offset, 0, 32);
+     assm->Dext(kScratchReg, offset, 0, 32);
      offset = kScratchReg;
    }
    if (shift_amount != 0) {
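This hunk is the GetMemOp error called out in the commit message: the zero-extension of a 32-bit offset was written to kScratchReg2, while the next line redirected offset to kScratchReg, so the extended value was never used. A simplified sketch of the corrected flow (assumed, simplified signature; the real helper also handles shift_amount and out-of-range immediates):

inline MemOperand GetMemOp(LiftoffAssembler* assm, Register addr,
                           Register offset, uintptr_t offset_imm,
                           bool i64_offset = false) {
  if (offset != no_reg) {
    if (!i64_offset) {
      // Zero-extend the 32-bit offset into the same register that
      // 'offset' is redirected to below (writing kScratchReg2 here
      // while reading kScratchReg afterwards was the bug).
      assm->Dext(kScratchReg, offset, 0, 32);
      offset = kScratchReg;
    }
    assm->daddu(kScratchReg, addr, offset);  // addr + offset
    addr = kScratchReg;
  }
  return MemOperand(addr, static_cast<int32_t>(offset_imm));
}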