Fix MSVC C4065 warning in HasMemoryAccessMode().

Avoid generating switch statements with only a default case. Instead,
when there are no instructions that can trap, simply have
HasMemoryAccessMode() return false. This avoids an MSVC warning when
doing a 32-bit build.

To do this, remove empty TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST
definitions from instruction-codes-$arch.h files.

Change-Id: Ifed76eb9cbca169f30c188c1999e1e9be0b2c6aa
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3224807
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Commit-Queue: Lei Zhang <thestig@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77441}
Lei Zhang <thestig@chromium.org>, 2021-10-18 08:12:16 -07:00, committed by V8 LUCI CQ
parent 5bcd2037a3
commit 5333d90ecd
9 changed files with 2986 additions and 3014 deletions
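
As context for the diffs below: a minimal, self-contained sketch (opcode
names are hypothetical, not the actual V8 headers) of why an empty list
macro triggered C4065 under MSVC, and why leaving the macro undefined
avoids it:

  // c4065_sketch.cc -- hypothetical names; build with cl /W4 or clang++.
  // An arch with trapping memory instructions would define the list;
  // after this CL, arches without any leave it *undefined* rather than
  // defining it empty. An empty expansion leaves the switch below with
  // only a default label, which is exactly what C4065 warns about.
  // #define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) V(FakeAdd)

  enum ArchOpcode { kFakeAdd, kFakeSub };

  inline bool HasMemoryAccessMode(ArchOpcode opcode) {
  #if defined(TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST)
    switch (opcode) {
  #define CASE(Name) \
    case k##Name:    \
      return true;
      TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(CASE)
  #undef CASE
      default:
        return false;
    }
  #else
    return false;  // No switch is emitted, so C4065 cannot fire.
  #endif
  }

  int main() { return HasMemoryAccessMode(kFakeAdd) ? 0 : 1; }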


@@ -12,361 +12,357 @@ namespace compiler {
// ARM-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
+#define TARGET_ARCH_OPCODE_LIST(V) \
V(ArmAdd) \
V(ArmAnd) \
V(ArmBic) \
V(ArmClz) \
V(ArmCmp) \
V(ArmCmn) \
V(ArmTst) \
V(ArmTeq) \
V(ArmOrr) \
V(ArmEor) \
V(ArmSub) \
V(ArmRsb) \
V(ArmMul) \
V(ArmMla) \
V(ArmMls) \
V(ArmSmull) \
V(ArmSmmul) \
V(ArmSmmla) \
V(ArmUmull) \
V(ArmSdiv) \
V(ArmUdiv) \
V(ArmMov) \
V(ArmMvn) \
V(ArmBfc) \
V(ArmUbfx) \
V(ArmSbfx) \
V(ArmSxtb) \
V(ArmSxth) \
V(ArmSxtab) \
V(ArmSxtah) \
V(ArmUxtb) \
V(ArmUxth) \
V(ArmUxtab) \
V(ArmRbit) \
V(ArmRev) \
V(ArmUxtah) \
V(ArmAddPair) \
V(ArmSubPair) \
V(ArmMulPair) \
V(ArmLslPair) \
V(ArmLsrPair) \
V(ArmAsrPair) \
V(ArmVcmpF32) \
V(ArmVaddF32) \
V(ArmVsubF32) \
V(ArmVmulF32) \
V(ArmVmlaF32) \
V(ArmVmlsF32) \
V(ArmVdivF32) \
V(ArmVabsF32) \
V(ArmVnegF32) \
V(ArmVsqrtF32) \
V(ArmVcmpF64) \
V(ArmVaddF64) \
V(ArmVsubF64) \
V(ArmVmulF64) \
V(ArmVmlaF64) \
V(ArmVmlsF64) \
V(ArmVdivF64) \
V(ArmVmodF64) \
V(ArmVabsF64) \
V(ArmVnegF64) \
V(ArmVsqrtF64) \
V(ArmVmullLow) \
V(ArmVmullHigh) \
V(ArmVrintmF32) \
V(ArmVrintmF64) \
V(ArmVrintpF32) \
V(ArmVrintpF64) \
V(ArmVrintzF32) \
V(ArmVrintzF64) \
V(ArmVrintaF64) \
V(ArmVrintnF32) \
V(ArmVrintnF64) \
V(ArmVcvtF32F64) \
V(ArmVcvtF64F32) \
V(ArmVcvtF32S32) \
V(ArmVcvtF32U32) \
V(ArmVcvtF64S32) \
V(ArmVcvtF64U32) \
V(ArmVcvtS32F32) \
V(ArmVcvtU32F32) \
V(ArmVcvtS32F64) \
V(ArmVcvtU32F64) \
V(ArmVmovU32F32) \
V(ArmVmovF32U32) \
V(ArmVmovLowU32F64) \
V(ArmVmovLowF64U32) \
V(ArmVmovHighU32F64) \
V(ArmVmovHighF64U32) \
V(ArmVmovF64U32U32) \
V(ArmVmovU32U32F64) \
V(ArmVldrF32) \
V(ArmVstrF32) \
V(ArmVldrF64) \
V(ArmVld1F64) \
V(ArmVstrF64) \
V(ArmVst1F64) \
V(ArmVld1S128) \
V(ArmVst1S128) \
V(ArmVcnt) \
V(ArmVpadal) \
V(ArmVpaddl) \
V(ArmFloat32Max) \
V(ArmFloat64Max) \
V(ArmFloat32Min) \
V(ArmFloat64Min) \
V(ArmFloat64SilenceNaN) \
V(ArmLdrb) \
V(ArmLdrsb) \
V(ArmStrb) \
V(ArmLdrh) \
V(ArmLdrsh) \
V(ArmStrh) \
V(ArmLdr) \
V(ArmStr) \
V(ArmPush) \
V(ArmPoke) \
V(ArmPeek) \
V(ArmDmbIsh) \
V(ArmDsbIsb) \
V(ArmF64x2Splat) \
V(ArmF64x2ExtractLane) \
V(ArmF64x2ReplaceLane) \
V(ArmF64x2Abs) \
V(ArmF64x2Neg) \
V(ArmF64x2Sqrt) \
V(ArmF64x2Add) \
V(ArmF64x2Sub) \
V(ArmF64x2Mul) \
V(ArmF64x2Div) \
V(ArmF64x2Min) \
V(ArmF64x2Max) \
V(ArmF64x2Eq) \
V(ArmF64x2Ne) \
V(ArmF64x2Lt) \
V(ArmF64x2Le) \
V(ArmF64x2Pmin) \
V(ArmF64x2Pmax) \
V(ArmF64x2Ceil) \
V(ArmF64x2Floor) \
V(ArmF64x2Trunc) \
V(ArmF64x2NearestInt) \
V(ArmF64x2ConvertLowI32x4S) \
V(ArmF64x2ConvertLowI32x4U) \
V(ArmF64x2PromoteLowF32x4) \
V(ArmF32x4Splat) \
V(ArmF32x4ExtractLane) \
V(ArmF32x4ReplaceLane) \
V(ArmF32x4SConvertI32x4) \
V(ArmF32x4UConvertI32x4) \
V(ArmF32x4Abs) \
V(ArmF32x4Neg) \
V(ArmF32x4Sqrt) \
V(ArmF32x4RecipApprox) \
V(ArmF32x4RecipSqrtApprox) \
V(ArmF32x4Add) \
V(ArmF32x4Sub) \
V(ArmF32x4Mul) \
V(ArmF32x4Div) \
V(ArmF32x4Min) \
V(ArmF32x4Max) \
V(ArmF32x4Eq) \
V(ArmF32x4Ne) \
V(ArmF32x4Lt) \
V(ArmF32x4Le) \
V(ArmF32x4Pmin) \
V(ArmF32x4Pmax) \
V(ArmF32x4DemoteF64x2Zero) \
V(ArmI64x2SplatI32Pair) \
V(ArmI64x2ReplaceLaneI32Pair) \
V(ArmI64x2Abs) \
V(ArmI64x2Neg) \
V(ArmI64x2Shl) \
V(ArmI64x2ShrS) \
V(ArmI64x2Add) \
V(ArmI64x2Sub) \
V(ArmI64x2Mul) \
V(ArmI64x2ShrU) \
V(ArmI64x2BitMask) \
V(ArmI64x2Eq) \
V(ArmI64x2Ne) \
V(ArmI64x2GtS) \
V(ArmI64x2GeS) \
V(ArmI64x2SConvertI32x4Low) \
V(ArmI64x2SConvertI32x4High) \
V(ArmI64x2UConvertI32x4Low) \
V(ArmI64x2UConvertI32x4High) \
V(ArmI32x4Splat) \
V(ArmI32x4ExtractLane) \
V(ArmI32x4ReplaceLane) \
V(ArmI32x4SConvertF32x4) \
V(ArmI32x4SConvertI16x8Low) \
V(ArmI32x4SConvertI16x8High) \
V(ArmI32x4Neg) \
V(ArmI32x4Shl) \
V(ArmI32x4ShrS) \
V(ArmI32x4Add) \
V(ArmI32x4Sub) \
V(ArmI32x4Mul) \
V(ArmI32x4MinS) \
V(ArmI32x4MaxS) \
V(ArmI32x4Eq) \
V(ArmI32x4Ne) \
V(ArmI32x4GtS) \
V(ArmI32x4GeS) \
V(ArmI32x4UConvertF32x4) \
V(ArmI32x4UConvertI16x8Low) \
V(ArmI32x4UConvertI16x8High) \
V(ArmI32x4ShrU) \
V(ArmI32x4MinU) \
V(ArmI32x4MaxU) \
V(ArmI32x4GtU) \
V(ArmI32x4GeU) \
V(ArmI32x4Abs) \
V(ArmI32x4BitMask) \
V(ArmI32x4DotI16x8S) \
V(ArmI32x4TruncSatF64x2SZero) \
V(ArmI32x4TruncSatF64x2UZero) \
V(ArmI16x8Splat) \
V(ArmI16x8ExtractLaneS) \
V(ArmI16x8ReplaceLane) \
V(ArmI16x8SConvertI8x16Low) \
V(ArmI16x8SConvertI8x16High) \
V(ArmI16x8Neg) \
V(ArmI16x8Shl) \
V(ArmI16x8ShrS) \
V(ArmI16x8SConvertI32x4) \
V(ArmI16x8Add) \
V(ArmI16x8AddSatS) \
V(ArmI16x8Sub) \
V(ArmI16x8SubSatS) \
V(ArmI16x8Mul) \
V(ArmI16x8MinS) \
V(ArmI16x8MaxS) \
V(ArmI16x8Eq) \
V(ArmI16x8Ne) \
V(ArmI16x8GtS) \
V(ArmI16x8GeS) \
V(ArmI16x8ExtractLaneU) \
V(ArmI16x8UConvertI8x16Low) \
V(ArmI16x8UConvertI8x16High) \
V(ArmI16x8ShrU) \
V(ArmI16x8UConvertI32x4) \
V(ArmI16x8AddSatU) \
V(ArmI16x8SubSatU) \
V(ArmI16x8MinU) \
V(ArmI16x8MaxU) \
V(ArmI16x8GtU) \
V(ArmI16x8GeU) \
V(ArmI16x8RoundingAverageU) \
V(ArmI16x8Abs) \
V(ArmI16x8BitMask) \
V(ArmI16x8Q15MulRSatS) \
V(ArmI8x16Splat) \
V(ArmI8x16ExtractLaneS) \
V(ArmI8x16ReplaceLane) \
V(ArmI8x16Neg) \
V(ArmI8x16Shl) \
V(ArmI8x16ShrS) \
V(ArmI8x16SConvertI16x8) \
V(ArmI8x16Add) \
V(ArmI8x16AddSatS) \
V(ArmI8x16Sub) \
V(ArmI8x16SubSatS) \
V(ArmI8x16MinS) \
V(ArmI8x16MaxS) \
V(ArmI8x16Eq) \
V(ArmI8x16Ne) \
V(ArmI8x16GtS) \
V(ArmI8x16GeS) \
V(ArmI8x16ExtractLaneU) \
V(ArmI8x16ShrU) \
V(ArmI8x16UConvertI16x8) \
V(ArmI8x16AddSatU) \
V(ArmI8x16SubSatU) \
V(ArmI8x16MinU) \
V(ArmI8x16MaxU) \
V(ArmI8x16GtU) \
V(ArmI8x16GeU) \
V(ArmI8x16RoundingAverageU) \
V(ArmI8x16Abs) \
V(ArmI8x16BitMask) \
V(ArmS128Const) \
V(ArmS128Zero) \
V(ArmS128AllOnes) \
V(ArmS128Dup) \
V(ArmS128And) \
V(ArmS128Or) \
V(ArmS128Xor) \
V(ArmS128Not) \
V(ArmS128Select) \
V(ArmS128AndNot) \
V(ArmS32x4ZipLeft) \
V(ArmS32x4ZipRight) \
V(ArmS32x4UnzipLeft) \
V(ArmS32x4UnzipRight) \
V(ArmS32x4TransposeLeft) \
V(ArmS32x4TransposeRight) \
V(ArmS32x4Shuffle) \
V(ArmS16x8ZipLeft) \
V(ArmS16x8ZipRight) \
V(ArmS16x8UnzipLeft) \
V(ArmS16x8UnzipRight) \
V(ArmS16x8TransposeLeft) \
V(ArmS16x8TransposeRight) \
V(ArmS8x16ZipLeft) \
V(ArmS8x16ZipRight) \
V(ArmS8x16UnzipLeft) \
V(ArmS8x16UnzipRight) \
V(ArmS8x16TransposeLeft) \
V(ArmS8x16TransposeRight) \
V(ArmS8x16Concat) \
V(ArmI8x16Swizzle) \
V(ArmI8x16Shuffle) \
V(ArmS32x2Reverse) \
V(ArmS16x4Reverse) \
V(ArmS16x2Reverse) \
V(ArmS8x8Reverse) \
V(ArmS8x4Reverse) \
V(ArmS8x2Reverse) \
V(ArmI64x2AllTrue) \
V(ArmI32x4AllTrue) \
V(ArmI16x8AllTrue) \
V(ArmV128AnyTrue) \
V(ArmI8x16AllTrue) \
V(ArmS128Load8Splat) \
V(ArmS128Load16Splat) \
V(ArmS128Load32Splat) \
V(ArmS128Load64Splat) \
V(ArmS128Load8x8S) \
V(ArmS128Load8x8U) \
V(ArmS128Load16x4S) \
V(ArmS128Load16x4U) \
V(ArmS128Load32x2S) \
V(ArmS128Load32x2U) \
V(ArmS128Load32Zero) \
V(ArmS128Load64Zero) \
V(ArmS128LoadLaneLow) \
V(ArmS128LoadLaneHigh) \
V(ArmS128StoreLaneLow) \
V(ArmS128StoreLaneHigh) \
V(ArmWord32AtomicPairLoad) \
V(ArmWord32AtomicPairStore) \
V(ArmWord32AtomicPairAdd) \
V(ArmWord32AtomicPairSub) \
V(ArmWord32AtomicPairAnd) \
V(ArmWord32AtomicPairOr) \
V(ArmWord32AtomicPairXor) \
V(ArmWord32AtomicPairExchange) \
V(ArmWord32AtomicPairCompareExchange)
// Addressing modes represent the "shape" of inputs to an instruction.


@@ -12,358 +12,354 @@ namespace compiler {
// IA32-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
+#define TARGET_ARCH_OPCODE_LIST(V) \
V(IA32Add) \
V(IA32And) \
V(IA32Cmp) \
V(IA32Cmp16) \
V(IA32Cmp8) \
V(IA32Test) \
V(IA32Test16) \
V(IA32Test8) \
V(IA32Or) \
V(IA32Xor) \
V(IA32Sub) \
V(IA32Imul) \
V(IA32ImulHigh) \
V(IA32UmulHigh) \
V(IA32Idiv) \
V(IA32Udiv) \
V(IA32Not) \
V(IA32Neg) \
V(IA32Shl) \
V(IA32Shr) \
V(IA32Sar) \
V(IA32AddPair) \
V(IA32SubPair) \
V(IA32MulPair) \
V(IA32ShlPair) \
V(IA32ShrPair) \
V(IA32SarPair) \
V(IA32Rol) \
V(IA32Ror) \
V(IA32Lzcnt) \
V(IA32Tzcnt) \
V(IA32Popcnt) \
V(IA32Bswap) \
V(IA32MFence) \
V(IA32LFence) \
V(IA32Float32Cmp) \
V(IA32Float32Sqrt) \
V(IA32Float32Round) \
V(IA32Float64Cmp) \
V(IA32Float64Mod) \
V(IA32Float32Max) \
V(IA32Float64Max) \
V(IA32Float32Min) \
V(IA32Float64Min) \
V(IA32Float64Sqrt) \
V(IA32Float64Round) \
V(IA32Float32ToFloat64) \
V(IA32Float64ToFloat32) \
V(IA32Float32ToInt32) \
V(IA32Float32ToUint32) \
V(IA32Float64ToInt32) \
V(IA32Float64ToUint32) \
V(SSEInt32ToFloat32) \
V(IA32Uint32ToFloat32) \
V(SSEInt32ToFloat64) \
V(IA32Uint32ToFloat64) \
V(IA32Float64ExtractLowWord32) \
V(IA32Float64ExtractHighWord32) \
V(IA32Float64InsertLowWord32) \
V(IA32Float64InsertHighWord32) \
V(IA32Float64LoadLowWord32) \
V(IA32Float64SilenceNaN) \
V(Float32Add) \
V(Float32Sub) \
V(Float64Add) \
V(Float64Sub) \
V(Float32Mul) \
V(Float32Div) \
V(Float64Mul) \
V(Float64Div) \
V(Float64Abs) \
V(Float64Neg) \
V(Float32Abs) \
V(Float32Neg) \
V(IA32Movsxbl) \
V(IA32Movzxbl) \
V(IA32Movb) \
V(IA32Movsxwl) \
V(IA32Movzxwl) \
V(IA32Movw) \
V(IA32Movl) \
V(IA32Movss) \
V(IA32Movsd) \
V(IA32Movdqu) \
V(IA32Movlps) \
V(IA32Movhps) \
V(IA32BitcastFI) \
V(IA32BitcastIF) \
V(IA32Lea) \
V(IA32Push) \
V(IA32Poke) \
V(IA32Peek) \
V(IA32F64x2Splat) \
V(F64x2ExtractLane) \
V(F64x2ReplaceLane) \
V(IA32F64x2Sqrt) \
V(IA32F64x2Add) \
V(IA32F64x2Sub) \
V(IA32F64x2Mul) \
V(IA32F64x2Div) \
V(IA32F64x2Min) \
V(IA32F64x2Max) \
V(IA32F64x2Eq) \
V(IA32F64x2Ne) \
V(IA32F64x2Lt) \
V(IA32F64x2Le) \
V(IA32F64x2Pmin) \
V(IA32F64x2Pmax) \
V(IA32F64x2Round) \
V(IA32F64x2ConvertLowI32x4S) \
V(IA32F64x2ConvertLowI32x4U) \
V(IA32F64x2PromoteLowF32x4) \
V(IA32I64x2SplatI32Pair) \
V(IA32I64x2ReplaceLaneI32Pair) \
V(IA32I64x2Abs) \
V(IA32I64x2Neg) \
V(IA32I64x2Shl) \
V(IA32I64x2ShrS) \
V(IA32I64x2Add) \
V(IA32I64x2Sub) \
V(IA32I64x2Mul) \
V(IA32I64x2ShrU) \
V(IA32I64x2BitMask) \
V(IA32I64x2Eq) \
V(IA32I64x2Ne) \
V(IA32I64x2GtS) \
V(IA32I64x2GeS) \
V(IA32I64x2ExtMulLowI32x4S) \
V(IA32I64x2ExtMulHighI32x4S) \
V(IA32I64x2ExtMulLowI32x4U) \
V(IA32I64x2ExtMulHighI32x4U) \
V(IA32I64x2SConvertI32x4Low) \
V(IA32I64x2SConvertI32x4High) \
V(IA32I64x2UConvertI32x4Low) \
V(IA32I64x2UConvertI32x4High) \
V(IA32F32x4Splat) \
V(IA32F32x4ExtractLane) \
V(IA32Insertps) \
V(IA32F32x4SConvertI32x4) \
V(IA32F32x4UConvertI32x4) \
V(IA32F32x4Sqrt) \
V(IA32F32x4RecipApprox) \
V(IA32F32x4RecipSqrtApprox) \
V(IA32F32x4Add) \
V(IA32F32x4Sub) \
V(IA32F32x4Mul) \
V(IA32F32x4Div) \
V(IA32F32x4Min) \
V(IA32F32x4Max) \
V(IA32F32x4Eq) \
V(IA32F32x4Ne) \
V(IA32F32x4Lt) \
V(IA32F32x4Le) \
V(IA32F32x4Pmin) \
V(IA32F32x4Pmax) \
V(IA32F32x4Round) \
V(IA32F32x4DemoteF64x2Zero) \
V(IA32I32x4Splat) \
V(IA32I32x4ExtractLane) \
V(IA32I32x4SConvertF32x4) \
V(IA32I32x4SConvertI16x8Low) \
V(IA32I32x4SConvertI16x8High) \
V(IA32I32x4Neg) \
V(IA32I32x4Shl) \
V(IA32I32x4ShrS) \
V(IA32I32x4Add) \
V(IA32I32x4Sub) \
V(IA32I32x4Mul) \
V(IA32I32x4MinS) \
V(IA32I32x4MaxS) \
V(IA32I32x4Eq) \
V(IA32I32x4Ne) \
V(IA32I32x4GtS) \
V(IA32I32x4GeS) \
V(SSEI32x4UConvertF32x4) \
V(AVXI32x4UConvertF32x4) \
V(IA32I32x4UConvertI16x8Low) \
V(IA32I32x4UConvertI16x8High) \
V(IA32I32x4ShrU) \
V(IA32I32x4MinU) \
V(IA32I32x4MaxU) \
V(SSEI32x4GtU) \
V(AVXI32x4GtU) \
V(SSEI32x4GeU) \
V(AVXI32x4GeU) \
V(IA32I32x4Abs) \
V(IA32I32x4BitMask) \
V(IA32I32x4DotI16x8S) \
V(IA32I32x4ExtMulLowI16x8S) \
V(IA32I32x4ExtMulHighI16x8S) \
V(IA32I32x4ExtMulLowI16x8U) \
V(IA32I32x4ExtMulHighI16x8U) \
V(IA32I32x4ExtAddPairwiseI16x8S) \
V(IA32I32x4ExtAddPairwiseI16x8U) \
V(IA32I32x4TruncSatF64x2SZero) \
V(IA32I32x4TruncSatF64x2UZero) \
V(IA32I16x8Splat) \
V(IA32I16x8ExtractLaneS) \
V(IA32I16x8SConvertI8x16Low) \
V(IA32I16x8SConvertI8x16High) \
V(IA32I16x8Neg) \
V(IA32I16x8Shl) \
V(IA32I16x8ShrS) \
V(IA32I16x8SConvertI32x4) \
V(IA32I16x8Add) \
V(IA32I16x8AddSatS) \
V(IA32I16x8Sub) \
V(IA32I16x8SubSatS) \
V(IA32I16x8Mul) \
V(IA32I16x8MinS) \
V(IA32I16x8MaxS) \
V(IA32I16x8Eq) \
V(SSEI16x8Ne) \
V(AVXI16x8Ne) \
V(IA32I16x8GtS) \
V(SSEI16x8GeS) \
V(AVXI16x8GeS) \
V(IA32I16x8UConvertI8x16Low) \
V(IA32I16x8UConvertI8x16High) \
V(IA32I16x8ShrU) \
V(IA32I16x8UConvertI32x4) \
V(IA32I16x8AddSatU) \
V(IA32I16x8SubSatU) \
V(IA32I16x8MinU) \
V(IA32I16x8MaxU) \
V(SSEI16x8GtU) \
V(AVXI16x8GtU) \
V(SSEI16x8GeU) \
V(AVXI16x8GeU) \
V(IA32I16x8RoundingAverageU) \
V(IA32I16x8Abs) \
V(IA32I16x8BitMask) \
V(IA32I16x8ExtMulLowI8x16S) \
V(IA32I16x8ExtMulHighI8x16S) \
V(IA32I16x8ExtMulLowI8x16U) \
V(IA32I16x8ExtMulHighI8x16U) \
V(IA32I16x8ExtAddPairwiseI8x16S) \
V(IA32I16x8ExtAddPairwiseI8x16U) \
V(IA32I16x8Q15MulRSatS) \
V(IA32I8x16Splat) \
V(IA32I8x16ExtractLaneS) \
V(IA32Pinsrb) \
V(IA32Pinsrw) \
V(IA32Pinsrd) \
V(IA32Pextrb) \
V(IA32Pextrw) \
V(IA32S128Store32Lane) \
V(IA32I8x16SConvertI16x8) \
V(IA32I8x16Neg) \
V(IA32I8x16Shl) \
V(IA32I8x16ShrS) \
V(IA32I8x16Add) \
V(IA32I8x16AddSatS) \
V(IA32I8x16Sub) \
V(IA32I8x16SubSatS) \
V(IA32I8x16MinS) \
V(IA32I8x16MaxS) \
V(IA32I8x16Eq) \
V(SSEI8x16Ne) \
V(AVXI8x16Ne) \
V(IA32I8x16GtS) \
V(SSEI8x16GeS) \
V(AVXI8x16GeS) \
V(IA32I8x16UConvertI16x8) \
V(IA32I8x16AddSatU) \
V(IA32I8x16SubSatU) \
V(IA32I8x16ShrU) \
V(IA32I8x16MinU) \
V(IA32I8x16MaxU) \
V(SSEI8x16GtU) \
V(AVXI8x16GtU) \
V(SSEI8x16GeU) \
V(AVXI8x16GeU) \
V(IA32I8x16RoundingAverageU) \
V(IA32I8x16Abs) \
V(IA32I8x16BitMask) \
V(IA32I8x16Popcnt) \
V(IA32S128Const) \
V(IA32S128Zero) \
V(IA32S128AllOnes) \
V(IA32S128Not) \
V(IA32S128And) \
V(IA32S128Or) \
V(IA32S128Xor) \
V(IA32S128Select) \
V(IA32S128AndNot) \
V(IA32I8x16Swizzle) \
V(IA32I8x16Shuffle) \
V(IA32S128Load8Splat) \
V(IA32S128Load16Splat) \
V(IA32S128Load32Splat) \
V(IA32S128Load64Splat) \
V(IA32S128Load8x8S) \
V(IA32S128Load8x8U) \
V(IA32S128Load16x4S) \
V(IA32S128Load16x4U) \
V(IA32S128Load32x2S) \
V(IA32S128Load32x2U) \
V(IA32S32x4Rotate) \
V(IA32S32x4Swizzle) \
V(IA32S32x4Shuffle) \
V(IA32S16x8Blend) \
V(IA32S16x8HalfShuffle1) \
V(IA32S16x8HalfShuffle2) \
V(IA32S8x16Alignr) \
V(IA32S16x8Dup) \
V(IA32S8x16Dup) \
V(SSES16x8UnzipHigh) \
V(AVXS16x8UnzipHigh) \
V(SSES16x8UnzipLow) \
V(AVXS16x8UnzipLow) \
V(SSES8x16UnzipHigh) \
V(AVXS8x16UnzipHigh) \
V(SSES8x16UnzipLow) \
V(AVXS8x16UnzipLow) \
V(IA32S64x2UnpackHigh) \
V(IA32S32x4UnpackHigh) \
V(IA32S16x8UnpackHigh) \
V(IA32S8x16UnpackHigh) \
V(IA32S64x2UnpackLow) \
V(IA32S32x4UnpackLow) \
V(IA32S16x8UnpackLow) \
V(IA32S8x16UnpackLow) \
V(SSES8x16TransposeLow) \
V(AVXS8x16TransposeLow) \
V(SSES8x16TransposeHigh) \
V(AVXS8x16TransposeHigh) \
V(SSES8x8Reverse) \
V(AVXS8x8Reverse) \
V(SSES8x4Reverse) \
V(AVXS8x4Reverse) \
V(SSES8x2Reverse) \
V(AVXS8x2Reverse) \
V(IA32S128AnyTrue) \
V(IA32I64x2AllTrue) \
V(IA32I32x4AllTrue) \
V(IA32I16x8AllTrue) \
V(IA32I8x16AllTrue) \
V(IA32Word32AtomicPairLoad) \
V(IA32Word32ReleasePairStore) \
V(IA32Word32SeqCstPairStore) \
V(IA32Word32AtomicPairAdd) \
V(IA32Word32AtomicPairSub) \
V(IA32Word32AtomicPairAnd) \
V(IA32Word32AtomicPairOr) \
V(IA32Word32AtomicPairXor) \
V(IA32Word32AtomicPairExchange) \
V(IA32Word32AtomicPairCompareExchange)
// Addressing modes represent the "shape" of inputs to an instruction.


@@ -315,6 +315,7 @@ using AccessModeField = base::BitField<MemoryAccessMode, 30, 2>;
// scary. {HasMemoryAccessMode} does not include these instructions, so they can
// be easily found by guarding encoding.
inline bool HasMemoryAccessMode(ArchOpcode opcode) {
+#if defined(TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST)
  switch (opcode) {
#define CASE(Name) \
  case k##Name:    \
@@ -324,6 +325,9 @@ inline bool HasMemoryAccessMode(ArchOpcode opcode) {
    default:
      return false;
  }
+#else
+  return false;
+#endif
}
using DeoptImmedArgsCountField = base::BitField<int, 22, 2>;
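
For contrast, plugging a non-empty definition into the sketch near the top
of this page (or into a real instruction-codes-$arch.h, for a port that
keeps trapping memory instructions) flips HasMemoryAccessMode() back to the
switch-based path; the opcode names below are illustrative, not taken from
this CL:

  // Illustrative only: each listed opcode now reports true from
  // HasMemoryAccessMode(), because the X-macro expands to a case label.
  #define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
    V(SomeArchLoadWord)                                       \
    V(SomeArchStoreWord)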


@@ -12,369 +12,365 @@ namespace compiler {
// LOONG64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
+#define TARGET_ARCH_OPCODE_LIST(V) \
V(Loong64Add_d) \
V(Loong64Add_w) \
V(Loong64AddOvf_d) \
V(Loong64Sub_d) \
V(Loong64Sub_w) \
V(Loong64SubOvf_d) \
V(Loong64Mul_d) \
V(Loong64MulOvf_w) \
V(Loong64Mulh_d) \
V(Loong64Mulh_w) \
V(Loong64Mulh_wu) \
V(Loong64Mul_w) \
V(Loong64Div_d) \
V(Loong64Div_w) \
V(Loong64Div_du) \
V(Loong64Div_wu) \
V(Loong64Mod_d) \
V(Loong64Mod_w) \
V(Loong64Mod_du) \
V(Loong64Mod_wu) \
V(Loong64And) \
V(Loong64And32) \
V(Loong64Or) \
V(Loong64Or32) \
V(Loong64Nor) \
V(Loong64Nor32) \
V(Loong64Xor) \
V(Loong64Xor32) \
V(Loong64Alsl_d) \
V(Loong64Alsl_w) \
V(Loong64Sll_d) \
V(Loong64Sll_w) \
V(Loong64Srl_d) \
V(Loong64Srl_w) \
V(Loong64Sra_d) \
V(Loong64Sra_w) \
V(Loong64Rotr_d) \
V(Loong64Rotr_w) \
V(Loong64Bstrpick_d) \
V(Loong64Bstrpick_w) \
V(Loong64Bstrins_d) \
V(Loong64Bstrins_w) \
V(Loong64ByteSwap64) \
V(Loong64ByteSwap32) \
V(Loong64Clz_d) \
V(Loong64Clz_w) \
V(Loong64Mov) \
V(Loong64Tst) \
V(Loong64Cmp) \
V(Loong64Float32Cmp) \
V(Loong64Float32Add) \
V(Loong64Float32Sub) \
V(Loong64Float32Mul) \
V(Loong64Float32Div) \
V(Loong64Float32Abs) \
V(Loong64Float32Neg) \
V(Loong64Float32Sqrt) \
V(Loong64Float32Max) \
V(Loong64Float32Min) \
V(Loong64Float32ToFloat64) \
V(Loong64Float32RoundDown) \
V(Loong64Float32RoundUp) \
V(Loong64Float32RoundTruncate) \
V(Loong64Float32RoundTiesEven) \
V(Loong64Float32ToInt32) \
V(Loong64Float32ToInt64) \
V(Loong64Float32ToUint32) \
V(Loong64Float32ToUint64) \
V(Loong64Float64Cmp) \
V(Loong64Float64Add) \
V(Loong64Float64Sub) \
V(Loong64Float64Mul) \
V(Loong64Float64Div) \
V(Loong64Float64Mod) \
V(Loong64Float64Abs) \
V(Loong64Float64Neg) \
V(Loong64Float64Sqrt) \
V(Loong64Float64Max) \
V(Loong64Float64Min) \
V(Loong64Float64ToFloat32) \
V(Loong64Float64RoundDown) \
V(Loong64Float64RoundUp) \
V(Loong64Float64RoundTruncate) \
V(Loong64Float64RoundTiesEven) \
V(Loong64Float64ToInt32) \
V(Loong64Float64ToInt64) \
V(Loong64Float64ToUint32) \
V(Loong64Float64ToUint64) \
V(Loong64Int32ToFloat32) \
V(Loong64Int32ToFloat64) \
V(Loong64Int64ToFloat32) \
V(Loong64Int64ToFloat64) \
V(Loong64Uint32ToFloat32) \
V(Loong64Uint32ToFloat64) \
V(Loong64Uint64ToFloat32) \
V(Loong64Uint64ToFloat64) \
V(Loong64Float64ExtractLowWord32) \
V(Loong64Float64ExtractHighWord32) \
V(Loong64Float64InsertLowWord32) \
V(Loong64Float64InsertHighWord32) \
V(Loong64BitcastDL) \
V(Loong64BitcastLD) \
V(Loong64Float64SilenceNaN) \
V(Loong64Ld_b) \
V(Loong64Ld_bu) \
V(Loong64St_b) \
V(Loong64Ld_h) \
V(Loong64Ld_hu) \
V(Loong64St_h) \
V(Loong64Ld_w) \
V(Loong64Ld_wu) \
V(Loong64St_w) \
V(Loong64Ld_d) \
V(Loong64St_d) \
V(Loong64Fld_s) \
V(Loong64Fst_s) \
V(Loong64Fld_d) \
V(Loong64Fst_d) \
V(Loong64Push) \
V(Loong64Peek) \
V(Loong64Poke) \
V(Loong64StackClaim) \
V(Loong64Ext_w_b) \
V(Loong64Ext_w_h) \
V(Loong64Dbar) \
V(Loong64S128Const) \
V(Loong64S128Zero) \
V(Loong64S128AllOnes) \
V(Loong64I32x4Splat) \
V(Loong64I32x4ExtractLane) \
V(Loong64I32x4ReplaceLane) \
V(Loong64I32x4Add) \
V(Loong64I32x4Sub) \
V(Loong64F64x2Abs) \
V(Loong64F64x2Neg) \
V(Loong64F32x4Splat) \
V(Loong64F32x4ExtractLane) \
V(Loong64F32x4ReplaceLane) \
V(Loong64F32x4SConvertI32x4) \
V(Loong64F32x4UConvertI32x4) \
V(Loong64I32x4Mul) \
V(Loong64I32x4MaxS) \
V(Loong64I32x4MinS) \
V(Loong64I32x4Eq) \
V(Loong64I32x4Ne) \
V(Loong64I32x4Shl) \
V(Loong64I32x4ShrS) \
V(Loong64I32x4ShrU) \
V(Loong64I32x4MaxU) \
V(Loong64I32x4MinU) \
V(Loong64F64x2Sqrt) \
V(Loong64F64x2Add) \
V(Loong64F64x2Sub) \
V(Loong64F64x2Mul) \
V(Loong64F64x2Div) \
V(Loong64F64x2Min) \
V(Loong64F64x2Max) \
V(Loong64F64x2Eq) \
V(Loong64F64x2Ne) \
V(Loong64F64x2Lt) \
V(Loong64F64x2Le) \
V(Loong64F64x2Splat) \
V(Loong64F64x2ExtractLane) \
V(Loong64F64x2ReplaceLane) \
V(Loong64F64x2Pmin) \
V(Loong64F64x2Pmax) \
V(Loong64F64x2Ceil) \
V(Loong64F64x2Floor) \
V(Loong64F64x2Trunc) \
V(Loong64F64x2NearestInt) \
V(Loong64F64x2ConvertLowI32x4S) \
V(Loong64F64x2ConvertLowI32x4U) \
V(Loong64F64x2PromoteLowF32x4) \
V(Loong64I64x2Splat) \
V(Loong64I64x2ExtractLane) \
V(Loong64I64x2ReplaceLane) \
V(Loong64I64x2Add) \
V(Loong64I64x2Sub) \
V(Loong64I64x2Mul) \
V(Loong64I64x2Neg) \
V(Loong64I64x2Shl) \
V(Loong64I64x2ShrS) \
V(Loong64I64x2ShrU) \
V(Loong64I64x2BitMask) \
V(Loong64I64x2Eq) \
V(Loong64I64x2Ne) \
V(Loong64I64x2GtS) \
V(Loong64I64x2GeS) \
V(Loong64I64x2Abs) \
V(Loong64I64x2SConvertI32x4Low) \
V(Loong64I64x2SConvertI32x4High) \
V(Loong64I64x2UConvertI32x4Low) \
V(Loong64I64x2UConvertI32x4High) \
V(Loong64ExtMulLow) \
V(Loong64ExtMulHigh) \
V(Loong64ExtAddPairwise) \
V(Loong64F32x4Abs) \
V(Loong64F32x4Neg) \
V(Loong64F32x4Sqrt) \
V(Loong64F32x4RecipApprox) \
V(Loong64F32x4RecipSqrtApprox) \
V(Loong64F32x4Add) \
V(Loong64F32x4Sub) \
V(Loong64F32x4Mul) \
V(Loong64F32x4Div) \
V(Loong64F32x4Max) \
V(Loong64F32x4Min) \
V(Loong64F32x4Eq) \
V(Loong64F32x4Ne) \
V(Loong64F32x4Lt) \
V(Loong64F32x4Le) \
V(Loong64F32x4Pmin) \
V(Loong64F32x4Pmax) \
V(Loong64F32x4Ceil) \
V(Loong64F32x4Floor) \
V(Loong64F32x4Trunc) \
V(Loong64F32x4NearestInt) \
V(Loong64F32x4DemoteF64x2Zero) \
V(Loong64I32x4SConvertF32x4) \
V(Loong64I32x4UConvertF32x4) \
V(Loong64I32x4Neg) \
V(Loong64I32x4GtS) \
V(Loong64I32x4GeS) \
V(Loong64I32x4GtU) \
V(Loong64I32x4GeU) \
V(Loong64I32x4Abs) \
V(Loong64I32x4BitMask) \
V(Loong64I32x4DotI16x8S) \
V(Loong64I32x4TruncSatF64x2SZero) \
V(Loong64I32x4TruncSatF64x2UZero) \
V(Loong64I16x8Splat) \
V(Loong64I16x8ExtractLaneU) \
V(Loong64I16x8ExtractLaneS) \
V(Loong64I16x8ReplaceLane) \
V(Loong64I16x8Neg) \
V(Loong64I16x8Shl) \
V(Loong64I16x8ShrS) \
V(Loong64I16x8ShrU) \
V(Loong64I16x8Add) \
V(Loong64I16x8AddSatS) \
V(Loong64I16x8Sub) \
V(Loong64I16x8SubSatS) \
V(Loong64I16x8Mul) \
V(Loong64I16x8MaxS) \
V(Loong64I16x8MinS) \
V(Loong64I16x8Eq) \
V(Loong64I16x8Ne) \
V(Loong64I16x8GtS) \
V(Loong64I16x8GeS) \
V(Loong64I16x8AddSatU) \
V(Loong64I16x8SubSatU) \
V(Loong64I16x8MaxU) \
V(Loong64I16x8MinU) \
V(Loong64I16x8GtU) \
V(Loong64I16x8GeU) \
V(Loong64I16x8RoundingAverageU) \
V(Loong64I16x8Abs) \
V(Loong64I16x8BitMask) \
V(Loong64I16x8Q15MulRSatS) \
V(Loong64I8x16Splat) \
V(Loong64I8x16ExtractLaneU) \
V(Loong64I8x16ExtractLaneS) \
V(Loong64I8x16ReplaceLane) \
V(Loong64I8x16Neg) \
V(Loong64I8x16Shl) \
V(Loong64I8x16ShrS) \
V(Loong64I8x16Add) \
V(Loong64I8x16AddSatS) \
V(Loong64I8x16Sub) \
V(Loong64I8x16SubSatS) \
V(Loong64I8x16MaxS) \
V(Loong64I8x16MinS) \
V(Loong64I8x16Eq) \
V(Loong64I8x16Ne) \
V(Loong64I8x16GtS) \
V(Loong64I8x16GeS) \
V(Loong64I8x16ShrU) \
V(Loong64I8x16AddSatU) \
V(Loong64I8x16SubSatU) \
V(Loong64I8x16MaxU) \
V(Loong64I8x16MinU) \
V(Loong64I8x16GtU) \
V(Loong64I8x16GeU) \
V(Loong64I8x16RoundingAverageU) \
V(Loong64I8x16Abs) \
V(Loong64I8x16Popcnt) \
V(Loong64I8x16BitMask) \
V(Loong64S128And) \
V(Loong64S128Or) \
V(Loong64S128Xor) \
V(Loong64S128Not) \
V(Loong64S128Select) \
V(Loong64S128AndNot) \
V(Loong64I64x2AllTrue) \
V(Loong64I32x4AllTrue) \
V(Loong64I16x8AllTrue) \
V(Loong64I8x16AllTrue) \
V(Loong64V128AnyTrue) \
V(Loong64S32x4InterleaveRight) \
V(Loong64S32x4InterleaveLeft) \
V(Loong64S32x4PackEven) \
V(Loong64S32x4PackOdd) \
V(Loong64S32x4InterleaveEven) \
V(Loong64S32x4InterleaveOdd) \
V(Loong64S32x4Shuffle) \
V(Loong64S16x8InterleaveRight) \
V(Loong64S16x8InterleaveLeft) \
V(Loong64S16x8PackEven) \
V(Loong64S16x8PackOdd) \
V(Loong64S16x8InterleaveEven) \
V(Loong64S16x8InterleaveOdd) \
V(Loong64S16x4Reverse) \
V(Loong64S16x2Reverse) \
V(Loong64S8x16InterleaveRight) \
V(Loong64S8x16InterleaveLeft) \
V(Loong64S8x16PackEven) \
V(Loong64S8x16PackOdd) \
V(Loong64S8x16InterleaveEven) \
V(Loong64S8x16InterleaveOdd) \
V(Loong64I8x16Shuffle) \
V(Loong64I8x16Swizzle) \
V(Loong64S8x16Concat) \
V(Loong64S8x8Reverse) \
V(Loong64S8x4Reverse) \
V(Loong64S8x2Reverse) \
V(Loong64S128LoadSplat) \
V(Loong64S128Load8x8S) \
V(Loong64S128Load8x8U) \
V(Loong64S128Load16x4S) \
V(Loong64S128Load16x4U) \
V(Loong64S128Load32x2S) \
V(Loong64S128Load32x2U) \
V(Loong64S128Load32Zero) \
V(Loong64S128Load64Zero) \
V(Loong64LoadLane) \
V(Loong64StoreLane) \
V(Loong64I32x4SConvertI16x8Low) \
V(Loong64I32x4SConvertI16x8High) \
V(Loong64I32x4UConvertI16x8Low) \
V(Loong64I32x4UConvertI16x8High) \
V(Loong64I16x8SConvertI8x16Low) \
V(Loong64I16x8SConvertI8x16High) \
V(Loong64I16x8SConvertI32x4) \
V(Loong64I16x8UConvertI32x4) \
V(Loong64I16x8UConvertI8x16Low) \
V(Loong64I16x8UConvertI8x16High) \
V(Loong64I8x16SConvertI16x8) \
V(Loong64I8x16UConvertI16x8) \
V(Loong64StoreCompressTagged) \
V(Loong64Word64AtomicLoadUint32) \
V(Loong64Word64AtomicLoadUint64) \
V(Loong64Word64AtomicStoreWord64) \
V(Loong64Word64AtomicAddUint64) \
V(Loong64Word64AtomicSubUint64) \
V(Loong64Word64AtomicAndUint64) \
V(Loong64Word64AtomicOrUint64) \
V(Loong64Word64AtomicXorUint64) \
V(Loong64Word64AtomicExchangeUint64) \
V(Loong64Word64AtomicCompareExchangeUint64)
// Addressing modes represent the "shape" of inputs to an instruction.


@@ -12,373 +12,369 @@ namespace compiler {
// MIPS-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
+#define TARGET_ARCH_OPCODE_LIST(V) \
V(MipsAdd) \
V(MipsAddOvf) \
V(MipsSub) \
V(MipsSubOvf) \
V(MipsMul) \
V(MipsMulOvf) \
V(MipsMulHigh) \
V(MipsMulHighU) \
V(MipsDiv) \
V(MipsDivU) \
V(MipsMod) \
V(MipsModU) \
V(MipsAnd) \
V(MipsOr) \
V(MipsNor) \
V(MipsXor) \
V(MipsClz) \
V(MipsCtz) \
V(MipsPopcnt) \
V(MipsLsa) \
V(MipsShl) \
V(MipsShr) \
V(MipsSar) \
V(MipsShlPair) \
V(MipsShrPair) \
V(MipsSarPair) \
V(MipsExt) \
V(MipsIns) \
V(MipsRor) \
V(MipsMov) \
V(MipsTst) \
V(MipsCmp) \
V(MipsCmpS) \
V(MipsAddS) \
V(MipsSubS) \
V(MipsMulS) \
V(MipsDivS) \
V(MipsAbsS) \
V(MipsSqrtS) \
V(MipsMaxS) \
V(MipsMinS) \
V(MipsCmpD) \
V(MipsAddD) \
V(MipsSubD) \
V(MipsMulD) \
V(MipsDivD) \
V(MipsModD) \
V(MipsAbsD) \
V(MipsSqrtD) \
V(MipsMaxD) \
V(MipsMinD) \
V(MipsNegS) \
V(MipsNegD) \
V(MipsAddPair) \
V(MipsSubPair) \
V(MipsMulPair) \
V(MipsMaddS) \
V(MipsMaddD) \
V(MipsMsubS) \
V(MipsMsubD) \
V(MipsFloat32RoundDown) \
V(MipsFloat32RoundTruncate) \
V(MipsFloat32RoundUp) \
V(MipsFloat32RoundTiesEven) \
V(MipsFloat64RoundDown) \
V(MipsFloat64RoundTruncate) \
V(MipsFloat64RoundUp) \
V(MipsFloat64RoundTiesEven) \
V(MipsCvtSD) \
V(MipsCvtDS) \
V(MipsTruncWD) \
V(MipsRoundWD) \
V(MipsFloorWD) \
V(MipsCeilWD) \
V(MipsTruncWS) \
V(MipsRoundWS) \
V(MipsFloorWS) \
V(MipsCeilWS) \
V(MipsTruncUwD) \
V(MipsTruncUwS) \
V(MipsCvtDW) \
V(MipsCvtDUw) \
V(MipsCvtSW) \
V(MipsCvtSUw) \
V(MipsLb) \
V(MipsLbu) \
V(MipsSb) \
V(MipsLh) \
V(MipsUlh) \
V(MipsLhu) \
V(MipsUlhu) \
V(MipsSh) \
V(MipsUsh) \
V(MipsLw) \
V(MipsUlw) \
V(MipsSw) \
V(MipsUsw) \
V(MipsLwc1) \
V(MipsUlwc1) \
V(MipsSwc1) \
V(MipsUswc1) \
V(MipsLdc1) \
V(MipsUldc1) \
V(MipsSdc1) \
V(MipsUsdc1) \
V(MipsFloat64ExtractLowWord32) \
V(MipsFloat64ExtractHighWord32) \
V(MipsFloat64InsertLowWord32) \
V(MipsFloat64InsertHighWord32) \
V(MipsFloat64SilenceNaN) \
V(MipsFloat32Max) \
V(MipsFloat64Max) \
V(MipsFloat32Min) \
V(MipsFloat64Min) \
V(MipsPush) \
V(MipsPeek) \
V(MipsStoreToStackSlot) \
V(MipsByteSwap32) \
V(MipsStackClaim) \
V(MipsSeb) \
V(MipsSeh) \
V(MipsSync) \
V(MipsS128Zero) \
V(MipsI32x4Splat) \
V(MipsI32x4ExtractLane) \
V(MipsI32x4ReplaceLane) \
V(MipsI32x4Add) \
V(MipsI32x4Sub) \
V(MipsF64x2Abs) \
V(MipsF64x2Neg) \
V(MipsF64x2Sqrt) \
V(MipsF64x2Add) \
V(MipsF64x2Sub) \
V(MipsF64x2Mul) \
V(MipsF64x2Div) \
V(MipsF64x2Min) \
V(MipsF64x2Max) \
V(MipsF64x2Eq) \
V(MipsF64x2Ne) \
V(MipsF64x2Lt) \
V(MipsF64x2Le) \
V(MipsF64x2Pmin) \
V(MipsF64x2Pmax) \
V(MipsF64x2Ceil) \
V(MipsF64x2Floor) \
V(MipsF64x2Trunc) \
V(MipsF64x2NearestInt) \
V(MipsF64x2ConvertLowI32x4S) \
V(MipsF64x2ConvertLowI32x4U) \
V(MipsF64x2PromoteLowF32x4) \
V(MipsI64x2Add) \
V(MipsI64x2Sub) \
V(MipsI64x2Mul) \
V(MipsI64x2Neg) \
V(MipsI64x2Shl) \
V(MipsI64x2ShrS) \
V(MipsI64x2ShrU) \
V(MipsI64x2BitMask) \
V(MipsI64x2Eq) \
V(MipsI64x2Ne) \
V(MipsI64x2GtS) \
V(MipsI64x2GeS) \
V(MipsI64x2Abs) \
V(MipsI64x2SConvertI32x4Low) \
V(MipsI64x2SConvertI32x4High) \
V(MipsI64x2UConvertI32x4Low) \
V(MipsI64x2UConvertI32x4High) \
V(MipsI64x2ExtMulLowI32x4S) \
V(MipsI64x2ExtMulHighI32x4S) \
V(MipsI64x2ExtMulLowI32x4U) \
V(MipsI64x2ExtMulHighI32x4U) \
V(MipsF32x4Splat) \
V(MipsF32x4ExtractLane) \
V(MipsF32x4ReplaceLane) \
V(MipsF32x4SConvertI32x4) \
V(MipsF32x4UConvertI32x4) \
V(MipsF32x4DemoteF64x2Zero) \
V(MipsI32x4Mul) \
V(MipsI32x4MaxS) \
V(MipsI32x4MinS) \
V(MipsI32x4Eq) \
V(MipsI32x4Ne) \
V(MipsI32x4Shl) \
V(MipsI32x4ShrS) \
V(MipsI32x4ShrU) \
V(MipsI32x4MaxU) \
V(MipsI32x4MinU) \
V(MipsF64x2Splat) \
V(MipsF64x2ExtractLane) \
V(MipsF64x2ReplaceLane) \
V(MipsF32x4Abs) \
V(MipsF32x4Neg) \
V(MipsF32x4Sqrt) \
V(MipsF32x4RecipApprox) \
V(MipsF32x4RecipSqrtApprox) \
V(MipsF32x4Add) \
V(MipsF32x4Sub) \
V(MipsF32x4Mul) \
V(MipsF32x4Div) \
V(MipsF32x4Max) \
V(MipsF32x4Min) \
V(MipsF32x4Eq) \
V(MipsF32x4Ne) \
V(MipsF32x4Lt) \
V(MipsF32x4Le) \
V(MipsF32x4Pmin) \
V(MipsF32x4Pmax) \
V(MipsF32x4Ceil) \
V(MipsF32x4Floor) \
V(MipsF32x4Trunc) \
V(MipsF32x4NearestInt) \
V(MipsI32x4SConvertF32x4) \
V(MipsI32x4UConvertF32x4) \
V(MipsI32x4Neg) \
V(MipsI32x4GtS) \
V(MipsI32x4GeS) \
V(MipsI32x4GtU) \
V(MipsI32x4GeU) \
V(MipsI32x4Abs) \
V(MipsI32x4BitMask) \
V(MipsI32x4DotI16x8S) \
V(MipsI32x4ExtMulLowI16x8S) \
V(MipsI32x4ExtMulHighI16x8S) \
V(MipsI32x4ExtMulLowI16x8U) \
V(MipsI32x4ExtMulHighI16x8U) \
V(MipsI32x4TruncSatF64x2SZero) \
V(MipsI32x4TruncSatF64x2UZero) \
V(MipsI32x4ExtAddPairwiseI16x8S) \
V(MipsI32x4ExtAddPairwiseI16x8U) \
V(MipsI16x8Splat) \
V(MipsI16x8ExtractLaneU) \
V(MipsI16x8ExtractLaneS) \
V(MipsI16x8ReplaceLane) \
V(MipsI16x8Neg) \
V(MipsI16x8Shl) \
V(MipsI16x8ShrS) \
V(MipsI16x8ShrU) \
V(MipsI16x8Add) \
V(MipsI16x8AddSatS) \
V(MipsI16x8Sub) \
V(MipsI16x8SubSatS) \
V(MipsI16x8Mul) \
V(MipsI16x8MaxS) \
V(MipsI16x8MinS) \
V(MipsI16x8Eq) \
V(MipsI16x8Ne) \
V(MipsI16x8GtS) \
V(MipsI16x8GeS) \
V(MipsI16x8AddSatU) \
V(MipsI16x8SubSatU) \
V(MipsI16x8MaxU) \
V(MipsI16x8MinU) \
V(MipsI16x8GtU) \
V(MipsI16x8GeU) \
V(MipsI16x8RoundingAverageU) \
V(MipsI16x8Abs) \
V(MipsI16x8BitMask) \
V(MipsI16x8Q15MulRSatS) \
V(MipsI16x8ExtMulLowI8x16S) \
V(MipsI16x8ExtMulHighI8x16S) \
V(MipsI16x8ExtMulLowI8x16U) \
V(MipsI16x8ExtMulHighI8x16U) \
V(MipsI16x8ExtAddPairwiseI8x16S) \
V(MipsI16x8ExtAddPairwiseI8x16U) \
V(MipsI8x16Splat) \
V(MipsI8x16ExtractLaneU) \
V(MipsI8x16ExtractLaneS) \
V(MipsI8x16ReplaceLane) \
V(MipsI8x16Neg) \
V(MipsI8x16Shl) \
V(MipsI8x16ShrS) \
V(MipsI8x16Add) \
V(MipsI8x16AddSatS) \
V(MipsI8x16Sub) \
V(MipsI8x16SubSatS) \
V(MipsI8x16MaxS) \
V(MipsI8x16MinS) \
V(MipsI8x16Eq) \
V(MipsI8x16Ne) \
V(MipsI8x16GtS) \
V(MipsI8x16GeS) \
V(MipsI8x16ShrU) \
V(MipsI8x16AddSatU) \
V(MipsI8x16SubSatU) \
V(MipsI8x16MaxU) \
V(MipsI8x16MinU) \
V(MipsI8x16GtU) \
V(MipsI8x16GeU) \
V(MipsI8x16RoundingAverageU) \
V(MipsI8x16Abs) \
V(MipsI8x16Popcnt) \
V(MipsI8x16BitMask) \
V(MipsS128And) \
V(MipsS128Or) \
V(MipsS128Xor) \
V(MipsS128Not) \
V(MipsS128Select) \
V(MipsS128AndNot) \
V(MipsI64x2AllTrue) \
V(MipsI32x4AllTrue) \
V(MipsI16x8AllTrue) \
V(MipsI8x16AllTrue) \
V(MipsV128AnyTrue) \
V(MipsS32x4InterleaveRight) \
V(MipsS32x4InterleaveLeft) \
V(MipsS32x4PackEven) \
V(MipsS32x4PackOdd) \
V(MipsS32x4InterleaveEven) \
V(MipsS32x4InterleaveOdd) \
V(MipsS32x4Shuffle) \
V(MipsS16x8InterleaveRight) \
V(MipsS16x8InterleaveLeft) \
V(MipsS16x8PackEven) \
V(MipsS16x8PackOdd) \
V(MipsS16x8InterleaveEven) \
V(MipsS16x8InterleaveOdd) \
V(MipsS16x4Reverse) \
V(MipsS16x2Reverse) \
V(MipsS8x16InterleaveRight) \
V(MipsS8x16InterleaveLeft) \
V(MipsS8x16PackEven) \
V(MipsS8x16PackOdd) \
V(MipsS8x16InterleaveEven) \
V(MipsS8x16InterleaveOdd) \
V(MipsI8x16Shuffle) \
V(MipsI8x16Swizzle) \
V(MipsS8x16Concat) \
V(MipsS8x8Reverse) \
V(MipsS8x4Reverse) \
V(MipsS8x2Reverse) \
V(MipsS128Load8Splat) \
V(MipsS128Load16Splat) \
V(MipsS128Load32Splat) \
V(MipsS128Load64Splat) \
V(MipsS128Load8x8S) \
V(MipsS128Load8x8U) \
V(MipsS128Load16x4S) \
V(MipsS128Load16x4U) \
V(MipsS128Load32x2S) \
V(MipsS128Load32x2U) \
V(MipsMsaLd) \
V(MipsMsaSt) \
V(MipsI32x4SConvertI16x8Low) \
V(MipsI32x4SConvertI16x8High) \
V(MipsI32x4UConvertI16x8Low) \
V(MipsI32x4UConvertI16x8High) \
V(MipsI16x8SConvertI8x16Low) \
V(MipsI16x8SConvertI8x16High) \
V(MipsI16x8SConvertI32x4) \
V(MipsI16x8UConvertI32x4) \
V(MipsI16x8UConvertI8x16Low) \
V(MipsI16x8UConvertI8x16High) \
V(MipsI8x16SConvertI16x8) \
V(MipsI8x16UConvertI16x8) \
V(MipsWord32AtomicPairLoad) \
V(MipsWord32AtomicPairStore) \
V(MipsWord32AtomicPairAdd) \
V(MipsWord32AtomicPairSub) \
V(MipsWord32AtomicPairAnd) \
V(MipsWord32AtomicPairOr) \
V(MipsWord32AtomicPairXor) \
V(MipsWord32AtomicPairExchange) \
V(MipsI32x4ExtMulHighI16x8S) \
V(MipsI32x4ExtMulLowI16x8U) \
V(MipsI32x4ExtMulHighI16x8U) \
V(MipsI32x4TruncSatF64x2SZero) \
V(MipsI32x4TruncSatF64x2UZero) \
V(MipsI32x4ExtAddPairwiseI16x8S) \
V(MipsI32x4ExtAddPairwiseI16x8U) \
V(MipsI16x8Splat) \
V(MipsI16x8ExtractLaneU) \
V(MipsI16x8ExtractLaneS) \
V(MipsI16x8ReplaceLane) \
V(MipsI16x8Neg) \
V(MipsI16x8Shl) \
V(MipsI16x8ShrS) \
V(MipsI16x8ShrU) \
V(MipsI16x8Add) \
V(MipsI16x8AddSatS) \
V(MipsI16x8Sub) \
V(MipsI16x8SubSatS) \
V(MipsI16x8Mul) \
V(MipsI16x8MaxS) \
V(MipsI16x8MinS) \
V(MipsI16x8Eq) \
V(MipsI16x8Ne) \
V(MipsI16x8GtS) \
V(MipsI16x8GeS) \
V(MipsI16x8AddSatU) \
V(MipsI16x8SubSatU) \
V(MipsI16x8MaxU) \
V(MipsI16x8MinU) \
V(MipsI16x8GtU) \
V(MipsI16x8GeU) \
V(MipsI16x8RoundingAverageU) \
V(MipsI16x8Abs) \
V(MipsI16x8BitMask) \
V(MipsI16x8Q15MulRSatS) \
V(MipsI16x8ExtMulLowI8x16S) \
V(MipsI16x8ExtMulHighI8x16S) \
V(MipsI16x8ExtMulLowI8x16U) \
V(MipsI16x8ExtMulHighI8x16U) \
V(MipsI16x8ExtAddPairwiseI8x16S) \
V(MipsI16x8ExtAddPairwiseI8x16U) \
V(MipsI8x16Splat) \
V(MipsI8x16ExtractLaneU) \
V(MipsI8x16ExtractLaneS) \
V(MipsI8x16ReplaceLane) \
V(MipsI8x16Neg) \
V(MipsI8x16Shl) \
V(MipsI8x16ShrS) \
V(MipsI8x16Add) \
V(MipsI8x16AddSatS) \
V(MipsI8x16Sub) \
V(MipsI8x16SubSatS) \
V(MipsI8x16MaxS) \
V(MipsI8x16MinS) \
V(MipsI8x16Eq) \
V(MipsI8x16Ne) \
V(MipsI8x16GtS) \
V(MipsI8x16GeS) \
V(MipsI8x16ShrU) \
V(MipsI8x16AddSatU) \
V(MipsI8x16SubSatU) \
V(MipsI8x16MaxU) \
V(MipsI8x16MinU) \
V(MipsI8x16GtU) \
V(MipsI8x16GeU) \
V(MipsI8x16RoundingAverageU) \
V(MipsI8x16Abs) \
V(MipsI8x16Popcnt) \
V(MipsI8x16BitMask) \
V(MipsS128And) \
V(MipsS128Or) \
V(MipsS128Xor) \
V(MipsS128Not) \
V(MipsS128Select) \
V(MipsS128AndNot) \
V(MipsI64x2AllTrue) \
V(MipsI32x4AllTrue) \
V(MipsI16x8AllTrue) \
V(MipsI8x16AllTrue) \
V(MipsV128AnyTrue) \
V(MipsS32x4InterleaveRight) \
V(MipsS32x4InterleaveLeft) \
V(MipsS32x4PackEven) \
V(MipsS32x4PackOdd) \
V(MipsS32x4InterleaveEven) \
V(MipsS32x4InterleaveOdd) \
V(MipsS32x4Shuffle) \
V(MipsS16x8InterleaveRight) \
V(MipsS16x8InterleaveLeft) \
V(MipsS16x8PackEven) \
V(MipsS16x8PackOdd) \
V(MipsS16x8InterleaveEven) \
V(MipsS16x8InterleaveOdd) \
V(MipsS16x4Reverse) \
V(MipsS16x2Reverse) \
V(MipsS8x16InterleaveRight) \
V(MipsS8x16InterleaveLeft) \
V(MipsS8x16PackEven) \
V(MipsS8x16PackOdd) \
V(MipsS8x16InterleaveEven) \
V(MipsS8x16InterleaveOdd) \
V(MipsI8x16Shuffle) \
V(MipsI8x16Swizzle) \
V(MipsS8x16Concat) \
V(MipsS8x8Reverse) \
V(MipsS8x4Reverse) \
V(MipsS8x2Reverse) \
V(MipsS128Load8Splat) \
V(MipsS128Load16Splat) \
V(MipsS128Load32Splat) \
V(MipsS128Load64Splat) \
V(MipsS128Load8x8S) \
V(MipsS128Load8x8U) \
V(MipsS128Load16x4S) \
V(MipsS128Load16x4U) \
V(MipsS128Load32x2S) \
V(MipsS128Load32x2U) \
V(MipsMsaLd) \
V(MipsMsaSt) \
V(MipsI32x4SConvertI16x8Low) \
V(MipsI32x4SConvertI16x8High) \
V(MipsI32x4UConvertI16x8Low) \
V(MipsI32x4UConvertI16x8High) \
V(MipsI16x8SConvertI8x16Low) \
V(MipsI16x8SConvertI8x16High) \
V(MipsI16x8SConvertI32x4) \
V(MipsI16x8UConvertI32x4) \
V(MipsI16x8UConvertI8x16Low) \
V(MipsI16x8UConvertI8x16High) \
V(MipsI8x16SConvertI16x8) \
V(MipsI8x16UConvertI16x8) \
V(MipsWord32AtomicPairLoad) \
V(MipsWord32AtomicPairStore) \
V(MipsWord32AtomicPairAdd) \
V(MipsWord32AtomicPairSub) \
V(MipsWord32AtomicPairAnd) \
V(MipsWord32AtomicPairOr) \
V(MipsWord32AtomicPairXor) \
V(MipsWord32AtomicPairExchange) \
V(MipsWord32AtomicPairCompareExchange)
// Addressing modes represent the "shape" of inputs to an instruction.
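These headers all follow the same X-macro convention: each V(Name) entry is expanded elsewhere into an enumerator of the per-architecture opcode enum, so the list macro is the single source of truth for the opcode set. A minimal sketch of that expansion, using a hypothetical two-entry list; DECLARE_OPCODE and kLastOpcode are illustrative names, not necessarily V8's:

// Hypothetical two-entry stand-in for the full list above.
#define TARGET_ARCH_OPCODE_LIST(V) \
  V(MipsAdd)                       \
  V(MipsSub)

#define DECLARE_OPCODE(Name) k##Name,
enum ArchOpcode {
  TARGET_ARCH_OPCODE_LIST(DECLARE_OPCODE)  // expands to: kMipsAdd, kMipsSub,
  kLastOpcode  // illustrative sentinel, not a V8 name
};
#undef DECLARE_OPCODE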

View File

@@ -12,397 +12,393 @@ namespace compiler {
// MIPS64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
// Opcodes that support a MemoryAccessMode.
#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
#define TARGET_ARCH_OPCODE_LIST(V) \
TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
V(Mips64Add) \
V(Mips64Dadd) \
V(Mips64DaddOvf) \
V(Mips64Sub) \
V(Mips64Dsub) \
V(Mips64DsubOvf) \
V(Mips64Mul) \
V(Mips64MulOvf) \
V(Mips64MulHigh) \
V(Mips64DMulHigh) \
V(Mips64MulHighU) \
V(Mips64Dmul) \
V(Mips64Div) \
V(Mips64Ddiv) \
V(Mips64DivU) \
V(Mips64DdivU) \
V(Mips64Mod) \
V(Mips64Dmod) \
V(Mips64ModU) \
V(Mips64DmodU) \
V(Mips64And) \
V(Mips64And32) \
V(Mips64Or) \
V(Mips64Or32) \
V(Mips64Nor) \
V(Mips64Nor32) \
V(Mips64Xor) \
V(Mips64Xor32) \
V(Mips64Clz) \
V(Mips64Lsa) \
V(Mips64Dlsa) \
V(Mips64Shl) \
V(Mips64Shr) \
V(Mips64Sar) \
V(Mips64Ext) \
V(Mips64Ins) \
V(Mips64Dext) \
V(Mips64Dins) \
V(Mips64Dclz) \
V(Mips64Ctz) \
V(Mips64Dctz) \
V(Mips64Popcnt) \
V(Mips64Dpopcnt) \
V(Mips64Dshl) \
V(Mips64Dshr) \
V(Mips64Dsar) \
V(Mips64Ror) \
V(Mips64Dror) \
V(Mips64Mov) \
V(Mips64Tst) \
V(Mips64Cmp) \
V(Mips64CmpS) \
V(Mips64AddS) \
V(Mips64SubS) \
V(Mips64MulS) \
V(Mips64DivS) \
V(Mips64AbsS) \
V(Mips64NegS) \
V(Mips64SqrtS) \
V(Mips64MaxS) \
V(Mips64MinS) \
V(Mips64CmpD) \
V(Mips64AddD) \
V(Mips64SubD) \
V(Mips64MulD) \
V(Mips64DivD) \
V(Mips64ModD) \
V(Mips64AbsD) \
V(Mips64NegD) \
V(Mips64SqrtD) \
V(Mips64MaxD) \
V(Mips64MinD) \
V(Mips64Float64RoundDown) \
V(Mips64Float64RoundTruncate) \
V(Mips64Float64RoundUp) \
V(Mips64Float64RoundTiesEven) \
V(Mips64Float32RoundDown) \
V(Mips64Float32RoundTruncate) \
V(Mips64Float32RoundUp) \
V(Mips64Float32RoundTiesEven) \
V(Mips64CvtSD) \
V(Mips64CvtDS) \
V(Mips64TruncWD) \
V(Mips64RoundWD) \
V(Mips64FloorWD) \
V(Mips64CeilWD) \
V(Mips64TruncWS) \
V(Mips64RoundWS) \
V(Mips64FloorWS) \
V(Mips64CeilWS) \
V(Mips64TruncLS) \
V(Mips64TruncLD) \
V(Mips64TruncUwD) \
V(Mips64TruncUwS) \
V(Mips64TruncUlS) \
V(Mips64TruncUlD) \
V(Mips64CvtDW) \
V(Mips64CvtSL) \
V(Mips64CvtSW) \
V(Mips64CvtSUw) \
V(Mips64CvtSUl) \
V(Mips64CvtDL) \
V(Mips64CvtDUw) \
V(Mips64CvtDUl) \
V(Mips64Lb) \
V(Mips64Lbu) \
V(Mips64Sb) \
V(Mips64Lh) \
V(Mips64Ulh) \
V(Mips64Lhu) \
V(Mips64Ulhu) \
V(Mips64Sh) \
V(Mips64Ush) \
V(Mips64Ld) \
V(Mips64Uld) \
V(Mips64Lw) \
V(Mips64Ulw) \
V(Mips64Lwu) \
V(Mips64Ulwu) \
V(Mips64Sw) \
V(Mips64Usw) \
V(Mips64Sd) \
V(Mips64Usd) \
V(Mips64Lwc1) \
V(Mips64Ulwc1) \
V(Mips64Swc1) \
V(Mips64Uswc1) \
V(Mips64Ldc1) \
V(Mips64Uldc1) \
V(Mips64Sdc1) \
V(Mips64Usdc1) \
V(Mips64BitcastDL) \
V(Mips64BitcastLD) \
V(Mips64Float64ExtractLowWord32) \
V(Mips64Float64ExtractHighWord32) \
V(Mips64Float64InsertLowWord32) \
V(Mips64Float64InsertHighWord32) \
V(Mips64Float32Max) \
V(Mips64Float64Max) \
V(Mips64Float32Min) \
V(Mips64Float64Min) \
V(Mips64Float64SilenceNaN) \
V(Mips64Push) \
V(Mips64Peek) \
V(Mips64StoreToStackSlot) \
V(Mips64ByteSwap64) \
V(Mips64ByteSwap32) \
V(Mips64StackClaim) \
V(Mips64Seb) \
V(Mips64Seh) \
V(Mips64Sync) \
V(Mips64AssertEqual) \
V(Mips64S128Const) \
V(Mips64S128Zero) \
V(Mips64S128AllOnes) \
V(Mips64I32x4Splat) \
V(Mips64I32x4ExtractLane) \
V(Mips64I32x4ReplaceLane) \
V(Mips64I32x4Add) \
V(Mips64I32x4Sub) \
V(Mips64F64x2Abs) \
V(Mips64F64x2Neg) \
V(Mips64F32x4Splat) \
V(Mips64F32x4ExtractLane) \
V(Mips64F32x4ReplaceLane) \
V(Mips64F32x4SConvertI32x4) \
V(Mips64F32x4UConvertI32x4) \
V(Mips64I32x4Mul) \
V(Mips64I32x4MaxS) \
V(Mips64I32x4MinS) \
V(Mips64I32x4Eq) \
V(Mips64I32x4Ne) \
V(Mips64I32x4Shl) \
V(Mips64I32x4ShrS) \
V(Mips64I32x4ShrU) \
V(Mips64I32x4MaxU) \
V(Mips64I32x4MinU) \
V(Mips64F64x2Sqrt) \
V(Mips64F64x2Add) \
V(Mips64F64x2Sub) \
V(Mips64F64x2Mul) \
V(Mips64F64x2Div) \
V(Mips64F64x2Min) \
V(Mips64F64x2Max) \
V(Mips64F64x2Eq) \
V(Mips64F64x2Ne) \
V(Mips64F64x2Lt) \
V(Mips64F64x2Le) \
V(Mips64F64x2Splat) \
V(Mips64F64x2ExtractLane) \
V(Mips64F64x2ReplaceLane) \
V(Mips64F64x2Pmin) \
V(Mips64F64x2Pmax) \
V(Mips64F64x2Ceil) \
V(Mips64F64x2Floor) \
V(Mips64F64x2Trunc) \
V(Mips64F64x2NearestInt) \
V(Mips64F64x2ConvertLowI32x4S) \
V(Mips64F64x2ConvertLowI32x4U) \
V(Mips64F64x2PromoteLowF32x4) \
V(Mips64I64x2Splat) \
V(Mips64I64x2ExtractLane) \
V(Mips64I64x2ReplaceLane) \
V(Mips64I64x2Add) \
V(Mips64I64x2Sub) \
V(Mips64I64x2Mul) \
V(Mips64I64x2Neg) \
V(Mips64I64x2Shl) \
V(Mips64I64x2ShrS) \
V(Mips64I64x2ShrU) \
V(Mips64I64x2BitMask) \
V(Mips64I64x2Eq) \
V(Mips64I64x2Ne) \
V(Mips64I64x2GtS) \
V(Mips64I64x2GeS) \
V(Mips64I64x2Abs) \
V(Mips64I64x2SConvertI32x4Low) \
V(Mips64I64x2SConvertI32x4High) \
V(Mips64I64x2UConvertI32x4Low) \
V(Mips64I64x2UConvertI32x4High) \
V(Mips64ExtMulLow) \
V(Mips64ExtMulHigh) \
V(Mips64ExtAddPairwise) \
V(Mips64F32x4Abs) \
V(Mips64F32x4Neg) \
V(Mips64F32x4Sqrt) \
V(Mips64F32x4RecipApprox) \
V(Mips64F32x4RecipSqrtApprox) \
V(Mips64F32x4Add) \
V(Mips64F32x4Sub) \
V(Mips64F32x4Mul) \
V(Mips64F32x4Div) \
V(Mips64F32x4Max) \
V(Mips64F32x4Min) \
V(Mips64F32x4Eq) \
V(Mips64F32x4Ne) \
V(Mips64F32x4Lt) \
V(Mips64F32x4Le) \
V(Mips64F32x4Pmin) \
V(Mips64F32x4Pmax) \
V(Mips64F32x4Ceil) \
V(Mips64F32x4Floor) \
V(Mips64F32x4Trunc) \
V(Mips64F32x4NearestInt) \
V(Mips64F32x4DemoteF64x2Zero) \
V(Mips64I32x4SConvertF32x4) \
V(Mips64I32x4UConvertF32x4) \
V(Mips64I32x4Neg) \
V(Mips64I32x4GtS) \
V(Mips64I32x4GeS) \
V(Mips64I32x4GtU) \
V(Mips64I32x4GeU) \
V(Mips64I32x4Abs) \
V(Mips64I32x4BitMask) \
V(Mips64I32x4DotI16x8S) \
V(Mips64I32x4TruncSatF64x2SZero) \
V(Mips64I32x4TruncSatF64x2UZero) \
V(Mips64I16x8Splat) \
V(Mips64I16x8ExtractLaneU) \
V(Mips64I16x8ExtractLaneS) \
V(Mips64I16x8ReplaceLane) \
V(Mips64I16x8Neg) \
V(Mips64I16x8Shl) \
V(Mips64I16x8ShrS) \
V(Mips64I16x8ShrU) \
V(Mips64I16x8Add) \
V(Mips64I16x8AddSatS) \
V(Mips64I16x8Sub) \
V(Mips64I16x8SubSatS) \
V(Mips64I16x8Mul) \
V(Mips64I16x8MaxS) \
V(Mips64I16x8MinS) \
V(Mips64I16x8Eq) \
V(Mips64I16x8Ne) \
V(Mips64I16x8GtS) \
V(Mips64I16x8GeS) \
V(Mips64I16x8AddSatU) \
V(Mips64I16x8SubSatU) \
V(Mips64I16x8MaxU) \
V(Mips64I16x8MinU) \
V(Mips64I16x8GtU) \
V(Mips64I16x8GeU) \
V(Mips64I16x8RoundingAverageU) \
V(Mips64I16x8Abs) \
V(Mips64I16x8BitMask) \
V(Mips64I16x8Q15MulRSatS) \
V(Mips64I8x16Splat) \
V(Mips64I8x16ExtractLaneU) \
V(Mips64I8x16ExtractLaneS) \
V(Mips64I8x16ReplaceLane) \
V(Mips64I8x16Neg) \
V(Mips64I8x16Shl) \
V(Mips64I8x16ShrS) \
V(Mips64I8x16Add) \
V(Mips64I8x16AddSatS) \
V(Mips64I8x16Sub) \
V(Mips64I8x16SubSatS) \
V(Mips64I8x16MaxS) \
V(Mips64I8x16MinS) \
V(Mips64I8x16Eq) \
V(Mips64I8x16Ne) \
V(Mips64I8x16GtS) \
V(Mips64I8x16GeS) \
V(Mips64I8x16ShrU) \
V(Mips64I8x16AddSatU) \
V(Mips64I8x16SubSatU) \
V(Mips64I8x16MaxU) \
V(Mips64I8x16MinU) \
V(Mips64I8x16GtU) \
V(Mips64I8x16GeU) \
V(Mips64I8x16RoundingAverageU) \
V(Mips64I8x16Abs) \
V(Mips64I8x16Popcnt) \
V(Mips64I8x16BitMask) \
V(Mips64S128And) \
V(Mips64S128Or) \
V(Mips64S128Xor) \
V(Mips64S128Not) \
V(Mips64S128Select) \
V(Mips64S128AndNot) \
V(Mips64I64x2AllTrue) \
V(Mips64I32x4AllTrue) \
V(Mips64I16x8AllTrue) \
V(Mips64I8x16AllTrue) \
V(Mips64V128AnyTrue) \
V(Mips64S32x4InterleaveRight) \
V(Mips64S32x4InterleaveLeft) \
V(Mips64S32x4PackEven) \
V(Mips64S32x4PackOdd) \
V(Mips64S32x4InterleaveEven) \
V(Mips64S32x4InterleaveOdd) \
V(Mips64S32x4Shuffle) \
V(Mips64S16x8InterleaveRight) \
V(Mips64S16x8InterleaveLeft) \
V(Mips64S16x8PackEven) \
V(Mips64S16x8PackOdd) \
V(Mips64S16x8InterleaveEven) \
V(Mips64S16x8InterleaveOdd) \
V(Mips64S16x4Reverse) \
V(Mips64S16x2Reverse) \
V(Mips64S8x16InterleaveRight) \
V(Mips64S8x16InterleaveLeft) \
V(Mips64S8x16PackEven) \
V(Mips64S8x16PackOdd) \
V(Mips64S8x16InterleaveEven) \
V(Mips64S8x16InterleaveOdd) \
V(Mips64I8x16Shuffle) \
V(Mips64I8x16Swizzle) \
V(Mips64S8x16Concat) \
V(Mips64S8x8Reverse) \
V(Mips64S8x4Reverse) \
V(Mips64S8x2Reverse) \
V(Mips64S128LoadSplat) \
V(Mips64S128Load8x8S) \
V(Mips64S128Load8x8U) \
V(Mips64S128Load16x4S) \
V(Mips64S128Load16x4U) \
V(Mips64S128Load32x2S) \
V(Mips64S128Load32x2U) \
V(Mips64S128Load32Zero) \
V(Mips64S128Load64Zero) \
V(Mips64S128LoadLane) \
V(Mips64S128StoreLane) \
V(Mips64MsaLd) \
V(Mips64MsaSt) \
V(Mips64I32x4SConvertI16x8Low) \
V(Mips64I32x4SConvertI16x8High) \
V(Mips64I32x4UConvertI16x8Low) \
V(Mips64I32x4UConvertI16x8High) \
V(Mips64I16x8SConvertI8x16Low) \
V(Mips64I16x8SConvertI8x16High) \
V(Mips64I16x8SConvertI32x4) \
V(Mips64I16x8UConvertI32x4) \
V(Mips64I16x8UConvertI8x16Low) \
V(Mips64I16x8UConvertI8x16High) \
V(Mips64I8x16SConvertI16x8) \
V(Mips64I8x16UConvertI16x8) \
V(Mips64StoreCompressTagged) \
V(Mips64Word64AtomicLoadUint64) \
V(Mips64Word64AtomicStoreWord64) \
V(Mips64Word64AtomicAddUint64) \
V(Mips64Word64AtomicSubUint64) \
V(Mips64Word64AtomicAndUint64) \
V(Mips64Word64AtomicOrUint64) \
V(Mips64Word64AtomicXorUint64) \
V(Mips64Word64AtomicExchangeUint64) \
#define TARGET_ARCH_OPCODE_LIST(V) \
V(Mips64Add) \
V(Mips64Dadd) \
V(Mips64DaddOvf) \
V(Mips64Sub) \
V(Mips64Dsub) \
V(Mips64DsubOvf) \
V(Mips64Mul) \
V(Mips64MulOvf) \
V(Mips64MulHigh) \
V(Mips64DMulHigh) \
V(Mips64MulHighU) \
V(Mips64Dmul) \
V(Mips64Div) \
V(Mips64Ddiv) \
V(Mips64DivU) \
V(Mips64DdivU) \
V(Mips64Mod) \
V(Mips64Dmod) \
V(Mips64ModU) \
V(Mips64DmodU) \
V(Mips64And) \
V(Mips64And32) \
V(Mips64Or) \
V(Mips64Or32) \
V(Mips64Nor) \
V(Mips64Nor32) \
V(Mips64Xor) \
V(Mips64Xor32) \
V(Mips64Clz) \
V(Mips64Lsa) \
V(Mips64Dlsa) \
V(Mips64Shl) \
V(Mips64Shr) \
V(Mips64Sar) \
V(Mips64Ext) \
V(Mips64Ins) \
V(Mips64Dext) \
V(Mips64Dins) \
V(Mips64Dclz) \
V(Mips64Ctz) \
V(Mips64Dctz) \
V(Mips64Popcnt) \
V(Mips64Dpopcnt) \
V(Mips64Dshl) \
V(Mips64Dshr) \
V(Mips64Dsar) \
V(Mips64Ror) \
V(Mips64Dror) \
V(Mips64Mov) \
V(Mips64Tst) \
V(Mips64Cmp) \
V(Mips64CmpS) \
V(Mips64AddS) \
V(Mips64SubS) \
V(Mips64MulS) \
V(Mips64DivS) \
V(Mips64AbsS) \
V(Mips64NegS) \
V(Mips64SqrtS) \
V(Mips64MaxS) \
V(Mips64MinS) \
V(Mips64CmpD) \
V(Mips64AddD) \
V(Mips64SubD) \
V(Mips64MulD) \
V(Mips64DivD) \
V(Mips64ModD) \
V(Mips64AbsD) \
V(Mips64NegD) \
V(Mips64SqrtD) \
V(Mips64MaxD) \
V(Mips64MinD) \
V(Mips64Float64RoundDown) \
V(Mips64Float64RoundTruncate) \
V(Mips64Float64RoundUp) \
V(Mips64Float64RoundTiesEven) \
V(Mips64Float32RoundDown) \
V(Mips64Float32RoundTruncate) \
V(Mips64Float32RoundUp) \
V(Mips64Float32RoundTiesEven) \
V(Mips64CvtSD) \
V(Mips64CvtDS) \
V(Mips64TruncWD) \
V(Mips64RoundWD) \
V(Mips64FloorWD) \
V(Mips64CeilWD) \
V(Mips64TruncWS) \
V(Mips64RoundWS) \
V(Mips64FloorWS) \
V(Mips64CeilWS) \
V(Mips64TruncLS) \
V(Mips64TruncLD) \
V(Mips64TruncUwD) \
V(Mips64TruncUwS) \
V(Mips64TruncUlS) \
V(Mips64TruncUlD) \
V(Mips64CvtDW) \
V(Mips64CvtSL) \
V(Mips64CvtSW) \
V(Mips64CvtSUw) \
V(Mips64CvtSUl) \
V(Mips64CvtDL) \
V(Mips64CvtDUw) \
V(Mips64CvtDUl) \
V(Mips64Lb) \
V(Mips64Lbu) \
V(Mips64Sb) \
V(Mips64Lh) \
V(Mips64Ulh) \
V(Mips64Lhu) \
V(Mips64Ulhu) \
V(Mips64Sh) \
V(Mips64Ush) \
V(Mips64Ld) \
V(Mips64Uld) \
V(Mips64Lw) \
V(Mips64Ulw) \
V(Mips64Lwu) \
V(Mips64Ulwu) \
V(Mips64Sw) \
V(Mips64Usw) \
V(Mips64Sd) \
V(Mips64Usd) \
V(Mips64Lwc1) \
V(Mips64Ulwc1) \
V(Mips64Swc1) \
V(Mips64Uswc1) \
V(Mips64Ldc1) \
V(Mips64Uldc1) \
V(Mips64Sdc1) \
V(Mips64Usdc1) \
V(Mips64BitcastDL) \
V(Mips64BitcastLD) \
V(Mips64Float64ExtractLowWord32) \
V(Mips64Float64ExtractHighWord32) \
V(Mips64Float64InsertLowWord32) \
V(Mips64Float64InsertHighWord32) \
V(Mips64Float32Max) \
V(Mips64Float64Max) \
V(Mips64Float32Min) \
V(Mips64Float64Min) \
V(Mips64Float64SilenceNaN) \
V(Mips64Push) \
V(Mips64Peek) \
V(Mips64StoreToStackSlot) \
V(Mips64ByteSwap64) \
V(Mips64ByteSwap32) \
V(Mips64StackClaim) \
V(Mips64Seb) \
V(Mips64Seh) \
V(Mips64Sync) \
V(Mips64AssertEqual) \
V(Mips64S128Const) \
V(Mips64S128Zero) \
V(Mips64S128AllOnes) \
V(Mips64I32x4Splat) \
V(Mips64I32x4ExtractLane) \
V(Mips64I32x4ReplaceLane) \
V(Mips64I32x4Add) \
V(Mips64I32x4Sub) \
V(Mips64F64x2Abs) \
V(Mips64F64x2Neg) \
V(Mips64F32x4Splat) \
V(Mips64F32x4ExtractLane) \
V(Mips64F32x4ReplaceLane) \
V(Mips64F32x4SConvertI32x4) \
V(Mips64F32x4UConvertI32x4) \
V(Mips64I32x4Mul) \
V(Mips64I32x4MaxS) \
V(Mips64I32x4MinS) \
V(Mips64I32x4Eq) \
V(Mips64I32x4Ne) \
V(Mips64I32x4Shl) \
V(Mips64I32x4ShrS) \
V(Mips64I32x4ShrU) \
V(Mips64I32x4MaxU) \
V(Mips64I32x4MinU) \
V(Mips64F64x2Sqrt) \
V(Mips64F64x2Add) \
V(Mips64F64x2Sub) \
V(Mips64F64x2Mul) \
V(Mips64F64x2Div) \
V(Mips64F64x2Min) \
V(Mips64F64x2Max) \
V(Mips64F64x2Eq) \
V(Mips64F64x2Ne) \
V(Mips64F64x2Lt) \
V(Mips64F64x2Le) \
V(Mips64F64x2Splat) \
V(Mips64F64x2ExtractLane) \
V(Mips64F64x2ReplaceLane) \
V(Mips64F64x2Pmin) \
V(Mips64F64x2Pmax) \
V(Mips64F64x2Ceil) \
V(Mips64F64x2Floor) \
V(Mips64F64x2Trunc) \
V(Mips64F64x2NearestInt) \
V(Mips64F64x2ConvertLowI32x4S) \
V(Mips64F64x2ConvertLowI32x4U) \
V(Mips64F64x2PromoteLowF32x4) \
V(Mips64I64x2Splat) \
V(Mips64I64x2ExtractLane) \
V(Mips64I64x2ReplaceLane) \
V(Mips64I64x2Add) \
V(Mips64I64x2Sub) \
V(Mips64I64x2Mul) \
V(Mips64I64x2Neg) \
V(Mips64I64x2Shl) \
V(Mips64I64x2ShrS) \
V(Mips64I64x2ShrU) \
V(Mips64I64x2BitMask) \
V(Mips64I64x2Eq) \
V(Mips64I64x2Ne) \
V(Mips64I64x2GtS) \
V(Mips64I64x2GeS) \
V(Mips64I64x2Abs) \
V(Mips64I64x2SConvertI32x4Low) \
V(Mips64I64x2SConvertI32x4High) \
V(Mips64I64x2UConvertI32x4Low) \
V(Mips64I64x2UConvertI32x4High) \
V(Mips64ExtMulLow) \
V(Mips64ExtMulHigh) \
V(Mips64ExtAddPairwise) \
V(Mips64F32x4Abs) \
V(Mips64F32x4Neg) \
V(Mips64F32x4Sqrt) \
V(Mips64F32x4RecipApprox) \
V(Mips64F32x4RecipSqrtApprox) \
V(Mips64F32x4Add) \
V(Mips64F32x4Sub) \
V(Mips64F32x4Mul) \
V(Mips64F32x4Div) \
V(Mips64F32x4Max) \
V(Mips64F32x4Min) \
V(Mips64F32x4Eq) \
V(Mips64F32x4Ne) \
V(Mips64F32x4Lt) \
V(Mips64F32x4Le) \
V(Mips64F32x4Pmin) \
V(Mips64F32x4Pmax) \
V(Mips64F32x4Ceil) \
V(Mips64F32x4Floor) \
V(Mips64F32x4Trunc) \
V(Mips64F32x4NearestInt) \
V(Mips64F32x4DemoteF64x2Zero) \
V(Mips64I32x4SConvertF32x4) \
V(Mips64I32x4UConvertF32x4) \
V(Mips64I32x4Neg) \
V(Mips64I32x4GtS) \
V(Mips64I32x4GeS) \
V(Mips64I32x4GtU) \
V(Mips64I32x4GeU) \
V(Mips64I32x4Abs) \
V(Mips64I32x4BitMask) \
V(Mips64I32x4DotI16x8S) \
V(Mips64I32x4TruncSatF64x2SZero) \
V(Mips64I32x4TruncSatF64x2UZero) \
V(Mips64I16x8Splat) \
V(Mips64I16x8ExtractLaneU) \
V(Mips64I16x8ExtractLaneS) \
V(Mips64I16x8ReplaceLane) \
V(Mips64I16x8Neg) \
V(Mips64I16x8Shl) \
V(Mips64I16x8ShrS) \
V(Mips64I16x8ShrU) \
V(Mips64I16x8Add) \
V(Mips64I16x8AddSatS) \
V(Mips64I16x8Sub) \
V(Mips64I16x8SubSatS) \
V(Mips64I16x8Mul) \
V(Mips64I16x8MaxS) \
V(Mips64I16x8MinS) \
V(Mips64I16x8Eq) \
V(Mips64I16x8Ne) \
V(Mips64I16x8GtS) \
V(Mips64I16x8GeS) \
V(Mips64I16x8AddSatU) \
V(Mips64I16x8SubSatU) \
V(Mips64I16x8MaxU) \
V(Mips64I16x8MinU) \
V(Mips64I16x8GtU) \
V(Mips64I16x8GeU) \
V(Mips64I16x8RoundingAverageU) \
V(Mips64I16x8Abs) \
V(Mips64I16x8BitMask) \
V(Mips64I16x8Q15MulRSatS) \
V(Mips64I8x16Splat) \
V(Mips64I8x16ExtractLaneU) \
V(Mips64I8x16ExtractLaneS) \
V(Mips64I8x16ReplaceLane) \
V(Mips64I8x16Neg) \
V(Mips64I8x16Shl) \
V(Mips64I8x16ShrS) \
V(Mips64I8x16Add) \
V(Mips64I8x16AddSatS) \
V(Mips64I8x16Sub) \
V(Mips64I8x16SubSatS) \
V(Mips64I8x16MaxS) \
V(Mips64I8x16MinS) \
V(Mips64I8x16Eq) \
V(Mips64I8x16Ne) \
V(Mips64I8x16GtS) \
V(Mips64I8x16GeS) \
V(Mips64I8x16ShrU) \
V(Mips64I8x16AddSatU) \
V(Mips64I8x16SubSatU) \
V(Mips64I8x16MaxU) \
V(Mips64I8x16MinU) \
V(Mips64I8x16GtU) \
V(Mips64I8x16GeU) \
V(Mips64I8x16RoundingAverageU) \
V(Mips64I8x16Abs) \
V(Mips64I8x16Popcnt) \
V(Mips64I8x16BitMask) \
V(Mips64S128And) \
V(Mips64S128Or) \
V(Mips64S128Xor) \
V(Mips64S128Not) \
V(Mips64S128Select) \
V(Mips64S128AndNot) \
V(Mips64I64x2AllTrue) \
V(Mips64I32x4AllTrue) \
V(Mips64I16x8AllTrue) \
V(Mips64I8x16AllTrue) \
V(Mips64V128AnyTrue) \
V(Mips64S32x4InterleaveRight) \
V(Mips64S32x4InterleaveLeft) \
V(Mips64S32x4PackEven) \
V(Mips64S32x4PackOdd) \
V(Mips64S32x4InterleaveEven) \
V(Mips64S32x4InterleaveOdd) \
V(Mips64S32x4Shuffle) \
V(Mips64S16x8InterleaveRight) \
V(Mips64S16x8InterleaveLeft) \
V(Mips64S16x8PackEven) \
V(Mips64S16x8PackOdd) \
V(Mips64S16x8InterleaveEven) \
V(Mips64S16x8InterleaveOdd) \
V(Mips64S16x4Reverse) \
V(Mips64S16x2Reverse) \
V(Mips64S8x16InterleaveRight) \
V(Mips64S8x16InterleaveLeft) \
V(Mips64S8x16PackEven) \
V(Mips64S8x16PackOdd) \
V(Mips64S8x16InterleaveEven) \
V(Mips64S8x16InterleaveOdd) \
V(Mips64I8x16Shuffle) \
V(Mips64I8x16Swizzle) \
V(Mips64S8x16Concat) \
V(Mips64S8x8Reverse) \
V(Mips64S8x4Reverse) \
V(Mips64S8x2Reverse) \
V(Mips64S128LoadSplat) \
V(Mips64S128Load8x8S) \
V(Mips64S128Load8x8U) \
V(Mips64S128Load16x4S) \
V(Mips64S128Load16x4U) \
V(Mips64S128Load32x2S) \
V(Mips64S128Load32x2U) \
V(Mips64S128Load32Zero) \
V(Mips64S128Load64Zero) \
V(Mips64S128LoadLane) \
V(Mips64S128StoreLane) \
V(Mips64MsaLd) \
V(Mips64MsaSt) \
V(Mips64I32x4SConvertI16x8Low) \
V(Mips64I32x4SConvertI16x8High) \
V(Mips64I32x4UConvertI16x8Low) \
V(Mips64I32x4UConvertI16x8High) \
V(Mips64I16x8SConvertI8x16Low) \
V(Mips64I16x8SConvertI8x16High) \
V(Mips64I16x8SConvertI32x4) \
V(Mips64I16x8UConvertI32x4) \
V(Mips64I16x8UConvertI8x16Low) \
V(Mips64I16x8UConvertI8x16High) \
V(Mips64I8x16SConvertI16x8) \
V(Mips64I8x16UConvertI16x8) \
V(Mips64StoreCompressTagged) \
V(Mips64Word64AtomicLoadUint64) \
V(Mips64Word64AtomicStoreWord64) \
V(Mips64Word64AtomicAddUint64) \
V(Mips64Word64AtomicSubUint64) \
V(Mips64Word64AtomicAndUint64) \
V(Mips64Word64AtomicOrUint64) \
V(Mips64Word64AtomicXorUint64) \
V(Mips64Word64AtomicExchangeUint64) \
V(Mips64Word64AtomicCompareExchangeUint64)
// Addressing modes represent the "shape" of inputs to an instruction.
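The `// None.` definitions being deleted in these hunks are what previously fed HasMemoryAccessMode(): generating its switch from an empty list leaves only a default label, which MSVC flags as C4065. A hedged sketch of the pattern, assuming the list macro expands into case labels (simplified, not the verbatim V8 source):

inline bool HasMemoryAccessMode(ArchOpcode opcode) {
#if defined(TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST)
  switch (opcode) {
#define CASE(Name) \
  case k##Name:    \
    return true;
    TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(CASE)
#undef CASE
    default:
      return false;
  }
#else
  // No trapping instructions on this architecture: no switch is generated,
  // so MSVC has nothing to warn about.
  return false;
#endif
}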

View File

@@ -12,410 +12,406 @@ namespace compiler {
// PPC-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
// Opcodes that support a MemoryAccessMode.
#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
#define TARGET_ARCH_OPCODE_LIST(V) \
TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
V(PPC_Peek) \
V(PPC_Sync) \
V(PPC_And) \
V(PPC_AndComplement) \
V(PPC_Or) \
V(PPC_OrComplement) \
V(PPC_Xor) \
V(PPC_ShiftLeft32) \
V(PPC_ShiftLeft64) \
V(PPC_ShiftLeftPair) \
V(PPC_ShiftRight32) \
V(PPC_ShiftRight64) \
V(PPC_ShiftRightPair) \
V(PPC_ShiftRightAlg32) \
V(PPC_ShiftRightAlg64) \
V(PPC_ShiftRightAlgPair) \
V(PPC_RotRight32) \
V(PPC_RotRight64) \
V(PPC_Not) \
V(PPC_RotLeftAndMask32) \
V(PPC_RotLeftAndClear64) \
V(PPC_RotLeftAndClearLeft64) \
V(PPC_RotLeftAndClearRight64) \
V(PPC_Add32) \
V(PPC_Add64) \
V(PPC_AddWithOverflow32) \
V(PPC_AddPair) \
V(PPC_AddDouble) \
V(PPC_Sub) \
V(PPC_SubWithOverflow32) \
V(PPC_SubPair) \
V(PPC_SubDouble) \
V(PPC_Mul32) \
V(PPC_Mul32WithHigh32) \
V(PPC_Mul64) \
V(PPC_MulHigh32) \
V(PPC_MulHighU32) \
V(PPC_MulPair) \
V(PPC_MulDouble) \
V(PPC_Div32) \
V(PPC_Div64) \
V(PPC_DivU32) \
V(PPC_DivU64) \
V(PPC_DivDouble) \
V(PPC_Mod32) \
V(PPC_Mod64) \
V(PPC_ModU32) \
V(PPC_ModU64) \
V(PPC_ModDouble) \
V(PPC_Neg) \
V(PPC_NegDouble) \
V(PPC_SqrtDouble) \
V(PPC_FloorDouble) \
V(PPC_CeilDouble) \
V(PPC_TruncateDouble) \
V(PPC_RoundDouble) \
V(PPC_MaxDouble) \
V(PPC_MinDouble) \
V(PPC_AbsDouble) \
V(PPC_Cntlz32) \
V(PPC_Cntlz64) \
V(PPC_Popcnt32) \
V(PPC_Popcnt64) \
V(PPC_Cmp32) \
V(PPC_Cmp64) \
V(PPC_CmpDouble) \
V(PPC_Tst32) \
V(PPC_Tst64) \
V(PPC_Push) \
V(PPC_PushFrame) \
V(PPC_StoreToStackSlot) \
V(PPC_ExtendSignWord8) \
V(PPC_ExtendSignWord16) \
V(PPC_ExtendSignWord32) \
V(PPC_Uint32ToUint64) \
V(PPC_Int64ToInt32) \
V(PPC_Int64ToFloat32) \
V(PPC_Int64ToDouble) \
V(PPC_Uint64ToFloat32) \
V(PPC_Uint64ToDouble) \
V(PPC_Int32ToFloat32) \
V(PPC_Int32ToDouble) \
V(PPC_Uint32ToFloat32) \
V(PPC_Float32ToInt32) \
V(PPC_Float32ToUint32) \
V(PPC_Uint32ToDouble) \
V(PPC_Float32ToDouble) \
V(PPC_Float64SilenceNaN) \
V(PPC_DoubleToInt32) \
V(PPC_DoubleToUint32) \
V(PPC_DoubleToInt64) \
V(PPC_DoubleToUint64) \
V(PPC_DoubleToFloat32) \
V(PPC_DoubleExtractLowWord32) \
V(PPC_DoubleExtractHighWord32) \
V(PPC_DoubleInsertLowWord32) \
V(PPC_DoubleInsertHighWord32) \
V(PPC_DoubleConstruct) \
V(PPC_BitcastInt32ToFloat32) \
V(PPC_BitcastFloat32ToInt32) \
V(PPC_BitcastInt64ToDouble) \
V(PPC_BitcastDoubleToInt64) \
V(PPC_LoadWordS8) \
V(PPC_LoadWordU8) \
V(PPC_LoadWordS16) \
V(PPC_LoadWordU16) \
V(PPC_LoadWordS32) \
V(PPC_LoadWordU32) \
V(PPC_LoadByteRev32) \
V(PPC_LoadWord64) \
V(PPC_LoadByteRev64) \
V(PPC_LoadFloat32) \
V(PPC_LoadDouble) \
V(PPC_LoadSimd128) \
V(PPC_LoadReverseSimd128RR) \
V(PPC_StoreWord8) \
V(PPC_StoreWord16) \
V(PPC_StoreWord32) \
V(PPC_StoreByteRev32) \
V(PPC_StoreWord64) \
V(PPC_StoreByteRev64) \
V(PPC_StoreFloat32) \
V(PPC_StoreDouble) \
V(PPC_StoreSimd128) \
V(PPC_ByteRev32) \
V(PPC_ByteRev64) \
V(PPC_AtomicExchangeUint8) \
V(PPC_AtomicExchangeUint16) \
V(PPC_AtomicExchangeWord32) \
V(PPC_AtomicExchangeWord64) \
V(PPC_AtomicCompareExchangeUint8) \
V(PPC_AtomicCompareExchangeUint16) \
V(PPC_AtomicCompareExchangeWord32) \
V(PPC_AtomicCompareExchangeWord64) \
V(PPC_AtomicAddUint8) \
V(PPC_AtomicAddUint16) \
V(PPC_AtomicAddUint32) \
V(PPC_AtomicAddUint64) \
V(PPC_AtomicAddInt8) \
V(PPC_AtomicAddInt16) \
V(PPC_AtomicAddInt32) \
V(PPC_AtomicAddInt64) \
V(PPC_AtomicSubUint8) \
V(PPC_AtomicSubUint16) \
V(PPC_AtomicSubUint32) \
V(PPC_AtomicSubUint64) \
V(PPC_AtomicSubInt8) \
V(PPC_AtomicSubInt16) \
V(PPC_AtomicSubInt32) \
V(PPC_AtomicSubInt64) \
V(PPC_AtomicAndUint8) \
V(PPC_AtomicAndUint16) \
V(PPC_AtomicAndUint32) \
V(PPC_AtomicAndUint64) \
V(PPC_AtomicAndInt8) \
V(PPC_AtomicAndInt16) \
V(PPC_AtomicAndInt32) \
V(PPC_AtomicAndInt64) \
V(PPC_AtomicOrUint8) \
V(PPC_AtomicOrUint16) \
V(PPC_AtomicOrUint32) \
V(PPC_AtomicOrUint64) \
V(PPC_AtomicOrInt8) \
V(PPC_AtomicOrInt16) \
V(PPC_AtomicOrInt32) \
V(PPC_AtomicOrInt64) \
V(PPC_AtomicXorUint8) \
V(PPC_AtomicXorUint16) \
V(PPC_AtomicXorUint32) \
V(PPC_AtomicXorUint64) \
V(PPC_AtomicXorInt8) \
V(PPC_AtomicXorInt16) \
V(PPC_AtomicXorInt32) \
V(PPC_AtomicXorInt64) \
V(PPC_F64x2Splat) \
V(PPC_F64x2ExtractLane) \
V(PPC_F64x2ReplaceLane) \
V(PPC_F64x2Add) \
V(PPC_F64x2Sub) \
V(PPC_F64x2Mul) \
V(PPC_F64x2Eq) \
V(PPC_F64x2Ne) \
V(PPC_F64x2Le) \
V(PPC_F64x2Lt) \
V(PPC_F64x2Abs) \
V(PPC_F64x2Neg) \
V(PPC_F64x2Sqrt) \
V(PPC_F64x2Qfma) \
V(PPC_F64x2Qfms) \
V(PPC_F64x2Div) \
V(PPC_F64x2Min) \
V(PPC_F64x2Max) \
V(PPC_F64x2Ceil) \
V(PPC_F64x2Floor) \
V(PPC_F64x2Trunc) \
V(PPC_F64x2Pmin) \
V(PPC_F64x2Pmax) \
V(PPC_F64x2ConvertLowI32x4S) \
V(PPC_F64x2ConvertLowI32x4U) \
V(PPC_F64x2PromoteLowF32x4) \
V(PPC_F32x4Splat) \
V(PPC_F32x4ExtractLane) \
V(PPC_F32x4ReplaceLane) \
V(PPC_F32x4Add) \
V(PPC_F32x4Sub) \
V(PPC_F32x4Mul) \
V(PPC_F32x4Eq) \
V(PPC_F32x4Ne) \
V(PPC_F32x4Lt) \
V(PPC_F32x4Le) \
V(PPC_F32x4Abs) \
V(PPC_F32x4Neg) \
V(PPC_F32x4RecipApprox) \
V(PPC_F32x4RecipSqrtApprox) \
V(PPC_F32x4Sqrt) \
V(PPC_F32x4SConvertI32x4) \
V(PPC_F32x4UConvertI32x4) \
V(PPC_F32x4Div) \
V(PPC_F32x4Min) \
V(PPC_F32x4Max) \
V(PPC_F32x4Ceil) \
V(PPC_F32x4Floor) \
V(PPC_F32x4Trunc) \
V(PPC_F32x4Pmin) \
V(PPC_F32x4Pmax) \
V(PPC_F32x4Qfma) \
V(PPC_F32x4Qfms) \
V(PPC_F32x4DemoteF64x2Zero) \
V(PPC_I64x2Splat) \
V(PPC_I64x2ExtractLane) \
V(PPC_I64x2ReplaceLane) \
V(PPC_I64x2Add) \
V(PPC_I64x2Sub) \
V(PPC_I64x2Mul) \
V(PPC_I64x2Eq) \
V(PPC_I64x2Ne) \
V(PPC_I64x2GtS) \
V(PPC_I64x2GeS) \
V(PPC_I64x2Shl) \
V(PPC_I64x2ShrS) \
V(PPC_I64x2ShrU) \
V(PPC_I64x2Neg) \
V(PPC_I64x2BitMask) \
V(PPC_I64x2SConvertI32x4Low) \
V(PPC_I64x2SConvertI32x4High) \
V(PPC_I64x2UConvertI32x4Low) \
V(PPC_I64x2UConvertI32x4High) \
V(PPC_I64x2ExtMulLowI32x4S) \
V(PPC_I64x2ExtMulHighI32x4S) \
V(PPC_I64x2ExtMulLowI32x4U) \
V(PPC_I64x2ExtMulHighI32x4U) \
V(PPC_I64x2Abs) \
V(PPC_I32x4Splat) \
V(PPC_I32x4ExtractLane) \
V(PPC_I32x4ReplaceLane) \
V(PPC_I32x4Add) \
V(PPC_I32x4Sub) \
V(PPC_I32x4Mul) \
V(PPC_I32x4MinS) \
V(PPC_I32x4MinU) \
V(PPC_I32x4MaxS) \
V(PPC_I32x4MaxU) \
V(PPC_I32x4Eq) \
V(PPC_I32x4Ne) \
V(PPC_I32x4GtS) \
V(PPC_I32x4GeS) \
V(PPC_I32x4GtU) \
V(PPC_I32x4GeU) \
V(PPC_I32x4Shl) \
V(PPC_I32x4ShrS) \
V(PPC_I32x4ShrU) \
V(PPC_I32x4Neg) \
V(PPC_I32x4Abs) \
V(PPC_I32x4SConvertF32x4) \
V(PPC_I32x4UConvertF32x4) \
V(PPC_I32x4SConvertI16x8Low) \
V(PPC_I32x4SConvertI16x8High) \
V(PPC_I32x4UConvertI16x8Low) \
V(PPC_I32x4UConvertI16x8High) \
V(PPC_I32x4BitMask) \
V(PPC_I32x4DotI16x8S) \
V(PPC_I32x4ExtAddPairwiseI16x8S) \
V(PPC_I32x4ExtAddPairwiseI16x8U) \
V(PPC_I32x4ExtMulLowI16x8S) \
V(PPC_I32x4ExtMulHighI16x8S) \
V(PPC_I32x4ExtMulLowI16x8U) \
V(PPC_I32x4ExtMulHighI16x8U) \
V(PPC_I32x4TruncSatF64x2SZero) \
V(PPC_I32x4TruncSatF64x2UZero) \
V(PPC_I16x8Splat) \
V(PPC_I16x8ExtractLaneU) \
V(PPC_I16x8ExtractLaneS) \
V(PPC_I16x8ReplaceLane) \
V(PPC_I16x8Add) \
V(PPC_I16x8Sub) \
V(PPC_I16x8Mul) \
V(PPC_I16x8MinS) \
V(PPC_I16x8MinU) \
V(PPC_I16x8MaxS) \
V(PPC_I16x8MaxU) \
V(PPC_I16x8Eq) \
V(PPC_I16x8Ne) \
V(PPC_I16x8GtS) \
V(PPC_I16x8GeS) \
V(PPC_I16x8GtU) \
V(PPC_I16x8GeU) \
V(PPC_I16x8Shl) \
V(PPC_I16x8ShrS) \
V(PPC_I16x8ShrU) \
V(PPC_I16x8Neg) \
V(PPC_I16x8Abs) \
V(PPC_I16x8SConvertI32x4) \
V(PPC_I16x8UConvertI32x4) \
V(PPC_I16x8SConvertI8x16Low) \
V(PPC_I16x8SConvertI8x16High) \
V(PPC_I16x8UConvertI8x16Low) \
V(PPC_I16x8UConvertI8x16High) \
V(PPC_I16x8AddSatS) \
V(PPC_I16x8SubSatS) \
V(PPC_I16x8AddSatU) \
V(PPC_I16x8SubSatU) \
V(PPC_I16x8RoundingAverageU) \
V(PPC_I16x8BitMask) \
V(PPC_I16x8ExtAddPairwiseI8x16S) \
V(PPC_I16x8ExtAddPairwiseI8x16U) \
V(PPC_I16x8Q15MulRSatS) \
V(PPC_I16x8ExtMulLowI8x16S) \
V(PPC_I16x8ExtMulHighI8x16S) \
V(PPC_I16x8ExtMulLowI8x16U) \
V(PPC_I16x8ExtMulHighI8x16U) \
V(PPC_I8x16Splat) \
V(PPC_I8x16ExtractLaneU) \
V(PPC_I8x16ExtractLaneS) \
V(PPC_I8x16ReplaceLane) \
V(PPC_I8x16Add) \
V(PPC_I8x16Sub) \
V(PPC_I8x16MinS) \
V(PPC_I8x16MinU) \
V(PPC_I8x16MaxS) \
V(PPC_I8x16MaxU) \
V(PPC_I8x16Eq) \
V(PPC_I8x16Ne) \
V(PPC_I8x16GtS) \
V(PPC_I8x16GeS) \
V(PPC_I8x16GtU) \
V(PPC_I8x16GeU) \
V(PPC_I8x16Shl) \
V(PPC_I8x16ShrS) \
V(PPC_I8x16ShrU) \
V(PPC_I8x16Neg) \
V(PPC_I8x16Abs) \
V(PPC_I8x16SConvertI16x8) \
V(PPC_I8x16UConvertI16x8) \
V(PPC_I8x16AddSatS) \
V(PPC_I8x16SubSatS) \
V(PPC_I8x16AddSatU) \
V(PPC_I8x16SubSatU) \
V(PPC_I8x16RoundingAverageU) \
V(PPC_I8x16Shuffle) \
V(PPC_I8x16Swizzle) \
V(PPC_I8x16BitMask) \
V(PPC_I8x16Popcnt) \
V(PPC_I64x2AllTrue) \
V(PPC_I32x4AllTrue) \
V(PPC_I16x8AllTrue) \
V(PPC_I8x16AllTrue) \
V(PPC_V128AnyTrue) \
V(PPC_S128And) \
V(PPC_S128Or) \
V(PPC_S128Xor) \
V(PPC_S128Const) \
V(PPC_S128Zero) \
V(PPC_S128AllOnes) \
V(PPC_S128Not) \
V(PPC_S128Select) \
V(PPC_S128AndNot) \
V(PPC_S128Load8Splat) \
V(PPC_S128Load16Splat) \
V(PPC_S128Load32Splat) \
V(PPC_S128Load64Splat) \
V(PPC_S128Load8x8S) \
V(PPC_S128Load8x8U) \
V(PPC_S128Load16x4S) \
V(PPC_S128Load16x4U) \
V(PPC_S128Load32x2S) \
V(PPC_S128Load32x2U) \
V(PPC_S128Load32Zero) \
V(PPC_S128Load64Zero) \
V(PPC_S128Load8Lane) \
V(PPC_S128Load16Lane) \
V(PPC_S128Load32Lane) \
V(PPC_S128Load64Lane) \
V(PPC_S128Store8Lane) \
V(PPC_S128Store16Lane) \
V(PPC_S128Store32Lane) \
V(PPC_S128Store64Lane) \
V(PPC_StoreCompressTagged) \
V(PPC_LoadDecompressTaggedSigned) \
V(PPC_LoadDecompressTaggedPointer) \
#define TARGET_ARCH_OPCODE_LIST(V) \
V(PPC_Peek) \
V(PPC_Sync) \
V(PPC_And) \
V(PPC_AndComplement) \
V(PPC_Or) \
V(PPC_OrComplement) \
V(PPC_Xor) \
V(PPC_ShiftLeft32) \
V(PPC_ShiftLeft64) \
V(PPC_ShiftLeftPair) \
V(PPC_ShiftRight32) \
V(PPC_ShiftRight64) \
V(PPC_ShiftRightPair) \
V(PPC_ShiftRightAlg32) \
V(PPC_ShiftRightAlg64) \
V(PPC_ShiftRightAlgPair) \
V(PPC_RotRight32) \
V(PPC_RotRight64) \
V(PPC_Not) \
V(PPC_RotLeftAndMask32) \
V(PPC_RotLeftAndClear64) \
V(PPC_RotLeftAndClearLeft64) \
V(PPC_RotLeftAndClearRight64) \
V(PPC_Add32) \
V(PPC_Add64) \
V(PPC_AddWithOverflow32) \
V(PPC_AddPair) \
V(PPC_AddDouble) \
V(PPC_Sub) \
V(PPC_SubWithOverflow32) \
V(PPC_SubPair) \
V(PPC_SubDouble) \
V(PPC_Mul32) \
V(PPC_Mul32WithHigh32) \
V(PPC_Mul64) \
V(PPC_MulHigh32) \
V(PPC_MulHighU32) \
V(PPC_MulPair) \
V(PPC_MulDouble) \
V(PPC_Div32) \
V(PPC_Div64) \
V(PPC_DivU32) \
V(PPC_DivU64) \
V(PPC_DivDouble) \
V(PPC_Mod32) \
V(PPC_Mod64) \
V(PPC_ModU32) \
V(PPC_ModU64) \
V(PPC_ModDouble) \
V(PPC_Neg) \
V(PPC_NegDouble) \
V(PPC_SqrtDouble) \
V(PPC_FloorDouble) \
V(PPC_CeilDouble) \
V(PPC_TruncateDouble) \
V(PPC_RoundDouble) \
V(PPC_MaxDouble) \
V(PPC_MinDouble) \
V(PPC_AbsDouble) \
V(PPC_Cntlz32) \
V(PPC_Cntlz64) \
V(PPC_Popcnt32) \
V(PPC_Popcnt64) \
V(PPC_Cmp32) \
V(PPC_Cmp64) \
V(PPC_CmpDouble) \
V(PPC_Tst32) \
V(PPC_Tst64) \
V(PPC_Push) \
V(PPC_PushFrame) \
V(PPC_StoreToStackSlot) \
V(PPC_ExtendSignWord8) \
V(PPC_ExtendSignWord16) \
V(PPC_ExtendSignWord32) \
V(PPC_Uint32ToUint64) \
V(PPC_Int64ToInt32) \
V(PPC_Int64ToFloat32) \
V(PPC_Int64ToDouble) \
V(PPC_Uint64ToFloat32) \
V(PPC_Uint64ToDouble) \
V(PPC_Int32ToFloat32) \
V(PPC_Int32ToDouble) \
V(PPC_Uint32ToFloat32) \
V(PPC_Float32ToInt32) \
V(PPC_Float32ToUint32) \
V(PPC_Uint32ToDouble) \
V(PPC_Float32ToDouble) \
V(PPC_Float64SilenceNaN) \
V(PPC_DoubleToInt32) \
V(PPC_DoubleToUint32) \
V(PPC_DoubleToInt64) \
V(PPC_DoubleToUint64) \
V(PPC_DoubleToFloat32) \
V(PPC_DoubleExtractLowWord32) \
V(PPC_DoubleExtractHighWord32) \
V(PPC_DoubleInsertLowWord32) \
V(PPC_DoubleInsertHighWord32) \
V(PPC_DoubleConstruct) \
V(PPC_BitcastInt32ToFloat32) \
V(PPC_BitcastFloat32ToInt32) \
V(PPC_BitcastInt64ToDouble) \
V(PPC_BitcastDoubleToInt64) \
V(PPC_LoadWordS8) \
V(PPC_LoadWordU8) \
V(PPC_LoadWordS16) \
V(PPC_LoadWordU16) \
V(PPC_LoadWordS32) \
V(PPC_LoadWordU32) \
V(PPC_LoadByteRev32) \
V(PPC_LoadWord64) \
V(PPC_LoadByteRev64) \
V(PPC_LoadFloat32) \
V(PPC_LoadDouble) \
V(PPC_LoadSimd128) \
V(PPC_LoadReverseSimd128RR) \
V(PPC_StoreWord8) \
V(PPC_StoreWord16) \
V(PPC_StoreWord32) \
V(PPC_StoreByteRev32) \
V(PPC_StoreWord64) \
V(PPC_StoreByteRev64) \
V(PPC_StoreFloat32) \
V(PPC_StoreDouble) \
V(PPC_StoreSimd128) \
V(PPC_ByteRev32) \
V(PPC_ByteRev64) \
V(PPC_AtomicExchangeUint8) \
V(PPC_AtomicExchangeUint16) \
V(PPC_AtomicExchangeWord32) \
V(PPC_AtomicExchangeWord64) \
V(PPC_AtomicCompareExchangeUint8) \
V(PPC_AtomicCompareExchangeUint16) \
V(PPC_AtomicCompareExchangeWord32) \
V(PPC_AtomicCompareExchangeWord64) \
V(PPC_AtomicAddUint8) \
V(PPC_AtomicAddUint16) \
V(PPC_AtomicAddUint32) \
V(PPC_AtomicAddUint64) \
V(PPC_AtomicAddInt8) \
V(PPC_AtomicAddInt16) \
V(PPC_AtomicAddInt32) \
V(PPC_AtomicAddInt64) \
V(PPC_AtomicSubUint8) \
V(PPC_AtomicSubUint16) \
V(PPC_AtomicSubUint32) \
V(PPC_AtomicSubUint64) \
V(PPC_AtomicSubInt8) \
V(PPC_AtomicSubInt16) \
V(PPC_AtomicSubInt32) \
V(PPC_AtomicSubInt64) \
V(PPC_AtomicAndUint8) \
V(PPC_AtomicAndUint16) \
V(PPC_AtomicAndUint32) \
V(PPC_AtomicAndUint64) \
V(PPC_AtomicAndInt8) \
V(PPC_AtomicAndInt16) \
V(PPC_AtomicAndInt32) \
V(PPC_AtomicAndInt64) \
V(PPC_AtomicOrUint8) \
V(PPC_AtomicOrUint16) \
V(PPC_AtomicOrUint32) \
V(PPC_AtomicOrUint64) \
V(PPC_AtomicOrInt8) \
V(PPC_AtomicOrInt16) \
V(PPC_AtomicOrInt32) \
V(PPC_AtomicOrInt64) \
V(PPC_AtomicXorUint8) \
V(PPC_AtomicXorUint16) \
V(PPC_AtomicXorUint32) \
V(PPC_AtomicXorUint64) \
V(PPC_AtomicXorInt8) \
V(PPC_AtomicXorInt16) \
V(PPC_AtomicXorInt32) \
V(PPC_AtomicXorInt64) \
V(PPC_F64x2Splat) \
V(PPC_F64x2ExtractLane) \
V(PPC_F64x2ReplaceLane) \
V(PPC_F64x2Add) \
V(PPC_F64x2Sub) \
V(PPC_F64x2Mul) \
V(PPC_F64x2Eq) \
V(PPC_F64x2Ne) \
V(PPC_F64x2Le) \
V(PPC_F64x2Lt) \
V(PPC_F64x2Abs) \
V(PPC_F64x2Neg) \
V(PPC_F64x2Sqrt) \
V(PPC_F64x2Qfma) \
V(PPC_F64x2Qfms) \
V(PPC_F64x2Div) \
V(PPC_F64x2Min) \
V(PPC_F64x2Max) \
V(PPC_F64x2Ceil) \
V(PPC_F64x2Floor) \
V(PPC_F64x2Trunc) \
V(PPC_F64x2Pmin) \
V(PPC_F64x2Pmax) \
V(PPC_F64x2ConvertLowI32x4S) \
V(PPC_F64x2ConvertLowI32x4U) \
V(PPC_F64x2PromoteLowF32x4) \
V(PPC_F32x4Splat) \
V(PPC_F32x4ExtractLane) \
V(PPC_F32x4ReplaceLane) \
V(PPC_F32x4Add) \
V(PPC_F32x4Sub) \
V(PPC_F32x4Mul) \
V(PPC_F32x4Eq) \
V(PPC_F32x4Ne) \
V(PPC_F32x4Lt) \
V(PPC_F32x4Le) \
V(PPC_F32x4Abs) \
V(PPC_F32x4Neg) \
V(PPC_F32x4RecipApprox) \
V(PPC_F32x4RecipSqrtApprox) \
V(PPC_F32x4Sqrt) \
V(PPC_F32x4SConvertI32x4) \
V(PPC_F32x4UConvertI32x4) \
V(PPC_F32x4Div) \
V(PPC_F32x4Min) \
V(PPC_F32x4Max) \
V(PPC_F32x4Ceil) \
V(PPC_F32x4Floor) \
V(PPC_F32x4Trunc) \
V(PPC_F32x4Pmin) \
V(PPC_F32x4Pmax) \
V(PPC_F32x4Qfma) \
V(PPC_F32x4Qfms) \
V(PPC_F32x4DemoteF64x2Zero) \
V(PPC_I64x2Splat) \
V(PPC_I64x2ExtractLane) \
V(PPC_I64x2ReplaceLane) \
V(PPC_I64x2Add) \
V(PPC_I64x2Sub) \
V(PPC_I64x2Mul) \
V(PPC_I64x2Eq) \
V(PPC_I64x2Ne) \
V(PPC_I64x2GtS) \
V(PPC_I64x2GeS) \
V(PPC_I64x2Shl) \
V(PPC_I64x2ShrS) \
V(PPC_I64x2ShrU) \
V(PPC_I64x2Neg) \
V(PPC_I64x2BitMask) \
V(PPC_I64x2SConvertI32x4Low) \
V(PPC_I64x2SConvertI32x4High) \
V(PPC_I64x2UConvertI32x4Low) \
V(PPC_I64x2UConvertI32x4High) \
V(PPC_I64x2ExtMulLowI32x4S) \
V(PPC_I64x2ExtMulHighI32x4S) \
V(PPC_I64x2ExtMulLowI32x4U) \
V(PPC_I64x2ExtMulHighI32x4U) \
V(PPC_I64x2Abs) \
V(PPC_I32x4Splat) \
V(PPC_I32x4ExtractLane) \
V(PPC_I32x4ReplaceLane) \
V(PPC_I32x4Add) \
V(PPC_I32x4Sub) \
V(PPC_I32x4Mul) \
V(PPC_I32x4MinS) \
V(PPC_I32x4MinU) \
V(PPC_I32x4MaxS) \
V(PPC_I32x4MaxU) \
V(PPC_I32x4Eq) \
V(PPC_I32x4Ne) \
V(PPC_I32x4GtS) \
V(PPC_I32x4GeS) \
V(PPC_I32x4GtU) \
V(PPC_I32x4GeU) \
V(PPC_I32x4Shl) \
V(PPC_I32x4ShrS) \
V(PPC_I32x4ShrU) \
V(PPC_I32x4Neg) \
V(PPC_I32x4Abs) \
V(PPC_I32x4SConvertF32x4) \
V(PPC_I32x4UConvertF32x4) \
V(PPC_I32x4SConvertI16x8Low) \
V(PPC_I32x4SConvertI16x8High) \
V(PPC_I32x4UConvertI16x8Low) \
V(PPC_I32x4UConvertI16x8High) \
V(PPC_I32x4BitMask) \
V(PPC_I32x4DotI16x8S) \
V(PPC_I32x4ExtAddPairwiseI16x8S) \
V(PPC_I32x4ExtAddPairwiseI16x8U) \
V(PPC_I32x4ExtMulLowI16x8S) \
V(PPC_I32x4ExtMulHighI16x8S) \
V(PPC_I32x4ExtMulLowI16x8U) \
V(PPC_I32x4ExtMulHighI16x8U) \
V(PPC_I32x4TruncSatF64x2SZero) \
V(PPC_I32x4TruncSatF64x2UZero) \
V(PPC_I16x8Splat) \
V(PPC_I16x8ExtractLaneU) \
V(PPC_I16x8ExtractLaneS) \
V(PPC_I16x8ReplaceLane) \
V(PPC_I16x8Add) \
V(PPC_I16x8Sub) \
V(PPC_I16x8Mul) \
V(PPC_I16x8MinS) \
V(PPC_I16x8MinU) \
V(PPC_I16x8MaxS) \
V(PPC_I16x8MaxU) \
V(PPC_I16x8Eq) \
V(PPC_I16x8Ne) \
V(PPC_I16x8GtS) \
V(PPC_I16x8GeS) \
V(PPC_I16x8GtU) \
V(PPC_I16x8GeU) \
V(PPC_I16x8Shl) \
V(PPC_I16x8ShrS) \
V(PPC_I16x8ShrU) \
V(PPC_I16x8Neg) \
V(PPC_I16x8Abs) \
V(PPC_I16x8SConvertI32x4) \
V(PPC_I16x8UConvertI32x4) \
V(PPC_I16x8SConvertI8x16Low) \
V(PPC_I16x8SConvertI8x16High) \
V(PPC_I16x8UConvertI8x16Low) \
V(PPC_I16x8UConvertI8x16High) \
V(PPC_I16x8AddSatS) \
V(PPC_I16x8SubSatS) \
V(PPC_I16x8AddSatU) \
V(PPC_I16x8SubSatU) \
V(PPC_I16x8RoundingAverageU) \
V(PPC_I16x8BitMask) \
V(PPC_I16x8ExtAddPairwiseI8x16S) \
V(PPC_I16x8ExtAddPairwiseI8x16U) \
V(PPC_I16x8Q15MulRSatS) \
V(PPC_I16x8ExtMulLowI8x16S) \
V(PPC_I16x8ExtMulHighI8x16S) \
V(PPC_I16x8ExtMulLowI8x16U) \
V(PPC_I16x8ExtMulHighI8x16U) \
V(PPC_I8x16Splat) \
V(PPC_I8x16ExtractLaneU) \
V(PPC_I8x16ExtractLaneS) \
V(PPC_I8x16ReplaceLane) \
V(PPC_I8x16Add) \
V(PPC_I8x16Sub) \
V(PPC_I8x16MinS) \
V(PPC_I8x16MinU) \
V(PPC_I8x16MaxS) \
V(PPC_I8x16MaxU) \
V(PPC_I8x16Eq) \
V(PPC_I8x16Ne) \
V(PPC_I8x16GtS) \
V(PPC_I8x16GeS) \
V(PPC_I8x16GtU) \
V(PPC_I8x16GeU) \
V(PPC_I8x16Shl) \
V(PPC_I8x16ShrS) \
V(PPC_I8x16ShrU) \
V(PPC_I8x16Neg) \
V(PPC_I8x16Abs) \
V(PPC_I8x16SConvertI16x8) \
V(PPC_I8x16UConvertI16x8) \
V(PPC_I8x16AddSatS) \
V(PPC_I8x16SubSatS) \
V(PPC_I8x16AddSatU) \
V(PPC_I8x16SubSatU) \
V(PPC_I8x16RoundingAverageU) \
V(PPC_I8x16Shuffle) \
V(PPC_I8x16Swizzle) \
V(PPC_I8x16BitMask) \
V(PPC_I8x16Popcnt) \
V(PPC_I64x2AllTrue) \
V(PPC_I32x4AllTrue) \
V(PPC_I16x8AllTrue) \
V(PPC_I8x16AllTrue) \
V(PPC_V128AnyTrue) \
V(PPC_S128And) \
V(PPC_S128Or) \
V(PPC_S128Xor) \
V(PPC_S128Const) \
V(PPC_S128Zero) \
V(PPC_S128AllOnes) \
V(PPC_S128Not) \
V(PPC_S128Select) \
V(PPC_S128AndNot) \
V(PPC_S128Load8Splat) \
V(PPC_S128Load16Splat) \
V(PPC_S128Load32Splat) \
V(PPC_S128Load64Splat) \
V(PPC_S128Load8x8S) \
V(PPC_S128Load8x8U) \
V(PPC_S128Load16x4S) \
V(PPC_S128Load16x4U) \
V(PPC_S128Load32x2S) \
V(PPC_S128Load32x2U) \
V(PPC_S128Load32Zero) \
V(PPC_S128Load64Zero) \
V(PPC_S128Load8Lane) \
V(PPC_S128Load16Lane) \
V(PPC_S128Load32Lane) \
V(PPC_S128Load64Lane) \
V(PPC_S128Store8Lane) \
V(PPC_S128Store16Lane) \
V(PPC_S128Store32Lane) \
V(PPC_S128Store64Lane) \
V(PPC_StoreCompressTagged) \
V(PPC_LoadDecompressTaggedSigned) \
V(PPC_LoadDecompressTaggedPointer) \
V(PPC_LoadDecompressAnyTagged)
// Addressing modes represent the "shape" of inputs to an instruction.
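The comment above introduces the TARGET_ADDRESSING_MODE_LIST macro that follows it in each of these headers. A rough sketch of how such a list typically becomes an enum; the MRI/MRR mode names are examples rather than this file's exact set:

#define TARGET_ADDRESSING_MODE_LIST(V) \
  V(MRI) /* [register + immediate] */  \
  V(MRR) /* [register + register]  */

enum AddressingMode {
  kMode_None = 0,
#define DECLARE_MODE(Name) kMode_##Name,
  TARGET_ADDRESSING_MODE_LIST(DECLARE_MODE)
#undef DECLARE_MODE
};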

View File

@@ -9,400 +9,396 @@ namespace v8 {
namespace internal {
namespace compiler {
// Opcodes that support a MemoryAccessMode.
#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
// RISC-V-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
V(RiscvAdd32) \
V(RiscvAdd64) \
V(RiscvAddOvf64) \
V(RiscvSub32) \
V(RiscvSub64) \
V(RiscvSubOvf64) \
V(RiscvMul32) \
V(RiscvMulOvf32) \
V(RiscvMulHigh32) \
V(RiscvMulHigh64) \
V(RiscvMulHighU32) \
V(RiscvMul64) \
V(RiscvDiv32) \
V(RiscvDiv64) \
V(RiscvDivU32) \
V(RiscvDivU64) \
V(RiscvMod32) \
V(RiscvMod64) \
V(RiscvModU32) \
V(RiscvModU64) \
V(RiscvAnd) \
V(RiscvAnd32) \
V(RiscvOr) \
V(RiscvOr32) \
V(RiscvNor) \
V(RiscvNor32) \
V(RiscvXor) \
V(RiscvXor32) \
V(RiscvClz32) \
V(RiscvShl32) \
V(RiscvShr32) \
V(RiscvSar32) \
V(RiscvZeroExtendWord) \
V(RiscvSignExtendWord) \
V(RiscvClz64) \
V(RiscvCtz32) \
V(RiscvCtz64) \
V(RiscvPopcnt32) \
V(RiscvPopcnt64) \
V(RiscvShl64) \
V(RiscvShr64) \
V(RiscvSar64) \
V(RiscvRor32) \
V(RiscvRor64) \
V(RiscvMov) \
V(RiscvTst) \
V(RiscvCmp) \
V(RiscvCmpZero) \
V(RiscvCmpS) \
V(RiscvAddS) \
V(RiscvSubS) \
V(RiscvMulS) \
V(RiscvDivS) \
V(RiscvModS) \
V(RiscvAbsS) \
V(RiscvNegS) \
V(RiscvSqrtS) \
V(RiscvMaxS) \
V(RiscvMinS) \
V(RiscvCmpD) \
V(RiscvAddD) \
V(RiscvSubD) \
V(RiscvMulD) \
V(RiscvDivD) \
V(RiscvModD) \
V(RiscvAbsD) \
V(RiscvNegD) \
V(RiscvSqrtD) \
V(RiscvMaxD) \
V(RiscvMinD) \
V(RiscvFloat64RoundDown) \
V(RiscvFloat64RoundTruncate) \
V(RiscvFloat64RoundUp) \
V(RiscvFloat64RoundTiesEven) \
V(RiscvFloat32RoundDown) \
V(RiscvFloat32RoundTruncate) \
V(RiscvFloat32RoundUp) \
V(RiscvFloat32RoundTiesEven) \
V(RiscvCvtSD) \
V(RiscvCvtDS) \
V(RiscvTruncWD) \
V(RiscvRoundWD) \
V(RiscvFloorWD) \
V(RiscvCeilWD) \
V(RiscvTruncWS) \
V(RiscvRoundWS) \
V(RiscvFloorWS) \
V(RiscvCeilWS) \
V(RiscvTruncLS) \
V(RiscvTruncLD) \
V(RiscvTruncUwD) \
V(RiscvTruncUwS) \
V(RiscvTruncUlS) \
V(RiscvTruncUlD) \
V(RiscvCvtDW) \
V(RiscvCvtSL) \
V(RiscvCvtSW) \
V(RiscvCvtSUw) \
V(RiscvCvtSUl) \
V(RiscvCvtDL) \
V(RiscvCvtDUw) \
V(RiscvCvtDUl) \
V(RiscvLb) \
V(RiscvLbu) \
V(RiscvSb) \
V(RiscvLh) \
V(RiscvUlh) \
V(RiscvLhu) \
V(RiscvUlhu) \
V(RiscvSh) \
V(RiscvUsh) \
V(RiscvLd) \
V(RiscvUld) \
V(RiscvLw) \
V(RiscvUlw) \
V(RiscvLwu) \
V(RiscvUlwu) \
V(RiscvSw) \
V(RiscvUsw) \
V(RiscvSd) \
V(RiscvUsd) \
V(RiscvLoadFloat) \
V(RiscvULoadFloat) \
V(RiscvStoreFloat) \
V(RiscvUStoreFloat) \
V(RiscvLoadDouble) \
V(RiscvULoadDouble) \
V(RiscvStoreDouble) \
V(RiscvUStoreDouble) \
V(RiscvBitcastDL) \
V(RiscvBitcastLD) \
V(RiscvBitcastInt32ToFloat32) \
V(RiscvBitcastFloat32ToInt32) \
V(RiscvFloat64ExtractLowWord32) \
V(RiscvFloat64ExtractHighWord32) \
V(RiscvFloat64InsertLowWord32) \
V(RiscvFloat64InsertHighWord32) \
V(RiscvFloat32Max) \
V(RiscvFloat64Max) \
V(RiscvFloat32Min) \
V(RiscvFloat64Min) \
V(RiscvFloat64SilenceNaN) \
V(RiscvPush) \
V(RiscvPeek) \
V(RiscvByteSwap64) \
V(RiscvByteSwap32) \
V(RiscvStoreToStackSlot) \
V(RiscvStackClaim) \
V(RiscvSignExtendByte) \
V(RiscvSignExtendShort) \
V(RiscvSync) \
V(RiscvAssertEqual) \
V(RiscvS128Const) \
V(RiscvS128Zero) \
V(RiscvS128AllOnes) \
V(RiscvI32x4Splat) \
V(RiscvI32x4ExtractLane) \
V(RiscvI32x4ReplaceLane) \
V(RiscvI32x4Add) \
V(RiscvI32x4Sub) \
V(RiscvF64x2Abs) \
V(RiscvF64x2Neg) \
V(RiscvF32x4Splat) \
V(RiscvF32x4ExtractLane) \
V(RiscvF32x4ReplaceLane) \
V(RiscvF32x4SConvertI32x4) \
V(RiscvF32x4UConvertI32x4) \
V(RiscvI64x2SConvertI32x4Low) \
V(RiscvI64x2SConvertI32x4High) \
V(RiscvI64x2UConvertI32x4Low) \
V(RiscvI64x2UConvertI32x4High) \
V(RiscvI32x4Mul) \
V(RiscvI32x4MaxS) \
V(RiscvI32x4MinS) \
V(RiscvI32x4Eq) \
V(RiscvI32x4Ne) \
V(RiscvI32x4Shl) \
V(RiscvI32x4ShrS) \
V(RiscvI32x4ShrU) \
V(RiscvI32x4MaxU) \
V(RiscvI32x4MinU) \
V(RiscvI64x2GtS) \
V(RiscvI64x2GeS) \
V(RiscvI64x2Eq) \
V(RiscvI64x2Ne) \
V(RiscvF64x2Sqrt) \
V(RiscvF64x2Add) \
V(RiscvF64x2Sub) \
V(RiscvF64x2Mul) \
V(RiscvF64x2Div) \
V(RiscvF64x2Min) \
V(RiscvF64x2Max) \
V(RiscvF64x2ConvertLowI32x4S) \
V(RiscvF64x2ConvertLowI32x4U) \
V(RiscvF64x2PromoteLowF32x4) \
V(RiscvF64x2Eq) \
V(RiscvF64x2Ne) \
V(RiscvF64x2Lt) \
V(RiscvF64x2Le) \
V(RiscvF64x2Splat) \
V(RiscvF64x2ExtractLane) \
V(RiscvF64x2ReplaceLane) \
V(RiscvF64x2Pmin) \
V(RiscvF64x2Pmax) \
V(RiscvF64x2Ceil) \
V(RiscvF64x2Floor) \
V(RiscvF64x2Trunc) \
V(RiscvF64x2NearestInt) \
V(RiscvI64x2Splat) \
V(RiscvI64x2ExtractLane) \
V(RiscvI64x2ReplaceLane) \
V(RiscvI64x2Add) \
V(RiscvI64x2Sub) \
V(RiscvI64x2Mul) \
V(RiscvI64x2Abs) \
V(RiscvI64x2Neg) \
V(RiscvI64x2Shl) \
V(RiscvI64x2ShrS) \
V(RiscvI64x2ShrU) \
V(RiscvI64x2BitMask) \
V(RiscvF32x4Abs) \
V(RiscvF32x4Neg) \
V(RiscvF32x4Sqrt) \
V(RiscvF32x4RecipApprox) \
V(RiscvF32x4RecipSqrtApprox) \
V(RiscvF32x4Add) \
V(RiscvF32x4Sub) \
V(RiscvF32x4Mul) \
V(RiscvF32x4Div) \
V(RiscvF32x4Max) \
V(RiscvF32x4Min) \
V(RiscvF32x4Eq) \
V(RiscvF32x4Ne) \
V(RiscvF32x4Lt) \
V(RiscvF32x4Le) \
V(RiscvF32x4Pmin) \
V(RiscvF32x4Pmax) \
V(RiscvF32x4DemoteF64x2Zero) \
V(RiscvF32x4Ceil) \
V(RiscvF32x4Floor) \
V(RiscvF32x4Trunc) \
V(RiscvF32x4NearestInt) \
V(RiscvI32x4SConvertF32x4) \
V(RiscvI32x4UConvertF32x4) \
V(RiscvI32x4Neg) \
V(RiscvI32x4GtS) \
V(RiscvI32x4GeS) \
V(RiscvI32x4GtU) \
V(RiscvI32x4GeU) \
V(RiscvI32x4Abs) \
V(RiscvI32x4BitMask) \
V(RiscvI32x4DotI16x8S) \
V(RiscvI32x4TruncSatF64x2SZero) \
V(RiscvI32x4TruncSatF64x2UZero) \
V(RiscvI16x8Splat) \
V(RiscvI16x8ExtractLaneU) \
V(RiscvI16x8ExtractLaneS) \
V(RiscvI16x8ReplaceLane) \
V(RiscvI16x8Neg) \
V(RiscvI16x8Shl) \
V(RiscvI16x8ShrS) \
V(RiscvI16x8ShrU) \
V(RiscvI16x8Add) \
V(RiscvI16x8AddSatS) \
V(RiscvI16x8Sub) \
V(RiscvI16x8SubSatS) \
V(RiscvI16x8Mul) \
V(RiscvI16x8MaxS) \
V(RiscvI16x8MinS) \
V(RiscvI16x8Eq) \
V(RiscvI16x8Ne) \
V(RiscvI16x8GtS) \
V(RiscvI16x8GeS) \
V(RiscvI16x8AddSatU) \
V(RiscvI16x8SubSatU) \
V(RiscvI16x8MaxU) \
V(RiscvI16x8MinU) \
V(RiscvI16x8GtU) \
V(RiscvI16x8GeU) \
V(RiscvI16x8RoundingAverageU) \
V(RiscvI16x8Q15MulRSatS) \
V(RiscvI16x8Abs) \
V(RiscvI16x8BitMask) \
V(RiscvI8x16Splat) \
V(RiscvI8x16ExtractLaneU) \
V(RiscvI8x16ExtractLaneS) \
V(RiscvI8x16ReplaceLane) \
V(RiscvI8x16Neg) \
V(RiscvI8x16Shl) \
V(RiscvI8x16ShrS) \
V(RiscvI8x16Add) \
V(RiscvI8x16AddSatS) \
V(RiscvI8x16Sub) \
V(RiscvI8x16SubSatS) \
V(RiscvI8x16MaxS) \
V(RiscvI8x16MinS) \
V(RiscvI8x16Eq) \
V(RiscvI8x16Ne) \
V(RiscvI8x16GtS) \
V(RiscvI8x16GeS) \
V(RiscvI8x16ShrU) \
V(RiscvI8x16AddSatU) \
V(RiscvI8x16SubSatU) \
V(RiscvI8x16MaxU) \
V(RiscvI8x16MinU) \
V(RiscvI8x16GtU) \
V(RiscvI8x16GeU) \
V(RiscvI8x16RoundingAverageU) \
V(RiscvI8x16Abs) \
V(RiscvI8x16BitMask) \
V(RiscvI8x16Popcnt) \
V(RiscvS128And) \
V(RiscvS128Or) \
V(RiscvS128Xor) \
V(RiscvS128Not) \
V(RiscvS128Select) \
V(RiscvS128AndNot) \
V(RiscvI32x4AllTrue) \
V(RiscvI16x8AllTrue) \
V(RiscvV128AnyTrue) \
V(RiscvI8x16AllTrue) \
V(RiscvI64x2AllTrue) \
V(RiscvS32x4InterleaveRight) \
V(RiscvS32x4InterleaveLeft) \
V(RiscvS32x4PackEven) \
V(RiscvS32x4PackOdd) \
V(RiscvS32x4InterleaveEven) \
V(RiscvS32x4InterleaveOdd) \
V(RiscvS32x4Shuffle) \
V(RiscvS16x8InterleaveRight) \
V(RiscvS16x8InterleaveLeft) \
V(RiscvS16x8PackEven) \
V(RiscvS16x8PackOdd) \
V(RiscvS16x8InterleaveEven) \
V(RiscvS16x8InterleaveOdd) \
V(RiscvS16x4Reverse) \
V(RiscvS16x2Reverse) \
V(RiscvS8x16InterleaveRight) \
V(RiscvS8x16InterleaveLeft) \
V(RiscvS8x16PackEven) \
V(RiscvS8x16PackOdd) \
V(RiscvS8x16InterleaveEven) \
V(RiscvS8x16InterleaveOdd) \
V(RiscvI8x16Shuffle) \
V(RiscvI8x16Swizzle) \
V(RiscvS8x16Concat) \
V(RiscvS8x8Reverse) \
V(RiscvS8x4Reverse) \
V(RiscvS8x2Reverse) \
V(RiscvS128Load8Splat) \
V(RiscvS128Load16Splat) \
V(RiscvS128Load32Splat) \
V(RiscvS128Load64Splat) \
V(RiscvS128Load8x8S) \
V(RiscvS128Load8x8U) \
V(RiscvS128Load16x4S) \
V(RiscvS128Load16x4U) \
V(RiscvS128Load32x2S) \
V(RiscvS128Load32x2U) \
V(RiscvS128LoadLane) \
V(RiscvS128StoreLane) \
V(RiscvRvvLd) \
V(RiscvRvvSt) \
V(RiscvI32x4SConvertI16x8Low) \
V(RiscvI32x4SConvertI16x8High) \
V(RiscvI32x4UConvertI16x8Low) \
V(RiscvI32x4UConvertI16x8High) \
V(RiscvI16x8SConvertI8x16Low) \
V(RiscvI16x8SConvertI8x16High) \
V(RiscvI16x8SConvertI32x4) \
V(RiscvI16x8UConvertI32x4) \
V(RiscvI16x8UConvertI8x16Low) \
V(RiscvI16x8UConvertI8x16High) \
V(RiscvI8x16SConvertI16x8) \
V(RiscvI8x16UConvertI16x8) \
V(RiscvWord64AtomicLoadUint64) \
V(RiscvWord64AtomicStoreWord64) \
V(RiscvWord64AtomicAddUint64) \
V(RiscvWord64AtomicSubUint64) \
V(RiscvWord64AtomicAndUint64) \
V(RiscvWord64AtomicOrUint64) \
V(RiscvWord64AtomicXorUint64) \
V(RiscvWord64AtomicExchangeUint64) \
V(RiscvWord64AtomicCompareExchangeUint64) \
V(RiscvStoreCompressTagged) \
V(RiscvLoadDecompressTaggedSigned) \
V(RiscvLoadDecompressTaggedPointer) \
#define TARGET_ARCH_OPCODE_LIST(V) \
V(RiscvAdd32) \
V(RiscvAdd64) \
V(RiscvAddOvf64) \
V(RiscvSub32) \
V(RiscvSub64) \
V(RiscvSubOvf64) \
V(RiscvMul32) \
V(RiscvMulOvf32) \
V(RiscvMulHigh32) \
V(RiscvMulHigh64) \
V(RiscvMulHighU32) \
V(RiscvMul64) \
V(RiscvDiv32) \
V(RiscvDiv64) \
V(RiscvDivU32) \
V(RiscvDivU64) \
V(RiscvMod32) \
V(RiscvMod64) \
V(RiscvModU32) \
V(RiscvModU64) \
V(RiscvAnd) \
V(RiscvAnd32) \
V(RiscvOr) \
V(RiscvOr32) \
V(RiscvNor) \
V(RiscvNor32) \
V(RiscvXor) \
V(RiscvXor32) \
V(RiscvClz32) \
V(RiscvShl32) \
V(RiscvShr32) \
V(RiscvSar32) \
V(RiscvZeroExtendWord) \
V(RiscvSignExtendWord) \
V(RiscvClz64) \
V(RiscvCtz32) \
V(RiscvCtz64) \
V(RiscvPopcnt32) \
V(RiscvPopcnt64) \
V(RiscvShl64) \
V(RiscvShr64) \
V(RiscvSar64) \
V(RiscvRor32) \
V(RiscvRor64) \
V(RiscvMov) \
V(RiscvTst) \
V(RiscvCmp) \
V(RiscvCmpZero) \
V(RiscvCmpS) \
V(RiscvAddS) \
V(RiscvSubS) \
V(RiscvMulS) \
V(RiscvDivS) \
V(RiscvModS) \
V(RiscvAbsS) \
V(RiscvNegS) \
V(RiscvSqrtS) \
V(RiscvMaxS) \
V(RiscvMinS) \
V(RiscvCmpD) \
V(RiscvAddD) \
V(RiscvSubD) \
V(RiscvMulD) \
V(RiscvDivD) \
V(RiscvModD) \
V(RiscvAbsD) \
V(RiscvNegD) \
V(RiscvSqrtD) \
V(RiscvMaxD) \
V(RiscvMinD) \
V(RiscvFloat64RoundDown) \
V(RiscvFloat64RoundTruncate) \
V(RiscvFloat64RoundUp) \
V(RiscvFloat64RoundTiesEven) \
V(RiscvFloat32RoundDown) \
V(RiscvFloat32RoundTruncate) \
V(RiscvFloat32RoundUp) \
V(RiscvFloat32RoundTiesEven) \
V(RiscvCvtSD) \
V(RiscvCvtDS) \
V(RiscvTruncWD) \
V(RiscvRoundWD) \
V(RiscvFloorWD) \
V(RiscvCeilWD) \
V(RiscvTruncWS) \
V(RiscvRoundWS) \
V(RiscvFloorWS) \
V(RiscvCeilWS) \
V(RiscvTruncLS) \
V(RiscvTruncLD) \
V(RiscvTruncUwD) \
V(RiscvTruncUwS) \
V(RiscvTruncUlS) \
V(RiscvTruncUlD) \
V(RiscvCvtDW) \
V(RiscvCvtSL) \
V(RiscvCvtSW) \
V(RiscvCvtSUw) \
V(RiscvCvtSUl) \
V(RiscvCvtDL) \
V(RiscvCvtDUw) \
V(RiscvCvtDUl) \
V(RiscvLb) \
V(RiscvLbu) \
V(RiscvSb) \
V(RiscvLh) \
V(RiscvUlh) \
V(RiscvLhu) \
V(RiscvUlhu) \
V(RiscvSh) \
V(RiscvUsh) \
V(RiscvLd) \
V(RiscvUld) \
V(RiscvLw) \
V(RiscvUlw) \
V(RiscvLwu) \
V(RiscvUlwu) \
V(RiscvSw) \
V(RiscvUsw) \
V(RiscvSd) \
V(RiscvUsd) \
V(RiscvLoadFloat) \
V(RiscvULoadFloat) \
V(RiscvStoreFloat) \
V(RiscvUStoreFloat) \
V(RiscvLoadDouble) \
V(RiscvULoadDouble) \
V(RiscvStoreDouble) \
V(RiscvUStoreDouble) \
V(RiscvBitcastDL) \
V(RiscvBitcastLD) \
V(RiscvBitcastInt32ToFloat32) \
V(RiscvBitcastFloat32ToInt32) \
V(RiscvFloat64ExtractLowWord32) \
V(RiscvFloat64ExtractHighWord32) \
V(RiscvFloat64InsertLowWord32) \
V(RiscvFloat64InsertHighWord32) \
V(RiscvFloat32Max) \
V(RiscvFloat64Max) \
V(RiscvFloat32Min) \
V(RiscvFloat64Min) \
V(RiscvFloat64SilenceNaN) \
V(RiscvPush) \
V(RiscvPeek) \
V(RiscvByteSwap64) \
V(RiscvByteSwap32) \
V(RiscvStoreToStackSlot) \
V(RiscvStackClaim) \
V(RiscvSignExtendByte) \
V(RiscvSignExtendShort) \
V(RiscvSync) \
V(RiscvAssertEqual) \
V(RiscvS128Const) \
V(RiscvS128Zero) \
V(RiscvS128AllOnes) \
V(RiscvI32x4Splat) \
V(RiscvI32x4ExtractLane) \
V(RiscvI32x4ReplaceLane) \
V(RiscvI32x4Add) \
V(RiscvI32x4Sub) \
V(RiscvF64x2Abs) \
V(RiscvF64x2Neg) \
V(RiscvF32x4Splat) \
V(RiscvF32x4ExtractLane) \
V(RiscvF32x4ReplaceLane) \
V(RiscvF32x4SConvertI32x4) \
V(RiscvF32x4UConvertI32x4) \
V(RiscvI64x2SConvertI32x4Low) \
V(RiscvI64x2SConvertI32x4High) \
V(RiscvI64x2UConvertI32x4Low) \
V(RiscvI64x2UConvertI32x4High) \
V(RiscvI32x4Mul) \
V(RiscvI32x4MaxS) \
V(RiscvI32x4MinS) \
V(RiscvI32x4Eq) \
V(RiscvI32x4Ne) \
V(RiscvI32x4Shl) \
V(RiscvI32x4ShrS) \
V(RiscvI32x4ShrU) \
V(RiscvI32x4MaxU) \
V(RiscvI32x4MinU) \
V(RiscvI64x2GtS) \
V(RiscvI64x2GeS) \
V(RiscvI64x2Eq) \
V(RiscvI64x2Ne) \
V(RiscvF64x2Sqrt) \
V(RiscvF64x2Add) \
V(RiscvF64x2Sub) \
V(RiscvF64x2Mul) \
V(RiscvF64x2Div) \
V(RiscvF64x2Min) \
V(RiscvF64x2Max) \
V(RiscvF64x2ConvertLowI32x4S) \
V(RiscvF64x2ConvertLowI32x4U) \
V(RiscvF64x2PromoteLowF32x4) \
V(RiscvF64x2Eq) \
V(RiscvF64x2Ne) \
V(RiscvF64x2Lt) \
V(RiscvF64x2Le) \
V(RiscvF64x2Splat) \
V(RiscvF64x2ExtractLane) \
V(RiscvF64x2ReplaceLane) \
V(RiscvF64x2Pmin) \
V(RiscvF64x2Pmax) \
V(RiscvF64x2Ceil) \
V(RiscvF64x2Floor) \
V(RiscvF64x2Trunc) \
V(RiscvF64x2NearestInt) \
V(RiscvI64x2Splat) \
V(RiscvI64x2ExtractLane) \
V(RiscvI64x2ReplaceLane) \
V(RiscvI64x2Add) \
V(RiscvI64x2Sub) \
V(RiscvI64x2Mul) \
V(RiscvI64x2Abs) \
V(RiscvI64x2Neg) \
V(RiscvI64x2Shl) \
V(RiscvI64x2ShrS) \
V(RiscvI64x2ShrU) \
V(RiscvI64x2BitMask) \
V(RiscvF32x4Abs) \
V(RiscvF32x4Neg) \
V(RiscvF32x4Sqrt) \
V(RiscvF32x4RecipApprox) \
V(RiscvF32x4RecipSqrtApprox) \
V(RiscvF32x4Add) \
V(RiscvF32x4Sub) \
V(RiscvF32x4Mul) \
V(RiscvF32x4Div) \
V(RiscvF32x4Max) \
V(RiscvF32x4Min) \
V(RiscvF32x4Eq) \
V(RiscvF32x4Ne) \
V(RiscvF32x4Lt) \
V(RiscvF32x4Le) \
V(RiscvF32x4Pmin) \
V(RiscvF32x4Pmax) \
V(RiscvF32x4DemoteF64x2Zero) \
V(RiscvF32x4Ceil) \
V(RiscvF32x4Floor) \
V(RiscvF32x4Trunc) \
V(RiscvF32x4NearestInt) \
V(RiscvI32x4SConvertF32x4) \
V(RiscvI32x4UConvertF32x4) \
V(RiscvI32x4Neg) \
V(RiscvI32x4GtS) \
V(RiscvI32x4GeS) \
V(RiscvI32x4GtU) \
V(RiscvI32x4GeU) \
V(RiscvI32x4Abs) \
V(RiscvI32x4BitMask) \
V(RiscvI32x4DotI16x8S) \
V(RiscvI32x4TruncSatF64x2SZero) \
V(RiscvI32x4TruncSatF64x2UZero) \
V(RiscvI16x8Splat) \
V(RiscvI16x8ExtractLaneU) \
V(RiscvI16x8ExtractLaneS) \
V(RiscvI16x8ReplaceLane) \
V(RiscvI16x8Neg) \
V(RiscvI16x8Shl) \
V(RiscvI16x8ShrS) \
V(RiscvI16x8ShrU) \
V(RiscvI16x8Add) \
V(RiscvI16x8AddSatS) \
V(RiscvI16x8Sub) \
V(RiscvI16x8SubSatS) \
V(RiscvI16x8Mul) \
V(RiscvI16x8MaxS) \
V(RiscvI16x8MinS) \
V(RiscvI16x8Eq) \
V(RiscvI16x8Ne) \
V(RiscvI16x8GtS) \
V(RiscvI16x8GeS) \
V(RiscvI16x8AddSatU) \
V(RiscvI16x8SubSatU) \
V(RiscvI16x8MaxU) \
V(RiscvI16x8MinU) \
V(RiscvI16x8GtU) \
V(RiscvI16x8GeU) \
V(RiscvI16x8RoundingAverageU) \
V(RiscvI16x8Q15MulRSatS) \
V(RiscvI16x8Abs) \
V(RiscvI16x8BitMask) \
V(RiscvI8x16Splat) \
V(RiscvI8x16ExtractLaneU) \
V(RiscvI8x16ExtractLaneS) \
V(RiscvI8x16ReplaceLane) \
V(RiscvI8x16Neg) \
V(RiscvI8x16Shl) \
V(RiscvI8x16ShrS) \
V(RiscvI8x16Add) \
V(RiscvI8x16AddSatS) \
V(RiscvI8x16Sub) \
V(RiscvI8x16SubSatS) \
V(RiscvI8x16MaxS) \
V(RiscvI8x16MinS) \
V(RiscvI8x16Eq) \
V(RiscvI8x16Ne) \
V(RiscvI8x16GtS) \
V(RiscvI8x16GeS) \
V(RiscvI8x16ShrU) \
V(RiscvI8x16AddSatU) \
V(RiscvI8x16SubSatU) \
V(RiscvI8x16MaxU) \
V(RiscvI8x16MinU) \
V(RiscvI8x16GtU) \
V(RiscvI8x16GeU) \
V(RiscvI8x16RoundingAverageU) \
V(RiscvI8x16Abs) \
V(RiscvI8x16BitMask) \
V(RiscvI8x16Popcnt) \
V(RiscvS128And) \
V(RiscvS128Or) \
V(RiscvS128Xor) \
V(RiscvS128Not) \
V(RiscvS128Select) \
V(RiscvS128AndNot) \
V(RiscvI32x4AllTrue) \
V(RiscvI16x8AllTrue) \
V(RiscvV128AnyTrue) \
V(RiscvI8x16AllTrue) \
V(RiscvI64x2AllTrue) \
V(RiscvS32x4InterleaveRight) \
V(RiscvS32x4InterleaveLeft) \
V(RiscvS32x4PackEven) \
V(RiscvS32x4PackOdd) \
V(RiscvS32x4InterleaveEven) \
V(RiscvS32x4InterleaveOdd) \
V(RiscvS32x4Shuffle) \
V(RiscvS16x8InterleaveRight) \
V(RiscvS16x8InterleaveLeft) \
V(RiscvS16x8PackEven) \
V(RiscvS16x8PackOdd) \
V(RiscvS16x8InterleaveEven) \
V(RiscvS16x8InterleaveOdd) \
V(RiscvS16x4Reverse) \
V(RiscvS16x2Reverse) \
V(RiscvS8x16InterleaveRight) \
V(RiscvS8x16InterleaveLeft) \
V(RiscvS8x16PackEven) \
V(RiscvS8x16PackOdd) \
V(RiscvS8x16InterleaveEven) \
V(RiscvS8x16InterleaveOdd) \
V(RiscvI8x16Shuffle) \
V(RiscvI8x16Swizzle) \
V(RiscvS8x16Concat) \
V(RiscvS8x8Reverse) \
V(RiscvS8x4Reverse) \
V(RiscvS8x2Reverse) \
V(RiscvS128Load8Splat) \
V(RiscvS128Load16Splat) \
V(RiscvS128Load32Splat) \
V(RiscvS128Load64Splat) \
V(RiscvS128Load8x8S) \
V(RiscvS128Load8x8U) \
V(RiscvS128Load16x4S) \
V(RiscvS128Load16x4U) \
V(RiscvS128Load32x2S) \
V(RiscvS128Load32x2U) \
V(RiscvS128LoadLane) \
V(RiscvS128StoreLane) \
V(RiscvRvvLd) \
V(RiscvRvvSt) \
V(RiscvI32x4SConvertI16x8Low) \
V(RiscvI32x4SConvertI16x8High) \
V(RiscvI32x4UConvertI16x8Low) \
V(RiscvI32x4UConvertI16x8High) \
V(RiscvI16x8SConvertI8x16Low) \
V(RiscvI16x8SConvertI8x16High) \
V(RiscvI16x8SConvertI32x4) \
V(RiscvI16x8UConvertI32x4) \
V(RiscvI16x8UConvertI8x16Low) \
V(RiscvI16x8UConvertI8x16High) \
V(RiscvI8x16SConvertI16x8) \
V(RiscvI8x16UConvertI16x8) \
V(RiscvWord64AtomicLoadUint64) \
V(RiscvWord64AtomicStoreWord64) \
V(RiscvWord64AtomicAddUint64) \
V(RiscvWord64AtomicSubUint64) \
V(RiscvWord64AtomicAndUint64) \
V(RiscvWord64AtomicOrUint64) \
V(RiscvWord64AtomicXorUint64) \
V(RiscvWord64AtomicExchangeUint64) \
V(RiscvWord64AtomicCompareExchangeUint64) \
V(RiscvStoreCompressTagged) \
V(RiscvLoadDecompressTaggedSigned) \
V(RiscvLoadDecompressTaggedPointer) \
V(RiscvLoadDecompressAnyTagged)
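
These V(...) lists are consumed by X-macro expansion elsewhere in the backend; for instance, each entry becomes one enumerator of the architecture-wide opcode enum. A minimal sketch of that expansion (the enum name and k-prefix follow V8's conventions, but treat the exact declaration form here as an illustrative assumption):

enum ArchOpcode {
#define DECLARE_ARCH_OPCODE(Name) k##Name,  // e.g. V(RiscvLb) -> kRiscvLb,
  TARGET_ARCH_OPCODE_LIST(DECLARE_ARCH_OPCODE)
#undef DECLARE_ARCH_OPCODE
};

Adding or removing a V(...) line above therefore changes only this one list; every consumer of the macro picks the change up automatically.
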
// Addressing modes represent the "shape" of inputs to an instruction.
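
The addressing-mode list that follows this comment in the header uses the same X-macro shape as the opcode list. A minimal sketch with the two modes most V8 targets declare (the exact set for this architecture is an assumption):

#define TARGET_ADDRESSING_MODE_LIST(V)                          \
  V(MRI) /* [%r0 + K]   : base register plus immediate */      \
  V(MRR) /* [%r0 + %r1] : base register plus index register */
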


@ -12,396 +12,392 @@ namespace compiler {
// S390-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
// Opcodes that support a MemoryAccessMode.
#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
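// A hedged aside, not part of this header: when the list above is empty,
// the macro-generated predicate degenerates into a switch with only a
// default case, which MSVC reports as warning C4065 on 32-bit builds.
// Roughly (the free-function form and the ArchOpcode parameter follow the
// surrounding code's conventions; the exact signature is an assumption):
//
//   inline bool HasMemoryAccessMode(ArchOpcode opcode) {
//     switch (opcode) {  // C4065: 'switch' contains 'default' but no 'case' labels
//       default:
//         return false;
//     }
//   }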
#define TARGET_ARCH_OPCODE_LIST(V) \
TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
V(S390_Peek) \
V(S390_Abs32) \
V(S390_Abs64) \
V(S390_And32) \
V(S390_And64) \
V(S390_Or32) \
V(S390_Or64) \
V(S390_Xor32) \
V(S390_Xor64) \
V(S390_ShiftLeft32) \
V(S390_ShiftLeft64) \
V(S390_ShiftRight32) \
V(S390_ShiftRight64) \
V(S390_ShiftRightArith32) \
V(S390_ShiftRightArith64) \
V(S390_RotRight32) \
V(S390_RotRight64) \
V(S390_Not32) \
V(S390_Not64) \
V(S390_RotLeftAndClear64) \
V(S390_RotLeftAndClearLeft64) \
V(S390_RotLeftAndClearRight64) \
V(S390_Lay) \
V(S390_Add32) \
V(S390_Add64) \
V(S390_AddFloat) \
V(S390_AddDouble) \
V(S390_Sub32) \
V(S390_Sub64) \
V(S390_SubFloat) \
V(S390_SubDouble) \
V(S390_Mul32) \
V(S390_Mul32WithOverflow) \
V(S390_Mul64) \
V(S390_MulHigh32) \
V(S390_MulHighU32) \
V(S390_MulFloat) \
V(S390_MulDouble) \
V(S390_Div32) \
V(S390_Div64) \
V(S390_DivU32) \
V(S390_DivU64) \
V(S390_DivFloat) \
V(S390_DivDouble) \
V(S390_Mod32) \
V(S390_Mod64) \
V(S390_ModU32) \
V(S390_ModU64) \
V(S390_ModDouble) \
V(S390_Neg32) \
V(S390_Neg64) \
V(S390_NegDouble) \
V(S390_NegFloat) \
V(S390_SqrtFloat) \
V(S390_FloorFloat) \
V(S390_CeilFloat) \
V(S390_TruncateFloat) \
V(S390_FloatNearestInt) \
V(S390_AbsFloat) \
V(S390_SqrtDouble) \
V(S390_FloorDouble) \
V(S390_CeilDouble) \
V(S390_TruncateDouble) \
V(S390_RoundDouble) \
V(S390_DoubleNearestInt) \
V(S390_MaxFloat) \
V(S390_MaxDouble) \
V(S390_MinFloat) \
V(S390_MinDouble) \
V(S390_AbsDouble) \
V(S390_Cntlz32) \
V(S390_Cntlz64) \
V(S390_Popcnt32) \
V(S390_Popcnt64) \
V(S390_Cmp32) \
V(S390_Cmp64) \
V(S390_CmpFloat) \
V(S390_CmpDouble) \
V(S390_Tst32) \
V(S390_Tst64) \
V(S390_Push) \
V(S390_PushFrame) \
V(S390_StoreToStackSlot) \
V(S390_SignExtendWord8ToInt32) \
V(S390_SignExtendWord16ToInt32) \
V(S390_SignExtendWord8ToInt64) \
V(S390_SignExtendWord16ToInt64) \
V(S390_SignExtendWord32ToInt64) \
V(S390_Uint32ToUint64) \
V(S390_Int64ToInt32) \
V(S390_Int64ToFloat32) \
V(S390_Int64ToDouble) \
V(S390_Uint64ToFloat32) \
V(S390_Uint64ToDouble) \
V(S390_Int32ToFloat32) \
V(S390_Int32ToDouble) \
V(S390_Uint32ToFloat32) \
V(S390_Uint32ToDouble) \
V(S390_Float32ToInt64) \
V(S390_Float32ToUint64) \
V(S390_Float32ToInt32) \
V(S390_Float32ToUint32) \
V(S390_Float32ToDouble) \
V(S390_Float64SilenceNaN) \
V(S390_DoubleToInt32) \
V(S390_DoubleToUint32) \
V(S390_DoubleToInt64) \
V(S390_DoubleToUint64) \
V(S390_DoubleToFloat32) \
V(S390_DoubleExtractLowWord32) \
V(S390_DoubleExtractHighWord32) \
V(S390_DoubleInsertLowWord32) \
V(S390_DoubleInsertHighWord32) \
V(S390_DoubleConstruct) \
V(S390_BitcastInt32ToFloat32) \
V(S390_BitcastFloat32ToInt32) \
V(S390_BitcastInt64ToDouble) \
V(S390_BitcastDoubleToInt64) \
V(S390_LoadWordS8) \
V(S390_LoadWordU8) \
V(S390_LoadWordS16) \
V(S390_LoadWordU16) \
V(S390_LoadWordS32) \
V(S390_LoadWordU32) \
V(S390_LoadAndTestWord32) \
V(S390_LoadAndTestWord64) \
V(S390_LoadAndTestFloat32) \
V(S390_LoadAndTestFloat64) \
V(S390_LoadReverse16RR) \
V(S390_LoadReverse32RR) \
V(S390_LoadReverse64RR) \
V(S390_LoadReverseSimd128RR) \
V(S390_LoadReverseSimd128) \
V(S390_LoadReverse16) \
V(S390_LoadReverse32) \
V(S390_LoadReverse64) \
V(S390_LoadWord64) \
V(S390_LoadFloat32) \
V(S390_LoadDouble) \
V(S390_StoreWord8) \
V(S390_StoreWord16) \
V(S390_StoreWord32) \
V(S390_StoreWord64) \
V(S390_StoreReverse16) \
V(S390_StoreReverse32) \
V(S390_StoreReverse64) \
V(S390_StoreReverseSimd128) \
V(S390_StoreFloat32) \
V(S390_StoreDouble) \
V(S390_Word64AtomicExchangeUint64) \
V(S390_Word64AtomicCompareExchangeUint64) \
V(S390_Word64AtomicAddUint64) \
V(S390_Word64AtomicSubUint64) \
V(S390_Word64AtomicAndUint64) \
V(S390_Word64AtomicOrUint64) \
V(S390_Word64AtomicXorUint64) \
V(S390_F64x2Splat) \
V(S390_F64x2ReplaceLane) \
V(S390_F64x2Abs) \
V(S390_F64x2Neg) \
V(S390_F64x2Sqrt) \
V(S390_F64x2Add) \
V(S390_F64x2Sub) \
V(S390_F64x2Mul) \
V(S390_F64x2Div) \
V(S390_F64x2Eq) \
V(S390_F64x2Ne) \
V(S390_F64x2Lt) \
V(S390_F64x2Le) \
V(S390_F64x2Min) \
V(S390_F64x2Max) \
V(S390_F64x2ExtractLane) \
V(S390_F64x2Qfma) \
V(S390_F64x2Qfms) \
V(S390_F64x2Pmin) \
V(S390_F64x2Pmax) \
V(S390_F64x2Ceil) \
V(S390_F64x2Floor) \
V(S390_F64x2Trunc) \
V(S390_F64x2NearestInt) \
V(S390_F64x2ConvertLowI32x4S) \
V(S390_F64x2ConvertLowI32x4U) \
V(S390_F64x2PromoteLowF32x4) \
V(S390_F32x4Splat) \
V(S390_F32x4ExtractLane) \
V(S390_F32x4ReplaceLane) \
V(S390_F32x4Add) \
V(S390_F32x4Sub) \
V(S390_F32x4Mul) \
V(S390_F32x4Eq) \
V(S390_F32x4Ne) \
V(S390_F32x4Lt) \
V(S390_F32x4Le) \
V(S390_F32x4Abs) \
V(S390_F32x4Neg) \
V(S390_F32x4RecipApprox) \
V(S390_F32x4RecipSqrtApprox) \
V(S390_F32x4SConvertI32x4) \
V(S390_F32x4UConvertI32x4) \
V(S390_F32x4Sqrt) \
V(S390_F32x4Div) \
V(S390_F32x4Min) \
V(S390_F32x4Max) \
V(S390_F32x4Qfma) \
V(S390_F32x4Qfms) \
V(S390_F32x4Pmin) \
V(S390_F32x4Pmax) \
V(S390_F32x4Ceil) \
V(S390_F32x4Floor) \
V(S390_F32x4Trunc) \
V(S390_F32x4NearestInt) \
V(S390_F32x4DemoteF64x2Zero) \
V(S390_I64x2Neg) \
V(S390_I64x2Add) \
V(S390_I64x2Sub) \
V(S390_I64x2Shl) \
V(S390_I64x2ShrS) \
V(S390_I64x2ShrU) \
V(S390_I64x2Mul) \
V(S390_I64x2Splat) \
V(S390_I64x2ReplaceLane) \
V(S390_I64x2ExtractLane) \
V(S390_I64x2Eq) \
V(S390_I64x2BitMask) \
V(S390_I64x2ExtMulLowI32x4S) \
V(S390_I64x2ExtMulHighI32x4S) \
V(S390_I64x2ExtMulLowI32x4U) \
V(S390_I64x2ExtMulHighI32x4U) \
V(S390_I64x2SConvertI32x4Low) \
V(S390_I64x2SConvertI32x4High) \
V(S390_I64x2UConvertI32x4Low) \
V(S390_I64x2UConvertI32x4High) \
V(S390_I64x2Ne) \
V(S390_I64x2GtS) \
V(S390_I64x2GeS) \
V(S390_I64x2Abs) \
V(S390_I32x4Splat) \
V(S390_I32x4ExtractLane) \
V(S390_I32x4ReplaceLane) \
V(S390_I32x4Add) \
V(S390_I32x4Sub) \
V(S390_I32x4Mul) \
V(S390_I32x4MinS) \
V(S390_I32x4MinU) \
V(S390_I32x4MaxS) \
V(S390_I32x4MaxU) \
V(S390_I32x4Eq) \
V(S390_I32x4Ne) \
V(S390_I32x4GtS) \
V(S390_I32x4GeS) \
V(S390_I32x4GtU) \
V(S390_I32x4GeU) \
V(S390_I32x4Neg) \
V(S390_I32x4Shl) \
V(S390_I32x4ShrS) \
V(S390_I32x4ShrU) \
V(S390_I32x4SConvertF32x4) \
V(S390_I32x4UConvertF32x4) \
V(S390_I32x4SConvertI16x8Low) \
V(S390_I32x4SConvertI16x8High) \
V(S390_I32x4UConvertI16x8Low) \
V(S390_I32x4UConvertI16x8High) \
V(S390_I32x4Abs) \
V(S390_I32x4BitMask) \
V(S390_I32x4DotI16x8S) \
V(S390_I32x4ExtMulLowI16x8S) \
V(S390_I32x4ExtMulHighI16x8S) \
V(S390_I32x4ExtMulLowI16x8U) \
V(S390_I32x4ExtMulHighI16x8U) \
V(S390_I32x4ExtAddPairwiseI16x8S) \
V(S390_I32x4ExtAddPairwiseI16x8U) \
V(S390_I32x4TruncSatF64x2SZero) \
V(S390_I32x4TruncSatF64x2UZero) \
V(S390_I16x8Splat) \
V(S390_I16x8ExtractLaneU) \
V(S390_I16x8ExtractLaneS) \
V(S390_I16x8ReplaceLane) \
V(S390_I16x8Add) \
V(S390_I16x8Sub) \
V(S390_I16x8Mul) \
V(S390_I16x8MinS) \
V(S390_I16x8MinU) \
V(S390_I16x8MaxS) \
V(S390_I16x8MaxU) \
V(S390_I16x8Eq) \
V(S390_I16x8Ne) \
V(S390_I16x8GtS) \
V(S390_I16x8GeS) \
V(S390_I16x8GtU) \
V(S390_I16x8GeU) \
V(S390_I16x8Shl) \
V(S390_I16x8ShrS) \
V(S390_I16x8ShrU) \
V(S390_I16x8Neg) \
V(S390_I16x8SConvertI32x4) \
V(S390_I16x8UConvertI32x4) \
V(S390_I16x8SConvertI8x16Low) \
V(S390_I16x8SConvertI8x16High) \
V(S390_I16x8UConvertI8x16Low) \
V(S390_I16x8UConvertI8x16High) \
V(S390_I16x8AddSatS) \
V(S390_I16x8SubSatS) \
V(S390_I16x8AddSatU) \
V(S390_I16x8SubSatU) \
V(S390_I16x8RoundingAverageU) \
V(S390_I16x8Abs) \
V(S390_I16x8BitMask) \
V(S390_I16x8ExtMulLowI8x16S) \
V(S390_I16x8ExtMulHighI8x16S) \
V(S390_I16x8ExtMulLowI8x16U) \
V(S390_I16x8ExtMulHighI8x16U) \
V(S390_I16x8ExtAddPairwiseI8x16S) \
V(S390_I16x8ExtAddPairwiseI8x16U) \
V(S390_I16x8Q15MulRSatS) \
V(S390_I8x16Splat) \
V(S390_I8x16ExtractLaneU) \
V(S390_I8x16ExtractLaneS) \
V(S390_I8x16ReplaceLane) \
V(S390_I8x16Add) \
V(S390_I8x16Sub) \
V(S390_I8x16MinS) \
V(S390_I8x16MinU) \
V(S390_I8x16MaxS) \
V(S390_I8x16MaxU) \
V(S390_I8x16Eq) \
V(S390_I8x16Ne) \
V(S390_I8x16GtS) \
V(S390_I8x16GeS) \
V(S390_I8x16GtU) \
V(S390_I8x16GeU) \
V(S390_I8x16Shl) \
V(S390_I8x16ShrS) \
V(S390_I8x16ShrU) \
V(S390_I8x16Neg) \
V(S390_I8x16SConvertI16x8) \
V(S390_I8x16UConvertI16x8) \
V(S390_I8x16AddSatS) \
V(S390_I8x16SubSatS) \
V(S390_I8x16AddSatU) \
V(S390_I8x16SubSatU) \
V(S390_I8x16RoundingAverageU) \
V(S390_I8x16Abs) \
V(S390_I8x16BitMask) \
V(S390_I8x16Shuffle) \
V(S390_I8x16Swizzle) \
V(S390_I8x16Popcnt) \
V(S390_I64x2AllTrue) \
V(S390_I32x4AllTrue) \
V(S390_I16x8AllTrue) \
V(S390_I8x16AllTrue) \
V(S390_V128AnyTrue) \
V(S390_S128And) \
V(S390_S128Or) \
V(S390_S128Xor) \
V(S390_S128Const) \
V(S390_S128Zero) \
V(S390_S128AllOnes) \
V(S390_S128Not) \
V(S390_S128Select) \
V(S390_S128AndNot) \
V(S390_S128Load8Splat) \
V(S390_S128Load16Splat) \
V(S390_S128Load32Splat) \
V(S390_S128Load64Splat) \
V(S390_S128Load8x8S) \
V(S390_S128Load8x8U) \
V(S390_S128Load16x4S) \
V(S390_S128Load16x4U) \
V(S390_S128Load32x2S) \
V(S390_S128Load32x2U) \
V(S390_S128Load32Zero) \
V(S390_S128Load64Zero) \
V(S390_S128Load8Lane) \
V(S390_S128Load16Lane) \
V(S390_S128Load32Lane) \
V(S390_S128Load64Lane) \
V(S390_S128Store8Lane) \
V(S390_S128Store16Lane) \
V(S390_S128Store32Lane) \
V(S390_S128Store64Lane) \
V(S390_StoreSimd128) \
V(S390_LoadSimd128) \
V(S390_StoreCompressTagged) \
V(S390_LoadDecompressTaggedSigned) \
V(S390_LoadDecompressTaggedPointer) \
#define TARGET_ARCH_OPCODE_LIST(V) \
V(S390_Peek) \
V(S390_Abs32) \
V(S390_Abs64) \
V(S390_And32) \
V(S390_And64) \
V(S390_Or32) \
V(S390_Or64) \
V(S390_Xor32) \
V(S390_Xor64) \
V(S390_ShiftLeft32) \
V(S390_ShiftLeft64) \
V(S390_ShiftRight32) \
V(S390_ShiftRight64) \
V(S390_ShiftRightArith32) \
V(S390_ShiftRightArith64) \
V(S390_RotRight32) \
V(S390_RotRight64) \
V(S390_Not32) \
V(S390_Not64) \
V(S390_RotLeftAndClear64) \
V(S390_RotLeftAndClearLeft64) \
V(S390_RotLeftAndClearRight64) \
V(S390_Lay) \
V(S390_Add32) \
V(S390_Add64) \
V(S390_AddFloat) \
V(S390_AddDouble) \
V(S390_Sub32) \
V(S390_Sub64) \
V(S390_SubFloat) \
V(S390_SubDouble) \
V(S390_Mul32) \
V(S390_Mul32WithOverflow) \
V(S390_Mul64) \
V(S390_MulHigh32) \
V(S390_MulHighU32) \
V(S390_MulFloat) \
V(S390_MulDouble) \
V(S390_Div32) \
V(S390_Div64) \
V(S390_DivU32) \
V(S390_DivU64) \
V(S390_DivFloat) \
V(S390_DivDouble) \
V(S390_Mod32) \
V(S390_Mod64) \
V(S390_ModU32) \
V(S390_ModU64) \
V(S390_ModDouble) \
V(S390_Neg32) \
V(S390_Neg64) \
V(S390_NegDouble) \
V(S390_NegFloat) \
V(S390_SqrtFloat) \
V(S390_FloorFloat) \
V(S390_CeilFloat) \
V(S390_TruncateFloat) \
V(S390_FloatNearestInt) \
V(S390_AbsFloat) \
V(S390_SqrtDouble) \
V(S390_FloorDouble) \
V(S390_CeilDouble) \
V(S390_TruncateDouble) \
V(S390_RoundDouble) \
V(S390_DoubleNearestInt) \
V(S390_MaxFloat) \
V(S390_MaxDouble) \
V(S390_MinFloat) \
V(S390_MinDouble) \
V(S390_AbsDouble) \
V(S390_Cntlz32) \
V(S390_Cntlz64) \
V(S390_Popcnt32) \
V(S390_Popcnt64) \
V(S390_Cmp32) \
V(S390_Cmp64) \
V(S390_CmpFloat) \
V(S390_CmpDouble) \
V(S390_Tst32) \
V(S390_Tst64) \
V(S390_Push) \
V(S390_PushFrame) \
V(S390_StoreToStackSlot) \
V(S390_SignExtendWord8ToInt32) \
V(S390_SignExtendWord16ToInt32) \
V(S390_SignExtendWord8ToInt64) \
V(S390_SignExtendWord16ToInt64) \
V(S390_SignExtendWord32ToInt64) \
V(S390_Uint32ToUint64) \
V(S390_Int64ToInt32) \
V(S390_Int64ToFloat32) \
V(S390_Int64ToDouble) \
V(S390_Uint64ToFloat32) \
V(S390_Uint64ToDouble) \
V(S390_Int32ToFloat32) \
V(S390_Int32ToDouble) \
V(S390_Uint32ToFloat32) \
V(S390_Uint32ToDouble) \
V(S390_Float32ToInt64) \
V(S390_Float32ToUint64) \
V(S390_Float32ToInt32) \
V(S390_Float32ToUint32) \
V(S390_Float32ToDouble) \
V(S390_Float64SilenceNaN) \
V(S390_DoubleToInt32) \
V(S390_DoubleToUint32) \
V(S390_DoubleToInt64) \
V(S390_DoubleToUint64) \
V(S390_DoubleToFloat32) \
V(S390_DoubleExtractLowWord32) \
V(S390_DoubleExtractHighWord32) \
V(S390_DoubleInsertLowWord32) \
V(S390_DoubleInsertHighWord32) \
V(S390_DoubleConstruct) \
V(S390_BitcastInt32ToFloat32) \
V(S390_BitcastFloat32ToInt32) \
V(S390_BitcastInt64ToDouble) \
V(S390_BitcastDoubleToInt64) \
V(S390_LoadWordS8) \
V(S390_LoadWordU8) \
V(S390_LoadWordS16) \
V(S390_LoadWordU16) \
V(S390_LoadWordS32) \
V(S390_LoadWordU32) \
V(S390_LoadAndTestWord32) \
V(S390_LoadAndTestWord64) \
V(S390_LoadAndTestFloat32) \
V(S390_LoadAndTestFloat64) \
V(S390_LoadReverse16RR) \
V(S390_LoadReverse32RR) \
V(S390_LoadReverse64RR) \
V(S390_LoadReverseSimd128RR) \
V(S390_LoadReverseSimd128) \
V(S390_LoadReverse16) \
V(S390_LoadReverse32) \
V(S390_LoadReverse64) \
V(S390_LoadWord64) \
V(S390_LoadFloat32) \
V(S390_LoadDouble) \
V(S390_StoreWord8) \
V(S390_StoreWord16) \
V(S390_StoreWord32) \
V(S390_StoreWord64) \
V(S390_StoreReverse16) \
V(S390_StoreReverse32) \
V(S390_StoreReverse64) \
V(S390_StoreReverseSimd128) \
V(S390_StoreFloat32) \
V(S390_StoreDouble) \
V(S390_Word64AtomicExchangeUint64) \
V(S390_Word64AtomicCompareExchangeUint64) \
V(S390_Word64AtomicAddUint64) \
V(S390_Word64AtomicSubUint64) \
V(S390_Word64AtomicAndUint64) \
V(S390_Word64AtomicOrUint64) \
V(S390_Word64AtomicXorUint64) \
V(S390_F64x2Splat) \
V(S390_F64x2ReplaceLane) \
V(S390_F64x2Abs) \
V(S390_F64x2Neg) \
V(S390_F64x2Sqrt) \
V(S390_F64x2Add) \
V(S390_F64x2Sub) \
V(S390_F64x2Mul) \
V(S390_F64x2Div) \
V(S390_F64x2Eq) \
V(S390_F64x2Ne) \
V(S390_F64x2Lt) \
V(S390_F64x2Le) \
V(S390_F64x2Min) \
V(S390_F64x2Max) \
V(S390_F64x2ExtractLane) \
V(S390_F64x2Qfma) \
V(S390_F64x2Qfms) \
V(S390_F64x2Pmin) \
V(S390_F64x2Pmax) \
V(S390_F64x2Ceil) \
V(S390_F64x2Floor) \
V(S390_F64x2Trunc) \
V(S390_F64x2NearestInt) \
V(S390_F64x2ConvertLowI32x4S) \
V(S390_F64x2ConvertLowI32x4U) \
V(S390_F64x2PromoteLowF32x4) \
V(S390_F32x4Splat) \
V(S390_F32x4ExtractLane) \
V(S390_F32x4ReplaceLane) \
V(S390_F32x4Add) \
V(S390_F32x4Sub) \
V(S390_F32x4Mul) \
V(S390_F32x4Eq) \
V(S390_F32x4Ne) \
V(S390_F32x4Lt) \
V(S390_F32x4Le) \
V(S390_F32x4Abs) \
V(S390_F32x4Neg) \
V(S390_F32x4RecipApprox) \
V(S390_F32x4RecipSqrtApprox) \
V(S390_F32x4SConvertI32x4) \
V(S390_F32x4UConvertI32x4) \
V(S390_F32x4Sqrt) \
V(S390_F32x4Div) \
V(S390_F32x4Min) \
V(S390_F32x4Max) \
V(S390_F32x4Qfma) \
V(S390_F32x4Qfms) \
V(S390_F32x4Pmin) \
V(S390_F32x4Pmax) \
V(S390_F32x4Ceil) \
V(S390_F32x4Floor) \
V(S390_F32x4Trunc) \
V(S390_F32x4NearestInt) \
V(S390_F32x4DemoteF64x2Zero) \
V(S390_I64x2Neg) \
V(S390_I64x2Add) \
V(S390_I64x2Sub) \
V(S390_I64x2Shl) \
V(S390_I64x2ShrS) \
V(S390_I64x2ShrU) \
V(S390_I64x2Mul) \
V(S390_I64x2Splat) \
V(S390_I64x2ReplaceLane) \
V(S390_I64x2ExtractLane) \
V(S390_I64x2Eq) \
V(S390_I64x2BitMask) \
V(S390_I64x2ExtMulLowI32x4S) \
V(S390_I64x2ExtMulHighI32x4S) \
V(S390_I64x2ExtMulLowI32x4U) \
V(S390_I64x2ExtMulHighI32x4U) \
V(S390_I64x2SConvertI32x4Low) \
V(S390_I64x2SConvertI32x4High) \
V(S390_I64x2UConvertI32x4Low) \
V(S390_I64x2UConvertI32x4High) \
V(S390_I64x2Ne) \
V(S390_I64x2GtS) \
V(S390_I64x2GeS) \
V(S390_I64x2Abs) \
V(S390_I32x4Splat) \
V(S390_I32x4ExtractLane) \
V(S390_I32x4ReplaceLane) \
V(S390_I32x4Add) \
V(S390_I32x4Sub) \
V(S390_I32x4Mul) \
V(S390_I32x4MinS) \
V(S390_I32x4MinU) \
V(S390_I32x4MaxS) \
V(S390_I32x4MaxU) \
V(S390_I32x4Eq) \
V(S390_I32x4Ne) \
V(S390_I32x4GtS) \
V(S390_I32x4GeS) \
V(S390_I32x4GtU) \
V(S390_I32x4GeU) \
V(S390_I32x4Neg) \
V(S390_I32x4Shl) \
V(S390_I32x4ShrS) \
V(S390_I32x4ShrU) \
V(S390_I32x4SConvertF32x4) \
V(S390_I32x4UConvertF32x4) \
V(S390_I32x4SConvertI16x8Low) \
V(S390_I32x4SConvertI16x8High) \
V(S390_I32x4UConvertI16x8Low) \
V(S390_I32x4UConvertI16x8High) \
V(S390_I32x4Abs) \
V(S390_I32x4BitMask) \
V(S390_I32x4DotI16x8S) \
V(S390_I32x4ExtMulLowI16x8S) \
V(S390_I32x4ExtMulHighI16x8S) \
V(S390_I32x4ExtMulLowI16x8U) \
V(S390_I32x4ExtMulHighI16x8U) \
V(S390_I32x4ExtAddPairwiseI16x8S) \
V(S390_I32x4ExtAddPairwiseI16x8U) \
V(S390_I32x4TruncSatF64x2SZero) \
V(S390_I32x4TruncSatF64x2UZero) \
V(S390_I16x8Splat) \
V(S390_I16x8ExtractLaneU) \
V(S390_I16x8ExtractLaneS) \
V(S390_I16x8ReplaceLane) \
V(S390_I16x8Add) \
V(S390_I16x8Sub) \
V(S390_I16x8Mul) \
V(S390_I16x8MinS) \
V(S390_I16x8MinU) \
V(S390_I16x8MaxS) \
V(S390_I16x8MaxU) \
V(S390_I16x8Eq) \
V(S390_I16x8Ne) \
V(S390_I16x8GtS) \
V(S390_I16x8GeS) \
V(S390_I16x8GtU) \
V(S390_I16x8GeU) \
V(S390_I16x8Shl) \
V(S390_I16x8ShrS) \
V(S390_I16x8ShrU) \
V(S390_I16x8Neg) \
V(S390_I16x8SConvertI32x4) \
V(S390_I16x8UConvertI32x4) \
V(S390_I16x8SConvertI8x16Low) \
V(S390_I16x8SConvertI8x16High) \
V(S390_I16x8UConvertI8x16Low) \
V(S390_I16x8UConvertI8x16High) \
V(S390_I16x8AddSatS) \
V(S390_I16x8SubSatS) \
V(S390_I16x8AddSatU) \
V(S390_I16x8SubSatU) \
V(S390_I16x8RoundingAverageU) \
V(S390_I16x8Abs) \
V(S390_I16x8BitMask) \
V(S390_I16x8ExtMulLowI8x16S) \
V(S390_I16x8ExtMulHighI8x16S) \
V(S390_I16x8ExtMulLowI8x16U) \
V(S390_I16x8ExtMulHighI8x16U) \
V(S390_I16x8ExtAddPairwiseI8x16S) \
V(S390_I16x8ExtAddPairwiseI8x16U) \
V(S390_I16x8Q15MulRSatS) \
V(S390_I8x16Splat) \
V(S390_I8x16ExtractLaneU) \
V(S390_I8x16ExtractLaneS) \
V(S390_I8x16ReplaceLane) \
V(S390_I8x16Add) \
V(S390_I8x16Sub) \
V(S390_I8x16MinS) \
V(S390_I8x16MinU) \
V(S390_I8x16MaxS) \
V(S390_I8x16MaxU) \
V(S390_I8x16Eq) \
V(S390_I8x16Ne) \
V(S390_I8x16GtS) \
V(S390_I8x16GeS) \
V(S390_I8x16GtU) \
V(S390_I8x16GeU) \
V(S390_I8x16Shl) \
V(S390_I8x16ShrS) \
V(S390_I8x16ShrU) \
V(S390_I8x16Neg) \
V(S390_I8x16SConvertI16x8) \
V(S390_I8x16UConvertI16x8) \
V(S390_I8x16AddSatS) \
V(S390_I8x16SubSatS) \
V(S390_I8x16AddSatU) \
V(S390_I8x16SubSatU) \
V(S390_I8x16RoundingAverageU) \
V(S390_I8x16Abs) \
V(S390_I8x16BitMask) \
V(S390_I8x16Shuffle) \
V(S390_I8x16Swizzle) \
V(S390_I8x16Popcnt) \
V(S390_I64x2AllTrue) \
V(S390_I32x4AllTrue) \
V(S390_I16x8AllTrue) \
V(S390_I8x16AllTrue) \
V(S390_V128AnyTrue) \
V(S390_S128And) \
V(S390_S128Or) \
V(S390_S128Xor) \
V(S390_S128Const) \
V(S390_S128Zero) \
V(S390_S128AllOnes) \
V(S390_S128Not) \
V(S390_S128Select) \
V(S390_S128AndNot) \
V(S390_S128Load8Splat) \
V(S390_S128Load16Splat) \
V(S390_S128Load32Splat) \
V(S390_S128Load64Splat) \
V(S390_S128Load8x8S) \
V(S390_S128Load8x8U) \
V(S390_S128Load16x4S) \
V(S390_S128Load16x4U) \
V(S390_S128Load32x2S) \
V(S390_S128Load32x2U) \
V(S390_S128Load32Zero) \
V(S390_S128Load64Zero) \
V(S390_S128Load8Lane) \
V(S390_S128Load16Lane) \
V(S390_S128Load32Lane) \
V(S390_S128Load64Lane) \
V(S390_S128Store8Lane) \
V(S390_S128Store16Lane) \
V(S390_S128Store32Lane) \
V(S390_S128Store64Lane) \
V(S390_StoreSimd128) \
V(S390_LoadSimd128) \
V(S390_StoreCompressTagged) \
V(S390_LoadDecompressTaggedSigned) \
V(S390_LoadDecompressTaggedPointer) \
V(S390_LoadDecompressAnyTagged)
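
With the empty list gone, architectures that define no memory-access-mode opcodes no longer feed an empty X-macro into the predicate, so it can return false without emitting a switch at all. A minimal sketch of the resulting shape, assuming the predicate is guarded on whether the target defines the list (the exact guard and declaration site are assumptions):

inline bool HasMemoryAccessMode(ArchOpcode opcode) {
#if defined(TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST)
  switch (opcode) {
#define CASE(Name) \
  case k##Name:    \
    return true;
    TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(CASE)
#undef CASE
    default:
      return false;
  }
#else
  return false;  // no opcodes can trap on this target: no switch, no C4065
#endif
}
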
// Addressing modes represent the "shape" of inputs to an instruction.