Fix MSVC C4065 warning in HasMemoryAccessMode().

Avoid generating switch statements with only a default case. Instead,
when there are no instructions that can trap, simply have
HasMemoryAccessMode() return false. This avoids an MSVC warning when
doing a 32-bit build.

To do this, remove empty TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST
definitions from instruction-codes-$arch.h files.
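
The guarded pattern this introduces in instruction-codes.h (see the diff
below) boils down to the following minimal sketch; the names mirror the
commit, but this is illustrative rather than the verbatim V8 source:

    // C4065 fires when a switch contains a `default` label but no `case`
    // labels, which is exactly what an empty X-macro list expands to.
    inline bool HasMemoryAccessMode(ArchOpcode opcode) {
    #if defined(TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST)
      // At least one opcode can trap: generate a real switch.
      switch (opcode) {
    #define CASE(Name) \
      case k##Name:    \
        return true;
        TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(CASE)
    #undef CASE
        default:
          return false;
      }
    #else
      // No trapping instructions on this architecture, so emit no switch
      // at all; a default-only switch would trigger MSVC warning C4065.
      return false;
    #endif
    }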

Change-Id: Ifed76eb9cbca169f30c188c1999e1e9be0b2c6aa
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3224807
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Reviewed-by: Maya Lekova <mslekova@chromium.org>
Commit-Queue: Lei Zhang <thestig@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77441}
Lei Zhang 2021-10-18 08:12:16 -07:00 committed by V8 LUCI CQ
parent 5bcd2037a3
commit 5333d90ecd
9 changed files with 2986 additions and 3014 deletions

src/compiler/backend/arm/instruction-codes-arm.h

@@ -12,361 +12,357 @@ namespace compiler {
// ARM-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-
#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(ArmAdd) \
  V(ArmAnd) \
  V(ArmBic) \
  V(ArmClz) \
  V(ArmCmp) \
  V(ArmCmn) \
  V(ArmTst) \
  V(ArmTeq) \
  V(ArmOrr) \
  V(ArmEor) \
  V(ArmSub) \
  V(ArmRsb) \
  V(ArmMul) \
  V(ArmMla) \
  V(ArmMls) \
  V(ArmSmull) \
  V(ArmSmmul) \
  V(ArmSmmla) \
  V(ArmUmull) \
  V(ArmSdiv) \
  V(ArmUdiv) \
  V(ArmMov) \
  V(ArmMvn) \
  V(ArmBfc) \
  V(ArmUbfx) \
  V(ArmSbfx) \
  V(ArmSxtb) \
  V(ArmSxth) \
  V(ArmSxtab) \
  V(ArmSxtah) \
  V(ArmUxtb) \
  V(ArmUxth) \
  V(ArmUxtab) \
  V(ArmRbit) \
  V(ArmRev) \
  V(ArmUxtah) \
  V(ArmAddPair) \
  V(ArmSubPair) \
  V(ArmMulPair) \
  V(ArmLslPair) \
  V(ArmLsrPair) \
  V(ArmAsrPair) \
  V(ArmVcmpF32) \
  V(ArmVaddF32) \
  V(ArmVsubF32) \
  V(ArmVmulF32) \
  V(ArmVmlaF32) \
  V(ArmVmlsF32) \
  V(ArmVdivF32) \
  V(ArmVabsF32) \
  V(ArmVnegF32) \
  V(ArmVsqrtF32) \
  V(ArmVcmpF64) \
  V(ArmVaddF64) \
  V(ArmVsubF64) \
  V(ArmVmulF64) \
  V(ArmVmlaF64) \
  V(ArmVmlsF64) \
  V(ArmVdivF64) \
  V(ArmVmodF64) \
  V(ArmVabsF64) \
  V(ArmVnegF64) \
  V(ArmVsqrtF64) \
  V(ArmVmullLow) \
  V(ArmVmullHigh) \
  V(ArmVrintmF32) \
  V(ArmVrintmF64) \
  V(ArmVrintpF32) \
  V(ArmVrintpF64) \
  V(ArmVrintzF32) \
  V(ArmVrintzF64) \
  V(ArmVrintaF64) \
  V(ArmVrintnF32) \
  V(ArmVrintnF64) \
  V(ArmVcvtF32F64) \
  V(ArmVcvtF64F32) \
  V(ArmVcvtF32S32) \
  V(ArmVcvtF32U32) \
  V(ArmVcvtF64S32) \
  V(ArmVcvtF64U32) \
  V(ArmVcvtS32F32) \
  V(ArmVcvtU32F32) \
  V(ArmVcvtS32F64) \
  V(ArmVcvtU32F64) \
  V(ArmVmovU32F32) \
  V(ArmVmovF32U32) \
  V(ArmVmovLowU32F64) \
  V(ArmVmovLowF64U32) \
  V(ArmVmovHighU32F64) \
  V(ArmVmovHighF64U32) \
  V(ArmVmovF64U32U32) \
  V(ArmVmovU32U32F64) \
  V(ArmVldrF32) \
  V(ArmVstrF32) \
  V(ArmVldrF64) \
  V(ArmVld1F64) \
  V(ArmVstrF64) \
  V(ArmVst1F64) \
  V(ArmVld1S128) \
  V(ArmVst1S128) \
  V(ArmVcnt) \
  V(ArmVpadal) \
  V(ArmVpaddl) \
  V(ArmFloat32Max) \
  V(ArmFloat64Max) \
  V(ArmFloat32Min) \
  V(ArmFloat64Min) \
  V(ArmFloat64SilenceNaN) \
  V(ArmLdrb) \
  V(ArmLdrsb) \
  V(ArmStrb) \
  V(ArmLdrh) \
  V(ArmLdrsh) \
  V(ArmStrh) \
  V(ArmLdr) \
  V(ArmStr) \
  V(ArmPush) \
  V(ArmPoke) \
  V(ArmPeek) \
  V(ArmDmbIsh) \
  V(ArmDsbIsb) \
  V(ArmF64x2Splat) \
  V(ArmF64x2ExtractLane) \
  V(ArmF64x2ReplaceLane) \
  V(ArmF64x2Abs) \
  V(ArmF64x2Neg) \
  V(ArmF64x2Sqrt) \
  V(ArmF64x2Add) \
  V(ArmF64x2Sub) \
  V(ArmF64x2Mul) \
  V(ArmF64x2Div) \
  V(ArmF64x2Min) \
  V(ArmF64x2Max) \
  V(ArmF64x2Eq) \
  V(ArmF64x2Ne) \
  V(ArmF64x2Lt) \
  V(ArmF64x2Le) \
  V(ArmF64x2Pmin) \
  V(ArmF64x2Pmax) \
  V(ArmF64x2Ceil) \
  V(ArmF64x2Floor) \
  V(ArmF64x2Trunc) \
  V(ArmF64x2NearestInt) \
  V(ArmF64x2ConvertLowI32x4S) \
  V(ArmF64x2ConvertLowI32x4U) \
  V(ArmF64x2PromoteLowF32x4) \
  V(ArmF32x4Splat) \
  V(ArmF32x4ExtractLane) \
  V(ArmF32x4ReplaceLane) \
  V(ArmF32x4SConvertI32x4) \
  V(ArmF32x4UConvertI32x4) \
  V(ArmF32x4Abs) \
  V(ArmF32x4Neg) \
  V(ArmF32x4Sqrt) \
  V(ArmF32x4RecipApprox) \
  V(ArmF32x4RecipSqrtApprox) \
  V(ArmF32x4Add) \
  V(ArmF32x4Sub) \
  V(ArmF32x4Mul) \
  V(ArmF32x4Div) \
  V(ArmF32x4Min) \
  V(ArmF32x4Max) \
  V(ArmF32x4Eq) \
  V(ArmF32x4Ne) \
  V(ArmF32x4Lt) \
  V(ArmF32x4Le) \
  V(ArmF32x4Pmin) \
  V(ArmF32x4Pmax) \
  V(ArmF32x4DemoteF64x2Zero) \
  V(ArmI64x2SplatI32Pair) \
  V(ArmI64x2ReplaceLaneI32Pair) \
  V(ArmI64x2Abs) \
  V(ArmI64x2Neg) \
  V(ArmI64x2Shl) \
  V(ArmI64x2ShrS) \
  V(ArmI64x2Add) \
  V(ArmI64x2Sub) \
  V(ArmI64x2Mul) \
  V(ArmI64x2ShrU) \
  V(ArmI64x2BitMask) \
  V(ArmI64x2Eq) \
  V(ArmI64x2Ne) \
  V(ArmI64x2GtS) \
  V(ArmI64x2GeS) \
  V(ArmI64x2SConvertI32x4Low) \
  V(ArmI64x2SConvertI32x4High) \
  V(ArmI64x2UConvertI32x4Low) \
  V(ArmI64x2UConvertI32x4High) \
  V(ArmI32x4Splat) \
  V(ArmI32x4ExtractLane) \
  V(ArmI32x4ReplaceLane) \
  V(ArmI32x4SConvertF32x4) \
  V(ArmI32x4SConvertI16x8Low) \
  V(ArmI32x4SConvertI16x8High) \
  V(ArmI32x4Neg) \
  V(ArmI32x4Shl) \
  V(ArmI32x4ShrS) \
  V(ArmI32x4Add) \
  V(ArmI32x4Sub) \
  V(ArmI32x4Mul) \
  V(ArmI32x4MinS) \
  V(ArmI32x4MaxS) \
  V(ArmI32x4Eq) \
  V(ArmI32x4Ne) \
  V(ArmI32x4GtS) \
  V(ArmI32x4GeS) \
  V(ArmI32x4UConvertF32x4) \
  V(ArmI32x4UConvertI16x8Low) \
  V(ArmI32x4UConvertI16x8High) \
  V(ArmI32x4ShrU) \
  V(ArmI32x4MinU) \
  V(ArmI32x4MaxU) \
  V(ArmI32x4GtU) \
  V(ArmI32x4GeU) \
  V(ArmI32x4Abs) \
  V(ArmI32x4BitMask) \
  V(ArmI32x4DotI16x8S) \
  V(ArmI32x4TruncSatF64x2SZero) \
  V(ArmI32x4TruncSatF64x2UZero) \
  V(ArmI16x8Splat) \
  V(ArmI16x8ExtractLaneS) \
  V(ArmI16x8ReplaceLane) \
  V(ArmI16x8SConvertI8x16Low) \
  V(ArmI16x8SConvertI8x16High) \
  V(ArmI16x8Neg) \
  V(ArmI16x8Shl) \
  V(ArmI16x8ShrS) \
  V(ArmI16x8SConvertI32x4) \
  V(ArmI16x8Add) \
  V(ArmI16x8AddSatS) \
  V(ArmI16x8Sub) \
  V(ArmI16x8SubSatS) \
  V(ArmI16x8Mul) \
  V(ArmI16x8MinS) \
  V(ArmI16x8MaxS) \
  V(ArmI16x8Eq) \
  V(ArmI16x8Ne) \
  V(ArmI16x8GtS) \
  V(ArmI16x8GeS) \
  V(ArmI16x8ExtractLaneU) \
  V(ArmI16x8UConvertI8x16Low) \
  V(ArmI16x8UConvertI8x16High) \
  V(ArmI16x8ShrU) \
  V(ArmI16x8UConvertI32x4) \
  V(ArmI16x8AddSatU) \
  V(ArmI16x8SubSatU) \
  V(ArmI16x8MinU) \
  V(ArmI16x8MaxU) \
  V(ArmI16x8GtU) \
  V(ArmI16x8GeU) \
  V(ArmI16x8RoundingAverageU) \
  V(ArmI16x8Abs) \
  V(ArmI16x8BitMask) \
  V(ArmI16x8Q15MulRSatS) \
  V(ArmI8x16Splat) \
  V(ArmI8x16ExtractLaneS) \
  V(ArmI8x16ReplaceLane) \
  V(ArmI8x16Neg) \
  V(ArmI8x16Shl) \
  V(ArmI8x16ShrS) \
  V(ArmI8x16SConvertI16x8) \
  V(ArmI8x16Add) \
  V(ArmI8x16AddSatS) \
  V(ArmI8x16Sub) \
  V(ArmI8x16SubSatS) \
  V(ArmI8x16MinS) \
  V(ArmI8x16MaxS) \
  V(ArmI8x16Eq) \
  V(ArmI8x16Ne) \
  V(ArmI8x16GtS) \
  V(ArmI8x16GeS) \
  V(ArmI8x16ExtractLaneU) \
  V(ArmI8x16ShrU) \
  V(ArmI8x16UConvertI16x8) \
  V(ArmI8x16AddSatU) \
  V(ArmI8x16SubSatU) \
  V(ArmI8x16MinU) \
  V(ArmI8x16MaxU) \
  V(ArmI8x16GtU) \
  V(ArmI8x16GeU) \
  V(ArmI8x16RoundingAverageU) \
  V(ArmI8x16Abs) \
  V(ArmI8x16BitMask) \
  V(ArmS128Const) \
  V(ArmS128Zero) \
  V(ArmS128AllOnes) \
  V(ArmS128Dup) \
  V(ArmS128And) \
  V(ArmS128Or) \
  V(ArmS128Xor) \
  V(ArmS128Not) \
  V(ArmS128Select) \
  V(ArmS128AndNot) \
  V(ArmS32x4ZipLeft) \
  V(ArmS32x4ZipRight) \
  V(ArmS32x4UnzipLeft) \
  V(ArmS32x4UnzipRight) \
  V(ArmS32x4TransposeLeft) \
  V(ArmS32x4TransposeRight) \
  V(ArmS32x4Shuffle) \
  V(ArmS16x8ZipLeft) \
  V(ArmS16x8ZipRight) \
  V(ArmS16x8UnzipLeft) \
  V(ArmS16x8UnzipRight) \
  V(ArmS16x8TransposeLeft) \
  V(ArmS16x8TransposeRight) \
  V(ArmS8x16ZipLeft) \
  V(ArmS8x16ZipRight) \
  V(ArmS8x16UnzipLeft) \
  V(ArmS8x16UnzipRight) \
  V(ArmS8x16TransposeLeft) \
  V(ArmS8x16TransposeRight) \
  V(ArmS8x16Concat) \
  V(ArmI8x16Swizzle) \
  V(ArmI8x16Shuffle) \
  V(ArmS32x2Reverse) \
  V(ArmS16x4Reverse) \
  V(ArmS16x2Reverse) \
  V(ArmS8x8Reverse) \
  V(ArmS8x4Reverse) \
  V(ArmS8x2Reverse) \
  V(ArmI64x2AllTrue) \
  V(ArmI32x4AllTrue) \
  V(ArmI16x8AllTrue) \
  V(ArmV128AnyTrue) \
  V(ArmI8x16AllTrue) \
  V(ArmS128Load8Splat) \
  V(ArmS128Load16Splat) \
  V(ArmS128Load32Splat) \
  V(ArmS128Load64Splat) \
  V(ArmS128Load8x8S) \
  V(ArmS128Load8x8U) \
  V(ArmS128Load16x4S) \
  V(ArmS128Load16x4U) \
  V(ArmS128Load32x2S) \
  V(ArmS128Load32x2U) \
  V(ArmS128Load32Zero) \
  V(ArmS128Load64Zero) \
  V(ArmS128LoadLaneLow) \
  V(ArmS128LoadLaneHigh) \
  V(ArmS128StoreLaneLow) \
  V(ArmS128StoreLaneHigh) \
  V(ArmWord32AtomicPairLoad) \
  V(ArmWord32AtomicPairStore) \
  V(ArmWord32AtomicPairAdd) \
  V(ArmWord32AtomicPairSub) \
  V(ArmWord32AtomicPairAnd) \
  V(ArmWord32AtomicPairOr) \
  V(ArmWord32AtomicPairXor) \
  V(ArmWord32AtomicPairExchange) \
  V(ArmWord32AtomicPairCompareExchange)
// Addressing modes represent the "shape" of inputs to an instruction.

src/compiler/backend/ia32/instruction-codes-ia32.h

@@ -12,358 +12,354 @@ namespace compiler {
// IA32-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-
#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(IA32Add) \
  V(IA32And) \
  V(IA32Cmp) \
  V(IA32Cmp16) \
  V(IA32Cmp8) \
  V(IA32Test) \
  V(IA32Test16) \
  V(IA32Test8) \
  V(IA32Or) \
  V(IA32Xor) \
  V(IA32Sub) \
  V(IA32Imul) \
  V(IA32ImulHigh) \
  V(IA32UmulHigh) \
  V(IA32Idiv) \
  V(IA32Udiv) \
  V(IA32Not) \
  V(IA32Neg) \
  V(IA32Shl) \
  V(IA32Shr) \
  V(IA32Sar) \
  V(IA32AddPair) \
  V(IA32SubPair) \
  V(IA32MulPair) \
  V(IA32ShlPair) \
  V(IA32ShrPair) \
  V(IA32SarPair) \
  V(IA32Rol) \
  V(IA32Ror) \
  V(IA32Lzcnt) \
  V(IA32Tzcnt) \
  V(IA32Popcnt) \
  V(IA32Bswap) \
  V(IA32MFence) \
  V(IA32LFence) \
  V(IA32Float32Cmp) \
  V(IA32Float32Sqrt) \
  V(IA32Float32Round) \
  V(IA32Float64Cmp) \
  V(IA32Float64Mod) \
  V(IA32Float32Max) \
  V(IA32Float64Max) \
  V(IA32Float32Min) \
  V(IA32Float64Min) \
  V(IA32Float64Sqrt) \
  V(IA32Float64Round) \
  V(IA32Float32ToFloat64) \
  V(IA32Float64ToFloat32) \
  V(IA32Float32ToInt32) \
  V(IA32Float32ToUint32) \
  V(IA32Float64ToInt32) \
  V(IA32Float64ToUint32) \
  V(SSEInt32ToFloat32) \
  V(IA32Uint32ToFloat32) \
  V(SSEInt32ToFloat64) \
  V(IA32Uint32ToFloat64) \
  V(IA32Float64ExtractLowWord32) \
  V(IA32Float64ExtractHighWord32) \
  V(IA32Float64InsertLowWord32) \
  V(IA32Float64InsertHighWord32) \
  V(IA32Float64LoadLowWord32) \
  V(IA32Float64SilenceNaN) \
  V(Float32Add) \
  V(Float32Sub) \
  V(Float64Add) \
  V(Float64Sub) \
  V(Float32Mul) \
  V(Float32Div) \
  V(Float64Mul) \
  V(Float64Div) \
  V(Float64Abs) \
  V(Float64Neg) \
  V(Float32Abs) \
  V(Float32Neg) \
  V(IA32Movsxbl) \
  V(IA32Movzxbl) \
  V(IA32Movb) \
  V(IA32Movsxwl) \
  V(IA32Movzxwl) \
  V(IA32Movw) \
  V(IA32Movl) \
  V(IA32Movss) \
  V(IA32Movsd) \
  V(IA32Movdqu) \
  V(IA32Movlps) \
  V(IA32Movhps) \
  V(IA32BitcastFI) \
  V(IA32BitcastIF) \
  V(IA32Lea) \
  V(IA32Push) \
  V(IA32Poke) \
  V(IA32Peek) \
  V(IA32F64x2Splat) \
  V(F64x2ExtractLane) \
  V(F64x2ReplaceLane) \
  V(IA32F64x2Sqrt) \
  V(IA32F64x2Add) \
  V(IA32F64x2Sub) \
  V(IA32F64x2Mul) \
  V(IA32F64x2Div) \
  V(IA32F64x2Min) \
  V(IA32F64x2Max) \
  V(IA32F64x2Eq) \
  V(IA32F64x2Ne) \
  V(IA32F64x2Lt) \
  V(IA32F64x2Le) \
  V(IA32F64x2Pmin) \
  V(IA32F64x2Pmax) \
  V(IA32F64x2Round) \
  V(IA32F64x2ConvertLowI32x4S) \
  V(IA32F64x2ConvertLowI32x4U) \
  V(IA32F64x2PromoteLowF32x4) \
  V(IA32I64x2SplatI32Pair) \
  V(IA32I64x2ReplaceLaneI32Pair) \
  V(IA32I64x2Abs) \
  V(IA32I64x2Neg) \
  V(IA32I64x2Shl) \
  V(IA32I64x2ShrS) \
  V(IA32I64x2Add) \
  V(IA32I64x2Sub) \
  V(IA32I64x2Mul) \
  V(IA32I64x2ShrU) \
  V(IA32I64x2BitMask) \
  V(IA32I64x2Eq) \
  V(IA32I64x2Ne) \
  V(IA32I64x2GtS) \
  V(IA32I64x2GeS) \
  V(IA32I64x2ExtMulLowI32x4S) \
  V(IA32I64x2ExtMulHighI32x4S) \
  V(IA32I64x2ExtMulLowI32x4U) \
  V(IA32I64x2ExtMulHighI32x4U) \
  V(IA32I64x2SConvertI32x4Low) \
  V(IA32I64x2SConvertI32x4High) \
  V(IA32I64x2UConvertI32x4Low) \
  V(IA32I64x2UConvertI32x4High) \
  V(IA32F32x4Splat) \
  V(IA32F32x4ExtractLane) \
  V(IA32Insertps) \
  V(IA32F32x4SConvertI32x4) \
  V(IA32F32x4UConvertI32x4) \
  V(IA32F32x4Sqrt) \
  V(IA32F32x4RecipApprox) \
  V(IA32F32x4RecipSqrtApprox) \
  V(IA32F32x4Add) \
  V(IA32F32x4Sub) \
  V(IA32F32x4Mul) \
  V(IA32F32x4Div) \
  V(IA32F32x4Min) \
  V(IA32F32x4Max) \
  V(IA32F32x4Eq) \
  V(IA32F32x4Ne) \
  V(IA32F32x4Lt) \
  V(IA32F32x4Le) \
  V(IA32F32x4Pmin) \
  V(IA32F32x4Pmax) \
  V(IA32F32x4Round) \
  V(IA32F32x4DemoteF64x2Zero) \
  V(IA32I32x4Splat) \
  V(IA32I32x4ExtractLane) \
  V(IA32I32x4SConvertF32x4) \
  V(IA32I32x4SConvertI16x8Low) \
  V(IA32I32x4SConvertI16x8High) \
  V(IA32I32x4Neg) \
  V(IA32I32x4Shl) \
  V(IA32I32x4ShrS) \
  V(IA32I32x4Add) \
  V(IA32I32x4Sub) \
  V(IA32I32x4Mul) \
  V(IA32I32x4MinS) \
  V(IA32I32x4MaxS) \
  V(IA32I32x4Eq) \
  V(IA32I32x4Ne) \
  V(IA32I32x4GtS) \
  V(IA32I32x4GeS) \
  V(SSEI32x4UConvertF32x4) \
  V(AVXI32x4UConvertF32x4) \
  V(IA32I32x4UConvertI16x8Low) \
  V(IA32I32x4UConvertI16x8High) \
  V(IA32I32x4ShrU) \
  V(IA32I32x4MinU) \
  V(IA32I32x4MaxU) \
  V(SSEI32x4GtU) \
  V(AVXI32x4GtU) \
  V(SSEI32x4GeU) \
  V(AVXI32x4GeU) \
  V(IA32I32x4Abs) \
  V(IA32I32x4BitMask) \
  V(IA32I32x4DotI16x8S) \
  V(IA32I32x4ExtMulLowI16x8S) \
  V(IA32I32x4ExtMulHighI16x8S) \
  V(IA32I32x4ExtMulLowI16x8U) \
  V(IA32I32x4ExtMulHighI16x8U) \
  V(IA32I32x4ExtAddPairwiseI16x8S) \
  V(IA32I32x4ExtAddPairwiseI16x8U) \
  V(IA32I32x4TruncSatF64x2SZero) \
  V(IA32I32x4TruncSatF64x2UZero) \
  V(IA32I16x8Splat) \
  V(IA32I16x8ExtractLaneS) \
  V(IA32I16x8SConvertI8x16Low) \
  V(IA32I16x8SConvertI8x16High) \
  V(IA32I16x8Neg) \
  V(IA32I16x8Shl) \
  V(IA32I16x8ShrS) \
  V(IA32I16x8SConvertI32x4) \
  V(IA32I16x8Add) \
  V(IA32I16x8AddSatS) \
  V(IA32I16x8Sub) \
  V(IA32I16x8SubSatS) \
  V(IA32I16x8Mul) \
  V(IA32I16x8MinS) \
  V(IA32I16x8MaxS) \
  V(IA32I16x8Eq) \
  V(SSEI16x8Ne) \
  V(AVXI16x8Ne) \
  V(IA32I16x8GtS) \
  V(SSEI16x8GeS) \
  V(AVXI16x8GeS) \
  V(IA32I16x8UConvertI8x16Low) \
  V(IA32I16x8UConvertI8x16High) \
  V(IA32I16x8ShrU) \
  V(IA32I16x8UConvertI32x4) \
  V(IA32I16x8AddSatU) \
  V(IA32I16x8SubSatU) \
  V(IA32I16x8MinU) \
  V(IA32I16x8MaxU) \
  V(SSEI16x8GtU) \
  V(AVXI16x8GtU) \
  V(SSEI16x8GeU) \
  V(AVXI16x8GeU) \
  V(IA32I16x8RoundingAverageU) \
  V(IA32I16x8Abs) \
  V(IA32I16x8BitMask) \
  V(IA32I16x8ExtMulLowI8x16S) \
  V(IA32I16x8ExtMulHighI8x16S) \
  V(IA32I16x8ExtMulLowI8x16U) \
  V(IA32I16x8ExtMulHighI8x16U) \
  V(IA32I16x8ExtAddPairwiseI8x16S) \
  V(IA32I16x8ExtAddPairwiseI8x16U) \
  V(IA32I16x8Q15MulRSatS) \
  V(IA32I8x16Splat) \
  V(IA32I8x16ExtractLaneS) \
  V(IA32Pinsrb) \
  V(IA32Pinsrw) \
  V(IA32Pinsrd) \
  V(IA32Pextrb) \
  V(IA32Pextrw) \
  V(IA32S128Store32Lane) \
  V(IA32I8x16SConvertI16x8) \
  V(IA32I8x16Neg) \
  V(IA32I8x16Shl) \
  V(IA32I8x16ShrS) \
  V(IA32I8x16Add) \
  V(IA32I8x16AddSatS) \
  V(IA32I8x16Sub) \
  V(IA32I8x16SubSatS) \
  V(IA32I8x16MinS) \
  V(IA32I8x16MaxS) \
  V(IA32I8x16Eq) \
  V(SSEI8x16Ne) \
  V(AVXI8x16Ne) \
  V(IA32I8x16GtS) \
  V(SSEI8x16GeS) \
  V(AVXI8x16GeS) \
  V(IA32I8x16UConvertI16x8) \
  V(IA32I8x16AddSatU) \
  V(IA32I8x16SubSatU) \
  V(IA32I8x16ShrU) \
  V(IA32I8x16MinU) \
  V(IA32I8x16MaxU) \
  V(SSEI8x16GtU) \
  V(AVXI8x16GtU) \
  V(SSEI8x16GeU) \
  V(AVXI8x16GeU) \
  V(IA32I8x16RoundingAverageU) \
  V(IA32I8x16Abs) \
  V(IA32I8x16BitMask) \
  V(IA32I8x16Popcnt) \
  V(IA32S128Const) \
  V(IA32S128Zero) \
  V(IA32S128AllOnes) \
  V(IA32S128Not) \
  V(IA32S128And) \
  V(IA32S128Or) \
  V(IA32S128Xor) \
  V(IA32S128Select) \
  V(IA32S128AndNot) \
  V(IA32I8x16Swizzle) \
  V(IA32I8x16Shuffle) \
  V(IA32S128Load8Splat) \
  V(IA32S128Load16Splat) \
  V(IA32S128Load32Splat) \
  V(IA32S128Load64Splat) \
  V(IA32S128Load8x8S) \
  V(IA32S128Load8x8U) \
  V(IA32S128Load16x4S) \
  V(IA32S128Load16x4U) \
  V(IA32S128Load32x2S) \
  V(IA32S128Load32x2U) \
  V(IA32S32x4Rotate) \
  V(IA32S32x4Swizzle) \
  V(IA32S32x4Shuffle) \
  V(IA32S16x8Blend) \
  V(IA32S16x8HalfShuffle1) \
  V(IA32S16x8HalfShuffle2) \
  V(IA32S8x16Alignr) \
  V(IA32S16x8Dup) \
  V(IA32S8x16Dup) \
  V(SSES16x8UnzipHigh) \
  V(AVXS16x8UnzipHigh) \
  V(SSES16x8UnzipLow) \
  V(AVXS16x8UnzipLow) \
  V(SSES8x16UnzipHigh) \
  V(AVXS8x16UnzipHigh) \
  V(SSES8x16UnzipLow) \
  V(AVXS8x16UnzipLow) \
  V(IA32S64x2UnpackHigh) \
  V(IA32S32x4UnpackHigh) \
  V(IA32S16x8UnpackHigh) \
  V(IA32S8x16UnpackHigh) \
  V(IA32S64x2UnpackLow) \
  V(IA32S32x4UnpackLow) \
  V(IA32S16x8UnpackLow) \
  V(IA32S8x16UnpackLow) \
  V(SSES8x16TransposeLow) \
  V(AVXS8x16TransposeLow) \
  V(SSES8x16TransposeHigh) \
  V(AVXS8x16TransposeHigh) \
  V(SSES8x8Reverse) \
  V(AVXS8x8Reverse) \
  V(SSES8x4Reverse) \
  V(AVXS8x4Reverse) \
  V(SSES8x2Reverse) \
  V(AVXS8x2Reverse) \
  V(IA32S128AnyTrue) \
  V(IA32I64x2AllTrue) \
  V(IA32I32x4AllTrue) \
  V(IA32I16x8AllTrue) \
  V(IA32I8x16AllTrue) \
  V(IA32Word32AtomicPairLoad) \
  V(IA32Word32ReleasePairStore) \
  V(IA32Word32SeqCstPairStore) \
  V(IA32Word32AtomicPairAdd) \
  V(IA32Word32AtomicPairSub) \
  V(IA32Word32AtomicPairAnd) \
  V(IA32Word32AtomicPairOr) \
  V(IA32Word32AtomicPairXor) \
  V(IA32Word32AtomicPairExchange) \
  V(IA32Word32AtomicPairCompareExchange)
// Addressing modes represent the "shape" of inputs to an instruction.

src/compiler/backend/instruction-codes.h

@@ -315,6 +315,7 @@ using AccessModeField = base::BitField<MemoryAccessMode, 30, 2>;
// scary. {HasMemoryAccessMode} does not include these instructions, so they can
// be easily found by guarding encoding.
inline bool HasMemoryAccessMode(ArchOpcode opcode) {
+#if defined(TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST)
  switch (opcode) {
#define CASE(Name) \
  case k##Name:    \
@@ -324,6 +325,9 @@ inline bool HasMemoryAccessMode(ArchOpcode opcode) {
    default:
      return false;
  }
+#else
+  return false;
+#endif
}
using DeoptImmedArgsCountField = base::BitField<int, 22, 2>;
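
For contrast, on an architecture that does define the list, the same X-macro
feeds both TARGET_ARCH_OPCODE_LIST and the switch above. A minimal sketch with
invented opcode names (the real x64 list is not shown in this view) — because
the list participates in TARGET_ARCH_OPCODE_LIST, dropping the empty
definitions also meant deleting the
TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) line from each
per-architecture list, as the hunks on this page show:

    // Hypothetical per-architecture header (names invented for illustration).
    #define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
      V(X64MovlExample)                                         \
      V(X64MovqExample)

    // Opcodes that may trap are folded into the full opcode list, so each
    // name is declared once and reused by HasMemoryAccessMode().
    #define TARGET_ARCH_OPCODE_LIST(V)                   \
      TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
      V(X64AddExample)                                    \
      V(X64SubExample)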

src/compiler/backend/loong64/instruction-codes-loong64.h

@@ -12,369 +12,365 @@ namespace compiler {
// LOONG64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-
#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(Loong64Add_d) \
  V(Loong64Add_w) \
  V(Loong64AddOvf_d) \
  V(Loong64Sub_d) \
  V(Loong64Sub_w) \
  V(Loong64SubOvf_d) \
  V(Loong64Mul_d) \
  V(Loong64MulOvf_w) \
  V(Loong64Mulh_d) \
  V(Loong64Mulh_w) \
  V(Loong64Mulh_wu) \
  V(Loong64Mul_w) \
  V(Loong64Div_d) \
  V(Loong64Div_w) \
  V(Loong64Div_du) \
  V(Loong64Div_wu) \
  V(Loong64Mod_d) \
  V(Loong64Mod_w) \
  V(Loong64Mod_du) \
  V(Loong64Mod_wu) \
  V(Loong64And) \
  V(Loong64And32) \
  V(Loong64Or) \
  V(Loong64Or32) \
  V(Loong64Nor) \
  V(Loong64Nor32) \
  V(Loong64Xor) \
  V(Loong64Xor32) \
  V(Loong64Alsl_d) \
  V(Loong64Alsl_w) \
  V(Loong64Sll_d) \
  V(Loong64Sll_w) \
  V(Loong64Srl_d) \
  V(Loong64Srl_w) \
  V(Loong64Sra_d) \
  V(Loong64Sra_w) \
  V(Loong64Rotr_d) \
  V(Loong64Rotr_w) \
  V(Loong64Bstrpick_d) \
  V(Loong64Bstrpick_w) \
  V(Loong64Bstrins_d) \
  V(Loong64Bstrins_w) \
  V(Loong64ByteSwap64) \
  V(Loong64ByteSwap32) \
  V(Loong64Clz_d) \
  V(Loong64Clz_w) \
  V(Loong64Mov) \
  V(Loong64Tst) \
  V(Loong64Cmp) \
  V(Loong64Float32Cmp) \
  V(Loong64Float32Add) \
  V(Loong64Float32Sub) \
  V(Loong64Float32Mul) \
  V(Loong64Float32Div) \
  V(Loong64Float32Abs) \
  V(Loong64Float32Neg) \
  V(Loong64Float32Sqrt) \
  V(Loong64Float32Max) \
  V(Loong64Float32Min) \
  V(Loong64Float32ToFloat64) \
  V(Loong64Float32RoundDown) \
  V(Loong64Float32RoundUp) \
  V(Loong64Float32RoundTruncate) \
  V(Loong64Float32RoundTiesEven) \
  V(Loong64Float32ToInt32) \
  V(Loong64Float32ToInt64) \
  V(Loong64Float32ToUint32) \
  V(Loong64Float32ToUint64) \
  V(Loong64Float64Cmp) \
  V(Loong64Float64Add) \
  V(Loong64Float64Sub) \
  V(Loong64Float64Mul) \
  V(Loong64Float64Div) \
  V(Loong64Float64Mod) \
  V(Loong64Float64Abs) \
  V(Loong64Float64Neg) \
  V(Loong64Float64Sqrt) \
  V(Loong64Float64Max) \
  V(Loong64Float64Min) \
  V(Loong64Float64ToFloat32) \
  V(Loong64Float64RoundDown) \
  V(Loong64Float64RoundUp) \
  V(Loong64Float64RoundTruncate) \
  V(Loong64Float64RoundTiesEven) \
  V(Loong64Float64ToInt32) \
  V(Loong64Float64ToInt64) \
  V(Loong64Float64ToUint32) \
  V(Loong64Float64ToUint64) \
  V(Loong64Int32ToFloat32) \
  V(Loong64Int32ToFloat64) \
  V(Loong64Int64ToFloat32) \
  V(Loong64Int64ToFloat64) \
  V(Loong64Uint32ToFloat32) \
  V(Loong64Uint32ToFloat64) \
  V(Loong64Uint64ToFloat32) \
  V(Loong64Uint64ToFloat64) \
  V(Loong64Float64ExtractLowWord32) \
  V(Loong64Float64ExtractHighWord32) \
  V(Loong64Float64InsertLowWord32) \
  V(Loong64Float64InsertHighWord32) \
  V(Loong64BitcastDL) \
  V(Loong64BitcastLD) \
  V(Loong64Float64SilenceNaN) \
  V(Loong64Ld_b) \
  V(Loong64Ld_bu) \
  V(Loong64St_b) \
  V(Loong64Ld_h) \
  V(Loong64Ld_hu) \
  V(Loong64St_h) \
  V(Loong64Ld_w) \
  V(Loong64Ld_wu) \
  V(Loong64St_w) \
  V(Loong64Ld_d) \
  V(Loong64St_d) \
  V(Loong64Fld_s) \
  V(Loong64Fst_s) \
  V(Loong64Fld_d) \
  V(Loong64Fst_d) \
  V(Loong64Push) \
  V(Loong64Peek) \
  V(Loong64Poke) \
  V(Loong64StackClaim) \
  V(Loong64Ext_w_b) \
  V(Loong64Ext_w_h) \
  V(Loong64Dbar) \
  V(Loong64S128Const) \
  V(Loong64S128Zero) \
  V(Loong64S128AllOnes) \
  V(Loong64I32x4Splat) \
  V(Loong64I32x4ExtractLane) \
  V(Loong64I32x4ReplaceLane) \
  V(Loong64I32x4Add) \
  V(Loong64I32x4Sub) \
  V(Loong64F64x2Abs) \
  V(Loong64F64x2Neg) \
  V(Loong64F32x4Splat) \
  V(Loong64F32x4ExtractLane) \
  V(Loong64F32x4ReplaceLane) \
  V(Loong64F32x4SConvertI32x4) \
  V(Loong64F32x4UConvertI32x4) \
  V(Loong64I32x4Mul) \
  V(Loong64I32x4MaxS) \
  V(Loong64I32x4MinS) \
  V(Loong64I32x4Eq) \
  V(Loong64I32x4Ne) \
  V(Loong64I32x4Shl) \
  V(Loong64I32x4ShrS) \
  V(Loong64I32x4ShrU) \
  V(Loong64I32x4MaxU) \
  V(Loong64I32x4MinU) \
  V(Loong64F64x2Sqrt) \
  V(Loong64F64x2Add) \
  V(Loong64F64x2Sub) \
  V(Loong64F64x2Mul) \
  V(Loong64F64x2Div) \
  V(Loong64F64x2Min) \
  V(Loong64F64x2Max) \
  V(Loong64F64x2Eq) \
  V(Loong64F64x2Ne) \
  V(Loong64F64x2Lt) \
  V(Loong64F64x2Le) \
  V(Loong64F64x2Splat) \
  V(Loong64F64x2ExtractLane) \
  V(Loong64F64x2ReplaceLane) \
  V(Loong64F64x2Pmin) \
  V(Loong64F64x2Pmax) \
  V(Loong64F64x2Ceil) \
  V(Loong64F64x2Floor) \
  V(Loong64F64x2Trunc) \
  V(Loong64F64x2NearestInt) \
  V(Loong64F64x2ConvertLowI32x4S) \
  V(Loong64F64x2ConvertLowI32x4U) \
  V(Loong64F64x2PromoteLowF32x4) \
  V(Loong64I64x2Splat) \
  V(Loong64I64x2ExtractLane) \
  V(Loong64I64x2ReplaceLane) \
  V(Loong64I64x2Add) \
  V(Loong64I64x2Sub) \
  V(Loong64I64x2Mul) \
  V(Loong64I64x2Neg) \
  V(Loong64I64x2Shl) \
  V(Loong64I64x2ShrS) \
  V(Loong64I64x2ShrU) \
  V(Loong64I64x2BitMask) \
  V(Loong64I64x2Eq) \
  V(Loong64I64x2Ne) \
  V(Loong64I64x2GtS) \
  V(Loong64I64x2GeS) \
  V(Loong64I64x2Abs) \
  V(Loong64I64x2SConvertI32x4Low) \
  V(Loong64I64x2SConvertI32x4High) \
  V(Loong64I64x2UConvertI32x4Low) \
  V(Loong64I64x2UConvertI32x4High) \
  V(Loong64ExtMulLow) \
  V(Loong64ExtMulHigh) \
  V(Loong64ExtAddPairwise) \
  V(Loong64F32x4Abs) \
  V(Loong64F32x4Neg) \
  V(Loong64F32x4Sqrt) \
  V(Loong64F32x4RecipApprox) \
  V(Loong64F32x4RecipSqrtApprox) \
  V(Loong64F32x4Add) \
  V(Loong64F32x4Sub) \
  V(Loong64F32x4Mul) \
  V(Loong64F32x4Div) \
  V(Loong64F32x4Max) \
  V(Loong64F32x4Min) \
  V(Loong64F32x4Eq) \
  V(Loong64F32x4Ne) \
  V(Loong64F32x4Lt) \
  V(Loong64F32x4Le) \
  V(Loong64F32x4Pmin) \
  V(Loong64F32x4Pmax) \
  V(Loong64F32x4Ceil) \
  V(Loong64F32x4Floor) \
  V(Loong64F32x4Trunc) \
  V(Loong64F32x4NearestInt) \
  V(Loong64F32x4DemoteF64x2Zero) \
  V(Loong64I32x4SConvertF32x4) \
  V(Loong64I32x4UConvertF32x4) \
  V(Loong64I32x4Neg) \
  V(Loong64I32x4GtS) \
  V(Loong64I32x4GeS) \
  V(Loong64I32x4GtU) \
  V(Loong64I32x4GeU) \
  V(Loong64I32x4Abs) \
  V(Loong64I32x4BitMask) \
  V(Loong64I32x4DotI16x8S) \
  V(Loong64I32x4TruncSatF64x2SZero) \
  V(Loong64I32x4TruncSatF64x2UZero) \
  V(Loong64I16x8Splat) \
  V(Loong64I16x8ExtractLaneU) \
  V(Loong64I16x8ExtractLaneS) \
  V(Loong64I16x8ReplaceLane) \
  V(Loong64I16x8Neg) \
  V(Loong64I16x8Shl) \
  V(Loong64I16x8ShrS) \
  V(Loong64I16x8ShrU) \
  V(Loong64I16x8Add) \
  V(Loong64I16x8AddSatS) \
  V(Loong64I16x8Sub) \
  V(Loong64I16x8SubSatS) \
  V(Loong64I16x8Mul) \
  V(Loong64I16x8MaxS) \
  V(Loong64I16x8MinS) \
  V(Loong64I16x8Eq) \
  V(Loong64I16x8Ne) \
  V(Loong64I16x8GtS) \
  V(Loong64I16x8GeS) \
  V(Loong64I16x8AddSatU) \
  V(Loong64I16x8SubSatU) \
  V(Loong64I16x8MaxU) \
  V(Loong64I16x8MinU) \
  V(Loong64I16x8GtU) \
  V(Loong64I16x8GeU) \
  V(Loong64I16x8RoundingAverageU) \
  V(Loong64I16x8Abs) \
  V(Loong64I16x8BitMask) \
  V(Loong64I16x8Q15MulRSatS) \
  V(Loong64I8x16Splat) \
  V(Loong64I8x16ExtractLaneU) \
  V(Loong64I8x16ExtractLaneS) \
  V(Loong64I8x16ReplaceLane) \
  V(Loong64I8x16Neg) \
  V(Loong64I8x16Shl) \
  V(Loong64I8x16ShrS) \
  V(Loong64I8x16Add) \
  V(Loong64I8x16AddSatS) \
  V(Loong64I8x16Sub) \
  V(Loong64I8x16SubSatS) \
  V(Loong64I8x16MaxS) \
  V(Loong64I8x16MinS) \
  V(Loong64I8x16Eq) \
  V(Loong64I8x16Ne) \
  V(Loong64I8x16GtS) \
  V(Loong64I8x16GeS) \
  V(Loong64I8x16ShrU) \
  V(Loong64I8x16AddSatU) \
  V(Loong64I8x16SubSatU) \
  V(Loong64I8x16MaxU) \
  V(Loong64I8x16MinU) \
  V(Loong64I8x16GtU) \
  V(Loong64I8x16GeU) \
  V(Loong64I8x16RoundingAverageU) \
  V(Loong64I8x16Abs) \
  V(Loong64I8x16Popcnt) \
  V(Loong64I8x16BitMask) \
  V(Loong64S128And) \
  V(Loong64S128Or) \
  V(Loong64S128Xor) \
  V(Loong64S128Not) \
  V(Loong64S128Select) \
  V(Loong64S128AndNot) \
  V(Loong64I64x2AllTrue) \
  V(Loong64I32x4AllTrue) \
  V(Loong64I16x8AllTrue) \
  V(Loong64I8x16AllTrue) \
  V(Loong64V128AnyTrue) \
  V(Loong64S32x4InterleaveRight) \
  V(Loong64S32x4InterleaveLeft) \
  V(Loong64S32x4PackEven) \
  V(Loong64S32x4PackOdd) \
  V(Loong64S32x4InterleaveEven) \
  V(Loong64S32x4InterleaveOdd) \
  V(Loong64S32x4Shuffle) \
  V(Loong64S16x8InterleaveRight) \
  V(Loong64S16x8InterleaveLeft) \
  V(Loong64S16x8PackEven) \
  V(Loong64S16x8PackOdd) \
  V(Loong64S16x8InterleaveEven) \
  V(Loong64S16x8InterleaveOdd) \
  V(Loong64S16x4Reverse) \
  V(Loong64S16x2Reverse) \
  V(Loong64S8x16InterleaveRight) \
  V(Loong64S8x16InterleaveLeft) \
  V(Loong64S8x16PackEven) \
  V(Loong64S8x16PackOdd) \
  V(Loong64S8x16InterleaveEven) \
  V(Loong64S8x16InterleaveOdd) \
  V(Loong64I8x16Shuffle) \
  V(Loong64I8x16Swizzle) \
  V(Loong64S8x16Concat) \
  V(Loong64S8x8Reverse) \
  V(Loong64S8x4Reverse) \
  V(Loong64S8x2Reverse) \
  V(Loong64S128LoadSplat) \
  V(Loong64S128Load8x8S) \
  V(Loong64S128Load8x8U) \
  V(Loong64S128Load16x4S) \
  V(Loong64S128Load16x4U) \
  V(Loong64S128Load32x2S) \
  V(Loong64S128Load32x2U) \
  V(Loong64S128Load32Zero) \
  V(Loong64S128Load64Zero) \
  V(Loong64LoadLane) \
  V(Loong64StoreLane) \
  V(Loong64I32x4SConvertI16x8Low) \
  V(Loong64I32x4SConvertI16x8High) \
  V(Loong64I32x4UConvertI16x8Low) \
  V(Loong64I32x4UConvertI16x8High) \
  V(Loong64I16x8SConvertI8x16Low) \
  V(Loong64I16x8SConvertI8x16High) \
  V(Loong64I16x8SConvertI32x4) \
  V(Loong64I16x8UConvertI32x4) \
  V(Loong64I16x8UConvertI8x16Low) \
  V(Loong64I16x8UConvertI8x16High) \
  V(Loong64I8x16SConvertI16x8) \
  V(Loong64I8x16UConvertI16x8) \
  V(Loong64StoreCompressTagged) \
  V(Loong64Word64AtomicLoadUint32) \
  V(Loong64Word64AtomicLoadUint64) \
  V(Loong64Word64AtomicStoreWord64) \
  V(Loong64Word64AtomicAddUint64) \
  V(Loong64Word64AtomicSubUint64) \
  V(Loong64Word64AtomicAndUint64) \
  V(Loong64Word64AtomicOrUint64) \
  V(Loong64Word64AtomicXorUint64) \
  V(Loong64Word64AtomicExchangeUint64) \
  V(Loong64Word64AtomicCompareExchangeUint64)
// Addressing modes represent the "shape" of inputs to an instruction.

src/compiler/backend/mips/instruction-codes-mips.h

@@ -12,373 +12,369 @@ namespace compiler {
// MIPS-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-
#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(MipsAdd) \
  V(MipsAddOvf) \
  V(MipsSub) \
  V(MipsSubOvf) \
  V(MipsMul) \
  V(MipsMulOvf) \
  V(MipsMulHigh) \
  V(MipsMulHighU) \
  V(MipsDiv) \
  V(MipsDivU) \
  V(MipsMod) \
  V(MipsModU) \
  V(MipsAnd) \
  V(MipsOr) \
  V(MipsNor) \
  V(MipsXor) \
  V(MipsClz) \
  V(MipsCtz) \
  V(MipsPopcnt) \
  V(MipsLsa) \
  V(MipsShl) \
  V(MipsShr) \
  V(MipsSar) \
  V(MipsShlPair) \
  V(MipsShrPair) \
  V(MipsSarPair) \
  V(MipsExt) \
  V(MipsIns) \
  V(MipsRor) \
  V(MipsMov) \
  V(MipsTst) \
  V(MipsCmp) \
  V(MipsCmpS) \
  V(MipsAddS) \
  V(MipsSubS) \
  V(MipsMulS) \
  V(MipsDivS) \
  V(MipsAbsS) \
  V(MipsSqrtS) \
  V(MipsMaxS) \
  V(MipsMinS) \
  V(MipsCmpD) \
  V(MipsAddD) \
  V(MipsSubD) \
  V(MipsMulD) \
  V(MipsDivD) \
  V(MipsModD) \
  V(MipsAbsD) \
  V(MipsSqrtD) \
  V(MipsMaxD) \
  V(MipsMinD) \
  V(MipsNegS) \
  V(MipsNegD) \
  V(MipsAddPair) \
  V(MipsSubPair) \
  V(MipsMulPair) \
  V(MipsMaddS) \
  V(MipsMaddD) \
  V(MipsMsubS) \
  V(MipsMsubD) \
  V(MipsFloat32RoundDown) \
  V(MipsFloat32RoundTruncate) \
  V(MipsFloat32RoundUp) \
  V(MipsFloat32RoundTiesEven) \
  V(MipsFloat64RoundDown) \
  V(MipsFloat64RoundTruncate) \
  V(MipsFloat64RoundUp) \
  V(MipsFloat64RoundTiesEven) \
  V(MipsCvtSD) \
  V(MipsCvtDS) \
  V(MipsTruncWD) \
  V(MipsRoundWD) \
  V(MipsFloorWD) \
  V(MipsCeilWD) \
  V(MipsTruncWS) \
  V(MipsRoundWS) \
  V(MipsFloorWS) \
  V(MipsCeilWS) \
  V(MipsTruncUwD) \
  V(MipsTruncUwS) \
  V(MipsCvtDW) \
  V(MipsCvtDUw) \
  V(MipsCvtSW) \
  V(MipsCvtSUw) \
  V(MipsLb) \
  V(MipsLbu) \
  V(MipsSb) \
  V(MipsLh) \
  V(MipsUlh) \
  V(MipsLhu) \
  V(MipsUlhu) \
  V(MipsSh) \
  V(MipsUsh) \
  V(MipsLw) \
  V(MipsUlw) \
  V(MipsSw) \
  V(MipsUsw) \
  V(MipsLwc1) \
  V(MipsUlwc1) \
  V(MipsSwc1) \
  V(MipsUswc1) \
  V(MipsLdc1) \
  V(MipsUldc1) \
  V(MipsSdc1) \
  V(MipsUsdc1) \
  V(MipsFloat64ExtractLowWord32) \
  V(MipsFloat64ExtractHighWord32) \
  V(MipsFloat64InsertLowWord32) \
  V(MipsFloat64InsertHighWord32) \
  V(MipsFloat64SilenceNaN) \
  V(MipsFloat32Max) \
  V(MipsFloat64Max) \
  V(MipsFloat32Min) \
  V(MipsFloat64Min) \
  V(MipsPush) \
  V(MipsPeek) \
  V(MipsStoreToStackSlot) \
  V(MipsByteSwap32) \
  V(MipsStackClaim) \
  V(MipsSeb) \
  V(MipsSeh) \
  V(MipsSync) \
  V(MipsS128Zero) \
  V(MipsI32x4Splat) \
  V(MipsI32x4ExtractLane) \
  V(MipsI32x4ReplaceLane) \
  V(MipsI32x4Add) \
  V(MipsI32x4Sub) \
  V(MipsF64x2Abs) \
  V(MipsF64x2Neg) \
  V(MipsF64x2Sqrt) \
  V(MipsF64x2Add) \
  V(MipsF64x2Sub) \
  V(MipsF64x2Mul) \
  V(MipsF64x2Div) \
  V(MipsF64x2Min) \
  V(MipsF64x2Max) \
  V(MipsF64x2Eq) \
  V(MipsF64x2Ne) \
  V(MipsF64x2Lt) \
  V(MipsF64x2Le) \
  V(MipsF64x2Pmin) \
  V(MipsF64x2Pmax) \
  V(MipsF64x2Ceil) \
  V(MipsF64x2Floor) \
  V(MipsF64x2Trunc) \
  V(MipsF64x2NearestInt) \
  V(MipsF64x2ConvertLowI32x4S) \
  V(MipsF64x2ConvertLowI32x4U) \
  V(MipsF64x2PromoteLowF32x4) \
  V(MipsI64x2Add) \
  V(MipsI64x2Sub) \
  V(MipsI64x2Mul) \
  V(MipsI64x2Neg) \
  V(MipsI64x2Shl) \
  V(MipsI64x2ShrS) \
  V(MipsI64x2ShrU) \
  V(MipsI64x2BitMask) \
  V(MipsI64x2Eq) \
  V(MipsI64x2Ne) \
  V(MipsI64x2GtS) \
  V(MipsI64x2GeS) \
  V(MipsI64x2Abs) \
  V(MipsI64x2SConvertI32x4Low) \
  V(MipsI64x2SConvertI32x4High) \
  V(MipsI64x2UConvertI32x4Low) \
  V(MipsI64x2UConvertI32x4High) \
  V(MipsI64x2ExtMulLowI32x4S) \
  V(MipsI64x2ExtMulHighI32x4S) \
  V(MipsI64x2ExtMulLowI32x4U) \
  V(MipsI64x2ExtMulHighI32x4U) \
  V(MipsF32x4Splat) \
  V(MipsF32x4ExtractLane) \
  V(MipsF32x4ReplaceLane) \
  V(MipsF32x4SConvertI32x4) \
  V(MipsF32x4UConvertI32x4) \
  V(MipsF32x4DemoteF64x2Zero) \
  V(MipsI32x4Mul) \
  V(MipsI32x4MaxS) \
  V(MipsI32x4MinS) \
  V(MipsI32x4Eq) \
  V(MipsI32x4Ne) \
  V(MipsI32x4Shl) \
  V(MipsI32x4ShrS) \
  V(MipsI32x4ShrU) \
  V(MipsI32x4MaxU) \
  V(MipsI32x4MinU) \
  V(MipsF64x2Splat) \
  V(MipsF64x2ExtractLane) \
  V(MipsF64x2ReplaceLane) \
  V(MipsF32x4Abs) \
  V(MipsF32x4Neg) \
  V(MipsF32x4Sqrt) \
  V(MipsF32x4RecipApprox) \
  V(MipsF32x4RecipSqrtApprox) \
  V(MipsF32x4Add) \
  V(MipsF32x4Sub) \
  V(MipsF32x4Mul) \
  V(MipsF32x4Div) \
  V(MipsF32x4Max) \
  V(MipsF32x4Min) \
  V(MipsF32x4Eq) \
  V(MipsF32x4Ne) \
  V(MipsF32x4Lt) \
  V(MipsF32x4Le) \
  V(MipsF32x4Pmin) \
  V(MipsF32x4Pmax) \
  V(MipsF32x4Ceil) \
  V(MipsF32x4Floor) \
  V(MipsF32x4Trunc) \
  V(MipsF32x4NearestInt) \
  V(MipsI32x4SConvertF32x4) \
  V(MipsI32x4UConvertF32x4) \
  V(MipsI32x4Neg) \
  V(MipsI32x4GtS) \
  V(MipsI32x4GeS) \
  V(MipsI32x4GtU) \
  V(MipsI32x4GeU) \
  V(MipsI32x4Abs) \
  V(MipsI32x4BitMask) \
  V(MipsI32x4DotI16x8S) \
  V(MipsI32x4ExtMulLowI16x8S) \
  V(MipsI32x4ExtMulHighI16x8S) \
  V(MipsI32x4ExtMulLowI16x8U) \
  V(MipsI32x4ExtMulHighI16x8U) \
  V(MipsI32x4TruncSatF64x2SZero) \
  V(MipsI32x4TruncSatF64x2UZero) \
  V(MipsI32x4ExtAddPairwiseI16x8S) \
  V(MipsI32x4ExtAddPairwiseI16x8U) \
  V(MipsI16x8Splat) \
  V(MipsI16x8ExtractLaneU) \
  V(MipsI16x8ExtractLaneS) \
  V(MipsI16x8ReplaceLane) \
  V(MipsI16x8Neg) \
  V(MipsI16x8Shl) \
  V(MipsI16x8ShrS) \
  V(MipsI16x8ShrU) \
  V(MipsI16x8Add) \
  V(MipsI16x8AddSatS) \
  V(MipsI16x8Sub) \
  V(MipsI16x8SubSatS) \
  V(MipsI16x8Mul) \
  V(MipsI16x8MaxS) \
  V(MipsI16x8MinS) \
  V(MipsI16x8Eq) \
  V(MipsI16x8Ne) \
  V(MipsI16x8GtS) \
  V(MipsI16x8GeS) \
  V(MipsI16x8AddSatU) \
  V(MipsI16x8SubSatU) \
  V(MipsI16x8MaxU) \
  V(MipsI16x8MinU) \
  V(MipsI16x8GtU) \
  V(MipsI16x8GeU) \
  V(MipsI16x8RoundingAverageU) \
  V(MipsI16x8Abs) \
  V(MipsI16x8BitMask) \
  V(MipsI16x8Q15MulRSatS) \
  V(MipsI16x8ExtMulLowI8x16S) \
  V(MipsI16x8ExtMulHighI8x16S) \
  V(MipsI16x8ExtMulLowI8x16U) \
  V(MipsI16x8ExtMulHighI8x16U) \
  V(MipsI16x8ExtAddPairwiseI8x16S) \
  V(MipsI16x8ExtAddPairwiseI8x16U) \
  V(MipsI8x16Splat) \
  V(MipsI8x16ExtractLaneU) \
V(MipsI8x16ExtractLaneS) \ V(MipsI8x16ShrS) \
V(MipsI8x16ReplaceLane) \ V(MipsI8x16Add) \
V(MipsI8x16Neg) \ V(MipsI8x16AddSatS) \
V(MipsI8x16Shl) \ V(MipsI8x16Sub) \
V(MipsI8x16ShrS) \ V(MipsI8x16SubSatS) \
V(MipsI8x16Add) \ V(MipsI8x16MaxS) \
V(MipsI8x16AddSatS) \ V(MipsI8x16MinS) \
V(MipsI8x16Sub) \ V(MipsI8x16Eq) \
V(MipsI8x16SubSatS) \ V(MipsI8x16Ne) \
V(MipsI8x16MaxS) \ V(MipsI8x16GtS) \
V(MipsI8x16MinS) \ V(MipsI8x16GeS) \
V(MipsI8x16Eq) \ V(MipsI8x16ShrU) \
V(MipsI8x16Ne) \ V(MipsI8x16AddSatU) \
V(MipsI8x16GtS) \ V(MipsI8x16SubSatU) \
V(MipsI8x16GeS) \ V(MipsI8x16MaxU) \
V(MipsI8x16ShrU) \ V(MipsI8x16MinU) \
V(MipsI8x16AddSatU) \ V(MipsI8x16GtU) \
V(MipsI8x16SubSatU) \ V(MipsI8x16GeU) \
V(MipsI8x16MaxU) \ V(MipsI8x16RoundingAverageU) \
V(MipsI8x16MinU) \ V(MipsI8x16Abs) \
V(MipsI8x16GtU) \ V(MipsI8x16Popcnt) \
V(MipsI8x16GeU) \ V(MipsI8x16BitMask) \
V(MipsI8x16RoundingAverageU) \ V(MipsS128And) \
V(MipsI8x16Abs) \ V(MipsS128Or) \
V(MipsI8x16Popcnt) \ V(MipsS128Xor) \
V(MipsI8x16BitMask) \ V(MipsS128Not) \
V(MipsS128And) \ V(MipsS128Select) \
V(MipsS128Or) \ V(MipsS128AndNot) \
V(MipsS128Xor) \ V(MipsI64x2AllTrue) \
V(MipsS128Not) \ V(MipsI32x4AllTrue) \
V(MipsS128Select) \ V(MipsI16x8AllTrue) \
V(MipsS128AndNot) \ V(MipsI8x16AllTrue) \
V(MipsI64x2AllTrue) \ V(MipsV128AnyTrue) \
V(MipsI32x4AllTrue) \ V(MipsS32x4InterleaveRight) \
V(MipsI16x8AllTrue) \ V(MipsS32x4InterleaveLeft) \
V(MipsI8x16AllTrue) \ V(MipsS32x4PackEven) \
V(MipsV128AnyTrue) \ V(MipsS32x4PackOdd) \
V(MipsS32x4InterleaveRight) \ V(MipsS32x4InterleaveEven) \
V(MipsS32x4InterleaveLeft) \ V(MipsS32x4InterleaveOdd) \
V(MipsS32x4PackEven) \ V(MipsS32x4Shuffle) \
V(MipsS32x4PackOdd) \ V(MipsS16x8InterleaveRight) \
V(MipsS32x4InterleaveEven) \ V(MipsS16x8InterleaveLeft) \
V(MipsS32x4InterleaveOdd) \ V(MipsS16x8PackEven) \
V(MipsS32x4Shuffle) \ V(MipsS16x8PackOdd) \
V(MipsS16x8InterleaveRight) \ V(MipsS16x8InterleaveEven) \
V(MipsS16x8InterleaveLeft) \ V(MipsS16x8InterleaveOdd) \
V(MipsS16x8PackEven) \ V(MipsS16x4Reverse) \
V(MipsS16x8PackOdd) \ V(MipsS16x2Reverse) \
V(MipsS16x8InterleaveEven) \ V(MipsS8x16InterleaveRight) \
V(MipsS16x8InterleaveOdd) \ V(MipsS8x16InterleaveLeft) \
V(MipsS16x4Reverse) \ V(MipsS8x16PackEven) \
V(MipsS16x2Reverse) \ V(MipsS8x16PackOdd) \
V(MipsS8x16InterleaveRight) \ V(MipsS8x16InterleaveEven) \
V(MipsS8x16InterleaveLeft) \ V(MipsS8x16InterleaveOdd) \
V(MipsS8x16PackEven) \ V(MipsI8x16Shuffle) \
V(MipsS8x16PackOdd) \ V(MipsI8x16Swizzle) \
V(MipsS8x16InterleaveEven) \ V(MipsS8x16Concat) \
V(MipsS8x16InterleaveOdd) \ V(MipsS8x8Reverse) \
V(MipsI8x16Shuffle) \ V(MipsS8x4Reverse) \
V(MipsI8x16Swizzle) \ V(MipsS8x2Reverse) \
V(MipsS8x16Concat) \ V(MipsS128Load8Splat) \
V(MipsS8x8Reverse) \ V(MipsS128Load16Splat) \
V(MipsS8x4Reverse) \ V(MipsS128Load32Splat) \
V(MipsS8x2Reverse) \ V(MipsS128Load64Splat) \
V(MipsS128Load8Splat) \ V(MipsS128Load8x8S) \
V(MipsS128Load16Splat) \ V(MipsS128Load8x8U) \
V(MipsS128Load32Splat) \ V(MipsS128Load16x4S) \
V(MipsS128Load64Splat) \ V(MipsS128Load16x4U) \
V(MipsS128Load8x8S) \ V(MipsS128Load32x2S) \
V(MipsS128Load8x8U) \ V(MipsS128Load32x2U) \
V(MipsS128Load16x4S) \ V(MipsMsaLd) \
V(MipsS128Load16x4U) \ V(MipsMsaSt) \
V(MipsS128Load32x2S) \ V(MipsI32x4SConvertI16x8Low) \
V(MipsS128Load32x2U) \ V(MipsI32x4SConvertI16x8High) \
V(MipsMsaLd) \ V(MipsI32x4UConvertI16x8Low) \
V(MipsMsaSt) \ V(MipsI32x4UConvertI16x8High) \
V(MipsI32x4SConvertI16x8Low) \ V(MipsI16x8SConvertI8x16Low) \
V(MipsI32x4SConvertI16x8High) \ V(MipsI16x8SConvertI8x16High) \
V(MipsI32x4UConvertI16x8Low) \ V(MipsI16x8SConvertI32x4) \
V(MipsI32x4UConvertI16x8High) \ V(MipsI16x8UConvertI32x4) \
V(MipsI16x8SConvertI8x16Low) \ V(MipsI16x8UConvertI8x16Low) \
V(MipsI16x8SConvertI8x16High) \ V(MipsI16x8UConvertI8x16High) \
V(MipsI16x8SConvertI32x4) \ V(MipsI8x16SConvertI16x8) \
V(MipsI16x8UConvertI32x4) \ V(MipsI8x16UConvertI16x8) \
V(MipsI16x8UConvertI8x16Low) \ V(MipsWord32AtomicPairLoad) \
V(MipsI16x8UConvertI8x16High) \ V(MipsWord32AtomicPairStore) \
V(MipsI8x16SConvertI16x8) \ V(MipsWord32AtomicPairAdd) \
V(MipsI8x16UConvertI16x8) \ V(MipsWord32AtomicPairSub) \
V(MipsWord32AtomicPairLoad) \ V(MipsWord32AtomicPairAnd) \
V(MipsWord32AtomicPairStore) \ V(MipsWord32AtomicPairOr) \
V(MipsWord32AtomicPairAdd) \ V(MipsWord32AtomicPairXor) \
V(MipsWord32AtomicPairSub) \ V(MipsWord32AtomicPairExchange) \
V(MipsWord32AtomicPairAnd) \
V(MipsWord32AtomicPairOr) \
V(MipsWord32AtomicPairXor) \
V(MipsWord32AtomicPairExchange) \
V(MipsWord32AtomicPairCompareExchange) V(MipsWord32AtomicPairCompareExchange)
// Addressing modes represent the "shape" of inputs to an instruction. // Addressing modes represent the "shape" of inputs to an instruction.
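The headers in this change all follow the same X-macro convention, so it is worth spelling out once. Below is a minimal standalone sketch of the pattern; the entry names are borrowed from the list above, but the `SKETCH_`-prefixed macro and enum names are stand-ins for the shared machinery in the real headers, not V8's actual identifiers.

```cpp
// A sketch of the X-macro pattern behind TARGET_ARCH_OPCODE_LIST: one list
// macro, many expansions. Three entries stand in for the full list here.
#define SKETCH_OPCODE_LIST(V) \
  V(MipsPush)                 \
  V(MipsPeek)                 \
  V(MipsSync)

// Expanding the list with a "make an enumerator" callback yields an
// ArchOpcode-style enum; other callbacks can yield name tables, switch
// cases, and so on, all driven by the same single list.
enum SketchArchOpcode {
#define DECLARE_OPCODE(Name) k##Name,
  SKETCH_OPCODE_LIST(DECLARE_OPCODE)
#undef DECLARE_OPCODE
};

static_assert(kMipsSync == 2, "enumerator values follow the list order");
```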

instruction-codes-mips64.h

@@ -12,397 +12,393 @@ namespace compiler {
// MIPS64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-
#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(Mips64Add) \
  V(Mips64Dadd) \
  V(Mips64DaddOvf) \
  V(Mips64Sub) \
  V(Mips64Dsub) \
  V(Mips64DsubOvf) \
  V(Mips64Mul) \
  V(Mips64MulOvf) \
  V(Mips64MulHigh) \
  V(Mips64DMulHigh) \
  V(Mips64MulHighU) \
  V(Mips64Dmul) \
  V(Mips64Div) \
  V(Mips64Ddiv) \
  V(Mips64DivU) \
  V(Mips64DdivU) \
  V(Mips64Mod) \
  V(Mips64Dmod) \
  V(Mips64ModU) \
  V(Mips64DmodU) \
  V(Mips64And) \
  V(Mips64And32) \
  V(Mips64Or) \
  V(Mips64Or32) \
  V(Mips64Nor) \
  V(Mips64Nor32) \
  V(Mips64Xor) \
  V(Mips64Xor32) \
  V(Mips64Clz) \
  V(Mips64Lsa) \
  V(Mips64Dlsa) \
  V(Mips64Shl) \
  V(Mips64Shr) \
  V(Mips64Sar) \
  V(Mips64Ext) \
  V(Mips64Ins) \
  V(Mips64Dext) \
  V(Mips64Dins) \
  V(Mips64Dclz) \
  V(Mips64Ctz) \
  V(Mips64Dctz) \
  V(Mips64Popcnt) \
  V(Mips64Dpopcnt) \
  V(Mips64Dshl) \
  V(Mips64Dshr) \
  V(Mips64Dsar) \
  V(Mips64Ror) \
  V(Mips64Dror) \
  V(Mips64Mov) \
  V(Mips64Tst) \
  V(Mips64Cmp) \
  V(Mips64CmpS) \
  V(Mips64AddS) \
  V(Mips64SubS) \
  V(Mips64MulS) \
  V(Mips64DivS) \
  V(Mips64AbsS) \
  V(Mips64NegS) \
  V(Mips64SqrtS) \
  V(Mips64MaxS) \
  V(Mips64MinS) \
  V(Mips64CmpD) \
  V(Mips64AddD) \
  V(Mips64SubD) \
  V(Mips64MulD) \
  V(Mips64DivD) \
  V(Mips64ModD) \
  V(Mips64AbsD) \
  V(Mips64NegD) \
  V(Mips64SqrtD) \
  V(Mips64MaxD) \
  V(Mips64MinD) \
  V(Mips64Float64RoundDown) \
  V(Mips64Float64RoundTruncate) \
  V(Mips64Float64RoundUp) \
  V(Mips64Float64RoundTiesEven) \
  V(Mips64Float32RoundDown) \
  V(Mips64Float32RoundTruncate) \
  V(Mips64Float32RoundUp) \
  V(Mips64Float32RoundTiesEven) \
  V(Mips64CvtSD) \
  V(Mips64CvtDS) \
  V(Mips64TruncWD) \
  V(Mips64RoundWD) \
  V(Mips64FloorWD) \
  V(Mips64CeilWD) \
  V(Mips64TruncWS) \
  V(Mips64RoundWS) \
  V(Mips64FloorWS) \
  V(Mips64CeilWS) \
  V(Mips64TruncLS) \
  V(Mips64TruncLD) \
  V(Mips64TruncUwD) \
  V(Mips64TruncUwS) \
  V(Mips64TruncUlS) \
  V(Mips64TruncUlD) \
  V(Mips64CvtDW) \
  V(Mips64CvtSL) \
  V(Mips64CvtSW) \
  V(Mips64CvtSUw) \
  V(Mips64CvtSUl) \
  V(Mips64CvtDL) \
  V(Mips64CvtDUw) \
  V(Mips64CvtDUl) \
  V(Mips64Lb) \
  V(Mips64Lbu) \
  V(Mips64Sb) \
  V(Mips64Lh) \
  V(Mips64Ulh) \
  V(Mips64Lhu) \
  V(Mips64Ulhu) \
  V(Mips64Sh) \
  V(Mips64Ush) \
  V(Mips64Ld) \
  V(Mips64Uld) \
  V(Mips64Lw) \
  V(Mips64Ulw) \
  V(Mips64Lwu) \
  V(Mips64Ulwu) \
  V(Mips64Sw) \
  V(Mips64Usw) \
  V(Mips64Sd) \
  V(Mips64Usd) \
  V(Mips64Lwc1) \
  V(Mips64Ulwc1) \
  V(Mips64Swc1) \
  V(Mips64Uswc1) \
  V(Mips64Ldc1) \
  V(Mips64Uldc1) \
  V(Mips64Sdc1) \
  V(Mips64Usdc1) \
  V(Mips64BitcastDL) \
  V(Mips64BitcastLD) \
  V(Mips64Float64ExtractLowWord32) \
  V(Mips64Float64ExtractHighWord32) \
  V(Mips64Float64InsertLowWord32) \
  V(Mips64Float64InsertHighWord32) \
  V(Mips64Float32Max) \
  V(Mips64Float64Max) \
  V(Mips64Float32Min) \
  V(Mips64Float64Min) \
  V(Mips64Float64SilenceNaN) \
  V(Mips64Push) \
  V(Mips64Peek) \
  V(Mips64StoreToStackSlot) \
  V(Mips64ByteSwap64) \
  V(Mips64ByteSwap32) \
  V(Mips64StackClaim) \
  V(Mips64Seb) \
  V(Mips64Seh) \
  V(Mips64Sync) \
  V(Mips64AssertEqual) \
  V(Mips64S128Const) \
  V(Mips64S128Zero) \
  V(Mips64S128AllOnes) \
  V(Mips64I32x4Splat) \
  V(Mips64I32x4ExtractLane) \
  V(Mips64I32x4ReplaceLane) \
  V(Mips64I32x4Add) \
  V(Mips64I32x4Sub) \
  V(Mips64F64x2Abs) \
  V(Mips64F64x2Neg) \
  V(Mips64F32x4Splat) \
  V(Mips64F32x4ExtractLane) \
  V(Mips64F32x4ReplaceLane) \
  V(Mips64F32x4SConvertI32x4) \
  V(Mips64F32x4UConvertI32x4) \
  V(Mips64I32x4Mul) \
  V(Mips64I32x4MaxS) \
  V(Mips64I32x4MinS) \
  V(Mips64I32x4Eq) \
  V(Mips64I32x4Ne) \
  V(Mips64I32x4Shl) \
  V(Mips64I32x4ShrS) \
  V(Mips64I32x4ShrU) \
  V(Mips64I32x4MaxU) \
  V(Mips64I32x4MinU) \
  V(Mips64F64x2Sqrt) \
  V(Mips64F64x2Add) \
  V(Mips64F64x2Sub) \
  V(Mips64F64x2Mul) \
  V(Mips64F64x2Div) \
  V(Mips64F64x2Min) \
  V(Mips64F64x2Max) \
  V(Mips64F64x2Eq) \
  V(Mips64F64x2Ne) \
  V(Mips64F64x2Lt) \
  V(Mips64F64x2Le) \
  V(Mips64F64x2Splat) \
  V(Mips64F64x2ExtractLane) \
  V(Mips64F64x2ReplaceLane) \
  V(Mips64F64x2Pmin) \
  V(Mips64F64x2Pmax) \
  V(Mips64F64x2Ceil) \
  V(Mips64F64x2Floor) \
  V(Mips64F64x2Trunc) \
  V(Mips64F64x2NearestInt) \
  V(Mips64F64x2ConvertLowI32x4S) \
  V(Mips64F64x2ConvertLowI32x4U) \
  V(Mips64F64x2PromoteLowF32x4) \
  V(Mips64I64x2Splat) \
  V(Mips64I64x2ExtractLane) \
  V(Mips64I64x2ReplaceLane) \
  V(Mips64I64x2Add) \
  V(Mips64I64x2Sub) \
  V(Mips64I64x2Mul) \
  V(Mips64I64x2Neg) \
  V(Mips64I64x2Shl) \
  V(Mips64I64x2ShrS) \
  V(Mips64I64x2ShrU) \
  V(Mips64I64x2BitMask) \
  V(Mips64I64x2Eq) \
  V(Mips64I64x2Ne) \
  V(Mips64I64x2GtS) \
  V(Mips64I64x2GeS) \
  V(Mips64I64x2Abs) \
  V(Mips64I64x2SConvertI32x4Low) \
  V(Mips64I64x2SConvertI32x4High) \
  V(Mips64I64x2UConvertI32x4Low) \
  V(Mips64I64x2UConvertI32x4High) \
  V(Mips64ExtMulLow) \
  V(Mips64ExtMulHigh) \
  V(Mips64ExtAddPairwise) \
  V(Mips64F32x4Abs) \
  V(Mips64F32x4Neg) \
  V(Mips64F32x4Sqrt) \
  V(Mips64F32x4RecipApprox) \
  V(Mips64F32x4RecipSqrtApprox) \
  V(Mips64F32x4Add) \
  V(Mips64F32x4Sub) \
  V(Mips64F32x4Mul) \
  V(Mips64F32x4Div) \
  V(Mips64F32x4Max) \
  V(Mips64F32x4Min) \
  V(Mips64F32x4Eq) \
  V(Mips64F32x4Ne) \
  V(Mips64F32x4Lt) \
  V(Mips64F32x4Le) \
  V(Mips64F32x4Pmin) \
  V(Mips64F32x4Pmax) \
  V(Mips64F32x4Ceil) \
  V(Mips64F32x4Floor) \
  V(Mips64F32x4Trunc) \
  V(Mips64F32x4NearestInt) \
  V(Mips64F32x4DemoteF64x2Zero) \
  V(Mips64I32x4SConvertF32x4) \
  V(Mips64I32x4UConvertF32x4) \
  V(Mips64I32x4Neg) \
  V(Mips64I32x4GtS) \
  V(Mips64I32x4GeS) \
  V(Mips64I32x4GtU) \
  V(Mips64I32x4GeU) \
  V(Mips64I32x4Abs) \
  V(Mips64I32x4BitMask) \
  V(Mips64I32x4DotI16x8S) \
  V(Mips64I32x4TruncSatF64x2SZero) \
  V(Mips64I32x4TruncSatF64x2UZero) \
  V(Mips64I16x8Splat) \
  V(Mips64I16x8ExtractLaneU) \
  V(Mips64I16x8ExtractLaneS) \
  V(Mips64I16x8ReplaceLane) \
  V(Mips64I16x8Neg) \
  V(Mips64I16x8Shl) \
  V(Mips64I16x8ShrS) \
  V(Mips64I16x8ShrU) \
  V(Mips64I16x8Add) \
  V(Mips64I16x8AddSatS) \
  V(Mips64I16x8Sub) \
  V(Mips64I16x8SubSatS) \
  V(Mips64I16x8Mul) \
  V(Mips64I16x8MaxS) \
  V(Mips64I16x8MinS) \
  V(Mips64I16x8Eq) \
  V(Mips64I16x8Ne) \
  V(Mips64I16x8GtS) \
  V(Mips64I16x8GeS) \
  V(Mips64I16x8AddSatU) \
  V(Mips64I16x8SubSatU) \
  V(Mips64I16x8MaxU) \
  V(Mips64I16x8MinU) \
  V(Mips64I16x8GtU) \
  V(Mips64I16x8GeU) \
  V(Mips64I16x8RoundingAverageU) \
  V(Mips64I16x8Abs) \
  V(Mips64I16x8BitMask) \
  V(Mips64I16x8Q15MulRSatS) \
  V(Mips64I8x16Splat) \
  V(Mips64I8x16ExtractLaneU) \
  V(Mips64I8x16ExtractLaneS) \
  V(Mips64I8x16ReplaceLane) \
  V(Mips64I8x16Neg) \
  V(Mips64I8x16Shl) \
  V(Mips64I8x16ShrS) \
  V(Mips64I8x16Add) \
  V(Mips64I8x16AddSatS) \
  V(Mips64I8x16Sub) \
  V(Mips64I8x16SubSatS) \
  V(Mips64I8x16MaxS) \
  V(Mips64I8x16MinS) \
  V(Mips64I8x16Eq) \
  V(Mips64I8x16Ne) \
  V(Mips64I8x16GtS) \
  V(Mips64I8x16GeS) \
  V(Mips64I8x16ShrU) \
  V(Mips64I8x16AddSatU) \
  V(Mips64I8x16SubSatU) \
  V(Mips64I8x16MaxU) \
  V(Mips64I8x16MinU) \
  V(Mips64I8x16GtU) \
  V(Mips64I8x16GeU) \
  V(Mips64I8x16RoundingAverageU) \
  V(Mips64I8x16Abs) \
  V(Mips64I8x16Popcnt) \
  V(Mips64I8x16BitMask) \
  V(Mips64S128And) \
  V(Mips64S128Or) \
  V(Mips64S128Xor) \
  V(Mips64S128Not) \
  V(Mips64S128Select) \
  V(Mips64S128AndNot) \
  V(Mips64I64x2AllTrue) \
  V(Mips64I32x4AllTrue) \
  V(Mips64I16x8AllTrue) \
  V(Mips64I8x16AllTrue) \
  V(Mips64V128AnyTrue) \
  V(Mips64S32x4InterleaveRight) \
  V(Mips64S32x4InterleaveLeft) \
  V(Mips64S32x4PackEven) \
  V(Mips64S32x4PackOdd) \
  V(Mips64S32x4InterleaveEven) \
  V(Mips64S32x4InterleaveOdd) \
  V(Mips64S32x4Shuffle) \
  V(Mips64S16x8InterleaveRight) \
  V(Mips64S16x8InterleaveLeft) \
  V(Mips64S16x8PackEven) \
  V(Mips64S16x8PackOdd) \
  V(Mips64S16x8InterleaveEven) \
  V(Mips64S16x8InterleaveOdd) \
  V(Mips64S16x4Reverse) \
  V(Mips64S16x2Reverse) \
  V(Mips64S8x16InterleaveRight) \
  V(Mips64S8x16InterleaveLeft) \
  V(Mips64S8x16PackEven) \
  V(Mips64S8x16PackOdd) \
  V(Mips64S8x16InterleaveEven) \
  V(Mips64S8x16InterleaveOdd) \
  V(Mips64I8x16Shuffle) \
  V(Mips64I8x16Swizzle) \
  V(Mips64S8x16Concat) \
  V(Mips64S8x8Reverse) \
  V(Mips64S8x4Reverse) \
  V(Mips64S8x2Reverse) \
  V(Mips64S128LoadSplat) \
  V(Mips64S128Load8x8S) \
  V(Mips64S128Load8x8U) \
  V(Mips64S128Load16x4S) \
  V(Mips64S128Load16x4U) \
  V(Mips64S128Load32x2S) \
  V(Mips64S128Load32x2U) \
  V(Mips64S128Load32Zero) \
  V(Mips64S128Load64Zero) \
  V(Mips64S128LoadLane) \
  V(Mips64S128StoreLane) \
  V(Mips64MsaLd) \
  V(Mips64MsaSt) \
  V(Mips64I32x4SConvertI16x8Low) \
  V(Mips64I32x4SConvertI16x8High) \
  V(Mips64I32x4UConvertI16x8Low) \
  V(Mips64I32x4UConvertI16x8High) \
  V(Mips64I16x8SConvertI8x16Low) \
  V(Mips64I16x8SConvertI8x16High) \
  V(Mips64I16x8SConvertI32x4) \
  V(Mips64I16x8UConvertI32x4) \
  V(Mips64I16x8UConvertI8x16Low) \
  V(Mips64I16x8UConvertI8x16High) \
  V(Mips64I8x16SConvertI16x8) \
  V(Mips64I8x16UConvertI16x8) \
  V(Mips64StoreCompressTagged) \
  V(Mips64Word64AtomicLoadUint64) \
  V(Mips64Word64AtomicStoreWord64) \
  V(Mips64Word64AtomicAddUint64) \
  V(Mips64Word64AtomicSubUint64) \
  V(Mips64Word64AtomicAndUint64) \
  V(Mips64Word64AtomicOrUint64) \
  V(Mips64Word64AtomicXorUint64) \
  V(Mips64Word64AtomicExchangeUint64) \
  V(Mips64Word64AtomicCompareExchangeUint64)

// Addressing modes represent the "shape" of inputs to an instruction.
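With the empty per-architecture definitions deleted, the shared predicate can test whether the list macro is defined at all instead of expanding an empty switch. The following is a hedged sketch of that shape, reconstructed from the commit message rather than copied from the shared backend headers, so the exact wording there may differ.

```cpp
// Sketch: on architectures whose instructions never trap, the
// TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST macro no longer exists,
// so HasMemoryAccessMode() collapses to a plain `return false`.
inline bool HasMemoryAccessMode(ArchOpcode opcode) {
#if defined(TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST)
  switch (opcode) {
#define CASE(Name) \
  case k##Name:    \
    return true;
    TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(CASE)
#undef CASE
    default:
      return false;
  }
#else
  // No opcode can trap, no switch is emitted, and MSVC has nothing to
  // warn about.
  return false;
#endif
}
```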

instruction-codes-ppc.h

@@ -12,410 +12,406 @@ namespace compiler {
// PPC-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-
#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(PPC_Peek) \
  V(PPC_Sync) \
  V(PPC_And) \
  V(PPC_AndComplement) \
  V(PPC_Or) \
  V(PPC_OrComplement) \
  V(PPC_Xor) \
  V(PPC_ShiftLeft32) \
  V(PPC_ShiftLeft64) \
  V(PPC_ShiftLeftPair) \
  V(PPC_ShiftRight32) \
  V(PPC_ShiftRight64) \
  V(PPC_ShiftRightPair) \
  V(PPC_ShiftRightAlg32) \
  V(PPC_ShiftRightAlg64) \
  V(PPC_ShiftRightAlgPair) \
  V(PPC_RotRight32) \
  V(PPC_RotRight64) \
  V(PPC_Not) \
  V(PPC_RotLeftAndMask32) \
  V(PPC_RotLeftAndClear64) \
  V(PPC_RotLeftAndClearLeft64) \
  V(PPC_RotLeftAndClearRight64) \
  V(PPC_Add32) \
  V(PPC_Add64) \
  V(PPC_AddWithOverflow32) \
  V(PPC_AddPair) \
  V(PPC_AddDouble) \
  V(PPC_Sub) \
  V(PPC_SubWithOverflow32) \
  V(PPC_SubPair) \
  V(PPC_SubDouble) \
  V(PPC_Mul32) \
  V(PPC_Mul32WithHigh32) \
  V(PPC_Mul64) \
  V(PPC_MulHigh32) \
  V(PPC_MulHighU32) \
  V(PPC_MulPair) \
  V(PPC_MulDouble) \
  V(PPC_Div32) \
  V(PPC_Div64) \
  V(PPC_DivU32) \
  V(PPC_DivU64) \
  V(PPC_DivDouble) \
  V(PPC_Mod32) \
  V(PPC_Mod64) \
  V(PPC_ModU32) \
  V(PPC_ModU64) \
  V(PPC_ModDouble) \
  V(PPC_Neg) \
  V(PPC_NegDouble) \
  V(PPC_SqrtDouble) \
  V(PPC_FloorDouble) \
  V(PPC_CeilDouble) \
  V(PPC_TruncateDouble) \
  V(PPC_RoundDouble) \
  V(PPC_MaxDouble) \
  V(PPC_MinDouble) \
  V(PPC_AbsDouble) \
  V(PPC_Cntlz32) \
  V(PPC_Cntlz64) \
  V(PPC_Popcnt32) \
  V(PPC_Popcnt64) \
  V(PPC_Cmp32) \
  V(PPC_Cmp64) \
  V(PPC_CmpDouble) \
  V(PPC_Tst32) \
  V(PPC_Tst64) \
  V(PPC_Push) \
  V(PPC_PushFrame) \
  V(PPC_StoreToStackSlot) \
  V(PPC_ExtendSignWord8) \
  V(PPC_ExtendSignWord16) \
  V(PPC_ExtendSignWord32) \
  V(PPC_Uint32ToUint64) \
  V(PPC_Int64ToInt32) \
  V(PPC_Int64ToFloat32) \
  V(PPC_Int64ToDouble) \
  V(PPC_Uint64ToFloat32) \
  V(PPC_Uint64ToDouble) \
  V(PPC_Int32ToFloat32) \
  V(PPC_Int32ToDouble) \
  V(PPC_Uint32ToFloat32) \
  V(PPC_Float32ToInt32) \
  V(PPC_Float32ToUint32) \
  V(PPC_Uint32ToDouble) \
  V(PPC_Float32ToDouble) \
  V(PPC_Float64SilenceNaN) \
  V(PPC_DoubleToInt32) \
  V(PPC_DoubleToUint32) \
  V(PPC_DoubleToInt64) \
  V(PPC_DoubleToUint64) \
  V(PPC_DoubleToFloat32) \
  V(PPC_DoubleExtractLowWord32) \
  V(PPC_DoubleExtractHighWord32) \
  V(PPC_DoubleInsertLowWord32) \
  V(PPC_DoubleInsertHighWord32) \
  V(PPC_DoubleConstruct) \
  V(PPC_BitcastInt32ToFloat32) \
  V(PPC_BitcastFloat32ToInt32) \
  V(PPC_BitcastInt64ToDouble) \
  V(PPC_BitcastDoubleToInt64) \
  V(PPC_LoadWordS8) \
  V(PPC_LoadWordU8) \
  V(PPC_LoadWordS16) \
  V(PPC_LoadWordU16) \
  V(PPC_LoadWordS32) \
  V(PPC_LoadWordU32) \
  V(PPC_LoadByteRev32) \
  V(PPC_LoadWord64) \
  V(PPC_LoadByteRev64) \
  V(PPC_LoadFloat32) \
  V(PPC_LoadDouble) \
  V(PPC_LoadSimd128) \
  V(PPC_LoadReverseSimd128RR) \
  V(PPC_StoreWord8) \
  V(PPC_StoreWord16) \
  V(PPC_StoreWord32) \
  V(PPC_StoreByteRev32) \
  V(PPC_StoreWord64) \
  V(PPC_StoreByteRev64) \
  V(PPC_StoreFloat32) \
  V(PPC_StoreDouble) \
  V(PPC_StoreSimd128) \
  V(PPC_ByteRev32) \
  V(PPC_ByteRev64) \
  V(PPC_AtomicExchangeUint8) \
  V(PPC_AtomicExchangeUint16) \
  V(PPC_AtomicExchangeWord32) \
  V(PPC_AtomicExchangeWord64) \
  V(PPC_AtomicCompareExchangeUint8) \
  V(PPC_AtomicCompareExchangeUint16) \
  V(PPC_AtomicCompareExchangeWord32) \
  V(PPC_AtomicCompareExchangeWord64) \
  V(PPC_AtomicAddUint8) \
  V(PPC_AtomicAddUint16) \
  V(PPC_AtomicAddUint32) \
  V(PPC_AtomicAddUint64) \
  V(PPC_AtomicAddInt8) \
  V(PPC_AtomicAddInt16) \
  V(PPC_AtomicAddInt32) \
  V(PPC_AtomicAddInt64) \
  V(PPC_AtomicSubUint8) \
  V(PPC_AtomicSubUint16) \
  V(PPC_AtomicSubUint32) \
  V(PPC_AtomicSubUint64) \
  V(PPC_AtomicSubInt8) \
  V(PPC_AtomicSubInt16) \
  V(PPC_AtomicSubInt32) \
  V(PPC_AtomicSubInt64) \
  V(PPC_AtomicAndUint8) \
  V(PPC_AtomicAndUint16) \
  V(PPC_AtomicAndUint32) \
  V(PPC_AtomicAndUint64) \
  V(PPC_AtomicAndInt8) \
  V(PPC_AtomicAndInt16) \
  V(PPC_AtomicAndInt32) \
  V(PPC_AtomicAndInt64) \
  V(PPC_AtomicOrUint8) \
  V(PPC_AtomicOrUint16) \
  V(PPC_AtomicOrUint32) \
  V(PPC_AtomicOrUint64) \
  V(PPC_AtomicOrInt8) \
  V(PPC_AtomicOrInt16) \
  V(PPC_AtomicOrInt32) \
  V(PPC_AtomicOrInt64) \
  V(PPC_AtomicXorUint8) \
  V(PPC_AtomicXorUint16) \
  V(PPC_AtomicXorUint32) \
  V(PPC_AtomicXorUint64) \
  V(PPC_AtomicXorInt8) \
  V(PPC_AtomicXorInt16) \
  V(PPC_AtomicXorInt32) \
  V(PPC_AtomicXorInt64) \
  V(PPC_F64x2Splat) \
  V(PPC_F64x2ExtractLane) \
  V(PPC_F64x2ReplaceLane) \
  V(PPC_F64x2Add) \
  V(PPC_F64x2Sub) \
  V(PPC_F64x2Mul) \
  V(PPC_F64x2Eq) \
  V(PPC_F64x2Ne) \
  V(PPC_F64x2Le) \
  V(PPC_F64x2Lt) \
  V(PPC_F64x2Abs) \
  V(PPC_F64x2Neg) \
  V(PPC_F64x2Sqrt) \
  V(PPC_F64x2Qfma) \
  V(PPC_F64x2Qfms) \
  V(PPC_F64x2Div) \
  V(PPC_F64x2Min) \
  V(PPC_F64x2Max) \
  V(PPC_F64x2Ceil) \
  V(PPC_F64x2Floor) \
  V(PPC_F64x2Trunc) \
  V(PPC_F64x2Pmin) \
  V(PPC_F64x2Pmax) \
  V(PPC_F64x2ConvertLowI32x4S) \
  V(PPC_F64x2ConvertLowI32x4U) \
  V(PPC_F64x2PromoteLowF32x4) \
  V(PPC_F32x4Splat) \
  V(PPC_F32x4ExtractLane) \
  V(PPC_F32x4ReplaceLane) \
  V(PPC_F32x4Add) \
  V(PPC_F32x4Sub) \
  V(PPC_F32x4Mul) \
  V(PPC_F32x4Eq) \
  V(PPC_F32x4Ne) \
  V(PPC_F32x4Lt) \
  V(PPC_F32x4Le) \
  V(PPC_F32x4Abs) \
  V(PPC_F32x4Neg) \
  V(PPC_F32x4RecipApprox) \
  V(PPC_F32x4RecipSqrtApprox) \
  V(PPC_F32x4Sqrt) \
  V(PPC_F32x4SConvertI32x4) \
  V(PPC_F32x4UConvertI32x4) \
  V(PPC_F32x4Div) \
  V(PPC_F32x4Min) \
  V(PPC_F32x4Max) \
  V(PPC_F32x4Ceil) \
  V(PPC_F32x4Floor) \
  V(PPC_F32x4Trunc) \
  V(PPC_F32x4Pmin) \
  V(PPC_F32x4Pmax) \
  V(PPC_F32x4Qfma) \
  V(PPC_F32x4Qfms) \
  V(PPC_F32x4DemoteF64x2Zero) \
  V(PPC_I64x2Splat) \
  V(PPC_I64x2ExtractLane) \
  V(PPC_I64x2ReplaceLane) \
  V(PPC_I64x2Add) \
  V(PPC_I64x2Sub) \
  V(PPC_I64x2Mul) \
  V(PPC_I64x2Eq) \
  V(PPC_I64x2Ne) \
  V(PPC_I64x2GtS) \
  V(PPC_I64x2GeS) \
  V(PPC_I64x2Shl) \
  V(PPC_I64x2ShrS) \
  V(PPC_I64x2ShrU) \
  V(PPC_I64x2Neg) \
  V(PPC_I64x2BitMask) \
  V(PPC_I64x2SConvertI32x4Low) \
  V(PPC_I64x2SConvertI32x4High) \
  V(PPC_I64x2UConvertI32x4Low) \
  V(PPC_I64x2UConvertI32x4High) \
  V(PPC_I64x2ExtMulLowI32x4S) \
  V(PPC_I64x2ExtMulHighI32x4S) \
  V(PPC_I64x2ExtMulLowI32x4U) \
  V(PPC_I64x2ExtMulHighI32x4U) \
  V(PPC_I64x2Abs) \
  V(PPC_I32x4Splat) \
  V(PPC_I32x4ExtractLane) \
  V(PPC_I32x4ReplaceLane) \
  V(PPC_I32x4Add) \
  V(PPC_I32x4Sub) \
  V(PPC_I32x4Mul) \
  V(PPC_I32x4MinS) \
  V(PPC_I32x4MinU) \
  V(PPC_I32x4MaxS) \
  V(PPC_I32x4MaxU) \
  V(PPC_I32x4Eq) \
  V(PPC_I32x4Ne) \
  V(PPC_I32x4GtS) \
  V(PPC_I32x4GeS) \
  V(PPC_I32x4GtU) \
  V(PPC_I32x4GeU) \
  V(PPC_I32x4Shl) \
  V(PPC_I32x4ShrS) \
  V(PPC_I32x4ShrU) \
  V(PPC_I32x4Neg) \
  V(PPC_I32x4Abs) \
  V(PPC_I32x4SConvertF32x4) \
  V(PPC_I32x4UConvertF32x4) \
  V(PPC_I32x4SConvertI16x8Low) \
  V(PPC_I32x4SConvertI16x8High) \
  V(PPC_I32x4UConvertI16x8Low) \
  V(PPC_I32x4UConvertI16x8High) \
  V(PPC_I32x4BitMask) \
  V(PPC_I32x4DotI16x8S) \
  V(PPC_I32x4ExtAddPairwiseI16x8S) \
  V(PPC_I32x4ExtAddPairwiseI16x8U) \
  V(PPC_I32x4ExtMulLowI16x8S) \
  V(PPC_I32x4ExtMulHighI16x8S) \
  V(PPC_I32x4ExtMulLowI16x8U) \
  V(PPC_I32x4ExtMulHighI16x8U) \
  V(PPC_I32x4TruncSatF64x2SZero) \
  V(PPC_I32x4TruncSatF64x2UZero) \
  V(PPC_I16x8Splat) \
  V(PPC_I16x8ExtractLaneU) \
  V(PPC_I16x8ExtractLaneS) \
  V(PPC_I16x8ReplaceLane) \
  V(PPC_I16x8Add) \
  V(PPC_I16x8Sub) \
  V(PPC_I16x8Mul) \
  V(PPC_I16x8MinS) \
  V(PPC_I16x8MinU) \
  V(PPC_I16x8MaxS) \
  V(PPC_I16x8MaxU) \
  V(PPC_I16x8Eq) \
  V(PPC_I16x8Ne) \
  V(PPC_I16x8GtS) \
  V(PPC_I16x8GeS) \
  V(PPC_I16x8GtU) \
  V(PPC_I16x8GeU) \
  V(PPC_I16x8Shl) \
  V(PPC_I16x8ShrS) \
  V(PPC_I16x8ShrU) \
  V(PPC_I16x8Neg) \
  V(PPC_I16x8Abs) \
  V(PPC_I16x8SConvertI32x4) \
  V(PPC_I16x8UConvertI32x4) \
  V(PPC_I16x8SConvertI8x16Low) \
  V(PPC_I16x8SConvertI8x16High) \
  V(PPC_I16x8UConvertI8x16Low) \
  V(PPC_I16x8UConvertI8x16High) \
  V(PPC_I16x8AddSatS) \
  V(PPC_I16x8SubSatS) \
  V(PPC_I16x8AddSatU) \
  V(PPC_I16x8SubSatU) \
  V(PPC_I16x8RoundingAverageU) \
  V(PPC_I16x8BitMask) \
  V(PPC_I16x8ExtAddPairwiseI8x16S) \
  V(PPC_I16x8ExtAddPairwiseI8x16U) \
  V(PPC_I16x8Q15MulRSatS) \
  V(PPC_I16x8ExtMulLowI8x16S) \
  V(PPC_I16x8ExtMulHighI8x16S) \
  V(PPC_I16x8ExtMulLowI8x16U) \
  V(PPC_I16x8ExtMulHighI8x16U) \
  V(PPC_I8x16Splat) \
  V(PPC_I8x16ExtractLaneU) \
  V(PPC_I8x16ExtractLaneS) \
  V(PPC_I8x16ReplaceLane) \
  V(PPC_I8x16Add) \
  V(PPC_I8x16Sub) \
  V(PPC_I8x16MinS) \
  V(PPC_I8x16MinU) \
  V(PPC_I8x16MaxS) \
  V(PPC_I8x16MaxU) \
  V(PPC_I8x16Eq) \
  V(PPC_I8x16Ne) \
  V(PPC_I8x16GtS) \
  V(PPC_I8x16GeS) \
  V(PPC_I8x16GtU) \
  V(PPC_I8x16GeU) \
  V(PPC_I8x16Shl) \
  V(PPC_I8x16ShrS) \
  V(PPC_I8x16ShrU) \
  V(PPC_I8x16Neg) \
  V(PPC_I8x16Abs) \
  V(PPC_I8x16SConvertI16x8) \
  V(PPC_I8x16UConvertI16x8) \
  V(PPC_I8x16AddSatS) \
  V(PPC_I8x16SubSatS) \
  V(PPC_I8x16AddSatU) \
  V(PPC_I8x16SubSatU) \
  V(PPC_I8x16RoundingAverageU) \
  V(PPC_I8x16Shuffle) \
  V(PPC_I8x16Swizzle) \
  V(PPC_I8x16BitMask) \
  V(PPC_I8x16Popcnt) \
  V(PPC_I64x2AllTrue) \
  V(PPC_I32x4AllTrue) \
  V(PPC_I16x8AllTrue) \
  V(PPC_I8x16AllTrue) \
  V(PPC_V128AnyTrue) \
  V(PPC_S128And) \
  V(PPC_S128Or) \
  V(PPC_S128Xor) \
  V(PPC_S128Const) \
  V(PPC_S128Zero) \
  V(PPC_S128AllOnes) \
  V(PPC_S128Not) \
  V(PPC_S128Select) \
  V(PPC_S128AndNot) \
  V(PPC_S128Load8Splat) \
  V(PPC_S128Load16Splat) \
  V(PPC_S128Load32Splat) \
  V(PPC_S128Load64Splat) \
  V(PPC_S128Load8x8S) \
  V(PPC_S128Load8x8U) \
  V(PPC_S128Load16x4S) \
  V(PPC_S128Load16x4U) \
  V(PPC_S128Load32x2S) \
  V(PPC_S128Load32x2U) \
  V(PPC_S128Load32Zero) \
  V(PPC_S128Load64Zero) \
  V(PPC_S128Load8Lane) \
  V(PPC_S128Load16Lane) \
  V(PPC_S128Load32Lane) \
  V(PPC_S128Load64Lane) \
  V(PPC_S128Store8Lane) \
  V(PPC_S128Store16Lane) \
  V(PPC_S128Store32Lane) \
  V(PPC_S128Store64Lane) \
  V(PPC_StoreCompressTagged) \
  V(PPC_LoadDecompressTaggedSigned) \
  V(PPC_LoadDecompressTaggedPointer) \
  V(PPC_LoadDecompressAnyTagged)

// Addressing modes represent the "shape" of inputs to an instruction.
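For reference, the warning being fixed: C4065 fires when a switch statement contains a default label but no case labels. While the per-arch list macro was defined but empty, the case-generating expansion produced nothing, so the 32-bit MSVC build effectively compiled the following. This is a sketch reconstructed from the warning's description, not actual preprocessor output.

```cpp
// What an empty TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) left
// behind after preprocessing: a default-only switch, which triggers
// MSVC warning C4065.
inline bool HasMemoryAccessMode(ArchOpcode opcode) {
  switch (opcode) {
    default:
      return false;
  }
}
```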

instruction-codes-riscv64.h

@@ -9,400 +9,396 @@ namespace v8 {
namespace internal {
namespace compiler {
-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-
// RISC-V-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(RiscvAdd32) \
  V(RiscvAdd64) \
  V(RiscvAddOvf64) \
  V(RiscvSub32) \
  V(RiscvSub64) \
  V(RiscvSubOvf64) \
  V(RiscvMul32) \
  V(RiscvMulOvf32) \
  V(RiscvMulHigh32) \
  V(RiscvMulHigh64) \
  V(RiscvMulHighU32) \
  V(RiscvMul64) \
  V(RiscvDiv32) \
  V(RiscvDiv64) \
  V(RiscvDivU32) \
  V(RiscvDivU64) \
  V(RiscvMod32) \
  V(RiscvMod64) \
  V(RiscvModU32) \
  V(RiscvModU64) \
  V(RiscvAnd) \
  V(RiscvAnd32) \
  V(RiscvOr) \
  V(RiscvOr32) \
  V(RiscvNor) \
  V(RiscvNor32) \
  V(RiscvXor) \
  V(RiscvXor32) \
  V(RiscvClz32) \
  V(RiscvShl32) \
  V(RiscvShr32) \
  V(RiscvSar32) \
  V(RiscvZeroExtendWord) \
  V(RiscvSignExtendWord) \
  V(RiscvClz64) \
  V(RiscvCtz32) \
  V(RiscvCtz64) \
  V(RiscvPopcnt32) \
  V(RiscvPopcnt64) \
  V(RiscvShl64) \
  V(RiscvShr64) \
  V(RiscvSar64) \
  V(RiscvRor32) \
  V(RiscvRor64) \
  V(RiscvMov) \
  V(RiscvTst) \
  V(RiscvCmp) \
  V(RiscvCmpZero) \
  V(RiscvCmpS) \
  V(RiscvAddS) \
  V(RiscvSubS) \
  V(RiscvMulS) \
  V(RiscvDivS) \
  V(RiscvModS) \
  V(RiscvAbsS) \
  V(RiscvNegS) \
  V(RiscvSqrtS) \
  V(RiscvMaxS) \
  V(RiscvMinS) \
  V(RiscvCmpD) \
  V(RiscvAddD) \
  V(RiscvSubD) \
  V(RiscvMulD) \
  V(RiscvDivD) \
  V(RiscvModD) \
  V(RiscvAbsD) \
  V(RiscvNegD) \
  V(RiscvSqrtD) \
  V(RiscvMaxD) \
  V(RiscvMinD) \
  V(RiscvFloat64RoundDown) \
  V(RiscvFloat64RoundTruncate) \
  V(RiscvFloat64RoundUp) \
  V(RiscvFloat64RoundTiesEven) \
  V(RiscvFloat32RoundDown) \
  V(RiscvFloat32RoundTruncate) \
  V(RiscvFloat32RoundUp) \
  V(RiscvFloat32RoundTiesEven) \
  V(RiscvCvtSD) \
  V(RiscvCvtDS) \
  V(RiscvTruncWD) \
  V(RiscvRoundWD) \
  V(RiscvFloorWD) \
  V(RiscvCeilWD) \
  V(RiscvTruncWS) \
  V(RiscvRoundWS) \
  V(RiscvFloorWS) \
  V(RiscvCeilWS) \
  V(RiscvTruncLS) \
  V(RiscvTruncLD) \
  V(RiscvTruncUwD) \
  V(RiscvTruncUwS) \
  V(RiscvTruncUlS) \
  V(RiscvTruncUlD) \
  V(RiscvCvtDW) \
  V(RiscvCvtSL) \
  V(RiscvCvtSW) \
  V(RiscvCvtSUw) \
  V(RiscvCvtSUl) \
  V(RiscvCvtDL) \
  V(RiscvCvtDUw) \
  V(RiscvCvtDUl) \
  V(RiscvLb) \
  V(RiscvLbu) \
  V(RiscvSb) \
  V(RiscvLh) \
  V(RiscvUlh) \
  V(RiscvLhu) \
  V(RiscvUlhu) \
  V(RiscvSh) \
  V(RiscvUsh) \
  V(RiscvLd) \
  V(RiscvUld) \
  V(RiscvLw) \
  V(RiscvUlw) \
  V(RiscvLwu) \
  V(RiscvUlwu) \
  V(RiscvSw) \
  V(RiscvUsw) \
  V(RiscvSd) \
  V(RiscvUsd) \
  V(RiscvLoadFloat) \
  V(RiscvULoadFloat) \
  V(RiscvStoreFloat) \
  V(RiscvUStoreFloat) \
  V(RiscvLoadDouble) \
  V(RiscvULoadDouble) \
  V(RiscvStoreDouble) \
  V(RiscvUStoreDouble) \
  V(RiscvBitcastDL) \
  V(RiscvBitcastLD) \
  V(RiscvBitcastInt32ToFloat32) \
  V(RiscvBitcastFloat32ToInt32) \
  V(RiscvFloat64ExtractLowWord32) \
  V(RiscvFloat64ExtractHighWord32) \
  V(RiscvFloat64InsertLowWord32) \
  V(RiscvFloat64InsertHighWord32) \
  V(RiscvFloat32Max) \
  V(RiscvFloat64Max) \
  V(RiscvFloat32Min) \
  V(RiscvFloat64Min) \
  V(RiscvFloat64SilenceNaN) \
  V(RiscvPush) \
  V(RiscvPeek) \
  V(RiscvByteSwap64) \
  V(RiscvByteSwap32) \
  V(RiscvStoreToStackSlot) \
  V(RiscvStackClaim) \
  V(RiscvSignExtendByte) \
  V(RiscvSignExtendShort) \
  V(RiscvSync) \
  V(RiscvAssertEqual) \
  V(RiscvS128Const) \
  V(RiscvS128Zero) \
  V(RiscvS128AllOnes) \
  V(RiscvI32x4Splat) \
  V(RiscvI32x4ExtractLane) \
  V(RiscvI32x4ReplaceLane) \
  V(RiscvI32x4Add) \
  V(RiscvI32x4Sub) \
  V(RiscvF64x2Abs) \
  V(RiscvF64x2Neg) \
  V(RiscvF32x4Splat) \
  V(RiscvF32x4ExtractLane) \
  V(RiscvF32x4ReplaceLane) \
  V(RiscvF32x4SConvertI32x4) \
  V(RiscvF32x4UConvertI32x4) \
  V(RiscvI64x2SConvertI32x4Low) \
  V(RiscvI64x2SConvertI32x4High) \
  V(RiscvI64x2UConvertI32x4Low) \
  V(RiscvI64x2UConvertI32x4High) \
  V(RiscvI32x4Mul) \
  V(RiscvI32x4MaxS) \
  V(RiscvI32x4MinS) \
  V(RiscvI32x4Eq) \
  V(RiscvI32x4Ne) \
  V(RiscvI32x4Shl) \
  V(RiscvI32x4ShrS) \
  V(RiscvI32x4ShrU) \
  V(RiscvI32x4MaxU) \
  V(RiscvI32x4MinU) \
  V(RiscvI64x2GtS) \
  V(RiscvI64x2GeS) \
  V(RiscvI64x2Eq) \
  V(RiscvI64x2Ne) \
  V(RiscvF64x2Sqrt) \
  V(RiscvF64x2Add) \
  V(RiscvF64x2Sub) \
  V(RiscvF64x2Mul) \
  V(RiscvF64x2Div) \
  V(RiscvF64x2Min) \
  V(RiscvF64x2Max) \
  V(RiscvF64x2ConvertLowI32x4S) \
  V(RiscvF64x2ConvertLowI32x4U) \
  V(RiscvF64x2PromoteLowF32x4) \
  V(RiscvF64x2Eq) \
  V(RiscvF64x2Ne) \
  V(RiscvF64x2Lt) \
  V(RiscvF64x2Le) \
  V(RiscvF64x2Splat) \
  V(RiscvF64x2ExtractLane) \
  V(RiscvF64x2ReplaceLane) \
  V(RiscvF64x2Pmin) \
  V(RiscvF64x2Pmax) \
  V(RiscvF64x2Ceil) \
  V(RiscvF64x2Floor) \
  V(RiscvF64x2Trunc) \
  V(RiscvF64x2NearestInt) \
  V(RiscvI64x2Splat) \
  V(RiscvI64x2ExtractLane) \
  V(RiscvI64x2ReplaceLane) \
  V(RiscvI64x2Add) \
  V(RiscvI64x2Sub) \
  V(RiscvI64x2Mul) \
  V(RiscvI64x2Abs) \
  V(RiscvI64x2Neg) \
  V(RiscvI64x2Shl) \
  V(RiscvI64x2ShrS) \
  V(RiscvI64x2ShrU) \
  V(RiscvI64x2BitMask) \
  V(RiscvF32x4Abs) \
  V(RiscvF32x4Neg) \
  V(RiscvF32x4Sqrt) \
  V(RiscvF32x4RecipApprox) \
  V(RiscvF32x4RecipSqrtApprox) \
  V(RiscvF32x4Add) \
  V(RiscvF32x4Sub) \
  V(RiscvF32x4Mul) \
  V(RiscvF32x4Div) \
  V(RiscvF32x4Max) \
  V(RiscvF32x4Min) \
  V(RiscvF32x4Eq) \
  V(RiscvF32x4Ne) \
  V(RiscvF32x4Lt) \
  V(RiscvF32x4Le) \
  V(RiscvF32x4Pmin) \
  V(RiscvF32x4Pmax) \
  V(RiscvF32x4DemoteF64x2Zero) \
  V(RiscvF32x4Ceil) \
  V(RiscvF32x4Floor) \
  V(RiscvF32x4Trunc) \
  V(RiscvF32x4NearestInt) \
  V(RiscvI32x4SConvertF32x4) \
  V(RiscvI32x4UConvertF32x4) \
  V(RiscvI32x4Neg) \
  V(RiscvI32x4GtS) \
  V(RiscvI32x4GeS) \
  V(RiscvI32x4GtU) \
  V(RiscvI32x4GeU) \
  V(RiscvI32x4Abs) \
  V(RiscvI32x4BitMask) \
  V(RiscvI32x4DotI16x8S) \
  V(RiscvI32x4TruncSatF64x2SZero) \
  V(RiscvI32x4TruncSatF64x2UZero) \
  V(RiscvI16x8Splat) \
  V(RiscvI16x8ExtractLaneU) \
  V(RiscvI16x8ExtractLaneS) \
  V(RiscvI16x8ReplaceLane) \
  V(RiscvI16x8Neg) \
  V(RiscvI16x8Shl) \
  V(RiscvI16x8ShrS) \
  V(RiscvI16x8ShrU) \
  V(RiscvI16x8Add) \
  V(RiscvI16x8AddSatS) \
  V(RiscvI16x8Sub) \
  V(RiscvI16x8SubSatS) \
  V(RiscvI16x8Mul) \
  V(RiscvI16x8MaxS) \
  V(RiscvI16x8MinS) \
  V(RiscvI16x8Eq) \
  V(RiscvI16x8Ne) \
  V(RiscvI16x8GtS) \
  V(RiscvI16x8GeS) \
  V(RiscvI16x8AddSatU) \
  V(RiscvI16x8SubSatU) \
  V(RiscvI16x8MaxU) \
  V(RiscvI16x8MinU) \
  V(RiscvI16x8GtU) \
  V(RiscvI16x8GeU) \
  V(RiscvI16x8RoundingAverageU) \
  V(RiscvI16x8Q15MulRSatS) \
  V(RiscvI16x8Abs) \
  V(RiscvI16x8BitMask) \
  V(RiscvI8x16Splat) \
  V(RiscvI8x16ExtractLaneU) \
  V(RiscvI8x16ExtractLaneS) \
  V(RiscvI8x16ReplaceLane) \
  V(RiscvI8x16Neg) \
  V(RiscvI8x16Shl) \
  V(RiscvI8x16ShrS) \
  V(RiscvI8x16Add) \
  V(RiscvI8x16AddSatS) \
  V(RiscvI8x16Sub) \
  V(RiscvI8x16SubSatS) \
  V(RiscvI8x16MaxS) \
  V(RiscvI8x16MinS) \
  V(RiscvI8x16Eq) \
  V(RiscvI8x16Ne) \
  V(RiscvI8x16GtS) \
  V(RiscvI8x16GeS) \
  V(RiscvI8x16ShrU) \
  V(RiscvI8x16AddSatU) \
  V(RiscvI8x16SubSatU) \
  V(RiscvI8x16MaxU) \
  V(RiscvI8x16MinU) \
  V(RiscvI8x16GtU) \
  V(RiscvI8x16GeU) \
  V(RiscvI8x16RoundingAverageU) \
  V(RiscvI8x16Abs) \
  V(RiscvI8x16BitMask) \
  V(RiscvI8x16Popcnt) \
  V(RiscvS128And) \
  V(RiscvS128Or) \
  V(RiscvS128Xor) \
  V(RiscvS128Not) \
  V(RiscvS128Select) \
  V(RiscvS128AndNot) \
  V(RiscvI32x4AllTrue) \
  V(RiscvI16x8AllTrue) \
  V(RiscvV128AnyTrue) \
  V(RiscvI8x16AllTrue) \
  V(RiscvI64x2AllTrue) \
  V(RiscvS32x4InterleaveRight) \
  V(RiscvS32x4InterleaveLeft) \
  V(RiscvS32x4PackEven) \
  V(RiscvS32x4PackOdd) \
  V(RiscvS32x4InterleaveEven) \
  V(RiscvS32x4InterleaveOdd) \
  V(RiscvS32x4Shuffle) \
  V(RiscvS16x8InterleaveRight) \
  V(RiscvS16x8InterleaveLeft) \
  V(RiscvS16x8PackEven) \
  V(RiscvS16x8PackOdd) \
  V(RiscvS16x8InterleaveEven) \
  V(RiscvS16x8InterleaveOdd) \
  V(RiscvS16x4Reverse) \
  V(RiscvS16x2Reverse) \
  V(RiscvS8x16InterleaveRight) \
  V(RiscvS8x16InterleaveLeft) \
  V(RiscvS8x16PackEven) \
  V(RiscvS8x16PackOdd) \
  V(RiscvS8x16InterleaveEven) \
  V(RiscvS8x16InterleaveOdd) \
  V(RiscvI8x16Shuffle) \
  V(RiscvI8x16Swizzle) \
  V(RiscvS8x16Concat) \
  V(RiscvS8x8Reverse) \
  V(RiscvS8x4Reverse) \
  V(RiscvS8x2Reverse) \
  V(RiscvS128Load8Splat) \
  V(RiscvS128Load16Splat) \
  V(RiscvS128Load32Splat) \
  V(RiscvS128Load64Splat) \
  V(RiscvS128Load8x8S) \
  V(RiscvS128Load8x8U) \
  V(RiscvS128Load16x4S) \
  V(RiscvS128Load16x4U) \
  V(RiscvS128Load32x2S) \
  V(RiscvS128Load32x2U) \
  V(RiscvS128LoadLane) \
  V(RiscvS128StoreLane) \
  V(RiscvRvvLd) \
  V(RiscvRvvSt) \
  V(RiscvI32x4SConvertI16x8Low) \
  V(RiscvI32x4SConvertI16x8High) \
  V(RiscvI32x4UConvertI16x8Low) \
  V(RiscvI32x4UConvertI16x8High) \
  V(RiscvI16x8SConvertI8x16Low) \
V(RiscvI16x8SConvertI8x16High) \ V(RiscvI16x8SConvertI32x4) \
V(RiscvI16x8SConvertI32x4) \ V(RiscvI16x8UConvertI32x4) \
V(RiscvI16x8UConvertI32x4) \ V(RiscvI16x8UConvertI8x16Low) \
V(RiscvI16x8UConvertI8x16Low) \ V(RiscvI16x8UConvertI8x16High) \
V(RiscvI16x8UConvertI8x16High) \ V(RiscvI8x16SConvertI16x8) \
V(RiscvI8x16SConvertI16x8) \ V(RiscvI8x16UConvertI16x8) \
V(RiscvI8x16UConvertI16x8) \ V(RiscvWord64AtomicLoadUint64) \
V(RiscvWord64AtomicLoadUint64) \ V(RiscvWord64AtomicStoreWord64) \
V(RiscvWord64AtomicStoreWord64) \ V(RiscvWord64AtomicAddUint64) \
V(RiscvWord64AtomicAddUint64) \ V(RiscvWord64AtomicSubUint64) \
V(RiscvWord64AtomicSubUint64) \ V(RiscvWord64AtomicAndUint64) \
V(RiscvWord64AtomicAndUint64) \ V(RiscvWord64AtomicOrUint64) \
V(RiscvWord64AtomicOrUint64) \ V(RiscvWord64AtomicXorUint64) \
V(RiscvWord64AtomicXorUint64) \ V(RiscvWord64AtomicExchangeUint64) \
V(RiscvWord64AtomicExchangeUint64) \ V(RiscvWord64AtomicCompareExchangeUint64) \
V(RiscvWord64AtomicCompareExchangeUint64) \ V(RiscvStoreCompressTagged) \
V(RiscvStoreCompressTagged) \ V(RiscvLoadDecompressTaggedSigned) \
V(RiscvLoadDecompressTaggedSigned) \ V(RiscvLoadDecompressTaggedPointer) \
V(RiscvLoadDecompressTaggedPointer) \
V(RiscvLoadDecompressAnyTagged) V(RiscvLoadDecompressAnyTagged)
// Addressing modes represent the "shape" of inputs to an instruction. // Addressing modes represent the "shape" of inputs to an instruction.
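The backslash-continued V(...) rows above form an X-macro list: each architecture header defines its opcode set once, and every consumer expands the list by supplying its own definition of V. A minimal, self-contained sketch of the pattern, assuming a toy OPCODE_LIST with three sample opcodes rather than the real TARGET_ARCH_OPCODE_LIST:

#include <cstdio>

// Toy stand-in for TARGET_ARCH_OPCODE_LIST; illustrative only.
#define OPCODE_LIST(V) \
  V(RiscvI64x2Sub)     \
  V(RiscvI64x2Mul)     \
  V(RiscvI64x2Abs)

// Expand the list once into an enum of opcode constants...
#define DECLARE_ENUM(name) k##name,
enum ArchOpcode { OPCODE_LIST(DECLARE_ENUM) kLastOpcode };
#undef DECLARE_ENUM

// ...and again into a parallel table of opcode names.
#define DECLARE_NAME(name) #name,
static const char* const kOpcodeNames[] = {OPCODE_LIST(DECLARE_NAME)};
#undef DECLARE_NAME

int main() {
  // Prints "RiscvI64x2Mul = 1": enum values and names stay in sync.
  std::printf("%s = %d\n", kOpcodeNames[kRiscvI64x2Mul], kRiscvI64x2Mul);
  return 0;
}

Because every enum, name table, and switch is generated from the same list, adding or removing an opcode is a one-line edit to the header.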


@@ -12,396 +12,392 @@ namespace compiler {
// S390-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.

-// Opcodes that support a MemoryAccessMode.
-#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
-
-#define TARGET_ARCH_OPCODE_LIST(V) \
-  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
+#define TARGET_ARCH_OPCODE_LIST(V) \
V(S390_Peek) \
V(S390_Abs32) \
V(S390_Abs64) \
V(S390_And32) \
V(S390_And64) \
V(S390_Or32) \
V(S390_Or64) \
V(S390_Xor32) \
V(S390_Xor64) \
V(S390_ShiftLeft32) \
V(S390_ShiftLeft64) \
V(S390_ShiftRight32) \
V(S390_ShiftRight64) \
V(S390_ShiftRightArith32) \
V(S390_ShiftRightArith64) \
V(S390_RotRight32) \
V(S390_RotRight64) \
V(S390_Not32) \
V(S390_Not64) \
V(S390_RotLeftAndClear64) \
V(S390_RotLeftAndClearLeft64) \
V(S390_RotLeftAndClearRight64) \
V(S390_Lay) \
V(S390_Add32) \
V(S390_Add64) \
V(S390_AddFloat) \
V(S390_AddDouble) \
V(S390_Sub32) \
V(S390_Sub64) \
V(S390_SubFloat) \
V(S390_SubDouble) \
V(S390_Mul32) \
V(S390_Mul32WithOverflow) \
V(S390_Mul64) \
V(S390_MulHigh32) \
V(S390_MulHighU32) \
V(S390_MulFloat) \
V(S390_MulDouble) \
V(S390_Div32) \
V(S390_Div64) \
V(S390_DivU32) \
V(S390_DivU64) \
V(S390_DivFloat) \
V(S390_DivDouble) \
V(S390_Mod32) \
V(S390_Mod64) \
V(S390_ModU32) \
V(S390_ModU64) \
V(S390_ModDouble) \
V(S390_Neg32) \
V(S390_Neg64) \
V(S390_NegDouble) \
V(S390_NegFloat) \
V(S390_SqrtFloat) \
V(S390_FloorFloat) \
V(S390_CeilFloat) \
V(S390_TruncateFloat) \
V(S390_FloatNearestInt) \
V(S390_AbsFloat) \
V(S390_SqrtDouble) \
V(S390_FloorDouble) \
V(S390_CeilDouble) \
V(S390_TruncateDouble) \
V(S390_RoundDouble) \
V(S390_DoubleNearestInt) \
V(S390_MaxFloat) \
V(S390_MaxDouble) \
V(S390_MinFloat) \
V(S390_MinDouble) \
V(S390_AbsDouble) \
V(S390_Cntlz32) \
V(S390_Cntlz64) \
V(S390_Popcnt32) \
V(S390_Popcnt64) \
V(S390_Cmp32) \
V(S390_Cmp64) \
V(S390_CmpFloat) \
V(S390_CmpDouble) \
V(S390_Tst32) \
V(S390_Tst64) \
V(S390_Push) \
V(S390_PushFrame) \
V(S390_StoreToStackSlot) \
V(S390_SignExtendWord8ToInt32) \
V(S390_SignExtendWord16ToInt32) \
V(S390_SignExtendWord8ToInt64) \
V(S390_SignExtendWord16ToInt64) \
V(S390_SignExtendWord32ToInt64) \
V(S390_Uint32ToUint64) \
V(S390_Int64ToInt32) \
V(S390_Int64ToFloat32) \
V(S390_Int64ToDouble) \
V(S390_Uint64ToFloat32) \
V(S390_Uint64ToDouble) \
V(S390_Int32ToFloat32) \
V(S390_Int32ToDouble) \
V(S390_Uint32ToFloat32) \
V(S390_Uint32ToDouble) \
V(S390_Float32ToInt64) \
V(S390_Float32ToUint64) \
V(S390_Float32ToInt32) \
V(S390_Float32ToUint32) \
V(S390_Float32ToDouble) \
V(S390_Float64SilenceNaN) \
V(S390_DoubleToInt32) \
V(S390_DoubleToUint32) \
V(S390_DoubleToInt64) \
V(S390_DoubleToUint64) \
V(S390_DoubleToFloat32) \
V(S390_DoubleExtractLowWord32) \
V(S390_DoubleExtractHighWord32) \
V(S390_DoubleInsertLowWord32) \
V(S390_DoubleInsertHighWord32) \
V(S390_DoubleConstruct) \
V(S390_BitcastInt32ToFloat32) \
V(S390_BitcastFloat32ToInt32) \
V(S390_BitcastInt64ToDouble) \
V(S390_BitcastDoubleToInt64) \
V(S390_LoadWordS8) \
V(S390_LoadWordU8) \
V(S390_LoadWordS16) \
V(S390_LoadWordU16) \
V(S390_LoadWordS32) \
V(S390_LoadWordU32) \
V(S390_LoadAndTestWord32) \
V(S390_LoadAndTestWord64) \
V(S390_LoadAndTestFloat32) \
V(S390_LoadAndTestFloat64) \
V(S390_LoadReverse16RR) \
V(S390_LoadReverse32RR) \
V(S390_LoadReverse64RR) \
V(S390_LoadReverseSimd128RR) \
V(S390_LoadReverseSimd128) \
V(S390_LoadReverse16) \
V(S390_LoadReverse32) \
V(S390_LoadReverse64) \
V(S390_LoadWord64) \
V(S390_LoadFloat32) \
V(S390_LoadDouble) \
V(S390_StoreWord8) \
V(S390_StoreWord16) \
V(S390_StoreWord32) \
V(S390_StoreWord64) \
V(S390_StoreReverse16) \
V(S390_StoreReverse32) \
V(S390_StoreReverse64) \
V(S390_StoreReverseSimd128) \
V(S390_StoreFloat32) \
V(S390_StoreDouble) \
V(S390_Word64AtomicExchangeUint64) \
V(S390_Word64AtomicCompareExchangeUint64) \
V(S390_Word64AtomicAddUint64) \
V(S390_Word64AtomicSubUint64) \
V(S390_Word64AtomicAndUint64) \
V(S390_Word64AtomicOrUint64) \
V(S390_Word64AtomicXorUint64) \
V(S390_F64x2Splat) \
V(S390_F64x2ReplaceLane) \
V(S390_F64x2Abs) \
V(S390_F64x2Neg) \
V(S390_F64x2Sqrt) \
V(S390_F64x2Add) \
V(S390_F64x2Sub) \
V(S390_F64x2Mul) \
V(S390_F64x2Div) \
V(S390_F64x2Eq) \
V(S390_F64x2Ne) \
V(S390_F64x2Lt) \
V(S390_F64x2Le) \
V(S390_F64x2Min) \
V(S390_F64x2Max) \
V(S390_F64x2ExtractLane) \
V(S390_F64x2Qfma) \
V(S390_F64x2Qfms) \
V(S390_F64x2Pmin) \
V(S390_F64x2Pmax) \
V(S390_F64x2Ceil) \
V(S390_F64x2Floor) \
V(S390_F64x2Trunc) \
V(S390_F64x2NearestInt) \
V(S390_F64x2ConvertLowI32x4S) \
V(S390_F64x2ConvertLowI32x4U) \
V(S390_F64x2PromoteLowF32x4) \
V(S390_F32x4Splat) \
V(S390_F32x4ExtractLane) \
V(S390_F32x4ReplaceLane) \
V(S390_F32x4Add) \
V(S390_F32x4Sub) \
V(S390_F32x4Mul) \
V(S390_F32x4Eq) \
V(S390_F32x4Ne) \
V(S390_F32x4Lt) \
V(S390_F32x4Le) \
V(S390_F32x4Abs) \
V(S390_F32x4Neg) \
V(S390_F32x4RecipApprox) \
V(S390_F32x4RecipSqrtApprox) \
V(S390_F32x4SConvertI32x4) \
V(S390_F32x4UConvertI32x4) \
V(S390_F32x4Sqrt) \
V(S390_F32x4Div) \
V(S390_F32x4Min) \
V(S390_F32x4Max) \
V(S390_F32x4Qfma) \
V(S390_F32x4Qfms) \
V(S390_F32x4Pmin) \
V(S390_F32x4Pmax) \
V(S390_F32x4Ceil) \
V(S390_F32x4Floor) \
V(S390_F32x4Trunc) \
V(S390_F32x4NearestInt) \
V(S390_F32x4DemoteF64x2Zero) \
V(S390_I64x2Neg) \
V(S390_I64x2Add) \
V(S390_I64x2Sub) \
V(S390_I64x2Shl) \
V(S390_I64x2ShrS) \
V(S390_I64x2ShrU) \
V(S390_I64x2Mul) \
V(S390_I64x2Splat) \
V(S390_I64x2ReplaceLane) \
V(S390_I64x2ExtractLane) \
V(S390_I64x2Eq) \
V(S390_I64x2BitMask) \
V(S390_I64x2ExtMulLowI32x4S) \
V(S390_I64x2ExtMulHighI32x4S) \
V(S390_I64x2ExtMulLowI32x4U) \
V(S390_I64x2ExtMulHighI32x4U) \
V(S390_I64x2SConvertI32x4Low) \
V(S390_I64x2SConvertI32x4High) \
V(S390_I64x2UConvertI32x4Low) \
V(S390_I64x2UConvertI32x4High) \
V(S390_I64x2Ne) \
V(S390_I64x2GtS) \
V(S390_I64x2GeS) \
V(S390_I64x2Abs) \
V(S390_I32x4Splat) \
V(S390_I32x4ExtractLane) \
V(S390_I32x4ReplaceLane) \
V(S390_I32x4Add) \
V(S390_I32x4Sub) \
V(S390_I32x4Mul) \
V(S390_I32x4MinS) \
V(S390_I32x4MinU) \
V(S390_I32x4MaxS) \
V(S390_I32x4MaxU) \
V(S390_I32x4Eq) \
V(S390_I32x4Ne) \
V(S390_I32x4GtS) \
V(S390_I32x4GeS) \
V(S390_I32x4GtU) \
V(S390_I32x4GeU) \
V(S390_I32x4Neg) \
V(S390_I32x4Shl) \
V(S390_I32x4ShrS) \
V(S390_I32x4ShrU) \
V(S390_I32x4SConvertF32x4) \
V(S390_I32x4UConvertF32x4) \
V(S390_I32x4SConvertI16x8Low) \
V(S390_I32x4SConvertI16x8High) \
V(S390_I32x4UConvertI16x8Low) \
V(S390_I32x4UConvertI16x8High) \
V(S390_I32x4Abs) \
V(S390_I32x4BitMask) \
V(S390_I32x4DotI16x8S) \
V(S390_I32x4ExtMulLowI16x8S) \
V(S390_I32x4ExtMulHighI16x8S) \
V(S390_I32x4ExtMulLowI16x8U) \
V(S390_I32x4ExtMulHighI16x8U) \
V(S390_I32x4ExtAddPairwiseI16x8S) \
V(S390_I32x4ExtAddPairwiseI16x8U) \
V(S390_I32x4TruncSatF64x2SZero) \
V(S390_I32x4TruncSatF64x2UZero) \
V(S390_I16x8Splat) \
V(S390_I16x8ExtractLaneU) \
V(S390_I16x8ExtractLaneS) \
V(S390_I16x8ReplaceLane) \
V(S390_I16x8Add) \
V(S390_I16x8Sub) \
V(S390_I16x8Mul) \
V(S390_I16x8MinS) \
V(S390_I16x8MinU) \
V(S390_I16x8MaxS) \
V(S390_I16x8MaxU) \
V(S390_I16x8Eq) \
V(S390_I16x8Ne) \
V(S390_I16x8GtS) \
V(S390_I16x8GeS) \
V(S390_I16x8GtU) \
V(S390_I16x8GeU) \
V(S390_I16x8Shl) \
V(S390_I16x8ShrS) \
V(S390_I16x8ShrU) \
V(S390_I16x8Neg) \
V(S390_I16x8SConvertI32x4) \
V(S390_I16x8UConvertI32x4) \
V(S390_I16x8SConvertI8x16Low) \
V(S390_I16x8SConvertI8x16High) \
V(S390_I16x8UConvertI8x16Low) \
V(S390_I16x8UConvertI8x16High) \
V(S390_I16x8AddSatS) \
V(S390_I16x8SubSatS) \
V(S390_I16x8AddSatU) \
V(S390_I16x8SubSatU) \
V(S390_I16x8RoundingAverageU) \
V(S390_I16x8Abs) \
V(S390_I16x8BitMask) \
V(S390_I16x8ExtMulLowI8x16S) \
V(S390_I16x8ExtMulHighI8x16S) \
V(S390_I16x8ExtMulLowI8x16U) \
V(S390_I16x8ExtMulHighI8x16U) \
V(S390_I16x8ExtAddPairwiseI8x16S) \
V(S390_I16x8ExtAddPairwiseI8x16U) \
V(S390_I16x8Q15MulRSatS) \
V(S390_I8x16Splat) \
V(S390_I8x16ExtractLaneU) \
V(S390_I8x16ExtractLaneS) \
V(S390_I8x16ReplaceLane) \
V(S390_I8x16Add) \
V(S390_I8x16Sub) \
V(S390_I8x16MinS) \
V(S390_I8x16MinU) \
V(S390_I8x16MaxS) \
V(S390_I8x16MaxU) \
V(S390_I8x16Eq) \
V(S390_I8x16Ne) \
V(S390_I8x16GtS) \
V(S390_I8x16GeS) \
V(S390_I8x16GtU) \
V(S390_I8x16GeU) \
V(S390_I8x16Shl) \
V(S390_I8x16ShrS) \
V(S390_I8x16ShrU) \
V(S390_I8x16Neg) \
V(S390_I8x16SConvertI16x8) \
V(S390_I8x16UConvertI16x8) \
V(S390_I8x16AddSatS) \
V(S390_I8x16SubSatS) \
V(S390_I8x16AddSatU) \
V(S390_I8x16SubSatU) \
V(S390_I8x16RoundingAverageU) \
V(S390_I8x16Abs) \
V(S390_I8x16BitMask) \
V(S390_I8x16Shuffle) \
V(S390_I8x16Swizzle) \
V(S390_I8x16Popcnt) \
V(S390_I64x2AllTrue) \
V(S390_I32x4AllTrue) \
V(S390_I16x8AllTrue) \
V(S390_I8x16AllTrue) \
V(S390_V128AnyTrue) \
V(S390_S128And) \
V(S390_S128Or) \
V(S390_S128Xor) \
V(S390_S128Const) \
V(S390_S128Zero) \
V(S390_S128AllOnes) \
V(S390_S128Not) \
V(S390_S128Select) \
V(S390_S128AndNot) \
V(S390_S128Load8Splat) \
V(S390_S128Load16Splat) \
V(S390_S128Load32Splat) \
V(S390_S128Load64Splat) \
V(S390_S128Load8x8S) \
V(S390_S128Load8x8U) \
V(S390_S128Load16x4S) \
V(S390_S128Load16x4U) \
V(S390_S128Load32x2S) \
V(S390_S128Load32x2U) \
V(S390_S128Load32Zero) \
V(S390_S128Load64Zero) \
V(S390_S128Load8Lane) \
V(S390_S128Load16Lane) \
V(S390_S128Load32Lane) \
V(S390_S128Load64Lane) \
V(S390_S128Store8Lane) \
V(S390_S128Store16Lane) \
V(S390_S128Store32Lane) \
V(S390_S128Store64Lane) \
V(S390_StoreSimd128) \
V(S390_LoadSimd128) \
V(S390_StoreCompressTagged) \
V(S390_LoadDecompressTaggedSigned) \
V(S390_LoadDecompressTaggedPointer) \
V(S390_LoadDecompressAnyTagged)

// Addressing modes represent the "shape" of inputs to an instruction.
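The removed "#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None." definitions in the diffs above expand to nothing, so a switch built from such a list contributes no case labels at all, only the default. MSVC reports this as warning C4065 ("switch statement contains 'default' but no 'case' labels"). A hedged sketch of the degenerate expansion; the names below are illustrative stand-ins, not the exact V8 code:

// Illustrative opcodes only.
enum ArchOpcode { kOpA, kOpB };

// Empty on this architecture, mirroring the "// None." definitions above.
#define OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V)

#define TRAPPING_CASE(name) \
  case k##name:             \
    return true;

bool HasMemoryAccessModeWithSwitch(ArchOpcode opcode) {
  // The empty list adds no cases, so this expands to a switch with only a
  // default label: the exact shape MSVC flags as C4065.
  switch (opcode) {
    OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(TRAPPING_CASE)
    default:
      return false;
  }
}

// The warning-free form: with no qualifying opcodes, skip the switch.
bool HasMemoryAccessMode(ArchOpcode) { return false; }

int main() { return HasMemoryAccessMode(kOpA) ? 1 : 0; }

Returning the constant directly is the conventional fix for C4065 whenever a generated switch can end up empty, which is consistent with dropping the empty list definitions from these headers rather than keeping them around.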