Commit 65d9c441 authored by Zhao Jiazhong, committed by V8 LUCI CQ

[mips][loong64][compiler] Teach InstructionScheduler about protected memory accesses

Port commit e301d71f

Change-Id: I58bb66e86629b60bcb75b3cec3e293d75acc5f5c
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3184290
Auto-Submit: Zhao Jiazhong <zhaojiazhong-hf@loongson.cn>
Reviewed-by: Liu yu <liuyu@loongson.cn>
Commit-Queue: Liu yu <liuyu@loongson.cn>
Cr-Commit-Position: refs/heads/main@{#77074}
parent fc4c843b
......@@ -11,365 +11,370 @@ namespace compiler {
// LOONG64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
V(Loong64Add_d) \
V(Loong64Add_w) \
V(Loong64AddOvf_d) \
V(Loong64Sub_d) \
V(Loong64Sub_w) \
V(Loong64SubOvf_d) \
V(Loong64Mul_d) \
V(Loong64MulOvf_w) \
V(Loong64Mulh_d) \
V(Loong64Mulh_w) \
V(Loong64Mulh_wu) \
V(Loong64Mul_w) \
V(Loong64Div_d) \
V(Loong64Div_w) \
V(Loong64Div_du) \
V(Loong64Div_wu) \
V(Loong64Mod_d) \
V(Loong64Mod_w) \
V(Loong64Mod_du) \
V(Loong64Mod_wu) \
V(Loong64And) \
V(Loong64And32) \
V(Loong64Or) \
V(Loong64Or32) \
V(Loong64Nor) \
V(Loong64Nor32) \
V(Loong64Xor) \
V(Loong64Xor32) \
V(Loong64Alsl_d) \
V(Loong64Alsl_w) \
V(Loong64Sll_d) \
V(Loong64Sll_w) \
V(Loong64Srl_d) \
V(Loong64Srl_w) \
V(Loong64Sra_d) \
V(Loong64Sra_w) \
V(Loong64Rotr_d) \
V(Loong64Rotr_w) \
V(Loong64Bstrpick_d) \
V(Loong64Bstrpick_w) \
V(Loong64Bstrins_d) \
V(Loong64Bstrins_w) \
V(Loong64ByteSwap64) \
V(Loong64ByteSwap32) \
V(Loong64Clz_d) \
V(Loong64Clz_w) \
V(Loong64Mov) \
V(Loong64Tst) \
V(Loong64Cmp) \
V(Loong64Float32Cmp) \
V(Loong64Float32Add) \
V(Loong64Float32Sub) \
V(Loong64Float32Mul) \
V(Loong64Float32Div) \
V(Loong64Float32Abs) \
V(Loong64Float32Neg) \
V(Loong64Float32Sqrt) \
V(Loong64Float32Max) \
V(Loong64Float32Min) \
V(Loong64Float32ToFloat64) \
V(Loong64Float32RoundDown) \
V(Loong64Float32RoundUp) \
V(Loong64Float32RoundTruncate) \
V(Loong64Float32RoundTiesEven) \
V(Loong64Float32ToInt32) \
V(Loong64Float32ToInt64) \
V(Loong64Float32ToUint32) \
V(Loong64Float32ToUint64) \
V(Loong64Float64Cmp) \
V(Loong64Float64Add) \
V(Loong64Float64Sub) \
V(Loong64Float64Mul) \
V(Loong64Float64Div) \
V(Loong64Float64Mod) \
V(Loong64Float64Abs) \
V(Loong64Float64Neg) \
V(Loong64Float64Sqrt) \
V(Loong64Float64Max) \
V(Loong64Float64Min) \
V(Loong64Float64ToFloat32) \
V(Loong64Float64RoundDown) \
V(Loong64Float64RoundUp) \
V(Loong64Float64RoundTruncate) \
V(Loong64Float64RoundTiesEven) \
V(Loong64Float64ToInt32) \
V(Loong64Float64ToInt64) \
V(Loong64Float64ToUint32) \
V(Loong64Float64ToUint64) \
V(Loong64Int32ToFloat32) \
V(Loong64Int32ToFloat64) \
V(Loong64Int64ToFloat32) \
V(Loong64Int64ToFloat64) \
V(Loong64Uint32ToFloat32) \
V(Loong64Uint32ToFloat64) \
V(Loong64Uint64ToFloat32) \
V(Loong64Uint64ToFloat64) \
V(Loong64Float64ExtractLowWord32) \
V(Loong64Float64ExtractHighWord32) \
V(Loong64Float64InsertLowWord32) \
V(Loong64Float64InsertHighWord32) \
V(Loong64BitcastDL) \
V(Loong64BitcastLD) \
V(Loong64Float64SilenceNaN) \
V(Loong64Ld_b) \
V(Loong64Ld_bu) \
V(Loong64St_b) \
V(Loong64Ld_h) \
V(Loong64Ld_hu) \
V(Loong64St_h) \
V(Loong64Ld_w) \
V(Loong64Ld_wu) \
V(Loong64St_w) \
V(Loong64Ld_d) \
V(Loong64St_d) \
V(Loong64Fld_s) \
V(Loong64Fst_s) \
V(Loong64Fld_d) \
V(Loong64Fst_d) \
V(Loong64Push) \
V(Loong64Peek) \
V(Loong64Poke) \
V(Loong64StackClaim) \
V(Loong64Ext_w_b) \
V(Loong64Ext_w_h) \
V(Loong64Dbar) \
V(Loong64S128Const) \
V(Loong64S128Zero) \
V(Loong64S128AllOnes) \
V(Loong64I32x4Splat) \
V(Loong64I32x4ExtractLane) \
V(Loong64I32x4ReplaceLane) \
V(Loong64I32x4Add) \
V(Loong64I32x4Sub) \
V(Loong64F64x2Abs) \
V(Loong64F64x2Neg) \
V(Loong64F32x4Splat) \
V(Loong64F32x4ExtractLane) \
V(Loong64F32x4ReplaceLane) \
V(Loong64F32x4SConvertI32x4) \
V(Loong64F32x4UConvertI32x4) \
V(Loong64I32x4Mul) \
V(Loong64I32x4MaxS) \
V(Loong64I32x4MinS) \
V(Loong64I32x4Eq) \
V(Loong64I32x4Ne) \
V(Loong64I32x4Shl) \
V(Loong64I32x4ShrS) \
V(Loong64I32x4ShrU) \
V(Loong64I32x4MaxU) \
V(Loong64I32x4MinU) \
V(Loong64F64x2Sqrt) \
V(Loong64F64x2Add) \
V(Loong64F64x2Sub) \
V(Loong64F64x2Mul) \
V(Loong64F64x2Div) \
V(Loong64F64x2Min) \
V(Loong64F64x2Max) \
V(Loong64F64x2Eq) \
V(Loong64F64x2Ne) \
V(Loong64F64x2Lt) \
V(Loong64F64x2Le) \
V(Loong64F64x2Splat) \
V(Loong64F64x2ExtractLane) \
V(Loong64F64x2ReplaceLane) \
V(Loong64F64x2Pmin) \
V(Loong64F64x2Pmax) \
V(Loong64F64x2Ceil) \
V(Loong64F64x2Floor) \
V(Loong64F64x2Trunc) \
V(Loong64F64x2NearestInt) \
V(Loong64F64x2ConvertLowI32x4S) \
V(Loong64F64x2ConvertLowI32x4U) \
V(Loong64F64x2PromoteLowF32x4) \
V(Loong64I64x2Splat) \
V(Loong64I64x2ExtractLane) \
V(Loong64I64x2ReplaceLane) \
V(Loong64I64x2Add) \
V(Loong64I64x2Sub) \
V(Loong64I64x2Mul) \
V(Loong64I64x2Neg) \
V(Loong64I64x2Shl) \
V(Loong64I64x2ShrS) \
V(Loong64I64x2ShrU) \
V(Loong64I64x2BitMask) \
V(Loong64I64x2Eq) \
V(Loong64I64x2Ne) \
V(Loong64I64x2GtS) \
V(Loong64I64x2GeS) \
V(Loong64I64x2Abs) \
V(Loong64I64x2SConvertI32x4Low) \
V(Loong64I64x2SConvertI32x4High) \
V(Loong64I64x2UConvertI32x4Low) \
V(Loong64I64x2UConvertI32x4High) \
V(Loong64ExtMulLow) \
V(Loong64ExtMulHigh) \
V(Loong64ExtAddPairwise) \
V(Loong64F32x4Abs) \
V(Loong64F32x4Neg) \
V(Loong64F32x4Sqrt) \
V(Loong64F32x4RecipApprox) \
V(Loong64F32x4RecipSqrtApprox) \
V(Loong64F32x4Add) \
V(Loong64F32x4Sub) \
V(Loong64F32x4Mul) \
V(Loong64F32x4Div) \
V(Loong64F32x4Max) \
V(Loong64F32x4Min) \
V(Loong64F32x4Eq) \
V(Loong64F32x4Ne) \
V(Loong64F32x4Lt) \
V(Loong64F32x4Le) \
V(Loong64F32x4Pmin) \
V(Loong64F32x4Pmax) \
V(Loong64F32x4Ceil) \
V(Loong64F32x4Floor) \
V(Loong64F32x4Trunc) \
V(Loong64F32x4NearestInt) \
V(Loong64F32x4DemoteF64x2Zero) \
V(Loong64I32x4SConvertF32x4) \
V(Loong64I32x4UConvertF32x4) \
V(Loong64I32x4Neg) \
V(Loong64I32x4GtS) \
V(Loong64I32x4GeS) \
V(Loong64I32x4GtU) \
V(Loong64I32x4GeU) \
V(Loong64I32x4Abs) \
V(Loong64I32x4BitMask) \
V(Loong64I32x4DotI16x8S) \
V(Loong64I32x4TruncSatF64x2SZero) \
V(Loong64I32x4TruncSatF64x2UZero) \
V(Loong64I16x8Splat) \
V(Loong64I16x8ExtractLaneU) \
V(Loong64I16x8ExtractLaneS) \
V(Loong64I16x8ReplaceLane) \
V(Loong64I16x8Neg) \
V(Loong64I16x8Shl) \
V(Loong64I16x8ShrS) \
V(Loong64I16x8ShrU) \
V(Loong64I16x8Add) \
V(Loong64I16x8AddSatS) \
V(Loong64I16x8Sub) \
V(Loong64I16x8SubSatS) \
V(Loong64I16x8Mul) \
V(Loong64I16x8MaxS) \
V(Loong64I16x8MinS) \
V(Loong64I16x8Eq) \
V(Loong64I16x8Ne) \
V(Loong64I16x8GtS) \
V(Loong64I16x8GeS) \
V(Loong64I16x8AddSatU) \
V(Loong64I16x8SubSatU) \
V(Loong64I16x8MaxU) \
V(Loong64I16x8MinU) \
V(Loong64I16x8GtU) \
V(Loong64I16x8GeU) \
V(Loong64I16x8RoundingAverageU) \
V(Loong64I16x8Abs) \
V(Loong64I16x8BitMask) \
V(Loong64I16x8Q15MulRSatS) \
V(Loong64I8x16Splat) \
V(Loong64I8x16ExtractLaneU) \
V(Loong64I8x16ExtractLaneS) \
V(Loong64I8x16ReplaceLane) \
V(Loong64I8x16Neg) \
V(Loong64I8x16Shl) \
V(Loong64I8x16ShrS) \
V(Loong64I8x16Add) \
V(Loong64I8x16AddSatS) \
V(Loong64I8x16Sub) \
V(Loong64I8x16SubSatS) \
V(Loong64I8x16MaxS) \
V(Loong64I8x16MinS) \
V(Loong64I8x16Eq) \
V(Loong64I8x16Ne) \
V(Loong64I8x16GtS) \
V(Loong64I8x16GeS) \
V(Loong64I8x16ShrU) \
V(Loong64I8x16AddSatU) \
V(Loong64I8x16SubSatU) \
V(Loong64I8x16MaxU) \
V(Loong64I8x16MinU) \
V(Loong64I8x16GtU) \
V(Loong64I8x16GeU) \
V(Loong64I8x16RoundingAverageU) \
V(Loong64I8x16Abs) \
V(Loong64I8x16Popcnt) \
V(Loong64I8x16BitMask) \
V(Loong64S128And) \
V(Loong64S128Or) \
V(Loong64S128Xor) \
V(Loong64S128Not) \
V(Loong64S128Select) \
V(Loong64S128AndNot) \
V(Loong64I64x2AllTrue) \
V(Loong64I32x4AllTrue) \
V(Loong64I16x8AllTrue) \
V(Loong64I8x16AllTrue) \
V(Loong64V128AnyTrue) \
V(Loong64S32x4InterleaveRight) \
V(Loong64S32x4InterleaveLeft) \
V(Loong64S32x4PackEven) \
V(Loong64S32x4PackOdd) \
V(Loong64S32x4InterleaveEven) \
V(Loong64S32x4InterleaveOdd) \
V(Loong64S32x4Shuffle) \
V(Loong64S16x8InterleaveRight) \
V(Loong64S16x8InterleaveLeft) \
V(Loong64S16x8PackEven) \
V(Loong64S16x8PackOdd) \
V(Loong64S16x8InterleaveEven) \
V(Loong64S16x8InterleaveOdd) \
V(Loong64S16x4Reverse) \
V(Loong64S16x2Reverse) \
V(Loong64S8x16InterleaveRight) \
V(Loong64S8x16InterleaveLeft) \
V(Loong64S8x16PackEven) \
V(Loong64S8x16PackOdd) \
V(Loong64S8x16InterleaveEven) \
V(Loong64S8x16InterleaveOdd) \
V(Loong64I8x16Shuffle) \
V(Loong64I8x16Swizzle) \
V(Loong64S8x16Concat) \
V(Loong64S8x8Reverse) \
V(Loong64S8x4Reverse) \
V(Loong64S8x2Reverse) \
V(Loong64S128LoadSplat) \
V(Loong64S128Load8x8S) \
V(Loong64S128Load8x8U) \
V(Loong64S128Load16x4S) \
V(Loong64S128Load16x4U) \
V(Loong64S128Load32x2S) \
V(Loong64S128Load32x2U) \
V(Loong64S128Load32Zero) \
V(Loong64S128Load64Zero) \
V(Loong64LoadLane) \
V(Loong64StoreLane) \
V(Loong64I32x4SConvertI16x8Low) \
V(Loong64I32x4SConvertI16x8High) \
V(Loong64I32x4UConvertI16x8Low) \
V(Loong64I32x4UConvertI16x8High) \
V(Loong64I16x8SConvertI8x16Low) \
V(Loong64I16x8SConvertI8x16High) \
V(Loong64I16x8SConvertI32x4) \
V(Loong64I16x8UConvertI32x4) \
V(Loong64I16x8UConvertI8x16Low) \
V(Loong64I16x8UConvertI8x16High) \
V(Loong64I8x16SConvertI16x8) \
V(Loong64I8x16UConvertI16x8) \
V(Loong64StoreCompressTagged) \
V(Loong64Word64AtomicLoadUint32) \
V(Loong64Word64AtomicLoadUint64) \
V(Loong64Word64AtomicStoreWord64) \
V(Loong64Word64AtomicAddUint64) \
V(Loong64Word64AtomicSubUint64) \
V(Loong64Word64AtomicAndUint64) \
V(Loong64Word64AtomicOrUint64) \
V(Loong64Word64AtomicXorUint64) \
V(Loong64Word64AtomicExchangeUint64) \
// Opcodes that support a MemoryAccessMode.
#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
#define TARGET_ARCH_OPCODE_LIST(V) \
TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
V(Loong64Add_d) \
V(Loong64Add_w) \
V(Loong64AddOvf_d) \
V(Loong64Sub_d) \
V(Loong64Sub_w) \
V(Loong64SubOvf_d) \
V(Loong64Mul_d) \
V(Loong64MulOvf_w) \
V(Loong64Mulh_d) \
V(Loong64Mulh_w) \
V(Loong64Mulh_wu) \
V(Loong64Mul_w) \
V(Loong64Div_d) \
V(Loong64Div_w) \
V(Loong64Div_du) \
V(Loong64Div_wu) \
V(Loong64Mod_d) \
V(Loong64Mod_w) \
V(Loong64Mod_du) \
V(Loong64Mod_wu) \
V(Loong64And) \
V(Loong64And32) \
V(Loong64Or) \
V(Loong64Or32) \
V(Loong64Nor) \
V(Loong64Nor32) \
V(Loong64Xor) \
V(Loong64Xor32) \
V(Loong64Alsl_d) \
V(Loong64Alsl_w) \
V(Loong64Sll_d) \
V(Loong64Sll_w) \
V(Loong64Srl_d) \
V(Loong64Srl_w) \
V(Loong64Sra_d) \
V(Loong64Sra_w) \
V(Loong64Rotr_d) \
V(Loong64Rotr_w) \
V(Loong64Bstrpick_d) \
V(Loong64Bstrpick_w) \
V(Loong64Bstrins_d) \
V(Loong64Bstrins_w) \
V(Loong64ByteSwap64) \
V(Loong64ByteSwap32) \
V(Loong64Clz_d) \
V(Loong64Clz_w) \
V(Loong64Mov) \
V(Loong64Tst) \
V(Loong64Cmp) \
V(Loong64Float32Cmp) \
V(Loong64Float32Add) \
V(Loong64Float32Sub) \
V(Loong64Float32Mul) \
V(Loong64Float32Div) \
V(Loong64Float32Abs) \
V(Loong64Float32Neg) \
V(Loong64Float32Sqrt) \
V(Loong64Float32Max) \
V(Loong64Float32Min) \
V(Loong64Float32ToFloat64) \
V(Loong64Float32RoundDown) \
V(Loong64Float32RoundUp) \
V(Loong64Float32RoundTruncate) \
V(Loong64Float32RoundTiesEven) \
V(Loong64Float32ToInt32) \
V(Loong64Float32ToInt64) \
V(Loong64Float32ToUint32) \
V(Loong64Float32ToUint64) \
V(Loong64Float64Cmp) \
V(Loong64Float64Add) \
V(Loong64Float64Sub) \
V(Loong64Float64Mul) \
V(Loong64Float64Div) \
V(Loong64Float64Mod) \
V(Loong64Float64Abs) \
V(Loong64Float64Neg) \
V(Loong64Float64Sqrt) \
V(Loong64Float64Max) \
V(Loong64Float64Min) \
V(Loong64Float64ToFloat32) \
V(Loong64Float64RoundDown) \
V(Loong64Float64RoundUp) \
V(Loong64Float64RoundTruncate) \
V(Loong64Float64RoundTiesEven) \
V(Loong64Float64ToInt32) \
V(Loong64Float64ToInt64) \
V(Loong64Float64ToUint32) \
V(Loong64Float64ToUint64) \
V(Loong64Int32ToFloat32) \
V(Loong64Int32ToFloat64) \
V(Loong64Int64ToFloat32) \
V(Loong64Int64ToFloat64) \
V(Loong64Uint32ToFloat32) \
V(Loong64Uint32ToFloat64) \
V(Loong64Uint64ToFloat32) \
V(Loong64Uint64ToFloat64) \
V(Loong64Float64ExtractLowWord32) \
V(Loong64Float64ExtractHighWord32) \
V(Loong64Float64InsertLowWord32) \
V(Loong64Float64InsertHighWord32) \
V(Loong64BitcastDL) \
V(Loong64BitcastLD) \
V(Loong64Float64SilenceNaN) \
V(Loong64Ld_b) \
V(Loong64Ld_bu) \
V(Loong64St_b) \
V(Loong64Ld_h) \
V(Loong64Ld_hu) \
V(Loong64St_h) \
V(Loong64Ld_w) \
V(Loong64Ld_wu) \
V(Loong64St_w) \
V(Loong64Ld_d) \
V(Loong64St_d) \
V(Loong64Fld_s) \
V(Loong64Fst_s) \
V(Loong64Fld_d) \
V(Loong64Fst_d) \
V(Loong64Push) \
V(Loong64Peek) \
V(Loong64Poke) \
V(Loong64StackClaim) \
V(Loong64Ext_w_b) \
V(Loong64Ext_w_h) \
V(Loong64Dbar) \
V(Loong64S128Const) \
V(Loong64S128Zero) \
V(Loong64S128AllOnes) \
V(Loong64I32x4Splat) \
V(Loong64I32x4ExtractLane) \
V(Loong64I32x4ReplaceLane) \
V(Loong64I32x4Add) \
V(Loong64I32x4Sub) \
V(Loong64F64x2Abs) \
V(Loong64F64x2Neg) \
V(Loong64F32x4Splat) \
V(Loong64F32x4ExtractLane) \
V(Loong64F32x4ReplaceLane) \
V(Loong64F32x4SConvertI32x4) \
V(Loong64F32x4UConvertI32x4) \
V(Loong64I32x4Mul) \
V(Loong64I32x4MaxS) \
V(Loong64I32x4MinS) \
V(Loong64I32x4Eq) \
V(Loong64I32x4Ne) \
V(Loong64I32x4Shl) \
V(Loong64I32x4ShrS) \
V(Loong64I32x4ShrU) \
V(Loong64I32x4MaxU) \
V(Loong64I32x4MinU) \
V(Loong64F64x2Sqrt) \
V(Loong64F64x2Add) \
V(Loong64F64x2Sub) \
V(Loong64F64x2Mul) \
V(Loong64F64x2Div) \
V(Loong64F64x2Min) \
V(Loong64F64x2Max) \
V(Loong64F64x2Eq) \
V(Loong64F64x2Ne) \
V(Loong64F64x2Lt) \
V(Loong64F64x2Le) \
V(Loong64F64x2Splat) \
V(Loong64F64x2ExtractLane) \
V(Loong64F64x2ReplaceLane) \
V(Loong64F64x2Pmin) \
V(Loong64F64x2Pmax) \
V(Loong64F64x2Ceil) \
V(Loong64F64x2Floor) \
V(Loong64F64x2Trunc) \
V(Loong64F64x2NearestInt) \
V(Loong64F64x2ConvertLowI32x4S) \
V(Loong64F64x2ConvertLowI32x4U) \
V(Loong64F64x2PromoteLowF32x4) \
V(Loong64I64x2Splat) \
V(Loong64I64x2ExtractLane) \
V(Loong64I64x2ReplaceLane) \
V(Loong64I64x2Add) \
V(Loong64I64x2Sub) \
V(Loong64I64x2Mul) \
V(Loong64I64x2Neg) \
V(Loong64I64x2Shl) \
V(Loong64I64x2ShrS) \
V(Loong64I64x2ShrU) \
V(Loong64I64x2BitMask) \
V(Loong64I64x2Eq) \
V(Loong64I64x2Ne) \
V(Loong64I64x2GtS) \
V(Loong64I64x2GeS) \
V(Loong64I64x2Abs) \
V(Loong64I64x2SConvertI32x4Low) \
V(Loong64I64x2SConvertI32x4High) \
V(Loong64I64x2UConvertI32x4Low) \
V(Loong64I64x2UConvertI32x4High) \
V(Loong64ExtMulLow) \
V(Loong64ExtMulHigh) \
V(Loong64ExtAddPairwise) \
V(Loong64F32x4Abs) \
V(Loong64F32x4Neg) \
V(Loong64F32x4Sqrt) \
V(Loong64F32x4RecipApprox) \
V(Loong64F32x4RecipSqrtApprox) \
V(Loong64F32x4Add) \
V(Loong64F32x4Sub) \
V(Loong64F32x4Mul) \
V(Loong64F32x4Div) \
V(Loong64F32x4Max) \
V(Loong64F32x4Min) \
V(Loong64F32x4Eq) \
V(Loong64F32x4Ne) \
V(Loong64F32x4Lt) \
V(Loong64F32x4Le) \
V(Loong64F32x4Pmin) \
V(Loong64F32x4Pmax) \
V(Loong64F32x4Ceil) \
V(Loong64F32x4Floor) \
V(Loong64F32x4Trunc) \
V(Loong64F32x4NearestInt) \
V(Loong64F32x4DemoteF64x2Zero) \
V(Loong64I32x4SConvertF32x4) \
V(Loong64I32x4UConvertF32x4) \
V(Loong64I32x4Neg) \
V(Loong64I32x4GtS) \
V(Loong64I32x4GeS) \
V(Loong64I32x4GtU) \
V(Loong64I32x4GeU) \
V(Loong64I32x4Abs) \
V(Loong64I32x4BitMask) \
V(Loong64I32x4DotI16x8S) \
V(Loong64I32x4TruncSatF64x2SZero) \
V(Loong64I32x4TruncSatF64x2UZero) \
V(Loong64I16x8Splat) \
V(Loong64I16x8ExtractLaneU) \
V(Loong64I16x8ExtractLaneS) \
V(Loong64I16x8ReplaceLane) \
V(Loong64I16x8Neg) \
V(Loong64I16x8Shl) \
V(Loong64I16x8ShrS) \
V(Loong64I16x8ShrU) \
V(Loong64I16x8Add) \
V(Loong64I16x8AddSatS) \
V(Loong64I16x8Sub) \
V(Loong64I16x8SubSatS) \
V(Loong64I16x8Mul) \
V(Loong64I16x8MaxS) \
V(Loong64I16x8MinS) \
V(Loong64I16x8Eq) \
V(Loong64I16x8Ne) \
V(Loong64I16x8GtS) \
V(Loong64I16x8GeS) \
V(Loong64I16x8AddSatU) \
V(Loong64I16x8SubSatU) \
V(Loong64I16x8MaxU) \
V(Loong64I16x8MinU) \
V(Loong64I16x8GtU) \
V(Loong64I16x8GeU) \
V(Loong64I16x8RoundingAverageU) \
V(Loong64I16x8Abs) \
V(Loong64I16x8BitMask) \
V(Loong64I16x8Q15MulRSatS) \
V(Loong64I8x16Splat) \
V(Loong64I8x16ExtractLaneU) \
V(Loong64I8x16ExtractLaneS) \
V(Loong64I8x16ReplaceLane) \
V(Loong64I8x16Neg) \
V(Loong64I8x16Shl) \
V(Loong64I8x16ShrS) \
V(Loong64I8x16Add) \
V(Loong64I8x16AddSatS) \
V(Loong64I8x16Sub) \
V(Loong64I8x16SubSatS) \
V(Loong64I8x16MaxS) \
V(Loong64I8x16MinS) \
V(Loong64I8x16Eq) \
V(Loong64I8x16Ne) \
V(Loong64I8x16GtS) \
V(Loong64I8x16GeS) \
V(Loong64I8x16ShrU) \
V(Loong64I8x16AddSatU) \
V(Loong64I8x16SubSatU) \
V(Loong64I8x16MaxU) \
V(Loong64I8x16MinU) \
V(Loong64I8x16GtU) \
V(Loong64I8x16GeU) \
V(Loong64I8x16RoundingAverageU) \
V(Loong64I8x16Abs) \
V(Loong64I8x16Popcnt) \
V(Loong64I8x16BitMask) \
V(Loong64S128And) \
V(Loong64S128Or) \
V(Loong64S128Xor) \
V(Loong64S128Not) \
V(Loong64S128Select) \
V(Loong64S128AndNot) \
V(Loong64I64x2AllTrue) \
V(Loong64I32x4AllTrue) \
V(Loong64I16x8AllTrue) \
V(Loong64I8x16AllTrue) \
V(Loong64V128AnyTrue) \
V(Loong64S32x4InterleaveRight) \
V(Loong64S32x4InterleaveLeft) \
V(Loong64S32x4PackEven) \
V(Loong64S32x4PackOdd) \
V(Loong64S32x4InterleaveEven) \
V(Loong64S32x4InterleaveOdd) \
V(Loong64S32x4Shuffle) \
V(Loong64S16x8InterleaveRight) \
V(Loong64S16x8InterleaveLeft) \
V(Loong64S16x8PackEven) \
V(Loong64S16x8PackOdd) \
V(Loong64S16x8InterleaveEven) \
V(Loong64S16x8InterleaveOdd) \
V(Loong64S16x4Reverse) \
V(Loong64S16x2Reverse) \
V(Loong64S8x16InterleaveRight) \
V(Loong64S8x16InterleaveLeft) \
V(Loong64S8x16PackEven) \
V(Loong64S8x16PackOdd) \
V(Loong64S8x16InterleaveEven) \
V(Loong64S8x16InterleaveOdd) \
V(Loong64I8x16Shuffle) \
V(Loong64I8x16Swizzle) \
V(Loong64S8x16Concat) \
V(Loong64S8x8Reverse) \
V(Loong64S8x4Reverse) \
V(Loong64S8x2Reverse) \
V(Loong64S128LoadSplat) \
V(Loong64S128Load8x8S) \
V(Loong64S128Load8x8U) \
V(Loong64S128Load16x4S) \
V(Loong64S128Load16x4U) \
V(Loong64S128Load32x2S) \
V(Loong64S128Load32x2U) \
V(Loong64S128Load32Zero) \
V(Loong64S128Load64Zero) \
V(Loong64LoadLane) \
V(Loong64StoreLane) \
V(Loong64I32x4SConvertI16x8Low) \
V(Loong64I32x4SConvertI16x8High) \
V(Loong64I32x4UConvertI16x8Low) \
V(Loong64I32x4UConvertI16x8High) \
V(Loong64I16x8SConvertI8x16Low) \
V(Loong64I16x8SConvertI8x16High) \
V(Loong64I16x8SConvertI32x4) \
V(Loong64I16x8UConvertI32x4) \
V(Loong64I16x8UConvertI8x16Low) \
V(Loong64I16x8UConvertI8x16High) \
V(Loong64I8x16SConvertI16x8) \
V(Loong64I8x16UConvertI16x8) \
V(Loong64StoreCompressTagged) \
V(Loong64Word64AtomicLoadUint32) \
V(Loong64Word64AtomicLoadUint64) \
V(Loong64Word64AtomicStoreWord64) \
V(Loong64Word64AtomicAddUint64) \
V(Loong64Word64AtomicSubUint64) \
V(Loong64Word64AtomicAndUint64) \
V(Loong64Word64AtomicOrUint64) \
V(Loong64Word64AtomicXorUint64) \
V(Loong64Word64AtomicExchangeUint64) \
V(Loong64Word64AtomicCompareExchangeUint64)
// Addressing modes represent the "shape" of inputs to an instruction.
......
......@@ -11,369 +11,374 @@ namespace compiler {
// MIPS-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
V(MipsAdd) \
V(MipsAddOvf) \
V(MipsSub) \
V(MipsSubOvf) \
V(MipsMul) \
V(MipsMulOvf) \
V(MipsMulHigh) \
V(MipsMulHighU) \
V(MipsDiv) \
V(MipsDivU) \
V(MipsMod) \
V(MipsModU) \
V(MipsAnd) \
V(MipsOr) \
V(MipsNor) \
V(MipsXor) \
V(MipsClz) \
V(MipsCtz) \
V(MipsPopcnt) \
V(MipsLsa) \
V(MipsShl) \
V(MipsShr) \
V(MipsSar) \
V(MipsShlPair) \
V(MipsShrPair) \
V(MipsSarPair) \
V(MipsExt) \
V(MipsIns) \
V(MipsRor) \
V(MipsMov) \
V(MipsTst) \
V(MipsCmp) \
V(MipsCmpS) \
V(MipsAddS) \
V(MipsSubS) \
V(MipsMulS) \
V(MipsDivS) \
V(MipsAbsS) \
V(MipsSqrtS) \
V(MipsMaxS) \
V(MipsMinS) \
V(MipsCmpD) \
V(MipsAddD) \
V(MipsSubD) \
V(MipsMulD) \
V(MipsDivD) \
V(MipsModD) \
V(MipsAbsD) \
V(MipsSqrtD) \
V(MipsMaxD) \
V(MipsMinD) \
V(MipsNegS) \
V(MipsNegD) \
V(MipsAddPair) \
V(MipsSubPair) \
V(MipsMulPair) \
V(MipsMaddS) \
V(MipsMaddD) \
V(MipsMsubS) \
V(MipsMsubD) \
V(MipsFloat32RoundDown) \
V(MipsFloat32RoundTruncate) \
V(MipsFloat32RoundUp) \
V(MipsFloat32RoundTiesEven) \
V(MipsFloat64RoundDown) \
V(MipsFloat64RoundTruncate) \
V(MipsFloat64RoundUp) \
V(MipsFloat64RoundTiesEven) \
V(MipsCvtSD) \
V(MipsCvtDS) \
V(MipsTruncWD) \
V(MipsRoundWD) \
V(MipsFloorWD) \
V(MipsCeilWD) \
V(MipsTruncWS) \
V(MipsRoundWS) \
V(MipsFloorWS) \
V(MipsCeilWS) \
V(MipsTruncUwD) \
V(MipsTruncUwS) \
V(MipsCvtDW) \
V(MipsCvtDUw) \
V(MipsCvtSW) \
V(MipsCvtSUw) \
V(MipsLb) \
V(MipsLbu) \
V(MipsSb) \
V(MipsLh) \
V(MipsUlh) \
V(MipsLhu) \
V(MipsUlhu) \
V(MipsSh) \
V(MipsUsh) \
V(MipsLw) \
V(MipsUlw) \
V(MipsSw) \
V(MipsUsw) \
V(MipsLwc1) \
V(MipsUlwc1) \
V(MipsSwc1) \
V(MipsUswc1) \
V(MipsLdc1) \
V(MipsUldc1) \
V(MipsSdc1) \
V(MipsUsdc1) \
V(MipsFloat64ExtractLowWord32) \
V(MipsFloat64ExtractHighWord32) \
V(MipsFloat64InsertLowWord32) \
V(MipsFloat64InsertHighWord32) \
V(MipsFloat64SilenceNaN) \
V(MipsFloat32Max) \
V(MipsFloat64Max) \
V(MipsFloat32Min) \
V(MipsFloat64Min) \
V(MipsPush) \
V(MipsPeek) \
V(MipsStoreToStackSlot) \
V(MipsByteSwap32) \
V(MipsStackClaim) \
V(MipsSeb) \
V(MipsSeh) \
V(MipsSync) \
V(MipsS128Zero) \
V(MipsI32x4Splat) \
V(MipsI32x4ExtractLane) \
V(MipsI32x4ReplaceLane) \
V(MipsI32x4Add) \
V(MipsI32x4Sub) \
V(MipsF64x2Abs) \
V(MipsF64x2Neg) \
V(MipsF64x2Sqrt) \
V(MipsF64x2Add) \
V(MipsF64x2Sub) \
V(MipsF64x2Mul) \
V(MipsF64x2Div) \
V(MipsF64x2Min) \
V(MipsF64x2Max) \
V(MipsF64x2Eq) \
V(MipsF64x2Ne) \
V(MipsF64x2Lt) \
V(MipsF64x2Le) \
V(MipsF64x2Pmin) \
V(MipsF64x2Pmax) \
V(MipsF64x2Ceil) \
V(MipsF64x2Floor) \
V(MipsF64x2Trunc) \
V(MipsF64x2NearestInt) \
V(MipsF64x2ConvertLowI32x4S) \
V(MipsF64x2ConvertLowI32x4U) \
V(MipsF64x2PromoteLowF32x4) \
V(MipsI64x2Add) \
V(MipsI64x2Sub) \
V(MipsI64x2Mul) \
V(MipsI64x2Neg) \
V(MipsI64x2Shl) \
V(MipsI64x2ShrS) \
V(MipsI64x2ShrU) \
V(MipsI64x2BitMask) \
V(MipsI64x2Eq) \
V(MipsI64x2Ne) \
V(MipsI64x2GtS) \
V(MipsI64x2GeS) \
V(MipsI64x2Abs) \
V(MipsI64x2SConvertI32x4Low) \
V(MipsI64x2SConvertI32x4High) \
V(MipsI64x2UConvertI32x4Low) \
V(MipsI64x2UConvertI32x4High) \
V(MipsI64x2ExtMulLowI32x4S) \
V(MipsI64x2ExtMulHighI32x4S) \
V(MipsI64x2ExtMulLowI32x4U) \
V(MipsI64x2ExtMulHighI32x4U) \
V(MipsF32x4Splat) \
V(MipsF32x4ExtractLane) \
V(MipsF32x4ReplaceLane) \
V(MipsF32x4SConvertI32x4) \
V(MipsF32x4UConvertI32x4) \
V(MipsF32x4DemoteF64x2Zero) \
V(MipsI32x4Mul) \
V(MipsI32x4MaxS) \
V(MipsI32x4MinS) \
V(MipsI32x4Eq) \
V(MipsI32x4Ne) \
V(MipsI32x4Shl) \
V(MipsI32x4ShrS) \
V(MipsI32x4ShrU) \
V(MipsI32x4MaxU) \
V(MipsI32x4MinU) \
V(MipsF64x2Splat) \
V(MipsF64x2ExtractLane) \
V(MipsF64x2ReplaceLane) \
V(MipsF32x4Abs) \
V(MipsF32x4Neg) \
V(MipsF32x4Sqrt) \
V(MipsF32x4RecipApprox) \
V(MipsF32x4RecipSqrtApprox) \
V(MipsF32x4Add) \
V(MipsF32x4Sub) \
V(MipsF32x4Mul) \
V(MipsF32x4Div) \
V(MipsF32x4Max) \
V(MipsF32x4Min) \
V(MipsF32x4Eq) \
V(MipsF32x4Ne) \
V(MipsF32x4Lt) \
V(MipsF32x4Le) \
V(MipsF32x4Pmin) \
V(MipsF32x4Pmax) \
V(MipsF32x4Ceil) \
V(MipsF32x4Floor) \
V(MipsF32x4Trunc) \
V(MipsF32x4NearestInt) \
V(MipsI32x4SConvertF32x4) \
V(MipsI32x4UConvertF32x4) \
V(MipsI32x4Neg) \
V(MipsI32x4GtS) \
V(MipsI32x4GeS) \
V(MipsI32x4GtU) \
V(MipsI32x4GeU) \
V(MipsI32x4Abs) \
V(MipsI32x4BitMask) \
V(MipsI32x4DotI16x8S) \
V(MipsI32x4ExtMulLowI16x8S) \
V(MipsI32x4ExtMulHighI16x8S) \
V(MipsI32x4ExtMulLowI16x8U) \
V(MipsI32x4ExtMulHighI16x8U) \
V(MipsI32x4TruncSatF64x2SZero) \
V(MipsI32x4TruncSatF64x2UZero) \
V(MipsI32x4ExtAddPairwiseI16x8S) \
V(MipsI32x4ExtAddPairwiseI16x8U) \
V(MipsI16x8Splat) \
V(MipsI16x8ExtractLaneU) \
V(MipsI16x8ExtractLaneS) \
V(MipsI16x8ReplaceLane) \
V(MipsI16x8Neg) \
V(MipsI16x8Shl) \
V(MipsI16x8ShrS) \
V(MipsI16x8ShrU) \
V(MipsI16x8Add) \
V(MipsI16x8AddSatS) \
V(MipsI16x8Sub) \
V(MipsI16x8SubSatS) \
V(MipsI16x8Mul) \
V(MipsI16x8MaxS) \
V(MipsI16x8MinS) \
V(MipsI16x8Eq) \
V(MipsI16x8Ne) \
V(MipsI16x8GtS) \
V(MipsI16x8GeS) \
V(MipsI16x8AddSatU) \
V(MipsI16x8SubSatU) \
V(MipsI16x8MaxU) \
V(MipsI16x8MinU) \
V(MipsI16x8GtU) \
V(MipsI16x8GeU) \
V(MipsI16x8RoundingAverageU) \
V(MipsI16x8Abs) \
V(MipsI16x8BitMask) \
V(MipsI16x8Q15MulRSatS) \
V(MipsI16x8ExtMulLowI8x16S) \
V(MipsI16x8ExtMulHighI8x16S) \
V(MipsI16x8ExtMulLowI8x16U) \
V(MipsI16x8ExtMulHighI8x16U) \
V(MipsI16x8ExtAddPairwiseI8x16S) \
V(MipsI16x8ExtAddPairwiseI8x16U) \
V(MipsI8x16Splat) \
V(MipsI8x16ExtractLaneU) \
V(MipsI8x16ExtractLaneS) \
V(MipsI8x16ReplaceLane) \
V(MipsI8x16Neg) \
V(MipsI8x16Shl) \
V(MipsI8x16ShrS) \
V(MipsI8x16Add) \
V(MipsI8x16AddSatS) \
V(MipsI8x16Sub) \
V(MipsI8x16SubSatS) \
V(MipsI8x16MaxS) \
V(MipsI8x16MinS) \
V(MipsI8x16Eq) \
V(MipsI8x16Ne) \
V(MipsI8x16GtS) \
V(MipsI8x16GeS) \
V(MipsI8x16ShrU) \
V(MipsI8x16AddSatU) \
V(MipsI8x16SubSatU) \
V(MipsI8x16MaxU) \
V(MipsI8x16MinU) \
V(MipsI8x16GtU) \
V(MipsI8x16GeU) \
V(MipsI8x16RoundingAverageU) \
V(MipsI8x16Abs) \
V(MipsI8x16Popcnt) \
V(MipsI8x16BitMask) \
V(MipsS128And) \
V(MipsS128Or) \
V(MipsS128Xor) \
V(MipsS128Not) \
V(MipsS128Select) \
V(MipsS128AndNot) \
V(MipsI64x2AllTrue) \
V(MipsI32x4AllTrue) \
V(MipsI16x8AllTrue) \
V(MipsI8x16AllTrue) \
V(MipsV128AnyTrue) \
V(MipsS32x4InterleaveRight) \
V(MipsS32x4InterleaveLeft) \
V(MipsS32x4PackEven) \
V(MipsS32x4PackOdd) \
V(MipsS32x4InterleaveEven) \
V(MipsS32x4InterleaveOdd) \
V(MipsS32x4Shuffle) \
V(MipsS16x8InterleaveRight) \
V(MipsS16x8InterleaveLeft) \
V(MipsS16x8PackEven) \
V(MipsS16x8PackOdd) \
V(MipsS16x8InterleaveEven) \
V(MipsS16x8InterleaveOdd) \
V(MipsS16x4Reverse) \
V(MipsS16x2Reverse) \
V(MipsS8x16InterleaveRight) \
V(MipsS8x16InterleaveLeft) \
V(MipsS8x16PackEven) \
V(MipsS8x16PackOdd) \
V(MipsS8x16InterleaveEven) \
V(MipsS8x16InterleaveOdd) \
V(MipsI8x16Shuffle) \
V(MipsI8x16Swizzle) \
V(MipsS8x16Concat) \
V(MipsS8x8Reverse) \
V(MipsS8x4Reverse) \
V(MipsS8x2Reverse) \
V(MipsS128Load8Splat) \
V(MipsS128Load16Splat) \
V(MipsS128Load32Splat) \
V(MipsS128Load64Splat) \
V(MipsS128Load8x8S) \
V(MipsS128Load8x8U) \
V(MipsS128Load16x4S) \
V(MipsS128Load16x4U) \
V(MipsS128Load32x2S) \
V(MipsS128Load32x2U) \
V(MipsMsaLd) \
V(MipsMsaSt) \
V(MipsI32x4SConvertI16x8Low) \
V(MipsI32x4SConvertI16x8High) \
V(MipsI32x4UConvertI16x8Low) \
V(MipsI32x4UConvertI16x8High) \
V(MipsI16x8SConvertI8x16Low) \
V(MipsI16x8SConvertI8x16High) \
V(MipsI16x8SConvertI32x4) \
V(MipsI16x8UConvertI32x4) \
V(MipsI16x8UConvertI8x16Low) \
V(MipsI16x8UConvertI8x16High) \
V(MipsI8x16SConvertI16x8) \
V(MipsI8x16UConvertI16x8) \
V(MipsWord32AtomicPairLoad) \
V(MipsWord32AtomicPairStore) \
V(MipsWord32AtomicPairAdd) \
V(MipsWord32AtomicPairSub) \
V(MipsWord32AtomicPairAnd) \
V(MipsWord32AtomicPairOr) \
V(MipsWord32AtomicPairXor) \
V(MipsWord32AtomicPairExchange) \
// Opcodes that support a MemoryAccessMode.
#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
#define TARGET_ARCH_OPCODE_LIST(V) \
TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
V(MipsAdd) \
V(MipsAddOvf) \
V(MipsSub) \
V(MipsSubOvf) \
V(MipsMul) \
V(MipsMulOvf) \
V(MipsMulHigh) \
V(MipsMulHighU) \
V(MipsDiv) \
V(MipsDivU) \
V(MipsMod) \
V(MipsModU) \
V(MipsAnd) \
V(MipsOr) \
V(MipsNor) \
V(MipsXor) \
V(MipsClz) \
V(MipsCtz) \
V(MipsPopcnt) \
V(MipsLsa) \
V(MipsShl) \
V(MipsShr) \
V(MipsSar) \
V(MipsShlPair) \
V(MipsShrPair) \
V(MipsSarPair) \
V(MipsExt) \
V(MipsIns) \
V(MipsRor) \
V(MipsMov) \
V(MipsTst) \
V(MipsCmp) \
V(MipsCmpS) \
V(MipsAddS) \
V(MipsSubS) \
V(MipsMulS) \
V(MipsDivS) \
V(MipsAbsS) \
V(MipsSqrtS) \
V(MipsMaxS) \
V(MipsMinS) \
V(MipsCmpD) \
V(MipsAddD) \
V(MipsSubD) \
V(MipsMulD) \
V(MipsDivD) \
V(MipsModD) \
V(MipsAbsD) \
V(MipsSqrtD) \
V(MipsMaxD) \
V(MipsMinD) \
V(MipsNegS) \
V(MipsNegD) \
V(MipsAddPair) \
V(MipsSubPair) \
V(MipsMulPair) \
V(MipsMaddS) \
V(MipsMaddD) \
V(MipsMsubS) \
V(MipsMsubD) \
V(MipsFloat32RoundDown) \
V(MipsFloat32RoundTruncate) \
V(MipsFloat32RoundUp) \
V(MipsFloat32RoundTiesEven) \
V(MipsFloat64RoundDown) \
V(MipsFloat64RoundTruncate) \
V(MipsFloat64RoundUp) \
V(MipsFloat64RoundTiesEven) \
V(MipsCvtSD) \
V(MipsCvtDS) \
V(MipsTruncWD) \
V(MipsRoundWD) \
V(MipsFloorWD) \
V(MipsCeilWD) \
V(MipsTruncWS) \
V(MipsRoundWS) \
V(MipsFloorWS) \
V(MipsCeilWS) \
V(MipsTruncUwD) \
V(MipsTruncUwS) \
V(MipsCvtDW) \
V(MipsCvtDUw) \
V(MipsCvtSW) \
V(MipsCvtSUw) \
V(MipsLb) \
V(MipsLbu) \
V(MipsSb) \
V(MipsLh) \
V(MipsUlh) \
V(MipsLhu) \
V(MipsUlhu) \
V(MipsSh) \
V(MipsUsh) \
V(MipsLw) \
V(MipsUlw) \
V(MipsSw) \
V(MipsUsw) \
V(MipsLwc1) \
V(MipsUlwc1) \
V(MipsSwc1) \
V(MipsUswc1) \
V(MipsLdc1) \
V(MipsUldc1) \
V(MipsSdc1) \
V(MipsUsdc1) \
V(MipsFloat64ExtractLowWord32) \
V(MipsFloat64ExtractHighWord32) \
V(MipsFloat64InsertLowWord32) \
V(MipsFloat64InsertHighWord32) \
V(MipsFloat64SilenceNaN) \
V(MipsFloat32Max) \
V(MipsFloat64Max) \
V(MipsFloat32Min) \
V(MipsFloat64Min) \
V(MipsPush) \
V(MipsPeek) \
V(MipsStoreToStackSlot) \
V(MipsByteSwap32) \
V(MipsStackClaim) \
V(MipsSeb) \
V(MipsSeh) \
V(MipsSync) \
V(MipsS128Zero) \
V(MipsI32x4Splat) \
V(MipsI32x4ExtractLane) \
V(MipsI32x4ReplaceLane) \
V(MipsI32x4Add) \
V(MipsI32x4Sub) \
V(MipsF64x2Abs) \
V(MipsF64x2Neg) \
V(MipsF64x2Sqrt) \
V(MipsF64x2Add) \
V(MipsF64x2Sub) \
V(MipsF64x2Mul) \
V(MipsF64x2Div) \
V(MipsF64x2Min) \
V(MipsF64x2Max) \
V(MipsF64x2Eq) \
V(MipsF64x2Ne) \
V(MipsF64x2Lt) \
V(MipsF64x2Le) \
V(MipsF64x2Pmin) \
V(MipsF64x2Pmax) \
V(MipsF64x2Ceil) \
V(MipsF64x2Floor) \
V(MipsF64x2Trunc) \
V(MipsF64x2NearestInt) \
V(MipsF64x2ConvertLowI32x4S) \
V(MipsF64x2ConvertLowI32x4U) \
V(MipsF64x2PromoteLowF32x4) \
V(MipsI64x2Add) \
V(MipsI64x2Sub) \
V(MipsI64x2Mul) \
V(MipsI64x2Neg) \
V(MipsI64x2Shl) \
V(MipsI64x2ShrS) \
V(MipsI64x2ShrU) \
V(MipsI64x2BitMask) \
V(MipsI64x2Eq) \
V(MipsI64x2Ne) \
V(MipsI64x2GtS) \
V(MipsI64x2GeS) \
V(MipsI64x2Abs) \
V(MipsI64x2SConvertI32x4Low) \
V(MipsI64x2SConvertI32x4High) \
V(MipsI64x2UConvertI32x4Low) \
V(MipsI64x2UConvertI32x4High) \
V(MipsI64x2ExtMulLowI32x4S) \
V(MipsI64x2ExtMulHighI32x4S) \
V(MipsI64x2ExtMulLowI32x4U) \
V(MipsI64x2ExtMulHighI32x4U) \
V(MipsF32x4Splat) \
V(MipsF32x4ExtractLane) \
V(MipsF32x4ReplaceLane) \
V(MipsF32x4SConvertI32x4) \
V(MipsF32x4UConvertI32x4) \
V(MipsF32x4DemoteF64x2Zero) \
V(MipsI32x4Mul) \
V(MipsI32x4MaxS) \
V(MipsI32x4MinS) \
V(MipsI32x4Eq) \
V(MipsI32x4Ne) \
V(MipsI32x4Shl) \
V(MipsI32x4ShrS) \
V(MipsI32x4ShrU) \
V(MipsI32x4MaxU) \
V(MipsI32x4MinU) \
V(MipsF64x2Splat) \
V(MipsF64x2ExtractLane) \
V(MipsF64x2ReplaceLane) \
V(MipsF32x4Abs) \
V(MipsF32x4Neg) \
V(MipsF32x4Sqrt) \
V(MipsF32x4RecipApprox) \
V(MipsF32x4RecipSqrtApprox) \
V(MipsF32x4Add) \
V(MipsF32x4Sub) \
V(MipsF32x4Mul) \
V(MipsF32x4Div) \
V(MipsF32x4Max) \
V(MipsF32x4Min) \
V(MipsF32x4Eq) \
V(MipsF32x4Ne) \
V(MipsF32x4Lt) \
V(MipsF32x4Le) \
V(MipsF32x4Pmin) \
V(MipsF32x4Pmax) \
V(MipsF32x4Ceil) \
V(MipsF32x4Floor) \
V(MipsF32x4Trunc) \
V(MipsF32x4NearestInt) \
V(MipsI32x4SConvertF32x4) \
V(MipsI32x4UConvertF32x4) \
V(MipsI32x4Neg) \
V(MipsI32x4GtS) \
V(MipsI32x4GeS) \
V(MipsI32x4GtU) \
V(MipsI32x4GeU) \
V(MipsI32x4Abs) \
V(MipsI32x4BitMask) \
V(MipsI32x4DotI16x8S) \
V(MipsI32x4ExtMulLowI16x8S) \
V(MipsI32x4ExtMulHighI16x8S) \
V(MipsI32x4ExtMulLowI16x8U) \
V(MipsI32x4ExtMulHighI16x8U) \
V(MipsI32x4TruncSatF64x2SZero) \
V(MipsI32x4TruncSatF64x2UZero) \
V(MipsI32x4ExtAddPairwiseI16x8S) \
V(MipsI32x4ExtAddPairwiseI16x8U) \
V(MipsI16x8Splat) \
V(MipsI16x8ExtractLaneU) \
V(MipsI16x8ExtractLaneS) \
V(MipsI16x8ReplaceLane) \
V(MipsI16x8Neg) \
V(MipsI16x8Shl) \
V(MipsI16x8ShrS) \
V(MipsI16x8ShrU) \
V(MipsI16x8Add) \
V(MipsI16x8AddSatS) \
V(MipsI16x8Sub) \
V(MipsI16x8SubSatS) \
V(MipsI16x8Mul) \
V(MipsI16x8MaxS) \
V(MipsI16x8MinS) \
V(MipsI16x8Eq) \
V(MipsI16x8Ne) \
V(MipsI16x8GtS) \
V(MipsI16x8GeS) \
V(MipsI16x8AddSatU) \
V(MipsI16x8SubSatU) \
V(MipsI16x8MaxU) \
V(MipsI16x8MinU) \
V(MipsI16x8GtU) \
V(MipsI16x8GeU) \
V(MipsI16x8RoundingAverageU) \
V(MipsI16x8Abs) \
V(MipsI16x8BitMask) \
V(MipsI16x8Q15MulRSatS) \
V(MipsI16x8ExtMulLowI8x16S) \
V(MipsI16x8ExtMulHighI8x16S) \
V(MipsI16x8ExtMulLowI8x16U) \
V(MipsI16x8ExtMulHighI8x16U) \
V(MipsI16x8ExtAddPairwiseI8x16S) \
V(MipsI16x8ExtAddPairwiseI8x16U) \
V(MipsI8x16Splat) \
V(MipsI8x16ExtractLaneU) \
V(MipsI8x16ExtractLaneS) \
V(MipsI8x16ReplaceLane) \
V(MipsI8x16Neg) \
V(MipsI8x16Shl) \
V(MipsI8x16ShrS) \
V(MipsI8x16Add) \
V(MipsI8x16AddSatS) \
V(MipsI8x16Sub) \
V(MipsI8x16SubSatS) \
V(MipsI8x16MaxS) \
V(MipsI8x16MinS) \
V(MipsI8x16Eq) \
V(MipsI8x16Ne) \
V(MipsI8x16GtS) \
V(MipsI8x16GeS) \
V(MipsI8x16ShrU) \
V(MipsI8x16AddSatU) \
V(MipsI8x16SubSatU) \
V(MipsI8x16MaxU) \
V(MipsI8x16MinU) \
V(MipsI8x16GtU) \
V(MipsI8x16GeU) \
V(MipsI8x16RoundingAverageU) \
V(MipsI8x16Abs) \
V(MipsI8x16Popcnt) \
V(MipsI8x16BitMask) \
V(MipsS128And) \
V(MipsS128Or) \
V(MipsS128Xor) \
V(MipsS128Not) \
V(MipsS128Select) \
V(MipsS128AndNot) \
V(MipsI64x2AllTrue) \
V(MipsI32x4AllTrue) \
V(MipsI16x8AllTrue) \
V(MipsI8x16AllTrue) \
V(MipsV128AnyTrue) \
V(MipsS32x4InterleaveRight) \
V(MipsS32x4InterleaveLeft) \
V(MipsS32x4PackEven) \
V(MipsS32x4PackOdd) \
V(MipsS32x4InterleaveEven) \
V(MipsS32x4InterleaveOdd) \
V(MipsS32x4Shuffle) \
V(MipsS16x8InterleaveRight) \
V(MipsS16x8InterleaveLeft) \
V(MipsS16x8PackEven) \
V(MipsS16x8PackOdd) \
V(MipsS16x8InterleaveEven) \
V(MipsS16x8InterleaveOdd) \
V(MipsS16x4Reverse) \
V(MipsS16x2Reverse) \
V(MipsS8x16InterleaveRight) \
V(MipsS8x16InterleaveLeft) \
V(MipsS8x16PackEven) \
V(MipsS8x16PackOdd) \
V(MipsS8x16InterleaveEven) \
V(MipsS8x16InterleaveOdd) \
V(MipsI8x16Shuffle) \
V(MipsI8x16Swizzle) \
V(MipsS8x16Concat) \
V(MipsS8x8Reverse) \
V(MipsS8x4Reverse) \
V(MipsS8x2Reverse) \
V(MipsS128Load8Splat) \
V(MipsS128Load16Splat) \
V(MipsS128Load32Splat) \
V(MipsS128Load64Splat) \
V(MipsS128Load8x8S) \
V(MipsS128Load8x8U) \
V(MipsS128Load16x4S) \
V(MipsS128Load16x4U) \
V(MipsS128Load32x2S) \
V(MipsS128Load32x2U) \
V(MipsMsaLd) \
V(MipsMsaSt) \
V(MipsI32x4SConvertI16x8Low) \
V(MipsI32x4SConvertI16x8High) \
V(MipsI32x4UConvertI16x8Low) \
V(MipsI32x4UConvertI16x8High) \
V(MipsI16x8SConvertI8x16Low) \
V(MipsI16x8SConvertI8x16High) \
V(MipsI16x8SConvertI32x4) \
V(MipsI16x8UConvertI32x4) \
V(MipsI16x8UConvertI8x16Low) \
V(MipsI16x8UConvertI8x16High) \
V(MipsI8x16SConvertI16x8) \
V(MipsI8x16UConvertI16x8) \
V(MipsWord32AtomicPairLoad) \
V(MipsWord32AtomicPairStore) \
V(MipsWord32AtomicPairAdd) \
V(MipsWord32AtomicPairSub) \
V(MipsWord32AtomicPairAnd) \
V(MipsWord32AtomicPairOr) \
V(MipsWord32AtomicPairXor) \
V(MipsWord32AtomicPairExchange) \
V(MipsWord32AtomicPairCompareExchange)
// Addressing modes represent the "shape" of inputs to an instruction.
......
......@@ -11,393 +11,398 @@ namespace compiler {
// MIPS64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
#define TARGET_ARCH_OPCODE_LIST(V) \
V(Mips64Add) \
V(Mips64Dadd) \
V(Mips64DaddOvf) \
V(Mips64Sub) \
V(Mips64Dsub) \
V(Mips64DsubOvf) \
V(Mips64Mul) \
V(Mips64MulOvf) \
V(Mips64MulHigh) \
V(Mips64DMulHigh) \
V(Mips64MulHighU) \
V(Mips64Dmul) \
V(Mips64Div) \
V(Mips64Ddiv) \
V(Mips64DivU) \
V(Mips64DdivU) \
V(Mips64Mod) \
V(Mips64Dmod) \
V(Mips64ModU) \
V(Mips64DmodU) \
V(Mips64And) \
V(Mips64And32) \
V(Mips64Or) \
V(Mips64Or32) \
V(Mips64Nor) \
V(Mips64Nor32) \
V(Mips64Xor) \
V(Mips64Xor32) \
V(Mips64Clz) \
V(Mips64Lsa) \
V(Mips64Dlsa) \
V(Mips64Shl) \
V(Mips64Shr) \
V(Mips64Sar) \
V(Mips64Ext) \
V(Mips64Ins) \
V(Mips64Dext) \
V(Mips64Dins) \
V(Mips64Dclz) \
V(Mips64Ctz) \
V(Mips64Dctz) \
V(Mips64Popcnt) \
V(Mips64Dpopcnt) \
V(Mips64Dshl) \
V(Mips64Dshr) \
V(Mips64Dsar) \
V(Mips64Ror) \
V(Mips64Dror) \
V(Mips64Mov) \
V(Mips64Tst) \
V(Mips64Cmp) \
V(Mips64CmpS) \
V(Mips64AddS) \
V(Mips64SubS) \
V(Mips64MulS) \
V(Mips64DivS) \
V(Mips64AbsS) \
V(Mips64NegS) \
V(Mips64SqrtS) \
V(Mips64MaxS) \
V(Mips64MinS) \
V(Mips64CmpD) \
V(Mips64AddD) \
V(Mips64SubD) \
V(Mips64MulD) \
V(Mips64DivD) \
V(Mips64ModD) \
V(Mips64AbsD) \
V(Mips64NegD) \
V(Mips64SqrtD) \
V(Mips64MaxD) \
V(Mips64MinD) \
V(Mips64Float64RoundDown) \
V(Mips64Float64RoundTruncate) \
V(Mips64Float64RoundUp) \
V(Mips64Float64RoundTiesEven) \
V(Mips64Float32RoundDown) \
V(Mips64Float32RoundTruncate) \
V(Mips64Float32RoundUp) \
V(Mips64Float32RoundTiesEven) \
V(Mips64CvtSD) \
V(Mips64CvtDS) \
V(Mips64TruncWD) \
V(Mips64RoundWD) \
V(Mips64FloorWD) \
V(Mips64CeilWD) \
V(Mips64TruncWS) \
V(Mips64RoundWS) \
V(Mips64FloorWS) \
V(Mips64CeilWS) \
V(Mips64TruncLS) \
V(Mips64TruncLD) \
V(Mips64TruncUwD) \
V(Mips64TruncUwS) \
V(Mips64TruncUlS) \
V(Mips64TruncUlD) \
V(Mips64CvtDW) \
V(Mips64CvtSL) \
V(Mips64CvtSW) \
V(Mips64CvtSUw) \
V(Mips64CvtSUl) \
V(Mips64CvtDL) \
V(Mips64CvtDUw) \
V(Mips64CvtDUl) \
V(Mips64Lb) \
V(Mips64Lbu) \
V(Mips64Sb) \
V(Mips64Lh) \
V(Mips64Ulh) \
V(Mips64Lhu) \
V(Mips64Ulhu) \
V(Mips64Sh) \
V(Mips64Ush) \
V(Mips64Ld) \
V(Mips64Uld) \
V(Mips64Lw) \
V(Mips64Ulw) \
V(Mips64Lwu) \
V(Mips64Ulwu) \
V(Mips64Sw) \
V(Mips64Usw) \
V(Mips64Sd) \
V(Mips64Usd) \
V(Mips64Lwc1) \
V(Mips64Ulwc1) \
V(Mips64Swc1) \
V(Mips64Uswc1) \
V(Mips64Ldc1) \
V(Mips64Uldc1) \
V(Mips64Sdc1) \
V(Mips64Usdc1) \
V(Mips64BitcastDL) \
V(Mips64BitcastLD) \
V(Mips64Float64ExtractLowWord32) \
V(Mips64Float64ExtractHighWord32) \
V(Mips64Float64InsertLowWord32) \
V(Mips64Float64InsertHighWord32) \
V(Mips64Float32Max) \
V(Mips64Float64Max) \
V(Mips64Float32Min) \
V(Mips64Float64Min) \
V(Mips64Float64SilenceNaN) \
V(Mips64Push) \
V(Mips64Peek) \
V(Mips64StoreToStackSlot) \
V(Mips64ByteSwap64) \
V(Mips64ByteSwap32) \
V(Mips64StackClaim) \
V(Mips64Seb) \
V(Mips64Seh) \
V(Mips64Sync) \
V(Mips64AssertEqual) \
V(Mips64S128Const) \
V(Mips64S128Zero) \
V(Mips64S128AllOnes) \
V(Mips64I32x4Splat) \
V(Mips64I32x4ExtractLane) \
V(Mips64I32x4ReplaceLane) \
V(Mips64I32x4Add) \
V(Mips64I32x4Sub) \
V(Mips64F64x2Abs) \
V(Mips64F64x2Neg) \
V(Mips64F32x4Splat) \
V(Mips64F32x4ExtractLane) \
V(Mips64F32x4ReplaceLane) \
V(Mips64F32x4SConvertI32x4) \
V(Mips64F32x4UConvertI32x4) \
V(Mips64I32x4Mul) \
V(Mips64I32x4MaxS) \
V(Mips64I32x4MinS) \
V(Mips64I32x4Eq) \
V(Mips64I32x4Ne) \
V(Mips64I32x4Shl) \
V(Mips64I32x4ShrS) \
V(Mips64I32x4ShrU) \
V(Mips64I32x4MaxU) \
V(Mips64I32x4MinU) \
V(Mips64F64x2Sqrt) \
V(Mips64F64x2Add) \
V(Mips64F64x2Sub) \
V(Mips64F64x2Mul) \
V(Mips64F64x2Div) \
V(Mips64F64x2Min) \
V(Mips64F64x2Max) \
V(Mips64F64x2Eq) \
V(Mips64F64x2Ne) \
V(Mips64F64x2Lt) \
V(Mips64F64x2Le) \
V(Mips64F64x2Splat) \
V(Mips64F64x2ExtractLane) \
V(Mips64F64x2ReplaceLane) \
V(Mips64F64x2Pmin) \
V(Mips64F64x2Pmax) \
V(Mips64F64x2Ceil) \
V(Mips64F64x2Floor) \
V(Mips64F64x2Trunc) \
V(Mips64F64x2NearestInt) \
V(Mips64F64x2ConvertLowI32x4S) \
V(Mips64F64x2ConvertLowI32x4U) \
V(Mips64F64x2PromoteLowF32x4) \
V(Mips64I64x2Splat) \
V(Mips64I64x2ExtractLane) \
V(Mips64I64x2ReplaceLane) \
V(Mips64I64x2Add) \
V(Mips64I64x2Sub) \
V(Mips64I64x2Mul) \
V(Mips64I64x2Neg) \
V(Mips64I64x2Shl) \
V(Mips64I64x2ShrS) \
V(Mips64I64x2ShrU) \
V(Mips64I64x2BitMask) \
V(Mips64I64x2Eq) \
V(Mips64I64x2Ne) \
V(Mips64I64x2GtS) \
V(Mips64I64x2GeS) \
V(Mips64I64x2Abs) \
V(Mips64I64x2SConvertI32x4Low) \
V(Mips64I64x2SConvertI32x4High) \
V(Mips64I64x2UConvertI32x4Low) \
V(Mips64I64x2UConvertI32x4High) \
V(Mips64ExtMulLow) \
V(Mips64ExtMulHigh) \
V(Mips64ExtAddPairwise) \
V(Mips64F32x4Abs) \
V(Mips64F32x4Neg) \
V(Mips64F32x4Sqrt) \
V(Mips64F32x4RecipApprox) \
V(Mips64F32x4RecipSqrtApprox) \
V(Mips64F32x4Add) \
V(Mips64F32x4Sub) \
V(Mips64F32x4Mul) \
V(Mips64F32x4Div) \
V(Mips64F32x4Max) \
V(Mips64F32x4Min) \
V(Mips64F32x4Eq) \
V(Mips64F32x4Ne) \
V(Mips64F32x4Lt) \
V(Mips64F32x4Le) \
V(Mips64F32x4Pmin) \
V(Mips64F32x4Pmax) \
V(Mips64F32x4Ceil) \
V(Mips64F32x4Floor) \
V(Mips64F32x4Trunc) \
V(Mips64F32x4NearestInt) \
V(Mips64F32x4DemoteF64x2Zero) \
V(Mips64I32x4SConvertF32x4) \
V(Mips64I32x4UConvertF32x4) \
V(Mips64I32x4Neg) \
V(Mips64I32x4GtS) \
V(Mips64I32x4GeS) \
V(Mips64I32x4GtU) \
V(Mips64I32x4GeU) \
V(Mips64I32x4Abs) \
V(Mips64I32x4BitMask) \
V(Mips64I32x4DotI16x8S) \
V(Mips64I32x4TruncSatF64x2SZero) \
V(Mips64I32x4TruncSatF64x2UZero) \
V(Mips64I16x8Splat) \
V(Mips64I16x8ExtractLaneU) \
V(Mips64I16x8ExtractLaneS) \
V(Mips64I16x8ReplaceLane) \
V(Mips64I16x8Neg) \
V(Mips64I16x8Shl) \
V(Mips64I16x8ShrS) \
V(Mips64I16x8ShrU) \
V(Mips64I16x8Add) \
V(Mips64I16x8AddSatS) \
V(Mips64I16x8Sub) \
V(Mips64I16x8SubSatS) \
V(Mips64I16x8Mul) \
V(Mips64I16x8MaxS) \
V(Mips64I16x8MinS) \
V(Mips64I16x8Eq) \
V(Mips64I16x8Ne) \
V(Mips64I16x8GtS) \
V(Mips64I16x8GeS) \
V(Mips64I16x8AddSatU) \
V(Mips64I16x8SubSatU) \
V(Mips64I16x8MaxU) \
V(Mips64I16x8MinU) \
V(Mips64I16x8GtU) \
V(Mips64I16x8GeU) \
V(Mips64I16x8RoundingAverageU) \
V(Mips64I16x8Abs) \
V(Mips64I16x8BitMask) \
V(Mips64I16x8Q15MulRSatS) \
V(Mips64I8x16Splat) \
V(Mips64I8x16ExtractLaneU) \
V(Mips64I8x16ExtractLaneS) \
V(Mips64I8x16ReplaceLane) \
V(Mips64I8x16Neg) \
V(Mips64I8x16Shl) \
V(Mips64I8x16ShrS) \
V(Mips64I8x16Add) \
V(Mips64I8x16AddSatS) \
V(Mips64I8x16Sub) \
V(Mips64I8x16SubSatS) \
V(Mips64I8x16MaxS) \
V(Mips64I8x16MinS) \
V(Mips64I8x16Eq) \
V(Mips64I8x16Ne) \
V(Mips64I8x16GtS) \
V(Mips64I8x16GeS) \
V(Mips64I8x16ShrU) \
V(Mips64I8x16AddSatU) \
V(Mips64I8x16SubSatU) \
V(Mips64I8x16MaxU) \
V(Mips64I8x16MinU) \
V(Mips64I8x16GtU) \
V(Mips64I8x16GeU) \
V(Mips64I8x16RoundingAverageU) \
V(Mips64I8x16Abs) \
V(Mips64I8x16Popcnt) \
V(Mips64I8x16BitMask) \
V(Mips64S128And) \
V(Mips64S128Or) \
V(Mips64S128Xor) \
V(Mips64S128Not) \
V(Mips64S128Select) \
V(Mips64S128AndNot) \
V(Mips64I64x2AllTrue) \
V(Mips64I32x4AllTrue) \
V(Mips64I16x8AllTrue) \
V(Mips64I8x16AllTrue) \
V(Mips64V128AnyTrue) \
V(Mips64S32x4InterleaveRight) \
V(Mips64S32x4InterleaveLeft) \
V(Mips64S32x4PackEven) \
V(Mips64S32x4PackOdd) \
V(Mips64S32x4InterleaveEven) \
V(Mips64S32x4InterleaveOdd) \
V(Mips64S32x4Shuffle) \
V(Mips64S16x8InterleaveRight) \
V(Mips64S16x8InterleaveLeft) \
V(Mips64S16x8PackEven) \
V(Mips64S16x8PackOdd) \
V(Mips64S16x8InterleaveEven) \
V(Mips64S16x8InterleaveOdd) \
V(Mips64S16x4Reverse) \
V(Mips64S16x2Reverse) \
V(Mips64S8x16InterleaveRight) \
V(Mips64S8x16InterleaveLeft) \
V(Mips64S8x16PackEven) \
V(Mips64S8x16PackOdd) \
V(Mips64S8x16InterleaveEven) \
V(Mips64S8x16InterleaveOdd) \
V(Mips64I8x16Shuffle) \
V(Mips64I8x16Swizzle) \
V(Mips64S8x16Concat) \
V(Mips64S8x8Reverse) \
V(Mips64S8x4Reverse) \
V(Mips64S8x2Reverse) \
V(Mips64S128LoadSplat) \
V(Mips64S128Load8x8S) \
V(Mips64S128Load8x8U) \
V(Mips64S128Load16x4S) \
V(Mips64S128Load16x4U) \
V(Mips64S128Load32x2S) \
V(Mips64S128Load32x2U) \
V(Mips64S128Load32Zero) \
V(Mips64S128Load64Zero) \
V(Mips64S128LoadLane) \
V(Mips64S128StoreLane) \
V(Mips64MsaLd) \
V(Mips64MsaSt) \
V(Mips64I32x4SConvertI16x8Low) \
V(Mips64I32x4SConvertI16x8High) \
V(Mips64I32x4UConvertI16x8Low) \
V(Mips64I32x4UConvertI16x8High) \
V(Mips64I16x8SConvertI8x16Low) \
V(Mips64I16x8SConvertI8x16High) \
V(Mips64I16x8SConvertI32x4) \
V(Mips64I16x8UConvertI32x4) \
V(Mips64I16x8UConvertI8x16Low) \
V(Mips64I16x8UConvertI8x16High) \
V(Mips64I8x16SConvertI16x8) \
V(Mips64I8x16UConvertI16x8) \
V(Mips64StoreCompressTagged) \
V(Mips64Word64AtomicLoadUint64) \
V(Mips64Word64AtomicStoreWord64) \
V(Mips64Word64AtomicAddUint64) \
V(Mips64Word64AtomicSubUint64) \
V(Mips64Word64AtomicAndUint64) \
V(Mips64Word64AtomicOrUint64) \
V(Mips64Word64AtomicXorUint64) \
V(Mips64Word64AtomicExchangeUint64) \
// Opcodes that support a MemoryAccessMode, i.e. may carry a protected
// (trap-handler) memory access. Currently none on MIPS64.
#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.

// MIPS64-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction. Consumed as an X-macro:
// each V(name) expands to one entry of the target opcode enum.
// The memory-access-mode sublist is expanded first so that any opcodes it
// gains occupy a contiguous range at the start of the target opcode space.
// NOTE(review): this first-position expansion mirrors the other V8 backends
// ported in this change; confirm downstream code relies on it before
// reordering.
#define TARGET_ARCH_OPCODE_LIST(V)                   \
  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(Mips64Add) /* Scalar integer arithmetic */       \
  V(Mips64Dadd)                                      \
  V(Mips64DaddOvf)                                   \
  V(Mips64Sub)                                       \
  V(Mips64Dsub)                                      \
  V(Mips64DsubOvf)                                   \
  V(Mips64Mul)                                       \
  V(Mips64MulOvf)                                    \
  V(Mips64MulHigh)                                   \
  V(Mips64DMulHigh)                                  \
  V(Mips64MulHighU)                                  \
  V(Mips64Dmul)                                      \
  V(Mips64Div)                                       \
  V(Mips64Ddiv)                                      \
  V(Mips64DivU)                                      \
  V(Mips64DdivU)                                     \
  V(Mips64Mod)                                       \
  V(Mips64Dmod)                                      \
  V(Mips64ModU)                                      \
  V(Mips64DmodU)                                     \
  V(Mips64And) /* Bitwise logical */                 \
  V(Mips64And32)                                     \
  V(Mips64Or)                                        \
  V(Mips64Or32)                                      \
  V(Mips64Nor)                                       \
  V(Mips64Nor32)                                     \
  V(Mips64Xor)                                       \
  V(Mips64Xor32)                                     \
  V(Mips64Clz)                                       \
  V(Mips64Lsa)                                       \
  V(Mips64Dlsa)                                      \
  V(Mips64Shl) /* Shifts, bit-field, bit counts */   \
  V(Mips64Shr)                                       \
  V(Mips64Sar)                                       \
  V(Mips64Ext)                                       \
  V(Mips64Ins)                                       \
  V(Mips64Dext)                                      \
  V(Mips64Dins)                                      \
  V(Mips64Dclz)                                      \
  V(Mips64Ctz)                                       \
  V(Mips64Dctz)                                      \
  V(Mips64Popcnt)                                    \
  V(Mips64Dpopcnt)                                   \
  V(Mips64Dshl)                                      \
  V(Mips64Dshr)                                      \
  V(Mips64Dsar)                                      \
  V(Mips64Ror)                                       \
  V(Mips64Dror)                                      \
  V(Mips64Mov) /* Moves and compares */              \
  V(Mips64Tst)                                       \
  V(Mips64Cmp)                                       \
  V(Mips64CmpS) /* Single-precision FP */            \
  V(Mips64AddS)                                      \
  V(Mips64SubS)                                      \
  V(Mips64MulS)                                      \
  V(Mips64DivS)                                      \
  V(Mips64AbsS)                                      \
  V(Mips64NegS)                                      \
  V(Mips64SqrtS)                                     \
  V(Mips64MaxS)                                      \
  V(Mips64MinS)                                      \
  V(Mips64CmpD) /* Double-precision FP */            \
  V(Mips64AddD)                                      \
  V(Mips64SubD)                                      \
  V(Mips64MulD)                                      \
  V(Mips64DivD)                                      \
  V(Mips64ModD)                                      \
  V(Mips64AbsD)                                      \
  V(Mips64NegD)                                      \
  V(Mips64SqrtD)                                     \
  V(Mips64MaxD)                                      \
  V(Mips64MinD)                                      \
  V(Mips64Float64RoundDown) /* FP rounding */        \
  V(Mips64Float64RoundTruncate)                      \
  V(Mips64Float64RoundUp)                            \
  V(Mips64Float64RoundTiesEven)                      \
  V(Mips64Float32RoundDown)                          \
  V(Mips64Float32RoundTruncate)                      \
  V(Mips64Float32RoundUp)                            \
  V(Mips64Float32RoundTiesEven)                      \
  V(Mips64CvtSD) /* FP/integer conversions */        \
  V(Mips64CvtDS)                                     \
  V(Mips64TruncWD)                                   \
  V(Mips64RoundWD)                                   \
  V(Mips64FloorWD)                                   \
  V(Mips64CeilWD)                                    \
  V(Mips64TruncWS)                                   \
  V(Mips64RoundWS)                                   \
  V(Mips64FloorWS)                                   \
  V(Mips64CeilWS)                                    \
  V(Mips64TruncLS)                                   \
  V(Mips64TruncLD)                                   \
  V(Mips64TruncUwD)                                  \
  V(Mips64TruncUwS)                                  \
  V(Mips64TruncUlS)                                  \
  V(Mips64TruncUlD)                                  \
  V(Mips64CvtDW)                                     \
  V(Mips64CvtSL)                                     \
  V(Mips64CvtSW)                                     \
  V(Mips64CvtSUw)                                    \
  V(Mips64CvtSUl)                                    \
  V(Mips64CvtDL)                                     \
  V(Mips64CvtDUw)                                    \
  V(Mips64CvtDUl)                                    \
  V(Mips64Lb) /* Loads/stores (U-prefix: unaligned) */ \
  V(Mips64Lbu)                                       \
  V(Mips64Sb)                                        \
  V(Mips64Lh)                                        \
  V(Mips64Ulh)                                       \
  V(Mips64Lhu)                                       \
  V(Mips64Ulhu)                                      \
  V(Mips64Sh)                                        \
  V(Mips64Ush)                                       \
  V(Mips64Ld)                                        \
  V(Mips64Uld)                                       \
  V(Mips64Lw)                                        \
  V(Mips64Ulw)                                       \
  V(Mips64Lwu)                                       \
  V(Mips64Ulwu)                                      \
  V(Mips64Sw)                                        \
  V(Mips64Usw)                                       \
  V(Mips64Sd)                                        \
  V(Mips64Usd)                                       \
  V(Mips64Lwc1)                                      \
  V(Mips64Ulwc1)                                     \
  V(Mips64Swc1)                                      \
  V(Mips64Uswc1)                                     \
  V(Mips64Ldc1)                                      \
  V(Mips64Uldc1)                                     \
  V(Mips64Sdc1)                                      \
  V(Mips64Usdc1)                                     \
  V(Mips64BitcastDL) /* FP bitcasts / word access */ \
  V(Mips64BitcastLD)                                 \
  V(Mips64Float64ExtractLowWord32)                   \
  V(Mips64Float64ExtractHighWord32)                  \
  V(Mips64Float64InsertLowWord32)                    \
  V(Mips64Float64InsertHighWord32)                   \
  V(Mips64Float32Max)                                \
  V(Mips64Float64Max)                                \
  V(Mips64Float32Min)                                \
  V(Mips64Float64Min)                                \
  V(Mips64Float64SilenceNaN)                         \
  V(Mips64Push) /* Stack and miscellaneous */        \
  V(Mips64Peek)                                      \
  V(Mips64StoreToStackSlot)                          \
  V(Mips64ByteSwap64)                                \
  V(Mips64ByteSwap32)                                \
  V(Mips64StackClaim)                                \
  V(Mips64Seb)                                       \
  V(Mips64Seh)                                       \
  V(Mips64Sync)                                      \
  V(Mips64AssertEqual)                               \
  V(Mips64S128Const) /* SIMD (MSA) */                \
  V(Mips64S128Zero)                                  \
  V(Mips64S128AllOnes)                               \
  V(Mips64I32x4Splat)                                \
  V(Mips64I32x4ExtractLane)                          \
  V(Mips64I32x4ReplaceLane)                          \
  V(Mips64I32x4Add)                                  \
  V(Mips64I32x4Sub)                                  \
  V(Mips64F64x2Abs)                                  \
  V(Mips64F64x2Neg)                                  \
  V(Mips64F32x4Splat)                                \
  V(Mips64F32x4ExtractLane)                          \
  V(Mips64F32x4ReplaceLane)                          \
  V(Mips64F32x4SConvertI32x4)                        \
  V(Mips64F32x4UConvertI32x4)                        \
  V(Mips64I32x4Mul)                                  \
  V(Mips64I32x4MaxS)                                 \
  V(Mips64I32x4MinS)                                 \
  V(Mips64I32x4Eq)                                   \
  V(Mips64I32x4Ne)                                   \
  V(Mips64I32x4Shl)                                  \
  V(Mips64I32x4ShrS)                                 \
  V(Mips64I32x4ShrU)                                 \
  V(Mips64I32x4MaxU)                                 \
  V(Mips64I32x4MinU)                                 \
  V(Mips64F64x2Sqrt)                                 \
  V(Mips64F64x2Add)                                  \
  V(Mips64F64x2Sub)                                  \
  V(Mips64F64x2Mul)                                  \
  V(Mips64F64x2Div)                                  \
  V(Mips64F64x2Min)                                  \
  V(Mips64F64x2Max)                                  \
  V(Mips64F64x2Eq)                                   \
  V(Mips64F64x2Ne)                                   \
  V(Mips64F64x2Lt)                                   \
  V(Mips64F64x2Le)                                   \
  V(Mips64F64x2Splat)                                \
  V(Mips64F64x2ExtractLane)                          \
  V(Mips64F64x2ReplaceLane)                          \
  V(Mips64F64x2Pmin)                                 \
  V(Mips64F64x2Pmax)                                 \
  V(Mips64F64x2Ceil)                                 \
  V(Mips64F64x2Floor)                                \
  V(Mips64F64x2Trunc)                                \
  V(Mips64F64x2NearestInt)                           \
  V(Mips64F64x2ConvertLowI32x4S)                     \
  V(Mips64F64x2ConvertLowI32x4U)                     \
  V(Mips64F64x2PromoteLowF32x4)                      \
  V(Mips64I64x2Splat)                                \
  V(Mips64I64x2ExtractLane)                          \
  V(Mips64I64x2ReplaceLane)                          \
  V(Mips64I64x2Add)                                  \
  V(Mips64I64x2Sub)                                  \
  V(Mips64I64x2Mul)                                  \
  V(Mips64I64x2Neg)                                  \
  V(Mips64I64x2Shl)                                  \
  V(Mips64I64x2ShrS)                                 \
  V(Mips64I64x2ShrU)                                 \
  V(Mips64I64x2BitMask)                              \
  V(Mips64I64x2Eq)                                   \
  V(Mips64I64x2Ne)                                   \
  V(Mips64I64x2GtS)                                  \
  V(Mips64I64x2GeS)                                  \
  V(Mips64I64x2Abs)                                  \
  V(Mips64I64x2SConvertI32x4Low)                     \
  V(Mips64I64x2SConvertI32x4High)                    \
  V(Mips64I64x2UConvertI32x4Low)                     \
  V(Mips64I64x2UConvertI32x4High)                    \
  V(Mips64ExtMulLow)                                 \
  V(Mips64ExtMulHigh)                                \
  V(Mips64ExtAddPairwise)                            \
  V(Mips64F32x4Abs)                                  \
  V(Mips64F32x4Neg)                                  \
  V(Mips64F32x4Sqrt)                                 \
  V(Mips64F32x4RecipApprox)                          \
  V(Mips64F32x4RecipSqrtApprox)                      \
  V(Mips64F32x4Add)                                  \
  V(Mips64F32x4Sub)                                  \
  V(Mips64F32x4Mul)                                  \
  V(Mips64F32x4Div)                                  \
  V(Mips64F32x4Max)                                  \
  V(Mips64F32x4Min)                                  \
  V(Mips64F32x4Eq)                                   \
  V(Mips64F32x4Ne)                                   \
  V(Mips64F32x4Lt)                                   \
  V(Mips64F32x4Le)                                   \
  V(Mips64F32x4Pmin)                                 \
  V(Mips64F32x4Pmax)                                 \
  V(Mips64F32x4Ceil)                                 \
  V(Mips64F32x4Floor)                                \
  V(Mips64F32x4Trunc)                                \
  V(Mips64F32x4NearestInt)                           \
  V(Mips64F32x4DemoteF64x2Zero)                      \
  V(Mips64I32x4SConvertF32x4)                        \
  V(Mips64I32x4UConvertF32x4)                        \
  V(Mips64I32x4Neg)                                  \
  V(Mips64I32x4GtS)                                  \
  V(Mips64I32x4GeS)                                  \
  V(Mips64I32x4GtU)                                  \
  V(Mips64I32x4GeU)                                  \
  V(Mips64I32x4Abs)                                  \
  V(Mips64I32x4BitMask)                              \
  V(Mips64I32x4DotI16x8S)                            \
  V(Mips64I32x4TruncSatF64x2SZero)                   \
  V(Mips64I32x4TruncSatF64x2UZero)                   \
  V(Mips64I16x8Splat)                                \
  V(Mips64I16x8ExtractLaneU)                         \
  V(Mips64I16x8ExtractLaneS)                         \
  V(Mips64I16x8ReplaceLane)                          \
  V(Mips64I16x8Neg)                                  \
  V(Mips64I16x8Shl)                                  \
  V(Mips64I16x8ShrS)                                 \
  V(Mips64I16x8ShrU)                                 \
  V(Mips64I16x8Add)                                  \
  V(Mips64I16x8AddSatS)                              \
  V(Mips64I16x8Sub)                                  \
  V(Mips64I16x8SubSatS)                              \
  V(Mips64I16x8Mul)                                  \
  V(Mips64I16x8MaxS)                                 \
  V(Mips64I16x8MinS)                                 \
  V(Mips64I16x8Eq)                                   \
  V(Mips64I16x8Ne)                                   \
  V(Mips64I16x8GtS)                                  \
  V(Mips64I16x8GeS)                                  \
  V(Mips64I16x8AddSatU)                              \
  V(Mips64I16x8SubSatU)                              \
  V(Mips64I16x8MaxU)                                 \
  V(Mips64I16x8MinU)                                 \
  V(Mips64I16x8GtU)                                  \
  V(Mips64I16x8GeU)                                  \
  V(Mips64I16x8RoundingAverageU)                     \
  V(Mips64I16x8Abs)                                  \
  V(Mips64I16x8BitMask)                              \
  V(Mips64I16x8Q15MulRSatS)                          \
  V(Mips64I8x16Splat)                                \
  V(Mips64I8x16ExtractLaneU)                         \
  V(Mips64I8x16ExtractLaneS)                         \
  V(Mips64I8x16ReplaceLane)                          \
  V(Mips64I8x16Neg)                                  \
  V(Mips64I8x16Shl)                                  \
  V(Mips64I8x16ShrS)                                 \
  V(Mips64I8x16Add)                                  \
  V(Mips64I8x16AddSatS)                              \
  V(Mips64I8x16Sub)                                  \
  V(Mips64I8x16SubSatS)                              \
  V(Mips64I8x16MaxS)                                 \
  V(Mips64I8x16MinS)                                 \
  V(Mips64I8x16Eq)                                   \
  V(Mips64I8x16Ne)                                   \
  V(Mips64I8x16GtS)                                  \
  V(Mips64I8x16GeS)                                  \
  V(Mips64I8x16ShrU)                                 \
  V(Mips64I8x16AddSatU)                              \
  V(Mips64I8x16SubSatU)                              \
  V(Mips64I8x16MaxU)                                 \
  V(Mips64I8x16MinU)                                 \
  V(Mips64I8x16GtU)                                  \
  V(Mips64I8x16GeU)                                  \
  V(Mips64I8x16RoundingAverageU)                     \
  V(Mips64I8x16Abs)                                  \
  V(Mips64I8x16Popcnt)                               \
  V(Mips64I8x16BitMask)                              \
  V(Mips64S128And)                                   \
  V(Mips64S128Or)                                    \
  V(Mips64S128Xor)                                   \
  V(Mips64S128Not)                                   \
  V(Mips64S128Select)                                \
  V(Mips64S128AndNot)                                \
  V(Mips64I64x2AllTrue)                              \
  V(Mips64I32x4AllTrue)                              \
  V(Mips64I16x8AllTrue)                              \
  V(Mips64I8x16AllTrue)                              \
  V(Mips64V128AnyTrue)                               \
  V(Mips64S32x4InterleaveRight) /* SIMD shuffles */  \
  V(Mips64S32x4InterleaveLeft)                       \
  V(Mips64S32x4PackEven)                             \
  V(Mips64S32x4PackOdd)                              \
  V(Mips64S32x4InterleaveEven)                       \
  V(Mips64S32x4InterleaveOdd)                        \
  V(Mips64S32x4Shuffle)                              \
  V(Mips64S16x8InterleaveRight)                      \
  V(Mips64S16x8InterleaveLeft)                       \
  V(Mips64S16x8PackEven)                             \
  V(Mips64S16x8PackOdd)                              \
  V(Mips64S16x8InterleaveEven)                       \
  V(Mips64S16x8InterleaveOdd)                        \
  V(Mips64S16x4Reverse)                              \
  V(Mips64S16x2Reverse)                              \
  V(Mips64S8x16InterleaveRight)                      \
  V(Mips64S8x16InterleaveLeft)                       \
  V(Mips64S8x16PackEven)                             \
  V(Mips64S8x16PackOdd)                              \
  V(Mips64S8x16InterleaveEven)                       \
  V(Mips64S8x16InterleaveOdd)                        \
  V(Mips64I8x16Shuffle)                              \
  V(Mips64I8x16Swizzle)                              \
  V(Mips64S8x16Concat)                               \
  V(Mips64S8x8Reverse)                               \
  V(Mips64S8x4Reverse)                               \
  V(Mips64S8x2Reverse)                               \
  V(Mips64S128LoadSplat) /* SIMD loads/stores */     \
  V(Mips64S128Load8x8S)                              \
  V(Mips64S128Load8x8U)                              \
  V(Mips64S128Load16x4S)                             \
  V(Mips64S128Load16x4U)                             \
  V(Mips64S128Load32x2S)                             \
  V(Mips64S128Load32x2U)                             \
  V(Mips64S128Load32Zero)                            \
  V(Mips64S128Load64Zero)                            \
  V(Mips64S128LoadLane)                              \
  V(Mips64S128StoreLane)                             \
  V(Mips64MsaLd)                                     \
  V(Mips64MsaSt)                                     \
  V(Mips64I32x4SConvertI16x8Low) /* SIMD widening */ \
  V(Mips64I32x4SConvertI16x8High)                    \
  V(Mips64I32x4UConvertI16x8Low)                     \
  V(Mips64I32x4UConvertI16x8High)                    \
  V(Mips64I16x8SConvertI8x16Low)                     \
  V(Mips64I16x8SConvertI8x16High)                    \
  V(Mips64I16x8SConvertI32x4)                        \
  V(Mips64I16x8UConvertI32x4)                        \
  V(Mips64I16x8UConvertI8x16Low)                     \
  V(Mips64I16x8UConvertI8x16High)                    \
  V(Mips64I8x16SConvertI16x8)                        \
  V(Mips64I8x16UConvertI16x8)                        \
  V(Mips64StoreCompressTagged)                       \
  V(Mips64Word64AtomicLoadUint64) /* 64-bit atomics */ \
  V(Mips64Word64AtomicStoreWord64)                   \
  V(Mips64Word64AtomicAddUint64)                     \
  V(Mips64Word64AtomicSubUint64)                     \
  V(Mips64Word64AtomicAndUint64)                     \
  V(Mips64Word64AtomicOrUint64)                      \
  V(Mips64Word64AtomicXorUint64)                     \
  V(Mips64Word64AtomicExchangeUint64)                \
  V(Mips64Word64AtomicCompareExchangeUint64)
// Addressing modes represent the "shape" of inputs to an instruction.
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment