Commit 3c627b37 authored by Milad Fa, committed by V8 LUCI CQ

PPC/s390: [compiler] Teach InstructionScheduler about protected memory accesses

Port e301d71f

Original Commit Message:

    Because these instructions can trap, we don't want them to be reordered
    as freely as unprotected accesses.

    As part of this, make explicit which opcodes support a MemoryAccessMode.

R=neis@chromium.org, joransiu@ca.ibm.com, junyan@redhat.com, midawson@redhat.com
BUG=
LOG=N

Change-Id: I122a53a67c9d2b3b99c5c25395064b61969483a4
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3181539
Reviewed-by: Georg Neis <neis@chromium.org>
Reviewed-by: Junliang Yan <junyan@redhat.com>
Commit-Queue: Milad Fa <mfarazma@redhat.com>
Cr-Commit-Position: refs/heads/main@{#77046}
parent ec4a3124
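
For context, the shared change being ported (e301d71f) makes the instruction scheduler treat a protected, trapping memory access like an instruction with side effects, so it is not hoisted or sunk past other effectful instructions. Below is a minimal, self-contained sketch of that scheduling rule; the names and flag values are illustrative only and are not code from this CL.

// Sketch only: why opcodes that carry a MemoryAccessMode matter to scheduling.
#include <cstdint>

enum MemoryAccessMode : uint8_t { kMemoryAccessDirect, kMemoryAccessProtected };

struct Instr {
  bool is_memory_access;
  MemoryAccessMode access_mode;  // Only meaningful for memory accesses.
};

constexpr int kHasSideEffect = 1 << 0;
constexpr int kIsLoadOperation = 1 << 1;

// Flags a list scheduler can consult when deciding which instructions it may
// reorder relative to each other.
int SchedulingFlags(const Instr& instr) {
  int flags = 0;
  if (instr.is_memory_access) {
    flags |= kIsLoadOperation;
    // A protected access relies on the trap handler for its bounds check;
    // moving it past other effectful instructions could change behaviour.
    if (instr.access_mode == kMemoryAccessProtected) flags |= kHasSideEffect;
  }
  return flags;
}

int main() {
  Instr protected_load{true, kMemoryAccessProtected};
  return (SchedulingFlags(protected_load) & kHasSideEffect) ? 0 : 1;
}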
src/compiler/backend/ppc/instruction-codes-ppc.h
@@ -11,406 +11,411 @@ namespace compiler {
(lines added by this CL are marked with +; the remaining list entries are unchanged apart from re-aligned continuation backslashes)

// PPC-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
+
+// Opcodes that support a MemoryAccessMode.
+#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
+
#define TARGET_ARCH_OPCODE_LIST(V) \
+  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(PPC_Peek) \
  V(PPC_Sync) \
  V(PPC_And) \
  V(PPC_AndComplement) \
  V(PPC_Or) \
  V(PPC_OrComplement) \
  V(PPC_Xor) \
  V(PPC_ShiftLeft32) \
  V(PPC_ShiftLeft64) \
  V(PPC_ShiftLeftPair) \
  V(PPC_ShiftRight32) \
  V(PPC_ShiftRight64) \
  V(PPC_ShiftRightPair) \
  V(PPC_ShiftRightAlg32) \
  V(PPC_ShiftRightAlg64) \
  V(PPC_ShiftRightAlgPair) \
  V(PPC_RotRight32) \
  V(PPC_RotRight64) \
  V(PPC_Not) \
  V(PPC_RotLeftAndMask32) \
  V(PPC_RotLeftAndClear64) \
  V(PPC_RotLeftAndClearLeft64) \
  V(PPC_RotLeftAndClearRight64) \
  V(PPC_Add32) \
  V(PPC_Add64) \
  V(PPC_AddWithOverflow32) \
  V(PPC_AddPair) \
  V(PPC_AddDouble) \
  V(PPC_Sub) \
  V(PPC_SubWithOverflow32) \
  V(PPC_SubPair) \
  V(PPC_SubDouble) \
  V(PPC_Mul32) \
  V(PPC_Mul32WithHigh32) \
  V(PPC_Mul64) \
  V(PPC_MulHigh32) \
  V(PPC_MulHighU32) \
  V(PPC_MulPair) \
  V(PPC_MulDouble) \
  V(PPC_Div32) \
  V(PPC_Div64) \
  V(PPC_DivU32) \
  V(PPC_DivU64) \
  V(PPC_DivDouble) \
  V(PPC_Mod32) \
  V(PPC_Mod64) \
  V(PPC_ModU32) \
  V(PPC_ModU64) \
  V(PPC_ModDouble) \
  V(PPC_Neg) \
  V(PPC_NegDouble) \
  V(PPC_SqrtDouble) \
  V(PPC_FloorDouble) \
  V(PPC_CeilDouble) \
  V(PPC_TruncateDouble) \
  V(PPC_RoundDouble) \
  V(PPC_MaxDouble) \
  V(PPC_MinDouble) \
  V(PPC_AbsDouble) \
  V(PPC_Cntlz32) \
  V(PPC_Cntlz64) \
  V(PPC_Popcnt32) \
  V(PPC_Popcnt64) \
  V(PPC_Cmp32) \
  V(PPC_Cmp64) \
  V(PPC_CmpDouble) \
  V(PPC_Tst32) \
  V(PPC_Tst64) \
  V(PPC_Push) \
  V(PPC_PushFrame) \
  V(PPC_StoreToStackSlot) \
  V(PPC_ExtendSignWord8) \
  V(PPC_ExtendSignWord16) \
  V(PPC_ExtendSignWord32) \
  V(PPC_Uint32ToUint64) \
  V(PPC_Int64ToInt32) \
  V(PPC_Int64ToFloat32) \
  V(PPC_Int64ToDouble) \
  V(PPC_Uint64ToFloat32) \
  V(PPC_Uint64ToDouble) \
  V(PPC_Int32ToFloat32) \
  V(PPC_Int32ToDouble) \
  V(PPC_Uint32ToFloat32) \
  V(PPC_Float32ToInt32) \
  V(PPC_Float32ToUint32) \
  V(PPC_Uint32ToDouble) \
  V(PPC_Float32ToDouble) \
  V(PPC_Float64SilenceNaN) \
  V(PPC_DoubleToInt32) \
  V(PPC_DoubleToUint32) \
  V(PPC_DoubleToInt64) \
  V(PPC_DoubleToUint64) \
  V(PPC_DoubleToFloat32) \
  V(PPC_DoubleExtractLowWord32) \
  V(PPC_DoubleExtractHighWord32) \
  V(PPC_DoubleInsertLowWord32) \
  V(PPC_DoubleInsertHighWord32) \
  V(PPC_DoubleConstruct) \
  V(PPC_BitcastInt32ToFloat32) \
  V(PPC_BitcastFloat32ToInt32) \
  V(PPC_BitcastInt64ToDouble) \
  V(PPC_BitcastDoubleToInt64) \
  V(PPC_LoadWordS8) \
  V(PPC_LoadWordU8) \
  V(PPC_LoadWordS16) \
  V(PPC_LoadWordU16) \
  V(PPC_LoadWordS32) \
  V(PPC_LoadWordU32) \
  V(PPC_LoadByteRev32) \
  V(PPC_LoadWord64) \
  V(PPC_LoadByteRev64) \
  V(PPC_LoadFloat32) \
  V(PPC_LoadDouble) \
  V(PPC_LoadSimd128) \
  V(PPC_LoadReverseSimd128RR) \
  V(PPC_StoreWord8) \
  V(PPC_StoreWord16) \
  V(PPC_StoreWord32) \
  V(PPC_StoreByteRev32) \
  V(PPC_StoreWord64) \
  V(PPC_StoreByteRev64) \
  V(PPC_StoreFloat32) \
  V(PPC_StoreDouble) \
  V(PPC_StoreSimd128) \
  V(PPC_ByteRev32) \
  V(PPC_ByteRev64) \
  V(PPC_AtomicExchangeUint8) \
  V(PPC_AtomicExchangeUint16) \
  V(PPC_AtomicExchangeWord32) \
  V(PPC_AtomicExchangeWord64) \
  V(PPC_AtomicCompareExchangeUint8) \
  V(PPC_AtomicCompareExchangeUint16) \
  V(PPC_AtomicCompareExchangeWord32) \
  V(PPC_AtomicCompareExchangeWord64) \
  V(PPC_AtomicAddUint8) \
  V(PPC_AtomicAddUint16) \
  V(PPC_AtomicAddUint32) \
  V(PPC_AtomicAddUint64) \
  V(PPC_AtomicAddInt8) \
  V(PPC_AtomicAddInt16) \
  V(PPC_AtomicAddInt32) \
  V(PPC_AtomicAddInt64) \
  V(PPC_AtomicSubUint8) \
  V(PPC_AtomicSubUint16) \
  V(PPC_AtomicSubUint32) \
  V(PPC_AtomicSubUint64) \
  V(PPC_AtomicSubInt8) \
  V(PPC_AtomicSubInt16) \
  V(PPC_AtomicSubInt32) \
  V(PPC_AtomicSubInt64) \
  V(PPC_AtomicAndUint8) \
  V(PPC_AtomicAndUint16) \
  V(PPC_AtomicAndUint32) \
  V(PPC_AtomicAndUint64) \
  V(PPC_AtomicAndInt8) \
  V(PPC_AtomicAndInt16) \
  V(PPC_AtomicAndInt32) \
  V(PPC_AtomicAndInt64) \
  V(PPC_AtomicOrUint8) \
  V(PPC_AtomicOrUint16) \
  V(PPC_AtomicOrUint32) \
  V(PPC_AtomicOrUint64) \
  V(PPC_AtomicOrInt8) \
  V(PPC_AtomicOrInt16) \
  V(PPC_AtomicOrInt32) \
  V(PPC_AtomicOrInt64) \
  V(PPC_AtomicXorUint8) \
  V(PPC_AtomicXorUint16) \
  V(PPC_AtomicXorUint32) \
  V(PPC_AtomicXorUint64) \
  V(PPC_AtomicXorInt8) \
  V(PPC_AtomicXorInt16) \
  V(PPC_AtomicXorInt32) \
  V(PPC_AtomicXorInt64) \
  V(PPC_F64x2Splat) \
  V(PPC_F64x2ExtractLane) \
  V(PPC_F64x2ReplaceLane) \
  V(PPC_F64x2Add) \
  V(PPC_F64x2Sub) \
  V(PPC_F64x2Mul) \
  V(PPC_F64x2Eq) \
  V(PPC_F64x2Ne) \
  V(PPC_F64x2Le) \
  V(PPC_F64x2Lt) \
  V(PPC_F64x2Abs) \
  V(PPC_F64x2Neg) \
  V(PPC_F64x2Sqrt) \
  V(PPC_F64x2Qfma) \
  V(PPC_F64x2Qfms) \
  V(PPC_F64x2Div) \
  V(PPC_F64x2Min) \
  V(PPC_F64x2Max) \
  V(PPC_F64x2Ceil) \
  V(PPC_F64x2Floor) \
  V(PPC_F64x2Trunc) \
  V(PPC_F64x2Pmin) \
  V(PPC_F64x2Pmax) \
  V(PPC_F64x2ConvertLowI32x4S) \
  V(PPC_F64x2ConvertLowI32x4U) \
  V(PPC_F64x2PromoteLowF32x4) \
  V(PPC_F32x4Splat) \
  V(PPC_F32x4ExtractLane) \
  V(PPC_F32x4ReplaceLane) \
  V(PPC_F32x4Add) \
  V(PPC_F32x4Sub) \
  V(PPC_F32x4Mul) \
  V(PPC_F32x4Eq) \
  V(PPC_F32x4Ne) \
  V(PPC_F32x4Lt) \
  V(PPC_F32x4Le) \
  V(PPC_F32x4Abs) \
  V(PPC_F32x4Neg) \
  V(PPC_F32x4RecipApprox) \
  V(PPC_F32x4RecipSqrtApprox) \
  V(PPC_F32x4Sqrt) \
  V(PPC_F32x4SConvertI32x4) \
  V(PPC_F32x4UConvertI32x4) \
  V(PPC_F32x4Div) \
  V(PPC_F32x4Min) \
  V(PPC_F32x4Max) \
  V(PPC_F32x4Ceil) \
  V(PPC_F32x4Floor) \
  V(PPC_F32x4Trunc) \
  V(PPC_F32x4Pmin) \
  V(PPC_F32x4Pmax) \
  V(PPC_F32x4Qfma) \
  V(PPC_F32x4Qfms) \
  V(PPC_F32x4DemoteF64x2Zero) \
  V(PPC_I64x2Splat) \
  V(PPC_I64x2ExtractLane) \
  V(PPC_I64x2ReplaceLane) \
  V(PPC_I64x2Add) \
  V(PPC_I64x2Sub) \
  V(PPC_I64x2Mul) \
  V(PPC_I64x2Eq) \
  V(PPC_I64x2Ne) \
  V(PPC_I64x2GtS) \
  V(PPC_I64x2GeS) \
  V(PPC_I64x2Shl) \
  V(PPC_I64x2ShrS) \
  V(PPC_I64x2ShrU) \
  V(PPC_I64x2Neg) \
  V(PPC_I64x2BitMask) \
  V(PPC_I64x2SConvertI32x4Low) \
  V(PPC_I64x2SConvertI32x4High) \
  V(PPC_I64x2UConvertI32x4Low) \
  V(PPC_I64x2UConvertI32x4High) \
  V(PPC_I64x2ExtMulLowI32x4S) \
  V(PPC_I64x2ExtMulHighI32x4S) \
  V(PPC_I64x2ExtMulLowI32x4U) \
  V(PPC_I64x2ExtMulHighI32x4U) \
  V(PPC_I64x2Abs) \
  V(PPC_I32x4Splat) \
  V(PPC_I32x4ExtractLane) \
  V(PPC_I32x4ReplaceLane) \
  V(PPC_I32x4Add) \
  V(PPC_I32x4Sub) \
  V(PPC_I32x4Mul) \
  V(PPC_I32x4MinS) \
  V(PPC_I32x4MinU) \
  V(PPC_I32x4MaxS) \
  V(PPC_I32x4MaxU) \
  V(PPC_I32x4Eq) \
  V(PPC_I32x4Ne) \
  V(PPC_I32x4GtS) \
  V(PPC_I32x4GeS) \
  V(PPC_I32x4GtU) \
  V(PPC_I32x4GeU) \
  V(PPC_I32x4Shl) \
  V(PPC_I32x4ShrS) \
  V(PPC_I32x4ShrU) \
  V(PPC_I32x4Neg) \
  V(PPC_I32x4Abs) \
  V(PPC_I32x4SConvertF32x4) \
  V(PPC_I32x4UConvertF32x4) \
  V(PPC_I32x4SConvertI16x8Low) \
  V(PPC_I32x4SConvertI16x8High) \
  V(PPC_I32x4UConvertI16x8Low) \
  V(PPC_I32x4UConvertI16x8High) \
  V(PPC_I32x4BitMask) \
  V(PPC_I32x4DotI16x8S) \
  V(PPC_I32x4ExtAddPairwiseI16x8S) \
  V(PPC_I32x4ExtAddPairwiseI16x8U) \
  V(PPC_I32x4ExtMulLowI16x8S) \
  V(PPC_I32x4ExtMulHighI16x8S) \
  V(PPC_I32x4ExtMulLowI16x8U) \
  V(PPC_I32x4ExtMulHighI16x8U) \
  V(PPC_I32x4TruncSatF64x2SZero) \
  V(PPC_I32x4TruncSatF64x2UZero) \
  V(PPC_I16x8Splat) \
  V(PPC_I16x8ExtractLaneU) \
  V(PPC_I16x8ExtractLaneS) \
  V(PPC_I16x8ReplaceLane) \
  V(PPC_I16x8Add) \
  V(PPC_I16x8Sub) \
  V(PPC_I16x8Mul) \
  V(PPC_I16x8MinS) \
  V(PPC_I16x8MinU) \
  V(PPC_I16x8MaxS) \
  V(PPC_I16x8MaxU) \
  V(PPC_I16x8Eq) \
  V(PPC_I16x8Ne) \
  V(PPC_I16x8GtS) \
  V(PPC_I16x8GeS) \
  V(PPC_I16x8GtU) \
  V(PPC_I16x8GeU) \
  V(PPC_I16x8Shl) \
  V(PPC_I16x8ShrS) \
  V(PPC_I16x8ShrU) \
  V(PPC_I16x8Neg) \
  V(PPC_I16x8Abs) \
  V(PPC_I16x8SConvertI32x4) \
  V(PPC_I16x8UConvertI32x4) \
  V(PPC_I16x8SConvertI8x16Low) \
  V(PPC_I16x8SConvertI8x16High) \
  V(PPC_I16x8UConvertI8x16Low) \
  V(PPC_I16x8UConvertI8x16High) \
  V(PPC_I16x8AddSatS) \
  V(PPC_I16x8SubSatS) \
  V(PPC_I16x8AddSatU) \
  V(PPC_I16x8SubSatU) \
  V(PPC_I16x8RoundingAverageU) \
  V(PPC_I16x8BitMask) \
  V(PPC_I16x8ExtAddPairwiseI8x16S) \
  V(PPC_I16x8ExtAddPairwiseI8x16U) \
  V(PPC_I16x8Q15MulRSatS) \
  V(PPC_I16x8ExtMulLowI8x16S) \
  V(PPC_I16x8ExtMulHighI8x16S) \
  V(PPC_I16x8ExtMulLowI8x16U) \
  V(PPC_I16x8ExtMulHighI8x16U) \
  V(PPC_I8x16Splat) \
  V(PPC_I8x16ExtractLaneU) \
  V(PPC_I8x16ExtractLaneS) \
  V(PPC_I8x16ReplaceLane) \
  V(PPC_I8x16Add) \
  V(PPC_I8x16Sub) \
  V(PPC_I8x16MinS) \
  V(PPC_I8x16MinU) \
  V(PPC_I8x16MaxS) \
  V(PPC_I8x16MaxU) \
  V(PPC_I8x16Eq) \
  V(PPC_I8x16Ne) \
  V(PPC_I8x16GtS) \
  V(PPC_I8x16GeS) \
  V(PPC_I8x16GtU) \
  V(PPC_I8x16GeU) \
  V(PPC_I8x16Shl) \
  V(PPC_I8x16ShrS) \
  V(PPC_I8x16ShrU) \
  V(PPC_I8x16Neg) \
  V(PPC_I8x16Abs) \
  V(PPC_I8x16SConvertI16x8) \
  V(PPC_I8x16UConvertI16x8) \
  V(PPC_I8x16AddSatS) \
  V(PPC_I8x16SubSatS) \
  V(PPC_I8x16AddSatU) \
  V(PPC_I8x16SubSatU) \
  V(PPC_I8x16RoundingAverageU) \
  V(PPC_I8x16Shuffle) \
  V(PPC_I8x16Swizzle) \
  V(PPC_I8x16BitMask) \
  V(PPC_I8x16Popcnt) \
  V(PPC_I64x2AllTrue) \
  V(PPC_I32x4AllTrue) \
  V(PPC_I16x8AllTrue) \
  V(PPC_I8x16AllTrue) \
  V(PPC_V128AnyTrue) \
  V(PPC_S128And) \
  V(PPC_S128Or) \
  V(PPC_S128Xor) \
  V(PPC_S128Const) \
  V(PPC_S128Zero) \
  V(PPC_S128AllOnes) \
  V(PPC_S128Not) \
  V(PPC_S128Select) \
  V(PPC_S128AndNot) \
  V(PPC_S128Load8Splat) \
  V(PPC_S128Load16Splat) \
  V(PPC_S128Load32Splat) \
  V(PPC_S128Load64Splat) \
  V(PPC_S128Load8x8S) \
  V(PPC_S128Load8x8U) \
  V(PPC_S128Load16x4S) \
  V(PPC_S128Load16x4U) \
  V(PPC_S128Load32x2S) \
  V(PPC_S128Load32x2U) \
  V(PPC_S128Load32Zero) \
  V(PPC_S128Load64Zero) \
  V(PPC_S128Load8Lane) \
  V(PPC_S128Load16Lane) \
  V(PPC_S128Load32Lane) \
  V(PPC_S128Load64Lane) \
  V(PPC_S128Store8Lane) \
  V(PPC_S128Store16Lane) \
  V(PPC_S128Store32Lane) \
  V(PPC_S128Store64Lane) \
  V(PPC_StoreCompressTagged) \
  V(PPC_LoadDecompressTaggedSigned) \
  V(PPC_LoadDecompressTaggedPointer) \
  V(PPC_LoadDecompressAnyTagged)

// Addressing modes represent the "shape" of inputs to an instruction.
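
The new TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST sub-list above is spliced into TARGET_ARCH_OPCODE_LIST, so its entries remain ordinary arch opcodes. A common way to consume such an X-macro sub-list is to generate a predicate over the opcode enum; the sketch below uses hypothetical Demo_* names and is not code from this CL (here the PPC and s390 sub-lists are still empty, so the corresponding predicate would have no true cases yet).

// Sketch only: deriving a predicate from an X-macro sub-list.
#define DEMO_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) V(Demo_ProtectedLoad)

#define DEMO_OPCODE_LIST(V)                   \
  DEMO_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(Demo_Add)                                 \
  V(Demo_StoreWord32)

enum DemoOpcode {
#define DECLARE(Name) k##Name,
  DEMO_OPCODE_LIST(DECLARE)
#undef DECLARE
};

// True only for opcodes that appear in the memory-access-mode sub-list.
inline bool DemoHasMemoryAccessMode(DemoOpcode opcode) {
  switch (opcode) {
#define CASE(Name) \
  case k##Name:    \
    return true;
    DEMO_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(CASE)
#undef CASE
    default:
      return false;
  }
}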
src/compiler/backend/s390/instruction-codes-s390.h
@@ -11,392 +11,397 @@ namespace compiler {
(lines added by this CL are marked with +; the remaining list entries are unchanged apart from re-aligned continuation backslashes)

// S390-specific opcodes that specify which assembly sequence to emit.
// Most opcodes specify a single instruction.
+
+// Opcodes that support a MemoryAccessMode.
+#define TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) // None.
+
#define TARGET_ARCH_OPCODE_LIST(V) \
+  TARGET_ARCH_OPCODE_WITH_MEMORY_ACCESS_MODE_LIST(V) \
  V(S390_Peek) \
  V(S390_Abs32) \
  V(S390_Abs64) \
  V(S390_And32) \
  V(S390_And64) \
  V(S390_Or32) \
  V(S390_Or64) \
  V(S390_Xor32) \
  V(S390_Xor64) \
  V(S390_ShiftLeft32) \
  V(S390_ShiftLeft64) \
  V(S390_ShiftRight32) \
  V(S390_ShiftRight64) \
  V(S390_ShiftRightArith32) \
  V(S390_ShiftRightArith64) \
  V(S390_RotRight32) \
  V(S390_RotRight64) \
  V(S390_Not32) \
  V(S390_Not64) \
  V(S390_RotLeftAndClear64) \
  V(S390_RotLeftAndClearLeft64) \
  V(S390_RotLeftAndClearRight64) \
  V(S390_Lay) \
  V(S390_Add32) \
  V(S390_Add64) \
  V(S390_AddFloat) \
  V(S390_AddDouble) \
  V(S390_Sub32) \
  V(S390_Sub64) \
  V(S390_SubFloat) \
  V(S390_SubDouble) \
  V(S390_Mul32) \
  V(S390_Mul32WithOverflow) \
  V(S390_Mul64) \
  V(S390_MulHigh32) \
  V(S390_MulHighU32) \
  V(S390_MulFloat) \
  V(S390_MulDouble) \
  V(S390_Div32) \
  V(S390_Div64) \
  V(S390_DivU32) \
  V(S390_DivU64) \
  V(S390_DivFloat) \
  V(S390_DivDouble) \
  V(S390_Mod32) \
  V(S390_Mod64) \
  V(S390_ModU32) \
  V(S390_ModU64) \
  V(S390_ModDouble) \
  V(S390_Neg32) \
  V(S390_Neg64) \
  V(S390_NegDouble) \
  V(S390_NegFloat) \
  V(S390_SqrtFloat) \
  V(S390_FloorFloat) \
  V(S390_CeilFloat) \
  V(S390_TruncateFloat) \
  V(S390_FloatNearestInt) \
  V(S390_AbsFloat) \
  V(S390_SqrtDouble) \
  V(S390_FloorDouble) \
  V(S390_CeilDouble) \
  V(S390_TruncateDouble) \
  V(S390_RoundDouble) \
  V(S390_DoubleNearestInt) \
  V(S390_MaxFloat) \
  V(S390_MaxDouble) \
  V(S390_MinFloat) \
  V(S390_MinDouble) \
  V(S390_AbsDouble) \
  V(S390_Cntlz32) \
  V(S390_Cntlz64) \
  V(S390_Popcnt32) \
  V(S390_Popcnt64) \
  V(S390_Cmp32) \
  V(S390_Cmp64) \
  V(S390_CmpFloat) \
  V(S390_CmpDouble) \
  V(S390_Tst32) \
  V(S390_Tst64) \
  V(S390_Push) \
  V(S390_PushFrame) \
  V(S390_StoreToStackSlot) \
  V(S390_SignExtendWord8ToInt32) \
  V(S390_SignExtendWord16ToInt32) \
  V(S390_SignExtendWord8ToInt64) \
  V(S390_SignExtendWord16ToInt64) \
  V(S390_SignExtendWord32ToInt64) \
  V(S390_Uint32ToUint64) \
  V(S390_Int64ToInt32) \
  V(S390_Int64ToFloat32) \
  V(S390_Int64ToDouble) \
  V(S390_Uint64ToFloat32) \
  V(S390_Uint64ToDouble) \
  V(S390_Int32ToFloat32) \
  V(S390_Int32ToDouble) \
  V(S390_Uint32ToFloat32) \
  V(S390_Uint32ToDouble) \
  V(S390_Float32ToInt64) \
  V(S390_Float32ToUint64) \
  V(S390_Float32ToInt32) \
  V(S390_Float32ToUint32) \
  V(S390_Float32ToDouble) \
  V(S390_Float64SilenceNaN) \
  V(S390_DoubleToInt32) \
  V(S390_DoubleToUint32) \
  V(S390_DoubleToInt64) \
  V(S390_DoubleToUint64) \
  V(S390_DoubleToFloat32) \
  V(S390_DoubleExtractLowWord32) \
  V(S390_DoubleExtractHighWord32) \
  V(S390_DoubleInsertLowWord32) \
  V(S390_DoubleInsertHighWord32) \
  V(S390_DoubleConstruct) \
  V(S390_BitcastInt32ToFloat32) \
  V(S390_BitcastFloat32ToInt32) \
  V(S390_BitcastInt64ToDouble) \
  V(S390_BitcastDoubleToInt64) \
  V(S390_LoadWordS8) \
  V(S390_LoadWordU8) \
  V(S390_LoadWordS16) \
  V(S390_LoadWordU16) \
  V(S390_LoadWordS32) \
  V(S390_LoadWordU32) \
  V(S390_LoadAndTestWord32) \
  V(S390_LoadAndTestWord64) \
  V(S390_LoadAndTestFloat32) \
  V(S390_LoadAndTestFloat64) \
  V(S390_LoadReverse16RR) \
  V(S390_LoadReverse32RR) \
  V(S390_LoadReverse64RR) \
  V(S390_LoadReverseSimd128RR) \
  V(S390_LoadReverseSimd128) \
  V(S390_LoadReverse16) \
  V(S390_LoadReverse32) \
  V(S390_LoadReverse64) \
  V(S390_LoadWord64) \
  V(S390_LoadFloat32) \
  V(S390_LoadDouble) \
  V(S390_StoreWord8) \
  V(S390_StoreWord16) \
  V(S390_StoreWord32) \
  V(S390_StoreWord64) \
  V(S390_StoreReverse16) \
  V(S390_StoreReverse32) \
  V(S390_StoreReverse64) \
  V(S390_StoreReverseSimd128) \
  V(S390_StoreFloat32) \
  V(S390_StoreDouble) \
  V(S390_Word64AtomicExchangeUint64) \
  V(S390_Word64AtomicCompareExchangeUint64) \
  V(S390_Word64AtomicAddUint64) \
  V(S390_Word64AtomicSubUint64) \
  V(S390_Word64AtomicAndUint64) \
  V(S390_Word64AtomicOrUint64) \
  V(S390_Word64AtomicXorUint64) \
  V(S390_F64x2Splat) \
  V(S390_F64x2ReplaceLane) \
  V(S390_F64x2Abs) \
  V(S390_F64x2Neg) \
  V(S390_F64x2Sqrt) \
  V(S390_F64x2Add) \
  V(S390_F64x2Sub) \
  V(S390_F64x2Mul) \
  V(S390_F64x2Div) \
  V(S390_F64x2Eq) \
  V(S390_F64x2Ne) \
  V(S390_F64x2Lt) \
  V(S390_F64x2Le) \
  V(S390_F64x2Min) \
  V(S390_F64x2Max) \
  V(S390_F64x2ExtractLane) \
  V(S390_F64x2Qfma) \
  V(S390_F64x2Qfms) \
  V(S390_F64x2Pmin) \
  V(S390_F64x2Pmax) \
  V(S390_F64x2Ceil) \
  V(S390_F64x2Floor) \
  V(S390_F64x2Trunc) \
  V(S390_F64x2NearestInt) \
  V(S390_F64x2ConvertLowI32x4S) \
  V(S390_F64x2ConvertLowI32x4U) \
  V(S390_F64x2PromoteLowF32x4) \
  V(S390_F32x4Splat) \
  V(S390_F32x4ExtractLane) \
  V(S390_F32x4ReplaceLane) \
  V(S390_F32x4Add) \
  V(S390_F32x4Sub) \
  V(S390_F32x4Mul) \
  V(S390_F32x4Eq) \
  V(S390_F32x4Ne) \
  V(S390_F32x4Lt) \
  V(S390_F32x4Le) \
  V(S390_F32x4Abs) \
  V(S390_F32x4Neg) \
  V(S390_F32x4RecipApprox) \
  V(S390_F32x4RecipSqrtApprox) \
  V(S390_F32x4SConvertI32x4) \
  V(S390_F32x4UConvertI32x4) \
  V(S390_F32x4Sqrt) \
  V(S390_F32x4Div) \
  V(S390_F32x4Min) \
  V(S390_F32x4Max) \
  V(S390_F32x4Qfma) \
  V(S390_F32x4Qfms) \
  V(S390_F32x4Pmin) \
  V(S390_F32x4Pmax) \
  V(S390_F32x4Ceil) \
  V(S390_F32x4Floor) \
  V(S390_F32x4Trunc) \
  V(S390_F32x4NearestInt) \
  V(S390_F32x4DemoteF64x2Zero) \
  V(S390_I64x2Neg) \
  V(S390_I64x2Add) \
  V(S390_I64x2Sub) \
  V(S390_I64x2Shl) \
  V(S390_I64x2ShrS) \
  V(S390_I64x2ShrU) \
  V(S390_I64x2Mul) \
  V(S390_I64x2Splat) \
  V(S390_I64x2ReplaceLane) \
  V(S390_I64x2ExtractLane) \
  V(S390_I64x2Eq) \
  V(S390_I64x2BitMask) \
  V(S390_I64x2ExtMulLowI32x4S) \
  V(S390_I64x2ExtMulHighI32x4S) \
  V(S390_I64x2ExtMulLowI32x4U) \
  V(S390_I64x2ExtMulHighI32x4U) \
  V(S390_I64x2SConvertI32x4Low) \
  V(S390_I64x2SConvertI32x4High) \
  V(S390_I64x2UConvertI32x4Low) \
  V(S390_I64x2UConvertI32x4High) \
  V(S390_I64x2Ne) \
  V(S390_I64x2GtS) \
  V(S390_I64x2GeS) \
  V(S390_I64x2Abs) \
  V(S390_I32x4Splat) \
  V(S390_I32x4ExtractLane) \
  V(S390_I32x4ReplaceLane) \
  V(S390_I32x4Add) \
  V(S390_I32x4Sub) \
  V(S390_I32x4Mul) \
  V(S390_I32x4MinS) \
  V(S390_I32x4MinU) \
  V(S390_I32x4MaxS) \
  V(S390_I32x4MaxU) \
  V(S390_I32x4Eq) \
  V(S390_I32x4Ne) \
  V(S390_I32x4GtS) \
  V(S390_I32x4GeS) \
  V(S390_I32x4GtU) \
  V(S390_I32x4GeU) \
  V(S390_I32x4Neg) \
  V(S390_I32x4Shl) \
  V(S390_I32x4ShrS) \
  V(S390_I32x4ShrU) \
  V(S390_I32x4SConvertF32x4) \
  V(S390_I32x4UConvertF32x4) \
  V(S390_I32x4SConvertI16x8Low) \
  V(S390_I32x4SConvertI16x8High) \
  V(S390_I32x4UConvertI16x8Low) \
  V(S390_I32x4UConvertI16x8High) \
  V(S390_I32x4Abs) \
  V(S390_I32x4BitMask) \
  V(S390_I32x4DotI16x8S) \
  V(S390_I32x4ExtMulLowI16x8S) \
  V(S390_I32x4ExtMulHighI16x8S) \
  V(S390_I32x4ExtMulLowI16x8U) \
  V(S390_I32x4ExtMulHighI16x8U) \
  V(S390_I32x4ExtAddPairwiseI16x8S) \
  V(S390_I32x4ExtAddPairwiseI16x8U) \
  V(S390_I32x4TruncSatF64x2SZero) \
  V(S390_I32x4TruncSatF64x2UZero) \
  V(S390_I16x8Splat) \
  V(S390_I16x8ExtractLaneU) \
  V(S390_I16x8ExtractLaneS) \
  V(S390_I16x8ReplaceLane) \
  V(S390_I16x8Add) \
  V(S390_I16x8Sub) \
  V(S390_I16x8Mul) \
  V(S390_I16x8MinS) \
  V(S390_I16x8MinU) \
  V(S390_I16x8MaxS) \
  V(S390_I16x8MaxU) \
  V(S390_I16x8Eq) \
  V(S390_I16x8Ne) \
  V(S390_I16x8GtS) \
  V(S390_I16x8GeS) \
  V(S390_I16x8GtU) \
  V(S390_I16x8GeU) \
  V(S390_I16x8Shl) \
  V(S390_I16x8ShrS) \
  V(S390_I16x8ShrU) \
  V(S390_I16x8Neg) \
  V(S390_I16x8SConvertI32x4) \
  V(S390_I16x8UConvertI32x4) \
  V(S390_I16x8SConvertI8x16Low) \
  V(S390_I16x8SConvertI8x16High) \
  V(S390_I16x8UConvertI8x16Low) \
  V(S390_I16x8UConvertI8x16High) \
  V(S390_I16x8AddSatS) \
  V(S390_I16x8SubSatS) \
  V(S390_I16x8AddSatU) \
  V(S390_I16x8SubSatU) \
  V(S390_I16x8RoundingAverageU) \
  V(S390_I16x8Abs) \
  V(S390_I16x8BitMask) \
  V(S390_I16x8ExtMulLowI8x16S) \
  V(S390_I16x8ExtMulHighI8x16S) \
  V(S390_I16x8ExtMulLowI8x16U) \
  V(S390_I16x8ExtMulHighI8x16U) \
  V(S390_I16x8ExtAddPairwiseI8x16S) \
  V(S390_I16x8ExtAddPairwiseI8x16U) \
  V(S390_I16x8Q15MulRSatS) \
  V(S390_I8x16Splat) \
  V(S390_I8x16ExtractLaneU) \
  V(S390_I8x16ExtractLaneS) \
  V(S390_I8x16ReplaceLane) \
  V(S390_I8x16Add) \
  V(S390_I8x16Sub) \
  V(S390_I8x16MinS) \
  V(S390_I8x16MinU) \
  V(S390_I8x16MaxS) \
  V(S390_I8x16MaxU) \
  V(S390_I8x16Eq) \
  V(S390_I8x16Ne) \
  V(S390_I8x16GtS) \
  V(S390_I8x16GeS) \
  V(S390_I8x16GtU) \
  V(S390_I8x16GeU) \
  V(S390_I8x16Shl) \
  V(S390_I8x16ShrS) \
  V(S390_I8x16ShrU) \
  V(S390_I8x16Neg) \
  V(S390_I8x16SConvertI16x8) \
  V(S390_I8x16UConvertI16x8) \
  V(S390_I8x16AddSatS) \
  V(S390_I8x16SubSatS) \
  V(S390_I8x16AddSatU) \
  V(S390_I8x16SubSatU) \
  V(S390_I8x16RoundingAverageU) \
  V(S390_I8x16Abs) \
  V(S390_I8x16BitMask) \
  V(S390_I8x16Shuffle) \
  V(S390_I8x16Swizzle) \
  V(S390_I8x16Popcnt) \
  V(S390_I64x2AllTrue) \
  V(S390_I32x4AllTrue) \
  V(S390_I16x8AllTrue) \
  V(S390_I8x16AllTrue) \
  V(S390_V128AnyTrue) \
  V(S390_S128And) \
  V(S390_S128Or) \
  V(S390_S128Xor) \
  V(S390_S128Const) \
  V(S390_S128Zero) \
  V(S390_S128AllOnes) \
  V(S390_S128Not) \
  V(S390_S128Select) \
  V(S390_S128AndNot) \
  V(S390_S128Load8Splat) \
  V(S390_S128Load16Splat) \
  V(S390_S128Load32Splat) \
  V(S390_S128Load64Splat) \
  V(S390_S128Load8x8S) \
  V(S390_S128Load8x8U) \
  V(S390_S128Load16x4S) \
  V(S390_S128Load16x4U) \
  V(S390_S128Load32x2S) \
  V(S390_S128Load32x2U) \
  V(S390_S128Load32Zero) \
  V(S390_S128Load64Zero) \
  V(S390_S128Load8Lane) \
  V(S390_S128Load16Lane) \
  V(S390_S128Load32Lane) \
  V(S390_S128Load64Lane) \
  V(S390_S128Store8Lane) \
  V(S390_S128Store16Lane) \
  V(S390_S128Store32Lane) \
  V(S390_S128Store64Lane) \
  V(S390_StoreSimd128) \
  V(S390_LoadSimd128) \
  V(S390_StoreCompressTagged) \
  V(S390_LoadDecompressTaggedSigned) \
  V(S390_LoadDecompressTaggedPointer) \
  V(S390_LoadDecompressAnyTagged)

// Addressing modes represent the "shape" of inputs to an instruction.
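
On the producing side, an instruction selector that emits a protected load or store has to record the access mode on the instruction so the scheduler and code generator can query it later. The sketch below shows one possible bit-packing; the field layout and bit widths are hypothetical and are not V8's actual InstructionCode encoding.

// Sketch only: carrying a MemoryAccessMode alongside an opcode (hypothetical layout).
#include <cstdint>

enum MemoryAccessMode : uint32_t {
  kMemoryAccessDirect = 0,
  kMemoryAccessProtected = 1,
};

// Hypothetical layout: low bits hold the arch opcode, the next bit holds the
// access mode. The point is only that the mode travels with the instruction.
constexpr uint32_t kOpcodeBits = 9;
constexpr uint32_t kAccessModeShift = kOpcodeBits;

constexpr uint32_t Encode(uint32_t arch_opcode, MemoryAccessMode mode) {
  return arch_opcode | (static_cast<uint32_t>(mode) << kAccessModeShift);
}

constexpr MemoryAccessMode DecodeAccessMode(uint32_t code) {
  return static_cast<MemoryAccessMode>((code >> kAccessModeShift) & 1u);
}

static_assert(DecodeAccessMode(Encode(42, kMemoryAccessProtected)) ==
                  kMemoryAccessProtected,
              "a protected access must be recoverable from the encoded code");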