mirror of
https://github.com/tinygrad/tinygrad.git
synced 2026-04-29 03:00:14 -04:00
* ReprEnum for repr roundtrips * dsl * bugfixes * vdsty fixes * cleaner * fix * fix cdna fields * tests all pass
2403 lines · 66 KiB · Python
# autogenerated from AMD ISA XML - do not edit
|
|
from extra.assembly.amd.autogen.common import ReprEnum, Fmt, FMT_BITS, OpType # noqa: F401
|
|
|
|
class DSOp(ReprEnum):
  """DS (LDS data-share) instruction opcodes.

  Autogenerated from the AMD ISA XML; each member's value is the raw opcode
  encoding for the DS instruction format. Gaps in the numbering are opcodes
  not defined by (or not exported for) this architecture.
  """
  # 32-bit arithmetic / logic / write ops
  DS_ADD_U32 = 0
  DS_SUB_U32 = 1
  DS_RSUB_U32 = 2
  DS_INC_U32 = 3
  DS_DEC_U32 = 4
  DS_MIN_I32 = 5
  DS_MAX_I32 = 6
  DS_MIN_U32 = 7
  DS_MAX_U32 = 8
  DS_AND_B32 = 9
  DS_OR_B32 = 10
  DS_XOR_B32 = 11
  DS_MSKOR_B32 = 12
  DS_WRITE_B32 = 13
  DS_WRITE2_B32 = 14
  DS_WRITE2ST64_B32 = 15
  DS_CMPST_B32 = 16
  DS_CMPST_F32 = 17
  DS_MIN_F32 = 18
  DS_MAX_F32 = 19
  DS_NOP = 20
  DS_ADD_F32 = 21
  DS_PK_ADD_F16 = 23
  DS_PK_ADD_BF16 = 24
  DS_WRITE_ADDTID_B32 = 29
  DS_WRITE_B8 = 30
  DS_WRITE_B16 = 31
  # 32-bit "RTN" variants (return the pre-op value)
  DS_ADD_RTN_U32 = 32
  DS_SUB_RTN_U32 = 33
  DS_RSUB_RTN_U32 = 34
  DS_INC_RTN_U32 = 35
  DS_DEC_RTN_U32 = 36
  DS_MIN_RTN_I32 = 37
  DS_MAX_RTN_I32 = 38
  DS_MIN_RTN_U32 = 39
  DS_MAX_RTN_U32 = 40
  DS_AND_RTN_B32 = 41
  DS_OR_RTN_B32 = 42
  DS_XOR_RTN_B32 = 43
  DS_MSKOR_RTN_B32 = 44
  DS_WRXCHG_RTN_B32 = 45
  DS_WRXCHG2_RTN_B32 = 46
  DS_WRXCHG2ST64_RTN_B32 = 47
  DS_CMPST_RTN_B32 = 48
  DS_CMPST_RTN_F32 = 49
  DS_MIN_RTN_F32 = 50
  DS_MAX_RTN_F32 = 51
  DS_WRAP_RTN_B32 = 52
  DS_ADD_RTN_F32 = 53
  # 32-bit reads
  DS_READ_B32 = 54
  DS_READ2_B32 = 55
  DS_READ2ST64_B32 = 56
  DS_READ_I8 = 57
  DS_READ_U8 = 58
  DS_READ_I16 = 59
  DS_READ_U16 = 60
  DS_SWIZZLE_B32 = 61
  DS_PERMUTE_B32 = 62
  DS_BPERMUTE_B32 = 63
  # 64-bit arithmetic / logic / write ops
  DS_ADD_U64 = 64
  DS_SUB_U64 = 65
  DS_RSUB_U64 = 66
  DS_INC_U64 = 67
  DS_DEC_U64 = 68
  DS_MIN_I64 = 69
  DS_MAX_I64 = 70
  DS_MIN_U64 = 71
  DS_MAX_U64 = 72
  DS_AND_B64 = 73
  DS_OR_B64 = 74
  DS_XOR_B64 = 75
  DS_MSKOR_B64 = 76
  DS_WRITE_B64 = 77
  DS_WRITE2_B64 = 78
  DS_WRITE2ST64_B64 = 79
  DS_CMPST_B64 = 80
  DS_CMPST_F64 = 81
  DS_MIN_F64 = 82
  DS_MAX_F64 = 83
  # sub-dword D16 (high/low half) writes and reads
  DS_WRITE_B8_D16_HI = 84
  DS_WRITE_B16_D16_HI = 85
  DS_READ_U8_D16 = 86
  DS_READ_U8_D16_HI = 87
  DS_READ_I8_D16 = 88
  DS_READ_I8_D16_HI = 89
  DS_READ_U16_D16 = 90
  DS_READ_U16_D16_HI = 91
  DS_ADD_F64 = 92
  # 64-bit "RTN" variants
  DS_ADD_RTN_U64 = 96
  DS_SUB_RTN_U64 = 97
  DS_RSUB_RTN_U64 = 98
  DS_INC_RTN_U64 = 99
  DS_DEC_RTN_U64 = 100
  DS_MIN_RTN_I64 = 101
  DS_MAX_RTN_I64 = 102
  DS_MIN_RTN_U64 = 103
  DS_MAX_RTN_U64 = 104
  DS_AND_RTN_B64 = 105
  DS_OR_RTN_B64 = 106
  DS_XOR_RTN_B64 = 107
  DS_MSKOR_RTN_B64 = 108
  DS_WRXCHG_RTN_B64 = 109
  DS_WRXCHG2_RTN_B64 = 110
  DS_WRXCHG2ST64_RTN_B64 = 111
  DS_CMPST_RTN_B64 = 112
  DS_CMPST_RTN_F64 = 113
  DS_MIN_RTN_F64 = 114
  DS_MAX_RTN_F64 = 115
  # 64-bit reads
  DS_READ_B64 = 118
  DS_READ2_B64 = 119
  DS_READ2ST64_B64 = 120
  DS_ADD_RTN_F64 = 124
  DS_CONDXCHG32_RTN_B64 = 126
  # GWS (global wave sync) ops
  DS_GWS_SEMA_RELEASE_ALL = 152
  DS_GWS_SEMA_V = 154
  DS_GWS_SEMA_P = 156
  DS_READ_ADDTID_B32 = 182
  DS_PK_ADD_RTN_F16 = 183
  DS_PK_ADD_RTN_BF16 = 184
  DS_CONSUME = 189
  DS_APPEND = 190
  # wide (96/128-bit) writes and reads, plus transpose reads
  DS_WRITE_B96 = 222
  DS_WRITE_B128 = 223
  DS_READ_B64_TR_B4 = 224
  DS_READ_B96_TR_B6 = 225
  DS_READ_B64_TR_B8 = 226
  DS_READ_B64_TR_B16 = 227
  DS_READ_B96 = 254
  DS_READ_B128 = 255
|
|
|
|
class FLATOp(ReprEnum):
  """FLAT (flat-address memory) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the FLAT instruction format.
  """
  # loads
  FLAT_LOAD_UBYTE = 16
  FLAT_LOAD_SBYTE = 17
  FLAT_LOAD_USHORT = 18
  FLAT_LOAD_SSHORT = 19
  FLAT_LOAD_DWORD = 20
  FLAT_LOAD_DWORDX2 = 21
  FLAT_LOAD_DWORDX3 = 22
  FLAT_LOAD_DWORDX4 = 23
  # stores
  FLAT_STORE_BYTE = 24
  FLAT_STORE_BYTE_D16_HI = 25
  FLAT_STORE_SHORT = 26
  FLAT_STORE_SHORT_D16_HI = 27
  FLAT_STORE_DWORD = 28
  FLAT_STORE_DWORDX2 = 29
  FLAT_STORE_DWORDX3 = 30
  FLAT_STORE_DWORDX4 = 31
  # sub-dword D16 loads
  FLAT_LOAD_UBYTE_D16 = 32
  FLAT_LOAD_UBYTE_D16_HI = 33
  FLAT_LOAD_SBYTE_D16 = 34
  FLAT_LOAD_SBYTE_D16_HI = 35
  FLAT_LOAD_SHORT_D16 = 36
  FLAT_LOAD_SHORT_D16_HI = 37
  # 32-bit atomics
  FLAT_ATOMIC_SWAP = 64
  FLAT_ATOMIC_CMPSWAP = 65
  FLAT_ATOMIC_ADD = 66
  FLAT_ATOMIC_SUB = 67
  FLAT_ATOMIC_SMIN = 68
  FLAT_ATOMIC_UMIN = 69
  FLAT_ATOMIC_SMAX = 70
  FLAT_ATOMIC_UMAX = 71
  FLAT_ATOMIC_AND = 72
  FLAT_ATOMIC_OR = 73
  FLAT_ATOMIC_XOR = 74
  FLAT_ATOMIC_INC = 75
  FLAT_ATOMIC_DEC = 76
  # float / packed-float atomics
  FLAT_ATOMIC_ADD_F32 = 77
  FLAT_ATOMIC_PK_ADD_F16 = 78
  FLAT_ATOMIC_ADD_F64 = 79
  FLAT_ATOMIC_MIN_F64 = 80
  FLAT_ATOMIC_MAX_F64 = 81
  FLAT_ATOMIC_PK_ADD_BF16 = 82
  # 64-bit ("X2") atomics
  FLAT_ATOMIC_SWAP_X2 = 96
  FLAT_ATOMIC_CMPSWAP_X2 = 97
  FLAT_ATOMIC_ADD_X2 = 98
  FLAT_ATOMIC_SUB_X2 = 99
  FLAT_ATOMIC_SMIN_X2 = 100
  FLAT_ATOMIC_UMIN_X2 = 101
  FLAT_ATOMIC_SMAX_X2 = 102
  FLAT_ATOMIC_UMAX_X2 = 103
  FLAT_ATOMIC_AND_X2 = 104
  FLAT_ATOMIC_OR_X2 = 105
  FLAT_ATOMIC_XOR_X2 = 106
  FLAT_ATOMIC_INC_X2 = 107
  FLAT_ATOMIC_DEC_X2 = 108
|
|
|
|
class GLOBALOp(ReprEnum):
  """GLOBAL (global-address memory) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the GLOBAL instruction format. The numbering mirrors FLATOp,
  with extra LDS-destination loads.
  """
  # loads
  GLOBAL_LOAD_UBYTE = 16
  GLOBAL_LOAD_SBYTE = 17
  GLOBAL_LOAD_USHORT = 18
  GLOBAL_LOAD_SSHORT = 19
  GLOBAL_LOAD_DWORD = 20
  GLOBAL_LOAD_DWORDX2 = 21
  GLOBAL_LOAD_DWORDX3 = 22
  GLOBAL_LOAD_DWORDX4 = 23
  # stores
  GLOBAL_STORE_BYTE = 24
  GLOBAL_STORE_BYTE_D16_HI = 25
  GLOBAL_STORE_SHORT = 26
  GLOBAL_STORE_SHORT_D16_HI = 27
  GLOBAL_STORE_DWORD = 28
  GLOBAL_STORE_DWORDX2 = 29
  GLOBAL_STORE_DWORDX3 = 30
  GLOBAL_STORE_DWORDX4 = 31
  # sub-dword D16 loads
  GLOBAL_LOAD_UBYTE_D16 = 32
  GLOBAL_LOAD_UBYTE_D16_HI = 33
  GLOBAL_LOAD_SBYTE_D16 = 34
  GLOBAL_LOAD_SBYTE_D16_HI = 35
  GLOBAL_LOAD_SHORT_D16 = 36
  GLOBAL_LOAD_SHORT_D16_HI = 37
  # loads that write directly to LDS
  GLOBAL_LOAD_LDS_UBYTE = 38
  GLOBAL_LOAD_LDS_SBYTE = 39
  GLOBAL_LOAD_LDS_USHORT = 40
  GLOBAL_LOAD_LDS_SSHORT = 41
  GLOBAL_LOAD_LDS_DWORD = 42
  # 32-bit atomics
  GLOBAL_ATOMIC_SWAP = 64
  GLOBAL_ATOMIC_CMPSWAP = 65
  GLOBAL_ATOMIC_ADD = 66
  GLOBAL_ATOMIC_SUB = 67
  GLOBAL_ATOMIC_SMIN = 68
  GLOBAL_ATOMIC_UMIN = 69
  GLOBAL_ATOMIC_SMAX = 70
  GLOBAL_ATOMIC_UMAX = 71
  GLOBAL_ATOMIC_AND = 72
  GLOBAL_ATOMIC_OR = 73
  GLOBAL_ATOMIC_XOR = 74
  GLOBAL_ATOMIC_INC = 75
  GLOBAL_ATOMIC_DEC = 76
  # float / packed-float atomics
  GLOBAL_ATOMIC_ADD_F32 = 77
  GLOBAL_ATOMIC_PK_ADD_F16 = 78
  GLOBAL_ATOMIC_ADD_F64 = 79
  GLOBAL_ATOMIC_MIN_F64 = 80
  GLOBAL_ATOMIC_MAX_F64 = 81
  GLOBAL_ATOMIC_PK_ADD_BF16 = 82
  # 64-bit ("X2") atomics
  GLOBAL_ATOMIC_SWAP_X2 = 96
  GLOBAL_ATOMIC_CMPSWAP_X2 = 97
  GLOBAL_ATOMIC_ADD_X2 = 98
  GLOBAL_ATOMIC_SUB_X2 = 99
  GLOBAL_ATOMIC_SMIN_X2 = 100
  GLOBAL_ATOMIC_UMIN_X2 = 101
  GLOBAL_ATOMIC_SMAX_X2 = 102
  GLOBAL_ATOMIC_UMAX_X2 = 103
  GLOBAL_ATOMIC_AND_X2 = 104
  GLOBAL_ATOMIC_OR_X2 = 105
  GLOBAL_ATOMIC_XOR_X2 = 106
  GLOBAL_ATOMIC_INC_X2 = 107
  GLOBAL_ATOMIC_DEC_X2 = 108
  # wide LDS-destination loads
  GLOBAL_LOAD_LDS_DWORDX4 = 125
  GLOBAL_LOAD_LDS_DWORDX3 = 126
|
|
|
|
class HWREG(ReprEnum):
  """Hardware register IDs for S_GETREG_B32 / S_SETREG_B32.

  Autogenerated from the AMD ISA XML; member values are the hardware
  register id field encodings.
  """
  HW_REG_MODE = 1
  HW_REG_STATUS = 2
  HW_REG_TRAPSTS = 3
  HW_REG_HW_ID = 4
  HW_REG_GPR_ALLOC = 5
  HW_REG_LDS_ALLOC = 6
  HW_REG_IB_STS = 7
  HW_REG_PC_LO = 8
  HW_REG_PC_HI = 9
  HW_REG_INST_DW0 = 10
  HW_REG_INST_DW1 = 11
  HW_REG_IB_DBG0 = 12
  HW_REG_IB_DBG1 = 13
  HW_REG_FLUSH_IB = 14
  HW_REG_SH_MEM_BASES = 15
  HW_REG_SQ_SHADER_TBA_LO = 16
  HW_REG_SQ_SHADER_TBA_HI = 17
  HW_REG_SQ_SHADER_TMA_LO = 18
  HW_REG_SQ_SHADER_TMA_HI = 19
  HW_REG_XCC_ID = 20
  HW_REG_SQ_PERF_SNAPSHOT_DATA = 21
  HW_REG_SQ_PERF_SNAPSHOT_DATA1 = 22
  HW_REG_SQ_PERF_SNAPSHOT_PC_LO = 23
  HW_REG_SQ_PERF_SNAPSHOT_PC_HI = 24
|
|
|
|
class MTBUFOp(ReprEnum):
  """MTBUF (typed buffer) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the MTBUF instruction format.
  """
  # full-precision typed loads/stores
  TBUFFER_LOAD_FORMAT_X = 0
  TBUFFER_LOAD_FORMAT_XY = 1
  TBUFFER_LOAD_FORMAT_XYZ = 2
  TBUFFER_LOAD_FORMAT_XYZW = 3
  TBUFFER_STORE_FORMAT_X = 4
  TBUFFER_STORE_FORMAT_XY = 5
  TBUFFER_STORE_FORMAT_XYZ = 6
  TBUFFER_STORE_FORMAT_XYZW = 7
  # D16 (packed 16-bit register) typed loads/stores
  TBUFFER_LOAD_FORMAT_D16_X = 8
  TBUFFER_LOAD_FORMAT_D16_XY = 9
  TBUFFER_LOAD_FORMAT_D16_XYZ = 10
  TBUFFER_LOAD_FORMAT_D16_XYZW = 11
  TBUFFER_STORE_FORMAT_D16_X = 12
  TBUFFER_STORE_FORMAT_D16_XY = 13
  TBUFFER_STORE_FORMAT_D16_XYZ = 14
  TBUFFER_STORE_FORMAT_D16_XYZW = 15
|
|
|
|
class MUBUFOp(ReprEnum):
  """MUBUF (untyped buffer) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the MUBUF instruction format.
  """
  # format (typed-layout) loads/stores
  BUFFER_LOAD_FORMAT_X = 0
  BUFFER_LOAD_FORMAT_XY = 1
  BUFFER_LOAD_FORMAT_XYZ = 2
  BUFFER_LOAD_FORMAT_XYZW = 3
  BUFFER_STORE_FORMAT_X = 4
  BUFFER_STORE_FORMAT_XY = 5
  BUFFER_STORE_FORMAT_XYZ = 6
  BUFFER_STORE_FORMAT_XYZW = 7
  BUFFER_LOAD_FORMAT_D16_X = 8
  BUFFER_LOAD_FORMAT_D16_XY = 9
  BUFFER_LOAD_FORMAT_D16_XYZ = 10
  BUFFER_LOAD_FORMAT_D16_XYZW = 11
  BUFFER_STORE_FORMAT_D16_X = 12
  BUFFER_STORE_FORMAT_D16_XY = 13
  BUFFER_STORE_FORMAT_D16_XYZ = 14
  BUFFER_STORE_FORMAT_D16_XYZW = 15
  # raw loads
  BUFFER_LOAD_UBYTE = 16
  BUFFER_LOAD_SBYTE = 17
  BUFFER_LOAD_USHORT = 18
  BUFFER_LOAD_SSHORT = 19
  BUFFER_LOAD_DWORD = 20
  BUFFER_LOAD_DWORDX2 = 21
  BUFFER_LOAD_DWORDX3 = 22
  BUFFER_LOAD_DWORDX4 = 23
  # raw stores
  BUFFER_STORE_BYTE = 24
  BUFFER_STORE_BYTE_D16_HI = 25
  BUFFER_STORE_SHORT = 26
  BUFFER_STORE_SHORT_D16_HI = 27
  BUFFER_STORE_DWORD = 28
  BUFFER_STORE_DWORDX2 = 29
  BUFFER_STORE_DWORDX3 = 30
  BUFFER_STORE_DWORDX4 = 31
  # sub-dword D16 loads
  BUFFER_LOAD_UBYTE_D16 = 32
  BUFFER_LOAD_UBYTE_D16_HI = 33
  BUFFER_LOAD_SBYTE_D16 = 34
  BUFFER_LOAD_SBYTE_D16_HI = 35
  BUFFER_LOAD_SHORT_D16 = 36
  BUFFER_LOAD_SHORT_D16_HI = 37
  BUFFER_LOAD_FORMAT_D16_HI_X = 38
  BUFFER_STORE_FORMAT_D16_HI_X = 39
  # cache control
  BUFFER_WBL2 = 40
  BUFFER_INV = 41
  # 32-bit atomics
  BUFFER_ATOMIC_SWAP = 64
  BUFFER_ATOMIC_CMPSWAP = 65
  BUFFER_ATOMIC_ADD = 66
  BUFFER_ATOMIC_SUB = 67
  BUFFER_ATOMIC_SMIN = 68
  BUFFER_ATOMIC_UMIN = 69
  BUFFER_ATOMIC_SMAX = 70
  BUFFER_ATOMIC_UMAX = 71
  BUFFER_ATOMIC_AND = 72
  BUFFER_ATOMIC_OR = 73
  BUFFER_ATOMIC_XOR = 74
  BUFFER_ATOMIC_INC = 75
  BUFFER_ATOMIC_DEC = 76
  # float / packed-float atomics
  BUFFER_ATOMIC_ADD_F32 = 77
  BUFFER_ATOMIC_PK_ADD_F16 = 78
  BUFFER_ATOMIC_ADD_F64 = 79
  BUFFER_ATOMIC_MIN_F64 = 80
  BUFFER_ATOMIC_MAX_F64 = 81
  BUFFER_ATOMIC_PK_ADD_BF16 = 82
  # 64-bit ("X2") atomics
  BUFFER_ATOMIC_SWAP_X2 = 96
  BUFFER_ATOMIC_CMPSWAP_X2 = 97
  BUFFER_ATOMIC_ADD_X2 = 98
  BUFFER_ATOMIC_SUB_X2 = 99
  BUFFER_ATOMIC_SMIN_X2 = 100
  BUFFER_ATOMIC_UMIN_X2 = 101
  BUFFER_ATOMIC_SMAX_X2 = 102
  BUFFER_ATOMIC_UMAX_X2 = 103
  BUFFER_ATOMIC_AND_X2 = 104
  BUFFER_ATOMIC_OR_X2 = 105
  BUFFER_ATOMIC_XOR_X2 = 106
  BUFFER_ATOMIC_INC_X2 = 107
  BUFFER_ATOMIC_DEC_X2 = 108
|
|
|
|
class SCRATCHOp(ReprEnum):
  """SCRATCH (private/scratch memory) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings. The numbering mirrors the FLAT/GLOBAL load-store range.
  """
  # loads
  SCRATCH_LOAD_UBYTE = 16
  SCRATCH_LOAD_SBYTE = 17
  SCRATCH_LOAD_USHORT = 18
  SCRATCH_LOAD_SSHORT = 19
  SCRATCH_LOAD_DWORD = 20
  SCRATCH_LOAD_DWORDX2 = 21
  SCRATCH_LOAD_DWORDX3 = 22
  SCRATCH_LOAD_DWORDX4 = 23
  # stores
  SCRATCH_STORE_BYTE = 24
  SCRATCH_STORE_BYTE_D16_HI = 25
  SCRATCH_STORE_SHORT = 26
  SCRATCH_STORE_SHORT_D16_HI = 27
  SCRATCH_STORE_DWORD = 28
  SCRATCH_STORE_DWORDX2 = 29
  SCRATCH_STORE_DWORDX3 = 30
  SCRATCH_STORE_DWORDX4 = 31
  # sub-dword D16 loads
  SCRATCH_LOAD_UBYTE_D16 = 32
  SCRATCH_LOAD_UBYTE_D16_HI = 33
  SCRATCH_LOAD_SBYTE_D16 = 34
  SCRATCH_LOAD_SBYTE_D16_HI = 35
  SCRATCH_LOAD_SHORT_D16 = 36
  SCRATCH_LOAD_SHORT_D16_HI = 37
  # loads that write directly to LDS
  SCRATCH_LOAD_LDS_UBYTE = 38
  SCRATCH_LOAD_LDS_SBYTE = 39
  SCRATCH_LOAD_LDS_USHORT = 40
  SCRATCH_LOAD_LDS_SSHORT = 41
  SCRATCH_LOAD_LDS_DWORD = 42
|
|
|
|
class SMEMOp(ReprEnum):
  """SMEM (scalar memory) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the SMEM instruction format.
  """
  # scalar loads
  S_LOAD_DWORD = 0
  S_LOAD_DWORDX2 = 1
  S_LOAD_DWORDX4 = 2
  S_LOAD_DWORDX8 = 3
  S_LOAD_DWORDX16 = 4
  S_SCRATCH_LOAD_DWORD = 5
  S_SCRATCH_LOAD_DWORDX2 = 6
  S_SCRATCH_LOAD_DWORDX4 = 7
  S_BUFFER_LOAD_DWORD = 8
  S_BUFFER_LOAD_DWORDX2 = 9
  S_BUFFER_LOAD_DWORDX4 = 10
  S_BUFFER_LOAD_DWORDX8 = 11
  S_BUFFER_LOAD_DWORDX16 = 12
  # scalar stores
  S_STORE_DWORD = 16
  S_STORE_DWORDX2 = 17
  S_STORE_DWORDX4 = 18
  S_SCRATCH_STORE_DWORD = 21
  S_SCRATCH_STORE_DWORDX2 = 22
  S_SCRATCH_STORE_DWORDX4 = 23
  S_BUFFER_STORE_DWORD = 24
  S_BUFFER_STORE_DWORDX2 = 25
  S_BUFFER_STORE_DWORDX4 = 26
  # cache / timing / probe ops
  S_DCACHE_INV = 32
  S_DCACHE_WB = 33
  S_DCACHE_INV_VOL = 34
  S_DCACHE_WB_VOL = 35
  S_MEMTIME = 36
  S_MEMREALTIME = 37
  S_ATC_PROBE = 38
  S_ATC_PROBE_BUFFER = 39
  S_DCACHE_DISCARD = 40
  S_DCACHE_DISCARD_X2 = 41
  # buffer atomics (32-bit)
  S_BUFFER_ATOMIC_SWAP = 64
  S_BUFFER_ATOMIC_CMPSWAP = 65
  S_BUFFER_ATOMIC_ADD = 66
  S_BUFFER_ATOMIC_SUB = 67
  S_BUFFER_ATOMIC_SMIN = 68
  S_BUFFER_ATOMIC_UMIN = 69
  S_BUFFER_ATOMIC_SMAX = 70
  S_BUFFER_ATOMIC_UMAX = 71
  S_BUFFER_ATOMIC_AND = 72
  S_BUFFER_ATOMIC_OR = 73
  S_BUFFER_ATOMIC_XOR = 74
  S_BUFFER_ATOMIC_INC = 75
  S_BUFFER_ATOMIC_DEC = 76
  # buffer atomics (64-bit, "X2")
  S_BUFFER_ATOMIC_SWAP_X2 = 96
  S_BUFFER_ATOMIC_CMPSWAP_X2 = 97
  S_BUFFER_ATOMIC_ADD_X2 = 98
  S_BUFFER_ATOMIC_SUB_X2 = 99
  S_BUFFER_ATOMIC_SMIN_X2 = 100
  S_BUFFER_ATOMIC_UMIN_X2 = 101
  S_BUFFER_ATOMIC_SMAX_X2 = 102
  S_BUFFER_ATOMIC_UMAX_X2 = 103
  S_BUFFER_ATOMIC_AND_X2 = 104
  S_BUFFER_ATOMIC_OR_X2 = 105
  S_BUFFER_ATOMIC_XOR_X2 = 106
  S_BUFFER_ATOMIC_INC_X2 = 107
  S_BUFFER_ATOMIC_DEC_X2 = 108
  # flat-address scalar atomics (32-bit)
  S_ATOMIC_SWAP = 128
  S_ATOMIC_CMPSWAP = 129
  S_ATOMIC_ADD = 130
  S_ATOMIC_SUB = 131
  S_ATOMIC_SMIN = 132
  S_ATOMIC_UMIN = 133
  S_ATOMIC_SMAX = 134
  S_ATOMIC_UMAX = 135
  S_ATOMIC_AND = 136
  S_ATOMIC_OR = 137
  S_ATOMIC_XOR = 138
  S_ATOMIC_INC = 139
  S_ATOMIC_DEC = 140
  # flat-address scalar atomics (64-bit, "X2")
  S_ATOMIC_SWAP_X2 = 160
  S_ATOMIC_CMPSWAP_X2 = 161
  S_ATOMIC_ADD_X2 = 162
  S_ATOMIC_SUB_X2 = 163
  S_ATOMIC_SMIN_X2 = 164
  S_ATOMIC_UMIN_X2 = 165
  S_ATOMIC_SMAX_X2 = 166
  S_ATOMIC_UMAX_X2 = 167
  S_ATOMIC_AND_X2 = 168
  S_ATOMIC_OR_X2 = 169
  S_ATOMIC_XOR_X2 = 170
  S_ATOMIC_INC_X2 = 171
  S_ATOMIC_DEC_X2 = 172
|
|
|
|
class SOP1Op(ReprEnum):
  """SOP1 (scalar ALU, one source operand) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the SOP1 instruction format.
  """
  S_MOV_B32 = 0
  S_MOV_B64 = 1
  S_CMOV_B32 = 2
  S_CMOV_B64 = 3
  S_NOT_B32 = 4
  S_NOT_B64 = 5
  S_WQM_B32 = 6
  S_WQM_B64 = 7
  S_BREV_B32 = 8
  S_BREV_B64 = 9
  S_BCNT0_I32_B32 = 10
  S_BCNT0_I32_B64 = 11
  S_BCNT1_I32_B32 = 12
  S_BCNT1_I32_B64 = 13
  S_FF0_I32_B32 = 14
  S_FF0_I32_B64 = 15
  S_FF1_I32_B32 = 16
  S_FF1_I32_B64 = 17
  S_FLBIT_I32_B32 = 18
  S_FLBIT_I32_B64 = 19
  S_FLBIT_I32 = 20
  S_FLBIT_I32_I64 = 21
  S_SEXT_I32_I8 = 22
  S_SEXT_I32_I16 = 23
  S_BITSET0_B32 = 24
  S_BITSET0_B64 = 25
  S_BITSET1_B32 = 26
  S_BITSET1_B64 = 27
  # program-counter manipulation
  S_GETPC_B64 = 28
  S_SETPC_B64 = 29
  S_SWAPPC_B64 = 30
  S_RFE_B64 = 31
  # EXEC-mask save/combine ops
  S_AND_SAVEEXEC_B64 = 32
  S_OR_SAVEEXEC_B64 = 33
  S_XOR_SAVEEXEC_B64 = 34
  S_ANDN2_SAVEEXEC_B64 = 35
  S_ORN2_SAVEEXEC_B64 = 36
  S_NAND_SAVEEXEC_B64 = 37
  S_NOR_SAVEEXEC_B64 = 38
  S_XNOR_SAVEEXEC_B64 = 39
  S_QUADMASK_B32 = 40
  S_QUADMASK_B64 = 41
  # relative (M0-indexed) moves
  S_MOVRELS_B32 = 42
  S_MOVRELS_B64 = 43
  S_MOVRELD_B32 = 44
  S_MOVRELD_B64 = 45
  S_CBRANCH_JOIN = 46
  S_ABS_I32 = 48
  S_SET_GPR_IDX_IDX = 50
  S_ANDN1_SAVEEXEC_B64 = 51
  S_ORN1_SAVEEXEC_B64 = 52
  S_ANDN1_WREXEC_B64 = 53
  S_ANDN2_WREXEC_B64 = 54
  S_BITREPLICATE_B64_B32 = 55
|
|
|
|
class SOP2Op(ReprEnum):
  """SOP2 (scalar ALU, two source operands) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the SOP2 instruction format.
  """
  # integer add/sub (with and without carry)
  S_ADD_U32 = 0
  S_SUB_U32 = 1
  S_ADD_I32 = 2
  S_SUB_I32 = 3
  S_ADDC_U32 = 4
  S_SUBB_U32 = 5
  # min/max and conditional select
  S_MIN_I32 = 6
  S_MIN_U32 = 7
  S_MAX_I32 = 8
  S_MAX_U32 = 9
  S_CSELECT_B32 = 10
  S_CSELECT_B64 = 11
  # bitwise logic
  S_AND_B32 = 12
  S_AND_B64 = 13
  S_OR_B32 = 14
  S_OR_B64 = 15
  S_XOR_B32 = 16
  S_XOR_B64 = 17
  S_ANDN2_B32 = 18
  S_ANDN2_B64 = 19
  S_ORN2_B32 = 20
  S_ORN2_B64 = 21
  S_NAND_B32 = 22
  S_NAND_B64 = 23
  S_NOR_B32 = 24
  S_NOR_B64 = 25
  S_XNOR_B32 = 26
  S_XNOR_B64 = 27
  # shifts
  S_LSHL_B32 = 28
  S_LSHL_B64 = 29
  S_LSHR_B32 = 30
  S_LSHR_B64 = 31
  S_ASHR_I32 = 32
  S_ASHR_I64 = 33
  # bit-field ops
  S_BFM_B32 = 34
  S_BFM_B64 = 35
  S_MUL_I32 = 36
  S_BFE_U32 = 37
  S_BFE_I32 = 38
  S_BFE_U64 = 39
  S_BFE_I64 = 40
  S_CBRANCH_G_FORK = 41
  S_ABSDIFF_I32 = 42
  S_RFE_RESTORE_B64 = 43
  # multiply-high and shift-add
  S_MUL_HI_U32 = 44
  S_MUL_HI_I32 = 45
  S_LSHL1_ADD_U32 = 46
  S_LSHL2_ADD_U32 = 47
  S_LSHL3_ADD_U32 = 48
  S_LSHL4_ADD_U32 = 49
  # 16-bit half packing
  S_PACK_LL_B32_B16 = 50
  S_PACK_LH_B32_B16 = 51
  S_PACK_HH_B32_B16 = 52
|
|
|
|
class SOPCOp(ReprEnum):
  """SOPC (scalar compare, result to SCC) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the SOPC instruction format.
  """
  # signed 32-bit compares
  S_CMP_EQ_I32 = 0
  S_CMP_LG_I32 = 1
  S_CMP_GT_I32 = 2
  S_CMP_GE_I32 = 3
  S_CMP_LT_I32 = 4
  S_CMP_LE_I32 = 5
  # unsigned 32-bit compares
  S_CMP_EQ_U32 = 6
  S_CMP_LG_U32 = 7
  S_CMP_GT_U32 = 8
  S_CMP_GE_U32 = 9
  S_CMP_LT_U32 = 10
  S_CMP_LE_U32 = 11
  # bit tests
  S_BITCMP0_B32 = 12
  S_BITCMP1_B32 = 13
  S_BITCMP0_B64 = 14
  S_BITCMP1_B64 = 15
  S_SETVSKIP = 16
  S_SET_GPR_IDX_ON = 17
  # 64-bit compares
  S_CMP_EQ_U64 = 18
  S_CMP_LG_U64 = 19
|
|
|
|
class SOPKOp(ReprEnum):
  """SOPK (scalar op with 16-bit immediate) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the SOPK instruction format.
  """
  S_MOVK_I32 = 0
  S_CMOVK_I32 = 1
  # compare-with-constant ops
  S_CMPK_EQ_I32 = 2
  S_CMPK_LG_I32 = 3
  S_CMPK_GT_I32 = 4
  S_CMPK_GE_I32 = 5
  S_CMPK_LT_I32 = 6
  S_CMPK_LE_I32 = 7
  S_CMPK_EQ_U32 = 8
  S_CMPK_LG_U32 = 9
  S_CMPK_GT_U32 = 10
  S_CMPK_GE_U32 = 11
  S_CMPK_LT_U32 = 12
  S_CMPK_LE_U32 = 13
  S_ADDK_I32 = 14
  S_MULK_I32 = 15
  S_CBRANCH_I_FORK = 16
  # hardware register access (register id encoded in the immediate)
  S_GETREG_B32 = 17
  S_SETREG_B32 = 18
  S_SETREG_IMM32_B32 = 20
  S_CALL_B64 = 21
|
|
|
|
class SOPPOp(ReprEnum):
  """SOPP (scalar program control) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the SOPP instruction format.
  """
  S_NOP = 0
  S_ENDPGM = 1
  S_BRANCH = 2
  S_WAKEUP = 3
  # conditional branches on SCC / VCC / EXEC
  S_CBRANCH_SCC0 = 4
  S_CBRANCH_SCC1 = 5
  S_CBRANCH_VCCZ = 6
  S_CBRANCH_VCCNZ = 7
  S_CBRANCH_EXECZ = 8
  S_CBRANCH_EXECNZ = 9
  S_BARRIER = 10
  S_SETKILL = 11
  S_WAITCNT = 12
  S_SETHALT = 13
  S_SLEEP = 14
  S_SETPRIO = 15
  # message / trap / debug ops
  S_SENDMSG = 16
  S_SENDMSGHALT = 17
  S_TRAP = 18
  S_ICACHE_INV = 19
  S_INCPERFLEVEL = 20
  S_DECPERFLEVEL = 21
  S_TTRACEDATA = 22
  S_CBRANCH_CDBGSYS = 23
  S_CBRANCH_CDBGUSER = 24
  S_CBRANCH_CDBGSYS_OR_USER = 25
  S_CBRANCH_CDBGSYS_AND_USER = 26
  S_ENDPGM_SAVED = 27
  S_SET_GPR_IDX_OFF = 28
  S_SET_GPR_IDX_MODE = 29
  S_ENDPGM_ORDERED_PS_DONE = 30
  S_SET_VALU_COEXEC_MODE = 31
|
|
|
|
class VOP1Op(ReprEnum):
  """VOP1 (vector ALU, one source operand) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the VOP1 (32-bit "_E32" encoding) instruction format.
  The canonical members carry the "_E32" suffix; the unsuffixed names
  defined afterwards are enum ALIASES of the same values — definition
  order matters, as the first member with a given value is canonical.
  """
  V_NOP_E32 = 0
  V_MOV_B32_E32 = 1
  V_READFIRSTLANE_B32_E32 = 2
  # conversions
  V_CVT_I32_F64_E32 = 3
  V_CVT_F64_I32_E32 = 4
  V_CVT_F32_I32_E32 = 5
  V_CVT_F32_U32_E32 = 6
  V_CVT_U32_F32_E32 = 7
  V_CVT_I32_F32_E32 = 8
  V_CVT_F16_F32_E32 = 10
  V_CVT_F32_F16_E32 = 11
  V_CVT_RPI_I32_F32_E32 = 12
  V_CVT_FLR_I32_F32_E32 = 13
  V_CVT_OFF_F32_I4_E32 = 14
  V_CVT_F32_F64_E32 = 15
  V_CVT_F64_F32_E32 = 16
  V_CVT_F32_UBYTE0_E32 = 17
  V_CVT_F32_UBYTE1_E32 = 18
  V_CVT_F32_UBYTE2_E32 = 19
  V_CVT_F32_UBYTE3_E32 = 20
  V_CVT_U32_F64_E32 = 21
  V_CVT_F64_U32_E32 = 22
  # rounding
  V_TRUNC_F64_E32 = 23
  V_CEIL_F64_E32 = 24
  V_RNDNE_F64_E32 = 25
  V_FLOOR_F64_E32 = 26
  V_FRACT_F32_E32 = 27
  V_TRUNC_F32_E32 = 28
  V_CEIL_F32_E32 = 29
  V_RNDNE_F32_E32 = 30
  V_FLOOR_F32_E32 = 31
  # transcendentals
  V_EXP_F32_E32 = 32
  V_LOG_F32_E32 = 33
  V_RCP_F32_E32 = 34
  V_RCP_IFLAG_F32_E32 = 35
  V_RSQ_F32_E32 = 36
  V_RCP_F64_E32 = 37
  V_RSQ_F64_E32 = 38
  V_SQRT_F32_E32 = 39
  V_SQRT_F64_E32 = 40
  V_SIN_F32_E32 = 41
  V_COS_F32_E32 = 42
  # bit manipulation
  V_NOT_B32_E32 = 43
  V_BFREV_B32_E32 = 44
  V_FFBH_U32_E32 = 45
  V_FFBL_B32_E32 = 46
  V_FFBH_I32_E32 = 47
  V_FREXP_EXP_I32_F64_E32 = 48
  V_FREXP_MANT_F64_E32 = 49
  V_FRACT_F64_E32 = 50
  V_FREXP_EXP_I32_F32_E32 = 51
  V_FREXP_MANT_F32_E32 = 52
  V_CLREXCP_E32 = 53
  V_SCREEN_PARTITION_4SE_B32_E32 = 55
  V_MOV_B64_E32 = 56
  # 16-bit ops
  V_CVT_F16_U16_E32 = 57
  V_CVT_F16_I16_E32 = 58
  V_CVT_U16_F16_E32 = 59
  V_CVT_I16_F16_E32 = 60
  V_RCP_F16_E32 = 61
  V_SQRT_F16_E32 = 62
  V_RSQ_F16_E32 = 63
  V_LOG_F16_E32 = 64
  V_EXP_F16_E32 = 65
  V_FREXP_MANT_F16_E32 = 66
  V_FREXP_EXP_I16_F16_E32 = 67
  V_FLOOR_F16_E32 = 68
  V_CEIL_F16_E32 = 69
  V_TRUNC_F16_E32 = 70
  V_RNDNE_F16_E32 = 71
  V_FRACT_F16_E32 = 72
  V_SIN_F16_E32 = 73
  V_COS_F16_E32 = 74
  V_EXP_LEGACY_F32_E32 = 75
  V_LOG_LEGACY_F32_E32 = 76
  V_CVT_NORM_I16_F16_E32 = 77
  V_CVT_NORM_U16_F16_E32 = 78
  V_SAT_PK_U8_I16_E32 = 79
  V_SWAP_B32_E32 = 81
  V_ACCVGPR_MOV_B32_E32 = 82
  # FP8/BF8/BF16 conversions and lane-swap ops
  V_CVT_F32_FP8_E32 = 84
  V_CVT_F32_BF8_E32 = 85
  V_CVT_PK_F32_FP8_E32 = 86
  V_CVT_PK_F32_BF8_E32 = 87
  V_PRNG_B32_E32 = 88
  V_PERMLANE16_SWAP_B32_E32 = 89
  V_PERMLANE32_SWAP_B32_E32 = 90
  V_CVT_F32_BF16_E32 = 91
  # --- aliases: unsuffixed names for the canonical "_E32" members above ---
  V_NOP = V_NOP_E32
  V_MOV_B32 = V_MOV_B32_E32
  V_READFIRSTLANE_B32 = V_READFIRSTLANE_B32_E32
  V_CVT_I32_F64 = V_CVT_I32_F64_E32
  V_CVT_F64_I32 = V_CVT_F64_I32_E32
  V_CVT_F32_I32 = V_CVT_F32_I32_E32
  V_CVT_F32_U32 = V_CVT_F32_U32_E32
  V_CVT_U32_F32 = V_CVT_U32_F32_E32
  V_CVT_I32_F32 = V_CVT_I32_F32_E32
  V_CVT_F16_F32 = V_CVT_F16_F32_E32
  V_CVT_F32_F16 = V_CVT_F32_F16_E32
  V_CVT_RPI_I32_F32 = V_CVT_RPI_I32_F32_E32
  V_CVT_FLR_I32_F32 = V_CVT_FLR_I32_F32_E32
  V_CVT_OFF_F32_I4 = V_CVT_OFF_F32_I4_E32
  V_CVT_F32_F64 = V_CVT_F32_F64_E32
  V_CVT_F64_F32 = V_CVT_F64_F32_E32
  V_CVT_F32_UBYTE0 = V_CVT_F32_UBYTE0_E32
  V_CVT_F32_UBYTE1 = V_CVT_F32_UBYTE1_E32
  V_CVT_F32_UBYTE2 = V_CVT_F32_UBYTE2_E32
  V_CVT_F32_UBYTE3 = V_CVT_F32_UBYTE3_E32
  V_CVT_U32_F64 = V_CVT_U32_F64_E32
  V_CVT_F64_U32 = V_CVT_F64_U32_E32
  V_TRUNC_F64 = V_TRUNC_F64_E32
  V_CEIL_F64 = V_CEIL_F64_E32
  V_RNDNE_F64 = V_RNDNE_F64_E32
  V_FLOOR_F64 = V_FLOOR_F64_E32
  V_FRACT_F32 = V_FRACT_F32_E32
  V_TRUNC_F32 = V_TRUNC_F32_E32
  V_CEIL_F32 = V_CEIL_F32_E32
  V_RNDNE_F32 = V_RNDNE_F32_E32
  V_FLOOR_F32 = V_FLOOR_F32_E32
  V_EXP_F32 = V_EXP_F32_E32
  V_LOG_F32 = V_LOG_F32_E32
  V_RCP_F32 = V_RCP_F32_E32
  V_RCP_IFLAG_F32 = V_RCP_IFLAG_F32_E32
  V_RSQ_F32 = V_RSQ_F32_E32
  V_RCP_F64 = V_RCP_F64_E32
  V_RSQ_F64 = V_RSQ_F64_E32
  V_SQRT_F32 = V_SQRT_F32_E32
  V_SQRT_F64 = V_SQRT_F64_E32
  V_SIN_F32 = V_SIN_F32_E32
  V_COS_F32 = V_COS_F32_E32
  V_NOT_B32 = V_NOT_B32_E32
  V_BFREV_B32 = V_BFREV_B32_E32
  V_FFBH_U32 = V_FFBH_U32_E32
  V_FFBL_B32 = V_FFBL_B32_E32
  V_FFBH_I32 = V_FFBH_I32_E32
  V_FREXP_EXP_I32_F64 = V_FREXP_EXP_I32_F64_E32
  V_FREXP_MANT_F64 = V_FREXP_MANT_F64_E32
  V_FRACT_F64 = V_FRACT_F64_E32
  V_FREXP_EXP_I32_F32 = V_FREXP_EXP_I32_F32_E32
  V_FREXP_MANT_F32 = V_FREXP_MANT_F32_E32
  V_CLREXCP = V_CLREXCP_E32
  V_SCREEN_PARTITION_4SE_B32 = V_SCREEN_PARTITION_4SE_B32_E32
  V_MOV_B64 = V_MOV_B64_E32
  V_CVT_F16_U16 = V_CVT_F16_U16_E32
  V_CVT_F16_I16 = V_CVT_F16_I16_E32
  V_CVT_U16_F16 = V_CVT_U16_F16_E32
  V_CVT_I16_F16 = V_CVT_I16_F16_E32
  V_RCP_F16 = V_RCP_F16_E32
  V_SQRT_F16 = V_SQRT_F16_E32
  V_RSQ_F16 = V_RSQ_F16_E32
  V_LOG_F16 = V_LOG_F16_E32
  V_EXP_F16 = V_EXP_F16_E32
  V_FREXP_MANT_F16 = V_FREXP_MANT_F16_E32
  V_FREXP_EXP_I16_F16 = V_FREXP_EXP_I16_F16_E32
  V_FLOOR_F16 = V_FLOOR_F16_E32
  V_CEIL_F16 = V_CEIL_F16_E32
  V_TRUNC_F16 = V_TRUNC_F16_E32
  V_RNDNE_F16 = V_RNDNE_F16_E32
  V_FRACT_F16 = V_FRACT_F16_E32
  V_SIN_F16 = V_SIN_F16_E32
  V_COS_F16 = V_COS_F16_E32
  V_EXP_LEGACY_F32 = V_EXP_LEGACY_F32_E32
  V_LOG_LEGACY_F32 = V_LOG_LEGACY_F32_E32
  V_CVT_NORM_I16_F16 = V_CVT_NORM_I16_F16_E32
  V_CVT_NORM_U16_F16 = V_CVT_NORM_U16_F16_E32
  V_SAT_PK_U8_I16 = V_SAT_PK_U8_I16_E32
  V_SWAP_B32 = V_SWAP_B32_E32
  V_ACCVGPR_MOV_B32 = V_ACCVGPR_MOV_B32_E32
  V_CVT_F32_FP8 = V_CVT_F32_FP8_E32
  V_CVT_F32_BF8 = V_CVT_F32_BF8_E32
  V_CVT_PK_F32_FP8 = V_CVT_PK_F32_FP8_E32
  V_CVT_PK_F32_BF8 = V_CVT_PK_F32_BF8_E32
  V_PRNG_B32 = V_PRNG_B32_E32
  V_PERMLANE16_SWAP_B32 = V_PERMLANE16_SWAP_B32_E32
  V_PERMLANE32_SWAP_B32 = V_PERMLANE32_SWAP_B32_E32
  V_CVT_F32_BF16 = V_CVT_F32_BF16_E32
|
|
|
|
class VOP2Op(ReprEnum):
  """VOP2 (vector ALU, two source operands) instruction opcodes.

  Autogenerated from the AMD ISA XML; member values are the raw opcode
  encodings for the VOP2 (32-bit "_E32" encoding) instruction format.
  The canonical members carry the "_E32" suffix; the unsuffixed names
  defined afterwards are enum ALIASES of the same values — definition
  order matters, as the first member with a given value is canonical.
  """
  V_CNDMASK_B32_E32 = 0
  # f32 arithmetic
  V_ADD_F32_E32 = 1
  V_SUB_F32_E32 = 2
  V_SUBREV_F32_E32 = 3
  V_FMAC_F64_E32 = 4
  V_MUL_F32_E32 = 5
  # 24-bit integer multiplies
  V_MUL_I32_I24_E32 = 6
  V_MUL_HI_I32_I24_E32 = 7
  V_MUL_U32_U24_E32 = 8
  V_MUL_HI_U32_U24_E32 = 9
  # min/max
  V_MIN_F32_E32 = 10
  V_MAX_F32_E32 = 11
  V_MIN_I32_E32 = 12
  V_MAX_I32_E32 = 13
  V_MIN_U32_E32 = 14
  V_MAX_U32_E32 = 15
  # shifts (REV: shift amount in src0)
  V_LSHRREV_B32_E32 = 16
  V_ASHRREV_I32_E32 = 17
  V_LSHLREV_B32_E32 = 18
  # bitwise logic
  V_AND_B32_E32 = 19
  V_OR_B32_E32 = 20
  V_XOR_B32_E32 = 21
  V_DOT2C_F32_BF16_E32 = 22
  V_FMAMK_F32_E32 = 23
  V_FMAAK_F32_E32 = 24
  # carry-out / carry-in integer arithmetic
  V_ADD_CO_U32_E32 = 25
  V_SUB_CO_U32_E32 = 26
  V_SUBREV_CO_U32_E32 = 27
  V_ADDC_CO_U32_E32 = 28
  V_SUBB_CO_U32_E32 = 29
  V_SUBBREV_CO_U32_E32 = 30
  # f16 arithmetic
  V_ADD_F16_E32 = 31
  V_SUB_F16_E32 = 32
  V_SUBREV_F16_E32 = 33
  V_MUL_F16_E32 = 34
  V_MAC_F16_E32 = 35
  V_MADMK_F16_E32 = 36
  V_MADAK_F16_E32 = 37
  # u16/i16 arithmetic and shifts
  V_ADD_U16_E32 = 38
  V_SUB_U16_E32 = 39
  V_SUBREV_U16_E32 = 40
  V_MUL_LO_U16_E32 = 41
  V_LSHLREV_B16_E32 = 42
  V_LSHRREV_B16_E32 = 43
  V_ASHRREV_I16_E32 = 44
  V_MAX_F16_E32 = 45
  V_MIN_F16_E32 = 46
  V_MAX_U16_E32 = 47
  V_MAX_I16_E32 = 48
  V_MIN_U16_E32 = 49
  V_MIN_I16_E32 = 50
  V_LDEXP_F16_E32 = 51
  # u32 arithmetic (no carry)
  V_ADD_U32_E32 = 52
  V_SUB_U32_E32 = 53
  V_SUBREV_U32_E32 = 54
  # dot-product accumulate ops
  V_DOT2C_F32_F16_E32 = 55
  V_DOT2C_I32_I16_E32 = 56
  V_DOT4C_I32_I8_E32 = 57
  V_DOT8C_I32_I4_E32 = 58
  V_FMAC_F32_E32 = 59
  V_PK_FMAC_F16_E32 = 60
  V_XNOR_B32_E32 = 61
  # --- aliases: unsuffixed names for the canonical "_E32" members above ---
  V_CNDMASK_B32 = V_CNDMASK_B32_E32
  V_ADD_F32 = V_ADD_F32_E32
  V_SUB_F32 = V_SUB_F32_E32
  V_SUBREV_F32 = V_SUBREV_F32_E32
  V_FMAC_F64 = V_FMAC_F64_E32
  V_MUL_F32 = V_MUL_F32_E32
  V_MUL_I32_I24 = V_MUL_I32_I24_E32
  V_MUL_HI_I32_I24 = V_MUL_HI_I32_I24_E32
  V_MUL_U32_U24 = V_MUL_U32_U24_E32
  V_MUL_HI_U32_U24 = V_MUL_HI_U32_U24_E32
  V_MIN_F32 = V_MIN_F32_E32
  V_MAX_F32 = V_MAX_F32_E32
  V_MIN_I32 = V_MIN_I32_E32
  V_MAX_I32 = V_MAX_I32_E32
  V_MIN_U32 = V_MIN_U32_E32
  V_MAX_U32 = V_MAX_U32_E32
  V_LSHRREV_B32 = V_LSHRREV_B32_E32
  V_ASHRREV_I32 = V_ASHRREV_I32_E32
  V_LSHLREV_B32 = V_LSHLREV_B32_E32
  V_AND_B32 = V_AND_B32_E32
  V_OR_B32 = V_OR_B32_E32
  V_XOR_B32 = V_XOR_B32_E32
  V_DOT2C_F32_BF16 = V_DOT2C_F32_BF16_E32
  V_FMAMK_F32 = V_FMAMK_F32_E32
  V_FMAAK_F32 = V_FMAAK_F32_E32
  V_ADD_CO_U32 = V_ADD_CO_U32_E32
  V_SUB_CO_U32 = V_SUB_CO_U32_E32
  V_SUBREV_CO_U32 = V_SUBREV_CO_U32_E32
  V_ADDC_CO_U32 = V_ADDC_CO_U32_E32
  V_SUBB_CO_U32 = V_SUBB_CO_U32_E32
  V_SUBBREV_CO_U32 = V_SUBBREV_CO_U32_E32
  V_ADD_F16 = V_ADD_F16_E32
  V_SUB_F16 = V_SUB_F16_E32
  V_SUBREV_F16 = V_SUBREV_F16_E32
  V_MUL_F16 = V_MUL_F16_E32
  V_MAC_F16 = V_MAC_F16_E32
  V_MADMK_F16 = V_MADMK_F16_E32
  V_MADAK_F16 = V_MADAK_F16_E32
  V_ADD_U16 = V_ADD_U16_E32
  V_SUB_U16 = V_SUB_U16_E32
  V_SUBREV_U16 = V_SUBREV_U16_E32
  V_MUL_LO_U16 = V_MUL_LO_U16_E32
  V_LSHLREV_B16 = V_LSHLREV_B16_E32
  V_LSHRREV_B16 = V_LSHRREV_B16_E32
  V_ASHRREV_I16 = V_ASHRREV_I16_E32
  V_MAX_F16 = V_MAX_F16_E32
  V_MIN_F16 = V_MIN_F16_E32
  V_MAX_U16 = V_MAX_U16_E32
  V_MAX_I16 = V_MAX_I16_E32
  V_MIN_U16 = V_MIN_U16_E32
  V_MIN_I16 = V_MIN_I16_E32
  V_LDEXP_F16 = V_LDEXP_F16_E32
  V_ADD_U32 = V_ADD_U32_E32
  V_SUB_U32 = V_SUB_U32_E32
  V_SUBREV_U32 = V_SUBREV_U32_E32
  V_DOT2C_F32_F16 = V_DOT2C_F32_F16_E32
  V_DOT2C_I32_I16 = V_DOT2C_I32_I16_E32
  V_DOT4C_I32_I8 = V_DOT4C_I32_I8_E32
  V_DOT8C_I32_I4 = V_DOT8C_I32_I4_E32
  V_FMAC_F32 = V_FMAC_F32_E32
  V_PK_FMAC_F16 = V_PK_FMAC_F16_E32
  V_XNOR_B32 = V_XNOR_B32_E32
|
|
|
|
class VOP3Op(ReprEnum):
  """Opcode numbers for the AMD VOP3 instruction encoding, autogenerated from AMD's ISA XML.

  Members suffixed ``_E64`` hold the raw opcode value. The unsuffixed names at the
  bottom of the class are Enum aliases of the corresponding ``_E64`` member (same
  value), so lookups by either spelling resolve to one canonical member.
  NOTE(review): value ranges appear to group VOPC-style compares (16-255), promoted
  VOP2/VOP1 ops, and VOP3-native ops -- grouping inferred from numbering, confirm
  against the AMD ISA reference if it matters.
  """
  # --- compare ops (V_CMP writes a mask, V_CMPX also writes EXEC); numbering matches VOPCOp ---
  V_CMP_CLASS_F32_E64 = 16
  V_CMPX_CLASS_F32_E64 = 17
  V_CMP_CLASS_F64_E64 = 18
  V_CMPX_CLASS_F64_E64 = 19
  V_CMP_CLASS_F16_E64 = 20
  V_CMPX_CLASS_F16_E64 = 21
  V_CMP_F_F16_E64 = 32
  V_CMP_LT_F16_E64 = 33
  V_CMP_EQ_F16_E64 = 34
  V_CMP_LE_F16_E64 = 35
  V_CMP_GT_F16_E64 = 36
  V_CMP_LG_F16_E64 = 37
  V_CMP_GE_F16_E64 = 38
  V_CMP_O_F16_E64 = 39
  V_CMP_U_F16_E64 = 40
  V_CMP_NGE_F16_E64 = 41
  V_CMP_NLG_F16_E64 = 42
  V_CMP_NGT_F16_E64 = 43
  V_CMP_NLE_F16_E64 = 44
  V_CMP_NEQ_F16_E64 = 45
  V_CMP_NLT_F16_E64 = 46
  V_CMP_TRU_F16_E64 = 47
  V_CMPX_F_F16_E64 = 48
  V_CMPX_LT_F16_E64 = 49
  V_CMPX_EQ_F16_E64 = 50
  V_CMPX_LE_F16_E64 = 51
  V_CMPX_GT_F16_E64 = 52
  V_CMPX_LG_F16_E64 = 53
  V_CMPX_GE_F16_E64 = 54
  V_CMPX_O_F16_E64 = 55
  V_CMPX_U_F16_E64 = 56
  V_CMPX_NGE_F16_E64 = 57
  V_CMPX_NLG_F16_E64 = 58
  V_CMPX_NGT_F16_E64 = 59
  V_CMPX_NLE_F16_E64 = 60
  V_CMPX_NEQ_F16_E64 = 61
  V_CMPX_NLT_F16_E64 = 62
  V_CMPX_TRU_F16_E64 = 63
  V_CMP_F_F32_E64 = 64
  V_CMP_LT_F32_E64 = 65
  V_CMP_EQ_F32_E64 = 66
  V_CMP_LE_F32_E64 = 67
  V_CMP_GT_F32_E64 = 68
  V_CMP_LG_F32_E64 = 69
  V_CMP_GE_F32_E64 = 70
  V_CMP_O_F32_E64 = 71
  V_CMP_U_F32_E64 = 72
  V_CMP_NGE_F32_E64 = 73
  V_CMP_NLG_F32_E64 = 74
  V_CMP_NGT_F32_E64 = 75
  V_CMP_NLE_F32_E64 = 76
  V_CMP_NEQ_F32_E64 = 77
  V_CMP_NLT_F32_E64 = 78
  V_CMP_TRU_F32_E64 = 79
  V_CMPX_F_F32_E64 = 80
  V_CMPX_LT_F32_E64 = 81
  V_CMPX_EQ_F32_E64 = 82
  V_CMPX_LE_F32_E64 = 83
  V_CMPX_GT_F32_E64 = 84
  V_CMPX_LG_F32_E64 = 85
  V_CMPX_GE_F32_E64 = 86
  V_CMPX_O_F32_E64 = 87
  V_CMPX_U_F32_E64 = 88
  V_CMPX_NGE_F32_E64 = 89
  V_CMPX_NLG_F32_E64 = 90
  V_CMPX_NGT_F32_E64 = 91
  V_CMPX_NLE_F32_E64 = 92
  V_CMPX_NEQ_F32_E64 = 93
  V_CMPX_NLT_F32_E64 = 94
  V_CMPX_TRU_F32_E64 = 95
  V_CMP_F_F64_E64 = 96
  V_CMP_LT_F64_E64 = 97
  V_CMP_EQ_F64_E64 = 98
  V_CMP_LE_F64_E64 = 99
  V_CMP_GT_F64_E64 = 100
  V_CMP_LG_F64_E64 = 101
  V_CMP_GE_F64_E64 = 102
  V_CMP_O_F64_E64 = 103
  V_CMP_U_F64_E64 = 104
  V_CMP_NGE_F64_E64 = 105
  V_CMP_NLG_F64_E64 = 106
  V_CMP_NGT_F64_E64 = 107
  V_CMP_NLE_F64_E64 = 108
  V_CMP_NEQ_F64_E64 = 109
  V_CMP_NLT_F64_E64 = 110
  V_CMP_TRU_F64_E64 = 111
  V_CMPX_F_F64_E64 = 112
  V_CMPX_LT_F64_E64 = 113
  V_CMPX_EQ_F64_E64 = 114
  V_CMPX_LE_F64_E64 = 115
  V_CMPX_GT_F64_E64 = 116
  V_CMPX_LG_F64_E64 = 117
  V_CMPX_GE_F64_E64 = 118
  V_CMPX_O_F64_E64 = 119
  V_CMPX_U_F64_E64 = 120
  V_CMPX_NGE_F64_E64 = 121
  V_CMPX_NLG_F64_E64 = 122
  V_CMPX_NGT_F64_E64 = 123
  V_CMPX_NLE_F64_E64 = 124
  V_CMPX_NEQ_F64_E64 = 125
  V_CMPX_NLT_F64_E64 = 126
  V_CMPX_TRU_F64_E64 = 127
  # integer compares (8 conditions per signed/unsigned width)
  V_CMP_F_I16_E64 = 160
  V_CMP_LT_I16_E64 = 161
  V_CMP_EQ_I16_E64 = 162
  V_CMP_LE_I16_E64 = 163
  V_CMP_GT_I16_E64 = 164
  V_CMP_NE_I16_E64 = 165
  V_CMP_GE_I16_E64 = 166
  V_CMP_T_I16_E64 = 167
  V_CMP_F_U16_E64 = 168
  V_CMP_LT_U16_E64 = 169
  V_CMP_EQ_U16_E64 = 170
  V_CMP_LE_U16_E64 = 171
  V_CMP_GT_U16_E64 = 172
  V_CMP_NE_U16_E64 = 173
  V_CMP_GE_U16_E64 = 174
  V_CMP_T_U16_E64 = 175
  V_CMPX_F_I16_E64 = 176
  V_CMPX_LT_I16_E64 = 177
  V_CMPX_EQ_I16_E64 = 178
  V_CMPX_LE_I16_E64 = 179
  V_CMPX_GT_I16_E64 = 180
  V_CMPX_NE_I16_E64 = 181
  V_CMPX_GE_I16_E64 = 182
  V_CMPX_T_I16_E64 = 183
  V_CMPX_F_U16_E64 = 184
  V_CMPX_LT_U16_E64 = 185
  V_CMPX_EQ_U16_E64 = 186
  V_CMPX_LE_U16_E64 = 187
  V_CMPX_GT_U16_E64 = 188
  V_CMPX_NE_U16_E64 = 189
  V_CMPX_GE_U16_E64 = 190
  V_CMPX_T_U16_E64 = 191
  V_CMP_F_I32_E64 = 192
  V_CMP_LT_I32_E64 = 193
  V_CMP_EQ_I32_E64 = 194
  V_CMP_LE_I32_E64 = 195
  V_CMP_GT_I32_E64 = 196
  V_CMP_NE_I32_E64 = 197
  V_CMP_GE_I32_E64 = 198
  V_CMP_T_I32_E64 = 199
  V_CMP_F_U32_E64 = 200
  V_CMP_LT_U32_E64 = 201
  V_CMP_EQ_U32_E64 = 202
  V_CMP_LE_U32_E64 = 203
  V_CMP_GT_U32_E64 = 204
  V_CMP_NE_U32_E64 = 205
  V_CMP_GE_U32_E64 = 206
  V_CMP_T_U32_E64 = 207
  V_CMPX_F_I32_E64 = 208
  V_CMPX_LT_I32_E64 = 209
  V_CMPX_EQ_I32_E64 = 210
  V_CMPX_LE_I32_E64 = 211
  V_CMPX_GT_I32_E64 = 212
  V_CMPX_NE_I32_E64 = 213
  V_CMPX_GE_I32_E64 = 214
  V_CMPX_T_I32_E64 = 215
  V_CMPX_F_U32_E64 = 216
  V_CMPX_LT_U32_E64 = 217
  V_CMPX_EQ_U32_E64 = 218
  V_CMPX_LE_U32_E64 = 219
  V_CMPX_GT_U32_E64 = 220
  V_CMPX_NE_U32_E64 = 221
  V_CMPX_GE_U32_E64 = 222
  V_CMPX_T_U32_E64 = 223
  V_CMP_F_I64_E64 = 224
  V_CMP_LT_I64_E64 = 225
  V_CMP_EQ_I64_E64 = 226
  V_CMP_LE_I64_E64 = 227
  V_CMP_GT_I64_E64 = 228
  V_CMP_NE_I64_E64 = 229
  V_CMP_GE_I64_E64 = 230
  V_CMP_T_I64_E64 = 231
  V_CMP_F_U64_E64 = 232
  V_CMP_LT_U64_E64 = 233
  V_CMP_EQ_U64_E64 = 234
  V_CMP_LE_U64_E64 = 235
  V_CMP_GT_U64_E64 = 236
  V_CMP_NE_U64_E64 = 237
  V_CMP_GE_U64_E64 = 238
  V_CMP_T_U64_E64 = 239
  V_CMPX_F_I64_E64 = 240
  V_CMPX_LT_I64_E64 = 241
  V_CMPX_EQ_I64_E64 = 242
  V_CMPX_LE_I64_E64 = 243
  V_CMPX_GT_I64_E64 = 244
  V_CMPX_NE_I64_E64 = 245
  V_CMPX_GE_I64_E64 = 246
  V_CMPX_T_I64_E64 = 247
  V_CMPX_F_U64_E64 = 248
  V_CMPX_LT_U64_E64 = 249
  V_CMPX_EQ_U64_E64 = 250
  V_CMPX_LE_U64_E64 = 251
  V_CMPX_GT_U64_E64 = 252
  V_CMPX_NE_U64_E64 = 253
  V_CMPX_GE_U64_E64 = 254
  V_CMPX_T_U64_E64 = 255
  # --- two-operand ALU ops (VOP2 mnemonics in their E64 form) ---
  V_CNDMASK_B32_E64 = 256
  V_ADD_F32_E64 = 257
  V_SUB_F32_E64 = 258
  V_SUBREV_F32_E64 = 259
  V_FMAC_F64_E64 = 260
  V_MUL_F32_E64 = 261
  V_MUL_I32_I24_E64 = 262
  V_MUL_HI_I32_I24_E64 = 263
  V_MUL_U32_U24_E64 = 264
  V_MUL_HI_U32_U24_E64 = 265
  V_MIN_F32_E64 = 266
  V_MAX_F32_E64 = 267
  V_MIN_I32_E64 = 268
  V_MAX_I32_E64 = 269
  V_MIN_U32_E64 = 270
  V_MAX_U32_E64 = 271
  V_LSHRREV_B32_E64 = 272
  V_ASHRREV_I32_E64 = 273
  V_LSHLREV_B32_E64 = 274
  V_AND_B32_E64 = 275
  V_OR_B32_E64 = 276
  V_XOR_B32_E64 = 277
  V_DOT2C_F32_BF16_E64 = 278
  V_ADD_F16_E64 = 287
  V_SUB_F16_E64 = 288
  V_SUBREV_F16_E64 = 289
  V_MUL_F16_E64 = 290
  V_MAC_F16_E64 = 291
  V_ADD_U16_E64 = 294
  V_SUB_U16_E64 = 295
  V_SUBREV_U16_E64 = 296
  V_MUL_LO_U16_E64 = 297
  V_LSHLREV_B16_E64 = 298
  V_LSHRREV_B16_E64 = 299
  V_ASHRREV_I16_E64 = 300
  V_MAX_F16_E64 = 301
  V_MIN_F16_E64 = 302
  V_MAX_U16_E64 = 303
  V_MAX_I16_E64 = 304
  V_MIN_U16_E64 = 305
  V_MIN_I16_E64 = 306
  V_LDEXP_F16_E64 = 307
  V_ADD_U32_E64 = 308
  V_SUB_U32_E64 = 309
  V_SUBREV_U32_E64 = 310
  V_DOT2C_F32_F16_E64 = 311
  V_DOT2C_I32_I16_E64 = 312
  V_DOT4C_I32_I8_E64 = 313
  V_DOT8C_I32_I4_E64 = 314
  V_FMAC_F32_E64 = 315
  V_PK_FMAC_F16_E64 = 316
  V_XNOR_B32_E64 = 317
  # --- one-operand ops (VOP1 mnemonics in their E64 form) ---
  V_NOP_E64 = 320
  V_MOV_B32_E64 = 321
  V_READFIRSTLANE_B32_E64 = 322
  V_CVT_I32_F64_E64 = 323
  V_CVT_F64_I32_E64 = 324
  V_CVT_F32_I32_E64 = 325
  V_CVT_F32_U32_E64 = 326
  V_CVT_U32_F32_E64 = 327
  V_CVT_I32_F32_E64 = 328
  V_CVT_F16_F32_E64 = 330
  V_CVT_F32_F16_E64 = 331
  V_CVT_RPI_I32_F32_E64 = 332
  V_CVT_FLR_I32_F32_E64 = 333
  V_CVT_OFF_F32_I4_E64 = 334
  V_CVT_F32_F64_E64 = 335
  V_CVT_F64_F32_E64 = 336
  V_CVT_F32_UBYTE0_E64 = 337
  V_CVT_F32_UBYTE1_E64 = 338
  V_CVT_F32_UBYTE2_E64 = 339
  V_CVT_F32_UBYTE3_E64 = 340
  V_CVT_U32_F64_E64 = 341
  V_CVT_F64_U32_E64 = 342
  V_TRUNC_F64_E64 = 343
  V_CEIL_F64_E64 = 344
  V_RNDNE_F64_E64 = 345
  V_FLOOR_F64_E64 = 346
  V_FRACT_F32_E64 = 347
  V_TRUNC_F32_E64 = 348
  V_CEIL_F32_E64 = 349
  V_RNDNE_F32_E64 = 350
  V_FLOOR_F32_E64 = 351
  V_EXP_F32_E64 = 352
  V_LOG_F32_E64 = 353
  V_RCP_F32_E64 = 354
  V_RCP_IFLAG_F32_E64 = 355
  V_RSQ_F32_E64 = 356
  V_RCP_F64_E64 = 357
  V_RSQ_F64_E64 = 358
  V_SQRT_F32_E64 = 359
  V_SQRT_F64_E64 = 360
  V_SIN_F32_E64 = 361
  V_COS_F32_E64 = 362
  V_NOT_B32_E64 = 363
  V_BFREV_B32_E64 = 364
  V_FFBH_U32_E64 = 365
  V_FFBL_B32_E64 = 366
  V_FFBH_I32_E64 = 367
  V_FREXP_EXP_I32_F64_E64 = 368
  V_FREXP_MANT_F64_E64 = 369
  V_FRACT_F64_E64 = 370
  V_FREXP_EXP_I32_F32_E64 = 371
  V_FREXP_MANT_F32_E64 = 372
  V_CLREXCP_E64 = 373
  V_SCREEN_PARTITION_4SE_B32_E64 = 375
  V_MOV_B64_E64 = 376
  V_CVT_F16_U16_E64 = 377
  V_CVT_F16_I16_E64 = 378
  V_CVT_U16_F16_E64 = 379
  V_CVT_I16_F16_E64 = 380
  V_RCP_F16_E64 = 381
  V_SQRT_F16_E64 = 382
  V_RSQ_F16_E64 = 383
  V_LOG_F16_E64 = 384
  V_EXP_F16_E64 = 385
  V_FREXP_MANT_F16_E64 = 386
  V_FREXP_EXP_I16_F16_E64 = 387
  V_FLOOR_F16_E64 = 388
  V_CEIL_F16_E64 = 389
  V_TRUNC_F16_E64 = 390
  V_RNDNE_F16_E64 = 391
  V_FRACT_F16_E64 = 392
  V_SIN_F16_E64 = 393
  V_COS_F16_E64 = 394
  V_EXP_LEGACY_F32_E64 = 395
  V_LOG_LEGACY_F32_E64 = 396
  V_CVT_NORM_I16_F16_E64 = 397
  V_CVT_NORM_U16_F16_E64 = 398
  V_SAT_PK_U8_I16_E64 = 399
  V_SWAP_B32_E64 = 401
  V_ACCVGPR_MOV_B32_E64 = 402
  V_CVT_F32_FP8_E64 = 404
  V_CVT_F32_BF8_E64 = 405
  V_CVT_PK_F32_FP8_E64 = 406
  V_CVT_PK_F32_BF8_E64 = 407
  V_PRNG_B32_E64 = 408
  V_PERMLANE16_SWAP_B32_E64 = 409
  V_PERMLANE32_SWAP_B32_E64 = 410
  V_CVT_F32_BF16_E64 = 411
  # --- three-operand / VOP3-native ops ---
  V_MAD_I32_I24_E64 = 450
  V_MAD_U32_U24_E64 = 451
  V_CUBEID_F32_E64 = 452
  V_CUBESC_F32_E64 = 453
  V_CUBETC_F32_E64 = 454
  V_CUBEMA_F32_E64 = 455
  V_BFE_U32_E64 = 456
  V_BFE_I32_E64 = 457
  V_BFI_B32_E64 = 458
  V_FMA_F32_E64 = 459
  V_FMA_F64_E64 = 460
  V_LERP_U8_E64 = 461
  V_ALIGNBIT_B32_E64 = 462
  V_ALIGNBYTE_B32_E64 = 463
  V_MIN3_F32_E64 = 464
  V_MIN3_I32_E64 = 465
  V_MIN3_U32_E64 = 466
  V_MAX3_F32_E64 = 467
  V_MAX3_I32_E64 = 468
  V_MAX3_U32_E64 = 469
  V_MED3_F32_E64 = 470
  V_MED3_I32_E64 = 471
  V_MED3_U32_E64 = 472
  V_SAD_U8_E64 = 473
  V_SAD_HI_U8_E64 = 474
  V_SAD_U16_E64 = 475
  V_SAD_U32_E64 = 476
  V_CVT_PK_U8_F32_E64 = 477
  V_DIV_FIXUP_F32_E64 = 478
  V_DIV_FIXUP_F64_E64 = 479
  V_DIV_FMAS_F32_E64 = 482
  V_DIV_FMAS_F64_E64 = 483
  V_MSAD_U8_E64 = 484
  V_QSAD_PK_U16_U8_E64 = 485
  V_MQSAD_PK_U16_U8_E64 = 486
  V_MQSAD_U32_U8_E64 = 487
  V_MAD_LEGACY_F16_E64 = 490
  V_MAD_LEGACY_U16_E64 = 491
  V_MAD_LEGACY_I16_E64 = 492
  V_PERM_B32_E64 = 493
  V_FMA_LEGACY_F16_E64 = 494
  V_DIV_FIXUP_LEGACY_F16_E64 = 495
  V_CVT_PKACCUM_U8_F32_E64 = 496
  V_MAD_U32_U16_E64 = 497
  V_MAD_I32_I16_E64 = 498
  V_XAD_U32_E64 = 499
  V_MIN3_F16_E64 = 500
  V_MIN3_I16_E64 = 501
  V_MIN3_U16_E64 = 502
  V_MAX3_F16_E64 = 503
  V_MAX3_I16_E64 = 504
  V_MAX3_U16_E64 = 505
  V_MED3_F16_E64 = 506
  V_MED3_I16_E64 = 507
  V_MED3_U16_E64 = 508
  V_LSHL_ADD_U32_E64 = 509
  V_ADD_LSHL_U32_E64 = 510
  V_ADD3_U32_E64 = 511
  # from here on the mnemonics have no _E64 suffix in the source XML
  V_LSHL_OR_B32 = 512
  V_AND_OR_B32 = 513
  V_OR3_B32 = 514
  V_MAD_F16 = 515
  V_MAD_U16 = 516
  V_MAD_I16 = 517
  V_FMA_F16 = 518
  V_DIV_FIXUP_F16 = 519
  V_LSHL_ADD_U64 = 520
  V_BITOP3_B16 = 563
  V_BITOP3_B32 = 564
  V_CVT_SCALEF32_PK_FP8_F32 = 565
  V_CVT_SCALEF32_PK_BF8_F32 = 566
  V_CVT_SCALEF32_SR_FP8_F32 = 567
  V_CVT_SCALEF32_SR_BF8_F32 = 568
  V_CVT_SCALEF32_PK_F32_FP8 = 569
  V_CVT_SCALEF32_PK_F32_BF8 = 570
  V_CVT_SCALEF32_F32_FP8 = 571
  V_CVT_SCALEF32_F32_BF8 = 572
  V_CVT_SCALEF32_PK_FP4_F32 = 573
  V_CVT_SCALEF32_SR_PK_FP4_F32 = 574
  V_CVT_SCALEF32_PK_F32_FP4 = 575
  V_CVT_SCALEF32_PK_FP8_F16 = 576
  V_CVT_SCALEF32_PK_BF8_F16 = 577
  V_CVT_SCALEF32_SR_FP8_F16 = 578
  V_CVT_SCALEF32_SR_BF8_F16 = 579
  V_CVT_SCALEF32_PK_FP8_BF16 = 580
  V_CVT_SCALEF32_PK_BF8_BF16 = 581
  V_CVT_SCALEF32_SR_FP8_BF16 = 582
  V_CVT_SCALEF32_SR_BF8_BF16 = 583
  V_CVT_SCALEF32_PK_F16_FP8 = 584
  V_CVT_SCALEF32_PK_F16_BF8 = 585
  V_CVT_SCALEF32_F16_FP8 = 586
  V_CVT_SCALEF32_F16_BF8 = 587
  V_CVT_SCALEF32_PK_FP4_F16 = 588
  V_CVT_SCALEF32_PK_FP4_BF16 = 589
  V_CVT_SCALEF32_SR_PK_FP4_F16 = 590
  V_CVT_SCALEF32_SR_PK_FP4_BF16 = 591
  V_CVT_SCALEF32_PK_F16_FP4 = 592
  V_CVT_SCALEF32_PK_BF16_FP4 = 593
  V_CVT_SCALEF32_2XPK16_FP6_F32 = 594
  V_CVT_SCALEF32_2XPK16_BF6_F32 = 595
  V_CVT_SCALEF32_SR_PK32_FP6_F32 = 596
  V_CVT_SCALEF32_SR_PK32_BF6_F32 = 597
  V_CVT_SCALEF32_PK32_F32_FP6 = 598
  V_CVT_SCALEF32_PK32_F32_BF6 = 599
  V_CVT_SCALEF32_PK32_FP6_F16 = 600
  V_CVT_SCALEF32_PK32_FP6_BF16 = 601
  V_CVT_SCALEF32_PK32_BF6_F16 = 602
  V_CVT_SCALEF32_PK32_BF6_BF16 = 603
  V_CVT_SCALEF32_SR_PK32_FP6_F16 = 604
  V_CVT_SCALEF32_SR_PK32_FP6_BF16 = 605
  V_CVT_SCALEF32_SR_PK32_BF6_F16 = 606
  V_CVT_SCALEF32_SR_PK32_BF6_BF16 = 607
  V_CVT_SCALEF32_PK32_F16_FP6 = 608
  V_CVT_SCALEF32_PK32_BF16_FP6 = 609
  V_CVT_SCALEF32_PK32_F16_BF6 = 610
  V_CVT_SCALEF32_PK32_BF16_BF6 = 611
  V_ASHR_PK_I8_I32 = 613
  V_ASHR_PK_U8_I32 = 614
  V_CVT_PK_F16_F32 = 615
  V_CVT_PK_BF16_F32 = 616
  V_CVT_SCALEF32_PK_BF16_FP8 = 617
  V_CVT_SCALEF32_PK_BF16_BF8 = 618
  V_ADD_F64 = 640
  V_MUL_F64 = 641
  V_MIN_F64 = 642
  V_MAX_F64 = 643
  V_LDEXP_F64 = 644
  V_MUL_LO_U32 = 645
  V_MUL_HI_U32 = 646
  V_MUL_HI_I32 = 647
  V_LDEXP_F32 = 648
  V_READLANE_B32 = 649
  V_WRITELANE_B32 = 650
  V_BCNT_U32_B32 = 651
  V_MBCNT_LO_U32_B32 = 652
  V_MBCNT_HI_U32_B32 = 653
  V_LSHLREV_B64 = 655
  V_LSHRREV_B64 = 656
  V_ASHRREV_I64 = 657
  V_TRIG_PREOP_F64 = 658
  V_BFM_B32 = 659
  V_CVT_PKNORM_I16_F32 = 660
  V_CVT_PKNORM_U16_F32 = 661
  V_CVT_PKRTZ_F16_F32 = 662
  V_CVT_PK_U16_U32 = 663
  V_CVT_PK_I16_I32 = 664
  V_CVT_PKNORM_I16_F16 = 665
  V_CVT_PKNORM_U16_F16 = 666
  V_ADD_I32 = 668
  V_SUB_I32 = 669
  V_ADD_I16 = 670
  V_SUB_I16 = 671
  V_PACK_B32_F16 = 672
  V_MUL_LEGACY_F32 = 673
  V_CVT_PK_FP8_F32 = 674
  V_CVT_PK_BF8_F32 = 675
  V_CVT_SR_FP8_F32 = 676
  V_CVT_SR_BF8_F32 = 677
  V_CVT_SR_F16_F32 = 678
  V_CVT_SR_BF16_F32 = 679
  V_MINIMUM3_F32 = 680
  V_MAXIMUM3_F32 = 681
  # --- Enum aliases: unsuffixed mnemonic -> the _E64 member with the same value ---
  V_CMP_CLASS_F32 = V_CMP_CLASS_F32_E64
  V_CMPX_CLASS_F32 = V_CMPX_CLASS_F32_E64
  V_CMP_CLASS_F64 = V_CMP_CLASS_F64_E64
  V_CMPX_CLASS_F64 = V_CMPX_CLASS_F64_E64
  V_CMP_CLASS_F16 = V_CMP_CLASS_F16_E64
  V_CMPX_CLASS_F16 = V_CMPX_CLASS_F16_E64
  V_CMP_F_F16 = V_CMP_F_F16_E64
  V_CMP_LT_F16 = V_CMP_LT_F16_E64
  V_CMP_EQ_F16 = V_CMP_EQ_F16_E64
  V_CMP_LE_F16 = V_CMP_LE_F16_E64
  V_CMP_GT_F16 = V_CMP_GT_F16_E64
  V_CMP_LG_F16 = V_CMP_LG_F16_E64
  V_CMP_GE_F16 = V_CMP_GE_F16_E64
  V_CMP_O_F16 = V_CMP_O_F16_E64
  V_CMP_U_F16 = V_CMP_U_F16_E64
  V_CMP_NGE_F16 = V_CMP_NGE_F16_E64
  V_CMP_NLG_F16 = V_CMP_NLG_F16_E64
  V_CMP_NGT_F16 = V_CMP_NGT_F16_E64
  V_CMP_NLE_F16 = V_CMP_NLE_F16_E64
  V_CMP_NEQ_F16 = V_CMP_NEQ_F16_E64
  V_CMP_NLT_F16 = V_CMP_NLT_F16_E64
  V_CMP_TRU_F16 = V_CMP_TRU_F16_E64
  V_CMPX_F_F16 = V_CMPX_F_F16_E64
  V_CMPX_LT_F16 = V_CMPX_LT_F16_E64
  V_CMPX_EQ_F16 = V_CMPX_EQ_F16_E64
  V_CMPX_LE_F16 = V_CMPX_LE_F16_E64
  V_CMPX_GT_F16 = V_CMPX_GT_F16_E64
  V_CMPX_LG_F16 = V_CMPX_LG_F16_E64
  V_CMPX_GE_F16 = V_CMPX_GE_F16_E64
  V_CMPX_O_F16 = V_CMPX_O_F16_E64
  V_CMPX_U_F16 = V_CMPX_U_F16_E64
  V_CMPX_NGE_F16 = V_CMPX_NGE_F16_E64
  V_CMPX_NLG_F16 = V_CMPX_NLG_F16_E64
  V_CMPX_NGT_F16 = V_CMPX_NGT_F16_E64
  V_CMPX_NLE_F16 = V_CMPX_NLE_F16_E64
  V_CMPX_NEQ_F16 = V_CMPX_NEQ_F16_E64
  V_CMPX_NLT_F16 = V_CMPX_NLT_F16_E64
  V_CMPX_TRU_F16 = V_CMPX_TRU_F16_E64
  V_CMP_F_F32 = V_CMP_F_F32_E64
  V_CMP_LT_F32 = V_CMP_LT_F32_E64
  V_CMP_EQ_F32 = V_CMP_EQ_F32_E64
  V_CMP_LE_F32 = V_CMP_LE_F32_E64
  V_CMP_GT_F32 = V_CMP_GT_F32_E64
  V_CMP_LG_F32 = V_CMP_LG_F32_E64
  V_CMP_GE_F32 = V_CMP_GE_F32_E64
  V_CMP_O_F32 = V_CMP_O_F32_E64
  V_CMP_U_F32 = V_CMP_U_F32_E64
  V_CMP_NGE_F32 = V_CMP_NGE_F32_E64
  V_CMP_NLG_F32 = V_CMP_NLG_F32_E64
  V_CMP_NGT_F32 = V_CMP_NGT_F32_E64
  V_CMP_NLE_F32 = V_CMP_NLE_F32_E64
  V_CMP_NEQ_F32 = V_CMP_NEQ_F32_E64
  V_CMP_NLT_F32 = V_CMP_NLT_F32_E64
  V_CMP_TRU_F32 = V_CMP_TRU_F32_E64
  V_CMPX_F_F32 = V_CMPX_F_F32_E64
  V_CMPX_LT_F32 = V_CMPX_LT_F32_E64
  V_CMPX_EQ_F32 = V_CMPX_EQ_F32_E64
  V_CMPX_LE_F32 = V_CMPX_LE_F32_E64
  V_CMPX_GT_F32 = V_CMPX_GT_F32_E64
  V_CMPX_LG_F32 = V_CMPX_LG_F32_E64
  V_CMPX_GE_F32 = V_CMPX_GE_F32_E64
  V_CMPX_O_F32 = V_CMPX_O_F32_E64
  V_CMPX_U_F32 = V_CMPX_U_F32_E64
  V_CMPX_NGE_F32 = V_CMPX_NGE_F32_E64
  V_CMPX_NLG_F32 = V_CMPX_NLG_F32_E64
  V_CMPX_NGT_F32 = V_CMPX_NGT_F32_E64
  V_CMPX_NLE_F32 = V_CMPX_NLE_F32_E64
  V_CMPX_NEQ_F32 = V_CMPX_NEQ_F32_E64
  V_CMPX_NLT_F32 = V_CMPX_NLT_F32_E64
  V_CMPX_TRU_F32 = V_CMPX_TRU_F32_E64
  V_CMP_F_F64 = V_CMP_F_F64_E64
  V_CMP_LT_F64 = V_CMP_LT_F64_E64
  V_CMP_EQ_F64 = V_CMP_EQ_F64_E64
  V_CMP_LE_F64 = V_CMP_LE_F64_E64
  V_CMP_GT_F64 = V_CMP_GT_F64_E64
  V_CMP_LG_F64 = V_CMP_LG_F64_E64
  V_CMP_GE_F64 = V_CMP_GE_F64_E64
  V_CMP_O_F64 = V_CMP_O_F64_E64
  V_CMP_U_F64 = V_CMP_U_F64_E64
  V_CMP_NGE_F64 = V_CMP_NGE_F64_E64
  V_CMP_NLG_F64 = V_CMP_NLG_F64_E64
  V_CMP_NGT_F64 = V_CMP_NGT_F64_E64
  V_CMP_NLE_F64 = V_CMP_NLE_F64_E64
  V_CMP_NEQ_F64 = V_CMP_NEQ_F64_E64
  V_CMP_NLT_F64 = V_CMP_NLT_F64_E64
  V_CMP_TRU_F64 = V_CMP_TRU_F64_E64
  V_CMPX_F_F64 = V_CMPX_F_F64_E64
  V_CMPX_LT_F64 = V_CMPX_LT_F64_E64
  V_CMPX_EQ_F64 = V_CMPX_EQ_F64_E64
  V_CMPX_LE_F64 = V_CMPX_LE_F64_E64
  V_CMPX_GT_F64 = V_CMPX_GT_F64_E64
  V_CMPX_LG_F64 = V_CMPX_LG_F64_E64
  V_CMPX_GE_F64 = V_CMPX_GE_F64_E64
  V_CMPX_O_F64 = V_CMPX_O_F64_E64
  V_CMPX_U_F64 = V_CMPX_U_F64_E64
  V_CMPX_NGE_F64 = V_CMPX_NGE_F64_E64
  V_CMPX_NLG_F64 = V_CMPX_NLG_F64_E64
  V_CMPX_NGT_F64 = V_CMPX_NGT_F64_E64
  V_CMPX_NLE_F64 = V_CMPX_NLE_F64_E64
  V_CMPX_NEQ_F64 = V_CMPX_NEQ_F64_E64
  V_CMPX_NLT_F64 = V_CMPX_NLT_F64_E64
  V_CMPX_TRU_F64 = V_CMPX_TRU_F64_E64
  V_CMP_F_I16 = V_CMP_F_I16_E64
  V_CMP_LT_I16 = V_CMP_LT_I16_E64
  V_CMP_EQ_I16 = V_CMP_EQ_I16_E64
  V_CMP_LE_I16 = V_CMP_LE_I16_E64
  V_CMP_GT_I16 = V_CMP_GT_I16_E64
  V_CMP_NE_I16 = V_CMP_NE_I16_E64
  V_CMP_GE_I16 = V_CMP_GE_I16_E64
  V_CMP_T_I16 = V_CMP_T_I16_E64
  V_CMP_F_U16 = V_CMP_F_U16_E64
  V_CMP_LT_U16 = V_CMP_LT_U16_E64
  V_CMP_EQ_U16 = V_CMP_EQ_U16_E64
  V_CMP_LE_U16 = V_CMP_LE_U16_E64
  V_CMP_GT_U16 = V_CMP_GT_U16_E64
  V_CMP_NE_U16 = V_CMP_NE_U16_E64
  V_CMP_GE_U16 = V_CMP_GE_U16_E64
  V_CMP_T_U16 = V_CMP_T_U16_E64
  V_CMPX_F_I16 = V_CMPX_F_I16_E64
  V_CMPX_LT_I16 = V_CMPX_LT_I16_E64
  V_CMPX_EQ_I16 = V_CMPX_EQ_I16_E64
  V_CMPX_LE_I16 = V_CMPX_LE_I16_E64
  V_CMPX_GT_I16 = V_CMPX_GT_I16_E64
  V_CMPX_NE_I16 = V_CMPX_NE_I16_E64
  V_CMPX_GE_I16 = V_CMPX_GE_I16_E64
  V_CMPX_T_I16 = V_CMPX_T_I16_E64
  V_CMPX_F_U16 = V_CMPX_F_U16_E64
  V_CMPX_LT_U16 = V_CMPX_LT_U16_E64
  V_CMPX_EQ_U16 = V_CMPX_EQ_U16_E64
  V_CMPX_LE_U16 = V_CMPX_LE_U16_E64
  V_CMPX_GT_U16 = V_CMPX_GT_U16_E64
  V_CMPX_NE_U16 = V_CMPX_NE_U16_E64
  V_CMPX_GE_U16 = V_CMPX_GE_U16_E64
  V_CMPX_T_U16 = V_CMPX_T_U16_E64
  V_CMP_F_I32 = V_CMP_F_I32_E64
  V_CMP_LT_I32 = V_CMP_LT_I32_E64
  V_CMP_EQ_I32 = V_CMP_EQ_I32_E64
  V_CMP_LE_I32 = V_CMP_LE_I32_E64
  V_CMP_GT_I32 = V_CMP_GT_I32_E64
  V_CMP_NE_I32 = V_CMP_NE_I32_E64
  V_CMP_GE_I32 = V_CMP_GE_I32_E64
  V_CMP_T_I32 = V_CMP_T_I32_E64
  V_CMP_F_U32 = V_CMP_F_U32_E64
  V_CMP_LT_U32 = V_CMP_LT_U32_E64
  V_CMP_EQ_U32 = V_CMP_EQ_U32_E64
  V_CMP_LE_U32 = V_CMP_LE_U32_E64
  V_CMP_GT_U32 = V_CMP_GT_U32_E64
  V_CMP_NE_U32 = V_CMP_NE_U32_E64
  V_CMP_GE_U32 = V_CMP_GE_U32_E64
  V_CMP_T_U32 = V_CMP_T_U32_E64
  V_CMPX_F_I32 = V_CMPX_F_I32_E64
  V_CMPX_LT_I32 = V_CMPX_LT_I32_E64
  V_CMPX_EQ_I32 = V_CMPX_EQ_I32_E64
  V_CMPX_LE_I32 = V_CMPX_LE_I32_E64
  V_CMPX_GT_I32 = V_CMPX_GT_I32_E64
  V_CMPX_NE_I32 = V_CMPX_NE_I32_E64
  V_CMPX_GE_I32 = V_CMPX_GE_I32_E64
  V_CMPX_T_I32 = V_CMPX_T_I32_E64
  V_CMPX_F_U32 = V_CMPX_F_U32_E64
  V_CMPX_LT_U32 = V_CMPX_LT_U32_E64
  V_CMPX_EQ_U32 = V_CMPX_EQ_U32_E64
  V_CMPX_LE_U32 = V_CMPX_LE_U32_E64
  V_CMPX_GT_U32 = V_CMPX_GT_U32_E64
  V_CMPX_NE_U32 = V_CMPX_NE_U32_E64
  V_CMPX_GE_U32 = V_CMPX_GE_U32_E64
  V_CMPX_T_U32 = V_CMPX_T_U32_E64
  V_CMP_F_I64 = V_CMP_F_I64_E64
  V_CMP_LT_I64 = V_CMP_LT_I64_E64
  V_CMP_EQ_I64 = V_CMP_EQ_I64_E64
  V_CMP_LE_I64 = V_CMP_LE_I64_E64
  V_CMP_GT_I64 = V_CMP_GT_I64_E64
  V_CMP_NE_I64 = V_CMP_NE_I64_E64
  V_CMP_GE_I64 = V_CMP_GE_I64_E64
  V_CMP_T_I64 = V_CMP_T_I64_E64
  V_CMP_F_U64 = V_CMP_F_U64_E64
  V_CMP_LT_U64 = V_CMP_LT_U64_E64
  V_CMP_EQ_U64 = V_CMP_EQ_U64_E64
  V_CMP_LE_U64 = V_CMP_LE_U64_E64
  V_CMP_GT_U64 = V_CMP_GT_U64_E64
  V_CMP_NE_U64 = V_CMP_NE_U64_E64
  V_CMP_GE_U64 = V_CMP_GE_U64_E64
  V_CMP_T_U64 = V_CMP_T_U64_E64
  V_CMPX_F_I64 = V_CMPX_F_I64_E64
  V_CMPX_LT_I64 = V_CMPX_LT_I64_E64
  V_CMPX_EQ_I64 = V_CMPX_EQ_I64_E64
  V_CMPX_LE_I64 = V_CMPX_LE_I64_E64
  V_CMPX_GT_I64 = V_CMPX_GT_I64_E64
  V_CMPX_NE_I64 = V_CMPX_NE_I64_E64
  V_CMPX_GE_I64 = V_CMPX_GE_I64_E64
  V_CMPX_T_I64 = V_CMPX_T_I64_E64
  V_CMPX_F_U64 = V_CMPX_F_U64_E64
  V_CMPX_LT_U64 = V_CMPX_LT_U64_E64
  V_CMPX_EQ_U64 = V_CMPX_EQ_U64_E64
  V_CMPX_LE_U64 = V_CMPX_LE_U64_E64
  V_CMPX_GT_U64 = V_CMPX_GT_U64_E64
  V_CMPX_NE_U64 = V_CMPX_NE_U64_E64
  V_CMPX_GE_U64 = V_CMPX_GE_U64_E64
  V_CMPX_T_U64 = V_CMPX_T_U64_E64
  V_CNDMASK_B32 = V_CNDMASK_B32_E64
  V_ADD_F32 = V_ADD_F32_E64
  V_SUB_F32 = V_SUB_F32_E64
  V_SUBREV_F32 = V_SUBREV_F32_E64
  V_FMAC_F64 = V_FMAC_F64_E64
  V_MUL_F32 = V_MUL_F32_E64
  V_MUL_I32_I24 = V_MUL_I32_I24_E64
  V_MUL_HI_I32_I24 = V_MUL_HI_I32_I24_E64
  V_MUL_U32_U24 = V_MUL_U32_U24_E64
  V_MUL_HI_U32_U24 = V_MUL_HI_U32_U24_E64
  V_MIN_F32 = V_MIN_F32_E64
  V_MAX_F32 = V_MAX_F32_E64
  V_MIN_I32 = V_MIN_I32_E64
  V_MAX_I32 = V_MAX_I32_E64
  V_MIN_U32 = V_MIN_U32_E64
  V_MAX_U32 = V_MAX_U32_E64
  V_LSHRREV_B32 = V_LSHRREV_B32_E64
  V_ASHRREV_I32 = V_ASHRREV_I32_E64
  V_LSHLREV_B32 = V_LSHLREV_B32_E64
  V_AND_B32 = V_AND_B32_E64
  V_OR_B32 = V_OR_B32_E64
  V_XOR_B32 = V_XOR_B32_E64
  V_DOT2C_F32_BF16 = V_DOT2C_F32_BF16_E64
  V_ADD_F16 = V_ADD_F16_E64
  V_SUB_F16 = V_SUB_F16_E64
  V_SUBREV_F16 = V_SUBREV_F16_E64
  V_MUL_F16 = V_MUL_F16_E64
  V_MAC_F16 = V_MAC_F16_E64
  V_ADD_U16 = V_ADD_U16_E64
  V_SUB_U16 = V_SUB_U16_E64
  V_SUBREV_U16 = V_SUBREV_U16_E64
  V_MUL_LO_U16 = V_MUL_LO_U16_E64
  V_LSHLREV_B16 = V_LSHLREV_B16_E64
  V_LSHRREV_B16 = V_LSHRREV_B16_E64
  V_ASHRREV_I16 = V_ASHRREV_I16_E64
  V_MAX_F16 = V_MAX_F16_E64
  V_MIN_F16 = V_MIN_F16_E64
  V_MAX_U16 = V_MAX_U16_E64
  V_MAX_I16 = V_MAX_I16_E64
  V_MIN_U16 = V_MIN_U16_E64
  V_MIN_I16 = V_MIN_I16_E64
  V_LDEXP_F16 = V_LDEXP_F16_E64
  V_ADD_U32 = V_ADD_U32_E64
  V_SUB_U32 = V_SUB_U32_E64
  V_SUBREV_U32 = V_SUBREV_U32_E64
  V_DOT2C_F32_F16 = V_DOT2C_F32_F16_E64
  V_DOT2C_I32_I16 = V_DOT2C_I32_I16_E64
  V_DOT4C_I32_I8 = V_DOT4C_I32_I8_E64
  V_DOT8C_I32_I4 = V_DOT8C_I32_I4_E64
  V_FMAC_F32 = V_FMAC_F32_E64
  V_PK_FMAC_F16 = V_PK_FMAC_F16_E64
  V_XNOR_B32 = V_XNOR_B32_E64
  V_NOP = V_NOP_E64
  V_MOV_B32 = V_MOV_B32_E64
  V_READFIRSTLANE_B32 = V_READFIRSTLANE_B32_E64
  V_CVT_I32_F64 = V_CVT_I32_F64_E64
  V_CVT_F64_I32 = V_CVT_F64_I32_E64
  V_CVT_F32_I32 = V_CVT_F32_I32_E64
  V_CVT_F32_U32 = V_CVT_F32_U32_E64
  V_CVT_U32_F32 = V_CVT_U32_F32_E64
  V_CVT_I32_F32 = V_CVT_I32_F32_E64
  V_CVT_F16_F32 = V_CVT_F16_F32_E64
  V_CVT_F32_F16 = V_CVT_F32_F16_E64
  V_CVT_RPI_I32_F32 = V_CVT_RPI_I32_F32_E64
  V_CVT_FLR_I32_F32 = V_CVT_FLR_I32_F32_E64
  V_CVT_OFF_F32_I4 = V_CVT_OFF_F32_I4_E64
  V_CVT_F32_F64 = V_CVT_F32_F64_E64
  V_CVT_F64_F32 = V_CVT_F64_F32_E64
  V_CVT_F32_UBYTE0 = V_CVT_F32_UBYTE0_E64
  V_CVT_F32_UBYTE1 = V_CVT_F32_UBYTE1_E64
  V_CVT_F32_UBYTE2 = V_CVT_F32_UBYTE2_E64
  V_CVT_F32_UBYTE3 = V_CVT_F32_UBYTE3_E64
  V_CVT_U32_F64 = V_CVT_U32_F64_E64
  V_CVT_F64_U32 = V_CVT_F64_U32_E64
  V_TRUNC_F64 = V_TRUNC_F64_E64
  V_CEIL_F64 = V_CEIL_F64_E64
  V_RNDNE_F64 = V_RNDNE_F64_E64
  V_FLOOR_F64 = V_FLOOR_F64_E64
  V_FRACT_F32 = V_FRACT_F32_E64
  V_TRUNC_F32 = V_TRUNC_F32_E64
  V_CEIL_F32 = V_CEIL_F32_E64
  V_RNDNE_F32 = V_RNDNE_F32_E64
  V_FLOOR_F32 = V_FLOOR_F32_E64
  V_EXP_F32 = V_EXP_F32_E64
  V_LOG_F32 = V_LOG_F32_E64
  V_RCP_F32 = V_RCP_F32_E64
  V_RCP_IFLAG_F32 = V_RCP_IFLAG_F32_E64
  V_RSQ_F32 = V_RSQ_F32_E64
  V_RCP_F64 = V_RCP_F64_E64
  V_RSQ_F64 = V_RSQ_F64_E64
  V_SQRT_F32 = V_SQRT_F32_E64
  V_SQRT_F64 = V_SQRT_F64_E64
  V_SIN_F32 = V_SIN_F32_E64
  V_COS_F32 = V_COS_F32_E64
  V_NOT_B32 = V_NOT_B32_E64
  V_BFREV_B32 = V_BFREV_B32_E64
  V_FFBH_U32 = V_FFBH_U32_E64
  V_FFBL_B32 = V_FFBL_B32_E64
  V_FFBH_I32 = V_FFBH_I32_E64
  V_FREXP_EXP_I32_F64 = V_FREXP_EXP_I32_F64_E64
  V_FREXP_MANT_F64 = V_FREXP_MANT_F64_E64
  V_FRACT_F64 = V_FRACT_F64_E64
  V_FREXP_EXP_I32_F32 = V_FREXP_EXP_I32_F32_E64
  V_FREXP_MANT_F32 = V_FREXP_MANT_F32_E64
  V_CLREXCP = V_CLREXCP_E64
  V_SCREEN_PARTITION_4SE_B32 = V_SCREEN_PARTITION_4SE_B32_E64
  V_MOV_B64 = V_MOV_B64_E64
  V_CVT_F16_U16 = V_CVT_F16_U16_E64
  V_CVT_F16_I16 = V_CVT_F16_I16_E64
  V_CVT_U16_F16 = V_CVT_U16_F16_E64
  V_CVT_I16_F16 = V_CVT_I16_F16_E64
  V_RCP_F16 = V_RCP_F16_E64
  V_SQRT_F16 = V_SQRT_F16_E64
  V_RSQ_F16 = V_RSQ_F16_E64
  V_LOG_F16 = V_LOG_F16_E64
  V_EXP_F16 = V_EXP_F16_E64
  V_FREXP_MANT_F16 = V_FREXP_MANT_F16_E64
  V_FREXP_EXP_I16_F16 = V_FREXP_EXP_I16_F16_E64
  V_FLOOR_F16 = V_FLOOR_F16_E64
  V_CEIL_F16 = V_CEIL_F16_E64
  V_TRUNC_F16 = V_TRUNC_F16_E64
  V_RNDNE_F16 = V_RNDNE_F16_E64
  V_FRACT_F16 = V_FRACT_F16_E64
  V_SIN_F16 = V_SIN_F16_E64
  V_COS_F16 = V_COS_F16_E64
  V_EXP_LEGACY_F32 = V_EXP_LEGACY_F32_E64
  V_LOG_LEGACY_F32 = V_LOG_LEGACY_F32_E64
  V_CVT_NORM_I16_F16 = V_CVT_NORM_I16_F16_E64
  V_CVT_NORM_U16_F16 = V_CVT_NORM_U16_F16_E64
  V_SAT_PK_U8_I16 = V_SAT_PK_U8_I16_E64
  V_SWAP_B32 = V_SWAP_B32_E64
  V_ACCVGPR_MOV_B32 = V_ACCVGPR_MOV_B32_E64
  V_CVT_F32_FP8 = V_CVT_F32_FP8_E64
  V_CVT_F32_BF8 = V_CVT_F32_BF8_E64
  V_CVT_PK_F32_FP8 = V_CVT_PK_F32_FP8_E64
  V_CVT_PK_F32_BF8 = V_CVT_PK_F32_BF8_E64
  V_PRNG_B32 = V_PRNG_B32_E64
  V_PERMLANE16_SWAP_B32 = V_PERMLANE16_SWAP_B32_E64
  V_PERMLANE32_SWAP_B32 = V_PERMLANE32_SWAP_B32_E64
  V_CVT_F32_BF16 = V_CVT_F32_BF16_E64
  V_MAD_I32_I24 = V_MAD_I32_I24_E64
  V_MAD_U32_U24 = V_MAD_U32_U24_E64
  V_CUBEID_F32 = V_CUBEID_F32_E64
  V_CUBESC_F32 = V_CUBESC_F32_E64
  V_CUBETC_F32 = V_CUBETC_F32_E64
  V_CUBEMA_F32 = V_CUBEMA_F32_E64
  V_BFE_U32 = V_BFE_U32_E64
  V_BFE_I32 = V_BFE_I32_E64
  V_BFI_B32 = V_BFI_B32_E64
  V_FMA_F32 = V_FMA_F32_E64
  V_FMA_F64 = V_FMA_F64_E64
  V_LERP_U8 = V_LERP_U8_E64
  V_ALIGNBIT_B32 = V_ALIGNBIT_B32_E64
  V_ALIGNBYTE_B32 = V_ALIGNBYTE_B32_E64
  V_MIN3_F32 = V_MIN3_F32_E64
  V_MIN3_I32 = V_MIN3_I32_E64
  V_MIN3_U32 = V_MIN3_U32_E64
  V_MAX3_F32 = V_MAX3_F32_E64
  V_MAX3_I32 = V_MAX3_I32_E64
  V_MAX3_U32 = V_MAX3_U32_E64
  V_MED3_F32 = V_MED3_F32_E64
  V_MED3_I32 = V_MED3_I32_E64
  V_MED3_U32 = V_MED3_U32_E64
  V_SAD_U8 = V_SAD_U8_E64
  V_SAD_HI_U8 = V_SAD_HI_U8_E64
  V_SAD_U16 = V_SAD_U16_E64
  V_SAD_U32 = V_SAD_U32_E64
  V_CVT_PK_U8_F32 = V_CVT_PK_U8_F32_E64
  V_DIV_FIXUP_F32 = V_DIV_FIXUP_F32_E64
  V_DIV_FIXUP_F64 = V_DIV_FIXUP_F64_E64
  V_DIV_FMAS_F32 = V_DIV_FMAS_F32_E64
  V_DIV_FMAS_F64 = V_DIV_FMAS_F64_E64
  V_MSAD_U8 = V_MSAD_U8_E64
  V_QSAD_PK_U16_U8 = V_QSAD_PK_U16_U8_E64
  V_MQSAD_PK_U16_U8 = V_MQSAD_PK_U16_U8_E64
  V_MQSAD_U32_U8 = V_MQSAD_U32_U8_E64
  V_MAD_LEGACY_F16 = V_MAD_LEGACY_F16_E64
  V_MAD_LEGACY_U16 = V_MAD_LEGACY_U16_E64
  V_MAD_LEGACY_I16 = V_MAD_LEGACY_I16_E64
  V_PERM_B32 = V_PERM_B32_E64
  V_FMA_LEGACY_F16 = V_FMA_LEGACY_F16_E64
  V_DIV_FIXUP_LEGACY_F16 = V_DIV_FIXUP_LEGACY_F16_E64
  V_CVT_PKACCUM_U8_F32 = V_CVT_PKACCUM_U8_F32_E64
  V_MAD_U32_U16 = V_MAD_U32_U16_E64
  V_MAD_I32_I16 = V_MAD_I32_I16_E64
  V_XAD_U32 = V_XAD_U32_E64
  V_MIN3_F16 = V_MIN3_F16_E64
  V_MIN3_I16 = V_MIN3_I16_E64
  V_MIN3_U16 = V_MIN3_U16_E64
  V_MAX3_F16 = V_MAX3_F16_E64
  V_MAX3_I16 = V_MAX3_I16_E64
  V_MAX3_U16 = V_MAX3_U16_E64
  V_MED3_F16 = V_MED3_F16_E64
  V_MED3_I16 = V_MED3_I16_E64
  V_MED3_U16 = V_MED3_U16_E64
  V_LSHL_ADD_U32 = V_LSHL_ADD_U32_E64
  V_ADD_LSHL_U32 = V_ADD_LSHL_U32_E64
  V_ADD3_U32 = V_ADD3_U32_E64
|
|
|
|
class VOP3POp(ReprEnum):
  """Opcode numbers for the AMD VOP3P encoding, autogenerated from AMD's ISA XML.

  Covers packed 16-bit ops (``V_PK_*``), mixed-precision dot products (``V_DOT*``),
  and matrix ops (``V_MFMA_*`` / sparse ``V_SMFMAC_*``).
  NOTE(review): MFMA/SMFMAC presence suggests a CDNA-family target -- confirm
  against the ISA guide for the specific architecture this table was generated from.
  """
  V_PK_MAD_I16 = 0
  V_PK_MUL_LO_U16 = 1
  V_PK_ADD_I16 = 2
  V_PK_SUB_I16 = 3
  V_PK_LSHLREV_B16 = 4
  V_PK_LSHRREV_B16 = 5
  V_PK_ASHRREV_I16 = 6
  V_PK_MAX_I16 = 7
  V_PK_MIN_I16 = 8
  V_PK_MAD_U16 = 9
  V_PK_ADD_U16 = 10
  V_PK_SUB_U16 = 11
  V_PK_MAX_U16 = 12
  V_PK_MIN_U16 = 13
  V_PK_FMA_F16 = 14
  V_PK_ADD_F16 = 15
  V_PK_MUL_F16 = 16
  V_PK_MIN_F16 = 17
  V_PK_MAX_F16 = 18
  V_DOT2_F32_BF16 = 26
  V_PK_MINIMUM3_F16 = 27
  V_PK_MAXIMUM3_F16 = 28
  V_MAD_MIX_F32 = 32
  V_MAD_MIXLO_F16 = 33
  V_MAD_MIXHI_F16 = 34
  V_DOT2_F32_F16 = 35
  V_DOT2_I32_I16 = 38
  V_DOT2_U32_U16 = 39
  V_DOT4_I32_I8 = 40
  V_DOT4_U32_U8 = 41
  V_DOT8_I32_I4 = 42
  V_DOT8_U32_U4 = 43
  V_MFMA_LD_SCALE_B32 = 44
  V_MFMA_F32_16X16X128_F8F6F4 = 45
  V_MFMA_F32_32X32X64_F8F6F4 = 46
  V_PK_FMA_F32 = 48
  V_PK_MUL_F32 = 49
  V_PK_ADD_F32 = 50
  V_PK_MOV_B32 = 51
  V_MFMA_F32_16X16X32_BF16 = 53
  V_MFMA_I32_16X16X64_I8 = 54
  V_MFMA_F32_32X32X16_BF16 = 55
  V_MFMA_I32_32X32X32_I8 = 56
  V_SMFMAC_F32_16X16X64_BF16 = 57
  V_SMFMAC_I32_16X16X128_I8 = 58
  V_SMFMAC_F32_16X16X128_BF8_BF8 = 59
  V_SMFMAC_F32_16X16X128_BF8_FP8 = 60
  V_SMFMAC_F32_16X16X128_FP8_BF8 = 61
  V_MFMA_F32_16X16X8_XF32 = 62
  V_MFMA_F32_32X32X4_XF32 = 63
  V_MFMA_F32_32X32X1_2B_F32 = 64
  V_MFMA_F32_16X16X1_4B_F32 = 65
  V_MFMA_F32_4X4X1_16B_F32 = 66
  V_SMFMAC_F32_16X16X128_FP8_FP8 = 67
  V_MFMA_F32_32X32X2_F32 = 68
  V_MFMA_F32_16X16X4_F32 = 69
  V_SMFMAC_F32_32X32X32_BF16 = 70
  V_SMFMAC_I32_32X32X64_I8 = 71
  V_MFMA_F32_32X32X4_2B_F16 = 72
  V_MFMA_F32_16X16X4_4B_F16 = 73
  V_MFMA_F32_4X4X4_16B_F16 = 74
  V_SMFMAC_F32_32X32X64_BF8_BF8 = 75
  V_MFMA_F32_32X32X8_F16 = 76
  V_MFMA_F32_16X16X16_F16 = 77
  V_SMFMAC_F32_32X32X64_BF8_FP8 = 78
  V_SMFMAC_F32_32X32X64_FP8_BF8 = 79
  V_MFMA_I32_32X32X4_2B_I8 = 80
  V_MFMA_I32_16X16X4_4B_I8 = 81
  V_MFMA_I32_4X4X4_16B_I8 = 82
  V_SMFMAC_F32_32X32X64_FP8_FP8 = 83
  V_MFMA_F32_16X16X32_F16 = 84
  V_MFMA_F32_32X32X16_F16 = 85
  V_MFMA_I32_32X32X16_I8 = 86
  V_MFMA_I32_16X16X32_I8 = 87
  V_ACCVGPR_READ = 88
  V_ACCVGPR_WRITE = 89
  V_SMFMAC_F32_16X16X64_F16 = 90
  V_SMFMAC_F32_32X32X32_F16 = 91
  V_MFMA_F32_32X32X4_2B_BF16 = 93
  V_MFMA_F32_16X16X4_4B_BF16 = 94
  V_MFMA_F32_4X4X4_16B_BF16 = 95
  V_MFMA_F32_32X32X8_BF16 = 96
  V_MFMA_F32_16X16X16_BF16 = 97
  V_SMFMAC_F32_16X16X32_F16 = 98
  V_SMFMAC_F32_32X32X16_F16 = 100
  V_SMFMAC_F32_16X16X32_BF16 = 102
  V_SMFMAC_F32_32X32X16_BF16 = 104
  V_SMFMAC_I32_16X16X64_I8 = 106
  V_SMFMAC_I32_32X32X32_I8 = 108
  V_MFMA_F64_16X16X4_F64 = 110
  V_MFMA_F64_4X4X4_4B_F64 = 111
  V_MFMA_F32_16X16X32_BF8_BF8 = 112
  V_MFMA_F32_16X16X32_BF8_FP8 = 113
  V_MFMA_F32_16X16X32_FP8_BF8 = 114
  V_MFMA_F32_16X16X32_FP8_FP8 = 115
  V_MFMA_F32_32X32X16_BF8_BF8 = 116
  V_MFMA_F32_32X32X16_BF8_FP8 = 117
  V_MFMA_F32_32X32X16_FP8_BF8 = 118
  V_MFMA_F32_32X32X16_FP8_FP8 = 119
  V_SMFMAC_F32_16X16X64_BF8_BF8 = 120
  V_SMFMAC_F32_16X16X64_BF8_FP8 = 121
  V_SMFMAC_F32_16X16X64_FP8_BF8 = 122
  V_SMFMAC_F32_16X16X64_FP8_FP8 = 123
  V_SMFMAC_F32_32X32X32_BF8_BF8 = 124
  V_SMFMAC_F32_32X32X32_BF8_FP8 = 125
  V_SMFMAC_F32_32X32X32_FP8_BF8 = 126
  V_SMFMAC_F32_32X32X32_FP8_FP8 = 127
|
|
|
|
class VOP3PX2Op(ReprEnum):
  """Opcode numbers for the VOP3PX2 encoding, autogenerated from AMD's ISA XML.

  Scaled MFMA variants; the values match the corresponding plain MFMA opcodes
  in VOP3POp (45 and 46).
  """
  V_MFMA_SCALE_F32_16X16X128_F8F6F4 = 45
  V_MFMA_SCALE_F32_32X32X64_F8F6F4 = 46
|
|
|
|
class VOP3SDOp(ReprEnum):
  """Opcodes for the VOP3 scalar-destination (SD) encoding.

  These ops produce/consume an extra scalar operand (carry bits for the *_CO_*
  adds/subs, the VCC result for DIV_SCALE, per the instruction names).
  Autogenerated from the AMD ISA XML; values are the raw opcode encodings.
  """
  # Carry-out add/sub family.
  V_ADD_CO_U32 = 281
  V_SUB_CO_U32 = 282
  V_SUBREV_CO_U32 = 283
  # Carry-in + carry-out variants.
  V_ADDC_CO_U32 = 284
  V_SUBB_CO_U32 = 285
  V_SUBBREV_CO_U32 = 286
  # Division scaling helpers.
  V_DIV_SCALE_F32 = 480
  V_DIV_SCALE_F64 = 481
  # 64-bit multiply-add with 32-bit inputs.
  V_MAD_U64_U32 = 488
  V_MAD_I64_I32 = 489
|
|
|
|
class VOPCOp(ReprEnum):
  """Opcodes for the VOPC (vector compare) encoding.

  Autogenerated from the AMD ISA XML; values are the raw 8-bit opcode field.
  Members carry an _E32 suffix (the 32-bit VOPC encoding); suffix-free names at
  the bottom are enum aliases of the same values for convenience.
  The V_CMPX_* forms additionally write the EXEC mask (per the ISA naming);
  opcode layout groups by operand type: F16/F32/F64 float compares, then
  I16/U16/I32/U32/I64/U64 integer compares, 16 (float) or 8 (int) ops each.
  """
  # Float-class tests (opcodes 16-21).
  V_CMP_CLASS_F32_E32 = 16
  V_CMPX_CLASS_F32_E32 = 17
  V_CMP_CLASS_F64_E32 = 18
  V_CMPX_CLASS_F64_E32 = 19
  V_CMP_CLASS_F16_E32 = 20
  V_CMPX_CLASS_F16_E32 = 21
  # F16 compares (32-47): F,LT,EQ,LE,GT,LG,GE,O,U,NGE,NLG,NGT,NLE,NEQ,NLT,TRU.
  V_CMP_F_F16_E32 = 32
  V_CMP_LT_F16_E32 = 33
  V_CMP_EQ_F16_E32 = 34
  V_CMP_LE_F16_E32 = 35
  V_CMP_GT_F16_E32 = 36
  V_CMP_LG_F16_E32 = 37
  V_CMP_GE_F16_E32 = 38
  V_CMP_O_F16_E32 = 39
  V_CMP_U_F16_E32 = 40
  V_CMP_NGE_F16_E32 = 41
  V_CMP_NLG_F16_E32 = 42
  V_CMP_NGT_F16_E32 = 43
  V_CMP_NLE_F16_E32 = 44
  V_CMP_NEQ_F16_E32 = 45
  V_CMP_NLT_F16_E32 = 46
  V_CMP_TRU_F16_E32 = 47
  # F16 compares with EXEC write (48-63).
  V_CMPX_F_F16_E32 = 48
  V_CMPX_LT_F16_E32 = 49
  V_CMPX_EQ_F16_E32 = 50
  V_CMPX_LE_F16_E32 = 51
  V_CMPX_GT_F16_E32 = 52
  V_CMPX_LG_F16_E32 = 53
  V_CMPX_GE_F16_E32 = 54
  V_CMPX_O_F16_E32 = 55
  V_CMPX_U_F16_E32 = 56
  V_CMPX_NGE_F16_E32 = 57
  V_CMPX_NLG_F16_E32 = 58
  V_CMPX_NGT_F16_E32 = 59
  V_CMPX_NLE_F16_E32 = 60
  V_CMPX_NEQ_F16_E32 = 61
  V_CMPX_NLT_F16_E32 = 62
  V_CMPX_TRU_F16_E32 = 63
  # F32 compares (64-79).
  V_CMP_F_F32_E32 = 64
  V_CMP_LT_F32_E32 = 65
  V_CMP_EQ_F32_E32 = 66
  V_CMP_LE_F32_E32 = 67
  V_CMP_GT_F32_E32 = 68
  V_CMP_LG_F32_E32 = 69
  V_CMP_GE_F32_E32 = 70
  V_CMP_O_F32_E32 = 71
  V_CMP_U_F32_E32 = 72
  V_CMP_NGE_F32_E32 = 73
  V_CMP_NLG_F32_E32 = 74
  V_CMP_NGT_F32_E32 = 75
  V_CMP_NLE_F32_E32 = 76
  V_CMP_NEQ_F32_E32 = 77
  V_CMP_NLT_F32_E32 = 78
  V_CMP_TRU_F32_E32 = 79
  # F32 compares with EXEC write (80-95).
  V_CMPX_F_F32_E32 = 80
  V_CMPX_LT_F32_E32 = 81
  V_CMPX_EQ_F32_E32 = 82
  V_CMPX_LE_F32_E32 = 83
  V_CMPX_GT_F32_E32 = 84
  V_CMPX_LG_F32_E32 = 85
  V_CMPX_GE_F32_E32 = 86
  V_CMPX_O_F32_E32 = 87
  V_CMPX_U_F32_E32 = 88
  V_CMPX_NGE_F32_E32 = 89
  V_CMPX_NLG_F32_E32 = 90
  V_CMPX_NGT_F32_E32 = 91
  V_CMPX_NLE_F32_E32 = 92
  V_CMPX_NEQ_F32_E32 = 93
  V_CMPX_NLT_F32_E32 = 94
  V_CMPX_TRU_F32_E32 = 95
  # F64 compares (96-111).
  V_CMP_F_F64_E32 = 96
  V_CMP_LT_F64_E32 = 97
  V_CMP_EQ_F64_E32 = 98
  V_CMP_LE_F64_E32 = 99
  V_CMP_GT_F64_E32 = 100
  V_CMP_LG_F64_E32 = 101
  V_CMP_GE_F64_E32 = 102
  V_CMP_O_F64_E32 = 103
  V_CMP_U_F64_E32 = 104
  V_CMP_NGE_F64_E32 = 105
  V_CMP_NLG_F64_E32 = 106
  V_CMP_NGT_F64_E32 = 107
  V_CMP_NLE_F64_E32 = 108
  V_CMP_NEQ_F64_E32 = 109
  V_CMP_NLT_F64_E32 = 110
  V_CMP_TRU_F64_E32 = 111
  # F64 compares with EXEC write (112-127).
  V_CMPX_F_F64_E32 = 112
  V_CMPX_LT_F64_E32 = 113
  V_CMPX_EQ_F64_E32 = 114
  V_CMPX_LE_F64_E32 = 115
  V_CMPX_GT_F64_E32 = 116
  V_CMPX_LG_F64_E32 = 117
  V_CMPX_GE_F64_E32 = 118
  V_CMPX_O_F64_E32 = 119
  V_CMPX_U_F64_E32 = 120
  V_CMPX_NGE_F64_E32 = 121
  V_CMPX_NLG_F64_E32 = 122
  V_CMPX_NGT_F64_E32 = 123
  V_CMPX_NLE_F64_E32 = 124
  V_CMPX_NEQ_F64_E32 = 125
  V_CMPX_NLT_F64_E32 = 126
  V_CMPX_TRU_F64_E32 = 127
  # I16 compares (160-167): F,LT,EQ,LE,GT,NE,GE,T.
  V_CMP_F_I16_E32 = 160
  V_CMP_LT_I16_E32 = 161
  V_CMP_EQ_I16_E32 = 162
  V_CMP_LE_I16_E32 = 163
  V_CMP_GT_I16_E32 = 164
  V_CMP_NE_I16_E32 = 165
  V_CMP_GE_I16_E32 = 166
  V_CMP_T_I16_E32 = 167
  # U16 compares (168-175).
  V_CMP_F_U16_E32 = 168
  V_CMP_LT_U16_E32 = 169
  V_CMP_EQ_U16_E32 = 170
  V_CMP_LE_U16_E32 = 171
  V_CMP_GT_U16_E32 = 172
  V_CMP_NE_U16_E32 = 173
  V_CMP_GE_U16_E32 = 174
  V_CMP_T_U16_E32 = 175
  # I16/U16 compares with EXEC write (176-191).
  V_CMPX_F_I16_E32 = 176
  V_CMPX_LT_I16_E32 = 177
  V_CMPX_EQ_I16_E32 = 178
  V_CMPX_LE_I16_E32 = 179
  V_CMPX_GT_I16_E32 = 180
  V_CMPX_NE_I16_E32 = 181
  V_CMPX_GE_I16_E32 = 182
  V_CMPX_T_I16_E32 = 183
  V_CMPX_F_U16_E32 = 184
  V_CMPX_LT_U16_E32 = 185
  V_CMPX_EQ_U16_E32 = 186
  V_CMPX_LE_U16_E32 = 187
  V_CMPX_GT_U16_E32 = 188
  V_CMPX_NE_U16_E32 = 189
  V_CMPX_GE_U16_E32 = 190
  V_CMPX_T_U16_E32 = 191
  # I32 compares (192-199).
  V_CMP_F_I32_E32 = 192
  V_CMP_LT_I32_E32 = 193
  V_CMP_EQ_I32_E32 = 194
  V_CMP_LE_I32_E32 = 195
  V_CMP_GT_I32_E32 = 196
  V_CMP_NE_I32_E32 = 197
  V_CMP_GE_I32_E32 = 198
  V_CMP_T_I32_E32 = 199
  # U32 compares (200-207).
  V_CMP_F_U32_E32 = 200
  V_CMP_LT_U32_E32 = 201
  V_CMP_EQ_U32_E32 = 202
  V_CMP_LE_U32_E32 = 203
  V_CMP_GT_U32_E32 = 204
  V_CMP_NE_U32_E32 = 205
  V_CMP_GE_U32_E32 = 206
  V_CMP_T_U32_E32 = 207
  # I32/U32 compares with EXEC write (208-223).
  V_CMPX_F_I32_E32 = 208
  V_CMPX_LT_I32_E32 = 209
  V_CMPX_EQ_I32_E32 = 210
  V_CMPX_LE_I32_E32 = 211
  V_CMPX_GT_I32_E32 = 212
  V_CMPX_NE_I32_E32 = 213
  V_CMPX_GE_I32_E32 = 214
  V_CMPX_T_I32_E32 = 215
  V_CMPX_F_U32_E32 = 216
  V_CMPX_LT_U32_E32 = 217
  V_CMPX_EQ_U32_E32 = 218
  V_CMPX_LE_U32_E32 = 219
  V_CMPX_GT_U32_E32 = 220
  V_CMPX_NE_U32_E32 = 221
  V_CMPX_GE_U32_E32 = 222
  V_CMPX_T_U32_E32 = 223
  # I64 compares (224-231).
  V_CMP_F_I64_E32 = 224
  V_CMP_LT_I64_E32 = 225
  V_CMP_EQ_I64_E32 = 226
  V_CMP_LE_I64_E32 = 227
  V_CMP_GT_I64_E32 = 228
  V_CMP_NE_I64_E32 = 229
  V_CMP_GE_I64_E32 = 230
  V_CMP_T_I64_E32 = 231
  # U64 compares (232-239).
  V_CMP_F_U64_E32 = 232
  V_CMP_LT_U64_E32 = 233
  V_CMP_EQ_U64_E32 = 234
  V_CMP_LE_U64_E32 = 235
  V_CMP_GT_U64_E32 = 236
  V_CMP_NE_U64_E32 = 237
  V_CMP_GE_U64_E32 = 238
  V_CMP_T_U64_E32 = 239
  # I64/U64 compares with EXEC write (240-255).
  V_CMPX_F_I64_E32 = 240
  V_CMPX_LT_I64_E32 = 241
  V_CMPX_EQ_I64_E32 = 242
  V_CMPX_LE_I64_E32 = 243
  V_CMPX_GT_I64_E32 = 244
  V_CMPX_NE_I64_E32 = 245
  V_CMPX_GE_I64_E32 = 246
  V_CMPX_T_I64_E32 = 247
  V_CMPX_F_U64_E32 = 248
  V_CMPX_LT_U64_E32 = 249
  V_CMPX_EQ_U64_E32 = 250
  V_CMPX_LE_U64_E32 = 251
  V_CMPX_GT_U64_E32 = 252
  V_CMPX_NE_U64_E32 = 253
  V_CMPX_GE_U64_E32 = 254
  V_CMPX_T_U64_E32 = 255
  # Suffix-free aliases of the _E32 members above. Because each name equals an
  # existing member's value, Python's Enum machinery makes these aliases (not
  # new members): VOPCOp.V_CMP_LT_F32 is VOPCOp.V_CMP_LT_F32_E32.
  V_CMP_CLASS_F32 = V_CMP_CLASS_F32_E32
  V_CMPX_CLASS_F32 = V_CMPX_CLASS_F32_E32
  V_CMP_CLASS_F64 = V_CMP_CLASS_F64_E32
  V_CMPX_CLASS_F64 = V_CMPX_CLASS_F64_E32
  V_CMP_CLASS_F16 = V_CMP_CLASS_F16_E32
  V_CMPX_CLASS_F16 = V_CMPX_CLASS_F16_E32
  V_CMP_F_F16 = V_CMP_F_F16_E32
  V_CMP_LT_F16 = V_CMP_LT_F16_E32
  V_CMP_EQ_F16 = V_CMP_EQ_F16_E32
  V_CMP_LE_F16 = V_CMP_LE_F16_E32
  V_CMP_GT_F16 = V_CMP_GT_F16_E32
  V_CMP_LG_F16 = V_CMP_LG_F16_E32
  V_CMP_GE_F16 = V_CMP_GE_F16_E32
  V_CMP_O_F16 = V_CMP_O_F16_E32
  V_CMP_U_F16 = V_CMP_U_F16_E32
  V_CMP_NGE_F16 = V_CMP_NGE_F16_E32
  V_CMP_NLG_F16 = V_CMP_NLG_F16_E32
  V_CMP_NGT_F16 = V_CMP_NGT_F16_E32
  V_CMP_NLE_F16 = V_CMP_NLE_F16_E32
  V_CMP_NEQ_F16 = V_CMP_NEQ_F16_E32
  V_CMP_NLT_F16 = V_CMP_NLT_F16_E32
  V_CMP_TRU_F16 = V_CMP_TRU_F16_E32
  V_CMPX_F_F16 = V_CMPX_F_F16_E32
  V_CMPX_LT_F16 = V_CMPX_LT_F16_E32
  V_CMPX_EQ_F16 = V_CMPX_EQ_F16_E32
  V_CMPX_LE_F16 = V_CMPX_LE_F16_E32
  V_CMPX_GT_F16 = V_CMPX_GT_F16_E32
  V_CMPX_LG_F16 = V_CMPX_LG_F16_E32
  V_CMPX_GE_F16 = V_CMPX_GE_F16_E32
  V_CMPX_O_F16 = V_CMPX_O_F16_E32
  V_CMPX_U_F16 = V_CMPX_U_F16_E32
  V_CMPX_NGE_F16 = V_CMPX_NGE_F16_E32
  V_CMPX_NLG_F16 = V_CMPX_NLG_F16_E32
  V_CMPX_NGT_F16 = V_CMPX_NGT_F16_E32
  V_CMPX_NLE_F16 = V_CMPX_NLE_F16_E32
  V_CMPX_NEQ_F16 = V_CMPX_NEQ_F16_E32
  V_CMPX_NLT_F16 = V_CMPX_NLT_F16_E32
  V_CMPX_TRU_F16 = V_CMPX_TRU_F16_E32
  V_CMP_F_F32 = V_CMP_F_F32_E32
  V_CMP_LT_F32 = V_CMP_LT_F32_E32
  V_CMP_EQ_F32 = V_CMP_EQ_F32_E32
  V_CMP_LE_F32 = V_CMP_LE_F32_E32
  V_CMP_GT_F32 = V_CMP_GT_F32_E32
  V_CMP_LG_F32 = V_CMP_LG_F32_E32
  V_CMP_GE_F32 = V_CMP_GE_F32_E32
  V_CMP_O_F32 = V_CMP_O_F32_E32
  V_CMP_U_F32 = V_CMP_U_F32_E32
  V_CMP_NGE_F32 = V_CMP_NGE_F32_E32
  V_CMP_NLG_F32 = V_CMP_NLG_F32_E32
  V_CMP_NGT_F32 = V_CMP_NGT_F32_E32
  V_CMP_NLE_F32 = V_CMP_NLE_F32_E32
  V_CMP_NEQ_F32 = V_CMP_NEQ_F32_E32
  V_CMP_NLT_F32 = V_CMP_NLT_F32_E32
  V_CMP_TRU_F32 = V_CMP_TRU_F32_E32
  V_CMPX_F_F32 = V_CMPX_F_F32_E32
  V_CMPX_LT_F32 = V_CMPX_LT_F32_E32
  V_CMPX_EQ_F32 = V_CMPX_EQ_F32_E32
  V_CMPX_LE_F32 = V_CMPX_LE_F32_E32
  V_CMPX_GT_F32 = V_CMPX_GT_F32_E32
  V_CMPX_LG_F32 = V_CMPX_LG_F32_E32
  V_CMPX_GE_F32 = V_CMPX_GE_F32_E32
  V_CMPX_O_F32 = V_CMPX_O_F32_E32
  V_CMPX_U_F32 = V_CMPX_U_F32_E32
  V_CMPX_NGE_F32 = V_CMPX_NGE_F32_E32
  V_CMPX_NLG_F32 = V_CMPX_NLG_F32_E32
  V_CMPX_NGT_F32 = V_CMPX_NGT_F32_E32
  V_CMPX_NLE_F32 = V_CMPX_NLE_F32_E32
  V_CMPX_NEQ_F32 = V_CMPX_NEQ_F32_E32
  V_CMPX_NLT_F32 = V_CMPX_NLT_F32_E32
  V_CMPX_TRU_F32 = V_CMPX_TRU_F32_E32
  V_CMP_F_F64 = V_CMP_F_F64_E32
  V_CMP_LT_F64 = V_CMP_LT_F64_E32
  V_CMP_EQ_F64 = V_CMP_EQ_F64_E32
  V_CMP_LE_F64 = V_CMP_LE_F64_E32
  V_CMP_GT_F64 = V_CMP_GT_F64_E32
  V_CMP_LG_F64 = V_CMP_LG_F64_E32
  V_CMP_GE_F64 = V_CMP_GE_F64_E32
  V_CMP_O_F64 = V_CMP_O_F64_E32
  V_CMP_U_F64 = V_CMP_U_F64_E32
  V_CMP_NGE_F64 = V_CMP_NGE_F64_E32
  V_CMP_NLG_F64 = V_CMP_NLG_F64_E32
  V_CMP_NGT_F64 = V_CMP_NGT_F64_E32
  V_CMP_NLE_F64 = V_CMP_NLE_F64_E32
  V_CMP_NEQ_F64 = V_CMP_NEQ_F64_E32
  V_CMP_NLT_F64 = V_CMP_NLT_F64_E32
  V_CMP_TRU_F64 = V_CMP_TRU_F64_E32
  V_CMPX_F_F64 = V_CMPX_F_F64_E32
  V_CMPX_LT_F64 = V_CMPX_LT_F64_E32
  V_CMPX_EQ_F64 = V_CMPX_EQ_F64_E32
  V_CMPX_LE_F64 = V_CMPX_LE_F64_E32
  V_CMPX_GT_F64 = V_CMPX_GT_F64_E32
  V_CMPX_LG_F64 = V_CMPX_LG_F64_E32
  V_CMPX_GE_F64 = V_CMPX_GE_F64_E32
  V_CMPX_O_F64 = V_CMPX_O_F64_E32
  V_CMPX_U_F64 = V_CMPX_U_F64_E32
  V_CMPX_NGE_F64 = V_CMPX_NGE_F64_E32
  V_CMPX_NLG_F64 = V_CMPX_NLG_F64_E32
  V_CMPX_NGT_F64 = V_CMPX_NGT_F64_E32
  V_CMPX_NLE_F64 = V_CMPX_NLE_F64_E32
  V_CMPX_NEQ_F64 = V_CMPX_NEQ_F64_E32
  V_CMPX_NLT_F64 = V_CMPX_NLT_F64_E32
  V_CMPX_TRU_F64 = V_CMPX_TRU_F64_E32
  V_CMP_F_I16 = V_CMP_F_I16_E32
  V_CMP_LT_I16 = V_CMP_LT_I16_E32
  V_CMP_EQ_I16 = V_CMP_EQ_I16_E32
  V_CMP_LE_I16 = V_CMP_LE_I16_E32
  V_CMP_GT_I16 = V_CMP_GT_I16_E32
  V_CMP_NE_I16 = V_CMP_NE_I16_E32
  V_CMP_GE_I16 = V_CMP_GE_I16_E32
  V_CMP_T_I16 = V_CMP_T_I16_E32
  V_CMP_F_U16 = V_CMP_F_U16_E32
  V_CMP_LT_U16 = V_CMP_LT_U16_E32
  V_CMP_EQ_U16 = V_CMP_EQ_U16_E32
  V_CMP_LE_U16 = V_CMP_LE_U16_E32
  V_CMP_GT_U16 = V_CMP_GT_U16_E32
  V_CMP_NE_U16 = V_CMP_NE_U16_E32
  V_CMP_GE_U16 = V_CMP_GE_U16_E32
  V_CMP_T_U16 = V_CMP_T_U16_E32
  V_CMPX_F_I16 = V_CMPX_F_I16_E32
  V_CMPX_LT_I16 = V_CMPX_LT_I16_E32
  V_CMPX_EQ_I16 = V_CMPX_EQ_I16_E32
  V_CMPX_LE_I16 = V_CMPX_LE_I16_E32
  V_CMPX_GT_I16 = V_CMPX_GT_I16_E32
  V_CMPX_NE_I16 = V_CMPX_NE_I16_E32
  V_CMPX_GE_I16 = V_CMPX_GE_I16_E32
  V_CMPX_T_I16 = V_CMPX_T_I16_E32
  V_CMPX_F_U16 = V_CMPX_F_U16_E32
  V_CMPX_LT_U16 = V_CMPX_LT_U16_E32
  V_CMPX_EQ_U16 = V_CMPX_EQ_U16_E32
  V_CMPX_LE_U16 = V_CMPX_LE_U16_E32
  V_CMPX_GT_U16 = V_CMPX_GT_U16_E32
  V_CMPX_NE_U16 = V_CMPX_NE_U16_E32
  V_CMPX_GE_U16 = V_CMPX_GE_U16_E32
  V_CMPX_T_U16 = V_CMPX_T_U16_E32
  V_CMP_F_I32 = V_CMP_F_I32_E32
  V_CMP_LT_I32 = V_CMP_LT_I32_E32
  V_CMP_EQ_I32 = V_CMP_EQ_I32_E32
  V_CMP_LE_I32 = V_CMP_LE_I32_E32
  V_CMP_GT_I32 = V_CMP_GT_I32_E32
  V_CMP_NE_I32 = V_CMP_NE_I32_E32
  V_CMP_GE_I32 = V_CMP_GE_I32_E32
  V_CMP_T_I32 = V_CMP_T_I32_E32
  V_CMP_F_U32 = V_CMP_F_U32_E32
  V_CMP_LT_U32 = V_CMP_LT_U32_E32
  V_CMP_EQ_U32 = V_CMP_EQ_U32_E32
  V_CMP_LE_U32 = V_CMP_LE_U32_E32
  V_CMP_GT_U32 = V_CMP_GT_U32_E32
  V_CMP_NE_U32 = V_CMP_NE_U32_E32
  V_CMP_GE_U32 = V_CMP_GE_U32_E32
  V_CMP_T_U32 = V_CMP_T_U32_E32
  V_CMPX_F_I32 = V_CMPX_F_I32_E32
  V_CMPX_LT_I32 = V_CMPX_LT_I32_E32
  V_CMPX_EQ_I32 = V_CMPX_EQ_I32_E32
  V_CMPX_LE_I32 = V_CMPX_LE_I32_E32
  V_CMPX_GT_I32 = V_CMPX_GT_I32_E32
  V_CMPX_NE_I32 = V_CMPX_NE_I32_E32
  V_CMPX_GE_I32 = V_CMPX_GE_I32_E32
  V_CMPX_T_I32 = V_CMPX_T_I32_E32
  V_CMPX_F_U32 = V_CMPX_F_U32_E32
  V_CMPX_LT_U32 = V_CMPX_LT_U32_E32
  V_CMPX_EQ_U32 = V_CMPX_EQ_U32_E32
  V_CMPX_LE_U32 = V_CMPX_LE_U32_E32
  V_CMPX_GT_U32 = V_CMPX_GT_U32_E32
  V_CMPX_NE_U32 = V_CMPX_NE_U32_E32
  V_CMPX_GE_U32 = V_CMPX_GE_U32_E32
  V_CMPX_T_U32 = V_CMPX_T_U32_E32
  V_CMP_F_I64 = V_CMP_F_I64_E32
  V_CMP_LT_I64 = V_CMP_LT_I64_E32
  V_CMP_EQ_I64 = V_CMP_EQ_I64_E32
  V_CMP_LE_I64 = V_CMP_LE_I64_E32
  V_CMP_GT_I64 = V_CMP_GT_I64_E32
  V_CMP_NE_I64 = V_CMP_NE_I64_E32
  V_CMP_GE_I64 = V_CMP_GE_I64_E32
  V_CMP_T_I64 = V_CMP_T_I64_E32
  V_CMP_F_U64 = V_CMP_F_U64_E32
  V_CMP_LT_U64 = V_CMP_LT_U64_E32
  V_CMP_EQ_U64 = V_CMP_EQ_U64_E32
  V_CMP_LE_U64 = V_CMP_LE_U64_E32
  V_CMP_GT_U64 = V_CMP_GT_U64_E32
  V_CMP_NE_U64 = V_CMP_NE_U64_E32
  V_CMP_GE_U64 = V_CMP_GE_U64_E32
  V_CMP_T_U64 = V_CMP_T_U64_E32
  V_CMPX_F_I64 = V_CMPX_F_I64_E32
  V_CMPX_LT_I64 = V_CMPX_LT_I64_E32
  V_CMPX_EQ_I64 = V_CMPX_EQ_I64_E32
  V_CMPX_LE_I64 = V_CMPX_LE_I64_E32
  V_CMPX_GT_I64 = V_CMPX_GT_I64_E32
  V_CMPX_NE_I64 = V_CMPX_NE_I64_E32
  V_CMPX_GE_I64 = V_CMPX_GE_I64_E32
  V_CMPX_T_I64 = V_CMPX_T_I64_E32
  V_CMPX_F_U64 = V_CMPX_F_U64_E32
  V_CMPX_LT_U64 = V_CMPX_LT_U64_E32
  V_CMPX_EQ_U64 = V_CMPX_EQ_U64_E32
  V_CMPX_LE_U64 = V_CMPX_LE_U64_E32
  V_CMPX_GT_U64 = V_CMPX_GT_U64_E32
  V_CMPX_NE_U64 = V_CMPX_NE_U64_E32
  V_CMPX_GE_U64 = V_CMPX_GE_U64_E32
  V_CMPX_T_U64 = V_CMPX_T_U64_E32
|