summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--sys/ia64/disasm/disasm.h325
-rw-r--r--sys/ia64/disasm/disasm_decode.c2511
-rw-r--r--sys/ia64/disasm/disasm_extract.c2519
-rw-r--r--sys/ia64/disasm/disasm_format.c344
-rw-r--r--sys/ia64/disasm/disasm_int.h216
5 files changed, 5915 insertions, 0 deletions
diff --git a/sys/ia64/disasm/disasm.h b/sys/ia64/disasm/disasm.h
new file mode 100644
index 0000000..0037bd3
--- /dev/null
+++ b/sys/ia64/disasm/disasm.h
@@ -0,0 +1,325 @@
+/*
+ * Copyright (c) 2000-2003 Marcel Moolenaar
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * $FreeBSD$
+ */
+
+#ifndef _DISASM_H_
+#define _DISASM_H_
+
+#ifndef _DISASM_INT_H_
+#define ASM_ADDITIONAL_OPCODES ASM_OP_NUMBER_OF_OPCODES /* no internal opcodes */
+#endif
+
+/* Application registers (ar#). Names per the Itanium architecture manual. */
+#define AR_K0 0 /* Kernel registers 0-7. */
+#define AR_K1 1
+#define AR_K2 2
+#define AR_K3 3
+#define AR_K4 4
+#define AR_K5 5
+#define AR_K6 6
+#define AR_K7 7
+#define AR_RSC 16 /* Register stack configuration. */
+#define AR_BSP 17 /* RSE backing store pointer. */
+#define AR_BSPSTORE 18 /* RSE backing store pointer for stores. */
+#define AR_RNAT 19 /* RSE NaT collection. */
+#define AR_FCR 21 /* IA-32 floating-point control. */
+#define AR_EFLAG 24 /* IA-32 EFLAG. */
+#define AR_CSD 25 /* IA-32 code segment descriptor. */
+#define AR_SSD 26 /* IA-32 stack segment descriptor. */
+#define AR_CFLG 27 /* IA-32 combined CR0 and CR4. */
+#define AR_FSR 28 /* IA-32 floating-point status. */
+#define AR_FIR 29 /* IA-32 floating-point instruction. */
+#define AR_FDR 30 /* IA-32 floating-point data. */
+#define AR_CCV 32 /* Compare and exchange compare value. */
+#define AR_UNAT 36 /* User NaT collection. */
+#define AR_FPSR 40 /* Floating-point status register. */
+#define AR_ITC 44 /* Interval time counter. */
+#define AR_PFS 64 /* Previous function state. */
+#define AR_LC 65 /* Loop count. */
+#define AR_EC 66 /* Epilog count. */
+
+/* Control registers (cr#). Names per the Itanium architecture manual. */
+#define CR_DCR 0 /* Default control register. */
+#define CR_ITM 1 /* Interval timer match. */
+#define CR_IVA 2 /* Interruption vector address. */
+#define CR_PTA 8 /* Page table address. */
+#define CR_IPSR 16 /* Interruption processor status. */
+#define CR_ISR 17 /* Interruption status. */
+#define CR_IIP 19 /* Interruption instruction pointer. */
+#define CR_IFA 20 /* Interruption faulting address. */
+#define CR_ITIR 21 /* Interruption TLB insertion. */
+#define CR_IIPA 22 /* Interruption instr. previous address. */
+#define CR_IFS 23 /* Interruption function state. */
+#define CR_IIM 24 /* Interruption immediate. */
+#define CR_IHA 25 /* Interruption hash address. */
+#define CR_LID 64 /* Local interrupt ID. */
+#define CR_IVR 65 /* External interrupt vector. */
+#define CR_TPR 66 /* Task priority. */
+#define CR_EOI 67 /* End of external interrupt. */
+#define CR_IRR0 68 /* External interrupt request 0-3. */
+#define CR_IRR1 69
+#define CR_IRR2 70
+#define CR_IRR3 71
+#define CR_ITV 72 /* Interval timer vector. */
+#define CR_PMV 73 /* Performance monitoring vector. */
+#define CR_CMCV 74 /* Corrected machine check vector. */
+#define CR_LRR0 80 /* Local redirection registers 0-1. */
+#define CR_LRR1 81
+
+enum asm_cmpltr_class {
+ ASM_CC_NONE,
+ ASM_CC_ACLR,
+ ASM_CC_BSW, ASM_CC_BTYPE, ASM_CC_BWH,
+ ASM_CC_CHK, ASM_CC_CLRRRB, ASM_CC_CREL, ASM_CC_CTYPE,
+ ASM_CC_DEP, ASM_CC_DH,
+ ASM_CC_FC, ASM_CC_FCREL, ASM_CC_FCTYPE, ASM_CC_FCVT, ASM_CC_FLDTYPE,
+ ASM_CC_FMERGE, ASM_CC_FREL, ASM_CC_FSWAP,
+ ASM_CC_GETF,
+ ASM_CC_IH, ASM_CC_INVALA, ASM_CC_IPWH, ASM_CC_ITC, ASM_CC_ITR,
+ ASM_CC_LDHINT, ASM_CC_LDTYPE, ASM_CC_LFETCH, ASM_CC_LFHINT,
+ ASM_CC_LFTYPE, ASM_CC_LR,
+ ASM_CC_MF, ASM_CC_MOV, ASM_CC_MWH,
+ ASM_CC_PAVG, ASM_CC_PC, ASM_CC_PH, ASM_CC_PREL, ASM_CC_PRTYPE,
+ ASM_CC_PTC, ASM_CC_PTR, ASM_CC_PVEC,
+ ASM_CC_SAT, ASM_CC_SEM, ASM_CC_SETF, ASM_CC_SF, ASM_CC_SRLZ,
+ ASM_CC_STHINT, ASM_CC_STTYPE, ASM_CC_SYNC,
+ ASM_CC_RW,
+ ASM_CC_TREL, ASM_CC_TRUNC,
+ ASM_CC_UNIT, ASM_CC_UNPACK, ASM_CC_UNS,
+ ASM_CC_XMA
+};
+
+enum asm_cmpltr_type {
+ ASM_CT_NONE,
+ ASM_CT_COND = ASM_CT_NONE, /* "cond" is the implied default */
+
+ ASM_CT_0, ASM_CT_1,
+ ASM_CT_A, ASM_CT_ACQ, ASM_CT_AND,
+ ASM_CT_B, ASM_CT_BIAS,
+ ASM_CT_C_CLR, ASM_CT_C_CLR_ACQ, ASM_CT_C_NC, ASM_CT_CALL,
+ ASM_CT_CEXIT, ASM_CT_CLOOP, ASM_CT_CLR, ASM_CT_CTOP,
+ ASM_CT_D, ASM_CT_DC_DC, ASM_CT_DC_NT, ASM_CT_DPNT, ASM_CT_DPTK,
+ ASM_CT_E, ASM_CT_EQ, ASM_CT_EXCL, ASM_CT_EXIT, ASM_CT_EXP,
+ ASM_CT_F, ASM_CT_FAULT, ASM_CT_FEW, ASM_CT_FILL, ASM_CT_FX, ASM_CT_FXU,
+ ASM_CT_G, ASM_CT_GA, ASM_CT_GE, ASM_CT_GT,
+ ASM_CT_H, ASM_CT_HU,
+ ASM_CT_I, ASM_CT_IA, ASM_CT_IMP,
+ ASM_CT_L, ASM_CT_LE, ASM_CT_LOOP, ASM_CT_LR, ASM_CT_LT, ASM_CT_LTU,
+ ASM_CT_M, ASM_CT_MANY,
+ ASM_CT_NC, ASM_CT_NE, ASM_CT_NEQ, ASM_CT_NL, ASM_CT_NLE, ASM_CT_NLT,
+ ASM_CT_NM, ASM_CT_NR, ASM_CT_NS, ASM_CT_NT_DC, ASM_CT_NT_NT,
+ ASM_CT_NT_TK, ASM_CT_NT1, ASM_CT_NT2, ASM_CT_NTA, ASM_CT_NZ,
+ ASM_CT_OR, ASM_CT_OR_ANDCM, ASM_CT_ORD,
+ ASM_CT_PR,
+ ASM_CT_R, ASM_CT_RAZ, ASM_CT_REL, ASM_CT_RET, ASM_CT_RW,
+ ASM_CT_S, ASM_CT_S0, ASM_CT_S1, ASM_CT_S2, ASM_CT_S3, ASM_CT_SA,
+ ASM_CT_SE, ASM_CT_SIG, ASM_CT_SPILL, ASM_CT_SPNT, ASM_CT_SPTK,
+ ASM_CT_SSS,
+ ASM_CT_TK_DC, ASM_CT_TK_NT, ASM_CT_TK_TK, ASM_CT_TRUNC,
+ ASM_CT_U, ASM_CT_UNC, ASM_CT_UNORD, ASM_CT_USS, ASM_CT_UUS, ASM_CT_UUU,
+ ASM_CT_W, ASM_CT_WEXIT, ASM_CT_WTOP,
+ ASM_CT_X, ASM_CT_XF,
+ ASM_CT_Z,
+};
+
+/* Completer: one decoded ".xxx" qualifier (class + concrete value). */
+struct asm_cmpltr {
+ enum asm_cmpltr_class c_class;
+ enum asm_cmpltr_type c_type;
+};
+
+/* Operand types. */
+enum asm_oper_type {
+ ASM_OPER_NONE,
+ ASM_OPER_AREG, /* = ar# */
+ ASM_OPER_BREG, /* = b# */
+ ASM_OPER_CPUID, /* = cpuid[r#] */
+ ASM_OPER_CREG, /* = cr# */
+ ASM_OPER_DBR, /* = dbr[r#] */
+ ASM_OPER_DISP, /* IP relative displacement. */
+ ASM_OPER_DTR, /* = dtr[r#] */
+ ASM_OPER_FREG, /* = f# */
+ ASM_OPER_GREG, /* = r# */
+ ASM_OPER_IBR, /* = ibr[r#] */
+ ASM_OPER_IMM, /* Immediate */
+ ASM_OPER_IP, /* = ip */
+ ASM_OPER_ITR, /* = itr[r#] */
+ ASM_OPER_MEM, /* = [r#] */
+ ASM_OPER_MSR, /* = msr[r#] */
+ ASM_OPER_PKR, /* = pkr[r#] */
+ ASM_OPER_PMC, /* = pmc[r#] */
+ ASM_OPER_PMD, /* = pmd[r#] */
+ ASM_OPER_PR, /* = pr */
+ ASM_OPER_PR_ROT, /* = pr.rot */
+ ASM_OPER_PREG, /* = p# */
+ ASM_OPER_PSR, /* = psr */
+ ASM_OPER_PSR_L, /* = psr.l */
+ ASM_OPER_PSR_UM, /* = psr.um */
+ ASM_OPER_RR /* = rr[r#] */
+};
+
+/* Operand. */
+struct asm_oper {
+ enum asm_oper_type o_type;
+ int o_read:1; /* operand is read */
+ int o_write:1; /* operand is written */
+ uint64_t o_value; /* register number, immediate or displacement */
+};
+
+/* Instruction formats. */
+enum asm_fmt {
+ ASM_FMT_NONE,
+ ASM_FMT_A = 0x0100,
+ ASM_FMT_A1, ASM_FMT_A2, ASM_FMT_A3, ASM_FMT_A4,
+ ASM_FMT_A5, ASM_FMT_A6, ASM_FMT_A7, ASM_FMT_A8,
+ ASM_FMT_A9, ASM_FMT_A10,
+ ASM_FMT_B = 0x0200,
+ ASM_FMT_B1, ASM_FMT_B2, ASM_FMT_B3, ASM_FMT_B4,
+ ASM_FMT_B5, ASM_FMT_B6, ASM_FMT_B7, ASM_FMT_B8,
+ ASM_FMT_B9,
+ ASM_FMT_F = 0x0300,
+ ASM_FMT_F1, ASM_FMT_F2, ASM_FMT_F3, ASM_FMT_F4,
+ ASM_FMT_F5, ASM_FMT_F6, ASM_FMT_F7, ASM_FMT_F8,
+ ASM_FMT_F9, ASM_FMT_F10, ASM_FMT_F11, ASM_FMT_F12,
+ ASM_FMT_F13, ASM_FMT_F14, ASM_FMT_F15,
+ ASM_FMT_I = 0x0400,
+ ASM_FMT_I1, ASM_FMT_I2, ASM_FMT_I3, ASM_FMT_I4,
+ ASM_FMT_I5, ASM_FMT_I6, ASM_FMT_I7, ASM_FMT_I8,
+ ASM_FMT_I9, ASM_FMT_I10, ASM_FMT_I11, ASM_FMT_I12,
+ ASM_FMT_I13, ASM_FMT_I14, ASM_FMT_I15, ASM_FMT_I16,
+ ASM_FMT_I17, ASM_FMT_I19, ASM_FMT_I20, ASM_FMT_I21, /* no I18 */
+ ASM_FMT_I22, ASM_FMT_I23, ASM_FMT_I24, ASM_FMT_I25,
+ ASM_FMT_I26, ASM_FMT_I27, ASM_FMT_I28, ASM_FMT_I29,
+ ASM_FMT_M = 0x0500,
+ ASM_FMT_M1, ASM_FMT_M2, ASM_FMT_M3, ASM_FMT_M4,
+ ASM_FMT_M5, ASM_FMT_M6, ASM_FMT_M7, ASM_FMT_M8,
+ ASM_FMT_M9, ASM_FMT_M10, ASM_FMT_M11, ASM_FMT_M12,
+ ASM_FMT_M13, ASM_FMT_M14, ASM_FMT_M15, ASM_FMT_M16,
+ ASM_FMT_M17, ASM_FMT_M18, ASM_FMT_M19, ASM_FMT_M20,
+ ASM_FMT_M21, ASM_FMT_M22, ASM_FMT_M23, ASM_FMT_M24,
+ ASM_FMT_M25, ASM_FMT_M26, ASM_FMT_M27, ASM_FMT_M28,
+ ASM_FMT_M29, ASM_FMT_M30, ASM_FMT_M31, ASM_FMT_M32,
+ ASM_FMT_M33, ASM_FMT_M34, ASM_FMT_M35, ASM_FMT_M36,
+ ASM_FMT_M37, ASM_FMT_M38, ASM_FMT_M39, ASM_FMT_M40,
+ ASM_FMT_M41, ASM_FMT_M42, ASM_FMT_M43, ASM_FMT_M44,
+ ASM_FMT_M45, ASM_FMT_M46,
+ ASM_FMT_X = 0x0600,
+ ASM_FMT_X1, ASM_FMT_X2, ASM_FMT_X3, ASM_FMT_X4
+};
+
+/* Instruction opcodes. */
+enum asm_op {
+ ASM_OP_NONE,
+ ASM_OP_ADD, ASM_OP_ADDL, ASM_OP_ADDP4, ASM_OP_ADDS, ASM_OP_ALLOC,
+ ASM_OP_AND, ASM_OP_ANDCM,
+ ASM_OP_BR, ASM_OP_BREAK, ASM_OP_BRL, ASM_OP_BRP, ASM_OP_BSW,
+ ASM_OP_CHK, ASM_OP_CLRRRB, ASM_OP_CMP, ASM_OP_CMP4, ASM_OP_CMP8XCHG16,
+ ASM_OP_CMPXCHG1, ASM_OP_CMPXCHG2, ASM_OP_CMPXCHG4, ASM_OP_CMPXCHG8,
+ ASM_OP_COVER, ASM_OP_CZX1, ASM_OP_CZX2,
+ ASM_OP_DEP,
+ ASM_OP_EPC, ASM_OP_EXTR,
+ ASM_OP_FAMAX, ASM_OP_FAMIN, ASM_OP_FAND, ASM_OP_FANDCM, ASM_OP_FC,
+ ASM_OP_FCHKF, ASM_OP_FCLASS, ASM_OP_FCLRF, ASM_OP_FCMP, ASM_OP_FCVT,
+ ASM_OP_FETCHADD4, ASM_OP_FETCHADD8, ASM_OP_FLUSHRS, ASM_OP_FMA,
+ ASM_OP_FMAX, ASM_OP_FMERGE, ASM_OP_FMIN, ASM_OP_FMIX, ASM_OP_FMS,
+ ASM_OP_FNMA, ASM_OP_FOR, ASM_OP_FPACK, ASM_OP_FPAMAX, ASM_OP_FPAMIN,
+ ASM_OP_FPCMP, ASM_OP_FPCVT, ASM_OP_FPMA, ASM_OP_FPMAX, ASM_OP_FPMERGE,
+ ASM_OP_FPMIN, ASM_OP_FPMS, ASM_OP_FPNMA, ASM_OP_FPRCPA,
+ ASM_OP_FPRSQRTA, ASM_OP_FRCPA, ASM_OP_FRSQRTA, ASM_OP_FSELECT,
+ ASM_OP_FSETC, ASM_OP_FSWAP, ASM_OP_FSXT, ASM_OP_FWB, ASM_OP_FXOR,
+ ASM_OP_GETF,
+ ASM_OP_INVALA, ASM_OP_ITC, ASM_OP_ITR,
+ ASM_OP_LD1, ASM_OP_LD16, ASM_OP_LD2, ASM_OP_LD4, ASM_OP_LD8,
+ ASM_OP_LDF, ASM_OP_LDF8, ASM_OP_LDFD, ASM_OP_LDFE, ASM_OP_LDFP8,
+ ASM_OP_LDFPD, ASM_OP_LDFPS, ASM_OP_LDFS, ASM_OP_LFETCH, ASM_OP_LOADRS,
+ ASM_OP_MF, ASM_OP_MIX1, ASM_OP_MIX2, ASM_OP_MIX4, ASM_OP_MOV,
+ ASM_OP_MOVL, ASM_OP_MUX1, ASM_OP_MUX2,
+ ASM_OP_NOP,
+ ASM_OP_OR,
+ ASM_OP_PACK2, ASM_OP_PACK4, ASM_OP_PADD1, ASM_OP_PADD2, ASM_OP_PADD4,
+ ASM_OP_PAVG1, ASM_OP_PAVG2, ASM_OP_PAVGSUB1, ASM_OP_PAVGSUB2,
+ ASM_OP_PCMP1, ASM_OP_PCMP2, ASM_OP_PCMP4, ASM_OP_PMAX1, ASM_OP_PMAX2,
+ ASM_OP_PMIN1, ASM_OP_PMIN2, ASM_OP_PMPY2, ASM_OP_PMPYSHR2,
+ ASM_OP_POPCNT, ASM_OP_PROBE, ASM_OP_PSAD1, ASM_OP_PSHL2, ASM_OP_PSHL4,
+ ASM_OP_PSHLADD2, ASM_OP_PSHR2, ASM_OP_PSHR4, ASM_OP_PSHRADD2,
+ ASM_OP_PSUB1, ASM_OP_PSUB2, ASM_OP_PSUB4, ASM_OP_PTC, ASM_OP_PTR,
+ ASM_OP_RFI, ASM_OP_RSM, ASM_OP_RUM,
+ ASM_OP_SETF, ASM_OP_SHL, ASM_OP_SHLADD, ASM_OP_SHLADDP4, ASM_OP_SHR,
+ ASM_OP_SHRP, ASM_OP_SRLZ, ASM_OP_SSM, ASM_OP_ST1, ASM_OP_ST16,
+ ASM_OP_ST2, ASM_OP_ST4, ASM_OP_ST8, ASM_OP_STF, ASM_OP_STF8,
+ ASM_OP_STFD, ASM_OP_STFE, ASM_OP_STFS, ASM_OP_SUB, ASM_OP_SUM,
+ ASM_OP_SXT1, ASM_OP_SXT2, ASM_OP_SXT4, ASM_OP_SYNC,
+ ASM_OP_TAK, ASM_OP_TBIT, ASM_OP_THASH, ASM_OP_TNAT, ASM_OP_TPA,
+ ASM_OP_TTAG,
+ ASM_OP_UNPACK1, ASM_OP_UNPACK2, ASM_OP_UNPACK4,
+ ASM_OP_XCHG1, ASM_OP_XCHG2, ASM_OP_XCHG4, ASM_OP_XCHG8, ASM_OP_XMA,
+ ASM_OP_XOR,
+ ASM_OP_ZXT1, ASM_OP_ZXT2, ASM_OP_ZXT4,
+ /* Additional opcodes used only internally. */
+ ASM_ADDITIONAL_OPCODES
+};
+
+/* Instruction: one fully decoded slot. */
+struct asm_inst {
+ uint64_t i_bits; /* raw instruction bits */
+ struct asm_oper i_oper[7]; /* decoded operands */
+ struct asm_cmpltr i_cmpltr[5]; /* decoded completers */
+ enum asm_fmt i_format;
+ enum asm_op i_op;
+ int i_ncmpltrs; /* valid entries in i_cmpltr[] */
+ int i_srcidx; /* presumably first source operand index; confirm */
+};
+
+struct asm_bundle {
+ const char *b_templ; /* template mnemonic string */
+ struct asm_inst b_inst[3]; /* the three slots */
+};
+
+/* Functional units. */
+enum asm_unit {
+ ASM_UNIT_NONE,
+ ASM_UNIT_A = 0x0100, /* A unit. */
+ ASM_UNIT_B = 0x0200, /* B unit. */
+ ASM_UNIT_F = 0x0300, /* F unit. */
+ ASM_UNIT_I = 0x0400, /* I unit. */
+ ASM_UNIT_M = 0x0500, /* M unit. */
+ ASM_UNIT_X = 0x0600 /* X unit. */
+};
+
+#ifdef _DISASM_INT_H_
+int asm_extract(enum asm_op, enum asm_fmt, uint64_t, struct asm_bundle *, int);
+#endif
+
+int asm_decode(uint64_t, struct asm_bundle *);
+
+void asm_completer(const struct asm_cmpltr *, char *);
+void asm_mnemonic(const enum asm_op, char *);
+void asm_operand(const struct asm_oper *, char *, uint64_t);
+void asm_print_bundle(const struct asm_bundle *, uint64_t);
+void asm_print_inst(const struct asm_bundle *, int, uint64_t);
+
+#endif /* _DISASM_H_ */
diff --git a/sys/ia64/disasm/disasm_decode.c b/sys/ia64/disasm/disasm_decode.c
new file mode 100644
index 0000000..d8e4d55
--- /dev/null
+++ b/sys/ia64/disasm/disasm_decode.c
@@ -0,0 +1,2511 @@
+/*
+ * Copyright (c) 2000-2003 Marcel Moolenaar
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <sys/cdefs.h>
+__FBSDID("$FreeBSD$");
+
+#include <sys/param.h>
+#include <sys/systm.h>
+
+#include <ia64/disasm/disasm_int.h>
+#include <ia64/disasm/disasm.h>
+
+/*
+ * Template names, indexed by the 5-bit template field; ';' marks a stop, 0 = reserved.
+ */
+static const char *asm_templname[] = {
+ "MII", "MII;", "MI;I", "MI;I;", "MLX", "MLX;", 0, 0,
+ "MMI", "MMI;", "M;MI", "M;MI;", "MFI", "MFI;", "MMF", "MMF;",
+ "MIB", "MIB;", "MBB", "MBB;", 0, 0, "BBB", "BBB;",
+ "MMB", "MMB;", 0, 0, "MFB", "MFB;", 0, 0
+};
+
+/*
+ * Decode A-unit instructions. Takes slot bits directly: the I-unit decoder forwards major opcodes 8 and up here.
+ */
+static int
+asm_decodeA(uint64_t bits, struct asm_bundle *b, int slot)
+{
+ enum asm_fmt fmt;
+ enum asm_op op;
+
+ fmt = ASM_FMT_NONE, op = ASM_OP_NONE;
+ switch((int)OPCODE(bits)) { /* major opcode */
+ case 0x8:
+ switch (FIELD(bits, 34, 2)) { /* x2a */
+ case 0x0:
+ if (FIELD(bits, 33, 1) == 0) { /* ve */
+ switch (FIELD(bits, 29, 4)) { /* x4 */
+ case 0x0:
+ if (FIELD(bits, 27, 2) <= 1) /* x2b */
+ op = ASM_OP_ADD,
+ fmt = ASM_FMT_A1;
+ break;
+ case 0x1:
+ if (FIELD(bits, 27, 2) <= 1) /* x2b */
+ op = ASM_OP_SUB,
+ fmt = ASM_FMT_A1;
+ break;
+ case 0x2:
+ if (FIELD(bits, 27, 2) == 0) /* x2b */
+ op = ASM_OP_ADDP4,
+ fmt = ASM_FMT_A1;
+ break;
+ case 0x3:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_AND,
+ fmt = ASM_FMT_A1;
+ break;
+ case 0x1:
+ op = ASM_OP_ANDCM,
+ fmt = ASM_FMT_A1;
+ break;
+ case 0x2:
+ op = ASM_OP_OR,
+ fmt = ASM_FMT_A1;
+ break;
+ case 0x3:
+ op = ASM_OP_XOR,
+ fmt = ASM_FMT_A1;
+ break;
+ }
+ break;
+ case 0xB:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_AND,
+ fmt = ASM_FMT_A3;
+ break;
+ case 0x1:
+ op = ASM_OP_ANDCM,
+ fmt = ASM_FMT_A3;
+ break;
+ case 0x2:
+ op = ASM_OP_OR,
+ fmt = ASM_FMT_A3;
+ break;
+ case 0x3:
+ op = ASM_OP_XOR,
+ fmt = ASM_FMT_A3;
+ break;
+ }
+ break;
+ case 0x4:
+ op = ASM_OP_SHLADD, fmt = ASM_FMT_A2;
+ break;
+ case 0x6:
+ op = ASM_OP_SHLADDP4, fmt = ASM_FMT_A2;
+ break;
+ case 0x9:
+ if (FIELD(bits, 27, 2) == 1) /* x2b */
+ op = ASM_OP_SUB,
+ fmt = ASM_FMT_A3;
+ break;
+ }
+ }
+ break;
+ case 0x1:
+ switch (FIELD(bits, 29, 8)) { /* za + x2a + zb + x4 */
+ case 0x20:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_PADD1_, fmt = ASM_FMT_A9;
+ break;
+ case 0x1:
+ op = ASM_OP_PADD1_SSS,
+ fmt = ASM_FMT_A9;
+ break;
+ case 0x2:
+ op = ASM_OP_PADD1_UUU,
+ fmt = ASM_FMT_A9;
+ break;
+ case 0x3:
+ op = ASM_OP_PADD1_UUS,
+ fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ case 0x21:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_PSUB1_, fmt = ASM_FMT_A9;
+ break;
+ case 0x1:
+ op = ASM_OP_PSUB1_SSS,
+ fmt = ASM_FMT_A9;
+ break;
+ case 0x2:
+ op = ASM_OP_PSUB1_UUU,
+ fmt = ASM_FMT_A9;
+ break;
+ case 0x3:
+ op = ASM_OP_PSUB1_UUS,
+ fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ case 0x22:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x2:
+ op = ASM_OP_PAVG1_, fmt = ASM_FMT_A9;
+ break;
+ case 0x3:
+ op = ASM_OP_PAVG1_RAZ,
+ fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ case 0x23:
+ if (FIELD(bits, 27, 2) == 2) /* x2b */
+ op = ASM_OP_PAVGSUB1, fmt = ASM_FMT_A9;
+ break;
+ case 0x29:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_PCMP1_EQ, fmt = ASM_FMT_A9;
+ break;
+ case 0x1:
+ op = ASM_OP_PCMP1_GT, fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ case 0x30:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_PADD2_, fmt = ASM_FMT_A9;
+ break;
+ case 0x1:
+ op = ASM_OP_PADD2_SSS,
+ fmt = ASM_FMT_A9;
+ break;
+ case 0x2:
+ op = ASM_OP_PADD2_UUU,
+ fmt = ASM_FMT_A9;
+ break;
+ case 0x3:
+ op = ASM_OP_PADD2_UUS,
+ fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ case 0x31:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_PSUB2_, fmt = ASM_FMT_A9;
+ break;
+ case 0x1:
+ op = ASM_OP_PSUB2_SSS,
+ fmt = ASM_FMT_A9;
+ break;
+ case 0x2:
+ op = ASM_OP_PSUB2_UUU,
+ fmt = ASM_FMT_A9;
+ break;
+ case 0x3:
+ op = ASM_OP_PSUB2_UUS,
+ fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ case 0x32:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x2:
+ op = ASM_OP_PAVG2_, fmt = ASM_FMT_A9;
+ break;
+ case 0x3:
+ op = ASM_OP_PAVG2_RAZ,
+ fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ case 0x33:
+ if (FIELD(bits, 27, 2) == 2) /* x2b */
+ op = ASM_OP_PAVGSUB2, fmt = ASM_FMT_A9;
+ break;
+ case 0x34:
+ op = ASM_OP_PSHLADD2, fmt = ASM_FMT_A10;
+ break;
+ case 0x36:
+ op = ASM_OP_PSHRADD2, fmt = ASM_FMT_A10;
+ break;
+ case 0x39:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_PCMP2_EQ, fmt = ASM_FMT_A9;
+ break;
+ case 0x1:
+ op = ASM_OP_PCMP2_GT, fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ case 0xA0:
+ if (FIELD(bits, 27, 2) == 0) /* x2b */
+ op = ASM_OP_PADD4, fmt = ASM_FMT_A9;
+ break;
+ case 0xA1:
+ if (FIELD(bits, 27, 2) == 0) /* x2b */
+ op = ASM_OP_PSUB4, fmt = ASM_FMT_A9;
+ break;
+ case 0xA9:
+ switch (FIELD(bits, 27, 2)) { /* x2b */
+ case 0x0:
+ op = ASM_OP_PCMP4_EQ, fmt = ASM_FMT_A9;
+ break;
+ case 0x1:
+ op = ASM_OP_PCMP4_GT, fmt = ASM_FMT_A9;
+ break;
+ }
+ break;
+ }
+ break;
+ case 0x2:
+ if (FIELD(bits, 33, 1) == 0) /* ve */
+ op = ASM_OP_ADDS, fmt = ASM_FMT_A4;
+ break;
+ case 0x3:
+ if (FIELD(bits, 33, 1) == 0) /* ve */
+ op = ASM_OP_ADDP4, fmt = ASM_FMT_A4;
+ break;
+ }
+ break;
+ case 0x9:
+ op = ASM_OP_ADDL, fmt = ASM_FMT_A5;
+ break;
+ case 0xC: case 0xD: case 0xE:
+ if (FIELD(bits, 12, 1) == 0) { /* c */
+ switch (FIELD(bits, 33, 8)) { /* maj + tb + x2 + ta */
+ case 0xC0:
+ op = ASM_OP_CMP_LT, fmt = ASM_FMT_A6;
+ break;
+ case 0xC1:
+ op = ASM_OP_CMP_EQ_AND, fmt = ASM_FMT_A6;
+ break;
+ case 0xC2:
+ op = ASM_OP_CMP4_LT, fmt = ASM_FMT_A6;
+ break;
+ case 0xC3:
+ op = ASM_OP_CMP4_EQ_AND, fmt = ASM_FMT_A6;
+ break;
+ case 0xC4: case 0xCC:
+ op = ASM_OP_CMP_LT, fmt = ASM_FMT_A8;
+ break;
+ case 0xC5: case 0xCD:
+ op = ASM_OP_CMP_EQ_AND, fmt = ASM_FMT_A8;
+ break;
+ case 0xC6: case 0xCE:
+ op = ASM_OP_CMP4_LT, fmt = ASM_FMT_A8;
+ break;
+ case 0xC7: case 0xCF:
+ op = ASM_OP_CMP4_EQ_AND, fmt = ASM_FMT_A8;
+ break;
+ case 0xC8:
+ op = ASM_OP_CMP_GT_AND, fmt = ASM_FMT_A7;
+ break;
+ case 0xC9:
+ op = ASM_OP_CMP_GE_AND, fmt = ASM_FMT_A7;
+ break;
+ case 0xCA:
+ op = ASM_OP_CMP4_GT_AND, fmt = ASM_FMT_A7;
+ break;
+ case 0xCB:
+ op = ASM_OP_CMP4_GE_AND, fmt = ASM_FMT_A7;
+ break;
+ case 0xD0:
+ op = ASM_OP_CMP_LTU, fmt = ASM_FMT_A6;
+ break;
+ case 0xD1:
+ op = ASM_OP_CMP_EQ_OR, fmt = ASM_FMT_A6;
+ break;
+ case 0xD2:
+ op = ASM_OP_CMP4_LTU, fmt = ASM_FMT_A6;
+ break;
+ case 0xD3:
+ op = ASM_OP_CMP4_EQ_OR, fmt = ASM_FMT_A6;
+ break;
+ case 0xD4: case 0xDC:
+ op = ASM_OP_CMP_LTU, fmt = ASM_FMT_A8;
+ break;
+ case 0xD5: case 0xDD:
+ op = ASM_OP_CMP_EQ_OR, fmt = ASM_FMT_A8;
+ break;
+ case 0xD6: case 0xDE:
+ op = ASM_OP_CMP4_LTU, fmt = ASM_FMT_A8;
+ break;
+ case 0xD7: case 0xDF:
+ op = ASM_OP_CMP4_EQ_OR, fmt = ASM_FMT_A8;
+ break;
+ case 0xD8:
+ op = ASM_OP_CMP_GT_OR, fmt = ASM_FMT_A7;
+ break;
+ case 0xD9:
+ op = ASM_OP_CMP_GE_OR, fmt = ASM_FMT_A7;
+ break;
+ case 0xDA:
+ op = ASM_OP_CMP4_GT_OR, fmt = ASM_FMT_A7;
+ break;
+ case 0xDB:
+ op = ASM_OP_CMP4_GE_OR, fmt = ASM_FMT_A7;
+ break;
+ case 0xE0:
+ op = ASM_OP_CMP_EQ, fmt = ASM_FMT_A6;
+ break;
+ case 0xE1:
+ op = ASM_OP_CMP_EQ_OR_ANDCM, fmt = ASM_FMT_A6;
+ break;
+ case 0xE2:
+ op = ASM_OP_CMP4_EQ, fmt = ASM_FMT_A6;
+ break;
+ case 0xE3:
+ op = ASM_OP_CMP4_EQ_OR_ANDCM, fmt = ASM_FMT_A6;
+ break;
+ case 0xE4: case 0xEC:
+ op = ASM_OP_CMP_EQ, fmt = ASM_FMT_A8;
+ break;
+ case 0xE5: case 0xED:
+ op = ASM_OP_CMP_EQ_OR_ANDCM, fmt = ASM_FMT_A8;
+ break;
+ case 0xE6: case 0xEE:
+ op = ASM_OP_CMP4_EQ, fmt = ASM_FMT_A8;
+ break;
+ case 0xE7: case 0xEF:
+ op = ASM_OP_CMP4_EQ_OR_ANDCM, fmt = ASM_FMT_A8;
+ break;
+ case 0xE8:
+ op = ASM_OP_CMP_GT_OR_ANDCM, fmt = ASM_FMT_A7;
+ break;
+ case 0xE9:
+ op = ASM_OP_CMP_GE_OR_ANDCM, fmt = ASM_FMT_A7;
+ break;
+ case 0xEA:
+ op = ASM_OP_CMP4_GT_OR_ANDCM, fmt = ASM_FMT_A7;
+ break;
+ case 0xEB:
+ op = ASM_OP_CMP4_GE_OR_ANDCM, fmt = ASM_FMT_A7;
+ break;
+ }
+ } else {
+ switch (FIELD(bits, 33, 8)) { /* maj + tb + x2 + ta */
+ case 0xC0:
+ op = ASM_OP_CMP_LT_UNC, fmt = ASM_FMT_A6;
+ break;
+ case 0xC1:
+ op = ASM_OP_CMP_NE_AND, fmt = ASM_FMT_A6;
+ break;
+ case 0xC2:
+ op = ASM_OP_CMP4_LT_UNC, fmt = ASM_FMT_A6;
+ break;
+ case 0xC3:
+ op = ASM_OP_CMP4_NE_AND, fmt = ASM_FMT_A6;
+ break;
+ case 0xC4: case 0xCC:
+ op = ASM_OP_CMP_LT_UNC, fmt = ASM_FMT_A8;
+ break;
+ case 0xC5: case 0xCD:
+ op = ASM_OP_CMP_NE_AND, fmt = ASM_FMT_A8;
+ break;
+ case 0xC6: case 0xCE:
+ op = ASM_OP_CMP4_LT_UNC, fmt = ASM_FMT_A8;
+ break;
+ case 0xC7: case 0xCF:
+ op = ASM_OP_CMP4_NE_AND, fmt = ASM_FMT_A8;
+ break;
+ case 0xC8:
+ op = ASM_OP_CMP_LE_AND, fmt = ASM_FMT_A7;
+ break;
+ case 0xC9:
+ op = ASM_OP_CMP_LT_AND, fmt = ASM_FMT_A7;
+ break;
+ case 0xCA:
+ op = ASM_OP_CMP4_LE_AND, fmt = ASM_FMT_A7;
+ break;
+ case 0xCB:
+ op = ASM_OP_CMP4_LT_AND, fmt = ASM_FMT_A7;
+ break;
+ case 0xD0:
+ op = ASM_OP_CMP_LTU_UNC, fmt = ASM_FMT_A6;
+ break;
+ case 0xD1:
+ op = ASM_OP_CMP_NE_OR, fmt = ASM_FMT_A6;
+ break;
+ case 0xD2:
+ op = ASM_OP_CMP4_LTU_UNC, fmt = ASM_FMT_A6;
+ break;
+ case 0xD3:
+ op = ASM_OP_CMP4_NE_OR, fmt = ASM_FMT_A6;
+ break;
+ case 0xD4: case 0xDC:
+ op = ASM_OP_CMP_LTU_UNC, fmt = ASM_FMT_A8;
+ break;
+ case 0xD5: case 0xDD:
+ op = ASM_OP_CMP_NE_OR, fmt = ASM_FMT_A8;
+ break;
+ case 0xD6: case 0xDE:
+ op = ASM_OP_CMP4_LTU_UNC, fmt = ASM_FMT_A8;
+ break;
+ case 0xD7: case 0xDF:
+ op = ASM_OP_CMP4_NE_OR, fmt = ASM_FMT_A8;
+ break;
+ case 0xD8:
+ op = ASM_OP_CMP_LE_OR, fmt = ASM_FMT_A7;
+ break;
+ case 0xD9:
+ op = ASM_OP_CMP_LT_OR, fmt = ASM_FMT_A7;
+ break;
+ case 0xDA:
+ op = ASM_OP_CMP4_LE_OR, fmt = ASM_FMT_A7;
+ break;
+ case 0xDB:
+ op = ASM_OP_CMP4_LT_OR, fmt = ASM_FMT_A7;
+ break;
+ case 0xE0:
+ op = ASM_OP_CMP_EQ_UNC, fmt = ASM_FMT_A6;
+ break;
+ case 0xE1:
+ op = ASM_OP_CMP_NE_OR_ANDCM, fmt = ASM_FMT_A6;
+ break;
+ case 0xE2:
+ op = ASM_OP_CMP4_EQ_UNC, fmt = ASM_FMT_A6;
+ break;
+ case 0xE3:
+ op = ASM_OP_CMP4_NE_OR_ANDCM, fmt = ASM_FMT_A6;
+ break;
+ case 0xE4: case 0xEC:
+ op = ASM_OP_CMP_EQ_UNC, fmt = ASM_FMT_A8;
+ break;
+ case 0xE5: case 0xED:
+ op = ASM_OP_CMP_NE_OR_ANDCM, fmt = ASM_FMT_A8;
+ break;
+ case 0xE6: case 0xEE:
+ op = ASM_OP_CMP4_EQ_UNC, fmt = ASM_FMT_A8;
+ break;
+ case 0xE7: case 0xEF:
+ op = ASM_OP_CMP4_NE_OR_ANDCM, fmt = ASM_FMT_A8;
+ break;
+ case 0xE8:
+ op = ASM_OP_CMP_LE_OR_ANDCM, fmt = ASM_FMT_A7;
+ break;
+ case 0xE9:
+ op = ASM_OP_CMP_LT_OR_ANDCM, fmt = ASM_FMT_A7;
+ break;
+ case 0xEA:
+ op = ASM_OP_CMP4_LE_OR_ANDCM, fmt = ASM_FMT_A7;
+ break;
+ case 0xEB:
+ op = ASM_OP_CMP4_LT_OR_ANDCM, fmt = ASM_FMT_A7;
+ break;
+ }
+ }
+ break;
+ }
+
+ if (op != ASM_OP_NONE)
+ return (asm_extract(op, fmt, bits, b, slot));
+ return (0);
+}
+
+/*
+ * Decode B-unit (branch) instructions from the slot at 'ip'.
+ */
+static int
+asm_decodeB(uint64_t ip, struct asm_bundle *b, int slot)
+{
+ uint64_t bits;
+ enum asm_fmt fmt;
+ enum asm_op op;
+
+ bits = SLOT(ip, slot);
+ fmt = ASM_FMT_NONE, op = ASM_OP_NONE;
+
+ switch((int)OPCODE(bits)) {
+ case 0x0:
+ switch (FIELD(bits, 27, 6)) { /* x6 */
+ case 0x0:
+ op = ASM_OP_BREAK_B, fmt = ASM_FMT_B9;
+ break;
+ case 0x2:
+ op = ASM_OP_COVER, fmt = ASM_FMT_B8;
+ break;
+ case 0x4:
+ op = ASM_OP_CLRRRB_, fmt = ASM_FMT_B8;
+ break;
+ case 0x5:
+ op = ASM_OP_CLRRRB_PR, fmt = ASM_FMT_B8;
+ break;
+ case 0x8:
+ op = ASM_OP_RFI, fmt = ASM_FMT_B8;
+ break;
+ case 0xC:
+ op = ASM_OP_BSW_0, fmt = ASM_FMT_B8;
+ break;
+ case 0xD:
+ op = ASM_OP_BSW_1, fmt = ASM_FMT_B8;
+ break;
+ case 0x10:
+ op = ASM_OP_EPC, fmt = ASM_FMT_B8;
+ break;
+ case 0x20:
+ switch (FIELD(bits, 6, 3)) { /* btype */
+ case 0x0:
+ op = ASM_OP_BR_COND, fmt = ASM_FMT_B4;
+ break;
+ case 0x1:
+ op = ASM_OP_BR_IA, fmt = ASM_FMT_B4;
+ break;
+ }
+ break;
+ case 0x21:
+ if (FIELD(bits, 6, 3) == 4) /* btype */
+ op = ASM_OP_BR_RET, fmt = ASM_FMT_B4;
+ break;
+ }
+ break;
+ case 0x1: /* indirect call */
+ op = ASM_OP_BR_CALL, fmt = ASM_FMT_B5;
+ break;
+ case 0x2:
+ switch (FIELD(bits, 27, 6)) { /* x6 */
+ case 0x0:
+ op = ASM_OP_NOP_B, fmt = ASM_FMT_B9;
+ break;
+ case 0x10:
+ op = ASM_OP_BRP_, fmt = ASM_FMT_B7;
+ break;
+ case 0x11:
+ op = ASM_OP_BRP_RET, fmt = ASM_FMT_B7;
+ break;
+ }
+ break;
+ case 0x4:
+ switch (FIELD(bits, 6, 3)) { /* btype */
+ case 0x0:
+ op = ASM_OP_BR_COND, fmt = ASM_FMT_B1;
+ break;
+ case 0x2:
+ op = ASM_OP_BR_WEXIT, fmt = ASM_FMT_B1;
+ break;
+ case 0x3:
+ op = ASM_OP_BR_WTOP, fmt = ASM_FMT_B1;
+ break;
+ case 0x5:
+ op = ASM_OP_BR_CLOOP, fmt = ASM_FMT_B2;
+ break;
+ case 0x6:
+ op = ASM_OP_BR_CEXIT, fmt = ASM_FMT_B2;
+ break;
+ case 0x7:
+ op = ASM_OP_BR_CTOP, fmt = ASM_FMT_B2;
+ break;
+ }
+ break;
+ case 0x5: /* IP-relative call */
+ op = ASM_OP_BR_CALL, fmt = ASM_FMT_B3;
+ break;
+ case 0x7:
+ op = ASM_OP_BRP_, fmt = ASM_FMT_B6;
+ break;
+ }
+
+ if (op != ASM_OP_NONE)
+ return (asm_extract(op, fmt, bits, b, slot));
+ return (0);
+}
+
+/*
+ * Decode F-unit (floating-point) instructions from the slot at 'ip'.
+ */
+static int
+asm_decodeF(uint64_t ip, struct asm_bundle *b, int slot)
+{
+ uint64_t bits;
+ enum asm_fmt fmt;
+ enum asm_op op;
+
+ bits = SLOT(ip, slot);
+ fmt = ASM_FMT_NONE, op = ASM_OP_NONE;
+
+ switch((int)OPCODE(bits)) {
+ case 0x0:
+ if (FIELD(bits, 33, 1) == 0) { /* x */
+ switch (FIELD(bits, 27, 6)) { /* x6 */
+ case 0x0:
+ op = ASM_OP_BREAK_F, fmt = ASM_FMT_F15;
+ break;
+ case 0x1:
+ op = ASM_OP_NOP_F, fmt = ASM_FMT_F15;
+ break;
+ case 0x4:
+ op = ASM_OP_FSETC, fmt = ASM_FMT_F12;
+ break;
+ case 0x5:
+ op = ASM_OP_FCLRF, fmt = ASM_FMT_F13;
+ break;
+ case 0x8:
+ op = ASM_OP_FCHKF, fmt = ASM_FMT_F14;
+ break;
+ case 0x10:
+ op = ASM_OP_FMERGE_S, fmt = ASM_FMT_F9;
+ break;
+ case 0x11:
+ op = ASM_OP_FMERGE_NS, fmt = ASM_FMT_F9;
+ break;
+ case 0x12:
+ op = ASM_OP_FMERGE_SE, fmt = ASM_FMT_F9;
+ break;
+ case 0x14:
+ op = ASM_OP_FMIN, fmt = ASM_FMT_F8;
+ break;
+ case 0x15:
+ op = ASM_OP_FMAX, fmt = ASM_FMT_F8;
+ break;
+ case 0x16:
+ op = ASM_OP_FAMIN, fmt = ASM_FMT_F8;
+ break;
+ case 0x17:
+ op = ASM_OP_FAMAX, fmt = ASM_FMT_F8;
+ break;
+ case 0x18:
+ op = ASM_OP_FCVT_FX, fmt = ASM_FMT_F10;
+ break;
+ case 0x19:
+ op = ASM_OP_FCVT_FXU, fmt = ASM_FMT_F10;
+ break;
+ case 0x1A:
+ op = ASM_OP_FCVT_FX_TRUNC, fmt = ASM_FMT_F10;
+ break;
+ case 0x1B:
+ op = ASM_OP_FCVT_FXU_TRUNC, fmt = ASM_FMT_F10;
+ break;
+ case 0x1C:
+ op = ASM_OP_FCVT_XF, fmt = ASM_FMT_F11;
+ break;
+ case 0x28:
+ op = ASM_OP_FPACK, fmt = ASM_FMT_F9;
+ break;
+ case 0x2C:
+ op = ASM_OP_FAND, fmt = ASM_FMT_F9;
+ break;
+ case 0x2D:
+ op = ASM_OP_FANDCM, fmt = ASM_FMT_F9;
+ break;
+ case 0x2E:
+ op = ASM_OP_FOR, fmt = ASM_FMT_F9;
+ break;
+ case 0x2F:
+ op = ASM_OP_FXOR, fmt = ASM_FMT_F9;
+ break;
+ case 0x34:
+ op = ASM_OP_FSWAP_, fmt = ASM_FMT_F9;
+ break;
+ case 0x35:
+ op = ASM_OP_FSWAP_NL, fmt = ASM_FMT_F9;
+ break;
+ case 0x36:
+ op = ASM_OP_FSWAP_NR, fmt = ASM_FMT_F9;
+ break;
+ case 0x39:
+ op = ASM_OP_FMIX_LR, fmt = ASM_FMT_F9;
+ break;
+ case 0x3A:
+ op = ASM_OP_FMIX_R, fmt = ASM_FMT_F9;
+ break;
+ case 0x3B:
+ op = ASM_OP_FMIX_L, fmt = ASM_FMT_F9;
+ break;
+ case 0x3C:
+ op = ASM_OP_FSXT_R, fmt = ASM_FMT_F9;
+ break;
+ case 0x3D:
+ op = ASM_OP_FSXT_L, fmt = ASM_FMT_F9;
+ break;
+ }
+ } else {
+ if (FIELD(bits, 36, 1) == 0) /* q */
+ op = ASM_OP_FRCPA, fmt = ASM_FMT_F6;
+ else
+ op = ASM_OP_FRSQRTA, fmt = ASM_FMT_F7;
+ }
+ break;
+ case 0x1:
+ if (FIELD(bits, 33, 1) == 0) { /* x */
+ switch (FIELD(bits, 27, 6)) { /* x6 */
+ case 0x10:
+ op = ASM_OP_FPMERGE_S, fmt = ASM_FMT_F9;
+ break;
+ case 0x11:
+ op = ASM_OP_FPMERGE_NS, fmt = ASM_FMT_F9;
+ break;
+ case 0x12:
+ op = ASM_OP_FPMERGE_SE, fmt = ASM_FMT_F9;
+ break;
+ case 0x14:
+ op = ASM_OP_FPMIN, fmt = ASM_FMT_F8;
+ break;
+ case 0x15:
+ op = ASM_OP_FPMAX, fmt = ASM_FMT_F8;
+ break;
+ case 0x16:
+ op = ASM_OP_FPAMIN, fmt = ASM_FMT_F8;
+ break;
+ case 0x17:
+ op = ASM_OP_FPAMAX, fmt = ASM_FMT_F8;
+ break;
+ case 0x18:
+ op = ASM_OP_FPCVT_FX, fmt = ASM_FMT_F10;
+ break;
+ case 0x19:
+ op = ASM_OP_FPCVT_FXU, fmt = ASM_FMT_F10;
+ break;
+ case 0x1A:
+ op = ASM_OP_FPCVT_FX_TRUNC, fmt = ASM_FMT_F10;
+ break;
+ case 0x1B:
+ op = ASM_OP_FPCVT_FXU_TRUNC, fmt = ASM_FMT_F10;
+ break;
+ case 0x30:
+ op = ASM_OP_FPCMP_EQ, fmt = ASM_FMT_F8;
+ break;
+ case 0x31:
+ op = ASM_OP_FPCMP_LT, fmt = ASM_FMT_F8;
+ break;
+ case 0x32:
+ op = ASM_OP_FPCMP_LE, fmt = ASM_FMT_F8;
+ break;
+ case 0x33:
+ op = ASM_OP_FPCMP_UNORD, fmt = ASM_FMT_F8;
+ break;
+ case 0x34:
+ op = ASM_OP_FPCMP_NEQ, fmt = ASM_FMT_F8;
+ break;
+ case 0x35:
+ op = ASM_OP_FPCMP_NLT, fmt = ASM_FMT_F8;
+ break;
+ case 0x36:
+ op = ASM_OP_FPCMP_NLE, fmt = ASM_FMT_F8;
+ break;
+ case 0x37:
+ op = ASM_OP_FPCMP_ORD, fmt = ASM_FMT_F8;
+ break;
+ }
+ } else {
+ if (FIELD(bits, 36, 1) == 0) /* q */
+ op = ASM_OP_FPRCPA, fmt = ASM_FMT_F6;
+ else
+ op = ASM_OP_FPRSQRTA, fmt = ASM_FMT_F7;
+ }
+ break;
+ case 0x4:
+ op = ASM_OP_FCMP, fmt = ASM_FMT_F4;
+ break;
+ case 0x5:
+ op = ASM_OP_FCLASS_M, fmt = ASM_FMT_F5;
+ break;
+ case 0x8:
+ if (FIELD(bits, 36, 1) == 0) /* x */
+ op = ASM_OP_FMA_, fmt = ASM_FMT_F1;
+ else
+ op = ASM_OP_FMA_S, fmt = ASM_FMT_F1;
+ break;
+ case 0x9:
+ if (FIELD(bits, 36, 1) == 0) /* x */
+ op = ASM_OP_FMA_D, fmt = ASM_FMT_F1;
+ else
+ op = ASM_OP_FPMA, fmt = ASM_FMT_F1;
+ break;
+ case 0xA:
+ if (FIELD(bits, 36, 1) == 0) /* x */
+ op = ASM_OP_FMS_, fmt = ASM_FMT_F1;
+ else
+ op = ASM_OP_FMS_S, fmt = ASM_FMT_F1;
+ break;
+ case 0xB:
+ if (FIELD(bits, 36, 1) == 0) /* x */
+ op = ASM_OP_FMS_D, fmt = ASM_FMT_F1;
+ else
+ op = ASM_OP_FPMS, fmt = ASM_FMT_F1;
+ break;
+ case 0xC:
+ if (FIELD(bits, 36, 1) == 0) /* x */
+ op = ASM_OP_FNMA_, fmt = ASM_FMT_F1;
+ else
+ op = ASM_OP_FNMA_S, fmt = ASM_FMT_F1;
+ break;
+ case 0xD:
+ if (FIELD(bits, 36, 1) == 0) /* x */
+ op = ASM_OP_FNMA_D, fmt = ASM_FMT_F1;
+ else
+ op = ASM_OP_FPNMA, fmt = ASM_FMT_F1;
+ break;
+ case 0xE:
+ if (FIELD(bits, 36, 1) == 1) { /* x */
+ switch (FIELD(bits, 34, 2)) { /* x2 */
+ case 0x0:
+ op = ASM_OP_XMA_L, fmt = ASM_FMT_F2;
+ break;
+ case 0x2:
+ op = ASM_OP_XMA_HU, fmt = ASM_FMT_F2;
+ break;
+ case 0x3:
+ op = ASM_OP_XMA_H, fmt = ASM_FMT_F2;
+ break;
+ }
+ } else
+ op = ASM_OP_FSELECT, fmt = ASM_FMT_F3;
+ break;
+ }
+
+ if (op != ASM_OP_NONE)
+ return (asm_extract(op, fmt, bits, b, slot));
+ return (0);
+}
+
+/*
+ * Decode I-unit instructions.
+ *
+ * Major opcodes 8 and up are A-unit (ALU) encodings, which can be
+ * issued on an I-unit as well; those are handed off to asm_decodeA().
+ * A recognized encoding is passed on to asm_extract() for operand
+ * extraction; 0 is returned for invalid or unrecognized encodings.
+ */
+static int
+asm_decodeI(uint64_t ip, struct asm_bundle *b, int slot)
+{
+	uint64_t bits;
+	enum asm_fmt fmt;
+	enum asm_op op;
+
+	bits = SLOT(ip, slot);
+	if ((int)OPCODE(bits) >= 8)
+		return (asm_decodeA(bits, b, slot));
+	fmt = ASM_FMT_NONE, op = ASM_OP_NONE;
+
+	switch((int)OPCODE(bits)) {
+	case 0x0:
+		/* Misc: break/nop, extend ops, moves to/from br/ar/pr/ip. */
+		switch (FIELD(bits, 33, 3)) { /* x3 */
+		case 0x0:
+			switch (FIELD(bits, 27, 6)) { /* x6 */
+			case 0x0:
+				op = ASM_OP_BREAK_I, fmt = ASM_FMT_I19;
+				break;
+			case 0x1:
+				op = ASM_OP_NOP_I, fmt = ASM_FMT_I19;
+				break;
+			case 0xA:
+				op = ASM_OP_MOV_I, fmt = ASM_FMT_I27;
+				break;
+			case 0x10:
+				op = ASM_OP_ZXT1, fmt = ASM_FMT_I29;
+				break;
+			case 0x11:
+				op = ASM_OP_ZXT2, fmt = ASM_FMT_I29;
+				break;
+			case 0x12:
+				op = ASM_OP_ZXT4, fmt = ASM_FMT_I29;
+				break;
+			case 0x14:
+				op = ASM_OP_SXT1, fmt = ASM_FMT_I29;
+				break;
+			case 0x15:
+				op = ASM_OP_SXT2, fmt = ASM_FMT_I29;
+				break;
+			case 0x16:
+				op = ASM_OP_SXT4, fmt = ASM_FMT_I29;
+				break;
+			case 0x18:
+				op = ASM_OP_CZX1_L, fmt = ASM_FMT_I29;
+				break;
+			case 0x19:
+				op = ASM_OP_CZX2_L, fmt = ASM_FMT_I29;
+				break;
+			case 0x1C:
+				op = ASM_OP_CZX1_R, fmt = ASM_FMT_I29;
+				break;
+			case 0x1D:
+				op = ASM_OP_CZX2_R, fmt = ASM_FMT_I29;
+				break;
+			case 0x2A:
+				op = ASM_OP_MOV_I, fmt = ASM_FMT_I26;
+				break;
+			case 0x30:
+				op = ASM_OP_MOV_IP, fmt = ASM_FMT_I25;
+				break;
+			case 0x31:
+				op = ASM_OP_MOV_, fmt = ASM_FMT_I22;
+				break;
+			case 0x32:
+				op = ASM_OP_MOV_I, fmt = ASM_FMT_I28;
+				break;
+			case 0x33:
+				op = ASM_OP_MOV_PR, fmt = ASM_FMT_I25;
+				break;
+			}
+			break;
+		case 0x1:
+			op = ASM_OP_CHK_S_I, fmt = ASM_FMT_I20;
+			break;
+		case 0x2:
+			op = ASM_OP_MOV_, fmt = ASM_FMT_I24;
+			break;
+		case 0x3:
+			op = ASM_OP_MOV_, fmt = ASM_FMT_I23;
+			break;
+		case 0x7:
+			if (FIELD(bits, 22, 1) == 0) /* x */
+				op = ASM_OP_MOV_, fmt = ASM_FMT_I21;
+			else
+				op = ASM_OP_MOV_RET, fmt = ASM_FMT_I21;
+			break;
+		}
+		break;
+	case 0x4:
+		op = ASM_OP_DEP_, fmt = ASM_FMT_I15;
+		break;
+	case 0x5:
+		/* Bit test/NaT test, extract, deposit and shift-pair ops. */
+		switch (FIELD(bits, 33, 3)) { /* x + x2 */
+		case 0x0:
+			if (FIELD(bits, 36, 1) == 0) { /* tb */
+				switch (FIELD(bits, 12, 2)) { /* c + y */
+				case 0x0:
+					op = ASM_OP_TBIT_Z, fmt = ASM_FMT_I16;
+					break;
+				case 0x1:
+					op = ASM_OP_TBIT_Z_UNC,
+					    fmt = ASM_FMT_I16;
+					break;
+				case 0x2:
+					op = ASM_OP_TNAT_Z, fmt = ASM_FMT_I17;
+					break;
+				case 0x3:
+					op = ASM_OP_TNAT_Z_UNC,
+					    fmt = ASM_FMT_I17;
+					break;
+				}
+			} else {
+				switch (FIELD(bits, 12, 2)) { /* c + y */
+				case 0x0:
+					op = ASM_OP_TBIT_Z_AND,
+					    fmt = ASM_FMT_I16;
+					break;
+				case 0x1:
+					op = ASM_OP_TBIT_NZ_AND,
+					    fmt = ASM_FMT_I16;
+					break;
+				case 0x2:
+					op = ASM_OP_TNAT_Z_AND,
+					    fmt = ASM_FMT_I17;
+					break;
+				case 0x3:
+					op = ASM_OP_TNAT_NZ_AND,
+					    fmt = ASM_FMT_I17;
+					break;
+				}
+			}
+			break;
+		case 0x1:
+			if (FIELD(bits, 36, 1) == 0) { /* tb */
+				switch (FIELD(bits, 12, 2)) { /* c + y */
+				case 0x0:
+					op = ASM_OP_TBIT_Z_OR,
+					    fmt = ASM_FMT_I16;
+					break;
+				case 0x1:
+					op = ASM_OP_TBIT_NZ_OR,
+					    fmt = ASM_FMT_I16;
+					break;
+				case 0x2:
+					op = ASM_OP_TNAT_Z_OR,
+					    fmt = ASM_FMT_I17;
+					break;
+				case 0x3:
+					op = ASM_OP_TNAT_NZ_OR,
+					    fmt = ASM_FMT_I17;
+					break;
+				}
+			} else {
+				switch (FIELD(bits, 12, 2)) { /* c + y */
+				case 0x0:
+					op = ASM_OP_TBIT_Z_OR_ANDCM,
+					    fmt = ASM_FMT_I16;
+					break;
+				case 0x1:
+					op = ASM_OP_TBIT_NZ_OR_ANDCM,
+					    fmt = ASM_FMT_I16;
+					break;
+				case 0x2:
+					op = ASM_OP_TNAT_Z_OR_ANDCM,
+					    fmt = ASM_FMT_I17;
+					break;
+				case 0x3:
+					op = ASM_OP_TNAT_NZ_OR_ANDCM,
+					    fmt = ASM_FMT_I17;
+					break;
+				}
+			}
+			break;
+		case 0x2:
+			op = ASM_OP_EXTR, fmt = ASM_FMT_I11;
+			break;
+		case 0x3:
+			if (FIELD(bits, 26, 1) == 0) /* y */
+				op = ASM_OP_DEP_Z, fmt = ASM_FMT_I12;
+			else
+				op = ASM_OP_DEP_Z, fmt = ASM_FMT_I13;
+			break;
+		case 0x6:
+			op = ASM_OP_SHRP, fmt = ASM_FMT_I10;
+			break;
+		case 0x7:
+			op = ASM_OP_DEP_, fmt = ASM_FMT_I14;
+			break;
+		}
+		break;
+	case 0x7:
+		/* Multimedia (parallel) shift, pack/unpack, mix and mux ops. */
+		switch (FIELD(bits, 32, 5)) { /* ve + zb + x2a + za */
+		case 0x2:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x0:
+				op = ASM_OP_PSHR2_U, fmt = ASM_FMT_I5;
+				break;
+			case 0x1: case 0x5: case 0x9: case 0xD:
+				op = ASM_OP_PMPYSHR2_U, fmt = ASM_FMT_I1;
+				break;
+			case 0x2:
+				op = ASM_OP_PSHR2_, fmt = ASM_FMT_I5;
+				break;
+			case 0x3: case 0x7: case 0xB: case 0xF:
+				op = ASM_OP_PMPYSHR2_, fmt = ASM_FMT_I1;
+				break;
+			case 0x4:
+				op = ASM_OP_PSHL2, fmt = ASM_FMT_I7;
+				break;
+			}
+			break;
+		case 0x6:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x1:
+				op = ASM_OP_PSHR2_U, fmt = ASM_FMT_I6;
+				break;
+			case 0x3:
+				op = ASM_OP_PSHR2_, fmt = ASM_FMT_I6;
+				break;
+			case 0x9:
+				op = ASM_OP_POPCNT, fmt = ASM_FMT_I9;
+				break;
+			}
+			break;
+		case 0x8:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x1:
+				op = ASM_OP_PMIN1_U, fmt = ASM_FMT_I2;
+				break;
+			case 0x4:
+				op = ASM_OP_UNPACK1_H, fmt = ASM_FMT_I2;
+				break;
+			case 0x5:
+				op = ASM_OP_PMAX1_U, fmt = ASM_FMT_I2;
+				break;
+			case 0x6:
+				op = ASM_OP_UNPACK1_L, fmt = ASM_FMT_I2;
+				break;
+			case 0x8:
+				op = ASM_OP_MIX1_R, fmt = ASM_FMT_I2;
+				break;
+			case 0xA:
+				op = ASM_OP_MIX1_L, fmt = ASM_FMT_I2;
+				break;
+			case 0xB:
+				op = ASM_OP_PSAD1, fmt = ASM_FMT_I2;
+				break;
+			}
+			break;
+		case 0xA:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x0:
+				op = ASM_OP_PACK2_USS, fmt = ASM_FMT_I2;
+				break;
+			case 0x2:
+				op = ASM_OP_PACK2_SSS, fmt = ASM_FMT_I2;
+				break;
+			case 0x3:
+				op = ASM_OP_PMIN2, fmt = ASM_FMT_I2;
+				break;
+			case 0x4:
+				op = ASM_OP_UNPACK2_H, fmt = ASM_FMT_I2;
+				break;
+			case 0x6:
+				op = ASM_OP_UNPACK2_L, fmt = ASM_FMT_I2;
+				break;
+			case 0x7:
+				op = ASM_OP_PMAX2, fmt = ASM_FMT_I2;
+				break;
+			case 0x8:
+				op = ASM_OP_MIX2_R, fmt = ASM_FMT_I2;
+				break;
+			case 0xA:
+				op = ASM_OP_MIX2_L, fmt = ASM_FMT_I2;
+				break;
+			case 0xD:
+				op = ASM_OP_PMPY2_R, fmt = ASM_FMT_I2;
+				break;
+			case 0xF:
+				op = ASM_OP_PMPY2_L, fmt = ASM_FMT_I2;
+				break;
+			}
+			break;
+		case 0xC:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0xA:
+				op = ASM_OP_MUX1, fmt = ASM_FMT_I3;
+				break;
+			}
+			break;
+		case 0xE:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x5:
+				op = ASM_OP_PSHL2, fmt = ASM_FMT_I8;
+				break;
+			case 0xA:
+				op = ASM_OP_MUX2, fmt = ASM_FMT_I4;
+				break;
+			}
+			break;
+		case 0x10:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x0:
+				op = ASM_OP_PSHR4_U, fmt = ASM_FMT_I5;
+				break;
+			case 0x2:
+				op = ASM_OP_PSHR4_, fmt = ASM_FMT_I5;
+				break;
+			case 0x4:
+				op = ASM_OP_PSHL4, fmt = ASM_FMT_I7;
+				break;
+			}
+			break;
+		case 0x12:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x0:
+				op = ASM_OP_SHR_U, fmt = ASM_FMT_I5;
+				break;
+			case 0x2:
+				op = ASM_OP_SHR_, fmt = ASM_FMT_I5;
+				break;
+			case 0x4:
+				op = ASM_OP_SHL, fmt = ASM_FMT_I7;
+				break;
+			}
+			break;
+		case 0x14:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x1:
+				op = ASM_OP_PSHR4_U, fmt = ASM_FMT_I6;
+				break;
+			case 0x3:
+				op = ASM_OP_PSHR4_, fmt = ASM_FMT_I6;
+				break;
+			}
+			break;
+		case 0x18:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x2:
+				op = ASM_OP_PACK4_SSS, fmt = ASM_FMT_I2;
+				break;
+			case 0x4:
+				op = ASM_OP_UNPACK4_H, fmt = ASM_FMT_I2;
+				break;
+			case 0x6:
+				op = ASM_OP_UNPACK4_L, fmt = ASM_FMT_I2;
+				break;
+			case 0x8:
+				op = ASM_OP_MIX4_R, fmt = ASM_FMT_I2;
+				break;
+			case 0xA:
+				op = ASM_OP_MIX4_L, fmt = ASM_FMT_I2;
+				break;
+			}
+			break;
+		case 0x1C:
+			switch (FIELD(bits, 28, 4)) { /* x2b + x2c */
+			case 0x5:
+				op = ASM_OP_PSHL4, fmt = ASM_FMT_I8;
+				break;
+			}
+			break;
+		}
+		break;
+	}
+
+	if (op != ASM_OP_NONE)
+		return (asm_extract(op, fmt, bits, b, slot));
+	return (0);
+}
+
+/*
+ * Decode M-unit instructions.
+ *
+ * Major opcodes 8 and up are A-unit (ALU) encodings, which can be
+ * issued on an M-unit as well; those are handed off to asm_decodeA().
+ * A recognized encoding is passed on to asm_extract() for operand
+ * extraction; 0 is returned for invalid or unrecognized encodings.
+ */
+static int
+asm_decodeM(uint64_t ip, struct asm_bundle *b, int slot)
+{
+	uint64_t bits;
+	enum asm_fmt fmt;
+	enum asm_op op;
+
+	bits = SLOT(ip, slot);
+	if ((int)OPCODE(bits) >= 8)
+		return (asm_decodeA(bits, b, slot));
+	fmt = ASM_FMT_NONE, op = ASM_OP_NONE;
+
+	switch((int)OPCODE(bits)) {
+	case 0x0:
+		/* System/memory management: break/nop, user-mask ops,
+		 * RSE control, cache/ALAT ops, serialization. */
+		switch (FIELD(bits, 33, 3)) { /* x3 */
+		case 0x0:
+			switch (FIELD(bits, 27, 6)) { /* x6 (x4 + x2) */
+			case 0x0:
+				op = ASM_OP_BREAK_M, fmt = ASM_FMT_M37;
+				break;
+			case 0x1:
+				op = ASM_OP_NOP_M, fmt = ASM_FMT_M37;
+				break;
+			case 0x4: case 0x14: case 0x24: case 0x34:
+				op = ASM_OP_SUM, fmt = ASM_FMT_M44;
+				break;
+			case 0x5: case 0x15: case 0x25: case 0x35:
+				op = ASM_OP_RUM, fmt = ASM_FMT_M44;
+				break;
+			case 0x6: case 0x16: case 0x26: case 0x36:
+				op = ASM_OP_SSM, fmt = ASM_FMT_M44;
+				break;
+			case 0x7: case 0x17: case 0x27: case 0x37:
+				op = ASM_OP_RSM, fmt = ASM_FMT_M44;
+				break;
+			case 0xA:
+				op = ASM_OP_LOADRS, fmt = ASM_FMT_M25;
+				break;
+			case 0xC:
+				op = ASM_OP_FLUSHRS, fmt = ASM_FMT_M25;
+				break;
+			case 0x10:
+				op = ASM_OP_INVALA_, fmt = ASM_FMT_M24;
+				break;
+			case 0x12:
+				op = ASM_OP_INVALA_E, fmt = ASM_FMT_M26;
+				break;
+			case 0x13:
+				op = ASM_OP_INVALA_E, fmt = ASM_FMT_M27;
+				break;
+			case 0x20:
+				op = ASM_OP_FWB, fmt = ASM_FMT_M24;
+				break;
+			case 0x22:
+				op = ASM_OP_MF_, fmt = ASM_FMT_M24;
+				break;
+			case 0x23:
+				op = ASM_OP_MF_A, fmt = ASM_FMT_M24;
+				break;
+			case 0x28:
+				op = ASM_OP_MOV_M, fmt = ASM_FMT_M30;
+				break;
+			case 0x30:
+				op = ASM_OP_SRLZ_D, fmt = ASM_FMT_M24;
+				break;
+			case 0x31:
+				op = ASM_OP_SRLZ_I, fmt = ASM_FMT_M24;
+				break;
+			case 0x33:
+				op = ASM_OP_SYNC_I, fmt = ASM_FMT_M24;
+				break;
+			}
+			break;
+		case 0x4:
+			op = ASM_OP_CHK_A_NC, fmt = ASM_FMT_M22;
+			break;
+		case 0x5:
+			op = ASM_OP_CHK_A_CLR, fmt = ASM_FMT_M22;
+			break;
+		case 0x6:
+			op = ASM_OP_CHK_A_NC, fmt = ASM_FMT_M23;
+			break;
+		case 0x7:
+			op = ASM_OP_CHK_A_CLR, fmt = ASM_FMT_M23;
+			break;
+		}
+		break;
+	case 0x1:
+		/* System register moves, TLB management and probe ops. */
+		switch (FIELD(bits, 33, 3)) { /* x3 */
+		case 0x0:
+			switch (FIELD(bits, 27, 6)) { /* x6 (x4 + x2) */
+			case 0x0:
+				op = ASM_OP_MOV_RR, fmt = ASM_FMT_M42;
+				break;
+			case 0x1:
+				op = ASM_OP_MOV_DBR, fmt = ASM_FMT_M42;
+				break;
+			case 0x2:
+				op = ASM_OP_MOV_IBR, fmt = ASM_FMT_M42;
+				break;
+			case 0x3:
+				op = ASM_OP_MOV_PKR, fmt = ASM_FMT_M42;
+				break;
+			case 0x4:
+				op = ASM_OP_MOV_PMC, fmt = ASM_FMT_M42;
+				break;
+			case 0x5:
+				op = ASM_OP_MOV_PMD, fmt = ASM_FMT_M42;
+				break;
+			case 0x6:
+				op = ASM_OP_MOV_MSR, fmt = ASM_FMT_M42;
+				break;
+			case 0x9:
+				op = ASM_OP_PTC_L, fmt = ASM_FMT_M45;
+				break;
+			case 0xA:
+				op = ASM_OP_PTC_G, fmt = ASM_FMT_M45;
+				break;
+			case 0xB:
+				op = ASM_OP_PTC_GA, fmt = ASM_FMT_M45;
+				break;
+			case 0xC:
+				op = ASM_OP_PTR_D, fmt = ASM_FMT_M45;
+				break;
+			case 0xD:
+				op = ASM_OP_PTR_I, fmt = ASM_FMT_M45;
+				break;
+			case 0xE:
+				op = ASM_OP_ITR_D, fmt = ASM_FMT_M42;
+				break;
+			case 0xF:
+				op = ASM_OP_ITR_I, fmt = ASM_FMT_M42;
+				break;
+			case 0x10:
+				op = ASM_OP_MOV_RR, fmt = ASM_FMT_M43;
+				break;
+			case 0x11:
+				op = ASM_OP_MOV_DBR, fmt = ASM_FMT_M43;
+				break;
+			case 0x12:
+				op = ASM_OP_MOV_IBR, fmt = ASM_FMT_M43;
+				break;
+			case 0x13:
+				op = ASM_OP_MOV_PKR, fmt = ASM_FMT_M43;
+				break;
+			case 0x14:
+				op = ASM_OP_MOV_PMC, fmt = ASM_FMT_M43;
+				break;
+			case 0x15:
+				op = ASM_OP_MOV_PMD, fmt = ASM_FMT_M43;
+				break;
+			case 0x16:
+				op = ASM_OP_MOV_MSR, fmt = ASM_FMT_M43;
+				break;
+			case 0x17:
+				op = ASM_OP_MOV_CPUID, fmt = ASM_FMT_M43;
+				break;
+			case 0x18:
+				op = ASM_OP_PROBE_R, fmt = ASM_FMT_M39;
+				break;
+			case 0x19:
+				op = ASM_OP_PROBE_W, fmt = ASM_FMT_M39;
+				break;
+			case 0x1A:
+				op = ASM_OP_THASH, fmt = ASM_FMT_M46;
+				break;
+			case 0x1B:
+				op = ASM_OP_TTAG, fmt = ASM_FMT_M46;
+				break;
+			case 0x1E:
+				op = ASM_OP_TPA, fmt = ASM_FMT_M46;
+				break;
+			case 0x1F:
+				op = ASM_OP_TAK, fmt = ASM_FMT_M46;
+				break;
+			case 0x21:
+				op = ASM_OP_MOV_PSR_UM, fmt = ASM_FMT_M36;
+				break;
+			case 0x22:
+				op = ASM_OP_MOV_M, fmt = ASM_FMT_M31;
+				break;
+			case 0x24:
+				op = ASM_OP_MOV_, fmt = ASM_FMT_M33;
+				break;
+			case 0x25:
+				op = ASM_OP_MOV_PSR, fmt = ASM_FMT_M36;
+				break;
+			case 0x29:
+				op = ASM_OP_MOV_PSR_UM, fmt = ASM_FMT_M35;
+				break;
+			case 0x2A:
+				op = ASM_OP_MOV_M, fmt = ASM_FMT_M29;
+				break;
+			case 0x2C:
+				op = ASM_OP_MOV_, fmt = ASM_FMT_M32;
+				break;
+			case 0x2D:
+				op = ASM_OP_MOV_PSR_L, fmt = ASM_FMT_M35;
+				break;
+			case 0x2E:
+				op = ASM_OP_ITC_D, fmt = ASM_FMT_M42;
+				break;
+			case 0x2F:
+				op = ASM_OP_ITC_I, fmt = ASM_FMT_M42;
+				break;
+			case 0x30:
+				if (FIELD(bits, 36, 1) == 0) /* x */
+					op = ASM_OP_FC_, fmt = ASM_FMT_M28;
+				else
+					op = ASM_OP_FC_I, fmt = ASM_FMT_M28;
+				break;
+			case 0x31:
+				op = ASM_OP_PROBE_RW_FAULT, fmt = ASM_FMT_M40;
+				break;
+			case 0x32:
+				op = ASM_OP_PROBE_R_FAULT, fmt = ASM_FMT_M40;
+				break;
+			case 0x33:
+				op = ASM_OP_PROBE_W_FAULT, fmt = ASM_FMT_M40;
+				break;
+			case 0x34:
+				op = ASM_OP_PTC_E, fmt = ASM_FMT_M28;
+				break;
+			case 0x38:
+				op = ASM_OP_PROBE_R, fmt = ASM_FMT_M38;
+				break;
+			case 0x39:
+				op = ASM_OP_PROBE_W, fmt = ASM_FMT_M38;
+				break;
+			}
+			break;
+		case 0x1:
+			op = ASM_OP_CHK_S_M, fmt = ASM_FMT_M20;
+			break;
+		case 0x3:
+			op = ASM_OP_CHK_S, fmt = ASM_FMT_M21;
+			break;
+		case 0x6:
+			op = ASM_OP_ALLOC, fmt = ASM_FMT_M34;
+			break;
+		}
+		break;
+	case 0x4:
+		/* Integer load/store, register form (x == 0); semaphores,
+		 * getf and 16-byte ops (x == 1). */
+		if (FIELD(bits, 27, 1) == 0) { /* x */
+			switch (FIELD(bits, 30, 7)) { /* x6 + m */
+			case 0x0:
+				op = ASM_OP_LD1_, fmt = ASM_FMT_M1;
+				break;
+			case 0x1:
+				op = ASM_OP_LD2_, fmt = ASM_FMT_M1;
+				break;
+			case 0x2:
+				op = ASM_OP_LD4_, fmt = ASM_FMT_M1;
+				break;
+			case 0x3:
+				op = ASM_OP_LD8_, fmt = ASM_FMT_M1;
+				break;
+			case 0x4:
+				op = ASM_OP_LD1_S, fmt = ASM_FMT_M1;
+				break;
+			case 0x5:
+				op = ASM_OP_LD2_S, fmt = ASM_FMT_M1;
+				break;
+			case 0x6:
+				op = ASM_OP_LD4_S, fmt = ASM_FMT_M1;
+				break;
+			case 0x7:
+				op = ASM_OP_LD8_S, fmt = ASM_FMT_M1;
+				break;
+			case 0x8:
+				op = ASM_OP_LD1_A, fmt = ASM_FMT_M1;
+				break;
+			case 0x9:
+				op = ASM_OP_LD2_A, fmt = ASM_FMT_M1;
+				break;
+			case 0xA:
+				op = ASM_OP_LD4_A, fmt = ASM_FMT_M1;
+				break;
+			case 0xB:
+				op = ASM_OP_LD8_A, fmt = ASM_FMT_M1;
+				break;
+			case 0xC:
+				op = ASM_OP_LD1_SA, fmt = ASM_FMT_M1;
+				break;
+			case 0xD:
+				op = ASM_OP_LD2_SA, fmt = ASM_FMT_M1;
+				break;
+			case 0xE:
+				op = ASM_OP_LD4_SA, fmt = ASM_FMT_M1;
+				break;
+			case 0xF:
+				op = ASM_OP_LD8_SA, fmt = ASM_FMT_M1;
+				break;
+			case 0x10:
+				op = ASM_OP_LD1_BIAS, fmt = ASM_FMT_M1;
+				break;
+			case 0x11:
+				op = ASM_OP_LD2_BIAS, fmt = ASM_FMT_M1;
+				break;
+			case 0x12:
+				op = ASM_OP_LD4_BIAS, fmt = ASM_FMT_M1;
+				break;
+			case 0x13:
+				op = ASM_OP_LD8_BIAS, fmt = ASM_FMT_M1;
+				break;
+			case 0x14:
+				op = ASM_OP_LD1_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x15:
+				op = ASM_OP_LD2_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x16:
+				op = ASM_OP_LD4_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x17:
+				op = ASM_OP_LD8_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x1B:
+				op = ASM_OP_LD8_FILL, fmt = ASM_FMT_M1;
+				break;
+			case 0x20:
+				op = ASM_OP_LD1_C_CLR, fmt = ASM_FMT_M1;
+				break;
+			case 0x21:
+				op = ASM_OP_LD2_C_CLR, fmt = ASM_FMT_M1;
+				break;
+			case 0x22:
+				op = ASM_OP_LD4_C_CLR, fmt = ASM_FMT_M1;
+				break;
+			case 0x23:
+				op = ASM_OP_LD8_C_CLR, fmt = ASM_FMT_M1;
+				break;
+			case 0x24:
+				op = ASM_OP_LD1_C_NC, fmt = ASM_FMT_M1;
+				break;
+			case 0x25:
+				op = ASM_OP_LD2_C_NC, fmt = ASM_FMT_M1;
+				break;
+			case 0x26:
+				op = ASM_OP_LD4_C_NC, fmt = ASM_FMT_M1;
+				break;
+			case 0x27:
+				op = ASM_OP_LD8_C_NC, fmt = ASM_FMT_M1;
+				break;
+			case 0x28:
+				op = ASM_OP_LD1_C_CLR_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x29:
+				op = ASM_OP_LD2_C_CLR_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x2A:
+				op = ASM_OP_LD4_C_CLR_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x2B:
+				op = ASM_OP_LD8_C_CLR_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x30:
+				op = ASM_OP_ST1_, fmt = ASM_FMT_M4;
+				break;
+			case 0x31:
+				op = ASM_OP_ST2_, fmt = ASM_FMT_M4;
+				break;
+			case 0x32:
+				op = ASM_OP_ST4_, fmt = ASM_FMT_M4;
+				break;
+			case 0x33:
+				op = ASM_OP_ST8_, fmt = ASM_FMT_M4;
+				break;
+			case 0x34:
+				op = ASM_OP_ST1_REL, fmt = ASM_FMT_M4;
+				break;
+			case 0x35:
+				op = ASM_OP_ST2_REL, fmt = ASM_FMT_M4;
+				break;
+			case 0x36:
+				op = ASM_OP_ST4_REL, fmt = ASM_FMT_M4;
+				break;
+			case 0x37:
+				op = ASM_OP_ST8_REL, fmt = ASM_FMT_M4;
+				break;
+			case 0x3B:
+				op = ASM_OP_ST8_SPILL, fmt = ASM_FMT_M4;
+				break;
+			case 0x40:
+				op = ASM_OP_LD1_, fmt = ASM_FMT_M2;
+				break;
+			case 0x41:
+				op = ASM_OP_LD2_, fmt = ASM_FMT_M2;
+				break;
+			case 0x42:
+				op = ASM_OP_LD4_, fmt = ASM_FMT_M2;
+				break;
+			case 0x43:
+				op = ASM_OP_LD8_, fmt = ASM_FMT_M2;
+				break;
+			case 0x44:
+				op = ASM_OP_LD1_S, fmt = ASM_FMT_M2;
+				break;
+			case 0x45:
+				op = ASM_OP_LD2_S, fmt = ASM_FMT_M2;
+				break;
+			case 0x46:
+				op = ASM_OP_LD4_S, fmt = ASM_FMT_M2;
+				break;
+			case 0x47:
+				op = ASM_OP_LD8_S, fmt = ASM_FMT_M2;
+				break;
+			case 0x48:
+				op = ASM_OP_LD1_A, fmt = ASM_FMT_M2;
+				break;
+			case 0x49:
+				op = ASM_OP_LD2_A, fmt = ASM_FMT_M2;
+				break;
+			case 0x4A:
+				op = ASM_OP_LD4_A, fmt = ASM_FMT_M2;
+				break;
+			case 0x4B:
+				op = ASM_OP_LD8_A, fmt = ASM_FMT_M2;
+				break;
+			case 0x4C:
+				op = ASM_OP_LD1_SA, fmt = ASM_FMT_M2;
+				break;
+			case 0x4D:
+				op = ASM_OP_LD2_SA, fmt = ASM_FMT_M2;
+				break;
+			case 0x4E:
+				op = ASM_OP_LD4_SA, fmt = ASM_FMT_M2;
+				break;
+			case 0x4F:
+				op = ASM_OP_LD8_SA, fmt = ASM_FMT_M2;
+				break;
+			case 0x50:
+				op = ASM_OP_LD1_BIAS, fmt = ASM_FMT_M2;
+				break;
+			case 0x51:
+				op = ASM_OP_LD2_BIAS, fmt = ASM_FMT_M2;
+				break;
+			case 0x52:
+				op = ASM_OP_LD4_BIAS, fmt = ASM_FMT_M2;
+				break;
+			case 0x53:
+				op = ASM_OP_LD8_BIAS, fmt = ASM_FMT_M2;
+				break;
+			case 0x54:
+				op = ASM_OP_LD1_ACQ, fmt = ASM_FMT_M2;
+				break;
+			case 0x55:
+				op = ASM_OP_LD2_ACQ, fmt = ASM_FMT_M2;
+				break;
+			case 0x56:
+				op = ASM_OP_LD4_ACQ, fmt = ASM_FMT_M2;
+				break;
+			case 0x57:
+				op = ASM_OP_LD8_ACQ, fmt = ASM_FMT_M2;
+				break;
+			case 0x5B:
+				op = ASM_OP_LD8_FILL, fmt = ASM_FMT_M2;
+				break;
+			case 0x60:
+				op = ASM_OP_LD1_C_CLR, fmt = ASM_FMT_M2;
+				break;
+			case 0x61:
+				op = ASM_OP_LD2_C_CLR, fmt = ASM_FMT_M2;
+				break;
+			case 0x62:
+				op = ASM_OP_LD4_C_CLR, fmt = ASM_FMT_M2;
+				break;
+			case 0x63:
+				op = ASM_OP_LD8_C_CLR, fmt = ASM_FMT_M2;
+				break;
+			case 0x64:
+				op = ASM_OP_LD1_C_NC, fmt = ASM_FMT_M2;
+				break;
+			case 0x65:
+				op = ASM_OP_LD2_C_NC, fmt = ASM_FMT_M2;
+				break;
+			case 0x66:
+				op = ASM_OP_LD4_C_NC, fmt = ASM_FMT_M2;
+				break;
+			case 0x67:
+				op = ASM_OP_LD8_C_NC, fmt = ASM_FMT_M2;
+				break;
+			case 0x68:
+				op = ASM_OP_LD1_C_CLR_ACQ, fmt = ASM_FMT_M2;
+				break;
+			case 0x69:
+				op = ASM_OP_LD2_C_CLR_ACQ, fmt = ASM_FMT_M2;
+				break;
+			case 0x6A:
+				op = ASM_OP_LD4_C_CLR_ACQ, fmt = ASM_FMT_M2;
+				break;
+			case 0x6B:
+				op = ASM_OP_LD8_C_CLR_ACQ, fmt = ASM_FMT_M2;
+				break;
+			}
+		} else {
+			switch (FIELD(bits, 30, 7)) { /* x6 + m */
+			case 0x0:
+				op = ASM_OP_CMPXCHG1_ACQ, fmt = ASM_FMT_M16;
+				break;
+			case 0x1:
+				op = ASM_OP_CMPXCHG2_ACQ, fmt = ASM_FMT_M16;
+				break;
+			case 0x2:
+				op = ASM_OP_CMPXCHG4_ACQ, fmt = ASM_FMT_M16;
+				break;
+			case 0x3:
+				op = ASM_OP_CMPXCHG8_ACQ, fmt = ASM_FMT_M16;
+				break;
+			case 0x4:
+				op = ASM_OP_CMPXCHG1_REL, fmt = ASM_FMT_M16;
+				break;
+			case 0x5:
+				op = ASM_OP_CMPXCHG2_REL, fmt = ASM_FMT_M16;
+				break;
+			case 0x6:
+				op = ASM_OP_CMPXCHG4_REL, fmt = ASM_FMT_M16;
+				break;
+			case 0x7:
+				op = ASM_OP_CMPXCHG8_REL, fmt = ASM_FMT_M16;
+				break;
+			case 0x8:
+				op = ASM_OP_XCHG1, fmt = ASM_FMT_M16;
+				break;
+			case 0x9:
+				op = ASM_OP_XCHG2, fmt = ASM_FMT_M16;
+				break;
+			case 0xA:
+				op = ASM_OP_XCHG4, fmt = ASM_FMT_M16;
+				break;
+			case 0xB:
+				op = ASM_OP_XCHG8, fmt = ASM_FMT_M16;
+				break;
+			case 0x12:
+				op = ASM_OP_FETCHADD4_ACQ, fmt = ASM_FMT_M17;
+				break;
+			case 0x13:
+				op = ASM_OP_FETCHADD8_ACQ, fmt = ASM_FMT_M17;
+				break;
+			case 0x16:
+				op = ASM_OP_FETCHADD4_REL, fmt = ASM_FMT_M17;
+				break;
+			case 0x17:
+				op = ASM_OP_FETCHADD8_REL, fmt = ASM_FMT_M17;
+				break;
+			case 0x1C:
+				op = ASM_OP_GETF_SIG, fmt = ASM_FMT_M19;
+				break;
+			case 0x1D:
+				op = ASM_OP_GETF_EXP, fmt = ASM_FMT_M19;
+				break;
+			case 0x1E:
+				op = ASM_OP_GETF_S, fmt = ASM_FMT_M19;
+				break;
+			case 0x1F:
+				op = ASM_OP_GETF_D, fmt = ASM_FMT_M19;
+				break;
+			case 0x20:
+				op = ASM_OP_CMP8XCHG16_ACQ, fmt = ASM_FMT_M16;
+				break;
+			case 0x24:
+				op = ASM_OP_CMP8XCHG16_REL, fmt = ASM_FMT_M16;
+				break;
+			case 0x28:
+				op = ASM_OP_LD16_, fmt = ASM_FMT_M1;
+				break;
+			case 0x2C:
+				op = ASM_OP_LD16_ACQ, fmt = ASM_FMT_M1;
+				break;
+			case 0x30:
+				op = ASM_OP_ST16_, fmt = ASM_FMT_M4;
+				break;
+			case 0x34:
+				op = ASM_OP_ST16_REL, fmt = ASM_FMT_M4;
+				break;
+			}
+		}
+		break;
+	case 0x5:
+		/* Integer load/store with immediate update. */
+		switch (FIELD(bits, 30, 6)) { /* x6 */
+		case 0x0:
+			op = ASM_OP_LD1_, fmt = ASM_FMT_M3;
+			break;
+		case 0x1:
+			op = ASM_OP_LD2_, fmt = ASM_FMT_M3;
+			break;
+		case 0x2:
+			op = ASM_OP_LD4_, fmt = ASM_FMT_M3;
+			break;
+		case 0x3:
+			op = ASM_OP_LD8_, fmt = ASM_FMT_M3;
+			break;
+		case 0x4:
+			op = ASM_OP_LD1_S, fmt = ASM_FMT_M3;
+			break;
+		case 0x5:
+			op = ASM_OP_LD2_S, fmt = ASM_FMT_M3;
+			break;
+		case 0x6:
+			op = ASM_OP_LD4_S, fmt = ASM_FMT_M3;
+			break;
+		case 0x7:
+			op = ASM_OP_LD8_S, fmt = ASM_FMT_M3;
+			break;
+		case 0x8:
+			op = ASM_OP_LD1_A, fmt = ASM_FMT_M3;
+			break;
+		case 0x9:
+			op = ASM_OP_LD2_A, fmt = ASM_FMT_M3;
+			break;
+		case 0xA:
+			op = ASM_OP_LD4_A, fmt = ASM_FMT_M3;
+			break;
+		case 0xB:
+			op = ASM_OP_LD8_A, fmt = ASM_FMT_M3;
+			break;
+		case 0xC:
+			op = ASM_OP_LD1_SA, fmt = ASM_FMT_M3;
+			break;
+		case 0xD:
+			op = ASM_OP_LD2_SA, fmt = ASM_FMT_M3;
+			break;
+		case 0xE:
+			op = ASM_OP_LD4_SA, fmt = ASM_FMT_M3;
+			break;
+		case 0xF:
+			op = ASM_OP_LD8_SA, fmt = ASM_FMT_M3;
+			break;
+		case 0x10:
+			op = ASM_OP_LD1_BIAS, fmt = ASM_FMT_M3;
+			break;
+		case 0x11:
+			op = ASM_OP_LD2_BIAS, fmt = ASM_FMT_M3;
+			break;
+		case 0x12:
+			op = ASM_OP_LD4_BIAS, fmt = ASM_FMT_M3;
+			break;
+		case 0x13:
+			op = ASM_OP_LD8_BIAS, fmt = ASM_FMT_M3;
+			break;
+		case 0x14:
+			op = ASM_OP_LD1_ACQ, fmt = ASM_FMT_M3;
+			break;
+		case 0x15:
+			op = ASM_OP_LD2_ACQ, fmt = ASM_FMT_M3;
+			break;
+		case 0x16:
+			op = ASM_OP_LD4_ACQ, fmt = ASM_FMT_M3;
+			break;
+		case 0x17:
+			op = ASM_OP_LD8_ACQ, fmt = ASM_FMT_M3;
+			break;
+		case 0x1B:
+			op = ASM_OP_LD8_FILL, fmt = ASM_FMT_M3;
+			break;
+		case 0x20:
+			op = ASM_OP_LD1_C_CLR, fmt = ASM_FMT_M3;
+			break;
+		case 0x21:
+			op = ASM_OP_LD2_C_CLR, fmt = ASM_FMT_M3;
+			break;
+		case 0x22:
+			op = ASM_OP_LD4_C_CLR, fmt = ASM_FMT_M3;
+			break;
+		case 0x23:
+			op = ASM_OP_LD8_C_CLR, fmt = ASM_FMT_M3;
+			break;
+		case 0x24:
+			op = ASM_OP_LD1_C_NC, fmt = ASM_FMT_M3;
+			break;
+		case 0x25:
+			op = ASM_OP_LD2_C_NC, fmt = ASM_FMT_M3;
+			break;
+		case 0x26:
+			op = ASM_OP_LD4_C_NC, fmt = ASM_FMT_M3;
+			break;
+		case 0x27:
+			op = ASM_OP_LD8_C_NC, fmt = ASM_FMT_M3;
+			break;
+		case 0x28:
+			op = ASM_OP_LD1_C_CLR_ACQ, fmt = ASM_FMT_M3;
+			break;
+		case 0x29:
+			op = ASM_OP_LD2_C_CLR_ACQ, fmt = ASM_FMT_M3;
+			break;
+		case 0x2A:
+			op = ASM_OP_LD4_C_CLR_ACQ, fmt = ASM_FMT_M3;
+			break;
+		case 0x2B:
+			op = ASM_OP_LD8_C_CLR_ACQ, fmt = ASM_FMT_M3;
+			break;
+		case 0x30:
+			op = ASM_OP_ST1_, fmt = ASM_FMT_M5;
+			break;
+		case 0x31:
+			op = ASM_OP_ST2_, fmt = ASM_FMT_M5;
+			break;
+		case 0x32:
+			op = ASM_OP_ST4_, fmt = ASM_FMT_M5;
+			break;
+		case 0x33:
+			op = ASM_OP_ST8_, fmt = ASM_FMT_M5;
+			break;
+		case 0x34:
+			op = ASM_OP_ST1_REL, fmt = ASM_FMT_M5;
+			break;
+		case 0x35:
+			op = ASM_OP_ST2_REL, fmt = ASM_FMT_M5;
+			break;
+		case 0x36:
+			op = ASM_OP_ST4_REL, fmt = ASM_FMT_M5;
+			break;
+		case 0x37:
+			op = ASM_OP_ST8_REL, fmt = ASM_FMT_M5;
+			break;
+		case 0x3B:
+			op = ASM_OP_ST8_SPILL, fmt = ASM_FMT_M5;
+			break;
+		}
+		break;
+	case 0x6:
+		/* FP load/store and lfetch, register form (x == 0);
+		 * FP pair loads and setf (x == 1). */
+		if (FIELD(bits, 27, 1) == 0) { /* x */
+			switch (FIELD(bits, 30, 7)) { /* x6 + m */
+			case 0x0:
+				op = ASM_OP_LDFE_, fmt = ASM_FMT_M6;
+				break;
+			case 0x1:
+				op = ASM_OP_LDF8_, fmt = ASM_FMT_M6;
+				break;
+			case 0x2:
+				op = ASM_OP_LDFS_, fmt = ASM_FMT_M6;
+				break;
+			case 0x3:
+				op = ASM_OP_LDFD_, fmt = ASM_FMT_M6;
+				break;
+			case 0x4:
+				op = ASM_OP_LDFE_S, fmt = ASM_FMT_M6;
+				break;
+			case 0x5:
+				op = ASM_OP_LDF8_S, fmt = ASM_FMT_M6;
+				break;
+			case 0x6:
+				op = ASM_OP_LDFS_S, fmt = ASM_FMT_M6;
+				break;
+			case 0x7:
+				op = ASM_OP_LDFD_S, fmt = ASM_FMT_M6;
+				break;
+			case 0x8:
+				op = ASM_OP_LDFE_A, fmt = ASM_FMT_M6;
+				break;
+			case 0x9:
+				op = ASM_OP_LDF8_A, fmt = ASM_FMT_M6;
+				break;
+			case 0xA:
+				op = ASM_OP_LDFS_A, fmt = ASM_FMT_M6;
+				break;
+			case 0xB:
+				op = ASM_OP_LDFD_A, fmt = ASM_FMT_M6;
+				break;
+			case 0xC:
+				op = ASM_OP_LDFE_SA, fmt = ASM_FMT_M6;
+				break;
+			case 0xD:
+				op = ASM_OP_LDF8_SA, fmt = ASM_FMT_M6;
+				break;
+			case 0xE:
+				op = ASM_OP_LDFS_SA, fmt = ASM_FMT_M6;
+				break;
+			case 0xF:
+				op = ASM_OP_LDFD_SA, fmt = ASM_FMT_M6;
+				break;
+			case 0x1B:
+				op = ASM_OP_LDF_FILL, fmt = ASM_FMT_M6;
+				break;
+			case 0x20:
+				op = ASM_OP_LDFE_C_CLR, fmt = ASM_FMT_M6;
+				break;
+			case 0x21:
+				op = ASM_OP_LDF8_C_CLR, fmt = ASM_FMT_M6;
+				break;
+			case 0x22:
+				op = ASM_OP_LDFS_C_CLR, fmt = ASM_FMT_M6;
+				break;
+			case 0x23:
+				op = ASM_OP_LDFD_C_CLR, fmt = ASM_FMT_M6;
+				break;
+			case 0x24:
+				op = ASM_OP_LDFE_C_NC, fmt = ASM_FMT_M6;
+				break;
+			case 0x25:
+				op = ASM_OP_LDF8_C_NC, fmt = ASM_FMT_M6;
+				break;
+			case 0x26:
+				op = ASM_OP_LDFS_C_NC, fmt = ASM_FMT_M6;
+				break;
+			case 0x27:
+				op = ASM_OP_LDFD_C_NC, fmt = ASM_FMT_M6;
+				break;
+			case 0x2C:
+				op = ASM_OP_LFETCH_, fmt = ASM_FMT_M13;
+				break;
+			case 0x2D:
+				op = ASM_OP_LFETCH_EXCL, fmt = ASM_FMT_M13;
+				break;
+			case 0x2E:
+				op = ASM_OP_LFETCH_FAULT, fmt = ASM_FMT_M13;
+				break;
+			case 0x2F:
+				op = ASM_OP_LFETCH_FAULT_EXCL,
+				    fmt = ASM_FMT_M13;
+				break;
+			case 0x30:
+				op = ASM_OP_STFE, fmt = ASM_FMT_M9;
+				break;
+			case 0x31:
+				op = ASM_OP_STF8, fmt = ASM_FMT_M9;
+				break;
+			case 0x32:
+				op = ASM_OP_STFS, fmt = ASM_FMT_M9;
+				break;
+			case 0x33:
+				op = ASM_OP_STFD, fmt = ASM_FMT_M9;
+				break;
+			case 0x3B:
+				op = ASM_OP_STF_SPILL, fmt = ASM_FMT_M9;
+				break;
+			case 0x40:
+				op = ASM_OP_LDFE_, fmt = ASM_FMT_M7;
+				break;
+			case 0x41:
+				op = ASM_OP_LDF8_, fmt = ASM_FMT_M7;
+				break;
+			case 0x42:
+				op = ASM_OP_LDFS_, fmt = ASM_FMT_M7;
+				break;
+			case 0x43:
+				op = ASM_OP_LDFD_, fmt = ASM_FMT_M7;
+				break;
+			case 0x44:
+				op = ASM_OP_LDFE_S, fmt = ASM_FMT_M7;
+				break;
+			case 0x45:
+				op = ASM_OP_LDF8_S, fmt = ASM_FMT_M7;
+				break;
+			case 0x46:
+				op = ASM_OP_LDFS_S, fmt = ASM_FMT_M7;
+				break;
+			case 0x47:
+				op = ASM_OP_LDFD_S, fmt = ASM_FMT_M7;
+				break;
+			case 0x48:
+				op = ASM_OP_LDFE_A, fmt = ASM_FMT_M7;
+				break;
+			case 0x49:
+				op = ASM_OP_LDF8_A, fmt = ASM_FMT_M7;
+				break;
+			case 0x4A:
+				op = ASM_OP_LDFS_A, fmt = ASM_FMT_M7;
+				break;
+			case 0x4B:
+				op = ASM_OP_LDFD_A, fmt = ASM_FMT_M7;
+				break;
+			case 0x4C:
+				op = ASM_OP_LDFE_SA, fmt = ASM_FMT_M7;
+				break;
+			case 0x4D:
+				op = ASM_OP_LDF8_SA, fmt = ASM_FMT_M7;
+				break;
+			case 0x4E:
+				op = ASM_OP_LDFS_SA, fmt = ASM_FMT_M7;
+				break;
+			case 0x4F:
+				op = ASM_OP_LDFD_SA, fmt = ASM_FMT_M7;
+				break;
+			case 0x5B:
+				op = ASM_OP_LDF_FILL, fmt = ASM_FMT_M7;
+				break;
+			case 0x60:
+				op = ASM_OP_LDFE_C_CLR, fmt = ASM_FMT_M7;
+				break;
+			case 0x61:
+				op = ASM_OP_LDF8_C_CLR, fmt = ASM_FMT_M7;
+				break;
+			case 0x62:
+				op = ASM_OP_LDFS_C_CLR, fmt = ASM_FMT_M7;
+				break;
+			case 0x63:
+				op = ASM_OP_LDFD_C_CLR, fmt = ASM_FMT_M7;
+				break;
+			case 0x64:
+				op = ASM_OP_LDFE_C_NC, fmt = ASM_FMT_M7;
+				break;
+			case 0x65:
+				op = ASM_OP_LDF8_C_NC, fmt = ASM_FMT_M7;
+				break;
+			case 0x66:
+				op = ASM_OP_LDFS_C_NC, fmt = ASM_FMT_M7;
+				break;
+			case 0x67:
+				op = ASM_OP_LDFD_C_NC, fmt = ASM_FMT_M7;
+				break;
+			case 0x6C:
+				op = ASM_OP_LFETCH_, fmt = ASM_FMT_M14;
+				break;
+			case 0x6D:
+				op = ASM_OP_LFETCH_EXCL, fmt = ASM_FMT_M14;
+				break;
+			case 0x6E:
+				op = ASM_OP_LFETCH_FAULT, fmt = ASM_FMT_M14;
+				break;
+			case 0x6F:
+				op = ASM_OP_LFETCH_FAULT_EXCL,
+				    fmt = ASM_FMT_M14;
+				break;
+			}
+		} else {
+			switch (FIELD(bits, 30, 7)) { /* x6 + m */
+			case 0x1:
+				op = ASM_OP_LDFP8_, fmt = ASM_FMT_M11;
+				break;
+			case 0x2:
+				op = ASM_OP_LDFPS_, fmt = ASM_FMT_M11;
+				break;
+			case 0x3:
+				op = ASM_OP_LDFPD_, fmt = ASM_FMT_M11;
+				break;
+			case 0x5:
+				op = ASM_OP_LDFP8_S, fmt = ASM_FMT_M11;
+				break;
+			case 0x6:
+				op = ASM_OP_LDFPS_S, fmt = ASM_FMT_M11;
+				break;
+			case 0x7:
+				op = ASM_OP_LDFPD_S, fmt = ASM_FMT_M11;
+				break;
+			case 0x9:
+				op = ASM_OP_LDFP8_A, fmt = ASM_FMT_M11;
+				break;
+			case 0xA:
+				op = ASM_OP_LDFPS_A, fmt = ASM_FMT_M11;
+				break;
+			case 0xB:
+				op = ASM_OP_LDFPD_A, fmt = ASM_FMT_M11;
+				break;
+			case 0xD:
+				op = ASM_OP_LDFP8_SA, fmt = ASM_FMT_M11;
+				break;
+			case 0xE:
+				op = ASM_OP_LDFPS_SA, fmt = ASM_FMT_M11;
+				break;
+			case 0xF:
+				op = ASM_OP_LDFPD_SA, fmt = ASM_FMT_M11;
+				break;
+			case 0x1C:
+				op = ASM_OP_SETF_SIG, fmt = ASM_FMT_M18;
+				break;
+			case 0x1D:
+				op = ASM_OP_SETF_EXP, fmt = ASM_FMT_M18;
+				break;
+			case 0x1E:
+				op = ASM_OP_SETF_S, fmt = ASM_FMT_M18;
+				break;
+			case 0x1F:
+				op = ASM_OP_SETF_D, fmt = ASM_FMT_M18;
+				break;
+			case 0x21:
+				op = ASM_OP_LDFP8_C_CLR, fmt = ASM_FMT_M11;
+				break;
+			case 0x22:
+				op = ASM_OP_LDFPS_C_CLR, fmt = ASM_FMT_M11;
+				break;
+			case 0x23:
+				op = ASM_OP_LDFPD_C_CLR, fmt = ASM_FMT_M11;
+				break;
+			case 0x25:
+				op = ASM_OP_LDFP8_C_NC, fmt = ASM_FMT_M11;
+				break;
+			case 0x26:
+				op = ASM_OP_LDFPS_C_NC, fmt = ASM_FMT_M11;
+				break;
+			case 0x27:
+				op = ASM_OP_LDFPD_C_NC, fmt = ASM_FMT_M11;
+				break;
+			case 0x41:
+				op = ASM_OP_LDFP8_, fmt = ASM_FMT_M12;
+				break;
+			case 0x42:
+				op = ASM_OP_LDFPS_, fmt = ASM_FMT_M12;
+				break;
+			case 0x43:
+				op = ASM_OP_LDFPD_, fmt = ASM_FMT_M12;
+				break;
+			case 0x45:
+				op = ASM_OP_LDFP8_S, fmt = ASM_FMT_M12;
+				break;
+			case 0x46:
+				op = ASM_OP_LDFPS_S, fmt = ASM_FMT_M12;
+				break;
+			case 0x47:
+				op = ASM_OP_LDFPD_S, fmt = ASM_FMT_M12;
+				break;
+			case 0x49:
+				op = ASM_OP_LDFP8_A, fmt = ASM_FMT_M12;
+				break;
+			case 0x4A:
+				op = ASM_OP_LDFPS_A, fmt = ASM_FMT_M12;
+				break;
+			case 0x4B:
+				op = ASM_OP_LDFPD_A, fmt = ASM_FMT_M12;
+				break;
+			case 0x4D:
+				op = ASM_OP_LDFP8_SA, fmt = ASM_FMT_M12;
+				break;
+			case 0x4E:
+				op = ASM_OP_LDFPS_SA, fmt = ASM_FMT_M12;
+				break;
+			case 0x4F:
+				op = ASM_OP_LDFPD_SA, fmt = ASM_FMT_M12;
+				break;
+			case 0x61:
+				op = ASM_OP_LDFP8_C_CLR, fmt = ASM_FMT_M12;
+				break;
+			case 0x62:
+				op = ASM_OP_LDFPS_C_CLR, fmt = ASM_FMT_M12;
+				break;
+			case 0x63:
+				op = ASM_OP_LDFPD_C_CLR, fmt = ASM_FMT_M12;
+				break;
+			case 0x65:
+				op = ASM_OP_LDFP8_C_NC, fmt = ASM_FMT_M12;
+				break;
+			case 0x66:
+				op = ASM_OP_LDFPS_C_NC, fmt = ASM_FMT_M12;
+				break;
+			case 0x67:
+				op = ASM_OP_LDFPD_C_NC, fmt = ASM_FMT_M12;
+				break;
+			}
+		}
+		break;
+	case 0x7:
+		/* FP load/store and lfetch with immediate update. */
+		switch (FIELD(bits, 30, 6)) { /* x6 */
+		case 0x0:
+			op = ASM_OP_LDFE_, fmt = ASM_FMT_M8;
+			break;
+		case 0x1:
+			op = ASM_OP_LDF8_, fmt = ASM_FMT_M8;
+			break;
+		case 0x2:
+			op = ASM_OP_LDFS_, fmt = ASM_FMT_M8;
+			break;
+		case 0x3:
+			op = ASM_OP_LDFD_, fmt = ASM_FMT_M8;
+			break;
+		case 0x4:
+			op = ASM_OP_LDFE_S, fmt = ASM_FMT_M8;
+			break;
+		case 0x5:
+			op = ASM_OP_LDF8_S, fmt = ASM_FMT_M8;
+			break;
+		case 0x6:
+			op = ASM_OP_LDFS_S, fmt = ASM_FMT_M8;
+			break;
+		case 0x7:
+			op = ASM_OP_LDFD_S, fmt = ASM_FMT_M8;
+			break;
+		case 0x8:
+			op = ASM_OP_LDFE_A, fmt = ASM_FMT_M8;
+			break;
+		case 0x9:
+			op = ASM_OP_LDF8_A, fmt = ASM_FMT_M8;
+			break;
+		case 0xA:
+			op = ASM_OP_LDFS_A, fmt = ASM_FMT_M8;
+			break;
+		case 0xB:
+			op = ASM_OP_LDFD_A, fmt = ASM_FMT_M8;
+			break;
+		case 0xC:
+			op = ASM_OP_LDFE_SA, fmt = ASM_FMT_M8;
+			break;
+		case 0xD:
+			op = ASM_OP_LDF8_SA, fmt = ASM_FMT_M8;
+			break;
+		case 0xE:
+			op = ASM_OP_LDFS_SA, fmt = ASM_FMT_M8;
+			break;
+		case 0xF:
+			op = ASM_OP_LDFD_SA, fmt = ASM_FMT_M8;
+			break;
+		case 0x1B:
+			op = ASM_OP_LDF_FILL, fmt = ASM_FMT_M8;
+			break;
+		case 0x20:
+			op = ASM_OP_LDFE_C_CLR, fmt = ASM_FMT_M8;
+			break;
+		case 0x21:
+			op = ASM_OP_LDF8_C_CLR, fmt = ASM_FMT_M8;
+			break;
+		case 0x22:
+			op = ASM_OP_LDFS_C_CLR, fmt = ASM_FMT_M8;
+			break;
+		case 0x23:
+			op = ASM_OP_LDFD_C_CLR, fmt = ASM_FMT_M8;
+			break;
+		case 0x24:
+			op = ASM_OP_LDFE_C_NC, fmt = ASM_FMT_M8;
+			break;
+		case 0x25:
+			op = ASM_OP_LDF8_C_NC, fmt = ASM_FMT_M8;
+			break;
+		case 0x26:
+			op = ASM_OP_LDFS_C_NC, fmt = ASM_FMT_M8;
+			break;
+		case 0x27:
+			op = ASM_OP_LDFD_C_NC, fmt = ASM_FMT_M8;
+			break;
+		case 0x2C:
+			op = ASM_OP_LFETCH_, fmt = ASM_FMT_M15;
+			break;
+		case 0x2D:
+			op = ASM_OP_LFETCH_EXCL, fmt = ASM_FMT_M15;
+			break;
+		case 0x2E:
+			op = ASM_OP_LFETCH_FAULT, fmt = ASM_FMT_M15;
+			break;
+		case 0x2F:
+			op = ASM_OP_LFETCH_FAULT_EXCL, fmt = ASM_FMT_M15;
+			break;
+		case 0x30:
+			op = ASM_OP_STFE, fmt = ASM_FMT_M10;
+			break;
+		case 0x31:
+			op = ASM_OP_STF8, fmt = ASM_FMT_M10;
+			break;
+		case 0x32:
+			op = ASM_OP_STFS, fmt = ASM_FMT_M10;
+			break;
+		case 0x33:
+			op = ASM_OP_STFD, fmt = ASM_FMT_M10;
+			break;
+		case 0x3B:
+			op = ASM_OP_STF_SPILL, fmt = ASM_FMT_M10;
+			break;
+		}
+		break;
+	}
+
+	if (op != ASM_OP_NONE)
+		return (asm_extract(op, fmt, bits, b, slot));
+	return (0);
+}
+
+/*
+ * Decode X-unit instructions.
+ *
+ * X-unit instructions occupy slot 2 of an MLX bundle and consume the
+ * L slot (slot 1) as well: slot 1 holds the upper bits of the long
+ * immediate.  Slot 1 is recorded here with format ASM_FMT_NONE so the
+ * extractor can pick up its bits.  Returns 0 for invalid encodings.
+ */
+static int
+asm_decodeX(uint64_t ip, struct asm_bundle *b, int slot)
+{
+	uint64_t bits;
+	enum asm_fmt fmt;
+	enum asm_op op;
+
+	/* Only valid in the last slot of an MLX bundle. */
+	KASSERT(slot == 2, ("foo"));
+	bits = SLOT(ip, slot);
+	fmt = ASM_FMT_NONE, op = ASM_OP_NONE;
+	/* Initialize slot 1 (slot - 1) */
+	b->b_inst[slot - 1].i_format = ASM_FMT_NONE;
+	b->b_inst[slot - 1].i_bits = SLOT(ip, slot - 1);
+
+	switch((int)OPCODE(bits)) {
+	case 0x0:
+		if (FIELD(bits, 33, 3) == 0) { /* x3 */
+			switch (FIELD(bits, 27, 6)) { /* x6 */
+			case 0x0:
+				op = ASM_OP_BREAK_X, fmt = ASM_FMT_X1;
+				break;
+			case 0x1:
+				op = ASM_OP_NOP_X, fmt = ASM_FMT_X1;
+				break;
+			}
+		}
+		break;
+	case 0x6:
+		/* movl; bit 20 must be clear for a valid encoding. */
+		if (FIELD(bits, 20, 1) == 0)
+			op = ASM_OP_MOVL, fmt = ASM_FMT_X2;
+		break;
+	case 0xC:
+		if (FIELD(bits, 6, 3) == 0) /* btype */
+			op = ASM_OP_BRL_COND, fmt = ASM_FMT_X3;
+		break;
+	case 0xD:
+		op = ASM_OP_BRL_CALL, fmt = ASM_FMT_X4;
+		break;
+	}
+
+	if (op != ASM_OP_NONE)
+		return (asm_extract(op, fmt, bits, b, slot));
+	return (0);
+}
+
+/*
+ * Decode the bundle at address 'ip' into 'b'.
+ *
+ * The bundle's template field selects a template mnemonic (e.g. "MII",
+ * "MLX;") whose characters drive which unit decoder handles each slot.
+ * Returns non-zero on success; 0 if the template is reserved (NULL
+ * entry in asm_templname[]) or any slot fails to decode.
+ */
+int
+asm_decode(uint64_t ip, struct asm_bundle *b)
+{
+	const char *tp;
+	unsigned int slot;
+	int ok;
+
+	memset(b, 0, sizeof(*b));
+
+	/* A NULL template name denotes a reserved template encoding. */
+	b->b_templ = asm_templname[TMPL(ip)];
+	if (b->b_templ == 0)
+		return (0);
+
+	slot = 0;
+	tp = b->b_templ;
+
+	ok = 1;
+	while (ok && *tp != 0) {
+		switch (*tp++) {
+		case 'B':
+			ok = asm_decodeB(ip, b, slot++);
+			break;
+		case 'F':
+			ok = asm_decodeF(ip, b, slot++);
+			break;
+		case 'I':
+			ok = asm_decodeI(ip, b, slot++);
+			break;
+		case 'L':
+			/*
+			 * The L slot carries the upper immediate of an
+			 * X-unit instruction and is only valid as slot 1;
+			 * it is decoded together with the X slot.
+			 */
+			ok = (slot++ == 1) ? 1 : 0;
+			break;
+		case 'M':
+			ok = asm_decodeM(ip, b, slot++);
+			break;
+		case 'X':
+			ok = asm_decodeX(ip, b, slot++);
+			break;
+		case ';':
+			/* Stop bit: consumes no slot. */
+			ok = 1;
+			break;
+		default:
+			ok = 0;
+			break;
+		}
+	}
+	return (ok);
+}
diff --git a/sys/ia64/disasm/disasm_extract.c b/sys/ia64/disasm/disasm_extract.c
new file mode 100644
index 0000000..ae173fc
--- /dev/null
+++ b/sys/ia64/disasm/disasm_extract.c
@@ -0,0 +1,2519 @@
+/*
+ * Copyright (c) 2000-2003 Marcel Moolenaar
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <sys/cdefs.h>
+__FBSDID("$FreeBSD$");
+
+#include <sys/param.h>
+#include <sys/systm.h>
+
+#include <machine/stdarg.h>
+
+#include <ia64/disasm/disasm_int.h>
+#include <ia64/disasm/disasm.h>
+
+/*
+ * Pack an (offset, length) bit-field descriptor into an int, and take
+ * it apart again.  Arguments are fully parenthesized so that operator
+ * expressions (e.g. FRAG(a | b, n)) expand correctly; the original
+ * forms "o << 8", "f >> 8" and "f & 0xff" bound the operators to only
+ * part of such an argument.
+ */
+#define FRAG(o,l)	((int)(((o) << 8) | ((l) & 0xff)))
+#define FRAG_OFS(f)	((f) >> 8)
+#define FRAG_LEN(f)	((f) & 0xff)
+
+/*
+ * Support functions.
+ */
+/*
+ * Append completer (class c, type t) to instruction i.  The completer
+ * array is fixed-size; the KASSERT guards against overflowing it.
+ */
+static void
+asm_cmpltr_add(struct asm_inst *i, enum asm_cmpltr_class c,
+    enum asm_cmpltr_type t)
+{
+	int n;
+
+	n = i->i_ncmpltrs;
+	i->i_cmpltr[n].c_class = c;
+	i->i_cmpltr[n].c_type = t;
+	i->i_ncmpltrs = n + 1;
+	KASSERT(i->i_ncmpltrs < 6, ("foo"));
+}
+
+/*
+ * Add a memory-hint completer of class c, decoded from the 2-bit hint
+ * field at bits 28-29 of the instruction.
+ */
+static void
+asm_hint(struct asm_inst *i, enum asm_cmpltr_class c)
+{
+	static const enum asm_cmpltr_type hint[4] = {
+		ASM_CT_NONE, ASM_CT_NT1, ASM_CT_NT2, ASM_CT_NTA
+	};
+
+	asm_cmpltr_add(i, c, hint[FIELD(i->i_bits, 28, 2)]);
+}
+
+/*
+ * Add the status-field completer (.s0 - .s3), decoded from the 2-bit
+ * sf field at bits 34-35 of the instruction.
+ */
+static void
+asm_sf(struct asm_inst *i)
+{
+	static const enum asm_cmpltr_type sf[4] = {
+		ASM_CT_S0, ASM_CT_S1, ASM_CT_S2, ASM_CT_S3
+	};
+
+	asm_cmpltr_add(i, ASM_CC_SF, sf[FIELD(i->i_bits, 34, 2)]);
+}
+
+/*
+ * Add branch hint completers: whether-hint (bwh), prefetch-hint (ph)
+ * and deallocation-hint (dh), taken from their fixed bit positions.
+ */
+static void
+asm_brhint(struct asm_inst *i)
+{
+	static const enum asm_cmpltr_type bwh[4] = {
+		ASM_CT_SPTK, ASM_CT_SPNT, ASM_CT_DPTK, ASM_CT_DPNT
+	};
+	uint64_t bits = i->i_bits;
+
+	/* bwh: 2 bits at 33. */
+	asm_cmpltr_add(i, ASM_CC_BWH, bwh[FIELD(bits, 33, 2)]);
+
+	/* ph: single bit at 12. */
+	asm_cmpltr_add(i, ASM_CC_PH,
+	    FIELD(bits, 12, 1) ? ASM_CT_MANY : ASM_CT_FEW);
+
+	/* dh: single bit at 35. */
+	asm_cmpltr_add(i, ASM_CC_DH,
+	    FIELD(bits, 35, 1) ? ASM_CT_CLR : ASM_CT_NONE);
+}
+
+/*
+ * Add branch-predict hint completers: whether-hint (ipwh/indwh),
+ * prefetch-hint (ph), predict-vector (pvec) and importance-hint (ih),
+ * each taken from its fixed bit position.
+ */
+static void
+asm_brphint(struct asm_inst *i)
+{
+	static const enum asm_cmpltr_type wh[4] = {
+		ASM_CT_SPTK, ASM_CT_LOOP, ASM_CT_DPTK, ASM_CT_EXIT
+	};
+	static const enum asm_cmpltr_type pvec[8] = {
+		ASM_CT_DC_DC, ASM_CT_DC_NT, ASM_CT_TK_DC, ASM_CT_TK_TK,
+		ASM_CT_TK_NT, ASM_CT_NT_DC, ASM_CT_NT_TK, ASM_CT_NT_NT
+	};
+	uint64_t bits = i->i_bits;
+
+	/* ipwh/indwh: 2 bits at 3. */
+	asm_cmpltr_add(i, ASM_CC_IPWH, wh[FIELD(bits, 3, 2)]);
+
+	/* ph: single bit at 5. */
+	asm_cmpltr_add(i, ASM_CC_PH,
+	    FIELD(bits, 5, 1) ? ASM_CT_MANY : ASM_CT_FEW);
+
+	/* pvec: 3 bits at 0. */
+	asm_cmpltr_add(i, ASM_CC_PVEC, pvec[FIELD(bits, 0, 3)]);
+
+	/* ih: single bit at 35. */
+	asm_cmpltr_add(i, ASM_CC_IH,
+	    FIELD(bits, 35, 1) ? ASM_CT_IMP : ASM_CT_NONE);
+}
+
+static enum asm_oper_type
+asm_normalize(struct asm_inst *i, enum asm_op op)
+{
+ enum asm_oper_type ot = ASM_OPER_NONE;
+
+ switch (op) {
+ case ASM_OP_BR_CALL:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_CALL);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BR_CEXIT:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_CEXIT);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BR_CLOOP:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_CLOOP);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BR_COND:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_COND);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BR_CTOP:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_CTOP);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BR_IA:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_IA);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BR_RET:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_RET);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BR_WEXIT:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_WEXIT);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BR_WTOP:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_WTOP);
+ op = ASM_OP_BR;
+ break;
+ case ASM_OP_BREAK_B:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_B);
+ op = ASM_OP_BREAK;
+ break;
+ case ASM_OP_BREAK_F:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_F);
+ op = ASM_OP_BREAK;
+ break;
+ case ASM_OP_BREAK_I:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_I);
+ op = ASM_OP_BREAK;
+ break;
+ case ASM_OP_BREAK_M:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_M);
+ op = ASM_OP_BREAK;
+ break;
+ case ASM_OP_BREAK_X:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_X);
+ op = ASM_OP_BREAK;
+ break;
+ case ASM_OP_BRL_COND:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_COND);
+ op = ASM_OP_BRL;
+ break;
+ case ASM_OP_BRL_CALL:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_CALL);
+ op = ASM_OP_BRL;
+ break;
+ case ASM_OP_BRP_:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_NONE);
+ op = ASM_OP_BRP;
+ break;
+ case ASM_OP_BRP_RET:
+ asm_cmpltr_add(i, ASM_CC_BTYPE, ASM_CT_RET);
+ op = ASM_OP_BRP;
+ break;
+ case ASM_OP_BSW_0:
+ asm_cmpltr_add(i, ASM_CC_BSW, ASM_CT_0);
+ op = ASM_OP_BSW;
+ break;
+ case ASM_OP_BSW_1:
+ asm_cmpltr_add(i, ASM_CC_BSW, ASM_CT_1);
+ op = ASM_OP_BSW;
+ break;
+ case ASM_OP_CHK_A_CLR:
+ asm_cmpltr_add(i, ASM_CC_CHK, ASM_CT_A);
+ asm_cmpltr_add(i, ASM_CC_ACLR, ASM_CT_CLR);
+ op = ASM_OP_CHK;
+ break;
+ case ASM_OP_CHK_A_NC:
+ asm_cmpltr_add(i, ASM_CC_CHK, ASM_CT_A);
+ asm_cmpltr_add(i, ASM_CC_ACLR, ASM_CT_NC);
+ op = ASM_OP_CHK;
+ break;
+ case ASM_OP_CHK_S:
+ asm_cmpltr_add(i, ASM_CC_CHK, ASM_CT_S);
+ op = ASM_OP_CHK;
+ break;
+ case ASM_OP_CHK_S_I:
+ asm_cmpltr_add(i, ASM_CC_CHK, ASM_CT_S);
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_I);
+ op = ASM_OP_CHK;
+ break;
+ case ASM_OP_CHK_S_M:
+ asm_cmpltr_add(i, ASM_CC_CHK, ASM_CT_S);
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_M);
+ op = ASM_OP_CHK;
+ break;
+ case ASM_OP_CLRRRB_:
+ asm_cmpltr_add(i, ASM_CC_CLRRRB, ASM_CT_NONE);
+ op = ASM_OP_CLRRRB;
+ break;
+ case ASM_OP_CLRRRB_PR:
+ asm_cmpltr_add(i, ASM_CC_CLRRRB, ASM_CT_PR);
+ op = ASM_OP_CLRRRB;
+ break;
+ case ASM_OP_CMP_EQ:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_NONE);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_EQ_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_EQ_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_EQ_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_EQ_UNC:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_UNC);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_GE_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_GE_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_GE_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_GT_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_GT_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_GT_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LE_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LE_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LE_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LT:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_NONE);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LT_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LT_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LT_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LT_UNC:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_UNC);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LTU:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LTU);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_NONE);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_LTU_UNC:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LTU);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_UNC);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_NE_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_NE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_NE_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_NE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP_NE_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_NE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP;
+ break;
+ case ASM_OP_CMP4_EQ:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_NONE);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_EQ_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_EQ_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_EQ_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_EQ_UNC:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_EQ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_UNC);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_GE_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_GE_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_GE_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_GT_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_GT_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_GT_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_GT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LE_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LE_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LE_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LT:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_NONE);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LT_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LT_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LT_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LT_UNC:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LT);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_UNC);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LTU:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LTU);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_NONE);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_LTU_UNC:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_LTU);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_UNC);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_NE_AND:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_NE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_NE_OR:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_NE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP4_NE_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_CREL, ASM_CT_NE);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_CMP4;
+ break;
+ case ASM_OP_CMP8XCHG16_ACQ:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_ACQ);
+ op = ASM_OP_CMP8XCHG16;
+ break;
+ case ASM_OP_CMP8XCHG16_REL:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_REL);
+ op = ASM_OP_CMP8XCHG16;
+ break;
+ case ASM_OP_CMPXCHG1_ACQ:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_ACQ);
+ op = ASM_OP_CMPXCHG1;
+ break;
+ case ASM_OP_CMPXCHG1_REL:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_REL);
+ op = ASM_OP_CMPXCHG1;
+ break;
+ case ASM_OP_CMPXCHG2_ACQ:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_ACQ);
+ op = ASM_OP_CMPXCHG2;
+ break;
+ case ASM_OP_CMPXCHG2_REL:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_REL);
+ op = ASM_OP_CMPXCHG2;
+ break;
+ case ASM_OP_CMPXCHG4_ACQ:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_ACQ);
+ op = ASM_OP_CMPXCHG4;
+ break;
+ case ASM_OP_CMPXCHG4_REL:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_REL);
+ op = ASM_OP_CMPXCHG4;
+ break;
+ case ASM_OP_CMPXCHG8_ACQ:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_ACQ);
+ op = ASM_OP_CMPXCHG8;
+ break;
+ case ASM_OP_CMPXCHG8_REL:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_REL);
+ op = ASM_OP_CMPXCHG8;
+ break;
+ case ASM_OP_CZX1_L:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_L);
+ op = ASM_OP_CZX1;
+ break;
+ case ASM_OP_CZX1_R:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_R);
+ op = ASM_OP_CZX1;
+ break;
+ case ASM_OP_CZX2_L:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_L);
+ op = ASM_OP_CZX2;
+ break;
+ case ASM_OP_CZX2_R:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_R);
+ op = ASM_OP_CZX2;
+ break;
+ case ASM_OP_DEP_:
+ asm_cmpltr_add(i, ASM_CC_DEP, ASM_CT_NONE);
+ op = ASM_OP_DEP;
+ break;
+ case ASM_OP_DEP_Z:
+ asm_cmpltr_add(i, ASM_CC_DEP, ASM_CT_Z);
+ op = ASM_OP_DEP;
+ break;
+ case ASM_OP_FC_:
+ asm_cmpltr_add(i, ASM_CC_FC, ASM_CT_NONE);
+ op = ASM_OP_FC;
+ break;
+ case ASM_OP_FC_I:
+ asm_cmpltr_add(i, ASM_CC_FC, ASM_CT_I);
+ op = ASM_OP_FC;
+ break;
+ case ASM_OP_FCLASS_M:
+ asm_cmpltr_add(i, ASM_CC_FCREL, ASM_CT_M);
+ op = ASM_OP_FCLASS;
+ break;
+ case ASM_OP_FCVT_FX:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_FX);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_NONE);
+ op = ASM_OP_FCVT;
+ break;
+ case ASM_OP_FCVT_FX_TRUNC:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_FX);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_TRUNC);
+ op = ASM_OP_FCVT;
+ break;
+ case ASM_OP_FCVT_FXU:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_FXU);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_NONE);
+ op = ASM_OP_FCVT;
+ break;
+ case ASM_OP_FCVT_FXU_TRUNC:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_FXU);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_TRUNC);
+ op = ASM_OP_FCVT;
+ break;
+ case ASM_OP_FCVT_XF:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_XF);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_NONE);
+ op = ASM_OP_FCVT;
+ break;
+ case ASM_OP_FETCHADD4_ACQ:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_ACQ);
+ op = ASM_OP_FETCHADD4;
+ break;
+ case ASM_OP_FETCHADD4_REL:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_REL);
+ op = ASM_OP_FETCHADD4;
+ break;
+ case ASM_OP_FETCHADD8_ACQ:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_ACQ);
+ op = ASM_OP_FETCHADD8;
+ break;
+ case ASM_OP_FETCHADD8_REL:
+ asm_cmpltr_add(i, ASM_CC_SEM, ASM_CT_REL);
+ op = ASM_OP_FETCHADD8;
+ break;
+ case ASM_OP_FMA_:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_NONE);
+ op = ASM_OP_FMA;
+ break;
+ case ASM_OP_FMA_D:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_D);
+ op = ASM_OP_FMA;
+ break;
+ case ASM_OP_FMA_S:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_S);
+ op = ASM_OP_FMA;
+ break;
+ case ASM_OP_FMERGE_NS:
+ asm_cmpltr_add(i, ASM_CC_FMERGE, ASM_CT_NS);
+ op = ASM_OP_FMERGE;
+ break;
+ case ASM_OP_FMERGE_S:
+ asm_cmpltr_add(i, ASM_CC_FMERGE, ASM_CT_S);
+ op = ASM_OP_FMERGE;
+ break;
+ case ASM_OP_FMERGE_SE:
+ asm_cmpltr_add(i, ASM_CC_FMERGE, ASM_CT_SE);
+ op = ASM_OP_FMERGE;
+ break;
+ case ASM_OP_FMIX_L:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_L);
+ op = ASM_OP_FMIX;
+ break;
+ case ASM_OP_FMIX_LR:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_LR);
+ op = ASM_OP_FMIX;
+ break;
+ case ASM_OP_FMIX_R:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_R);
+ op = ASM_OP_FMIX;
+ break;
+ case ASM_OP_FMS_:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_NONE);
+ op = ASM_OP_FMS;
+ break;
+ case ASM_OP_FMS_D:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_D);
+ op = ASM_OP_FMS;
+ break;
+ case ASM_OP_FMS_S:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_S);
+ op = ASM_OP_FMS;
+ break;
+ case ASM_OP_FNMA_:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_NONE);
+ op = ASM_OP_FNMA;
+ break;
+ case ASM_OP_FNMA_D:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_D);
+ op = ASM_OP_FNMA;
+ break;
+ case ASM_OP_FNMA_S:
+ asm_cmpltr_add(i, ASM_CC_PC, ASM_CT_S);
+ op = ASM_OP_FNMA;
+ break;
+ case ASM_OP_FPCMP_EQ:
+ asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_EQ);
+ op = ASM_OP_FPCMP;
+ break;
+ case ASM_OP_FPCMP_LE:
+ asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_LE);
+ op = ASM_OP_FPCMP;
+ break;
+ case ASM_OP_FPCMP_LT:
+ asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_LT);
+ op = ASM_OP_FPCMP;
+ break;
+ case ASM_OP_FPCMP_NEQ:
+ asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_NEQ);
+ op = ASM_OP_FPCMP;
+ break;
+ case ASM_OP_FPCMP_NLE:
+ asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_NLE);
+ op = ASM_OP_FPCMP;
+ break;
+ case ASM_OP_FPCMP_NLT:
+ asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_NLT);
+ op = ASM_OP_FPCMP;
+ break;
+ case ASM_OP_FPCMP_ORD:
+ asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_ORD);
+ op = ASM_OP_FPCMP;
+ break;
+ case ASM_OP_FPCMP_UNORD:
+ asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_UNORD);
+ op = ASM_OP_FPCMP;
+ break;
+ case ASM_OP_FPCVT_FX:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_FX);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_NONE);
+ op = ASM_OP_FPCVT;
+ break;
+ case ASM_OP_FPCVT_FX_TRUNC:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_FX);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_TRUNC);
+ op = ASM_OP_FPCVT;
+ break;
+ case ASM_OP_FPCVT_FXU:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_FXU);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_NONE);
+ op = ASM_OP_FPCVT;
+ break;
+ case ASM_OP_FPCVT_FXU_TRUNC:
+ asm_cmpltr_add(i, ASM_CC_FCVT, ASM_CT_FXU);
+ asm_cmpltr_add(i, ASM_CC_TRUNC, ASM_CT_TRUNC);
+ op = ASM_OP_FPCVT;
+ break;
+ case ASM_OP_FPMERGE_NS:
+ asm_cmpltr_add(i, ASM_CC_FMERGE, ASM_CT_NS);
+ op = ASM_OP_FPMERGE;
+ break;
+ case ASM_OP_FPMERGE_S:
+ asm_cmpltr_add(i, ASM_CC_FMERGE, ASM_CT_S);
+ op = ASM_OP_FPMERGE;
+ break;
+ case ASM_OP_FPMERGE_SE:
+ asm_cmpltr_add(i, ASM_CC_FMERGE, ASM_CT_SE);
+ op = ASM_OP_FPMERGE;
+ break;
+ case ASM_OP_FSWAP_:
+ asm_cmpltr_add(i, ASM_CC_FSWAP, ASM_CT_NONE);
+ op = ASM_OP_FSWAP;
+ break;
+ case ASM_OP_FSWAP_NL:
+ asm_cmpltr_add(i, ASM_CC_FSWAP, ASM_CT_NL);
+ op = ASM_OP_FSWAP;
+ break;
+ case ASM_OP_FSWAP_NR:
+ asm_cmpltr_add(i, ASM_CC_FSWAP, ASM_CT_NR);
+ op = ASM_OP_FSWAP;
+ break;
+ case ASM_OP_FSXT_L:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_L);
+ op = ASM_OP_FSXT;
+ break;
+ case ASM_OP_FSXT_R:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_R);
+ op = ASM_OP_FSXT;
+ break;
+ case ASM_OP_GETF_D:
+ asm_cmpltr_add(i, ASM_CC_GETF, ASM_CT_D);
+ op = ASM_OP_GETF;
+ break;
+ case ASM_OP_GETF_EXP:
+ asm_cmpltr_add(i, ASM_CC_GETF, ASM_CT_EXP);
+ op = ASM_OP_GETF;
+ break;
+ case ASM_OP_GETF_S:
+ asm_cmpltr_add(i, ASM_CC_GETF, ASM_CT_S);
+ op = ASM_OP_GETF;
+ break;
+ case ASM_OP_GETF_SIG:
+ asm_cmpltr_add(i, ASM_CC_GETF, ASM_CT_SIG);
+ op = ASM_OP_GETF;
+ break;
+ case ASM_OP_INVALA_:
+ asm_cmpltr_add(i, ASM_CC_INVALA, ASM_CT_NONE);
+ op = ASM_OP_INVALA;
+ break;
+ case ASM_OP_INVALA_E:
+ asm_cmpltr_add(i, ASM_CC_INVALA, ASM_CT_E);
+ op = ASM_OP_INVALA;
+ break;
+ case ASM_OP_ITC_D:
+ asm_cmpltr_add(i, ASM_CC_ITC, ASM_CT_D);
+ op = ASM_OP_ITC;
+ break;
+ case ASM_OP_ITC_I:
+ asm_cmpltr_add(i, ASM_CC_ITC, ASM_CT_I);
+ op = ASM_OP_ITC;
+ break;
+ case ASM_OP_ITR_D:
+ asm_cmpltr_add(i, ASM_CC_ITR, ASM_CT_D);
+ ot = ASM_OPER_DTR;
+ op = ASM_OP_ITR;
+ break;
+ case ASM_OP_ITR_I:
+ asm_cmpltr_add(i, ASM_CC_ITR, ASM_CT_I);
+ ot = ASM_OPER_ITR;
+ op = ASM_OP_ITR;
+ break;
+ case ASM_OP_LD1_:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD1_A:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_A);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD1_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_ACQ);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD1_BIAS:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_BIAS);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD1_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD1_C_CLR_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_CLR_ACQ);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD1_C_NC:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD1_S:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_S);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD1_SA:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_SA);
+ op = ASM_OP_LD1;
+ break;
+ case ASM_OP_LD16_:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LD16;
+ break;
+ case ASM_OP_LD16_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_ACQ);
+ op = ASM_OP_LD16;
+ break;
+ case ASM_OP_LD2_:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD2_A:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_A);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD2_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_ACQ);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD2_BIAS:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_BIAS);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD2_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD2_C_CLR_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_CLR_ACQ);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD2_C_NC:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD2_S:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_S);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD2_SA:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_SA);
+ op = ASM_OP_LD2;
+ break;
+ case ASM_OP_LD4_:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD4_A:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_A);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD4_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_ACQ);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD4_BIAS:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_BIAS);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD4_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD4_C_CLR_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_CLR_ACQ);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD4_C_NC:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD4_S:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_S);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD4_SA:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_SA);
+ op = ASM_OP_LD4;
+ break;
+ case ASM_OP_LD8_:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_A:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_A);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_ACQ);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_BIAS:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_BIAS);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_C_CLR_ACQ:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_CLR_ACQ);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_C_NC:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_FILL:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_FILL);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_S:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_S);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LD8_SA:
+ asm_cmpltr_add(i, ASM_CC_LDTYPE, ASM_CT_SA);
+ op = ASM_OP_LD8;
+ break;
+ case ASM_OP_LDF_FILL:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_FILL);
+ op = ASM_OP_LDF;
+ break;
+ case ASM_OP_LDF8_:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LDF8;
+ break;
+ case ASM_OP_LDF8_A:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_A);
+ op = ASM_OP_LDF8;
+ break;
+ case ASM_OP_LDF8_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LDF8;
+ break;
+ case ASM_OP_LDF8_C_NC:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LDF8;
+ break;
+ case ASM_OP_LDF8_S:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_S);
+ op = ASM_OP_LDF8;
+ break;
+ case ASM_OP_LDF8_SA:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_SA);
+ op = ASM_OP_LDF8;
+ break;
+ case ASM_OP_LDFD_:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LDFD;
+ break;
+ case ASM_OP_LDFD_A:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_A);
+ op = ASM_OP_LDFD;
+ break;
+ case ASM_OP_LDFD_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LDFD;
+ break;
+ case ASM_OP_LDFD_C_NC:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LDFD;
+ break;
+ case ASM_OP_LDFD_S:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_S);
+ op = ASM_OP_LDFD;
+ break;
+ case ASM_OP_LDFD_SA:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_SA);
+ op = ASM_OP_LDFD;
+ break;
+ case ASM_OP_LDFE_:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LDFE;
+ break;
+ case ASM_OP_LDFE_A:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_A);
+ op = ASM_OP_LDFE;
+ break;
+ case ASM_OP_LDFE_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LDFE;
+ break;
+ case ASM_OP_LDFE_C_NC:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LDFE;
+ break;
+ case ASM_OP_LDFE_S:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_S);
+ op = ASM_OP_LDFE;
+ break;
+ case ASM_OP_LDFE_SA:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_SA);
+ op = ASM_OP_LDFE;
+ break;
+ case ASM_OP_LDFP8_:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LDFP8;
+ break;
+ case ASM_OP_LDFP8_A:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_A);
+ op = ASM_OP_LDFP8;
+ break;
+ case ASM_OP_LDFP8_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LDFP8;
+ break;
+ case ASM_OP_LDFP8_C_NC:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LDFP8;
+ break;
+ case ASM_OP_LDFP8_S:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_S);
+ op = ASM_OP_LDFP8;
+ break;
+ case ASM_OP_LDFP8_SA:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_SA);
+ op = ASM_OP_LDFP8;
+ break;
+ case ASM_OP_LDFPD_:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LDFPD;
+ break;
+ case ASM_OP_LDFPD_A:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_A);
+ op = ASM_OP_LDFPD;
+ break;
+ case ASM_OP_LDFPD_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LDFPD;
+ break;
+ case ASM_OP_LDFPD_C_NC:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LDFPD;
+ break;
+ case ASM_OP_LDFPD_S:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_S);
+ op = ASM_OP_LDFPD;
+ break;
+ case ASM_OP_LDFPD_SA:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_SA);
+ op = ASM_OP_LDFPD;
+ break;
+ case ASM_OP_LDFPS_:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LDFPS;
+ break;
+ case ASM_OP_LDFPS_A:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_A);
+ op = ASM_OP_LDFPS;
+ break;
+ case ASM_OP_LDFPS_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LDFPS;
+ break;
+ case ASM_OP_LDFPS_C_NC:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LDFPS;
+ break;
+ case ASM_OP_LDFPS_S:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_S);
+ op = ASM_OP_LDFPS;
+ break;
+ case ASM_OP_LDFPS_SA:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_SA);
+ op = ASM_OP_LDFPS;
+ break;
+ case ASM_OP_LDFS_:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_NONE);
+ op = ASM_OP_LDFS;
+ break;
+ case ASM_OP_LDFS_A:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_A);
+ op = ASM_OP_LDFS;
+ break;
+ case ASM_OP_LDFS_C_CLR:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_CLR);
+ op = ASM_OP_LDFS;
+ break;
+ case ASM_OP_LDFS_C_NC:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_C_NC);
+ op = ASM_OP_LDFS;
+ break;
+ case ASM_OP_LDFS_S:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_S);
+ op = ASM_OP_LDFS;
+ break;
+ case ASM_OP_LDFS_SA:
+ asm_cmpltr_add(i, ASM_CC_FLDTYPE, ASM_CT_SA);
+ op = ASM_OP_LDFS;
+ break;
+ case ASM_OP_LFETCH_:
+ asm_cmpltr_add(i, ASM_CC_LFTYPE, ASM_CT_NONE);
+ asm_cmpltr_add(i, ASM_CC_LFETCH, ASM_CT_NONE);
+ op = ASM_OP_LFETCH;
+ break;
+ case ASM_OP_LFETCH_EXCL:
+ asm_cmpltr_add(i, ASM_CC_LFTYPE, ASM_CT_NONE);
+ asm_cmpltr_add(i, ASM_CC_LFETCH, ASM_CT_EXCL);
+ op = ASM_OP_LFETCH;
+ break;
+ case ASM_OP_LFETCH_FAULT:
+ asm_cmpltr_add(i, ASM_CC_LFTYPE, ASM_CT_FAULT);
+ asm_cmpltr_add(i, ASM_CC_LFETCH, ASM_CT_NONE);
+ op = ASM_OP_LFETCH;
+ break;
+ case ASM_OP_LFETCH_FAULT_EXCL:
+ asm_cmpltr_add(i, ASM_CC_LFTYPE, ASM_CT_FAULT);
+ asm_cmpltr_add(i, ASM_CC_LFETCH, ASM_CT_EXCL);
+ op = ASM_OP_LFETCH;
+ break;
+ case ASM_OP_MF_:
+ asm_cmpltr_add(i, ASM_CC_MF, ASM_CT_NONE);
+ op = ASM_OP_MF;
+ break;
+ case ASM_OP_MF_A:
+ asm_cmpltr_add(i, ASM_CC_MF, ASM_CT_A);
+ op = ASM_OP_MF;
+ break;
+ case ASM_OP_MIX1_L:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_L);
+ op = ASM_OP_MIX1;
+ break;
+ case ASM_OP_MIX1_R:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_R);
+ op = ASM_OP_MIX1;
+ break;
+ case ASM_OP_MIX2_L:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_L);
+ op = ASM_OP_MIX2;
+ break;
+ case ASM_OP_MIX2_R:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_R);
+ op = ASM_OP_MIX2;
+ break;
+ case ASM_OP_MIX4_L:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_L);
+ op = ASM_OP_MIX4;
+ break;
+ case ASM_OP_MIX4_R:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_R);
+ op = ASM_OP_MIX4;
+ break;
+ case ASM_OP_MOV_:
+ asm_cmpltr_add(i, ASM_CC_MOV, ASM_CT_NONE);
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_I:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_I);
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_M:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_M);
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_RET:
+ asm_cmpltr_add(i, ASM_CC_MOV, ASM_CT_RET);
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_CPUID:
+ ot = ASM_OPER_CPUID;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_DBR:
+ ot = ASM_OPER_DBR;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_IBR:
+ ot = ASM_OPER_IBR;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_IP:
+ ot = ASM_OPER_IP;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_MSR:
+ ot = ASM_OPER_MSR;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_PKR:
+ ot = ASM_OPER_PKR;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_PMC:
+ ot = ASM_OPER_PMC;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_PMD:
+ ot = ASM_OPER_PMD;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_PR:
+ ot = ASM_OPER_PR;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_PSR:
+ ot = ASM_OPER_PSR;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_PSR_L:
+ ot = ASM_OPER_PSR_L;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_PSR_UM:
+ ot = ASM_OPER_PSR_UM;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_MOV_RR:
+ ot = ASM_OPER_RR;
+ op = ASM_OP_MOV;
+ break;
+ case ASM_OP_NOP_B:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_B);
+ op = ASM_OP_NOP;
+ break;
+ case ASM_OP_NOP_F:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_F);
+ op = ASM_OP_NOP;
+ break;
+ case ASM_OP_NOP_I:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_I);
+ op = ASM_OP_NOP;
+ break;
+ case ASM_OP_NOP_M:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_M);
+ op = ASM_OP_NOP;
+ break;
+ case ASM_OP_NOP_X:
+ asm_cmpltr_add(i, ASM_CC_UNIT, ASM_CT_X);
+ op = ASM_OP_NOP;
+ break;
+ case ASM_OP_PACK2_SSS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_SSS);
+ op = ASM_OP_PACK2;
+ break;
+ case ASM_OP_PACK2_USS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_USS);
+ op = ASM_OP_PACK2;
+ break;
+ case ASM_OP_PACK4_SSS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_SSS);
+ op = ASM_OP_PACK4;
+ break;
+ case ASM_OP_PADD1_:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_NONE);
+ op = ASM_OP_PADD1;
+ break;
+ case ASM_OP_PADD1_SSS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_SSS);
+ op = ASM_OP_PADD1;
+ break;
+ case ASM_OP_PADD1_UUS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_UUS);
+ op = ASM_OP_PADD1;
+ break;
+ case ASM_OP_PADD1_UUU:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_UUU);
+ op = ASM_OP_PADD1;
+ break;
+ case ASM_OP_PADD2_:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_NONE);
+ op = ASM_OP_PADD2;
+ break;
+ case ASM_OP_PADD2_SSS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_SSS);
+ op = ASM_OP_PADD2;
+ break;
+ case ASM_OP_PADD2_UUS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_UUS);
+ op = ASM_OP_PADD2;
+ break;
+ case ASM_OP_PADD2_UUU:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_UUU);
+ op = ASM_OP_PADD2;
+ break;
+ case ASM_OP_PAVG1_:
+ asm_cmpltr_add(i, ASM_CC_PAVG, ASM_CT_NONE);
+ op = ASM_OP_PAVG1;
+ break;
+ case ASM_OP_PAVG1_RAZ:
+ asm_cmpltr_add(i, ASM_CC_PAVG, ASM_CT_RAZ);
+ op = ASM_OP_PAVG1;
+ break;
+ case ASM_OP_PAVG2_:
+ asm_cmpltr_add(i, ASM_CC_PAVG, ASM_CT_NONE);
+ op = ASM_OP_PAVG2;
+ break;
+ case ASM_OP_PAVG2_RAZ:
+ asm_cmpltr_add(i, ASM_CC_PAVG, ASM_CT_RAZ);
+ op = ASM_OP_PAVG2;
+ break;
+ case ASM_OP_PCMP1_EQ:
+ asm_cmpltr_add(i, ASM_CC_PREL, ASM_CT_EQ);
+ op = ASM_OP_PCMP1;
+ break;
+ case ASM_OP_PCMP1_GT:
+ asm_cmpltr_add(i, ASM_CC_PREL, ASM_CT_GT);
+ op = ASM_OP_PCMP1;
+ break;
+ case ASM_OP_PCMP2_EQ:
+ asm_cmpltr_add(i, ASM_CC_PREL, ASM_CT_EQ);
+ op = ASM_OP_PCMP2;
+ break;
+ case ASM_OP_PCMP2_GT:
+ asm_cmpltr_add(i, ASM_CC_PREL, ASM_CT_GT);
+ op = ASM_OP_PCMP2;
+ break;
+ case ASM_OP_PCMP4_EQ:
+ asm_cmpltr_add(i, ASM_CC_PREL, ASM_CT_EQ);
+ op = ASM_OP_PCMP4;
+ break;
+ case ASM_OP_PCMP4_GT:
+ asm_cmpltr_add(i, ASM_CC_PREL, ASM_CT_GT);
+ op = ASM_OP_PCMP4;
+ break;
+ case ASM_OP_PMAX1_U:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_U);
+ op = ASM_OP_PMAX1;
+ break;
+ case ASM_OP_PMIN1_U:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_U);
+ op = ASM_OP_PMIN1;
+ break;
+ case ASM_OP_PMPY2_L:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_L);
+ op = ASM_OP_PMPY2;
+ break;
+ case ASM_OP_PMPY2_R:
+ asm_cmpltr_add(i, ASM_CC_LR, ASM_CT_R);
+ op = ASM_OP_PMPY2;
+ break;
+ case ASM_OP_PMPYSHR2_:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_NONE);
+ op = ASM_OP_PMPYSHR2;
+ break;
+ case ASM_OP_PMPYSHR2_U:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_U);
+ op = ASM_OP_PMPYSHR2;
+ break;
+ case ASM_OP_PROBE_R:
+ asm_cmpltr_add(i, ASM_CC_RW, ASM_CT_R);
+ asm_cmpltr_add(i, ASM_CC_PRTYPE, ASM_CT_NONE);
+ op = ASM_OP_PROBE;
+ break;
+ case ASM_OP_PROBE_R_FAULT:
+ asm_cmpltr_add(i, ASM_CC_RW, ASM_CT_R);
+ asm_cmpltr_add(i, ASM_CC_PRTYPE, ASM_CT_FAULT);
+ op = ASM_OP_PROBE;
+ break;
+ case ASM_OP_PROBE_RW_FAULT:
+ asm_cmpltr_add(i, ASM_CC_RW, ASM_CT_RW);
+ asm_cmpltr_add(i, ASM_CC_PRTYPE, ASM_CT_FAULT);
+ op = ASM_OP_PROBE;
+ break;
+ case ASM_OP_PROBE_W:
+ asm_cmpltr_add(i, ASM_CC_RW, ASM_CT_W);
+ asm_cmpltr_add(i, ASM_CC_PRTYPE, ASM_CT_NONE);
+ op = ASM_OP_PROBE;
+ break;
+ case ASM_OP_PROBE_W_FAULT:
+ asm_cmpltr_add(i, ASM_CC_RW, ASM_CT_W);
+ asm_cmpltr_add(i, ASM_CC_PRTYPE, ASM_CT_FAULT);
+ op = ASM_OP_PROBE;
+ break;
+ case ASM_OP_PSHR2_:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_NONE);
+ op = ASM_OP_PSHR2;
+ break;
+ case ASM_OP_PSHR2_U:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_U);
+ op = ASM_OP_PSHR2;
+ break;
+ case ASM_OP_PSHR4_:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_NONE);
+ op = ASM_OP_PSHR4;
+ break;
+ case ASM_OP_PSHR4_U:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_U);
+ op = ASM_OP_PSHR4;
+ break;
+ case ASM_OP_PSUB1_:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_NONE);
+ op = ASM_OP_PSUB1;
+ break;
+ case ASM_OP_PSUB1_SSS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_SSS);
+ op = ASM_OP_PSUB1;
+ break;
+ case ASM_OP_PSUB1_UUS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_UUS);
+ op = ASM_OP_PSUB1;
+ break;
+ case ASM_OP_PSUB1_UUU:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_UUU);
+ op = ASM_OP_PSUB1;
+ break;
+ case ASM_OP_PSUB2_:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_NONE);
+ op = ASM_OP_PSUB2;
+ break;
+ case ASM_OP_PSUB2_SSS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_SSS);
+ op = ASM_OP_PSUB2;
+ break;
+ case ASM_OP_PSUB2_UUS:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_UUS);
+ op = ASM_OP_PSUB2;
+ break;
+ case ASM_OP_PSUB2_UUU:
+ asm_cmpltr_add(i, ASM_CC_SAT, ASM_CT_UUU);
+ op = ASM_OP_PSUB2;
+ break;
+ case ASM_OP_PTC_E:
+ asm_cmpltr_add(i, ASM_CC_PTC, ASM_CT_E);
+ op = ASM_OP_PTC;
+ break;
+ case ASM_OP_PTC_G:
+ asm_cmpltr_add(i, ASM_CC_PTC, ASM_CT_G);
+ op = ASM_OP_PTC;
+ break;
+ case ASM_OP_PTC_GA:
+ asm_cmpltr_add(i, ASM_CC_PTC, ASM_CT_GA);
+ op = ASM_OP_PTC;
+ break;
+ case ASM_OP_PTC_L:
+ asm_cmpltr_add(i, ASM_CC_PTC, ASM_CT_L);
+ op = ASM_OP_PTC;
+ break;
+ case ASM_OP_PTR_D:
+ asm_cmpltr_add(i, ASM_CC_PTR, ASM_CT_D);
+ op = ASM_OP_PTR;
+ break;
+ case ASM_OP_PTR_I:
+ asm_cmpltr_add(i, ASM_CC_PTR, ASM_CT_I);
+ op = ASM_OP_PTR;
+ break;
+ case ASM_OP_SETF_D:
+ asm_cmpltr_add(i, ASM_CC_SETF, ASM_CT_D);
+ op = ASM_OP_SETF;
+ break;
+ case ASM_OP_SETF_EXP:
+ asm_cmpltr_add(i, ASM_CC_SETF, ASM_CT_EXP);
+ op = ASM_OP_SETF;
+ break;
+ case ASM_OP_SETF_S:
+ asm_cmpltr_add(i, ASM_CC_SETF, ASM_CT_S);
+ op = ASM_OP_SETF;
+ break;
+ case ASM_OP_SETF_SIG:
+ asm_cmpltr_add(i, ASM_CC_SETF, ASM_CT_SIG);
+ op = ASM_OP_SETF;
+ break;
+ case ASM_OP_SHR_:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_NONE);
+ op = ASM_OP_SHR;
+ break;
+ case ASM_OP_SHR_U:
+ asm_cmpltr_add(i, ASM_CC_UNS, ASM_CT_U);
+ op = ASM_OP_SHR;
+ break;
+ case ASM_OP_SRLZ_D:
+ asm_cmpltr_add(i, ASM_CC_SRLZ, ASM_CT_D);
+ op = ASM_OP_SRLZ;
+ break;
+ case ASM_OP_SRLZ_I:
+ asm_cmpltr_add(i, ASM_CC_SRLZ, ASM_CT_I);
+ op = ASM_OP_SRLZ;
+ break;
+ case ASM_OP_ST1_:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_NONE);
+ op = ASM_OP_ST1;
+ break;
+ case ASM_OP_ST1_REL:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_REL);
+ op = ASM_OP_ST1;
+ break;
+ case ASM_OP_ST16_:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_NONE);
+ op = ASM_OP_ST16;
+ break;
+ case ASM_OP_ST16_REL:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_REL);
+ op = ASM_OP_ST16;
+ break;
+ case ASM_OP_ST2_:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_NONE);
+ op = ASM_OP_ST2;
+ break;
+ case ASM_OP_ST2_REL:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_REL);
+ op = ASM_OP_ST2;
+ break;
+ case ASM_OP_ST4_:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_NONE);
+ op = ASM_OP_ST4;
+ break;
+ case ASM_OP_ST4_REL:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_REL);
+ op = ASM_OP_ST4;
+ break;
+ case ASM_OP_ST8_:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_NONE);
+ op = ASM_OP_ST8;
+ break;
+ case ASM_OP_ST8_REL:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_REL);
+ op = ASM_OP_ST8;
+ break;
+ case ASM_OP_ST8_SPILL:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_SPILL);
+ op = ASM_OP_ST8;
+ break;
+ case ASM_OP_STF_SPILL:
+ asm_cmpltr_add(i, ASM_CC_STTYPE, ASM_CT_SPILL);
+ op = ASM_OP_STF;
+ break;
+ case ASM_OP_SYNC_I:
+ asm_cmpltr_add(i, ASM_CC_SYNC, ASM_CT_I);
+ op = ASM_OP_SYNC;
+ break;
+ case ASM_OP_TBIT_NZ_AND:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_NZ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_TBIT;
+ break;
+ case ASM_OP_TBIT_NZ_OR:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_NZ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_TBIT;
+ break;
+ case ASM_OP_TBIT_NZ_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_NZ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_TBIT;
+ break;
+ case ASM_OP_TBIT_Z:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_NONE);
+ op = ASM_OP_TBIT;
+ break;
+ case ASM_OP_TBIT_Z_AND:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_TBIT;
+ break;
+ case ASM_OP_TBIT_Z_OR:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_TBIT;
+ break;
+ case ASM_OP_TBIT_Z_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_TBIT;
+ break;
+ case ASM_OP_TBIT_Z_UNC:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_UNC);
+ op = ASM_OP_TBIT;
+ break;
+ case ASM_OP_TNAT_NZ_AND:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_NZ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_TNAT;
+ break;
+ case ASM_OP_TNAT_NZ_OR:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_NZ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_TNAT;
+ break;
+ case ASM_OP_TNAT_NZ_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_NZ);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_TNAT;
+ break;
+ case ASM_OP_TNAT_Z:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_NONE);
+ op = ASM_OP_TNAT;
+ break;
+ case ASM_OP_TNAT_Z_AND:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_AND);
+ op = ASM_OP_TNAT;
+ break;
+ case ASM_OP_TNAT_Z_OR:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR);
+ op = ASM_OP_TNAT;
+ break;
+ case ASM_OP_TNAT_Z_OR_ANDCM:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_OR_ANDCM);
+ op = ASM_OP_TNAT;
+ break;
+ case ASM_OP_TNAT_Z_UNC:
+ asm_cmpltr_add(i, ASM_CC_TREL, ASM_CT_Z);
+ asm_cmpltr_add(i, ASM_CC_CTYPE, ASM_CT_UNC);
+ op = ASM_OP_TNAT;
+ break;
+ case ASM_OP_UNPACK1_H:
+ asm_cmpltr_add(i, ASM_CC_UNPACK, ASM_CT_H);
+ op = ASM_OP_UNPACK1;
+ break;
+ case ASM_OP_UNPACK1_L:
+ asm_cmpltr_add(i, ASM_CC_UNPACK, ASM_CT_L);
+ op = ASM_OP_UNPACK1;
+ break;
+ case ASM_OP_UNPACK2_H:
+ asm_cmpltr_add(i, ASM_CC_UNPACK, ASM_CT_H);
+ op = ASM_OP_UNPACK2;
+ break;
+ case ASM_OP_UNPACK2_L:
+ asm_cmpltr_add(i, ASM_CC_UNPACK, ASM_CT_L);
+ op = ASM_OP_UNPACK2;
+ break;
+ case ASM_OP_UNPACK4_H:
+ asm_cmpltr_add(i, ASM_CC_UNPACK, ASM_CT_H);
+ op = ASM_OP_UNPACK4;
+ break;
+ case ASM_OP_UNPACK4_L:
+ asm_cmpltr_add(i, ASM_CC_UNPACK, ASM_CT_L);
+ op = ASM_OP_UNPACK4;
+ break;
+ case ASM_OP_XMA_H:
+ asm_cmpltr_add(i, ASM_CC_XMA, ASM_CT_H);
+ op = ASM_OP_XMA;
+ break;
+ case ASM_OP_XMA_HU:
+ asm_cmpltr_add(i, ASM_CC_XMA, ASM_CT_HU);
+ op = ASM_OP_XMA;
+ break;
+ case ASM_OP_XMA_L:
+ asm_cmpltr_add(i, ASM_CC_XMA, ASM_CT_L);
+ op = ASM_OP_XMA;
+ break;
+ default:
+ KASSERT(op < ASM_OP_NUMBER_OF_OPCODES, ("foo"));
+ break;
+ }
+ i->i_op = op;
+ return (ot);
+}
+
+/* Set operand 'idx' of the instruction to the immediate value 'val'. */
+static __inline void
+op_imm(struct asm_inst *inst, int idx, uint64_t val)
+{
+	inst->i_oper[idx].o_value = val;
+	inst->i_oper[idx].o_type = ASM_OPER_IMM;
+}
+
+/* Set only the type of operand 'idx'; its value is left untouched. */
+static __inline void
+op_type(struct asm_inst *inst, int idx, enum asm_oper_type type)
+{
+	inst->i_oper[idx].o_type = type;
+}
+
+/* Set only the value of operand 'idx'; its type is left untouched. */
+static __inline void
+op_value(struct asm_inst *inst, int idx, uint64_t value)
+{
+	inst->i_oper[idx].o_value = value;
+}
+
+/*
+ * Set operand 'idx' to type 'type', taking its value from the
+ * instruction bits at offset 'o', length 'l'.
+ */
+static __inline void
+operand(struct asm_inst *inst, int idx, enum asm_oper_type type,
+    uint64_t bits, int o, int l)
+{
+	inst->i_oper[idx].o_value = FIELD(bits, o, l);
+	inst->i_oper[idx].o_type = type;
+}
+
+/*
+ * Extract the immediate field at offset 'o', length 'l', from the
+ * instruction bits; sign-extend the result when 'sign' is non-zero.
+ */
+static uint64_t
+imm(uint64_t bits, int sign, int o, int l)
+{
+	uint64_t val = FIELD(bits, o, l);
+
+	/* Propagate the field's top bit through the upper bits. */
+	if (sign && (val & (1LL << (l - 1))) != 0)
+		val |= -1LL << l;
+	return (val);
+}
+
+/* Store the sign-extended immediate field (offset 'o', length 'l')
+ * as operand 'idx'. */
+static void
+s_imm(struct asm_inst *inst, int idx, uint64_t bits, int o, int l)
+{
+	inst->i_oper[idx].o_value = imm(bits, 1, o, l);
+	inst->i_oper[idx].o_type = ASM_OPER_IMM;
+}
+
+/* Store the zero-extended immediate field (offset 'o', length 'l')
+ * as operand 'idx'. */
+static void
+u_imm(struct asm_inst *inst, int idx, uint64_t bits, int o, int l)
+{
+	inst->i_oper[idx].o_value = imm(bits, 0, o, l);
+	inst->i_oper[idx].o_type = ASM_OPER_IMM;
+}
+
+/*
+ * Assemble an immediate from a 0-terminated varargs list of
+ * FRAG(offset,length) descriptors, least-significant fragment first.
+ * The result is sign-extended from the accumulated width when 'sign'
+ * is non-zero.
+ */
+static uint64_t
+vimm(uint64_t bits, int sign, va_list ap)
+{
+	uint64_t val = 0;
+	int len = 0;	/* total number of bits collected so far */
+	int frag;
+
+	while ((frag = va_arg(ap, int)) != 0) {
+		val |= (uint64_t)FIELD(bits, FRAG_OFS(frag), FRAG_LEN(frag))
+		    << len;
+		len += FRAG_LEN(frag);
+	}
+	if (sign && (val & (1LL << (len - 1))) != 0)
+		val |= -1LL << len;
+	return (val);
+}
+
+/* Store a sign-extended, multi-fragment immediate as operand 'idx'.
+ * The varargs are FRAG() descriptors, terminated by 0. */
+static void
+s_immf(struct asm_inst *inst, int idx, uint64_t bits, ...)
+{
+	va_list ap;
+
+	va_start(ap, bits);
+	inst->i_oper[idx].o_value = vimm(bits, 1, ap);
+	inst->i_oper[idx].o_type = ASM_OPER_IMM;
+	va_end(ap);
+}
+
+/* Store a zero-extended, multi-fragment immediate as operand 'idx'.
+ * The varargs are FRAG() descriptors, terminated by 0. */
+static void
+u_immf(struct asm_inst *inst, int idx, uint64_t bits, ...)
+{
+	va_list ap;
+
+	va_start(ap, bits);
+	inst->i_oper[idx].o_value = vimm(bits, 0, ap);
+	inst->i_oper[idx].o_type = ASM_OPER_IMM;
+	va_end(ap);
+}
+
+/* Store a sign-extended, multi-fragment displacement as operand
+ * 'idx'.  The value is scaled by 16 — branch targets are encoded in
+ * 16-byte (bundle-sized) units. */
+static void
+disp(struct asm_inst *inst, int idx, uint64_t bits, ...)
+{
+	va_list ap;
+
+	va_start(ap, bits);
+	inst->i_oper[idx].o_value = vimm(bits, 1, ap) << 4;
+	inst->i_oper[idx].o_type = ASM_OPER_DISP;
+	va_end(ap);
+}
+
+/*
+ * Replace the bits of *dst at and above bit position 'dl' with 'sl'
+ * bits taken from 'src' at offset 'so'.  Used by the X-unit formats
+ * to merge immediate fragments that live in another instruction slot.
+ */
+static __inline void
+combine(uint64_t *dst, int dl, uint64_t src, int sl, int so)
+{
+	*dst = (*dst & ((1LL << dl) - 1LL)) |
+	    ((uint64_t)_FLD64(src, so, sl) << dl);
+}
+
+/*
+ * asm_extract - decode the operands of the instruction in bundle
+ * slot 'slot', given the opcode 'op', the instruction format 'fmt'
+ * and the 41 instruction bits.  The field offsets/lengths below are
+ * taken from the instruction format tables (see the Itanium ISA
+ * manual, vol 3).  Returns 1 on success, 0 when the bit pattern is
+ * not a valid encoding for 'fmt'.
+ */
+int
+asm_extract(enum asm_op op, enum asm_fmt fmt, uint64_t bits,
+    struct asm_bundle *b, int slot)
+{
+	struct asm_inst *i = b->b_inst + slot;
+	enum asm_oper_type ot;
+
+	KASSERT(op != ASM_OP_NONE, ("foo"));
+	i->i_bits = bits;
+	i->i_format = fmt;
+	/*
+	 * Operands in front of i_srcidx are destinations.  Formats
+	 * with two destinations increment it; formats with none
+	 * decrement it (see the per-case adjustments below).
+	 */
+	i->i_srcidx = 2;
+
+	ot = asm_normalize(i, op);
+
+	/* Operand 0 is the qualifying predicate, except for B6/B7. */
+	if (fmt != ASM_FMT_B6 && fmt != ASM_FMT_B7)
+		operand(i, 0, ASM_OPER_PREG, bits, 0, 6);
+
+	switch (fmt) {
+	case ASM_FMT_A1:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		/* Some add/sub encodings imply an extra ",1" operand. */
+		if ((op == ASM_OP_ADD && FIELD(bits, 27, 2) == 1) ||
+		    (op == ASM_OP_SUB && FIELD(bits, 27, 2) == 0))
+			op_imm(i, 4, 1LL);
+		break;
+	case ASM_FMT_A2:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		op_imm(i, 3, 1LL + FIELD(bits, 27, 2));
+		operand(i, 4, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_A3:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		s_immf(i, 2, bits, FRAG(13,7), FRAG(36,1), 0);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_A4:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		s_immf(i, 2, bits, FRAG(13,7), FRAG(27,6), FRAG(36,1), 0);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_A5:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		s_immf(i, 2, bits, FRAG(13,7), FRAG(27,9), FRAG(22,5),
+		    FRAG(36,1), 0);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 2);
+		break;
+	case ASM_FMT_A6: /* 2 dst */
+		operand(i, 1, ASM_OPER_PREG, bits, 6, 6);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		operand(i, 3, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 4, ASM_OPER_GREG, bits, 20, 7);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_A7: /* 2 dst */
+		/* The r2 field must encode r0 in this format. */
+		if (FIELD(bits, 13, 7) != 0)
+			return (0);
+		operand(i, 1, ASM_OPER_PREG, bits, 6, 6);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		operand(i, 3, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 4, ASM_OPER_GREG, bits, 20, 7);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_A8: /* 2 dst */
+		operand(i, 1, ASM_OPER_PREG, bits, 6, 6);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		s_immf(i, 3, bits, FRAG(13,7), FRAG(36,1), 0);
+		operand(i, 4, ASM_OPER_GREG, bits, 20, 7);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_A9:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_A10:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		op_imm(i, 3, 1LL + FIELD(bits, 27, 2));
+		operand(i, 4, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_B1: /* 0 dst */
+		asm_brhint(i);
+		disp(i, 1, bits, FRAG(13,20), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_B2: /* 0 dst */
+		if (FIELD(bits, 0, 6) != 0)
+			return (0);
+		asm_brhint(i);
+		disp(i, 1, bits, FRAG(13,20), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_B3:
+		asm_brhint(i);
+		operand(i, 1, ASM_OPER_BREG, bits, 6, 3);
+		disp(i, 2, bits, FRAG(13,20), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_B4: /* 0 dst */
+		asm_brhint(i);
+		operand(i, 1, ASM_OPER_BREG, bits, 13, 3);
+		break;
+	case ASM_FMT_B5:
+#if 0
+		if (FIELD(bits, 32, 1) == 0)
+			return (0);
+#endif
+		asm_brhint(i);
+		operand(i, 1, ASM_OPER_BREG, bits, 6, 3);
+		operand(i, 2, ASM_OPER_BREG, bits, 13, 3);
+		break;
+	case ASM_FMT_B6: /* 0 dst */
+		asm_brphint(i);
+		disp(i, 1, bits, FRAG(13,20), FRAG(36,1), 0);
+		disp(i, 2, bits, FRAG(6,7), FRAG(33,2), 0);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_B7: /* 0 dst */
+		asm_brphint(i);
+		operand(i, 1, ASM_OPER_BREG, bits, 13, 3);
+		disp(i, 2, bits, FRAG(6,7), FRAG(33,2), 0);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_B8:
+		/* no operands */
+		break;
+	case ASM_FMT_B9: /* 0 dst */
+		u_immf(i, 1, bits, FRAG(6,20), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_F1:
+		asm_sf(i);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_FREG, bits, 20, 7);
+		operand(i, 4, ASM_OPER_FREG, bits, 27, 7);
+		break;
+	case ASM_FMT_F2:
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_FREG, bits, 20, 7);
+		operand(i, 4, ASM_OPER_FREG, bits, 27, 7);
+		break;
+	case ASM_FMT_F3:
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_FREG, bits, 20, 7);
+		operand(i, 4, ASM_OPER_FREG, bits, 27, 7);
+		break;
+	case ASM_FMT_F4: /* 2 dst */
+		/* The ra/rb bits together select the fp relation. */
+		if (FIELD(bits, 33, 1)) { /* ra */
+			if (FIELD(bits, 36, 1)) /* rb */
+				asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_UNORD);
+			else
+				asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_LE);
+		} else {
+			if (FIELD(bits, 36, 1)) /* rb */
+				asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_LT);
+			else
+				asm_cmpltr_add(i, ASM_CC_FREL, ASM_CT_EQ);
+		}
+		if (FIELD(bits, 12, 1)) /* ta */
+			asm_cmpltr_add(i, ASM_CC_FCTYPE, ASM_CT_UNC);
+		else
+			asm_cmpltr_add(i, ASM_CC_FCTYPE, ASM_CT_NONE);
+		asm_sf(i);
+		operand(i, 1, ASM_OPER_PREG, bits, 6, 6);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		operand(i, 3, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 4, ASM_OPER_FREG, bits, 20, 7);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_F5: /* 2 dst */
+		operand(i, 1, ASM_OPER_PREG, bits, 6, 6);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		operand(i, 3, ASM_OPER_FREG, bits, 13, 7);
+		u_immf(i, 4, bits, FRAG(33,2), FRAG(20,7), 0);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_F6: /* 2 dst */
+		asm_sf(i);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		operand(i, 3, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 4, ASM_OPER_FREG, bits, 20, 7);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_F7: /* 2 dst */
+		asm_sf(i);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		operand(i, 3, ASM_OPER_FREG, bits, 20, 7);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_F8:
+		asm_sf(i);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_FREG, bits, 20, 7);
+		break;
+	case ASM_FMT_F9:
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_FREG, bits, 20, 7);
+		break;
+	case ASM_FMT_F10:
+		asm_sf(i);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		break;
+	case ASM_FMT_F11:
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		break;
+	case ASM_FMT_F12: /* 0 dst */
+		asm_sf(i);
+		u_imm(i, 1, bits, 13, 7);
+		u_imm(i, 2, bits, 20, 7);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_F13:
+		asm_sf(i);
+		/* no operands */
+		break;
+	case ASM_FMT_F14: /* 0 dst */
+		asm_sf(i);
+		disp(i, 1, bits, FRAG(6,20), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_F15: /* 0 dst */
+		u_imm(i, 1, bits, 6, 20);
+		break;
+	case ASM_FMT_I1:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		/* The 2-bit count field maps to a fixed shift amount. */
+		switch (FIELD(bits, 30, 2)) {
+		case 0: op_imm(i, 4, 0LL); break;
+		case 1: op_imm(i, 4, 7LL); break;
+		case 2: op_imm(i, 4, 15LL); break;
+		case 3: op_imm(i, 4, 16LL); break;
+		}
+		break;
+	case ASM_FMT_I2:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_I3:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		u_imm(i, 3, bits, 20, 4);
+		break;
+	case ASM_FMT_I4:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		u_imm(i, 3, bits, 20, 8);
+		break;
+	case ASM_FMT_I5:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 20, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_I6:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 20, 7);
+		u_imm(i, 3, bits, 14, 5);
+		break;
+	case ASM_FMT_I7:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_I8:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		/* count is encoded as 31 - ccount. */
+		op_imm(i, 3, 31LL - FIELD(bits, 20, 5));
+		break;
+	case ASM_FMT_I9:
+		if (FIELD(bits, 13, 7) != 0)
+			return (0);
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_I10:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		u_imm(i, 4, bits, 27, 6);
+		break;
+	case ASM_FMT_I11:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 20, 7);
+		u_imm(i, 3, bits, 14, 6);
+		op_imm(i, 4, 1LL + FIELD(bits, 27, 6));
+		break;
+	case ASM_FMT_I12:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		op_imm(i, 3, 63LL - FIELD(bits, 20, 6));
+		op_imm(i, 4, 1LL + FIELD(bits, 27, 6));
+		break;
+	case ASM_FMT_I13:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		s_immf(i, 2, bits, FRAG(13,7), FRAG(36,1), 0);
+		op_imm(i, 3, 63LL - FIELD(bits, 20, 6));
+		op_imm(i, 4, 1LL + FIELD(bits, 27, 6));
+		break;
+	case ASM_FMT_I14:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		s_imm(i, 2, bits, 36, 1);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		op_imm(i, 4, 63LL - FIELD(bits, 14, 6));
+		op_imm(i, 5, 1LL + FIELD(bits, 27, 6));
+		break;
+	case ASM_FMT_I15:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		op_imm(i, 4, 63LL - FIELD(bits, 31, 6));
+		op_imm(i, 5, 1LL + FIELD(bits, 27, 4));
+		break;
+	case ASM_FMT_I16: /* 2 dst */
+		operand(i, 1, ASM_OPER_PREG, bits, 6, 6);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		u_imm(i, 4, bits, 14, 6);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_I17: /* 2 dst */
+		operand(i, 1, ASM_OPER_PREG, bits, 6, 6);
+		operand(i, 2, ASM_OPER_PREG, bits, 27, 6);
+		operand(i, 3, ASM_OPER_GREG, bits, 20, 7);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_I19:
+		u_immf(i, 1, bits, FRAG(6,20), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_I20: /* 0 dst */
+		operand(i, 1, ASM_OPER_GREG, bits, 13, 7);
+		disp(i, 2, bits, FRAG(6,7), FRAG(20,13), FRAG(36,1), 0);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_I21:
+		switch (FIELD(bits, 20, 2)) { /* wh */
+		case 0: asm_cmpltr_add(i, ASM_CC_MWH, ASM_CT_SPTK); break;
+		case 1: asm_cmpltr_add(i, ASM_CC_MWH, ASM_CT_NONE); break;
+		case 2: asm_cmpltr_add(i, ASM_CC_MWH, ASM_CT_DPTK); break;
+		case 3: return (0);	/* reserved hint encoding */
+		}
+		if (FIELD(bits, 23, 1)) /* ih */
+			asm_cmpltr_add(i, ASM_CC_IH, ASM_CT_IMP);
+		else
+			asm_cmpltr_add(i, ASM_CC_IH, ASM_CT_NONE);
+		operand(i, 1, ASM_OPER_BREG, bits, 6, 3);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		disp(i, 3, bits, FRAG(24,9), 0);
+		break;
+	case ASM_FMT_I22:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_BREG, bits, 13, 3);
+		break;
+	case ASM_FMT_I23:
+		op_type(i, 1, ASM_OPER_PR);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		u_immf(i, 3, bits, FRAG(6,7), FRAG(24,8), FRAG(36,1), 0);
+		/* The mask17 immediate doesn't cover p0; shift it up. */
+		i->i_oper[3].o_value <<= 1;
+		break;
+	case ASM_FMT_I24:
+		op_type(i, 1, ASM_OPER_PR_ROT);
+		s_immf(i, 2, bits, FRAG(6,27), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_I25:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		/* Source type (ip/pr) determined by asm_normalize(). */
+		op_type(i, 2, ot);
+		break;
+	case ASM_FMT_I26:
+		operand(i, 1, ASM_OPER_AREG, bits, 20, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_I27:
+		operand(i, 1, ASM_OPER_AREG, bits, 20, 7);
+		s_immf(i, 2, bits, FRAG(13,7), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_I28:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_AREG, bits, 20, 7);
+		break;
+	case ASM_FMT_I29:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_M1:
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		/* ld16 implicitly targets ar.csd as a 2nd destination. */
+		if (i->i_op == ASM_OP_LD16) {
+			op_type(i, 2, ASM_OPER_AREG);
+			op_value(i, 2, AR_CSD);
+			i->i_srcidx++;
+		}
+		operand(i, i->i_srcidx, ASM_OPER_MEM, bits, 20, 7);
+		break;
+	case ASM_FMT_M2:
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_MEM, bits, 20, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M3:
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_MEM, bits, 20, 7);
+		s_immf(i, 3, bits, FRAG(13,7), FRAG(27,1), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_M4:
+		asm_hint(i, ASM_CC_STHINT);
+		operand(i, 1, ASM_OPER_MEM, bits, 20, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		/* st16 implicitly reads ar.csd as an extra source. */
+		if (i->i_op == ASM_OP_ST16) {
+			op_type(i, 3, ASM_OPER_AREG);
+			op_value(i, 3, AR_CSD);
+		}
+		break;
+	case ASM_FMT_M5:
+		asm_hint(i, ASM_CC_STHINT);
+		operand(i, 1, ASM_OPER_MEM, bits, 20, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		s_immf(i, 3, bits, FRAG(6,7), FRAG(27,1), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_M6:
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_MEM, bits, 20, 7);
+		break;
+	case ASM_FMT_M7:
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_MEM, bits, 20, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M8:
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_MEM, bits, 20, 7);
+		s_immf(i, 3, bits, FRAG(13,7), FRAG(27,1), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_M9:
+		asm_hint(i, ASM_CC_STHINT);
+		operand(i, 1, ASM_OPER_MEM, bits, 20, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M10:
+		asm_hint(i, ASM_CC_STHINT);
+		operand(i, 1, ASM_OPER_MEM, bits, 20, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		s_immf(i, 3, bits, FRAG(6,7), FRAG(27,1), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_M11: /* 2 dst */
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_MEM, bits, 20, 7);
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_M12: /* 2 dst */
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		operand(i, 3, ASM_OPER_MEM, bits, 20, 7);
+		/* Post-increment: 8 or 16, selected by bit 30. */
+		op_imm(i, 4, 8LL << FIELD(bits, 30, 1));
+		i->i_srcidx++;
+		break;
+	case ASM_FMT_M13:
+		asm_hint(i, ASM_CC_LFHINT);
+		operand(i, 1, ASM_OPER_MEM, bits, 20, 7);
+		break;
+	case ASM_FMT_M14: /* 0 dst */
+		asm_hint(i, ASM_CC_LFHINT);
+		operand(i, 1, ASM_OPER_MEM, bits, 20, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_M15: /* 0 dst */
+		asm_hint(i, ASM_CC_LFHINT);
+		operand(i, 1, ASM_OPER_MEM, bits, 20, 7);
+		s_immf(i, 2, bits, FRAG(13,7), FRAG(27,1), FRAG(36,1), 0);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_M16: {
+		int oper;
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_MEM, bits, 20, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 13, 7);
+		/* cmp8xchg16 implicitly reads ar.csd as well. */
+		if (i->i_op == ASM_OP_CMP8XCHG16) {
+			op_type(i, 4, ASM_OPER_AREG);
+			op_value(i, 4, AR_CSD);
+			oper = 5;
+		} else
+			oper = 4;
+		/* cmpxchg variants (x6 < 8) also read ar.ccv. */
+		if (FIELD(bits, 30, 6) < 8) {
+			op_type(i, oper, ASM_OPER_AREG);
+			op_value(i, oper, AR_CCV);
+		}
+		break;
+	}
+	case ASM_FMT_M17:
+		asm_hint(i, ASM_CC_LDHINT);
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_MEM, bits, 20, 7);
+		/* fetchadd increment: +/- 1, 4, 8 or 16. */
+		switch (FIELD(bits, 13, 2)) {
+		case 0: op_imm(i, 3, 1LL << 4); break;
+		case 1: op_imm(i, 3, 1LL << 3); break;
+		case 2: op_imm(i, 3, 1LL << 2); break;
+		case 3: op_imm(i, 3, 1LL); break;
+		}
+		if (FIELD(bits, 15, 1))
+			i->i_oper[3].o_value *= -1LL;
+		break;
+	case ASM_FMT_M18:
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M19:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_FREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M20: /* 0 dst */
+		operand(i, 1, ASM_OPER_GREG, bits, 13, 7);
+		disp(i, 2, bits, FRAG(6,7), FRAG(20,13), FRAG(36,1), 0);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_M21: /* 0 dst */
+		operand(i, 1, ASM_OPER_FREG, bits, 13, 7);
+		disp(i, 2, bits, FRAG(6,7), FRAG(20,13), FRAG(36,1), 0);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_M22: /* 0 dst */
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		disp(i, 2, bits, FRAG(13,20), FRAG(36,1), 0);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_M23: /* 0 dst */
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		disp(i, 2, bits, FRAG(13,20), FRAG(36,1), 0);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_M24:
+		/* no operands */
+		break;
+	case ASM_FMT_M25:
+		if (FIELD(bits, 0, 6) != 0)
+			return (0);
+		/* no operands */
+		break;
+	case ASM_FMT_M26:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		break;
+	case ASM_FMT_M27:
+		operand(i, 1, ASM_OPER_FREG, bits, 6, 7);
+		break;
+	case ASM_FMT_M28:
+		operand(i, 1, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	case ASM_FMT_M29:
+		operand(i, 1, ASM_OPER_AREG, bits, 20, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M30:
+		operand(i, 1, ASM_OPER_AREG, bits, 20, 7);
+		s_immf(i, 2, bits, FRAG(13,7), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_M31:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_AREG, bits, 20, 7);
+		break;
+	case ASM_FMT_M32:
+		operand(i, 1, ASM_OPER_CREG, bits, 20, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M33:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_CREG, bits, 20, 7);
+		break;
+	case ASM_FMT_M34: {
+		/* alloc: expose ins (0), locals, outs and rotating size. */
+		uint64_t loc, out;
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		op_type(i, 2, ASM_OPER_AREG);
+		op_value(i, 2, AR_PFS);
+		loc = FIELD(bits, 20, 7);
+		out = FIELD(bits, 13, 7) - loc;
+		op_imm(i, 3, 0);
+		op_imm(i, 4, loc);
+		op_imm(i, 5, out);
+		op_imm(i, 6, (uint64_t)FIELD(bits, 27, 4) << 3);
+		break;
+	}
+	case ASM_FMT_M35:
+		/* x6 selects psr.l vs psr.um as the destination. */
+		if (FIELD(bits, 27, 6) == 0x2D)
+			op_type(i, 1, ASM_OPER_PSR_L);
+		else
+			op_type(i, 1, ASM_OPER_PSR_UM);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M36:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		/* x6 selects psr vs psr.um as the source. */
+		if (FIELD(bits, 27, 6) == 0x25)
+			op_type(i, 2, ASM_OPER_PSR);
+		else
+			op_type(i, 2, ASM_OPER_PSR_UM);
+		break;
+	case ASM_FMT_M37:
+		u_immf(i, 1, bits, FRAG(6,20), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_M38:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 20, 7);
+		operand(i, 3, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M39:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 20, 7);
+		u_imm(i, 3, bits, 13, 2);
+		break;
+	case ASM_FMT_M40: /* 0 dst */
+		operand(i, 1, ASM_OPER_GREG, bits, 20, 7);
+		u_imm(i, 2, bits, 13, 2);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_M41:
+		operand(i, 1, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M42:
+		/* Indirect register file (ot) from asm_normalize(). */
+		operand(i, 1, ot, bits, 20, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		break;
+	case ASM_FMT_M43:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ot, bits, 20, 7);
+		break;
+	case ASM_FMT_M44:
+		u_immf(i, 1, bits, FRAG(6,21), FRAG(31,2), FRAG(36,1), 0);
+		break;
+	case ASM_FMT_M45: /* 0 dst */
+		operand(i, 1, ASM_OPER_GREG, bits, 20, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 13, 7);
+		i->i_srcidx--;
+		break;
+	case ASM_FMT_M46:
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		operand(i, 2, ASM_OPER_GREG, bits, 20, 7);
+		break;
+	/*
+	 * X formats span two slots: the high bits of the immediate
+	 * come from slot 1 (the L slot) and are merged in here with
+	 * combine().
+	 */
+	case ASM_FMT_X1:
+		KASSERT(slot == 2, ("foo"));
+		u_immf(i, 1, bits, FRAG(6,20), FRAG(36,1), 0);
+		combine(&i->i_oper[1].o_value, 21, b->b_inst[1].i_bits, 41, 0);
+		break;
+	case ASM_FMT_X2:
+		KASSERT(slot == 2, ("foo"));
+		operand(i, 1, ASM_OPER_GREG, bits, 6, 7);
+		u_immf(i, 2, bits, FRAG(13,7), FRAG(27,9), FRAG(22,5),
+		    FRAG(21,1), 0);
+		combine(&i->i_oper[2].o_value, 22, b->b_inst[1].i_bits, 41, 0);
+		combine(&i->i_oper[2].o_value, 63, bits, 1, 36);
+		break;
+	case ASM_FMT_X3:
+		KASSERT(slot == 2, ("foo"));
+		asm_brhint(i);
+		u_imm(i, 1, bits, 13, 20);
+		combine(&i->i_oper[1].o_value, 20, b->b_inst[1].i_bits, 39, 2);
+		combine(&i->i_oper[1].o_value, 59, bits, 1, 36);
+		i->i_oper[1].o_value <<= 4;
+		i->i_oper[1].o_type = ASM_OPER_DISP;
+		break;
+	case ASM_FMT_X4:
+		KASSERT(slot == 2, ("foo"));
+		asm_brhint(i);
+		operand(i, 1, ASM_OPER_BREG, bits, 6, 3);
+		u_imm(i, 2, bits, 13, 20);
+		combine(&i->i_oper[2].o_value, 20, b->b_inst[1].i_bits, 39, 2);
+		combine(&i->i_oper[2].o_value, 59, bits, 1, 36);
+		i->i_oper[2].o_value <<= 4;
+		i->i_oper[2].o_type = ASM_OPER_DISP;
+		break;
+	default:
+		KASSERT(fmt == ASM_FMT_NONE, ("foo"));
+		return (0);
+	}
+
+	return (1);
+}
diff --git a/sys/ia64/disasm/disasm_format.c b/sys/ia64/disasm/disasm_format.c
new file mode 100644
index 0000000..3c7fe27
--- /dev/null
+++ b/sys/ia64/disasm/disasm_format.c
@@ -0,0 +1,344 @@
+/*
+ * Copyright (c) 2000-2003 Marcel Moolenaar
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <sys/cdefs.h>
+__FBSDID("$FreeBSD$");
+
+#include <sys/param.h>
+#include <sys/systm.h>
+
+#include <ia64/disasm/disasm_int.h>
+#include <ia64/disasm/disasm.h>
+
+/*
+ * Mnemonics (keep in sync with enum asm_op).
+ */
+static const char *asm_mnemonics[] = {
+ NULL,
+ "add", "addl", "addp4", "adds", "alloc", "and", "andcm",
+ "br", "break", "brl", "brp", "bsw",
+ "chk", "clrrrb", "cmp", "cmp4", "cmp8xchg16", "cmpxchg1", "cmpxchg2",
+ "cmpxchg4", "cmpxchg8", "cover", "czx1", "czx2",
+ "dep",
+ "epc", "extr",
+ "famax", "famin", "fand", "fandcm", "fc", "fchkf", "fclass", "fclrf",
+ "fcmp", "fcvt", "fetchadd4", "fetchadd8", "flushrs", "fma", "fmax",
+ "fmerge", "fmin", "fmix", "fms", "fnma", "for", "fpack", "fpamax",
+ "fpamin", "fpcmp", "fpcvt", "fpma", "fpmax", "fpmerge", "fpmin",
+ "fpms", "fpnma", "fprcpa", "fprsqrta", "frcpa", "frsqrta", "fselect",
+ "fsetc", "fswap", "fsxt", "fwb", "fxor",
+ "getf",
+ "invala", "itc", "itr",
+ "ld1", "ld16", "ld2", "ld4", "ld8", "ldf", "ldf8", "ldfd", "ldfe",
+ "ldfp8", "ldfpd", "ldfps", "ldfs", "lfetch", "loadrs",
+ "mf", "mix1", "mix2", "mix4", "mov", "movl", "mux1", "mux2",
+ "nop",
+ "or",
+ "pack2", "pack4", "padd1", "padd2", "padd4", "pavg1", "pavg2",
+ "pavgsub1", "pavgsub2", "pcmp1", "pcmp2", "pcmp4", "pmax1", "pmax2",
+ "pmin1", "pmin2", "pmpy2", "pmpyshr2", "popcnt", "probe", "psad1",
+ "pshl2", "pshl4", "pshladd2", "pshr2", "pshr4", "pshradd2", "psub1",
+ "psub2", "psub4", "ptc", "ptr",
+ "rfi", "rsm", "rum",
+ "setf", "shl", "shladd", "shladdp4", "shr", "shrp", "srlz", "ssm",
+ "st1", "st16", "st2", "st4", "st8", "stf", "stf8", "stfd", "stfe",
+ "stfs", "sub", "sum", "sxt1", "sxt2", "sxt4", "sync",
+ "tak", "tbit", "thash", "tnat", "tpa", "ttag",
+ "unpack1", "unpack2", "unpack4",
+ "xchg1", "xchg2", "xchg4", "xchg8", "xma", "xor",
+ "zxt1", "zxt2", "zxt4"
+};
+
+/*
+ * Completers (keep in sync with enum asm_cmpltr_type).
+ */
+static const char *asm_completers[] = {
+ "",
+ ".0", ".1",
+ ".a", ".acq", ".and",
+ ".b", ".bias",
+ ".c.clr", ".c.clr.acq", ".c.nc", ".call", ".cexit", ".cloop", ".clr",
+ ".ctop",
+ ".d", ".dc.dc", ".dc.nt", ".dpnt", ".dptk",
+ ".e", ".eq", ".excl", ".exit", ".exp",
+ ".f", ".fault", ".few", ".fill", ".fx", ".fxu",
+ ".g", ".ga", ".ge", ".gt",
+ ".h", ".hu",
+ ".i", ".ia", ".imp",
+ ".l", ".le", ".loop", ".lr", ".lt", ".ltu",
+ ".m", ".many",
+ ".nc", ".ne", ".neq", ".nl", ".nle", ".nlt", ".nm", ".nr", ".ns",
+ ".nt.dc", ".nt.nt", ".nt.tk", ".nt1", ".nt2", ".nta", ".nz",
+ ".or", ".or.andcm", ".ord",
+ ".pr",
+ ".r", ".raz", ".rel", ".ret", ".rw",
+ ".s", ".s0", ".s1", ".s2", ".s3", ".sa", ".se", ".sig", ".spill",
+ ".spnt", ".sptk", ".sss",
+ ".tk.dc", ".tk.nt", ".tk.tk", ".trunc",
+ ".u", ".unc", ".unord", ".uss", ".uus", ".uuu",
+ ".w", ".wexit", ".wtop",
+ ".x", ".xf",
+ ".z"
+};
+
+void
+asm_completer(const struct asm_cmpltr *c, char *buf)
+{
+ strcpy(buf, asm_completers[c->c_type]);
+}
+
+void
+asm_mnemonic(enum asm_op op, char *buf)
+{
+ strcpy(buf, asm_mnemonics[(op < ASM_OP_INTERNAL_OPCODES) ? op : 0]);
+}
+
/*
 * Format operand *o into buf using ia64 assembler syntax.  ip is the
 * bundle address and is used only for ASM_OPER_DISP, where the stored
 * IP-relative displacement is turned into an absolute hex address.
 * buf is assumed to be large enough for the longest operand text (the
 * caller in this file uses a 32-byte buffer) -- TODO confirm bound.
 */
void
asm_operand(const struct asm_oper *o, char *buf, uint64_t ip)
{
	const char *n;

	/*
	 * n stays NULL for operands that are fully printed inside the
	 * switch; otherwise it names an (indirect) register file and the
	 * common tail below prints "n[rN]".
	 */
	n = NULL;
	switch (o->o_type) {
	case ASM_OPER_AREG:
		/* Application registers: named when known, else "arN". */
		switch ((int)o->o_value) {
		case AR_K0: n = "k0"; break;
		case AR_K1: n = "k1"; break;
		case AR_K2: n = "k2"; break;
		case AR_K3: n = "k3"; break;
		case AR_K4: n = "k4"; break;
		case AR_K5: n = "k5"; break;
		case AR_K6: n = "k6"; break;
		case AR_K7: n = "k7"; break;
		case AR_RSC: n = "rsc"; break;
		case AR_BSP: n = "bsp"; break;
		case AR_BSPSTORE: n = "bspstore"; break;
		case AR_RNAT: n = "rnat"; break;
		case AR_FCR: n = "fcr"; break;
		case AR_EFLAG: n = "eflag"; break;
		case AR_CSD: n = "csd"; break;
		case AR_SSD: n = "ssd"; break;
		case AR_CFLG: n = "cflg"; break;
		case AR_FSR: n = "fsr"; break;
		case AR_FIR: n = "fir"; break;
		case AR_FDR: n = "fdr"; break;
		case AR_CCV: n = "ccv"; break;
		case AR_UNAT: n = "unat"; break;
		case AR_FPSR: n = "fpsr"; break;
		case AR_ITC: n = "itc"; break;
		case AR_PFS: n = "pfs"; break;
		case AR_LC: n = "lc"; break;
		case AR_EC: n = "ec"; break;
		default:
			sprintf(buf, "ar%d", (int)o->o_value);
			return;
		}
		sprintf(buf, "ar.%s", n);
		return;
	case ASM_OPER_BREG:
		/* Branch registers; b0 is printed by its alias "rp". */
		if (o->o_value != 0)
			sprintf(buf, "b%d", (int)o->o_value);
		else
			strcpy(buf, "rp");
		return;
	case ASM_OPER_CPUID:
		n = "cpuid";
		break;
	case ASM_OPER_CREG:
		/* Control registers: named when known, else "crN". */
		switch ((int)o->o_value) {
		case CR_DCR: n = "dcr"; break;
		case CR_ITM: n = "itm"; break;
		case CR_IVA: n = "iva"; break;
		case CR_PTA: n = "pta"; break;
		case CR_IPSR: n = "ipsr"; break;
		case CR_ISR: n = "isr"; break;
		case CR_IIP: n = "iip"; break;
		case CR_IFA: n = "ifa"; break;
		case CR_ITIR: n = "itir"; break;
		case CR_IIPA: n = "iipa"; break;
		case CR_IFS: n = "ifs"; break;
		case CR_IIM: n = "iim"; break;
		case CR_IHA: n = "iha"; break;
		case CR_LID: n = "lid"; break;
		case CR_IVR: n = "ivr"; break;
		case CR_TPR: n = "tpr"; break;
		case CR_EOI: n = "eoi"; break;
		case CR_IRR0: n = "irr0"; break;
		case CR_IRR1: n = "irr1"; break;
		case CR_IRR2: n = "irr2"; break;
		case CR_IRR3: n = "irr3"; break;
		case CR_ITV: n = "itv"; break;
		case CR_PMV: n = "pmv"; break;
		case CR_CMCV: n = "cmcv"; break;
		case CR_LRR0: n = "lrr0"; break;
		case CR_LRR1: n = "lrr1"; break;
		default:
			sprintf(buf, "cr%d", (int)o->o_value);
			return;
		}
		sprintf(buf, "cr.%s", n);
		return;
	case ASM_OPER_DBR:
		n = "dbr";
		break;
	case ASM_OPER_DISP:
		/* Absolute target address (%lx assumes 64-bit long). */
		sprintf(buf, "%lx", ip + o->o_value);
		return;
	case ASM_OPER_DTR:
		n = "dtr";
		break;
	case ASM_OPER_FREG:
		sprintf(buf, "f%d", (int)o->o_value);
		return;
	case ASM_OPER_GREG:
		/* Plain general register: handled by the common tail. */
		break;
	case ASM_OPER_IBR:
		n = "ibr";
		break;
	case ASM_OPER_IMM:
		sprintf(buf, "0x%lx", o->o_value);
		return;
	case ASM_OPER_IP:
		strcpy(buf, "ip");
		return;
	case ASM_OPER_ITR:
		n = "itr";
		break;
	case ASM_OPER_MEM:
		/* Memory operand: empty name yields bare "[rN]". */
		n = "";
		break;
	case ASM_OPER_MSR:
		n = "msr";
		break;
	case ASM_OPER_PKR:
		n = "pkr";
		break;
	case ASM_OPER_PMC:
		n = "pmc";
		break;
	case ASM_OPER_PMD:
		n = "pmd";
		break;
	case ASM_OPER_PR:
		strcpy(buf, "pr");
		return;
	case ASM_OPER_PR_ROT:
		strcpy(buf, "pr.rot");
		return;
	case ASM_OPER_PREG:
		sprintf(buf, "p%d", (int)o->o_value);
		return;
	case ASM_OPER_PSR:
		strcpy(buf, "psr");
		return;
	case ASM_OPER_PSR_L:
		strcpy(buf, "psr.l");
		return;
	case ASM_OPER_PSR_UM:
		strcpy(buf, "psr.um");
		return;
	case ASM_OPER_RR:
		n = "rr";
		break;
	case ASM_OPER_NONE:
		KASSERT(0, ("foo"));
		break;
	}
	/*
	 * Common tail: print the general register in o_value, using the
	 * conventional aliases gp (r1), sp (r12) and tp (r13).  When n is
	 * set, the register indexes an (indirect) register file and is
	 * wrapped as "n[...]".
	 */
	if (n != NULL)
		buf += sprintf(buf, "%s[", n);
	switch ((int)o->o_value) {
	case 1: strcpy(buf, "gp"); buf += 2; break;
	case 12: strcpy(buf, "sp"); buf += 2; break;
	case 13: strcpy(buf, "tp"); buf += 2; break;
	default: buf += sprintf(buf, "r%d", (int)o->o_value); break;
	}
	if (n != NULL)
		strcpy(buf, "]");
}
+
/*
 * Print all three instruction slots of bundle b located at address ip.
 */
void
asm_print_bundle(const struct asm_bundle *b, uint64_t ip)
{
	int slot;

	for (slot = 0; slot < 3; slot++)
		asm_print_inst(b, slot, ip);
}
+
+void
+asm_print_inst(const struct asm_bundle *b, int slot, uint64_t ip)
+{
+ char buf[32];
+ const struct asm_inst *i;
+ const char *tmpl;
+ int n, w;
+
+ tmpl = b->b_templ + slot;
+ if (*tmpl == ';' || (slot == 2 && b->b_templ[1] == ';'))
+ tmpl++;
+ i = b->b_inst + slot;
+ if (*tmpl == 'L' || i->i_op == ASM_OP_NONE)
+ return;
+
+ /* Address + slot. */
+ printf("%lx[%c] ", ip + slot, *tmpl);
+
+ /* Predicate. */
+ if (i->i_oper[0].o_value != 0) {
+ asm_operand(i->i_oper+0, buf, ip);
+ w = printf("(%s)", buf);
+ } else
+ w = 0;
+ while (w++ < 8)
+ printf(" ");
+
+ /* Mnemonic & completers. */
+ asm_mnemonic(i->i_op, buf);
+ w = printf(buf);
+ n = 0;
+ while (n < i->i_ncmpltrs) {
+ asm_completer(i->i_cmpltr + n, buf);
+ w += printf(buf);
+ n++;
+ }
+ while (w++ < 15)
+ printf(" ");
+ printf(" ");
+
+ /* Operands. */
+ n = 1;
+ while (n < 7 && i->i_oper[n].o_type != ASM_OPER_NONE) {
+ if (n > 1) {
+ if (n == i->i_srcidx)
+ printf(" = ");
+ else
+ printf(", ");
+ }
+ asm_operand(i->i_oper + n, buf, ip);
+ printf(buf);
+ n++;
+ }
+ printf("\n");
+}
diff --git a/sys/ia64/disasm/disasm_int.h b/sys/ia64/disasm/disasm_int.h
new file mode 100644
index 0000000..37e5371
--- /dev/null
+++ b/sys/ia64/disasm/disasm_int.h
@@ -0,0 +1,216 @@
+/*
+ * Copyright (c) 2000-2003 Marcel Moolenaar
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+ * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+ * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * $FreeBSD$
+ */
+
#ifndef _DISASM_INT_H_
#define _DISASM_INT_H_

#ifdef _DISASM_H_
#error Include disasm_int.h before disasm.h
#endif

/*
 * Instruction bundle specifics.
 * An ia64 bundle is 128 bits: a 5-bit template followed by three 41-bit
 * instruction slots (3*41 + 5 = 128, i.e. 16 bytes).
 */
#define TMPL_BITS 5
#define SLOT_BITS 41
#define SLOT_COUNT 3

#define BUNDLE_SIZE (SLOT_COUNT * SLOT_BITS + TMPL_BITS)
#define BUNDLE_BYTES ((BUNDLE_SIZE+7) >> 3)
#define TMPL_MASK ((1 << TMPL_BITS) - 1)
#define SLOT_MASK ((1ULL << SLOT_BITS) - 1ULL)
/* Template field: the low 5 bits of the bundle's first byte. */
#define TMPL(p) (*(const uint8_t*)(p) & TMPL_MASK)
/* i-th 32-bit word of the bundle, widened to 64 bits. */
#define _U32(p,i) ((uint64_t)(((const uint32_t*)(p))[i]))
/* 64-bit window formed from words i and i+1. */
#define _SLOT(p,i) (_U32(p,i) | (_U32(p,(i)+1)<<32))
/*
 * Extract slot i (0..2).  Slot i starts at bundle bit 5 + 41*i; the
 * shift 5 + 9*i repositions it within the 64-bit window starting at
 * word i (bit 32*i).
 */
#define SLOT(p,i) ((_SLOT(p,i) >> (TMPL_BITS+((i)<<3)+(i))) & SLOT_MASK)

/*
 * Instruction specifics
 */
/* Extract an l-bit field at bit offset o of 41-bit instruction i. */
#define _FLD64(i,o,l) ((i >> o) & ((1LL << l) - 1LL))
#define FIELD(i,o,l) ((uint32_t)_FLD64(i,o,l))
/* Major opcode: bits 37-40. */
#define OPCODE(i) FIELD(i, 37, 4)
/* Qualifying predicate: bits 0-5. */
#define QP_BITS 6
#define QP(i) FIELD(i, 0, QP_BITS)
/* r-th 7-bit register field (r = 1, 2, ...), located above the QP. */
#define REG_BITS 7
#define REG(i,r) FIELD(i, ((r) - 1) * REG_BITS + QP_BITS, REG_BITS)
+
+/*
+ * Opcodes used internally as sentinels.  They extend enum asm_op past
+ * ASM_OP_INTERNAL_OPCODES and either denote a lack of more specific
+ * information or preserve additional state (typically an opcode plus
+ * its completer variant, e.g. ASM_OP_LD1_ACQ) that must be passed
+ * around for later use.
+ */
+#define ASM_ADDITIONAL_OPCODES \
+ ASM_OP_INTERNAL_OPCODES, \
+ ASM_OP_BR_CALL, ASM_OP_BR_CEXIT, ASM_OP_BR_CLOOP, \
+ ASM_OP_BR_COND, ASM_OP_BR_CTOP, ASM_OP_BR_IA, ASM_OP_BR_RET, \
+ ASM_OP_BR_WEXIT, ASM_OP_BR_WTOP, \
+ ASM_OP_BREAK_B, ASM_OP_BREAK_F, ASM_OP_BREAK_I, ASM_OP_BREAK_M, \
+ ASM_OP_BREAK_X, \
+ ASM_OP_BRL_COND, ASM_OP_BRL_CALL, \
+ ASM_OP_BRP_, ASM_OP_BRP_RET, \
+ ASM_OP_BSW_0, ASM_OP_BSW_1, \
+ ASM_OP_CHK_A_CLR, ASM_OP_CHK_A_NC, ASM_OP_CHK_S, \
+ ASM_OP_CHK_S_I, ASM_OP_CHK_S_M, \
+ ASM_OP_CLRRRB_, ASM_OP_CLRRRB_PR, \
+ ASM_OP_CMP_EQ, ASM_OP_CMP_EQ_AND, ASM_OP_CMP_EQ_OR, \
+ ASM_OP_CMP_EQ_OR_ANDCM, ASM_OP_CMP_EQ_UNC, ASM_OP_CMP_GE_AND, \
+ ASM_OP_CMP_GE_OR, ASM_OP_CMP_GE_OR_ANDCM, ASM_OP_CMP_GT_AND, \
+ ASM_OP_CMP_GT_OR, ASM_OP_CMP_GT_OR_ANDCM, ASM_OP_CMP_LE_AND, \
+ ASM_OP_CMP_LE_OR, ASM_OP_CMP_LE_OR_ANDCM, ASM_OP_CMP_LT, \
+ ASM_OP_CMP_LT_AND, ASM_OP_CMP_LT_OR, ASM_OP_CMP_LT_OR_ANDCM, \
+ ASM_OP_CMP_LT_UNC, ASM_OP_CMP_LTU, ASM_OP_CMP_LTU_UNC, \
+ ASM_OP_CMP_NE_AND, ASM_OP_CMP_NE_OR, ASM_OP_CMP_NE_OR_ANDCM, \
+ ASM_OP_CMP4_EQ, ASM_OP_CMP4_EQ_AND, ASM_OP_CMP4_EQ_OR, \
+ ASM_OP_CMP4_EQ_OR_ANDCM, ASM_OP_CMP4_EQ_UNC, ASM_OP_CMP4_GE_AND,\
+ ASM_OP_CMP4_GE_OR, ASM_OP_CMP4_GE_OR_ANDCM, ASM_OP_CMP4_GT_AND, \
+ ASM_OP_CMP4_GT_OR, ASM_OP_CMP4_GT_OR_ANDCM, ASM_OP_CMP4_LE_AND, \
+ ASM_OP_CMP4_LE_OR, ASM_OP_CMP4_LE_OR_ANDCM, ASM_OP_CMP4_LT, \
+ ASM_OP_CMP4_LT_AND, ASM_OP_CMP4_LT_OR, ASM_OP_CMP4_LT_OR_ANDCM, \
+ ASM_OP_CMP4_LT_UNC, ASM_OP_CMP4_LTU, ASM_OP_CMP4_LTU_UNC, \
+ ASM_OP_CMP4_NE_AND, ASM_OP_CMP4_NE_OR, ASM_OP_CMP4_NE_OR_ANDCM, \
+ ASM_OP_CMP8XCHG16_ACQ, ASM_OP_CMP8XCHG16_REL, \
+ ASM_OP_CMPXCHG1_ACQ, ASM_OP_CMPXCHG1_REL, \
+ ASM_OP_CMPXCHG2_ACQ, ASM_OP_CMPXCHG2_REL, \
+ ASM_OP_CMPXCHG4_ACQ, ASM_OP_CMPXCHG4_REL, \
+ ASM_OP_CMPXCHG8_ACQ, ASM_OP_CMPXCHG8_REL, \
+ ASM_OP_CZX1_L, ASM_OP_CZX1_R, \
+ ASM_OP_CZX2_L, ASM_OP_CZX2_R, \
+ ASM_OP_DEP_, ASM_OP_DEP_Z, \
+ ASM_OP_FC_, ASM_OP_FC_I, \
+ ASM_OP_FCLASS_M, \
+ ASM_OP_FCVT_FX, ASM_OP_FCVT_FX_TRUNC, ASM_OP_FCVT_FXU, \
+ ASM_OP_FCVT_FXU_TRUNC, ASM_OP_FCVT_XF, \
+ ASM_OP_FETCHADD4_ACQ, ASM_OP_FETCHADD4_REL, \
+ ASM_OP_FETCHADD8_ACQ, ASM_OP_FETCHADD8_REL, \
+ ASM_OP_FMA_, ASM_OP_FMA_D, ASM_OP_FMA_S, \
+ ASM_OP_FMERGE_NS, ASM_OP_FMERGE_S, ASM_OP_FMERGE_SE, \
+ ASM_OP_FMIX_L, ASM_OP_FMIX_LR, ASM_OP_FMIX_R, \
+ ASM_OP_FMS_, ASM_OP_FMS_D, ASM_OP_FMS_S, \
+ ASM_OP_FNMA_, ASM_OP_FNMA_D, ASM_OP_FNMA_S, \
+ ASM_OP_FPCMP_EQ, ASM_OP_FPCMP_LE, ASM_OP_FPCMP_LT, \
+ ASM_OP_FPCMP_NEQ, ASM_OP_FPCMP_NLE, ASM_OP_FPCMP_NLT, \
+ ASM_OP_FPCMP_ORD, ASM_OP_FPCMP_UNORD, \
+ ASM_OP_FPCVT_FX, ASM_OP_FPCVT_FX_TRUNC, ASM_OP_FPCVT_FXU, \
+ ASM_OP_FPCVT_FXU_TRUNC, \
+ ASM_OP_FPMERGE_NS, ASM_OP_FPMERGE_S, ASM_OP_FPMERGE_SE, \
+ ASM_OP_FSWAP_, ASM_OP_FSWAP_NL, ASM_OP_FSWAP_NR, \
+ ASM_OP_FSXT_L, ASM_OP_FSXT_R, \
+ ASM_OP_GETF_D, ASM_OP_GETF_EXP, ASM_OP_GETF_S, ASM_OP_GETF_SIG, \
+ ASM_OP_INVALA_, ASM_OP_INVALA_E, \
+ ASM_OP_ITC_D, ASM_OP_ITC_I, \
+ ASM_OP_ITR_D, ASM_OP_ITR_I, \
+ ASM_OP_LD1_, ASM_OP_LD1_A, ASM_OP_LD1_ACQ, ASM_OP_LD1_BIAS, \
+ ASM_OP_LD1_C_CLR, ASM_OP_LD1_C_CLR_ACQ, ASM_OP_LD1_C_NC, \
+ ASM_OP_LD1_S, ASM_OP_LD1_SA, \
+ ASM_OP_LD16_, ASM_OP_LD16_ACQ, \
+ ASM_OP_LD2_, ASM_OP_LD2_A, ASM_OP_LD2_ACQ, ASM_OP_LD2_BIAS, \
+ ASM_OP_LD2_C_CLR, ASM_OP_LD2_C_CLR_ACQ, ASM_OP_LD2_C_NC, \
+ ASM_OP_LD2_S, ASM_OP_LD2_SA, \
+ ASM_OP_LD4_, ASM_OP_LD4_A, ASM_OP_LD4_ACQ, ASM_OP_LD4_BIAS, \
+ ASM_OP_LD4_C_CLR, ASM_OP_LD4_C_CLR_ACQ, ASM_OP_LD4_C_NC, \
+ ASM_OP_LD4_S, ASM_OP_LD4_SA, \
+ ASM_OP_LD8_, ASM_OP_LD8_A, ASM_OP_LD8_ACQ, ASM_OP_LD8_BIAS, \
+ ASM_OP_LD8_C_CLR, ASM_OP_LD8_C_CLR_ACQ, ASM_OP_LD8_C_NC, \
+ ASM_OP_LD8_FILL, ASM_OP_LD8_S, ASM_OP_LD8_SA, \
+ ASM_OP_LDF_FILL, \
+ ASM_OP_LDF8_, ASM_OP_LDF8_A, ASM_OP_LDF8_C_CLR, \
+ ASM_OP_LDF8_C_NC, ASM_OP_LDF8_S, ASM_OP_LDF8_SA, \
+ ASM_OP_LDFD_, ASM_OP_LDFD_A, ASM_OP_LDFD_C_CLR, \
+ ASM_OP_LDFD_C_NC, ASM_OP_LDFD_S, ASM_OP_LDFD_SA, \
+ ASM_OP_LDFE_, ASM_OP_LDFE_A, ASM_OP_LDFE_C_CLR, \
+ ASM_OP_LDFE_C_NC, ASM_OP_LDFE_S, ASM_OP_LDFE_SA, \
+ ASM_OP_LDFP8_, ASM_OP_LDFP8_A, ASM_OP_LDFP8_C_CLR, \
+ ASM_OP_LDFP8_C_NC, ASM_OP_LDFP8_S, ASM_OP_LDFP8_SA, \
+ ASM_OP_LDFPD_, ASM_OP_LDFPD_A, ASM_OP_LDFPD_C_CLR, \
+ ASM_OP_LDFPD_C_NC, ASM_OP_LDFPD_S, ASM_OP_LDFPD_SA, \
+ ASM_OP_LDFPS_, ASM_OP_LDFPS_A, ASM_OP_LDFPS_C_CLR, \
+ ASM_OP_LDFPS_C_NC, ASM_OP_LDFPS_S, ASM_OP_LDFPS_SA, \
+ ASM_OP_LDFS_, ASM_OP_LDFS_A, ASM_OP_LDFS_C_CLR, \
+ ASM_OP_LDFS_C_NC, ASM_OP_LDFS_S, ASM_OP_LDFS_SA, \
+ ASM_OP_LFETCH_, ASM_OP_LFETCH_EXCL, ASM_OP_LFETCH_FAULT, \
+ ASM_OP_LFETCH_FAULT_EXCL, \
+ ASM_OP_MF_, ASM_OP_MF_A, \
+ ASM_OP_MIX1_L, ASM_OP_MIX1_R, \
+ ASM_OP_MIX2_L, ASM_OP_MIX2_R, \
+ ASM_OP_MIX4_L, ASM_OP_MIX4_R, \
+ ASM_OP_MOV_, ASM_OP_MOV_CPUID, ASM_OP_MOV_DBR, ASM_OP_MOV_I, \
+ ASM_OP_MOV_IBR, ASM_OP_MOV_IP, ASM_OP_MOV_M, ASM_OP_MOV_MSR, \
+ ASM_OP_MOV_PKR, ASM_OP_MOV_PMC, ASM_OP_MOV_PMD, ASM_OP_MOV_PR, \
+ ASM_OP_MOV_PSR, ASM_OP_MOV_PSR_L, ASM_OP_MOV_PSR_UM, \
+ ASM_OP_MOV_RET, ASM_OP_MOV_RR, \
+ ASM_OP_NOP_B, ASM_OP_NOP_F, ASM_OP_NOP_I, ASM_OP_NOP_M, \
+ ASM_OP_NOP_X, \
+ ASM_OP_PACK2_SSS, ASM_OP_PACK2_USS, \
+ ASM_OP_PACK4_SSS, \
+ ASM_OP_PADD1_, ASM_OP_PADD1_SSS, ASM_OP_PADD1_UUS, \
+ ASM_OP_PADD1_UUU, \
+ ASM_OP_PADD2_, ASM_OP_PADD2_SSS, ASM_OP_PADD2_UUS, \
+ ASM_OP_PADD2_UUU, \
+ ASM_OP_PAVG1_, ASM_OP_PAVG1_RAZ, \
+ ASM_OP_PAVG2_, ASM_OP_PAVG2_RAZ, \
+ ASM_OP_PCMP1_EQ, ASM_OP_PCMP1_GT, \
+ ASM_OP_PCMP2_EQ, ASM_OP_PCMP2_GT, \
+ ASM_OP_PCMP4_EQ, ASM_OP_PCMP4_GT, \
+ ASM_OP_PMAX1_U, \
+ ASM_OP_PMIN1_U, \
+ ASM_OP_PMPY2_L, ASM_OP_PMPY2_R, \
+ ASM_OP_PMPYSHR2_, ASM_OP_PMPYSHR2_U, \
+ ASM_OP_PROBE_R, ASM_OP_PROBE_R_FAULT, ASM_OP_PROBE_RW_FAULT, \
+ ASM_OP_PROBE_W, ASM_OP_PROBE_W_FAULT, \
+ ASM_OP_PSHR2_, ASM_OP_PSHR2_U, \
+ ASM_OP_PSHR4_, ASM_OP_PSHR4_U, \
+ ASM_OP_PSUB1_, ASM_OP_PSUB1_SSS, ASM_OP_PSUB1_UUS, \
+ ASM_OP_PSUB1_UUU, \
+ ASM_OP_PSUB2_, ASM_OP_PSUB2_SSS, ASM_OP_PSUB2_UUS, \
+ ASM_OP_PSUB2_UUU, \
+ ASM_OP_PTC_E, ASM_OP_PTC_G, ASM_OP_PTC_GA, ASM_OP_PTC_L, \
+ ASM_OP_PTR_D, ASM_OP_PTR_I, \
+ ASM_OP_SETF_EXP, ASM_OP_SETF_D, ASM_OP_SETF_S, ASM_OP_SETF_SIG, \
+ ASM_OP_SHR_, ASM_OP_SHR_U, \
+ ASM_OP_SRLZ_D, ASM_OP_SRLZ_I, \
+ ASM_OP_ST1_, ASM_OP_ST1_REL, \
+ ASM_OP_ST16_, ASM_OP_ST16_REL, \
+ ASM_OP_ST2_, ASM_OP_ST2_REL, \
+ ASM_OP_ST4_, ASM_OP_ST4_REL, \
+ ASM_OP_ST8_, ASM_OP_ST8_REL, ASM_OP_ST8_SPILL, \
+ ASM_OP_STF_SPILL, \
+ ASM_OP_SYNC_I, \
+ ASM_OP_TBIT_NZ_AND, ASM_OP_TBIT_NZ_OR, ASM_OP_TBIT_NZ_OR_ANDCM, \
+ ASM_OP_TBIT_Z, ASM_OP_TBIT_Z_AND, ASM_OP_TBIT_Z_OR, \
+ ASM_OP_TBIT_Z_OR_ANDCM, ASM_OP_TBIT_Z_UNC, \
+ ASM_OP_TNAT_NZ_AND, ASM_OP_TNAT_NZ_OR, ASM_OP_TNAT_NZ_OR_ANDCM, \
+ ASM_OP_TNAT_Z, ASM_OP_TNAT_Z_AND, ASM_OP_TNAT_Z_OR, \
+ ASM_OP_TNAT_Z_OR_ANDCM, ASM_OP_TNAT_Z_UNC, \
+ ASM_OP_UNPACK1_H, ASM_OP_UNPACK1_L, \
+ ASM_OP_UNPACK2_H, ASM_OP_UNPACK2_L, \
+ ASM_OP_UNPACK4_H, ASM_OP_UNPACK4_L, \
+ ASM_OP_XMA_H, ASM_OP_XMA_HU, ASM_OP_XMA_L, \
+ ASM_OP_NUMBER_OF_OPCODES
+
+#endif /* _DISASM_INT_H_ */
OpenPOWER on IntegriCloud