2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
7 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
8 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
10 * This library is free software; you can redistribute it and/or
11 * modify it under the terms of the GNU Lesser General Public
12 * License as published by the Free Software Foundation; either
13 * version 2 of the License, or (at your option) any later version.
15 * This library is distributed in the hope that it will be useful,
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * Lesser General Public License for more details.
20 * You should have received a copy of the GNU Lesser General Public
21 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
24 #include "qemu/osdep.h"
27 #include "disas/disas.h"
28 #include "exec/exec-all.h"
30 #include "exec/cpu_ldst.h"
31 #include "hw/mips/cpudevs.h"
33 #include "exec/helper-proto.h"
34 #include "exec/helper-gen.h"
35 #include "exec/semihost.h"
37 #include "target/mips/trace.h"
38 #include "trace-tcg.h"
39 #include "exec/translator.h"
42 #define MIPS_DEBUG_DISAS 0
44 /* MIPS major opcodes */
45 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
48 /* indirect opcode tables */
49 OPC_SPECIAL = (0x00 << 26),
50 OPC_REGIMM = (0x01 << 26),
51 OPC_CP0 = (0x10 << 26),
52 OPC_CP1 = (0x11 << 26),
53 OPC_CP2 = (0x12 << 26),
54 OPC_CP3 = (0x13 << 26),
55 OPC_SPECIAL2 = (0x1C << 26),
56 OPC_SPECIAL3 = (0x1F << 26),
57 /* arithmetic with immediate */
58 OPC_ADDI = (0x08 << 26),
59 OPC_ADDIU = (0x09 << 26),
60 OPC_SLTI = (0x0A << 26),
61 OPC_SLTIU = (0x0B << 26),
62 /* logic with immediate */
63 OPC_ANDI = (0x0C << 26),
64 OPC_ORI = (0x0D << 26),
65 OPC_XORI = (0x0E << 26),
66 OPC_LUI = (0x0F << 26),
67 /* arithmetic with immediate */
68 OPC_DADDI = (0x18 << 26),
69 OPC_DADDIU = (0x19 << 26),
70 /* Jump and branches */
72 OPC_JAL = (0x03 << 26),
73 OPC_BEQ = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
74 OPC_BEQL = (0x14 << 26),
75 OPC_BNE = (0x05 << 26),
76 OPC_BNEL = (0x15 << 26),
77 OPC_BLEZ = (0x06 << 26),
78 OPC_BLEZL = (0x16 << 26),
79 OPC_BGTZ = (0x07 << 26),
80 OPC_BGTZL = (0x17 << 26),
81 OPC_JALX = (0x1D << 26),
82 OPC_DAUI = (0x1D << 26),
84 OPC_LDL = (0x1A << 26),
85 OPC_LDR = (0x1B << 26),
86 OPC_LB = (0x20 << 26),
87 OPC_LH = (0x21 << 26),
88 OPC_LWL = (0x22 << 26),
89 OPC_LW = (0x23 << 26),
90 OPC_LWPC = OPC_LW | 0x5,
91 OPC_LBU = (0x24 << 26),
92 OPC_LHU = (0x25 << 26),
93 OPC_LWR = (0x26 << 26),
94 OPC_LWU = (0x27 << 26),
95 OPC_SB = (0x28 << 26),
96 OPC_SH = (0x29 << 26),
97 OPC_SWL = (0x2A << 26),
98 OPC_SW = (0x2B << 26),
99 OPC_SDL = (0x2C << 26),
100 OPC_SDR = (0x2D << 26),
101 OPC_SWR = (0x2E << 26),
102 OPC_LL = (0x30 << 26),
103 OPC_LLD = (0x34 << 26),
104 OPC_LD = (0x37 << 26),
105 OPC_LDPC = OPC_LD | 0x5,
106 OPC_SC = (0x38 << 26),
107 OPC_SCD = (0x3C << 26),
108 OPC_SD = (0x3F << 26),
109 /* Floating point load/store */
110 OPC_LWC1 = (0x31 << 26),
111 OPC_LWC2 = (0x32 << 26),
112 OPC_LDC1 = (0x35 << 26),
113 OPC_LDC2 = (0x36 << 26),
114 OPC_SWC1 = (0x39 << 26),
115 OPC_SWC2 = (0x3A << 26),
116 OPC_SDC1 = (0x3D << 26),
117 OPC_SDC2 = (0x3E << 26),
118 /* Compact Branches */
119 OPC_BLEZALC = (0x06 << 26),
120 OPC_BGEZALC = (0x06 << 26),
121 OPC_BGEUC = (0x06 << 26),
122 OPC_BGTZALC = (0x07 << 26),
123 OPC_BLTZALC = (0x07 << 26),
124 OPC_BLTUC = (0x07 << 26),
125 OPC_BOVC = (0x08 << 26),
126 OPC_BEQZALC = (0x08 << 26),
127 OPC_BEQC = (0x08 << 26),
128 OPC_BLEZC = (0x16 << 26),
129 OPC_BGEZC = (0x16 << 26),
130 OPC_BGEC = (0x16 << 26),
131 OPC_BGTZC = (0x17 << 26),
132 OPC_BLTZC = (0x17 << 26),
133 OPC_BLTC = (0x17 << 26),
134 OPC_BNVC = (0x18 << 26),
135 OPC_BNEZALC = (0x18 << 26),
136 OPC_BNEC = (0x18 << 26),
137 OPC_BC = (0x32 << 26),
138 OPC_BEQZC = (0x36 << 26),
139 OPC_JIC = (0x36 << 26),
140 OPC_BALC = (0x3A << 26),
141 OPC_BNEZC = (0x3E << 26),
142 OPC_JIALC = (0x3E << 26),
143 /* MDMX ASE specific */
144 OPC_MDMX = (0x1E << 26),
145 /* MSA ASE, same as MDMX */
147 /* Cache and prefetch */
148 OPC_CACHE = (0x2F << 26),
149 OPC_PREF = (0x33 << 26),
150 /* PC-relative address computation / loads */
151 OPC_PCREL = (0x3B << 26),
154 /* PC-relative address computation / loads */
155 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
156 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
158 /* Instructions determined by bits 19 and 20 */
159 OPC_ADDIUPC = OPC_PCREL | (0 << 19),
160 R6_OPC_LWPC = OPC_PCREL | (1 << 19),
161 OPC_LWUPC = OPC_PCREL | (2 << 19),
163 /* Instructions determined by bits 16 ... 20 */
164 OPC_AUIPC = OPC_PCREL | (0x1e << 16),
165 OPC_ALUIPC = OPC_PCREL | (0x1f << 16),
168 R6_OPC_LDPC = OPC_PCREL | (6 << 18),
171 /* MIPS special opcodes */
172 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
176 OPC_SLL = 0x00 | OPC_SPECIAL,
177 /* NOP is SLL r0, r0, 0 */
178 /* SSNOP is SLL r0, r0, 1 */
179 /* EHB is SLL r0, r0, 3 */
180 OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
181 OPC_ROTR = OPC_SRL | (1 << 21),
182 OPC_SRA = 0x03 | OPC_SPECIAL,
183 OPC_SLLV = 0x04 | OPC_SPECIAL,
184 OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
185 OPC_ROTRV = OPC_SRLV | (1 << 6),
186 OPC_SRAV = 0x07 | OPC_SPECIAL,
187 OPC_DSLLV = 0x14 | OPC_SPECIAL,
188 OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
189 OPC_DROTRV = OPC_DSRLV | (1 << 6),
190 OPC_DSRAV = 0x17 | OPC_SPECIAL,
191 OPC_DSLL = 0x38 | OPC_SPECIAL,
192 OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
193 OPC_DROTR = OPC_DSRL | (1 << 21),
194 OPC_DSRA = 0x3B | OPC_SPECIAL,
195 OPC_DSLL32 = 0x3C | OPC_SPECIAL,
196 OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
197 OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
198 OPC_DSRA32 = 0x3F | OPC_SPECIAL,
199 /* Multiplication / division */
200 OPC_MULT = 0x18 | OPC_SPECIAL,
201 OPC_MULTU = 0x19 | OPC_SPECIAL,
202 OPC_DIV = 0x1A | OPC_SPECIAL,
203 OPC_DIVU = 0x1B | OPC_SPECIAL,
204 OPC_DMULT = 0x1C | OPC_SPECIAL,
205 OPC_DMULTU = 0x1D | OPC_SPECIAL,
206 OPC_DDIV = 0x1E | OPC_SPECIAL,
207 OPC_DDIVU = 0x1F | OPC_SPECIAL,
209 /* 2 registers arithmetic / logic */
210 OPC_ADD = 0x20 | OPC_SPECIAL,
211 OPC_ADDU = 0x21 | OPC_SPECIAL,
212 OPC_SUB = 0x22 | OPC_SPECIAL,
213 OPC_SUBU = 0x23 | OPC_SPECIAL,
214 OPC_AND = 0x24 | OPC_SPECIAL,
215 OPC_OR = 0x25 | OPC_SPECIAL,
216 OPC_XOR = 0x26 | OPC_SPECIAL,
217 OPC_NOR = 0x27 | OPC_SPECIAL,
218 OPC_SLT = 0x2A | OPC_SPECIAL,
219 OPC_SLTU = 0x2B | OPC_SPECIAL,
220 OPC_DADD = 0x2C | OPC_SPECIAL,
221 OPC_DADDU = 0x2D | OPC_SPECIAL,
222 OPC_DSUB = 0x2E | OPC_SPECIAL,
223 OPC_DSUBU = 0x2F | OPC_SPECIAL,
225 OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
226 OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
228 OPC_TGE = 0x30 | OPC_SPECIAL,
229 OPC_TGEU = 0x31 | OPC_SPECIAL,
230 OPC_TLT = 0x32 | OPC_SPECIAL,
231 OPC_TLTU = 0x33 | OPC_SPECIAL,
232 OPC_TEQ = 0x34 | OPC_SPECIAL,
233 OPC_TNE = 0x36 | OPC_SPECIAL,
234 /* HI / LO registers load & stores */
235 OPC_MFHI = 0x10 | OPC_SPECIAL,
236 OPC_MTHI = 0x11 | OPC_SPECIAL,
237 OPC_MFLO = 0x12 | OPC_SPECIAL,
238 OPC_MTLO = 0x13 | OPC_SPECIAL,
239 /* Conditional moves */
240 OPC_MOVZ = 0x0A | OPC_SPECIAL,
241 OPC_MOVN = 0x0B | OPC_SPECIAL,
243 OPC_SELEQZ = 0x35 | OPC_SPECIAL,
244 OPC_SELNEZ = 0x37 | OPC_SPECIAL,
246 OPC_MOVCI = 0x01 | OPC_SPECIAL,
249 OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
250 OPC_SYSCALL = 0x0C | OPC_SPECIAL,
251 OPC_BREAK = 0x0D | OPC_SPECIAL,
252 OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
253 OPC_SYNC = 0x0F | OPC_SPECIAL,
255 OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
256 OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
257 OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
258 OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
261 /* R6 Multiply and Divide instructions have the same Opcode
262 and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
263 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
266 R6_OPC_MUL = OPC_MULT | (2 << 6),
267 R6_OPC_MUH = OPC_MULT | (3 << 6),
268 R6_OPC_MULU = OPC_MULTU | (2 << 6),
269 R6_OPC_MUHU = OPC_MULTU | (3 << 6),
270 R6_OPC_DIV = OPC_DIV | (2 << 6),
271 R6_OPC_MOD = OPC_DIV | (3 << 6),
272 R6_OPC_DIVU = OPC_DIVU | (2 << 6),
273 R6_OPC_MODU = OPC_DIVU | (3 << 6),
275 R6_OPC_DMUL = OPC_DMULT | (2 << 6),
276 R6_OPC_DMUH = OPC_DMULT | (3 << 6),
277 R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
278 R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
279 R6_OPC_DDIV = OPC_DDIV | (2 << 6),
280 R6_OPC_DMOD = OPC_DDIV | (3 << 6),
281 R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
282 R6_OPC_DMODU = OPC_DDIVU | (3 << 6),
284 R6_OPC_CLZ = 0x10 | OPC_SPECIAL,
285 R6_OPC_CLO = 0x11 | OPC_SPECIAL,
286 R6_OPC_DCLZ = 0x12 | OPC_SPECIAL,
287 R6_OPC_DCLO = 0x13 | OPC_SPECIAL,
288 R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,
290 OPC_LSA = 0x05 | OPC_SPECIAL,
291 OPC_DLSA = 0x15 | OPC_SPECIAL,
294 /* Multiplication variants of the vr54xx. */
295 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
298 OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
299 OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
300 OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
301 OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
302 OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
303 OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
304 OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
305 OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
306 OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
307 OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
308 OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
309 OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
310 OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
311 OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
314 /* REGIMM (rt field) opcodes */
315 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
318 OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
319 OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
320 OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
321 OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
322 OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
323 OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
324 OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
325 OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
326 OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
327 OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
328 OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
329 OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
330 OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
331 OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
332 OPC_SIGRIE = (0x17 << 16) | OPC_REGIMM,
333 OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,
335 OPC_DAHI = (0x06 << 16) | OPC_REGIMM,
336 OPC_DATI = (0x1e << 16) | OPC_REGIMM,
339 /* Special2 opcodes */
340 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
343 /* Multiply & xxx operations */
344 OPC_MADD = 0x00 | OPC_SPECIAL2,
345 OPC_MADDU = 0x01 | OPC_SPECIAL2,
346 OPC_MUL = 0x02 | OPC_SPECIAL2,
347 OPC_MSUB = 0x04 | OPC_SPECIAL2,
348 OPC_MSUBU = 0x05 | OPC_SPECIAL2,
350 OPC_MULT_G_2F = 0x10 | OPC_SPECIAL2,
351 OPC_DMULT_G_2F = 0x11 | OPC_SPECIAL2,
352 OPC_MULTU_G_2F = 0x12 | OPC_SPECIAL2,
353 OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
354 OPC_DIV_G_2F = 0x14 | OPC_SPECIAL2,
355 OPC_DDIV_G_2F = 0x15 | OPC_SPECIAL2,
356 OPC_DIVU_G_2F = 0x16 | OPC_SPECIAL2,
357 OPC_DDIVU_G_2F = 0x17 | OPC_SPECIAL2,
358 OPC_MOD_G_2F = 0x1c | OPC_SPECIAL2,
359 OPC_DMOD_G_2F = 0x1d | OPC_SPECIAL2,
360 OPC_MODU_G_2F = 0x1e | OPC_SPECIAL2,
361 OPC_DMODU_G_2F = 0x1f | OPC_SPECIAL2,
363 OPC_CLZ = 0x20 | OPC_SPECIAL2,
364 OPC_CLO = 0x21 | OPC_SPECIAL2,
365 OPC_DCLZ = 0x24 | OPC_SPECIAL2,
366 OPC_DCLO = 0x25 | OPC_SPECIAL2,
368 OPC_SDBBP = 0x3F | OPC_SPECIAL2,
371 /* Special3 opcodes */
372 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
375 OPC_EXT = 0x00 | OPC_SPECIAL3,
376 OPC_DEXTM = 0x01 | OPC_SPECIAL3,
377 OPC_DEXTU = 0x02 | OPC_SPECIAL3,
378 OPC_DEXT = 0x03 | OPC_SPECIAL3,
379 OPC_INS = 0x04 | OPC_SPECIAL3,
380 OPC_DINSM = 0x05 | OPC_SPECIAL3,
381 OPC_DINSU = 0x06 | OPC_SPECIAL3,
382 OPC_DINS = 0x07 | OPC_SPECIAL3,
383 OPC_FORK = 0x08 | OPC_SPECIAL3,
384 OPC_YIELD = 0x09 | OPC_SPECIAL3,
385 OPC_BSHFL = 0x20 | OPC_SPECIAL3,
386 OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
387 OPC_RDHWR = 0x3B | OPC_SPECIAL3,
390 OPC_MULT_G_2E = 0x18 | OPC_SPECIAL3,
391 OPC_MULTU_G_2E = 0x19 | OPC_SPECIAL3,
392 OPC_DIV_G_2E = 0x1A | OPC_SPECIAL3,
393 OPC_DIVU_G_2E = 0x1B | OPC_SPECIAL3,
394 OPC_DMULT_G_2E = 0x1C | OPC_SPECIAL3,
395 OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
396 OPC_DDIV_G_2E = 0x1E | OPC_SPECIAL3,
397 OPC_DDIVU_G_2E = 0x1F | OPC_SPECIAL3,
398 OPC_MOD_G_2E = 0x22 | OPC_SPECIAL3,
399 OPC_MODU_G_2E = 0x23 | OPC_SPECIAL3,
400 OPC_DMOD_G_2E = 0x26 | OPC_SPECIAL3,
401 OPC_DMODU_G_2E = 0x27 | OPC_SPECIAL3,
404 OPC_LX_DSP = 0x0A | OPC_SPECIAL3,
405 /* MIPS DSP Arithmetic */
406 OPC_ADDU_QB_DSP = 0x10 | OPC_SPECIAL3,
407 OPC_ADDU_OB_DSP = 0x14 | OPC_SPECIAL3,
408 OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
409 OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
410 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
411 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
413 OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
414 /* MIPS DSP GPR-Based Shift Sub-class */
415 OPC_SHLL_QB_DSP = 0x13 | OPC_SPECIAL3,
416 OPC_SHLL_OB_DSP = 0x17 | OPC_SPECIAL3,
417 /* MIPS DSP Multiply Sub-class insns */
418 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
419 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
420 OPC_DPA_W_PH_DSP = 0x30 | OPC_SPECIAL3,
421 OPC_DPAQ_W_QH_DSP = 0x34 | OPC_SPECIAL3,
422 /* DSP Bit/Manipulation Sub-class */
423 OPC_INSV_DSP = 0x0C | OPC_SPECIAL3,
424 OPC_DINSV_DSP = 0x0D | OPC_SPECIAL3,
425 /* MIPS DSP Append Sub-class */
426 OPC_APPEND_DSP = 0x31 | OPC_SPECIAL3,
427 OPC_DAPPEND_DSP = 0x35 | OPC_SPECIAL3,
428 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
429 OPC_EXTR_W_DSP = 0x38 | OPC_SPECIAL3,
430 OPC_DEXTR_W_DSP = 0x3C | OPC_SPECIAL3,
433 OPC_LWLE = 0x19 | OPC_SPECIAL3,
434 OPC_LWRE = 0x1A | OPC_SPECIAL3,
435 OPC_CACHEE = 0x1B | OPC_SPECIAL3,
436 OPC_SBE = 0x1C | OPC_SPECIAL3,
437 OPC_SHE = 0x1D | OPC_SPECIAL3,
438 OPC_SCE = 0x1E | OPC_SPECIAL3,
439 OPC_SWE = 0x1F | OPC_SPECIAL3,
440 OPC_SWLE = 0x21 | OPC_SPECIAL3,
441 OPC_SWRE = 0x22 | OPC_SPECIAL3,
442 OPC_PREFE = 0x23 | OPC_SPECIAL3,
443 OPC_LBUE = 0x28 | OPC_SPECIAL3,
444 OPC_LHUE = 0x29 | OPC_SPECIAL3,
445 OPC_LBE = 0x2C | OPC_SPECIAL3,
446 OPC_LHE = 0x2D | OPC_SPECIAL3,
447 OPC_LLE = 0x2E | OPC_SPECIAL3,
448 OPC_LWE = 0x2F | OPC_SPECIAL3,
451 R6_OPC_PREF = 0x35 | OPC_SPECIAL3,
452 R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
453 R6_OPC_LL = 0x36 | OPC_SPECIAL3,
454 R6_OPC_SC = 0x26 | OPC_SPECIAL3,
455 R6_OPC_LLD = 0x37 | OPC_SPECIAL3,
456 R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
460 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
463 OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
464 OPC_SEB = (0x10 << 6) | OPC_BSHFL,
465 OPC_SEH = (0x18 << 6) | OPC_BSHFL,
466 OPC_ALIGN = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
467 OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
468 OPC_BITSWAP = (0x00 << 6) | OPC_BSHFL /* 00000 */
472 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
475 OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
476 OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
477 OPC_DALIGN = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
478 OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
479 OPC_DBITSWAP = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
482 /* MIPS DSP REGIMM opcodes */
484 OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
485 OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
488 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
491 OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
492 OPC_LHX = (0x04 << 6) | OPC_LX_DSP,
493 OPC_LWX = (0x00 << 6) | OPC_LX_DSP,
494 OPC_LDX = (0x08 << 6) | OPC_LX_DSP,
497 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
499 /* MIPS DSP Arithmetic Sub-class */
500 OPC_ADDQ_PH = (0x0A << 6) | OPC_ADDU_QB_DSP,
501 OPC_ADDQ_S_PH = (0x0E << 6) | OPC_ADDU_QB_DSP,
502 OPC_ADDQ_S_W = (0x16 << 6) | OPC_ADDU_QB_DSP,
503 OPC_ADDU_QB = (0x00 << 6) | OPC_ADDU_QB_DSP,
504 OPC_ADDU_S_QB = (0x04 << 6) | OPC_ADDU_QB_DSP,
505 OPC_ADDU_PH = (0x08 << 6) | OPC_ADDU_QB_DSP,
506 OPC_ADDU_S_PH = (0x0C << 6) | OPC_ADDU_QB_DSP,
507 OPC_SUBQ_PH = (0x0B << 6) | OPC_ADDU_QB_DSP,
508 OPC_SUBQ_S_PH = (0x0F << 6) | OPC_ADDU_QB_DSP,
509 OPC_SUBQ_S_W = (0x17 << 6) | OPC_ADDU_QB_DSP,
510 OPC_SUBU_QB = (0x01 << 6) | OPC_ADDU_QB_DSP,
511 OPC_SUBU_S_QB = (0x05 << 6) | OPC_ADDU_QB_DSP,
512 OPC_SUBU_PH = (0x09 << 6) | OPC_ADDU_QB_DSP,
513 OPC_SUBU_S_PH = (0x0D << 6) | OPC_ADDU_QB_DSP,
514 OPC_ADDSC = (0x10 << 6) | OPC_ADDU_QB_DSP,
515 OPC_ADDWC = (0x11 << 6) | OPC_ADDU_QB_DSP,
516 OPC_MODSUB = (0x12 << 6) | OPC_ADDU_QB_DSP,
517 OPC_RADDU_W_QB = (0x14 << 6) | OPC_ADDU_QB_DSP,
518 /* MIPS DSP Multiply Sub-class insns */
519 OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
520 OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
521 OPC_MULQ_RS_PH = (0x1F << 6) | OPC_ADDU_QB_DSP,
522 OPC_MULEQ_S_W_PHL = (0x1C << 6) | OPC_ADDU_QB_DSP,
523 OPC_MULEQ_S_W_PHR = (0x1D << 6) | OPC_ADDU_QB_DSP,
524 OPC_MULQ_S_PH = (0x1E << 6) | OPC_ADDU_QB_DSP,
527 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
528 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
530 /* MIPS DSP Arithmetic Sub-class */
531 OPC_ADDUH_QB = (0x00 << 6) | OPC_ADDUH_QB_DSP,
532 OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
533 OPC_ADDQH_PH = (0x08 << 6) | OPC_ADDUH_QB_DSP,
534 OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
535 OPC_ADDQH_W = (0x10 << 6) | OPC_ADDUH_QB_DSP,
536 OPC_ADDQH_R_W = (0x12 << 6) | OPC_ADDUH_QB_DSP,
537 OPC_SUBUH_QB = (0x01 << 6) | OPC_ADDUH_QB_DSP,
538 OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
539 OPC_SUBQH_PH = (0x09 << 6) | OPC_ADDUH_QB_DSP,
540 OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
541 OPC_SUBQH_W = (0x11 << 6) | OPC_ADDUH_QB_DSP,
542 OPC_SUBQH_R_W = (0x13 << 6) | OPC_ADDUH_QB_DSP,
543 /* MIPS DSP Multiply Sub-class insns */
544 OPC_MUL_PH = (0x0C << 6) | OPC_ADDUH_QB_DSP,
545 OPC_MUL_S_PH = (0x0E << 6) | OPC_ADDUH_QB_DSP,
546 OPC_MULQ_S_W = (0x16 << 6) | OPC_ADDUH_QB_DSP,
547 OPC_MULQ_RS_W = (0x17 << 6) | OPC_ADDUH_QB_DSP,
550 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
552 /* MIPS DSP Arithmetic Sub-class */
553 OPC_ABSQ_S_QB = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
554 OPC_ABSQ_S_PH = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
555 OPC_ABSQ_S_W = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
556 OPC_PRECEQ_W_PHL = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
557 OPC_PRECEQ_W_PHR = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
558 OPC_PRECEQU_PH_QBL = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
559 OPC_PRECEQU_PH_QBR = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
560 OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
561 OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
562 OPC_PRECEU_PH_QBL = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
563 OPC_PRECEU_PH_QBR = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
564 OPC_PRECEU_PH_QBLA = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
565 OPC_PRECEU_PH_QBRA = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
566 /* DSP Bit/Manipulation Sub-class */
567 OPC_BITREV = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
568 OPC_REPL_QB = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
569 OPC_REPLV_QB = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
570 OPC_REPL_PH = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
571 OPC_REPLV_PH = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
574 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
576 /* MIPS DSP Arithmetic Sub-class */
577 OPC_PRECR_QB_PH = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
578 OPC_PRECRQ_QB_PH = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
579 OPC_PRECR_SRA_PH_W = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
580 OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
581 OPC_PRECRQ_PH_W = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
582 OPC_PRECRQ_RS_PH_W = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
583 OPC_PRECRQU_S_QB_PH = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
584 /* DSP Compare-Pick Sub-class */
585 OPC_CMPU_EQ_QB = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
586 OPC_CMPU_LT_QB = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
587 OPC_CMPU_LE_QB = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
588 OPC_CMPGU_EQ_QB = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
589 OPC_CMPGU_LT_QB = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
590 OPC_CMPGU_LE_QB = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
591 OPC_CMPGDU_EQ_QB = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
592 OPC_CMPGDU_LT_QB = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
593 OPC_CMPGDU_LE_QB = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
594 OPC_CMP_EQ_PH = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
595 OPC_CMP_LT_PH = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
596 OPC_CMP_LE_PH = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
597 OPC_PICK_QB = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
598 OPC_PICK_PH = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
599 OPC_PACKRL_PH = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
602 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
604 /* MIPS DSP GPR-Based Shift Sub-class */
605 OPC_SHLL_QB = (0x00 << 6) | OPC_SHLL_QB_DSP,
606 OPC_SHLLV_QB = (0x02 << 6) | OPC_SHLL_QB_DSP,
607 OPC_SHLL_PH = (0x08 << 6) | OPC_SHLL_QB_DSP,
608 OPC_SHLLV_PH = (0x0A << 6) | OPC_SHLL_QB_DSP,
609 OPC_SHLL_S_PH = (0x0C << 6) | OPC_SHLL_QB_DSP,
610 OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
611 OPC_SHLL_S_W = (0x14 << 6) | OPC_SHLL_QB_DSP,
612 OPC_SHLLV_S_W = (0x16 << 6) | OPC_SHLL_QB_DSP,
613 OPC_SHRL_QB = (0x01 << 6) | OPC_SHLL_QB_DSP,
614 OPC_SHRLV_QB = (0x03 << 6) | OPC_SHLL_QB_DSP,
615 OPC_SHRL_PH = (0x19 << 6) | OPC_SHLL_QB_DSP,
616 OPC_SHRLV_PH = (0x1B << 6) | OPC_SHLL_QB_DSP,
617 OPC_SHRA_QB = (0x04 << 6) | OPC_SHLL_QB_DSP,
618 OPC_SHRA_R_QB = (0x05 << 6) | OPC_SHLL_QB_DSP,
619 OPC_SHRAV_QB = (0x06 << 6) | OPC_SHLL_QB_DSP,
620 OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
621 OPC_SHRA_PH = (0x09 << 6) | OPC_SHLL_QB_DSP,
622 OPC_SHRAV_PH = (0x0B << 6) | OPC_SHLL_QB_DSP,
623 OPC_SHRA_R_PH = (0x0D << 6) | OPC_SHLL_QB_DSP,
624 OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
625 OPC_SHRA_R_W = (0x15 << 6) | OPC_SHLL_QB_DSP,
626 OPC_SHRAV_R_W = (0x17 << 6) | OPC_SHLL_QB_DSP,
629 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
631 /* MIPS DSP Multiply Sub-class insns */
632 OPC_DPAU_H_QBL = (0x03 << 6) | OPC_DPA_W_PH_DSP,
633 OPC_DPAU_H_QBR = (0x07 << 6) | OPC_DPA_W_PH_DSP,
634 OPC_DPSU_H_QBL = (0x0B << 6) | OPC_DPA_W_PH_DSP,
635 OPC_DPSU_H_QBR = (0x0F << 6) | OPC_DPA_W_PH_DSP,
636 OPC_DPA_W_PH = (0x00 << 6) | OPC_DPA_W_PH_DSP,
637 OPC_DPAX_W_PH = (0x08 << 6) | OPC_DPA_W_PH_DSP,
638 OPC_DPAQ_S_W_PH = (0x04 << 6) | OPC_DPA_W_PH_DSP,
639 OPC_DPAQX_S_W_PH = (0x18 << 6) | OPC_DPA_W_PH_DSP,
640 OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
641 OPC_DPS_W_PH = (0x01 << 6) | OPC_DPA_W_PH_DSP,
642 OPC_DPSX_W_PH = (0x09 << 6) | OPC_DPA_W_PH_DSP,
643 OPC_DPSQ_S_W_PH = (0x05 << 6) | OPC_DPA_W_PH_DSP,
644 OPC_DPSQX_S_W_PH = (0x19 << 6) | OPC_DPA_W_PH_DSP,
645 OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
646 OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
647 OPC_DPAQ_SA_L_W = (0x0C << 6) | OPC_DPA_W_PH_DSP,
648 OPC_DPSQ_SA_L_W = (0x0D << 6) | OPC_DPA_W_PH_DSP,
649 OPC_MAQ_S_W_PHL = (0x14 << 6) | OPC_DPA_W_PH_DSP,
650 OPC_MAQ_S_W_PHR = (0x16 << 6) | OPC_DPA_W_PH_DSP,
651 OPC_MAQ_SA_W_PHL = (0x10 << 6) | OPC_DPA_W_PH_DSP,
652 OPC_MAQ_SA_W_PHR = (0x12 << 6) | OPC_DPA_W_PH_DSP,
653 OPC_MULSA_W_PH = (0x02 << 6) | OPC_DPA_W_PH_DSP,
656 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
658 /* DSP Bit/Manipulation Sub-class */
659 OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
662 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
664 /* MIPS DSP Append Sub-class */
665 OPC_APPEND = (0x00 << 6) | OPC_APPEND_DSP,
666 OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
667 OPC_BALIGN = (0x10 << 6) | OPC_APPEND_DSP,
670 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
672 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
673 OPC_EXTR_W = (0x00 << 6) | OPC_EXTR_W_DSP,
674 OPC_EXTR_R_W = (0x04 << 6) | OPC_EXTR_W_DSP,
675 OPC_EXTR_RS_W = (0x06 << 6) | OPC_EXTR_W_DSP,
676 OPC_EXTR_S_H = (0x0E << 6) | OPC_EXTR_W_DSP,
677 OPC_EXTRV_S_H = (0x0F << 6) | OPC_EXTR_W_DSP,
678 OPC_EXTRV_W = (0x01 << 6) | OPC_EXTR_W_DSP,
679 OPC_EXTRV_R_W = (0x05 << 6) | OPC_EXTR_W_DSP,
680 OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
681 OPC_EXTP = (0x02 << 6) | OPC_EXTR_W_DSP,
682 OPC_EXTPV = (0x03 << 6) | OPC_EXTR_W_DSP,
683 OPC_EXTPDP = (0x0A << 6) | OPC_EXTR_W_DSP,
684 OPC_EXTPDPV = (0x0B << 6) | OPC_EXTR_W_DSP,
685 OPC_SHILO = (0x1A << 6) | OPC_EXTR_W_DSP,
686 OPC_SHILOV = (0x1B << 6) | OPC_EXTR_W_DSP,
687 OPC_MTHLIP = (0x1F << 6) | OPC_EXTR_W_DSP,
688 OPC_WRDSP = (0x13 << 6) | OPC_EXTR_W_DSP,
689 OPC_RDDSP = (0x12 << 6) | OPC_EXTR_W_DSP,
692 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
694 /* MIPS DSP Arithmetic Sub-class */
695 OPC_PRECEQ_L_PWL = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
696 OPC_PRECEQ_L_PWR = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
697 OPC_PRECEQ_PW_QHL = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
698 OPC_PRECEQ_PW_QHR = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
699 OPC_PRECEQ_PW_QHLA = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
700 OPC_PRECEQ_PW_QHRA = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
701 OPC_PRECEQU_QH_OBL = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
702 OPC_PRECEQU_QH_OBR = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
703 OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
704 OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
705 OPC_PRECEU_QH_OBL = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
706 OPC_PRECEU_QH_OBR = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
707 OPC_PRECEU_QH_OBLA = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
708 OPC_PRECEU_QH_OBRA = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
709 OPC_ABSQ_S_OB = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
710 OPC_ABSQ_S_PW = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
711 OPC_ABSQ_S_QH = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
712 /* DSP Bit/Manipulation Sub-class */
713 OPC_REPL_OB = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
714 OPC_REPL_PW = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
715 OPC_REPL_QH = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
716 OPC_REPLV_OB = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
717 OPC_REPLV_PW = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
718 OPC_REPLV_QH = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
721 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
723 /* MIPS DSP Multiply Sub-class insns */
724 OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
725 OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
726 OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
727 OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
728 OPC_MULQ_RS_QH = (0x1F << 6) | OPC_ADDU_OB_DSP,
729 /* MIPS DSP Arithmetic Sub-class */
730 OPC_RADDU_L_OB = (0x14 << 6) | OPC_ADDU_OB_DSP,
731 OPC_SUBQ_PW = (0x13 << 6) | OPC_ADDU_OB_DSP,
732 OPC_SUBQ_S_PW = (0x17 << 6) | OPC_ADDU_OB_DSP,
733 OPC_SUBQ_QH = (0x0B << 6) | OPC_ADDU_OB_DSP,
734 OPC_SUBQ_S_QH = (0x0F << 6) | OPC_ADDU_OB_DSP,
735 OPC_SUBU_OB = (0x01 << 6) | OPC_ADDU_OB_DSP,
736 OPC_SUBU_S_OB = (0x05 << 6) | OPC_ADDU_OB_DSP,
737 OPC_SUBU_QH = (0x09 << 6) | OPC_ADDU_OB_DSP,
738 OPC_SUBU_S_QH = (0x0D << 6) | OPC_ADDU_OB_DSP,
739 OPC_SUBUH_OB = (0x19 << 6) | OPC_ADDU_OB_DSP,
740 OPC_SUBUH_R_OB = (0x1B << 6) | OPC_ADDU_OB_DSP,
741 OPC_ADDQ_PW = (0x12 << 6) | OPC_ADDU_OB_DSP,
742 OPC_ADDQ_S_PW = (0x16 << 6) | OPC_ADDU_OB_DSP,
743 OPC_ADDQ_QH = (0x0A << 6) | OPC_ADDU_OB_DSP,
744 OPC_ADDQ_S_QH = (0x0E << 6) | OPC_ADDU_OB_DSP,
745 OPC_ADDU_OB = (0x00 << 6) | OPC_ADDU_OB_DSP,
746 OPC_ADDU_S_OB = (0x04 << 6) | OPC_ADDU_OB_DSP,
747 OPC_ADDU_QH = (0x08 << 6) | OPC_ADDU_OB_DSP,
748 OPC_ADDU_S_QH = (0x0C << 6) | OPC_ADDU_OB_DSP,
749 OPC_ADDUH_OB = (0x18 << 6) | OPC_ADDU_OB_DSP,
750 OPC_ADDUH_R_OB = (0x1A << 6) | OPC_ADDU_OB_DSP,
753 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
755 /* DSP Compare-Pick Sub-class */
756 OPC_CMP_EQ_PW = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
757 OPC_CMP_LT_PW = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
758 OPC_CMP_LE_PW = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
759 OPC_CMP_EQ_QH = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
760 OPC_CMP_LT_QH = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
761 OPC_CMP_LE_QH = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
762 OPC_CMPGDU_EQ_OB = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
763 OPC_CMPGDU_LT_OB = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
764 OPC_CMPGDU_LE_OB = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
765 OPC_CMPGU_EQ_OB = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
766 OPC_CMPGU_LT_OB = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
767 OPC_CMPGU_LE_OB = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
768 OPC_CMPU_EQ_OB = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
769 OPC_CMPU_LT_OB = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
770 OPC_CMPU_LE_OB = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
771 OPC_PACKRL_PW = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
772 OPC_PICK_OB = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
773 OPC_PICK_PW = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
774 OPC_PICK_QH = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
775 /* MIPS DSP Arithmetic Sub-class */
776 OPC_PRECR_OB_QH = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
777 OPC_PRECR_SRA_QH_PW = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
778 OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
779 OPC_PRECRQ_OB_QH = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
780 OPC_PRECRQ_PW_L = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
781 OPC_PRECRQ_QH_PW = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
782 OPC_PRECRQ_RS_QH_PW = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
783 OPC_PRECRQU_S_OB_QH = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
786 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
788 /* DSP Append Sub-class */
789 OPC_DAPPEND = (0x00 << 6) | OPC_DAPPEND_DSP,
790 OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
791 OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
792 OPC_DBALIGN = (0x10 << 6) | OPC_DAPPEND_DSP,
795 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
798 OPC_DMTHLIP = (0x1F << 6) | OPC_DEXTR_W_DSP,
799 OPC_DSHILO = (0x1A << 6) | OPC_DEXTR_W_DSP,
800 OPC_DEXTP = (0x02 << 6) | OPC_DEXTR_W_DSP,
801 OPC_DEXTPDP = (0x0A << 6) | OPC_DEXTR_W_DSP,
802 OPC_DEXTPDPV = (0x0B << 6) | OPC_DEXTR_W_DSP,
803 OPC_DEXTPV = (0x03 << 6) | OPC_DEXTR_W_DSP,
804 OPC_DEXTR_L = (0x10 << 6) | OPC_DEXTR_W_DSP,
805 OPC_DEXTR_R_L = (0x14 << 6) | OPC_DEXTR_W_DSP,
806 OPC_DEXTR_RS_L = (0x16 << 6) | OPC_DEXTR_W_DSP,
807 OPC_DEXTR_W = (0x00 << 6) | OPC_DEXTR_W_DSP,
808 OPC_DEXTR_R_W = (0x04 << 6) | OPC_DEXTR_W_DSP,
809 OPC_DEXTR_RS_W = (0x06 << 6) | OPC_DEXTR_W_DSP,
810 OPC_DEXTR_S_H = (0x0E << 6) | OPC_DEXTR_W_DSP,
811 OPC_DEXTRV_L = (0x11 << 6) | OPC_DEXTR_W_DSP,
812 OPC_DEXTRV_R_L = (0x15 << 6) | OPC_DEXTR_W_DSP,
813 OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
814 OPC_DEXTRV_S_H = (0x0F << 6) | OPC_DEXTR_W_DSP,
815 OPC_DEXTRV_W = (0x01 << 6) | OPC_DEXTR_W_DSP,
816 OPC_DEXTRV_R_W = (0x05 << 6) | OPC_DEXTR_W_DSP,
817 OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
818 OPC_DSHILOV = (0x1B << 6) | OPC_DEXTR_W_DSP,
821 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
823 /* DSP Bit/Manipulation Sub-class */
824 OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
827 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
829 /* MIPS DSP Multiply Sub-class insns */
830 OPC_DMADD = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
831 OPC_DMADDU = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
832 OPC_DMSUB = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
833 OPC_DMSUBU = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
834 OPC_DPA_W_QH = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
835 OPC_DPAQ_S_W_QH = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
836 OPC_DPAQ_SA_L_PW = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
837 OPC_DPAU_H_OBL = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
838 OPC_DPAU_H_OBR = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
839 OPC_DPS_W_QH = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
840 OPC_DPSQ_S_W_QH = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
841 OPC_DPSQ_SA_L_PW = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
842 OPC_DPSU_H_OBL = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
843 OPC_DPSU_H_OBR = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
844 OPC_MAQ_S_L_PWL = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
845 OPC_MAQ_S_L_PWR = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
846 OPC_MAQ_S_W_QHLL = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
847 OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
848 OPC_MAQ_S_W_QHLR = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
849 OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
850 OPC_MAQ_S_W_QHRL = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
851 OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
852 OPC_MAQ_S_W_QHRR = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
853 OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
854 OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
855 OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
858 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
860 /* MIPS DSP GPR-Based Shift Sub-class */
861 OPC_SHLL_PW = (0x10 << 6) | OPC_SHLL_OB_DSP,
862 OPC_SHLL_S_PW = (0x14 << 6) | OPC_SHLL_OB_DSP,
863 OPC_SHLLV_OB = (0x02 << 6) | OPC_SHLL_OB_DSP,
864 OPC_SHLLV_PW = (0x12 << 6) | OPC_SHLL_OB_DSP,
865 OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
866 OPC_SHLLV_QH = (0x0A << 6) | OPC_SHLL_OB_DSP,
867 OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
868 OPC_SHRA_PW = (0x11 << 6) | OPC_SHLL_OB_DSP,
869 OPC_SHRA_R_PW = (0x15 << 6) | OPC_SHLL_OB_DSP,
870 OPC_SHRAV_OB = (0x06 << 6) | OPC_SHLL_OB_DSP,
871 OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
872 OPC_SHRAV_PW = (0x13 << 6) | OPC_SHLL_OB_DSP,
873 OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
874 OPC_SHRAV_QH = (0x0B << 6) | OPC_SHLL_OB_DSP,
875 OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
876 OPC_SHRLV_OB = (0x03 << 6) | OPC_SHLL_OB_DSP,
877 OPC_SHRLV_QH = (0x1B << 6) | OPC_SHLL_OB_DSP,
878 OPC_SHLL_OB = (0x00 << 6) | OPC_SHLL_OB_DSP,
879 OPC_SHLL_QH = (0x08 << 6) | OPC_SHLL_OB_DSP,
880 OPC_SHLL_S_QH = (0x0C << 6) | OPC_SHLL_OB_DSP,
881 OPC_SHRA_OB = (0x04 << 6) | OPC_SHLL_OB_DSP,
882 OPC_SHRA_R_OB = (0x05 << 6) | OPC_SHLL_OB_DSP,
883 OPC_SHRA_QH = (0x09 << 6) | OPC_SHLL_OB_DSP,
884 OPC_SHRA_R_QH = (0x0D << 6) | OPC_SHLL_OB_DSP,
885 OPC_SHRL_OB = (0x01 << 6) | OPC_SHLL_OB_DSP,
886 OPC_SHRL_QH = (0x19 << 6) | OPC_SHLL_OB_DSP,
889 /* Coprocessor 0 (rs field) */
890 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
893 OPC_MFC0 = (0x00 << 21) | OPC_CP0,
894 OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
895 OPC_MFHC0 = (0x02 << 21) | OPC_CP0,
896 OPC_MTC0 = (0x04 << 21) | OPC_CP0,
897 OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
898 OPC_MTHC0 = (0x06 << 21) | OPC_CP0,
899 OPC_MFTR = (0x08 << 21) | OPC_CP0,
900 OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
901 OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
902 OPC_MTTR = (0x0C << 21) | OPC_CP0,
903 OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
904 OPC_C0 = (0x10 << 21) | OPC_CP0,
905 OPC_C0_1 = (0x11 << 21) | OPC_CP0,
906 OPC_C0_2 = (0x12 << 21) | OPC_CP0,
907 OPC_C0_3 = (0x13 << 21) | OPC_CP0,
908 OPC_C0_4 = (0x14 << 21) | OPC_CP0,
909 OPC_C0_5 = (0x15 << 21) | OPC_CP0,
910 OPC_C0_6 = (0x16 << 21) | OPC_CP0,
911 OPC_C0_7 = (0x17 << 21) | OPC_CP0,
912 OPC_C0_8 = (0x18 << 21) | OPC_CP0,
913 OPC_C0_9 = (0x19 << 21) | OPC_CP0,
914 OPC_C0_A = (0x1A << 21) | OPC_CP0,
915 OPC_C0_B = (0x1B << 21) | OPC_CP0,
916 OPC_C0_C = (0x1C << 21) | OPC_CP0,
917 OPC_C0_D = (0x1D << 21) | OPC_CP0,
918 OPC_C0_E = (0x1E << 21) | OPC_CP0,
919 OPC_C0_F = (0x1F << 21) | OPC_CP0,
923 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
926 OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
927 OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
928 OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
929 OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
930 OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
931 OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
932 OPC_DVP = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
933 OPC_EVP = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
936 /* Coprocessor 0 (with rs == C0) */
937 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
940 OPC_TLBR = 0x01 | OPC_C0,
941 OPC_TLBWI = 0x02 | OPC_C0,
942 OPC_TLBINV = 0x03 | OPC_C0,
943 OPC_TLBINVF = 0x04 | OPC_C0,
944 OPC_TLBWR = 0x06 | OPC_C0,
945 OPC_TLBP = 0x08 | OPC_C0,
946 OPC_RFE = 0x10 | OPC_C0,
947 OPC_ERET = 0x18 | OPC_C0,
948 OPC_DERET = 0x1F | OPC_C0,
949 OPC_WAIT = 0x20 | OPC_C0,
952 /* Coprocessor 1 (rs field) */
953 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
955 /* Values for the fmt field in FP instructions */
957 /* 0 - 15 are reserved */
958 FMT_S = 16, /* single fp */
959 FMT_D = 17, /* double fp */
960 FMT_E = 18, /* extended fp */
961 FMT_Q = 19, /* quad fp */
962 FMT_W = 20, /* 32-bit fixed */
963 FMT_L = 21, /* 64-bit fixed */
964 FMT_PS = 22, /* paired single fp */
965 /* 23 - 31 are reserved */
969 OPC_MFC1 = (0x00 << 21) | OPC_CP1,
970 OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
971 OPC_CFC1 = (0x02 << 21) | OPC_CP1,
972 OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
973 OPC_MTC1 = (0x04 << 21) | OPC_CP1,
974 OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
975 OPC_CTC1 = (0x06 << 21) | OPC_CP1,
976 OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
977 OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
978 OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
979 OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
980 OPC_BZ_V = (0x0B << 21) | OPC_CP1,
981 OPC_BNZ_V = (0x0F << 21) | OPC_CP1,
982 OPC_S_FMT = (FMT_S << 21) | OPC_CP1,
983 OPC_D_FMT = (FMT_D << 21) | OPC_CP1,
984 OPC_E_FMT = (FMT_E << 21) | OPC_CP1,
985 OPC_Q_FMT = (FMT_Q << 21) | OPC_CP1,
986 OPC_W_FMT = (FMT_W << 21) | OPC_CP1,
987 OPC_L_FMT = (FMT_L << 21) | OPC_CP1,
988 OPC_PS_FMT = (FMT_PS << 21) | OPC_CP1,
989 OPC_BC1EQZ = (0x09 << 21) | OPC_CP1,
990 OPC_BC1NEZ = (0x0D << 21) | OPC_CP1,
991 OPC_BZ_B = (0x18 << 21) | OPC_CP1,
992 OPC_BZ_H = (0x19 << 21) | OPC_CP1,
993 OPC_BZ_W = (0x1A << 21) | OPC_CP1,
994 OPC_BZ_D = (0x1B << 21) | OPC_CP1,
995 OPC_BNZ_B = (0x1C << 21) | OPC_CP1,
996 OPC_BNZ_H = (0x1D << 21) | OPC_CP1,
997 OPC_BNZ_W = (0x1E << 21) | OPC_CP1,
998 OPC_BNZ_D = (0x1F << 21) | OPC_CP1,
1001 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
1002 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
1005 OPC_BC1F = (0x00 << 16) | OPC_BC1,
1006 OPC_BC1T = (0x01 << 16) | OPC_BC1,
1007 OPC_BC1FL = (0x02 << 16) | OPC_BC1,
1008 OPC_BC1TL = (0x03 << 16) | OPC_BC1,
1012 OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
1013 OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
1017 OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
1018 OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
1021 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
1024 OPC_MFC2 = (0x00 << 21) | OPC_CP2,
1025 OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
1026 OPC_CFC2 = (0x02 << 21) | OPC_CP2,
1027 OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
1028 OPC_MTC2 = (0x04 << 21) | OPC_CP2,
1029 OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
1030 OPC_CTC2 = (0x06 << 21) | OPC_CP2,
1031 OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
1032 OPC_BC2 = (0x08 << 21) | OPC_CP2,
1033 OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
1034 OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
1037 #define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1040 OPC_PADDSH = (24 << 21) | (0x00) | OPC_CP2,
1041 OPC_PADDUSH = (25 << 21) | (0x00) | OPC_CP2,
1042 OPC_PADDH = (26 << 21) | (0x00) | OPC_CP2,
1043 OPC_PADDW = (27 << 21) | (0x00) | OPC_CP2,
1044 OPC_PADDSB = (28 << 21) | (0x00) | OPC_CP2,
1045 OPC_PADDUSB = (29 << 21) | (0x00) | OPC_CP2,
1046 OPC_PADDB = (30 << 21) | (0x00) | OPC_CP2,
1047 OPC_PADDD = (31 << 21) | (0x00) | OPC_CP2,
1049 OPC_PSUBSH = (24 << 21) | (0x01) | OPC_CP2,
1050 OPC_PSUBUSH = (25 << 21) | (0x01) | OPC_CP2,
1051 OPC_PSUBH = (26 << 21) | (0x01) | OPC_CP2,
1052 OPC_PSUBW = (27 << 21) | (0x01) | OPC_CP2,
1053 OPC_PSUBSB = (28 << 21) | (0x01) | OPC_CP2,
1054 OPC_PSUBUSB = (29 << 21) | (0x01) | OPC_CP2,
1055 OPC_PSUBB = (30 << 21) | (0x01) | OPC_CP2,
1056 OPC_PSUBD = (31 << 21) | (0x01) | OPC_CP2,
1058 OPC_PSHUFH = (24 << 21) | (0x02) | OPC_CP2,
1059 OPC_PACKSSWH = (25 << 21) | (0x02) | OPC_CP2,
1060 OPC_PACKSSHB = (26 << 21) | (0x02) | OPC_CP2,
1061 OPC_PACKUSHB = (27 << 21) | (0x02) | OPC_CP2,
1062 OPC_XOR_CP2 = (28 << 21) | (0x02) | OPC_CP2,
1063 OPC_NOR_CP2 = (29 << 21) | (0x02) | OPC_CP2,
1064 OPC_AND_CP2 = (30 << 21) | (0x02) | OPC_CP2,
1065 OPC_PANDN = (31 << 21) | (0x02) | OPC_CP2,
1067 OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
1068 OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
1069 OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
1070 OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
1071 OPC_PINSRH_0 = (28 << 21) | (0x03) | OPC_CP2,
1072 OPC_PINSRH_1 = (29 << 21) | (0x03) | OPC_CP2,
1073 OPC_PINSRH_2 = (30 << 21) | (0x03) | OPC_CP2,
1074 OPC_PINSRH_3 = (31 << 21) | (0x03) | OPC_CP2,
1076 OPC_PAVGH = (24 << 21) | (0x08) | OPC_CP2,
1077 OPC_PAVGB = (25 << 21) | (0x08) | OPC_CP2,
1078 OPC_PMAXSH = (26 << 21) | (0x08) | OPC_CP2,
1079 OPC_PMINSH = (27 << 21) | (0x08) | OPC_CP2,
1080 OPC_PMAXUB = (28 << 21) | (0x08) | OPC_CP2,
1081 OPC_PMINUB = (29 << 21) | (0x08) | OPC_CP2,
1083 OPC_PCMPEQW = (24 << 21) | (0x09) | OPC_CP2,
1084 OPC_PCMPGTW = (25 << 21) | (0x09) | OPC_CP2,
1085 OPC_PCMPEQH = (26 << 21) | (0x09) | OPC_CP2,
1086 OPC_PCMPGTH = (27 << 21) | (0x09) | OPC_CP2,
1087 OPC_PCMPEQB = (28 << 21) | (0x09) | OPC_CP2,
1088 OPC_PCMPGTB = (29 << 21) | (0x09) | OPC_CP2,
1090 OPC_PSLLW = (24 << 21) | (0x0A) | OPC_CP2,
1091 OPC_PSLLH = (25 << 21) | (0x0A) | OPC_CP2,
1092 OPC_PMULLH = (26 << 21) | (0x0A) | OPC_CP2,
1093 OPC_PMULHH = (27 << 21) | (0x0A) | OPC_CP2,
1094 OPC_PMULUW = (28 << 21) | (0x0A) | OPC_CP2,
1095 OPC_PMULHUH = (29 << 21) | (0x0A) | OPC_CP2,
1097 OPC_PSRLW = (24 << 21) | (0x0B) | OPC_CP2,
1098 OPC_PSRLH = (25 << 21) | (0x0B) | OPC_CP2,
1099 OPC_PSRAW = (26 << 21) | (0x0B) | OPC_CP2,
1100 OPC_PSRAH = (27 << 21) | (0x0B) | OPC_CP2,
1101 OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
1102 OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,
1104 OPC_ADDU_CP2 = (24 << 21) | (0x0C) | OPC_CP2,
1105 OPC_OR_CP2 = (25 << 21) | (0x0C) | OPC_CP2,
1106 OPC_ADD_CP2 = (26 << 21) | (0x0C) | OPC_CP2,
1107 OPC_DADD_CP2 = (27 << 21) | (0x0C) | OPC_CP2,
1108 OPC_SEQU_CP2 = (28 << 21) | (0x0C) | OPC_CP2,
1109 OPC_SEQ_CP2 = (29 << 21) | (0x0C) | OPC_CP2,
1111 OPC_SUBU_CP2 = (24 << 21) | (0x0D) | OPC_CP2,
1112 OPC_PASUBUB = (25 << 21) | (0x0D) | OPC_CP2,
1113 OPC_SUB_CP2 = (26 << 21) | (0x0D) | OPC_CP2,
1114 OPC_DSUB_CP2 = (27 << 21) | (0x0D) | OPC_CP2,
1115 OPC_SLTU_CP2 = (28 << 21) | (0x0D) | OPC_CP2,
1116 OPC_SLT_CP2 = (29 << 21) | (0x0D) | OPC_CP2,
1118 OPC_SLL_CP2 = (24 << 21) | (0x0E) | OPC_CP2,
1119 OPC_DSLL_CP2 = (25 << 21) | (0x0E) | OPC_CP2,
1120 OPC_PEXTRH = (26 << 21) | (0x0E) | OPC_CP2,
1121 OPC_PMADDHW = (27 << 21) | (0x0E) | OPC_CP2,
1122 OPC_SLEU_CP2 = (28 << 21) | (0x0E) | OPC_CP2,
1123 OPC_SLE_CP2 = (29 << 21) | (0x0E) | OPC_CP2,
1125 OPC_SRL_CP2 = (24 << 21) | (0x0F) | OPC_CP2,
1126 OPC_DSRL_CP2 = (25 << 21) | (0x0F) | OPC_CP2,
1127 OPC_SRA_CP2 = (26 << 21) | (0x0F) | OPC_CP2,
1128 OPC_DSRA_CP2 = (27 << 21) | (0x0F) | OPC_CP2,
1129 OPC_BIADD = (28 << 21) | (0x0F) | OPC_CP2,
1130 OPC_PMOVMSKB = (29 << 21) | (0x0F) | OPC_CP2,
1134 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
1137 OPC_LWXC1 = 0x00 | OPC_CP3,
1138 OPC_LDXC1 = 0x01 | OPC_CP3,
1139 OPC_LUXC1 = 0x05 | OPC_CP3,
1140 OPC_SWXC1 = 0x08 | OPC_CP3,
1141 OPC_SDXC1 = 0x09 | OPC_CP3,
1142 OPC_SUXC1 = 0x0D | OPC_CP3,
1143 OPC_PREFX = 0x0F | OPC_CP3,
1144 OPC_ALNV_PS = 0x1E | OPC_CP3,
1145 OPC_MADD_S = 0x20 | OPC_CP3,
1146 OPC_MADD_D = 0x21 | OPC_CP3,
1147 OPC_MADD_PS = 0x26 | OPC_CP3,
1148 OPC_MSUB_S = 0x28 | OPC_CP3,
1149 OPC_MSUB_D = 0x29 | OPC_CP3,
1150 OPC_MSUB_PS = 0x2E | OPC_CP3,
1151 OPC_NMADD_S = 0x30 | OPC_CP3,
1152 OPC_NMADD_D = 0x31 | OPC_CP3,
1153 OPC_NMADD_PS= 0x36 | OPC_CP3,
1154 OPC_NMSUB_S = 0x38 | OPC_CP3,
1155 OPC_NMSUB_D = 0x39 | OPC_CP3,
1156 OPC_NMSUB_PS= 0x3E | OPC_CP3,
1160 #define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1162 OPC_MSA_I8_00 = 0x00 | OPC_MSA,
1163 OPC_MSA_I8_01 = 0x01 | OPC_MSA,
1164 OPC_MSA_I8_02 = 0x02 | OPC_MSA,
1165 OPC_MSA_I5_06 = 0x06 | OPC_MSA,
1166 OPC_MSA_I5_07 = 0x07 | OPC_MSA,
1167 OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
1168 OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
1169 OPC_MSA_3R_0D = 0x0D | OPC_MSA,
1170 OPC_MSA_3R_0E = 0x0E | OPC_MSA,
1171 OPC_MSA_3R_0F = 0x0F | OPC_MSA,
1172 OPC_MSA_3R_10 = 0x10 | OPC_MSA,
1173 OPC_MSA_3R_11 = 0x11 | OPC_MSA,
1174 OPC_MSA_3R_12 = 0x12 | OPC_MSA,
1175 OPC_MSA_3R_13 = 0x13 | OPC_MSA,
1176 OPC_MSA_3R_14 = 0x14 | OPC_MSA,
1177 OPC_MSA_3R_15 = 0x15 | OPC_MSA,
1178 OPC_MSA_ELM = 0x19 | OPC_MSA,
1179 OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
1180 OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
1181 OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
1182 OPC_MSA_VEC = 0x1E | OPC_MSA,
1184 /* MI10 instruction */
1185 OPC_LD_B = (0x20) | OPC_MSA,
1186 OPC_LD_H = (0x21) | OPC_MSA,
1187 OPC_LD_W = (0x22) | OPC_MSA,
1188 OPC_LD_D = (0x23) | OPC_MSA,
1189 OPC_ST_B = (0x24) | OPC_MSA,
1190 OPC_ST_H = (0x25) | OPC_MSA,
1191 OPC_ST_W = (0x26) | OPC_MSA,
1192 OPC_ST_D = (0x27) | OPC_MSA,
1196 /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
1197 OPC_ADDVI_df = (0x0 << 23) | OPC_MSA_I5_06,
1198 OPC_CEQI_df = (0x0 << 23) | OPC_MSA_I5_07,
1199 OPC_SUBVI_df = (0x1 << 23) | OPC_MSA_I5_06,
1200 OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
1201 OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
1202 OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
1203 OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
1204 OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
1205 OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
1206 OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
1207 OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
1208 OPC_LDI_df = (0x6 << 23) | OPC_MSA_I5_07,
1210 /* I8 instruction */
1211 OPC_ANDI_B = (0x0 << 24) | OPC_MSA_I8_00,
1212 OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
1213 OPC_SHF_B = (0x0 << 24) | OPC_MSA_I8_02,
1214 OPC_ORI_B = (0x1 << 24) | OPC_MSA_I8_00,
1215 OPC_BMZI_B = (0x1 << 24) | OPC_MSA_I8_01,
1216 OPC_SHF_H = (0x1 << 24) | OPC_MSA_I8_02,
1217 OPC_NORI_B = (0x2 << 24) | OPC_MSA_I8_00,
1218 OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
1219 OPC_SHF_W = (0x2 << 24) | OPC_MSA_I8_02,
1220 OPC_XORI_B = (0x3 << 24) | OPC_MSA_I8_00,
1222 /* VEC/2R/2RF instruction */
1223 OPC_AND_V = (0x00 << 21) | OPC_MSA_VEC,
1224 OPC_OR_V = (0x01 << 21) | OPC_MSA_VEC,
1225 OPC_NOR_V = (0x02 << 21) | OPC_MSA_VEC,
1226 OPC_XOR_V = (0x03 << 21) | OPC_MSA_VEC,
1227 OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
1228 OPC_BMZ_V = (0x05 << 21) | OPC_MSA_VEC,
1229 OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,
1231 OPC_MSA_2R = (0x18 << 21) | OPC_MSA_VEC,
1232 OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,
1234 /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
1235 OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
1236 OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
1237 OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
1238 OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,
1240 /* 2RF instruction df(bit 16) = _w, _d */
1241 OPC_FCLASS_df = (0x00 << 17) | OPC_MSA_2RF,
1242 OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
1243 OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
1244 OPC_FSQRT_df = (0x03 << 17) | OPC_MSA_2RF,
1245 OPC_FRSQRT_df = (0x04 << 17) | OPC_MSA_2RF,
1246 OPC_FRCP_df = (0x05 << 17) | OPC_MSA_2RF,
1247 OPC_FRINT_df = (0x06 << 17) | OPC_MSA_2RF,
1248 OPC_FLOG2_df = (0x07 << 17) | OPC_MSA_2RF,
1249 OPC_FEXUPL_df = (0x08 << 17) | OPC_MSA_2RF,
1250 OPC_FEXUPR_df = (0x09 << 17) | OPC_MSA_2RF,
1251 OPC_FFQL_df = (0x0A << 17) | OPC_MSA_2RF,
1252 OPC_FFQR_df = (0x0B << 17) | OPC_MSA_2RF,
1253 OPC_FTINT_S_df = (0x0C << 17) | OPC_MSA_2RF,
1254 OPC_FTINT_U_df = (0x0D << 17) | OPC_MSA_2RF,
1255 OPC_FFINT_S_df = (0x0E << 17) | OPC_MSA_2RF,
1256 OPC_FFINT_U_df = (0x0F << 17) | OPC_MSA_2RF,
1258 /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
1259 OPC_SLL_df = (0x0 << 23) | OPC_MSA_3R_0D,
1260 OPC_ADDV_df = (0x0 << 23) | OPC_MSA_3R_0E,
1261 OPC_CEQ_df = (0x0 << 23) | OPC_MSA_3R_0F,
1262 OPC_ADD_A_df = (0x0 << 23) | OPC_MSA_3R_10,
1263 OPC_SUBS_S_df = (0x0 << 23) | OPC_MSA_3R_11,
1264 OPC_MULV_df = (0x0 << 23) | OPC_MSA_3R_12,
1265 OPC_DOTP_S_df = (0x0 << 23) | OPC_MSA_3R_13,
1266 OPC_SLD_df = (0x0 << 23) | OPC_MSA_3R_14,
1267 OPC_VSHF_df = (0x0 << 23) | OPC_MSA_3R_15,
1268 OPC_SRA_df = (0x1 << 23) | OPC_MSA_3R_0D,
1269 OPC_SUBV_df = (0x1 << 23) | OPC_MSA_3R_0E,
1270 OPC_ADDS_A_df = (0x1 << 23) | OPC_MSA_3R_10,
1271 OPC_SUBS_U_df = (0x1 << 23) | OPC_MSA_3R_11,
1272 OPC_MADDV_df = (0x1 << 23) | OPC_MSA_3R_12,
1273 OPC_DOTP_U_df = (0x1 << 23) | OPC_MSA_3R_13,
1274 OPC_SPLAT_df = (0x1 << 23) | OPC_MSA_3R_14,
1275 OPC_SRAR_df = (0x1 << 23) | OPC_MSA_3R_15,
1276 OPC_SRL_df = (0x2 << 23) | OPC_MSA_3R_0D,
1277 OPC_MAX_S_df = (0x2 << 23) | OPC_MSA_3R_0E,
1278 OPC_CLT_S_df = (0x2 << 23) | OPC_MSA_3R_0F,
1279 OPC_ADDS_S_df = (0x2 << 23) | OPC_MSA_3R_10,
1280 OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
1281 OPC_MSUBV_df = (0x2 << 23) | OPC_MSA_3R_12,
1282 OPC_DPADD_S_df = (0x2 << 23) | OPC_MSA_3R_13,
1283 OPC_PCKEV_df = (0x2 << 23) | OPC_MSA_3R_14,
1284 OPC_SRLR_df = (0x2 << 23) | OPC_MSA_3R_15,
1285 OPC_BCLR_df = (0x3 << 23) | OPC_MSA_3R_0D,
1286 OPC_MAX_U_df = (0x3 << 23) | OPC_MSA_3R_0E,
1287 OPC_CLT_U_df = (0x3 << 23) | OPC_MSA_3R_0F,
1288 OPC_ADDS_U_df = (0x3 << 23) | OPC_MSA_3R_10,
1289 OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
1290 OPC_DPADD_U_df = (0x3 << 23) | OPC_MSA_3R_13,
1291 OPC_PCKOD_df = (0x3 << 23) | OPC_MSA_3R_14,
1292 OPC_BSET_df = (0x4 << 23) | OPC_MSA_3R_0D,
1293 OPC_MIN_S_df = (0x4 << 23) | OPC_MSA_3R_0E,
1294 OPC_CLE_S_df = (0x4 << 23) | OPC_MSA_3R_0F,
1295 OPC_AVE_S_df = (0x4 << 23) | OPC_MSA_3R_10,
1296 OPC_ASUB_S_df = (0x4 << 23) | OPC_MSA_3R_11,
1297 OPC_DIV_S_df = (0x4 << 23) | OPC_MSA_3R_12,
1298 OPC_DPSUB_S_df = (0x4 << 23) | OPC_MSA_3R_13,
1299 OPC_ILVL_df = (0x4 << 23) | OPC_MSA_3R_14,
1300 OPC_HADD_S_df = (0x4 << 23) | OPC_MSA_3R_15,
1301 OPC_BNEG_df = (0x5 << 23) | OPC_MSA_3R_0D,
1302 OPC_MIN_U_df = (0x5 << 23) | OPC_MSA_3R_0E,
1303 OPC_CLE_U_df = (0x5 << 23) | OPC_MSA_3R_0F,
1304 OPC_AVE_U_df = (0x5 << 23) | OPC_MSA_3R_10,
1305 OPC_ASUB_U_df = (0x5 << 23) | OPC_MSA_3R_11,
1306 OPC_DIV_U_df = (0x5 << 23) | OPC_MSA_3R_12,
1307 OPC_DPSUB_U_df = (0x5 << 23) | OPC_MSA_3R_13,
1308 OPC_ILVR_df = (0x5 << 23) | OPC_MSA_3R_14,
1309 OPC_HADD_U_df = (0x5 << 23) | OPC_MSA_3R_15,
1310 OPC_BINSL_df = (0x6 << 23) | OPC_MSA_3R_0D,
1311 OPC_MAX_A_df = (0x6 << 23) | OPC_MSA_3R_0E,
1312 OPC_AVER_S_df = (0x6 << 23) | OPC_MSA_3R_10,
1313 OPC_MOD_S_df = (0x6 << 23) | OPC_MSA_3R_12,
1314 OPC_ILVEV_df = (0x6 << 23) | OPC_MSA_3R_14,
1315 OPC_HSUB_S_df = (0x6 << 23) | OPC_MSA_3R_15,
1316 OPC_BINSR_df = (0x7 << 23) | OPC_MSA_3R_0D,
1317 OPC_MIN_A_df = (0x7 << 23) | OPC_MSA_3R_0E,
1318 OPC_AVER_U_df = (0x7 << 23) | OPC_MSA_3R_10,
1319 OPC_MOD_U_df = (0x7 << 23) | OPC_MSA_3R_12,
1320 OPC_ILVOD_df = (0x7 << 23) | OPC_MSA_3R_14,
1321 OPC_HSUB_U_df = (0x7 << 23) | OPC_MSA_3R_15,
1323 /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
1324 OPC_SLDI_df = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
1325 OPC_CTCMSA = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
1326 OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
1327 OPC_CFCMSA = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
1328 OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
1329 OPC_MOVE_V = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
1330 OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
1331 OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
1332 OPC_INSVE_df = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,
1334 /* 3RF instruction _df(bit 21) = _w, _d */
1335 OPC_FCAF_df = (0x0 << 22) | OPC_MSA_3RF_1A,
1336 OPC_FADD_df = (0x0 << 22) | OPC_MSA_3RF_1B,
1337 OPC_FCUN_df = (0x1 << 22) | OPC_MSA_3RF_1A,
1338 OPC_FSUB_df = (0x1 << 22) | OPC_MSA_3RF_1B,
1339 OPC_FCOR_df = (0x1 << 22) | OPC_MSA_3RF_1C,
1340 OPC_FCEQ_df = (0x2 << 22) | OPC_MSA_3RF_1A,
1341 OPC_FMUL_df = (0x2 << 22) | OPC_MSA_3RF_1B,
1342 OPC_FCUNE_df = (0x2 << 22) | OPC_MSA_3RF_1C,
1343 OPC_FCUEQ_df = (0x3 << 22) | OPC_MSA_3RF_1A,
1344 OPC_FDIV_df = (0x3 << 22) | OPC_MSA_3RF_1B,
1345 OPC_FCNE_df = (0x3 << 22) | OPC_MSA_3RF_1C,
1346 OPC_FCLT_df = (0x4 << 22) | OPC_MSA_3RF_1A,
1347 OPC_FMADD_df = (0x4 << 22) | OPC_MSA_3RF_1B,
1348 OPC_MUL_Q_df = (0x4 << 22) | OPC_MSA_3RF_1C,
1349 OPC_FCULT_df = (0x5 << 22) | OPC_MSA_3RF_1A,
1350 OPC_FMSUB_df = (0x5 << 22) | OPC_MSA_3RF_1B,
1351 OPC_MADD_Q_df = (0x5 << 22) | OPC_MSA_3RF_1C,
1352 OPC_FCLE_df = (0x6 << 22) | OPC_MSA_3RF_1A,
1353 OPC_MSUB_Q_df = (0x6 << 22) | OPC_MSA_3RF_1C,
1354 OPC_FCULE_df = (0x7 << 22) | OPC_MSA_3RF_1A,
1355 OPC_FEXP2_df = (0x7 << 22) | OPC_MSA_3RF_1B,
1356 OPC_FSAF_df = (0x8 << 22) | OPC_MSA_3RF_1A,
1357 OPC_FEXDO_df = (0x8 << 22) | OPC_MSA_3RF_1B,
1358 OPC_FSUN_df = (0x9 << 22) | OPC_MSA_3RF_1A,
1359 OPC_FSOR_df = (0x9 << 22) | OPC_MSA_3RF_1C,
1360 OPC_FSEQ_df = (0xA << 22) | OPC_MSA_3RF_1A,
1361 OPC_FTQ_df = (0xA << 22) | OPC_MSA_3RF_1B,
1362 OPC_FSUNE_df = (0xA << 22) | OPC_MSA_3RF_1C,
1363 OPC_FSUEQ_df = (0xB << 22) | OPC_MSA_3RF_1A,
1364 OPC_FSNE_df = (0xB << 22) | OPC_MSA_3RF_1C,
1365 OPC_FSLT_df = (0xC << 22) | OPC_MSA_3RF_1A,
1366 OPC_FMIN_df = (0xC << 22) | OPC_MSA_3RF_1B,
1367 OPC_MULR_Q_df = (0xC << 22) | OPC_MSA_3RF_1C,
1368 OPC_FSULT_df = (0xD << 22) | OPC_MSA_3RF_1A,
1369 OPC_FMIN_A_df = (0xD << 22) | OPC_MSA_3RF_1B,
1370 OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
1371 OPC_FSLE_df = (0xE << 22) | OPC_MSA_3RF_1A,
1372 OPC_FMAX_df = (0xE << 22) | OPC_MSA_3RF_1B,
1373 OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
1374 OPC_FSULE_df = (0xF << 22) | OPC_MSA_3RF_1A,
1375 OPC_FMAX_A_df = (0xF << 22) | OPC_MSA_3RF_1B,
1377 /* BIT instruction df(bits 22..16) = _B _H _W _D */
1378 OPC_SLLI_df = (0x0 << 23) | OPC_MSA_BIT_09,
1379 OPC_SAT_S_df = (0x0 << 23) | OPC_MSA_BIT_0A,
1380 OPC_SRAI_df = (0x1 << 23) | OPC_MSA_BIT_09,
1381 OPC_SAT_U_df = (0x1 << 23) | OPC_MSA_BIT_0A,
1382 OPC_SRLI_df = (0x2 << 23) | OPC_MSA_BIT_09,
1383 OPC_SRARI_df = (0x2 << 23) | OPC_MSA_BIT_0A,
1384 OPC_BCLRI_df = (0x3 << 23) | OPC_MSA_BIT_09,
1385 OPC_SRLRI_df = (0x3 << 23) | OPC_MSA_BIT_0A,
1386 OPC_BSETI_df = (0x4 << 23) | OPC_MSA_BIT_09,
1387 OPC_BNEGI_df = (0x5 << 23) | OPC_MSA_BIT_09,
1388 OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
1389 OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
1394 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1395 * ============================================
1397 * MXU (full name: MIPS eXtension/enhanced Unit) is an SIMD extension of MIPS32
1398 * instructions set. It is designed to fit the needs of signal, graphical and
1399 * video processing applications. MXU instruction set is used in Xburst family
1400 * of microprocessors by Ingenic.
1402 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1403 * the control register.
1407 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1408 * Programming Manual", Ingenic Semiconductor Co, Ltd., 2017
1412 /* global register indices */
1413 static TCGv cpu_gpr[32], cpu_PC;
1414 static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
1415 static TCGv cpu_dspctrl, btarget, bcond;
1416 static TCGv_i32 hflags;
1417 static TCGv_i32 fpu_fcr0, fpu_fcr31;
1418 static TCGv_i64 fpu_f64[32];
1419 static TCGv_i64 msa_wr_d[64];
1421 #include "exec/gen-icount.h"
1423 #define gen_helper_0e0i(name, arg) do { \
1424 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
1425 gen_helper_##name(cpu_env, helper_tmp); \
1426 tcg_temp_free_i32(helper_tmp); \
1429 #define gen_helper_0e1i(name, arg1, arg2) do { \
1430 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
1431 gen_helper_##name(cpu_env, arg1, helper_tmp); \
1432 tcg_temp_free_i32(helper_tmp); \
1435 #define gen_helper_1e0i(name, ret, arg1) do { \
1436 TCGv_i32 helper_tmp = tcg_const_i32(arg1); \
1437 gen_helper_##name(ret, cpu_env, helper_tmp); \
1438 tcg_temp_free_i32(helper_tmp); \
1441 #define gen_helper_1e1i(name, ret, arg1, arg2) do { \
1442 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
1443 gen_helper_##name(ret, cpu_env, arg1, helper_tmp); \
1444 tcg_temp_free_i32(helper_tmp); \
1447 #define gen_helper_0e2i(name, arg1, arg2, arg3) do { \
1448 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
1449 gen_helper_##name(cpu_env, arg1, arg2, helper_tmp); \
1450 tcg_temp_free_i32(helper_tmp); \
1453 #define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do { \
1454 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
1455 gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp); \
1456 tcg_temp_free_i32(helper_tmp); \
1459 #define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do { \
1460 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
1461 gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp); \
1462 tcg_temp_free_i32(helper_tmp); \
1465 typedef struct DisasContext {
1466 DisasContextBase base;
1467 target_ulong saved_pc;
1468 target_ulong page_start;
1471 int32_t CP0_Config1;
1472 int32_t CP0_Config3;
1473 int32_t CP0_Config5;
1474 /* Routine used to access memory */
1476 TCGMemOp default_tcg_memop_mask;
1477 uint32_t hflags, saved_hflags;
1478 target_ulong btarget;
1489 int CP0_LLAddr_shift;
1498 #define DISAS_STOP DISAS_TARGET_0
1499 #define DISAS_EXIT DISAS_TARGET_1
1501 static const char * const regnames[] = {
1502 "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
1503 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
1504 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
1505 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
1508 static const char * const regnames_HI[] = {
1509 "HI0", "HI1", "HI2", "HI3",
1512 static const char * const regnames_LO[] = {
1513 "LO0", "LO1", "LO2", "LO3",
1516 static const char * const fregnames[] = {
1517 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
1518 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
1519 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
1520 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
1523 static const char * const msaregnames[] = {
1524 "w0.d0", "w0.d1", "w1.d0", "w1.d1",
1525 "w2.d0", "w2.d1", "w3.d0", "w3.d1",
1526 "w4.d0", "w4.d1", "w5.d0", "w5.d1",
1527 "w6.d0", "w6.d1", "w7.d0", "w7.d1",
1528 "w8.d0", "w8.d1", "w9.d0", "w9.d1",
1529 "w10.d0", "w10.d1", "w11.d0", "w11.d1",
1530 "w12.d0", "w12.d1", "w13.d0", "w13.d1",
1531 "w14.d0", "w14.d1", "w15.d0", "w15.d1",
1532 "w16.d0", "w16.d1", "w17.d0", "w17.d1",
1533 "w18.d0", "w18.d1", "w19.d0", "w19.d1",
1534 "w20.d0", "w20.d1", "w21.d0", "w21.d1",
1535 "w22.d0", "w22.d1", "w23.d0", "w23.d1",
1536 "w24.d0", "w24.d1", "w25.d0", "w25.d1",
1537 "w26.d0", "w26.d1", "w27.d0", "w27.d1",
1538 "w28.d0", "w28.d1", "w29.d0", "w29.d1",
1539 "w30.d0", "w30.d1", "w31.d0", "w31.d1",
1542 #define LOG_DISAS(...) \
1544 if (MIPS_DEBUG_DISAS) { \
1545 qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__); \
1549 #define MIPS_INVAL(op) \
1551 if (MIPS_DEBUG_DISAS) { \
1552 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
1553 TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
1554 ctx->base.pc_next, ctx->opcode, op, \
1555 ctx->opcode >> 26, ctx->opcode & 0x3F, \
1556 ((ctx->opcode >> 16) & 0x1F)); \
1560 /* General purpose registers moves. */
1561 static inline void gen_load_gpr (TCGv t, int reg)
1564 tcg_gen_movi_tl(t, 0);
1566 tcg_gen_mov_tl(t, cpu_gpr[reg]);
1569 static inline void gen_store_gpr (TCGv t, int reg)
1572 tcg_gen_mov_tl(cpu_gpr[reg], t);
1575 /* Moves to/from shadow registers. */
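/*
 * The shadow register bank is located by reading CP0 SRSCtl, extracting
 * the PSS (previous shadow set) field and scaling it by the size of a
 * 32-entry bank of target_ulong GPRs; the selected register is then
 * accessed at its offset within that bank, relative to cpu_env.
 */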
1576 static inline void gen_load_srsgpr (int from, int to)
1578 TCGv t0 = tcg_temp_new();
1581 tcg_gen_movi_tl(t0, 0);
1583 TCGv_i32 t2 = tcg_temp_new_i32();
1584 TCGv_ptr addr = tcg_temp_new_ptr();
1586 tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
1587 tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
1588 tcg_gen_andi_i32(t2, t2, 0xf);
1589 tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
1590 tcg_gen_ext_i32_ptr(addr, t2);
1591 tcg_gen_add_ptr(addr, cpu_env, addr);
1593 tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
1594 tcg_temp_free_ptr(addr);
1595 tcg_temp_free_i32(t2);
1597 gen_store_gpr(t0, to);
1601 static inline void gen_store_srsgpr (int from, int to)
1604 TCGv t0 = tcg_temp_new();
1605 TCGv_i32 t2 = tcg_temp_new_i32();
1606 TCGv_ptr addr = tcg_temp_new_ptr();
1608 gen_load_gpr(t0, from);
1609 tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
1610 tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
1611 tcg_gen_andi_i32(t2, t2, 0xf);
1612 tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
1613 tcg_gen_ext_i32_ptr(addr, t2);
1614 tcg_gen_add_ptr(addr, cpu_env, addr);
1616 tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
1617 tcg_temp_free_ptr(addr);
1618 tcg_temp_free_i32(t2);
1624 static inline void gen_save_pc(target_ulong pc)
1626 tcg_gen_movi_tl(cpu_PC, pc);
1629 static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
1631 LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
1632 if (do_save_pc && ctx->base.pc_next != ctx->saved_pc) {
1633 gen_save_pc(ctx->base.pc_next);
1634 ctx->saved_pc = ctx->base.pc_next;
1636 if (ctx->hflags != ctx->saved_hflags) {
1637 tcg_gen_movi_i32(hflags, ctx->hflags);
1638 ctx->saved_hflags = ctx->hflags;
1639 switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
1645 tcg_gen_movi_tl(btarget, ctx->btarget);
1651 static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
1653 ctx->saved_hflags = ctx->hflags;
1654 switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
1660 ctx->btarget = env->btarget;
1665 static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
1667 TCGv_i32 texcp = tcg_const_i32(excp);
1668 TCGv_i32 terr = tcg_const_i32(err);
1669 save_cpu_state(ctx, 1);
1670 gen_helper_raise_exception_err(cpu_env, texcp, terr);
1671 tcg_temp_free_i32(terr);
1672 tcg_temp_free_i32(texcp);
1673 ctx->base.is_jmp = DISAS_NORETURN;
1676 static inline void generate_exception(DisasContext *ctx, int excp)
1678 gen_helper_0e0i(raise_exception, excp);
1681 static inline void generate_exception_end(DisasContext *ctx, int excp)
1683 generate_exception_err(ctx, excp, 0);
1686 /* Floating point register moves. */
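/*
 * FPRs are backed by the 64-bit fpu_f64[] globals.  With Status.FR = 1
 * (MIPS_HFLAG_F64) each element holds a full 64-bit register; with FR = 0
 * a 64-bit value is split across an even/odd pair, each 32-bit word kept
 * in the low half of its element (see gen_load_fpr64/gen_store_fpr64).
 */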
1687 static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
1689 if (ctx->hflags & MIPS_HFLAG_FRE) {
1690 generate_exception(ctx, EXCP_RI);
1692 tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
1695 static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
1698 if (ctx->hflags & MIPS_HFLAG_FRE) {
1699 generate_exception(ctx, EXCP_RI);
1701 t64 = tcg_temp_new_i64();
1702 tcg_gen_extu_i32_i64(t64, t);
1703 tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
1704 tcg_temp_free_i64(t64);
1707 static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
1709 if (ctx->hflags & MIPS_HFLAG_F64) {
1710 tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
1712 gen_load_fpr32(ctx, t, reg | 1);
1716 static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
1718 if (ctx->hflags & MIPS_HFLAG_F64) {
1719 TCGv_i64 t64 = tcg_temp_new_i64();
1720 tcg_gen_extu_i32_i64(t64, t);
1721 tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
1722 tcg_temp_free_i64(t64);
1724 gen_store_fpr32(ctx, t, reg | 1);
1728 static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
1730 if (ctx->hflags & MIPS_HFLAG_F64) {
1731 tcg_gen_mov_i64(t, fpu_f64[reg]);
1733 tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
1737 static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
1739 if (ctx->hflags & MIPS_HFLAG_F64) {
1740 tcg_gen_mov_i64(fpu_f64[reg], t);
1743 tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
1744 t0 = tcg_temp_new_i64();
1745 tcg_gen_shri_i64(t0, t, 32);
1746 tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
1747 tcg_temp_free_i64(t0);
1751 static inline int get_fp_bit (int cc)
1759 /* Address computation */
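/*
 * On 64-bit targets, when 32-bit address wrapping is in effect
 * (MIPS_HFLAG_AWRAP), the computed address is sign-extended to 32 bits.
 */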
1760 static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
1762 tcg_gen_add_tl(ret, arg0, arg1);
1764 #if defined(TARGET_MIPS64)
1765 if (ctx->hflags & MIPS_HFLAG_AWRAP) {
1766 tcg_gen_ext32s_i64(ret, ret);
1771 static inline void gen_op_addr_addi(DisasContext *ctx, TCGv ret, TCGv base,
1774 tcg_gen_addi_tl(ret, base, ofs);
1776 #if defined(TARGET_MIPS64)
1777 if (ctx->hflags & MIPS_HFLAG_AWRAP) {
1778 tcg_gen_ext32s_i64(ret, ret);
1783 /* Address computation (translation time) */
1784 static target_long addr_add(DisasContext *ctx, target_long base,
1787 target_long sum = base + offset;
1789 #if defined(TARGET_MIPS64)
1790 if (ctx->hflags & MIPS_HFLAG_AWRAP) {
1797 /* Sign-extract the low 32-bits to a target_long. */
1798 static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
1800 #if defined(TARGET_MIPS64)
1801 tcg_gen_ext32s_i64(ret, arg);
1803 tcg_gen_extrl_i64_i32(ret, arg);
1807 /* Sign-extract the high 32-bits to a target_long. */
1808 static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
1810 #if defined(TARGET_MIPS64)
1811 tcg_gen_sari_i64(ret, arg, 32);
1813 tcg_gen_extrh_i64_i32(ret, arg);
1817 static inline void check_cp0_enabled(DisasContext *ctx)
1819 if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
1820 generate_exception_err(ctx, EXCP_CpU, 0);
1823 static inline void check_cp1_enabled(DisasContext *ctx)
1825 if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
1826 generate_exception_err(ctx, EXCP_CpU, 1);
1829 /* Verify that the processor is running with COP1X instructions enabled.
1830    This is associated with the nabla symbol in the MIPS32 and MIPS64 opcode tables. */
1833 static inline void check_cop1x(DisasContext *ctx)
1835 if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
1836 generate_exception_end(ctx, EXCP_RI);
1839 /* Verify that the processor is running with 64-bit floating-point
1840 operations enabled. */
1842 static inline void check_cp1_64bitmode(DisasContext *ctx)
1844 if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
1845 generate_exception_end(ctx, EXCP_RI);
1849 * Verify that a floating point register is valid; an operation is not defined
1850 * if bit 0 of any register specification is set and the FR bit in the
1851 * Status register equals zero, since the register numbers specify an
1852 * even-odd pair of adjacent coprocessor general registers. When the FR bit
1853 * in the Status register equals one, both even and odd register numbers
1854 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
1856 * Multiple 64 bit wide registers can be checked by calling
1857 * check_cp1_registers(freg1 | freg2 | ... | fregN);
1859 static inline void check_cp1_registers(DisasContext *ctx, int regs)
1861 if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
1862 generate_exception_end(ctx, EXCP_RI);
1865 /* Verify that the processor is running with DSP instructions enabled.
1866    This is enabled by the MX bit (bit 24) of the CP0 Status register.
1869 static inline void check_dsp(DisasContext *ctx)
1871 if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
1872 if (ctx->insn_flags & ASE_DSP) {
1873 generate_exception_end(ctx, EXCP_DSPDIS);
1875 generate_exception_end(ctx, EXCP_RI);
1880 static inline void check_dspr2(DisasContext *ctx)
1882 if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
1883 if (ctx->insn_flags & ASE_DSP) {
1884 generate_exception_end(ctx, EXCP_DSPDIS);
1886 generate_exception_end(ctx, EXCP_RI);
1891 /* This code generates a "reserved instruction" exception if the
1892 CPU does not support the instruction set corresponding to flags. */
1893 static inline void check_insn(DisasContext *ctx, int flags)
1895 if (unlikely(!(ctx->insn_flags & flags))) {
1896 generate_exception_end(ctx, EXCP_RI);
1900 /* This code generates a "reserved instruction" exception if the
1901    CPU has the corresponding flag set, which indicates that the instruction
1902    has been removed. */
1903 static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
1905 if (unlikely(ctx->insn_flags & flags)) {
1906 generate_exception_end(ctx, EXCP_RI);
1910 /* This code generates a "reserved instruction" exception if the
1911    CPU does not support the 64-bit paired-single (PS) floating point data type. */
1912 static inline void check_ps(DisasContext *ctx)
1914 if (unlikely(!ctx->ps)) {
1915 generate_exception(ctx, EXCP_RI);
1917 check_cp1_64bitmode(ctx);
1920 #ifdef TARGET_MIPS64
1921 /* This code generates a "reserved instruction" exception if 64-bit
1922 instructions are not enabled. */
1923 static inline void check_mips_64(DisasContext *ctx)
1925 if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
1926 generate_exception_end(ctx, EXCP_RI);
1930 #ifndef CONFIG_USER_ONLY
1931 static inline void check_mvh(DisasContext *ctx)
1933 if (unlikely(!ctx->mvh)) {
1934 generate_exception(ctx, EXCP_RI);
1940 * This code generates a "reserved instruction" exception if the
1941 * Config5 XNP bit is set.
1943 static inline void check_xnp(DisasContext *ctx)
1945 if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_XNP))) {
1946 generate_exception_end(ctx, EXCP_RI);
1951 * This code generates a "reserved instruction" exception if the
1952 * Config3 MT bit is NOT set.
1954 static inline void check_mt(DisasContext *ctx)
1956 if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
1957 generate_exception_end(ctx, EXCP_RI);
1961 #ifndef CONFIG_USER_ONLY
1963 * This code generates a "coprocessor unusable" exception if CP0 is not
1964 * available, and, if that is not the case, generates a "reserved instruction"
1965 * exception if the Config3 MT bit is NOT set. This is needed for availability
1966 * control of some of the MT ASE instructions.
1968 static inline void check_cp0_mt(DisasContext *ctx)
1970 if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0))) {
1971 generate_exception_err(ctx, EXCP_CpU, 0);
1973 if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
1974 generate_exception_err(ctx, EXCP_RI, 0);
1981 * This code generates a "reserved instruction" exception if the
1982 * Config5 NMS bit is set.
1984 static inline void check_nms(DisasContext *ctx)
1986 if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_NMS))) {
1987 generate_exception_end(ctx, EXCP_RI);
1992 /* Define small wrappers for gen_load_fpr* so that we have a uniform
1993    calling interface for 32- and 64-bit FPRs. No sense in changing
1994    all callers for gen_load_fpr32 when we need the CTX parameter for this one use. */
1996 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
1997 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
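/*
 * FOP_CONDS expands to gen_cmp_<fmt> / gen_cmpabs_<fmt>, which dispatch on
 * the 4-bit condition field n to the sixteen c.cond.<fmt> comparison
 * helpers, passing the target condition code cc.
 */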
1998 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
1999 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
2000 int ft, int fs, int cc) \
2002 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
2003 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
2012 check_cp1_registers(ctx, fs | ft); \
2020 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
2021 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
2023 case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
2024 case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
2025 case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
2026 case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
2027 case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
2028 case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
2029 case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
2030 case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
2031 case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
2032 case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
2033 case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
2034 case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
2035 case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
2036 case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
2037 case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
2038 case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
2041 tcg_temp_free_i##bits (fp0); \
2042 tcg_temp_free_i##bits (fp1); \
2045 FOP_CONDS(, 0, d, FMT_D, 64)
2046 FOP_CONDS(abs, 1, d, FMT_D, 64)
2047 FOP_CONDS(, 0, s, FMT_S, 32)
2048 FOP_CONDS(abs, 1, s, FMT_S, 32)
2049 FOP_CONDS(, 0, ps, FMT_PS, 64)
2050 FOP_CONDS(abs, 1, ps, FMT_PS, 64)
2053 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
2054 static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n, \
2055 int ft, int fs, int fd) \
2057 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
2058 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
2059 if (ifmt == FMT_D) { \
2060 check_cp1_registers(ctx, fs | ft | fd); \
2062 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
2063 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
2066 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
2069 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
2072 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
2075 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
2078 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
2081 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
2084 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
2087 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
2090 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
2093 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
2096 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
2099 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
2102 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
2105 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
2108 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
2111 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
2114 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2117 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2120 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2123 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2126 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2129 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2135 tcg_temp_free_i ## bits (fp0); \
2136 tcg_temp_free_i ## bits (fp1); \
2139 FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
2140 FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))
2142 #undef gen_ldcmp_fpr32
2143 #undef gen_ldcmp_fpr64
2145 /* load/store instructions. */
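/*
 * LL/SC emulation: in user mode, LL performs a plain load and records the
 * address and value in env->lladdr / env->llval; SC checks alignment
 * (raising AdES on a misaligned address), compares the address against
 * lladdr and, on a match, stashes the candidate value and raises EXCP_SC
 * so the store can be completed outside the TB, otherwise it clears rt to
 * signal failure.  In system mode both operations are delegated to helpers.
 */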
2146 #ifdef CONFIG_USER_ONLY
2147 #define OP_LD_ATOMIC(insn,fname) \
2148 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2149 DisasContext *ctx) \
2151 TCGv t0 = tcg_temp_new(); \
2152 tcg_gen_mov_tl(t0, arg1); \
2153 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
2154 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2155 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval)); \
2156 tcg_temp_free(t0); \
2159 #define OP_LD_ATOMIC(insn,fname) \
2160 static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx, \
2161 DisasContext *ctx) \
2163 gen_helper_1e1i(insn, ret, arg1, mem_idx); \
2166 OP_LD_ATOMIC(ll,ld32s);
2167 #if defined(TARGET_MIPS64)
2168 OP_LD_ATOMIC(lld,ld64);
2172 #ifdef CONFIG_USER_ONLY
2173 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2174 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2175 DisasContext *ctx) \
2177 TCGv t0 = tcg_temp_new(); \
2178 TCGLabel *l1 = gen_new_label(); \
2179 TCGLabel *l2 = gen_new_label(); \
2181 tcg_gen_andi_tl(t0, arg2, almask); \
2182 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
2183 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr)); \
2184 generate_exception(ctx, EXCP_AdES); \
2185 gen_set_label(l1); \
2186 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr)); \
2187 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
2188 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
2189 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg)); \
2190 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval)); \
2191 generate_exception_end(ctx, EXCP_SC); \
2192 gen_set_label(l2); \
2193 tcg_gen_movi_tl(t0, 0); \
2194 gen_store_gpr(t0, rt); \
2195 tcg_temp_free(t0); \
2198 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
2199 static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, int mem_idx, \
2200 DisasContext *ctx) \
2202 TCGv t0 = tcg_temp_new(); \
2203 gen_helper_1e2i(insn, t0, arg1, arg2, mem_idx); \
2204 gen_store_gpr(t0, rt); \
2205 tcg_temp_free(t0); \
2208 OP_ST_ATOMIC(sc,st32,ld32s,0x3);
2209 #if defined(TARGET_MIPS64)
2210 OP_ST_ATOMIC(scd,st64,ld64,0x7);
2214 static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
2215 int base, int offset)
2218 tcg_gen_movi_tl(addr, offset);
2219 } else if (offset == 0) {
2220 gen_load_gpr(addr, base);
2222 tcg_gen_movi_tl(addr, offset);
2223 gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
2227 static target_ulong pc_relative_pc (DisasContext *ctx)
2229 target_ulong pc = ctx->base.pc_next;
2231 if (ctx->hflags & MIPS_HFLAG_BMASK) {
2232 int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;
2237 pc &= ~(target_ulong)3;
2242 static void gen_ld(DisasContext *ctx, uint32_t opc,
2243 int rt, int base, int offset)
2246 int mem_idx = ctx->mem_idx;
2248 if (rt == 0 && ctx->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
2249 /* Loongson CPUs use a load to the zero register for prefetch.
2250    We emulate it as a NOP. On other CPUs we must perform the
2251    actual memory access. */
2255 t0 = tcg_temp_new();
2256 gen_base_offset_addr(ctx, t0, base, offset);
2259 #if defined(TARGET_MIPS64)
2261 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEUL |
2262 ctx->default_tcg_memop_mask);
2263 gen_store_gpr(t0, rt);
2266 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEQ |
2267 ctx->default_tcg_memop_mask);
2268 gen_store_gpr(t0, rt);
2272 op_ld_lld(t0, t0, mem_idx, ctx);
2273 gen_store_gpr(t0, rt);
2276 t1 = tcg_temp_new();
2277 /* Do a byte access to possibly trigger a page
2278 fault with the unaligned address. */
2279 tcg_gen_qemu_ld_tl(t1, t0, mem_idx, MO_UB);
2280 tcg_gen_andi_tl(t1, t0, 7);
2281 #ifndef TARGET_WORDS_BIGENDIAN
2282 tcg_gen_xori_tl(t1, t1, 7);
2284 tcg_gen_shli_tl(t1, t1, 3);
2285 tcg_gen_andi_tl(t0, t0, ~7);
2286 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEQ);
2287 tcg_gen_shl_tl(t0, t0, t1);
2288 t2 = tcg_const_tl(-1);
2289 tcg_gen_shl_tl(t2, t2, t1);
2290 gen_load_gpr(t1, rt);
2291 tcg_gen_andc_tl(t1, t1, t2);
2293 tcg_gen_or_tl(t0, t0, t1);
2295 gen_store_gpr(t0, rt);
2298 t1 = tcg_temp_new();
2299 /* Do a byte access to possibly trigger a page
2300 fault with the unaligned address. */
2301 tcg_gen_qemu_ld_tl(t1, t0, mem_idx, MO_UB);
2302 tcg_gen_andi_tl(t1, t0, 7);
2303 #ifdef TARGET_WORDS_BIGENDIAN
2304 tcg_gen_xori_tl(t1, t1, 7);
2306 tcg_gen_shli_tl(t1, t1, 3);
2307 tcg_gen_andi_tl(t0, t0, ~7);
2308 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEQ);
2309 tcg_gen_shr_tl(t0, t0, t1);
2310 tcg_gen_xori_tl(t1, t1, 63);
2311 t2 = tcg_const_tl(0xfffffffffffffffeull);
2312 tcg_gen_shl_tl(t2, t2, t1);
2313 gen_load_gpr(t1, rt);
2314 tcg_gen_and_tl(t1, t1, t2);
2316 tcg_gen_or_tl(t0, t0, t1);
2318 gen_store_gpr(t0, rt);
2321 t1 = tcg_const_tl(pc_relative_pc(ctx));
2322 gen_op_addr_add(ctx, t0, t0, t1);
2324 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEQ);
2325 gen_store_gpr(t0, rt);
2329 t1 = tcg_const_tl(pc_relative_pc(ctx));
2330 gen_op_addr_add(ctx, t0, t0, t1);
2332 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TESL);
2333 gen_store_gpr(t0, rt);
2336 mem_idx = MIPS_HFLAG_UM;
2339 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TESL |
2340 ctx->default_tcg_memop_mask);
2341 gen_store_gpr(t0, rt);
2344 mem_idx = MIPS_HFLAG_UM;
2347 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TESW |
2348 ctx->default_tcg_memop_mask);
2349 gen_store_gpr(t0, rt);
2352 mem_idx = MIPS_HFLAG_UM;
2355 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEUW |
2356 ctx->default_tcg_memop_mask);
2357 gen_store_gpr(t0, rt);
2360 mem_idx = MIPS_HFLAG_UM;
2363 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_SB);
2364 gen_store_gpr(t0, rt);
2367 mem_idx = MIPS_HFLAG_UM;
2370 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_UB);
2371 gen_store_gpr(t0, rt);
2374 mem_idx = MIPS_HFLAG_UM;
2377 t1 = tcg_temp_new();
2378 /* Do a byte access to possibly trigger a page
2379 fault with the unaligned address. */
2380 tcg_gen_qemu_ld_tl(t1, t0, mem_idx, MO_UB);
2381 tcg_gen_andi_tl(t1, t0, 3);
2382 #ifndef TARGET_WORDS_BIGENDIAN
2383 tcg_gen_xori_tl(t1, t1, 3);
2385 tcg_gen_shli_tl(t1, t1, 3);
2386 tcg_gen_andi_tl(t0, t0, ~3);
2387 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEUL);
2388 tcg_gen_shl_tl(t0, t0, t1);
2389 t2 = tcg_const_tl(-1);
2390 tcg_gen_shl_tl(t2, t2, t1);
2391 gen_load_gpr(t1, rt);
2392 tcg_gen_andc_tl(t1, t1, t2);
2394 tcg_gen_or_tl(t0, t0, t1);
2396 tcg_gen_ext32s_tl(t0, t0);
2397 gen_store_gpr(t0, rt);
2400 mem_idx = MIPS_HFLAG_UM;
2403 t1 = tcg_temp_new();
2404 /* Do a byte access to possibly trigger a page
2405 fault with the unaligned address. */
2406 tcg_gen_qemu_ld_tl(t1, t0, mem_idx, MO_UB);
2407 tcg_gen_andi_tl(t1, t0, 3);
2408 #ifdef TARGET_WORDS_BIGENDIAN
2409 tcg_gen_xori_tl(t1, t1, 3);
2411 tcg_gen_shli_tl(t1, t1, 3);
2412 tcg_gen_andi_tl(t0, t0, ~3);
2413 tcg_gen_qemu_ld_tl(t0, t0, mem_idx, MO_TEUL);
2414 tcg_gen_shr_tl(t0, t0, t1);
2415 tcg_gen_xori_tl(t1, t1, 31);
2416 t2 = tcg_const_tl(0xfffffffeull);
2417 tcg_gen_shl_tl(t2, t2, t1);
2418 gen_load_gpr(t1, rt);
2419 tcg_gen_and_tl(t1, t1, t2);
2421 tcg_gen_or_tl(t0, t0, t1);
2423 tcg_gen_ext32s_tl(t0, t0);
2424 gen_store_gpr(t0, rt);
2427 mem_idx = MIPS_HFLAG_UM;
2431 op_ld_ll(t0, t0, mem_idx, ctx);
2432 gen_store_gpr(t0, rt);
2438 static void gen_llwp(DisasContext *ctx, uint32_t base, int16_t offset,
2439 uint32_t reg1, uint32_t reg2)
2441 TCGv taddr = tcg_temp_new();
2442 TCGv_i64 tval = tcg_temp_new_i64();
2443 TCGv tmp1 = tcg_temp_new();
2444 TCGv tmp2 = tcg_temp_new();
2446 gen_base_offset_addr(ctx, taddr, base, offset);
2447 tcg_gen_qemu_ld64(tval, taddr, ctx->mem_idx);
2448 #ifdef TARGET_WORDS_BIGENDIAN
2449 tcg_gen_extr_i64_tl(tmp2, tmp1, tval);
2451 tcg_gen_extr_i64_tl(tmp1, tmp2, tval);
2453 gen_store_gpr(tmp1, reg1);
2454 tcg_temp_free(tmp1);
2455 gen_store_gpr(tmp2, reg2);
2456 tcg_temp_free(tmp2);
2457 tcg_gen_st_i64(tval, cpu_env, offsetof(CPUMIPSState, llval_wp));
2458 tcg_temp_free_i64(tval);
2459 tcg_gen_st_tl(taddr, cpu_env, offsetof(CPUMIPSState, lladdr));
2460 tcg_temp_free(taddr);
2464 static void gen_st (DisasContext *ctx, uint32_t opc, int rt,
2465 int base, int offset)
2467 TCGv t0 = tcg_temp_new();
2468 TCGv t1 = tcg_temp_new();
2469 int mem_idx = ctx->mem_idx;
2471 gen_base_offset_addr(ctx, t0, base, offset);
2472 gen_load_gpr(t1, rt);
2474 #if defined(TARGET_MIPS64)
2476 tcg_gen_qemu_st_tl(t1, t0, mem_idx, MO_TEQ |
2477 ctx->default_tcg_memop_mask);
2480 gen_helper_0e2i(sdl, t1, t0, mem_idx);
2483 gen_helper_0e2i(sdr, t1, t0, mem_idx);
2487 mem_idx = MIPS_HFLAG_UM;
2490 tcg_gen_qemu_st_tl(t1, t0, mem_idx, MO_TEUL |
2491 ctx->default_tcg_memop_mask);
2494 mem_idx = MIPS_HFLAG_UM;
2497 tcg_gen_qemu_st_tl(t1, t0, mem_idx, MO_TEUW |
2498 ctx->default_tcg_memop_mask);
2501 mem_idx = MIPS_HFLAG_UM;
2504 tcg_gen_qemu_st_tl(t1, t0, mem_idx, MO_8);
2507 mem_idx = MIPS_HFLAG_UM;
2510 gen_helper_0e2i(swl, t1, t0, mem_idx);
2513 mem_idx = MIPS_HFLAG_UM;
2516 gen_helper_0e2i(swr, t1, t0, mem_idx);
2524 /* Store conditional */
2525 static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
2526 int base, int16_t offset)
2529 int mem_idx = ctx->mem_idx;
2531 #ifdef CONFIG_USER_ONLY
2532 t0 = tcg_temp_local_new();
2533 t1 = tcg_temp_local_new();
2535 t0 = tcg_temp_new();
2536 t1 = tcg_temp_new();
2538 gen_base_offset_addr(ctx, t0, base, offset);
2539 gen_load_gpr(t1, rt);
2541 #if defined(TARGET_MIPS64)
2544 op_st_scd(t1, t0, rt, mem_idx, ctx);
2548 mem_idx = MIPS_HFLAG_UM;
2552 op_st_sc(t1, t0, rt, mem_idx, ctx);
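/*
 * SCWP: store-conditional word pair.  The address must still match lladdr;
 * the two GPRs are concatenated into a 64-bit value and committed with an
 * atomic cmpxchg against the remembered llval_wp.  reg1 receives 1 on
 * success and 0 on failure, and lladdr is invalidated either way.
 */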
2559 static void gen_scwp(DisasContext *ctx, uint32_t base, int16_t offset,
2560 uint32_t reg1, uint32_t reg2)
2562 TCGv taddr = tcg_temp_local_new();
2563 TCGv lladdr = tcg_temp_local_new();
2564 TCGv_i64 tval = tcg_temp_new_i64();
2565 TCGv_i64 llval = tcg_temp_new_i64();
2566 TCGv_i64 val = tcg_temp_new_i64();
2567 TCGv tmp1 = tcg_temp_new();
2568 TCGv tmp2 = tcg_temp_new();
2569 TCGLabel *lab_fail = gen_new_label();
2570 TCGLabel *lab_done = gen_new_label();
2572 gen_base_offset_addr(ctx, taddr, base, offset);
2574 tcg_gen_ld_tl(lladdr, cpu_env, offsetof(CPUMIPSState, lladdr));
2575 tcg_gen_brcond_tl(TCG_COND_NE, taddr, lladdr, lab_fail);
2577 gen_load_gpr(tmp1, reg1);
2578 gen_load_gpr(tmp2, reg2);
2580 #ifdef TARGET_WORDS_BIGENDIAN
2581 tcg_gen_concat_tl_i64(tval, tmp2, tmp1);
2583 tcg_gen_concat_tl_i64(tval, tmp1, tmp2);
2586 tcg_gen_ld_i64(llval, cpu_env, offsetof(CPUMIPSState, llval_wp));
2587 tcg_gen_atomic_cmpxchg_i64(val, taddr, llval, tval,
2588 ctx->mem_idx, MO_64);
2590 tcg_gen_movi_tl(cpu_gpr[reg1], 1);
2592 tcg_gen_brcond_i64(TCG_COND_EQ, val, llval, lab_done);
2594 gen_set_label(lab_fail);
2597 tcg_gen_movi_tl(cpu_gpr[reg1], 0);
2599 gen_set_label(lab_done);
2600 tcg_gen_movi_tl(lladdr, -1);
2601 tcg_gen_st_tl(lladdr, cpu_env, offsetof(CPUMIPSState, lladdr));
2604 /* Load and store */
2605 static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
2608     /* Don't do NOP if destination is zero: we must perform the actual memory access. */
2613 TCGv_i32 fp0 = tcg_temp_new_i32();
2614 tcg_gen_qemu_ld_i32(fp0, t0, ctx->mem_idx, MO_TESL |
2615 ctx->default_tcg_memop_mask);
2616 gen_store_fpr32(ctx, fp0, ft);
2617 tcg_temp_free_i32(fp0);
2622 TCGv_i32 fp0 = tcg_temp_new_i32();
2623 gen_load_fpr32(ctx, fp0, ft);
2624 tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL |
2625 ctx->default_tcg_memop_mask);
2626 tcg_temp_free_i32(fp0);
2631 TCGv_i64 fp0 = tcg_temp_new_i64();
2632 tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ |
2633 ctx->default_tcg_memop_mask);
2634 gen_store_fpr64(ctx, fp0, ft);
2635 tcg_temp_free_i64(fp0);
2640 TCGv_i64 fp0 = tcg_temp_new_i64();
2641 gen_load_fpr64(ctx, fp0, ft);
2642 tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ |
2643 ctx->default_tcg_memop_mask);
2644 tcg_temp_free_i64(fp0);
2648 MIPS_INVAL("flt_ldst");
2649 generate_exception_end(ctx, EXCP_RI);
2654 static void gen_cop1_ldst(DisasContext *ctx, uint32_t op, int rt,
2655 int rs, int16_t imm)
2657 TCGv t0 = tcg_temp_new();
2659 if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
2660 check_cp1_enabled(ctx);
2664 check_insn(ctx, ISA_MIPS2);
2667 gen_base_offset_addr(ctx, t0, rs, imm);
2668 gen_flt_ldst(ctx, op, rt, t0);
2671 generate_exception_err(ctx, EXCP_CpU, 1);
2676 /* Arithmetic with immediate operand */
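/*
 * For the trapping forms (ADDI, and DADDI on 64-bit targets) signed
 * overflow is detected without comparing the operands directly:
 * ((rs ^ ~imm) & (result ^ imm)) is negative exactly when rs and imm have
 * the same sign but the result's sign differs, in which case EXCP_OVERFLOW
 * is raised and the destination register is left unmodified.
 */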
2677 static void gen_arith_imm(DisasContext *ctx, uint32_t opc,
2678 int rt, int rs, int imm)
2680 target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
2682 if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
2683 /* If no destination, treat it as a NOP.
2684 For addi, we must generate the overflow exception when needed. */
2690 TCGv t0 = tcg_temp_local_new();
2691 TCGv t1 = tcg_temp_new();
2692 TCGv t2 = tcg_temp_new();
2693 TCGLabel *l1 = gen_new_label();
2695 gen_load_gpr(t1, rs);
2696 tcg_gen_addi_tl(t0, t1, uimm);
2697 tcg_gen_ext32s_tl(t0, t0);
2699 tcg_gen_xori_tl(t1, t1, ~uimm);
2700 tcg_gen_xori_tl(t2, t0, uimm);
2701 tcg_gen_and_tl(t1, t1, t2);
2703 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
2705 /* operands of same sign, result different sign */
2706 generate_exception(ctx, EXCP_OVERFLOW);
2708 tcg_gen_ext32s_tl(t0, t0);
2709 gen_store_gpr(t0, rt);
2715 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
2716 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
2718 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
2721 #if defined(TARGET_MIPS64)
2724 TCGv t0 = tcg_temp_local_new();
2725 TCGv t1 = tcg_temp_new();
2726 TCGv t2 = tcg_temp_new();
2727 TCGLabel *l1 = gen_new_label();
2729 gen_load_gpr(t1, rs);
2730 tcg_gen_addi_tl(t0, t1, uimm);
2732 tcg_gen_xori_tl(t1, t1, ~uimm);
2733 tcg_gen_xori_tl(t2, t0, uimm);
2734 tcg_gen_and_tl(t1, t1, t2);
2736 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
2738 /* operands of same sign, result different sign */
2739 generate_exception(ctx, EXCP_OVERFLOW);
2741 gen_store_gpr(t0, rt);
2747 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
2749 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
2756 /* Logic with immediate operand */
2757 static void gen_logic_imm(DisasContext *ctx, uint32_t opc,
2758 int rt, int rs, int16_t imm)
2763 /* If no destination, treat it as a NOP. */
2766 uimm = (uint16_t)imm;
2769 if (likely(rs != 0))
2770 tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
2772 tcg_gen_movi_tl(cpu_gpr[rt], 0);
2776 tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
2778 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
2781 if (likely(rs != 0))
2782 tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
2784 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
2787 if (rs != 0 && (ctx->insn_flags & ISA_MIPS32R6)) {
2789 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], imm << 16);
2790 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
2792 tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);
2801 /* Set on less than with immediate operand */
2802 static void gen_slt_imm(DisasContext *ctx, uint32_t opc,
2803 int rt, int rs, int16_t imm)
2805 target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
2809 /* If no destination, treat it as a NOP. */
2812 t0 = tcg_temp_new();
2813 gen_load_gpr(t0, rs);
2816 tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
2819 tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
2825 /* Shifts with immediate operand */
2826 static void gen_shift_imm(DisasContext *ctx, uint32_t opc,
2827 int rt, int rs, int16_t imm)
2829 target_ulong uimm = ((uint16_t)imm) & 0x1f;
2833 /* If no destination, treat it as a NOP. */
2837 t0 = tcg_temp_new();
2838 gen_load_gpr(t0, rs);
2841 tcg_gen_shli_tl(t0, t0, uimm);
2842 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
2845 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
2849 tcg_gen_ext32u_tl(t0, t0);
2850 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
2852 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
2857 TCGv_i32 t1 = tcg_temp_new_i32();
2859 tcg_gen_trunc_tl_i32(t1, t0);
2860 tcg_gen_rotri_i32(t1, t1, uimm);
2861 tcg_gen_ext_i32_tl(cpu_gpr[rt], t1);
2862 tcg_temp_free_i32(t1);
2864 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
2867 #if defined(TARGET_MIPS64)
2869 tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm);
2872 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
2875 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
2879 tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm);
2881 tcg_gen_mov_tl(cpu_gpr[rt], t0);
2885 tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm + 32);
2888 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm + 32);
2891 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
2894 tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm + 32);
2902 static void gen_arith(DisasContext *ctx, uint32_t opc,
2903 int rd, int rs, int rt)
2905 if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
2906 && opc != OPC_DADD && opc != OPC_DSUB) {
2907 /* If no destination, treat it as a NOP.
2908 For add & sub, we must generate the overflow exception when needed. */
2915 TCGv t0 = tcg_temp_local_new();
2916 TCGv t1 = tcg_temp_new();
2917 TCGv t2 = tcg_temp_new();
2918 TCGLabel *l1 = gen_new_label();
2920 gen_load_gpr(t1, rs);
2921 gen_load_gpr(t2, rt);
2922 tcg_gen_add_tl(t0, t1, t2);
2923 tcg_gen_ext32s_tl(t0, t0);
2924 tcg_gen_xor_tl(t1, t1, t2);
2925 tcg_gen_xor_tl(t2, t0, t2);
2926 tcg_gen_andc_tl(t1, t2, t1);
2928 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
2930 /* operands of same sign, result different sign */
2931 generate_exception(ctx, EXCP_OVERFLOW);
2933 gen_store_gpr(t0, rd);
2938 if (rs != 0 && rt != 0) {
2939 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
2940 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
2941 } else if (rs == 0 && rt != 0) {
2942 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
2943 } else if (rs != 0 && rt == 0) {
2944 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
2946 tcg_gen_movi_tl(cpu_gpr[rd], 0);
2951 TCGv t0 = tcg_temp_local_new();
2952 TCGv t1 = tcg_temp_new();
2953 TCGv t2 = tcg_temp_new();
2954 TCGLabel *l1 = gen_new_label();
2956 gen_load_gpr(t1, rs);
2957 gen_load_gpr(t2, rt);
2958 tcg_gen_sub_tl(t0, t1, t2);
2959 tcg_gen_ext32s_tl(t0, t0);
2960 tcg_gen_xor_tl(t2, t1, t2);
2961 tcg_gen_xor_tl(t1, t0, t1);
2962 tcg_gen_and_tl(t1, t1, t2);
2964 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
2966 /* operands of different sign, first operand and result different sign */
2967 generate_exception(ctx, EXCP_OVERFLOW);
2969 gen_store_gpr(t0, rd);
2974 if (rs != 0 && rt != 0) {
2975 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
2976 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
2977 } else if (rs == 0 && rt != 0) {
2978 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
2979 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
2980 } else if (rs != 0 && rt == 0) {
2981 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
2983 tcg_gen_movi_tl(cpu_gpr[rd], 0);
2986 #if defined(TARGET_MIPS64)
2989 TCGv t0 = tcg_temp_local_new();
2990 TCGv t1 = tcg_temp_new();
2991 TCGv t2 = tcg_temp_new();
2992 TCGLabel *l1 = gen_new_label();
2994 gen_load_gpr(t1, rs);
2995 gen_load_gpr(t2, rt);
2996 tcg_gen_add_tl(t0, t1, t2);
2997 tcg_gen_xor_tl(t1, t1, t2);
2998 tcg_gen_xor_tl(t2, t0, t2);
2999 tcg_gen_andc_tl(t1, t2, t1);
3001 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
3003 /* operands of same sign, result different sign */
3004 generate_exception(ctx, EXCP_OVERFLOW);
3006 gen_store_gpr(t0, rd);
3011 if (rs != 0 && rt != 0) {
3012 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
3013 } else if (rs == 0 && rt != 0) {
3014 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
3015 } else if (rs != 0 && rt == 0) {
3016 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
3018 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3023 TCGv t0 = tcg_temp_local_new();
3024 TCGv t1 = tcg_temp_new();
3025 TCGv t2 = tcg_temp_new();
3026 TCGLabel *l1 = gen_new_label();
3028 gen_load_gpr(t1, rs);
3029 gen_load_gpr(t2, rt);
3030 tcg_gen_sub_tl(t0, t1, t2);
3031 tcg_gen_xor_tl(t2, t1, t2);
3032 tcg_gen_xor_tl(t1, t0, t1);
3033 tcg_gen_and_tl(t1, t1, t2);
3035 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
3037 /* operands of different sign, first operand and result different sign */
3038 generate_exception(ctx, EXCP_OVERFLOW);
3040 gen_store_gpr(t0, rd);
3045 if (rs != 0 && rt != 0) {
3046 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
3047 } else if (rs == 0 && rt != 0) {
3048 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
3049 } else if (rs != 0 && rt == 0) {
3050 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
3052 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3057 if (likely(rs != 0 && rt != 0)) {
3058 tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
3059 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3061 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3067 /* Conditional move */
3068 static void gen_cond_move(DisasContext *ctx, uint32_t opc,
3069 int rd, int rs, int rt)
3074 /* If no destination, treat it as a NOP. */
3078 t0 = tcg_temp_new();
3079 gen_load_gpr(t0, rt);
3080 t1 = tcg_const_tl(0);
3081 t2 = tcg_temp_new();
3082 gen_load_gpr(t2, rs);
3085 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
3088 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
3091 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, t1);
3094 tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, t1);
3103 static void gen_logic(DisasContext *ctx, uint32_t opc,
3104 int rd, int rs, int rt)
3107 /* If no destination, treat it as a NOP. */
3113 if (likely(rs != 0 && rt != 0)) {
3114 tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
3116 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3120 if (rs != 0 && rt != 0) {
3121 tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
3122 } else if (rs == 0 && rt != 0) {
3123 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
3124 } else if (rs != 0 && rt == 0) {
3125 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
3127 tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));
3131 if (likely(rs != 0 && rt != 0)) {
3132 tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
3133 } else if (rs == 0 && rt != 0) {
3134 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
3135 } else if (rs != 0 && rt == 0) {
3136 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
3138 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3142 if (likely(rs != 0 && rt != 0)) {
3143 tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
3144 } else if (rs == 0 && rt != 0) {
3145 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
3146 } else if (rs != 0 && rt == 0) {
3147 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
3149 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3155 /* Set on less than */
3156 static void gen_slt(DisasContext *ctx, uint32_t opc,
3157 int rd, int rs, int rt)
3162 /* If no destination, treat it as a NOP. */
3166 t0 = tcg_temp_new();
3167 t1 = tcg_temp_new();
3168 gen_load_gpr(t0, rs);
3169 gen_load_gpr(t1, rt);
3172 tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
3175 tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
3183 static void gen_shift(DisasContext *ctx, uint32_t opc,
3184 int rd, int rs, int rt)
3189         /* If no destination, treat it as a NOP. */
3194 t0 = tcg_temp_new();
3195 t1 = tcg_temp_new();
3196 gen_load_gpr(t0, rs);
3197 gen_load_gpr(t1, rt);
3200 tcg_gen_andi_tl(t0, t0, 0x1f);
3201 tcg_gen_shl_tl(t0, t1, t0);
3202 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
3205 tcg_gen_andi_tl(t0, t0, 0x1f);
3206 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
3209 tcg_gen_ext32u_tl(t1, t1);
3210 tcg_gen_andi_tl(t0, t0, 0x1f);
3211 tcg_gen_shr_tl(t0, t1, t0);
3212 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
3216 TCGv_i32 t2 = tcg_temp_new_i32();
3217 TCGv_i32 t3 = tcg_temp_new_i32();
3219 tcg_gen_trunc_tl_i32(t2, t0);
3220 tcg_gen_trunc_tl_i32(t3, t1);
3221 tcg_gen_andi_i32(t2, t2, 0x1f);
3222 tcg_gen_rotr_i32(t2, t3, t2);
3223 tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
3224 tcg_temp_free_i32(t2);
3225 tcg_temp_free_i32(t3);
3228 #if defined(TARGET_MIPS64)
3230 tcg_gen_andi_tl(t0, t0, 0x3f);
3231 tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);
3234 tcg_gen_andi_tl(t0, t0, 0x3f);
3235 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
3238 tcg_gen_andi_tl(t0, t0, 0x3f);
3239 tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);
3242 tcg_gen_andi_tl(t0, t0, 0x3f);
3243 tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);
3251 /* Arithmetic on HI/LO registers */
3252 static void gen_HILO(DisasContext *ctx, uint32_t opc, int acc, int reg)
3254 if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
3265 #if defined(TARGET_MIPS64)
3267 tcg_gen_ext32s_tl(cpu_gpr[reg], cpu_HI[acc]);
3271 tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[acc]);
3275 #if defined(TARGET_MIPS64)
3277 tcg_gen_ext32s_tl(cpu_gpr[reg], cpu_LO[acc]);
3281 tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[acc]);
3286 #if defined(TARGET_MIPS64)
3288 tcg_gen_ext32s_tl(cpu_HI[acc], cpu_gpr[reg]);
3292 tcg_gen_mov_tl(cpu_HI[acc], cpu_gpr[reg]);
3295 tcg_gen_movi_tl(cpu_HI[acc], 0);
3300 #if defined(TARGET_MIPS64)
3302 tcg_gen_ext32s_tl(cpu_LO[acc], cpu_gpr[reg]);
3306 tcg_gen_mov_tl(cpu_LO[acc], cpu_gpr[reg]);
3309 tcg_gen_movi_tl(cpu_LO[acc], 0);
3315 static inline void gen_r6_ld(target_long addr, int reg, int memidx,
3318 TCGv t0 = tcg_const_tl(addr);
3319 tcg_gen_qemu_ld_tl(t0, t0, memidx, memop);
3320 gen_store_gpr(t0, reg);
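/*
 * R6 PC-relative operations (ADDIUPC, LWPC/LWUPC, AUIPC, ALUIPC, LDPC):
 * the offset is sign-extracted from the instruction word and scaled by the
 * access size; ALUIPC additionally clears the low 16 bits of the result,
 * and LDPC aligns the base PC down to 8 bytes.
 */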
3324 static inline void gen_pcrel(DisasContext *ctx, int opc, target_ulong pc,
3330 switch (MASK_OPC_PCREL_TOP2BITS(opc)) {
3333 offset = sextract32(ctx->opcode << 2, 0, 21);
3334 addr = addr_add(ctx, pc, offset);
3335 tcg_gen_movi_tl(cpu_gpr[rs], addr);
3339 offset = sextract32(ctx->opcode << 2, 0, 21);
3340 addr = addr_add(ctx, pc, offset);
3341 gen_r6_ld(addr, rs, ctx->mem_idx, MO_TESL);
3343 #if defined(TARGET_MIPS64)
3346 offset = sextract32(ctx->opcode << 2, 0, 21);
3347 addr = addr_add(ctx, pc, offset);
3348 gen_r6_ld(addr, rs, ctx->mem_idx, MO_TEUL);
3352 switch (MASK_OPC_PCREL_TOP5BITS(opc)) {
3355 offset = sextract32(ctx->opcode, 0, 16) << 16;
3356 addr = addr_add(ctx, pc, offset);
3357 tcg_gen_movi_tl(cpu_gpr[rs], addr);
3362 offset = sextract32(ctx->opcode, 0, 16) << 16;
3363 addr = ~0xFFFF & addr_add(ctx, pc, offset);
3364 tcg_gen_movi_tl(cpu_gpr[rs], addr);
3367 #if defined(TARGET_MIPS64)
3368 case R6_OPC_LDPC: /* bits 16 and 17 are part of immediate */
3369 case R6_OPC_LDPC + (1 << 16):
3370 case R6_OPC_LDPC + (2 << 16):
3371 case R6_OPC_LDPC + (3 << 16):
3373 offset = sextract32(ctx->opcode << 3, 0, 21);
3374 addr = addr_add(ctx, (pc & ~0x7), offset);
3375 gen_r6_ld(addr, rs, ctx->mem_idx, MO_TEQ);
3379 MIPS_INVAL("OPC_PCREL");
3380 generate_exception_end(ctx, EXCP_RI);
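/*
 * R6 MUL/MUH/DIV/MOD family: results are written to a GPR rather than to
 * HI/LO.  For the divisions, a zero divisor and the INT_MIN / -1 case
 * (both architecturally UNPREDICTABLE) are rewritten to a safe divisor
 * first, so the TCG division op never sees host-level undefined behaviour.
 */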
3387 static void gen_r6_muldiv(DisasContext *ctx, int opc, int rd, int rs, int rt)
3396 t0 = tcg_temp_new();
3397 t1 = tcg_temp_new();
3399 gen_load_gpr(t0, rs);
3400 gen_load_gpr(t1, rt);
3405 TCGv t2 = tcg_temp_new();
3406 TCGv t3 = tcg_temp_new();
3407 tcg_gen_ext32s_tl(t0, t0);
3408 tcg_gen_ext32s_tl(t1, t1);
3409 tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, INT_MIN);
3410 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1);
3411 tcg_gen_and_tl(t2, t2, t3);
3412 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
3413 tcg_gen_or_tl(t2, t2, t3);
3414 tcg_gen_movi_tl(t3, 0);
3415 tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
3416 tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
3417 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3424 TCGv t2 = tcg_temp_new();
3425 TCGv t3 = tcg_temp_new();
3426 tcg_gen_ext32s_tl(t0, t0);
3427 tcg_gen_ext32s_tl(t1, t1);
3428 tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, INT_MIN);
3429 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1);
3430 tcg_gen_and_tl(t2, t2, t3);
3431 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
3432 tcg_gen_or_tl(t2, t2, t3);
3433 tcg_gen_movi_tl(t3, 0);
3434 tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
3435 tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
3436 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3443 TCGv t2 = tcg_const_tl(0);
3444 TCGv t3 = tcg_const_tl(1);
3445 tcg_gen_ext32u_tl(t0, t0);
3446 tcg_gen_ext32u_tl(t1, t1);
3447 tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
3448 tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
3449 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3456 TCGv t2 = tcg_const_tl(0);
3457 TCGv t3 = tcg_const_tl(1);
3458 tcg_gen_ext32u_tl(t0, t0);
3459 tcg_gen_ext32u_tl(t1, t1);
3460 tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
3461 tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
3462 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3469 TCGv_i32 t2 = tcg_temp_new_i32();
3470 TCGv_i32 t3 = tcg_temp_new_i32();
3471 tcg_gen_trunc_tl_i32(t2, t0);
3472 tcg_gen_trunc_tl_i32(t3, t1);
3473 tcg_gen_mul_i32(t2, t2, t3);
3474 tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
3475 tcg_temp_free_i32(t2);
3476 tcg_temp_free_i32(t3);
3481 TCGv_i32 t2 = tcg_temp_new_i32();
3482 TCGv_i32 t3 = tcg_temp_new_i32();
3483 tcg_gen_trunc_tl_i32(t2, t0);
3484 tcg_gen_trunc_tl_i32(t3, t1);
3485 tcg_gen_muls2_i32(t2, t3, t2, t3);
3486 tcg_gen_ext_i32_tl(cpu_gpr[rd], t3);
3487 tcg_temp_free_i32(t2);
3488 tcg_temp_free_i32(t3);
3493 TCGv_i32 t2 = tcg_temp_new_i32();
3494 TCGv_i32 t3 = tcg_temp_new_i32();
3495 tcg_gen_trunc_tl_i32(t2, t0);
3496 tcg_gen_trunc_tl_i32(t3, t1);
3497 tcg_gen_mul_i32(t2, t2, t3);
3498 tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
3499 tcg_temp_free_i32(t2);
3500 tcg_temp_free_i32(t3);
3505 TCGv_i32 t2 = tcg_temp_new_i32();
3506 TCGv_i32 t3 = tcg_temp_new_i32();
3507 tcg_gen_trunc_tl_i32(t2, t0);
3508 tcg_gen_trunc_tl_i32(t3, t1);
3509 tcg_gen_mulu2_i32(t2, t3, t2, t3);
3510 tcg_gen_ext_i32_tl(cpu_gpr[rd], t3);
3511 tcg_temp_free_i32(t2);
3512 tcg_temp_free_i32(t3);
3515 #if defined(TARGET_MIPS64)
3518 TCGv t2 = tcg_temp_new();
3519 TCGv t3 = tcg_temp_new();
3520 tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, -1LL << 63);
3521 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1LL);
3522 tcg_gen_and_tl(t2, t2, t3);
3523 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
3524 tcg_gen_or_tl(t2, t2, t3);
3525 tcg_gen_movi_tl(t3, 0);
3526 tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
3527 tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
3534 TCGv t2 = tcg_temp_new();
3535 TCGv t3 = tcg_temp_new();
3536 tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, -1LL << 63);
3537 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1LL);
3538 tcg_gen_and_tl(t2, t2, t3);
3539 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
3540 tcg_gen_or_tl(t2, t2, t3);
3541 tcg_gen_movi_tl(t3, 0);
3542 tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
3543 tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
3550 TCGv t2 = tcg_const_tl(0);
3551 TCGv t3 = tcg_const_tl(1);
3552 tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
3553 tcg_gen_divu_i64(cpu_gpr[rd], t0, t1);
3560 TCGv t2 = tcg_const_tl(0);
3561 TCGv t3 = tcg_const_tl(1);
3562 tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
3563 tcg_gen_remu_i64(cpu_gpr[rd], t0, t1);
3569 tcg_gen_mul_i64(cpu_gpr[rd], t0, t1);
3573 TCGv t2 = tcg_temp_new();
3574 tcg_gen_muls2_i64(t2, cpu_gpr[rd], t0, t1);
3579 tcg_gen_mul_i64(cpu_gpr[rd], t0, t1);
3583 TCGv t2 = tcg_temp_new();
3584 tcg_gen_mulu2_i64(t2, cpu_gpr[rd], t0, t1);
3590 MIPS_INVAL("r6 mul/div");
3591 generate_exception_end(ctx, EXCP_RI);
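/*
 * Pre-R6 multiply/divide and multiply-accumulate: results are written to
 * the HI/LO pair of accumulator 'acc'.  The same zero-divisor and
 * INT_MIN / -1 guards as above are applied before the TCG division ops.
 */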
3599 static void gen_muldiv(DisasContext *ctx, uint32_t opc,
3600 int acc, int rs, int rt)
3604 t0 = tcg_temp_new();
3605 t1 = tcg_temp_new();
3607 gen_load_gpr(t0, rs);
3608 gen_load_gpr(t1, rt);
3617 TCGv t2 = tcg_temp_new();
3618 TCGv t3 = tcg_temp_new();
3619 tcg_gen_ext32s_tl(t0, t0);
3620 tcg_gen_ext32s_tl(t1, t1);
3621 tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, INT_MIN);
3622 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1);
3623 tcg_gen_and_tl(t2, t2, t3);
3624 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
3625 tcg_gen_or_tl(t2, t2, t3);
3626 tcg_gen_movi_tl(t3, 0);
3627 tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
3628 tcg_gen_div_tl(cpu_LO[acc], t0, t1);
3629 tcg_gen_rem_tl(cpu_HI[acc], t0, t1);
3630 tcg_gen_ext32s_tl(cpu_LO[acc], cpu_LO[acc]);
3631 tcg_gen_ext32s_tl(cpu_HI[acc], cpu_HI[acc]);
3638 TCGv t2 = tcg_const_tl(0);
3639 TCGv t3 = tcg_const_tl(1);
3640 tcg_gen_ext32u_tl(t0, t0);
3641 tcg_gen_ext32u_tl(t1, t1);
3642 tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
3643 tcg_gen_divu_tl(cpu_LO[acc], t0, t1);
3644 tcg_gen_remu_tl(cpu_HI[acc], t0, t1);
3645 tcg_gen_ext32s_tl(cpu_LO[acc], cpu_LO[acc]);
3646 tcg_gen_ext32s_tl(cpu_HI[acc], cpu_HI[acc]);
3653 TCGv_i32 t2 = tcg_temp_new_i32();
3654 TCGv_i32 t3 = tcg_temp_new_i32();
3655 tcg_gen_trunc_tl_i32(t2, t0);
3656 tcg_gen_trunc_tl_i32(t3, t1);
3657 tcg_gen_muls2_i32(t2, t3, t2, t3);
3658 tcg_gen_ext_i32_tl(cpu_LO[acc], t2);
3659 tcg_gen_ext_i32_tl(cpu_HI[acc], t3);
3660 tcg_temp_free_i32(t2);
3661 tcg_temp_free_i32(t3);
3666 TCGv_i32 t2 = tcg_temp_new_i32();
3667 TCGv_i32 t3 = tcg_temp_new_i32();
3668 tcg_gen_trunc_tl_i32(t2, t0);
3669 tcg_gen_trunc_tl_i32(t3, t1);
3670 tcg_gen_mulu2_i32(t2, t3, t2, t3);
3671 tcg_gen_ext_i32_tl(cpu_LO[acc], t2);
3672 tcg_gen_ext_i32_tl(cpu_HI[acc], t3);
3673 tcg_temp_free_i32(t2);
3674 tcg_temp_free_i32(t3);
3677 #if defined(TARGET_MIPS64)
3680 TCGv t2 = tcg_temp_new();
3681 TCGv t3 = tcg_temp_new();
3682 tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, -1LL << 63);
3683 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1LL);
3684 tcg_gen_and_tl(t2, t2, t3);
3685 tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
3686 tcg_gen_or_tl(t2, t2, t3);
3687 tcg_gen_movi_tl(t3, 0);
3688 tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
3689 tcg_gen_div_tl(cpu_LO[acc], t0, t1);
3690 tcg_gen_rem_tl(cpu_HI[acc], t0, t1);
3697 TCGv t2 = tcg_const_tl(0);
3698 TCGv t3 = tcg_const_tl(1);
3699 tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
3700 tcg_gen_divu_i64(cpu_LO[acc], t0, t1);
3701 tcg_gen_remu_i64(cpu_HI[acc], t0, t1);
3707 tcg_gen_muls2_i64(cpu_LO[acc], cpu_HI[acc], t0, t1);
3710 tcg_gen_mulu2_i64(cpu_LO[acc], cpu_HI[acc], t0, t1);
3715 TCGv_i64 t2 = tcg_temp_new_i64();
3716 TCGv_i64 t3 = tcg_temp_new_i64();
3718 tcg_gen_ext_tl_i64(t2, t0);
3719 tcg_gen_ext_tl_i64(t3, t1);
3720 tcg_gen_mul_i64(t2, t2, t3);
3721 tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
3722 tcg_gen_add_i64(t2, t2, t3);
3723 tcg_temp_free_i64(t3);
3724 gen_move_low32(cpu_LO[acc], t2);
3725 gen_move_high32(cpu_HI[acc], t2);
3726 tcg_temp_free_i64(t2);
3731 TCGv_i64 t2 = tcg_temp_new_i64();
3732 TCGv_i64 t3 = tcg_temp_new_i64();
3734 tcg_gen_ext32u_tl(t0, t0);
3735 tcg_gen_ext32u_tl(t1, t1);
3736 tcg_gen_extu_tl_i64(t2, t0);
3737 tcg_gen_extu_tl_i64(t3, t1);
3738 tcg_gen_mul_i64(t2, t2, t3);
3739 tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
3740 tcg_gen_add_i64(t2, t2, t3);
3741 tcg_temp_free_i64(t3);
3742 gen_move_low32(cpu_LO[acc], t2);
3743 gen_move_high32(cpu_HI[acc], t2);
3744 tcg_temp_free_i64(t2);
3749 TCGv_i64 t2 = tcg_temp_new_i64();
3750 TCGv_i64 t3 = tcg_temp_new_i64();
3752 tcg_gen_ext_tl_i64(t2, t0);
3753 tcg_gen_ext_tl_i64(t3, t1);
3754 tcg_gen_mul_i64(t2, t2, t3);
3755 tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
3756 tcg_gen_sub_i64(t2, t3, t2);
3757 tcg_temp_free_i64(t3);
3758 gen_move_low32(cpu_LO[acc], t2);
3759 gen_move_high32(cpu_HI[acc], t2);
3760 tcg_temp_free_i64(t2);
3765 TCGv_i64 t2 = tcg_temp_new_i64();
3766 TCGv_i64 t3 = tcg_temp_new_i64();
3768 tcg_gen_ext32u_tl(t0, t0);
3769 tcg_gen_ext32u_tl(t1, t1);
3770 tcg_gen_extu_tl_i64(t2, t0);
3771 tcg_gen_extu_tl_i64(t3, t1);
3772 tcg_gen_mul_i64(t2, t2, t3);
3773 tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
3774 tcg_gen_sub_i64(t2, t3, t2);
3775 tcg_temp_free_i64(t3);
3776 gen_move_low32(cpu_LO[acc], t2);
3777 gen_move_high32(cpu_HI[acc], t2);
3778 tcg_temp_free_i64(t2);
3782 MIPS_INVAL("mul/div");
3783 generate_exception_end(ctx, EXCP_RI);
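/*
 * NEC VR54xx multiply / multiply-accumulate variants: each form is handled
 * by a dedicated helper and the returned value is written back to rd.
 */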
3791 static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
3792 int rd, int rs, int rt)
3794 TCGv t0 = tcg_temp_new();
3795 TCGv t1 = tcg_temp_new();
3797 gen_load_gpr(t0, rs);
3798 gen_load_gpr(t1, rt);
3801 case OPC_VR54XX_MULS:
3802 gen_helper_muls(t0, cpu_env, t0, t1);
3804 case OPC_VR54XX_MULSU:
3805 gen_helper_mulsu(t0, cpu_env, t0, t1);
3807 case OPC_VR54XX_MACC:
3808 gen_helper_macc(t0, cpu_env, t0, t1);
3810 case OPC_VR54XX_MACCU:
3811 gen_helper_maccu(t0, cpu_env, t0, t1);
3813 case OPC_VR54XX_MSAC:
3814 gen_helper_msac(t0, cpu_env, t0, t1);
3816 case OPC_VR54XX_MSACU:
3817 gen_helper_msacu(t0, cpu_env, t0, t1);
3819 case OPC_VR54XX_MULHI:
3820 gen_helper_mulhi(t0, cpu_env, t0, t1);
3822 case OPC_VR54XX_MULHIU:
3823 gen_helper_mulhiu(t0, cpu_env, t0, t1);
3825 case OPC_VR54XX_MULSHI:
3826 gen_helper_mulshi(t0, cpu_env, t0, t1);
3828 case OPC_VR54XX_MULSHIU:
3829 gen_helper_mulshiu(t0, cpu_env, t0, t1);
3831 case OPC_VR54XX_MACCHI:
3832 gen_helper_macchi(t0, cpu_env, t0, t1);
3834 case OPC_VR54XX_MACCHIU:
3835 gen_helper_macchiu(t0, cpu_env, t0, t1);
3837 case OPC_VR54XX_MSACHI:
3838 gen_helper_msachi(t0, cpu_env, t0, t1);
3840 case OPC_VR54XX_MSACHIU:
3841 gen_helper_msachiu(t0, cpu_env, t0, t1);
3844 MIPS_INVAL("mul vr54xx");
3845 generate_exception_end(ctx, EXCP_RI);
3848 gen_store_gpr(t0, rd);
3855 static void gen_cl (DisasContext *ctx, uint32_t opc,
3865 gen_load_gpr(t0, rs);
3870 #if defined(TARGET_MIPS64)
3874 tcg_gen_not_tl(t0, t0);
3883 tcg_gen_ext32u_tl(t0, t0);
3884 tcg_gen_clzi_tl(t0, t0, TARGET_LONG_BITS);
3885 tcg_gen_subi_tl(t0, t0, TARGET_LONG_BITS - 32);
3887 #if defined(TARGET_MIPS64)
3892 tcg_gen_clzi_i64(t0, t0, 64);
3898 /* Godson integer instructions */
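/*
 * These Loongson-2E/2F forms write their result directly to a GPR instead
 * of HI/LO.  Division by zero yields 0, and the signed INT_MIN / -1 case
 * yields the dividend for DIV and 0 for MOD, all handled with explicit
 * branches rather than a trap.
 */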
3899 static void gen_loongson_integer(DisasContext *ctx, uint32_t opc,
3900 int rd, int rs, int rt)
3912 case OPC_MULTU_G_2E:
3913 case OPC_MULTU_G_2F:
3914 #if defined(TARGET_MIPS64)
3915 case OPC_DMULT_G_2E:
3916 case OPC_DMULT_G_2F:
3917 case OPC_DMULTU_G_2E:
3918 case OPC_DMULTU_G_2F:
3920 t0 = tcg_temp_new();
3921 t1 = tcg_temp_new();
3924 t0 = tcg_temp_local_new();
3925 t1 = tcg_temp_local_new();
3929 gen_load_gpr(t0, rs);
3930 gen_load_gpr(t1, rt);
3935 tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
3936 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3938 case OPC_MULTU_G_2E:
3939 case OPC_MULTU_G_2F:
3940 tcg_gen_ext32u_tl(t0, t0);
3941 tcg_gen_ext32u_tl(t1, t1);
3942 tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
3943 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3948 TCGLabel *l1 = gen_new_label();
3949 TCGLabel *l2 = gen_new_label();
3950 TCGLabel *l3 = gen_new_label();
3951 tcg_gen_ext32s_tl(t0, t0);
3952 tcg_gen_ext32s_tl(t1, t1);
3953 tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
3954 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3957 tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
3958 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
3959 tcg_gen_mov_tl(cpu_gpr[rd], t0);
3962 tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
3963 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3970 TCGLabel *l1 = gen_new_label();
3971 TCGLabel *l2 = gen_new_label();
3972 tcg_gen_ext32u_tl(t0, t0);
3973 tcg_gen_ext32u_tl(t1, t1);
3974 tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
3975 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3978 tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
3979 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
3986 TCGLabel *l1 = gen_new_label();
3987 TCGLabel *l2 = gen_new_label();
3988 TCGLabel *l3 = gen_new_label();
3989 tcg_gen_ext32u_tl(t0, t0);
3990 tcg_gen_ext32u_tl(t1, t1);
3991 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
3992 tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
3993 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
3995 tcg_gen_movi_tl(cpu_gpr[rd], 0);
3998 tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
3999 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
4006 TCGLabel *l1 = gen_new_label();
4007 TCGLabel *l2 = gen_new_label();
4008 tcg_gen_ext32u_tl(t0, t0);
4009 tcg_gen_ext32u_tl(t1, t1);
4010 tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
4011 tcg_gen_movi_tl(cpu_gpr[rd], 0);
4014 tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
4015 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
4019 #if defined(TARGET_MIPS64)
4020 case OPC_DMULT_G_2E:
4021 case OPC_DMULT_G_2F:
4022 tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
4024 case OPC_DMULTU_G_2E:
4025 case OPC_DMULTU_G_2F:
4026 tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
4031 TCGLabel *l1 = gen_new_label();
4032 TCGLabel *l2 = gen_new_label();
4033 TCGLabel *l3 = gen_new_label();
4034 tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
4035 tcg_gen_movi_tl(cpu_gpr[rd], 0);
4038 tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
4039 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
4040 tcg_gen_mov_tl(cpu_gpr[rd], t0);
4043 tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
4047 case OPC_DDIVU_G_2E:
4048 case OPC_DDIVU_G_2F:
4050 TCGLabel *l1 = gen_new_label();
4051 TCGLabel *l2 = gen_new_label();
4052 tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
4053 tcg_gen_movi_tl(cpu_gpr[rd], 0);
4056 tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
4063 TCGLabel *l1 = gen_new_label();
4064 TCGLabel *l2 = gen_new_label();
4065 TCGLabel *l3 = gen_new_label();
4066 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
4067 tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
4068 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
4070 tcg_gen_movi_tl(cpu_gpr[rd], 0);
4073 tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
4077 case OPC_DMODU_G_2E:
4078 case OPC_DMODU_G_2F:
4080 TCGLabel *l1 = gen_new_label();
4081 TCGLabel *l2 = gen_new_label();
4082 tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
4083 tcg_gen_movi_tl(cpu_gpr[rd], 0);
4086 tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
4097 /* Loongson multimedia instructions */
4098 static void gen_loongson_multimedia(DisasContext *ctx, int rd, int rs, int rt)
4100 uint32_t opc, shift_max;
4103 opc = MASK_LMI(ctx->opcode);
4109 t0 = tcg_temp_local_new_i64();
4110 t1 = tcg_temp_local_new_i64();
4113 t0 = tcg_temp_new_i64();
4114 t1 = tcg_temp_new_i64();
4118 check_cp1_enabled(ctx);
4119 gen_load_fpr64(ctx, t0, rs);
4120 gen_load_fpr64(ctx, t1, rt);
4122 #define LMI_HELPER(UP, LO) \
4123 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
4124 #define LMI_HELPER_1(UP, LO) \
4125 case OPC_##UP: gen_helper_##LO(t0, t0); break
4126 #define LMI_DIRECT(UP, LO, OP) \
4127 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
4130 LMI_HELPER(PADDSH, paddsh);
4131 LMI_HELPER(PADDUSH, paddush);
4132 LMI_HELPER(PADDH, paddh);
4133 LMI_HELPER(PADDW, paddw);
4134 LMI_HELPER(PADDSB, paddsb);
4135 LMI_HELPER(PADDUSB, paddusb);
4136 LMI_HELPER(PADDB, paddb);
4138 LMI_HELPER(PSUBSH, psubsh);
4139 LMI_HELPER(PSUBUSH, psubush);
4140 LMI_HELPER(PSUBH, psubh);
4141 LMI_HELPER(PSUBW, psubw);
4142 LMI_HELPER(PSUBSB, psubsb);
4143 LMI_HELPER(PSUBUSB, psubusb);
4144 LMI_HELPER(PSUBB, psubb);
4146 LMI_HELPER(PSHUFH, pshufh);
4147 LMI_HELPER(PACKSSWH, packsswh);
4148 LMI_HELPER(PACKSSHB, packsshb);
4149 LMI_HELPER(PACKUSHB, packushb);
4151 LMI_HELPER(PUNPCKLHW, punpcklhw);
4152 LMI_HELPER(PUNPCKHHW, punpckhhw);
4153 LMI_HELPER(PUNPCKLBH, punpcklbh);
4154 LMI_HELPER(PUNPCKHBH, punpckhbh);
4155 LMI_HELPER(PUNPCKLWD, punpcklwd);
4156 LMI_HELPER(PUNPCKHWD, punpckhwd);
4158 LMI_HELPER(PAVGH, pavgh);
4159 LMI_HELPER(PAVGB, pavgb);
4160 LMI_HELPER(PMAXSH, pmaxsh);
4161 LMI_HELPER(PMINSH, pminsh);
4162 LMI_HELPER(PMAXUB, pmaxub);
4163 LMI_HELPER(PMINUB, pminub);
4165 LMI_HELPER(PCMPEQW, pcmpeqw);
4166 LMI_HELPER(PCMPGTW, pcmpgtw);
4167 LMI_HELPER(PCMPEQH, pcmpeqh);
4168 LMI_HELPER(PCMPGTH, pcmpgth);
4169 LMI_HELPER(PCMPEQB, pcmpeqb);
4170 LMI_HELPER(PCMPGTB, pcmpgtb);
4172 LMI_HELPER(PSLLW, psllw);
4173 LMI_HELPER(PSLLH, psllh);
4174 LMI_HELPER(PSRLW, psrlw);
4175 LMI_HELPER(PSRLH, psrlh);
4176 LMI_HELPER(PSRAW, psraw);
4177 LMI_HELPER(PSRAH, psrah);
4179 LMI_HELPER(PMULLH, pmullh);
4180 LMI_HELPER(PMULHH, pmulhh);
4181 LMI_HELPER(PMULHUH, pmulhuh);
4182 LMI_HELPER(PMADDHW, pmaddhw);
4184 LMI_HELPER(PASUBUB, pasubub);
4185 LMI_HELPER_1(BIADD, biadd);
4186 LMI_HELPER_1(PMOVMSKB, pmovmskb);
4188 LMI_DIRECT(PADDD, paddd, add);
4189 LMI_DIRECT(PSUBD, psubd, sub);
4190 LMI_DIRECT(XOR_CP2, xor, xor);
4191 LMI_DIRECT(NOR_CP2, nor, nor);
4192 LMI_DIRECT(AND_CP2, and, and);
4193 LMI_DIRECT(OR_CP2, or, or);
4196 tcg_gen_andc_i64(t0, t1, t0);
4200 tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
4203 tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
4206 tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
4209 tcg_gen_deposit_i64(t0, t0, t1, 48, 16);
4213 tcg_gen_andi_i64(t1, t1, 3);
4214 tcg_gen_shli_i64(t1, t1, 4);
4215 tcg_gen_shr_i64(t0, t0, t1);
4216 tcg_gen_ext16u_i64(t0, t0);
4220 tcg_gen_add_i64(t0, t0, t1);
4221 tcg_gen_ext32s_i64(t0, t0);
4224 tcg_gen_sub_i64(t0, t0, t1);
4225 tcg_gen_ext32s_i64(t0, t0);
4247 /* Make sure shift count isn't TCG undefined behaviour. */
4248 tcg_gen_andi_i64(t1, t1, shift_max - 1);
4253 tcg_gen_shl_i64(t0, t0, t1);
4257 /* Since SRA is UndefinedResult without sign-extended inputs,
4258 we can treat SRA and DSRA the same. */
4259 tcg_gen_sar_i64(t0, t0, t1);
4262 /* We want to shift in zeros for SRL; zero-extend first. */
4263 tcg_gen_ext32u_i64(t0, t0);
4266 tcg_gen_shr_i64(t0, t0, t1);
4270 if (shift_max == 32) {
4271 tcg_gen_ext32s_i64(t0, t0);
4274 /* Shifts larger than MAX produce zero. */
4275 tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
4276 tcg_gen_neg_i64(t1, t1);
4277 tcg_gen_and_i64(t0, t0, t1);
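/*
 * Illustrative sketch (hypothetical helper, not part of the translator)
 * of the branchless masking above: setcond yields 0 or 1, negating that
 * gives an all-zeros or all-ones mask, and the AND clears the result
 * whenever the shift count is >= the element width.
 *
 *     static uint64_t mask_oversized_shift(uint64_t val, uint64_t shift,
 *                                          unsigned shift_max)
 *     {
 *         uint64_t in_range = (shift < shift_max);   // 0 or 1
 *         return val & -in_range;                    // val, or 0 if too large
 *     }
 */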
4283 TCGv_i64 t2 = tcg_temp_new_i64();
4284 TCGLabel *lab = gen_new_label();
4286 tcg_gen_mov_i64(t2, t0);
4287 tcg_gen_add_i64(t0, t1, t2);
4288 if (opc == OPC_ADD_CP2) {
4289 tcg_gen_ext32s_i64(t0, t0);
4291 tcg_gen_xor_i64(t1, t1, t2);
4292 tcg_gen_xor_i64(t2, t2, t0);
4293 tcg_gen_andc_i64(t1, t2, t1);
4294 tcg_temp_free_i64(t2);
4295 tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
4296 generate_exception(ctx, EXCP_OVERFLOW);
4304 TCGv_i64 t2 = tcg_temp_new_i64();
4305 TCGLabel *lab = gen_new_label();
4307 tcg_gen_mov_i64(t2, t0);
4308 tcg_gen_sub_i64(t0, t1, t2);
4309 if (opc == OPC_SUB_CP2) {
4310 tcg_gen_ext32s_i64(t0, t0);
4312 tcg_gen_xor_i64(t1, t1, t2);
4313 tcg_gen_xor_i64(t2, t2, t0);
4314 tcg_gen_and_i64(t1, t1, t2);
4315 tcg_temp_free_i64(t2);
4316 tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
4317 generate_exception(ctx, EXCP_OVERFLOW);
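/*
 * Illustrative sketch (hypothetical helpers, not used here) of the
 * XOR-based signed overflow tests that checks of the kind above rely
 * on: for a + b the sum overflowed iff a and b have the same sign but
 * the sum's sign differs; for a - b it overflowed iff a and b have
 * different signs and the difference's sign differs from a's.
 *
 *     static bool add_overflows(int64_t a, int64_t b, int64_t sum)
 *     {
 *         return (~(a ^ b) & (a ^ sum)) < 0;   // sign bit set -> overflow
 *     }
 *
 *     static bool sub_overflows(int64_t a, int64_t b, int64_t diff)
 *     {
 *         return ((a ^ b) & (a ^ diff)) < 0;   // a is the minuend
 *     }
 */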
4323 tcg_gen_ext32u_i64(t0, t0);
4324 tcg_gen_ext32u_i64(t1, t1);
4325 tcg_gen_mul_i64(t0, t0, t1);
4334 /* ??? Document is unclear: Set FCC[CC]. Does that mean the
4335 FD field is the CC field? */
4337 MIPS_INVAL("loongson_cp2");
4338 generate_exception_end(ctx, EXCP_RI);
4345 gen_store_fpr64(ctx, t0, rd);
4347 tcg_temp_free_i64(t0);
4348 tcg_temp_free_i64(t1);
4352 static void gen_trap (DisasContext *ctx, uint32_t opc,
4353 int rs, int rt, int16_t imm)
4356 TCGv t0 = tcg_temp_new();
4357 TCGv t1 = tcg_temp_new();
4360 /* Load needed operands */
4368 /* Compare two registers */
4370 gen_load_gpr(t0, rs);
4371 gen_load_gpr(t1, rt);
4381 /* Compare register to immediate */
4382 if (rs != 0 || imm != 0) {
4383 gen_load_gpr(t0, rs);
4384 tcg_gen_movi_tl(t1, (int32_t)imm);
4391 case OPC_TEQ: /* rs == rs */
4392 case OPC_TEQI: /* r0 == 0 */
4393 case OPC_TGE: /* rs >= rs */
4394 case OPC_TGEI: /* r0 >= 0 */
4395 case OPC_TGEU: /* rs >= rs unsigned */
4396 case OPC_TGEIU: /* r0 >= 0 unsigned */
4398 generate_exception_end(ctx, EXCP_TRAP);
4400 case OPC_TLT: /* rs < rs */
4401 case OPC_TLTI: /* r0 < 0 */
4402 case OPC_TLTU: /* rs < rs unsigned */
4403 case OPC_TLTIU: /* r0 < 0 unsigned */
4404 case OPC_TNE: /* rs != rs */
4405 case OPC_TNEI: /* r0 != 0 */
4406 /* Never trap: treat as NOP. */
4410 TCGLabel *l1 = gen_new_label();
4415 tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
4419 tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
4423 tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
4427 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4431 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
4435 tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
4438 generate_exception(ctx, EXCP_TRAP);
4445 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4447 if (unlikely(ctx->base.singlestep_enabled)) {
4451 #ifndef CONFIG_USER_ONLY
4452 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
4458 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4460 if (use_goto_tb(ctx, dest)) {
4463 tcg_gen_exit_tb(ctx->base.tb, n);
4466 if (ctx->base.singlestep_enabled) {
4467 save_cpu_state(ctx, 0);
4468 gen_helper_raise_exception_debug(cpu_env);
4470 tcg_gen_lookup_and_goto_ptr();
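/*
 * Illustrative sketch (hypothetical helper) of the condition checked by
 * use_goto_tb() above: a TB may be chained directly to its successor
 * only when the target lies on the same guest page as the TB itself;
 * otherwise the jump must go through the TB lookup so that remapped or
 * invalidated pages are noticed.
 *
 *     static bool same_guest_page_sketch(uint64_t tb_pc, uint64_t dest,
 *                                        uint64_t page_mask)
 *     {
 *         return (tb_pc & page_mask) == (dest & page_mask);
 *     }
 */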
4474 /* Branches (before delay slot) */
4475 static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
4477 int rs, int rt, int32_t offset,
4480 target_ulong btgt = -1;
4482 int bcond_compute = 0;
4483 TCGv t0 = tcg_temp_new();
4484 TCGv t1 = tcg_temp_new();
4486 if (ctx->hflags & MIPS_HFLAG_BMASK) {
4487 #ifdef MIPS_DEBUG_DISAS
4488 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
4489 TARGET_FMT_lx "\n", ctx->base.pc_next);
4491 generate_exception_end(ctx, EXCP_RI);
4495 /* Load needed operands */
4501 /* Compare two registers */
4503 gen_load_gpr(t0, rs);
4504 gen_load_gpr(t1, rt);
4507 btgt = ctx->base.pc_next + insn_bytes + offset;
4521 /* Compare to zero */
4523 gen_load_gpr(t0, rs);
4526 btgt = ctx->base.pc_next + insn_bytes + offset;
4529 #if defined(TARGET_MIPS64)
4531 tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
4533 tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
4536 btgt = ctx->base.pc_next + insn_bytes + offset;
4541 /* Jump to immediate */
4542 btgt = ((ctx->base.pc_next + insn_bytes) & (int32_t)0xF0000000) |
4547 /* Jump to register */
4548 if (offset != 0 && offset != 16) {
4549 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4550 others are reserved. */
4551 MIPS_INVAL("jump hint");
4552 generate_exception_end(ctx, EXCP_RI);
4555 gen_load_gpr(btarget, rs);
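/*
 * Illustrative sketch (hypothetical helper) of the J/JAL target
 * computed a few lines above: the 26-bit instr_index field, shifted
 * left by two, replaces the low 28 bits of the address of the
 * delay-slot instruction, so a jump can only reach its own 256MB
 * region.
 *
 *     static uint64_t j_target_sketch(uint64_t delay_slot_pc,
 *                                     uint32_t instr_index)
 *     {
 *         return (delay_slot_pc & ~(uint64_t)0x0FFFFFFF)
 *                | ((uint64_t)(instr_index & 0x03FFFFFF) << 2);
 *     }
 */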
4558 MIPS_INVAL("branch/jump");
4559 generate_exception_end(ctx, EXCP_RI);
4562 if (bcond_compute == 0) {
4563 /* No condition to be computed */
4565 case OPC_BEQ: /* rx == rx */
4566 case OPC_BEQL: /* rx == rx likely */
4567 case OPC_BGEZ: /* 0 >= 0 */
4568 case OPC_BGEZL: /* 0 >= 0 likely */
4569 case OPC_BLEZ: /* 0 <= 0 */
4570 case OPC_BLEZL: /* 0 <= 0 likely */
4572 ctx->hflags |= MIPS_HFLAG_B;
4574 case OPC_BGEZAL: /* 0 >= 0 */
4575 case OPC_BGEZALL: /* 0 >= 0 likely */
4576 /* Always take and link */
4578 ctx->hflags |= MIPS_HFLAG_B;
4580 case OPC_BNE: /* rx != rx */
4581 case OPC_BGTZ: /* 0 > 0 */
4582 case OPC_BLTZ: /* 0 < 0 */
4585 case OPC_BLTZAL: /* 0 < 0 */
4586 /* Handle as an unconditional branch to get correct delay slot handling. */
4589 btgt = ctx->base.pc_next + insn_bytes + delayslot_size;
4590 ctx->hflags |= MIPS_HFLAG_B;
4592 case OPC_BLTZALL: /* 0 < 0 likely */
4593 tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
4594 /* Skip the instruction in the delay slot */
4595 ctx->base.pc_next += 4;
4597 case OPC_BNEL: /* rx != rx likely */
4598 case OPC_BGTZL: /* 0 > 0 likely */
4599 case OPC_BLTZL: /* 0 < 0 likely */
4600 /* Skip the instruction in the delay slot */
4601 ctx->base.pc_next += 4;
4604 ctx->hflags |= MIPS_HFLAG_B;
4607 ctx->hflags |= MIPS_HFLAG_BX;
4611 ctx->hflags |= MIPS_HFLAG_B;
4614 ctx->hflags |= MIPS_HFLAG_BR;
4618 ctx->hflags |= MIPS_HFLAG_BR;
4621 MIPS_INVAL("branch/jump");
4622 generate_exception_end(ctx, EXCP_RI);
4628 tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
4631 tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
4634 tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
4637 tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
4640 tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
4643 tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
4646 tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
4650 tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
4654 tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
4657 tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
4660 tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
4663 tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
4666 tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
4669 tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
4672 tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
4674 #if defined(TARGET_MIPS64)
4676 tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
4680 tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
4683 ctx->hflags |= MIPS_HFLAG_BC;
4686 tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
4689 ctx->hflags |= MIPS_HFLAG_BL;
4692 MIPS_INVAL("conditional branch/jump");
4693 generate_exception_end(ctx, EXCP_RI);
4698 ctx->btarget = btgt;
4700 switch (delayslot_size) {
4702 ctx->hflags |= MIPS_HFLAG_BDS16;
4705 ctx->hflags |= MIPS_HFLAG_BDS32;
4710 int post_delay = insn_bytes + delayslot_size;
4711 int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);
4713 tcg_gen_movi_tl(cpu_gpr[blink],
4714 ctx->base.pc_next + post_delay + lowbit);
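/*
 * Illustrative sketch (hypothetical helper): the link value stored
 * above is the address of the instruction following the delay slot,
 * with bit 0 set when the branch executed in MIPS16/microMIPS mode so
 * that returning through the link register restores the same ISA mode.
 *
 *     static uint64_t link_address_sketch(uint64_t pc, int insn_bytes,
 *                                         int delayslot_size, bool m16)
 *     {
 *         return pc + insn_bytes + delayslot_size + (m16 ? 1 : 0);
 *     }
 */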
4718 if (insn_bytes == 2)
4719 ctx->hflags |= MIPS_HFLAG_B16;
4725 /* nanoMIPS Branches */
4726 static void gen_compute_branch_nm(DisasContext *ctx, uint32_t opc,
4728 int rs, int rt, int32_t offset)
4730 target_ulong btgt = -1;
4731 int bcond_compute = 0;
4732 TCGv t0 = tcg_temp_new();
4733 TCGv t1 = tcg_temp_new();
4735 /* Load needed operands */
4739 /* Compare two registers */
4741 gen_load_gpr(t0, rs);
4742 gen_load_gpr(t1, rt);
4745 btgt = ctx->base.pc_next + insn_bytes + offset;
4748 /* Compare to zero */
4750 gen_load_gpr(t0, rs);
4753 btgt = ctx->base.pc_next + insn_bytes + offset;
4756 tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
4758 btgt = ctx->base.pc_next + insn_bytes + offset;
4762 /* Jump to register */
4763 if (offset != 0 && offset != 16) {
4764 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
4765 others are reserved. */
4766 MIPS_INVAL("jump hint");
4767 generate_exception_end(ctx, EXCP_RI);
4770 gen_load_gpr(btarget, rs);
4773 MIPS_INVAL("branch/jump");
4774 generate_exception_end(ctx, EXCP_RI);
4777 if (bcond_compute == 0) {
4778 /* No condition to be computed */
4780 case OPC_BEQ: /* rx == rx */
4782 ctx->hflags |= MIPS_HFLAG_B;
4784 case OPC_BGEZAL: /* 0 >= 0 */
4785 /* Always take and link */
4786 tcg_gen_movi_tl(cpu_gpr[31],
4787 ctx->base.pc_next + insn_bytes);
4788 ctx->hflags |= MIPS_HFLAG_B;
4790 case OPC_BNE: /* rx != rx */
4791 tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 8);
4792 /* Skip the instruction in the delay slot */
4793 ctx->base.pc_next += 4;
4796 ctx->hflags |= MIPS_HFLAG_BR;
4800 tcg_gen_movi_tl(cpu_gpr[rt],
4801 ctx->base.pc_next + insn_bytes);
4803 ctx->hflags |= MIPS_HFLAG_BR;
4806 MIPS_INVAL("branch/jump");
4807 generate_exception_end(ctx, EXCP_RI);
4813 tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
4816 tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
4819 tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
4820 tcg_gen_movi_tl(cpu_gpr[31],
4821 ctx->base.pc_next + insn_bytes);
4824 tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
4826 ctx->hflags |= MIPS_HFLAG_BC;
4829 MIPS_INVAL("conditional branch/jump");
4830 generate_exception_end(ctx, EXCP_RI);
4835 ctx->btarget = btgt;
4838 if (insn_bytes == 2) {
4839 ctx->hflags |= MIPS_HFLAG_B16;
4846 /* special3 bitfield operations */
4847 static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
4848 int rs, int lsb, int msb)
4850 TCGv t0 = tcg_temp_new();
4851 TCGv t1 = tcg_temp_new();
4853 gen_load_gpr(t1, rs);
4856 if (lsb + msb > 31) {
4860 tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
4862 /* The two checks together imply that lsb == 0,
4863 so this is a simple sign-extension. */
4864 tcg_gen_ext32s_tl(t0, t1);
4867 #if defined(TARGET_MIPS64)
4876 if (lsb + msb > 63) {
4879 tcg_gen_extract_tl(t0, t1, lsb, msb + 1);
4886 gen_load_gpr(t0, rt);
4887 tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
4888 tcg_gen_ext32s_tl(t0, t0);
4890 #if defined(TARGET_MIPS64)
4901 gen_load_gpr(t0, rt);
4902 tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
4907 MIPS_INVAL("bitops");
4908 generate_exception_end(ctx, EXCP_RI);
4913 gen_store_gpr(t0, rt);
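/*
 * Illustrative sketch (plain C, hypothetical helpers) of the bitfield
 * semantics generated above: EXT copies a (size)-bit field starting at
 * lsb out of rs, INS replaces bits msb..lsb of rt with the low-order
 * bits of rs.  The 64-bit variants differ only in width.
 *
 *     static uint32_t ext_sketch(uint32_t rs, int lsb, int size)
 *     {
 *         uint32_t mask = (uint32_t)(((uint64_t)1 << size) - 1);
 *         return (rs >> lsb) & mask;
 *     }
 *
 *     static uint32_t ins_sketch(uint32_t rt, uint32_t rs, int lsb, int msb)
 *     {
 *         uint32_t field = (uint32_t)(((uint64_t)1 << (msb - lsb + 1)) - 1);
 *         return (rt & ~(field << lsb)) | ((rs & field) << lsb);
 *     }
 */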
4918 static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
4923 /* If no destination, treat it as a NOP. */
4927 t0 = tcg_temp_new();
4928 gen_load_gpr(t0, rt);
4932 TCGv t1 = tcg_temp_new();
4933 TCGv t2 = tcg_const_tl(0x00FF00FF);
4935 tcg_gen_shri_tl(t1, t0, 8);
4936 tcg_gen_and_tl(t1, t1, t2);
4937 tcg_gen_and_tl(t0, t0, t2);
4938 tcg_gen_shli_tl(t0, t0, 8);
4939 tcg_gen_or_tl(t0, t0, t1);
4942 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
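/*
 * Illustrative sketch (hypothetical helper) of the WSBH computation
 * above: each halfword's two bytes are split apart with the 0x00FF00FF
 * mask and recombined in swapped order, so all halfwords are
 * byte-swapped in parallel without a loop.
 *
 *     static uint32_t wsbh_sketch(uint32_t x)
 *     {
 *         return ((x >> 8) & 0x00FF00FFu) | ((x & 0x00FF00FFu) << 8);
 *     }
 */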
4946 tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
4949 tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
4951 #if defined(TARGET_MIPS64)
4954 TCGv t1 = tcg_temp_new();
4955 TCGv t2 = tcg_const_tl(0x00FF00FF00FF00FFULL);
4957 tcg_gen_shri_tl(t1, t0, 8);
4958 tcg_gen_and_tl(t1, t1, t2);
4959 tcg_gen_and_tl(t0, t0, t2);
4960 tcg_gen_shli_tl(t0, t0, 8);
4961 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
4968 TCGv t1 = tcg_temp_new();
4969 TCGv t2 = tcg_const_tl(0x0000FFFF0000FFFFULL);
4971 tcg_gen_shri_tl(t1, t0, 16);
4972 tcg_gen_and_tl(t1, t1, t2);
4973 tcg_gen_and_tl(t0, t0, t2);
4974 tcg_gen_shli_tl(t0, t0, 16);
4975 tcg_gen_or_tl(t0, t0, t1);
4976 tcg_gen_shri_tl(t1, t0, 32);
4977 tcg_gen_shli_tl(t0, t0, 32);
4978 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
4985 MIPS_INVAL("bsfhl");
4986 generate_exception_end(ctx, EXCP_RI);
4993 static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
5002 t0 = tcg_temp_new();
5003 t1 = tcg_temp_new();
5004 gen_load_gpr(t0, rs);
5005 gen_load_gpr(t1, rt);
5006 tcg_gen_shli_tl(t0, t0, imm2 + 1);
5007 tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
5008 if (opc == OPC_LSA) {
5009 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
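/*
 * Illustrative sketch (hypothetical helper): LSA/DLSA compute
 * rd = (rs << (sa + 1)) + rt, i.e. an index scaled by 2, 4, 8 or 16
 * added to a base; the 32-bit form then sign-extends the sum into the
 * 64-bit GPR, as done above.
 *
 *     static uint32_t lsa_sketch(uint32_t rs, uint32_t rt, int sa)
 *     {
 *         return (rs << (sa + 1)) + rt;   // sa is the 2-bit scale field
 *     }
 */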
5018 static void gen_align_bits(DisasContext *ctx, int wordsz, int rd, int rs,
5026 t0 = tcg_temp_new();
5027 if (bits == 0 || bits == wordsz) {
5029 gen_load_gpr(t0, rt);
5031 gen_load_gpr(t0, rs);
5035 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
5037 #if defined(TARGET_MIPS64)
5039 tcg_gen_mov_tl(cpu_gpr[rd], t0);
5044 TCGv t1 = tcg_temp_new();
5045 gen_load_gpr(t0, rt);
5046 gen_load_gpr(t1, rs);
5050 TCGv_i64 t2 = tcg_temp_new_i64();
5051 tcg_gen_concat_tl_i64(t2, t1, t0);
5052 tcg_gen_shri_i64(t2, t2, 32 - bits);
5053 gen_move_low32(cpu_gpr[rd], t2);
5054 tcg_temp_free_i64(t2);
5057 #if defined(TARGET_MIPS64)
5059 tcg_gen_shli_tl(t0, t0, bits);
5060 tcg_gen_shri_tl(t1, t1, 64 - bits);
5061 tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
5071 static void gen_align(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
5074 gen_align_bits(ctx, wordsz, rd, rs, rt, bp * 8);
5077 static void gen_ext(DisasContext *ctx, int wordsz, int rd, int rs, int rt,
5080 gen_align_bits(ctx, wordsz, rd, rs, rt, wordsz - shift);
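/*
 * Illustrative sketch (hypothetical helper) of the concatenate-and-shift
 * performed by gen_align_bits() for the 32-bit ALIGN case: rt forms the
 * upper half and rs the lower half of a 64-bit value, which is shifted
 * right so that the result is (rt << 8*bp) | (rs >> (32 - 8*bp)).  The
 * bp == 0 and full-width cases are handled separately above as plain
 * moves.
 *
 *     static uint32_t align_sketch(uint32_t rs, uint32_t rt, int bp)
 *     {
 *         uint64_t cat = ((uint64_t)rt << 32) | rs;   // assumes 0 < bp < 4
 *         return (uint32_t)(cat >> (32 - 8 * bp));
 *     }
 */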
5083 static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
5090 t0 = tcg_temp_new();
5091 gen_load_gpr(t0, rt);
5094 gen_helper_bitswap(cpu_gpr[rd], t0);
5096 #if defined(TARGET_MIPS64)
5098 gen_helper_dbitswap(cpu_gpr[rd], t0);
5105 #ifndef CONFIG_USER_ONLY
5106 /* CP0 (MMU and control) */
5107 static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
5109 TCGv_i64 t0 = tcg_temp_new_i64();
5110 TCGv_i64 t1 = tcg_temp_new_i64();
5112 tcg_gen_ext_tl_i64(t0, arg);
5113 tcg_gen_ld_i64(t1, cpu_env, off);
5114 #if defined(TARGET_MIPS64)
5115 tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
5117 tcg_gen_concat32_i64(t1, t1, t0);
5119 tcg_gen_st_i64(t1, cpu_env, off);
5120 tcg_temp_free_i64(t1);
5121 tcg_temp_free_i64(t0);
5124 static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
5126 TCGv_i64 t0 = tcg_temp_new_i64();
5127 TCGv_i64 t1 = tcg_temp_new_i64();
5129 tcg_gen_ext_tl_i64(t0, arg);
5130 tcg_gen_ld_i64(t1, cpu_env, off);
5131 tcg_gen_concat32_i64(t1, t1, t0);
5132 tcg_gen_st_i64(t1, cpu_env, off);
5133 tcg_temp_free_i64(t1);
5134 tcg_temp_free_i64(t0);
5137 static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
5139 TCGv_i64 t0 = tcg_temp_new_i64();
5141 tcg_gen_ld_i64(t0, cpu_env, off);
5142 #if defined(TARGET_MIPS64)
5143 tcg_gen_shri_i64(t0, t0, 30);
5145 tcg_gen_shri_i64(t0, t0, 32);
5147 gen_move_low32(arg, t0);
5148 tcg_temp_free_i64(t0);
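/*
 * Restating the ifdefs above as a sketch (hypothetical helper, not used
 * here): MFHC0 returns the upper half of the wider-than-32-bit EntryLo;
 * with the MIPS64 internal layout the split point is bit 30, otherwise
 * it is bit 32.
 *
 *     static uint32_t mfhc0_entrylo_sketch(uint64_t entrylo, bool mips64)
 *     {
 *         return (uint32_t)(entrylo >> (mips64 ? 30 : 32));
 *     }
 */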
5151 static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
5153 TCGv_i64 t0 = tcg_temp_new_i64();
5155 tcg_gen_ld_i64(t0, cpu_env, off);
5156 tcg_gen_shri_i64(t0, t0, 32 + shift);
5157 gen_move_low32(arg, t0);
5158 tcg_temp_free_i64(t0);
5161 static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
5163 TCGv_i32 t0 = tcg_temp_new_i32();
5165 tcg_gen_ld_i32(t0, cpu_env, off);
5166 tcg_gen_ext_i32_tl(arg, t0);
5167 tcg_temp_free_i32(t0);
5170 static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
5172 tcg_gen_ld_tl(arg, cpu_env, off);
5173 tcg_gen_ext32s_tl(arg, arg);
5176 static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
5178 TCGv_i32 t0 = tcg_temp_new_i32();
5180 tcg_gen_trunc_tl_i32(t0, arg);
5181 tcg_gen_st_i32(t0, cpu_env, off);
5182 tcg_temp_free_i32(t0);
5185 #define CP0_CHECK(c) \
5188 goto cp0_unimplemented; \
5192 static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
5194 const char *rn = "invalid";
5200 CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
5201 gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
5205 goto cp0_unimplemented;
5211 CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
5212 gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
5216 goto cp0_unimplemented;
5222 gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
5223 ctx->CP0_LLAddr_shift);
5227 CP0_CHECK(ctx->mrp);
5228 gen_helper_mfhc0_maar(arg, cpu_env);
5232 goto cp0_unimplemented;
5241 gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
5245 goto cp0_unimplemented;
5249 goto cp0_unimplemented;
5251 trace_mips_translate_c0("mfhc0", rn, reg, sel);
5255 qemu_log_mask(LOG_UNIMP, "mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
5256 tcg_gen_movi_tl(arg, 0);
5259 static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
5261 const char *rn = "invalid";
5262 uint64_t mask = ctx->PAMask >> 36;
5268 CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
5269 tcg_gen_andi_tl(arg, arg, mask);
5270 gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
5274 goto cp0_unimplemented;
5280 CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
5281 tcg_gen_andi_tl(arg, arg, mask);
5282 gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
5286 goto cp0_unimplemented;
5292 /* LLAddr is read-only (the only exception is bit 0 if LLB is
5293 supported); the CP0_LLAddr_rw_bitmask does not seem to be
5294 relevant for modern MIPS cores supporting MTHC0, so treat
5295 MTHC0 to LLAddr as a NOP. */
5299 CP0_CHECK(ctx->mrp);
5300 gen_helper_mthc0_maar(cpu_env, arg);
5304 goto cp0_unimplemented;
5313 tcg_gen_andi_tl(arg, arg, mask);
5314 gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
5318 goto cp0_unimplemented;
5322 goto cp0_unimplemented;
5324 trace_mips_translate_c0("mthc0", rn, reg, sel);
5327 qemu_log_mask(LOG_UNIMP, "mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
5330 static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
5332 if (ctx->insn_flags & ISA_MIPS32R6) {
5333 tcg_gen_movi_tl(arg, 0);
5335 tcg_gen_movi_tl(arg, ~0);
5339 static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
5341 const char *rn = "invalid";
5344 check_insn(ctx, ISA_MIPS32);
5350 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
5354 CP0_CHECK(ctx->insn_flags & ASE_MT);
5355 gen_helper_mfc0_mvpcontrol(arg, cpu_env);
5359 CP0_CHECK(ctx->insn_flags & ASE_MT);
5360 gen_helper_mfc0_mvpconf0(arg, cpu_env);
5364 CP0_CHECK(ctx->insn_flags & ASE_MT);
5365 gen_helper_mfc0_mvpconf1(arg, cpu_env);
5370 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPControl));
5374 goto cp0_unimplemented;
5380 CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
5381 gen_helper_mfc0_random(arg, cpu_env);
5385 CP0_CHECK(ctx->insn_flags & ASE_MT);
5386 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
5390 CP0_CHECK(ctx->insn_flags & ASE_MT);
5391 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
5395 CP0_CHECK(ctx->insn_flags & ASE_MT);
5396 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
5400 CP0_CHECK(ctx->insn_flags & ASE_MT);
5401 gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_YQMask));
5405 CP0_CHECK(ctx->insn_flags & ASE_MT);
5406 gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_VPESchedule));
5410 CP0_CHECK(ctx->insn_flags & ASE_MT);
5411 gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_VPEScheFBack));
5412 rn = "VPEScheFBack";
5415 CP0_CHECK(ctx->insn_flags & ASE_MT);
5416 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
5420 goto cp0_unimplemented;
5427 TCGv_i64 tmp = tcg_temp_new_i64();
5428 tcg_gen_ld_i64(tmp, cpu_env,
5429 offsetof(CPUMIPSState, CP0_EntryLo0));
5430 #if defined(TARGET_MIPS64)
5432 /* Move RI/XI fields to bits 31:30 */
5433 tcg_gen_shri_tl(arg, tmp, CP0EnLo_XI);
5434 tcg_gen_deposit_tl(tmp, tmp, arg, 30, 2);
5437 gen_move_low32(arg, tmp);
5438 tcg_temp_free_i64(tmp);
5443 CP0_CHECK(ctx->insn_flags & ASE_MT);
5444 gen_helper_mfc0_tcstatus(arg, cpu_env);
5448 CP0_CHECK(ctx->insn_flags & ASE_MT);
5449 gen_helper_mfc0_tcbind(arg, cpu_env);
5453 CP0_CHECK(ctx->insn_flags & ASE_MT);
5454 gen_helper_mfc0_tcrestart(arg, cpu_env);
5458 CP0_CHECK(ctx->insn_flags & ASE_MT);
5459 gen_helper_mfc0_tchalt(arg, cpu_env);
5463 CP0_CHECK(ctx->insn_flags & ASE_MT);
5464 gen_helper_mfc0_tccontext(arg, cpu_env);
5468 CP0_CHECK(ctx->insn_flags & ASE_MT);
5469 gen_helper_mfc0_tcschedule(arg, cpu_env);
5473 CP0_CHECK(ctx->insn_flags & ASE_MT);
5474 gen_helper_mfc0_tcschefback(arg, cpu_env);
5478 goto cp0_unimplemented;
5485 TCGv_i64 tmp = tcg_temp_new_i64();
5486 tcg_gen_ld_i64(tmp, cpu_env,
5487 offsetof(CPUMIPSState, CP0_EntryLo1));
5488 #if defined(TARGET_MIPS64)
5490 /* Move RI/XI fields to bits 31:30 */
5491 tcg_gen_shri_tl(arg, tmp, CP0EnLo_XI);
5492 tcg_gen_deposit_tl(tmp, tmp, arg, 30, 2);
5495 gen_move_low32(arg, tmp);
5496 tcg_temp_free_i64(tmp);
5502 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_GlobalNumber));
5503 rn = "GlobalNumber";
5506 goto cp0_unimplemented;
5512 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
5513 tcg_gen_ext32s_tl(arg, arg);
5517 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
5518 rn = "ContextConfig";
5519 goto cp0_unimplemented;
5521 CP0_CHECK(ctx->ulri);
5522 tcg_gen_ld_tl(arg, cpu_env,
5523 offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
5524 tcg_gen_ext32s_tl(arg, arg);
5528 goto cp0_unimplemented;
5534 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
5538 check_insn(ctx, ISA_MIPS32R2);
5539 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
5544 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl0));
5545 tcg_gen_ext32s_tl(arg, arg);
5550 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl1));
5551 tcg_gen_ext32s_tl(arg, arg);
5556 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl2));
5557 tcg_gen_ext32s_tl(arg, arg);
5561 goto cp0_unimplemented;
5567 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
5571 check_insn(ctx, ISA_MIPS32R2);
5572 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
5576 check_insn(ctx, ISA_MIPS32R2);
5577 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
5581 check_insn(ctx, ISA_MIPS32R2);
5582 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
5586 check_insn(ctx, ISA_MIPS32R2);
5587 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
5591 check_insn(ctx, ISA_MIPS32R2);
5592 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
5596 goto cp0_unimplemented;
5602 check_insn(ctx, ISA_MIPS32R2);
5603 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
5607 goto cp0_unimplemented;
5613 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
5614 tcg_gen_ext32s_tl(arg, arg);
5619 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstr));
5624 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrP));
5629 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrX));
5630 tcg_gen_andi_tl(arg, arg, ~0xffff);
5634 goto cp0_unimplemented;
5640 /* Mark as an IO operation because we read the time. */
5641 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5644 gen_helper_mfc0_count(arg, cpu_env);
5645 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5648 /* Break the TB to be able to take timer interrupts immediately
5649 after reading count. DISAS_STOP isn't sufficient, we need to
5650 ensure we break completely out of translated code. */
5651 gen_save_pc(ctx->base.pc_next + 4);
5652 ctx->base.is_jmp = DISAS_EXIT;
5655 /* 6,7 are implementation dependent */
5657 goto cp0_unimplemented;
5663 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
5664 tcg_gen_ext32s_tl(arg, arg);
5668 goto cp0_unimplemented;
5674 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
5677 /* 6,7 are implementation dependent */
5679 goto cp0_unimplemented;
5685 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
5689 check_insn(ctx, ISA_MIPS32R2);
5690 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
5694 check_insn(ctx, ISA_MIPS32R2);
5695 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
5699 check_insn(ctx, ISA_MIPS32R2);
5700 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
5704 goto cp0_unimplemented;
5710 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
5714 goto cp0_unimplemented;
5720 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
5721 tcg_gen_ext32s_tl(arg, arg);
5725 goto cp0_unimplemented;
5731 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
5735 check_insn(ctx, ISA_MIPS32R2);
5736 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EBase));
5737 tcg_gen_ext32s_tl(arg, arg);
5741 check_insn(ctx, ISA_MIPS32R2);
5742 CP0_CHECK(ctx->cmgcr);
5743 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_CMGCRBase));
5744 tcg_gen_ext32s_tl(arg, arg);
5748 goto cp0_unimplemented;
5754 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
5758 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
5762 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
5766 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
5770 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config4));
5774 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
5777 /* 6,7 are implementation dependent */
5779 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
5783 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
5787 goto cp0_unimplemented;
5793 gen_helper_mfc0_lladdr(arg, cpu_env);
5797 CP0_CHECK(ctx->mrp);
5798 gen_helper_mfc0_maar(arg, cpu_env);
5802 CP0_CHECK(ctx->mrp);
5803 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
5807 goto cp0_unimplemented;
5820 CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
5821 gen_helper_1e0i(mfc0_watchlo, arg, sel);
5825 goto cp0_unimplemented;
5838 CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
5839 gen_helper_1e0i(mfc0_watchhi, arg, sel);
5843 goto cp0_unimplemented;
5849 #if defined(TARGET_MIPS64)
5850 check_insn(ctx, ISA_MIPS3);
5851 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
5852 tcg_gen_ext32s_tl(arg, arg);
5857 goto cp0_unimplemented;
5861 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5862 CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
5865 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
5869 goto cp0_unimplemented;
5873 tcg_gen_movi_tl(arg, 0); /* unimplemented */
5874 rn = "'Diagnostic"; /* implementation dependent */
5879 gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
5883 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
5884 rn = "TraceControl";
5885 goto cp0_unimplemented;
5887 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
5888 rn = "TraceControl2";
5889 goto cp0_unimplemented;
5891 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
5892 rn = "UserTraceData";
5893 goto cp0_unimplemented;
5895 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
5897 goto cp0_unimplemented;
5899 goto cp0_unimplemented;
5906 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
5907 tcg_gen_ext32s_tl(arg, arg);
5911 goto cp0_unimplemented;
5917 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
5918 rn = "Performance0";
5921 // gen_helper_mfc0_performance1(arg);
5922 rn = "Performance1";
5923 goto cp0_unimplemented;
5925 // gen_helper_mfc0_performance2(arg);
5926 rn = "Performance2";
5927 goto cp0_unimplemented;
5929 // gen_helper_mfc0_performance3(arg);
5930 rn = "Performance3";
5931 goto cp0_unimplemented;
5933 // gen_helper_mfc0_performance4(arg);
5934 rn = "Performance4";
5935 goto cp0_unimplemented;
5937 // gen_helper_mfc0_performance5(arg);
5938 rn = "Performance5";
5939 goto cp0_unimplemented;
5941 // gen_helper_mfc0_performance6(arg);
5942 rn = "Performance6";
5943 goto cp0_unimplemented;
5945 // gen_helper_mfc0_performance7(arg);
5946 rn = "Performance7";
5947 goto cp0_unimplemented;
5949 goto cp0_unimplemented;
5955 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
5959 goto cp0_unimplemented;
5968 tcg_gen_movi_tl(arg, 0); /* unimplemented */
5972 goto cp0_unimplemented;
5982 TCGv_i64 tmp = tcg_temp_new_i64();
5983 tcg_gen_ld_i64(tmp, cpu_env, offsetof(CPUMIPSState, CP0_TagLo));
5984 gen_move_low32(arg, tmp);
5985 tcg_temp_free_i64(tmp);
5993 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
5997 goto cp0_unimplemented;
6006 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
6013 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
6017 goto cp0_unimplemented;
6023 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
6024 tcg_gen_ext32s_tl(arg, arg);
6028 goto cp0_unimplemented;
6035 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
6044 CP0_CHECK(ctx->kscrexist & (1 << sel));
6045 tcg_gen_ld_tl(arg, cpu_env,
6046 offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
6047 tcg_gen_ext32s_tl(arg, arg);
6051 goto cp0_unimplemented;
6055 goto cp0_unimplemented;
6057 trace_mips_translate_c0("mfc0", rn, reg, sel);
6061 qemu_log_mask(LOG_UNIMP, "mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
6062 gen_mfc0_unimplemented(ctx, arg);
6065 static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
6067 const char *rn = "invalid";
6070 check_insn(ctx, ISA_MIPS32);
6072 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
6080 gen_helper_mtc0_index(cpu_env, arg);
6084 CP0_CHECK(ctx->insn_flags & ASE_MT);
6085 gen_helper_mtc0_mvpcontrol(cpu_env, arg);
6089 CP0_CHECK(ctx->insn_flags & ASE_MT);
6094 CP0_CHECK(ctx->insn_flags & ASE_MT);
6104 goto cp0_unimplemented;
6114 CP0_CHECK(ctx->insn_flags & ASE_MT);
6115 gen_helper_mtc0_vpecontrol(cpu_env, arg);
6119 CP0_CHECK(ctx->insn_flags & ASE_MT);
6120 gen_helper_mtc0_vpeconf0(cpu_env, arg);
6124 CP0_CHECK(ctx->insn_flags & ASE_MT);
6125 gen_helper_mtc0_vpeconf1(cpu_env, arg);
6129 CP0_CHECK(ctx->insn_flags & ASE_MT);
6130 gen_helper_mtc0_yqmask(cpu_env, arg);
6134 CP0_CHECK(ctx->insn_flags & ASE_MT);
6135 tcg_gen_st_tl(arg, cpu_env,
6136 offsetof(CPUMIPSState, CP0_VPESchedule));
6140 CP0_CHECK(ctx->insn_flags & ASE_MT);
6141 tcg_gen_st_tl(arg, cpu_env,
6142 offsetof(CPUMIPSState, CP0_VPEScheFBack));
6143 rn = "VPEScheFBack";
6146 CP0_CHECK(ctx->insn_flags & ASE_MT);
6147 gen_helper_mtc0_vpeopt(cpu_env, arg);
6151 goto cp0_unimplemented;
6157 gen_helper_mtc0_entrylo0(cpu_env, arg);
6161 CP0_CHECK(ctx->insn_flags & ASE_MT);
6162 gen_helper_mtc0_tcstatus(cpu_env, arg);
6166 CP0_CHECK(ctx->insn_flags & ASE_MT);
6167 gen_helper_mtc0_tcbind(cpu_env, arg);
6171 CP0_CHECK(ctx->insn_flags & ASE_MT);
6172 gen_helper_mtc0_tcrestart(cpu_env, arg);
6176 CP0_CHECK(ctx->insn_flags & ASE_MT);
6177 gen_helper_mtc0_tchalt(cpu_env, arg);
6181 CP0_CHECK(ctx->insn_flags & ASE_MT);
6182 gen_helper_mtc0_tccontext(cpu_env, arg);
6186 CP0_CHECK(ctx->insn_flags & ASE_MT);
6187 gen_helper_mtc0_tcschedule(cpu_env, arg);
6191 CP0_CHECK(ctx->insn_flags & ASE_MT);
6192 gen_helper_mtc0_tcschefback(cpu_env, arg);
6196 goto cp0_unimplemented;
6202 gen_helper_mtc0_entrylo1(cpu_env, arg);
6208 rn = "GlobalNumber";
6211 goto cp0_unimplemented;
6217 gen_helper_mtc0_context(cpu_env, arg);
6221 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
6222 rn = "ContextConfig";
6223 goto cp0_unimplemented;
6225 CP0_CHECK(ctx->ulri);
6226 tcg_gen_st_tl(arg, cpu_env,
6227 offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
6231 goto cp0_unimplemented;
6237 gen_helper_mtc0_pagemask(cpu_env, arg);
6241 check_insn(ctx, ISA_MIPS32R2);
6242 gen_helper_mtc0_pagegrain(cpu_env, arg);
6244 ctx->base.is_jmp = DISAS_STOP;
6248 gen_helper_mtc0_segctl0(cpu_env, arg);
6253 gen_helper_mtc0_segctl1(cpu_env, arg);
6258 gen_helper_mtc0_segctl2(cpu_env, arg);
6262 goto cp0_unimplemented;
6268 gen_helper_mtc0_wired(cpu_env, arg);
6272 check_insn(ctx, ISA_MIPS32R2);
6273 gen_helper_mtc0_srsconf0(cpu_env, arg);
6277 check_insn(ctx, ISA_MIPS32R2);
6278 gen_helper_mtc0_srsconf1(cpu_env, arg);
6282 check_insn(ctx, ISA_MIPS32R2);
6283 gen_helper_mtc0_srsconf2(cpu_env, arg);
6287 check_insn(ctx, ISA_MIPS32R2);
6288 gen_helper_mtc0_srsconf3(cpu_env, arg);
6292 check_insn(ctx, ISA_MIPS32R2);
6293 gen_helper_mtc0_srsconf4(cpu_env, arg);
6297 goto cp0_unimplemented;
6303 check_insn(ctx, ISA_MIPS32R2);
6304 gen_helper_mtc0_hwrena(cpu_env, arg);
6305 ctx->base.is_jmp = DISAS_STOP;
6309 goto cp0_unimplemented;
6331 goto cp0_unimplemented;
6337 gen_helper_mtc0_count(cpu_env, arg);
6340 /* 6,7 are implementation dependent */
6342 goto cp0_unimplemented;
6348 gen_helper_mtc0_entryhi(cpu_env, arg);
6352 goto cp0_unimplemented;
6358 gen_helper_mtc0_compare(cpu_env, arg);
6361 /* 6,7 are implementation dependent */
6363 goto cp0_unimplemented;
6369 save_cpu_state(ctx, 1);
6370 gen_helper_mtc0_status(cpu_env, arg);
6371 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6372 gen_save_pc(ctx->base.pc_next + 4);
6373 ctx->base.is_jmp = DISAS_EXIT;
6377 check_insn(ctx, ISA_MIPS32R2);
6378 gen_helper_mtc0_intctl(cpu_env, arg);
6379 /* Stop translation as we may have switched the execution mode */
6380 ctx->base.is_jmp = DISAS_STOP;
6384 check_insn(ctx, ISA_MIPS32R2);
6385 gen_helper_mtc0_srsctl(cpu_env, arg);
6386 /* Stop translation as we may have switched the execution mode */
6387 ctx->base.is_jmp = DISAS_STOP;
6391 check_insn(ctx, ISA_MIPS32R2);
6392 gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
6393 /* Stop translation as we may have switched the execution mode */
6394 ctx->base.is_jmp = DISAS_STOP;
6398 goto cp0_unimplemented;
6404 save_cpu_state(ctx, 1);
6405 gen_helper_mtc0_cause(cpu_env, arg);
6406 /* Stop translation as we may have triggered an interrupt.
6407 * DISAS_STOP isn't sufficient, we need to ensure we break out of
6408 * translated code to check for pending interrupts. */
6409 gen_save_pc(ctx->base.pc_next + 4);
6410 ctx->base.is_jmp = DISAS_EXIT;
6414 goto cp0_unimplemented;
6420 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
6424 goto cp0_unimplemented;
6434 check_insn(ctx, ISA_MIPS32R2);
6435 gen_helper_mtc0_ebase(cpu_env, arg);
6439 goto cp0_unimplemented;
6445 gen_helper_mtc0_config0(cpu_env, arg);
6447 /* Stop translation as we may have switched the execution mode */
6448 ctx->base.is_jmp = DISAS_STOP;
6451 /* ignored, read only */
6455 gen_helper_mtc0_config2(cpu_env, arg);
6457 /* Stop translation as we may have switched the execution mode */
6458 ctx->base.is_jmp = DISAS_STOP;
6461 gen_helper_mtc0_config3(cpu_env, arg);
6463 /* Stop translation as we may have switched the execution mode */
6464 ctx->base.is_jmp = DISAS_STOP;
6467 gen_helper_mtc0_config4(cpu_env, arg);
6469 ctx->base.is_jmp = DISAS_STOP;
6472 gen_helper_mtc0_config5(cpu_env, arg);
6474 /* Stop translation as we may have switched the execution mode */
6475 ctx->base.is_jmp = DISAS_STOP;
6477 /* 6,7 are implementation dependent */
6487 rn = "Invalid config selector";
6488 goto cp0_unimplemented;
6494 gen_helper_mtc0_lladdr(cpu_env, arg);
6498 CP0_CHECK(ctx->mrp);
6499 gen_helper_mtc0_maar(cpu_env, arg);
6503 CP0_CHECK(ctx->mrp);
6504 gen_helper_mtc0_maari(cpu_env, arg);
6508 goto cp0_unimplemented;
6521 CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
6522 gen_helper_0e1i(mtc0_watchlo, arg, sel);
6526 goto cp0_unimplemented;
6539 CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
6540 gen_helper_0e1i(mtc0_watchhi, arg, sel);
6544 goto cp0_unimplemented;
6550 #if defined(TARGET_MIPS64)
6551 check_insn(ctx, ISA_MIPS3);
6552 gen_helper_mtc0_xcontext(cpu_env, arg);
6557 goto cp0_unimplemented;
6561 /* Officially reserved, but sel 0 is used for R1x000 framemask */
6562 CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
6565 gen_helper_mtc0_framemask(cpu_env, arg);
6569 goto cp0_unimplemented;
6574 rn = "Diagnostic"; /* implementation dependent */
6579 gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
6580 /* DISAS_STOP isn't good enough here, hflags may have changed. */
6581 gen_save_pc(ctx->base.pc_next + 4);
6582 ctx->base.is_jmp = DISAS_EXIT;
6586 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
6587 rn = "TraceControl";
6588 /* Stop translation as we may have switched the execution mode */
6589 ctx->base.is_jmp = DISAS_STOP;
6590 goto cp0_unimplemented;
6592 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
6593 rn = "TraceControl2";
6594 /* Stop translation as we may have switched the execution mode */
6595 ctx->base.is_jmp = DISAS_STOP;
6596 goto cp0_unimplemented;
6598 /* Stop translation as we may have switched the execution mode */
6599 ctx->base.is_jmp = DISAS_STOP;
6600 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
6601 rn = "UserTraceData";
6602 /* Stop translation as we may have switched the execution mode */
6603 ctx->base.is_jmp = DISAS_STOP;
6604 goto cp0_unimplemented;
6606 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
6607 /* Stop translation as we may have switched the execution mode */
6608 ctx->base.is_jmp = DISAS_STOP;
6610 goto cp0_unimplemented;
6612 goto cp0_unimplemented;
6619 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
6623 goto cp0_unimplemented;
6629 gen_helper_mtc0_performance0(cpu_env, arg);
6630 rn = "Performance0";
6633 // gen_helper_mtc0_performance1(arg);
6634 rn = "Performance1";
6635 goto cp0_unimplemented;
6637 // gen_helper_mtc0_performance2(arg);
6638 rn = "Performance2";
6639 goto cp0_unimplemented;
6641 // gen_helper_mtc0_performance3(arg);
6642 rn = "Performance3";
6643 goto cp0_unimplemented;
6645 // gen_helper_mtc0_performance4(arg);
6646 rn = "Performance4";
6647 goto cp0_unimplemented;
6649 // gen_helper_mtc0_performance5(arg);
6650 rn = "Performance5";
6651 goto cp0_unimplemented;
6653 // gen_helper_mtc0_performance6(arg);
6654 rn = "Performance6";
6655 goto cp0_unimplemented;
6657 // gen_helper_mtc0_performance7(arg);
6658 rn = "Performance7";
6659 goto cp0_unimplemented;
6661 goto cp0_unimplemented;
6667 gen_helper_mtc0_errctl(cpu_env, arg);
6668 ctx->base.is_jmp = DISAS_STOP;
6672 goto cp0_unimplemented;
6685 goto cp0_unimplemented;
6694 gen_helper_mtc0_taglo(cpu_env, arg);
6701 gen_helper_mtc0_datalo(cpu_env, arg);
6705 goto cp0_unimplemented;
6714 gen_helper_mtc0_taghi(cpu_env, arg);
6721 gen_helper_mtc0_datahi(cpu_env, arg);
6726 goto cp0_unimplemented;
6732 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
6736 goto cp0_unimplemented;
6743 gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
6752 CP0_CHECK(ctx->kscrexist & (1 << sel));
6753 tcg_gen_st_tl(arg, cpu_env,
6754 offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
6758 goto cp0_unimplemented;
6762 goto cp0_unimplemented;
6764 trace_mips_translate_c0("mtc0", rn, reg, sel);
6766 /* For simplicity assume that all writes can cause interrupts. */
6767 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
6769 /* DISAS_STOP isn't sufficient, we need to ensure we break out of
6770 * translated code to check for pending interrupts. */
6771 gen_save_pc(ctx->base.pc_next + 4);
6772 ctx->base.is_jmp = DISAS_EXIT;
6777 qemu_log_mask(LOG_UNIMP, "mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
6780 #if defined(TARGET_MIPS64)
6781 static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
6783 const char *rn = "invalid";
6786 check_insn(ctx, ISA_MIPS64);
6792 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
6796 CP0_CHECK(ctx->insn_flags & ASE_MT);
6797 gen_helper_mfc0_mvpcontrol(arg, cpu_env);
6801 CP0_CHECK(ctx->insn_flags & ASE_MT);
6802 gen_helper_mfc0_mvpconf0(arg, cpu_env);
6806 CP0_CHECK(ctx->insn_flags & ASE_MT);
6807 gen_helper_mfc0_mvpconf1(arg, cpu_env);
6812 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPControl));
6816 goto cp0_unimplemented;
6822 CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
6823 gen_helper_mfc0_random(arg, cpu_env);
6827 CP0_CHECK(ctx->insn_flags & ASE_MT);
6828 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
6832 CP0_CHECK(ctx->insn_flags & ASE_MT);
6833 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
6837 CP0_CHECK(ctx->insn_flags & ASE_MT);
6838 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
6842 CP0_CHECK(ctx->insn_flags & ASE_MT);
6843 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_YQMask));
6847 CP0_CHECK(ctx->insn_flags & ASE_MT);
6848 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
6852 CP0_CHECK(ctx->insn_flags & ASE_MT);
6853 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
6854 rn = "VPEScheFBack";
6857 CP0_CHECK(ctx->insn_flags & ASE_MT);
6858 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
6862 goto cp0_unimplemented;
6868 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo0));
6872 CP0_CHECK(ctx->insn_flags & ASE_MT);
6873 gen_helper_mfc0_tcstatus(arg, cpu_env);
6877 CP0_CHECK(ctx->insn_flags & ASE_MT);
6878 gen_helper_mfc0_tcbind(arg, cpu_env);
6882 CP0_CHECK(ctx->insn_flags & ASE_MT);
6883 gen_helper_dmfc0_tcrestart(arg, cpu_env);
6887 CP0_CHECK(ctx->insn_flags & ASE_MT);
6888 gen_helper_dmfc0_tchalt(arg, cpu_env);
6892 CP0_CHECK(ctx->insn_flags & ASE_MT);
6893 gen_helper_dmfc0_tccontext(arg, cpu_env);
6897 CP0_CHECK(ctx->insn_flags & ASE_MT);
6898 gen_helper_dmfc0_tcschedule(arg, cpu_env);
6902 CP0_CHECK(ctx->insn_flags & ASE_MT);
6903 gen_helper_dmfc0_tcschefback(arg, cpu_env);
6907 goto cp0_unimplemented;
6913 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo1));
6918 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_GlobalNumber));
6919 rn = "GlobalNumber";
6922 goto cp0_unimplemented;
6928 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
6932 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
6933 rn = "ContextConfig";
6934 goto cp0_unimplemented;
6936 CP0_CHECK(ctx->ulri);
6937 tcg_gen_ld_tl(arg, cpu_env,
6938 offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
6942 goto cp0_unimplemented;
6948 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
6952 check_insn(ctx, ISA_MIPS32R2);
6953 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
6958 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl0));
6963 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl1));
6968 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_SegCtl2));
6972 goto cp0_unimplemented;
6978 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
6982 check_insn(ctx, ISA_MIPS32R2);
6983 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
6987 check_insn(ctx, ISA_MIPS32R2);
6988 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
6992 check_insn(ctx, ISA_MIPS32R2);
6993 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
6997 check_insn(ctx, ISA_MIPS32R2);
6998 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
7002 check_insn(ctx, ISA_MIPS32R2);
7003 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
7007 goto cp0_unimplemented;
7013 check_insn(ctx, ISA_MIPS32R2);
7014 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
7018 goto cp0_unimplemented;
7024 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
7029 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstr));
7034 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrP));
7039 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrX));
7040 tcg_gen_andi_tl(arg, arg, ~0xffff);
7044 goto cp0_unimplemented;
7050 /* Mark as an IO operation because we read the time. */
7051 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
7054 gen_helper_mfc0_count(arg, cpu_env);
7055 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
7058 /* Break the TB to be able to take timer interrupts immediately
7059 after reading count. DISAS_STOP isn't sufficient, we need to
7060 ensure we break completely out of translated code. */
7061 gen_save_pc(ctx->base.pc_next + 4);
7062 ctx->base.is_jmp = DISAS_EXIT;
7065 /* 6,7 are implementation dependent */
7067 goto cp0_unimplemented;
7073 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
7077 goto cp0_unimplemented;
7083 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
7086 /* 6,7 are implementation dependent */
7088 goto cp0_unimplemented;
7094 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
7098 check_insn(ctx, ISA_MIPS32R2);
7099 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
7103 check_insn(ctx, ISA_MIPS32R2);
7104 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
7108 check_insn(ctx, ISA_MIPS32R2);
7109 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
7113 goto cp0_unimplemented;
7119 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
7123 goto cp0_unimplemented;
7129 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
7133 goto cp0_unimplemented;
7139 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
7143 check_insn(ctx, ISA_MIPS32R2);
7144 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EBase));
7148 check_insn(ctx, ISA_MIPS32R2);
7149 CP0_CHECK(ctx->cmgcr);
7150 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_CMGCRBase));
7154 goto cp0_unimplemented;
7160 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
7164 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
7168 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
7172 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
7176 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config4));
7180 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
7183 /* 6,7 are implementation dependent */
7185 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
7189 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
7193 goto cp0_unimplemented;
7199 gen_helper_dmfc0_lladdr(arg, cpu_env);
7203 CP0_CHECK(ctx->mrp);
7204 gen_helper_dmfc0_maar(arg, cpu_env);
7208 CP0_CHECK(ctx->mrp);
7209 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
7213 goto cp0_unimplemented;
7226 CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
7227 gen_helper_1e0i(dmfc0_watchlo, arg, sel);
7231 goto cp0_unimplemented;
7244 CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
7245 gen_helper_1e0i(mfc0_watchhi, arg, sel);
7249 goto cp0_unimplemented;
7255 check_insn(ctx, ISA_MIPS3);
7256 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
7260 goto cp0_unimplemented;
7264 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7265 CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
7268 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
7272 goto cp0_unimplemented;
7276 tcg_gen_movi_tl(arg, 0); /* unimplemented */
7277 rn = "'Diagnostic"; /* implementation dependent */
7282 gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
7286 // gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
7287 rn = "TraceControl";
7288 goto cp0_unimplemented;
7290 // gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
7291 rn = "TraceControl2";
7292 goto cp0_unimplemented;
7294 // gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
7295 rn = "UserTraceData";
7296 goto cp0_unimplemented;
7298 // gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
7300 goto cp0_unimplemented;
7302 goto cp0_unimplemented;
7309 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
7313 goto cp0_unimplemented;
7319 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
7320 rn = "Performance0";
7323 // gen_helper_dmfc0_performance1(arg);
7324 rn = "Performance1";
7325 goto cp0_unimplemented;
7327 // gen_helper_dmfc0_performance2(arg);
7328 rn = "Performance2";
7329 goto cp0_unimplemented;
7331 // gen_helper_dmfc0_performance3(arg);
7332 rn = "Performance3";
7333 goto cp0_unimplemented;
7335 // gen_helper_dmfc0_performance4(arg);
7336 rn = "Performance4";
7337 goto cp0_unimplemented;
7339 // gen_helper_dmfc0_performance5(arg);
7340 rn = "Performance5";
7341 goto cp0_unimplemented;
7343 // gen_helper_dmfc0_performance6(arg);
7344 rn = "Performance6";
7345 goto cp0_unimplemented;
7347 // gen_helper_dmfc0_performance7(arg);
7348 rn = "Performance7";
7349 goto cp0_unimplemented;
7351 goto cp0_unimplemented;
7357 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
7361 goto cp0_unimplemented;
7371 tcg_gen_movi_tl(arg, 0); /* unimplemented */
7375 goto cp0_unimplemented;
7384 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagLo));
7391 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
7395 goto cp0_unimplemented;
7404 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
7411 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
7415 goto cp0_unimplemented;
7421 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
7425 goto cp0_unimplemented;
7432 gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
7441 CP0_CHECK(ctx->kscrexist & (1 << sel));
7442 tcg_gen_ld_tl(arg, cpu_env,
7443 offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
7447 goto cp0_unimplemented;
7451 goto cp0_unimplemented;
7453 trace_mips_translate_c0("dmfc0", rn, reg, sel);
7457 qemu_log_mask(LOG_UNIMP, "dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
7458 gen_mfc0_unimplemented(ctx, arg);
7461 static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
7463 const char *rn = "invalid";
7466 check_insn(ctx, ISA_MIPS64);
7468 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
7476 gen_helper_mtc0_index(cpu_env, arg);
7480 CP0_CHECK(ctx->insn_flags & ASE_MT);
7481 gen_helper_mtc0_mvpcontrol(cpu_env, arg);
7485 CP0_CHECK(ctx->insn_flags & ASE_MT);
7490 CP0_CHECK(ctx->insn_flags & ASE_MT);
7500 goto cp0_unimplemented;
7510 CP0_CHECK(ctx->insn_flags & ASE_MT);
7511 gen_helper_mtc0_vpecontrol(cpu_env, arg);
7515 CP0_CHECK(ctx->insn_flags & ASE_MT);
7516 gen_helper_mtc0_vpeconf0(cpu_env, arg);
7520 CP0_CHECK(ctx->insn_flags & ASE_MT);
7521 gen_helper_mtc0_vpeconf1(cpu_env, arg);
7525 CP0_CHECK(ctx->insn_flags & ASE_MT);
7526 gen_helper_mtc0_yqmask(cpu_env, arg);
7530 CP0_CHECK(ctx->insn_flags & ASE_MT);
7531 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
7535 CP0_CHECK(ctx->insn_flags & ASE_MT);
7536 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
7537 rn = "VPEScheFBack";
7540 CP0_CHECK(ctx->insn_flags & ASE_MT);
7541 gen_helper_mtc0_vpeopt(cpu_env, arg);
7545 goto cp0_unimplemented;
7551 gen_helper_dmtc0_entrylo0(cpu_env, arg);
7555 CP0_CHECK(ctx->insn_flags & ASE_MT);
7556 gen_helper_mtc0_tcstatus(cpu_env, arg);
7560 CP0_CHECK(ctx->insn_flags & ASE_MT);
7561 gen_helper_mtc0_tcbind(cpu_env, arg);
7565 CP0_CHECK(ctx->insn_flags & ASE_MT);
7566 gen_helper_mtc0_tcrestart(cpu_env, arg);
7570 CP0_CHECK(ctx->insn_flags & ASE_MT);
7571 gen_helper_mtc0_tchalt(cpu_env, arg);
7575 CP0_CHECK(ctx->insn_flags & ASE_MT);
7576 gen_helper_mtc0_tccontext(cpu_env, arg);
7580 CP0_CHECK(ctx->insn_flags & ASE_MT);
7581 gen_helper_mtc0_tcschedule(cpu_env, arg);
7585 CP0_CHECK(ctx->insn_flags & ASE_MT);
7586 gen_helper_mtc0_tcschefback(cpu_env, arg);
7590 goto cp0_unimplemented;
7596 gen_helper_dmtc0_entrylo1(cpu_env, arg);
7602 rn = "GlobalNumber";
7605 goto cp0_unimplemented;
7611 gen_helper_mtc0_context(cpu_env, arg);
7615 // gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
7616 rn = "ContextConfig";
7617 goto cp0_unimplemented;
7619 CP0_CHECK(ctx->ulri);
7620 tcg_gen_st_tl(arg, cpu_env,
7621 offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
7625 goto cp0_unimplemented;
7631 gen_helper_mtc0_pagemask(cpu_env, arg);
7635 check_insn(ctx, ISA_MIPS32R2);
7636 gen_helper_mtc0_pagegrain(cpu_env, arg);
7641 gen_helper_mtc0_segctl0(cpu_env, arg);
7646 gen_helper_mtc0_segctl1(cpu_env, arg);
7651 gen_helper_mtc0_segctl2(cpu_env, arg);
7655 goto cp0_unimplemented;
7661 gen_helper_mtc0_wired(cpu_env, arg);
7665 check_insn(ctx, ISA_MIPS32R2);
7666 gen_helper_mtc0_srsconf0(cpu_env, arg);
7670 check_insn(ctx, ISA_MIPS32R2);
7671 gen_helper_mtc0_srsconf1(cpu_env, arg);
7675 check_insn(ctx, ISA_MIPS32R2);
7676 gen_helper_mtc0_srsconf2(cpu_env, arg);
7680 check_insn(ctx, ISA_MIPS32R2);
7681 gen_helper_mtc0_srsconf3(cpu_env, arg);
7685 check_insn(ctx, ISA_MIPS32R2);
7686 gen_helper_mtc0_srsconf4(cpu_env, arg);
7690 goto cp0_unimplemented;
7696 check_insn(ctx, ISA_MIPS32R2);
7697 gen_helper_mtc0_hwrena(cpu_env, arg);
7698 ctx->base.is_jmp = DISAS_STOP;
7702 goto cp0_unimplemented;
7724 goto cp0_unimplemented;
7730 gen_helper_mtc0_count(cpu_env, arg);
7733 /* 6,7 are implementation dependent */
7735 goto cp0_unimplemented;
7737 /* Stop translation as we may have switched the execution mode */
7738 ctx->base.is_jmp = DISAS_STOP;
7743 gen_helper_mtc0_entryhi(cpu_env, arg);
7747 goto cp0_unimplemented;
7753 gen_helper_mtc0_compare(cpu_env, arg);
7756 /* 6,7 are implementation dependent */
7758 goto cp0_unimplemented;
7760 /* Stop translation as we may have switched the execution mode */
7761 ctx->base.is_jmp = DISAS_STOP;
7766 save_cpu_state(ctx, 1);
7767 gen_helper_mtc0_status(cpu_env, arg);
7768 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7769 gen_save_pc(ctx->base.pc_next + 4);
7770 ctx->base.is_jmp = DISAS_EXIT;
7774 check_insn(ctx, ISA_MIPS32R2);
7775 gen_helper_mtc0_intctl(cpu_env, arg);
7776 /* Stop translation as we may have switched the execution mode */
7777 ctx->base.is_jmp = DISAS_STOP;
7781 check_insn(ctx, ISA_MIPS32R2);
7782 gen_helper_mtc0_srsctl(cpu_env, arg);
7783 /* Stop translation as we may have switched the execution mode */
7784 ctx->base.is_jmp = DISAS_STOP;
7788 check_insn(ctx, ISA_MIPS32R2);
7789 gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
7790 /* Stop translation as we may have switched the execution mode */
7791 ctx->base.is_jmp = DISAS_STOP;
7795 goto cp0_unimplemented;
7801 save_cpu_state(ctx, 1);
7802 gen_helper_mtc0_cause(cpu_env, arg);
7803 /* Stop translation as we may have triggered an interrupt.
7804  * DISAS_STOP isn't sufficient; we need to ensure we break out of
7805 * translated code to check for pending interrupts. */
7806 gen_save_pc(ctx->base.pc_next + 4);
7807 ctx->base.is_jmp = DISAS_EXIT;
7811 goto cp0_unimplemented;
7817 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
7821 goto cp0_unimplemented;
7831 check_insn(ctx, ISA_MIPS32R2);
7832 gen_helper_mtc0_ebase(cpu_env, arg);
7836 goto cp0_unimplemented;
7842 gen_helper_mtc0_config0(cpu_env, arg);
7844 /* Stop translation as we may have switched the execution mode */
7845 ctx->base.is_jmp = DISAS_STOP;
7848 /* ignored, read only */
7852 gen_helper_mtc0_config2(cpu_env, arg);
7854 /* Stop translation as we may have switched the execution mode */
7855 ctx->base.is_jmp = DISAS_STOP;
7858 gen_helper_mtc0_config3(cpu_env, arg);
7860 /* Stop translation as we may have switched the execution mode */
7861 ctx->base.is_jmp = DISAS_STOP;
7864 /* currently ignored */
7868 gen_helper_mtc0_config5(cpu_env, arg);
7870 /* Stop translation as we may have switched the execution mode */
7871 ctx->base.is_jmp = DISAS_STOP;
7873 /* 6,7 are implementation dependent */
7875 rn = "Invalid config selector";
7876 goto cp0_unimplemented;
7882 gen_helper_mtc0_lladdr(cpu_env, arg);
7886 CP0_CHECK(ctx->mrp);
7887 gen_helper_mtc0_maar(cpu_env, arg);
7891 CP0_CHECK(ctx->mrp);
7892 gen_helper_mtc0_maari(cpu_env, arg);
7896 goto cp0_unimplemented;
7909 CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
7910 gen_helper_0e1i(mtc0_watchlo, arg, sel);
7914 goto cp0_unimplemented;
7927 CP0_CHECK(ctx->CP0_Config1 & (1 << CP0C1_WR));
7928 gen_helper_0e1i(mtc0_watchhi, arg, sel);
7932 goto cp0_unimplemented;
7938 check_insn(ctx, ISA_MIPS3);
7939 gen_helper_mtc0_xcontext(cpu_env, arg);
7943 goto cp0_unimplemented;
7947 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7948 CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
7951 gen_helper_mtc0_framemask(cpu_env, arg);
7955 goto cp0_unimplemented;
7960 rn = "Diagnostic"; /* implementation dependent */
7965 gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
7966 /* DISAS_STOP isn't good enough here; hflags may have changed. */
7967 gen_save_pc(ctx->base.pc_next + 4);
7968 ctx->base.is_jmp = DISAS_EXIT;
7972 // gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
7973 /* Stop translation as we may have switched the execution mode */
7974 ctx->base.is_jmp = DISAS_STOP;
7975 rn = "TraceControl";
7976 goto cp0_unimplemented;
7978 // gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
7979 /* Stop translation as we may have switched the execution mode */
7980 ctx->base.is_jmp = DISAS_STOP;
7981 rn = "TraceControl2";
7982 goto cp0_unimplemented;
7984 // gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
7985 /* Stop translation as we may have switched the execution mode */
7986 ctx->base.is_jmp = DISAS_STOP;
7987 rn = "UserTraceData";
7988 goto cp0_unimplemented;
7990 // gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
7991 /* Stop translation as we may have switched the execution mode */
7992 ctx->base.is_jmp = DISAS_STOP;
7994 goto cp0_unimplemented;
7996 goto cp0_unimplemented;
8003 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
8007 goto cp0_unimplemented;
8013 gen_helper_mtc0_performance0(cpu_env, arg);
8014 rn = "Performance0";
8017 // gen_helper_mtc0_performance1(cpu_env, arg);
8018 rn = "Performance1";
8019 goto cp0_unimplemented;
8021 // gen_helper_mtc0_performance2(cpu_env, arg);
8022 rn = "Performance2";
8023 goto cp0_unimplemented;
8025 // gen_helper_mtc0_performance3(cpu_env, arg);
8026 rn = "Performance3";
8027 goto cp0_unimplemented;
8029 // gen_helper_mtc0_performance4(cpu_env, arg);
8030 rn = "Performance4";
8031 goto cp0_unimplemented;
8033 // gen_helper_mtc0_performance5(cpu_env, arg);
8034 rn = "Performance5";
8035 goto cp0_unimplemented;
8037 // gen_helper_mtc0_performance6(cpu_env, arg);
8038 rn = "Performance6";
8039 goto cp0_unimplemented;
8041 // gen_helper_mtc0_performance7(cpu_env, arg);
8042 rn = "Performance7";
8043 goto cp0_unimplemented;
8045 goto cp0_unimplemented;
8051 gen_helper_mtc0_errctl(cpu_env, arg);
8052 ctx->base.is_jmp = DISAS_STOP;
8056 goto cp0_unimplemented;
8069 goto cp0_unimplemented;
8078 gen_helper_mtc0_taglo(cpu_env, arg);
8085 gen_helper_mtc0_datalo(cpu_env, arg);
8089 goto cp0_unimplemented;
8098 gen_helper_mtc0_taghi(cpu_env, arg);
8105 gen_helper_mtc0_datahi(cpu_env, arg);
8110 goto cp0_unimplemented;
8116 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
8120 goto cp0_unimplemented;
8127 gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
8136 CP0_CHECK(ctx->kscrexist & (1 << sel));
8137 tcg_gen_st_tl(arg, cpu_env,
8138 offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
8142 goto cp0_unimplemented;
8146 goto cp0_unimplemented;
8148 trace_mips_translate_c0("dmtc0", rn, reg, sel);
8150 /* For simplicity assume that all writes can cause interrupts. */
8151 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
8153 /* DISAS_STOP isn't sufficient; we need to ensure we break out of
8154 * translated code to check for pending interrupts. */
8155 gen_save_pc(ctx->base.pc_next + 4);
8156 ctx->base.is_jmp = DISAS_EXIT;
8161 qemu_log_mask(LOG_UNIMP, "dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
8163 #endif /* TARGET_MIPS64 */
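/* MFTR (MT ASE): read a register belonging to the TC selected by
 * CP0.VPEControl.TargTC.  rt picks the source register in the target TC and
 * rd is the destination GPR in the current TC; u selects CP0 (u == 0) versus
 * the GPR/LO/HI/ACX/DSPControl/FPU/COP2 space, sel is the register select and
 * h picks the high half of a 64-bit FPR.  When the target TC sits on another
 * VPE without MVP set, or TargTC exceeds MVPConf0.PTC, the read returns
 * all ones. */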
8165 static void gen_mftr(CPUMIPSState *env, DisasContext *ctx, int rt, int rd,
8166 int u, int sel, int h)
8168 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
8169 TCGv t0 = tcg_temp_local_new();
8171 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
8172 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
8173 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
8174 tcg_gen_movi_tl(t0, -1);
8175 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
8176 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
8177 tcg_gen_movi_tl(t0, -1);
8183 gen_helper_mftc0_vpecontrol(t0, cpu_env);
8186 gen_helper_mftc0_vpeconf0(t0, cpu_env);
8196 gen_helper_mftc0_tcstatus(t0, cpu_env);
8199 gen_helper_mftc0_tcbind(t0, cpu_env);
8202 gen_helper_mftc0_tcrestart(t0, cpu_env);
8205 gen_helper_mftc0_tchalt(t0, cpu_env);
8208 gen_helper_mftc0_tccontext(t0, cpu_env);
8211 gen_helper_mftc0_tcschedule(t0, cpu_env);
8214 gen_helper_mftc0_tcschefback(t0, cpu_env);
8217 gen_mfc0(ctx, t0, rt, sel);
8224 gen_helper_mftc0_entryhi(t0, cpu_env);
8227 gen_mfc0(ctx, t0, rt, sel);
8233 gen_helper_mftc0_status(t0, cpu_env);
8236 gen_mfc0(ctx, t0, rt, sel);
8242 gen_helper_mftc0_cause(t0, cpu_env);
8252 gen_helper_mftc0_epc(t0, cpu_env);
8262 gen_helper_mftc0_ebase(t0, cpu_env);
8279 gen_helper_mftc0_configx(t0, cpu_env, tcg_const_tl(sel));
8289 gen_helper_mftc0_debug(t0, cpu_env);
8292 gen_mfc0(ctx, t0, rt, sel);
8297 gen_mfc0(ctx, t0, rt, sel);
8299 } else switch (sel) {
8300 /* GPR registers. */
8302 gen_helper_1e0i(mftgpr, t0, rt);
8304 /* Auxiliary CPU registers */
8308 gen_helper_1e0i(mftlo, t0, 0);
8311 gen_helper_1e0i(mfthi, t0, 0);
8314 gen_helper_1e0i(mftacx, t0, 0);
8317 gen_helper_1e0i(mftlo, t0, 1);
8320 gen_helper_1e0i(mfthi, t0, 1);
8323 gen_helper_1e0i(mftacx, t0, 1);
8326 gen_helper_1e0i(mftlo, t0, 2);
8329 gen_helper_1e0i(mfthi, t0, 2);
8332 gen_helper_1e0i(mftacx, t0, 2);
8335 gen_helper_1e0i(mftlo, t0, 3);
8338 gen_helper_1e0i(mfthi, t0, 3);
8341 gen_helper_1e0i(mftacx, t0, 3);
8344 gen_helper_mftdsp(t0, cpu_env);
8350 /* Floating point (COP1). */
8352 /* XXX: For now we support only a single FPU context. */
8354 TCGv_i32 fp0 = tcg_temp_new_i32();
8356 gen_load_fpr32(ctx, fp0, rt);
8357 tcg_gen_ext_i32_tl(t0, fp0);
8358 tcg_temp_free_i32(fp0);
8360 TCGv_i32 fp0 = tcg_temp_new_i32();
8362 gen_load_fpr32h(ctx, fp0, rt);
8363 tcg_gen_ext_i32_tl(t0, fp0);
8364 tcg_temp_free_i32(fp0);
8368 /* XXX: For now we support only a single FPU context. */
8369 gen_helper_1e0i(cfc1, t0, rt);
8371 /* COP2: Not implemented. */
8378 trace_mips_translate_tr("mftr", rt, u, sel, h);
8379 gen_store_gpr(t0, rd);
8385 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
8386 generate_exception_end(ctx, EXCP_RI);
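/* MTTR (MT ASE): write a register of the TC selected by
 * CP0.VPEControl.TargTC.  rt is the source GPR in the current TC and rd
 * selects the destination register in the target TC; u, sel and h have the
 * same meaning as for MFTR above.  Writes to an inaccessible or out-of-range
 * TC are silently discarded. */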
8389 static void gen_mttr(CPUMIPSState *env, DisasContext *ctx, int rd, int rt,
8390 int u, int sel, int h)
8392 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
8393 TCGv t0 = tcg_temp_local_new();
8395 gen_load_gpr(t0, rt);
8396 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
8397 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
8398 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
8400 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
8401 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
8408 gen_helper_mttc0_vpecontrol(cpu_env, t0);
8411 gen_helper_mttc0_vpeconf0(cpu_env, t0);
8421 gen_helper_mttc0_tcstatus(cpu_env, t0);
8424 gen_helper_mttc0_tcbind(cpu_env, t0);
8427 gen_helper_mttc0_tcrestart(cpu_env, t0);
8430 gen_helper_mttc0_tchalt(cpu_env, t0);
8433 gen_helper_mttc0_tccontext(cpu_env, t0);
8436 gen_helper_mttc0_tcschedule(cpu_env, t0);
8439 gen_helper_mttc0_tcschefback(cpu_env, t0);
8442 gen_mtc0(ctx, t0, rd, sel);
8449 gen_helper_mttc0_entryhi(cpu_env, t0);
8452 gen_mtc0(ctx, t0, rd, sel);
8458 gen_helper_mttc0_status(cpu_env, t0);
8461 gen_mtc0(ctx, t0, rd, sel);
8467 gen_helper_mttc0_cause(cpu_env, t0);
8477 gen_helper_mttc0_ebase(cpu_env, t0);
8487 gen_helper_mttc0_debug(cpu_env, t0);
8490 gen_mtc0(ctx, t0, rd, sel);
8495 gen_mtc0(ctx, t0, rd, sel);
8497 } else switch (sel) {
8498 /* GPR registers. */
8500 gen_helper_0e1i(mttgpr, t0, rd);
8502 /* Auxiliary CPU registers */
8506 gen_helper_0e1i(mttlo, t0, 0);
8509 gen_helper_0e1i(mtthi, t0, 0);
8512 gen_helper_0e1i(mttacx, t0, 0);
8515 gen_helper_0e1i(mttlo, t0, 1);
8518 gen_helper_0e1i(mtthi, t0, 1);
8521 gen_helper_0e1i(mttacx, t0, 1);
8524 gen_helper_0e1i(mttlo, t0, 2);
8527 gen_helper_0e1i(mtthi, t0, 2);
8530 gen_helper_0e1i(mttacx, t0, 2);
8533 gen_helper_0e1i(mttlo, t0, 3);
8536 gen_helper_0e1i(mtthi, t0, 3);
8539 gen_helper_0e1i(mttacx, t0, 3);
8542 gen_helper_mttdsp(cpu_env, t0);
8548 /* Floating point (COP1). */
8550 /* XXX: For now we support only a single FPU context. */
8552 TCGv_i32 fp0 = tcg_temp_new_i32();
8554 tcg_gen_trunc_tl_i32(fp0, t0);
8555 gen_store_fpr32(ctx, fp0, rd);
8556 tcg_temp_free_i32(fp0);
8558 TCGv_i32 fp0 = tcg_temp_new_i32();
8560 tcg_gen_trunc_tl_i32(fp0, t0);
8561 gen_store_fpr32h(ctx, fp0, rd);
8562 tcg_temp_free_i32(fp0);
8566 /* XXX: For now we support only a single FPU context. */
8568 TCGv_i32 fs_tmp = tcg_const_i32(rd);
8570 gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
8571 tcg_temp_free_i32(fs_tmp);
8573 /* Stop translation as we may have changed hflags */
8574 ctx->base.is_jmp = DISAS_STOP;
8576 /* COP2: Not implemented. */
8583 trace_mips_translate_tr("mttr", rd, u, sel, h);
8589 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
8590 generate_exception_end(ctx, EXCP_RI);
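/* Dispatch the privileged COP0 opcodes: MFC0/MTC0 (plus DMFC0/DMTC0 on
 * MIPS64, MFHC0/MTHC0 and the MT ASE MFTR/MTTR), the TLB maintenance
 * instructions (TLBWI, TLBWR, TLBP, TLBR, TLBINV, TLBINVF) and ERET/ERETNC,
 * DERET and WAIT.  The register select comes from the low three bits of the
 * opcode. */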
8593 static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
8595 const char *opn = "ldst";
8597 check_cp0_enabled(ctx);
8604 gen_mfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
8609 TCGv t0 = tcg_temp_new();
8611 gen_load_gpr(t0, rt);
8612 gen_mtc0(ctx, t0, rd, ctx->opcode & 0x7);
8617 #if defined(TARGET_MIPS64)
8619 check_insn(ctx, ISA_MIPS3);
8624 gen_dmfc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
8628 check_insn(ctx, ISA_MIPS3);
8630 TCGv t0 = tcg_temp_new();
8632 gen_load_gpr(t0, rt);
8633 gen_dmtc0(ctx, t0, rd, ctx->opcode & 0x7);
8645 gen_mfhc0(ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
8651 TCGv t0 = tcg_temp_new();
8652 gen_load_gpr(t0, rt);
8653 gen_mthc0(ctx, t0, rd, ctx->opcode & 0x7);
8659 check_cp0_enabled(ctx);
8664 gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
8665 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
8669 check_cp0_enabled(ctx);
8670 gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
8671 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
8676 if (!env->tlb->helper_tlbwi)
8678 gen_helper_tlbwi(cpu_env);
8683 if (!env->tlb->helper_tlbinv) {
8686 gen_helper_tlbinv(cpu_env);
8687 } /* treat as nop if TLBINV not supported */
8692 if (!env->tlb->helper_tlbinvf) {
8695 gen_helper_tlbinvf(cpu_env);
8696 } /* treat as nop if TLBINVF not supported */
8700 if (!env->tlb->helper_tlbwr)
8702 gen_helper_tlbwr(cpu_env);
8706 if (!env->tlb->helper_tlbp)
8708 gen_helper_tlbp(cpu_env);
8712 if (!env->tlb->helper_tlbr)
8714 gen_helper_tlbr(cpu_env);
8716 case OPC_ERET: /* OPC_ERETNC */
8717 if ((ctx->insn_flags & ISA_MIPS32R6) &&
8718 (ctx->hflags & MIPS_HFLAG_BMASK)) {
8721 int bit_shift = (ctx->hflags & MIPS_HFLAG_M16) ? 16 : 6;
8722 if (ctx->opcode & (1 << bit_shift)) {
8725 check_insn(ctx, ISA_MIPS32R5);
8726 gen_helper_eretnc(cpu_env);
8730 check_insn(ctx, ISA_MIPS2);
8731 gen_helper_eret(cpu_env);
8733 ctx->base.is_jmp = DISAS_EXIT;
8738 check_insn(ctx, ISA_MIPS32);
8739 if ((ctx->insn_flags & ISA_MIPS32R6) &&
8740 (ctx->hflags & MIPS_HFLAG_BMASK)) {
8743 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
8745 generate_exception_end(ctx, EXCP_RI);
8747 gen_helper_deret(cpu_env);
8748 ctx->base.is_jmp = DISAS_EXIT;
8753 check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
8754 if ((ctx->insn_flags & ISA_MIPS32R6) &&
8755 (ctx->hflags & MIPS_HFLAG_BMASK)) {
8758 /* If we get an exception, we want to restart at the next instruction */
8759 ctx->base.pc_next += 4;
8760 save_cpu_state(ctx, 1);
8761 ctx->base.pc_next -= 4;
8762 gen_helper_wait(cpu_env);
8763 ctx->base.is_jmp = DISAS_NORETURN;
8768 generate_exception_end(ctx, EXCP_RI);
8771 (void)opn; /* avoid a compiler warning */
8773 #endif /* !CONFIG_USER_ONLY */
8775 /* CP1 Branches (before delay slot) */
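/* Pre-R6 BC1F/BC1T and their branch-likely variants, plus the MIPS-3D
 * BC1ANY2/BC1ANY4 forms which test two or four consecutive condition codes.
 * The condition code bits are extracted from FCR31 via get_fp_bit(cc) and
 * combined into bcond; btarget and the BL/BC/BDS32 hflags record the pending
 * branch and its 32-bit delay slot. */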
8776 static void gen_compute_branch1(DisasContext *ctx, uint32_t op,
8777 int32_t cc, int32_t offset)
8779 target_ulong btarget;
8780 TCGv_i32 t0 = tcg_temp_new_i32();
8782 if ((ctx->insn_flags & ISA_MIPS32R6) && (ctx->hflags & MIPS_HFLAG_BMASK)) {
8783 generate_exception_end(ctx, EXCP_RI);
8788 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
8790 btarget = ctx->base.pc_next + 4 + offset;
8794 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8795 tcg_gen_not_i32(t0, t0);
8796 tcg_gen_andi_i32(t0, t0, 1);
8797 tcg_gen_extu_i32_tl(bcond, t0);
8800 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8801 tcg_gen_not_i32(t0, t0);
8802 tcg_gen_andi_i32(t0, t0, 1);
8803 tcg_gen_extu_i32_tl(bcond, t0);
8806 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8807 tcg_gen_andi_i32(t0, t0, 1);
8808 tcg_gen_extu_i32_tl(bcond, t0);
8811 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8812 tcg_gen_andi_i32(t0, t0, 1);
8813 tcg_gen_extu_i32_tl(bcond, t0);
8815 ctx->hflags |= MIPS_HFLAG_BL;
8819 TCGv_i32 t1 = tcg_temp_new_i32();
8820 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8821 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
8822 tcg_gen_nand_i32(t0, t0, t1);
8823 tcg_temp_free_i32(t1);
8824 tcg_gen_andi_i32(t0, t0, 1);
8825 tcg_gen_extu_i32_tl(bcond, t0);
8830 TCGv_i32 t1 = tcg_temp_new_i32();
8831 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8832 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
8833 tcg_gen_or_i32(t0, t0, t1);
8834 tcg_temp_free_i32(t1);
8835 tcg_gen_andi_i32(t0, t0, 1);
8836 tcg_gen_extu_i32_tl(bcond, t0);
8841 TCGv_i32 t1 = tcg_temp_new_i32();
8842 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8843 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
8844 tcg_gen_and_i32(t0, t0, t1);
8845 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
8846 tcg_gen_and_i32(t0, t0, t1);
8847 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
8848 tcg_gen_nand_i32(t0, t0, t1);
8849 tcg_temp_free_i32(t1);
8850 tcg_gen_andi_i32(t0, t0, 1);
8851 tcg_gen_extu_i32_tl(bcond, t0);
8856 TCGv_i32 t1 = tcg_temp_new_i32();
8857 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
8858 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
8859 tcg_gen_or_i32(t0, t0, t1);
8860 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
8861 tcg_gen_or_i32(t0, t0, t1);
8862 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
8863 tcg_gen_or_i32(t0, t0, t1);
8864 tcg_temp_free_i32(t1);
8865 tcg_gen_andi_i32(t0, t0, 1);
8866 tcg_gen_extu_i32_tl(bcond, t0);
8869 ctx->hflags |= MIPS_HFLAG_BC;
8872 MIPS_INVAL("cp1 cond branch");
8873 generate_exception_end(ctx, EXCP_RI);
8876 ctx->btarget = btarget;
8877 ctx->hflags |= MIPS_HFLAG_BDS32;
8879 tcg_temp_free_i32(t0);
8882 /* R6 CP1 Branches */
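/* BC1EQZ/BC1NEZ: branch on bit 0 of FPR ft being zero or non-zero.  R6 has
 * no FP condition codes and no branch-likely forms; a branch sitting in a
 * delay or forbidden slot raises a Reserved Instruction exception, and
 * delayslot_size selects the BDS16 or BDS32 hflag for the 16-bit or 32-bit
 * delay slot. */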
8883 static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
8884 int32_t ft, int32_t offset,
8887 target_ulong btarget;
8888 TCGv_i64 t0 = tcg_temp_new_i64();
8890 if (ctx->hflags & MIPS_HFLAG_BMASK) {
8891 #ifdef MIPS_DEBUG_DISAS
8892 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
8893 "\n", ctx->base.pc_next);
8895 generate_exception_end(ctx, EXCP_RI);
8899 gen_load_fpr64(ctx, t0, ft);
8900 tcg_gen_andi_i64(t0, t0, 1);
8902 btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
8906 tcg_gen_xori_i64(t0, t0, 1);
8907 ctx->hflags |= MIPS_HFLAG_BC;
8910 /* t0 already set */
8911 ctx->hflags |= MIPS_HFLAG_BC;
8914 MIPS_INVAL("cp1 cond branch");
8915 generate_exception_end(ctx, EXCP_RI);
8919 tcg_gen_trunc_i64_tl(bcond, t0);
8921 ctx->btarget = btarget;
8923 switch (delayslot_size) {
8925 ctx->hflags |= MIPS_HFLAG_BDS16;
8928 ctx->hflags |= MIPS_HFLAG_BDS32;
8933 tcg_temp_free_i64(t0);
8936 /* Coprocessor 1 (FPU) */
8938 #define FOP(func, fmt) (((fmt) << 21) | (func))
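/* FOP(func, fmt) mirrors the COP1 instruction layout: fmt occupies bits
 * 25..21 and the function field bits 5..0, and the enum values below are the
 * keys gen_farith() switches on.  Assuming the hardware fmt encoding
 * (FMT_S == 16, FMT_D == 17, FMT_W == 20, FMT_L == 21, FMT_PS == 22),
 * FOP(2, FMT_S) == 0x02000002, i.e. the fmt/function part of MUL.S.  The R6
 * CMP.cond.S/CMP.cond.D entries further down reuse the W and L fmt
 * encodings, as in the architecture. */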
8941 OPC_ADD_S = FOP(0, FMT_S),
8942 OPC_SUB_S = FOP(1, FMT_S),
8943 OPC_MUL_S = FOP(2, FMT_S),
8944 OPC_DIV_S = FOP(3, FMT_S),
8945 OPC_SQRT_S = FOP(4, FMT_S),
8946 OPC_ABS_S = FOP(5, FMT_S),
8947 OPC_MOV_S = FOP(6, FMT_S),
8948 OPC_NEG_S = FOP(7, FMT_S),
8949 OPC_ROUND_L_S = FOP(8, FMT_S),
8950 OPC_TRUNC_L_S = FOP(9, FMT_S),
8951 OPC_CEIL_L_S = FOP(10, FMT_S),
8952 OPC_FLOOR_L_S = FOP(11, FMT_S),
8953 OPC_ROUND_W_S = FOP(12, FMT_S),
8954 OPC_TRUNC_W_S = FOP(13, FMT_S),
8955 OPC_CEIL_W_S = FOP(14, FMT_S),
8956 OPC_FLOOR_W_S = FOP(15, FMT_S),
8957 OPC_SEL_S = FOP(16, FMT_S),
8958 OPC_MOVCF_S = FOP(17, FMT_S),
8959 OPC_MOVZ_S = FOP(18, FMT_S),
8960 OPC_MOVN_S = FOP(19, FMT_S),
8961 OPC_SELEQZ_S = FOP(20, FMT_S),
8962 OPC_RECIP_S = FOP(21, FMT_S),
8963 OPC_RSQRT_S = FOP(22, FMT_S),
8964 OPC_SELNEZ_S = FOP(23, FMT_S),
8965 OPC_MADDF_S = FOP(24, FMT_S),
8966 OPC_MSUBF_S = FOP(25, FMT_S),
8967 OPC_RINT_S = FOP(26, FMT_S),
8968 OPC_CLASS_S = FOP(27, FMT_S),
8969 OPC_MIN_S = FOP(28, FMT_S),
8970 OPC_RECIP2_S = FOP(28, FMT_S),
8971 OPC_MINA_S = FOP(29, FMT_S),
8972 OPC_RECIP1_S = FOP(29, FMT_S),
8973 OPC_MAX_S = FOP(30, FMT_S),
8974 OPC_RSQRT1_S = FOP(30, FMT_S),
8975 OPC_MAXA_S = FOP(31, FMT_S),
8976 OPC_RSQRT2_S = FOP(31, FMT_S),
8977 OPC_CVT_D_S = FOP(33, FMT_S),
8978 OPC_CVT_W_S = FOP(36, FMT_S),
8979 OPC_CVT_L_S = FOP(37, FMT_S),
8980 OPC_CVT_PS_S = FOP(38, FMT_S),
8981 OPC_CMP_F_S = FOP (48, FMT_S),
8982 OPC_CMP_UN_S = FOP (49, FMT_S),
8983 OPC_CMP_EQ_S = FOP (50, FMT_S),
8984 OPC_CMP_UEQ_S = FOP (51, FMT_S),
8985 OPC_CMP_OLT_S = FOP (52, FMT_S),
8986 OPC_CMP_ULT_S = FOP (53, FMT_S),
8987 OPC_CMP_OLE_S = FOP (54, FMT_S),
8988 OPC_CMP_ULE_S = FOP (55, FMT_S),
8989 OPC_CMP_SF_S = FOP (56, FMT_S),
8990 OPC_CMP_NGLE_S = FOP (57, FMT_S),
8991 OPC_CMP_SEQ_S = FOP (58, FMT_S),
8992 OPC_CMP_NGL_S = FOP (59, FMT_S),
8993 OPC_CMP_LT_S = FOP (60, FMT_S),
8994 OPC_CMP_NGE_S = FOP (61, FMT_S),
8995 OPC_CMP_LE_S = FOP (62, FMT_S),
8996 OPC_CMP_NGT_S = FOP (63, FMT_S),
8998 OPC_ADD_D = FOP(0, FMT_D),
8999 OPC_SUB_D = FOP(1, FMT_D),
9000 OPC_MUL_D = FOP(2, FMT_D),
9001 OPC_DIV_D = FOP(3, FMT_D),
9002 OPC_SQRT_D = FOP(4, FMT_D),
9003 OPC_ABS_D = FOP(5, FMT_D),
9004 OPC_MOV_D = FOP(6, FMT_D),
9005 OPC_NEG_D = FOP(7, FMT_D),
9006 OPC_ROUND_L_D = FOP(8, FMT_D),
9007 OPC_TRUNC_L_D = FOP(9, FMT_D),
9008 OPC_CEIL_L_D = FOP(10, FMT_D),
9009 OPC_FLOOR_L_D = FOP(11, FMT_D),
9010 OPC_ROUND_W_D = FOP(12, FMT_D),
9011 OPC_TRUNC_W_D = FOP(13, FMT_D),
9012 OPC_CEIL_W_D = FOP(14, FMT_D),
9013 OPC_FLOOR_W_D = FOP(15, FMT_D),
9014 OPC_SEL_D = FOP(16, FMT_D),
9015 OPC_MOVCF_D = FOP(17, FMT_D),
9016 OPC_MOVZ_D = FOP(18, FMT_D),
9017 OPC_MOVN_D = FOP(19, FMT_D),
9018 OPC_SELEQZ_D = FOP(20, FMT_D),
9019 OPC_RECIP_D = FOP(21, FMT_D),
9020 OPC_RSQRT_D = FOP(22, FMT_D),
9021 OPC_SELNEZ_D = FOP(23, FMT_D),
9022 OPC_MADDF_D = FOP(24, FMT_D),
9023 OPC_MSUBF_D = FOP(25, FMT_D),
9024 OPC_RINT_D = FOP(26, FMT_D),
9025 OPC_CLASS_D = FOP(27, FMT_D),
9026 OPC_MIN_D = FOP(28, FMT_D),
9027 OPC_RECIP2_D = FOP(28, FMT_D),
9028 OPC_MINA_D = FOP(29, FMT_D),
9029 OPC_RECIP1_D = FOP(29, FMT_D),
9030 OPC_MAX_D = FOP(30, FMT_D),
9031 OPC_RSQRT1_D = FOP(30, FMT_D),
9032 OPC_MAXA_D = FOP(31, FMT_D),
9033 OPC_RSQRT2_D = FOP(31, FMT_D),
9034 OPC_CVT_S_D = FOP(32, FMT_D),
9035 OPC_CVT_W_D = FOP(36, FMT_D),
9036 OPC_CVT_L_D = FOP(37, FMT_D),
9037 OPC_CMP_F_D = FOP (48, FMT_D),
9038 OPC_CMP_UN_D = FOP (49, FMT_D),
9039 OPC_CMP_EQ_D = FOP (50, FMT_D),
9040 OPC_CMP_UEQ_D = FOP (51, FMT_D),
9041 OPC_CMP_OLT_D = FOP (52, FMT_D),
9042 OPC_CMP_ULT_D = FOP (53, FMT_D),
9043 OPC_CMP_OLE_D = FOP (54, FMT_D),
9044 OPC_CMP_ULE_D = FOP (55, FMT_D),
9045 OPC_CMP_SF_D = FOP (56, FMT_D),
9046 OPC_CMP_NGLE_D = FOP (57, FMT_D),
9047 OPC_CMP_SEQ_D = FOP (58, FMT_D),
9048 OPC_CMP_NGL_D = FOP (59, FMT_D),
9049 OPC_CMP_LT_D = FOP (60, FMT_D),
9050 OPC_CMP_NGE_D = FOP (61, FMT_D),
9051 OPC_CMP_LE_D = FOP (62, FMT_D),
9052 OPC_CMP_NGT_D = FOP (63, FMT_D),
9054 OPC_CVT_S_W = FOP(32, FMT_W),
9055 OPC_CVT_D_W = FOP(33, FMT_W),
9056 OPC_CVT_S_L = FOP(32, FMT_L),
9057 OPC_CVT_D_L = FOP(33, FMT_L),
9058 OPC_CVT_PS_PW = FOP(38, FMT_W),
9060 OPC_ADD_PS = FOP(0, FMT_PS),
9061 OPC_SUB_PS = FOP(1, FMT_PS),
9062 OPC_MUL_PS = FOP(2, FMT_PS),
9063 OPC_DIV_PS = FOP(3, FMT_PS),
9064 OPC_ABS_PS = FOP(5, FMT_PS),
9065 OPC_MOV_PS = FOP(6, FMT_PS),
9066 OPC_NEG_PS = FOP(7, FMT_PS),
9067 OPC_MOVCF_PS = FOP(17, FMT_PS),
9068 OPC_MOVZ_PS = FOP(18, FMT_PS),
9069 OPC_MOVN_PS = FOP(19, FMT_PS),
9070 OPC_ADDR_PS = FOP(24, FMT_PS),
9071 OPC_MULR_PS = FOP(26, FMT_PS),
9072 OPC_RECIP2_PS = FOP(28, FMT_PS),
9073 OPC_RECIP1_PS = FOP(29, FMT_PS),
9074 OPC_RSQRT1_PS = FOP(30, FMT_PS),
9075 OPC_RSQRT2_PS = FOP(31, FMT_PS),
9077 OPC_CVT_S_PU = FOP(32, FMT_PS),
9078 OPC_CVT_PW_PS = FOP(36, FMT_PS),
9079 OPC_CVT_S_PL = FOP(40, FMT_PS),
9080 OPC_PLL_PS = FOP(44, FMT_PS),
9081 OPC_PLU_PS = FOP(45, FMT_PS),
9082 OPC_PUL_PS = FOP(46, FMT_PS),
9083 OPC_PUU_PS = FOP(47, FMT_PS),
9084 OPC_CMP_F_PS = FOP (48, FMT_PS),
9085 OPC_CMP_UN_PS = FOP (49, FMT_PS),
9086 OPC_CMP_EQ_PS = FOP (50, FMT_PS),
9087 OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
9088 OPC_CMP_OLT_PS = FOP (52, FMT_PS),
9089 OPC_CMP_ULT_PS = FOP (53, FMT_PS),
9090 OPC_CMP_OLE_PS = FOP (54, FMT_PS),
9091 OPC_CMP_ULE_PS = FOP (55, FMT_PS),
9092 OPC_CMP_SF_PS = FOP (56, FMT_PS),
9093 OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
9094 OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
9095 OPC_CMP_NGL_PS = FOP (59, FMT_PS),
9096 OPC_CMP_LT_PS = FOP (60, FMT_PS),
9097 OPC_CMP_NGE_PS = FOP (61, FMT_PS),
9098 OPC_CMP_LE_PS = FOP (62, FMT_PS),
9099 OPC_CMP_NGT_PS = FOP (63, FMT_PS),
9103 R6_OPC_CMP_AF_S = FOP(0, FMT_W),
9104 R6_OPC_CMP_UN_S = FOP(1, FMT_W),
9105 R6_OPC_CMP_EQ_S = FOP(2, FMT_W),
9106 R6_OPC_CMP_UEQ_S = FOP(3, FMT_W),
9107 R6_OPC_CMP_LT_S = FOP(4, FMT_W),
9108 R6_OPC_CMP_ULT_S = FOP(5, FMT_W),
9109 R6_OPC_CMP_LE_S = FOP(6, FMT_W),
9110 R6_OPC_CMP_ULE_S = FOP(7, FMT_W),
9111 R6_OPC_CMP_SAF_S = FOP(8, FMT_W),
9112 R6_OPC_CMP_SUN_S = FOP(9, FMT_W),
9113 R6_OPC_CMP_SEQ_S = FOP(10, FMT_W),
9114 R6_OPC_CMP_SEUQ_S = FOP(11, FMT_W),
9115 R6_OPC_CMP_SLT_S = FOP(12, FMT_W),
9116 R6_OPC_CMP_SULT_S = FOP(13, FMT_W),
9117 R6_OPC_CMP_SLE_S = FOP(14, FMT_W),
9118 R6_OPC_CMP_SULE_S = FOP(15, FMT_W),
9119 R6_OPC_CMP_OR_S = FOP(17, FMT_W),
9120 R6_OPC_CMP_UNE_S = FOP(18, FMT_W),
9121 R6_OPC_CMP_NE_S = FOP(19, FMT_W),
9122 R6_OPC_CMP_SOR_S = FOP(25, FMT_W),
9123 R6_OPC_CMP_SUNE_S = FOP(26, FMT_W),
9124 R6_OPC_CMP_SNE_S = FOP(27, FMT_W),
9126 R6_OPC_CMP_AF_D = FOP(0, FMT_L),
9127 R6_OPC_CMP_UN_D = FOP(1, FMT_L),
9128 R6_OPC_CMP_EQ_D = FOP(2, FMT_L),
9129 R6_OPC_CMP_UEQ_D = FOP(3, FMT_L),
9130 R6_OPC_CMP_LT_D = FOP(4, FMT_L),
9131 R6_OPC_CMP_ULT_D = FOP(5, FMT_L),
9132 R6_OPC_CMP_LE_D = FOP(6, FMT_L),
9133 R6_OPC_CMP_ULE_D = FOP(7, FMT_L),
9134 R6_OPC_CMP_SAF_D = FOP(8, FMT_L),
9135 R6_OPC_CMP_SUN_D = FOP(9, FMT_L),
9136 R6_OPC_CMP_SEQ_D = FOP(10, FMT_L),
9137 R6_OPC_CMP_SEUQ_D = FOP(11, FMT_L),
9138 R6_OPC_CMP_SLT_D = FOP(12, FMT_L),
9139 R6_OPC_CMP_SULT_D = FOP(13, FMT_L),
9140 R6_OPC_CMP_SLE_D = FOP(14, FMT_L),
9141 R6_OPC_CMP_SULE_D = FOP(15, FMT_L),
9142 R6_OPC_CMP_OR_D = FOP(17, FMT_L),
9143 R6_OPC_CMP_UNE_D = FOP(18, FMT_L),
9144 R6_OPC_CMP_NE_D = FOP(19, FMT_L),
9145 R6_OPC_CMP_SOR_D = FOP(25, FMT_L),
9146 R6_OPC_CMP_SUNE_D = FOP(26, FMT_L),
9147 R6_OPC_CMP_SNE_D = FOP(27, FMT_L),
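/* Moves between the GPRs and coprocessor 1: MFC1/MTC1 transfer the low 32
 * bits of FPR fs, CFC1/CTC1 access the FP control registers (CTC1 can change
 * hflags, so translation stops afterwards), DMFC1/DMTC1 move a full 64-bit
 * FPR on MIPS64, and MFHC1/MTHC1 move the upper half of a 64-bit FPR. */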
9149 static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
9151 TCGv t0 = tcg_temp_new();
9156 TCGv_i32 fp0 = tcg_temp_new_i32();
9158 gen_load_fpr32(ctx, fp0, fs);
9159 tcg_gen_ext_i32_tl(t0, fp0);
9160 tcg_temp_free_i32(fp0);
9162 gen_store_gpr(t0, rt);
9165 gen_load_gpr(t0, rt);
9167 TCGv_i32 fp0 = tcg_temp_new_i32();
9169 tcg_gen_trunc_tl_i32(fp0, t0);
9170 gen_store_fpr32(ctx, fp0, fs);
9171 tcg_temp_free_i32(fp0);
9175 gen_helper_1e0i(cfc1, t0, fs);
9176 gen_store_gpr(t0, rt);
9179 gen_load_gpr(t0, rt);
9180 save_cpu_state(ctx, 0);
9182 TCGv_i32 fs_tmp = tcg_const_i32(fs);
9184 gen_helper_0e2i(ctc1, t0, fs_tmp, rt);
9185 tcg_temp_free_i32(fs_tmp);
9187 /* Stop translation as we may have changed hflags */
9188 ctx->base.is_jmp = DISAS_STOP;
9190 #if defined(TARGET_MIPS64)
9192 gen_load_fpr64(ctx, t0, fs);
9193 gen_store_gpr(t0, rt);
9196 gen_load_gpr(t0, rt);
9197 gen_store_fpr64(ctx, t0, fs);
9202 TCGv_i32 fp0 = tcg_temp_new_i32();
9204 gen_load_fpr32h(ctx, fp0, fs);
9205 tcg_gen_ext_i32_tl(t0, fp0);
9206 tcg_temp_free_i32(fp0);
9208 gen_store_gpr(t0, rt);
9211 gen_load_gpr(t0, rt);
9213 TCGv_i32 fp0 = tcg_temp_new_i32();
9215 tcg_gen_trunc_tl_i32(fp0, t0);
9216 gen_store_fpr32h(ctx, fp0, fs);
9217 tcg_temp_free_i32(fp0);
9221 MIPS_INVAL("cp1 move");
9222 generate_exception_end(ctx, EXCP_RI);
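/* MOVF/MOVT: conditionally copy GPR rs to GPR rd depending on FP condition
 * code cc; tf distinguishes MOVT (move when the cc bit is set) from MOVF
 * (move when it is clear). */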
9230 static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
9246 l1 = gen_new_label();
9247 t0 = tcg_temp_new_i32();
9248 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
9249 tcg_gen_brcondi_i32(cond, t0, 0, l1);
9250 tcg_temp_free_i32(t0);
9252 tcg_gen_movi_tl(cpu_gpr[rd], 0);
9254 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
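/* MOVF.fmt/MOVT.fmt: the FPR counterparts of MOVF/MOVT, copying fs to fd
 * when condition code cc matches tf.  The paired-single variant below tests
 * cc for the lower half and cc+1 for the upper half independently. */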
9259 static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
9263 TCGv_i32 t0 = tcg_temp_new_i32();
9264 TCGLabel *l1 = gen_new_label();
9271 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
9272 tcg_gen_brcondi_i32(cond, t0, 0, l1);
9273 gen_load_fpr32(ctx, t0, fs);
9274 gen_store_fpr32(ctx, t0, fd);
9276 tcg_temp_free_i32(t0);
9279 static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
9282 TCGv_i32 t0 = tcg_temp_new_i32();
9284 TCGLabel *l1 = gen_new_label();
9291 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
9292 tcg_gen_brcondi_i32(cond, t0, 0, l1);
9293 tcg_temp_free_i32(t0);
9294 fp0 = tcg_temp_new_i64();
9295 gen_load_fpr64(ctx, fp0, fs);
9296 gen_store_fpr64(ctx, fp0, fd);
9297 tcg_temp_free_i64(fp0);
9301 static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
9305 TCGv_i32 t0 = tcg_temp_new_i32();
9306 TCGLabel *l1 = gen_new_label();
9307 TCGLabel *l2 = gen_new_label();
9314 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
9315 tcg_gen_brcondi_i32(cond, t0, 0, l1);
9316 gen_load_fpr32(ctx, t0, fs);
9317 gen_store_fpr32(ctx, t0, fd);
9320 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
9321 tcg_gen_brcondi_i32(cond, t0, 0, l2);
9322 gen_load_fpr32h(ctx, t0, fs);
9323 gen_store_fpr32h(ctx, t0, fd);
9324 tcg_temp_free_i32(t0);
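/* R6 SEL.fmt, SELEQZ.fmt and SELNEZ.fmt.  SEL.fmt picks ft or fs depending
 * on bit 0 of fd; SELEQZ.fmt yields fs if bit 0 of ft is clear and 0
 * otherwise; SELNEZ.fmt is the opposite.  Only bit 0 of the selector is
 * significant. */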
9328 static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
9331 TCGv_i32 t1 = tcg_const_i32(0);
9332 TCGv_i32 fp0 = tcg_temp_new_i32();
9333 TCGv_i32 fp1 = tcg_temp_new_i32();
9334 TCGv_i32 fp2 = tcg_temp_new_i32();
9335 gen_load_fpr32(ctx, fp0, fd);
9336 gen_load_fpr32(ctx, fp1, ft);
9337 gen_load_fpr32(ctx, fp2, fs);
9341 tcg_gen_andi_i32(fp0, fp0, 1);
9342 tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
9345 tcg_gen_andi_i32(fp1, fp1, 1);
9346 tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
9349 tcg_gen_andi_i32(fp1, fp1, 1);
9350 tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
9353 MIPS_INVAL("gen_sel_s");
9354 generate_exception_end(ctx, EXCP_RI);
9358 gen_store_fpr32(ctx, fp0, fd);
9359 tcg_temp_free_i32(fp2);
9360 tcg_temp_free_i32(fp1);
9361 tcg_temp_free_i32(fp0);
9362 tcg_temp_free_i32(t1);
9365 static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
9368 TCGv_i64 t1 = tcg_const_i64(0);
9369 TCGv_i64 fp0 = tcg_temp_new_i64();
9370 TCGv_i64 fp1 = tcg_temp_new_i64();
9371 TCGv_i64 fp2 = tcg_temp_new_i64();
9372 gen_load_fpr64(ctx, fp0, fd);
9373 gen_load_fpr64(ctx, fp1, ft);
9374 gen_load_fpr64(ctx, fp2, fs);
9378 tcg_gen_andi_i64(fp0, fp0, 1);
9379 tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
9382 tcg_gen_andi_i64(fp1, fp1, 1);
9383 tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
9386 tcg_gen_andi_i64(fp1, fp1, 1);
9387 tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
9390 MIPS_INVAL("gen_sel_d");
9391 generate_exception_end(ctx, EXCP_RI);
9395 gen_store_fpr64(ctx, fp0, fd);
9396 tcg_temp_free_i64(fp2);
9397 tcg_temp_free_i64(fp1);
9398 tcg_temp_free_i64(fp0);
9399 tcg_temp_free_i64(t1);
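/* CP1 arithmetic: op1 combines the fmt and function fields (see the FOP()
 * table above) and selects among the S, D, W, L and PS operations.  The R6
 * MIN/MINA/MAX/MAXA encodings overlap the legacy RECIP2/RECIP1/RSQRT1/RSQRT2
 * ones and are disambiguated by ISA_MIPS32R6; cc is the condition code field
 * used by the pre-R6 C.cond.fmt compares. */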
9402 static void gen_farith (DisasContext *ctx, enum fopcode op1,
9403 int ft, int fs, int fd, int cc)
9405 uint32_t func = ctx->opcode & 0x3f;
9409 TCGv_i32 fp0 = tcg_temp_new_i32();
9410 TCGv_i32 fp1 = tcg_temp_new_i32();
9412 gen_load_fpr32(ctx, fp0, fs);
9413 gen_load_fpr32(ctx, fp1, ft);
9414 gen_helper_float_add_s(fp0, cpu_env, fp0, fp1);
9415 tcg_temp_free_i32(fp1);
9416 gen_store_fpr32(ctx, fp0, fd);
9417 tcg_temp_free_i32(fp0);
9422 TCGv_i32 fp0 = tcg_temp_new_i32();
9423 TCGv_i32 fp1 = tcg_temp_new_i32();
9425 gen_load_fpr32(ctx, fp0, fs);
9426 gen_load_fpr32(ctx, fp1, ft);
9427 gen_helper_float_sub_s(fp0, cpu_env, fp0, fp1);
9428 tcg_temp_free_i32(fp1);
9429 gen_store_fpr32(ctx, fp0, fd);
9430 tcg_temp_free_i32(fp0);
9435 TCGv_i32 fp0 = tcg_temp_new_i32();
9436 TCGv_i32 fp1 = tcg_temp_new_i32();
9438 gen_load_fpr32(ctx, fp0, fs);
9439 gen_load_fpr32(ctx, fp1, ft);
9440 gen_helper_float_mul_s(fp0, cpu_env, fp0, fp1);
9441 tcg_temp_free_i32(fp1);
9442 gen_store_fpr32(ctx, fp0, fd);
9443 tcg_temp_free_i32(fp0);
9448 TCGv_i32 fp0 = tcg_temp_new_i32();
9449 TCGv_i32 fp1 = tcg_temp_new_i32();
9451 gen_load_fpr32(ctx, fp0, fs);
9452 gen_load_fpr32(ctx, fp1, ft);
9453 gen_helper_float_div_s(fp0, cpu_env, fp0, fp1);
9454 tcg_temp_free_i32(fp1);
9455 gen_store_fpr32(ctx, fp0, fd);
9456 tcg_temp_free_i32(fp0);
9461 TCGv_i32 fp0 = tcg_temp_new_i32();
9463 gen_load_fpr32(ctx, fp0, fs);
9464 gen_helper_float_sqrt_s(fp0, cpu_env, fp0);
9465 gen_store_fpr32(ctx, fp0, fd);
9466 tcg_temp_free_i32(fp0);
9471 TCGv_i32 fp0 = tcg_temp_new_i32();
9473 gen_load_fpr32(ctx, fp0, fs);
9475 tcg_gen_andi_i32(fp0, fp0, 0x7fffffffUL);
9477 gen_helper_float_abs_s(fp0, fp0);
9479 gen_store_fpr32(ctx, fp0, fd);
9480 tcg_temp_free_i32(fp0);
9485 TCGv_i32 fp0 = tcg_temp_new_i32();
9487 gen_load_fpr32(ctx, fp0, fs);
9488 gen_store_fpr32(ctx, fp0, fd);
9489 tcg_temp_free_i32(fp0);
9494 TCGv_i32 fp0 = tcg_temp_new_i32();
9496 gen_load_fpr32(ctx, fp0, fs);
9498 tcg_gen_xori_i32(fp0, fp0, 1UL << 31);
9500 gen_helper_float_chs_s(fp0, fp0);
9502 gen_store_fpr32(ctx, fp0, fd);
9503 tcg_temp_free_i32(fp0);
9507 check_cp1_64bitmode(ctx);
9509 TCGv_i32 fp32 = tcg_temp_new_i32();
9510 TCGv_i64 fp64 = tcg_temp_new_i64();
9512 gen_load_fpr32(ctx, fp32, fs);
9514 gen_helper_float_round_2008_l_s(fp64, cpu_env, fp32);
9516 gen_helper_float_round_l_s(fp64, cpu_env, fp32);
9518 tcg_temp_free_i32(fp32);
9519 gen_store_fpr64(ctx, fp64, fd);
9520 tcg_temp_free_i64(fp64);
9524 check_cp1_64bitmode(ctx);
9526 TCGv_i32 fp32 = tcg_temp_new_i32();
9527 TCGv_i64 fp64 = tcg_temp_new_i64();
9529 gen_load_fpr32(ctx, fp32, fs);
9531 gen_helper_float_trunc_2008_l_s(fp64, cpu_env, fp32);
9533 gen_helper_float_trunc_l_s(fp64, cpu_env, fp32);
9535 tcg_temp_free_i32(fp32);
9536 gen_store_fpr64(ctx, fp64, fd);
9537 tcg_temp_free_i64(fp64);
9541 check_cp1_64bitmode(ctx);
9543 TCGv_i32 fp32 = tcg_temp_new_i32();
9544 TCGv_i64 fp64 = tcg_temp_new_i64();
9546 gen_load_fpr32(ctx, fp32, fs);
9548 gen_helper_float_ceil_2008_l_s(fp64, cpu_env, fp32);
9550 gen_helper_float_ceil_l_s(fp64, cpu_env, fp32);
9552 tcg_temp_free_i32(fp32);
9553 gen_store_fpr64(ctx, fp64, fd);
9554 tcg_temp_free_i64(fp64);
9558 check_cp1_64bitmode(ctx);
9560 TCGv_i32 fp32 = tcg_temp_new_i32();
9561 TCGv_i64 fp64 = tcg_temp_new_i64();
9563 gen_load_fpr32(ctx, fp32, fs);
9565 gen_helper_float_floor_2008_l_s(fp64, cpu_env, fp32);
9567 gen_helper_float_floor_l_s(fp64, cpu_env, fp32);
9569 tcg_temp_free_i32(fp32);
9570 gen_store_fpr64(ctx, fp64, fd);
9571 tcg_temp_free_i64(fp64);
9576 TCGv_i32 fp0 = tcg_temp_new_i32();
9578 gen_load_fpr32(ctx, fp0, fs);
9580 gen_helper_float_round_2008_w_s(fp0, cpu_env, fp0);
9582 gen_helper_float_round_w_s(fp0, cpu_env, fp0);
9584 gen_store_fpr32(ctx, fp0, fd);
9585 tcg_temp_free_i32(fp0);
9590 TCGv_i32 fp0 = tcg_temp_new_i32();
9592 gen_load_fpr32(ctx, fp0, fs);
9594 gen_helper_float_trunc_2008_w_s(fp0, cpu_env, fp0);
9596 gen_helper_float_trunc_w_s(fp0, cpu_env, fp0);
9598 gen_store_fpr32(ctx, fp0, fd);
9599 tcg_temp_free_i32(fp0);
9604 TCGv_i32 fp0 = tcg_temp_new_i32();
9606 gen_load_fpr32(ctx, fp0, fs);
9608 gen_helper_float_ceil_2008_w_s(fp0, cpu_env, fp0);
9610 gen_helper_float_ceil_w_s(fp0, cpu_env, fp0);
9612 gen_store_fpr32(ctx, fp0, fd);
9613 tcg_temp_free_i32(fp0);
9618 TCGv_i32 fp0 = tcg_temp_new_i32();
9620 gen_load_fpr32(ctx, fp0, fs);
9622 gen_helper_float_floor_2008_w_s(fp0, cpu_env, fp0);
9624 gen_helper_float_floor_w_s(fp0, cpu_env, fp0);
9626 gen_store_fpr32(ctx, fp0, fd);
9627 tcg_temp_free_i32(fp0);
9631 check_insn(ctx, ISA_MIPS32R6);
9632 gen_sel_s(ctx, op1, fd, ft, fs);
9635 check_insn(ctx, ISA_MIPS32R6);
9636 gen_sel_s(ctx, op1, fd, ft, fs);
9639 check_insn(ctx, ISA_MIPS32R6);
9640 gen_sel_s(ctx, op1, fd, ft, fs);
9643 check_insn_opc_removed(ctx, ISA_MIPS32R6);
9644 gen_movcf_s(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
9647 check_insn_opc_removed(ctx, ISA_MIPS32R6);
9649 TCGLabel *l1 = gen_new_label();
9653 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
9655 fp0 = tcg_temp_new_i32();
9656 gen_load_fpr32(ctx, fp0, fs);
9657 gen_store_fpr32(ctx, fp0, fd);
9658 tcg_temp_free_i32(fp0);
9663 check_insn_opc_removed(ctx, ISA_MIPS32R6);
9665 TCGLabel *l1 = gen_new_label();
9669 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
9670 fp0 = tcg_temp_new_i32();
9671 gen_load_fpr32(ctx, fp0, fs);
9672 gen_store_fpr32(ctx, fp0, fd);
9673 tcg_temp_free_i32(fp0);
9680 TCGv_i32 fp0 = tcg_temp_new_i32();
9682 gen_load_fpr32(ctx, fp0, fs);
9683 gen_helper_float_recip_s(fp0, cpu_env, fp0);
9684 gen_store_fpr32(ctx, fp0, fd);
9685 tcg_temp_free_i32(fp0);
9690 TCGv_i32 fp0 = tcg_temp_new_i32();
9692 gen_load_fpr32(ctx, fp0, fs);
9693 gen_helper_float_rsqrt_s(fp0, cpu_env, fp0);
9694 gen_store_fpr32(ctx, fp0, fd);
9695 tcg_temp_free_i32(fp0);
9699 check_insn(ctx, ISA_MIPS32R6);
9701 TCGv_i32 fp0 = tcg_temp_new_i32();
9702 TCGv_i32 fp1 = tcg_temp_new_i32();
9703 TCGv_i32 fp2 = tcg_temp_new_i32();
9704 gen_load_fpr32(ctx, fp0, fs);
9705 gen_load_fpr32(ctx, fp1, ft);
9706 gen_load_fpr32(ctx, fp2, fd);
9707 gen_helper_float_maddf_s(fp2, cpu_env, fp0, fp1, fp2);
9708 gen_store_fpr32(ctx, fp2, fd);
9709 tcg_temp_free_i32(fp2);
9710 tcg_temp_free_i32(fp1);
9711 tcg_temp_free_i32(fp0);
9715 check_insn(ctx, ISA_MIPS32R6);
9717 TCGv_i32 fp0 = tcg_temp_new_i32();
9718 TCGv_i32 fp1 = tcg_temp_new_i32();
9719 TCGv_i32 fp2 = tcg_temp_new_i32();
9720 gen_load_fpr32(ctx, fp0, fs);
9721 gen_load_fpr32(ctx, fp1, ft);
9722 gen_load_fpr32(ctx, fp2, fd);
9723 gen_helper_float_msubf_s(fp2, cpu_env, fp0, fp1, fp2);
9724 gen_store_fpr32(ctx, fp2, fd);
9725 tcg_temp_free_i32(fp2);
9726 tcg_temp_free_i32(fp1);
9727 tcg_temp_free_i32(fp0);
9731 check_insn(ctx, ISA_MIPS32R6);
9733 TCGv_i32 fp0 = tcg_temp_new_i32();
9734 gen_load_fpr32(ctx, fp0, fs);
9735 gen_helper_float_rint_s(fp0, cpu_env, fp0);
9736 gen_store_fpr32(ctx, fp0, fd);
9737 tcg_temp_free_i32(fp0);
9741 check_insn(ctx, ISA_MIPS32R6);
9743 TCGv_i32 fp0 = tcg_temp_new_i32();
9744 gen_load_fpr32(ctx, fp0, fs);
9745 gen_helper_float_class_s(fp0, cpu_env, fp0);
9746 gen_store_fpr32(ctx, fp0, fd);
9747 tcg_temp_free_i32(fp0);
9750 case OPC_MIN_S: /* OPC_RECIP2_S */
9751 if (ctx->insn_flags & ISA_MIPS32R6) {
9753 TCGv_i32 fp0 = tcg_temp_new_i32();
9754 TCGv_i32 fp1 = tcg_temp_new_i32();
9755 TCGv_i32 fp2 = tcg_temp_new_i32();
9756 gen_load_fpr32(ctx, fp0, fs);
9757 gen_load_fpr32(ctx, fp1, ft);
9758 gen_helper_float_min_s(fp2, cpu_env, fp0, fp1);
9759 gen_store_fpr32(ctx, fp2, fd);
9760 tcg_temp_free_i32(fp2);
9761 tcg_temp_free_i32(fp1);
9762 tcg_temp_free_i32(fp0);
9765 check_cp1_64bitmode(ctx);
9767 TCGv_i32 fp0 = tcg_temp_new_i32();
9768 TCGv_i32 fp1 = tcg_temp_new_i32();
9770 gen_load_fpr32(ctx, fp0, fs);
9771 gen_load_fpr32(ctx, fp1, ft);
9772 gen_helper_float_recip2_s(fp0, cpu_env, fp0, fp1);
9773 tcg_temp_free_i32(fp1);
9774 gen_store_fpr32(ctx, fp0, fd);
9775 tcg_temp_free_i32(fp0);
9779 case OPC_MINA_S: /* OPC_RECIP1_S */
9780 if (ctx->insn_flags & ISA_MIPS32R6) {
9782 TCGv_i32 fp0 = tcg_temp_new_i32();
9783 TCGv_i32 fp1 = tcg_temp_new_i32();
9784 TCGv_i32 fp2 = tcg_temp_new_i32();
9785 gen_load_fpr32(ctx, fp0, fs);
9786 gen_load_fpr32(ctx, fp1, ft);
9787 gen_helper_float_mina_s(fp2, cpu_env, fp0, fp1);
9788 gen_store_fpr32(ctx, fp2, fd);
9789 tcg_temp_free_i32(fp2);
9790 tcg_temp_free_i32(fp1);
9791 tcg_temp_free_i32(fp0);
9794 check_cp1_64bitmode(ctx);
9796 TCGv_i32 fp0 = tcg_temp_new_i32();
9798 gen_load_fpr32(ctx, fp0, fs);
9799 gen_helper_float_recip1_s(fp0, cpu_env, fp0);
9800 gen_store_fpr32(ctx, fp0, fd);
9801 tcg_temp_free_i32(fp0);
9805 case OPC_MAX_S: /* OPC_RSQRT1_S */
9806 if (ctx->insn_flags & ISA_MIPS32R6) {
9808 TCGv_i32 fp0 = tcg_temp_new_i32();
9809 TCGv_i32 fp1 = tcg_temp_new_i32();
9810 gen_load_fpr32(ctx, fp0, fs);
9811 gen_load_fpr32(ctx, fp1, ft);
9812 gen_helper_float_max_s(fp1, cpu_env, fp0, fp1);
9813 gen_store_fpr32(ctx, fp1, fd);
9814 tcg_temp_free_i32(fp1);
9815 tcg_temp_free_i32(fp0);
9818 check_cp1_64bitmode(ctx);
9820 TCGv_i32 fp0 = tcg_temp_new_i32();
9822 gen_load_fpr32(ctx, fp0, fs);
9823 gen_helper_float_rsqrt1_s(fp0, cpu_env, fp0);
9824 gen_store_fpr32(ctx, fp0, fd);
9825 tcg_temp_free_i32(fp0);
9829 case OPC_MAXA_S: /* OPC_RSQRT2_S */
9830 if (ctx->insn_flags & ISA_MIPS32R6) {
9832 TCGv_i32 fp0 = tcg_temp_new_i32();
9833 TCGv_i32 fp1 = tcg_temp_new_i32();
9834 gen_load_fpr32(ctx, fp0, fs);
9835 gen_load_fpr32(ctx, fp1, ft);
9836 gen_helper_float_maxa_s(fp1, cpu_env, fp0, fp1);
9837 gen_store_fpr32(ctx, fp1, fd);
9838 tcg_temp_free_i32(fp1);
9839 tcg_temp_free_i32(fp0);
9842 check_cp1_64bitmode(ctx);
9844 TCGv_i32 fp0 = tcg_temp_new_i32();
9845 TCGv_i32 fp1 = tcg_temp_new_i32();
9847 gen_load_fpr32(ctx, fp0, fs);
9848 gen_load_fpr32(ctx, fp1, ft);
9849 gen_helper_float_rsqrt2_s(fp0, cpu_env, fp0, fp1);
9850 tcg_temp_free_i32(fp1);
9851 gen_store_fpr32(ctx, fp0, fd);
9852 tcg_temp_free_i32(fp0);
9857 check_cp1_registers(ctx, fd);
9859 TCGv_i32 fp32 = tcg_temp_new_i32();
9860 TCGv_i64 fp64 = tcg_temp_new_i64();
9862 gen_load_fpr32(ctx, fp32, fs);
9863 gen_helper_float_cvtd_s(fp64, cpu_env, fp32);
9864 tcg_temp_free_i32(fp32);
9865 gen_store_fpr64(ctx, fp64, fd);
9866 tcg_temp_free_i64(fp64);
9871 TCGv_i32 fp0 = tcg_temp_new_i32();
9873 gen_load_fpr32(ctx, fp0, fs);
9875 gen_helper_float_cvt_2008_w_s(fp0, cpu_env, fp0);
9877 gen_helper_float_cvt_w_s(fp0, cpu_env, fp0);
9879 gen_store_fpr32(ctx, fp0, fd);
9880 tcg_temp_free_i32(fp0);
9884 check_cp1_64bitmode(ctx);
9886 TCGv_i32 fp32 = tcg_temp_new_i32();
9887 TCGv_i64 fp64 = tcg_temp_new_i64();
9889 gen_load_fpr32(ctx, fp32, fs);
9891 gen_helper_float_cvt_2008_l_s(fp64, cpu_env, fp32);
9893 gen_helper_float_cvt_l_s(fp64, cpu_env, fp32);
9895 tcg_temp_free_i32(fp32);
9896 gen_store_fpr64(ctx, fp64, fd);
9897 tcg_temp_free_i64(fp64);
9903 TCGv_i64 fp64 = tcg_temp_new_i64();
9904 TCGv_i32 fp32_0 = tcg_temp_new_i32();
9905 TCGv_i32 fp32_1 = tcg_temp_new_i32();
9907 gen_load_fpr32(ctx, fp32_0, fs);
9908 gen_load_fpr32(ctx, fp32_1, ft);
9909 tcg_gen_concat_i32_i64(fp64, fp32_1, fp32_0);
9910 tcg_temp_free_i32(fp32_1);
9911 tcg_temp_free_i32(fp32_0);
9912 gen_store_fpr64(ctx, fp64, fd);
9913 tcg_temp_free_i64(fp64);
9925 case OPC_CMP_NGLE_S:
9932 check_insn_opc_removed(ctx, ISA_MIPS32R6);
9933 if (ctx->opcode & (1 << 6)) {
9934 gen_cmpabs_s(ctx, func-48, ft, fs, cc);
9936 gen_cmp_s(ctx, func-48, ft, fs, cc);
9940 check_cp1_registers(ctx, fs | ft | fd);
9942 TCGv_i64 fp0 = tcg_temp_new_i64();
9943 TCGv_i64 fp1 = tcg_temp_new_i64();
9945 gen_load_fpr64(ctx, fp0, fs);
9946 gen_load_fpr64(ctx, fp1, ft);
9947 gen_helper_float_add_d(fp0, cpu_env, fp0, fp1);
9948 tcg_temp_free_i64(fp1);
9949 gen_store_fpr64(ctx, fp0, fd);
9950 tcg_temp_free_i64(fp0);
9954 check_cp1_registers(ctx, fs | ft | fd);
9956 TCGv_i64 fp0 = tcg_temp_new_i64();
9957 TCGv_i64 fp1 = tcg_temp_new_i64();
9959 gen_load_fpr64(ctx, fp0, fs);
9960 gen_load_fpr64(ctx, fp1, ft);
9961 gen_helper_float_sub_d(fp0, cpu_env, fp0, fp1);
9962 tcg_temp_free_i64(fp1);
9963 gen_store_fpr64(ctx, fp0, fd);
9964 tcg_temp_free_i64(fp0);
9968 check_cp1_registers(ctx, fs | ft | fd);
9970 TCGv_i64 fp0 = tcg_temp_new_i64();
9971 TCGv_i64 fp1 = tcg_temp_new_i64();
9973 gen_load_fpr64(ctx, fp0, fs);
9974 gen_load_fpr64(ctx, fp1, ft);
9975 gen_helper_float_mul_d(fp0, cpu_env, fp0, fp1);
9976 tcg_temp_free_i64(fp1);
9977 gen_store_fpr64(ctx, fp0, fd);
9978 tcg_temp_free_i64(fp0);
9982 check_cp1_registers(ctx, fs | ft | fd);
9984 TCGv_i64 fp0 = tcg_temp_new_i64();
9985 TCGv_i64 fp1 = tcg_temp_new_i64();
9987 gen_load_fpr64(ctx, fp0, fs);
9988 gen_load_fpr64(ctx, fp1, ft);
9989 gen_helper_float_div_d(fp0, cpu_env, fp0, fp1);
9990 tcg_temp_free_i64(fp1);
9991 gen_store_fpr64(ctx, fp0, fd);
9992 tcg_temp_free_i64(fp0);
9996 check_cp1_registers(ctx, fs | fd);
9998 TCGv_i64 fp0 = tcg_temp_new_i64();
10000 gen_load_fpr64(ctx, fp0, fs);
10001 gen_helper_float_sqrt_d(fp0, cpu_env, fp0);
10002 gen_store_fpr64(ctx, fp0, fd);
10003 tcg_temp_free_i64(fp0);
10007 check_cp1_registers(ctx, fs | fd);
10009 TCGv_i64 fp0 = tcg_temp_new_i64();
10011 gen_load_fpr64(ctx, fp0, fs);
10012 if (ctx->abs2008) {
10013 tcg_gen_andi_i64(fp0, fp0, 0x7fffffffffffffffULL);
10015 gen_helper_float_abs_d(fp0, fp0);
10017 gen_store_fpr64(ctx, fp0, fd);
10018 tcg_temp_free_i64(fp0);
10022 check_cp1_registers(ctx, fs | fd);
10024 TCGv_i64 fp0 = tcg_temp_new_i64();
10026 gen_load_fpr64(ctx, fp0, fs);
10027 gen_store_fpr64(ctx, fp0, fd);
10028 tcg_temp_free_i64(fp0);
10032 check_cp1_registers(ctx, fs | fd);
10034 TCGv_i64 fp0 = tcg_temp_new_i64();
10036 gen_load_fpr64(ctx, fp0, fs);
10037 if (ctx->abs2008) {
10038 tcg_gen_xori_i64(fp0, fp0, 1ULL << 63);
10040 gen_helper_float_chs_d(fp0, fp0);
10042 gen_store_fpr64(ctx, fp0, fd);
10043 tcg_temp_free_i64(fp0);
10046 case OPC_ROUND_L_D:
10047 check_cp1_64bitmode(ctx);
10049 TCGv_i64 fp0 = tcg_temp_new_i64();
10051 gen_load_fpr64(ctx, fp0, fs);
10052 if (ctx->nan2008) {
10053 gen_helper_float_round_2008_l_d(fp0, cpu_env, fp0);
10055 gen_helper_float_round_l_d(fp0, cpu_env, fp0);
10057 gen_store_fpr64(ctx, fp0, fd);
10058 tcg_temp_free_i64(fp0);
10061 case OPC_TRUNC_L_D:
10062 check_cp1_64bitmode(ctx);
10064 TCGv_i64 fp0 = tcg_temp_new_i64();
10066 gen_load_fpr64(ctx, fp0, fs);
10067 if (ctx->nan2008) {
10068 gen_helper_float_trunc_2008_l_d(fp0, cpu_env, fp0);
10070 gen_helper_float_trunc_l_d(fp0, cpu_env, fp0);
10072 gen_store_fpr64(ctx, fp0, fd);
10073 tcg_temp_free_i64(fp0);
10077 check_cp1_64bitmode(ctx);
10079 TCGv_i64 fp0 = tcg_temp_new_i64();
10081 gen_load_fpr64(ctx, fp0, fs);
10082 if (ctx->nan2008) {
10083 gen_helper_float_ceil_2008_l_d(fp0, cpu_env, fp0);
10085 gen_helper_float_ceil_l_d(fp0, cpu_env, fp0);
10087 gen_store_fpr64(ctx, fp0, fd);
10088 tcg_temp_free_i64(fp0);
10091 case OPC_FLOOR_L_D:
10092 check_cp1_64bitmode(ctx);
10094 TCGv_i64 fp0 = tcg_temp_new_i64();
10096 gen_load_fpr64(ctx, fp0, fs);
10097 if (ctx->nan2008) {
10098 gen_helper_float_floor_2008_l_d(fp0, cpu_env, fp0);
10100 gen_helper_float_floor_l_d(fp0, cpu_env, fp0);
10102 gen_store_fpr64(ctx, fp0, fd);
10103 tcg_temp_free_i64(fp0);
10106 case OPC_ROUND_W_D:
10107 check_cp1_registers(ctx, fs);
10109 TCGv_i32 fp32 = tcg_temp_new_i32();
10110 TCGv_i64 fp64 = tcg_temp_new_i64();
10112 gen_load_fpr64(ctx, fp64, fs);
10113 if (ctx->nan2008) {
10114 gen_helper_float_round_2008_w_d(fp32, cpu_env, fp64);
10116 gen_helper_float_round_w_d(fp32, cpu_env, fp64);
10118 tcg_temp_free_i64(fp64);
10119 gen_store_fpr32(ctx, fp32, fd);
10120 tcg_temp_free_i32(fp32);
10123 case OPC_TRUNC_W_D:
10124 check_cp1_registers(ctx, fs);
10126 TCGv_i32 fp32 = tcg_temp_new_i32();
10127 TCGv_i64 fp64 = tcg_temp_new_i64();
10129 gen_load_fpr64(ctx, fp64, fs);
10130 if (ctx->nan2008) {
10131 gen_helper_float_trunc_2008_w_d(fp32, cpu_env, fp64);
10133 gen_helper_float_trunc_w_d(fp32, cpu_env, fp64);
10135 tcg_temp_free_i64(fp64);
10136 gen_store_fpr32(ctx, fp32, fd);
10137 tcg_temp_free_i32(fp32);
10141 check_cp1_registers(ctx, fs);
10143 TCGv_i32 fp32 = tcg_temp_new_i32();
10144 TCGv_i64 fp64 = tcg_temp_new_i64();
10146 gen_load_fpr64(ctx, fp64, fs);
10147 if (ctx->nan2008) {
10148 gen_helper_float_ceil_2008_w_d(fp32, cpu_env, fp64);
10150 gen_helper_float_ceil_w_d(fp32, cpu_env, fp64);
10152 tcg_temp_free_i64(fp64);
10153 gen_store_fpr32(ctx, fp32, fd);
10154 tcg_temp_free_i32(fp32);
10157 case OPC_FLOOR_W_D:
10158 check_cp1_registers(ctx, fs);
10160 TCGv_i32 fp32 = tcg_temp_new_i32();
10161 TCGv_i64 fp64 = tcg_temp_new_i64();
10163 gen_load_fpr64(ctx, fp64, fs);
10164 if (ctx->nan2008) {
10165 gen_helper_float_floor_2008_w_d(fp32, cpu_env, fp64);
10167 gen_helper_float_floor_w_d(fp32, cpu_env, fp64);
10169 tcg_temp_free_i64(fp64);
10170 gen_store_fpr32(ctx, fp32, fd);
10171 tcg_temp_free_i32(fp32);
10175 check_insn(ctx, ISA_MIPS32R6);
10176 gen_sel_d(ctx, op1, fd, ft, fs);
10179 check_insn(ctx, ISA_MIPS32R6);
10180 gen_sel_d(ctx, op1, fd, ft, fs);
10183 check_insn(ctx, ISA_MIPS32R6);
10184 gen_sel_d(ctx, op1, fd, ft, fs);
10187 check_insn_opc_removed(ctx, ISA_MIPS32R6);
10188 gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
10191 check_insn_opc_removed(ctx, ISA_MIPS32R6);
10193 TCGLabel *l1 = gen_new_label();
10197 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
10199 fp0 = tcg_temp_new_i64();
10200 gen_load_fpr64(ctx, fp0, fs);
10201 gen_store_fpr64(ctx, fp0, fd);
10202 tcg_temp_free_i64(fp0);
10207 check_insn_opc_removed(ctx, ISA_MIPS32R6);
10209 TCGLabel *l1 = gen_new_label();
10213 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
10214 fp0 = tcg_temp_new_i64();
10215 gen_load_fpr64(ctx, fp0, fs);
10216 gen_store_fpr64(ctx, fp0, fd);
10217 tcg_temp_free_i64(fp0);
10223 check_cp1_registers(ctx, fs | fd);
10225 TCGv_i64 fp0 = tcg_temp_new_i64();
10227 gen_load_fpr64(ctx, fp0, fs);
10228 gen_helper_float_recip_d(fp0, cpu_env, fp0);
10229 gen_store_fpr64(ctx, fp0, fd);
10230 tcg_temp_free_i64(fp0);
10234 check_cp1_registers(ctx, fs | fd);
10236 TCGv_i64 fp0 = tcg_temp_new_i64();
10238 gen_load_fpr64(ctx, fp0, fs);
10239 gen_helper_float_rsqrt_d(fp0, cpu_env, fp0);
10240 gen_store_fpr64(ctx, fp0, fd);
10241 tcg_temp_free_i64(fp0);
10245 check_insn(ctx, ISA_MIPS32R6);
10247 TCGv_i64 fp0 = tcg_temp_new_i64();
10248 TCGv_i64 fp1 = tcg_temp_new_i64();
10249 TCGv_i64 fp2 = tcg_temp_new_i64();
10250 gen_load_fpr64(ctx, fp0, fs);
10251 gen_load_fpr64(ctx, fp1, ft);
10252 gen_load_fpr64(ctx, fp2, fd);
10253 gen_helper_float_maddf_d(fp2, cpu_env, fp0, fp1, fp2);
10254 gen_store_fpr64(ctx, fp2, fd);
10255 tcg_temp_free_i64(fp2);
10256 tcg_temp_free_i64(fp1);
10257 tcg_temp_free_i64(fp0);
10261 check_insn(ctx, ISA_MIPS32R6);
10263 TCGv_i64 fp0 = tcg_temp_new_i64();
10264 TCGv_i64 fp1 = tcg_temp_new_i64();
10265 TCGv_i64 fp2 = tcg_temp_new_i64();
10266 gen_load_fpr64(ctx, fp0, fs);
10267 gen_load_fpr64(ctx, fp1, ft);
10268 gen_load_fpr64(ctx, fp2, fd);
10269 gen_helper_float_msubf_d(fp2, cpu_env, fp0, fp1, fp2);
10270 gen_store_fpr64(ctx, fp2, fd);
10271 tcg_temp_free_i64(fp2);
10272 tcg_temp_free_i64(fp1);
10273 tcg_temp_free_i64(fp0);
10277 check_insn(ctx, ISA_MIPS32R6);
10279 TCGv_i64 fp0 = tcg_temp_new_i64();
10280 gen_load_fpr64(ctx, fp0, fs);
10281 gen_helper_float_rint_d(fp0, cpu_env, fp0);
10282 gen_store_fpr64(ctx, fp0, fd);
10283 tcg_temp_free_i64(fp0);
10287 check_insn(ctx, ISA_MIPS32R6);
10289 TCGv_i64 fp0 = tcg_temp_new_i64();
10290 gen_load_fpr64(ctx, fp0, fs);
10291 gen_helper_float_class_d(fp0, cpu_env, fp0);
10292 gen_store_fpr64(ctx, fp0, fd);
10293 tcg_temp_free_i64(fp0);
10296 case OPC_MIN_D: /* OPC_RECIP2_D */
10297 if (ctx->insn_flags & ISA_MIPS32R6) {
10299 TCGv_i64 fp0 = tcg_temp_new_i64();
10300 TCGv_i64 fp1 = tcg_temp_new_i64();
10301 gen_load_fpr64(ctx, fp0, fs);
10302 gen_load_fpr64(ctx, fp1, ft);
10303 gen_helper_float_min_d(fp1, cpu_env, fp0, fp1);
10304 gen_store_fpr64(ctx, fp1, fd);
10305 tcg_temp_free_i64(fp1);
10306 tcg_temp_free_i64(fp0);
10309 check_cp1_64bitmode(ctx);
10311 TCGv_i64 fp0 = tcg_temp_new_i64();
10312 TCGv_i64 fp1 = tcg_temp_new_i64();
10314 gen_load_fpr64(ctx, fp0, fs);
10315 gen_load_fpr64(ctx, fp1, ft);
10316 gen_helper_float_recip2_d(fp0, cpu_env, fp0, fp1);
10317 tcg_temp_free_i64(fp1);
10318 gen_store_fpr64(ctx, fp0, fd);
10319 tcg_temp_free_i64(fp0);
10323 case OPC_MINA_D: /* OPC_RECIP1_D */
10324 if (ctx->insn_flags & ISA_MIPS32R6) {
10326 TCGv_i64 fp0 = tcg_temp_new_i64();
10327 TCGv_i64 fp1 = tcg_temp_new_i64();
10328 gen_load_fpr64(ctx, fp0, fs);
10329 gen_load_fpr64(ctx, fp1, ft);
10330 gen_helper_float_mina_d(fp1, cpu_env, fp0, fp1);
10331 gen_store_fpr64(ctx, fp1, fd);
10332 tcg_temp_free_i64(fp1);
10333 tcg_temp_free_i64(fp0);
10336 check_cp1_64bitmode(ctx);
10338 TCGv_i64 fp0 = tcg_temp_new_i64();
10340 gen_load_fpr64(ctx, fp0, fs);
10341 gen_helper_float_recip1_d(fp0, cpu_env, fp0);
10342 gen_store_fpr64(ctx, fp0, fd);
10343 tcg_temp_free_i64(fp0);
10347 case OPC_MAX_D: /* OPC_RSQRT1_D */
10348 if (ctx->insn_flags & ISA_MIPS32R6) {
10350 TCGv_i64 fp0 = tcg_temp_new_i64();
10351 TCGv_i64 fp1 = tcg_temp_new_i64();
10352 gen_load_fpr64(ctx, fp0, fs);
10353 gen_load_fpr64(ctx, fp1, ft);
10354 gen_helper_float_max_d(fp1, cpu_env, fp0, fp1);
10355 gen_store_fpr64(ctx, fp1, fd);
10356 tcg_temp_free_i64(fp1);
10357 tcg_temp_free_i64(fp0);
10360 check_cp1_64bitmode(ctx);
10362 TCGv_i64 fp0 = tcg_temp_new_i64();
10364 gen_load_fpr64(ctx, fp0, fs);
10365 gen_helper_float_rsqrt1_d(fp0, cpu_env, fp0);
10366 gen_store_fpr64(ctx, fp0, fd);
10367 tcg_temp_free_i64(fp0);
10371 case OPC_MAXA_D: /* OPC_RSQRT2_D */
10372 if (ctx->insn_flags & ISA_MIPS32R6) {
10374 TCGv_i64 fp0 = tcg_temp_new_i64();
10375 TCGv_i64 fp1 = tcg_temp_new_i64();
10376 gen_load_fpr64(ctx, fp0, fs);
10377 gen_load_fpr64(ctx, fp1, ft);
10378 gen_helper_float_maxa_d(fp1, cpu_env, fp0, fp1);
10379 gen_store_fpr64(ctx, fp1, fd);
10380 tcg_temp_free_i64(fp1);
10381 tcg_temp_free_i64(fp0);
10384 check_cp1_64bitmode(ctx);
10386 TCGv_i64 fp0 = tcg_temp_new_i64();
10387 TCGv_i64 fp1 = tcg_temp_new_i64();
10389 gen_load_fpr64(ctx, fp0, fs);
10390 gen_load_fpr64(ctx, fp1, ft);
10391 gen_helper_float_rsqrt2_d(fp0, cpu_env, fp0, fp1);
10392 tcg_temp_free_i64(fp1);
10393 gen_store_fpr64(ctx, fp0, fd);
10394 tcg_temp_free_i64(fp0);
10401 case OPC_CMP_UEQ_D:
10402 case OPC_CMP_OLT_D:
10403 case OPC_CMP_ULT_D:
10404 case OPC_CMP_OLE_D:
10405 case OPC_CMP_ULE_D:
10407 case OPC_CMP_NGLE_D:
10408 case OPC_CMP_SEQ_D:
10409 case OPC_CMP_NGL_D:
10411 case OPC_CMP_NGE_D:
10413 case OPC_CMP_NGT_D:
10414 check_insn_opc_removed(ctx, ISA_MIPS32R6);
10415 if (ctx->opcode & (1 << 6)) {
10416 gen_cmpabs_d(ctx, func-48, ft, fs, cc);
10418 gen_cmp_d(ctx, func-48, ft, fs, cc);
10422 check_cp1_registers(ctx, fs);
10424 TCGv_i32 fp32 = tcg_temp_new_i32();
10425 TCGv_i64 fp64 = tcg_temp_new_i64();
10427 gen_load_fpr64(ctx, fp64, fs);
10428 gen_helper_float_cvts_d(fp32, cpu_env, fp64);
10429 tcg_temp_free_i64(fp64);
10430 gen_store_fpr32(ctx, fp32, fd);
10431 tcg_temp_free_i32(fp32);
10435 check_cp1_registers(ctx, fs);
10437 TCGv_i32 fp32 = tcg_temp_new_i32();
10438 TCGv_i64 fp64 = tcg_temp_new_i64();
10440 gen_load_fpr64(ctx, fp64, fs);
10441 if (ctx->nan2008) {
10442 gen_helper_float_cvt_2008_w_d(fp32, cpu_env, fp64);
10444 gen_helper_float_cvt_w_d(fp32, cpu_env, fp64);
10446 tcg_temp_free_i64(fp64);
10447 gen_store_fpr32(ctx, fp32, fd);
10448 tcg_temp_free_i32(fp32);
10452 check_cp1_64bitmode(ctx);
10454 TCGv_i64 fp0 = tcg_temp_new_i64();
10456 gen_load_fpr64(ctx, fp0, fs);
10457 if (ctx->nan2008) {
10458 gen_helper_float_cvt_2008_l_d(fp0, cpu_env, fp0);
10460 gen_helper_float_cvt_l_d(fp0, cpu_env, fp0);
10462 gen_store_fpr64(ctx, fp0, fd);
10463 tcg_temp_free_i64(fp0);
10468 TCGv_i32 fp0 = tcg_temp_new_i32();
10470 gen_load_fpr32(ctx, fp0, fs);
10471 gen_helper_float_cvts_w(fp0, cpu_env, fp0);
10472 gen_store_fpr32(ctx, fp0, fd);
10473 tcg_temp_free_i32(fp0);
10477 check_cp1_registers(ctx, fd);
10479 TCGv_i32 fp32 = tcg_temp_new_i32();
10480 TCGv_i64 fp64 = tcg_temp_new_i64();
10482 gen_load_fpr32(ctx, fp32, fs);
10483 gen_helper_float_cvtd_w(fp64, cpu_env, fp32);
10484 tcg_temp_free_i32(fp32);
10485 gen_store_fpr64(ctx, fp64, fd);
10486 tcg_temp_free_i64(fp64);
10490 check_cp1_64bitmode(ctx);
10492 TCGv_i32 fp32 = tcg_temp_new_i32();
10493 TCGv_i64 fp64 = tcg_temp_new_i64();
10495 gen_load_fpr64(ctx, fp64, fs);
10496 gen_helper_float_cvts_l(fp32, cpu_env, fp64);
10497 tcg_temp_free_i64(fp64);
10498 gen_store_fpr32(ctx, fp32, fd);
10499 tcg_temp_free_i32(fp32);
10503 check_cp1_64bitmode(ctx);
10505 TCGv_i64 fp0 = tcg_temp_new_i64();
10507 gen_load_fpr64(ctx, fp0, fs);
10508 gen_helper_float_cvtd_l(fp0, cpu_env, fp0);
10509 gen_store_fpr64(ctx, fp0, fd);
10510 tcg_temp_free_i64(fp0);
10513 case OPC_CVT_PS_PW:
10516 TCGv_i64 fp0 = tcg_temp_new_i64();
10518 gen_load_fpr64(ctx, fp0, fs);
10519 gen_helper_float_cvtps_pw(fp0, cpu_env, fp0);
10520 gen_store_fpr64(ctx, fp0, fd);
10521 tcg_temp_free_i64(fp0);
10527 TCGv_i64 fp0 = tcg_temp_new_i64();
10528 TCGv_i64 fp1 = tcg_temp_new_i64();
10530 gen_load_fpr64(ctx, fp0, fs);
10531 gen_load_fpr64(ctx, fp1, ft);
10532 gen_helper_float_add_ps(fp0, cpu_env, fp0, fp1);
10533 tcg_temp_free_i64(fp1);
10534 gen_store_fpr64(ctx, fp0, fd);
10535 tcg_temp_free_i64(fp0);
10541 TCGv_i64 fp0 = tcg_temp_new_i64();
10542 TCGv_i64 fp1 = tcg_temp_new_i64();
10544 gen_load_fpr64(ctx, fp0, fs);
10545 gen_load_fpr64(ctx, fp1, ft);
10546 gen_helper_float_sub_ps(fp0, cpu_env, fp0, fp1);
10547 tcg_temp_free_i64(fp1);
10548 gen_store_fpr64(ctx, fp0, fd);
10549 tcg_temp_free_i64(fp0);
10555 TCGv_i64 fp0 = tcg_temp_new_i64();
10556 TCGv_i64 fp1 = tcg_temp_new_i64();
10558 gen_load_fpr64(ctx, fp0, fs);
10559 gen_load_fpr64(ctx, fp1, ft);
10560 gen_helper_float_mul_ps(fp0, cpu_env, fp0, fp1);
10561 tcg_temp_free_i64(fp1);
10562 gen_store_fpr64(ctx, fp0, fd);
10563 tcg_temp_free_i64(fp0);
10569 TCGv_i64 fp0 = tcg_temp_new_i64();
10571 gen_load_fpr64(ctx, fp0, fs);
10572 gen_helper_float_abs_ps(fp0, fp0);
10573 gen_store_fpr64(ctx, fp0, fd);
10574 tcg_temp_free_i64(fp0);
10580 TCGv_i64 fp0 = tcg_temp_new_i64();
10582 gen_load_fpr64(ctx, fp0, fs);
10583 gen_store_fpr64(ctx, fp0, fd);
10584 tcg_temp_free_i64(fp0);
10590 TCGv_i64 fp0 = tcg_temp_new_i64();
10592 gen_load_fpr64(ctx, fp0, fs);
10593 gen_helper_float_chs_ps(fp0, fp0);
10594 gen_store_fpr64(ctx, fp0, fd);
10595 tcg_temp_free_i64(fp0);
10600 gen_movcf_ps(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
10605 TCGLabel *l1 = gen_new_label();
10609 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
10610 fp0 = tcg_temp_new_i64();
10611 gen_load_fpr64(ctx, fp0, fs);
10612 gen_store_fpr64(ctx, fp0, fd);
10613 tcg_temp_free_i64(fp0);
10620 TCGLabel *l1 = gen_new_label();
10624 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
10625 fp0 = tcg_temp_new_i64();
10626 gen_load_fpr64(ctx, fp0, fs);
10627 gen_store_fpr64(ctx, fp0, fd);
10628 tcg_temp_free_i64(fp0);
10636 TCGv_i64 fp0 = tcg_temp_new_i64();
10637 TCGv_i64 fp1 = tcg_temp_new_i64();
10639 gen_load_fpr64(ctx, fp0, ft);
10640 gen_load_fpr64(ctx, fp1, fs);
10641 gen_helper_float_addr_ps(fp0, cpu_env, fp0, fp1);
10642 tcg_temp_free_i64(fp1);
10643 gen_store_fpr64(ctx, fp0, fd);
10644 tcg_temp_free_i64(fp0);
10650 TCGv_i64 fp0 = tcg_temp_new_i64();
10651 TCGv_i64 fp1 = tcg_temp_new_i64();
10653 gen_load_fpr64(ctx, fp0, ft);
10654 gen_load_fpr64(ctx, fp1, fs);
10655 gen_helper_float_mulr_ps(fp0, cpu_env, fp0, fp1);
10656 tcg_temp_free_i64(fp1);
10657 gen_store_fpr64(ctx, fp0, fd);
10658 tcg_temp_free_i64(fp0);
10661 case OPC_RECIP2_PS:
10664 TCGv_i64 fp0 = tcg_temp_new_i64();
10665 TCGv_i64 fp1 = tcg_temp_new_i64();
10667 gen_load_fpr64(ctx, fp0, fs);
10668 gen_load_fpr64(ctx, fp1, ft);
10669 gen_helper_float_recip2_ps(fp0, cpu_env, fp0, fp1);
10670 tcg_temp_free_i64(fp1);
10671 gen_store_fpr64(ctx, fp0, fd);
10672 tcg_temp_free_i64(fp0);
10675 case OPC_RECIP1_PS:
10678 TCGv_i64 fp0 = tcg_temp_new_i64();
10680 gen_load_fpr64(ctx, fp0, fs);
10681 gen_helper_float_recip1_ps(fp0, cpu_env, fp0);
10682 gen_store_fpr64(ctx, fp0, fd);
10683 tcg_temp_free_i64(fp0);
10686 case OPC_RSQRT1_PS:
10689 TCGv_i64 fp0 = tcg_temp_new_i64();
10691 gen_load_fpr64(ctx, fp0, fs);
10692 gen_helper_float_rsqrt1_ps(fp0, cpu_env, fp0);
10693 gen_store_fpr64(ctx, fp0, fd);
10694 tcg_temp_free_i64(fp0);
10697 case OPC_RSQRT2_PS:
10700 TCGv_i64 fp0 = tcg_temp_new_i64();
10701 TCGv_i64 fp1 = tcg_temp_new_i64();
10703 gen_load_fpr64(ctx, fp0, fs);
10704 gen_load_fpr64(ctx, fp1, ft);
10705 gen_helper_float_rsqrt2_ps(fp0, cpu_env, fp0, fp1);
10706 tcg_temp_free_i64(fp1);
10707 gen_store_fpr64(ctx, fp0, fd);
10708 tcg_temp_free_i64(fp0);
10712 check_cp1_64bitmode(ctx);
10714 TCGv_i32 fp0 = tcg_temp_new_i32();
10716 gen_load_fpr32h(ctx, fp0, fs);
10717 gen_helper_float_cvts_pu(fp0, cpu_env, fp0);
10718 gen_store_fpr32(ctx, fp0, fd);
10719 tcg_temp_free_i32(fp0);
10722 case OPC_CVT_PW_PS:
10725 TCGv_i64 fp0 = tcg_temp_new_i64();
10727 gen_load_fpr64(ctx, fp0, fs);
10728 gen_helper_float_cvtpw_ps(fp0, cpu_env, fp0);
10729 gen_store_fpr64(ctx, fp0, fd);
10730 tcg_temp_free_i64(fp0);
10734 check_cp1_64bitmode(ctx);
10736 TCGv_i32 fp0 = tcg_temp_new_i32();
10738 gen_load_fpr32(ctx, fp0, fs);
10739 gen_helper_float_cvts_pl(fp0, cpu_env, fp0);
10740 gen_store_fpr32(ctx, fp0, fd);
10741 tcg_temp_free_i32(fp0);
10747 TCGv_i32 fp0 = tcg_temp_new_i32();
10748 TCGv_i32 fp1 = tcg_temp_new_i32();
10750 gen_load_fpr32(ctx, fp0, fs);
10751 gen_load_fpr32(ctx, fp1, ft);
10752 gen_store_fpr32h(ctx, fp0, fd);
10753 gen_store_fpr32(ctx, fp1, fd);
10754 tcg_temp_free_i32(fp0);
10755 tcg_temp_free_i32(fp1);
10761 TCGv_i32 fp0 = tcg_temp_new_i32();
10762 TCGv_i32 fp1 = tcg_temp_new_i32();
10764 gen_load_fpr32(ctx, fp0, fs);
10765 gen_load_fpr32h(ctx, fp1, ft);
10766 gen_store_fpr32(ctx, fp1, fd);
10767 gen_store_fpr32h(ctx, fp0, fd);
10768 tcg_temp_free_i32(fp0);
10769 tcg_temp_free_i32(fp1);
10775 TCGv_i32 fp0 = tcg_temp_new_i32();
10776 TCGv_i32 fp1 = tcg_temp_new_i32();
10778 gen_load_fpr32h(ctx, fp0, fs);
10779 gen_load_fpr32(ctx, fp1, ft);
10780 gen_store_fpr32(ctx, fp1, fd);
10781 gen_store_fpr32h(ctx, fp0, fd);
10782 tcg_temp_free_i32(fp0);
10783 tcg_temp_free_i32(fp1);
10789 TCGv_i32 fp0 = tcg_temp_new_i32();
10790 TCGv_i32 fp1 = tcg_temp_new_i32();
10792 gen_load_fpr32h(ctx, fp0, fs);
10793 gen_load_fpr32h(ctx, fp1, ft);
10794 gen_store_fpr32(ctx, fp1, fd);
10795 gen_store_fpr32h(ctx, fp0, fd);
10796 tcg_temp_free_i32(fp0);
10797 tcg_temp_free_i32(fp1);
10801 case OPC_CMP_UN_PS:
10802 case OPC_CMP_EQ_PS:
10803 case OPC_CMP_UEQ_PS:
10804 case OPC_CMP_OLT_PS:
10805 case OPC_CMP_ULT_PS:
10806 case OPC_CMP_OLE_PS:
10807 case OPC_CMP_ULE_PS:
10808 case OPC_CMP_SF_PS:
10809 case OPC_CMP_NGLE_PS:
10810 case OPC_CMP_SEQ_PS:
10811 case OPC_CMP_NGL_PS:
10812 case OPC_CMP_LT_PS:
10813 case OPC_CMP_NGE_PS:
10814 case OPC_CMP_LE_PS:
10815 case OPC_CMP_NGT_PS:
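/* Bit 6 of the opcode selects the MIPS-3D CABS.cond.PS form, which compares
   absolute values; func-48 recovers the condition code, since the C.cond
   function field occupies 0x30..0x3f. */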
10816 if (ctx->opcode & (1 << 6)) {
10817 gen_cmpabs_ps(ctx, func-48, ft, fs, cc);
10819 gen_cmp_ps(ctx, func-48, ft, fs, cc);
10823 MIPS_INVAL("farith");
10824 generate_exception_end(ctx, EXCP_RI);
10829 /* Coprocessor 3 (FPU) */
10830 static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
10831 int fd, int fs, int base, int index)
10833 TCGv t0 = tcg_temp_new();
10836 gen_load_gpr(t0, index);
10837 } else if (index == 0) {
10838 gen_load_gpr(t0, base);
10840 gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
10842 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
10848 TCGv_i32 fp0 = tcg_temp_new_i32();
10850 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
10851 tcg_gen_trunc_tl_i32(fp0, t0);
10852 gen_store_fpr32(ctx, fp0, fd);
10853 tcg_temp_free_i32(fp0);
10858 check_cp1_registers(ctx, fd);
10860 TCGv_i64 fp0 = tcg_temp_new_i64();
10861 tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
10862 gen_store_fpr64(ctx, fp0, fd);
10863 tcg_temp_free_i64(fp0);
10867 check_cp1_64bitmode(ctx);
10868 tcg_gen_andi_tl(t0, t0, ~0x7);
10870 TCGv_i64 fp0 = tcg_temp_new_i64();
10872 tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
10873 gen_store_fpr64(ctx, fp0, fd);
10874 tcg_temp_free_i64(fp0);
10880 TCGv_i32 fp0 = tcg_temp_new_i32();
10881 gen_load_fpr32(ctx, fp0, fs);
10882 tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
10883 tcg_temp_free_i32(fp0);
10888 check_cp1_registers(ctx, fs);
10890 TCGv_i64 fp0 = tcg_temp_new_i64();
10891 gen_load_fpr64(ctx, fp0, fs);
10892 tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
10893 tcg_temp_free_i64(fp0);
10897 check_cp1_64bitmode(ctx);
10898 tcg_gen_andi_tl(t0, t0, ~0x7);
10900 TCGv_i64 fp0 = tcg_temp_new_i64();
10901 gen_load_fpr64(ctx, fp0, fs);
10902 tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
10903 tcg_temp_free_i64(fp0);
10910 static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
10911 int fd, int fr, int fs, int ft)
10917 TCGv t0 = tcg_temp_local_new();
10918 TCGv_i32 fp = tcg_temp_new_i32();
10919 TCGv_i32 fph = tcg_temp_new_i32();
10920 TCGLabel *l1 = gen_new_label();
10921 TCGLabel *l2 = gen_new_label();
10923 gen_load_gpr(t0, fr);
10924 tcg_gen_andi_tl(t0, t0, 0x7);
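/* ALNV.PS: the low three bits of GPR[fr] give the byte offset. Offset 0
   simply copies fs; offset 4 splices the upper and lower singles of fs and
   ft (order depends on target endianness); other offsets leave fd unchanged
   here. */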
10926 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
10927 gen_load_fpr32(ctx, fp, fs);
10928 gen_load_fpr32h(ctx, fph, fs);
10929 gen_store_fpr32(ctx, fp, fd);
10930 gen_store_fpr32h(ctx, fph, fd);
10933 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
10935 #ifdef TARGET_WORDS_BIGENDIAN
10936 gen_load_fpr32(ctx, fp, fs);
10937 gen_load_fpr32h(ctx, fph, ft);
10938 gen_store_fpr32h(ctx, fp, fd);
10939 gen_store_fpr32(ctx, fph, fd);
10941 gen_load_fpr32h(ctx, fph, fs);
10942 gen_load_fpr32(ctx, fp, ft);
10943 gen_store_fpr32(ctx, fph, fd);
10944 gen_store_fpr32h(ctx, fp, fd);
10947 tcg_temp_free_i32(fp);
10948 tcg_temp_free_i32(fph);
10954 TCGv_i32 fp0 = tcg_temp_new_i32();
10955 TCGv_i32 fp1 = tcg_temp_new_i32();
10956 TCGv_i32 fp2 = tcg_temp_new_i32();
10958 gen_load_fpr32(ctx, fp0, fs);
10959 gen_load_fpr32(ctx, fp1, ft);
10960 gen_load_fpr32(ctx, fp2, fr);
10961 gen_helper_float_madd_s(fp2, cpu_env, fp0, fp1, fp2);
10962 tcg_temp_free_i32(fp0);
10963 tcg_temp_free_i32(fp1);
10964 gen_store_fpr32(ctx, fp2, fd);
10965 tcg_temp_free_i32(fp2);
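/* For the double-precision fused ops, check_cp1_registers() is passed
   fd | fs | ft | fr: in 32-bit FPU register mode doubles must live in
   even-numbered registers, so OR-ing the operands lets a single parity
   check catch any odd register. */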
10970 check_cp1_registers(ctx, fd | fs | ft | fr);
10972 TCGv_i64 fp0 = tcg_temp_new_i64();
10973 TCGv_i64 fp1 = tcg_temp_new_i64();
10974 TCGv_i64 fp2 = tcg_temp_new_i64();
10976 gen_load_fpr64(ctx, fp0, fs);
10977 gen_load_fpr64(ctx, fp1, ft);
10978 gen_load_fpr64(ctx, fp2, fr);
10979 gen_helper_float_madd_d(fp2, cpu_env, fp0, fp1, fp2);
10980 tcg_temp_free_i64(fp0);
10981 tcg_temp_free_i64(fp1);
10982 gen_store_fpr64(ctx, fp2, fd);
10983 tcg_temp_free_i64(fp2);
10989 TCGv_i64 fp0 = tcg_temp_new_i64();
10990 TCGv_i64 fp1 = tcg_temp_new_i64();
10991 TCGv_i64 fp2 = tcg_temp_new_i64();
10993 gen_load_fpr64(ctx, fp0, fs);
10994 gen_load_fpr64(ctx, fp1, ft);
10995 gen_load_fpr64(ctx, fp2, fr);
10996 gen_helper_float_madd_ps(fp2, cpu_env, fp0, fp1, fp2);
10997 tcg_temp_free_i64(fp0);
10998 tcg_temp_free_i64(fp1);
10999 gen_store_fpr64(ctx, fp2, fd);
11000 tcg_temp_free_i64(fp2);
11006 TCGv_i32 fp0 = tcg_temp_new_i32();
11007 TCGv_i32 fp1 = tcg_temp_new_i32();
11008 TCGv_i32 fp2 = tcg_temp_new_i32();
11010 gen_load_fpr32(ctx, fp0, fs);
11011 gen_load_fpr32(ctx, fp1, ft);
11012 gen_load_fpr32(ctx, fp2, fr);
11013 gen_helper_float_msub_s(fp2, cpu_env, fp0, fp1, fp2);
11014 tcg_temp_free_i32(fp0);
11015 tcg_temp_free_i32(fp1);
11016 gen_store_fpr32(ctx, fp2, fd);
11017 tcg_temp_free_i32(fp2);
11022 check_cp1_registers(ctx, fd | fs | ft | fr);
11024 TCGv_i64 fp0 = tcg_temp_new_i64();
11025 TCGv_i64 fp1 = tcg_temp_new_i64();
11026 TCGv_i64 fp2 = tcg_temp_new_i64();
11028 gen_load_fpr64(ctx, fp0, fs);
11029 gen_load_fpr64(ctx, fp1, ft);
11030 gen_load_fpr64(ctx, fp2, fr);
11031 gen_helper_float_msub_d(fp2, cpu_env, fp0, fp1, fp2);
11032 tcg_temp_free_i64(fp0);
11033 tcg_temp_free_i64(fp1);
11034 gen_store_fpr64(ctx, fp2, fd);
11035 tcg_temp_free_i64(fp2);
11041 TCGv_i64 fp0 = tcg_temp_new_i64();
11042 TCGv_i64 fp1 = tcg_temp_new_i64();
11043 TCGv_i64 fp2 = tcg_temp_new_i64();
11045 gen_load_fpr64(ctx, fp0, fs);
11046 gen_load_fpr64(ctx, fp1, ft);
11047 gen_load_fpr64(ctx, fp2, fr);
11048 gen_helper_float_msub_ps(fp2, cpu_env, fp0, fp1, fp2);
11049 tcg_temp_free_i64(fp0);
11050 tcg_temp_free_i64(fp1);
11051 gen_store_fpr64(ctx, fp2, fd);
11052 tcg_temp_free_i64(fp2);
11058 TCGv_i32 fp0 = tcg_temp_new_i32();
11059 TCGv_i32 fp1 = tcg_temp_new_i32();
11060 TCGv_i32 fp2 = tcg_temp_new_i32();
11062 gen_load_fpr32(ctx, fp0, fs);
11063 gen_load_fpr32(ctx, fp1, ft);
11064 gen_load_fpr32(ctx, fp2, fr);
11065 gen_helper_float_nmadd_s(fp2, cpu_env, fp0, fp1, fp2);
11066 tcg_temp_free_i32(fp0);
11067 tcg_temp_free_i32(fp1);
11068 gen_store_fpr32(ctx, fp2, fd);
11069 tcg_temp_free_i32(fp2);
11074 check_cp1_registers(ctx, fd | fs | ft | fr);
11076 TCGv_i64 fp0 = tcg_temp_new_i64();
11077 TCGv_i64 fp1 = tcg_temp_new_i64();
11078 TCGv_i64 fp2 = tcg_temp_new_i64();
11080 gen_load_fpr64(ctx, fp0, fs);
11081 gen_load_fpr64(ctx, fp1, ft);
11082 gen_load_fpr64(ctx, fp2, fr);
11083 gen_helper_float_nmadd_d(fp2, cpu_env, fp0, fp1, fp2);
11084 tcg_temp_free_i64(fp0);
11085 tcg_temp_free_i64(fp1);
11086 gen_store_fpr64(ctx, fp2, fd);
11087 tcg_temp_free_i64(fp2);
11093 TCGv_i64 fp0 = tcg_temp_new_i64();
11094 TCGv_i64 fp1 = tcg_temp_new_i64();
11095 TCGv_i64 fp2 = tcg_temp_new_i64();
11097 gen_load_fpr64(ctx, fp0, fs);
11098 gen_load_fpr64(ctx, fp1, ft);
11099 gen_load_fpr64(ctx, fp2, fr);
11100 gen_helper_float_nmadd_ps(fp2, cpu_env, fp0, fp1, fp2);
11101 tcg_temp_free_i64(fp0);
11102 tcg_temp_free_i64(fp1);
11103 gen_store_fpr64(ctx, fp2, fd);
11104 tcg_temp_free_i64(fp2);
11110 TCGv_i32 fp0 = tcg_temp_new_i32();
11111 TCGv_i32 fp1 = tcg_temp_new_i32();
11112 TCGv_i32 fp2 = tcg_temp_new_i32();
11114 gen_load_fpr32(ctx, fp0, fs);
11115 gen_load_fpr32(ctx, fp1, ft);
11116 gen_load_fpr32(ctx, fp2, fr);
11117 gen_helper_float_nmsub_s(fp2, cpu_env, fp0, fp1, fp2);
11118 tcg_temp_free_i32(fp0);
11119 tcg_temp_free_i32(fp1);
11120 gen_store_fpr32(ctx, fp2, fd);
11121 tcg_temp_free_i32(fp2);
11126 check_cp1_registers(ctx, fd | fs | ft | fr);
11128 TCGv_i64 fp0 = tcg_temp_new_i64();
11129 TCGv_i64 fp1 = tcg_temp_new_i64();
11130 TCGv_i64 fp2 = tcg_temp_new_i64();
11132 gen_load_fpr64(ctx, fp0, fs);
11133 gen_load_fpr64(ctx, fp1, ft);
11134 gen_load_fpr64(ctx, fp2, fr);
11135 gen_helper_float_nmsub_d(fp2, cpu_env, fp0, fp1, fp2);
11136 tcg_temp_free_i64(fp0);
11137 tcg_temp_free_i64(fp1);
11138 gen_store_fpr64(ctx, fp2, fd);
11139 tcg_temp_free_i64(fp2);
11145 TCGv_i64 fp0 = tcg_temp_new_i64();
11146 TCGv_i64 fp1 = tcg_temp_new_i64();
11147 TCGv_i64 fp2 = tcg_temp_new_i64();
11149 gen_load_fpr64(ctx, fp0, fs);
11150 gen_load_fpr64(ctx, fp1, ft);
11151 gen_load_fpr64(ctx, fp2, fr);
11152 gen_helper_float_nmsub_ps(fp2, cpu_env, fp0, fp1, fp2);
11153 tcg_temp_free_i64(fp0);
11154 tcg_temp_free_i64(fp1);
11155 gen_store_fpr64(ctx, fp2, fd);
11156 tcg_temp_free_i64(fp2);
11160 MIPS_INVAL("flt3_arith");
11161 generate_exception_end(ctx, EXCP_RI);
11166 static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
11170 #if !defined(CONFIG_USER_ONLY)
11171 /* The Linux kernel will emulate rdhwr if it's not supported natively.
11172 Therefore only check the ISA in system mode. */
11173 check_insn(ctx, ISA_MIPS32R2);
11175 t0 = tcg_temp_new();
11179 gen_helper_rdhwr_cpunum(t0, cpu_env);
11180 gen_store_gpr(t0, rt);
11183 gen_helper_rdhwr_synci_step(t0, cpu_env);
11184 gen_store_gpr(t0, rt);
11187 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
11190 gen_helper_rdhwr_cc(t0, cpu_env);
11191 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
11194 gen_store_gpr(t0, rt);
11195 /* Break the TB to be able to take timer interrupts immediately
11196 after reading count. DISAS_STOP isn't sufficient; we need to ensure
11197 we break completely out of translated code. */
11198 gen_save_pc(ctx->base.pc_next + 4);
11199 ctx->base.is_jmp = DISAS_EXIT;
11202 gen_helper_rdhwr_ccres(t0, cpu_env);
11203 gen_store_gpr(t0, rt);
11206 check_insn(ctx, ISA_MIPS32R6);
11208 /* Performance counter registers are not implemented other than
11209 * control register 0.
11211 generate_exception(ctx, EXCP_RI);
11213 gen_helper_rdhwr_performance(t0, cpu_env);
11214 gen_store_gpr(t0, rt);
11217 check_insn(ctx, ISA_MIPS32R6);
11218 gen_helper_rdhwr_xnp(t0, cpu_env);
11219 gen_store_gpr(t0, rt);
11222 #if defined(CONFIG_USER_ONLY)
11223 tcg_gen_ld_tl(t0, cpu_env,
11224 offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
11225 gen_store_gpr(t0, rt);
11228 if ((ctx->hflags & MIPS_HFLAG_CP0) ||
11229 (ctx->hflags & MIPS_HFLAG_HWRENA_ULR)) {
11230 tcg_gen_ld_tl(t0, cpu_env,
11231 offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
11232 gen_store_gpr(t0, rt);
11234 generate_exception_end(ctx, EXCP_RI);
11238 default: /* Invalid */
11239 MIPS_INVAL("rdhwr");
11240 generate_exception_end(ctx, EXCP_RI);
11246 static inline void clear_branch_hflags(DisasContext *ctx)
11248 ctx->hflags &= ~MIPS_HFLAG_BMASK;
11249 if (ctx->base.is_jmp == DISAS_NEXT) {
11250 save_cpu_state(ctx, 0);
11252 /* It is not safe to save ctx->hflags here, as hflags may be changed
11253 at execution time by the instruction in the delay / forbidden slot. */
11254 tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
11258 static void gen_branch(DisasContext *ctx, int insn_bytes)
11260 if (ctx->hflags & MIPS_HFLAG_BMASK) {
11261 int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
11262 /* Branch completion */
11263 clear_branch_hflags(ctx);
11264 ctx->base.is_jmp = DISAS_NORETURN;
11265 /* FIXME: Need to clear can_do_io. */
11266 switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
11267 case MIPS_HFLAG_FBNSLOT:
11268 gen_goto_tb(ctx, 0, ctx->base.pc_next + insn_bytes);
11271 /* unconditional branch */
11272 if (proc_hflags & MIPS_HFLAG_BX) {
11273 tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
11275 gen_goto_tb(ctx, 0, ctx->btarget);
11277 case MIPS_HFLAG_BL:
11278 /* branch-likely taken case */
11279 gen_goto_tb(ctx, 0, ctx->btarget);
11281 case MIPS_HFLAG_BC:
11282 /* Conditional branch */
11284 TCGLabel *l1 = gen_new_label();
11286 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
11287 gen_goto_tb(ctx, 1, ctx->base.pc_next + insn_bytes);
11289 gen_goto_tb(ctx, 0, ctx->btarget);
11292 case MIPS_HFLAG_BR:
11293 /* unconditional branch to register */
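/* With MIPS16/microMIPS, bit 0 of the target address selects the ISA mode:
   it is copied into MIPS_HFLAG_M16 and cleared from the PC before jumping. */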
11294 if (ctx->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
11295 TCGv t0 = tcg_temp_new();
11296 TCGv_i32 t1 = tcg_temp_new_i32();
11298 tcg_gen_andi_tl(t0, btarget, 0x1);
11299 tcg_gen_trunc_tl_i32(t1, t0);
11301 tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
11302 tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
11303 tcg_gen_or_i32(hflags, hflags, t1);
11304 tcg_temp_free_i32(t1);
11306 tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
11308 tcg_gen_mov_tl(cpu_PC, btarget);
11310 if (ctx->base.singlestep_enabled) {
11311 save_cpu_state(ctx, 0);
11312 gen_helper_raise_exception_debug(cpu_env);
11314 tcg_gen_lookup_and_goto_ptr();
11317 fprintf(stderr, "unknown branch 0x%x\n", proc_hflags);
11323 /* Compact Branches */
11324 static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
11325 int rs, int rt, int32_t offset)
11327 int bcond_compute = 0;
11328 TCGv t0 = tcg_temp_new();
11329 TCGv t1 = tcg_temp_new();
11330 int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;
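/* When executing MIPS16/microMIPS code, the ISA-mode bit is folded into the
   link address so that a later jump-register returns in the correct mode. */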
11332 if (ctx->hflags & MIPS_HFLAG_BMASK) {
11333 #ifdef MIPS_DEBUG_DISAS
11334 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
11335 "\n", ctx->base.pc_next);
11337 generate_exception_end(ctx, EXCP_RI);
11341 /* Load needed operands and calculate btarget */
11343 /* compact branch */
11344 case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
11345 case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
11346 gen_load_gpr(t0, rs);
11347 gen_load_gpr(t1, rt);
11349 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
11350 if (rs <= rt && rs == 0) {
11351 /* OPC_BEQZALC, OPC_BNEZALC */
11352 tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
11355 case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
11356 case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
11357 gen_load_gpr(t0, rs);
11358 gen_load_gpr(t1, rt);
11360 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
11362 case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
11363 case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
11364 if (rs == 0 || rs == rt) {
11365 /* OPC_BLEZALC, OPC_BGEZALC */
11366 /* OPC_BGTZALC, OPC_BLTZALC */
11367 tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
11369 gen_load_gpr(t0, rs);
11370 gen_load_gpr(t1, rt);
11372 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
11376 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
11381 /* OPC_BEQZC, OPC_BNEZC */
11382 gen_load_gpr(t0, rs);
11384 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
11386 /* OPC_JIC, OPC_JIALC */
11387 TCGv tbase = tcg_temp_new();
11388 TCGv toffset = tcg_temp_new();
11390 gen_load_gpr(tbase, rt);
11391 tcg_gen_movi_tl(toffset, offset);
11392 gen_op_addr_add(ctx, btarget, tbase, toffset);
11393 tcg_temp_free(tbase);
11394 tcg_temp_free(toffset);
11398 MIPS_INVAL("Compact branch/jump");
11399 generate_exception_end(ctx, EXCP_RI);
11403 if (bcond_compute == 0) {
11404 /* Unconditional compact branch */
11407 tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
11410 ctx->hflags |= MIPS_HFLAG_BR;
11413 tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4 + m16_lowbit);
11416 ctx->hflags |= MIPS_HFLAG_B;
11419 MIPS_INVAL("Compact branch/jump");
11420 generate_exception_end(ctx, EXCP_RI);
11424 /* Generate the branch here, as compact branches don't have a delay slot */
11425 gen_branch(ctx, 4);
11427 /* Conditional compact branch */
11428 TCGLabel *fs = gen_new_label();
11429 save_cpu_state(ctx, 0);
11432 case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC */
11433 if (rs == 0 && rt != 0) {
11435 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
11436 } else if (rs != 0 && rt != 0 && rs == rt) {
11438 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
11441 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
11444 case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC */
11445 if (rs == 0 && rt != 0) {
11447 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
11448 } else if (rs != 0 && rt != 0 && rs == rt) {
11450 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
11453 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
11456 case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC */
11457 if (rs == 0 && rt != 0) {
11459 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
11460 } else if (rs != 0 && rt != 0 && rs == rt) {
11462 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
11465 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
11468 case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC */
11469 if (rs == 0 && rt != 0) {
11471 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
11472 } else if (rs != 0 && rt != 0 && rs == rt) {
11474 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
11477 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
11480 case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
11481 case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
11483 /* OPC_BOVC, OPC_BNVC */
11484 TCGv t2 = tcg_temp_new();
11485 TCGv t3 = tcg_temp_new();
11486 TCGv t4 = tcg_temp_new();
11487 TCGv input_overflow = tcg_temp_new();
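/* BOVC/BNVC branch on signed 32-bit overflow of rs + rt. input_overflow
   flags operands that are not proper sign-extended 32-bit values; the
   xor/andc sequence then sets the sign bit of t4 exactly when the operands
   have the same sign but the 32-bit sum's sign differs, i.e. the usual
   (sum ^ b) & ~(a ^ b) < 0 overflow test. */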
11489 gen_load_gpr(t0, rs);
11490 gen_load_gpr(t1, rt);
11491 tcg_gen_ext32s_tl(t2, t0);
11492 tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
11493 tcg_gen_ext32s_tl(t3, t1);
11494 tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
11495 tcg_gen_or_tl(input_overflow, input_overflow, t4);
11497 tcg_gen_add_tl(t4, t2, t3);
11498 tcg_gen_ext32s_tl(t4, t4);
11499 tcg_gen_xor_tl(t2, t2, t3);
11500 tcg_gen_xor_tl(t3, t4, t3);
11501 tcg_gen_andc_tl(t2, t3, t2);
11502 tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
11503 tcg_gen_or_tl(t4, t4, input_overflow);
11504 if (opc == OPC_BOVC) {
11506 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
11509 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
11511 tcg_temp_free(input_overflow);
11515 } else if (rs < rt && rs == 0) {
11516 /* OPC_BEQZALC, OPC_BNEZALC */
11517 if (opc == OPC_BEQZALC) {
11519 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t1, 0, fs);
11522 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t1, 0, fs);
11525 /* OPC_BEQC, OPC_BNEC */
11526 if (opc == OPC_BEQC) {
11528 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ), t0, t1, fs);
11531 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE), t0, t1, fs);
11536 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
11539 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t0, 0, fs);
11542 MIPS_INVAL("Compact conditional branch/jump");
11543 generate_exception_end(ctx, EXCP_RI);
11547 /* Generate the branch here, as compact branches don't have a delay slot */
11548 gen_goto_tb(ctx, 1, ctx->btarget);
11551 ctx->hflags |= MIPS_HFLAG_FBNSLOT;
11559 /* ISA extensions (ASEs) */
11560 /* MIPS16 extension to MIPS32 */
11562 /* MIPS16 major opcodes */
11564 M16_OPC_ADDIUSP = 0x00,
11565 M16_OPC_ADDIUPC = 0x01,
11567 M16_OPC_JAL = 0x03,
11568 M16_OPC_BEQZ = 0x04,
11569 M16_OPC_BNEQZ = 0x05,
11570 M16_OPC_SHIFT = 0x06,
11572 M16_OPC_RRIA = 0x08,
11573 M16_OPC_ADDIU8 = 0x09,
11574 M16_OPC_SLTI = 0x0a,
11575 M16_OPC_SLTIU = 0x0b,
11578 M16_OPC_CMPI = 0x0e,
11582 M16_OPC_LWSP = 0x12,
11584 M16_OPC_LBU = 0x14,
11585 M16_OPC_LHU = 0x15,
11586 M16_OPC_LWPC = 0x16,
11587 M16_OPC_LWU = 0x17,
11590 M16_OPC_SWSP = 0x1a,
11592 M16_OPC_RRR = 0x1c,
11594 M16_OPC_EXTEND = 0x1e,
11598 /* I8 funct field */
11617 /* RR funct field */
11651 /* I64 funct field */
11659 I64_DADDIUPC = 0x6,
11663 /* RR ry field for CNVT */
11665 RR_RY_CNVT_ZEB = 0x0,
11666 RR_RY_CNVT_ZEH = 0x1,
11667 RR_RY_CNVT_ZEW = 0x2,
11668 RR_RY_CNVT_SEB = 0x4,
11669 RR_RY_CNVT_SEH = 0x5,
11670 RR_RY_CNVT_SEW = 0x6,
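/* Map a 3-bit MIPS16 register field onto the full GPR numbers
   (s0, s1, v0, v1, a0-a3). */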
11673 static int xlat (int r)
11675 static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
11680 static void gen_mips16_save (DisasContext *ctx,
11681 int xsregs, int aregs,
11682 int do_ra, int do_s0, int do_s1,
11685 TCGv t0 = tcg_temp_new();
11686 TCGv t1 = tcg_temp_new();
11687 TCGv t2 = tcg_temp_new();
11717 generate_exception_end(ctx, EXCP_RI);
11723 gen_base_offset_addr(ctx, t0, 29, 12);
11724 gen_load_gpr(t1, 7);
11725 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
11728 gen_base_offset_addr(ctx, t0, 29, 8);
11729 gen_load_gpr(t1, 6);
11730 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
11733 gen_base_offset_addr(ctx, t0, 29, 4);
11734 gen_load_gpr(t1, 5);
11735 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
11738 gen_base_offset_addr(ctx, t0, 29, 0);
11739 gen_load_gpr(t1, 4);
11740 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
11743 gen_load_gpr(t0, 29);
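/* Registers are stored at decreasing addresses below the incoming SP;
   SP itself is only adjusted by the full frame size once all stores are
   done. */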
11745 #define DECR_AND_STORE(reg) do { \
11746 tcg_gen_movi_tl(t2, -4); \
11747 gen_op_addr_add(ctx, t0, t0, t2); \
11748 gen_load_gpr(t1, reg); \
11749 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
11753 DECR_AND_STORE(31);
11758 DECR_AND_STORE(30);
11761 DECR_AND_STORE(23);
11764 DECR_AND_STORE(22);
11767 DECR_AND_STORE(21);
11770 DECR_AND_STORE(20);
11773 DECR_AND_STORE(19);
11776 DECR_AND_STORE(18);
11780 DECR_AND_STORE(17);
11783 DECR_AND_STORE(16);
11813 generate_exception_end(ctx, EXCP_RI);
11829 #undef DECR_AND_STORE
11831 tcg_gen_movi_tl(t2, -framesize);
11832 gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
11838 static void gen_mips16_restore (DisasContext *ctx,
11839 int xsregs, int aregs,
11840 int do_ra, int do_s0, int do_s1,
11844 TCGv t0 = tcg_temp_new();
11845 TCGv t1 = tcg_temp_new();
11846 TCGv t2 = tcg_temp_new();
11848 tcg_gen_movi_tl(t2, framesize);
11849 gen_op_addr_add(ctx, t0, cpu_gpr[29], t2);
11851 #define DECR_AND_LOAD(reg) do { \
11852 tcg_gen_movi_tl(t2, -4); \
11853 gen_op_addr_add(ctx, t0, t0, t2); \
11854 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
11855 gen_store_gpr(t1, reg); \
11919 generate_exception_end(ctx, EXCP_RI);
11935 #undef DECR_AND_LOAD
11937 tcg_gen_movi_tl(t2, framesize);
11938 gen_op_addr_add(ctx, cpu_gpr[29], cpu_gpr[29], t2);
11944 static void gen_addiupc (DisasContext *ctx, int rx, int imm,
11945 int is_64_bit, int extended)
11949 if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
11950 generate_exception_end(ctx, EXCP_RI);
11954 t0 = tcg_temp_new();
11956 tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
11957 tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
11959 tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
11965 static void gen_cache_operation(DisasContext *ctx, uint32_t op, int base,
11968 TCGv_i32 t0 = tcg_const_i32(op);
11969 TCGv t1 = tcg_temp_new();
11970 gen_base_offset_addr(ctx, t1, base, offset);
11971 gen_helper_cache(cpu_env, t1, t0);
11974 #if defined(TARGET_MIPS64)
11975 static void decode_i64_mips16 (DisasContext *ctx,
11976 int ry, int funct, int16_t offset,
11981 check_insn(ctx, ISA_MIPS3);
11982 check_mips_64(ctx);
11983 offset = extended ? offset : offset << 3;
11984 gen_ld(ctx, OPC_LD, ry, 29, offset);
11987 check_insn(ctx, ISA_MIPS3);
11988 check_mips_64(ctx);
11989 offset = extended ? offset : offset << 3;
11990 gen_st(ctx, OPC_SD, ry, 29, offset);
11993 check_insn(ctx, ISA_MIPS3);
11994 check_mips_64(ctx);
11995 offset = extended ? offset : (ctx->opcode & 0xff) << 3;
11996 gen_st(ctx, OPC_SD, 31, 29, offset);
11999 check_insn(ctx, ISA_MIPS3);
12000 check_mips_64(ctx);
12001 offset = extended ? offset : ((int8_t)ctx->opcode) << 3;
12002 gen_arith_imm(ctx, OPC_DADDIU, 29, 29, offset);
12005 check_insn(ctx, ISA_MIPS3);
12006 check_mips_64(ctx);
12007 if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
12008 generate_exception_end(ctx, EXCP_RI);
12010 offset = extended ? offset : offset << 3;
12011 gen_ld(ctx, OPC_LDPC, ry, 0, offset);
12015 check_insn(ctx, ISA_MIPS3);
12016 check_mips_64(ctx);
12017 offset = extended ? offset : ((int8_t)(offset << 3)) >> 3;
12018 gen_arith_imm(ctx, OPC_DADDIU, ry, ry, offset);
12021 check_insn(ctx, ISA_MIPS3);
12022 check_mips_64(ctx);
12023 offset = extended ? offset : offset << 2;
12024 gen_addiupc(ctx, ry, offset, 1, extended);
12027 check_insn(ctx, ISA_MIPS3);
12028 check_mips_64(ctx);
12029 offset = extended ? offset : offset << 2;
12030 gen_arith_imm(ctx, OPC_DADDIU, ry, 29, offset);
12036 static int decode_extended_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
12038 int extend = cpu_lduw_code(env, ctx->base.pc_next + 2);
12039 int op, rx, ry, funct, sa;
12040 int16_t imm, offset;
12042 ctx->opcode = (ctx->opcode << 16) | extend;
12043 op = (ctx->opcode >> 11) & 0x1f;
12044 sa = (ctx->opcode >> 22) & 0x1f;
12045 funct = (ctx->opcode >> 8) & 0x7;
12046 rx = xlat((ctx->opcode >> 8) & 0x7);
12047 ry = xlat((ctx->opcode >> 5) & 0x7);
12048 offset = imm = (int16_t) (((ctx->opcode >> 16) & 0x1f) << 11
12049 | ((ctx->opcode >> 21) & 0x3f) << 5
12050 | (ctx->opcode & 0x1f));
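/* In the combined 32-bit opcode (EXTEND prefix in the upper halfword),
   bits 20..16 supply imm[15:11], bits 26..21 supply imm[10:5], and the base
   instruction's low five bits give imm[4:0]. */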
12052 /* The extended opcodes cleverly reuse the opcodes from their 16-bit versions. */
12055 case M16_OPC_ADDIUSP:
12056 gen_arith_imm(ctx, OPC_ADDIU, rx, 29, imm);
12058 case M16_OPC_ADDIUPC:
12059 gen_addiupc(ctx, rx, imm, 0, 1);
12062 gen_compute_branch(ctx, OPC_BEQ, 4, 0, 0, offset << 1, 0);
12063 /* No delay slot, so just process as a normal instruction */
12066 gen_compute_branch(ctx, OPC_BEQ, 4, rx, 0, offset << 1, 0);
12067 /* No delay slot, so just process as a normal instruction */
12069 case M16_OPC_BNEQZ:
12070 gen_compute_branch(ctx, OPC_BNE, 4, rx, 0, offset << 1, 0);
12071 /* No delay slot, so just process as a normal instruction */
12073 case M16_OPC_SHIFT:
12074 switch (ctx->opcode & 0x3) {
12076 gen_shift_imm(ctx, OPC_SLL, rx, ry, sa);
12079 #if defined(TARGET_MIPS64)
12080 check_mips_64(ctx);
12081 gen_shift_imm(ctx, OPC_DSLL, rx, ry, sa);
12083 generate_exception_end(ctx, EXCP_RI);
12087 gen_shift_imm(ctx, OPC_SRL, rx, ry, sa);
12090 gen_shift_imm(ctx, OPC_SRA, rx, ry, sa);
12094 #if defined(TARGET_MIPS64)
12096 check_insn(ctx, ISA_MIPS3);
12097 check_mips_64(ctx);
12098 gen_ld(ctx, OPC_LD, ry, rx, offset);
12102 imm = ctx->opcode & 0xf;
12103 imm = imm | ((ctx->opcode >> 20) & 0x7f) << 4;
12104 imm = imm | ((ctx->opcode >> 16) & 0xf) << 11;
12105 imm = (int16_t) (imm << 1) >> 1;
12106 if ((ctx->opcode >> 4) & 0x1) {
12107 #if defined(TARGET_MIPS64)
12108 check_mips_64(ctx);
12109 gen_arith_imm(ctx, OPC_DADDIU, ry, rx, imm);
12111 generate_exception_end(ctx, EXCP_RI);
12114 gen_arith_imm(ctx, OPC_ADDIU, ry, rx, imm);
12117 case M16_OPC_ADDIU8:
12118 gen_arith_imm(ctx, OPC_ADDIU, rx, rx, imm);
12121 gen_slt_imm(ctx, OPC_SLTI, 24, rx, imm);
12123 case M16_OPC_SLTIU:
12124 gen_slt_imm(ctx, OPC_SLTIU, 24, rx, imm);
12129 gen_compute_branch(ctx, OPC_BEQ, 4, 24, 0, offset << 1, 0);
12132 gen_compute_branch(ctx, OPC_BNE, 4, 24, 0, offset << 1, 0);
12135 gen_st(ctx, OPC_SW, 31, 29, imm);
12138 gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm);
12141 check_insn(ctx, ISA_MIPS32);
12143 int xsregs = (ctx->opcode >> 24) & 0x7;
12144 int aregs = (ctx->opcode >> 16) & 0xf;
12145 int do_ra = (ctx->opcode >> 6) & 0x1;
12146 int do_s0 = (ctx->opcode >> 5) & 0x1;
12147 int do_s1 = (ctx->opcode >> 4) & 0x1;
12148 int framesize = (((ctx->opcode >> 20) & 0xf) << 4
12149 | (ctx->opcode & 0xf)) << 3;
12151 if (ctx->opcode & (1 << 7)) {
12152 gen_mips16_save(ctx, xsregs, aregs,
12153 do_ra, do_s0, do_s1,
12156 gen_mips16_restore(ctx, xsregs, aregs,
12157 do_ra, do_s0, do_s1,
12163 generate_exception_end(ctx, EXCP_RI);
12168 tcg_gen_movi_tl(cpu_gpr[rx], (uint16_t) imm);
12171 tcg_gen_xori_tl(cpu_gpr[24], cpu_gpr[rx], (uint16_t) imm);
12173 #if defined(TARGET_MIPS64)
12175 check_insn(ctx, ISA_MIPS3);
12176 check_mips_64(ctx);
12177 gen_st(ctx, OPC_SD, ry, rx, offset);
12181 gen_ld(ctx, OPC_LB, ry, rx, offset);
12184 gen_ld(ctx, OPC_LH, ry, rx, offset);
12187 gen_ld(ctx, OPC_LW, rx, 29, offset);
12190 gen_ld(ctx, OPC_LW, ry, rx, offset);
12193 gen_ld(ctx, OPC_LBU, ry, rx, offset);
12196 gen_ld(ctx, OPC_LHU, ry, rx, offset);
12199 gen_ld(ctx, OPC_LWPC, rx, 0, offset);
12201 #if defined(TARGET_MIPS64)
12203 check_insn(ctx, ISA_MIPS3);
12204 check_mips_64(ctx);
12205 gen_ld(ctx, OPC_LWU, ry, rx, offset);
12209 gen_st(ctx, OPC_SB, ry, rx, offset);
12212 gen_st(ctx, OPC_SH, ry, rx, offset);
12215 gen_st(ctx, OPC_SW, rx, 29, offset);
12218 gen_st(ctx, OPC_SW, ry, rx, offset);
12220 #if defined(TARGET_MIPS64)
12222 decode_i64_mips16(ctx, ry, funct, offset, 1);
12226 generate_exception_end(ctx, EXCP_RI);
12233 static inline bool is_uhi(int sdbbp_code)
12235 #ifdef CONFIG_USER_ONLY
12238 return semihosting_enabled() && sdbbp_code == 1;
12242 static int decode_mips16_opc (CPUMIPSState *env, DisasContext *ctx)
12246 int op, cnvt_op, op1, offset;
12250 op = (ctx->opcode >> 11) & 0x1f;
12251 sa = (ctx->opcode >> 2) & 0x7;
12252 sa = sa == 0 ? 8 : sa;
12253 rx = xlat((ctx->opcode >> 8) & 0x7);
12254 cnvt_op = (ctx->opcode >> 5) & 0x7;
12255 ry = xlat((ctx->opcode >> 5) & 0x7);
12256 op1 = offset = ctx->opcode & 0x1f;
12261 case M16_OPC_ADDIUSP:
12263 int16_t imm = ((uint8_t) ctx->opcode) << 2;
12265 gen_arith_imm(ctx, OPC_ADDIU, rx, 29, imm);
12268 case M16_OPC_ADDIUPC:
12269 gen_addiupc(ctx, rx, ((uint8_t) ctx->opcode) << 2, 0, 0);
12272 offset = (ctx->opcode & 0x7ff) << 1;
12273 offset = (int16_t)(offset << 4) >> 4;
12274 gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0, offset, 0);
12275 /* No delay slot, so just process as a normal instruction */
12278 offset = cpu_lduw_code(env, ctx->base.pc_next + 2);
12279 offset = (((ctx->opcode & 0x1f) << 21)
12280 | ((ctx->opcode >> 5) & 0x1f) << 16
12282 op = ((ctx->opcode >> 10) & 0x1) ? OPC_JALX : OPC_JAL;
12283 gen_compute_branch(ctx, op, 4, rx, ry, offset, 2);
12287 gen_compute_branch(ctx, OPC_BEQ, 2, rx, 0,
12288 ((int8_t)ctx->opcode) << 1, 0);
12289 /* No delay slot, so just process as a normal instruction */
12291 case M16_OPC_BNEQZ:
12292 gen_compute_branch(ctx, OPC_BNE, 2, rx, 0,
12293 ((int8_t)ctx->opcode) << 1, 0);
12294 /* No delay slot, so just process as a normal instruction */
12296 case M16_OPC_SHIFT:
12297 switch (ctx->opcode & 0x3) {
12299 gen_shift_imm(ctx, OPC_SLL, rx, ry, sa);
12302 #if defined(TARGET_MIPS64)
12303 check_insn(ctx, ISA_MIPS3);
12304 check_mips_64(ctx);
12305 gen_shift_imm(ctx, OPC_DSLL, rx, ry, sa);
12307 generate_exception_end(ctx, EXCP_RI);
12311 gen_shift_imm(ctx, OPC_SRL, rx, ry, sa);
12314 gen_shift_imm(ctx, OPC_SRA, rx, ry, sa);
12318 #if defined(TARGET_MIPS64)
12320 check_insn(ctx, ISA_MIPS3);
12321 check_mips_64(ctx);
12322 gen_ld(ctx, OPC_LD, ry, rx, offset << 3);
12327 int16_t imm = (int8_t)((ctx->opcode & 0xf) << 4) >> 4;
12329 if ((ctx->opcode >> 4) & 1) {
12330 #if defined(TARGET_MIPS64)
12331 check_insn(ctx, ISA_MIPS3);
12332 check_mips_64(ctx);
12333 gen_arith_imm(ctx, OPC_DADDIU, ry, rx, imm);
12335 generate_exception_end(ctx, EXCP_RI);
12338 gen_arith_imm(ctx, OPC_ADDIU, ry, rx, imm);
12342 case M16_OPC_ADDIU8:
12344 int16_t imm = (int8_t) ctx->opcode;
12346 gen_arith_imm(ctx, OPC_ADDIU, rx, rx, imm);
12351 int16_t imm = (uint8_t) ctx->opcode;
12352 gen_slt_imm(ctx, OPC_SLTI, 24, rx, imm);
12355 case M16_OPC_SLTIU:
12357 int16_t imm = (uint8_t) ctx->opcode;
12358 gen_slt_imm(ctx, OPC_SLTIU, 24, rx, imm);
12365 funct = (ctx->opcode >> 8) & 0x7;
12368 gen_compute_branch(ctx, OPC_BEQ, 2, 24, 0,
12369 ((int8_t)ctx->opcode) << 1, 0);
12372 gen_compute_branch(ctx, OPC_BNE, 2, 24, 0,
12373 ((int8_t)ctx->opcode) << 1, 0);
12376 gen_st(ctx, OPC_SW, 31, 29, (ctx->opcode & 0xff) << 2);
12379 gen_arith_imm(ctx, OPC_ADDIU, 29, 29,
12380 ((int8_t)ctx->opcode) << 3);
12383 check_insn(ctx, ISA_MIPS32);
12385 int do_ra = ctx->opcode & (1 << 6);
12386 int do_s0 = ctx->opcode & (1 << 5);
12387 int do_s1 = ctx->opcode & (1 << 4);
12388 int framesize = ctx->opcode & 0xf;
12390 if (framesize == 0) {
12393 framesize = framesize << 3;
12396 if (ctx->opcode & (1 << 7)) {
12397 gen_mips16_save(ctx, 0, 0,
12398 do_ra, do_s0, do_s1, framesize);
12400 gen_mips16_restore(ctx, 0, 0,
12401 do_ra, do_s0, do_s1, framesize);
12407 int rz = xlat(ctx->opcode & 0x7);
12409 reg32 = (((ctx->opcode >> 3) & 0x3) << 3) |
12410 ((ctx->opcode >> 5) & 0x7);
12411 gen_arith(ctx, OPC_ADDU, reg32, rz, 0);
12415 reg32 = ctx->opcode & 0x1f;
12416 gen_arith(ctx, OPC_ADDU, ry, reg32, 0);
12419 generate_exception_end(ctx, EXCP_RI);
12426 int16_t imm = (uint8_t) ctx->opcode;
12428 gen_arith_imm(ctx, OPC_ADDIU, rx, 0, imm);
12433 int16_t imm = (uint8_t) ctx->opcode;
12434 gen_logic_imm(ctx, OPC_XORI, 24, rx, imm);
12437 #if defined(TARGET_MIPS64)
12439 check_insn(ctx, ISA_MIPS3);
12440 check_mips_64(ctx);
12441 gen_st(ctx, OPC_SD, ry, rx, offset << 3);
12445 gen_ld(ctx, OPC_LB, ry, rx, offset);
12448 gen_ld(ctx, OPC_LH, ry, rx, offset << 1);
12451 gen_ld(ctx, OPC_LW, rx, 29, ((uint8_t)ctx->opcode) << 2);
12454 gen_ld(ctx, OPC_LW, ry, rx, offset << 2);
12457 gen_ld(ctx, OPC_LBU, ry, rx, offset);
12460 gen_ld(ctx, OPC_LHU, ry, rx, offset << 1);
12463 gen_ld(ctx, OPC_LWPC, rx, 0, ((uint8_t)ctx->opcode) << 2);
12465 #if defined (TARGET_MIPS64)
12467 check_insn(ctx, ISA_MIPS3);
12468 check_mips_64(ctx);
12469 gen_ld(ctx, OPC_LWU, ry, rx, offset << 2);
12473 gen_st(ctx, OPC_SB, ry, rx, offset);
12476 gen_st(ctx, OPC_SH, ry, rx, offset << 1);
12479 gen_st(ctx, OPC_SW, rx, 29, ((uint8_t)ctx->opcode) << 2);
12482 gen_st(ctx, OPC_SW, ry, rx, offset << 2);
12486 int rz = xlat((ctx->opcode >> 2) & 0x7);
12489 switch (ctx->opcode & 0x3) {
12491 mips32_op = OPC_ADDU;
12494 mips32_op = OPC_SUBU;
12496 #if defined(TARGET_MIPS64)
12498 mips32_op = OPC_DADDU;
12499 check_insn(ctx, ISA_MIPS3);
12500 check_mips_64(ctx);
12503 mips32_op = OPC_DSUBU;
12504 check_insn(ctx, ISA_MIPS3);
12505 check_mips_64(ctx);
12509 generate_exception_end(ctx, EXCP_RI);
12513 gen_arith(ctx, mips32_op, rz, rx, ry);
12522 int nd = (ctx->opcode >> 7) & 0x1;
12523 int link = (ctx->opcode >> 6) & 0x1;
12524 int ra = (ctx->opcode >> 5) & 0x1;
12527 check_insn(ctx, ISA_MIPS32);
12536 gen_compute_branch(ctx, op, 2, ra ? 31 : rx, 31, 0,
12541 if (is_uhi(extract32(ctx->opcode, 5, 6))) {
12542 gen_helper_do_semihosting(cpu_env);
12544 /* XXX: not clear which exception should be raised
12545 * when in debug mode...
12547 check_insn(ctx, ISA_MIPS32);
12548 generate_exception_end(ctx, EXCP_DBp);
12552 gen_slt(ctx, OPC_SLT, 24, rx, ry);
12555 gen_slt(ctx, OPC_SLTU, 24, rx, ry);
12558 generate_exception_end(ctx, EXCP_BREAK);
12561 gen_shift(ctx, OPC_SLLV, ry, rx, ry);
12564 gen_shift(ctx, OPC_SRLV, ry, rx, ry);
12567 gen_shift(ctx, OPC_SRAV, ry, rx, ry);
12569 #if defined (TARGET_MIPS64)
12571 check_insn(ctx, ISA_MIPS3);
12572 check_mips_64(ctx);
12573 gen_shift_imm(ctx, OPC_DSRL, ry, ry, sa);
12577 gen_logic(ctx, OPC_XOR, 24, rx, ry);
12580 gen_arith(ctx, OPC_SUBU, rx, 0, ry);
12583 gen_logic(ctx, OPC_AND, rx, rx, ry);
12586 gen_logic(ctx, OPC_OR, rx, rx, ry);
12589 gen_logic(ctx, OPC_XOR, rx, rx, ry);
12592 gen_logic(ctx, OPC_NOR, rx, ry, 0);
12595 gen_HILO(ctx, OPC_MFHI, 0, rx);
12598 check_insn(ctx, ISA_MIPS32);
12600 case RR_RY_CNVT_ZEB:
12601 tcg_gen_ext8u_tl(cpu_gpr[rx], cpu_gpr[rx]);
12603 case RR_RY_CNVT_ZEH:
12604 tcg_gen_ext16u_tl(cpu_gpr[rx], cpu_gpr[rx]);
12606 case RR_RY_CNVT_SEB:
12607 tcg_gen_ext8s_tl(cpu_gpr[rx], cpu_gpr[rx]);
12609 case RR_RY_CNVT_SEH:
12610 tcg_gen_ext16s_tl(cpu_gpr[rx], cpu_gpr[rx]);
12612 #if defined (TARGET_MIPS64)
12613 case RR_RY_CNVT_ZEW:
12614 check_insn(ctx, ISA_MIPS64);
12615 check_mips_64(ctx);
12616 tcg_gen_ext32u_tl(cpu_gpr[rx], cpu_gpr[rx]);
12618 case RR_RY_CNVT_SEW:
12619 check_insn(ctx, ISA_MIPS64);
12620 check_mips_64(ctx);
12621 tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
12625 generate_exception_end(ctx, EXCP_RI);
12630 gen_HILO(ctx, OPC_MFLO, 0, rx);
12632 #if defined (TARGET_MIPS64)
12634 check_insn(ctx, ISA_MIPS3);
12635 check_mips_64(ctx);
12636 gen_shift_imm(ctx, OPC_DSRA, ry, ry, sa);
12639 check_insn(ctx, ISA_MIPS3);
12640 check_mips_64(ctx);
12641 gen_shift(ctx, OPC_DSLLV, ry, rx, ry);
12644 check_insn(ctx, ISA_MIPS3);
12645 check_mips_64(ctx);
12646 gen_shift(ctx, OPC_DSRLV, ry, rx, ry);
12649 check_insn(ctx, ISA_MIPS3);
12650 check_mips_64(ctx);
12651 gen_shift(ctx, OPC_DSRAV, ry, rx, ry);
12655 gen_muldiv(ctx, OPC_MULT, 0, rx, ry);
12658 gen_muldiv(ctx, OPC_MULTU, 0, rx, ry);
12661 gen_muldiv(ctx, OPC_DIV, 0, rx, ry);
12664 gen_muldiv(ctx, OPC_DIVU, 0, rx, ry);
12666 #if defined (TARGET_MIPS64)
12668 check_insn(ctx, ISA_MIPS3);
12669 check_mips_64(ctx);
12670 gen_muldiv(ctx, OPC_DMULT, 0, rx, ry);
12673 check_insn(ctx, ISA_MIPS3);
12674 check_mips_64(ctx);
12675 gen_muldiv(ctx, OPC_DMULTU, 0, rx, ry);
12678 check_insn(ctx, ISA_MIPS3);
12679 check_mips_64(ctx);
12680 gen_muldiv(ctx, OPC_DDIV, 0, rx, ry);
12683 check_insn(ctx, ISA_MIPS3);
12684 check_mips_64(ctx);
12685 gen_muldiv(ctx, OPC_DDIVU, 0, rx, ry);
12689 generate_exception_end(ctx, EXCP_RI);
12693 case M16_OPC_EXTEND:
12694 decode_extended_mips16_opc(env, ctx);
12697 #if defined(TARGET_MIPS64)
12699 funct = (ctx->opcode >> 8) & 0x7;
12700 decode_i64_mips16(ctx, ry, funct, offset, 0);
12704 generate_exception_end(ctx, EXCP_RI);
12711 /* microMIPS extension to MIPS32/MIPS64 */
12714 * microMIPS32/microMIPS64 major opcodes
12716 * 1. MIPS Architecture for Programmers Volume II-B:
12717 * The microMIPS32 Instruction Set (Revision 3.05)
12719 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12721 * 2. MIPS Architecture For Programmers Volume II-A:
12722 * The MIPS64 Instruction Set (Revision 3.51)
12752 POOL32S = 0x16, /* MIPS64 */
12753 DADDIU32 = 0x17, /* MIPS64 */
12782 /* 0x29 is reserved */
12795 /* 0x31 is reserved */
12808 SD32 = 0x36, /* MIPS64 */
12809 LD32 = 0x37, /* MIPS64 */
12811 /* 0x39 is reserved */
12827 /* PCREL instructions perform PC-relative address calculation (bits 20..16). */
12849 /* POOL32A encoding of minor opcode field */
12852 /* These opcodes are distinguished only by bits 9..6; those bits are
12853 * what are recorded below. */
12890 /* The following can be distinguished by their lower 6 bits. */
12900 /* POOL32AXF encoding of minor opcode field extension */
12903 * 1. MIPS Architecture for Programmers Volume II-B:
12904 * The microMIPS32 Instruction Set (Revision 3.05)
12906 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12908 * 2. MIPS Architecture for Programmers Volume IV-e:
12909 * The MIPS DSP Application-Specific Extension
12910 * to the microMIPS32 Architecture (Revision 2.34)
12912 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12927 /* begin of microMIPS32 DSP */
12929 /* bits 13..12 for 0x01 */
12935 /* bits 13..12 for 0x2a */
12941 /* bits 13..12 for 0x32 */
12945 /* end of microMIPS32 DSP */
12947 /* bits 15..12 for 0x2c */
12964 /* bits 15..12 for 0x34 */
12972 /* bits 15..12 for 0x3c */
12974 JR = 0x0, /* alias */
12982 /* bits 15..12 for 0x05 */
12986 /* bits 15..12 for 0x0d */
12998 /* bits 15..12 for 0x15 */
13004 /* bits 15..12 for 0x1d */
13008 /* bits 15..12 for 0x2d */
13013 /* bits 15..12 for 0x35 */
13020 /* POOL32B encoding of minor opcode field (bits 15..12) */
13036 /* POOL32C encoding of minor opcode field (bits 15..12) */
13057 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
13070 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
13083 /* POOL32F encoding of minor opcode field (bits 5..0) */
13086 /* These are the values of bits 7..6 */
13095 /* These are the values of bits 8..6 */
13120 MOVZ_FMT_05 = 0x05,
13154 CABS_COND_FMT = 0x1c, /* MIPS3D */
13161 /* POOL32Fxf encoding of minor opcode extension field */
13199 /* POOL32I encoding of minor opcode field (bits 25..21) */
13229 /* These overlap and are distinguished by bit 16 of the instruction */
13238 /* POOL16A encoding of minor opcode field */
13245 /* POOL16B encoding of minor opcode field */
13252 /* POOL16C encoding of minor opcode field */
13272 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
13296 /* POOL16D encoding of minor opcode field */
13303 /* POOL16E encoding of minor opcode field */
13310 static int mmreg (int r)
13312 static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
13317 /* Used for 16-bit store instructions. */
13318 static int mmreg2 (int r)
13320 static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };
13325 #define uMIPS_RD(op) ((op >> 7) & 0x7)
13326 #define uMIPS_RS(op) ((op >> 4) & 0x7)
13327 #define uMIPS_RS2(op) uMIPS_RS(op)
13328 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
13329 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
13330 #define uMIPS_RS5(op) (op & 0x1f)
13332 /* Signed immediate */
13333 #define SIMM(op, start, width) \
13334 ((int32_t)(((op >> start) & ((~0U) >> (32-width))) << (32-width)) \
                >> (32-width))
13337 /* Zero-extended immediate */
13338 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
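/* For example, if bits 4..1 of the instruction word hold 0b1010, then
   ZIMM(op, 1, 4) yields 10, while SIMM(op, 1, 4) sign-extends the 4-bit
   field and yields -6. */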
13340 static void gen_addiur1sp(DisasContext *ctx)
13342 int rd = mmreg(uMIPS_RD(ctx->opcode));
13344 gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
13347 static void gen_addiur2(DisasContext *ctx)
13349 static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
13350 int rd = mmreg(uMIPS_RD(ctx->opcode));
13351 int rs = mmreg(uMIPS_RS(ctx->opcode));
13353 gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
13356 static void gen_addiusp(DisasContext *ctx)
13358 int encoded = ZIMM(ctx->opcode, 1, 9);
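/* ADDIUSP uses a non-linear 9-bit encoding: 0 and 1 mean +256 and +257,
   2..255 are taken literally, 256..509 map to -256..-3, and 510/511 map to
   -258/-257; the decoded value is then scaled by 4 bytes. */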
13361 if (encoded <= 1) {
13362 decoded = 256 + encoded;
13363 } else if (encoded <= 255) {
13365 } else if (encoded <= 509) {
13366 decoded = encoded - 512;
13368 decoded = encoded - 768;
13371 gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
13374 static void gen_addius5(DisasContext *ctx)
13376 int imm = SIMM(ctx->opcode, 1, 4);
13377 int rd = (ctx->opcode >> 5) & 0x1f;
13379 gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
13382 static void gen_andi16(DisasContext *ctx)
13384 static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
13385 31, 32, 63, 64, 255, 32768, 65535 };
13386 int rd = mmreg(uMIPS_RD(ctx->opcode));
13387 int rs = mmreg(uMIPS_RS(ctx->opcode));
13388 int encoded = ZIMM(ctx->opcode, 0, 4);
13390 gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
13393 static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
13394 int base, int16_t offset)
13399 if (ctx->hflags & MIPS_HFLAG_BMASK) {
13400 generate_exception_end(ctx, EXCP_RI);
13404 t0 = tcg_temp_new();
13406 gen_base_offset_addr(ctx, t0, base, offset);
13408 t1 = tcg_const_tl(reglist);
13409 t2 = tcg_const_i32(ctx->mem_idx);
13411 save_cpu_state(ctx, 1);
13414 gen_helper_lwm(cpu_env, t0, t1, t2);
13417 gen_helper_swm(cpu_env, t0, t1, t2);
13419 #ifdef TARGET_MIPS64
13421 gen_helper_ldm(cpu_env, t0, t1, t2);
13424 gen_helper_sdm(cpu_env, t0, t1, t2);
13430 tcg_temp_free_i32(t2);
13434 static void gen_pool16c_insn(DisasContext *ctx)
13436 int rd = mmreg((ctx->opcode >> 3) & 0x7);
13437 int rs = mmreg(ctx->opcode & 0x7);
13439 switch (((ctx->opcode) >> 4) & 0x3f) {
13444 gen_logic(ctx, OPC_NOR, rd, rs, 0);
13450 gen_logic(ctx, OPC_XOR, rd, rd, rs);
13456 gen_logic(ctx, OPC_AND, rd, rd, rs);
13462 gen_logic(ctx, OPC_OR, rd, rd, rs);
13469 static const int lwm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
13470 int offset = ZIMM(ctx->opcode, 0, 4);
13472 gen_ldst_multiple(ctx, LWM32, lwm_convert[(ctx->opcode >> 4) & 0x3],
13481 static const int swm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
13482 int offset = ZIMM(ctx->opcode, 0, 4);
13484 gen_ldst_multiple(ctx, SWM32, swm_convert[(ctx->opcode >> 4) & 0x3],
13491 int reg = ctx->opcode & 0x1f;
13493 gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0, 4);
13499 int reg = ctx->opcode & 0x1f;
13500 gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0, 0);
13501 /* Let normal delay slot handling in our caller take us
13502 to the branch target. */
13507 gen_compute_branch(ctx, OPC_JALR, 2, ctx->opcode & 0x1f, 31, 0, 4);
13508 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
13512 gen_compute_branch(ctx, OPC_JALR, 2, ctx->opcode & 0x1f, 31, 0, 2);
13513 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
13517 gen_HILO(ctx, OPC_MFHI, 0, uMIPS_RS5(ctx->opcode));
13521 gen_HILO(ctx, OPC_MFLO, 0, uMIPS_RS5(ctx->opcode));
13524 generate_exception_end(ctx, EXCP_BREAK);
13527 if (is_uhi(extract32(ctx->opcode, 0, 4))) {
13528 gen_helper_do_semihosting(cpu_env);
13530 /* XXX: not clear which exception should be raised
13531 * when in debug mode...
13533 check_insn(ctx, ISA_MIPS32);
13534 generate_exception_end(ctx, EXCP_DBp);
13537 case JRADDIUSP + 0:
13538 case JRADDIUSP + 1:
13540 int imm = ZIMM(ctx->opcode, 0, 5);
13541 gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0, 0);
13542 gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
13543 /* Let normal delay slot handling in our caller take us
13544 to the branch target. */
13548 generate_exception_end(ctx, EXCP_RI);
13553 static inline void gen_movep(DisasContext *ctx, int enc_dest, int enc_rt,
13556 int rd, rs, re, rt;
13557 static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
13558 static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
13559 static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
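/* MOVEP copies two GPRs at once: enc_dest selects one of eight destination
   pairs (a1/a2, a1/a3, a2/a3, a0/s5, a0/s6, a0/a1, a0/a2, a0/a3), while the
   source encodings select from zero, s1, v0, v1, s0, s2, s3 and s4. */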
13560 rd = rd_enc[enc_dest];
13561 re = re_enc[enc_dest];
13562 rs = rs_rt_enc[enc_rs];
13563 rt = rs_rt_enc[enc_rt];
13565 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
13567 tcg_gen_movi_tl(cpu_gpr[rd], 0);
13570 tcg_gen_mov_tl(cpu_gpr[re], cpu_gpr[rt]);
13572 tcg_gen_movi_tl(cpu_gpr[re], 0);
13576 static void gen_pool16c_r6_insn(DisasContext *ctx)
13578 int rt = mmreg((ctx->opcode >> 7) & 0x7);
13579 int rs = mmreg((ctx->opcode >> 4) & 0x7);
13581 switch (ctx->opcode & 0xf) {
13583 gen_logic(ctx, OPC_NOR, rt, rs, 0);
13586 gen_logic(ctx, OPC_AND, rt, rt, rs);
13590 int lwm_converted = 0x11 + extract32(ctx->opcode, 8, 2);
13591 int offset = extract32(ctx->opcode, 4, 4);
13592 gen_ldst_multiple(ctx, LWM32, lwm_converted, 29, offset << 2);
13595 case R6_JRC16: /* JRCADDIUSP */
13596 if ((ctx->opcode >> 4) & 1) {
13598 int imm = extract32(ctx->opcode, 5, 5);
13599 gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0, 0);
13600 gen_arith_imm(ctx, OPC_ADDIU, 29, 29, imm << 2);
13603 rs = extract32(ctx->opcode, 5, 5);
13604 gen_compute_branch(ctx, OPC_JR, 2, rs, 0, 0, 0);
13616 int enc_dest = uMIPS_RD(ctx->opcode);
13617 int enc_rt = uMIPS_RS2(ctx->opcode);
13618 int enc_rs = (ctx->opcode & 3) | ((ctx->opcode >> 1) & 4);
13619 gen_movep(ctx, enc_dest, enc_rt, enc_rs);
13623 gen_logic(ctx, OPC_XOR, rt, rt, rs);
13626 gen_logic(ctx, OPC_OR, rt, rt, rs);
13630 int swm_converted = 0x11 + extract32(ctx->opcode, 8, 2);
13631 int offset = extract32(ctx->opcode, 4, 4);
13632 gen_ldst_multiple(ctx, SWM32, swm_converted, 29, offset << 2);
13635 case JALRC16: /* BREAK16, SDBBP16 */
13636 switch (ctx->opcode & 0x3f) {
13638 case JALRC16 + 0x20:
13640 gen_compute_branch(ctx, OPC_JALR, 2, (ctx->opcode >> 5) & 0x1f,
13645 generate_exception(ctx, EXCP_BREAK);
13649 if (is_uhi(extract32(ctx->opcode, 6, 4))) {
13650 gen_helper_do_semihosting(cpu_env);
13652 if (ctx->hflags & MIPS_HFLAG_SBRI) {
13653 generate_exception(ctx, EXCP_RI);
13655 generate_exception(ctx, EXCP_DBp);
13662 generate_exception(ctx, EXCP_RI);
13667 static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
13669 TCGv t0 = tcg_temp_new();
13670 TCGv t1 = tcg_temp_new();
13672 gen_load_gpr(t0, base);
13675 gen_load_gpr(t1, index);
13676 tcg_gen_shli_tl(t1, t1, 2);
13677 gen_op_addr_add(ctx, t0, t1, t0);
13680 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
13681 gen_store_gpr(t1, rd);
13687 static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
13688 int base, int16_t offset)
13692 if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31) {
13693 generate_exception_end(ctx, EXCP_RI);
13697 t0 = tcg_temp_new();
13698 t1 = tcg_temp_new();
13700 gen_base_offset_addr(ctx, t0, base, offset);
13705 generate_exception_end(ctx, EXCP_RI);
13708 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
13709 gen_store_gpr(t1, rd);
13710 tcg_gen_movi_tl(t1, 4);
13711 gen_op_addr_add(ctx, t0, t0, t1);
13712 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);
13713 gen_store_gpr(t1, rd+1);
13716 gen_load_gpr(t1, rd);
13717 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
13718 tcg_gen_movi_tl(t1, 4);
13719 gen_op_addr_add(ctx, t0, t0, t1);
13720 gen_load_gpr(t1, rd+1);
13721 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
13723 #ifdef TARGET_MIPS64
13726 generate_exception_end(ctx, EXCP_RI);
13729 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
13730 gen_store_gpr(t1, rd);
13731 tcg_gen_movi_tl(t1, 8);
13732 gen_op_addr_add(ctx, t0, t0, t1);
13733 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TEQ);
13734 gen_store_gpr(t1, rd+1);
13737 gen_load_gpr(t1, rd);
13738 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
13739 tcg_gen_movi_tl(t1, 8);
13740 gen_op_addr_add(ctx, t0, t0, t1);
13741 gen_load_gpr(t1, rd+1);
13742 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ);
13750 static void gen_sync(int stype)
13752 TCGBar tcg_mo = TCG_BAR_SC;
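/* Map the SYNC stype hint onto TCG memory-barrier flags; any hint we do not
   model precisely falls back to a full barrier. */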
13755 case 0x4: /* SYNC_WMB */
13756 tcg_mo |= TCG_MO_ST_ST;
13758 case 0x10: /* SYNC_MB */
13759 tcg_mo |= TCG_MO_ALL;
13761 case 0x11: /* SYNC_ACQUIRE */
13762 tcg_mo |= TCG_MO_LD_LD | TCG_MO_LD_ST;
13764 case 0x12: /* SYNC_RELEASE */
13765 tcg_mo |= TCG_MO_ST_ST | TCG_MO_LD_ST;
13767 case 0x13: /* SYNC_RMB */
13768 tcg_mo |= TCG_MO_LD_LD;
13771 tcg_mo |= TCG_MO_ALL;
13775 tcg_gen_mb(tcg_mo);
13778 static void gen_pool32axf (CPUMIPSState *env, DisasContext *ctx, int rt, int rs)
13780 int extension = (ctx->opcode >> 6) & 0x3f;
13781 int minor = (ctx->opcode >> 12) & 0xf;
13782 uint32_t mips32_op;
13784 switch (extension) {
13786 mips32_op = OPC_TEQ;
13789 mips32_op = OPC_TGE;
13792 mips32_op = OPC_TGEU;
13795 mips32_op = OPC_TLT;
13798 mips32_op = OPC_TLTU;
13801 mips32_op = OPC_TNE;
13803 gen_trap(ctx, mips32_op, rs, rt, -1);
13805 #ifndef CONFIG_USER_ONLY
13808 check_cp0_enabled(ctx);
13810 /* Treat as NOP. */
13813 gen_mfc0(ctx, cpu_gpr[rt], rs, (ctx->opcode >> 11) & 0x7);
13817 check_cp0_enabled(ctx);
13819 TCGv t0 = tcg_temp_new();
13821 gen_load_gpr(t0, rt);
13822 gen_mtc0(ctx, t0, rs, (ctx->opcode >> 11) & 0x7);
13828 switch (minor & 3) {
13830 gen_muldiv(ctx, OPC_MADD, (ctx->opcode >> 14) & 3, rs, rt);
13833 gen_muldiv(ctx, OPC_MADDU, (ctx->opcode >> 14) & 3, rs, rt);
13836 gen_muldiv(ctx, OPC_MSUB, (ctx->opcode >> 14) & 3, rs, rt);
13839 gen_muldiv(ctx, OPC_MSUBU, (ctx->opcode >> 14) & 3, rs, rt);
13842 goto pool32axf_invalid;
13846 switch (minor & 3) {
13848 gen_muldiv(ctx, OPC_MULT, (ctx->opcode >> 14) & 3, rs, rt);
13851 gen_muldiv(ctx, OPC_MULTU, (ctx->opcode >> 14) & 3, rs, rt);
13854 goto pool32axf_invalid;
13860 check_insn(ctx, ISA_MIPS32R6);
13861 gen_bitswap(ctx, OPC_BITSWAP, rs, rt);
13864 gen_bshfl(ctx, OPC_SEB, rs, rt);
13867 gen_bshfl(ctx, OPC_SEH, rs, rt);
13870 mips32_op = OPC_CLO;
13873 mips32_op = OPC_CLZ;
13875 check_insn(ctx, ISA_MIPS32);
13876 gen_cl(ctx, mips32_op, rt, rs);
13879 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13880 gen_rdhwr(ctx, rt, rs, 0);
13883 gen_bshfl(ctx, OPC_WSBH, rs, rt);
13886 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13887 mips32_op = OPC_MULT;
13890 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13891 mips32_op = OPC_MULTU;
13894 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13895 mips32_op = OPC_DIV;
13898 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13899 mips32_op = OPC_DIVU;
13902 check_insn(ctx, ISA_MIPS32);
13903 gen_muldiv(ctx, mips32_op, 0, rs, rt);
13906 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13907 mips32_op = OPC_MADD;
13910 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13911 mips32_op = OPC_MADDU;
13914 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13915 mips32_op = OPC_MSUB;
13918 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13919 mips32_op = OPC_MSUBU;
13921 check_insn(ctx, ISA_MIPS32);
13922 gen_muldiv(ctx, mips32_op, 0, rs, rt);
13925 goto pool32axf_invalid;
13936 generate_exception_err(ctx, EXCP_CpU, 2);
13939 goto pool32axf_invalid;
13944 case JALR: /* JALRC */
13945 case JALR_HB: /* JALRC_HB */
13946 if (ctx->insn_flags & ISA_MIPS32R6) {
13947 /* JALRC, JALRC_HB */
13948 gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 0);
13950 /* JALR, JALR_HB */
13951 gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 4);
13952 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
13957 check_insn_opc_removed(ctx, ISA_MIPS32R6);
13958 gen_compute_branch(ctx, OPC_JALR, 4, rs, rt, 0, 2);
13959 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
13962 goto pool32axf_invalid;
13968 check_cp0_enabled(ctx);
13969 check_insn(ctx, ISA_MIPS32R2);
13970 gen_load_srsgpr(rs, rt);
13973 check_cp0_enabled(ctx);
13974 check_insn(ctx, ISA_MIPS32R2);
13975 gen_store_srsgpr(rs, rt);
13978 goto pool32axf_invalid;
13981 #ifndef CONFIG_USER_ONLY
13985 mips32_op = OPC_TLBP;
13988 mips32_op = OPC_TLBR;
13991 mips32_op = OPC_TLBWI;
13994 mips32_op = OPC_TLBWR;
13997 mips32_op = OPC_TLBINV;
14000 mips32_op = OPC_TLBINVF;
14003 mips32_op = OPC_WAIT;
14006 mips32_op = OPC_DERET;
14009 mips32_op = OPC_ERET;
14011 gen_cp0(env, ctx, mips32_op, rt, rs);
14014 goto pool32axf_invalid;
14020 check_cp0_enabled(ctx);
14022 TCGv t0 = tcg_temp_new();
14024 save_cpu_state(ctx, 1);
14025 gen_helper_di(t0, cpu_env);
14026 gen_store_gpr(t0, rs);
14027 /* Stop translation as we may have switched the execution mode */
14028 ctx->base.is_jmp = DISAS_STOP;
14033 check_cp0_enabled(ctx);
14035 TCGv t0 = tcg_temp_new();
14037 save_cpu_state(ctx, 1);
14038 gen_helper_ei(t0, cpu_env);
14039 gen_store_gpr(t0, rs);
14040 /* DISAS_STOP isn't sufficient; we need to ensure we break out
14041 of translated code to check for pending interrupts. */
14042 gen_save_pc(ctx->base.pc_next + 4);
14043 ctx->base.is_jmp = DISAS_EXIT;
14048 goto pool32axf_invalid;
14055 gen_sync(extract32(ctx->opcode, 16, 5));
14058 generate_exception_end(ctx, EXCP_SYSCALL);
14061 if (is_uhi(extract32(ctx->opcode, 16, 10))) {
14062 gen_helper_do_semihosting(cpu_env);
14064 check_insn(ctx, ISA_MIPS32);
14065 if (ctx->hflags & MIPS_HFLAG_SBRI) {
14066 generate_exception_end(ctx, EXCP_RI);
14068 generate_exception_end(ctx, EXCP_DBp);
14073 goto pool32axf_invalid;
14077 switch (minor & 3) {
14079 gen_HILO(ctx, OPC_MFHI, minor >> 2, rs);
14082 gen_HILO(ctx, OPC_MFLO, minor >> 2, rs);
14085 gen_HILO(ctx, OPC_MTHI, minor >> 2, rs);
14088 gen_HILO(ctx, OPC_MTLO, minor >> 2, rs);
14091 goto pool32axf_invalid;
14095 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14098 gen_HILO(ctx, OPC_MFHI, 0, rs);
14101 gen_HILO(ctx, OPC_MFLO, 0, rs);
14104 gen_HILO(ctx, OPC_MTHI, 0, rs);
14107 gen_HILO(ctx, OPC_MTLO, 0, rs);
14110 goto pool32axf_invalid;
14115 MIPS_INVAL("pool32axf");
14116 generate_exception_end(ctx, EXCP_RI);
14121 /* Values for microMIPS fmt field. Variable-width, depending on which
14122 formats the instruction supports. */
14141 static void gen_pool32fxf(DisasContext *ctx, int rt, int rs)
14143 int extension = (ctx->opcode >> 6) & 0x3ff;
14144 uint32_t mips32_op;
14146 #define FLOAT_1BIT_FMT(opc, fmt) ((fmt << 8) | opc)
14147 #define FLOAT_2BIT_FMT(opc, fmt) ((fmt << 7) | opc)
14148 #define COND_FLOAT_MOV(opc, cond) ((cond << 7) | opc)
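/*
 * How these keys are built (a note inferred from the macro definitions
 * above, not taken from the original comments): the switch below matches
 * on the 10-bit extension field extracted at the top of the function, so
 * each case label must pack the fmt selector just above the minor opcode.
 * For example:
 *
 *     FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D) == (FMT_SD_D << 8) | RSQRT_FMT
 *
 * i.e. a 1-bit fmt selector lands in bit 8, while the 2-bit selectors
 * used with FLOAT_2BIT_FMT occupy bits 7..8.
 */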
14150 switch (extension) {
14151 case FLOAT_1BIT_FMT(CFC1, 0):
14152 mips32_op = OPC_CFC1;
14154 case FLOAT_1BIT_FMT(CTC1, 0):
14155 mips32_op = OPC_CTC1;
14157 case FLOAT_1BIT_FMT(MFC1, 0):
14158 mips32_op = OPC_MFC1;
14160 case FLOAT_1BIT_FMT(MTC1, 0):
14161 mips32_op = OPC_MTC1;
14163 case FLOAT_1BIT_FMT(MFHC1, 0):
14164 mips32_op = OPC_MFHC1;
14166 case FLOAT_1BIT_FMT(MTHC1, 0):
14167 mips32_op = OPC_MTHC1;
14169 gen_cp1(ctx, mips32_op, rt, rs);
14172 /* Reciprocal square root */
14173 case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
14174 mips32_op = OPC_RSQRT_S;
14176 case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
14177 mips32_op = OPC_RSQRT_D;
14181 case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
14182 mips32_op = OPC_SQRT_S;
14184 case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
14185 mips32_op = OPC_SQRT_D;
14189 case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
14190 mips32_op = OPC_RECIP_S;
14192 case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
14193 mips32_op = OPC_RECIP_D;
14197 case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
14198 mips32_op = OPC_FLOOR_L_S;
14200 case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
14201 mips32_op = OPC_FLOOR_L_D;
14203 case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
14204 mips32_op = OPC_FLOOR_W_S;
14206 case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
14207 mips32_op = OPC_FLOOR_W_D;
14211 case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
14212 mips32_op = OPC_CEIL_L_S;
14214 case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
14215 mips32_op = OPC_CEIL_L_D;
14217 case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
14218 mips32_op = OPC_CEIL_W_S;
14220 case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
14221 mips32_op = OPC_CEIL_W_D;
14225 case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
14226 mips32_op = OPC_TRUNC_L_S;
14228 case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
14229 mips32_op = OPC_TRUNC_L_D;
14231 case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
14232 mips32_op = OPC_TRUNC_W_S;
14234 case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
14235 mips32_op = OPC_TRUNC_W_D;
14239 case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
14240 mips32_op = OPC_ROUND_L_S;
14242 case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
14243 mips32_op = OPC_ROUND_L_D;
14245 case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
14246 mips32_op = OPC_ROUND_W_S;
14248 case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
14249 mips32_op = OPC_ROUND_W_D;
14252 /* Floating-point to integer conversion */
14253 case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
14254 mips32_op = OPC_CVT_L_S;
14256 case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
14257 mips32_op = OPC_CVT_L_D;
14259 case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
14260 mips32_op = OPC_CVT_W_S;
14262 case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
14263 mips32_op = OPC_CVT_W_D;
14266 /* Paired-single conversions */
14267 case FLOAT_1BIT_FMT(CVT_S_PL, 0):
14268 mips32_op = OPC_CVT_S_PL;
14270 case FLOAT_1BIT_FMT(CVT_S_PU, 0):
14271 mips32_op = OPC_CVT_S_PU;
14273 case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
14274 mips32_op = OPC_CVT_PW_PS;
14276 case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
14277 mips32_op = OPC_CVT_PS_PW;
14280 /* Floating-point moves */
14281 case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
14282 mips32_op = OPC_MOV_S;
14284 case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
14285 mips32_op = OPC_MOV_D;
14287 case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
14288 mips32_op = OPC_MOV_PS;
14291 /* Absolute value */
14292 case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
14293 mips32_op = OPC_ABS_S;
14295 case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
14296 mips32_op = OPC_ABS_D;
14298 case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
14299 mips32_op = OPC_ABS_PS;
14303 case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
14304 mips32_op = OPC_NEG_S;
14306 case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
14307 mips32_op = OPC_NEG_D;
14309 case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
14310 mips32_op = OPC_NEG_PS;
14313 /* Reciprocal square root step */
14314 case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
14315 mips32_op = OPC_RSQRT1_S;
14317 case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
14318 mips32_op = OPC_RSQRT1_D;
14320 case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
14321 mips32_op = OPC_RSQRT1_PS;
14324 /* Reciprocal step */
14325 case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
14326 mips32_op = OPC_RECIP1_S;
14328 case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
14329 mips32_op = OPC_RECIP1_D;
14331 case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
14332 mips32_op = OPC_RECIP1_PS;
14335 /* Conversions to double */
14336 case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
14337 mips32_op = OPC_CVT_D_S;
14339 case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
14340 mips32_op = OPC_CVT_D_W;
14342 case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
14343 mips32_op = OPC_CVT_D_L;
14346 /* Conversions to single */
14347 case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
14348 mips32_op = OPC_CVT_S_D;
14350 case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
14351 mips32_op = OPC_CVT_S_W;
14353 case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
14354 mips32_op = OPC_CVT_S_L;
14356 gen_farith(ctx, mips32_op, -1, rs, rt, 0);
14359 /* Conditional moves on floating-point condition codes */
14360 case COND_FLOAT_MOV(MOVT, 0):
14361 case COND_FLOAT_MOV(MOVT, 1):
14362 case COND_FLOAT_MOV(MOVT, 2):
14363 case COND_FLOAT_MOV(MOVT, 3):
14364 case COND_FLOAT_MOV(MOVT, 4):
14365 case COND_FLOAT_MOV(MOVT, 5):
14366 case COND_FLOAT_MOV(MOVT, 6):
14367 case COND_FLOAT_MOV(MOVT, 7):
14368 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14369 gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
14371 case COND_FLOAT_MOV(MOVF, 0):
14372 case COND_FLOAT_MOV(MOVF, 1):
14373 case COND_FLOAT_MOV(MOVF, 2):
14374 case COND_FLOAT_MOV(MOVF, 3):
14375 case COND_FLOAT_MOV(MOVF, 4):
14376 case COND_FLOAT_MOV(MOVF, 5):
14377 case COND_FLOAT_MOV(MOVF, 6):
14378 case COND_FLOAT_MOV(MOVF, 7):
14379 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14380 gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
14383 MIPS_INVAL("pool32fxf");
14384 generate_exception_end(ctx, EXCP_RI);
14389 static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
14393 int rt, rs, rd, rr;
14395 uint32_t op, minor, minor2, mips32_op;
14396 uint32_t cond, fmt, cc;
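/*
 * A 32-bit microMIPS instruction is encoded as two halfwords; the first
 * one is already in ctx->opcode, so fetch the second halfword at
 * pc_next + 2 and append it before extracting the register fields and
 * the major opcode below.
 */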
14398 insn = cpu_lduw_code(env, ctx->base.pc_next + 2);
14399 ctx->opcode = (ctx->opcode << 16) | insn;
14401 rt = (ctx->opcode >> 21) & 0x1f;
14402 rs = (ctx->opcode >> 16) & 0x1f;
14403 rd = (ctx->opcode >> 11) & 0x1f;
14404 rr = (ctx->opcode >> 6) & 0x1f;
14405 imm = (int16_t) ctx->opcode;
14407 op = (ctx->opcode >> 26) & 0x3f;
14410 minor = ctx->opcode & 0x3f;
14413 minor = (ctx->opcode >> 6) & 0xf;
14416 mips32_op = OPC_SLL;
14419 mips32_op = OPC_SRA;
14422 mips32_op = OPC_SRL;
14425 mips32_op = OPC_ROTR;
14427 gen_shift_imm(ctx, mips32_op, rt, rs, rd);
14430 check_insn(ctx, ISA_MIPS32R6);
14431 gen_cond_move(ctx, OPC_SELEQZ, rd, rs, rt);
14434 check_insn(ctx, ISA_MIPS32R6);
14435 gen_cond_move(ctx, OPC_SELNEZ, rd, rs, rt);
14438 check_insn(ctx, ISA_MIPS32R6);
14439 gen_rdhwr(ctx, rt, rs, extract32(ctx->opcode, 11, 3));
14442 goto pool32a_invalid;
14446 minor = (ctx->opcode >> 6) & 0xf;
14450 mips32_op = OPC_ADD;
14453 mips32_op = OPC_ADDU;
14456 mips32_op = OPC_SUB;
14459 mips32_op = OPC_SUBU;
14462 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14463 mips32_op = OPC_MUL;
14465 gen_arith(ctx, mips32_op, rd, rs, rt);
14469 mips32_op = OPC_SLLV;
14472 mips32_op = OPC_SRLV;
14475 mips32_op = OPC_SRAV;
14478 mips32_op = OPC_ROTRV;
14480 gen_shift(ctx, mips32_op, rd, rs, rt);
14482 /* Logical operations */
14484 mips32_op = OPC_AND;
14487 mips32_op = OPC_OR;
14490 mips32_op = OPC_NOR;
14493 mips32_op = OPC_XOR;
14495 gen_logic(ctx, mips32_op, rd, rs, rt);
14497 /* Set less than */
14499 mips32_op = OPC_SLT;
14502 mips32_op = OPC_SLTU;
14504 gen_slt(ctx, mips32_op, rd, rs, rt);
14507 goto pool32a_invalid;
14511 minor = (ctx->opcode >> 6) & 0xf;
14513 /* Conditional moves */
14514 case MOVN: /* MUL */
14515 if (ctx->insn_flags & ISA_MIPS32R6) {
14517 gen_r6_muldiv(ctx, R6_OPC_MUL, rd, rs, rt);
14520 gen_cond_move(ctx, OPC_MOVN, rd, rs, rt);
14523 case MOVZ: /* MUH */
14524 if (ctx->insn_flags & ISA_MIPS32R6) {
14526 gen_r6_muldiv(ctx, R6_OPC_MUH, rd, rs, rt);
14529 gen_cond_move(ctx, OPC_MOVZ, rd, rs, rt);
14533 check_insn(ctx, ISA_MIPS32R6);
14534 gen_r6_muldiv(ctx, R6_OPC_MULU, rd, rs, rt);
14537 check_insn(ctx, ISA_MIPS32R6);
14538 gen_r6_muldiv(ctx, R6_OPC_MUHU, rd, rs, rt);
14540 case LWXS: /* DIV */
14541 if (ctx->insn_flags & ISA_MIPS32R6) {
14543 gen_r6_muldiv(ctx, R6_OPC_DIV, rd, rs, rt);
14546 gen_ldxs(ctx, rs, rt, rd);
14550 check_insn(ctx, ISA_MIPS32R6);
14551 gen_r6_muldiv(ctx, R6_OPC_MOD, rd, rs, rt);
14554 check_insn(ctx, ISA_MIPS32R6);
14555 gen_r6_muldiv(ctx, R6_OPC_DIVU, rd, rs, rt);
14558 check_insn(ctx, ISA_MIPS32R6);
14559 gen_r6_muldiv(ctx, R6_OPC_MODU, rd, rs, rt);
14562 goto pool32a_invalid;
14566 gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
14569 check_insn(ctx, ISA_MIPS32R6);
14570 gen_lsa(ctx, OPC_LSA, rd, rs, rt,
14571 extract32(ctx->opcode, 9, 2));
14574 check_insn(ctx, ISA_MIPS32R6);
14575 gen_align(ctx, 32, rd, rs, rt, extract32(ctx->opcode, 9, 2));
14578 gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
14581 gen_pool32axf(env, ctx, rt, rs);
14584 generate_exception_end(ctx, EXCP_BREAK);
14587 check_insn(ctx, ISA_MIPS32R6);
14588 generate_exception_end(ctx, EXCP_RI);
14592 MIPS_INVAL("pool32a");
14593 generate_exception_end(ctx, EXCP_RI);
14598 minor = (ctx->opcode >> 12) & 0xf;
14601 check_cp0_enabled(ctx);
14602 if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
14603 gen_cache_operation(ctx, rt, rs, imm);
14608 /* COP2: Not implemented. */
14609 generate_exception_err(ctx, EXCP_CpU, 2);
14611 #ifdef TARGET_MIPS64
14614 check_insn(ctx, ISA_MIPS3);
14615 check_mips_64(ctx);
14620 gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
14622 #ifdef TARGET_MIPS64
14625 check_insn(ctx, ISA_MIPS3);
14626 check_mips_64(ctx);
14631 gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
14634 MIPS_INVAL("pool32b");
14635 generate_exception_end(ctx, EXCP_RI);
14640 if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
14641 minor = ctx->opcode & 0x3f;
14642 check_cp1_enabled(ctx);
14645 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14646 mips32_op = OPC_ALNV_PS;
14649 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14650 mips32_op = OPC_MADD_S;
14653 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14654 mips32_op = OPC_MADD_D;
14657 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14658 mips32_op = OPC_MADD_PS;
14661 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14662 mips32_op = OPC_MSUB_S;
14665 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14666 mips32_op = OPC_MSUB_D;
14669 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14670 mips32_op = OPC_MSUB_PS;
14673 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14674 mips32_op = OPC_NMADD_S;
14677 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14678 mips32_op = OPC_NMADD_D;
14681 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14682 mips32_op = OPC_NMADD_PS;
14685 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14686 mips32_op = OPC_NMSUB_S;
14689 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14690 mips32_op = OPC_NMSUB_D;
14693 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14694 mips32_op = OPC_NMSUB_PS;
14696 gen_flt3_arith(ctx, mips32_op, rd, rr, rs, rt);
14698 case CABS_COND_FMT:
14699 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14700 cond = (ctx->opcode >> 6) & 0xf;
14701 cc = (ctx->opcode >> 13) & 0x7;
14702 fmt = (ctx->opcode >> 10) & 0x3;
14705 gen_cmpabs_s(ctx, cond, rt, rs, cc);
14708 gen_cmpabs_d(ctx, cond, rt, rs, cc);
14711 gen_cmpabs_ps(ctx, cond, rt, rs, cc);
14714 goto pool32f_invalid;
14718 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14719 cond = (ctx->opcode >> 6) & 0xf;
14720 cc = (ctx->opcode >> 13) & 0x7;
14721 fmt = (ctx->opcode >> 10) & 0x3;
14724 gen_cmp_s(ctx, cond, rt, rs, cc);
14727 gen_cmp_d(ctx, cond, rt, rs, cc);
14730 gen_cmp_ps(ctx, cond, rt, rs, cc);
14733 goto pool32f_invalid;
14737 check_insn(ctx, ISA_MIPS32R6);
14738 gen_r6_cmp_s(ctx, (ctx->opcode >> 6) & 0x1f, rt, rs, rd);
14741 check_insn(ctx, ISA_MIPS32R6);
14742 gen_r6_cmp_d(ctx, (ctx->opcode >> 6) & 0x1f, rt, rs, rd);
14745 gen_pool32fxf(ctx, rt, rs);
14749 switch ((ctx->opcode >> 6) & 0x7) {
14751 mips32_op = OPC_PLL_PS;
14754 mips32_op = OPC_PLU_PS;
14757 mips32_op = OPC_PUL_PS;
14760 mips32_op = OPC_PUU_PS;
14763 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14764 mips32_op = OPC_CVT_PS_S;
14766 gen_farith(ctx, mips32_op, rt, rs, rd, 0);
14769 goto pool32f_invalid;
14773 check_insn(ctx, ISA_MIPS32R6);
14774 switch ((ctx->opcode >> 9) & 0x3) {
14776 gen_farith(ctx, OPC_MIN_S, rt, rs, rd, 0);
14779 gen_farith(ctx, OPC_MIN_D, rt, rs, rd, 0);
14782 goto pool32f_invalid;
14787 switch ((ctx->opcode >> 6) & 0x7) {
14789 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14790 mips32_op = OPC_LWXC1;
14793 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14794 mips32_op = OPC_SWXC1;
14797 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14798 mips32_op = OPC_LDXC1;
14801 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14802 mips32_op = OPC_SDXC1;
14805 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14806 mips32_op = OPC_LUXC1;
14809 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14810 mips32_op = OPC_SUXC1;
14812 gen_flt3_ldst(ctx, mips32_op, rd, rd, rt, rs);
14815 goto pool32f_invalid;
14819 check_insn(ctx, ISA_MIPS32R6);
14820 switch ((ctx->opcode >> 9) & 0x3) {
14822 gen_farith(ctx, OPC_MAX_S, rt, rs, rd, 0);
14825 gen_farith(ctx, OPC_MAX_D, rt, rs, rd, 0);
14828 goto pool32f_invalid;
14833 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14834 fmt = (ctx->opcode >> 9) & 0x3;
14835 switch ((ctx->opcode >> 6) & 0x7) {
14839 mips32_op = OPC_RSQRT2_S;
14842 mips32_op = OPC_RSQRT2_D;
14845 mips32_op = OPC_RSQRT2_PS;
14848 goto pool32f_invalid;
14854 mips32_op = OPC_RECIP2_S;
14857 mips32_op = OPC_RECIP2_D;
14860 mips32_op = OPC_RECIP2_PS;
14863 goto pool32f_invalid;
14867 mips32_op = OPC_ADDR_PS;
14870 mips32_op = OPC_MULR_PS;
14872 gen_farith(ctx, mips32_op, rt, rs, rd, 0);
14875 goto pool32f_invalid;
14879 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt */
14880 cc = (ctx->opcode >> 13) & 0x7;
14881 fmt = (ctx->opcode >> 9) & 0x3;
14882 switch ((ctx->opcode >> 6) & 0x7) {
14883 case MOVF_FMT: /* RINT_FMT */
14884 if (ctx->insn_flags & ISA_MIPS32R6) {
14888 gen_farith(ctx, OPC_RINT_S, 0, rt, rs, 0);
14891 gen_farith(ctx, OPC_RINT_D, 0, rt, rs, 0);
14894 goto pool32f_invalid;
14900 gen_movcf_s(ctx, rs, rt, cc, 0);
14903 gen_movcf_d(ctx, rs, rt, cc, 0);
14907 gen_movcf_ps(ctx, rs, rt, cc, 0);
14910 goto pool32f_invalid;
14914 case MOVT_FMT: /* CLASS_FMT */
14915 if (ctx->insn_flags & ISA_MIPS32R6) {
14919 gen_farith(ctx, OPC_CLASS_S, 0, rt, rs, 0);
14922 gen_farith(ctx, OPC_CLASS_D, 0, rt, rs, 0);
14925 goto pool32f_invalid;
14931 gen_movcf_s(ctx, rs, rt, cc, 1);
14934 gen_movcf_d(ctx, rs, rt, cc, 1);
14938 gen_movcf_ps(ctx, rs, rt, cc, 1);
14941 goto pool32f_invalid;
14946 check_insn_opc_removed(ctx, ISA_MIPS32R6);
14949 goto pool32f_invalid;
14952 #define FINSN_3ARG_SDPS(prfx) \
14953 switch ((ctx->opcode >> 8) & 0x3) { \
14955 mips32_op = OPC_##prfx##_S; \
14958 mips32_op = OPC_##prfx##_D; \
14960 case FMT_SDPS_PS: \
14962 mips32_op = OPC_##prfx##_PS; \
14965 goto pool32f_invalid; \
14968 check_insn(ctx, ISA_MIPS32R6);
14969 switch ((ctx->opcode >> 9) & 0x3) {
14971 gen_farith(ctx, OPC_MINA_S, rt, rs, rd, 0);
14974 gen_farith(ctx, OPC_MINA_D, rt, rs, rd, 0);
14977 goto pool32f_invalid;
14981 check_insn(ctx, ISA_MIPS32R6);
14982 switch ((ctx->opcode >> 9) & 0x3) {
14984 gen_farith(ctx, OPC_MAXA_S, rt, rs, rd, 0);
14987 gen_farith(ctx, OPC_MAXA_D, rt, rs, rd, 0);
14990 goto pool32f_invalid;
14994 /* regular FP ops */
14995 switch ((ctx->opcode >> 6) & 0x3) {
14997 FINSN_3ARG_SDPS(ADD);
15000 FINSN_3ARG_SDPS(SUB);
15003 FINSN_3ARG_SDPS(MUL);
15006 fmt = (ctx->opcode >> 8) & 0x3;
15008 mips32_op = OPC_DIV_D;
15009 } else if (fmt == 0) {
15010 mips32_op = OPC_DIV_S;
15012 goto pool32f_invalid;
15016 goto pool32f_invalid;
15021 switch ((ctx->opcode >> 6) & 0x7) {
15022 case MOVN_FMT: /* SELNEZ_FMT */
15023 if (ctx->insn_flags & ISA_MIPS32R6) {
15025 switch ((ctx->opcode >> 9) & 0x3) {
15027 gen_sel_s(ctx, OPC_SELNEZ_S, rd, rt, rs);
15030 gen_sel_d(ctx, OPC_SELNEZ_D, rd, rt, rs);
15033 goto pool32f_invalid;
15037 FINSN_3ARG_SDPS(MOVN);
15041 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15042 FINSN_3ARG_SDPS(MOVN);
15044 case MOVZ_FMT: /* SELEQZ_FMT */
15045 if (ctx->insn_flags & ISA_MIPS32R6) {
15047 switch ((ctx->opcode >> 9) & 0x3) {
15049 gen_sel_s(ctx, OPC_SELEQZ_S, rd, rt, rs);
15052 gen_sel_d(ctx, OPC_SELEQZ_D, rd, rt, rs);
15055 goto pool32f_invalid;
15059 FINSN_3ARG_SDPS(MOVZ);
15063 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15064 FINSN_3ARG_SDPS(MOVZ);
15067 check_insn(ctx, ISA_MIPS32R6);
15068 switch ((ctx->opcode >> 9) & 0x3) {
15070 gen_sel_s(ctx, OPC_SEL_S, rd, rt, rs);
15073 gen_sel_d(ctx, OPC_SEL_D, rd, rt, rs);
15076 goto pool32f_invalid;
15080 check_insn(ctx, ISA_MIPS32R6);
15081 switch ((ctx->opcode >> 9) & 0x3) {
15083 mips32_op = OPC_MADDF_S;
15086 mips32_op = OPC_MADDF_D;
15089 goto pool32f_invalid;
15093 check_insn(ctx, ISA_MIPS32R6);
15094 switch ((ctx->opcode >> 9) & 0x3) {
15096 mips32_op = OPC_MSUBF_S;
15099 mips32_op = OPC_MSUBF_D;
15102 goto pool32f_invalid;
15106 goto pool32f_invalid;
15110 gen_farith(ctx, mips32_op, rt, rs, rd, 0);
15114 MIPS_INVAL("pool32f");
15115 generate_exception_end(ctx, EXCP_RI);
15119 generate_exception_err(ctx, EXCP_CpU, 1);
15123 minor = (ctx->opcode >> 21) & 0x1f;
15126 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15127 gen_compute_branch(ctx, OPC_BLTZ, 4, rs, -1, imm << 1, 4);
15130 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15131 gen_compute_branch(ctx, OPC_BLTZAL, 4, rs, -1, imm << 1, 4);
15132 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
15135 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15136 gen_compute_branch(ctx, OPC_BLTZAL, 4, rs, -1, imm << 1, 2);
15137 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
15140 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15141 gen_compute_branch(ctx, OPC_BGEZ, 4, rs, -1, imm << 1, 4);
15144 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15145 gen_compute_branch(ctx, OPC_BGEZAL, 4, rs, -1, imm << 1, 4);
15146 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
15149 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15150 gen_compute_branch(ctx, OPC_BGEZAL, 4, rs, -1, imm << 1, 2);
15151 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
15154 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15155 gen_compute_branch(ctx, OPC_BLEZ, 4, rs, -1, imm << 1, 4);
15158 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15159 gen_compute_branch(ctx, OPC_BGTZ, 4, rs, -1, imm << 1, 4);
15163 case TLTI: /* BC1EQZC */
15164 if (ctx->insn_flags & ISA_MIPS32R6) {
15166 check_cp1_enabled(ctx);
15167 gen_compute_branch1_r6(ctx, OPC_BC1EQZ, rs, imm << 1, 0);
15170 mips32_op = OPC_TLTI;
15174 case TGEI: /* BC1NEZC */
15175 if (ctx->insn_flags & ISA_MIPS32R6) {
15177 check_cp1_enabled(ctx);
15178 gen_compute_branch1_r6(ctx, OPC_BC1NEZ, rs, imm << 1, 0);
15181 mips32_op = OPC_TGEI;
15186 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15187 mips32_op = OPC_TLTIU;
15190 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15191 mips32_op = OPC_TGEIU;
15193 case TNEI: /* SYNCI */
15194 if (ctx->insn_flags & ISA_MIPS32R6) {
15196 /* Break the TB to be able to sync copied instructions
15198 ctx->base.is_jmp = DISAS_STOP;
15201 mips32_op = OPC_TNEI;
15206 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15207 mips32_op = OPC_TEQI;
15209 gen_trap(ctx, mips32_op, rs, -1, imm);
15214 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15215 gen_compute_branch(ctx, minor == BNEZC ? OPC_BNE : OPC_BEQ,
15216 4, rs, 0, imm << 1, 0);
15217 /* Compact branches don't have a delay slot, so just let
15218 the normal delay slot handling take us to the branch
15222 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15223 gen_logic_imm(ctx, OPC_LUI, rs, 0, imm);
15226 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15227 /* Break the TB to be able to sync copied instructions
15229 ctx->base.is_jmp = DISAS_STOP;
15233 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15234 /* COP2: Not implemented. */
15235 generate_exception_err(ctx, EXCP_CpU, 2);
15238 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15239 mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1FANY2 : OPC_BC1F;
15242 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15243 mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1TANY2 : OPC_BC1T;
15246 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15247 mips32_op = OPC_BC1FANY4;
15250 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15251 mips32_op = OPC_BC1TANY4;
15254 check_insn(ctx, ASE_MIPS3D);
15257 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
15258 check_cp1_enabled(ctx);
15259 gen_compute_branch1(ctx, mips32_op,
15260 (ctx->opcode >> 18) & 0x7, imm << 1);
15262 generate_exception_err(ctx, EXCP_CpU, 1);
15267 /* MIPS DSP: not implemented */
15270 MIPS_INVAL("pool32i");
15271 generate_exception_end(ctx, EXCP_RI);
15276 minor = (ctx->opcode >> 12) & 0xf;
15277 offset = sextract32(ctx->opcode, 0,
15278 (ctx->insn_flags & ISA_MIPS32R6) ? 9 : 12);
15281 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15282 mips32_op = OPC_LWL;
15285 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15286 mips32_op = OPC_SWL;
15289 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15290 mips32_op = OPC_LWR;
15293 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15294 mips32_op = OPC_SWR;
15296 #if defined(TARGET_MIPS64)
15298 check_insn(ctx, ISA_MIPS3);
15299 check_mips_64(ctx);
15300 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15301 mips32_op = OPC_LDL;
15304 check_insn(ctx, ISA_MIPS3);
15305 check_mips_64(ctx);
15306 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15307 mips32_op = OPC_SDL;
15310 check_insn(ctx, ISA_MIPS3);
15311 check_mips_64(ctx);
15312 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15313 mips32_op = OPC_LDR;
15316 check_insn(ctx, ISA_MIPS3);
15317 check_mips_64(ctx);
15318 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15319 mips32_op = OPC_SDR;
15322 check_insn(ctx, ISA_MIPS3);
15323 check_mips_64(ctx);
15324 mips32_op = OPC_LWU;
15327 check_insn(ctx, ISA_MIPS3);
15328 check_mips_64(ctx);
15329 mips32_op = OPC_LLD;
15333 mips32_op = OPC_LL;
15336 gen_ld(ctx, mips32_op, rt, rs, offset);
15339 gen_st(ctx, mips32_op, rt, rs, offset);
15342 gen_st_cond(ctx, OPC_SC, rt, rs, offset);
15344 #if defined(TARGET_MIPS64)
15346 check_insn(ctx, ISA_MIPS3);
15347 check_mips_64(ctx);
15348 gen_st_cond(ctx, OPC_SCD, rt, rs, offset);
15353 MIPS_INVAL("pool32c ld-eva");
15354 generate_exception_end(ctx, EXCP_RI);
15357 check_cp0_enabled(ctx);
15359 minor2 = (ctx->opcode >> 9) & 0x7;
15360 offset = sextract32(ctx->opcode, 0, 9);
15363 mips32_op = OPC_LBUE;
15366 mips32_op = OPC_LHUE;
15369 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15370 mips32_op = OPC_LWLE;
15373 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15374 mips32_op = OPC_LWRE;
15377 mips32_op = OPC_LBE;
15380 mips32_op = OPC_LHE;
15383 mips32_op = OPC_LLE;
15386 mips32_op = OPC_LWE;
15392 MIPS_INVAL("pool32c st-eva");
15393 generate_exception_end(ctx, EXCP_RI);
15396 check_cp0_enabled(ctx);
15398 minor2 = (ctx->opcode >> 9) & 0x7;
15399 offset = sextract32(ctx->opcode, 0, 9);
15402 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15403 mips32_op = OPC_SWLE;
15406 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15407 mips32_op = OPC_SWRE;
15410 /* Treat as no-op */
15411 if ((ctx->insn_flags & ISA_MIPS32R6) && (rt >= 24)) {
15412 /* hint codes 24-31 are reserved and signal RI */
15413 generate_exception(ctx, EXCP_RI);
15417 /* Treat as no-op */
15418 if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
15419 gen_cache_operation(ctx, rt, rs, offset);
15423 mips32_op = OPC_SBE;
15426 mips32_op = OPC_SHE;
15429 gen_st_cond(ctx, OPC_SCE, rt, rs, offset);
15432 mips32_op = OPC_SWE;
15437 /* Treat as no-op */
15438 if ((ctx->insn_flags & ISA_MIPS32R6) && (rt >= 24)) {
15439 /* hint codes 24-31 are reserved and signal RI */
15440 generate_exception(ctx, EXCP_RI);
15444 MIPS_INVAL("pool32c");
15445 generate_exception_end(ctx, EXCP_RI);
15449 case ADDI32: /* AUI, LUI */
15450 if (ctx->insn_flags & ISA_MIPS32R6) {
15452 gen_logic_imm(ctx, OPC_LUI, rt, rs, imm);
15455 mips32_op = OPC_ADDI;
15460 mips32_op = OPC_ADDIU;
15462 gen_arith_imm(ctx, mips32_op, rt, rs, imm);
15465 /* Logical operations */
15467 mips32_op = OPC_ORI;
15470 mips32_op = OPC_XORI;
15473 mips32_op = OPC_ANDI;
15475 gen_logic_imm(ctx, mips32_op, rt, rs, imm);
15478 /* Set less than immediate */
15480 mips32_op = OPC_SLTI;
15483 mips32_op = OPC_SLTIU;
15485 gen_slt_imm(ctx, mips32_op, rt, rs, imm);
15488 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15489 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
15490 gen_compute_branch(ctx, OPC_JALX, 4, rt, rs, offset, 4);
15491 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
15493 case JALS32: /* BOVC, BEQC, BEQZALC */
15494 if (ctx->insn_flags & ISA_MIPS32R6) {
15497 mips32_op = OPC_BOVC;
15498 } else if (rs < rt && rs == 0) {
15500 mips32_op = OPC_BEQZALC;
15503 mips32_op = OPC_BEQC;
15505 gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
15508 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 1;
15509 gen_compute_branch(ctx, OPC_JAL, 4, rt, rs, offset, 2);
15510 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
15513 case BEQ32: /* BC */
15514 if (ctx->insn_flags & ISA_MIPS32R6) {
15516 gen_compute_compact_branch(ctx, OPC_BC, 0, 0,
15517 sextract32(ctx->opcode << 1, 0, 27));
15520 gen_compute_branch(ctx, OPC_BEQ, 4, rt, rs, imm << 1, 4);
15523 case BNE32: /* BALC */
15524 if (ctx->insn_flags & ISA_MIPS32R6) {
15526 gen_compute_compact_branch(ctx, OPC_BALC, 0, 0,
15527 sextract32(ctx->opcode << 1, 0, 27));
15530 gen_compute_branch(ctx, OPC_BNE, 4, rt, rs, imm << 1, 4);
15533 case J32: /* BGTZC, BLTZC, BLTC */
15534 if (ctx->insn_flags & ISA_MIPS32R6) {
15535 if (rs == 0 && rt != 0) {
15537 mips32_op = OPC_BGTZC;
15538 } else if (rs != 0 && rt != 0 && rs == rt) {
15540 mips32_op = OPC_BLTZC;
15543 mips32_op = OPC_BLTC;
15545 gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
15548 gen_compute_branch(ctx, OPC_J, 4, rt, rs,
15549 (int32_t)(ctx->opcode & 0x3FFFFFF) << 1, 4);
15552 case JAL32: /* BLEZC, BGEZC, BGEC */
15553 if (ctx->insn_flags & ISA_MIPS32R6) {
15554 if (rs == 0 && rt != 0) {
15556 mips32_op = OPC_BLEZC;
15557 } else if (rs != 0 && rt != 0 && rs == rt) {
15559 mips32_op = OPC_BGEZC;
15562 mips32_op = OPC_BGEC;
15564 gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
15567 gen_compute_branch(ctx, OPC_JAL, 4, rt, rs,
15568 (int32_t)(ctx->opcode & 0x3FFFFFF) << 1, 4);
15569 ctx->hflags |= MIPS_HFLAG_BDS_STRICT;
15572 /* Floating point (COP1) */
15574 mips32_op = OPC_LWC1;
15577 mips32_op = OPC_LDC1;
15580 mips32_op = OPC_SWC1;
15583 mips32_op = OPC_SDC1;
15585 gen_cop1_ldst(ctx, mips32_op, rt, rs, imm);
15587 case ADDIUPC: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15588 if (ctx->insn_flags & ISA_MIPS32R6) {
15589 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
15590 switch ((ctx->opcode >> 16) & 0x1f) {
15599 gen_pcrel(ctx, OPC_ADDIUPC, ctx->base.pc_next & ~0x3, rt);
15602 gen_pcrel(ctx, OPC_AUIPC, ctx->base.pc_next, rt);
15605 gen_pcrel(ctx, OPC_ALUIPC, ctx->base.pc_next, rt);
15615 gen_pcrel(ctx, R6_OPC_LWPC, ctx->base.pc_next & ~0x3, rt);
15618 generate_exception(ctx, EXCP_RI);
15623 int reg = mmreg(ZIMM(ctx->opcode, 23, 3));
15624 offset = SIMM(ctx->opcode, 0, 23) << 2;
15626 gen_addiupc(ctx, reg, offset, 0, 0);
15629 case BNVC: /* BNEC, BNEZALC */
15630 check_insn(ctx, ISA_MIPS32R6);
15633 mips32_op = OPC_BNVC;
15634 } else if (rs < rt && rs == 0) {
15636 mips32_op = OPC_BNEZALC;
15639 mips32_op = OPC_BNEC;
15641 gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
15643 case R6_BNEZC: /* JIALC */
15644 check_insn(ctx, ISA_MIPS32R6);
15647 gen_compute_compact_branch(ctx, OPC_BNEZC, rt, 0,
15648 sextract32(ctx->opcode << 1, 0, 22));
15651 gen_compute_compact_branch(ctx, OPC_JIALC, 0, rs, imm);
15654 case R6_BEQZC: /* JIC */
15655 check_insn(ctx, ISA_MIPS32R6);
15658 gen_compute_compact_branch(ctx, OPC_BEQZC, rt, 0,
15659 sextract32(ctx->opcode << 1, 0, 22));
15662 gen_compute_compact_branch(ctx, OPC_JIC, 0, rs, imm);
15665 case BLEZALC: /* BGEZALC, BGEUC */
15666 check_insn(ctx, ISA_MIPS32R6);
15667 if (rs == 0 && rt != 0) {
15669 mips32_op = OPC_BLEZALC;
15670 } else if (rs != 0 && rt != 0 && rs == rt) {
15672 mips32_op = OPC_BGEZALC;
15675 mips32_op = OPC_BGEUC;
15677 gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
15679 case BGTZALC: /* BLTZALC, BLTUC */
15680 check_insn(ctx, ISA_MIPS32R6);
15681 if (rs == 0 && rt != 0) {
15683 mips32_op = OPC_BGTZALC;
15684 } else if (rs != 0 && rt != 0 && rs == rt) {
15686 mips32_op = OPC_BLTZALC;
15689 mips32_op = OPC_BLTUC;
15691 gen_compute_compact_branch(ctx, mips32_op, rs, rt, imm << 1);
15693 /* Loads and stores */
15695 mips32_op = OPC_LB;
15698 mips32_op = OPC_LBU;
15701 mips32_op = OPC_LH;
15704 mips32_op = OPC_LHU;
15707 mips32_op = OPC_LW;
15709 #ifdef TARGET_MIPS64
15711 check_insn(ctx, ISA_MIPS3);
15712 check_mips_64(ctx);
15713 mips32_op = OPC_LD;
15716 check_insn(ctx, ISA_MIPS3);
15717 check_mips_64(ctx);
15718 mips32_op = OPC_SD;
15722 mips32_op = OPC_SB;
15725 mips32_op = OPC_SH;
15728 mips32_op = OPC_SW;
15731 gen_ld(ctx, mips32_op, rt, rs, imm);
15734 gen_st(ctx, mips32_op, rt, rs, imm);
15737 generate_exception_end(ctx, EXCP_RI);
15742 static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
15746 /* make sure instructions are on a halfword boundary */
15747 if (ctx->base.pc_next & 0x1) {
15748 env->CP0_BadVAddr = ctx->base.pc_next;
15749 generate_exception_end(ctx, EXCP_AdEL);
15753 op = (ctx->opcode >> 10) & 0x3f;
15754 /* Enforce properly-sized instructions in a delay slot */
15755 if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
15756 switch (op & 0x7) { /* MSB-3..MSB-5 */
15758 /* POOL32A, POOL32B, POOL32I, POOL32C */
15760 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
15762 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
15764 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
15766 /* LB32, LH32, LWC132, LDC132, LW32 */
15767 if (ctx->hflags & MIPS_HFLAG_BDS16) {
15768 generate_exception_end(ctx, EXCP_RI);
15773 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
15775 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
15777 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
15778 if (ctx->hflags & MIPS_HFLAG_BDS32) {
15779 generate_exception_end(ctx, EXCP_RI);
15789 int rd = mmreg(uMIPS_RD(ctx->opcode));
15790 int rs1 = mmreg(uMIPS_RS1(ctx->opcode));
15791 int rs2 = mmreg(uMIPS_RS2(ctx->opcode));
15794 switch (ctx->opcode & 0x1) {
15802 if (ctx->insn_flags & ISA_MIPS32R6) {
15803 /* In Release 6 the register number location in
15804 * the instruction encoding has changed.
15806 gen_arith(ctx, opc, rs1, rd, rs2);
15808 gen_arith(ctx, opc, rd, rs1, rs2);
15814 int rd = mmreg(uMIPS_RD(ctx->opcode));
15815 int rs = mmreg(uMIPS_RS(ctx->opcode));
15816 int amount = (ctx->opcode >> 1) & 0x7;
15818 amount = amount == 0 ? 8 : amount;
15820 switch (ctx->opcode & 0x1) {
15829 gen_shift_imm(ctx, opc, rd, rs, amount);
15833 if (ctx->insn_flags & ISA_MIPS32R6) {
15834 gen_pool16c_r6_insn(ctx);
15836 gen_pool16c_insn(ctx);
15841 int rd = mmreg(uMIPS_RD(ctx->opcode));
15842 int rb = 28; /* GP */
15843 int16_t offset = SIMM(ctx->opcode, 0, 7) << 2;
15845 gen_ld(ctx, OPC_LW, rd, rb, offset);
15849 check_insn_opc_removed(ctx, ISA_MIPS32R6);
15850 if (ctx->opcode & 1) {
15851 generate_exception_end(ctx, EXCP_RI);
15854 int enc_dest = uMIPS_RD(ctx->opcode);
15855 int enc_rt = uMIPS_RS2(ctx->opcode);
15856 int enc_rs = uMIPS_RS1(ctx->opcode);
15857 gen_movep(ctx, enc_dest, enc_rt, enc_rs);
15862 int rd = mmreg(uMIPS_RD(ctx->opcode));
15863 int rb = mmreg(uMIPS_RS(ctx->opcode));
15864 int16_t offset = ZIMM(ctx->opcode, 0, 4);
15865 offset = (offset == 0xf ? -1 : offset);
15867 gen_ld(ctx, OPC_LBU, rd, rb, offset);
15872 int rd = mmreg(uMIPS_RD(ctx->opcode));
15873 int rb = mmreg(uMIPS_RS(ctx->opcode));
15874 int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;
15876 gen_ld(ctx, OPC_LHU, rd, rb, offset);
15881 int rd = (ctx->opcode >> 5) & 0x1f;
15882 int rb = 29; /* SP */
15883 int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;
15885 gen_ld(ctx, OPC_LW, rd, rb, offset);
15890 int rd = mmreg(uMIPS_RD(ctx->opcode));
15891 int rb = mmreg(uMIPS_RS(ctx->opcode));
15892 int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;
15894 gen_ld(ctx, OPC_LW, rd, rb, offset);
15899 int rd = mmreg2(uMIPS_RD(ctx->opcode));
15900 int rb = mmreg(uMIPS_RS(ctx->opcode));
15901 int16_t offset = ZIMM(ctx->opcode, 0, 4);
15903 gen_st(ctx, OPC_SB, rd, rb, offset);
15908 int rd = mmreg2(uMIPS_RD(ctx->opcode));
15909 int rb = mmreg(uMIPS_RS(ctx->opcode));
15910 int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;
15912 gen_st(ctx, OPC_SH, rd, rb, offset);
15917 int rd = (ctx->opcode >> 5) & 0x1f;
15918 int rb = 29; /* SP */
15919 int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;
15921 gen_st(ctx, OPC_SW, rd, rb, offset);
15926 int rd = mmreg2(uMIPS_RD(ctx->opcode));
15927 int rb = mmreg(uMIPS_RS(ctx->opcode));
15928 int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;
15930 gen_st(ctx, OPC_SW, rd, rb, offset);
15935 int rd = uMIPS_RD5(ctx->opcode);
15936 int rs = uMIPS_RS5(ctx->opcode);
15938 gen_arith(ctx, OPC_ADDU, rd, rs, 0);
15945 switch (ctx->opcode & 0x1) {
15955 switch (ctx->opcode & 0x1) {
15960 gen_addiur1sp(ctx);
15964 case B16: /* BC16 */
15965 gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0,
15966 sextract32(ctx->opcode, 0, 10) << 1,
15967 (ctx->insn_flags & ISA_MIPS32R6) ? 0 : 4);
15969 case BNEZ16: /* BNEZC16 */
15970 case BEQZ16: /* BEQZC16 */
15971 gen_compute_branch(ctx, op == BNEZ16 ? OPC_BNE : OPC_BEQ, 2,
15972 mmreg(uMIPS_RD(ctx->opcode)),
15973 0, sextract32(ctx->opcode, 0, 7) << 1,
15974 (ctx->insn_flags & ISA_MIPS32R6) ? 0 : 4);
15979 int reg = mmreg(uMIPS_RD(ctx->opcode));
15980 int imm = ZIMM(ctx->opcode, 0, 7);
15982 imm = (imm == 0x7f ? -1 : imm);
15983 tcg_gen_movi_tl(cpu_gpr[reg], imm);
15989 generate_exception_end(ctx, EXCP_RI);
15992 decode_micromips32_opc(env, ctx);
16005 /* MAJOR, P16, and P32 pool opcodes */
16009 NM_MOVE_BALC = 0x02,
16017 NM_P16_SHIFT = 0x0c,
16035 NM_P_LS_U12 = 0x21,
16045 NM_P16_ADDU = 0x2c,
16059 NM_MOVEPREV = 0x3f,
16062 /* POOL32A instruction pool */
16064 NM_POOL32A0 = 0x00,
16065 NM_SPECIAL2 = 0x01,
16068 NM_POOL32A5 = 0x05,
16069 NM_POOL32A7 = 0x07,
16072 /* P.GP.W instruction pool */
16074 NM_ADDIUGP_W = 0x00,
16079 /* P48I instruction pool */
16083 NM_ADDIUGP48 = 0x02,
16084 NM_ADDIUPC48 = 0x03,
16089 /* P.U12 instruction pool */
16098 NM_ADDIUNEG = 0x08,
16105 /* POOL32F instruction pool */
16107 NM_POOL32F_0 = 0x00,
16108 NM_POOL32F_3 = 0x03,
16109 NM_POOL32F_5 = 0x05,
16112 /* POOL32S instruction pool */
16114 NM_POOL32S_0 = 0x00,
16115 NM_POOL32S_4 = 0x04,
16118 /* P.LUI instruction pool */
16124 /* P.GP.BH instruction pool */
16129 NM_ADDIUGP_B = 0x03,
16132 NM_P_GP_CP1 = 0x06,
16135 /* P.LS.U12 instruction pool */
16140 NM_P_PREFU12 = 0x03,
16153 /* P.LS.S9 instruction pool */
16159 NM_P_LS_UAWM = 0x05,
16162 /* P.BAL instruction pool */
16168 /* P.J instruction pool */
16171 NM_JALRC_HB = 0x01,
16172 NM_P_BALRSC = 0x08,
16175 /* P.BR1 instruction pool */
16183 /* P.BR2 instruction pool */
16190 /* P.BRI instruction pool */
16202 /* P16.SHIFT instruction pool */
16208 /* POOL16C instruction pool */
16210 NM_POOL16C_0 = 0x00,
16214 /* P16.A1 instruction pool */
16216 NM_ADDIUR1SP = 0x01,
16219 /* P16.A2 instruction pool */
16222 NM_P_ADDIURS5 = 0x01,
16225 /* P16.ADDU instruction pool */
16231 /* P16.SR instruction pool */
16234 NM_RESTORE_JRC16 = 0x01,
16237 /* P16.4X4 instruction pool */
16243 /* P16.LB instruction pool */
16250 /* P16.LH instruction pool */
16257 /* P.RI instruction pool */
16260 NM_P_SYSCALL = 0x01,
16265 /* POOL32A0 instruction pool */
16300 NM_D_E_MT_VPE = 0x56,
16308 /* POOL32A5 instruction pool */
16310 NM_CMP_EQ_PH = 0x00,
16311 NM_CMP_LT_PH = 0x08,
16312 NM_CMP_LE_PH = 0x10,
16313 NM_CMPGU_EQ_QB = 0x18,
16314 NM_CMPGU_LT_QB = 0x20,
16315 NM_CMPGU_LE_QB = 0x28,
16316 NM_CMPGDU_EQ_QB = 0x30,
16317 NM_CMPGDU_LT_QB = 0x38,
16318 NM_CMPGDU_LE_QB = 0x40,
16319 NM_CMPU_EQ_QB = 0x48,
16320 NM_CMPU_LT_QB = 0x50,
16321 NM_CMPU_LE_QB = 0x58,
16322 NM_ADDQ_S_W = 0x60,
16323 NM_SUBQ_S_W = 0x68,
16327 NM_ADDQ_S_PH = 0x01,
16328 NM_ADDQH_R_PH = 0x09,
16329 NM_ADDQH_R_W = 0x11,
16330 NM_ADDU_S_QB = 0x19,
16331 NM_ADDU_S_PH = 0x21,
16332 NM_ADDUH_R_QB = 0x29,
16333 NM_SHRAV_R_PH = 0x31,
16334 NM_SHRAV_R_QB = 0x39,
16335 NM_SUBQ_S_PH = 0x41,
16336 NM_SUBQH_R_PH = 0x49,
16337 NM_SUBQH_R_W = 0x51,
16338 NM_SUBU_S_QB = 0x59,
16339 NM_SUBU_S_PH = 0x61,
16340 NM_SUBUH_R_QB = 0x69,
16341 NM_SHLLV_S_PH = 0x71,
16342 NM_PRECR_SRA_R_PH_W = 0x79,
16344 NM_MULEU_S_PH_QBL = 0x12,
16345 NM_MULEU_S_PH_QBR = 0x1a,
16346 NM_MULQ_RS_PH = 0x22,
16347 NM_MULQ_S_PH = 0x2a,
16348 NM_MULQ_RS_W = 0x32,
16349 NM_MULQ_S_W = 0x3a,
16352 NM_SHRAV_R_W = 0x5a,
16353 NM_SHRLV_PH = 0x62,
16354 NM_SHRLV_QB = 0x6a,
16355 NM_SHLLV_QB = 0x72,
16356 NM_SHLLV_S_W = 0x7a,
16360 NM_MULEQ_S_W_PHL = 0x04,
16361 NM_MULEQ_S_W_PHR = 0x0c,
16363 NM_MUL_S_PH = 0x05,
16364 NM_PRECR_QB_PH = 0x0d,
16365 NM_PRECRQ_QB_PH = 0x15,
16366 NM_PRECRQ_PH_W = 0x1d,
16367 NM_PRECRQ_RS_PH_W = 0x25,
16368 NM_PRECRQU_S_QB_PH = 0x2d,
16369 NM_PACKRL_PH = 0x35,
16373 NM_SHRA_R_W = 0x5e,
16374 NM_SHRA_R_PH = 0x66,
16375 NM_SHLL_S_PH = 0x76,
16376 NM_SHLL_S_W = 0x7e,
16381 /* POOL32A7 instruction pool */
16386 NM_POOL32AXF = 0x07,
16389 /* P.SR instruction pool */
16395 /* P.SHIFT instruction pool */
16403 /* P.ROTX instruction pool */
16408 /* P.INS instruction pool */
16413 /* P.EXT instruction pool */
16418 /* POOL32F_0 (fmt) instruction pool */
16423 NM_SELEQZ_S = 0x07,
16424 NM_SELEQZ_D = 0x47,
16428 NM_SELNEZ_S = 0x0f,
16429 NM_SELNEZ_D = 0x4f,
16444 /* POOL32F_3 instruction pool */
16448 NM_MINA_FMT = 0x04,
16449 NM_MAXA_FMT = 0x05,
16450 NM_POOL32FXF = 0x07,
16453 /* POOL32F_5 instruction pool */
16455 NM_CMP_CONDN_S = 0x00,
16456 NM_CMP_CONDN_D = 0x02,
16459 /* P.GP.LH instruction pool */
16465 /* P.GP.SH instruction pool */
16470 /* P.GP.CP1 instruction pool */
16478 /* P.LS.S0 instruction pool */
16495 NM_P_PREFS9 = 0x03,
16501 /* P.LS.S1 instruction pool */
16503 NM_ASET_ACLR = 0x02,
16511 /* P.LS.WM instruction pool */
16517 /* P.LS.UAWM instruction pool */
16523 /* P.BR3A instruction pool */
16529 NM_BPOSGE32C = 0x04,
16532 /* P16.RI instruction pool */
16534 NM_P16_SYSCALL = 0x01,
16539 /* POOL16C_0 instruction pool */
16541 NM_POOL16C_00 = 0x00,
16544 /* P16.JRC instruction pool */
16550 /* P.SYSCALL instruction pool */
16556 /* P.TRAP instruction pool */
16562 /* P.CMOVE instruction pool */
16568 /* POOL32Axf instruction pool */
16570 NM_POOL32AXF_1 = 0x01,
16571 NM_POOL32AXF_2 = 0x02,
16572 NM_POOL32AXF_4 = 0x04,
16573 NM_POOL32AXF_5 = 0x05,
16574 NM_POOL32AXF_7 = 0x07,
16577 /* POOL32Axf_1 instruction pool */
16579 NM_POOL32AXF_1_0 = 0x00,
16580 NM_POOL32AXF_1_1 = 0x01,
16581 NM_POOL32AXF_1_3 = 0x03,
16582 NM_POOL32AXF_1_4 = 0x04,
16583 NM_POOL32AXF_1_5 = 0x05,
16584 NM_POOL32AXF_1_7 = 0x07,
16587 /* POOL32Axf_2 instruction pool */
16589 NM_POOL32AXF_2_0_7 = 0x00,
16590 NM_POOL32AXF_2_8_15 = 0x01,
16591 NM_POOL32AXF_2_16_23 = 0x02,
16592 NM_POOL32AXF_2_24_31 = 0x03,
16595 /* POOL32Axf_7 instruction pool */
16597 NM_SHRA_R_QB = 0x0,
16602 /* POOL32Axf_1_0 instruction pool */
16610 /* POOL32Axf_1_1 instruction pool */
16616 /* POOL32Axf_1_3 instruction pool */
16624 /* POOL32Axf_1_4 instruction pool */
16630 /* POOL32Axf_1_5 instruction pool */
16632 NM_MAQ_S_W_PHR = 0x0,
16633 NM_MAQ_S_W_PHL = 0x1,
16634 NM_MAQ_SA_W_PHR = 0x2,
16635 NM_MAQ_SA_W_PHL = 0x3,
16638 /* POOL32Axf_1_7 instruction pool */
16642 NM_EXTR_RS_W = 0x2,
16646 /* POOL32Axf_2_0_7 instruction pool */
16649 NM_DPAQ_S_W_PH = 0x1,
16651 NM_DPSQ_S_W_PH = 0x3,
16658 /* POOL32Axf_2_8_15 instruction pool */
16660 NM_DPAX_W_PH = 0x0,
16661 NM_DPAQ_SA_L_W = 0x1,
16662 NM_DPSX_W_PH = 0x2,
16663 NM_DPSQ_SA_L_W = 0x3,
16666 NM_EXTRV_R_W = 0x7,
16669 /* POOL32Axf_2_16_23 instruction pool */
16671 NM_DPAU_H_QBL = 0x0,
16672 NM_DPAQX_S_W_PH = 0x1,
16673 NM_DPSU_H_QBL = 0x2,
16674 NM_DPSQX_S_W_PH = 0x3,
16677 NM_MULSA_W_PH = 0x6,
16678 NM_EXTRV_RS_W = 0x7,
16681 /* POOL32Axf_2_24_31 instruction pool */
16683 NM_DPAU_H_QBR = 0x0,
16684 NM_DPAQX_SA_W_PH = 0x1,
16685 NM_DPSU_H_QBR = 0x2,
16686 NM_DPSQX_SA_W_PH = 0x3,
16689 NM_MULSAQ_S_W_PH = 0x6,
16690 NM_EXTRV_S_H = 0x7,
16693 /* POOL32Axf_{4, 5} instruction pool */
16712 /* nanoMIPS DSP instructions */
16713 NM_ABSQ_S_QB = 0x00,
16714 NM_ABSQ_S_PH = 0x08,
16715 NM_ABSQ_S_W = 0x10,
16716 NM_PRECEQ_W_PHL = 0x28,
16717 NM_PRECEQ_W_PHR = 0x30,
16718 NM_PRECEQU_PH_QBL = 0x38,
16719 NM_PRECEQU_PH_QBR = 0x48,
16720 NM_PRECEU_PH_QBL = 0x58,
16721 NM_PRECEU_PH_QBR = 0x68,
16722 NM_PRECEQU_PH_QBLA = 0x39,
16723 NM_PRECEQU_PH_QBRA = 0x49,
16724 NM_PRECEU_PH_QBLA = 0x59,
16725 NM_PRECEU_PH_QBRA = 0x69,
16726 NM_REPLV_PH = 0x01,
16727 NM_REPLV_QB = 0x09,
16730 NM_RADDU_W_QB = 0x78,
16736 /* PP.SR instruction pool */
16740 NM_RESTORE_JRC = 0x03,
16743 /* P.SR.F instruction pool */
16746 NM_RESTOREF = 0x01,
16749 /* P16.SYSCALL instruction pool */
16751 NM_SYSCALL16 = 0x00,
16752 NM_HYPCALL16 = 0x01,
16755 /* POOL16C_00 instruction pool */
16763 /* PP.LSX and PP.LSXS instruction pool */
16801 /* ERETx instruction pool */
16807 /* POOL32FxF_{0, 1} instruction pool */
16816 NM_CVT_S_PL = 0x84,
16817 NM_CVT_S_PU = 0xa4,
16819 NM_CVT_L_S = 0x004,
16820 NM_CVT_L_D = 0x104,
16821 NM_CVT_W_S = 0x024,
16822 NM_CVT_W_D = 0x124,
16824 NM_RSQRT_S = 0x008,
16825 NM_RSQRT_D = 0x108,
16830 NM_RECIP_S = 0x048,
16831 NM_RECIP_D = 0x148,
16833 NM_FLOOR_L_S = 0x00c,
16834 NM_FLOOR_L_D = 0x10c,
16836 NM_FLOOR_W_S = 0x02c,
16837 NM_FLOOR_W_D = 0x12c,
16839 NM_CEIL_L_S = 0x04c,
16840 NM_CEIL_L_D = 0x14c,
16841 NM_CEIL_W_S = 0x06c,
16842 NM_CEIL_W_D = 0x16c,
16843 NM_TRUNC_L_S = 0x08c,
16844 NM_TRUNC_L_D = 0x18c,
16845 NM_TRUNC_W_S = 0x0ac,
16846 NM_TRUNC_W_D = 0x1ac,
16847 NM_ROUND_L_S = 0x0cc,
16848 NM_ROUND_L_D = 0x1cc,
16849 NM_ROUND_W_S = 0x0ec,
16850 NM_ROUND_W_D = 0x1ec,
16858 NM_CVT_D_S = 0x04d,
16859 NM_CVT_D_W = 0x0cd,
16860 NM_CVT_D_L = 0x14d,
16861 NM_CVT_S_D = 0x06d,
16862 NM_CVT_S_W = 0x0ed,
16863 NM_CVT_S_L = 0x16d,
16866 /* P.LL instruction pool */
16872 /* P.SC instruction pool */
16878 /* P.DVP instruction pool */
16887 * nanoMIPS decoding engine
16892 /* extraction utilities */
16894 #define NANOMIPS_EXTRACT_RD(op) ((op >> 7) & 0x7)
16895 #define NANOMIPS_EXTRACT_RS(op) ((op >> 4) & 0x7)
16896 #define NANOMIPS_EXTRACT_RS2(op) uMIPS_RS(op)
16897 #define NANOMIPS_EXTRACT_RS1(op) ((op >> 1) & 0x7)
16898 #define NANOMIPS_EXTRACT_RD5(op) ((op >> 5) & 0x1f)
16899 #define NANOMIPS_EXTRACT_RS5(op) (op & 0x1f)
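/*
 * The decode_gpr_* tables below implement the nanoMIPS 3- and 4-bit GPR
 * encodings.  They differ only in which encoding selects $zero:
 * 'gpr3.src.store' substitutes $0 for $16 at index 0, and 'gpr4.zero'
 * substitutes $0 for $11 at index 3 (presumably so that a zero source
 * still gets a compact encoding).
 */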
16901 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
16902 static inline int decode_gpr_gpr3(int r)
16904 static const int map[] = { 16, 17, 18, 19, 4, 5, 6, 7 };
16906 return map[r & 0x7];
16909 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
16910 static inline int decode_gpr_gpr3_src_store(int r)
16912 static const int map[] = { 0, 17, 18, 19, 4, 5, 6, 7 };
16914 return map[r & 0x7];
16917 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
16918 static inline int decode_gpr_gpr4(int r)
16920 static const int map[] = { 8, 9, 10, 11, 4, 5, 6, 7,
16921 16, 17, 18, 19, 20, 21, 22, 23 };
16923 return map[r & 0xf];
16926 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
16927 static inline int decode_gpr_gpr4_zero(int r)
16929 static const int map[] = { 8, 9, 10, 0, 4, 5, 6, 7,
16930 16, 17, 18, 19, 20, 21, 22, 23 };
16932 return map[r & 0xf];
16946 static void gen_adjust_sp(DisasContext *ctx, int u)
16948 gen_op_addr_addi(ctx, cpu_gpr[29], cpu_gpr[29], u);
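/*
 * SAVE/RESTORE helpers.  gen_save() stores 'count' consecutive GPRs
 * starting at 'rt' into successive words just below the stack pointer
 * (the last iteration stores $28 instead when 'gp' is set) and then
 * moves SP down by 'u'; gen_restore() performs the mirror-image loads
 * and moves SP back up.  The expression (rt & 0x10) | ((rt + counter)
 * & 0x1f) wraps the register index while preserving bit 4 of rt, so a
 * list that starts in $16..$31 stays within that range.
 */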
16951 static void gen_save(DisasContext *ctx, uint8_t rt, uint8_t count,
16952 uint8_t gp, uint16_t u)
16955 TCGv va = tcg_temp_new();
16956 TCGv t0 = tcg_temp_new();
16958 while (counter != count) {
16959 bool use_gp = gp && (counter == count - 1);
16960 int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
16961 int this_offset = -((counter + 1) << 2);
16962 gen_base_offset_addr(ctx, va, 29, this_offset);
16963 gen_load_gpr(t0, this_rt);
16964 tcg_gen_qemu_st_tl(t0, va, ctx->mem_idx,
16965 (MO_TEUL | ctx->default_tcg_memop_mask));
16969 /* adjust stack pointer */
16970 gen_adjust_sp(ctx, -u);
16976 static void gen_restore(DisasContext *ctx, uint8_t rt, uint8_t count,
16977 uint8_t gp, uint16_t u)
16980 TCGv va = tcg_temp_new();
16981 TCGv t0 = tcg_temp_new();
16983 while (counter != count) {
16984 bool use_gp = gp && (counter == count - 1);
16985 int this_rt = use_gp ? 28 : (rt & 0x10) | ((rt + counter) & 0x1f);
16986 int this_offset = u - ((counter + 1) << 2);
16987 gen_base_offset_addr(ctx, va, 29, this_offset);
16988 tcg_gen_qemu_ld_tl(t0, va, ctx->mem_idx, MO_TESL |
16989 ctx->default_tcg_memop_mask);
16990 tcg_gen_ext32s_tl(t0, t0);
16991 gen_store_gpr(t0, this_rt);
16995 /* adjust stack pointer */
16996 gen_adjust_sp(ctx, u);
17002 static void gen_pool16c_nanomips_insn(DisasContext *ctx)
17004 int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode));
17005 int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));
17007 switch (extract32(ctx->opcode, 2, 2)) {
17009 gen_logic(ctx, OPC_NOR, rt, rs, 0);
17012 gen_logic(ctx, OPC_AND, rt, rt, rs);
17015 gen_logic(ctx, OPC_XOR, rt, rt, rs);
17018 gen_logic(ctx, OPC_OR, rt, rt, rs);
17023 static void gen_pool32a0_nanomips_insn(CPUMIPSState *env, DisasContext *ctx)
17025 int rt = extract32(ctx->opcode, 21, 5);
17026 int rs = extract32(ctx->opcode, 16, 5);
17027 int rd = extract32(ctx->opcode, 11, 5);
17029 switch (extract32(ctx->opcode, 3, 7)) {
17031 switch (extract32(ctx->opcode, 10, 1)) {
17034 gen_trap(ctx, OPC_TEQ, rs, rt, -1);
17038 gen_trap(ctx, OPC_TNE, rs, rt, -1);
17044 gen_rdhwr(ctx, rt, rs, extract32(ctx->opcode, 11, 3));
17048 gen_bshfl(ctx, OPC_SEB, rs, rt);
17051 gen_bshfl(ctx, OPC_SEH, rs, rt);
17054 gen_shift(ctx, OPC_SLLV, rd, rt, rs);
17057 gen_shift(ctx, OPC_SRLV, rd, rt, rs);
17060 gen_shift(ctx, OPC_SRAV, rd, rt, rs);
17063 gen_shift(ctx, OPC_ROTRV, rd, rt, rs);
17066 gen_arith(ctx, OPC_ADD, rd, rs, rt);
17069 gen_arith(ctx, OPC_ADDU, rd, rs, rt);
17073 gen_arith(ctx, OPC_SUB, rd, rs, rt);
17076 gen_arith(ctx, OPC_SUBU, rd, rs, rt);
17079 switch (extract32(ctx->opcode, 10, 1)) {
17081 gen_cond_move(ctx, OPC_MOVZ, rd, rs, rt);
17084 gen_cond_move(ctx, OPC_MOVN, rd, rs, rt);
17089 gen_logic(ctx, OPC_AND, rd, rs, rt);
17092 gen_logic(ctx, OPC_OR, rd, rs, rt);
17095 gen_logic(ctx, OPC_NOR, rd, rs, rt);
17098 gen_logic(ctx, OPC_XOR, rd, rs, rt);
17101 gen_slt(ctx, OPC_SLT, rd, rs, rt);
17106 #ifndef CONFIG_USER_ONLY
17107 TCGv t0 = tcg_temp_new();
17108 switch (extract32(ctx->opcode, 10, 1)) {
17111 check_cp0_enabled(ctx);
17112 gen_helper_dvp(t0, cpu_env);
17113 gen_store_gpr(t0, rt);
17118 check_cp0_enabled(ctx);
17119 gen_helper_evp(t0, cpu_env);
17120 gen_store_gpr(t0, rt);
17127 gen_slt(ctx, OPC_SLTU, rd, rs, rt);
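/*
 * The block below computes set-on-overflow for a 32-bit addition:
 * t1 = rs ^ rt is negative when the operands' signs differ,
 * t2 = (rs + rt) ^ rt is negative when the sum's sign differs from rt,
 * so t2 & ~t1 has its sign bit set exactly when both operands share a
 * sign but the (sign-extended) sum does not, i.e. on signed overflow.
 */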
17132 TCGv t0 = tcg_temp_new();
17133 TCGv t1 = tcg_temp_new();
17134 TCGv t2 = tcg_temp_new();
17136 gen_load_gpr(t1, rs);
17137 gen_load_gpr(t2, rt);
17138 tcg_gen_add_tl(t0, t1, t2);
17139 tcg_gen_ext32s_tl(t0, t0);
17140 tcg_gen_xor_tl(t1, t1, t2);
17141 tcg_gen_xor_tl(t2, t0, t2);
17142 tcg_gen_andc_tl(t1, t2, t1);
17144 /* operands of same sign, result different sign */
17145 tcg_gen_setcondi_tl(TCG_COND_LT, t0, t1, 0);
17146 gen_store_gpr(t0, rd);
17154 gen_r6_muldiv(ctx, R6_OPC_MUL, rd, rs, rt);
17157 gen_r6_muldiv(ctx, R6_OPC_MUH, rd, rs, rt);
17160 gen_r6_muldiv(ctx, R6_OPC_MULU, rd, rs, rt);
17163 gen_r6_muldiv(ctx, R6_OPC_MUHU, rd, rs, rt);
17166 gen_r6_muldiv(ctx, R6_OPC_DIV, rd, rs, rt);
17169 gen_r6_muldiv(ctx, R6_OPC_MOD, rd, rs, rt);
17172 gen_r6_muldiv(ctx, R6_OPC_DIVU, rd, rs, rt);
17175 gen_r6_muldiv(ctx, R6_OPC_MODU, rd, rs, rt);
17177 #ifndef CONFIG_USER_ONLY
17179 check_cp0_enabled(ctx);
17181 /* Treat as NOP. */
17184 gen_mfc0(ctx, cpu_gpr[rt], rs, extract32(ctx->opcode, 11, 3));
17187 check_cp0_enabled(ctx);
17189 TCGv t0 = tcg_temp_new();
17191 gen_load_gpr(t0, rt);
17192 gen_mtc0(ctx, t0, rs, extract32(ctx->opcode, 11, 3));
17196 case NM_D_E_MT_VPE:
17198 uint8_t sc = extract32(ctx->opcode, 10, 1);
17199 TCGv t0 = tcg_temp_new();
17206 gen_helper_dmt(t0);
17207 gen_store_gpr(t0, rt);
17208 } else if (rs == 0) {
17211 gen_helper_dvpe(t0, cpu_env);
17212 gen_store_gpr(t0, rt);
17214 generate_exception_end(ctx, EXCP_RI);
17221 gen_helper_emt(t0);
17222 gen_store_gpr(t0, rt);
17223 } else if (rs == 0) {
17226 gen_helper_evpe(t0, cpu_env);
17227 gen_store_gpr(t0, rt);
17229 generate_exception_end(ctx, EXCP_RI);
17240 TCGv t0 = tcg_temp_new();
17241 TCGv t1 = tcg_temp_new();
17243 gen_load_gpr(t0, rt);
17244 gen_load_gpr(t1, rs);
17245 gen_helper_fork(t0, t1);
17252 check_cp0_enabled(ctx);
17254 /* Treat as NOP. */
17257 gen_mftr(env, ctx, rs, rt, extract32(ctx->opcode, 10, 1),
17258 extract32(ctx->opcode, 11, 5), extract32(ctx->opcode, 3, 1));
17262 check_cp0_enabled(ctx);
17263 gen_mttr(env, ctx, rs, rt, extract32(ctx->opcode, 10, 1),
17264 extract32(ctx->opcode, 11, 5), extract32(ctx->opcode, 3, 1));
17269 TCGv t0 = tcg_temp_new();
17271 gen_load_gpr(t0, rs);
17272 gen_helper_yield(t0, cpu_env, t0);
17273 gen_store_gpr(t0, rt);
17279 generate_exception_end(ctx, EXCP_RI);
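/*
 * POOL32Axf_1_5 sub-pool: the MAQ.* DSP multiply-accumulate operations
 * (MAQ_S/MAQ_SA on the left/right PH halves).  The value built from
 * v2 >> 3 and passed as the first helper operand appears to select the
 * DSP accumulator; that reading is inferred from the field extraction
 * here rather than stated in the surrounding code.
 */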
17285 static void gen_pool32axf_1_5_nanomips_insn(DisasContext *ctx, uint32_t opc,
17286 int ret, int v1, int v2)
17292 t0 = tcg_temp_new_i32();
17294 v0_t = tcg_temp_new();
17295 v1_t = tcg_temp_new();
17297 tcg_gen_movi_i32(t0, v2 >> 3);
17299 gen_load_gpr(v0_t, ret);
17300 gen_load_gpr(v1_t, v1);
17303 case NM_MAQ_S_W_PHR:
17305 gen_helper_maq_s_w_phr(t0, v1_t, v0_t, cpu_env);
17307 case NM_MAQ_S_W_PHL:
17309 gen_helper_maq_s_w_phl(t0, v1_t, v0_t, cpu_env);
17311 case NM_MAQ_SA_W_PHR:
17313 gen_helper_maq_sa_w_phr(t0, v1_t, v0_t, cpu_env);
17315 case NM_MAQ_SA_W_PHL:
17317 gen_helper_maq_sa_w_phl(t0, v1_t, v0_t, cpu_env);
17320 generate_exception_end(ctx, EXCP_RI);
17324 tcg_temp_free_i32(t0);
17326 tcg_temp_free(v0_t);
17327 tcg_temp_free(v1_t);
17331 static void gen_pool32axf_1_nanomips_insn(DisasContext *ctx, uint32_t opc,
17332 int ret, int v1, int v2)
17335 TCGv t0 = tcg_temp_new();
17336 TCGv t1 = tcg_temp_new();
17337 TCGv v0_t = tcg_temp_new();
17339 gen_load_gpr(v0_t, v1);
17342 case NM_POOL32AXF_1_0:
17344 switch (extract32(ctx->opcode, 12, 2)) {
17346 gen_HILO(ctx, OPC_MFHI, v2 >> 3, ret);
17349 gen_HILO(ctx, OPC_MFLO, v2 >> 3, ret);
17352 gen_HILO(ctx, OPC_MTHI, v2 >> 3, v1);
17355 gen_HILO(ctx, OPC_MTLO, v2 >> 3, v1);
17359 case NM_POOL32AXF_1_1:
17361 switch (extract32(ctx->opcode, 12, 2)) {
17363 tcg_gen_movi_tl(t0, v2);
17364 gen_helper_mthlip(t0, v0_t, cpu_env);
17367 tcg_gen_movi_tl(t0, v2 >> 3);
17368 gen_helper_shilo(t0, v0_t, cpu_env);
17371 generate_exception_end(ctx, EXCP_RI);
17375 case NM_POOL32AXF_1_3:
17377 imm = extract32(ctx->opcode, 14, 7);
17378 switch (extract32(ctx->opcode, 12, 2)) {
17380 tcg_gen_movi_tl(t0, imm);
17381 gen_helper_rddsp(t0, t0, cpu_env);
17382 gen_store_gpr(t0, ret);
17385 gen_load_gpr(t0, ret);
17386 tcg_gen_movi_tl(t1, imm);
17387 gen_helper_wrdsp(t0, t1, cpu_env);
17390 tcg_gen_movi_tl(t0, v2 >> 3);
17391 tcg_gen_movi_tl(t1, v1);
17392 gen_helper_extp(t0, t0, t1, cpu_env);
17393 gen_store_gpr(t0, ret);
17396 tcg_gen_movi_tl(t0, v2 >> 3);
17397 tcg_gen_movi_tl(t1, v1);
17398 gen_helper_extpdp(t0, t0, t1, cpu_env);
17399 gen_store_gpr(t0, ret);
17403 case NM_POOL32AXF_1_4:
17405 tcg_gen_movi_tl(t0, v2 >> 2);
17406 switch (extract32(ctx->opcode, 12, 1)) {
17408 gen_helper_shll_qb(t0, t0, v0_t, cpu_env);
17409 gen_store_gpr(t0, ret);
17412 gen_helper_shrl_qb(t0, t0, v0_t);
17413 gen_store_gpr(t0, ret);
17417 case NM_POOL32AXF_1_5:
17418 opc = extract32(ctx->opcode, 12, 2);
17419 gen_pool32axf_1_5_nanomips_insn(ctx, opc, ret, v1, v2);
17421 case NM_POOL32AXF_1_7:
17423 tcg_gen_movi_tl(t0, v2 >> 3);
17424 tcg_gen_movi_tl(t1, v1);
17425 switch (extract32(ctx->opcode, 12, 2)) {
17427 gen_helper_extr_w(t0, t0, t1, cpu_env);
17428 gen_store_gpr(t0, ret);
17431 gen_helper_extr_r_w(t0, t0, t1, cpu_env);
17432 gen_store_gpr(t0, ret);
17435 gen_helper_extr_rs_w(t0, t0, t1, cpu_env);
17436 gen_store_gpr(t0, ret);
17439 gen_helper_extr_s_h(t0, t0, t1, cpu_env);
17440 gen_store_gpr(t0, ret);
17445 generate_exception_end(ctx, EXCP_RI);
17451 tcg_temp_free(v0_t);
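/*
 * gen_pool32axf_2_multiply() handles the DSP dot-product style cases
 * (DPA/DPAQ/DPS/DPSQ, MULSA/MULSAQ, ...) of the four POOL32Axf_2
 * sub-pools; the first helper operand is built from rd >> 3, which
 * appears to be the accumulator index.  The remaining cases of each
 * sub-pool are handled inline by gen_pool32axf_2_nanomips_insn() below.
 */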
17454 static void gen_pool32axf_2_multiply(DisasContext *ctx, uint32_t opc,
17455 TCGv v0, TCGv v1, int rd)
17459 t0 = tcg_temp_new_i32();
17461 tcg_gen_movi_i32(t0, rd >> 3);
17464 case NM_POOL32AXF_2_0_7:
17465 switch (extract32(ctx->opcode, 9, 3)) {
17468 gen_helper_dpa_w_ph(t0, v1, v0, cpu_env);
17470 case NM_DPAQ_S_W_PH:
17472 gen_helper_dpaq_s_w_ph(t0, v1, v0, cpu_env);
17476 gen_helper_dps_w_ph(t0, v1, v0, cpu_env);
17478 case NM_DPSQ_S_W_PH:
17480 gen_helper_dpsq_s_w_ph(t0, v1, v0, cpu_env);
17483 generate_exception_end(ctx, EXCP_RI);
17487 case NM_POOL32AXF_2_8_15:
17488 switch (extract32(ctx->opcode, 9, 3)) {
17491 gen_helper_dpax_w_ph(t0, v0, v1, cpu_env);
17493 case NM_DPAQ_SA_L_W:
17495 gen_helper_dpaq_sa_l_w(t0, v0, v1, cpu_env);
17499 gen_helper_dpsx_w_ph(t0, v0, v1, cpu_env);
17501 case NM_DPSQ_SA_L_W:
17503 gen_helper_dpsq_sa_l_w(t0, v0, v1, cpu_env);
17506 generate_exception_end(ctx, EXCP_RI);
17510 case NM_POOL32AXF_2_16_23:
17511 switch (extract32(ctx->opcode, 9, 3)) {
17512 case NM_DPAU_H_QBL:
17514 gen_helper_dpau_h_qbl(t0, v0, v1, cpu_env);
17516 case NM_DPAQX_S_W_PH:
17518 gen_helper_dpaqx_s_w_ph(t0, v0, v1, cpu_env);
17520 case NM_DPSU_H_QBL:
17522 gen_helper_dpsu_h_qbl(t0, v0, v1, cpu_env);
17524 case NM_DPSQX_S_W_PH:
17526 gen_helper_dpsqx_s_w_ph(t0, v0, v1, cpu_env);
17528 case NM_MULSA_W_PH:
17530 gen_helper_mulsa_w_ph(t0, v0, v1, cpu_env);
17533 generate_exception_end(ctx, EXCP_RI);
17537 case NM_POOL32AXF_2_24_31:
17538 switch (extract32(ctx->opcode, 9, 3)) {
17539 case NM_DPAU_H_QBR:
17541 gen_helper_dpau_h_qbr(t0, v1, v0, cpu_env);
17543 case NM_DPAQX_SA_W_PH:
17545 gen_helper_dpaqx_sa_w_ph(t0, v1, v0, cpu_env);
17547 case NM_DPSU_H_QBR:
17549 gen_helper_dpsu_h_qbr(t0, v1, v0, cpu_env);
17551 case NM_DPSQX_SA_W_PH:
17553 gen_helper_dpsqx_sa_w_ph(t0, v1, v0, cpu_env);
17555 case NM_MULSAQ_S_W_PH:
17557 gen_helper_mulsaq_s_w_ph(t0, v1, v0, cpu_env);
17560 generate_exception_end(ctx, EXCP_RI);
17565 generate_exception_end(ctx, EXCP_RI);
17569 tcg_temp_free_i32(t0);
17572 static void gen_pool32axf_2_nanomips_insn(DisasContext *ctx, uint32_t opc,
17573 int rt, int rs, int rd)
17576 TCGv t0 = tcg_temp_new();
17577 TCGv t1 = tcg_temp_new();
17578 TCGv v0_t = tcg_temp_new();
17579 TCGv v1_t = tcg_temp_new();
17581 gen_load_gpr(v0_t, rt);
17582 gen_load_gpr(v1_t, rs);
17585 case NM_POOL32AXF_2_0_7:
17586 switch (extract32(ctx->opcode, 9, 3)) {
17588 case NM_DPAQ_S_W_PH:
17590 case NM_DPSQ_S_W_PH:
17591 gen_pool32axf_2_multiply(ctx, opc, v0_t, v1_t, rd);
17596 gen_load_gpr(t0, rs);
17598 if (rd != 0 && rd != 2) {
17599 tcg_gen_shli_tl(cpu_gpr[ret], cpu_gpr[ret], 8 * rd);
17600 tcg_gen_ext32u_tl(t0, t0);
17601 tcg_gen_shri_tl(t0, t0, 8 * (4 - rd));
17602 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
17604 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
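/*
 * The next block is a widening multiply-accumulate: rs and rt are
 * sign-extended to 64 bits, multiplied, and the product is added to the
 * {HI, LO} pair of DSP accumulator 'acc' (bits 15:14 of the opcode)
 * before being written back as separate low and high halves.  The later
 * sub-pools repeat this pattern with zero extension and/or a subtraction
 * for the unsigned and multiply-subtract variants.
 */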
17610 int acc = extract32(ctx->opcode, 14, 2);
17611 TCGv_i64 t2 = tcg_temp_new_i64();
17612 TCGv_i64 t3 = tcg_temp_new_i64();
17614 gen_load_gpr(t0, rt);
17615 gen_load_gpr(t1, rs);
17616 tcg_gen_ext_tl_i64(t2, t0);
17617 tcg_gen_ext_tl_i64(t3, t1);
17618 tcg_gen_mul_i64(t2, t2, t3);
17619 tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
17620 tcg_gen_add_i64(t2, t2, t3);
17621 tcg_temp_free_i64(t3);
17622 gen_move_low32(cpu_LO[acc], t2);
17623 gen_move_high32(cpu_HI[acc], t2);
17624 tcg_temp_free_i64(t2);
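/*
 * By contrast, the block below is a plain 32x32 signed multiply:
 * tcg_gen_muls2_i32() yields both halves of the product directly, and
 * they are sign-extended into LO and HI of accumulator 'acc' without
 * any accumulation.
 */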
17630 int acc = extract32(ctx->opcode, 14, 2);
17631 TCGv_i32 t2 = tcg_temp_new_i32();
17632 TCGv_i32 t3 = tcg_temp_new_i32();
17634 gen_load_gpr(t0, rs);
17635 gen_load_gpr(t1, rt);
17636 tcg_gen_trunc_tl_i32(t2, t0);
17637 tcg_gen_trunc_tl_i32(t3, t1);
17638 tcg_gen_muls2_i32(t2, t3, t2, t3);
17639 tcg_gen_ext_i32_tl(cpu_LO[acc], t2);
17640 tcg_gen_ext_i32_tl(cpu_HI[acc], t3);
17641 tcg_temp_free_i32(t2);
17642 tcg_temp_free_i32(t3);
17647 gen_load_gpr(v1_t, rs);
17648 tcg_gen_movi_tl(t0, rd >> 3);
17649 gen_helper_extr_w(t0, t0, v1_t, cpu_env);
17650 gen_store_gpr(t0, ret);
17654 case NM_POOL32AXF_2_8_15:
17655 switch (extract32(ctx->opcode, 9, 3)) {
17657 case NM_DPAQ_SA_L_W:
17659 case NM_DPSQ_SA_L_W:
17660 gen_pool32axf_2_multiply(ctx, opc, v0_t, v1_t, rd);
17665 int acc = extract32(ctx->opcode, 14, 2);
17666 TCGv_i64 t2 = tcg_temp_new_i64();
17667 TCGv_i64 t3 = tcg_temp_new_i64();
17669 gen_load_gpr(t0, rs);
17670 gen_load_gpr(t1, rt);
17671 tcg_gen_ext32u_tl(t0, t0);
17672 tcg_gen_ext32u_tl(t1, t1);
17673 tcg_gen_extu_tl_i64(t2, t0);
17674 tcg_gen_extu_tl_i64(t3, t1);
17675 tcg_gen_mul_i64(t2, t2, t3);
17676 tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
17677 tcg_gen_add_i64(t2, t2, t3);
17678 tcg_temp_free_i64(t3);
17679 gen_move_low32(cpu_LO[acc], t2);
17680 gen_move_high32(cpu_HI[acc], t2);
17681 tcg_temp_free_i64(t2);
17687 int acc = extract32(ctx->opcode, 14, 2);
17688 TCGv_i32 t2 = tcg_temp_new_i32();
17689 TCGv_i32 t3 = tcg_temp_new_i32();
17691 gen_load_gpr(t0, rs);
17692 gen_load_gpr(t1, rt);
17693 tcg_gen_trunc_tl_i32(t2, t0);
17694 tcg_gen_trunc_tl_i32(t3, t1);
17695 tcg_gen_mulu2_i32(t2, t3, t2, t3);
17696 tcg_gen_ext_i32_tl(cpu_LO[acc], t2);
17697 tcg_gen_ext_i32_tl(cpu_HI[acc], t3);
17698 tcg_temp_free_i32(t2);
17699 tcg_temp_free_i32(t3);
17704 tcg_gen_movi_tl(t0, rd >> 3);
17705 gen_helper_extr_r_w(t0, t0, v1_t, cpu_env);
17706 gen_store_gpr(t0, ret);
17709 generate_exception_end(ctx, EXCP_RI);
17713 case NM_POOL32AXF_2_16_23:
17714 switch (extract32(ctx->opcode, 9, 3)) {
17715 case NM_DPAU_H_QBL:
17716 case NM_DPAQX_S_W_PH:
17717 case NM_DPSU_H_QBL:
17718 case NM_DPSQX_S_W_PH:
17719 case NM_MULSA_W_PH:
17720 gen_pool32axf_2_multiply(ctx, opc, v0_t, v1_t, rd);
17724 tcg_gen_movi_tl(t0, rd >> 3);
17725 gen_helper_extp(t0, t0, v1_t, cpu_env);
17726 gen_store_gpr(t0, ret);
17731 int acc = extract32(ctx->opcode, 14, 2);
17732 TCGv_i64 t2 = tcg_temp_new_i64();
17733 TCGv_i64 t3 = tcg_temp_new_i64();
17735 gen_load_gpr(t0, rs);
17736 gen_load_gpr(t1, rt);
17737 tcg_gen_ext_tl_i64(t2, t0);
17738 tcg_gen_ext_tl_i64(t3, t1);
17739 tcg_gen_mul_i64(t2, t2, t3);
17740 tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
17741 tcg_gen_sub_i64(t2, t3, t2);
17742 tcg_temp_free_i64(t3);
17743 gen_move_low32(cpu_LO[acc], t2);
17744 gen_move_high32(cpu_HI[acc], t2);
17745 tcg_temp_free_i64(t2);
17748 case NM_EXTRV_RS_W:
17750 tcg_gen_movi_tl(t0, rd >> 3);
17751 gen_helper_extr_rs_w(t0, t0, v1_t, cpu_env);
17752 gen_store_gpr(t0, ret);
17756 case NM_POOL32AXF_2_24_31:
17757 switch (extract32(ctx->opcode, 9, 3)) {
17758 case NM_DPAU_H_QBR:
17759 case NM_DPAQX_SA_W_PH:
17760 case NM_DPSU_H_QBR:
17761 case NM_DPSQX_SA_W_PH:
17762 case NM_MULSAQ_S_W_PH:
17763 gen_pool32axf_2_multiply(ctx, opc, v0_t, v1_t, rd);
17767 tcg_gen_movi_tl(t0, rd >> 3);
17768 gen_helper_extpdp(t0, t0, v1_t, cpu_env);
17769 gen_store_gpr(t0, ret);
17774 int acc = extract32(ctx->opcode, 14, 2);
17775 TCGv_i64 t2 = tcg_temp_new_i64();
17776 TCGv_i64 t3 = tcg_temp_new_i64();
17778 gen_load_gpr(t0, rs);
17779 gen_load_gpr(t1, rt);
17780 tcg_gen_ext32u_tl(t0, t0);
17781 tcg_gen_ext32u_tl(t1, t1);
17782 tcg_gen_extu_tl_i64(t2, t0);
17783 tcg_gen_extu_tl_i64(t3, t1);
17784 tcg_gen_mul_i64(t2, t2, t3);
17785 tcg_gen_concat_tl_i64(t3, cpu_LO[acc], cpu_HI[acc]);
17786 tcg_gen_sub_i64(t2, t3, t2);
17787 tcg_temp_free_i64(t3);
17788 gen_move_low32(cpu_LO[acc], t2);
17789 gen_move_high32(cpu_HI[acc], t2);
17790 tcg_temp_free_i64(t2);
17795 tcg_gen_movi_tl(t0, rd >> 3);
17796 gen_helper_extr_s_h(t0, t0, v0_t, cpu_env);
17797 gen_store_gpr(t0, ret);
17802 generate_exception_end(ctx, EXCP_RI);
17809 tcg_temp_free(v0_t);
17810 tcg_temp_free(v1_t);
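/* POOL32Axf_4: single-source DSP operations (absolute value, precision
 * expansion, halfword/byte replication, BITREV, INSV, RADDU.W.QB) plus
 * BITSWAP/CLO/CLZ/WSBH, which reuse the generic MIPS helpers. */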
17813 static void gen_pool32axf_4_nanomips_insn(DisasContext *ctx, uint32_t opc,
17817 TCGv t0 = tcg_temp_new();
17818 TCGv v0_t = tcg_temp_new();
17820 gen_load_gpr(v0_t, rs);
17825 gen_helper_absq_s_qb(v0_t, v0_t, cpu_env);
17826 gen_store_gpr(v0_t, ret);
17830 gen_helper_absq_s_ph(v0_t, v0_t, cpu_env);
17831 gen_store_gpr(v0_t, ret);
17835 gen_helper_absq_s_w(v0_t, v0_t, cpu_env);
17836 gen_store_gpr(v0_t, ret);
17838 case NM_PRECEQ_W_PHL:
17840 tcg_gen_andi_tl(v0_t, v0_t, 0xFFFF0000);
17841 tcg_gen_ext32s_tl(v0_t, v0_t);
17842 gen_store_gpr(v0_t, ret);
17844 case NM_PRECEQ_W_PHR:
17846 tcg_gen_andi_tl(v0_t, v0_t, 0x0000FFFF);
17847 tcg_gen_shli_tl(v0_t, v0_t, 16);
17848 tcg_gen_ext32s_tl(v0_t, v0_t);
17849 gen_store_gpr(v0_t, ret);
17851 case NM_PRECEQU_PH_QBL:
17853 gen_helper_precequ_ph_qbl(v0_t, v0_t);
17854 gen_store_gpr(v0_t, ret);
17856 case NM_PRECEQU_PH_QBR:
17858 gen_helper_precequ_ph_qbr(v0_t, v0_t);
17859 gen_store_gpr(v0_t, ret);
17861 case NM_PRECEQU_PH_QBLA:
17863 gen_helper_precequ_ph_qbla(v0_t, v0_t);
17864 gen_store_gpr(v0_t, ret);
17866 case NM_PRECEQU_PH_QBRA:
17868 gen_helper_precequ_ph_qbra(v0_t, v0_t);
17869 gen_store_gpr(v0_t, ret);
17871 case NM_PRECEU_PH_QBL:
17873 gen_helper_preceu_ph_qbl(v0_t, v0_t);
17874 gen_store_gpr(v0_t, ret);
17876 case NM_PRECEU_PH_QBR:
17878 gen_helper_preceu_ph_qbr(v0_t, v0_t);
17879 gen_store_gpr(v0_t, ret);
17881 case NM_PRECEU_PH_QBLA:
17883 gen_helper_preceu_ph_qbla(v0_t, v0_t);
17884 gen_store_gpr(v0_t, ret);
17886 case NM_PRECEU_PH_QBRA:
17888 gen_helper_preceu_ph_qbra(v0_t, v0_t);
17889 gen_store_gpr(v0_t, ret);
17893 tcg_gen_ext16u_tl(v0_t, v0_t);
17894 tcg_gen_shli_tl(t0, v0_t, 16);
17895 tcg_gen_or_tl(v0_t, v0_t, t0);
17896 tcg_gen_ext32s_tl(v0_t, v0_t);
17897 gen_store_gpr(v0_t, ret);
17901 tcg_gen_ext8u_tl(v0_t, v0_t);
17902 tcg_gen_shli_tl(t0, v0_t, 8);
17903 tcg_gen_or_tl(v0_t, v0_t, t0);
17904 tcg_gen_shli_tl(t0, v0_t, 16);
17905 tcg_gen_or_tl(v0_t, v0_t, t0);
17906 tcg_gen_ext32s_tl(v0_t, v0_t);
17907 gen_store_gpr(v0_t, ret);
17911 gen_helper_bitrev(v0_t, v0_t);
17912 gen_store_gpr(v0_t, ret);
17917 TCGv tv0 = tcg_temp_new();
17919 gen_load_gpr(tv0, rt);
17920 gen_helper_insv(v0_t, cpu_env, v0_t, tv0);
17921 gen_store_gpr(v0_t, ret);
17922 tcg_temp_free(tv0);
17925 case NM_RADDU_W_QB:
17927 gen_helper_raddu_w_qb(v0_t, v0_t);
17928 gen_store_gpr(v0_t, ret);
17931 gen_bitswap(ctx, OPC_BITSWAP, ret, rs);
17935 gen_cl(ctx, OPC_CLO, ret, rs);
17939 gen_cl(ctx, OPC_CLZ, ret, rs);
17942 gen_bshfl(ctx, OPC_WSBH, ret, rs);
17945 generate_exception_end(ctx, EXCP_RI);
17949 tcg_temp_free(v0_t);
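/* POOL32Axf_7: immediate-shift DSP operations (quad-byte and paired-halfword
 * shifts with the shift amount taken from the rd field) and replication of
 * an 8-bit immediate into all four byte lanes of the destination. */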
17953 static void gen_pool32axf_7_nanomips_insn(DisasContext *ctx, uint32_t opc,
17954 int rt, int rs, int rd)
17956 TCGv t0 = tcg_temp_new();
17957 TCGv rs_t = tcg_temp_new();
17959 gen_load_gpr(rs_t, rs);
17964 tcg_gen_movi_tl(t0, rd >> 2);
17965 switch (extract32(ctx->opcode, 12, 1)) {
17968 gen_helper_shra_qb(t0, t0, rs_t);
17969 gen_store_gpr(t0, rt);
17973 gen_helper_shra_r_qb(t0, t0, rs_t);
17974 gen_store_gpr(t0, rt);
17980 tcg_gen_movi_tl(t0, rd >> 1);
17981 gen_helper_shrl_ph(t0, t0, rs_t);
17982 gen_store_gpr(t0, rt);
17988 target_long result;
17989 imm = extract32(ctx->opcode, 13, 8);
17990 result = (uint32_t)imm << 24 |
17991 (uint32_t)imm << 16 |
17992 (uint32_t)imm << 8 |
17994 result = (int32_t)result;
17995 tcg_gen_movi_tl(t0, result);
17996 gen_store_gpr(t0, rt);
18000 generate_exception_end(ctx, EXCP_RI);
18004 tcg_temp_free(rs_t);
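/* Top-level dispatcher for the nanoMIPS POOL32Axf pool: selects one of the
 * sub-pools (1, 2, 4, 5, 7) from opcode bits 6..8. Sub-pool 5 covers the
 * privileged CP0 operations (TLB maintenance, DI/EI, WAIT, ERET/DERET). */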
18008 static void gen_pool32axf_nanomips_insn(CPUMIPSState *env, DisasContext *ctx)
18010 int rt = extract32(ctx->opcode, 21, 5);
18011 int rs = extract32(ctx->opcode, 16, 5);
18012 int rd = extract32(ctx->opcode, 11, 5);
18014 switch (extract32(ctx->opcode, 6, 3)) {
18015 case NM_POOL32AXF_1:
18017 int32_t op1 = extract32(ctx->opcode, 9, 3);
18018 gen_pool32axf_1_nanomips_insn(ctx, op1, rt, rs, rd);
18021 case NM_POOL32AXF_2:
18023 int32_t op1 = extract32(ctx->opcode, 12, 2);
18024 gen_pool32axf_2_nanomips_insn(ctx, op1, rt, rs, rd);
18027 case NM_POOL32AXF_4:
18029 int32_t op1 = extract32(ctx->opcode, 9, 7);
18030 gen_pool32axf_4_nanomips_insn(ctx, op1, rt, rs);
18033 case NM_POOL32AXF_5:
18034 switch (extract32(ctx->opcode, 9, 7)) {
18035 #ifndef CONFIG_USER_ONLY
18037 gen_cp0(env, ctx, OPC_TLBP, 0, 0);
18040 gen_cp0(env, ctx, OPC_TLBR, 0, 0);
18043 gen_cp0(env, ctx, OPC_TLBWI, 0, 0);
18046 gen_cp0(env, ctx, OPC_TLBWR, 0, 0);
18049 gen_cp0(env, ctx, OPC_TLBINV, 0, 0);
18052 gen_cp0(env, ctx, OPC_TLBINVF, 0, 0);
18055 check_cp0_enabled(ctx);
18057 TCGv t0 = tcg_temp_new();
18059 save_cpu_state(ctx, 1);
18060 gen_helper_di(t0, cpu_env);
18061 gen_store_gpr(t0, rt);
18062 /* Stop translation as we may have switched the execution mode */
18063 ctx->base.is_jmp = DISAS_STOP;
18068 check_cp0_enabled(ctx);
18070 TCGv t0 = tcg_temp_new();
18072 save_cpu_state(ctx, 1);
18073 gen_helper_ei(t0, cpu_env);
18074 gen_store_gpr(t0, rt);
18075 /* Stop translation as we may have switched the execution mode */
18076 ctx->base.is_jmp = DISAS_STOP;
18081 gen_load_srsgpr(rs, rt);
18084 gen_store_srsgpr(rs, rt);
18087 gen_cp0(env, ctx, OPC_WAIT, 0, 0);
18090 gen_cp0(env, ctx, OPC_DERET, 0, 0);
18093 gen_cp0(env, ctx, OPC_ERET, 0, 0);
18097 generate_exception_end(ctx, EXCP_RI);
18101 case NM_POOL32AXF_7:
18103 int32_t op1 = extract32(ctx->opcode, 9, 3);
18104 gen_pool32axf_7_nanomips_insn(ctx, op1, rt, rs, rd);
18108 generate_exception_end(ctx, EXCP_RI);
18113 /* Immediate Value Compact Branches */
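/* Compare GPR rt against a branch immediate and branch to PC + 4 + offset;
 * rt == 0 with imm == 0 degenerates into an unconditional branch for some
 * of the encodings. */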
18114 static void gen_compute_imm_branch(DisasContext *ctx, uint32_t opc,
18115 int rt, int32_t imm, int32_t offset)
18118 int bcond_compute = 0;
18119 TCGv t0 = tcg_temp_new();
18120 TCGv t1 = tcg_temp_new();
18122 gen_load_gpr(t0, rt);
18123 tcg_gen_movi_tl(t1, imm);
18124 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
18126 /* Load needed operands and calculate btarget */
18129 if (rt == 0 && imm == 0) {
18130 /* Unconditional branch */
18131 } else if (rt == 0 && imm != 0) {
18136 cond = TCG_COND_EQ;
18142 if (imm >= 32 && !(ctx->hflags & MIPS_HFLAG_64)) {
18143 generate_exception_end(ctx, EXCP_RI);
18145 } else if (rt == 0 && opc == NM_BBEQZC) {
18146 /* Unconditional branch */
18147 } else if (rt == 0 && opc == NM_BBNEZC) {
18151 tcg_gen_shri_tl(t0, t0, imm);
18152 tcg_gen_andi_tl(t0, t0, 1);
18153 tcg_gen_movi_tl(t1, 0);
18155 if (opc == NM_BBEQZC) {
18156 cond = TCG_COND_EQ;
18158 cond = TCG_COND_NE;
18163 if (rt == 0 && imm == 0) {
18166 } else if (rt == 0 && imm != 0) {
18167 /* Unconditional branch */
18170 cond = TCG_COND_NE;
18174 if (rt == 0 && imm == 0) {
18175 /* Unconditional branch */
18178 cond = TCG_COND_GE;
18183 cond = TCG_COND_LT;
18186 if (rt == 0 && imm == 0) {
18187 /* Unconditional branch */
18190 cond = TCG_COND_GEU;
18195 cond = TCG_COND_LTU;
18198 MIPS_INVAL("Immediate Value Compact branch");
18199 generate_exception_end(ctx, EXCP_RI);
18203 if (bcond_compute == 0) {
18204 /* Unconditional compact branch */
18205 gen_goto_tb(ctx, 0, ctx->btarget);
18207 /* Conditional compact branch */
18208 TCGLabel *fs = gen_new_label();
18210 tcg_gen_brcond_tl(tcg_invert_cond(cond), t0, t1, fs);
18212 gen_goto_tb(ctx, 1, ctx->btarget);
18215 gen_goto_tb(ctx, 0, ctx->base.pc_next + 4);
18223 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
18224 static void gen_compute_nanomips_pbalrsc_branch(DisasContext *ctx, int rs,
18227 TCGv t0 = tcg_temp_new();
18228 TCGv t1 = tcg_temp_new();
18231 gen_load_gpr(t0, rs);
18235 tcg_gen_movi_tl(cpu_gpr[rt], ctx->base.pc_next + 4);
18238 /* calculate btarget */
18239 tcg_gen_shli_tl(t0, t0, 1);
18240 tcg_gen_movi_tl(t1, ctx->base.pc_next + 4);
18241 gen_op_addr_add(ctx, btarget, t1, t0);
18243 /* unconditional branch to register */
18244 tcg_gen_mov_tl(cpu_PC, btarget);
18245 tcg_gen_lookup_and_goto_ptr();
18251 /* nanoMIPS Branches */
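/* Emit a compact branch or jump: operands and the branch target are
 * computed up front, and because compact branches have no delay slot the
 * branch itself is generated immediately with gen_goto_tb(). */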
18252 static void gen_compute_compact_branch_nm(DisasContext *ctx, uint32_t opc,
18253 int rs, int rt, int32_t offset)
18255 int bcond_compute = 0;
18256 TCGv t0 = tcg_temp_new();
18257 TCGv t1 = tcg_temp_new();
18259 /* Load needed operands and calculate btarget */
18261 /* compact branch */
18264 gen_load_gpr(t0, rs);
18265 gen_load_gpr(t1, rt);
18267 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
18271 if (rs == 0 || rs == rt) {
18272 /* OPC_BLEZALC, OPC_BGEZALC */
18273 /* OPC_BGTZALC, OPC_BLTZALC */
18274 tcg_gen_movi_tl(cpu_gpr[31], ctx->base.pc_next + 4);
18276 gen_load_gpr(t0, rs);
18277 gen_load_gpr(t1, rt);
18279 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
18282 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
18286 /* OPC_BEQZC, OPC_BNEZC */
18287 gen_load_gpr(t0, rs);
18289 ctx->btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
18291 /* OPC_JIC, OPC_JIALC */
18292 TCGv tbase = tcg_temp_new();
18293 TCGv toffset = tcg_temp_new();
18295 gen_load_gpr(tbase, rt);
18296 tcg_gen_movi_tl(toffset, offset);
18297 gen_op_addr_add(ctx, btarget, tbase, toffset);
18298 tcg_temp_free(tbase);
18299 tcg_temp_free(toffset);
18303 MIPS_INVAL("Compact branch/jump");
18304 generate_exception_end(ctx, EXCP_RI);
18308 if (bcond_compute == 0) {
18309 /* Unconditional compact branch */
18312 gen_goto_tb(ctx, 0, ctx->btarget);
18315 MIPS_INVAL("Compact branch/jump");
18316 generate_exception_end(ctx, EXCP_RI);
18320 /* Conditional compact branch */
18321 TCGLabel *fs = gen_new_label();
18325 if (rs == 0 && rt != 0) {
18327 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
18328 } else if (rs != 0 && rt != 0 && rs == rt) {
18330 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
18333 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU), t0, t1, fs);
18337 if (rs == 0 && rt != 0) {
18339 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
18340 } else if (rs != 0 && rt != 0 && rs == rt) {
18342 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
18345 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU), t0, t1, fs);
18349 if (rs == 0 && rt != 0) {
18351 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE), t1, 0, fs);
18352 } else if (rs != 0 && rt != 0 && rs == rt) {
18354 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE), t1, 0, fs);
18357 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE), t0, t1, fs);
18361 if (rs == 0 && rt != 0) {
18363 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT), t1, 0, fs);
18364 } else if (rs != 0 && rt != 0 && rs == rt) {
18366 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT), t1, 0, fs);
18369 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT), t0, t1, fs);
18373 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t0, 0, fs);
18376 MIPS_INVAL("Compact conditional branch/jump");
18377 generate_exception_end(ctx, EXCP_RI);
18381 /* Generate the branch here, as compact branches don't have a delay slot */
18382 gen_goto_tb(ctx, 1, ctx->btarget);
18385 gen_goto_tb(ctx, 0, ctx->base.pc_next + 4);
18394 /* nanoMIPS CP1 Branches */
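/* FPU compact branches: test bit 0 of FPR ft; BC1EQZ branches when the bit
 * is clear, BC1NEZ when it is set. */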
18395 static void gen_compute_branch_cp1_nm(DisasContext *ctx, uint32_t op,
18396 int32_t ft, int32_t offset)
18398 target_ulong btarget;
18399 TCGv_i64 t0 = tcg_temp_new_i64();
18401 gen_load_fpr64(ctx, t0, ft);
18402 tcg_gen_andi_i64(t0, t0, 1);
18404 btarget = addr_add(ctx, ctx->base.pc_next + 4, offset);
18408 tcg_gen_xori_i64(t0, t0, 1);
18409 ctx->hflags |= MIPS_HFLAG_BC;
18412 /* t0 already set */
18413 ctx->hflags |= MIPS_HFLAG_BC;
18416 MIPS_INVAL("cp1 cond branch");
18417 generate_exception_end(ctx, EXCP_RI);
18421 tcg_gen_trunc_i64_tl(bcond, t0);
18423 ctx->btarget = btarget;
18426 tcg_temp_free_i64(t0);
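/* P.LSX / PP.LSXS register-indexed loads and stores: the effective address
 * is GPR[rs] + GPR[rt], with rs pre-scaled by the access size for the
 * PP.LSXS (scaled) forms. */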
18430 static void gen_p_lsx(DisasContext *ctx, int rd, int rs, int rt)
18433 t0 = tcg_temp_new();
18434 t1 = tcg_temp_new();
18436 gen_load_gpr(t0, rs);
18437 gen_load_gpr(t1, rt);
18439 if ((extract32(ctx->opcode, 6, 1)) == 1) {
18440 /* PP.LSXS instructions require shifting */
18441 switch (extract32(ctx->opcode, 7, 4)) {
18446 tcg_gen_shli_tl(t0, t0, 1);
18453 tcg_gen_shli_tl(t0, t0, 2);
18457 tcg_gen_shli_tl(t0, t0, 3);
18461 gen_op_addr_add(ctx, t0, t0, t1);
18463 switch (extract32(ctx->opcode, 7, 4)) {
18465 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
18467 gen_store_gpr(t0, rd);
18471 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
18473 gen_store_gpr(t0, rd);
18477 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
18479 gen_store_gpr(t0, rd);
18482 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
18484 gen_store_gpr(t0, rd);
18488 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx,
18490 gen_store_gpr(t0, rd);
18494 gen_load_gpr(t1, rd);
18495 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx,
18501 gen_load_gpr(t1, rd);
18502 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx,
18508 gen_load_gpr(t1, rd);
18509 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx,
18513 /*case NM_LWC1XS:*/
18515 /*case NM_LDC1XS:*/
18517 /*case NM_SWC1XS:*/
18519 /*case NM_SDC1XS:*/
18520 if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
18521 check_cp1_enabled(ctx);
18522 switch (extract32(ctx->opcode, 7, 4)) {
18524 /*case NM_LWC1XS:*/
18525 gen_flt_ldst(ctx, OPC_LWC1, rd, t0);
18528 /*case NM_LDC1XS:*/
18529 gen_flt_ldst(ctx, OPC_LDC1, rd, t0);
18532 /*case NM_SWC1XS:*/
18533 gen_flt_ldst(ctx, OPC_SWC1, rd, t0);
18536 /*case NM_SDC1XS:*/
18537 gen_flt_ldst(ctx, OPC_SDC1, rd, t0);
18541 generate_exception_err(ctx, EXCP_CpU, 1);
18545 generate_exception_end(ctx, EXCP_RI);
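/* POOL32F: the nanoMIPS FPU pool. Raises a Reserved Instruction exception
 * when no FPU is configured (CP0.Config1.FP clear), otherwise dispatches to
 * the shared gen_farith()/gen_sel_*()/gen_cp1() helpers. */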
18553 static void gen_pool32f_nanomips_insn(DisasContext *ctx)
18557 rt = extract32(ctx->opcode, 21, 5);
18558 rs = extract32(ctx->opcode, 16, 5);
18559 rd = extract32(ctx->opcode, 11, 5);
18561 if (!(ctx->CP0_Config1 & (1 << CP0C1_FP))) {
18562 generate_exception_end(ctx, EXCP_RI);
18565 check_cp1_enabled(ctx);
18566 switch (extract32(ctx->opcode, 0, 3)) {
18568 switch (extract32(ctx->opcode, 3, 7)) {
18570 gen_farith(ctx, OPC_RINT_S, 0, rt, rs, 0);
18573 gen_farith(ctx, OPC_RINT_D, 0, rt, rs, 0);
18576 gen_farith(ctx, OPC_CLASS_S, 0, rt, rs, 0);
18579 gen_farith(ctx, OPC_CLASS_D, 0, rt, rs, 0);
18582 gen_farith(ctx, OPC_ADD_S, rt, rs, rd, 0);
18585 gen_farith(ctx, OPC_ADD_D, rt, rs, rd, 0);
18588 gen_farith(ctx, OPC_SUB_S, rt, rs, rd, 0);
18591 gen_farith(ctx, OPC_SUB_D, rt, rs, rd, 0);
18594 gen_farith(ctx, OPC_MUL_S, rt, rs, rd, 0);
18597 gen_farith(ctx, OPC_MUL_D, rt, rs, rd, 0);
18600 gen_farith(ctx, OPC_DIV_S, rt, rs, rd, 0);
18603 gen_farith(ctx, OPC_DIV_D, rt, rs, rd, 0);
18606 gen_sel_s(ctx, OPC_SELEQZ_S, rd, rt, rs);
18609 gen_sel_d(ctx, OPC_SELEQZ_D, rd, rt, rs);
18612 gen_sel_s(ctx, OPC_SELNEZ_S, rd, rt, rs);
18615 gen_sel_d(ctx, OPC_SELNEZ_D, rd, rt, rs);
18618 gen_sel_s(ctx, OPC_SEL_S, rd, rt, rs);
18621 gen_sel_d(ctx, OPC_SEL_D, rd, rt, rs);
18624 gen_farith(ctx, OPC_MADDF_S, rt, rs, rd, 0);
18627 gen_farith(ctx, OPC_MADDF_D, rt, rs, rd, 0);
18630 gen_farith(ctx, OPC_MSUBF_S, rt, rs, rd, 0);
18633 gen_farith(ctx, OPC_MSUBF_D, rt, rs, rd, 0);
18636 generate_exception_end(ctx, EXCP_RI);
18641 switch (extract32(ctx->opcode, 3, 3)) {
18643 switch (extract32(ctx->opcode, 9, 1)) {
18645 gen_farith(ctx, OPC_MIN_S, rt, rs, rd, 0);
18648 gen_farith(ctx, OPC_MIN_D, rt, rs, rd, 0);
18653 switch (extract32(ctx->opcode, 9, 1)) {
18655 gen_farith(ctx, OPC_MAX_S, rt, rs, rd, 0);
18658 gen_farith(ctx, OPC_MAX_D, rt, rs, rd, 0);
18663 switch (extract32(ctx->opcode, 9, 1)) {
18665 gen_farith(ctx, OPC_MINA_S, rt, rs, rd, 0);
18668 gen_farith(ctx, OPC_MINA_D, rt, rs, rd, 0);
18673 switch (extract32(ctx->opcode, 9, 1)) {
18675 gen_farith(ctx, OPC_MAXA_S, rt, rs, rd, 0);
18678 gen_farith(ctx, OPC_MAXA_D, rt, rs, rd, 0);
18683 switch (extract32(ctx->opcode, 6, 8)) {
18685 gen_cp1(ctx, OPC_CFC1, rt, rs);
18688 gen_cp1(ctx, OPC_CTC1, rt, rs);
18691 gen_cp1(ctx, OPC_MFC1, rt, rs);
18694 gen_cp1(ctx, OPC_MTC1, rt, rs);
18697 gen_cp1(ctx, OPC_MFHC1, rt, rs);
18700 gen_cp1(ctx, OPC_MTHC1, rt, rs);
18703 gen_farith(ctx, OPC_CVT_S_PL, -1, rs, rt, 0);
18706 gen_farith(ctx, OPC_CVT_S_PU, -1, rs, rt, 0);
18709 switch (extract32(ctx->opcode, 6, 9)) {
18711 gen_farith(ctx, OPC_CVT_L_S, -1, rs, rt, 0);
18714 gen_farith(ctx, OPC_CVT_L_D, -1, rs, rt, 0);
18717 gen_farith(ctx, OPC_CVT_W_S, -1, rs, rt, 0);
18720 gen_farith(ctx, OPC_CVT_W_D, -1, rs, rt, 0);
18723 gen_farith(ctx, OPC_RSQRT_S, -1, rs, rt, 0);
18726 gen_farith(ctx, OPC_RSQRT_D, -1, rs, rt, 0);
18729 gen_farith(ctx, OPC_SQRT_S, -1, rs, rt, 0);
18732 gen_farith(ctx, OPC_SQRT_D, -1, rs, rt, 0);
18735 gen_farith(ctx, OPC_RECIP_S, -1, rs, rt, 0);
18738 gen_farith(ctx, OPC_RECIP_D, -1, rs, rt, 0);
18741 gen_farith(ctx, OPC_FLOOR_L_S, -1, rs, rt, 0);
18744 gen_farith(ctx, OPC_FLOOR_L_D, -1, rs, rt, 0);
18747 gen_farith(ctx, OPC_FLOOR_W_S, -1, rs, rt, 0);
18750 gen_farith(ctx, OPC_FLOOR_W_D, -1, rs, rt, 0);
18753 gen_farith(ctx, OPC_CEIL_L_S, -1, rs, rt, 0);
18756 gen_farith(ctx, OPC_CEIL_L_D, -1, rs, rt, 0);
18759 gen_farith(ctx, OPC_CEIL_W_S, -1, rs, rt, 0);
18762 gen_farith(ctx, OPC_CEIL_W_D, -1, rs, rt, 0);
18765 gen_farith(ctx, OPC_TRUNC_L_S, -1, rs, rt, 0);
18768 gen_farith(ctx, OPC_TRUNC_L_D, -1, rs, rt, 0);
18771 gen_farith(ctx, OPC_TRUNC_W_S, -1, rs, rt, 0);
18774 gen_farith(ctx, OPC_TRUNC_W_D, -1, rs, rt, 0);
18777 gen_farith(ctx, OPC_ROUND_L_S, -1, rs, rt, 0);
18780 gen_farith(ctx, OPC_ROUND_L_D, -1, rs, rt, 0);
18783 gen_farith(ctx, OPC_ROUND_W_S, -1, rs, rt, 0);
18786 gen_farith(ctx, OPC_ROUND_W_D, -1, rs, rt, 0);
18789 gen_farith(ctx, OPC_MOV_S, -1, rs, rt, 0);
18792 gen_farith(ctx, OPC_MOV_D, -1, rs, rt, 0);
18795 gen_farith(ctx, OPC_ABS_S, -1, rs, rt, 0);
18798 gen_farith(ctx, OPC_ABS_D, -1, rs, rt, 0);
18801 gen_farith(ctx, OPC_NEG_S, -1, rs, rt, 0);
18804 gen_farith(ctx, OPC_NEG_D, -1, rs, rt, 0);
18807 gen_farith(ctx, OPC_CVT_D_S, -1, rs, rt, 0);
18810 gen_farith(ctx, OPC_CVT_D_W, -1, rs, rt, 0);
18813 gen_farith(ctx, OPC_CVT_D_L, -1, rs, rt, 0);
18816 gen_farith(ctx, OPC_CVT_S_D, -1, rs, rt, 0);
18819 gen_farith(ctx, OPC_CVT_S_W, -1, rs, rt, 0);
18822 gen_farith(ctx, OPC_CVT_S_L, -1, rs, rt, 0);
18825 generate_exception_end(ctx, EXCP_RI);
18834 switch (extract32(ctx->opcode, 3, 3)) {
18835 case NM_CMP_CONDN_S:
18836 gen_r6_cmp_s(ctx, extract32(ctx->opcode, 6, 5), rt, rs, rd);
18838 case NM_CMP_CONDN_D:
18839 gen_r6_cmp_d(ctx, extract32(ctx->opcode, 6, 5), rt, rs, rd);
18842 generate_exception_end(ctx, EXCP_RI);
18847 generate_exception_end(ctx, EXCP_RI);
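/* POOL32A5: three-operand DSP ASE operations (per-element add/subtract,
 * compare, pick, shift and multiply on paired-halfword and quad-byte data),
 * mostly thin wrappers around the existing DSP helpers. */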
18852 static void gen_pool32a5_nanomips_insn(DisasContext *ctx, int opc,
18853 int rd, int rs, int rt)
18856 TCGv t0 = tcg_temp_new();
18857 TCGv v1_t = tcg_temp_new();
18858 TCGv v2_t = tcg_temp_new();
18860 gen_load_gpr(v1_t, rs);
18861 gen_load_gpr(v2_t, rt);
18866 gen_helper_cmp_eq_ph(v1_t, v2_t, cpu_env);
18870 gen_helper_cmp_lt_ph(v1_t, v2_t, cpu_env);
18874 gen_helper_cmp_le_ph(v1_t, v2_t, cpu_env);
18876 case NM_CMPU_EQ_QB:
18878 gen_helper_cmpu_eq_qb(v1_t, v2_t, cpu_env);
18880 case NM_CMPU_LT_QB:
18882 gen_helper_cmpu_lt_qb(v1_t, v2_t, cpu_env);
18884 case NM_CMPU_LE_QB:
18886 gen_helper_cmpu_le_qb(v1_t, v2_t, cpu_env);
18888 case NM_CMPGU_EQ_QB:
18890 gen_helper_cmpgu_eq_qb(v1_t, v1_t, v2_t);
18891 gen_store_gpr(v1_t, ret);
18893 case NM_CMPGU_LT_QB:
18895 gen_helper_cmpgu_lt_qb(v1_t, v1_t, v2_t);
18896 gen_store_gpr(v1_t, ret);
18898 case NM_CMPGU_LE_QB:
18900 gen_helper_cmpgu_le_qb(v1_t, v1_t, v2_t);
18901 gen_store_gpr(v1_t, ret);
18903 case NM_CMPGDU_EQ_QB:
18905 gen_helper_cmpgu_eq_qb(v1_t, v1_t, v2_t);
18906 tcg_gen_deposit_tl(cpu_dspctrl, cpu_dspctrl, v1_t, 24, 4);
18907 gen_store_gpr(v1_t, ret);
18909 case NM_CMPGDU_LT_QB:
18911 gen_helper_cmpgu_lt_qb(v1_t, v1_t, v2_t);
18912 tcg_gen_deposit_tl(cpu_dspctrl, cpu_dspctrl, v1_t, 24, 4);
18913 gen_store_gpr(v1_t, ret);
18915 case NM_CMPGDU_LE_QB:
18917 gen_helper_cmpgu_le_qb(v1_t, v1_t, v2_t);
18918 tcg_gen_deposit_tl(cpu_dspctrl, cpu_dspctrl, v1_t, 24, 4);
18919 gen_store_gpr(v1_t, ret);
18923 gen_helper_packrl_ph(v1_t, v1_t, v2_t);
18924 gen_store_gpr(v1_t, ret);
18928 gen_helper_pick_qb(v1_t, v1_t, v2_t, cpu_env);
18929 gen_store_gpr(v1_t, ret);
18933 gen_helper_pick_ph(v1_t, v1_t, v2_t, cpu_env);
18934 gen_store_gpr(v1_t, ret);
18938 gen_helper_addq_s_w(v1_t, v1_t, v2_t, cpu_env);
18939 gen_store_gpr(v1_t, ret);
18943 gen_helper_subq_s_w(v1_t, v1_t, v2_t, cpu_env);
18944 gen_store_gpr(v1_t, ret);
18948 gen_helper_addsc(v1_t, v1_t, v2_t, cpu_env);
18949 gen_store_gpr(v1_t, ret);
18953 gen_helper_addwc(v1_t, v1_t, v2_t, cpu_env);
18954 gen_store_gpr(v1_t, ret);
18958 switch (extract32(ctx->opcode, 10, 1)) {
18961 gen_helper_addq_ph(v1_t, v1_t, v2_t, cpu_env);
18962 gen_store_gpr(v1_t, ret);
18966 gen_helper_addq_s_ph(v1_t, v1_t, v2_t, cpu_env);
18967 gen_store_gpr(v1_t, ret);
18971 case NM_ADDQH_R_PH:
18973 switch (extract32(ctx->opcode, 10, 1)) {
18976 gen_helper_addqh_ph(v1_t, v1_t, v2_t);
18977 gen_store_gpr(v1_t, ret);
18981 gen_helper_addqh_r_ph(v1_t, v1_t, v2_t);
18982 gen_store_gpr(v1_t, ret);
18988 switch (extract32(ctx->opcode, 10, 1)) {
18991 gen_helper_addqh_w(v1_t, v1_t, v2_t);
18992 gen_store_gpr(v1_t, ret);
18996 gen_helper_addqh_r_w(v1_t, v1_t, v2_t);
18997 gen_store_gpr(v1_t, ret);
19003 switch (extract32(ctx->opcode, 10, 1)) {
19006 gen_helper_addu_qb(v1_t, v1_t, v2_t, cpu_env);
19007 gen_store_gpr(v1_t, ret);
19011 gen_helper_addu_s_qb(v1_t, v1_t, v2_t, cpu_env);
19012 gen_store_gpr(v1_t, ret);
19018 switch (extract32(ctx->opcode, 10, 1)) {
19021 gen_helper_addu_ph(v1_t, v1_t, v2_t, cpu_env);
19022 gen_store_gpr(v1_t, ret);
19026 gen_helper_addu_s_ph(v1_t, v1_t, v2_t, cpu_env);
19027 gen_store_gpr(v1_t, ret);
19031 case NM_ADDUH_R_QB:
19033 switch (extract32(ctx->opcode, 10, 1)) {
19036 gen_helper_adduh_qb(v1_t, v1_t, v2_t);
19037 gen_store_gpr(v1_t, ret);
19041 gen_helper_adduh_r_qb(v1_t, v1_t, v2_t);
19042 gen_store_gpr(v1_t, ret);
19046 case NM_SHRAV_R_PH:
19048 switch (extract32(ctx->opcode, 10, 1)) {
19051 gen_helper_shra_ph(v1_t, v1_t, v2_t);
19052 gen_store_gpr(v1_t, ret);
19056 gen_helper_shra_r_ph(v1_t, v1_t, v2_t);
19057 gen_store_gpr(v1_t, ret);
19061 case NM_SHRAV_R_QB:
19063 switch (extract32(ctx->opcode, 10, 1)) {
19066 gen_helper_shra_qb(v1_t, v1_t, v2_t);
19067 gen_store_gpr(v1_t, ret);
19071 gen_helper_shra_r_qb(v1_t, v1_t, v2_t);
19072 gen_store_gpr(v1_t, ret);
19078 switch (extract32(ctx->opcode, 10, 1)) {
19081 gen_helper_subq_ph(v1_t, v1_t, v2_t, cpu_env);
19082 gen_store_gpr(v1_t, ret);
19086 gen_helper_subq_s_ph(v1_t, v1_t, v2_t, cpu_env);
19087 gen_store_gpr(v1_t, ret);
19091 case NM_SUBQH_R_PH:
19093 switch (extract32(ctx->opcode, 10, 1)) {
19096 gen_helper_subqh_ph(v1_t, v1_t, v2_t);
19097 gen_store_gpr(v1_t, ret);
19101 gen_helper_subqh_r_ph(v1_t, v1_t, v2_t);
19102 gen_store_gpr(v1_t, ret);
19108 switch (extract32(ctx->opcode, 10, 1)) {
19111 gen_helper_subqh_w(v1_t, v1_t, v2_t);
19112 gen_store_gpr(v1_t, ret);
19116 gen_helper_subqh_r_w(v1_t, v1_t, v2_t);
19117 gen_store_gpr(v1_t, ret);
19123 switch (extract32(ctx->opcode, 10, 1)) {
19126 gen_helper_subu_qb(v1_t, v1_t, v2_t, cpu_env);
19127 gen_store_gpr(v1_t, ret);
19131 gen_helper_subu_s_qb(v1_t, v1_t, v2_t, cpu_env);
19132 gen_store_gpr(v1_t, ret);
19138 switch (extract32(ctx->opcode, 10, 1)) {
19141 gen_helper_subu_ph(v1_t, v1_t, v2_t, cpu_env);
19142 gen_store_gpr(v1_t, ret);
19146 gen_helper_subu_s_ph(v1_t, v1_t, v2_t, cpu_env);
19147 gen_store_gpr(v1_t, ret);
19151 case NM_SUBUH_R_QB:
19153 switch (extract32(ctx->opcode, 10, 1)) {
19156 gen_helper_subuh_qb(v1_t, v1_t, v2_t);
19157 gen_store_gpr(v1_t, ret);
19161 gen_helper_subuh_r_qb(v1_t, v1_t, v2_t);
19162 gen_store_gpr(v1_t, ret);
19166 case NM_SHLLV_S_PH:
19168 switch (extract32(ctx->opcode, 10, 1)) {
19171 gen_helper_shll_ph(v1_t, v1_t, v2_t, cpu_env);
19172 gen_store_gpr(v1_t, ret);
19176 gen_helper_shll_s_ph(v1_t, v1_t, v2_t, cpu_env);
19177 gen_store_gpr(v1_t, ret);
19181 case NM_PRECR_SRA_R_PH_W:
19183 switch (extract32(ctx->opcode, 10, 1)) {
19185 /* PRECR_SRA_PH_W */
19187 TCGv_i32 sa_t = tcg_const_i32(rd);
19188 gen_helper_precr_sra_ph_w(v1_t, sa_t, v1_t,
19190 gen_store_gpr(v1_t, rt);
19191 tcg_temp_free_i32(sa_t);
19195 /* PRECR_SRA_R_PH_W */
19197 TCGv_i32 sa_t = tcg_const_i32(rd);
19198 gen_helper_precr_sra_r_ph_w(v1_t, sa_t, v1_t,
19200 gen_store_gpr(v1_t, rt);
19201 tcg_temp_free_i32(sa_t);
19206 case NM_MULEU_S_PH_QBL:
19208 gen_helper_muleu_s_ph_qbl(v1_t, v1_t, v2_t, cpu_env);
19209 gen_store_gpr(v1_t, ret);
19211 case NM_MULEU_S_PH_QBR:
19213 gen_helper_muleu_s_ph_qbr(v1_t, v1_t, v2_t, cpu_env);
19214 gen_store_gpr(v1_t, ret);
19216 case NM_MULQ_RS_PH:
19218 gen_helper_mulq_rs_ph(v1_t, v1_t, v2_t, cpu_env);
19219 gen_store_gpr(v1_t, ret);
19223 gen_helper_mulq_s_ph(v1_t, v1_t, v2_t, cpu_env);
19224 gen_store_gpr(v1_t, ret);
19228 gen_helper_mulq_rs_w(v1_t, v1_t, v2_t, cpu_env);
19229 gen_store_gpr(v1_t, ret);
19233 gen_helper_mulq_s_w(v1_t, v1_t, v2_t, cpu_env);
19234 gen_store_gpr(v1_t, ret);
19238 gen_load_gpr(t0, rs);
19240 tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], rd, 32 - rd);
19242 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
19246 gen_helper_modsub(v1_t, v1_t, v2_t);
19247 gen_store_gpr(v1_t, ret);
19251 gen_helper_shra_r_w(v1_t, v1_t, v2_t);
19252 gen_store_gpr(v1_t, ret);
19256 gen_helper_shrl_ph(v1_t, v1_t, v2_t);
19257 gen_store_gpr(v1_t, ret);
19261 gen_helper_shrl_qb(v1_t, v1_t, v2_t);
19262 gen_store_gpr(v1_t, ret);
19266 gen_helper_shll_qb(v1_t, v1_t, v2_t, cpu_env);
19267 gen_store_gpr(v1_t, ret);
19271 gen_helper_shll_s_w(v1_t, v1_t, v2_t, cpu_env);
19272 gen_store_gpr(v1_t, ret);
19277 TCGv tv0 = tcg_temp_new();
19278 TCGv tv1 = tcg_temp_new();
19279 int16_t imm = extract32(ctx->opcode, 16, 7);
19281 tcg_gen_movi_tl(tv0, rd >> 3);
19282 tcg_gen_movi_tl(tv1, imm);
19283 gen_helper_shilo(tv0, tv1, cpu_env);
19286 case NM_MULEQ_S_W_PHL:
19288 gen_helper_muleq_s_w_phl(v1_t, v1_t, v2_t, cpu_env);
19289 gen_store_gpr(v1_t, ret);
19291 case NM_MULEQ_S_W_PHR:
19293 gen_helper_muleq_s_w_phr(v1_t, v1_t, v2_t, cpu_env);
19294 gen_store_gpr(v1_t, ret);
19298 switch (extract32(ctx->opcode, 10, 1)) {
19301 gen_helper_mul_ph(v1_t, v1_t, v2_t, cpu_env);
19302 gen_store_gpr(v1_t, ret);
19306 gen_helper_mul_s_ph(v1_t, v1_t, v2_t, cpu_env);
19307 gen_store_gpr(v1_t, ret);
19311 case NM_PRECR_QB_PH:
19313 gen_helper_precr_qb_ph(v1_t, v1_t, v2_t);
19314 gen_store_gpr(v1_t, ret);
19316 case NM_PRECRQ_QB_PH:
19318 gen_helper_precrq_qb_ph(v1_t, v1_t, v2_t);
19319 gen_store_gpr(v1_t, ret);
19321 case NM_PRECRQ_PH_W:
19323 gen_helper_precrq_ph_w(v1_t, v1_t, v2_t);
19324 gen_store_gpr(v1_t, ret);
19326 case NM_PRECRQ_RS_PH_W:
19328 gen_helper_precrq_rs_ph_w(v1_t, v1_t, v2_t, cpu_env);
19329 gen_store_gpr(v1_t, ret);
19331 case NM_PRECRQU_S_QB_PH:
19333 gen_helper_precrqu_s_qb_ph(v1_t, v1_t, v2_t, cpu_env);
19334 gen_store_gpr(v1_t, ret);
19338 tcg_gen_movi_tl(t0, rd);
19339 gen_helper_shra_r_w(v1_t, t0, v1_t);
19340 gen_store_gpr(v1_t, rt);
19344 tcg_gen_movi_tl(t0, rd >> 1);
19345 switch (extract32(ctx->opcode, 10, 1)) {
19348 gen_helper_shra_ph(v1_t, t0, v1_t);
19350 gen_store_gpr(v1_t, rt);
19353 gen_helper_shra_r_ph(v1_t, t0, v1_t);
19354 gen_store_gpr(v1_t, rt);
19360 tcg_gen_movi_tl(t0, rd >> 1);
19361 switch (extract32(ctx->opcode, 10, 2)) {
19364 gen_helper_shll_ph(v1_t, t0, v1_t, cpu_env);
19365 gen_store_gpr(v1_t, rt);
19369 gen_helper_shll_s_ph(v1_t, t0, v1_t, cpu_env);
19370 gen_store_gpr(v1_t, rt);
19373 generate_exception_end(ctx, EXCP_RI);
19379 tcg_gen_movi_tl(t0, rd);
19380 gen_helper_shll_s_w(v1_t, t0, v1_t, cpu_env);
19381 gen_store_gpr(v1_t, rt);
19387 imm = sextract32(ctx->opcode, 11, 11);
19388 imm = (int16_t)(imm << 6) >> 6;
19390 tcg_gen_movi_tl(cpu_gpr[rt], dup_const(MO_16, imm));
19395 generate_exception_end(ctx, EXCP_RI);
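/* Decode 32-bit and 48-bit nanoMIPS instructions. The second halfword is
 * fetched here and merged into ctx->opcode; 48-bit formats fetch a further
 * halfword that is used as their immediate. */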
19400 static int decode_nanomips_32_48_opc(CPUMIPSState *env, DisasContext *ctx)
19408 insn = cpu_lduw_code(env, ctx->base.pc_next + 2);
19409 ctx->opcode = (ctx->opcode << 16) | insn;
19411 rt = extract32(ctx->opcode, 21, 5);
19412 rs = extract32(ctx->opcode, 16, 5);
19413 rd = extract32(ctx->opcode, 11, 5);
19415 op = extract32(ctx->opcode, 26, 6);
19420 switch (extract32(ctx->opcode, 19, 2)) {
19423 generate_exception_end(ctx, EXCP_RI);
19426 if ((extract32(ctx->opcode, 18, 1)) == NM_SYSCALL) {
19427 generate_exception_end(ctx, EXCP_SYSCALL);
19429 generate_exception_end(ctx, EXCP_RI);
19433 generate_exception_end(ctx, EXCP_BREAK);
19436 if (is_uhi(extract32(ctx->opcode, 0, 19))) {
19437 gen_helper_do_semihosting(cpu_env);
19439 if (ctx->hflags & MIPS_HFLAG_SBRI) {
19440 generate_exception_end(ctx, EXCP_RI);
19442 generate_exception_end(ctx, EXCP_DBp);
19449 imm = extract32(ctx->opcode, 0, 16);
19451 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], imm);
19453 tcg_gen_movi_tl(cpu_gpr[rt], imm);
19455 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
19460 offset = sextract32(ctx->opcode, 0, 1) << 21 |
19461 extract32(ctx->opcode, 1, 20) << 1;
19462 target_long addr = addr_add(ctx, ctx->base.pc_next + 4, offset);
19463 tcg_gen_movi_tl(cpu_gpr[rt], addr);
19467 switch (ctx->opcode & 0x07) {
19469 gen_pool32a0_nanomips_insn(env, ctx);
19473 int32_t op1 = extract32(ctx->opcode, 3, 7);
19474 gen_pool32a5_nanomips_insn(ctx, op1, rd, rs, rt);
19478 switch (extract32(ctx->opcode, 3, 3)) {
19480 gen_p_lsx(ctx, rd, rs, rt);
19483 /* In nanoMIPS, the shift field directly encodes the shift
19484 * amount, meaning that the supported shift values are in
19485 * the range 0 to 3 (instead of 1 to 4 in MIPSR6). */
19486 gen_lsa(ctx, OPC_LSA, rd, rs, rt,
19487 extract32(ctx->opcode, 9, 2) - 1);
19490 gen_ext(ctx, 32, rd, rs, rt, extract32(ctx->opcode, 6, 5));
19493 gen_pool32axf_nanomips_insn(env, ctx);
19496 generate_exception_end(ctx, EXCP_RI);
19501 generate_exception_end(ctx, EXCP_RI);
19506 switch (ctx->opcode & 0x03) {
19509 offset = extract32(ctx->opcode, 0, 21);
19510 gen_op_addr_addi(ctx, cpu_gpr[rt], cpu_gpr[28], offset);
19514 gen_ld(ctx, OPC_LW, rt, 28, extract32(ctx->opcode, 2, 19) << 2);
19517 gen_st(ctx, OPC_SW, rt, 28, extract32(ctx->opcode, 2, 19) << 2);
19520 generate_exception_end(ctx, EXCP_RI);
19526 insn = cpu_lduw_code(env, ctx->base.pc_next + 4);
19527 target_long addr_off = extract32(ctx->opcode, 0, 16) | insn << 16;
19528 switch (extract32(ctx->opcode, 16, 5)) {
19532 tcg_gen_movi_tl(cpu_gpr[rt], addr_off);
19538 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rt], addr_off);
19539 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
19545 gen_op_addr_addi(ctx, cpu_gpr[rt], cpu_gpr[28], addr_off);
19551 target_long addr = addr_add(ctx, ctx->base.pc_next + 6,
19554 tcg_gen_movi_tl(cpu_gpr[rt], addr);
19561 t0 = tcg_temp_new();
19563 target_long addr = addr_add(ctx, ctx->base.pc_next + 6,
19566 tcg_gen_movi_tl(t0, addr);
19567 tcg_gen_qemu_ld_tl(cpu_gpr[rt], t0, ctx->mem_idx, MO_TESL);
19575 t0 = tcg_temp_new();
19576 t1 = tcg_temp_new();
19578 target_long addr = addr_add(ctx, ctx->base.pc_next + 6,
19581 tcg_gen_movi_tl(t0, addr);
19582 gen_load_gpr(t1, rt);
19584 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);
19591 generate_exception_end(ctx, EXCP_RI);
19597 switch (extract32(ctx->opcode, 12, 4)) {
19599 gen_logic_imm(ctx, OPC_ORI, rt, rs, extract32(ctx->opcode, 0, 12));
19602 gen_logic_imm(ctx, OPC_XORI, rt, rs, extract32(ctx->opcode, 0, 12));
19605 gen_logic_imm(ctx, OPC_ANDI, rt, rs, extract32(ctx->opcode, 0, 12));
19608 switch (extract32(ctx->opcode, 20, 1)) {
19610 switch (ctx->opcode & 3) {
19612 gen_save(ctx, rt, extract32(ctx->opcode, 16, 4),
19613 extract32(ctx->opcode, 2, 1),
19614 extract32(ctx->opcode, 3, 9) << 3);
19617 case NM_RESTORE_JRC:
19618 gen_restore(ctx, rt, extract32(ctx->opcode, 16, 4),
19619 extract32(ctx->opcode, 2, 1),
19620 extract32(ctx->opcode, 3, 9) << 3);
19621 if ((ctx->opcode & 3) == NM_RESTORE_JRC) {
19622 gen_compute_branch_nm(ctx, OPC_JR, 2, 31, 0, 0);
19626 generate_exception_end(ctx, EXCP_RI);
19631 generate_exception_end(ctx, EXCP_RI);
19636 gen_slt_imm(ctx, OPC_SLTI, rt, rs, extract32(ctx->opcode, 0, 12));
19639 gen_slt_imm(ctx, OPC_SLTIU, rt, rs, extract32(ctx->opcode, 0, 12));
19643 TCGv t0 = tcg_temp_new();
19645 imm = extract32(ctx->opcode, 0, 12);
19646 gen_load_gpr(t0, rs);
19647 tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, imm);
19648 gen_store_gpr(t0, rt);
19654 imm = (int16_t) extract32(ctx->opcode, 0, 12);
19655 gen_arith_imm(ctx, OPC_ADDIU, rt, rs, -imm);
19659 int shift = extract32(ctx->opcode, 0, 5);
19660 switch (extract32(ctx->opcode, 5, 4)) {
19662 if (rt == 0 && shift == 0) {
19664 } else if (rt == 0 && shift == 3) {
19665 /* EHB - treat as NOP */
19666 } else if (rt == 0 && shift == 5) {
19667 /* PAUSE - treat as NOP */
19668 } else if (rt == 0 && shift == 6) {
19670 gen_sync(extract32(ctx->opcode, 16, 5));
19673 gen_shift_imm(ctx, OPC_SLL, rt, rs,
19674 extract32(ctx->opcode, 0, 5));
19678 gen_shift_imm(ctx, OPC_SRL, rt, rs,
19679 extract32(ctx->opcode, 0, 5));
19682 gen_shift_imm(ctx, OPC_SRA, rt, rs,
19683 extract32(ctx->opcode, 0, 5));
19686 gen_shift_imm(ctx, OPC_ROTR, rt, rs,
19687 extract32(ctx->opcode, 0, 5));
19695 TCGv t0 = tcg_temp_new();
19696 TCGv_i32 shift = tcg_const_i32(extract32(ctx->opcode, 0, 5));
19697 TCGv_i32 shiftx = tcg_const_i32(extract32(ctx->opcode, 7, 4)
19699 TCGv_i32 stripe = tcg_const_i32(extract32(ctx->opcode, 6, 1));
19701 gen_load_gpr(t0, rs);
19702 gen_helper_rotx(cpu_gpr[rt], t0, shift, shiftx, stripe);
19705 tcg_temp_free_i32(shift);
19706 tcg_temp_free_i32(shiftx);
19707 tcg_temp_free_i32(stripe);
19711 switch (((ctx->opcode >> 10) & 2) |
19712 (extract32(ctx->opcode, 5, 1))) {
19715 gen_bitops(ctx, OPC_INS, rt, rs, extract32(ctx->opcode, 0, 5),
19716 extract32(ctx->opcode, 6, 5));
19719 generate_exception_end(ctx, EXCP_RI);
19724 switch (((ctx->opcode >> 10) & 2) |
19725 (extract32(ctx->opcode, 5, 1))) {
19728 gen_bitops(ctx, OPC_EXT, rt, rs, extract32(ctx->opcode, 0, 5),
19729 extract32(ctx->opcode, 6, 5));
19732 generate_exception_end(ctx, EXCP_RI);
19737 generate_exception_end(ctx, EXCP_RI);
19742 gen_pool32f_nanomips_insn(ctx);
19747 switch (extract32(ctx->opcode, 1, 1)) {
19750 tcg_gen_movi_tl(cpu_gpr[rt],
19751 sextract32(ctx->opcode, 0, 1) << 31 |
19752 extract32(ctx->opcode, 2, 10) << 21 |
19753 extract32(ctx->opcode, 12, 9) << 12);
19758 offset = sextract32(ctx->opcode, 0, 1) << 31 |
19759 extract32(ctx->opcode, 2, 10) << 21 |
19760 extract32(ctx->opcode, 12, 9) << 12;
19762 addr = ~0xFFF & addr_add(ctx, ctx->base.pc_next + 4, offset);
19763 tcg_gen_movi_tl(cpu_gpr[rt], addr);
19770 uint32_t u = extract32(ctx->opcode, 0, 18);
19772 switch (extract32(ctx->opcode, 18, 3)) {
19774 gen_ld(ctx, OPC_LB, rt, 28, u);
19777 gen_st(ctx, OPC_SB, rt, 28, u);
19780 gen_ld(ctx, OPC_LBU, rt, 28, u);
19784 gen_op_addr_addi(ctx, cpu_gpr[rt], cpu_gpr[28], u);
19789 switch (ctx->opcode & 1) {
19791 gen_ld(ctx, OPC_LH, rt, 28, u);
19794 gen_ld(ctx, OPC_LHU, rt, 28, u);
19800 switch (ctx->opcode & 1) {
19802 gen_st(ctx, OPC_SH, rt, 28, u);
19805 generate_exception_end(ctx, EXCP_RI);
19811 switch (ctx->opcode & 0x3) {
19813 gen_cop1_ldst(ctx, OPC_LWC1, rt, 28, u);
19816 gen_cop1_ldst(ctx, OPC_LDC1, rt, 28, u);
19819 gen_cop1_ldst(ctx, OPC_SWC1, rt, 28, u);
19822 gen_cop1_ldst(ctx, OPC_SDC1, rt, 28, u);
19827 generate_exception_end(ctx, EXCP_RI);
19834 uint32_t u = extract32(ctx->opcode, 0, 12);
19836 switch (extract32(ctx->opcode, 12, 4)) {
19840 /* Break the TB to be able to sync copied instructions immediately */
19842 ctx->base.is_jmp = DISAS_STOP;
19845 /* Treat as NOP. */
19849 gen_ld(ctx, OPC_LB, rt, rs, u);
19852 gen_ld(ctx, OPC_LH, rt, rs, u);
19855 gen_ld(ctx, OPC_LW, rt, rs, u);
19858 gen_ld(ctx, OPC_LBU, rt, rs, u);
19861 gen_ld(ctx, OPC_LHU, rt, rs, u);
19864 gen_st(ctx, OPC_SB, rt, rs, u);
19867 gen_st(ctx, OPC_SH, rt, rs, u);
19870 gen_st(ctx, OPC_SW, rt, rs, u);
19873 gen_cop1_ldst(ctx, OPC_LWC1, rt, rs, u);
19876 gen_cop1_ldst(ctx, OPC_LDC1, rt, rs, u);
19879 gen_cop1_ldst(ctx, OPC_SWC1, rt, rs, u);
19882 gen_cop1_ldst(ctx, OPC_SDC1, rt, rs, u);
19885 generate_exception_end(ctx, EXCP_RI);
19892 int32_t s = (sextract32(ctx->opcode, 15, 1) << 8) |
19893 extract32(ctx->opcode, 0, 8);
19895 switch (extract32(ctx->opcode, 8, 3)) {
19897 switch (extract32(ctx->opcode, 11, 4)) {
19899 gen_ld(ctx, OPC_LB, rt, rs, s);
19902 gen_ld(ctx, OPC_LH, rt, rs, s);
19905 gen_ld(ctx, OPC_LW, rt, rs, s);
19908 gen_ld(ctx, OPC_LBU, rt, rs, s);
19911 gen_ld(ctx, OPC_LHU, rt, rs, s);
19914 gen_st(ctx, OPC_SB, rt, rs, s);
19917 gen_st(ctx, OPC_SH, rt, rs, s);
19920 gen_st(ctx, OPC_SW, rt, rs, s);
19923 gen_cop1_ldst(ctx, OPC_LWC1, rt, rs, s);
19926 gen_cop1_ldst(ctx, OPC_LDC1, rt, rs, s);
19929 gen_cop1_ldst(ctx, OPC_SWC1, rt, rs, s);
19932 gen_cop1_ldst(ctx, OPC_SDC1, rt, rs, s);
19937 /* Break the TB to be able to sync copied instructions immediately */
19939 ctx->base.is_jmp = DISAS_STOP;
19942 /* Treat as NOP. */
19946 generate_exception_end(ctx, EXCP_RI);
19951 switch (extract32(ctx->opcode, 11, 4)) {
19956 TCGv t0 = tcg_temp_new();
19957 TCGv t1 = tcg_temp_new();
19959 gen_base_offset_addr(ctx, t0, rs, s);
19961 switch (extract32(ctx->opcode, 11, 4)) {
19963 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW |
19965 gen_store_gpr(t0, rt);
19968 gen_load_gpr(t1, rt);
19969 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUW |
19978 switch (ctx->opcode & 0x03) {
19980 gen_ld(ctx, OPC_LL, rt, rs, s);
19984 gen_llwp(ctx, rs, 0, rt, extract32(ctx->opcode, 3, 5));
19989 switch (ctx->opcode & 0x03) {
19991 gen_st_cond(ctx, OPC_SC, rt, rs, s);
19995 gen_scwp(ctx, rs, 0, rt, extract32(ctx->opcode, 3, 5));
20000 check_cp0_enabled(ctx);
20001 if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
20002 gen_cache_operation(ctx, rt, rs, s);
20011 int count = extract32(ctx->opcode, 12, 3);
20014 offset = sextract32(ctx->opcode, 15, 1) << 8 |
20015 extract32(ctx->opcode, 0, 8);
20016 TCGv va = tcg_temp_new();
20017 TCGv t1 = tcg_temp_new();
20018 TCGMemOp memop = (extract32(ctx->opcode, 8, 3)) ==
20019 NM_P_LS_UAWM ? MO_UNALN : 0;
20021 count = (count == 0) ? 8 : count;
20022 while (counter != count) {
20023 int this_rt = ((rt + counter) & 0x1f) | (rt & 0x10);
20024 int this_offset = offset + (counter << 2);
20026 gen_base_offset_addr(ctx, va, rs, this_offset);
20028 switch (extract32(ctx->opcode, 11, 1)) {
20030 tcg_gen_qemu_ld_tl(t1, va, ctx->mem_idx,
20032 gen_store_gpr(t1, this_rt);
20033 if ((this_rt == rs) &&
20034 (counter != (count - 1))) {
20035 /* UNPREDICTABLE */
20039 this_rt = (rt == 0) ? 0 : this_rt;
20040 gen_load_gpr(t1, this_rt);
20041 tcg_gen_qemu_st_tl(t1, va, ctx->mem_idx,
20052 generate_exception_end(ctx, EXCP_RI);
20060 TCGv t0 = tcg_temp_new();
20061 int32_t s = sextract32(ctx->opcode, 0, 1) << 21 |
20062 extract32(ctx->opcode, 1, 20) << 1;
20063 rd = (extract32(ctx->opcode, 24, 1)) == 0 ? 4 : 5;
20064 rt = decode_gpr_gpr4_zero(extract32(ctx->opcode, 25, 1) << 3 |
20065 extract32(ctx->opcode, 21, 3));
20066 gen_load_gpr(t0, rt);
20067 tcg_gen_mov_tl(cpu_gpr[rd], t0);
20068 gen_compute_branch_nm(ctx, OPC_BGEZAL, 4, 0, 0, s);
20074 int32_t s = sextract32(ctx->opcode, 0, 1) << 25 |
20075 extract32(ctx->opcode, 1, 24) << 1;
20077 if ((extract32(ctx->opcode, 25, 1)) == 0) {
20079 gen_compute_branch_nm(ctx, OPC_BEQ, 4, 0, 0, s);
20082 gen_compute_branch_nm(ctx, OPC_BGEZAL, 4, 0, 0, s);
20087 switch (extract32(ctx->opcode, 12, 4)) {
20090 gen_compute_branch_nm(ctx, OPC_JALR, 4, rs, rt, 0);
20093 gen_compute_nanomips_pbalrsc_branch(ctx, rs, rt);
20096 generate_exception_end(ctx, EXCP_RI);
20102 int32_t s = sextract32(ctx->opcode, 0, 1) << 14 |
20103 extract32(ctx->opcode, 1, 13) << 1;
20104 switch (extract32(ctx->opcode, 14, 2)) {
20107 gen_compute_branch_nm(ctx, OPC_BEQ, 4, rs, rt, s);
20110 s = sextract32(ctx->opcode, 0, 1) << 14 |
20111 extract32(ctx->opcode, 1, 13) << 1;
20112 check_cp1_enabled(ctx);
20113 switch (extract32(ctx->opcode, 16, 5)) {
20115 gen_compute_branch_cp1_nm(ctx, OPC_BC1EQZ, rt, s);
20118 gen_compute_branch_cp1_nm(ctx, OPC_BC1NEZ, rt, s);
20123 int32_t imm = extract32(ctx->opcode, 1, 13) |
20124 extract32(ctx->opcode, 0, 1) << 13;
20126 gen_compute_branch_nm(ctx, OPC_BPOSGE32, 4, -1, -2,
20131 generate_exception_end(ctx, EXCP_RI);
20137 gen_compute_compact_branch_nm(ctx, OPC_BC, rs, rt, s);
20139 gen_compute_compact_branch_nm(ctx, OPC_BGEC, rs, rt, s);
20143 if (rs == rt || rt == 0) {
20144 gen_compute_compact_branch_nm(ctx, OPC_BC, 0, 0, s);
20145 } else if (rs == 0) {
20146 gen_compute_compact_branch_nm(ctx, OPC_BEQZC, rt, 0, s);
20148 gen_compute_compact_branch_nm(ctx, OPC_BGEUC, rs, rt, s);
20156 int32_t s = sextract32(ctx->opcode, 0, 1) << 14 |
20157 extract32(ctx->opcode, 1, 13) << 1;
20158 switch (extract32(ctx->opcode, 14, 2)) {
20161 gen_compute_branch_nm(ctx, OPC_BNE, 4, rs, rt, s);
20164 if (rs != 0 && rt != 0 && rs == rt) {
20166 ctx->hflags |= MIPS_HFLAG_FBNSLOT;
20168 gen_compute_compact_branch_nm(ctx, OPC_BLTC, rs, rt, s);
20172 if (rs == 0 || rs == rt) {
20174 ctx->hflags |= MIPS_HFLAG_FBNSLOT;
20176 gen_compute_compact_branch_nm(ctx, OPC_BLTUC, rs, rt, s);
20180 generate_exception_end(ctx, EXCP_RI);
20187 int32_t s = sextract32(ctx->opcode, 0, 1) << 11 |
20188 extract32(ctx->opcode, 1, 10) << 1;
20189 uint32_t u = extract32(ctx->opcode, 11, 7);
20191 gen_compute_imm_branch(ctx, extract32(ctx->opcode, 18, 3),
20196 generate_exception_end(ctx, EXCP_RI);
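/* Top-level nanoMIPS decoder: checks halfword alignment, handles the 16-bit
 * formats directly and falls through to decode_nanomips_32_48_opc() for the
 * longer encodings. */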
20202 static int decode_nanomips_opc(CPUMIPSState *env, DisasContext *ctx)
20205 int rt = decode_gpr_gpr3(NANOMIPS_EXTRACT_RD(ctx->opcode));
20206 int rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));
20207 int rd = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS1(ctx->opcode));
20211 /* make sure instructions are on a halfword boundary */
20212 if (ctx->base.pc_next & 0x1) {
20213 TCGv tmp = tcg_const_tl(ctx->base.pc_next);
20214 tcg_gen_st_tl(tmp, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
20215 tcg_temp_free(tmp);
20216 generate_exception_end(ctx, EXCP_AdEL);
20220 op = extract32(ctx->opcode, 10, 6);
20223 rt = NANOMIPS_EXTRACT_RD5(ctx->opcode);
20226 rs = NANOMIPS_EXTRACT_RS5(ctx->opcode);
20227 gen_arith(ctx, OPC_ADDU, rt, rs, 0);
20230 switch (extract32(ctx->opcode, 3, 2)) {
20231 case NM_P16_SYSCALL:
20232 if (extract32(ctx->opcode, 2, 1) == 0) {
20233 generate_exception_end(ctx, EXCP_SYSCALL);
20235 generate_exception_end(ctx, EXCP_RI);
20239 generate_exception_end(ctx, EXCP_BREAK);
20242 if (is_uhi(extract32(ctx->opcode, 0, 3))) {
20243 gen_helper_do_semihosting(cpu_env);
20245 if (ctx->hflags & MIPS_HFLAG_SBRI) {
20246 generate_exception_end(ctx, EXCP_RI);
20248 generate_exception_end(ctx, EXCP_DBp);
20253 generate_exception_end(ctx, EXCP_RI);
20260 int shift = extract32(ctx->opcode, 0, 3);
20262 shift = (shift == 0) ? 8 : shift;
20264 switch (extract32(ctx->opcode, 3, 1)) {
20272 gen_shift_imm(ctx, opc, rt, rs, shift);
20276 switch (ctx->opcode & 1) {
20278 gen_pool16c_nanomips_insn(ctx);
20281 gen_ldxs(ctx, rt, rs, rd);
20286 switch (extract32(ctx->opcode, 6, 1)) {
20288 imm = extract32(ctx->opcode, 0, 6) << 2;
20289 gen_arith_imm(ctx, OPC_ADDIU, rt, 29, imm);
20292 generate_exception_end(ctx, EXCP_RI);
20297 switch (extract32(ctx->opcode, 3, 1)) {
20299 imm = extract32(ctx->opcode, 0, 3) << 2;
20300 gen_arith_imm(ctx, OPC_ADDIU, rt, rs, imm);
20302 case NM_P_ADDIURS5:
20303 rt = extract32(ctx->opcode, 5, 5);
20305 /* imm = sign_extend(s[3] . s[2:0], from_nbits = 4) */
20306 imm = (sextract32(ctx->opcode, 4, 1) << 3) |
20307 (extract32(ctx->opcode, 0, 3));
20308 gen_arith_imm(ctx, OPC_ADDIU, rt, rt, imm);
20314 switch (ctx->opcode & 0x1) {
20316 gen_arith(ctx, OPC_ADDU, rd, rs, rt);
20319 gen_arith(ctx, OPC_SUBU, rd, rs, rt);
20324 rt = (extract32(ctx->opcode, 9, 1) << 3) |
20325 extract32(ctx->opcode, 5, 3);
20326 rs = (extract32(ctx->opcode, 4, 1) << 3) |
20327 extract32(ctx->opcode, 0, 3);
20328 rt = decode_gpr_gpr4(rt);
20329 rs = decode_gpr_gpr4(rs);
20330 switch ((extract32(ctx->opcode, 7, 2) & 0x2) |
20331 (extract32(ctx->opcode, 3, 1))) {
20334 gen_arith(ctx, OPC_ADDU, rt, rs, rt);
20338 gen_r6_muldiv(ctx, R6_OPC_MUL, rt, rs, rt);
20341 generate_exception_end(ctx, EXCP_RI);
20347 int imm = extract32(ctx->opcode, 0, 7);
20348 imm = (imm == 0x7f ? -1 : imm);
20350 tcg_gen_movi_tl(cpu_gpr[rt], imm);
20356 uint32_t u = extract32(ctx->opcode, 0, 4);
20357 u = (u == 12) ? 0xff :
20358 (u == 13) ? 0xffff : u;
20359 gen_logic_imm(ctx, OPC_ANDI, rt, rs, u);
20363 offset = extract32(ctx->opcode, 0, 2);
20364 switch (extract32(ctx->opcode, 2, 2)) {
20366 gen_ld(ctx, OPC_LB, rt, rs, offset);
20369 rt = decode_gpr_gpr3_src_store(
20370 NANOMIPS_EXTRACT_RD(ctx->opcode));
20371 gen_st(ctx, OPC_SB, rt, rs, offset);
20374 gen_ld(ctx, OPC_LBU, rt, rs, offset);
20377 generate_exception_end(ctx, EXCP_RI);
20382 offset = extract32(ctx->opcode, 1, 2) << 1;
20383 switch ((extract32(ctx->opcode, 3, 1) << 1) | (ctx->opcode & 1)) {
20385 gen_ld(ctx, OPC_LH, rt, rs, offset);
20388 rt = decode_gpr_gpr3_src_store(
20389 NANOMIPS_EXTRACT_RD(ctx->opcode));
20390 gen_st(ctx, OPC_SH, rt, rs, offset);
20393 gen_ld(ctx, OPC_LHU, rt, rs, offset);
20396 generate_exception_end(ctx, EXCP_RI);
20401 offset = extract32(ctx->opcode, 0, 4) << 2;
20402 gen_ld(ctx, OPC_LW, rt, rs, offset);
20405 rt = NANOMIPS_EXTRACT_RD5(ctx->opcode);
20406 offset = extract32(ctx->opcode, 0, 5) << 2;
20407 gen_ld(ctx, OPC_LW, rt, 29, offset);
20411 rt = (extract32(ctx->opcode, 9, 1) << 3) |
20412 extract32(ctx->opcode, 5, 3);
20413 rs = (extract32(ctx->opcode, 4, 1) << 3) |
20414 extract32(ctx->opcode, 0, 3);
20415 offset = (extract32(ctx->opcode, 3, 1) << 3) |
20416 (extract32(ctx->opcode, 8, 1) << 2);
20417 rt = decode_gpr_gpr4(rt);
20418 rs = decode_gpr_gpr4(rs);
20419 gen_ld(ctx, OPC_LW, rt, rs, offset);
20423 rt = (extract32(ctx->opcode, 9, 1) << 3) |
20424 extract32(ctx->opcode, 5, 3);
20425 rs = (extract32(ctx->opcode, 4, 1) << 3) |
20426 extract32(ctx->opcode, 0, 3);
20427 offset = (extract32(ctx->opcode, 3, 1) << 3) |
20428 (extract32(ctx->opcode, 8, 1) << 2);
20429 rt = decode_gpr_gpr4_zero(rt);
20430 rs = decode_gpr_gpr4(rs);
20431 gen_st(ctx, OPC_SW, rt, rs, offset);
20434 offset = extract32(ctx->opcode, 0, 7) << 2;
20435 gen_ld(ctx, OPC_LW, rt, 28, offset);
20438 rt = NANOMIPS_EXTRACT_RD5(ctx->opcode);
20439 offset = extract32(ctx->opcode, 0, 5) << 2;
20440 gen_st(ctx, OPC_SW, rt, 29, offset);
20443 rt = decode_gpr_gpr3_src_store(
20444 NANOMIPS_EXTRACT_RD(ctx->opcode));
20445 rs = decode_gpr_gpr3(NANOMIPS_EXTRACT_RS(ctx->opcode));
20446 offset = extract32(ctx->opcode, 0, 4) << 2;
20447 gen_st(ctx, OPC_SW, rt, rs, offset);
20450 rt = decode_gpr_gpr3_src_store(
20451 NANOMIPS_EXTRACT_RD(ctx->opcode));
20452 offset = extract32(ctx->opcode, 0, 7) << 2;
20453 gen_st(ctx, OPC_SW, rt, 28, offset);
20456 gen_compute_branch_nm(ctx, OPC_BEQ, 2, 0, 0,
20457 (sextract32(ctx->opcode, 0, 1) << 10) |
20458 (extract32(ctx->opcode, 1, 9) << 1));
20461 gen_compute_branch_nm(ctx, OPC_BGEZAL, 2, 0, 0,
20462 (sextract32(ctx->opcode, 0, 1) << 10) |
20463 (extract32(ctx->opcode, 1, 9) << 1));
20466 gen_compute_branch_nm(ctx, OPC_BEQ, 2, rt, 0,
20467 (sextract32(ctx->opcode, 0, 1) << 7) |
20468 (extract32(ctx->opcode, 1, 6) << 1));
20471 gen_compute_branch_nm(ctx, OPC_BNE, 2, rt, 0,
20472 (sextract32(ctx->opcode, 0, 1) << 7) |
20473 (extract32(ctx->opcode, 1, 6) << 1));
20476 switch (ctx->opcode & 0xf) {
20479 switch (extract32(ctx->opcode, 4, 1)) {
20481 gen_compute_branch_nm(ctx, OPC_JR, 2,
20482 extract32(ctx->opcode, 5, 5), 0, 0);
20485 gen_compute_branch_nm(ctx, OPC_JALR, 2,
20486 extract32(ctx->opcode, 5, 5), 31, 0);
20493 uint32_t opc = extract32(ctx->opcode, 4, 3) <
20494 extract32(ctx->opcode, 7, 3) ? OPC_BEQ : OPC_BNE;
20495 gen_compute_branch_nm(ctx, opc, 2, rs, rt,
20496 extract32(ctx->opcode, 0, 4) << 1);
20503 int count = extract32(ctx->opcode, 0, 4);
20504 int u = extract32(ctx->opcode, 4, 4) << 4;
20506 rt = 30 + extract32(ctx->opcode, 9, 1);
20507 switch (extract32(ctx->opcode, 8, 1)) {
20509 gen_save(ctx, rt, count, 0, u);
20511 case NM_RESTORE_JRC16:
20512 gen_restore(ctx, rt, count, 0, u);
20513 gen_compute_branch_nm(ctx, OPC_JR, 2, 31, 0, 0);
20522 static const int gpr2reg1[] = {4, 5, 6, 7};
20523 static const int gpr2reg2[] = {5, 6, 7, 8};
20525 int rd2 = extract32(ctx->opcode, 3, 1) << 1 |
20526 extract32(ctx->opcode, 8, 1);
20527 int r1 = gpr2reg1[rd2];
20528 int r2 = gpr2reg2[rd2];
20529 int r3 = extract32(ctx->opcode, 4, 1) << 3 |
20530 extract32(ctx->opcode, 0, 3);
20531 int r4 = extract32(ctx->opcode, 9, 1) << 3 |
20532 extract32(ctx->opcode, 5, 3);
20533 TCGv t0 = tcg_temp_new();
20534 TCGv t1 = tcg_temp_new();
20535 if (op == NM_MOVEP) {
20538 rs = decode_gpr_gpr4_zero(r3);
20539 rt = decode_gpr_gpr4_zero(r4);
20541 rd = decode_gpr_gpr4(r3);
20542 re = decode_gpr_gpr4(r4);
20546 gen_load_gpr(t0, rs);
20547 gen_load_gpr(t1, rt);
20548 tcg_gen_mov_tl(cpu_gpr[rd], t0);
20549 tcg_gen_mov_tl(cpu_gpr[re], t1);
20555 return decode_nanomips_32_48_opc(env, ctx);
20562 /* SmartMIPS extension to MIPS32 */
20564 #if defined(TARGET_MIPS64)
20566 /* MDMX extension to MIPS64 */
20570 /* MIPSDSP functions. */
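/* Register-indexed DSP loads (byte, halfword, word and, on MIPS64,
 * doubleword): the effective address is GPR[base] + GPR[offset], with
 * register 0 reading as zero. */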
20571 static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
20572 int rd, int base, int offset)
20577 t0 = tcg_temp_new();
20580 gen_load_gpr(t0, offset);
20581 } else if (offset == 0) {
20582 gen_load_gpr(t0, base);
20584 gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);
20589 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
20590 gen_store_gpr(t0, rd);
20593 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
20594 gen_store_gpr(t0, rd);
20597 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
20598 gen_store_gpr(t0, rd);
20600 #if defined(TARGET_MIPS64)
20602 tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
20603 gen_store_gpr(t0, rd);
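/* DSP arithmetic dispatch: selects the per-element helper from op1/op2.
 * A write to GPR 0 is treated as a NOP. */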
20610 static void gen_mipsdsp_arith(DisasContext *ctx, uint32_t op1, uint32_t op2,
20611 int ret, int v1, int v2)
20617 /* Treat as NOP. */
20621 v1_t = tcg_temp_new();
20622 v2_t = tcg_temp_new();
20624 gen_load_gpr(v1_t, v1);
20625 gen_load_gpr(v2_t, v2);
20628 /* OPC_MULT_G_2E is equal to OPC_ADDUH_QB_DSP */
20629 case OPC_MULT_G_2E:
20633 gen_helper_adduh_qb(cpu_gpr[ret], v1_t, v2_t);
20635 case OPC_ADDUH_R_QB:
20636 gen_helper_adduh_r_qb(cpu_gpr[ret], v1_t, v2_t);
20639 gen_helper_addqh_ph(cpu_gpr[ret], v1_t, v2_t);
20641 case OPC_ADDQH_R_PH:
20642 gen_helper_addqh_r_ph(cpu_gpr[ret], v1_t, v2_t);
20645 gen_helper_addqh_w(cpu_gpr[ret], v1_t, v2_t);
20647 case OPC_ADDQH_R_W:
20648 gen_helper_addqh_r_w(cpu_gpr[ret], v1_t, v2_t);
20651 gen_helper_subuh_qb(cpu_gpr[ret], v1_t, v2_t);
20653 case OPC_SUBUH_R_QB:
20654 gen_helper_subuh_r_qb(cpu_gpr[ret], v1_t, v2_t);
20657 gen_helper_subqh_ph(cpu_gpr[ret], v1_t, v2_t);
20659 case OPC_SUBQH_R_PH:
20660 gen_helper_subqh_r_ph(cpu_gpr[ret], v1_t, v2_t);
20663 gen_helper_subqh_w(cpu_gpr[ret], v1_t, v2_t);
20665 case OPC_SUBQH_R_W:
20666 gen_helper_subqh_r_w(cpu_gpr[ret], v1_t, v2_t);
20670 case OPC_ABSQ_S_PH_DSP:
20672 case OPC_ABSQ_S_QB:
20674 gen_helper_absq_s_qb(cpu_gpr[ret], v2_t, cpu_env);
20676 case OPC_ABSQ_S_PH:
20678 gen_helper_absq_s_ph(cpu_gpr[ret], v2_t, cpu_env);
20682 gen_helper_absq_s_w(cpu_gpr[ret], v2_t, cpu_env);
20684 case OPC_PRECEQ_W_PHL:
20686 tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0xFFFF0000);
20687 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
20689 case OPC_PRECEQ_W_PHR:
20691 tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0x0000FFFF);
20692 tcg_gen_shli_tl(cpu_gpr[ret], cpu_gpr[ret], 16);
20693 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
20695 case OPC_PRECEQU_PH_QBL:
20697 gen_helper_precequ_ph_qbl(cpu_gpr[ret], v2_t);
20699 case OPC_PRECEQU_PH_QBR:
20701 gen_helper_precequ_ph_qbr(cpu_gpr[ret], v2_t);
20703 case OPC_PRECEQU_PH_QBLA:
20705 gen_helper_precequ_ph_qbla(cpu_gpr[ret], v2_t);
20707 case OPC_PRECEQU_PH_QBRA:
20709 gen_helper_precequ_ph_qbra(cpu_gpr[ret], v2_t);
20711 case OPC_PRECEU_PH_QBL:
20713 gen_helper_preceu_ph_qbl(cpu_gpr[ret], v2_t);
20715 case OPC_PRECEU_PH_QBR:
20717 gen_helper_preceu_ph_qbr(cpu_gpr[ret], v2_t);
20719 case OPC_PRECEU_PH_QBLA:
20721 gen_helper_preceu_ph_qbla(cpu_gpr[ret], v2_t);
20723 case OPC_PRECEU_PH_QBRA:
20725 gen_helper_preceu_ph_qbra(cpu_gpr[ret], v2_t);
20729 case OPC_ADDU_QB_DSP:
20733 gen_helper_addq_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20735 case OPC_ADDQ_S_PH:
20737 gen_helper_addq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20741 gen_helper_addq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20745 gen_helper_addu_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20747 case OPC_ADDU_S_QB:
20749 gen_helper_addu_s_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20753 gen_helper_addu_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20755 case OPC_ADDU_S_PH:
20757 gen_helper_addu_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20761 gen_helper_subq_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20763 case OPC_SUBQ_S_PH:
20765 gen_helper_subq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20769 gen_helper_subq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20773 gen_helper_subu_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20775 case OPC_SUBU_S_QB:
20777 gen_helper_subu_s_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20781 gen_helper_subu_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20783 case OPC_SUBU_S_PH:
20785 gen_helper_subu_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20789 gen_helper_addsc(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20793 gen_helper_addwc(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20797 gen_helper_modsub(cpu_gpr[ret], v1_t, v2_t);
20799 case OPC_RADDU_W_QB:
20801 gen_helper_raddu_w_qb(cpu_gpr[ret], v1_t);
20805 case OPC_CMPU_EQ_QB_DSP:
20807 case OPC_PRECR_QB_PH:
20809 gen_helper_precr_qb_ph(cpu_gpr[ret], v1_t, v2_t);
20811 case OPC_PRECRQ_QB_PH:
20813 gen_helper_precrq_qb_ph(cpu_gpr[ret], v1_t, v2_t);
20815 case OPC_PRECR_SRA_PH_W:
20818 TCGv_i32 sa_t = tcg_const_i32(v2);
20819 gen_helper_precr_sra_ph_w(cpu_gpr[ret], sa_t, v1_t,
20821 tcg_temp_free_i32(sa_t);
20824 case OPC_PRECR_SRA_R_PH_W:
20827 TCGv_i32 sa_t = tcg_const_i32(v2);
20828 gen_helper_precr_sra_r_ph_w(cpu_gpr[ret], sa_t, v1_t,
20830 tcg_temp_free_i32(sa_t);
20833 case OPC_PRECRQ_PH_W:
20835 gen_helper_precrq_ph_w(cpu_gpr[ret], v1_t, v2_t);
20837 case OPC_PRECRQ_RS_PH_W:
20839 gen_helper_precrq_rs_ph_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20841 case OPC_PRECRQU_S_QB_PH:
20843 gen_helper_precrqu_s_qb_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20847 #ifdef TARGET_MIPS64
20848 case OPC_ABSQ_S_QH_DSP:
20850 case OPC_PRECEQ_L_PWL:
20852 tcg_gen_andi_tl(cpu_gpr[ret], v2_t, 0xFFFFFFFF00000000ull);
20854 case OPC_PRECEQ_L_PWR:
20856 tcg_gen_shli_tl(cpu_gpr[ret], v2_t, 32);
20858 case OPC_PRECEQ_PW_QHL:
20860 gen_helper_preceq_pw_qhl(cpu_gpr[ret], v2_t);
20862 case OPC_PRECEQ_PW_QHR:
20864 gen_helper_preceq_pw_qhr(cpu_gpr[ret], v2_t);
20866 case OPC_PRECEQ_PW_QHLA:
20868 gen_helper_preceq_pw_qhla(cpu_gpr[ret], v2_t);
20870 case OPC_PRECEQ_PW_QHRA:
20872 gen_helper_preceq_pw_qhra(cpu_gpr[ret], v2_t);
20874 case OPC_PRECEQU_QH_OBL:
20876 gen_helper_precequ_qh_obl(cpu_gpr[ret], v2_t);
20878 case OPC_PRECEQU_QH_OBR:
20880 gen_helper_precequ_qh_obr(cpu_gpr[ret], v2_t);
20882 case OPC_PRECEQU_QH_OBLA:
20884 gen_helper_precequ_qh_obla(cpu_gpr[ret], v2_t);
20886 case OPC_PRECEQU_QH_OBRA:
20888 gen_helper_precequ_qh_obra(cpu_gpr[ret], v2_t);
20890 case OPC_PRECEU_QH_OBL:
20892 gen_helper_preceu_qh_obl(cpu_gpr[ret], v2_t);
20894 case OPC_PRECEU_QH_OBR:
20896 gen_helper_preceu_qh_obr(cpu_gpr[ret], v2_t);
20898 case OPC_PRECEU_QH_OBLA:
20900 gen_helper_preceu_qh_obla(cpu_gpr[ret], v2_t);
20902 case OPC_PRECEU_QH_OBRA:
20904 gen_helper_preceu_qh_obra(cpu_gpr[ret], v2_t);
20906 case OPC_ABSQ_S_OB:
20908 gen_helper_absq_s_ob(cpu_gpr[ret], v2_t, cpu_env);
20910 case OPC_ABSQ_S_PW:
20912 gen_helper_absq_s_pw(cpu_gpr[ret], v2_t, cpu_env);
20914 case OPC_ABSQ_S_QH:
20916 gen_helper_absq_s_qh(cpu_gpr[ret], v2_t, cpu_env);
20920 case OPC_ADDU_OB_DSP:
20922 case OPC_RADDU_L_OB:
20924 gen_helper_raddu_l_ob(cpu_gpr[ret], v1_t);
20928 gen_helper_subq_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20930 case OPC_SUBQ_S_PW:
20932 gen_helper_subq_s_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20936 gen_helper_subq_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20938 case OPC_SUBQ_S_QH:
20940 gen_helper_subq_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20944 gen_helper_subu_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20946 case OPC_SUBU_S_OB:
20948 gen_helper_subu_s_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20952 gen_helper_subu_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20954 case OPC_SUBU_S_QH:
20956 gen_helper_subu_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20960 gen_helper_subuh_ob(cpu_gpr[ret], v1_t, v2_t);
20962 case OPC_SUBUH_R_OB:
20964 gen_helper_subuh_r_ob(cpu_gpr[ret], v1_t, v2_t);
20968 gen_helper_addq_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20970 case OPC_ADDQ_S_PW:
20972 gen_helper_addq_s_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20976 gen_helper_addq_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20978 case OPC_ADDQ_S_QH:
20980 gen_helper_addq_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20984 gen_helper_addu_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20986 case OPC_ADDU_S_OB:
20988 gen_helper_addu_s_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20992 gen_helper_addu_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
20994 case OPC_ADDU_S_QH:
20996 gen_helper_addu_s_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21000 gen_helper_adduh_ob(cpu_gpr[ret], v1_t, v2_t);
21002 case OPC_ADDUH_R_OB:
21004 gen_helper_adduh_r_ob(cpu_gpr[ret], v1_t, v2_t);
21008 case OPC_CMPU_EQ_OB_DSP:
21010 case OPC_PRECR_OB_QH:
21012 gen_helper_precr_ob_qh(cpu_gpr[ret], v1_t, v2_t);
21014 case OPC_PRECR_SRA_QH_PW:
21017 TCGv_i32 ret_t = tcg_const_i32(ret);
21018 gen_helper_precr_sra_qh_pw(v2_t, v1_t, v2_t, ret_t);
21019 tcg_temp_free_i32(ret_t);
21022 case OPC_PRECR_SRA_R_QH_PW:
21025 TCGv_i32 sa_v = tcg_const_i32(ret);
21026 gen_helper_precr_sra_r_qh_pw(v2_t, v1_t, v2_t, sa_v);
21027 tcg_temp_free_i32(sa_v);
21030 case OPC_PRECRQ_OB_QH:
21032 gen_helper_precrq_ob_qh(cpu_gpr[ret], v1_t, v2_t);
21034 case OPC_PRECRQ_PW_L:
21036 gen_helper_precrq_pw_l(cpu_gpr[ret], v1_t, v2_t);
21038 case OPC_PRECRQ_QH_PW:
21040 gen_helper_precrq_qh_pw(cpu_gpr[ret], v1_t, v2_t);
21042 case OPC_PRECRQ_RS_QH_PW:
21044 gen_helper_precrq_rs_qh_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21046 case OPC_PRECRQU_S_OB_QH:
21048 gen_helper_precrqu_s_ob_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21055 tcg_temp_free(v1_t);
21056 tcg_temp_free(v2_t);
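/*
 * DSP ASE shift instructions (SHLL.QB class; SHLL.OB class on MIPS64).
 * t0 holds the immediate shift count taken from v1, v1_t the register form
 * of the shift count, and v2_t the vector operand; the "V" variants pass
 * v1_t to the helpers instead of t0.  A zero destination is treated as NOP.
 */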
21059 static void gen_mipsdsp_shift(DisasContext *ctx, uint32_t opc,
21060 int ret, int v1, int v2)
21068 /* Treat as NOP. */
21072 t0 = tcg_temp_new();
21073 v1_t = tcg_temp_new();
21074 v2_t = tcg_temp_new();
21076 tcg_gen_movi_tl(t0, v1);
21077 gen_load_gpr(v1_t, v1);
21078 gen_load_gpr(v2_t, v2);
21081 case OPC_SHLL_QB_DSP:
21083 op2 = MASK_SHLL_QB(ctx->opcode);
21087 gen_helper_shll_qb(cpu_gpr[ret], t0, v2_t, cpu_env);
21091 gen_helper_shll_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21095 gen_helper_shll_ph(cpu_gpr[ret], t0, v2_t, cpu_env);
21099 gen_helper_shll_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21101 case OPC_SHLL_S_PH:
21103 gen_helper_shll_s_ph(cpu_gpr[ret], t0, v2_t, cpu_env);
21105 case OPC_SHLLV_S_PH:
21107 gen_helper_shll_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21111 gen_helper_shll_s_w(cpu_gpr[ret], t0, v2_t, cpu_env);
21113 case OPC_SHLLV_S_W:
21115 gen_helper_shll_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21119 gen_helper_shrl_qb(cpu_gpr[ret], t0, v2_t);
21123 gen_helper_shrl_qb(cpu_gpr[ret], v1_t, v2_t);
21127 gen_helper_shrl_ph(cpu_gpr[ret], t0, v2_t);
21131 gen_helper_shrl_ph(cpu_gpr[ret], v1_t, v2_t);
21135 gen_helper_shra_qb(cpu_gpr[ret], t0, v2_t);
21137 case OPC_SHRA_R_QB:
21139 gen_helper_shra_r_qb(cpu_gpr[ret], t0, v2_t);
21143 gen_helper_shra_qb(cpu_gpr[ret], v1_t, v2_t);
21145 case OPC_SHRAV_R_QB:
21147 gen_helper_shra_r_qb(cpu_gpr[ret], v1_t, v2_t);
21151 gen_helper_shra_ph(cpu_gpr[ret], t0, v2_t);
21153 case OPC_SHRA_R_PH:
21155 gen_helper_shra_r_ph(cpu_gpr[ret], t0, v2_t);
21159 gen_helper_shra_ph(cpu_gpr[ret], v1_t, v2_t);
21161 case OPC_SHRAV_R_PH:
21163 gen_helper_shra_r_ph(cpu_gpr[ret], v1_t, v2_t);
21167 gen_helper_shra_r_w(cpu_gpr[ret], t0, v2_t);
21169 case OPC_SHRAV_R_W:
21171 gen_helper_shra_r_w(cpu_gpr[ret], v1_t, v2_t);
21173 default: /* Invalid */
21174 MIPS_INVAL("MASK SHLL.QB");
21175 generate_exception_end(ctx, EXCP_RI);
21180 #ifdef TARGET_MIPS64
21181 case OPC_SHLL_OB_DSP:
21182 op2 = MASK_SHLL_OB(ctx->opcode);
21186 gen_helper_shll_pw(cpu_gpr[ret], v2_t, t0, cpu_env);
21190 gen_helper_shll_pw(cpu_gpr[ret], v2_t, v1_t, cpu_env);
21192 case OPC_SHLL_S_PW:
21194 gen_helper_shll_s_pw(cpu_gpr[ret], v2_t, t0, cpu_env);
21196 case OPC_SHLLV_S_PW:
21198 gen_helper_shll_s_pw(cpu_gpr[ret], v2_t, v1_t, cpu_env);
21202 gen_helper_shll_ob(cpu_gpr[ret], v2_t, t0, cpu_env);
21206 gen_helper_shll_ob(cpu_gpr[ret], v2_t, v1_t, cpu_env);
21210 gen_helper_shll_qh(cpu_gpr[ret], v2_t, t0, cpu_env);
21214 gen_helper_shll_qh(cpu_gpr[ret], v2_t, v1_t, cpu_env);
21216 case OPC_SHLL_S_QH:
21218 gen_helper_shll_s_qh(cpu_gpr[ret], v2_t, t0, cpu_env);
21220 case OPC_SHLLV_S_QH:
21222 gen_helper_shll_s_qh(cpu_gpr[ret], v2_t, v1_t, cpu_env);
21226 gen_helper_shra_ob(cpu_gpr[ret], v2_t, t0);
21230 gen_helper_shra_ob(cpu_gpr[ret], v2_t, v1_t);
21232 case OPC_SHRA_R_OB:
21234 gen_helper_shra_r_ob(cpu_gpr[ret], v2_t, t0);
21236 case OPC_SHRAV_R_OB:
21238 gen_helper_shra_r_ob(cpu_gpr[ret], v2_t, v1_t);
21242 gen_helper_shra_pw(cpu_gpr[ret], v2_t, t0);
21246 gen_helper_shra_pw(cpu_gpr[ret], v2_t, v1_t);
21248 case OPC_SHRA_R_PW:
21250 gen_helper_shra_r_pw(cpu_gpr[ret], v2_t, t0);
21252 case OPC_SHRAV_R_PW:
21254 gen_helper_shra_r_pw(cpu_gpr[ret], v2_t, v1_t);
21258 gen_helper_shra_qh(cpu_gpr[ret], v2_t, t0);
21262 gen_helper_shra_qh(cpu_gpr[ret], v2_t, v1_t);
21264 case OPC_SHRA_R_QH:
21266 gen_helper_shra_r_qh(cpu_gpr[ret], v2_t, t0);
21268 case OPC_SHRAV_R_QH:
21270 gen_helper_shra_r_qh(cpu_gpr[ret], v2_t, v1_t);
21274 gen_helper_shrl_ob(cpu_gpr[ret], v2_t, t0);
21278 gen_helper_shrl_ob(cpu_gpr[ret], v2_t, v1_t);
21282 gen_helper_shrl_qh(cpu_gpr[ret], v2_t, t0);
21286 gen_helper_shrl_qh(cpu_gpr[ret], v2_t, v1_t);
21288 default: /* Invalid */
21289 MIPS_INVAL("MASK SHLL.OB");
21290 generate_exception_end(ctx, EXCP_RI);
21298 tcg_temp_free(v1_t);
21299 tcg_temp_free(v2_t);
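/*
 * DSP ASE multiply instructions.  The dot-product and MAQ forms
 * (the OPC_DPA_W_PH_DSP and OPC_DPAQ_W_QH_DSP groups) accumulate into an
 * accumulator selected through t0, while the MULEU/MULEQ/MULQ forms write
 * a GPR result; check_ret turns a zero destination register into a NOP.
 */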
21302 static void gen_mipsdsp_multiply(DisasContext *ctx, uint32_t op1, uint32_t op2,
21303 int ret, int v1, int v2, int check_ret)
21309 if ((ret == 0) && (check_ret == 1)) {
21310 /* Treat as NOP. */
21314 t0 = tcg_temp_new_i32();
21315 v1_t = tcg_temp_new();
21316 v2_t = tcg_temp_new();
21318 tcg_gen_movi_i32(t0, ret);
21319 gen_load_gpr(v1_t, v1);
21320 gen_load_gpr(v2_t, v2);
21323 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
21324 * the same mask and op1. */
21325 case OPC_MULT_G_2E:
21329 gen_helper_mul_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21332 gen_helper_mul_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21335 gen_helper_mulq_s_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21337 case OPC_MULQ_RS_W:
21338 gen_helper_mulq_rs_w(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21342 case OPC_DPA_W_PH_DSP:
21344 case OPC_DPAU_H_QBL:
21346 gen_helper_dpau_h_qbl(t0, v1_t, v2_t, cpu_env);
21348 case OPC_DPAU_H_QBR:
21350 gen_helper_dpau_h_qbr(t0, v1_t, v2_t, cpu_env);
21352 case OPC_DPSU_H_QBL:
21354 gen_helper_dpsu_h_qbl(t0, v1_t, v2_t, cpu_env);
21356 case OPC_DPSU_H_QBR:
21358 gen_helper_dpsu_h_qbr(t0, v1_t, v2_t, cpu_env);
21362 gen_helper_dpa_w_ph(t0, v1_t, v2_t, cpu_env);
21364 case OPC_DPAX_W_PH:
21366 gen_helper_dpax_w_ph(t0, v1_t, v2_t, cpu_env);
21368 case OPC_DPAQ_S_W_PH:
21370 gen_helper_dpaq_s_w_ph(t0, v1_t, v2_t, cpu_env);
21372 case OPC_DPAQX_S_W_PH:
21374 gen_helper_dpaqx_s_w_ph(t0, v1_t, v2_t, cpu_env);
21376 case OPC_DPAQX_SA_W_PH:
21378 gen_helper_dpaqx_sa_w_ph(t0, v1_t, v2_t, cpu_env);
21382 gen_helper_dps_w_ph(t0, v1_t, v2_t, cpu_env);
21384 case OPC_DPSX_W_PH:
21386 gen_helper_dpsx_w_ph(t0, v1_t, v2_t, cpu_env);
21388 case OPC_DPSQ_S_W_PH:
21390 gen_helper_dpsq_s_w_ph(t0, v1_t, v2_t, cpu_env);
21392 case OPC_DPSQX_S_W_PH:
21394 gen_helper_dpsqx_s_w_ph(t0, v1_t, v2_t, cpu_env);
21396 case OPC_DPSQX_SA_W_PH:
21398 gen_helper_dpsqx_sa_w_ph(t0, v1_t, v2_t, cpu_env);
21400 case OPC_MULSAQ_S_W_PH:
21402 gen_helper_mulsaq_s_w_ph(t0, v1_t, v2_t, cpu_env);
21404 case OPC_DPAQ_SA_L_W:
21406 gen_helper_dpaq_sa_l_w(t0, v1_t, v2_t, cpu_env);
21408 case OPC_DPSQ_SA_L_W:
21410 gen_helper_dpsq_sa_l_w(t0, v1_t, v2_t, cpu_env);
21412 case OPC_MAQ_S_W_PHL:
21414 gen_helper_maq_s_w_phl(t0, v1_t, v2_t, cpu_env);
21416 case OPC_MAQ_S_W_PHR:
21418 gen_helper_maq_s_w_phr(t0, v1_t, v2_t, cpu_env);
21420 case OPC_MAQ_SA_W_PHL:
21422 gen_helper_maq_sa_w_phl(t0, v1_t, v2_t, cpu_env);
21424 case OPC_MAQ_SA_W_PHR:
21426 gen_helper_maq_sa_w_phr(t0, v1_t, v2_t, cpu_env);
21428 case OPC_MULSA_W_PH:
21430 gen_helper_mulsa_w_ph(t0, v1_t, v2_t, cpu_env);
21434 #ifdef TARGET_MIPS64
21435 case OPC_DPAQ_W_QH_DSP:
21437 int ac = ret & 0x03;
21438 tcg_gen_movi_i32(t0, ac);
21443 gen_helper_dmadd(v1_t, v2_t, t0, cpu_env);
21447 gen_helper_dmaddu(v1_t, v2_t, t0, cpu_env);
21451 gen_helper_dmsub(v1_t, v2_t, t0, cpu_env);
21455 gen_helper_dmsubu(v1_t, v2_t, t0, cpu_env);
21459 gen_helper_dpa_w_qh(v1_t, v2_t, t0, cpu_env);
21461 case OPC_DPAQ_S_W_QH:
21463 gen_helper_dpaq_s_w_qh(v1_t, v2_t, t0, cpu_env);
21465 case OPC_DPAQ_SA_L_PW:
21467 gen_helper_dpaq_sa_l_pw(v1_t, v2_t, t0, cpu_env);
21469 case OPC_DPAU_H_OBL:
21471 gen_helper_dpau_h_obl(v1_t, v2_t, t0, cpu_env);
21473 case OPC_DPAU_H_OBR:
21475 gen_helper_dpau_h_obr(v1_t, v2_t, t0, cpu_env);
21479 gen_helper_dps_w_qh(v1_t, v2_t, t0, cpu_env);
21481 case OPC_DPSQ_S_W_QH:
21483 gen_helper_dpsq_s_w_qh(v1_t, v2_t, t0, cpu_env);
21485 case OPC_DPSQ_SA_L_PW:
21487 gen_helper_dpsq_sa_l_pw(v1_t, v2_t, t0, cpu_env);
21489 case OPC_DPSU_H_OBL:
21491 gen_helper_dpsu_h_obl(v1_t, v2_t, t0, cpu_env);
21493 case OPC_DPSU_H_OBR:
21495 gen_helper_dpsu_h_obr(v1_t, v2_t, t0, cpu_env);
21497 case OPC_MAQ_S_L_PWL:
21499 gen_helper_maq_s_l_pwl(v1_t, v2_t, t0, cpu_env);
21501 case OPC_MAQ_S_L_PWR:
21503 gen_helper_maq_s_l_pwr(v1_t, v2_t, t0, cpu_env);
21505 case OPC_MAQ_S_W_QHLL:
21507 gen_helper_maq_s_w_qhll(v1_t, v2_t, t0, cpu_env);
21509 case OPC_MAQ_SA_W_QHLL:
21511 gen_helper_maq_sa_w_qhll(v1_t, v2_t, t0, cpu_env);
21513 case OPC_MAQ_S_W_QHLR:
21515 gen_helper_maq_s_w_qhlr(v1_t, v2_t, t0, cpu_env);
21517 case OPC_MAQ_SA_W_QHLR:
21519 gen_helper_maq_sa_w_qhlr(v1_t, v2_t, t0, cpu_env);
21521 case OPC_MAQ_S_W_QHRL:
21523 gen_helper_maq_s_w_qhrl(v1_t, v2_t, t0, cpu_env);
21525 case OPC_MAQ_SA_W_QHRL:
21527 gen_helper_maq_sa_w_qhrl(v1_t, v2_t, t0, cpu_env);
21529 case OPC_MAQ_S_W_QHRR:
21531 gen_helper_maq_s_w_qhrr(v1_t, v2_t, t0, cpu_env);
21533 case OPC_MAQ_SA_W_QHRR:
21535 gen_helper_maq_sa_w_qhrr(v1_t, v2_t, t0, cpu_env);
21537 case OPC_MULSAQ_S_L_PW:
21539 gen_helper_mulsaq_s_l_pw(v1_t, v2_t, t0, cpu_env);
21541 case OPC_MULSAQ_S_W_QH:
21543 gen_helper_mulsaq_s_w_qh(v1_t, v2_t, t0, cpu_env);
21549 case OPC_ADDU_QB_DSP:
21551 case OPC_MULEU_S_PH_QBL:
21553 gen_helper_muleu_s_ph_qbl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21555 case OPC_MULEU_S_PH_QBR:
21557 gen_helper_muleu_s_ph_qbr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21559 case OPC_MULQ_RS_PH:
21561 gen_helper_mulq_rs_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21563 case OPC_MULEQ_S_W_PHL:
21565 gen_helper_muleq_s_w_phl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21567 case OPC_MULEQ_S_W_PHR:
21569 gen_helper_muleq_s_w_phr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21571 case OPC_MULQ_S_PH:
21573 gen_helper_mulq_s_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21577 #ifdef TARGET_MIPS64
21578 case OPC_ADDU_OB_DSP:
21580 case OPC_MULEQ_S_PW_QHL:
21582 gen_helper_muleq_s_pw_qhl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21584 case OPC_MULEQ_S_PW_QHR:
21586 gen_helper_muleq_s_pw_qhr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21588 case OPC_MULEU_S_QH_OBL:
21590 gen_helper_muleu_s_qh_obl(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21592 case OPC_MULEU_S_QH_OBR:
21594 gen_helper_muleu_s_qh_obr(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21596 case OPC_MULQ_RS_QH:
21598 gen_helper_mulq_rs_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21605 tcg_temp_free_i32(t0);
21606 tcg_temp_free(v1_t);
21607 tcg_temp_free(v2_t);
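/*
 * DSP ASE bit/replication instructions (BITREV, REPL/REPLV for QB and PH,
 * plus the OB/QH/PW forms on MIPS64).  The REPL forms build the replicated
 * constant at translation time and load it with tcg_gen_movi_tl; the REPLV
 * forms broadcast the register value with shift/or sequences.
 */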
21610 static void gen_mipsdsp_bitinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
21618 /* Treat as NOP. */
21622 t0 = tcg_temp_new();
21623 val_t = tcg_temp_new();
21624 gen_load_gpr(val_t, val);
21627 case OPC_ABSQ_S_PH_DSP:
21631 gen_helper_bitrev(cpu_gpr[ret], val_t);
21636 target_long result;
21637 imm = (ctx->opcode >> 16) & 0xFF;
21638 result = (uint32_t)imm << 24 |
21639 (uint32_t)imm << 16 |
21640 (uint32_t)imm << 8 |
21642 result = (int32_t)result;
21643 tcg_gen_movi_tl(cpu_gpr[ret], result);
21648 tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
21649 tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
21650 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21651 tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
21652 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21653 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
21658 imm = (ctx->opcode >> 16) & 0x03FF;
21659 imm = (int16_t)(imm << 6) >> 6;
21660 tcg_gen_movi_tl(cpu_gpr[ret], \
21661 (target_long)((int32_t)imm << 16 | \
21667 tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
21668 tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
21669 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21670 tcg_gen_ext32s_tl(cpu_gpr[ret], cpu_gpr[ret]);
21674 #ifdef TARGET_MIPS64
21675 case OPC_ABSQ_S_QH_DSP:
21682 imm = (ctx->opcode >> 16) & 0xFF;
21683 temp = ((uint64_t)imm << 8) | (uint64_t)imm;
21684 temp = (temp << 16) | temp;
21685 temp = (temp << 32) | temp;
21686 tcg_gen_movi_tl(cpu_gpr[ret], temp);
21694 imm = (ctx->opcode >> 16) & 0x03FF;
21695 imm = (int16_t)(imm << 6) >> 6;
21696 temp = ((target_long)imm << 32) \
21697 | ((target_long)imm & 0xFFFFFFFF);
21698 tcg_gen_movi_tl(cpu_gpr[ret], temp);
21706 imm = (ctx->opcode >> 16) & 0x03FF;
21707 imm = (int16_t)(imm << 6) >> 6;
21709 temp = ((uint64_t)(uint16_t)imm << 48) |
21710 ((uint64_t)(uint16_t)imm << 32) |
21711 ((uint64_t)(uint16_t)imm << 16) |
21712 (uint64_t)(uint16_t)imm;
21713 tcg_gen_movi_tl(cpu_gpr[ret], temp);
21718 tcg_gen_ext8u_tl(cpu_gpr[ret], val_t);
21719 tcg_gen_shli_tl(t0, cpu_gpr[ret], 8);
21720 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21721 tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
21722 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21723 tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
21724 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21728 tcg_gen_ext32u_i64(cpu_gpr[ret], val_t);
21729 tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
21730 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21734 tcg_gen_ext16u_tl(cpu_gpr[ret], val_t);
21735 tcg_gen_shli_tl(t0, cpu_gpr[ret], 16);
21736 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21737 tcg_gen_shli_tl(t0, cpu_gpr[ret], 32);
21738 tcg_gen_or_tl(cpu_gpr[ret], cpu_gpr[ret], t0);
21745 tcg_temp_free(val_t);
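/*
 * DSP ASE compare/pick instructions.  The plain CMP/CMPU forms only update
 * condition bits in DSPControl through helpers, the CMPG forms write a GPR,
 * and the CMPGD forms do both: the open-coded sequences below copy the
 * comparison mask into the destination GPR and into DSPControl[27:24].
 */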
21748 static void gen_mipsdsp_add_cmp_pick(DisasContext *ctx,
21749 uint32_t op1, uint32_t op2,
21750 int ret, int v1, int v2, int check_ret)
21756 if ((ret == 0) && (check_ret == 1)) {
21757 /* Treat as NOP. */
21761 t1 = tcg_temp_new();
21762 v1_t = tcg_temp_new();
21763 v2_t = tcg_temp_new();
21765 gen_load_gpr(v1_t, v1);
21766 gen_load_gpr(v2_t, v2);
21769 case OPC_CMPU_EQ_QB_DSP:
21771 case OPC_CMPU_EQ_QB:
21773 gen_helper_cmpu_eq_qb(v1_t, v2_t, cpu_env);
21775 case OPC_CMPU_LT_QB:
21777 gen_helper_cmpu_lt_qb(v1_t, v2_t, cpu_env);
21779 case OPC_CMPU_LE_QB:
21781 gen_helper_cmpu_le_qb(v1_t, v2_t, cpu_env);
21783 case OPC_CMPGU_EQ_QB:
21785 gen_helper_cmpgu_eq_qb(cpu_gpr[ret], v1_t, v2_t);
21787 case OPC_CMPGU_LT_QB:
21789 gen_helper_cmpgu_lt_qb(cpu_gpr[ret], v1_t, v2_t);
21791 case OPC_CMPGU_LE_QB:
21793 gen_helper_cmpgu_le_qb(cpu_gpr[ret], v1_t, v2_t);
21795 case OPC_CMPGDU_EQ_QB:
21797 gen_helper_cmpgu_eq_qb(t1, v1_t, v2_t);
21798 tcg_gen_mov_tl(cpu_gpr[ret], t1);
21799 tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
21800 tcg_gen_shli_tl(t1, t1, 24);
21801 tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
21803 case OPC_CMPGDU_LT_QB:
21805 gen_helper_cmpgu_lt_qb(t1, v1_t, v2_t);
21806 tcg_gen_mov_tl(cpu_gpr[ret], t1);
21807 tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
21808 tcg_gen_shli_tl(t1, t1, 24);
21809 tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
21811 case OPC_CMPGDU_LE_QB:
21813 gen_helper_cmpgu_le_qb(t1, v1_t, v2_t);
21814 tcg_gen_mov_tl(cpu_gpr[ret], t1);
21815 tcg_gen_andi_tl(cpu_dspctrl, cpu_dspctrl, 0xF0FFFFFF);
21816 tcg_gen_shli_tl(t1, t1, 24);
21817 tcg_gen_or_tl(cpu_dspctrl, cpu_dspctrl, t1);
21819 case OPC_CMP_EQ_PH:
21821 gen_helper_cmp_eq_ph(v1_t, v2_t, cpu_env);
21823 case OPC_CMP_LT_PH:
21825 gen_helper_cmp_lt_ph(v1_t, v2_t, cpu_env);
21827 case OPC_CMP_LE_PH:
21829 gen_helper_cmp_le_ph(v1_t, v2_t, cpu_env);
21833 gen_helper_pick_qb(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21837 gen_helper_pick_ph(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21839 case OPC_PACKRL_PH:
21841 gen_helper_packrl_ph(cpu_gpr[ret], v1_t, v2_t);
21845 #ifdef TARGET_MIPS64
21846 case OPC_CMPU_EQ_OB_DSP:
21848 case OPC_CMP_EQ_PW:
21850 gen_helper_cmp_eq_pw(v1_t, v2_t, cpu_env);
21852 case OPC_CMP_LT_PW:
21854 gen_helper_cmp_lt_pw(v1_t, v2_t, cpu_env);
21856 case OPC_CMP_LE_PW:
21858 gen_helper_cmp_le_pw(v1_t, v2_t, cpu_env);
21860 case OPC_CMP_EQ_QH:
21862 gen_helper_cmp_eq_qh(v1_t, v2_t, cpu_env);
21864 case OPC_CMP_LT_QH:
21866 gen_helper_cmp_lt_qh(v1_t, v2_t, cpu_env);
21868 case OPC_CMP_LE_QH:
21870 gen_helper_cmp_le_qh(v1_t, v2_t, cpu_env);
21872 case OPC_CMPGDU_EQ_OB:
21874 gen_helper_cmpgdu_eq_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21876 case OPC_CMPGDU_LT_OB:
21878 gen_helper_cmpgdu_lt_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21880 case OPC_CMPGDU_LE_OB:
21882 gen_helper_cmpgdu_le_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21884 case OPC_CMPGU_EQ_OB:
21886 gen_helper_cmpgu_eq_ob(cpu_gpr[ret], v1_t, v2_t);
21888 case OPC_CMPGU_LT_OB:
21890 gen_helper_cmpgu_lt_ob(cpu_gpr[ret], v1_t, v2_t);
21892 case OPC_CMPGU_LE_OB:
21894 gen_helper_cmpgu_le_ob(cpu_gpr[ret], v1_t, v2_t);
21896 case OPC_CMPU_EQ_OB:
21898 gen_helper_cmpu_eq_ob(v1_t, v2_t, cpu_env);
21900 case OPC_CMPU_LT_OB:
21902 gen_helper_cmpu_lt_ob(v1_t, v2_t, cpu_env);
21904 case OPC_CMPU_LE_OB:
21906 gen_helper_cmpu_le_ob(v1_t, v2_t, cpu_env);
21908 case OPC_PACKRL_PW:
21910 gen_helper_packrl_pw(cpu_gpr[ret], v1_t, v2_t);
21914 gen_helper_pick_ob(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21918 gen_helper_pick_pw(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21922 gen_helper_pick_qh(cpu_gpr[ret], v1_t, v2_t, cpu_env);
21930 tcg_temp_free(v1_t);
21931 tcg_temp_free(v2_t);
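/*
 * APPEND/PREPEND/BALIGN (and the 64-bit DAPPEND/PREPENDD/PREPENDW/DBALIGN
 * forms) are open-coded: rs is shifted against rt and the two pieces are
 * merged with deposit/shift/or, using sa as the bit count (byte count for
 * BALIGN/DBALIGN).
 */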
21934 static void gen_mipsdsp_append(CPUMIPSState *env, DisasContext *ctx,
21935 uint32_t op1, int rt, int rs, int sa)
21942 /* Treat as NOP. */
21946 t0 = tcg_temp_new();
21947 gen_load_gpr(t0, rs);
21950 case OPC_APPEND_DSP:
21951 switch (MASK_APPEND(ctx->opcode)) {
21954 tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 32 - sa);
21956 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
21960 tcg_gen_ext32u_tl(cpu_gpr[rt], cpu_gpr[rt]);
21961 tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
21962 tcg_gen_shli_tl(t0, t0, 32 - sa);
21963 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
21965 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
21969 if (sa != 0 && sa != 2) {
21970 tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
21971 tcg_gen_ext32u_tl(t0, t0);
21972 tcg_gen_shri_tl(t0, t0, 8 * (4 - sa));
21973 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
21975 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
21977 default: /* Invalid */
21978 MIPS_INVAL("MASK APPEND");
21979 generate_exception_end(ctx, EXCP_RI);
21983 #ifdef TARGET_MIPS64
21984 case OPC_DAPPEND_DSP:
21985 switch (MASK_DAPPEND(ctx->opcode)) {
21988 tcg_gen_deposit_tl(cpu_gpr[rt], t0, cpu_gpr[rt], sa, 64 - sa);
21992 tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], 0x20 | sa);
21993 tcg_gen_shli_tl(t0, t0, 64 - (0x20 | sa));
21994 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
21998 tcg_gen_shri_tl(cpu_gpr[rt], cpu_gpr[rt], sa);
21999 tcg_gen_shli_tl(t0, t0, 64 - sa);
22000 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
22005 if (sa != 0 && sa != 2 && sa != 4) {
22006 tcg_gen_shli_tl(cpu_gpr[rt], cpu_gpr[rt], 8 * sa);
22007 tcg_gen_shri_tl(t0, t0, 8 * (8 - sa));
22008 tcg_gen_or_tl(cpu_gpr[rt], cpu_gpr[rt], t0);
22011 default: /* Invalid */
22012 MIPS_INVAL("MASK DAPPEND");
22013 generate_exception_end(ctx, EXCP_RI);
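/*
 * DSP ASE accumulator access instructions (EXTR/EXTP/SHILO/MTHLIP,
 * WRDSP/RDDSP and their 64-bit D-prefixed forms).  Immediate operands are
 * materialized into t0/t1 with tcg_gen_movi_tl; the "V" (variable) forms
 * pass the rs register value (v1_t) to the same helpers instead.
 */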
22022 static void gen_mipsdsp_accinsn(DisasContext *ctx, uint32_t op1, uint32_t op2,
22023 int ret, int v1, int v2, int check_ret)
22032 if ((ret == 0) && (check_ret == 1)) {
22033 /* Treat as NOP. */
22037 t0 = tcg_temp_new();
22038 t1 = tcg_temp_new();
22039 v1_t = tcg_temp_new();
22040 v2_t = tcg_temp_new();
22042 gen_load_gpr(v1_t, v1);
22043 gen_load_gpr(v2_t, v2);
22046 case OPC_EXTR_W_DSP:
22050 tcg_gen_movi_tl(t0, v2);
22051 tcg_gen_movi_tl(t1, v1);
22052 gen_helper_extr_w(cpu_gpr[ret], t0, t1, cpu_env);
22055 tcg_gen_movi_tl(t0, v2);
22056 tcg_gen_movi_tl(t1, v1);
22057 gen_helper_extr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
22059 case OPC_EXTR_RS_W:
22060 tcg_gen_movi_tl(t0, v2);
22061 tcg_gen_movi_tl(t1, v1);
22062 gen_helper_extr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
22065 tcg_gen_movi_tl(t0, v2);
22066 tcg_gen_movi_tl(t1, v1);
22067 gen_helper_extr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
22069 case OPC_EXTRV_S_H:
22070 tcg_gen_movi_tl(t0, v2);
22071 gen_helper_extr_s_h(cpu_gpr[ret], t0, v1_t, cpu_env);
22074 tcg_gen_movi_tl(t0, v2);
22075 gen_helper_extr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
22077 case OPC_EXTRV_R_W:
22078 tcg_gen_movi_tl(t0, v2);
22079 gen_helper_extr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
22081 case OPC_EXTRV_RS_W:
22082 tcg_gen_movi_tl(t0, v2);
22083 gen_helper_extr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
22086 tcg_gen_movi_tl(t0, v2);
22087 tcg_gen_movi_tl(t1, v1);
22088 gen_helper_extp(cpu_gpr[ret], t0, t1, cpu_env);
22091 tcg_gen_movi_tl(t0, v2);
22092 gen_helper_extp(cpu_gpr[ret], t0, v1_t, cpu_env);
22095 tcg_gen_movi_tl(t0, v2);
22096 tcg_gen_movi_tl(t1, v1);
22097 gen_helper_extpdp(cpu_gpr[ret], t0, t1, cpu_env);
22100 tcg_gen_movi_tl(t0, v2);
22101 gen_helper_extpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
22104 imm = (ctx->opcode >> 20) & 0x3F;
22105 tcg_gen_movi_tl(t0, ret);
22106 tcg_gen_movi_tl(t1, imm);
22107 gen_helper_shilo(t0, t1, cpu_env);
22110 tcg_gen_movi_tl(t0, ret);
22111 gen_helper_shilo(t0, v1_t, cpu_env);
22114 tcg_gen_movi_tl(t0, ret);
22115 gen_helper_mthlip(t0, v1_t, cpu_env);
22118 imm = (ctx->opcode >> 11) & 0x3FF;
22119 tcg_gen_movi_tl(t0, imm);
22120 gen_helper_wrdsp(v1_t, t0, cpu_env);
22123 imm = (ctx->opcode >> 16) & 0x03FF;
22124 tcg_gen_movi_tl(t0, imm);
22125 gen_helper_rddsp(cpu_gpr[ret], t0, cpu_env);
22129 #ifdef TARGET_MIPS64
22130 case OPC_DEXTR_W_DSP:
22134 tcg_gen_movi_tl(t0, ret);
22135 gen_helper_dmthlip(v1_t, t0, cpu_env);
22139 int shift = (ctx->opcode >> 19) & 0x7F;
22140 int ac = (ctx->opcode >> 11) & 0x03;
22141 tcg_gen_movi_tl(t0, shift);
22142 tcg_gen_movi_tl(t1, ac);
22143 gen_helper_dshilo(t0, t1, cpu_env);
22148 int ac = (ctx->opcode >> 11) & 0x03;
22149 tcg_gen_movi_tl(t0, ac);
22150 gen_helper_dshilo(v1_t, t0, cpu_env);
22154 tcg_gen_movi_tl(t0, v2);
22155 tcg_gen_movi_tl(t1, v1);
22157 gen_helper_dextp(cpu_gpr[ret], t0, t1, cpu_env);
22160 tcg_gen_movi_tl(t0, v2);
22161 gen_helper_dextp(cpu_gpr[ret], t0, v1_t, cpu_env);
22164 tcg_gen_movi_tl(t0, v2);
22165 tcg_gen_movi_tl(t1, v1);
22166 gen_helper_dextpdp(cpu_gpr[ret], t0, t1, cpu_env);
22169 tcg_gen_movi_tl(t0, v2);
22170 gen_helper_dextpdp(cpu_gpr[ret], t0, v1_t, cpu_env);
22173 tcg_gen_movi_tl(t0, v2);
22174 tcg_gen_movi_tl(t1, v1);
22175 gen_helper_dextr_l(cpu_gpr[ret], t0, t1, cpu_env);
22177 case OPC_DEXTR_R_L:
22178 tcg_gen_movi_tl(t0, v2);
22179 tcg_gen_movi_tl(t1, v1);
22180 gen_helper_dextr_r_l(cpu_gpr[ret], t0, t1, cpu_env);
22182 case OPC_DEXTR_RS_L:
22183 tcg_gen_movi_tl(t0, v2);
22184 tcg_gen_movi_tl(t1, v1);
22185 gen_helper_dextr_rs_l(cpu_gpr[ret], t0, t1, cpu_env);
22188 tcg_gen_movi_tl(t0, v2);
22189 tcg_gen_movi_tl(t1, v1);
22190 gen_helper_dextr_w(cpu_gpr[ret], t0, t1, cpu_env);
22192 case OPC_DEXTR_R_W:
22193 tcg_gen_movi_tl(t0, v2);
22194 tcg_gen_movi_tl(t1, v1);
22195 gen_helper_dextr_r_w(cpu_gpr[ret], t0, t1, cpu_env);
22197 case OPC_DEXTR_RS_W:
22198 tcg_gen_movi_tl(t0, v2);
22199 tcg_gen_movi_tl(t1, v1);
22200 gen_helper_dextr_rs_w(cpu_gpr[ret], t0, t1, cpu_env);
22202 case OPC_DEXTR_S_H:
22203 tcg_gen_movi_tl(t0, v2);
22204 tcg_gen_movi_tl(t1, v1);
22205 gen_helper_dextr_s_h(cpu_gpr[ret], t0, t1, cpu_env);
22207 case OPC_DEXTRV_S_H:
22208 tcg_gen_movi_tl(t0, v2);
22210 gen_helper_dextr_s_h(cpu_gpr[ret], t0, v1_t, cpu_env);
22213 tcg_gen_movi_tl(t0, v2);
22214 gen_helper_dextr_l(cpu_gpr[ret], t0, v1_t, cpu_env);
22216 case OPC_DEXTRV_R_L:
22217 tcg_gen_movi_tl(t0, v2);
22218 gen_helper_dextr_r_l(cpu_gpr[ret], t0, v1_t, cpu_env);
22220 case OPC_DEXTRV_RS_L:
22221 tcg_gen_movi_tl(t0, v2);
22222 gen_helper_dextr_rs_l(cpu_gpr[ret], t0, v1_t, cpu_env);
22225 tcg_gen_movi_tl(t0, v2);
22226 gen_helper_dextr_w(cpu_gpr[ret], t0, v1_t, cpu_env);
22228 case OPC_DEXTRV_R_W:
22229 tcg_gen_movi_tl(t0, v2);
22230 gen_helper_dextr_r_w(cpu_gpr[ret], t0, v1_t, cpu_env);
22232 case OPC_DEXTRV_RS_W:
22233 tcg_gen_movi_tl(t0, v2);
22234 gen_helper_dextr_rs_w(cpu_gpr[ret], t0, v1_t, cpu_env);
22243 tcg_temp_free(v1_t);
22244 tcg_temp_free(v2_t);
22247 /* End MIPSDSP functions. */
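/*
 * Decode the SPECIAL-opcode encodings that are new in or specific to
 * Release 6: LSA/DLSA, the R6 two-register MUL/DIV family, SELEQZ/SELNEZ,
 * the re-encoded CLZ/CLO/DCLZ/DCLO (which share their function field with
 * the pre-R6 MFHI/MTHI), and SDBBP with optional semihosting.
 */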
22249 static void decode_opc_special_r6(CPUMIPSState *env, DisasContext *ctx)
22251 int rs, rt, rd, sa;
22254 rs = (ctx->opcode >> 21) & 0x1f;
22255 rt = (ctx->opcode >> 16) & 0x1f;
22256 rd = (ctx->opcode >> 11) & 0x1f;
22257 sa = (ctx->opcode >> 6) & 0x1f;
22259 op1 = MASK_SPECIAL(ctx->opcode);
22262 gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
22268 op2 = MASK_R6_MULDIV(ctx->opcode);
22278 gen_r6_muldiv(ctx, op2, rd, rs, rt);
22281 MIPS_INVAL("special_r6 muldiv");
22282 generate_exception_end(ctx, EXCP_RI);
22288 gen_cond_move(ctx, op1, rd, rs, rt);
22292 if (rt == 0 && sa == 1) {
22293 /* Major opcode and function field are shared with pre-R6 MFHI/MTHI;
22294 we additionally need to check the other fields. */
22295 gen_cl(ctx, op1, rd, rs);
22297 generate_exception_end(ctx, EXCP_RI);
22301 if (is_uhi(extract32(ctx->opcode, 6, 20))) {
22302 gen_helper_do_semihosting(cpu_env);
22304 if (ctx->hflags & MIPS_HFLAG_SBRI) {
22305 generate_exception_end(ctx, EXCP_RI);
22307 generate_exception_end(ctx, EXCP_DBp);
22311 #if defined(TARGET_MIPS64)
22313 check_mips_64(ctx);
22314 gen_lsa(ctx, op1, rd, rs, rt, extract32(ctx->opcode, 6, 2));
22318 if (rt == 0 && sa == 1) {
22319 /* Major opcode and function field are shared with pre-R6 MFHI/MTHI;
22320 we additionally need to check the other fields. */
22321 check_mips_64(ctx);
22322 gen_cl(ctx, op1, rd, rs);
22324 generate_exception_end(ctx, EXCP_RI);
22332 op2 = MASK_R6_MULDIV(ctx->opcode);
22342 check_mips_64(ctx);
22343 gen_r6_muldiv(ctx, op2, rd, rs, rt);
22346 MIPS_INVAL("special_r6 muldiv");
22347 generate_exception_end(ctx, EXCP_RI);
22352 default: /* Invalid */
22353 MIPS_INVAL("special_r6");
22354 generate_exception_end(ctx, EXCP_RI);
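/*
 * Decode the pre-R6 SPECIAL encodings that Release 6 removed or reused:
 * conditional moves, the HI/LO moves, MOVCI, the VR54xx multiply variants,
 * MULT/DIV (and the 64-bit forms), and the unofficial SPIM opcode.
 */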
22359 static void decode_opc_special_legacy(CPUMIPSState *env, DisasContext *ctx)
22361 int rs, rt, rd, sa;
22364 rs = (ctx->opcode >> 21) & 0x1f;
22365 rt = (ctx->opcode >> 16) & 0x1f;
22366 rd = (ctx->opcode >> 11) & 0x1f;
22367 sa = (ctx->opcode >> 6) & 0x1f;
22369 op1 = MASK_SPECIAL(ctx->opcode);
22371 case OPC_MOVN: /* Conditional move */
22373 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32 |
22374 INSN_LOONGSON2E | INSN_LOONGSON2F);
22375 gen_cond_move(ctx, op1, rd, rs, rt);
22377 case OPC_MFHI: /* Move from HI/LO */
22379 gen_HILO(ctx, op1, rs & 3, rd);
22382 case OPC_MTLO: /* Move to HI/LO */
22383 gen_HILO(ctx, op1, rd & 3, rs);
22386 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
22387 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
22388 check_cp1_enabled(ctx);
22389 gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
22390 (ctx->opcode >> 16) & 1);
22392 generate_exception_err(ctx, EXCP_CpU, 1);
22398 check_insn(ctx, INSN_VR54XX);
22399 op1 = MASK_MUL_VR54XX(ctx->opcode);
22400 gen_mul_vr54xx(ctx, op1, rd, rs, rt);
22402 gen_muldiv(ctx, op1, rd & 3, rs, rt);
22407 gen_muldiv(ctx, op1, 0, rs, rt);
22409 #if defined(TARGET_MIPS64)
22414 check_insn(ctx, ISA_MIPS3);
22415 check_mips_64(ctx);
22416 gen_muldiv(ctx, op1, 0, rs, rt);
22420 gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
22423 #ifdef MIPS_STRICT_STANDARD
22424 MIPS_INVAL("SPIM");
22425 generate_exception_end(ctx, EXCP_RI);
22427 /* Implemented as RI exception for now. */
22428 MIPS_INVAL("spim (unofficial)");
22429 generate_exception_end(ctx, EXCP_RI);
22432 default: /* Invalid */
22433 MIPS_INVAL("special_legacy");
22434 generate_exception_end(ctx, EXCP_RI);
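/*
 * Top-level decoder for the SPECIAL major opcode.  The encodings common to
 * all revisions (shifts, ADD/SUB, logic, SLT, traps, SYSCALL/BREAK, SYNC,
 * and their MIPS64 counterparts) are handled here; anything else is passed
 * on to decode_opc_special_r6() or decode_opc_special_legacy() depending
 * on the ISA level.
 */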
22439 static void decode_opc_special(CPUMIPSState *env, DisasContext *ctx)
22441 int rs, rt, rd, sa;
22444 rs = (ctx->opcode >> 21) & 0x1f;
22445 rt = (ctx->opcode >> 16) & 0x1f;
22446 rd = (ctx->opcode >> 11) & 0x1f;
22447 sa = (ctx->opcode >> 6) & 0x1f;
22449 op1 = MASK_SPECIAL(ctx->opcode);
22451 case OPC_SLL: /* Shift with immediate */
22452 if (sa == 5 && rd == 0 &&
22453 rs == 0 && rt == 0) { /* PAUSE */
22454 if ((ctx->insn_flags & ISA_MIPS32R6) &&
22455 (ctx->hflags & MIPS_HFLAG_BMASK)) {
22456 generate_exception_end(ctx, EXCP_RI);
22462 gen_shift_imm(ctx, op1, rd, rt, sa);
22465 switch ((ctx->opcode >> 21) & 0x1f) {
22467 /* rotr is decoded as srl on non-R2 CPUs */
22468 if (ctx->insn_flags & ISA_MIPS32R2) {
22473 gen_shift_imm(ctx, op1, rd, rt, sa);
22476 generate_exception_end(ctx, EXCP_RI);
22484 gen_arith(ctx, op1, rd, rs, rt);
22486 case OPC_SLLV: /* Shifts */
22488 gen_shift(ctx, op1, rd, rs, rt);
22491 switch ((ctx->opcode >> 6) & 0x1f) {
22493 /* rotrv is decoded as srlv on non-R2 CPUs */
22494 if (ctx->insn_flags & ISA_MIPS32R2) {
22499 gen_shift(ctx, op1, rd, rs, rt);
22502 generate_exception_end(ctx, EXCP_RI);
22506 case OPC_SLT: /* Set on less than */
22508 gen_slt(ctx, op1, rd, rs, rt);
22510 case OPC_AND: /* Logic*/
22514 gen_logic(ctx, op1, rd, rs, rt);
22517 gen_compute_branch(ctx, op1, 4, rs, rd, sa, 4);
22519 case OPC_TGE: /* Traps */
22525 check_insn(ctx, ISA_MIPS2);
22526 gen_trap(ctx, op1, rs, rt, -1);
22528 case OPC_LSA: /* OPC_PMON */
22529 if ((ctx->insn_flags & ISA_MIPS32R6) ||
22530 (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
22531 decode_opc_special_r6(env, ctx);
22533 /* Pmon entry point, also R4010 selsl */
22534 #ifdef MIPS_STRICT_STANDARD
22535 MIPS_INVAL("PMON / selsl");
22536 generate_exception_end(ctx, EXCP_RI);
22538 gen_helper_0e0i(pmon, sa);
22543 generate_exception_end(ctx, EXCP_SYSCALL);
22546 generate_exception_end(ctx, EXCP_BREAK);
22549 check_insn(ctx, ISA_MIPS2);
22550 gen_sync(extract32(ctx->opcode, 6, 5));
22553 #if defined(TARGET_MIPS64)
22554 /* MIPS64 specific opcodes */
22559 check_insn(ctx, ISA_MIPS3);
22560 check_mips_64(ctx);
22561 gen_shift_imm(ctx, op1, rd, rt, sa);
22564 switch ((ctx->opcode >> 21) & 0x1f) {
22566 /* drotr is decoded as dsrl on non-R2 CPUs */
22567 if (ctx->insn_flags & ISA_MIPS32R2) {
22572 check_insn(ctx, ISA_MIPS3);
22573 check_mips_64(ctx);
22574 gen_shift_imm(ctx, op1, rd, rt, sa);
22577 generate_exception_end(ctx, EXCP_RI);
22582 switch ((ctx->opcode >> 21) & 0x1f) {
22584 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
22585 if (ctx->insn_flags & ISA_MIPS32R2) {
22590 check_insn(ctx, ISA_MIPS3);
22591 check_mips_64(ctx);
22592 gen_shift_imm(ctx, op1, rd, rt, sa);
22595 generate_exception_end(ctx, EXCP_RI);
22603 check_insn(ctx, ISA_MIPS3);
22604 check_mips_64(ctx);
22605 gen_arith(ctx, op1, rd, rs, rt);
22609 check_insn(ctx, ISA_MIPS3);
22610 check_mips_64(ctx);
22611 gen_shift(ctx, op1, rd, rs, rt);
22614 switch ((ctx->opcode >> 6) & 0x1f) {
22616 /* drotrv is decoded as dsrlv on non-R2 CPUs */
22617 if (ctx->insn_flags & ISA_MIPS32R2) {
22622 check_insn(ctx, ISA_MIPS3);
22623 check_mips_64(ctx);
22624 gen_shift(ctx, op1, rd, rs, rt);
22627 generate_exception_end(ctx, EXCP_RI);
22632 if ((ctx->insn_flags & ISA_MIPS32R6) ||
22633 (env->CP0_Config3 & (1 << CP0C3_MSAP))) {
22634 decode_opc_special_r6(env, ctx);
22639 if (ctx->insn_flags & ISA_MIPS32R6) {
22640 decode_opc_special_r6(env, ctx);
22642 decode_opc_special_legacy(env, ctx);
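/*
 * SPECIAL2 decoder (removed as a whole in Release 6): MADD/MSUB and MUL,
 * the Loongson 2F integer ops, CLZ/CLO (and DCLZ/DCLO), and SDBBP.
 */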
22647 static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
22652 check_insn_opc_removed(ctx, ISA_MIPS32R6);
22654 rs = (ctx->opcode >> 21) & 0x1f;
22655 rt = (ctx->opcode >> 16) & 0x1f;
22656 rd = (ctx->opcode >> 11) & 0x1f;
22658 op1 = MASK_SPECIAL2(ctx->opcode);
22660 case OPC_MADD: /* Multiply and add/sub */
22664 check_insn(ctx, ISA_MIPS32);
22665 gen_muldiv(ctx, op1, rd & 3, rs, rt);
22668 gen_arith(ctx, op1, rd, rs, rt);
22671 case OPC_DIVU_G_2F:
22672 case OPC_MULT_G_2F:
22673 case OPC_MULTU_G_2F:
22675 case OPC_MODU_G_2F:
22676 check_insn(ctx, INSN_LOONGSON2F);
22677 gen_loongson_integer(ctx, op1, rd, rs, rt);
22681 check_insn(ctx, ISA_MIPS32);
22682 gen_cl(ctx, op1, rd, rs);
22685 if (is_uhi(extract32(ctx->opcode, 6, 20))) {
22686 gen_helper_do_semihosting(cpu_env);
22688 /* XXX: not clear which exception should be raised
22689 * when in debug mode...
22691 check_insn(ctx, ISA_MIPS32);
22692 generate_exception_end(ctx, EXCP_DBp);
22695 #if defined(TARGET_MIPS64)
22698 check_insn(ctx, ISA_MIPS64);
22699 check_mips_64(ctx);
22700 gen_cl(ctx, op1, rd, rs);
22702 case OPC_DMULT_G_2F:
22703 case OPC_DMULTU_G_2F:
22704 case OPC_DDIV_G_2F:
22705 case OPC_DDIVU_G_2F:
22706 case OPC_DMOD_G_2F:
22707 case OPC_DMODU_G_2F:
22708 check_insn(ctx, INSN_LOONGSON2F);
22709 gen_loongson_integer(ctx, op1, rd, rs, rt);
22712 default: /* Invalid */
22713 MIPS_INVAL("special2_legacy");
22714 generate_exception_end(ctx, EXCP_RI);
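/*
 * SPECIAL3 encodings that are new in Release 6: the PREF/CACHE hints moved
 * here, the SC/LL (and SCD/LLD) re-encodings, and the ALIGN/DALIGN and
 * BITSWAP/DBITSWAP groups under BSHFL/DBSHFL.
 */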
22719 static void decode_opc_special3_r6(CPUMIPSState *env, DisasContext *ctx)
22721 int rs, rt, rd, sa;
22725 rs = (ctx->opcode >> 21) & 0x1f;
22726 rt = (ctx->opcode >> 16) & 0x1f;
22727 rd = (ctx->opcode >> 11) & 0x1f;
22728 sa = (ctx->opcode >> 6) & 0x1f;
22729 imm = (int16_t)ctx->opcode >> 7;
22731 op1 = MASK_SPECIAL3(ctx->opcode);
22735 /* hint codes 24-31 are reserved and signal RI */
22736 generate_exception_end(ctx, EXCP_RI);
22738 /* Treat as NOP. */
22741 check_cp0_enabled(ctx);
22742 if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
22743 gen_cache_operation(ctx, rt, rs, imm);
22747 gen_st_cond(ctx, op1, rt, rs, imm);
22750 gen_ld(ctx, op1, rt, rs, imm);
22755 /* Treat as NOP. */
22758 op2 = MASK_BSHFL(ctx->opcode);
22761 case OPC_ALIGN_END:
22762 gen_align(ctx, 32, rd, rs, rt, sa & 3);
22765 gen_bitswap(ctx, op2, rd, rt);
22770 #if defined(TARGET_MIPS64)
22772 gen_st_cond(ctx, op1, rt, rs, imm);
22775 gen_ld(ctx, op1, rt, rs, imm);
22778 check_mips_64(ctx);
22781 /* Treat as NOP. */
22784 op2 = MASK_DBSHFL(ctx->opcode);
22787 case OPC_DALIGN_END:
22788 gen_align(ctx, 64, rd, rs, rt, sa & 7);
22791 gen_bitswap(ctx, op2, rd, rt);
22798 default: /* Invalid */
22799 MIPS_INVAL("special3_r6");
22800 generate_exception_end(ctx, EXCP_RI);
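/*
 * Pre-R6 SPECIAL3 decoder.  Most of the DSP ASE lives here: each OPC_*_DSP
 * group is re-masked with its own MASK_* macro and dispatched to the
 * gen_mipsdsp_* translators above, alongside the Loongson 2E integer ops.
 */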
22805 static void decode_opc_special3_legacy(CPUMIPSState *env, DisasContext *ctx)
22810 rs = (ctx->opcode >> 21) & 0x1f;
22811 rt = (ctx->opcode >> 16) & 0x1f;
22812 rd = (ctx->opcode >> 11) & 0x1f;
22814 op1 = MASK_SPECIAL3(ctx->opcode);
22817 case OPC_DIVU_G_2E:
22819 case OPC_MODU_G_2E:
22820 case OPC_MULT_G_2E:
22821 case OPC_MULTU_G_2E:
22822 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
22823 * the same mask and op1. */
22824 if ((ctx->insn_flags & ASE_DSPR2) && (op1 == OPC_MULT_G_2E)) {
22825 op2 = MASK_ADDUH_QB(ctx->opcode);
22828 case OPC_ADDUH_R_QB:
22830 case OPC_ADDQH_R_PH:
22832 case OPC_ADDQH_R_W:
22834 case OPC_SUBUH_R_QB:
22836 case OPC_SUBQH_R_PH:
22838 case OPC_SUBQH_R_W:
22839 gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
22844 case OPC_MULQ_RS_W:
22845 gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
22848 MIPS_INVAL("MASK ADDUH.QB");
22849 generate_exception_end(ctx, EXCP_RI);
22852 } else if (ctx->insn_flags & INSN_LOONGSON2E) {
22853 gen_loongson_integer(ctx, op1, rd, rs, rt);
22855 generate_exception_end(ctx, EXCP_RI);
22859 op2 = MASK_LX(ctx->opcode);
22861 #if defined(TARGET_MIPS64)
22867 gen_mipsdsp_ld(ctx, op2, rd, rs, rt);
22869 default: /* Invalid */
22870 MIPS_INVAL("MASK LX");
22871 generate_exception_end(ctx, EXCP_RI);
22875 case OPC_ABSQ_S_PH_DSP:
22876 op2 = MASK_ABSQ_S_PH(ctx->opcode);
22878 case OPC_ABSQ_S_QB:
22879 case OPC_ABSQ_S_PH:
22881 case OPC_PRECEQ_W_PHL:
22882 case OPC_PRECEQ_W_PHR:
22883 case OPC_PRECEQU_PH_QBL:
22884 case OPC_PRECEQU_PH_QBR:
22885 case OPC_PRECEQU_PH_QBLA:
22886 case OPC_PRECEQU_PH_QBRA:
22887 case OPC_PRECEU_PH_QBL:
22888 case OPC_PRECEU_PH_QBR:
22889 case OPC_PRECEU_PH_QBLA:
22890 case OPC_PRECEU_PH_QBRA:
22891 gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
22898 gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
22901 MIPS_INVAL("MASK ABSQ_S.PH");
22902 generate_exception_end(ctx, EXCP_RI);
22906 case OPC_ADDU_QB_DSP:
22907 op2 = MASK_ADDU_QB(ctx->opcode);
22910 case OPC_ADDQ_S_PH:
22913 case OPC_ADDU_S_QB:
22915 case OPC_ADDU_S_PH:
22917 case OPC_SUBQ_S_PH:
22920 case OPC_SUBU_S_QB:
22922 case OPC_SUBU_S_PH:
22926 case OPC_RADDU_W_QB:
22927 gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
22929 case OPC_MULEU_S_PH_QBL:
22930 case OPC_MULEU_S_PH_QBR:
22931 case OPC_MULQ_RS_PH:
22932 case OPC_MULEQ_S_W_PHL:
22933 case OPC_MULEQ_S_W_PHR:
22934 case OPC_MULQ_S_PH:
22935 gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
22937 default: /* Invalid */
22938 MIPS_INVAL("MASK ADDU.QB");
22939 generate_exception_end(ctx, EXCP_RI);
22944 case OPC_CMPU_EQ_QB_DSP:
22945 op2 = MASK_CMPU_EQ_QB(ctx->opcode);
22947 case OPC_PRECR_SRA_PH_W:
22948 case OPC_PRECR_SRA_R_PH_W:
22949 gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
22951 case OPC_PRECR_QB_PH:
22952 case OPC_PRECRQ_QB_PH:
22953 case OPC_PRECRQ_PH_W:
22954 case OPC_PRECRQ_RS_PH_W:
22955 case OPC_PRECRQU_S_QB_PH:
22956 gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
22958 case OPC_CMPU_EQ_QB:
22959 case OPC_CMPU_LT_QB:
22960 case OPC_CMPU_LE_QB:
22961 case OPC_CMP_EQ_PH:
22962 case OPC_CMP_LT_PH:
22963 case OPC_CMP_LE_PH:
22964 gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
22966 case OPC_CMPGU_EQ_QB:
22967 case OPC_CMPGU_LT_QB:
22968 case OPC_CMPGU_LE_QB:
22969 case OPC_CMPGDU_EQ_QB:
22970 case OPC_CMPGDU_LT_QB:
22971 case OPC_CMPGDU_LE_QB:
22974 case OPC_PACKRL_PH:
22975 gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
22977 default: /* Invalid */
22978 MIPS_INVAL("MASK CMPU.EQ.QB");
22979 generate_exception_end(ctx, EXCP_RI);
22983 case OPC_SHLL_QB_DSP:
22984 gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
22986 case OPC_DPA_W_PH_DSP:
22987 op2 = MASK_DPA_W_PH(ctx->opcode);
22989 case OPC_DPAU_H_QBL:
22990 case OPC_DPAU_H_QBR:
22991 case OPC_DPSU_H_QBL:
22992 case OPC_DPSU_H_QBR:
22994 case OPC_DPAX_W_PH:
22995 case OPC_DPAQ_S_W_PH:
22996 case OPC_DPAQX_S_W_PH:
22997 case OPC_DPAQX_SA_W_PH:
22999 case OPC_DPSX_W_PH:
23000 case OPC_DPSQ_S_W_PH:
23001 case OPC_DPSQX_S_W_PH:
23002 case OPC_DPSQX_SA_W_PH:
23003 case OPC_MULSAQ_S_W_PH:
23004 case OPC_DPAQ_SA_L_W:
23005 case OPC_DPSQ_SA_L_W:
23006 case OPC_MAQ_S_W_PHL:
23007 case OPC_MAQ_S_W_PHR:
23008 case OPC_MAQ_SA_W_PHL:
23009 case OPC_MAQ_SA_W_PHR:
23010 case OPC_MULSA_W_PH:
23011 gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
23013 default: /* Invalid */
23014 MIPS_INVAL("MASK DPAW.PH");
23015 generate_exception_end(ctx, EXCP_RI);
23020 op2 = MASK_INSV(ctx->opcode);
23031 t0 = tcg_temp_new();
23032 t1 = tcg_temp_new();
23034 gen_load_gpr(t0, rt);
23035 gen_load_gpr(t1, rs);
23037 gen_helper_insv(cpu_gpr[rt], cpu_env, t1, t0);
23043 default: /* Invalid */
23044 MIPS_INVAL("MASK INSV");
23045 generate_exception_end(ctx, EXCP_RI);
23049 case OPC_APPEND_DSP:
23050 gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
23052 case OPC_EXTR_W_DSP:
23053 op2 = MASK_EXTR_W(ctx->opcode);
23057 case OPC_EXTR_RS_W:
23059 case OPC_EXTRV_S_H:
23061 case OPC_EXTRV_R_W:
23062 case OPC_EXTRV_RS_W:
23067 gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
23070 gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 1);
23076 gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
23078 default: /* Invalid */
23079 MIPS_INVAL("MASK EXTR.W");
23080 generate_exception_end(ctx, EXCP_RI);
23084 #if defined(TARGET_MIPS64)
23085 case OPC_DDIV_G_2E:
23086 case OPC_DDIVU_G_2E:
23087 case OPC_DMULT_G_2E:
23088 case OPC_DMULTU_G_2E:
23089 case OPC_DMOD_G_2E:
23090 case OPC_DMODU_G_2E:
23091 check_insn(ctx, INSN_LOONGSON2E);
23092 gen_loongson_integer(ctx, op1, rd, rs, rt);
23094 case OPC_ABSQ_S_QH_DSP:
23095 op2 = MASK_ABSQ_S_QH(ctx->opcode);
23097 case OPC_PRECEQ_L_PWL:
23098 case OPC_PRECEQ_L_PWR:
23099 case OPC_PRECEQ_PW_QHL:
23100 case OPC_PRECEQ_PW_QHR:
23101 case OPC_PRECEQ_PW_QHLA:
23102 case OPC_PRECEQ_PW_QHRA:
23103 case OPC_PRECEQU_QH_OBL:
23104 case OPC_PRECEQU_QH_OBR:
23105 case OPC_PRECEQU_QH_OBLA:
23106 case OPC_PRECEQU_QH_OBRA:
23107 case OPC_PRECEU_QH_OBL:
23108 case OPC_PRECEU_QH_OBR:
23109 case OPC_PRECEU_QH_OBLA:
23110 case OPC_PRECEU_QH_OBRA:
23111 case OPC_ABSQ_S_OB:
23112 case OPC_ABSQ_S_PW:
23113 case OPC_ABSQ_S_QH:
23114 gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
23122 gen_mipsdsp_bitinsn(ctx, op1, op2, rd, rt);
23124 default: /* Invalid */
23125 MIPS_INVAL("MASK ABSQ_S.QH");
23126 generate_exception_end(ctx, EXCP_RI);
23130 case OPC_ADDU_OB_DSP:
23131 op2 = MASK_ADDU_OB(ctx->opcode);
23133 case OPC_RADDU_L_OB:
23135 case OPC_SUBQ_S_PW:
23137 case OPC_SUBQ_S_QH:
23139 case OPC_SUBU_S_OB:
23141 case OPC_SUBU_S_QH:
23143 case OPC_SUBUH_R_OB:
23145 case OPC_ADDQ_S_PW:
23147 case OPC_ADDQ_S_QH:
23149 case OPC_ADDU_S_OB:
23151 case OPC_ADDU_S_QH:
23153 case OPC_ADDUH_R_OB:
23154 gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
23156 case OPC_MULEQ_S_PW_QHL:
23157 case OPC_MULEQ_S_PW_QHR:
23158 case OPC_MULEU_S_QH_OBL:
23159 case OPC_MULEU_S_QH_OBR:
23160 case OPC_MULQ_RS_QH:
23161 gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 1);
23163 default: /* Invalid */
23164 MIPS_INVAL("MASK ADDU.OB");
23165 generate_exception_end(ctx, EXCP_RI);
23169 case OPC_CMPU_EQ_OB_DSP:
23170 op2 = MASK_CMPU_EQ_OB(ctx->opcode);
23172 case OPC_PRECR_SRA_QH_PW:
23173 case OPC_PRECR_SRA_R_QH_PW:
23174 /* Return value is rt. */
23175 gen_mipsdsp_arith(ctx, op1, op2, rt, rs, rd);
23177 case OPC_PRECR_OB_QH:
23178 case OPC_PRECRQ_OB_QH:
23179 case OPC_PRECRQ_PW_L:
23180 case OPC_PRECRQ_QH_PW:
23181 case OPC_PRECRQ_RS_QH_PW:
23182 case OPC_PRECRQU_S_OB_QH:
23183 gen_mipsdsp_arith(ctx, op1, op2, rd, rs, rt);
23185 case OPC_CMPU_EQ_OB:
23186 case OPC_CMPU_LT_OB:
23187 case OPC_CMPU_LE_OB:
23188 case OPC_CMP_EQ_QH:
23189 case OPC_CMP_LT_QH:
23190 case OPC_CMP_LE_QH:
23191 case OPC_CMP_EQ_PW:
23192 case OPC_CMP_LT_PW:
23193 case OPC_CMP_LE_PW:
23194 gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 0);
23196 case OPC_CMPGDU_EQ_OB:
23197 case OPC_CMPGDU_LT_OB:
23198 case OPC_CMPGDU_LE_OB:
23199 case OPC_CMPGU_EQ_OB:
23200 case OPC_CMPGU_LT_OB:
23201 case OPC_CMPGU_LE_OB:
23202 case OPC_PACKRL_PW:
23206 gen_mipsdsp_add_cmp_pick(ctx, op1, op2, rd, rs, rt, 1);
23208 default: /* Invalid */
23209 MIPS_INVAL("MASK CMPU_EQ.OB");
23210 generate_exception_end(ctx, EXCP_RI);
23214 case OPC_DAPPEND_DSP:
23215 gen_mipsdsp_append(env, ctx, op1, rt, rs, rd);
23217 case OPC_DEXTR_W_DSP:
23218 op2 = MASK_DEXTR_W(ctx->opcode);
23225 case OPC_DEXTR_R_L:
23226 case OPC_DEXTR_RS_L:
23228 case OPC_DEXTR_R_W:
23229 case OPC_DEXTR_RS_W:
23230 case OPC_DEXTR_S_H:
23232 case OPC_DEXTRV_R_L:
23233 case OPC_DEXTRV_RS_L:
23234 case OPC_DEXTRV_S_H:
23236 case OPC_DEXTRV_R_W:
23237 case OPC_DEXTRV_RS_W:
23238 gen_mipsdsp_accinsn(ctx, op1, op2, rt, rs, rd, 1);
23243 gen_mipsdsp_accinsn(ctx, op1, op2, rd, rs, rt, 0);
23245 default: /* Invalid */
23246 MIPS_INVAL("MASK EXTR.W");
23247 generate_exception_end(ctx, EXCP_RI);
23251 case OPC_DPAQ_W_QH_DSP:
23252 op2 = MASK_DPAQ_W_QH(ctx->opcode);
23254 case OPC_DPAU_H_OBL:
23255 case OPC_DPAU_H_OBR:
23256 case OPC_DPSU_H_OBL:
23257 case OPC_DPSU_H_OBR:
23259 case OPC_DPAQ_S_W_QH:
23261 case OPC_DPSQ_S_W_QH:
23262 case OPC_MULSAQ_S_W_QH:
23263 case OPC_DPAQ_SA_L_PW:
23264 case OPC_DPSQ_SA_L_PW:
23265 case OPC_MULSAQ_S_L_PW:
23266 gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
23268 case OPC_MAQ_S_W_QHLL:
23269 case OPC_MAQ_S_W_QHLR:
23270 case OPC_MAQ_S_W_QHRL:
23271 case OPC_MAQ_S_W_QHRR:
23272 case OPC_MAQ_SA_W_QHLL:
23273 case OPC_MAQ_SA_W_QHLR:
23274 case OPC_MAQ_SA_W_QHRL:
23275 case OPC_MAQ_SA_W_QHRR:
23276 case OPC_MAQ_S_L_PWL:
23277 case OPC_MAQ_S_L_PWR:
23282 gen_mipsdsp_multiply(ctx, op1, op2, rd, rs, rt, 0);
23284 default: /* Invalid */
23285 MIPS_INVAL("MASK DPAQ.W.QH");
23286 generate_exception_end(ctx, EXCP_RI);
23290 case OPC_DINSV_DSP:
23291 op2 = MASK_INSV(ctx->opcode);
23302 t0 = tcg_temp_new();
23303 t1 = tcg_temp_new();
23305 gen_load_gpr(t0, rt);
23306 gen_load_gpr(t1, rs);
23308 gen_helper_dinsv(cpu_gpr[rt], cpu_env, t1, t0);
23314 default: /* Invalid */
23315 MIPS_INVAL("MASK DINSV");
23316 generate_exception_end(ctx, EXCP_RI);
23320 case OPC_SHLL_OB_DSP:
23321 gen_mipsdsp_shift(ctx, op1, rd, rs, rt);
23324 default: /* Invalid */
23325 MIPS_INVAL("special3_legacy");
23326 generate_exception_end(ctx, EXCP_RI);
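/*
 * Top-level SPECIAL3 decoder: handles the encodings shared by all revisions
 * (EVA loads/stores, EXT/INS-style bit operations, BSHFL/DBSHFL, RDHWR,
 * FORK/YIELD) and defers everything else to the R6 or legacy sub-decoder.
 */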
23331 static void decode_opc_special3(CPUMIPSState *env, DisasContext *ctx)
23333 int rs, rt, rd, sa;
23337 rs = (ctx->opcode >> 21) & 0x1f;
23338 rt = (ctx->opcode >> 16) & 0x1f;
23339 rd = (ctx->opcode >> 11) & 0x1f;
23340 sa = (ctx->opcode >> 6) & 0x1f;
23341 imm = sextract32(ctx->opcode, 7, 9);
23343 op1 = MASK_SPECIAL3(ctx->opcode);
23346 * EVA loads and stores overlap Loongson 2E instructions decoded by
23347 * decode_opc_special3_legacy(), so be careful to allow their decoding when
23354 check_insn_opc_removed(ctx, ISA_MIPS32R6);
23362 check_cp0_enabled(ctx);
23363 gen_ld(ctx, op1, rt, rs, imm);
23367 check_insn_opc_removed(ctx, ISA_MIPS32R6);
23372 check_cp0_enabled(ctx);
23373 gen_st(ctx, op1, rt, rs, imm);
23376 check_cp0_enabled(ctx);
23377 gen_st_cond(ctx, op1, rt, rs, imm);
23380 check_cp0_enabled(ctx);
23381 if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
23382 gen_cache_operation(ctx, rt, rs, imm);
23384 /* Treat as NOP. */
23387 check_cp0_enabled(ctx);
23388 /* Treat as NOP. */
23396 check_insn(ctx, ISA_MIPS32R2);
23397 gen_bitops(ctx, op1, rt, rs, sa, rd);
23400 op2 = MASK_BSHFL(ctx->opcode);
23403 case OPC_ALIGN_END:
23405 check_insn(ctx, ISA_MIPS32R6);
23406 decode_opc_special3_r6(env, ctx);
23409 check_insn(ctx, ISA_MIPS32R2);
23410 gen_bshfl(ctx, op2, rt, rd);
23414 #if defined(TARGET_MIPS64)
23421 check_insn(ctx, ISA_MIPS64R2);
23422 check_mips_64(ctx);
23423 gen_bitops(ctx, op1, rt, rs, sa, rd);
23426 op2 = MASK_DBSHFL(ctx->opcode);
23429 case OPC_DALIGN_END:
23431 check_insn(ctx, ISA_MIPS32R6);
23432 decode_opc_special3_r6(env, ctx);
23435 check_insn(ctx, ISA_MIPS64R2);
23436 check_mips_64(ctx);
23437 op2 = MASK_DBSHFL(ctx->opcode);
23438 gen_bshfl(ctx, op2, rt, rd);
23444 gen_rdhwr(ctx, rt, rd, extract32(ctx->opcode, 6, 3));
23449 TCGv t0 = tcg_temp_new();
23450 TCGv t1 = tcg_temp_new();
23452 gen_load_gpr(t0, rt);
23453 gen_load_gpr(t1, rs);
23454 gen_helper_fork(t0, t1);
23462 TCGv t0 = tcg_temp_new();
23464 gen_load_gpr(t0, rs);
23465 gen_helper_yield(t0, cpu_env, t0);
23466 gen_store_gpr(t0, rd);
23471 if (ctx->insn_flags & ISA_MIPS32R6) {
23472 decode_opc_special3_r6(env, ctx);
23474 decode_opc_special3_legacy(env, ctx);
23479 /* MIPS SIMD Architecture (MSA) */
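/*
 * MSA is only usable when the FPU is in 64-bit register mode and MSA is
 * enabled: otherwise raise RI, or MSADIS when the core implements MSA but
 * it is currently disabled.
 */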
23480 static inline int check_msa_access(DisasContext *ctx)
23482 if (unlikely((ctx->hflags & MIPS_HFLAG_FPU) &&
23483 !(ctx->hflags & MIPS_HFLAG_F64))) {
23484 generate_exception_end(ctx, EXCP_RI);
23488 if (unlikely(!(ctx->hflags & MIPS_HFLAG_MSA))) {
23489 if (ctx->insn_flags & ASE_MSA) {
23490 generate_exception_end(ctx, EXCP_MSADIS);
23493 generate_exception_end(ctx, EXCP_RI);
23500 static void gen_check_zero_element(TCGv tresult, uint8_t df, uint8_t wt)
23502 /* generates tcg ops to check if any element is 0 */
23503 /* Note this function only works with MSA_WRLEN = 128 */
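/*
 * Per 64-bit half of the 128-bit register, this uses the classic
 * "is any lane zero" trick: (x - ones) & ~x & msbs is non-zero iff some
 * lane of x is zero, where "ones" (eval_zero_or_big) has bit 0 of every
 * lane set and "msbs" (eval_big) has the top bit of every lane set.
 */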
23504 uint64_t eval_zero_or_big = 0;
23505 uint64_t eval_big = 0;
23506 TCGv_i64 t0 = tcg_temp_new_i64();
23507 TCGv_i64 t1 = tcg_temp_new_i64();
23510 eval_zero_or_big = 0x0101010101010101ULL;
23511 eval_big = 0x8080808080808080ULL;
23514 eval_zero_or_big = 0x0001000100010001ULL;
23515 eval_big = 0x8000800080008000ULL;
23518 eval_zero_or_big = 0x0000000100000001ULL;
23519 eval_big = 0x8000000080000000ULL;
23522 eval_zero_or_big = 0x0000000000000001ULL;
23523 eval_big = 0x8000000000000000ULL;
23526 tcg_gen_subi_i64(t0, msa_wr_d[wt<<1], eval_zero_or_big);
23527 tcg_gen_andc_i64(t0, t0, msa_wr_d[wt<<1]);
23528 tcg_gen_andi_i64(t0, t0, eval_big);
23529 tcg_gen_subi_i64(t1, msa_wr_d[(wt<<1)+1], eval_zero_or_big);
23530 tcg_gen_andc_i64(t1, t1, msa_wr_d[(wt<<1)+1]);
23531 tcg_gen_andi_i64(t1, t1, eval_big);
23532 tcg_gen_or_i64(t0, t0, t1);
23533 /* if all result bits are zero, no element is zero */
23534 /* if any result bit is set, at least one element is zero */
23535 tcg_gen_setcondi_i64(TCG_COND_NE, t0, t0, 0);
23536 tcg_gen_trunc_i64_tl(tresult, t0);
23537 tcg_temp_free_i64(t0);
23538 tcg_temp_free_i64(t1);
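/*
 * MSA branches (BZ.V/BNZ.V and BZ.df/BNZ.df).  The condition is computed
 * into bcond from the two 64-bit halves of wt, a branch inside a delay
 * slot raises RI, and the target is pc + 4 + (signed 16-bit offset << 2)
 * with a 32-bit delay slot.
 */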
23541 static void gen_msa_branch(CPUMIPSState *env, DisasContext *ctx, uint32_t op1)
23543 uint8_t df = (ctx->opcode >> 21) & 0x3;
23544 uint8_t wt = (ctx->opcode >> 16) & 0x1f;
23545 int64_t s16 = (int16_t)ctx->opcode;
23547 check_msa_access(ctx);
23549 if (ctx->hflags & MIPS_HFLAG_BMASK) {
23550 generate_exception_end(ctx, EXCP_RI);
23557 TCGv_i64 t0 = tcg_temp_new_i64();
23558 tcg_gen_or_i64(t0, msa_wr_d[wt<<1], msa_wr_d[(wt<<1)+1]);
23559 tcg_gen_setcondi_i64((op1 == OPC_BZ_V) ?
23560 TCG_COND_EQ : TCG_COND_NE, t0, t0, 0);
23561 tcg_gen_trunc_i64_tl(bcond, t0);
23562 tcg_temp_free_i64(t0);
23569 gen_check_zero_element(bcond, df, wt);
23575 gen_check_zero_element(bcond, df, wt);
23576 tcg_gen_setcondi_tl(TCG_COND_EQ, bcond, bcond, 0);
23580 ctx->btarget = ctx->base.pc_next + (s16 << 2) + 4;
23582 ctx->hflags |= MIPS_HFLAG_BC;
23583 ctx->hflags |= MIPS_HFLAG_BDS32;
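/*
 * MSA I8 format: an 8-bit immediate with wd/ws register fields, dispatched
 * straight to per-operation helpers; SHF.df encodes the data format in
 * bits 25:24 and rejects the doubleword format with RI.
 */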
23586 static void gen_msa_i8(CPUMIPSState *env, DisasContext *ctx)
23588 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
23589 uint8_t i8 = (ctx->opcode >> 16) & 0xff;
23590 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
23591 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
23593 TCGv_i32 twd = tcg_const_i32(wd);
23594 TCGv_i32 tws = tcg_const_i32(ws);
23595 TCGv_i32 ti8 = tcg_const_i32(i8);
23597 switch (MASK_MSA_I8(ctx->opcode)) {
23599 gen_helper_msa_andi_b(cpu_env, twd, tws, ti8);
23602 gen_helper_msa_ori_b(cpu_env, twd, tws, ti8);
23605 gen_helper_msa_nori_b(cpu_env, twd, tws, ti8);
23608 gen_helper_msa_xori_b(cpu_env, twd, tws, ti8);
23611 gen_helper_msa_bmnzi_b(cpu_env, twd, tws, ti8);
23614 gen_helper_msa_bmzi_b(cpu_env, twd, tws, ti8);
23617 gen_helper_msa_bseli_b(cpu_env, twd, tws, ti8);
23623 uint8_t df = (ctx->opcode >> 24) & 0x3;
23624 if (df == DF_DOUBLE) {
23625 generate_exception_end(ctx, EXCP_RI);
23627 TCGv_i32 tdf = tcg_const_i32(df);
23628 gen_helper_msa_shf_df(cpu_env, tdf, twd, tws, ti8);
23629 tcg_temp_free_i32(tdf);
23634 MIPS_INVAL("MSA instruction");
23635 generate_exception_end(ctx, EXCP_RI);
23639 tcg_temp_free_i32(twd);
23640 tcg_temp_free_i32(tws);
23641 tcg_temp_free_i32(ti8);
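/*
 * MSA I5 format: a 5-bit immediate that is zero-extended (u5) or
 * sign-extended (s5) depending on the operation, plus a 2-bit data format.
 * LDI.df reuses this format with a 10-bit signed immediate instead.
 */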
23644 static void gen_msa_i5(CPUMIPSState *env, DisasContext *ctx)
23646 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
23647 uint8_t df = (ctx->opcode >> 21) & 0x3;
23648 int8_t s5 = (int8_t) sextract32(ctx->opcode, 16, 5);
23649 uint8_t u5 = (ctx->opcode >> 16) & 0x1f;
23650 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
23651 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
23653 TCGv_i32 tdf = tcg_const_i32(df);
23654 TCGv_i32 twd = tcg_const_i32(wd);
23655 TCGv_i32 tws = tcg_const_i32(ws);
23656 TCGv_i32 timm = tcg_temp_new_i32();
23657 tcg_gen_movi_i32(timm, u5);
23659 switch (MASK_MSA_I5(ctx->opcode)) {
23661 gen_helper_msa_addvi_df(cpu_env, tdf, twd, tws, timm);
23664 gen_helper_msa_subvi_df(cpu_env, tdf, twd, tws, timm);
23666 case OPC_MAXI_S_df:
23667 tcg_gen_movi_i32(timm, s5);
23668 gen_helper_msa_maxi_s_df(cpu_env, tdf, twd, tws, timm);
23670 case OPC_MAXI_U_df:
23671 gen_helper_msa_maxi_u_df(cpu_env, tdf, twd, tws, timm);
23673 case OPC_MINI_S_df:
23674 tcg_gen_movi_i32(timm, s5);
23675 gen_helper_msa_mini_s_df(cpu_env, tdf, twd, tws, timm);
23677 case OPC_MINI_U_df:
23678 gen_helper_msa_mini_u_df(cpu_env, tdf, twd, tws, timm);
23681 tcg_gen_movi_i32(timm, s5);
23682 gen_helper_msa_ceqi_df(cpu_env, tdf, twd, tws, timm);
23684 case OPC_CLTI_S_df:
23685 tcg_gen_movi_i32(timm, s5);
23686 gen_helper_msa_clti_s_df(cpu_env, tdf, twd, tws, timm);
23688 case OPC_CLTI_U_df:
23689 gen_helper_msa_clti_u_df(cpu_env, tdf, twd, tws, timm);
23691 case OPC_CLEI_S_df:
23692 tcg_gen_movi_i32(timm, s5);
23693 gen_helper_msa_clei_s_df(cpu_env, tdf, twd, tws, timm);
23695 case OPC_CLEI_U_df:
23696 gen_helper_msa_clei_u_df(cpu_env, tdf, twd, tws, timm);
23700 int32_t s10 = sextract32(ctx->opcode, 11, 10);
23701 tcg_gen_movi_i32(timm, s10);
23702 gen_helper_msa_ldi_df(cpu_env, tdf, twd, timm);
23706 MIPS_INVAL("MSA instruction");
23707 generate_exception_end(ctx, EXCP_RI);
23711 tcg_temp_free_i32(tdf);
23712 tcg_temp_free_i32(twd);
23713 tcg_temp_free_i32(tws);
23714 tcg_temp_free_i32(timm);
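/*
 * MSA BIT format: the 7-bit df/m field encodes both the data format and
 * the bit index; the position of the highest clear bit in dfm selects the
 * element width, the remaining low bits give the bit index m, and
 * unrecognized encodings raise RI.
 */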
23717 static void gen_msa_bit(CPUMIPSState *env, DisasContext *ctx)
23719 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
23720 uint8_t dfm = (ctx->opcode >> 16) & 0x7f;
23721 uint32_t df = 0, m = 0;
23722 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
23723 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
23730 if ((dfm & 0x40) == 0x00) {
23733 } else if ((dfm & 0x60) == 0x40) {
23736 } else if ((dfm & 0x70) == 0x60) {
23739 } else if ((dfm & 0x78) == 0x70) {
23743 generate_exception_end(ctx, EXCP_RI);
23747 tdf = tcg_const_i32(df);
23748 tm = tcg_const_i32(m);
23749 twd = tcg_const_i32(wd);
23750 tws = tcg_const_i32(ws);
23752 switch (MASK_MSA_BIT(ctx->opcode)) {
23754 gen_helper_msa_slli_df(cpu_env, tdf, twd, tws, tm);
23757 gen_helper_msa_srai_df(cpu_env, tdf, twd, tws, tm);
23760 gen_helper_msa_srli_df(cpu_env, tdf, twd, tws, tm);
23763 gen_helper_msa_bclri_df(cpu_env, tdf, twd, tws, tm);
23766 gen_helper_msa_bseti_df(cpu_env, tdf, twd, tws, tm);
23769 gen_helper_msa_bnegi_df(cpu_env, tdf, twd, tws, tm);
23771 case OPC_BINSLI_df:
23772 gen_helper_msa_binsli_df(cpu_env, tdf, twd, tws, tm);
23774 case OPC_BINSRI_df:
23775 gen_helper_msa_binsri_df(cpu_env, tdf, twd, tws, tm);
23778 gen_helper_msa_sat_s_df(cpu_env, tdf, twd, tws, tm);
23781 gen_helper_msa_sat_u_df(cpu_env, tdf, twd, tws, tm);
23784 gen_helper_msa_srari_df(cpu_env, tdf, twd, tws, tm);
23787 gen_helper_msa_srlri_df(cpu_env, tdf, twd, tws, tm);
23790 MIPS_INVAL("MSA instruction");
23791 generate_exception_end(ctx, EXCP_RI);
23795 tcg_temp_free_i32(tdf);
23796 tcg_temp_free_i32(tm);
23797 tcg_temp_free_i32(twd);
23798 tcg_temp_free_i32(tws);
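/*
 * MSA 3R format: three vector registers plus a 2-bit data format; every
 * operation maps one-to-one onto a gen_helper_msa_*_df call, with the
 * element width resolved inside the helper.
 */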
23801 static void gen_msa_3r(CPUMIPSState *env, DisasContext *ctx)
23803 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
23804 uint8_t df = (ctx->opcode >> 21) & 0x3;
23805 uint8_t wt = (ctx->opcode >> 16) & 0x1f;
23806 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
23807 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
23809 TCGv_i32 tdf = tcg_const_i32(df);
23810 TCGv_i32 twd = tcg_const_i32(wd);
23811 TCGv_i32 tws = tcg_const_i32(ws);
23812 TCGv_i32 twt = tcg_const_i32(wt);
23814 switch (MASK_MSA_3R(ctx->opcode)) {
23816 gen_helper_msa_sll_df(cpu_env, tdf, twd, tws, twt);
23819 gen_helper_msa_addv_df(cpu_env, tdf, twd, tws, twt);
23822 gen_helper_msa_ceq_df(cpu_env, tdf, twd, tws, twt);
23825 gen_helper_msa_add_a_df(cpu_env, tdf, twd, tws, twt);
23827 case OPC_SUBS_S_df:
23828 gen_helper_msa_subs_s_df(cpu_env, tdf, twd, tws, twt);
23831 gen_helper_msa_mulv_df(cpu_env, tdf, twd, tws, twt);
23834 gen_helper_msa_sld_df(cpu_env, tdf, twd, tws, twt);
23837 gen_helper_msa_vshf_df(cpu_env, tdf, twd, tws, twt);
23840 gen_helper_msa_sra_df(cpu_env, tdf, twd, tws, twt);
23843 gen_helper_msa_subv_df(cpu_env, tdf, twd, tws, twt);
23845 case OPC_ADDS_A_df:
23846 gen_helper_msa_adds_a_df(cpu_env, tdf, twd, tws, twt);
23848 case OPC_SUBS_U_df:
23849 gen_helper_msa_subs_u_df(cpu_env, tdf, twd, tws, twt);
23852 gen_helper_msa_maddv_df(cpu_env, tdf, twd, tws, twt);
23855 gen_helper_msa_splat_df(cpu_env, tdf, twd, tws, twt);
23858 gen_helper_msa_srar_df(cpu_env, tdf, twd, tws, twt);
23861 gen_helper_msa_srl_df(cpu_env, tdf, twd, tws, twt);
23864 gen_helper_msa_max_s_df(cpu_env, tdf, twd, tws, twt);
23867 gen_helper_msa_clt_s_df(cpu_env, tdf, twd, tws, twt);
23869 case OPC_ADDS_S_df:
23870 gen_helper_msa_adds_s_df(cpu_env, tdf, twd, tws, twt);
23872 case OPC_SUBSUS_U_df:
23873 gen_helper_msa_subsus_u_df(cpu_env, tdf, twd, tws, twt);
23876 gen_helper_msa_msubv_df(cpu_env, tdf, twd, tws, twt);
23879 gen_helper_msa_pckev_df(cpu_env, tdf, twd, tws, twt);
23882 gen_helper_msa_srlr_df(cpu_env, tdf, twd, tws, twt);
23885 gen_helper_msa_bclr_df(cpu_env, tdf, twd, tws, twt);
23888 gen_helper_msa_max_u_df(cpu_env, tdf, twd, tws, twt);
23891 gen_helper_msa_clt_u_df(cpu_env, tdf, twd, tws, twt);
23893 case OPC_ADDS_U_df:
23894 gen_helper_msa_adds_u_df(cpu_env, tdf, twd, tws, twt);
23896 case OPC_SUBSUU_S_df:
23897 gen_helper_msa_subsuu_s_df(cpu_env, tdf, twd, tws, twt);
23900 gen_helper_msa_pckod_df(cpu_env, tdf, twd, tws, twt);
23903 gen_helper_msa_bset_df(cpu_env, tdf, twd, tws, twt);
23906 gen_helper_msa_min_s_df(cpu_env, tdf, twd, tws, twt);
23909 gen_helper_msa_cle_s_df(cpu_env, tdf, twd, tws, twt);
23912 gen_helper_msa_ave_s_df(cpu_env, tdf, twd, tws, twt);
23914 case OPC_ASUB_S_df:
23915 gen_helper_msa_asub_s_df(cpu_env, tdf, twd, tws, twt);
23918 gen_helper_msa_div_s_df(cpu_env, tdf, twd, tws, twt);
23921 gen_helper_msa_ilvl_df(cpu_env, tdf, twd, tws, twt);
23924 gen_helper_msa_bneg_df(cpu_env, tdf, twd, tws, twt);
23927 gen_helper_msa_min_u_df(cpu_env, tdf, twd, tws, twt);
23930 gen_helper_msa_cle_u_df(cpu_env, tdf, twd, tws, twt);
23933 gen_helper_msa_ave_u_df(cpu_env, tdf, twd, tws, twt);
23935 case OPC_ASUB_U_df:
23936 gen_helper_msa_asub_u_df(cpu_env, tdf, twd, tws, twt);
23939 gen_helper_msa_div_u_df(cpu_env, tdf, twd, tws, twt);
23942 gen_helper_msa_ilvr_df(cpu_env, tdf, twd, tws, twt);
23945 gen_helper_msa_binsl_df(cpu_env, tdf, twd, tws, twt);
23948 gen_helper_msa_max_a_df(cpu_env, tdf, twd, tws, twt);
23950 case OPC_AVER_S_df:
23951 gen_helper_msa_aver_s_df(cpu_env, tdf, twd, tws, twt);
23954 gen_helper_msa_mod_s_df(cpu_env, tdf, twd, tws, twt);
23957 gen_helper_msa_ilvev_df(cpu_env, tdf, twd, tws, twt);
23960 gen_helper_msa_binsr_df(cpu_env, tdf, twd, tws, twt);
23963 gen_helper_msa_min_a_df(cpu_env, tdf, twd, tws, twt);
23965 case OPC_AVER_U_df:
23966 gen_helper_msa_aver_u_df(cpu_env, tdf, twd, tws, twt);
23969 gen_helper_msa_mod_u_df(cpu_env, tdf, twd, tws, twt);
23972 gen_helper_msa_ilvod_df(cpu_env, tdf, twd, tws, twt);
23975 case OPC_DOTP_S_df:
23976 case OPC_DOTP_U_df:
23977 case OPC_DPADD_S_df:
23978 case OPC_DPADD_U_df:
23979 case OPC_DPSUB_S_df:
23980 case OPC_HADD_S_df:
23981 case OPC_DPSUB_U_df:
23982 case OPC_HADD_U_df:
23983 case OPC_HSUB_S_df:
23984 case OPC_HSUB_U_df:
23985 if (df == DF_BYTE) {
23986 generate_exception_end(ctx, EXCP_RI);
23989 switch (MASK_MSA_3R(ctx->opcode)) {
23990 case OPC_DOTP_S_df:
23991 gen_helper_msa_dotp_s_df(cpu_env, tdf, twd, tws, twt);
23993 case OPC_DOTP_U_df:
23994 gen_helper_msa_dotp_u_df(cpu_env, tdf, twd, tws, twt);
23996 case OPC_DPADD_S_df:
23997 gen_helper_msa_dpadd_s_df(cpu_env, tdf, twd, tws, twt);
23999 case OPC_DPADD_U_df:
24000 gen_helper_msa_dpadd_u_df(cpu_env, tdf, twd, tws, twt);
24002 case OPC_DPSUB_S_df:
24003 gen_helper_msa_dpsub_s_df(cpu_env, tdf, twd, tws, twt);
24005 case OPC_HADD_S_df:
24006 gen_helper_msa_hadd_s_df(cpu_env, tdf, twd, tws, twt);
24008 case OPC_DPSUB_U_df:
24009 gen_helper_msa_dpsub_u_df(cpu_env, tdf, twd, tws, twt);
24011 case OPC_HADD_U_df:
24012 gen_helper_msa_hadd_u_df(cpu_env, tdf, twd, tws, twt);
24014 case OPC_HSUB_S_df:
24015 gen_helper_msa_hsub_s_df(cpu_env, tdf, twd, tws, twt);
24017 case OPC_HSUB_U_df:
24018 gen_helper_msa_hsub_u_df(cpu_env, tdf, twd, tws, twt);
24023 MIPS_INVAL("MSA instruction");
24024 generate_exception_end(ctx, EXCP_RI);
24027 tcg_temp_free_i32(twd);
24028 tcg_temp_free_i32(tws);
24029 tcg_temp_free_i32(twt);
24030 tcg_temp_free_i32(tdf);
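/*
 * Illustrative sketch, not part of the translator: every 3R handler above
 * uses the operand layout extracted at the top of gen_msa_3r() -- the
 * operation is selected by the minor-opcode mask plus bits 25:23 (see
 * MASK_MSA_3R), while df, wt, ws and wd live in the fields below.  The
 * struct and function names are placeholders.
 */
#include <stdint.h>

struct sk_msa_3r_fields {
    uint8_t df;   /* bits 22:21 -- 0 = byte, 1 = half, 2 = word, 3 = double */
    uint8_t wt;   /* bits 20:16 -- second source vector register            */
    uint8_t ws;   /* bits 15:11 -- first source vector register             */
    uint8_t wd;   /* bits 10:6  -- destination vector register              */
};

static struct sk_msa_3r_fields sk_decode_msa_3r(uint32_t opcode)
{
    struct sk_msa_3r_fields f = {
        .df = (opcode >> 21) & 0x3,
        .wt = (opcode >> 16) & 0x1f,
        .ws = (opcode >> 11) & 0x1f,
        .wd = (opcode >> 6) & 0x1f,
    };
    return f;
}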
24033 static void gen_msa_elm_3e(CPUMIPSState *env, DisasContext *ctx)
24035 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
24036 uint8_t source = (ctx->opcode >> 11) & 0x1f;
24037 uint8_t dest = (ctx->opcode >> 6) & 0x1f;
24038 TCGv telm = tcg_temp_new();
24039 TCGv_i32 tsr = tcg_const_i32(source);
24040 TCGv_i32 tdt = tcg_const_i32(dest);
24042 switch (MASK_MSA_ELM_DF3E(ctx->opcode)) {
24044 gen_load_gpr(telm, source);
24045 gen_helper_msa_ctcmsa(cpu_env, telm, tdt);
24048 gen_helper_msa_cfcmsa(telm, cpu_env, tsr);
24049 gen_store_gpr(telm, dest);
24052 gen_helper_msa_move_v(cpu_env, tdt, tsr);
24055 MIPS_INVAL("MSA instruction");
24056 generate_exception_end(ctx, EXCP_RI);
24060 tcg_temp_free(telm);
24061 tcg_temp_free_i32(tdt);
24062 tcg_temp_free_i32(tsr);
24065 static void gen_msa_elm_df(CPUMIPSState *env, DisasContext *ctx, uint32_t df,
24068 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
24069 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
24070 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
24072 TCGv_i32 tws = tcg_const_i32(ws);
24073 TCGv_i32 twd = tcg_const_i32(wd);
24074 TCGv_i32 tn = tcg_const_i32(n);
24075 TCGv_i32 tdf = tcg_const_i32(df);
24077 switch (MASK_MSA_ELM(ctx->opcode)) {
24079 gen_helper_msa_sldi_df(cpu_env, tdf, twd, tws, tn);
24081 case OPC_SPLATI_df:
24082 gen_helper_msa_splati_df(cpu_env, tdf, twd, tws, tn);
24085 gen_helper_msa_insve_df(cpu_env, tdf, twd, tws, tn);
24087 case OPC_COPY_S_df:
24088 case OPC_COPY_U_df:
24089 case OPC_INSERT_df:
24090 #if !defined(TARGET_MIPS64)
24091 /* Double format valid only for MIPS64 */
24092 if (df == DF_DOUBLE) {
24093 generate_exception_end(ctx, EXCP_RI);
24097 switch (MASK_MSA_ELM(ctx->opcode)) {
24098 case OPC_COPY_S_df:
24099 if (likely(wd != 0)) {
24100 gen_helper_msa_copy_s_df(cpu_env, tdf, twd, tws, tn);
24103 case OPC_COPY_U_df:
24104 if (likely(wd != 0)) {
24105 gen_helper_msa_copy_u_df(cpu_env, tdf, twd, tws, tn);
24108 case OPC_INSERT_df:
24109 gen_helper_msa_insert_df(cpu_env, tdf, twd, tws, tn);
24114 MIPS_INVAL("MSA instruction");
24115 generate_exception_end(ctx, EXCP_RI);
24117 tcg_temp_free_i32(twd);
24118 tcg_temp_free_i32(tws);
24119 tcg_temp_free_i32(tn);
24120 tcg_temp_free_i32(tdf);
24123 static void gen_msa_elm(CPUMIPSState *env, DisasContext *ctx)
24125 uint8_t dfn = (ctx->opcode >> 16) & 0x3f;
24126 uint32_t df = 0, n = 0;
24128 if ((dfn & 0x30) == 0x00) {
24131 } else if ((dfn & 0x38) == 0x20) {
24134 } else if ((dfn & 0x3c) == 0x30) {
24137 } else if ((dfn & 0x3e) == 0x38) {
24140 } else if (dfn == 0x3E) {
24141 /* CTCMSA, CFCMSA, MOVE.V */
24142 gen_msa_elm_3e(env, ctx);
24145 generate_exception_end(ctx, EXCP_RI);
24149 gen_msa_elm_df(env, ctx, df, n);
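/*
 * Illustrative sketch, not part of the translator: the assignments that
 * derive df and n from the 6-bit df/n field are elided in this excerpt.
 * The sketch reflects the usual MSA df/n encoding; dfn == 0x3E is the
 * CTCMSA/CFCMSA/MOVE.V group handled by gen_msa_elm_3e() above, and the
 * names below are placeholders.
 */
#include <stdbool.h>
#include <stdint.h>

enum { SK_ELM_BYTE, SK_ELM_HALF, SK_ELM_WORD, SK_ELM_DOUBLE };

static bool sk_decode_msa_dfn(uint8_t dfn, uint32_t *df, uint32_t *n)
{
    if ((dfn & 0x30) == 0x00) {        /* 00nnnn: byte element, n in 0..15 */
        *df = SK_ELM_BYTE;
        *n = dfn & 0x0f;
    } else if ((dfn & 0x38) == 0x20) { /* 100nnn: halfword, n in 0..7 */
        *df = SK_ELM_HALF;
        *n = dfn & 0x07;
    } else if ((dfn & 0x3c) == 0x30) { /* 1100nn: word, n in 0..3 */
        *df = SK_ELM_WORD;
        *n = dfn & 0x03;
    } else if ((dfn & 0x3e) == 0x38) { /* 11100n: doubleword, n in 0..1 */
        *df = SK_ELM_DOUBLE;
        *n = dfn & 0x01;
    } else {
        return false;                  /* reserved, or the 0x3E special case */
    }
    return true;
}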
24152 static void gen_msa_3rf(CPUMIPSState *env, DisasContext *ctx)
24154 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
24155 uint8_t df = (ctx->opcode >> 21) & 0x1;
24156 uint8_t wt = (ctx->opcode >> 16) & 0x1f;
24157 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
24158 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
24160 TCGv_i32 twd = tcg_const_i32(wd);
24161 TCGv_i32 tws = tcg_const_i32(ws);
24162 TCGv_i32 twt = tcg_const_i32(wt);
24163 TCGv_i32 tdf = tcg_temp_new_i32();
24165 /* adjust df value for floating-point instruction */
24166 tcg_gen_movi_i32(tdf, df + 2);
24168 switch (MASK_MSA_3RF(ctx->opcode)) {
24170 gen_helper_msa_fcaf_df(cpu_env, tdf, twd, tws, twt);
24173 gen_helper_msa_fadd_df(cpu_env, tdf, twd, tws, twt);
24176 gen_helper_msa_fcun_df(cpu_env, tdf, twd, tws, twt);
24179 gen_helper_msa_fsub_df(cpu_env, tdf, twd, tws, twt);
24182 gen_helper_msa_fcor_df(cpu_env, tdf, twd, tws, twt);
24185 gen_helper_msa_fceq_df(cpu_env, tdf, twd, tws, twt);
24188 gen_helper_msa_fmul_df(cpu_env, tdf, twd, tws, twt);
24191 gen_helper_msa_fcune_df(cpu_env, tdf, twd, tws, twt);
24194 gen_helper_msa_fcueq_df(cpu_env, tdf, twd, tws, twt);
24197 gen_helper_msa_fdiv_df(cpu_env, tdf, twd, tws, twt);
24200 gen_helper_msa_fcne_df(cpu_env, tdf, twd, tws, twt);
24203 gen_helper_msa_fclt_df(cpu_env, tdf, twd, tws, twt);
24206 gen_helper_msa_fmadd_df(cpu_env, tdf, twd, tws, twt);
24209 tcg_gen_movi_i32(tdf, df + 1);
24210 gen_helper_msa_mul_q_df(cpu_env, tdf, twd, tws, twt);
24213 gen_helper_msa_fcult_df(cpu_env, tdf, twd, tws, twt);
24216 gen_helper_msa_fmsub_df(cpu_env, tdf, twd, tws, twt);
24218 case OPC_MADD_Q_df:
24219 tcg_gen_movi_i32(tdf, df + 1);
24220 gen_helper_msa_madd_q_df(cpu_env, tdf, twd, tws, twt);
24223 gen_helper_msa_fcle_df(cpu_env, tdf, twd, tws, twt);
24225 case OPC_MSUB_Q_df:
24226 tcg_gen_movi_i32(tdf, df + 1);
24227 gen_helper_msa_msub_q_df(cpu_env, tdf, twd, tws, twt);
24230 gen_helper_msa_fcule_df(cpu_env, tdf, twd, tws, twt);
24233 gen_helper_msa_fexp2_df(cpu_env, tdf, twd, tws, twt);
24236 gen_helper_msa_fsaf_df(cpu_env, tdf, twd, tws, twt);
24239 gen_helper_msa_fexdo_df(cpu_env, tdf, twd, tws, twt);
24242 gen_helper_msa_fsun_df(cpu_env, tdf, twd, tws, twt);
24245 gen_helper_msa_fsor_df(cpu_env, tdf, twd, tws, twt);
24248 gen_helper_msa_fseq_df(cpu_env, tdf, twd, tws, twt);
24251 gen_helper_msa_ftq_df(cpu_env, tdf, twd, tws, twt);
24254 gen_helper_msa_fsune_df(cpu_env, tdf, twd, tws, twt);
24257 gen_helper_msa_fsueq_df(cpu_env, tdf, twd, tws, twt);
24260 gen_helper_msa_fsne_df(cpu_env, tdf, twd, tws, twt);
24263 gen_helper_msa_fslt_df(cpu_env, tdf, twd, tws, twt);
24266 gen_helper_msa_fmin_df(cpu_env, tdf, twd, tws, twt);
24268 case OPC_MULR_Q_df:
24269 tcg_gen_movi_i32(tdf, df + 1);
24270 gen_helper_msa_mulr_q_df(cpu_env, tdf, twd, tws, twt);
24273 gen_helper_msa_fsult_df(cpu_env, tdf, twd, tws, twt);
24275 case OPC_FMIN_A_df:
24276 gen_helper_msa_fmin_a_df(cpu_env, tdf, twd, tws, twt);
24278 case OPC_MADDR_Q_df:
24279 tcg_gen_movi_i32(tdf, df + 1);
24280 gen_helper_msa_maddr_q_df(cpu_env, tdf, twd, tws, twt);
24283 gen_helper_msa_fsle_df(cpu_env, tdf, twd, tws, twt);
24286 gen_helper_msa_fmax_df(cpu_env, tdf, twd, tws, twt);
24288 case OPC_MSUBR_Q_df:
24289 tcg_gen_movi_i32(tdf, df + 1);
24290 gen_helper_msa_msubr_q_df(cpu_env, tdf, twd, tws, twt);
24293 gen_helper_msa_fsule_df(cpu_env, tdf, twd, tws, twt);
24295 case OPC_FMAX_A_df:
24296 gen_helper_msa_fmax_a_df(cpu_env, tdf, twd, tws, twt);
24299 MIPS_INVAL("MSA instruction");
24300 generate_exception_end(ctx, EXCP_RI);
24304 tcg_temp_free_i32(twd);
24305 tcg_temp_free_i32(tws);
24306 tcg_temp_free_i32(twt);
24307 tcg_temp_free_i32(tdf);
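/*
 * Illustrative sketch, not part of the translator: the 3RF format carries a
 * single df bit, and the handlers above widen it to the generic MSA data
 * format before calling the helper -- df + 2 for the floating-point
 * operations (word or double elements) and df + 1 for the fixed-point
 * Q-format operations such as MUL_Q/MADD_Q (half or word elements).  The
 * function name is a placeholder.
 */
#include <stdbool.h>
#include <stdint.h>

/* Generic MSA df codes: 0 = byte, 1 = half, 2 = word, 3 = double. */
static uint32_t sk_msa_3rf_df(uint32_t df_bit, bool fixed_point_q)
{
    return fixed_point_q ? df_bit + 1   /* 0 -> half,  1 -> word   */
                         : df_bit + 2;  /* 0 -> word,  1 -> double */
}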
24310 static void gen_msa_2r(CPUMIPSState *env, DisasContext *ctx)
24312 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
24313 (op & (0x7 << 18)))
24314 uint8_t wt = (ctx->opcode >> 16) & 0x1f;
24315 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
24316 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
24317 uint8_t df = (ctx->opcode >> 16) & 0x3;
24318 TCGv_i32 twd = tcg_const_i32(wd);
24319 TCGv_i32 tws = tcg_const_i32(ws);
24320 TCGv_i32 twt = tcg_const_i32(wt);
24321 TCGv_i32 tdf = tcg_const_i32(df);
24323 switch (MASK_MSA_2R(ctx->opcode)) {
24325 #if !defined(TARGET_MIPS64)
24326 /* Double format valid only for MIPS64 */
24327 if (df == DF_DOUBLE) {
24328 generate_exception_end(ctx, EXCP_RI);
24332 gen_helper_msa_fill_df(cpu_env, tdf, twd, tws); /* tws holds the GPR index rs for FILL.df */
24335 gen_helper_msa_pcnt_df(cpu_env, tdf, twd, tws);
24338 gen_helper_msa_nloc_df(cpu_env, tdf, twd, tws);
24341 gen_helper_msa_nlzc_df(cpu_env, tdf, twd, tws);
24344 MIPS_INVAL("MSA instruction");
24345 generate_exception_end(ctx, EXCP_RI);
24349 tcg_temp_free_i32(twd);
24350 tcg_temp_free_i32(tws);
24351 tcg_temp_free_i32(twt);
24352 tcg_temp_free_i32(tdf);
24355 static void gen_msa_2rf(CPUMIPSState *env, DisasContext *ctx)
24357 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
24358 (op & (0xf << 17)))
24359 uint8_t wt = (ctx->opcode >> 16) & 0x1f;
24360 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
24361 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
24362 uint8_t df = (ctx->opcode >> 16) & 0x1;
24363 TCGv_i32 twd = tcg_const_i32(wd);
24364 TCGv_i32 tws = tcg_const_i32(ws);
24365 TCGv_i32 twt = tcg_const_i32(wt);
24366 /* adjust df value for floating-point instruction */
24367 TCGv_i32 tdf = tcg_const_i32(df + 2);
24369 switch (MASK_MSA_2RF(ctx->opcode)) {
24370 case OPC_FCLASS_df:
24371 gen_helper_msa_fclass_df(cpu_env, tdf, twd, tws);
24373 case OPC_FTRUNC_S_df:
24374 gen_helper_msa_ftrunc_s_df(cpu_env, tdf, twd, tws);
24376 case OPC_FTRUNC_U_df:
24377 gen_helper_msa_ftrunc_u_df(cpu_env, tdf, twd, tws);
24380 gen_helper_msa_fsqrt_df(cpu_env, tdf, twd, tws);
24382 case OPC_FRSQRT_df:
24383 gen_helper_msa_frsqrt_df(cpu_env, tdf, twd, tws);
24386 gen_helper_msa_frcp_df(cpu_env, tdf, twd, tws);
24389 gen_helper_msa_frint_df(cpu_env, tdf, twd, tws);
24392 gen_helper_msa_flog2_df(cpu_env, tdf, twd, tws);
24394 case OPC_FEXUPL_df:
24395 gen_helper_msa_fexupl_df(cpu_env, tdf, twd, tws);
24397 case OPC_FEXUPR_df:
24398 gen_helper_msa_fexupr_df(cpu_env, tdf, twd, tws);
24401 gen_helper_msa_ffql_df(cpu_env, tdf, twd, tws);
24404 gen_helper_msa_ffqr_df(cpu_env, tdf, twd, tws);
24406 case OPC_FTINT_S_df:
24407 gen_helper_msa_ftint_s_df(cpu_env, tdf, twd, tws);
24409 case OPC_FTINT_U_df:
24410 gen_helper_msa_ftint_u_df(cpu_env, tdf, twd, tws);
24412 case OPC_FFINT_S_df:
24413 gen_helper_msa_ffint_s_df(cpu_env, tdf, twd, tws);
24415 case OPC_FFINT_U_df:
24416 gen_helper_msa_ffint_u_df(cpu_env, tdf, twd, tws);
24420 tcg_temp_free_i32(twd);
24421 tcg_temp_free_i32(tws);
24422 tcg_temp_free_i32(twt);
24423 tcg_temp_free_i32(tdf);
24426 static void gen_msa_vec_v(CPUMIPSState *env, DisasContext *ctx)
24428 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
24429 uint8_t wt = (ctx->opcode >> 16) & 0x1f;
24430 uint8_t ws = (ctx->opcode >> 11) & 0x1f;
24431 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
24432 TCGv_i32 twd = tcg_const_i32(wd);
24433 TCGv_i32 tws = tcg_const_i32(ws);
24434 TCGv_i32 twt = tcg_const_i32(wt);
24436 switch (MASK_MSA_VEC(ctx->opcode)) {
24438 gen_helper_msa_and_v(cpu_env, twd, tws, twt);
24441 gen_helper_msa_or_v(cpu_env, twd, tws, twt);
24444 gen_helper_msa_nor_v(cpu_env, twd, tws, twt);
24447 gen_helper_msa_xor_v(cpu_env, twd, tws, twt);
24450 gen_helper_msa_bmnz_v(cpu_env, twd, tws, twt);
24453 gen_helper_msa_bmz_v(cpu_env, twd, tws, twt);
24456 gen_helper_msa_bsel_v(cpu_env, twd, tws, twt);
24459 MIPS_INVAL("MSA instruction");
24460 generate_exception_end(ctx, EXCP_RI);
24464 tcg_temp_free_i32(twd);
24465 tcg_temp_free_i32(tws);
24466 tcg_temp_free_i32(twt);
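/*
 * Illustrative sketch, not part of the translator: the VEC group has no
 * data-format field and treats the whole 128-bit register as raw bits.  The
 * real helpers live in the MSA helper code (not shown here); the sketch
 * below only spells out what the plain bitwise operations compute, using a
 * placeholder 16-byte vector type.
 */
#include <stddef.h>
#include <stdint.h>

typedef struct { uint8_t b[16]; } sk_msa_vec;

static void sk_and_v(sk_msa_vec *wd, const sk_msa_vec *ws, const sk_msa_vec *wt)
{
    for (size_t i = 0; i < 16; i++) {
        wd->b[i] = ws->b[i] & wt->b[i];
    }
}

static void sk_nor_v(sk_msa_vec *wd, const sk_msa_vec *ws, const sk_msa_vec *wt)
{
    for (size_t i = 0; i < 16; i++) {
        wd->b[i] = (uint8_t)~(ws->b[i] | wt->b[i]);
    }
}

/* OR.V and XOR.V follow the same loop with '|' and '^' respectively. */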
24469 static void gen_msa_vec(CPUMIPSState *env, DisasContext *ctx)
24471 switch (MASK_MSA_VEC(ctx->opcode)) {
24479 gen_msa_vec_v(env, ctx);
24482 gen_msa_2r(env, ctx);
24485 gen_msa_2rf(env, ctx);
24488 MIPS_INVAL("MSA instruction");
24489 generate_exception_end(ctx, EXCP_RI);
24494 static void gen_msa(CPUMIPSState *env, DisasContext *ctx)
24496 uint32_t opcode = ctx->opcode;
24497 check_insn(ctx, ASE_MSA);
24498 check_msa_access(ctx);
24500 switch (MASK_MSA_MINOR(opcode)) {
24501 case OPC_MSA_I8_00:
24502 case OPC_MSA_I8_01:
24503 case OPC_MSA_I8_02:
24504 gen_msa_i8(env, ctx);
24506 case OPC_MSA_I5_06:
24507 case OPC_MSA_I5_07:
24508 gen_msa_i5(env, ctx);
24510 case OPC_MSA_BIT_09:
24511 case OPC_MSA_BIT_0A:
24512 gen_msa_bit(env, ctx);
24514 case OPC_MSA_3R_0D:
24515 case OPC_MSA_3R_0E:
24516 case OPC_MSA_3R_0F:
24517 case OPC_MSA_3R_10:
24518 case OPC_MSA_3R_11:
24519 case OPC_MSA_3R_12:
24520 case OPC_MSA_3R_13:
24521 case OPC_MSA_3R_14:
24522 case OPC_MSA_3R_15:
24523 gen_msa_3r(env, ctx);
24526 gen_msa_elm(env, ctx);
24528 case OPC_MSA_3RF_1A:
24529 case OPC_MSA_3RF_1B:
24530 case OPC_MSA_3RF_1C:
24531 gen_msa_3rf(env, ctx);
24534 gen_msa_vec(env, ctx);
24545 int32_t s10 = sextract32(ctx->opcode, 16, 10);
24546 uint8_t rs = (ctx->opcode >> 11) & 0x1f;
24547 uint8_t wd = (ctx->opcode >> 6) & 0x1f;
24548 uint8_t df = (ctx->opcode >> 0) & 0x3;
24550 TCGv_i32 twd = tcg_const_i32(wd);
24551 TCGv taddr = tcg_temp_new();
24552 gen_base_offset_addr(ctx, taddr, rs, s10 << df);
24554 switch (MASK_MSA_MINOR(opcode)) {
24556 gen_helper_msa_ld_b(cpu_env, twd, taddr);
24559 gen_helper_msa_ld_h(cpu_env, twd, taddr);
24562 gen_helper_msa_ld_w(cpu_env, twd, taddr);
24565 gen_helper_msa_ld_d(cpu_env, twd, taddr);
24568 gen_helper_msa_st_b(cpu_env, twd, taddr);
24571 gen_helper_msa_st_h(cpu_env, twd, taddr);
24574 gen_helper_msa_st_w(cpu_env, twd, taddr);
24577 gen_helper_msa_st_d(cpu_env, twd, taddr);
24581 tcg_temp_free_i32(twd);
24582 tcg_temp_free(taddr);
24586 MIPS_INVAL("MSA instruction");
24587 generate_exception_end(ctx, EXCP_RI);
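/*
 * Illustrative sketch, not part of the translator: for MSA LD.df/ST.df the
 * 10-bit signed offset is in element units, so the byte offset handed to
 * gen_base_offset_addr() above is s10 << df (df = 0..3 for b/h/w/d).  The
 * function name is a placeholder and the sign extension assumes an
 * arithmetic right shift, as sextract32() does.
 */
#include <stdint.h>

static uint64_t sk_msa_ldst_addr(uint64_t gpr_rs, uint32_t opcode)
{
    int32_t s10 = (int32_t)(opcode << 6) >> 22;  /* sign-extend bits 25:16 */
    uint32_t df = opcode & 0x3;                  /* element-size selector  */
    return gpr_rs + ((int64_t)s10 << df);        /* offset scaled to bytes */
}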
24593 static void decode_opc(CPUMIPSState *env, DisasContext *ctx)
24596 int rs, rt, rd, sa;
24600 /* make sure instructions are on a word boundary */
24601 if (ctx->base.pc_next & 0x3) {
24602 env->CP0_BadVAddr = ctx->base.pc_next;
24603 generate_exception_err(ctx, EXCP_AdEL, EXCP_INST_NOTAVAIL);
24607 /* Handle blikely not taken case */
24608 if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
24609 TCGLabel *l1 = gen_new_label();
24611 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
24612 tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
24613 gen_goto_tb(ctx, 1, ctx->base.pc_next + 4);
24617 op = MASK_OP_MAJOR(ctx->opcode);
24618 rs = (ctx->opcode >> 21) & 0x1f;
24619 rt = (ctx->opcode >> 16) & 0x1f;
24620 rd = (ctx->opcode >> 11) & 0x1f;
24621 sa = (ctx->opcode >> 6) & 0x1f;
24622 imm = (int16_t)ctx->opcode;
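/*
 * Illustrative sketch, not part of the translator: the (int16_t) cast above
 * is what sign-extends the 16-bit immediate before the branch cases scale
 * it by four (imm << 2).  Per the MIPS architecture the scaled offset is
 * added to the address of the delay slot; the struct and function names
 * below are placeholders.
 */
#include <stdint.h>

struct sk_mips_itype {
    uint32_t rs, rt, rd, sa;
    int32_t imm;                      /* sign-extended 16-bit immediate */
};

static struct sk_mips_itype sk_decode_itype(uint32_t opcode)
{
    struct sk_mips_itype f = {
        .rs  = (opcode >> 21) & 0x1f,
        .rt  = (opcode >> 16) & 0x1f,
        .rd  = (opcode >> 11) & 0x1f,
        .sa  = (opcode >> 6) & 0x1f,
        .imm = (int16_t)opcode,       /* low 16 bits, sign-extended */
    };
    return f;
}

static uint64_t sk_branch_target(uint64_t branch_pc, uint32_t opcode)
{
    /* Target = address of the delay slot + (sign-extended offset << 2). */
    return branch_pc + 4 + ((int64_t)(int16_t)opcode << 2);
}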
24625 decode_opc_special(env, ctx);
24628 decode_opc_special2_legacy(env, ctx);
24631 decode_opc_special3(env, ctx);
24634 op1 = MASK_REGIMM(ctx->opcode);
24636 case OPC_BLTZL: /* REGIMM branches */
24640 check_insn(ctx, ISA_MIPS2);
24641 check_insn_opc_removed(ctx, ISA_MIPS32R6);
24645 gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
24649 if (ctx->insn_flags & ISA_MIPS32R6) {
24651 /* OPC_NAL, OPC_BAL */
24652 gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
24654 generate_exception_end(ctx, EXCP_RI);
24657 gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
24660 case OPC_TGEI: /* REGIMM traps */
24667 check_insn(ctx, ISA_MIPS2);
24668 check_insn_opc_removed(ctx, ISA_MIPS32R6);
24669 gen_trap(ctx, op1, rs, -1, imm);
24672 check_insn(ctx, ISA_MIPS32R6);
24673 generate_exception_end(ctx, EXCP_RI);
24676 check_insn(ctx, ISA_MIPS32R2);
24677 /* Break the TB to be able to sync copied instructions immediately. */
24679 ctx->base.is_jmp = DISAS_STOP;
24681 case OPC_BPOSGE32: /* MIPS DSP branch */
24682 #if defined(TARGET_MIPS64)
24686 gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
24688 #if defined(TARGET_MIPS64)
24690 check_insn(ctx, ISA_MIPS32R6);
24691 check_mips_64(ctx);
24693 tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
24697 check_insn(ctx, ISA_MIPS32R6);
24698 check_mips_64(ctx);
24700 tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
24704 default: /* Invalid */
24705 MIPS_INVAL("regimm");
24706 generate_exception_end(ctx, EXCP_RI);
24711 check_cp0_enabled(ctx);
24712 op1 = MASK_CP0(ctx->opcode);
24720 #if defined(TARGET_MIPS64)
24724 #ifndef CONFIG_USER_ONLY
24725 gen_cp0(env, ctx, op1, rt, rd);
24726 #endif /* !CONFIG_USER_ONLY */
24744 #ifndef CONFIG_USER_ONLY
24745 gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
24746 #endif /* !CONFIG_USER_ONLY */
24749 #ifndef CONFIG_USER_ONLY
24752 TCGv t0 = tcg_temp_new();
24754 op2 = MASK_MFMC0(ctx->opcode);
24758 gen_helper_dmt(t0);
24759 gen_store_gpr(t0, rt);
24763 gen_helper_emt(t0);
24764 gen_store_gpr(t0, rt);
24768 gen_helper_dvpe(t0, cpu_env);
24769 gen_store_gpr(t0, rt);
24773 gen_helper_evpe(t0, cpu_env);
24774 gen_store_gpr(t0, rt);
24777 check_insn(ctx, ISA_MIPS32R6);
24779 gen_helper_dvp(t0, cpu_env);
24780 gen_store_gpr(t0, rt);
24784 check_insn(ctx, ISA_MIPS32R6);
24786 gen_helper_evp(t0, cpu_env);
24787 gen_store_gpr(t0, rt);
24791 check_insn(ctx, ISA_MIPS32R2);
24792 save_cpu_state(ctx, 1);
24793 gen_helper_di(t0, cpu_env);
24794 gen_store_gpr(t0, rt);
24795 /* Stop translation as we may have switched
24796 the execution mode. */
24797 ctx->base.is_jmp = DISAS_STOP;
24800 check_insn(ctx, ISA_MIPS32R2);
24801 save_cpu_state(ctx, 1);
24802 gen_helper_ei(t0, cpu_env);
24803 gen_store_gpr(t0, rt);
24804 /* DISAS_STOP isn't sufficient; we need to ensure we break
24805 out of translated code to check for pending interrupts. */
24806 gen_save_pc(ctx->base.pc_next + 4);
24807 ctx->base.is_jmp = DISAS_EXIT;
24809 default: /* Invalid */
24810 MIPS_INVAL("mfmc0");
24811 generate_exception_end(ctx, EXCP_RI);
24816 #endif /* !CONFIG_USER_ONLY */
24819 check_insn(ctx, ISA_MIPS32R2);
24820 gen_load_srsgpr(rt, rd);
24823 check_insn(ctx, ISA_MIPS32R2);
24824 gen_store_srsgpr(rt, rd);
24828 generate_exception_end(ctx, EXCP_RI);
24832 case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
24833 if (ctx->insn_flags & ISA_MIPS32R6) {
24834 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
24835 gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
24838 /* Arithmetic with immediate opcode */
24839 gen_arith_imm(ctx, op, rt, rs, imm);
24843 gen_arith_imm(ctx, op, rt, rs, imm);
24845 case OPC_SLTI: /* Set on less than with immediate opcode */
24847 gen_slt_imm(ctx, op, rt, rs, imm);
24849 case OPC_ANDI: /* Arithmetic with immediate opcode */
24850 case OPC_LUI: /* OPC_AUI */
24853 gen_logic_imm(ctx, op, rt, rs, imm);
24855 case OPC_J: /* Jump */
24857 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
24858 gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
24861 case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
24862 if (ctx->insn_flags & ISA_MIPS32R6) {
24864 generate_exception_end(ctx, EXCP_RI);
24867 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
24868 gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
24871 gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
24874 case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
24875 if (ctx->insn_flags & ISA_MIPS32R6) {
24877 generate_exception_end(ctx, EXCP_RI);
24880 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
24881 gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
24884 gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
24887 case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
24890 gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
24892 check_insn(ctx, ISA_MIPS32R6);
24893 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
24894 gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
24897 case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
24900 gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
24902 check_insn(ctx, ISA_MIPS32R6);
24903 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
24904 gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
24909 check_insn(ctx, ISA_MIPS2);
24910 check_insn_opc_removed(ctx, ISA_MIPS32R6);
24914 gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
24916 case OPC_LL: /* Load and stores */
24917 check_insn(ctx, ISA_MIPS2);
24921 check_insn_opc_removed(ctx, ISA_MIPS32R6);
24929 gen_ld(ctx, op, rt, rs, imm);
24933 check_insn_opc_removed(ctx, ISA_MIPS32R6);
24938 gen_st(ctx, op, rt, rs, imm);
24941 check_insn(ctx, ISA_MIPS2);
24942 check_insn_opc_removed(ctx, ISA_MIPS32R6);
24943 gen_st_cond(ctx, op, rt, rs, imm);
24946 check_insn_opc_removed(ctx, ISA_MIPS32R6);
24947 check_cp0_enabled(ctx);
24948 check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
24949 if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
24950 gen_cache_operation(ctx, rt, rs, imm);
24952 /* Treat as NOP. */
24955 check_insn_opc_removed(ctx, ISA_MIPS32R6);
24956 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
24957 /* Treat as NOP. */
24960 /* Floating point (COP1). */
24965 gen_cop1_ldst(ctx, op, rt, rs, imm);
24969 op1 = MASK_CP1(ctx->opcode);
24974 check_cp1_enabled(ctx);
24975 check_insn(ctx, ISA_MIPS32R2);
24981 check_cp1_enabled(ctx);
24982 gen_cp1(ctx, op1, rt, rd);
24984 #if defined(TARGET_MIPS64)
24987 check_cp1_enabled(ctx);
24988 check_insn(ctx, ISA_MIPS3);
24989 check_mips_64(ctx);
24990 gen_cp1(ctx, op1, rt, rd);
24993 case OPC_BC1EQZ: /* OPC_BC1ANY2 */
24994 check_cp1_enabled(ctx);
24995 if (ctx->insn_flags & ISA_MIPS32R6) {
24997 gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
25002 check_insn(ctx, ASE_MIPS3D);
25003 gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
25004 (rt >> 2) & 0x7, imm << 2);
25008 check_cp1_enabled(ctx);
25009 check_insn(ctx, ISA_MIPS32R6);
25010 gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
25014 check_cp1_enabled(ctx);
25015 check_insn_opc_removed(ctx, ISA_MIPS32R6);
25017 check_insn(ctx, ASE_MIPS3D);
25020 check_cp1_enabled(ctx);
25021 check_insn_opc_removed(ctx, ISA_MIPS32R6);
25022 gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
25023 (rt >> 2) & 0x7, imm << 2);
25030 check_cp1_enabled(ctx);
25031 gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
25037 int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
25038 check_cp1_enabled(ctx);
25039 if (ctx->insn_flags & ISA_MIPS32R6) {
25041 case R6_OPC_CMP_AF_S:
25042 case R6_OPC_CMP_UN_S:
25043 case R6_OPC_CMP_EQ_S:
25044 case R6_OPC_CMP_UEQ_S:
25045 case R6_OPC_CMP_LT_S:
25046 case R6_OPC_CMP_ULT_S:
25047 case R6_OPC_CMP_LE_S:
25048 case R6_OPC_CMP_ULE_S:
25049 case R6_OPC_CMP_SAF_S:
25050 case R6_OPC_CMP_SUN_S:
25051 case R6_OPC_CMP_SEQ_S:
25052 case R6_OPC_CMP_SEUQ_S:
25053 case R6_OPC_CMP_SLT_S:
25054 case R6_OPC_CMP_SULT_S:
25055 case R6_OPC_CMP_SLE_S:
25056 case R6_OPC_CMP_SULE_S:
25057 case R6_OPC_CMP_OR_S:
25058 case R6_OPC_CMP_UNE_S:
25059 case R6_OPC_CMP_NE_S:
25060 case R6_OPC_CMP_SOR_S:
25061 case R6_OPC_CMP_SUNE_S:
25062 case R6_OPC_CMP_SNE_S:
25063 gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
25065 case R6_OPC_CMP_AF_D:
25066 case R6_OPC_CMP_UN_D:
25067 case R6_OPC_CMP_EQ_D:
25068 case R6_OPC_CMP_UEQ_D:
25069 case R6_OPC_CMP_LT_D:
25070 case R6_OPC_CMP_ULT_D:
25071 case R6_OPC_CMP_LE_D:
25072 case R6_OPC_CMP_ULE_D:
25073 case R6_OPC_CMP_SAF_D:
25074 case R6_OPC_CMP_SUN_D:
25075 case R6_OPC_CMP_SEQ_D:
25076 case R6_OPC_CMP_SEUQ_D:
25077 case R6_OPC_CMP_SLT_D:
25078 case R6_OPC_CMP_SULT_D:
25079 case R6_OPC_CMP_SLE_D:
25080 case R6_OPC_CMP_SULE_D:
25081 case R6_OPC_CMP_OR_D:
25082 case R6_OPC_CMP_UNE_D:
25083 case R6_OPC_CMP_NE_D:
25084 case R6_OPC_CMP_SOR_D:
25085 case R6_OPC_CMP_SUNE_D:
25086 case R6_OPC_CMP_SNE_D:
25087 gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
25090 gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
25091 rt, rd, sa, (imm >> 8) & 0x7);
25096 gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
25111 check_insn(ctx, ASE_MSA);
25112 gen_msa_branch(env, ctx, op1);
25116 generate_exception_end(ctx, EXCP_RI);
25121 /* Compact branches [R6] and COP2 [non-R6] */
25122 case OPC_BC: /* OPC_LWC2 */
25123 case OPC_BALC: /* OPC_SWC2 */
25124 if (ctx->insn_flags & ISA_MIPS32R6) {
25125 /* OPC_BC, OPC_BALC */
25126 gen_compute_compact_branch(ctx, op, 0, 0,
25127 sextract32(ctx->opcode << 2, 0, 28));
25129 /* OPC_LWC2, OPC_SWC2 */
25130 /* COP2: Not implemented. */
25131 generate_exception_err(ctx, EXCP_CpU, 2);
25134 case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
25135 case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
25136 if (ctx->insn_flags & ISA_MIPS32R6) {
25138 /* OPC_BEQZC, OPC_BNEZC */
25139 gen_compute_compact_branch(ctx, op, rs, 0,
25140 sextract32(ctx->opcode << 2, 0, 23));
25142 /* OPC_JIC, OPC_JIALC */
25143 gen_compute_compact_branch(ctx, op, 0, rt, imm);
25146 /* OPC_LWC2, OPC_SWC2 */
25147 /* COP2: Not implemented. */
25148 generate_exception_err(ctx, EXCP_CpU, 2);
25152 check_insn(ctx, INSN_LOONGSON2F);
25153 /* Note that these instructions use different fields. */
25154 gen_loongson_multimedia(ctx, sa, rd, rt);
25158 check_insn_opc_removed(ctx, ISA_MIPS32R6);
25159 if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
25160 check_cp1_enabled(ctx);
25161 op1 = MASK_CP3(ctx->opcode);
25165 check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
25171 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
25172 gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
25175 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
25176 /* Treat as NOP. */
25179 check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
25193 check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
25194 gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
25198 generate_exception_end(ctx, EXCP_RI);
25202 generate_exception_err(ctx, EXCP_CpU, 1);
25206 #if defined(TARGET_MIPS64)
25207 /* MIPS64 opcodes */
25211 check_insn_opc_removed(ctx, ISA_MIPS32R6);
25215 check_insn(ctx, ISA_MIPS3);
25216 check_mips_64(ctx);
25217 gen_ld(ctx, op, rt, rs, imm);
25221 check_insn_opc_removed(ctx, ISA_MIPS32R6);
25224 check_insn(ctx, ISA_MIPS3);
25225 check_mips_64(ctx);
25226 gen_st(ctx, op, rt, rs, imm);
25229 check_insn_opc_removed(ctx, ISA_MIPS32R6);
25230 check_insn(ctx, ISA_MIPS3);
25231 check_mips_64(ctx);
25232 gen_st_cond(ctx, op, rt, rs, imm);
25234 case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
25235 if (ctx->insn_flags & ISA_MIPS32R6) {
25236 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
25237 gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
25240 check_insn(ctx, ISA_MIPS3);
25241 check_mips_64(ctx);
25242 gen_arith_imm(ctx, op, rt, rs, imm);
25246 check_insn(ctx, ISA_MIPS3);
25247 check_mips_64(ctx);
25248 gen_arith_imm(ctx, op, rt, rs, imm);
25251 case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
25252 if (ctx->insn_flags & ISA_MIPS32R6) {
25253 gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
25255 MIPS_INVAL("major opcode");
25256 generate_exception_end(ctx, EXCP_RI);
25260 case OPC_DAUI: /* OPC_JALX */
25261 if (ctx->insn_flags & ISA_MIPS32R6) {
25262 #if defined(TARGET_MIPS64)
25264 check_mips_64(ctx);
25266 generate_exception(ctx, EXCP_RI);
25267 } else if (rt != 0) {
25268 TCGv t0 = tcg_temp_new();
25269 gen_load_gpr(t0, rs);
25270 tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
25274 generate_exception_end(ctx, EXCP_RI);
25275 MIPS_INVAL("major opcode");
25279 check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
25280 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
25281 gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
25284 case OPC_MSA: /* OPC_MDMX */
25285 /* MDMX: Not implemented. */
25289 check_insn(ctx, ISA_MIPS32R6);
25290 gen_pcrel(ctx, ctx->opcode, ctx->base.pc_next, rs);
25292 default: /* Invalid */
25293 MIPS_INVAL("major opcode");
25294 generate_exception_end(ctx, EXCP_RI);
25299 static void mips_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
25301 DisasContext *ctx = container_of(dcbase, DisasContext, base);
25302 CPUMIPSState *env = cs->env_ptr;
25304 ctx->page_start = ctx->base.pc_first & TARGET_PAGE_MASK;
25305 ctx->saved_pc = -1;
25306 ctx->insn_flags = env->insn_flags;
25307 ctx->CP0_Config1 = env->CP0_Config1;
25308 ctx->CP0_Config3 = env->CP0_Config3;
25309 ctx->CP0_Config5 = env->CP0_Config5;
25311 ctx->kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
25312 ctx->rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
25313 ctx->ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
25314 ctx->bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
25315 ctx->bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
25316 ctx->PAMask = env->PAMask;
25317 ctx->mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
25318 ctx->eva = (env->CP0_Config5 >> CP0C5_EVA) & 1;
25319 ctx->sc = (env->CP0_Config3 >> CP0C3_SC) & 1;
25320 ctx->CP0_LLAddr_shift = env->CP0_LLAddr_shift;
25321 ctx->cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
25322 /* Restore delay slot state from the tb context. */
25323 ctx->hflags = (uint32_t)ctx->base.tb->flags; /* FIXME: maybe use 64 bits? */
25324 ctx->ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
25325 ctx->ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
25326 (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
25327 ctx->vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
25328 ctx->mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
25329 ctx->nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
25330 ctx->abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
25331 restore_cpu_state(env, ctx);
25332 #ifdef CONFIG_USER_ONLY
25333 ctx->mem_idx = MIPS_HFLAG_UM;
25335 ctx->mem_idx = hflags_mmu_index(ctx->hflags);
25337 ctx->default_tcg_memop_mask = (ctx->insn_flags & ISA_MIPS32R6) ?
25338 MO_UNALN : MO_ALIGN;
25340 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx->base.tb, ctx->mem_idx,
25344 static void mips_tr_tb_start(DisasContextBase *dcbase, CPUState *cs)
25348 static void mips_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
25350 DisasContext *ctx = container_of(dcbase, DisasContext, base);
25352 tcg_gen_insn_start(ctx->base.pc_next, ctx->hflags & MIPS_HFLAG_BMASK,
25356 static bool mips_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
25357 const CPUBreakpoint *bp)
25359 DisasContext *ctx = container_of(dcbase, DisasContext, base);
25361 save_cpu_state(ctx, 1);
25362 ctx->base.is_jmp = DISAS_NORETURN;
25363 gen_helper_raise_exception_debug(cpu_env);
25364 /* The address covered by the breakpoint must be included in
25365 [tb->pc, tb->pc + tb->size) in order for it to be
25366 properly cleared -- thus we increment the PC here so that
25367 the logic setting tb->size below does the right thing. */
25368 ctx->base.pc_next += 4;
25372 static void mips_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
25374 CPUMIPSState *env = cs->env_ptr;
25375 DisasContext *ctx = container_of(dcbase, DisasContext, base);
25379 is_slot = ctx->hflags & MIPS_HFLAG_BMASK;
25380 if (ctx->insn_flags & ISA_NANOMIPS32) {
25381 ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
25382 insn_bytes = decode_nanomips_opc(env, ctx);
25383 } else if (!(ctx->hflags & MIPS_HFLAG_M16)) {
25384 ctx->opcode = cpu_ldl_code(env, ctx->base.pc_next);
25386 decode_opc(env, ctx);
25387 } else if (ctx->insn_flags & ASE_MICROMIPS) {
25388 ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
25389 insn_bytes = decode_micromips_opc(env, ctx);
25390 } else if (ctx->insn_flags & ASE_MIPS16) {
25391 ctx->opcode = cpu_lduw_code(env, ctx->base.pc_next);
25392 insn_bytes = decode_mips16_opc(env, ctx);
25394 generate_exception_end(ctx, EXCP_RI);
25395 g_assert(ctx->base.is_jmp == DISAS_NORETURN);
25399 if (ctx->hflags & MIPS_HFLAG_BMASK) {
25400 if (!(ctx->hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
25401 MIPS_HFLAG_FBNSLOT))) {
25402 /* Force branch generation, as there is neither a delay slot nor a forbidden slot. */
25406 if ((ctx->hflags & MIPS_HFLAG_M16) &&
25407 (ctx->hflags & MIPS_HFLAG_FBNSLOT)) {
25408 /* Force to generate branch as microMIPS R6 doesn't restrict
25409 branches in the forbidden slot. */
25414 gen_branch(ctx, insn_bytes);
25416 ctx->base.pc_next += insn_bytes;
25418 if (ctx->base.is_jmp != DISAS_NEXT) {
25421 /* Execute a branch and its delay slot as a single instruction.
25422 This is what GDB expects and is consistent with what the
25423 hardware does (e.g. if a delay slot instruction faults, the
25424 reported PC is the PC of the branch). */
25425 if (ctx->base.singlestep_enabled &&
25426 (ctx->hflags & MIPS_HFLAG_BMASK) == 0) {
25427 ctx->base.is_jmp = DISAS_TOO_MANY;
25429 if (ctx->base.pc_next - ctx->page_start >= TARGET_PAGE_SIZE) {
25430 ctx->base.is_jmp = DISAS_TOO_MANY;
25434 static void mips_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
25436 DisasContext *ctx = container_of(dcbase, DisasContext, base);
25438 if (ctx->base.singlestep_enabled && ctx->base.is_jmp != DISAS_NORETURN) {
25439 save_cpu_state(ctx, ctx->base.is_jmp != DISAS_EXIT);
25440 gen_helper_raise_exception_debug(cpu_env);
25442 switch (ctx->base.is_jmp) {
25444 gen_save_pc(ctx->base.pc_next);
25445 tcg_gen_lookup_and_goto_ptr();
25448 case DISAS_TOO_MANY:
25449 save_cpu_state(ctx, 0);
25450 gen_goto_tb(ctx, 0, ctx->base.pc_next);
25453 tcg_gen_exit_tb(NULL, 0);
25455 case DISAS_NORETURN:
25458 g_assert_not_reached();
25463 static void mips_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
25465 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
25466 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
25469 static const TranslatorOps mips_tr_ops = {
25470 .init_disas_context = mips_tr_init_disas_context,
25471 .tb_start = mips_tr_tb_start,
25472 .insn_start = mips_tr_insn_start,
25473 .breakpoint_check = mips_tr_breakpoint_check,
25474 .translate_insn = mips_tr_translate_insn,
25475 .tb_stop = mips_tr_tb_stop,
25476 .disas_log = mips_tr_disas_log,
25479 void gen_intermediate_code(CPUState *cs, struct TranslationBlock *tb)
25483 translator_loop(&mips_tr_ops, &ctx.base, cs, tb);
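/*
 * Illustrative sketch, not part of the translator: translator_loop() drives
 * the callback table above in a fixed order.  This is a deliberately
 * simplified picture of that control flow, not the real loop in the TCG
 * core; the types and the stop condition are placeholders.
 */
#include <stdbool.h>

struct sk_translator_ops {
    void (*init_disas_context)(void *ctx);  /* once per TB: cache config bits  */
    void (*tb_start)(void *ctx);
    void (*insn_start)(void *ctx);          /* record pc/hflags for unwinding  */
    void (*translate_insn)(void *ctx);      /* decode one insn, may end the TB */
    void (*tb_stop)(void *ctx);             /* emit the TB exit                */
};

static void sk_translator_loop(const struct sk_translator_ops *ops, void *ctx,
                               bool (*tb_finished)(void *ctx))
{
    ops->init_disas_context(ctx);
    ops->tb_start(ctx);
    do {
        ops->insn_start(ctx);
        ops->translate_insn(ctx);
    } while (!tb_finished(ctx));   /* is_jmp set, page limit reached, ... */
    ops->tb_stop(ctx);
}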
25486 static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
25490 int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
25492 #define printfpr(fp) \
25495 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
25496 " fd:%13g fs:%13g psu:%13g\n", \
25497 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
25498 (double)(fp)->fd, \
25499 (double)(fp)->fs[FP_ENDIAN_IDX], \
25500 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
25503 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
25504 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
25505 fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
25506 " fd:%13g fs:%13g psu:%13g\n", \
25507 tmp.w[FP_ENDIAN_IDX], tmp.d, \
25509 (double)tmp.fs[FP_ENDIAN_IDX], \
25510 (double)tmp.fs[!FP_ENDIAN_IDX]); \
25515 fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
25516 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
25517 get_float_exception_flags(&env->active_fpu.fp_status));
25518 for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
25519 fpu_fprintf(f, "%3s: ", fregnames[i]);
25520 printfpr(&env->active_fpu.fpr[i]);
25526 void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
25529 MIPSCPU *cpu = MIPS_CPU(cs);
25530 CPUMIPSState *env = &cpu->env;
25533 cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
25534 " LO=0x" TARGET_FMT_lx " ds %04x "
25535 TARGET_FMT_lx " " TARGET_FMT_ld "\n",
25536 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
25537 env->hflags, env->btarget, env->bcond);
25538 for (i = 0; i < 32; i++) {
25540 cpu_fprintf(f, "GPR%02d:", i);
25541 cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
25543 cpu_fprintf(f, "\n");
25546 cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
25547 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
25548 cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
25550 env->CP0_Config0, env->CP0_Config1, env->lladdr);
25551 cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
25552 env->CP0_Config2, env->CP0_Config3);
25553 cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
25554 env->CP0_Config4, env->CP0_Config5);
25555 if ((flags & CPU_DUMP_FPU) && (env->hflags & MIPS_HFLAG_FPU)) {
25556 fpu_dump_state(env, f, cpu_fprintf, flags);
25560 void mips_tcg_init(void)
25565 for (i = 1; i < 32; i++)
25566 cpu_gpr[i] = tcg_global_mem_new(cpu_env,
25567 offsetof(CPUMIPSState, active_tc.gpr[i]),
25570 for (i = 0; i < 32; i++) {
25571 int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
25573 tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
25574 /* The scalar floating-point registers are mapped onto the low
25575 * 64 bits (wr.d[0]) of the MSA vector registers. */
25576 fpu_f64[i] = msa_wr_d[i * 2];
25577 off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
25578 msa_wr_d[i * 2 + 1] =
25579 tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);
25582 cpu_PC = tcg_global_mem_new(cpu_env,
25583 offsetof(CPUMIPSState, active_tc.PC), "PC");
25584 for (i = 0; i < MIPS_DSP_ACC; i++) {
25585 cpu_HI[i] = tcg_global_mem_new(cpu_env,
25586 offsetof(CPUMIPSState, active_tc.HI[i]),
25588 cpu_LO[i] = tcg_global_mem_new(cpu_env,
25589 offsetof(CPUMIPSState, active_tc.LO[i]),
25592 cpu_dspctrl = tcg_global_mem_new(cpu_env,
25593 offsetof(CPUMIPSState, active_tc.DSPControl),
25595 bcond = tcg_global_mem_new(cpu_env,
25596 offsetof(CPUMIPSState, bcond), "bcond");
25597 btarget = tcg_global_mem_new(cpu_env,
25598 offsetof(CPUMIPSState, btarget), "btarget");
25599 hflags = tcg_global_mem_new_i32(cpu_env,
25600 offsetof(CPUMIPSState, hflags), "hflags");
25602 fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
25603 offsetof(CPUMIPSState, active_fpu.fcr0),
25605 fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
25606 offsetof(CPUMIPSState, active_fpu.fcr31),
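/*
 * Illustrative sketch, not part of the translator: the register-allocation
 * loop above makes fpu_f64[i] alias msa_wr_d[i * 2], i.e. the scalar 64-bit
 * FPR is simply the low doubleword of the 128-bit MSA register.  The union
 * below is only an illustration of that layout; the real types are the
 * fpr_t/wr_t definitions in the CPU headers, which are not shown here.
 */
#include <stdint.h>

typedef union {
    struct {
        uint64_t d[2];     /* d[0] doubles as the scalar 64-bit FPR */
    } wr;                  /* 128-bit MSA view                      */
    uint64_t fd;           /* 64-bit FPU view (aliases wr.d[0])     */
    uint32_t fs[2];        /* 32-bit FPU view                       */
} sk_fpr;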
25610 #include "translate_init.inc.c"
25612 void cpu_mips_realize_env(CPUMIPSState *env)
25614 env->exception_base = (int32_t)0xBFC00000;
25616 #ifndef CONFIG_USER_ONLY
25617 mmu_init(env, env->cpu_model);
25619 fpu_init(env, env->cpu_model);
25620 mvp_init(env, env->cpu_model);
25623 bool cpu_supports_cps_smp(const char *cpu_type)
25625 const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
25626 return (mcc->cpu_def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
25629 bool cpu_supports_isa(const char *cpu_type, unsigned int isa)
25631 const MIPSCPUClass *mcc = MIPS_CPU_CLASS(object_class_by_name(cpu_type));
25632 return (mcc->cpu_def->insn_flags & isa) != 0;
25635 void cpu_set_exception_base(int vp_index, target_ulong address)
25637 MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
25638 vp->env.exception_base = address;
25641 void cpu_state_reset(CPUMIPSState *env)
25643 MIPSCPU *cpu = mips_env_get_cpu(env);
25644 CPUState *cs = CPU(cpu);
25646 /* Reset registers to their default values */
25647 env->CP0_PRid = env->cpu_model->CP0_PRid;
25648 env->CP0_Config0 = env->cpu_model->CP0_Config0;
25649 #ifdef TARGET_WORDS_BIGENDIAN
25650 env->CP0_Config0 |= (1 << CP0C0_BE);
25652 env->CP0_Config1 = env->cpu_model->CP0_Config1;
25653 env->CP0_Config2 = env->cpu_model->CP0_Config2;
25654 env->CP0_Config3 = env->cpu_model->CP0_Config3;
25655 env->CP0_Config4 = env->cpu_model->CP0_Config4;
25656 env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
25657 env->CP0_Config5 = env->cpu_model->CP0_Config5;
25658 env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
25659 env->CP0_Config6 = env->cpu_model->CP0_Config6;
25660 env->CP0_Config7 = env->cpu_model->CP0_Config7;
25661 env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
25662 << env->cpu_model->CP0_LLAddr_shift;
25663 env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
25664 env->SYNCI_Step = env->cpu_model->SYNCI_Step;
25665 env->CCRes = env->cpu_model->CCRes;
25666 env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
25667 env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
25668 env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
25669 env->current_tc = 0;
25670 env->SEGBITS = env->cpu_model->SEGBITS;
25671 env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
25672 #if defined(TARGET_MIPS64)
25673 if (env->cpu_model->insn_flags & ISA_MIPS3) {
25674 env->SEGMask |= 3ULL << 62;
25677 env->PABITS = env->cpu_model->PABITS;
25678 env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
25679 env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
25680 env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
25681 env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
25682 env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
25683 env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
25684 env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
25685 env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
25686 env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
25687 env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
25688 env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
25689 env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
25690 env->CP0_EBaseWG_rw_bitmask = env->cpu_model->CP0_EBaseWG_rw_bitmask;
25691 env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
25692 env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
25693 env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
25694 env->msair = env->cpu_model->MSAIR;
25695 env->insn_flags = env->cpu_model->insn_flags;
25697 #if defined(CONFIG_USER_ONLY)
25698 env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
25699 # ifdef TARGET_MIPS64
25700 /* Enable 64-bit register mode. */
25701 env->CP0_Status |= (1 << CP0St_PX);
25703 # ifdef TARGET_ABI_MIPSN64
25704 /* Enable 64-bit address mode. */
25705 env->CP0_Status |= (1 << CP0St_UX);
25707 /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
25708 hardware registers. */
25709 env->CP0_HWREna |= 0x0000000F;
25710 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
25711 env->CP0_Status |= (1 << CP0St_CU1);
25713 if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
25714 env->CP0_Status |= (1 << CP0St_MX);
25716 # if defined(TARGET_MIPS64)
25717 /* For MIPS64, set Status.FR to 1 if an FPU is present and the bit is writable. */
25718 if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
25719 (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
25720 env->CP0_Status |= (1 << CP0St_FR);
25724 if (env->hflags & MIPS_HFLAG_BMASK) {
25725 /* If the exception was raised from a delay slot,
25726 come back to the jump. */
25727 env->CP0_ErrorEPC = (env->active_tc.PC
25728 - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
25730 env->CP0_ErrorEPC = env->active_tc.PC;
25732 env->active_tc.PC = env->exception_base;
25733 env->CP0_Random = env->tlb->nb_tlb - 1;
25734 env->tlb->tlb_in_use = env->tlb->nb_tlb;
25735 env->CP0_Wired = 0;
25736 env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
25737 env->CP0_EBase = (cs->cpu_index & 0x3FF);
25738 if (mips_um_ksegs_enabled()) {
25739 env->CP0_EBase |= 0x40000000;
25741 env->CP0_EBase |= (int32_t)0x80000000;
25743 if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
25744 env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
25746 env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
25748 env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
25749 /* vectored interrupts not implemented, timer on int 7,
25750 no performance counters. */
25751 env->CP0_IntCtl = 0xe0000000;
25755 for (i = 0; i < 7; i++) {
25756 env->CP0_WatchLo[i] = 0;
25757 env->CP0_WatchHi[i] = 0x80000000;
25759 env->CP0_WatchLo[7] = 0;
25760 env->CP0_WatchHi[7] = 0;
25762 /* Count register increments in debug mode, EJTAG version 1 */
25763 env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
25765 cpu_mips_store_count(env, 1);
25767 if (env->CP0_Config3 & (1 << CP0C3_MT)) {
25770 /* Only TC0 on VPE 0 starts as active. */
25771 for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
25772 env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
25773 env->tcs[i].CP0_TCHalt = 1;
25775 env->active_tc.CP0_TCHalt = 1;
25778 if (cs->cpu_index == 0) {
25779 /* VPE0 starts up enabled. */
25780 env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
25781 env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);
25783 /* TC0 starts up unhalted. */
25785 env->active_tc.CP0_TCHalt = 0;
25786 env->tcs[0].CP0_TCHalt = 0;
25787 /* With thread 0 active. */
25788 env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
25789 env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
25794 * Configure default legacy segmentation control. We use this regardless of
25795 * whether segmentation control is presented to the guest.
25797 /* KSeg3 (seg0 0xE0000000..0xFFFFFFFF) */
25798 env->CP0_SegCtl0 = (CP0SC_AM_MK << CP0SC_AM);
25799 /* KSeg2 (seg1 0xC0000000..0xDFFFFFFF) */
25800 env->CP0_SegCtl0 |= ((CP0SC_AM_MSK << CP0SC_AM)) << 16;
25801 /* KSeg1 (seg2 0xA0000000..0xBFFFFFFF) */
25802 env->CP0_SegCtl1 = (0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
25804 /* KSeg0 (seg3 0x80000000..0x9FFFFFFF) */
25805 env->CP0_SegCtl1 |= ((0 << CP0SC_PA) | (CP0SC_AM_UK << CP0SC_AM) |
25806 (3 << CP0SC_C)) << 16;
25807 /* USeg (seg4 0x40000000..0x7FFFFFFF) */
25808 env->CP0_SegCtl2 = (2 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
25809 (1 << CP0SC_EU) | (2 << CP0SC_C);
25810 /* USeg (seg5 0x00000000..0x3FFFFFFF) */
25811 env->CP0_SegCtl2 |= ((0 << CP0SC_PA) | (CP0SC_AM_MUSK << CP0SC_AM) |
25812 (1 << CP0SC_EU) | (2 << CP0SC_C)) << 16;
25813 /* XKPhys (note, SegCtl2.XR = 0, so XAM won't be used) */
25814 env->CP0_SegCtl1 |= (CP0SC_AM_UK << CP0SC1_XAM);
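/*
 * Illustrative sketch, not part of the translator: each SegCtl register
 * above packs two 16-bit segment configurations -- the even-numbered
 * segment in bits 15:0 and the odd-numbered one in bits 31:16 -- which is
 * why every second assignment is OR-ed in with "<< 16".  The helper name
 * is a placeholder.
 */
#include <stdint.h>

static uint32_t sk_pack_segctl(uint16_t even_seg_cfg, uint16_t odd_seg_cfg)
{
    return (uint32_t)even_seg_cfg | ((uint32_t)odd_seg_cfg << 16);
}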
25816 if ((env->insn_flags & ISA_MIPS32R6) &&
25817 (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
25818 /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
25819 env->CP0_Status |= (1 << CP0St_FR);
25822 if (((env->CP0_Config3 >> CP0C3_ISA) & 3) == 3) {
25823 /* microMIPS on reset when Config3.ISA is 3 */
25824 env->hflags |= MIPS_HFLAG_M16;
25828 if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
25832 compute_hflags(env);
25833 restore_fp_status(env);
25834 restore_pamask(env);
25835 cs->exception_index = EXCP_NONE;
25837 if (semihosting_get_argc()) {
25838 /* UHI interface can be used to obtain argc and argv */
25839 env->active_tc.gpr[4] = -1;
25843 void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
25844 target_ulong *data)
25846 env->active_tc.PC = data[0];
25847 env->hflags &= ~MIPS_HFLAG_BMASK;
25848 env->hflags |= data[1];
25849 switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
25850 case MIPS_HFLAG_BR:
25852 case MIPS_HFLAG_BC:
25853 case MIPS_HFLAG_BL:
25855 env->btarget = data[2];
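/*
 * Illustrative sketch, not part of the translator: restore_state_to_opc()
 * consumes, in order, the values recorded by mips_tr_insn_start() for every
 * guest instruction -- data[0] is the PC, data[1] the branch-state bits of
 * hflags, and data[2], used only by the branch cases above, is the branch
 * target (the third tcg_gen_insn_start() argument is truncated in this
 * excerpt, so treat that pairing as an inference).  Names are placeholders.
 */
#include <stdint.h>

struct sk_unwind_slots {
    uint64_t data[3];
};

static void sk_record_insn_start(struct sk_unwind_slots *s, uint64_t pc,
                                 uint32_t branch_hflags, uint64_t btarget)
{
    s->data[0] = pc;             /* restored into active_tc.PC           */
    s->data[1] = branch_hflags;  /* OR-ed back after clearing BMASK bits */
    s->data[2] = btarget;        /* used only for the BC/BL branch kinds */
}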