2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with this library; if not, write to the Free Software
20 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
33 #include "qemu-common.h"
39 //#define MIPS_DEBUG_DISAS
40 //#define MIPS_DEBUG_SIGN_EXTENSIONS
42 /* MIPS major opcodes */
43 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
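/* Every MIPS instruction is 32 bits wide, with the major opcode in bits
   31..26.  The enum values below keep that field in its encoded position so
   a decoded instruction word can be compared against them directly, e.g.
   ADDIU (opcode 0x09) masks to 0x09 << 26 == 0x24000000. */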
46 /* indirect opcode tables */
47 OPC_SPECIAL = (0x00 << 26),
48 OPC_REGIMM = (0x01 << 26),
49 OPC_CP0 = (0x10 << 26),
50 OPC_CP1 = (0x11 << 26),
51 OPC_CP2 = (0x12 << 26),
52 OPC_CP3 = (0x13 << 26),
53 OPC_SPECIAL2 = (0x1C << 26),
54 OPC_SPECIAL3 = (0x1F << 26),
55 /* arithmetic with immediate */
56 OPC_ADDI = (0x08 << 26),
57 OPC_ADDIU = (0x09 << 26),
58 OPC_SLTI = (0x0A << 26),
59 OPC_SLTIU = (0x0B << 26),
60 OPC_ANDI = (0x0C << 26),
61 OPC_ORI = (0x0D << 26),
62 OPC_XORI = (0x0E << 26),
63 OPC_LUI = (0x0F << 26),
64 OPC_DADDI = (0x18 << 26),
65 OPC_DADDIU = (0x19 << 26),
66 /* Jumps and branches */
68 OPC_JAL = (0x03 << 26),
69 OPC_BEQ = (0x04 << 26), /* Unconditional if rs == rt; B is BEQ $zero, $zero */
70 OPC_BEQL = (0x14 << 26),
71 OPC_BNE = (0x05 << 26),
72 OPC_BNEL = (0x15 << 26),
73 OPC_BLEZ = (0x06 << 26),
74 OPC_BLEZL = (0x16 << 26),
75 OPC_BGTZ = (0x07 << 26),
76 OPC_BGTZL = (0x17 << 26),
77 OPC_JALX = (0x1D << 26), /* MIPS16 only */
79 OPC_LDL = (0x1A << 26),
80 OPC_LDR = (0x1B << 26),
81 OPC_LB = (0x20 << 26),
82 OPC_LH = (0x21 << 26),
83 OPC_LWL = (0x22 << 26),
84 OPC_LW = (0x23 << 26),
85 OPC_LBU = (0x24 << 26),
86 OPC_LHU = (0x25 << 26),
87 OPC_LWR = (0x26 << 26),
88 OPC_LWU = (0x27 << 26),
89 OPC_SB = (0x28 << 26),
90 OPC_SH = (0x29 << 26),
91 OPC_SWL = (0x2A << 26),
92 OPC_SW = (0x2B << 26),
93 OPC_SDL = (0x2C << 26),
94 OPC_SDR = (0x2D << 26),
95 OPC_SWR = (0x2E << 26),
96 OPC_LL = (0x30 << 26),
97 OPC_LLD = (0x34 << 26),
98 OPC_LD = (0x37 << 26),
99 OPC_SC = (0x38 << 26),
100 OPC_SCD = (0x3C << 26),
101 OPC_SD = (0x3F << 26),
102 /* Floating point load/store */
103 OPC_LWC1 = (0x31 << 26),
104 OPC_LWC2 = (0x32 << 26),
105 OPC_LDC1 = (0x35 << 26),
106 OPC_LDC2 = (0x36 << 26),
107 OPC_SWC1 = (0x39 << 26),
108 OPC_SWC2 = (0x3A << 26),
109 OPC_SDC1 = (0x3D << 26),
110 OPC_SDC2 = (0x3E << 26),
111 /* MDMX ASE specific */
112 OPC_MDMX = (0x1E << 26),
113 /* Cache and prefetch */
114 OPC_CACHE = (0x2F << 26),
115 OPC_PREF = (0x33 << 26),
116 /* Reserved major opcode */
117 OPC_MAJOR3B_RESERVED = (0x3B << 26),
120 /* MIPS special opcodes */
121 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
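/* SPECIAL-class instructions are further decoded by the function field in
   bits 5..0; the values below OR that field into the major opcode so a
   single switch can dispatch on MASK_SPECIAL(). */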
125 OPC_SLL = 0x00 | OPC_SPECIAL,
126 /* NOP is SLL r0, r0, 0 */
127 /* SSNOP is SLL r0, r0, 1 */
128 /* EHB is SLL r0, r0, 3 */
129 OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
130 OPC_SRA = 0x03 | OPC_SPECIAL,
131 OPC_SLLV = 0x04 | OPC_SPECIAL,
132 OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
133 OPC_SRAV = 0x07 | OPC_SPECIAL,
134 OPC_DSLLV = 0x14 | OPC_SPECIAL,
135 OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
136 OPC_DSRAV = 0x17 | OPC_SPECIAL,
137 OPC_DSLL = 0x38 | OPC_SPECIAL,
138 OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
139 OPC_DSRA = 0x3B | OPC_SPECIAL,
140 OPC_DSLL32 = 0x3C | OPC_SPECIAL,
141 OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
142 OPC_DSRA32 = 0x3F | OPC_SPECIAL,
143 /* Multiplication / division */
144 OPC_MULT = 0x18 | OPC_SPECIAL,
145 OPC_MULTU = 0x19 | OPC_SPECIAL,
146 OPC_DIV = 0x1A | OPC_SPECIAL,
147 OPC_DIVU = 0x1B | OPC_SPECIAL,
148 OPC_DMULT = 0x1C | OPC_SPECIAL,
149 OPC_DMULTU = 0x1D | OPC_SPECIAL,
150 OPC_DDIV = 0x1E | OPC_SPECIAL,
151 OPC_DDIVU = 0x1F | OPC_SPECIAL,
152 /* Two-register arithmetic / logic */
153 OPC_ADD = 0x20 | OPC_SPECIAL,
154 OPC_ADDU = 0x21 | OPC_SPECIAL,
155 OPC_SUB = 0x22 | OPC_SPECIAL,
156 OPC_SUBU = 0x23 | OPC_SPECIAL,
157 OPC_AND = 0x24 | OPC_SPECIAL,
158 OPC_OR = 0x25 | OPC_SPECIAL,
159 OPC_XOR = 0x26 | OPC_SPECIAL,
160 OPC_NOR = 0x27 | OPC_SPECIAL,
161 OPC_SLT = 0x2A | OPC_SPECIAL,
162 OPC_SLTU = 0x2B | OPC_SPECIAL,
163 OPC_DADD = 0x2C | OPC_SPECIAL,
164 OPC_DADDU = 0x2D | OPC_SPECIAL,
165 OPC_DSUB = 0x2E | OPC_SPECIAL,
166 OPC_DSUBU = 0x2F | OPC_SPECIAL,
168 OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
169 OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
171 OPC_TGE = 0x30 | OPC_SPECIAL,
172 OPC_TGEU = 0x31 | OPC_SPECIAL,
173 OPC_TLT = 0x32 | OPC_SPECIAL,
174 OPC_TLTU = 0x33 | OPC_SPECIAL,
175 OPC_TEQ = 0x34 | OPC_SPECIAL,
176 OPC_TNE = 0x36 | OPC_SPECIAL,
177 /* HI / LO register loads & stores */
178 OPC_MFHI = 0x10 | OPC_SPECIAL,
179 OPC_MTHI = 0x11 | OPC_SPECIAL,
180 OPC_MFLO = 0x12 | OPC_SPECIAL,
181 OPC_MTLO = 0x13 | OPC_SPECIAL,
182 /* Conditional moves */
183 OPC_MOVZ = 0x0A | OPC_SPECIAL,
184 OPC_MOVN = 0x0B | OPC_SPECIAL,
186 OPC_MOVCI = 0x01 | OPC_SPECIAL,
189 OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
190 OPC_SYSCALL = 0x0C | OPC_SPECIAL,
191 OPC_BREAK = 0x0D | OPC_SPECIAL,
192 OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
193 OPC_SYNC = 0x0F | OPC_SPECIAL,
195 OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
196 OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
197 OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
198 OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
199 OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
200 OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
201 OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
204 /* Multiplication variants of the vr54xx. */
205 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
208 OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
209 OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
210 OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
211 OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
212 OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
213 OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
214 OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
215 OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
216 OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
217 OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
218 OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
219 OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
220 OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
221 OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
224 /* REGIMM (rt field) opcodes */
225 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
228 OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
229 OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
230 OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
231 OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
232 OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
233 OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
234 OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
235 OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
236 OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
237 OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
238 OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
239 OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
240 OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
241 OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
242 OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,
245 /* Special2 opcodes */
246 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
249 /* Multiply and multiply-add/subtract operations */
250 OPC_MADD = 0x00 | OPC_SPECIAL2,
251 OPC_MADDU = 0x01 | OPC_SPECIAL2,
252 OPC_MUL = 0x02 | OPC_SPECIAL2,
253 OPC_MSUB = 0x04 | OPC_SPECIAL2,
254 OPC_MSUBU = 0x05 | OPC_SPECIAL2,
256 OPC_CLZ = 0x20 | OPC_SPECIAL2,
257 OPC_CLO = 0x21 | OPC_SPECIAL2,
258 OPC_DCLZ = 0x24 | OPC_SPECIAL2,
259 OPC_DCLO = 0x25 | OPC_SPECIAL2,
261 OPC_SDBBP = 0x3F | OPC_SPECIAL2,
264 /* Special3 opcodes */
265 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
268 OPC_EXT = 0x00 | OPC_SPECIAL3,
269 OPC_DEXTM = 0x01 | OPC_SPECIAL3,
270 OPC_DEXTU = 0x02 | OPC_SPECIAL3,
271 OPC_DEXT = 0x03 | OPC_SPECIAL3,
272 OPC_INS = 0x04 | OPC_SPECIAL3,
273 OPC_DINSM = 0x05 | OPC_SPECIAL3,
274 OPC_DINSU = 0x06 | OPC_SPECIAL3,
275 OPC_DINS = 0x07 | OPC_SPECIAL3,
276 OPC_FORK = 0x08 | OPC_SPECIAL3,
277 OPC_YIELD = 0x09 | OPC_SPECIAL3,
278 OPC_BSHFL = 0x20 | OPC_SPECIAL3,
279 OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
280 OPC_RDHWR = 0x3B | OPC_SPECIAL3,
284 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
287 OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
288 OPC_SEB = (0x10 << 6) | OPC_BSHFL,
289 OPC_SEH = (0x18 << 6) | OPC_BSHFL,
293 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
296 OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
297 OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
300 /* Coprocessor 0 (rs field) */
301 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
304 OPC_MFC0 = (0x00 << 21) | OPC_CP0,
305 OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
306 OPC_MTC0 = (0x04 << 21) | OPC_CP0,
307 OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
308 OPC_MFTR = (0x08 << 21) | OPC_CP0,
309 OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
310 OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
311 OPC_MTTR = (0x0C << 21) | OPC_CP0,
312 OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
313 OPC_C0 = (0x10 << 21) | OPC_CP0,
314 OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
315 OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
319 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
322 OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
323 OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
324 OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
325 OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
326 OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
327 OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
330 /* Coprocessor 0 (with rs == C0) */
331 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
334 OPC_TLBR = 0x01 | OPC_C0,
335 OPC_TLBWI = 0x02 | OPC_C0,
336 OPC_TLBWR = 0x06 | OPC_C0,
337 OPC_TLBP = 0x08 | OPC_C0,
338 OPC_RFE = 0x10 | OPC_C0,
339 OPC_ERET = 0x18 | OPC_C0,
340 OPC_DERET = 0x1F | OPC_C0,
341 OPC_WAIT = 0x20 | OPC_C0,
344 /* Coprocessor 1 (rs field) */
345 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
348 OPC_MFC1 = (0x00 << 21) | OPC_CP1,
349 OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
350 OPC_CFC1 = (0x02 << 21) | OPC_CP1,
351 OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
352 OPC_MTC1 = (0x04 << 21) | OPC_CP1,
353 OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
354 OPC_CTC1 = (0x06 << 21) | OPC_CP1,
355 OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
356 OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
357 OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
358 OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
359 OPC_S_FMT = (0x10 << 21) | OPC_CP1, /* 16: fmt=single fp */
360 OPC_D_FMT = (0x11 << 21) | OPC_CP1, /* 17: fmt=double fp */
361 OPC_E_FMT = (0x12 << 21) | OPC_CP1, /* 18: fmt=extended fp */
362 OPC_Q_FMT = (0x13 << 21) | OPC_CP1, /* 19: fmt=quad fp */
363 OPC_W_FMT = (0x14 << 21) | OPC_CP1, /* 20: fmt=32bit fixed */
364 OPC_L_FMT = (0x15 << 21) | OPC_CP1, /* 21: fmt=64bit fixed */
365 OPC_PS_FMT = (0x16 << 21) | OPC_CP1, /* 22: fmt=paired single fp */
368 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
369 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
372 OPC_BC1F = (0x00 << 16) | OPC_BC1,
373 OPC_BC1T = (0x01 << 16) | OPC_BC1,
374 OPC_BC1FL = (0x02 << 16) | OPC_BC1,
375 OPC_BC1TL = (0x03 << 16) | OPC_BC1,
379 OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
380 OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
384 OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
385 OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
388 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
391 OPC_MFC2 = (0x00 << 21) | OPC_CP2,
392 OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
393 OPC_CFC2 = (0x02 << 21) | OPC_CP2,
394 OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
395 OPC_MTC2 = (0x04 << 21) | OPC_CP2,
396 OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
397 OPC_CTC2 = (0x06 << 21) | OPC_CP2,
398 OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
399 OPC_BC2 = (0x08 << 21) | OPC_CP2,
402 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
405 OPC_LWXC1 = 0x00 | OPC_CP3,
406 OPC_LDXC1 = 0x01 | OPC_CP3,
407 OPC_LUXC1 = 0x05 | OPC_CP3,
408 OPC_SWXC1 = 0x08 | OPC_CP3,
409 OPC_SDXC1 = 0x09 | OPC_CP3,
410 OPC_SUXC1 = 0x0D | OPC_CP3,
411 OPC_PREFX = 0x0F | OPC_CP3,
412 OPC_ALNV_PS = 0x1E | OPC_CP3,
413 OPC_MADD_S = 0x20 | OPC_CP3,
414 OPC_MADD_D = 0x21 | OPC_CP3,
415 OPC_MADD_PS = 0x26 | OPC_CP3,
416 OPC_MSUB_S = 0x28 | OPC_CP3,
417 OPC_MSUB_D = 0x29 | OPC_CP3,
418 OPC_MSUB_PS = 0x2E | OPC_CP3,
419 OPC_NMADD_S = 0x30 | OPC_CP3,
420 OPC_NMADD_D = 0x31 | OPC_CP3,
421 OPC_NMADD_PS= 0x36 | OPC_CP3,
422 OPC_NMSUB_S = 0x38 | OPC_CP3,
423 OPC_NMSUB_D = 0x39 | OPC_CP3,
424 OPC_NMSUB_PS= 0x3E | OPC_CP3,
427 /* global register indices */
428 static TCGv_ptr cpu_env;
429 static TCGv cpu_gpr[32], cpu_PC;
430 static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
431 static TCGv cpu_dspctrl, btarget, bcond;
432 static TCGv_i32 hflags;
433 static TCGv_i32 fpu_fcr0, fpu_fcr31;
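/* These TCG globals alias fields of the CPU state structure; they are set up
   once by the translator's initialisation code (not part of this excerpt). */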
435 #include "gen-icount.h"
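/* Wrappers that pass a compile-time immediate to a TCG helper: the immediate
   is boxed into a temporary i32 constant, the helper is called, and the
   temporary is freed immediately afterwards. */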
437 #define gen_helper_0i(name, arg) do { \
438 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
439 gen_helper_##name(helper_tmp); \
440 tcg_temp_free_i32(helper_tmp); \
443 #define gen_helper_1i(name, arg1, arg2) do { \
444 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
445 gen_helper_##name(arg1, helper_tmp); \
446 tcg_temp_free_i32(helper_tmp); \
449 #define gen_helper_2i(name, arg1, arg2, arg3) do { \
450 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
451 gen_helper_##name(arg1, arg2, helper_tmp); \
452 tcg_temp_free_i32(helper_tmp); \
455 #define gen_helper_3i(name, arg1, arg2, arg3, arg4) do { \
456 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
457 gen_helper_##name(arg1, arg2, arg3, helper_tmp); \
458 tcg_temp_free_i32(helper_tmp); \
461 typedef struct DisasContext {
462 struct TranslationBlock *tb;
463 target_ulong pc, saved_pc;
465 /* Routine used to access memory */
467 uint32_t hflags, saved_hflags;
469 target_ulong btarget;
473 BS_NONE = 0, /* We go out of the TB without reaching a branch or an
474 * exception condition */
475 BS_STOP = 1, /* We want to stop translation for any reason */
476 BS_BRANCH = 2, /* We reached a branch condition */
477 BS_EXCP = 3, /* We reached an exception condition */
480 static const char *regnames[] =
481 { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
482 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
483 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
484 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };
486 static const char *regnames_HI[] =
487 { "HI0", "HI1", "HI2", "HI3", };
489 static const char *regnames_LO[] =
490 { "LO0", "LO1", "LO2", "LO3", };
492 static const char *regnames_ACX[] =
493 { "ACX0", "ACX1", "ACX2", "ACX3", };
495 static const char *fregnames[] =
496 { "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
497 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
498 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
499 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };
501 #ifdef MIPS_DEBUG_DISAS
502 #define MIPS_DEBUG(fmt, args...) \
503 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
504 TARGET_FMT_lx ": %08x " fmt "\n", \
505 ctx->pc, ctx->opcode , ##args)
506 #define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
508 #define MIPS_DEBUG(fmt, args...) do { } while(0)
509 #define LOG_DISAS(...) do { } while (0)
512 #define MIPS_INVAL(op) \
514 MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26, \
515 ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F)); \
518 /* General-purpose register moves. */
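/* Register $zero is hard-wired to 0: reads of GPR 0 produce the constant 0
   and writes to it are discarded, which is why both helpers special-case
   reg == 0. */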
519 static inline void gen_load_gpr (TCGv t, int reg)
522 tcg_gen_movi_tl(t, 0);
524 tcg_gen_mov_tl(t, cpu_gpr[reg]);
527 static inline void gen_store_gpr (TCGv t, int reg)
530 tcg_gen_mov_tl(cpu_gpr[reg], t);
533 /* Moves to/from ACX register. */
534 static inline void gen_load_ACX (TCGv t, int reg)
536 tcg_gen_mov_tl(t, cpu_ACX[reg]);
539 static inline void gen_store_ACX (TCGv t, int reg)
541 tcg_gen_mov_tl(cpu_ACX[reg], t);
544 /* Moves to/from shadow registers. */
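/* The previous shadow register set is selected by the PSS field of
   CP0_SRSCtl; the generated code below scales PSS by the size of one
   32-register bank and indexes the banked GPR file relative to the CPU
   state pointer. */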
545 static inline void gen_load_srsgpr (int from, int to)
547 TCGv r_tmp1 = tcg_temp_new();
550 tcg_gen_movi_tl(r_tmp1, 0);
552 TCGv_i32 r_tmp2 = tcg_temp_new_i32();
553 TCGv_ptr addr = tcg_temp_new_ptr();
555 tcg_gen_ld_i32(r_tmp2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
556 tcg_gen_shri_i32(r_tmp2, r_tmp2, CP0SRSCtl_PSS);
557 tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xf);
558 tcg_gen_muli_i32(r_tmp2, r_tmp2, sizeof(target_ulong) * 32);
559 tcg_gen_ext_i32_ptr(addr, r_tmp2);
560 tcg_gen_add_ptr(addr, cpu_env, addr);
562 tcg_gen_ld_tl(r_tmp1, addr, sizeof(target_ulong) * from);
563 tcg_temp_free_ptr(addr);
564 tcg_temp_free_i32(r_tmp2);
566 gen_store_gpr(r_tmp1, to);
567 tcg_temp_free(r_tmp1);
570 static inline void gen_store_srsgpr (int from, int to)
573 TCGv r_tmp1 = tcg_temp_new();
574 TCGv_i32 r_tmp2 = tcg_temp_new_i32();
575 TCGv_ptr addr = tcg_temp_new_ptr();
577 gen_load_gpr(r_tmp1, from);
578 tcg_gen_ld_i32(r_tmp2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
579 tcg_gen_shri_i32(r_tmp2, r_tmp2, CP0SRSCtl_PSS);
580 tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xf);
581 tcg_gen_muli_i32(r_tmp2, r_tmp2, sizeof(target_ulong) * 32);
582 tcg_gen_ext_i32_ptr(addr, r_tmp2);
583 tcg_gen_add_ptr(addr, cpu_env, addr);
585 tcg_gen_st_tl(r_tmp1, addr, sizeof(target_ulong) * to);
586 tcg_temp_free_ptr(addr);
587 tcg_temp_free_i32(r_tmp2);
588 tcg_temp_free(r_tmp1);
592 /* Floating point register moves. */
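/* With Status.FR = 1 (MIPS_HFLAG_F64) every FPR is a full 64-bit register.
   With FR = 0 a 64-bit value is spread over an even/odd pair of 32-bit
   registers, which is why the fallback paths access reg & ~1 (low word) and
   reg | 1 (high word). */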
593 static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
595 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
598 static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
600 tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
603 static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
605 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
608 static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
610 tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
613 static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
615 if (ctx->hflags & MIPS_HFLAG_F64) {
616 tcg_gen_ld_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
618 TCGv_i32 t0 = tcg_temp_new_i32();
619 TCGv_i32 t1 = tcg_temp_new_i32();
620 gen_load_fpr32(t0, reg & ~1);
621 gen_load_fpr32(t1, reg | 1);
622 tcg_gen_concat_i32_i64(t, t0, t1);
623 tcg_temp_free_i32(t0);
624 tcg_temp_free_i32(t1);
628 static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
630 if (ctx->hflags & MIPS_HFLAG_F64) {
631 tcg_gen_st_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
633 TCGv_i64 t0 = tcg_temp_new_i64();
634 TCGv_i32 t1 = tcg_temp_new_i32();
635 tcg_gen_trunc_i64_i32(t1, t);
636 gen_store_fpr32(t1, reg & ~1);
637 tcg_gen_shri_i64(t0, t, 32);
638 tcg_gen_trunc_i64_i32(t1, t0);
639 gen_store_fpr32(t1, reg | 1);
640 tcg_temp_free_i32(t1);
641 tcg_temp_free_i64(t0);
645 static inline int get_fp_bit (int cc)
653 #define FOP_CONDS(type, fmt, bits) \
654 static inline void gen_cmp ## type ## _ ## fmt(int n, TCGv_i##bits a, \
655 TCGv_i##bits b, int cc) \
658 case 0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, a, b, cc); break;\
659 case 1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, a, b, cc); break;\
660 case 2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, a, b, cc); break;\
661 case 3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, a, b, cc); break;\
662 case 4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, a, b, cc); break;\
663 case 5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, a, b, cc); break;\
664 case 6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, a, b, cc); break;\
665 case 7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, a, b, cc); break;\
666 case 8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, a, b, cc); break;\
667 case 9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, a, b, cc); break;\
668 case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, a, b, cc); break;\
669 case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, a, b, cc); break;\
670 case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, a, b, cc); break;\
671 case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, a, b, cc); break;\
672 case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, a, b, cc); break;\
673 case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, a, b, cc); break;\
679 FOP_CONDS(abs, d, 64)
681 FOP_CONDS(abs, s, 32)
683 FOP_CONDS(abs, ps, 64)
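/* The cond field of c.cond.fmt selects one of 16 predicates: codes 0..7
   (f, un, eq, ueq, olt, ult, ole, ule) do not signal on unordered operands,
   while codes 8..15 (sf .. ngt) raise Invalid Operation for them.  The "abs"
   variants are used for the MIPS-3D CABS.cond.fmt instructions, which compare
   absolute values. */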
687 #define OP_COND(name, cond) \
688 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0, TCGv t1) \
690 int l1 = gen_new_label(); \
691 int l2 = gen_new_label(); \
693 tcg_gen_brcond_tl(cond, t0, t1, l1); \
694 tcg_gen_movi_tl(ret, 0); \
697 tcg_gen_movi_tl(ret, 1); \
700 OP_COND(eq, TCG_COND_EQ);
701 OP_COND(ne, TCG_COND_NE);
702 OP_COND(ge, TCG_COND_GE);
703 OP_COND(geu, TCG_COND_GEU);
704 OP_COND(lt, TCG_COND_LT);
705 OP_COND(ltu, TCG_COND_LTU);
708 #define OP_CONDI(name, cond) \
709 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0, target_ulong val) \
711 int l1 = gen_new_label(); \
712 int l2 = gen_new_label(); \
714 tcg_gen_brcondi_tl(cond, t0, val, l1); \
715 tcg_gen_movi_tl(ret, 0); \
718 tcg_gen_movi_tl(ret, 1); \
721 OP_CONDI(lti, TCG_COND_LT);
722 OP_CONDI(ltiu, TCG_COND_LTU);
725 #define OP_CONDZ(name, cond) \
726 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0) \
728 int l1 = gen_new_label(); \
729 int l2 = gen_new_label(); \
731 tcg_gen_brcondi_tl(cond, t0, 0, l1); \
732 tcg_gen_movi_tl(ret, 0); \
735 tcg_gen_movi_tl(ret, 1); \
738 OP_CONDZ(gez, TCG_COND_GE);
739 OP_CONDZ(gtz, TCG_COND_GT);
740 OP_CONDZ(lez, TCG_COND_LE);
741 OP_CONDZ(ltz, TCG_COND_LT);
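/* The OP_COND / OP_CONDI / OP_CONDZ templates synthesise a set-on-condition
   result (0 or 1) from a conditional branch and two moves; no dedicated
   setcond operation is used here. */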
744 static inline void gen_save_pc(target_ulong pc)
746 tcg_gen_movi_tl(cpu_PC, pc);
749 static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
751 LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
752 if (do_save_pc && ctx->pc != ctx->saved_pc) {
753 gen_save_pc(ctx->pc);
754 ctx->saved_pc = ctx->pc;
756 if (ctx->hflags != ctx->saved_hflags) {
757 tcg_gen_movi_i32(hflags, ctx->hflags);
758 ctx->saved_hflags = ctx->hflags;
759 switch (ctx->hflags & MIPS_HFLAG_BMASK) {
765 tcg_gen_movi_tl(btarget, ctx->btarget);
771 static inline void restore_cpu_state (CPUState *env, DisasContext *ctx)
773 ctx->saved_hflags = ctx->hflags;
774 switch (ctx->hflags & MIPS_HFLAG_BMASK) {
780 ctx->btarget = env->btarget;
786 generate_exception_err (DisasContext *ctx, int excp, int err)
788 TCGv_i32 texcp = tcg_const_i32(excp);
789 TCGv_i32 terr = tcg_const_i32(err);
790 save_cpu_state(ctx, 1);
791 gen_helper_raise_exception_err(texcp, terr);
792 tcg_temp_free_i32(terr);
793 tcg_temp_free_i32(texcp);
794 gen_helper_interrupt_restart();
799 generate_exception (DisasContext *ctx, int excp)
801 save_cpu_state(ctx, 1);
802 gen_helper_0i(raise_exception, excp);
803 gen_helper_interrupt_restart();
807 /* Address computation */
808 static inline void gen_op_addr_add (DisasContext *ctx, TCGv t0, TCGv t1)
810 tcg_gen_add_tl(t0, t0, t1);
812 #if defined(TARGET_MIPS64)
813 /* For compatibility with 32-bit code, a data reference in user mode
814 with Status_UX = 0 should be cast to 32 bits and sign-extended.
815 See the MIPS64 PRA manual, section 4.10. */
816 if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
817 !(ctx->hflags & MIPS_HFLAG_UX)) {
818 tcg_gen_ext32s_i64(t0, t0);
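/* Illustration (values chosen arbitrarily): with UX = 0, a computed address
   of 0x0000000080000004 becomes 0xFFFFFFFF80000004, i.e. the upper 32 bits
   are forced to copies of bit 31. */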
823 static inline void check_cp0_enabled(DisasContext *ctx)
825 if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
826 generate_exception_err(ctx, EXCP_CpU, 1);
829 static inline void check_cp1_enabled(DisasContext *ctx)
831 if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
832 generate_exception_err(ctx, EXCP_CpU, 1);
835 /* Verify that the processor is running with COP1X instructions enabled.
836 This is associated with the nabla symbol in the MIPS32 and MIPS64 opcode tables. */
839 static inline void check_cop1x(DisasContext *ctx)
841 if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
842 generate_exception(ctx, EXCP_RI);
845 /* Verify that the processor is running with 64-bit floating-point
846 operations enabled. */
848 static inline void check_cp1_64bitmode(DisasContext *ctx)
850 if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
851 generate_exception(ctx, EXCP_RI);
855 * Verify that a floating-point register reference is valid; an operation is not defined
856 * if bit 0 of any register specification is set and the FR bit in the
857 * Status register equals zero, since the register numbers specify an
858 * even-odd pair of adjacent coprocessor general registers. When the FR bit
859 * in the Status register equals one, both even and odd register numbers
860 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
862 * Multiple 64 bit wide registers can be checked by calling
863 * check_cp1_registers(ctx, freg1 | freg2 | ... | fregN);
865 static inline void check_cp1_registers(DisasContext *ctx, int regs)
867 if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
868 generate_exception(ctx, EXCP_RI);
871 /* This code generates a "reserved instruction" exception if the
872 CPU does not support the instruction set corresponding to flags. */
873 static inline void check_insn(CPUState *env, DisasContext *ctx, int flags)
875 if (unlikely(!(env->insn_flags & flags)))
876 generate_exception(ctx, EXCP_RI);
879 /* This code generates a "reserved instruction" exception if 64-bit
880 instructions are not enabled. */
881 static inline void check_mips_64(DisasContext *ctx)
883 if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
884 generate_exception(ctx, EXCP_RI);
887 /* load/store instructions. */
888 #define OP_LD(insn,fname) \
889 static inline void op_ldst_##insn(TCGv t0, DisasContext *ctx) \
891 tcg_gen_qemu_##fname(t0, t0, ctx->mem_idx); \
898 #if defined(TARGET_MIPS64)
904 #define OP_ST(insn,fname) \
905 static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx) \
907 tcg_gen_qemu_##fname(t1, t0, ctx->mem_idx); \
912 #if defined(TARGET_MIPS64)
917 #define OP_LD_ATOMIC(insn,fname) \
918 static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx) \
920 tcg_gen_mov_tl(t1, t0); \
921 tcg_gen_qemu_##fname(t0, t0, ctx->mem_idx); \
922 tcg_gen_st_tl(t1, cpu_env, offsetof(CPUState, CP0_LLAddr)); \
924 OP_LD_ATOMIC(ll,ld32s);
925 #if defined(TARGET_MIPS64)
926 OP_LD_ATOMIC(lld,ld64);
930 #define OP_ST_ATOMIC(insn,fname,almask) \
931 static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx) \
933 TCGv r_tmp = tcg_temp_local_new(); \
934 int l1 = gen_new_label(); \
935 int l2 = gen_new_label(); \
936 int l3 = gen_new_label(); \
938 tcg_gen_andi_tl(r_tmp, t0, almask); \
939 tcg_gen_brcondi_tl(TCG_COND_EQ, r_tmp, 0, l1); \
940 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_BadVAddr)); \
941 generate_exception(ctx, EXCP_AdES); \
943 tcg_gen_ld_tl(r_tmp, cpu_env, offsetof(CPUState, CP0_LLAddr)); \
944 tcg_gen_brcond_tl(TCG_COND_NE, t0, r_tmp, l2); \
945 tcg_gen_qemu_##fname(t1, t0, ctx->mem_idx); \
946 tcg_gen_movi_tl(t0, 1); \
949 tcg_gen_movi_tl(t0, 0); \
951 tcg_temp_free(r_tmp); \
953 OP_ST_ATOMIC(sc,st32,0x3);
954 #if defined(TARGET_MIPS64)
955 OP_ST_ATOMIC(scd,st64,0x7);
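/* SC/SCD emulation: the store first checks alignment (raising AdES on a
   misaligned address), then compares the address with the CP0_LLAddr recorded
   by the matching LL/LLD; on a match the store is performed and rt is set to
   1, otherwise rt becomes 0.  This is only an approximation based on address
   comparison; intervening stores to the monitored location are not detected
   here. */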
960 static void gen_ldst (DisasContext *ctx, uint32_t opc, int rt,
961 int base, int16_t offset)
963 const char *opn = "ldst";
964 TCGv t0 = tcg_temp_local_new();
965 TCGv t1 = tcg_temp_local_new();
968 tcg_gen_movi_tl(t0, offset);
969 } else if (offset == 0) {
970 gen_load_gpr(t0, base);
972 tcg_gen_movi_tl(t0, offset);
973 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
975 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
978 #if defined(TARGET_MIPS64)
980 op_ldst_lwu(t0, ctx);
981 gen_store_gpr(t0, rt);
986 gen_store_gpr(t0, rt);
990 op_ldst_lld(t0, t1, ctx);
991 gen_store_gpr(t0, rt);
995 gen_load_gpr(t1, rt);
996 op_ldst_sd(t0, t1, ctx);
1000 save_cpu_state(ctx, 1);
1001 gen_load_gpr(t1, rt);
1002 op_ldst_scd(t0, t1, ctx);
1003 gen_store_gpr(t0, rt);
1007 save_cpu_state(ctx, 1);
1008 gen_load_gpr(t1, rt);
1009 gen_helper_3i(ldl, t1, t0, t1, ctx->mem_idx);
1010 gen_store_gpr(t1, rt);
1014 save_cpu_state(ctx, 1);
1015 gen_load_gpr(t1, rt);
1016 gen_helper_2i(sdl, t0, t1, ctx->mem_idx);
1020 save_cpu_state(ctx, 1);
1021 gen_load_gpr(t1, rt);
1022 gen_helper_3i(ldr, t1, t0, t1, ctx->mem_idx);
1023 gen_store_gpr(t1, rt);
1027 save_cpu_state(ctx, 1);
1028 gen_load_gpr(t1, rt);
1029 gen_helper_2i(sdr, t0, t1, ctx->mem_idx);
1034 op_ldst_lw(t0, ctx);
1035 gen_store_gpr(t0, rt);
1039 gen_load_gpr(t1, rt);
1040 op_ldst_sw(t0, t1, ctx);
1044 op_ldst_lh(t0, ctx);
1045 gen_store_gpr(t0, rt);
1049 gen_load_gpr(t1, rt);
1050 op_ldst_sh(t0, t1, ctx);
1054 op_ldst_lhu(t0, ctx);
1055 gen_store_gpr(t0, rt);
1059 op_ldst_lb(t0, ctx);
1060 gen_store_gpr(t0, rt);
1064 gen_load_gpr(t1, rt);
1065 op_ldst_sb(t0, t1, ctx);
1069 op_ldst_lbu(t0, ctx);
1070 gen_store_gpr(t0, rt);
1074 save_cpu_state(ctx, 1);
1075 gen_load_gpr(t1, rt);
1076 gen_helper_3i(lwl, t1, t0, t1, ctx->mem_idx);
1077 gen_store_gpr(t1, rt);
1081 save_cpu_state(ctx, 1);
1082 gen_load_gpr(t1, rt);
1083 gen_helper_2i(swl, t0, t1, ctx->mem_idx);
1087 save_cpu_state(ctx, 1);
1088 gen_load_gpr(t1, rt);
1089 gen_helper_3i(lwr, t1, t0, t1, ctx->mem_idx);
1090 gen_store_gpr(t1, rt);
1094 save_cpu_state(ctx, 1);
1095 gen_load_gpr(t1, rt);
1096 gen_helper_2i(swr, t0, t1, ctx->mem_idx);
1100 op_ldst_ll(t0, t1, ctx);
1101 gen_store_gpr(t0, rt);
1105 save_cpu_state(ctx, 1);
1106 gen_load_gpr(t1, rt);
1107 op_ldst_sc(t0, t1, ctx);
1108 gen_store_gpr(t0, rt);
1113 generate_exception(ctx, EXCP_RI);
1116 MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
1122 /* Floating-point load and store */
1123 static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
1124 int base, int16_t offset)
1126 const char *opn = "flt_ldst";
1127 TCGv t0 = tcg_temp_new();
1130 tcg_gen_movi_tl(t0, offset);
1131 } else if (offset == 0) {
1132 gen_load_gpr(t0, base);
1134 tcg_gen_movi_tl(t0, offset);
1135 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
1137 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
1142 TCGv_i32 fp0 = tcg_temp_new_i32();
1143 TCGv t1 = tcg_temp_new();
1145 tcg_gen_qemu_ld32s(t1, t0, ctx->mem_idx);
1146 tcg_gen_trunc_tl_i32(fp0, t1);
1147 gen_store_fpr32(fp0, ft);
1149 tcg_temp_free_i32(fp0);
1155 TCGv_i32 fp0 = tcg_temp_new_i32();
1156 TCGv t1 = tcg_temp_new();
1158 gen_load_fpr32(fp0, ft);
1159 tcg_gen_extu_i32_tl(t1, fp0);
1160 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
1162 tcg_temp_free_i32(fp0);
1168 TCGv_i64 fp0 = tcg_temp_new_i64();
1170 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
1171 gen_store_fpr64(ctx, fp0, ft);
1172 tcg_temp_free_i64(fp0);
1178 TCGv_i64 fp0 = tcg_temp_new_i64();
1180 gen_load_fpr64(ctx, fp0, ft);
1181 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
1182 tcg_temp_free_i64(fp0);
1188 generate_exception(ctx, EXCP_RI);
1191 MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
1196 /* Arithmetic with immediate operand */
1197 static void gen_arith_imm (CPUState *env, DisasContext *ctx, uint32_t opc,
1198 int rt, int rs, int16_t imm)
1201 const char *opn = "imm arith";
1202 TCGv t0 = tcg_temp_local_new();
1204 if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
1205 /* If no destination, treat it as a NOP.
1206 For addi, we must generate the overflow exception when needed. */
1210 uimm = (uint16_t)imm;
1214 #if defined(TARGET_MIPS64)
1220 uimm = (target_long)imm; /* Sign extend to 32/64 bits */
1225 gen_load_gpr(t0, rs);
1228 tcg_gen_movi_tl(t0, imm << 16);
1233 #if defined(TARGET_MIPS64)
1242 gen_load_gpr(t0, rs);
1248 TCGv r_tmp1 = tcg_temp_new();
1249 TCGv r_tmp2 = tcg_temp_new();
1250 int l1 = gen_new_label();
1252 save_cpu_state(ctx, 1);
1253 tcg_gen_ext32s_tl(r_tmp1, t0);
1254 tcg_gen_addi_tl(t0, r_tmp1, uimm);
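/* Signed-overflow check: for r = a + b, overflow occurred exactly when a and
   b have the same sign but r has a different one, i.e. when
   ~(a ^ b) & (r ^ b) has its sign bit set; the branch below skips the
   exception when that value is non-negative. */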
1256 tcg_gen_xori_tl(r_tmp1, r_tmp1, ~uimm);
1257 tcg_gen_xori_tl(r_tmp2, t0, uimm);
1258 tcg_gen_and_tl(r_tmp1, r_tmp1, r_tmp2);
1259 tcg_temp_free(r_tmp2);
1260 tcg_gen_brcondi_tl(TCG_COND_GE, r_tmp1, 0, l1);
1261 /* operands of same sign, result different sign */
1262 generate_exception(ctx, EXCP_OVERFLOW);
1264 tcg_temp_free(r_tmp1);
1266 tcg_gen_ext32s_tl(t0, t0);
1271 tcg_gen_addi_tl(t0, t0, uimm);
1272 tcg_gen_ext32s_tl(t0, t0);
1275 #if defined(TARGET_MIPS64)
1278 TCGv r_tmp1 = tcg_temp_new();
1279 TCGv r_tmp2 = tcg_temp_new();
1280 int l1 = gen_new_label();
1282 save_cpu_state(ctx, 1);
1283 tcg_gen_mov_tl(r_tmp1, t0);
1284 tcg_gen_addi_tl(t0, t0, uimm);
1286 tcg_gen_xori_tl(r_tmp1, r_tmp1, ~uimm);
1287 tcg_gen_xori_tl(r_tmp2, t0, uimm);
1288 tcg_gen_and_tl(r_tmp1, r_tmp1, r_tmp2);
1289 tcg_temp_free(r_tmp2);
1290 tcg_gen_brcondi_tl(TCG_COND_GE, r_tmp1, 0, l1);
1291 /* operands of same sign, result different sign */
1292 generate_exception(ctx, EXCP_OVERFLOW);
1294 tcg_temp_free(r_tmp1);
1299 tcg_gen_addi_tl(t0, t0, uimm);
1304 gen_op_lti(t0, t0, uimm);
1308 gen_op_ltiu(t0, t0, uimm);
1312 tcg_gen_andi_tl(t0, t0, uimm);
1316 tcg_gen_ori_tl(t0, t0, uimm);
1320 tcg_gen_xori_tl(t0, t0, uimm);
1327 tcg_gen_shli_tl(t0, t0, uimm);
1328 tcg_gen_ext32s_tl(t0, t0);
1332 tcg_gen_ext32s_tl(t0, t0);
1333 tcg_gen_sari_tl(t0, t0, uimm);
1337 switch ((ctx->opcode >> 21) & 0x1f) {
1340 tcg_gen_ext32u_tl(t0, t0);
1341 tcg_gen_shri_tl(t0, t0, uimm);
1343 tcg_gen_ext32s_tl(t0, t0);
1348 /* rotr is decoded as srl on non-R2 CPUs */
1349 if (env->insn_flags & ISA_MIPS32R2) {
1351 TCGv_i32 r_tmp1 = tcg_temp_new_i32();
1353 tcg_gen_trunc_tl_i32(r_tmp1, t0);
1354 tcg_gen_rotri_i32(r_tmp1, r_tmp1, uimm);
1355 tcg_gen_ext_i32_tl(t0, r_tmp1);
1356 tcg_temp_free_i32(r_tmp1);
1361 tcg_gen_ext32u_tl(t0, t0);
1362 tcg_gen_shri_tl(t0, t0, uimm);
1364 tcg_gen_ext32s_tl(t0, t0);
1370 MIPS_INVAL("invalid srl flag");
1371 generate_exception(ctx, EXCP_RI);
1375 #if defined(TARGET_MIPS64)
1377 tcg_gen_shli_tl(t0, t0, uimm);
1381 tcg_gen_sari_tl(t0, t0, uimm);
1385 switch ((ctx->opcode >> 21) & 0x1f) {
1387 tcg_gen_shri_tl(t0, t0, uimm);
1391 /* drotr is decoded as dsrl on non-R2 CPUs */
1392 if (env->insn_flags & ISA_MIPS32R2) {
1394 tcg_gen_rotri_tl(t0, t0, uimm);
1398 tcg_gen_shri_tl(t0, t0, uimm);
1403 MIPS_INVAL("invalid dsrl flag");
1404 generate_exception(ctx, EXCP_RI);
1409 tcg_gen_shli_tl(t0, t0, uimm + 32);
1413 tcg_gen_sari_tl(t0, t0, uimm + 32);
1417 switch ((ctx->opcode >> 21) & 0x1f) {
1419 tcg_gen_shri_tl(t0, t0, uimm + 32);
1423 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
1424 if (env->insn_flags & ISA_MIPS32R2) {
1425 tcg_gen_rotri_tl(t0, t0, uimm + 32);
1428 tcg_gen_shri_tl(t0, t0, uimm + 32);
1433 MIPS_INVAL("invalid dsrl32 flag");
1434 generate_exception(ctx, EXCP_RI);
1441 generate_exception(ctx, EXCP_RI);
1444 gen_store_gpr(t0, rt);
1445 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1451 static void gen_arith (CPUState *env, DisasContext *ctx, uint32_t opc,
1452 int rd, int rs, int rt)
1454 const char *opn = "arith";
1456 if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
1457 && opc != OPC_DADD && opc != OPC_DSUB) {
1458 /* If no destination, treat it as a NOP.
1459 For add & sub, we must generate the overflow exception when needed. */
1467 TCGv t0 = tcg_temp_local_new();
1468 TCGv t1 = tcg_temp_new();
1469 TCGv t2 = tcg_temp_new();
1470 int l1 = gen_new_label();
1472 gen_load_gpr(t1, rs);
1473 gen_load_gpr(t2, rt);
1474 tcg_gen_add_tl(t0, t1, t2);
1475 tcg_gen_ext32s_tl(t0, t0);
1476 tcg_gen_xor_tl(t1, t1, t2);
1477 tcg_gen_not_tl(t1, t1);
1478 tcg_gen_xor_tl(t2, t0, t2);
1479 tcg_gen_and_tl(t1, t1, t2);
1481 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1483 /* operands of same sign, result different sign */
1484 generate_exception(ctx, EXCP_OVERFLOW);
1486 gen_store_gpr(t0, rd);
1492 if (rs != 0 && rt != 0) {
1493 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1494 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1495 } else if (rs == 0 && rt != 0) {
1496 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1497 } else if (rs != 0 && rt == 0) {
1498 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1500 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1506 TCGv t0 = tcg_temp_local_new();
1507 TCGv t1 = tcg_temp_new();
1508 TCGv t2 = tcg_temp_new();
1509 int l1 = gen_new_label();
1511 gen_load_gpr(t1, rs);
1512 gen_load_gpr(t2, rt);
1513 tcg_gen_sub_tl(t0, t1, t2);
1514 tcg_gen_ext32s_tl(t0, t0);
1515 tcg_gen_xor_tl(t2, t1, t2);
1516 tcg_gen_xor_tl(t1, t0, t1);
1517 tcg_gen_and_tl(t1, t1, t2);
1519 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1521 /* operands of same sign, result different sign */
1522 generate_exception(ctx, EXCP_OVERFLOW);
1524 gen_store_gpr(t0, rd);
1530 if (rs != 0 && rt != 0) {
1531 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1532 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1533 } else if (rs == 0 && rt != 0) {
1534 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
1535 } else if (rs != 0 && rt == 0) {
1536 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1538 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1542 #if defined(TARGET_MIPS64)
1545 TCGv t0 = tcg_temp_local_new();
1546 TCGv t1 = tcg_temp_new();
1547 TCGv t2 = tcg_temp_new();
1548 int l1 = gen_new_label();
1550 gen_load_gpr(t1, rs);
1551 gen_load_gpr(t2, rt);
1552 tcg_gen_add_tl(t0, t1, t2);
1553 tcg_gen_xor_tl(t1, t1, t2);
1554 tcg_gen_not_tl(t1, t1);
1555 tcg_gen_xor_tl(t2, t0, t2);
1556 tcg_gen_and_tl(t1, t1, t2);
1558 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1560 /* operands of same sign, result different sign */
1561 generate_exception(ctx, EXCP_OVERFLOW);
1563 gen_store_gpr(t0, rd);
1569 if (rs != 0 && rt != 0) {
1570 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1571 } else if (rs == 0 && rt != 0) {
1572 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1573 } else if (rs != 0 && rt == 0) {
1574 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1576 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1582 TCGv t0 = tcg_temp_local_new();
1583 TCGv t1 = tcg_temp_new();
1584 TCGv t2 = tcg_temp_new();
1585 int l1 = gen_new_label();
1587 gen_load_gpr(t1, rs);
1588 gen_load_gpr(t2, rt);
1589 tcg_gen_sub_tl(t0, t1, t2);
1590 tcg_gen_xor_tl(t2, t1, t2);
1591 tcg_gen_xor_tl(t1, t0, t1);
1592 tcg_gen_and_tl(t1, t1, t2);
1594 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1596 /* operands of same sign, result different sign */
1597 generate_exception(ctx, EXCP_OVERFLOW);
1599 gen_store_gpr(t0, rd);
1605 if (rs != 0 && rt != 0) {
1606 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1607 } else if (rs == 0 && rt != 0) {
1608 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
1609 } else if (rs != 0 && rt == 0) {
1610 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1612 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1618 if (likely(rs != 0 && rt != 0)) {
1619 tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1620 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1622 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1627 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1630 /* Conditional move */
1631 static void gen_cond_move (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1633 const char *opn = "cond move";
1637 /* If no destination, treat it as a NOP. */
1643 l1 = gen_new_label();
1646 if (likely(rt != 0))
1647 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rt], 0, l1);
1653 if (likely(rt != 0))
1654 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rt], 0, l1);
1659 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1661 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1664 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1668 static void gen_logic (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1670 const char *opn = "logic";
1673 /* If no destination, treat it as a NOP. */
1680 if (likely(rs != 0 && rt != 0)) {
1681 tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1683 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1688 if (rs != 0 && rt != 0) {
1689 tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1690 } else if (rs == 0 && rt != 0) {
1691 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
1692 } else if (rs != 0 && rt == 0) {
1693 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
1695 tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));
1700 if (likely(rs != 0 && rt != 0)) {
1701 tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1702 } else if (rs == 0 && rt != 0) {
1703 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1704 } else if (rs != 0 && rt == 0) {
1705 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1707 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1712 if (likely(rs != 0 && rt != 0)) {
1713 tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1714 } else if (rs == 0 && rt != 0) {
1715 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1716 } else if (rs != 0 && rt == 0) {
1717 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1719 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1724 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1727 /* Set on less than */
1728 static void gen_slt (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1730 const char *opn = "slt";
1734 /* If no destination, treat it as a NOP. */
1739 t0 = tcg_temp_new();
1740 t1 = tcg_temp_new();
1741 gen_load_gpr(t0, rs);
1742 gen_load_gpr(t1, rt);
1745 gen_op_lt(cpu_gpr[rd], t0, t1);
1749 gen_op_ltu(cpu_gpr[rd], t0, t1);
1753 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1759 static void gen_shift (CPUState *env, DisasContext *ctx, uint32_t opc,
1760 int rd, int rs, int rt)
1762 const char *opn = "shifts";
1766 /* If no destination, treat it as a NOP. */
1772 t0 = tcg_temp_new();
1773 t1 = tcg_temp_new();
1774 gen_load_gpr(t0, rs);
1775 gen_load_gpr(t1, rt);
1778 tcg_gen_andi_tl(t0, t0, 0x1f);
1779 tcg_gen_shl_tl(t0, t1, t0);
1780 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1784 tcg_gen_ext32s_tl(t1, t1);
1785 tcg_gen_andi_tl(t0, t0, 0x1f);
1786 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
1790 switch ((ctx->opcode >> 6) & 0x1f) {
1792 tcg_gen_ext32u_tl(t1, t1);
1793 tcg_gen_andi_tl(t0, t0, 0x1f);
1794 tcg_gen_shr_tl(t0, t1, t0);
1795 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1799 /* rotrv is decoded as srlv on non-R2 CPUs */
1800 if (env->insn_flags & ISA_MIPS32R2) {
1801 TCGv_i32 t2 = tcg_temp_new_i32();
1802 TCGv_i32 t3 = tcg_temp_new_i32();
1804 tcg_gen_trunc_tl_i32(t2, t0);
1805 tcg_gen_trunc_tl_i32(t3, t1);
1806 tcg_gen_andi_i32(t2, t2, 0x1f);
1807 tcg_gen_rotr_i32(t2, t3, t2);
1808 tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
1809 tcg_temp_free_i32(t2);
1810 tcg_temp_free_i32(t3);
1813 tcg_gen_ext32u_tl(t1, t1);
1814 tcg_gen_andi_tl(t0, t0, 0x1f);
1815 tcg_gen_shr_tl(t0, t1, t0);
1816 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1821 MIPS_INVAL("invalid srlv flag");
1822 generate_exception(ctx, EXCP_RI);
1826 #if defined(TARGET_MIPS64)
1828 tcg_gen_andi_tl(t0, t0, 0x3f);
1829 tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);
1833 tcg_gen_andi_tl(t0, t0, 0x3f);
1834 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
1838 switch ((ctx->opcode >> 6) & 0x1f) {
1840 tcg_gen_andi_tl(t0, t0, 0x3f);
1841 tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);
1845 /* drotrv is decoded as dsrlv on non-R2 CPUs */
1846 if (env->insn_flags & ISA_MIPS32R2) {
1847 tcg_gen_andi_tl(t0, t0, 0x3f);
1848 tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);
1851 tcg_gen_andi_tl(t0, t0, 0x3f);
1852 tcg_gen_shr_tl(t0, t1, t0);
1857 MIPS_INVAL("invalid dsrlv flag");
1858 generate_exception(ctx, EXCP_RI);
1864 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1869 /* Moves to/from HI and LO registers */
1870 static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
1872 const char *opn = "hilo";
1874 if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
1881 tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[0]);
1885 tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[0]);
1890 tcg_gen_mov_tl(cpu_HI[0], cpu_gpr[reg]);
1892 tcg_gen_movi_tl(cpu_HI[0], 0);
1897 tcg_gen_mov_tl(cpu_LO[0], cpu_gpr[reg]);
1899 tcg_gen_movi_tl(cpu_LO[0], 0);
1903 MIPS_DEBUG("%s %s", opn, regnames[reg]);
1906 static void gen_muldiv (DisasContext *ctx, uint32_t opc,
1909 const char *opn = "mul/div";
1915 #if defined(TARGET_MIPS64)
1919 t0 = tcg_temp_local_new();
1920 t1 = tcg_temp_local_new();
1923 t0 = tcg_temp_new();
1924 t1 = tcg_temp_new();
1928 gen_load_gpr(t0, rs);
1929 gen_load_gpr(t1, rt);
1933 int l1 = gen_new_label();
1934 int l2 = gen_new_label();
1936 tcg_gen_ext32s_tl(t0, t0);
1937 tcg_gen_ext32s_tl(t1, t1);
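/* DIV special cases: the architecture leaves HI/LO UNPREDICTABLE for a divide
   by zero, so that case is simply skipped, and INT_MIN / -1 (whose quotient
   does not fit) is forced to LO = INT_MIN, HI = 0, presumably also to avoid
   the host's integer division trapping on these inputs. */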
1938 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
1939 tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
1940 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
1942 tcg_gen_mov_tl(cpu_LO[0], t0);
1943 tcg_gen_movi_tl(cpu_HI[0], 0);
1946 tcg_gen_div_tl(cpu_LO[0], t0, t1);
1947 tcg_gen_rem_tl(cpu_HI[0], t0, t1);
1948 tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
1949 tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
1956 int l1 = gen_new_label();
1958 tcg_gen_ext32u_tl(t0, t0);
1959 tcg_gen_ext32u_tl(t1, t1);
1960 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
1961 tcg_gen_divu_tl(cpu_LO[0], t0, t1);
1962 tcg_gen_remu_tl(cpu_HI[0], t0, t1);
1963 tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
1964 tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
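/* MULT/MULTU below use the usual widening pattern: extend both 32-bit
   operands to 64 bits, multiply once, then split the product so the low half
   lands in LO and the high half in HI, each sign-extended as MIPS64 requires
   for 32-bit results. */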
1971 TCGv_i64 t2 = tcg_temp_new_i64();
1972 TCGv_i64 t3 = tcg_temp_new_i64();
1974 tcg_gen_ext_tl_i64(t2, t0);
1975 tcg_gen_ext_tl_i64(t3, t1);
1976 tcg_gen_mul_i64(t2, t2, t3);
1977 tcg_temp_free_i64(t3);
1978 tcg_gen_trunc_i64_tl(t0, t2);
1979 tcg_gen_shri_i64(t2, t2, 32);
1980 tcg_gen_trunc_i64_tl(t1, t2);
1981 tcg_temp_free_i64(t2);
1982 tcg_gen_ext32s_tl(cpu_LO[0], t0);
1983 tcg_gen_ext32s_tl(cpu_HI[0], t1);
1989 TCGv_i64 t2 = tcg_temp_new_i64();
1990 TCGv_i64 t3 = tcg_temp_new_i64();
1992 tcg_gen_ext32u_tl(t0, t0);
1993 tcg_gen_ext32u_tl(t1, t1);
1994 tcg_gen_extu_tl_i64(t2, t0);
1995 tcg_gen_extu_tl_i64(t3, t1);
1996 tcg_gen_mul_i64(t2, t2, t3);
1997 tcg_temp_free_i64(t3);
1998 tcg_gen_trunc_i64_tl(t0, t2);
1999 tcg_gen_shri_i64(t2, t2, 32);
2000 tcg_gen_trunc_i64_tl(t1, t2);
2001 tcg_temp_free_i64(t2);
2002 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2003 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2007 #if defined(TARGET_MIPS64)
2010 int l1 = gen_new_label();
2011 int l2 = gen_new_label();
2013 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2014 tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
2015 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
2016 tcg_gen_mov_tl(cpu_LO[0], t0);
2017 tcg_gen_movi_tl(cpu_HI[0], 0);
2020 tcg_gen_div_i64(cpu_LO[0], t0, t1);
2021 tcg_gen_rem_i64(cpu_HI[0], t0, t1);
2028 int l1 = gen_new_label();
2030 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2031 tcg_gen_divu_i64(cpu_LO[0], t0, t1);
2032 tcg_gen_remu_i64(cpu_HI[0], t0, t1);
2038 gen_helper_dmult(t0, t1);
2042 gen_helper_dmultu(t0, t1);
2048 TCGv_i64 t2 = tcg_temp_new_i64();
2049 TCGv_i64 t3 = tcg_temp_new_i64();
2051 tcg_gen_ext_tl_i64(t2, t0);
2052 tcg_gen_ext_tl_i64(t3, t1);
2053 tcg_gen_mul_i64(t2, t2, t3);
2054 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2055 tcg_gen_add_i64(t2, t2, t3);
2056 tcg_temp_free_i64(t3);
2057 tcg_gen_trunc_i64_tl(t0, t2);
2058 tcg_gen_shri_i64(t2, t2, 32);
2059 tcg_gen_trunc_i64_tl(t1, t2);
2060 tcg_temp_free_i64(t2);
2061 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2062 tcg_gen_ext32s_tl(cpu_LO[1], t1);
2068 TCGv_i64 t2 = tcg_temp_new_i64();
2069 TCGv_i64 t3 = tcg_temp_new_i64();
2071 tcg_gen_ext32u_tl(t0, t0);
2072 tcg_gen_ext32u_tl(t1, t1);
2073 tcg_gen_extu_tl_i64(t2, t0);
2074 tcg_gen_extu_tl_i64(t3, t1);
2075 tcg_gen_mul_i64(t2, t2, t3);
2076 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2077 tcg_gen_add_i64(t2, t2, t3);
2078 tcg_temp_free_i64(t3);
2079 tcg_gen_trunc_i64_tl(t0, t2);
2080 tcg_gen_shri_i64(t2, t2, 32);
2081 tcg_gen_trunc_i64_tl(t1, t2);
2082 tcg_temp_free_i64(t2);
2083 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2084 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2090 TCGv_i64 t2 = tcg_temp_new_i64();
2091 TCGv_i64 t3 = tcg_temp_new_i64();
2093 tcg_gen_ext_tl_i64(t2, t0);
2094 tcg_gen_ext_tl_i64(t3, t1);
2095 tcg_gen_mul_i64(t2, t2, t3);
2096 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2097 tcg_gen_sub_i64(t2, t2, t3);
2098 tcg_temp_free_i64(t3);
2099 tcg_gen_trunc_i64_tl(t0, t2);
2100 tcg_gen_shri_i64(t2, t2, 32);
2101 tcg_gen_trunc_i64_tl(t1, t2);
2102 tcg_temp_free_i64(t2);
2103 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2104 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2110 TCGv_i64 t2 = tcg_temp_new_i64();
2111 TCGv_i64 t3 = tcg_temp_new_i64();
2113 tcg_gen_ext32u_tl(t0, t0);
2114 tcg_gen_ext32u_tl(t1, t1);
2115 tcg_gen_extu_tl_i64(t2, t0);
2116 tcg_gen_extu_tl_i64(t3, t1);
2117 tcg_gen_mul_i64(t2, t2, t3);
2118 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2119 tcg_gen_sub_i64(t2, t2, t3);
2120 tcg_temp_free_i64(t3);
2121 tcg_gen_trunc_i64_tl(t0, t2);
2122 tcg_gen_shri_i64(t2, t2, 32);
2123 tcg_gen_trunc_i64_tl(t1, t2);
2124 tcg_temp_free_i64(t2);
2125 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2126 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2132 generate_exception(ctx, EXCP_RI);
2135 MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
2141 static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
2142 int rd, int rs, int rt)
2144 const char *opn = "mul vr54xx";
2145 TCGv t0 = tcg_temp_new();
2146 TCGv t1 = tcg_temp_new();
2148 gen_load_gpr(t0, rs);
2149 gen_load_gpr(t1, rt);
2152 case OPC_VR54XX_MULS:
2153 gen_helper_muls(t0, t0, t1);
2156 case OPC_VR54XX_MULSU:
2157 gen_helper_mulsu(t0, t0, t1);
2160 case OPC_VR54XX_MACC:
2161 gen_helper_macc(t0, t0, t1);
2164 case OPC_VR54XX_MACCU:
2165 gen_helper_maccu(t0, t0, t1);
2168 case OPC_VR54XX_MSAC:
2169 gen_helper_msac(t0, t0, t1);
2172 case OPC_VR54XX_MSACU:
2173 gen_helper_msacu(t0, t0, t1);
2176 case OPC_VR54XX_MULHI:
2177 gen_helper_mulhi(t0, t0, t1);
2180 case OPC_VR54XX_MULHIU:
2181 gen_helper_mulhiu(t0, t0, t1);
2184 case OPC_VR54XX_MULSHI:
2185 gen_helper_mulshi(t0, t0, t1);
2188 case OPC_VR54XX_MULSHIU:
2189 gen_helper_mulshiu(t0, t0, t1);
2192 case OPC_VR54XX_MACCHI:
2193 gen_helper_macchi(t0, t0, t1);
2196 case OPC_VR54XX_MACCHIU:
2197 gen_helper_macchiu(t0, t0, t1);
2200 case OPC_VR54XX_MSACHI:
2201 gen_helper_msachi(t0, t0, t1);
2204 case OPC_VR54XX_MSACHIU:
2205 gen_helper_msachiu(t0, t0, t1);
2209 MIPS_INVAL("mul vr54xx");
2210 generate_exception(ctx, EXCP_RI);
2213 gen_store_gpr(t0, rd);
2214 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
2221 static void gen_cl (DisasContext *ctx, uint32_t opc,
2224 const char *opn = "CLx";
2232 t0 = tcg_temp_new();
2233 gen_load_gpr(t0, rs);
2236 gen_helper_clo(cpu_gpr[rd], t0);
2240 gen_helper_clz(cpu_gpr[rd], t0);
2243 #if defined(TARGET_MIPS64)
2245 gen_helper_dclo(cpu_gpr[rd], t0);
2249 gen_helper_dclz(cpu_gpr[rd], t0);
2254 MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);
2259 static void gen_trap (DisasContext *ctx, uint32_t opc,
2260 int rs, int rt, int16_t imm)
2263 TCGv t0 = tcg_temp_new();
2264 TCGv t1 = tcg_temp_new();
2267 /* Load needed operands */
2275 /* Compare two registers */
2277 gen_load_gpr(t0, rs);
2278 gen_load_gpr(t1, rt);
2288 /* Compare register to immediate */
2289 if (rs != 0 || imm != 0) {
2290 gen_load_gpr(t0, rs);
2291 tcg_gen_movi_tl(t1, (int32_t)imm);
2298 case OPC_TEQ: /* rs == rs */
2299 case OPC_TEQI: /* r0 == 0 */
2300 case OPC_TGE: /* rs >= rs */
2301 case OPC_TGEI: /* r0 >= 0 */
2302 case OPC_TGEU: /* rs >= rs unsigned */
2303 case OPC_TGEIU: /* r0 >= 0 unsigned */
2305 generate_exception(ctx, EXCP_TRAP);
2307 case OPC_TLT: /* rs < rs */
2308 case OPC_TLTI: /* r0 < 0 */
2309 case OPC_TLTU: /* rs < rs unsigned */
2310 case OPC_TLTIU: /* r0 < 0 unsigned */
2311 case OPC_TNE: /* rs != rs */
2312 case OPC_TNEI: /* r0 != 0 */
2313 /* Never trap: treat as NOP. */
2317 int l1 = gen_new_label();
2322 tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
2326 tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
2330 tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
2334 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
2338 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
2342 tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
2345 generate_exception(ctx, EXCP_TRAP);
2352 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
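/* Direct block chaining (goto_tb/exit_tb with a slot index) is only attempted
   when the branch target lies in the same guest page as the current TB, since
   TB invalidation is tracked per page; otherwise the code falls back to an
   indirect exit. */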
2354 TranslationBlock *tb;
2356 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
2359 tcg_gen_exit_tb((long)tb + n);
2366 /* Branches (before delay slot) */
2367 static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
2368 int rs, int rt, int32_t offset)
2370 target_ulong btgt = -1;
2372 int bcond_compute = 0;
2373 TCGv t0 = tcg_temp_new();
2374 TCGv t1 = tcg_temp_new();
2376 if (ctx->hflags & MIPS_HFLAG_BMASK) {
2377 #ifdef MIPS_DEBUG_DISAS
2378 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx "\n", ctx->pc);
2380 generate_exception(ctx, EXCP_RI);
2384 /* Load needed operands */
2390 /* Compare two registers */
2392 gen_load_gpr(t0, rs);
2393 gen_load_gpr(t1, rt);
2396 btgt = ctx->pc + 4 + offset;
2410 /* Compare to zero */
2412 gen_load_gpr(t0, rs);
2415 btgt = ctx->pc + 4 + offset;
2419 /* Jump to immediate */
2420 btgt = ((ctx->pc + 4) & (int32_t)0xF0000000) | (uint32_t)offset;
2424 /* Jump to register */
2425 if (offset != 0 && offset != 16) {
2426 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
2427 others are reserved. */
2428 MIPS_INVAL("jump hint");
2429 generate_exception(ctx, EXCP_RI);
2432 gen_load_gpr(btarget, rs);
2435 MIPS_INVAL("branch/jump");
2436 generate_exception(ctx, EXCP_RI);
2439 if (bcond_compute == 0) {
2440 /* No condition to be computed */
2442 case OPC_BEQ: /* rx == rx */
2443 case OPC_BEQL: /* rx == rx likely */
2444 case OPC_BGEZ: /* 0 >= 0 */
2445 case OPC_BGEZL: /* 0 >= 0 likely */
2446 case OPC_BLEZ: /* 0 <= 0 */
2447 case OPC_BLEZL: /* 0 <= 0 likely */
2449 ctx->hflags |= MIPS_HFLAG_B;
2450 MIPS_DEBUG("balways");
2452 case OPC_BGEZAL: /* 0 >= 0 */
2453 case OPC_BGEZALL: /* 0 >= 0 likely */
2454 /* Always take and link */
2456 ctx->hflags |= MIPS_HFLAG_B;
2457 MIPS_DEBUG("balways and link");
2459 case OPC_BNE: /* rx != rx */
2460 case OPC_BGTZ: /* 0 > 0 */
2461 case OPC_BLTZ: /* 0 < 0 */
2463 MIPS_DEBUG("bnever (NOP)");
2465 case OPC_BLTZAL: /* 0 < 0 */
2466 tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
2467 MIPS_DEBUG("bnever and link");
2469 case OPC_BLTZALL: /* 0 < 0 likely */
2470 tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
2471 /* Skip the instruction in the delay slot */
2472 MIPS_DEBUG("bnever, link and skip");
2475 case OPC_BNEL: /* rx != rx likely */
2476 case OPC_BGTZL: /* 0 > 0 likely */
2477 case OPC_BLTZL: /* 0 < 0 likely */
2478 /* Skip the instruction in the delay slot */
2479 MIPS_DEBUG("bnever and skip");
2483 ctx->hflags |= MIPS_HFLAG_B;
2484 MIPS_DEBUG("j " TARGET_FMT_lx, btgt);
2488 ctx->hflags |= MIPS_HFLAG_B;
2489 MIPS_DEBUG("jal " TARGET_FMT_lx, btgt);
2492 ctx->hflags |= MIPS_HFLAG_BR;
2493 MIPS_DEBUG("jr %s", regnames[rs]);
2497 ctx->hflags |= MIPS_HFLAG_BR;
2498 MIPS_DEBUG("jalr %s, %s", regnames[rt], regnames[rs]);
2501 MIPS_INVAL("branch/jump");
2502 generate_exception(ctx, EXCP_RI);
2508 gen_op_eq(bcond, t0, t1);
2509 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx,
2510 regnames[rs], regnames[rt], btgt);
2513 gen_op_eq(bcond, t0, t1);
2514 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx,
2515 regnames[rs], regnames[rt], btgt);
2518 gen_op_ne(bcond, t0, t1);
2519 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx,
2520 regnames[rs], regnames[rt], btgt);
2523 gen_op_ne(bcond, t0, t1);
2524 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx,
2525 regnames[rs], regnames[rt], btgt);
2528 gen_op_gez(bcond, t0);
2529 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx, regnames[rs], btgt);
2532 gen_op_gez(bcond, t0);
2533 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2536 gen_op_gez(bcond, t0);
2537 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx, regnames[rs], btgt);
2541 gen_op_gez(bcond, t0);
2543 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx, regnames[rs], btgt);
2546 gen_op_gtz(bcond, t0);
2547 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx, regnames[rs], btgt);
2550 gen_op_gtz(bcond, t0);
2551 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2554 gen_op_lez(bcond, t0);
2555 MIPS_DEBUG("blez %s, " TARGET_FMT_lx, regnames[rs], btgt);
2558 gen_op_lez(bcond, t0);
2559 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2562 gen_op_ltz(bcond, t0);
2563 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx, regnames[rs], btgt);
2566 gen_op_ltz(bcond, t0);
2567 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2570 gen_op_ltz(bcond, t0);
2572 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx, regnames[rs], btgt);
2574 ctx->hflags |= MIPS_HFLAG_BC;
2577 gen_op_ltz(bcond, t0);
2579 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx, regnames[rs], btgt);
2581 ctx->hflags |= MIPS_HFLAG_BL;
2584 MIPS_INVAL("conditional branch/jump");
2585 generate_exception(ctx, EXCP_RI);
2589 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx,
2590 blink, ctx->hflags, btgt);
2592 ctx->btarget = btgt;
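/* For the and-link variants the return address is the branch PC + 8, i.e.
   the instruction after the delay slot. */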
2594 tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + 8);
2602 /* special3 bitfield operations */
2603 static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
2604 int rs, int lsb, int msb)
2606 TCGv t0 = tcg_temp_new();
2607 TCGv t1 = tcg_temp_new();
2610 gen_load_gpr(t1, rs);
2615 tcg_gen_shri_tl(t0, t1, lsb);
2617 tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);
2619 tcg_gen_ext32s_tl(t0, t0);
2622 #if defined(TARGET_MIPS64)
2624 tcg_gen_shri_tl(t0, t1, lsb);
2626 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
2630 tcg_gen_shri_tl(t0, t1, lsb + 32);
2631 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
2634 tcg_gen_shri_tl(t0, t1, lsb);
2635 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
2641 mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
2642 gen_load_gpr(t0, rt);
2643 tcg_gen_andi_tl(t0, t0, ~mask);
2644 tcg_gen_shli_tl(t1, t1, lsb);
2645 tcg_gen_andi_tl(t1, t1, mask);
2646 tcg_gen_or_tl(t0, t0, t1);
2647 tcg_gen_ext32s_tl(t0, t0);
2649 #if defined(TARGET_MIPS64)
2653 mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
2654 gen_load_gpr(t0, rt);
2655 tcg_gen_andi_tl(t0, t0, ~mask);
2656 tcg_gen_shli_tl(t1, t1, lsb);
2657 tcg_gen_andi_tl(t1, t1, mask);
2658 tcg_gen_or_tl(t0, t0, t1);
2663 mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
2664 gen_load_gpr(t0, rt);
2665 tcg_gen_andi_tl(t0, t0, ~mask);
2666 tcg_gen_shli_tl(t1, t1, lsb + 32);
2667 tcg_gen_andi_tl(t1, t1, mask);
2668 tcg_gen_or_tl(t0, t0, t1);
2674         mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
2675         gen_load_gpr(t0, rt);
2676 tcg_gen_andi_tl(t0, t0, ~mask);
2677 tcg_gen_shli_tl(t1, t1, lsb);
2678 tcg_gen_andi_tl(t1, t1, mask);
2679 tcg_gen_or_tl(t0, t0, t1);
2684 MIPS_INVAL("bitops");
2685 generate_exception(ctx, EXCP_RI);
2690 gen_store_gpr(t0, rt);
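/*
 * gen_bshfl handles the byte-shuffle operations: WSBH swaps the bytes
 * within each halfword of the low 32 bits, SEB and SEH sign-extend a
 * byte or halfword, and on 64-bit targets DSBH swaps the bytes within
 * each halfword while DSHD reverses the order of the four halfwords.
 */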
2695 static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
2700 /* If no destination, treat it as a NOP. */
2705 t0 = tcg_temp_new();
2706 gen_load_gpr(t0, rt);
2710 TCGv t1 = tcg_temp_new();
2712 tcg_gen_shri_tl(t1, t0, 8);
2713 tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
2714 tcg_gen_shli_tl(t0, t0, 8);
2715 tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
2716 tcg_gen_or_tl(t0, t0, t1);
2718 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
2722 tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
2725 tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
2727 #if defined(TARGET_MIPS64)
2730 TCGv t1 = tcg_temp_new();
2732 tcg_gen_shri_tl(t1, t0, 8);
2733 tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
2734 tcg_gen_shli_tl(t0, t0, 8);
2735 tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
2736 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
2742 TCGv t1 = tcg_temp_new();
2744 tcg_gen_shri_tl(t1, t0, 16);
2745 tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
2746 tcg_gen_shli_tl(t0, t0, 16);
2747 tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
2748 tcg_gen_or_tl(t0, t0, t1);
2749 tcg_gen_shri_tl(t1, t0, 32);
2750 tcg_gen_shli_tl(t0, t0, 32);
2751 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
2757         MIPS_INVAL("bshfl");
2758 generate_exception(ctx, EXCP_RI);
2765 #ifndef CONFIG_USER_ONLY
2766 /* CP0 (MMU and control) */
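/*
 * The helpers below move CP0 state between a TCG register and the
 * CPUState fields.  The 32-bit variants sign-extend on load and
 * truncate on store so that MFC0 always yields a correctly
 * sign-extended value on 64-bit targets; for instance
 * gen_mfc0_load32(t0, offsetof(CPUState, CP0_Index)) reads the 32-bit
 * Index register into t0.
 */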
2767 static inline void gen_mfc0_load32 (TCGv t, target_ulong off)
2769 TCGv_i32 r_tmp = tcg_temp_new_i32();
2771 tcg_gen_ld_i32(r_tmp, cpu_env, off);
2772 tcg_gen_ext_i32_tl(t, r_tmp);
2773 tcg_temp_free_i32(r_tmp);
2776 static inline void gen_mfc0_load64 (TCGv t, target_ulong off)
2778 tcg_gen_ld_tl(t, cpu_env, off);
2779 tcg_gen_ext32s_tl(t, t);
2782 static inline void gen_mtc0_store32 (TCGv t, target_ulong off)
2784 TCGv_i32 r_tmp = tcg_temp_new_i32();
2786 tcg_gen_trunc_tl_i32(r_tmp, t);
2787 tcg_gen_st_i32(r_tmp, cpu_env, off);
2788 tcg_temp_free_i32(r_tmp);
2791 static inline void gen_mtc0_store64 (TCGv t, target_ulong off)
2793 tcg_gen_ext32s_tl(t, t);
2794 tcg_gen_st_tl(t, cpu_env, off);
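/*
 * gen_mfc0 emits the read of the CP0 register selected by (reg, sel).
 * Plain storage registers are loaded straight from CPUState, registers
 * with computed values or side effects (Random, Count, the MT ASE TC*
 * registers, ...) go through a helper, and unimplemented selectors
 * raise a reserved instruction exception.
 */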
2797 static void gen_mfc0 (CPUState *env, DisasContext *ctx, TCGv t0, int reg, int sel)
2799 const char *rn = "invalid";
2802 check_insn(env, ctx, ISA_MIPS32);
2808 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Index));
2812 check_insn(env, ctx, ASE_MT);
2813 gen_helper_mfc0_mvpcontrol(t0);
2817 check_insn(env, ctx, ASE_MT);
2818 gen_helper_mfc0_mvpconf0(t0);
2822 check_insn(env, ctx, ASE_MT);
2823 gen_helper_mfc0_mvpconf1(t0);
2833 gen_helper_mfc0_random(t0);
2837 check_insn(env, ctx, ASE_MT);
2838 gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEControl));
2842 check_insn(env, ctx, ASE_MT);
2843 gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEConf0));
2847 check_insn(env, ctx, ASE_MT);
2848 gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEConf1));
2852 check_insn(env, ctx, ASE_MT);
2853 gen_mfc0_load64(t0, offsetof(CPUState, CP0_YQMask));
2857 check_insn(env, ctx, ASE_MT);
2858 gen_mfc0_load64(t0, offsetof(CPUState, CP0_VPESchedule));
2862 check_insn(env, ctx, ASE_MT);
2863 gen_mfc0_load64(t0, offsetof(CPUState, CP0_VPEScheFBack));
2864 rn = "VPEScheFBack";
2867 check_insn(env, ctx, ASE_MT);
2868 gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEOpt));
2878 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryLo0));
2879 tcg_gen_ext32s_tl(t0, t0);
2883 check_insn(env, ctx, ASE_MT);
2884 gen_helper_mfc0_tcstatus(t0);
2888 check_insn(env, ctx, ASE_MT);
2889 gen_helper_mfc0_tcbind(t0);
2893 check_insn(env, ctx, ASE_MT);
2894 gen_helper_mfc0_tcrestart(t0);
2898 check_insn(env, ctx, ASE_MT);
2899 gen_helper_mfc0_tchalt(t0);
2903 check_insn(env, ctx, ASE_MT);
2904 gen_helper_mfc0_tccontext(t0);
2908 check_insn(env, ctx, ASE_MT);
2909 gen_helper_mfc0_tcschedule(t0);
2913 check_insn(env, ctx, ASE_MT);
2914 gen_helper_mfc0_tcschefback(t0);
2924 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryLo1));
2925 tcg_gen_ext32s_tl(t0, t0);
2935 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_Context));
2936 tcg_gen_ext32s_tl(t0, t0);
2940 // gen_helper_mfc0_contextconfig(t0); /* SmartMIPS ASE */
2941 rn = "ContextConfig";
2950 gen_mfc0_load32(t0, offsetof(CPUState, CP0_PageMask));
2954 check_insn(env, ctx, ISA_MIPS32R2);
2955 gen_mfc0_load32(t0, offsetof(CPUState, CP0_PageGrain));
2965 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Wired));
2969 check_insn(env, ctx, ISA_MIPS32R2);
2970 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf0));
2974 check_insn(env, ctx, ISA_MIPS32R2);
2975 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf1));
2979 check_insn(env, ctx, ISA_MIPS32R2);
2980 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf2));
2984 check_insn(env, ctx, ISA_MIPS32R2);
2985 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf3));
2989 check_insn(env, ctx, ISA_MIPS32R2);
2990 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf4));
3000 check_insn(env, ctx, ISA_MIPS32R2);
3001 gen_mfc0_load32(t0, offsetof(CPUState, CP0_HWREna));
3011 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_BadVAddr));
3012 tcg_gen_ext32s_tl(t0, t0);
3022 /* Mark as an IO operation because we read the time. */
3025 gen_helper_mfc0_count(t0);
3028 ctx->bstate = BS_STOP;
3032 /* 6,7 are implementation dependent */
3040 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryHi));
3041 tcg_gen_ext32s_tl(t0, t0);
3051 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Compare));
3054 /* 6,7 are implementation dependent */
3062 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Status));
3066 check_insn(env, ctx, ISA_MIPS32R2);
3067 gen_mfc0_load32(t0, offsetof(CPUState, CP0_IntCtl));
3071 check_insn(env, ctx, ISA_MIPS32R2);
3072 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSCtl));
3076 check_insn(env, ctx, ISA_MIPS32R2);
3077 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSMap));
3087 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Cause));
3097 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EPC));
3098 tcg_gen_ext32s_tl(t0, t0);
3108 gen_mfc0_load32(t0, offsetof(CPUState, CP0_PRid));
3112 check_insn(env, ctx, ISA_MIPS32R2);
3113 gen_mfc0_load32(t0, offsetof(CPUState, CP0_EBase));
3123 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config0));
3127 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config1));
3131 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config2));
3135 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config3));
3138 /* 4,5 are reserved */
3139 /* 6,7 are implementation dependent */
3141 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config6));
3145 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config7));
3155 gen_helper_mfc0_lladdr(t0);
3165 gen_helper_1i(mfc0_watchlo, t0, sel);
3175 gen_helper_1i(mfc0_watchhi, t0, sel);
3185 #if defined(TARGET_MIPS64)
3186 check_insn(env, ctx, ISA_MIPS3);
3187 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_XContext));
3188 tcg_gen_ext32s_tl(t0, t0);
3197 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3200 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Framemask));
3208 tcg_gen_movi_tl(t0, 0); /* unimplemented */
3209 rn = "'Diagnostic"; /* implementation dependent */
3214 gen_helper_mfc0_debug(t0); /* EJTAG support */
3218 // gen_helper_mfc0_tracecontrol(t0); /* PDtrace support */
3219 rn = "TraceControl";
3222 // gen_helper_mfc0_tracecontrol2(t0); /* PDtrace support */
3223 rn = "TraceControl2";
3226 // gen_helper_mfc0_usertracedata(t0); /* PDtrace support */
3227 rn = "UserTraceData";
3230 // gen_helper_mfc0_tracebpc(t0); /* PDtrace support */
3241 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_DEPC));
3242 tcg_gen_ext32s_tl(t0, t0);
3252 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Performance0));
3253 rn = "Performance0";
3256 // gen_helper_mfc0_performance1(t0);
3257 rn = "Performance1";
3260 // gen_helper_mfc0_performance2(t0);
3261 rn = "Performance2";
3264 // gen_helper_mfc0_performance3(t0);
3265 rn = "Performance3";
3268 // gen_helper_mfc0_performance4(t0);
3269 rn = "Performance4";
3272 // gen_helper_mfc0_performance5(t0);
3273 rn = "Performance5";
3276 // gen_helper_mfc0_performance6(t0);
3277 rn = "Performance6";
3280 // gen_helper_mfc0_performance7(t0);
3281 rn = "Performance7";
3288 tcg_gen_movi_tl(t0, 0); /* unimplemented */
3294 tcg_gen_movi_tl(t0, 0); /* unimplemented */
3307 gen_mfc0_load32(t0, offsetof(CPUState, CP0_TagLo));
3314 gen_mfc0_load32(t0, offsetof(CPUState, CP0_DataLo));
3327 gen_mfc0_load32(t0, offsetof(CPUState, CP0_TagHi));
3334 gen_mfc0_load32(t0, offsetof(CPUState, CP0_DataHi));
3344 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
3345 tcg_gen_ext32s_tl(t0, t0);
3356 gen_mfc0_load32(t0, offsetof(CPUState, CP0_DESAVE));
3366 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
3370 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
3371 generate_exception(ctx, EXCP_RI);
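/*
 * gen_mtc0 is the store-side counterpart of gen_mfc0.  Most writes go
 * through a helper so that read-only bits are masked and side effects
 * (e.g. timer reprogramming for Count/Compare, mode changes for Status)
 * are applied; writes that may change hflags or unmask interrupts end
 * the translation block with BS_STOP or BS_EXCP.
 */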
3374 static void gen_mtc0 (CPUState *env, DisasContext *ctx, TCGv t0, int reg, int sel)
3376 const char *rn = "invalid";
3379 check_insn(env, ctx, ISA_MIPS32);
3388 gen_helper_mtc0_index(t0);
3392 check_insn(env, ctx, ASE_MT);
3393 gen_helper_mtc0_mvpcontrol(t0);
3397 check_insn(env, ctx, ASE_MT);
3402 check_insn(env, ctx, ASE_MT);
3417 check_insn(env, ctx, ASE_MT);
3418 gen_helper_mtc0_vpecontrol(t0);
3422 check_insn(env, ctx, ASE_MT);
3423 gen_helper_mtc0_vpeconf0(t0);
3427 check_insn(env, ctx, ASE_MT);
3428 gen_helper_mtc0_vpeconf1(t0);
3432 check_insn(env, ctx, ASE_MT);
3433 gen_helper_mtc0_yqmask(t0);
3437 check_insn(env, ctx, ASE_MT);
3438 gen_mtc0_store64(t0, offsetof(CPUState, CP0_VPESchedule));
3442 check_insn(env, ctx, ASE_MT);
3443 gen_mtc0_store64(t0, offsetof(CPUState, CP0_VPEScheFBack));
3444 rn = "VPEScheFBack";
3447 check_insn(env, ctx, ASE_MT);
3448 gen_helper_mtc0_vpeopt(t0);
3458 gen_helper_mtc0_entrylo0(t0);
3462 check_insn(env, ctx, ASE_MT);
3463 gen_helper_mtc0_tcstatus(t0);
3467 check_insn(env, ctx, ASE_MT);
3468 gen_helper_mtc0_tcbind(t0);
3472 check_insn(env, ctx, ASE_MT);
3473 gen_helper_mtc0_tcrestart(t0);
3477 check_insn(env, ctx, ASE_MT);
3478 gen_helper_mtc0_tchalt(t0);
3482 check_insn(env, ctx, ASE_MT);
3483 gen_helper_mtc0_tccontext(t0);
3487 check_insn(env, ctx, ASE_MT);
3488 gen_helper_mtc0_tcschedule(t0);
3492 check_insn(env, ctx, ASE_MT);
3493 gen_helper_mtc0_tcschefback(t0);
3503 gen_helper_mtc0_entrylo1(t0);
3513 gen_helper_mtc0_context(t0);
3517 // gen_helper_mtc0_contextconfig(t0); /* SmartMIPS ASE */
3518 rn = "ContextConfig";
3527 gen_helper_mtc0_pagemask(t0);
3531 check_insn(env, ctx, ISA_MIPS32R2);
3532 gen_helper_mtc0_pagegrain(t0);
3542 gen_helper_mtc0_wired(t0);
3546 check_insn(env, ctx, ISA_MIPS32R2);
3547 gen_helper_mtc0_srsconf0(t0);
3551 check_insn(env, ctx, ISA_MIPS32R2);
3552 gen_helper_mtc0_srsconf1(t0);
3556 check_insn(env, ctx, ISA_MIPS32R2);
3557 gen_helper_mtc0_srsconf2(t0);
3561 check_insn(env, ctx, ISA_MIPS32R2);
3562 gen_helper_mtc0_srsconf3(t0);
3566 check_insn(env, ctx, ISA_MIPS32R2);
3567 gen_helper_mtc0_srsconf4(t0);
3577 check_insn(env, ctx, ISA_MIPS32R2);
3578 gen_helper_mtc0_hwrena(t0);
3592 gen_helper_mtc0_count(t0);
3595 /* 6,7 are implementation dependent */
3603 gen_helper_mtc0_entryhi(t0);
3613 gen_helper_mtc0_compare(t0);
3616 /* 6,7 are implementation dependent */
3624 gen_helper_mtc0_status(t0);
3625 /* BS_STOP isn't good enough here, hflags may have changed. */
3626 gen_save_pc(ctx->pc + 4);
3627 ctx->bstate = BS_EXCP;
3631 check_insn(env, ctx, ISA_MIPS32R2);
3632 gen_helper_mtc0_intctl(t0);
3633 /* Stop translation as we may have switched the execution mode */
3634 ctx->bstate = BS_STOP;
3638 check_insn(env, ctx, ISA_MIPS32R2);
3639 gen_helper_mtc0_srsctl(t0);
3640 /* Stop translation as we may have switched the execution mode */
3641 ctx->bstate = BS_STOP;
3645 check_insn(env, ctx, ISA_MIPS32R2);
3646 gen_mtc0_store32(t0, offsetof(CPUState, CP0_SRSMap));
3647 /* Stop translation as we may have switched the execution mode */
3648 ctx->bstate = BS_STOP;
3658 gen_helper_mtc0_cause(t0);
3668 gen_mtc0_store64(t0, offsetof(CPUState, CP0_EPC));
3682 check_insn(env, ctx, ISA_MIPS32R2);
3683 gen_helper_mtc0_ebase(t0);
3693 gen_helper_mtc0_config0(t0);
3695 /* Stop translation as we may have switched the execution mode */
3696 ctx->bstate = BS_STOP;
3699 /* ignored, read only */
3703 gen_helper_mtc0_config2(t0);
3705 /* Stop translation as we may have switched the execution mode */
3706 ctx->bstate = BS_STOP;
3709 /* ignored, read only */
3712 /* 4,5 are reserved */
3713 /* 6,7 are implementation dependent */
3723 rn = "Invalid config selector";
3740 gen_helper_1i(mtc0_watchlo, t0, sel);
3750 gen_helper_1i(mtc0_watchhi, t0, sel);
3760 #if defined(TARGET_MIPS64)
3761 check_insn(env, ctx, ISA_MIPS3);
3762 gen_helper_mtc0_xcontext(t0);
3771 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3774 gen_helper_mtc0_framemask(t0);
3783 rn = "Diagnostic"; /* implementation dependent */
3788 gen_helper_mtc0_debug(t0); /* EJTAG support */
3789 /* BS_STOP isn't good enough here, hflags may have changed. */
3790 gen_save_pc(ctx->pc + 4);
3791 ctx->bstate = BS_EXCP;
3795 // gen_helper_mtc0_tracecontrol(t0); /* PDtrace support */
3796 rn = "TraceControl";
3797 /* Stop translation as we may have switched the execution mode */
3798 ctx->bstate = BS_STOP;
3801 // gen_helper_mtc0_tracecontrol2(t0); /* PDtrace support */
3802 rn = "TraceControl2";
3803 /* Stop translation as we may have switched the execution mode */
3804 ctx->bstate = BS_STOP;
3809 // gen_helper_mtc0_usertracedata(t0); /* PDtrace support */
3810 rn = "UserTraceData";
3811 /* Stop translation as we may have switched the execution mode */
3812 ctx->bstate = BS_STOP;
3815 // gen_helper_mtc0_tracebpc(t0); /* PDtrace support */
3816 /* Stop translation as we may have switched the execution mode */
3817 ctx->bstate = BS_STOP;
3828 gen_mtc0_store64(t0, offsetof(CPUState, CP0_DEPC));
3838 gen_helper_mtc0_performance0(t0);
3839 rn = "Performance0";
3842 // gen_helper_mtc0_performance1(t0);
3843 rn = "Performance1";
3846 // gen_helper_mtc0_performance2(t0);
3847 rn = "Performance2";
3850 // gen_helper_mtc0_performance3(t0);
3851 rn = "Performance3";
3854 // gen_helper_mtc0_performance4(t0);
3855 rn = "Performance4";
3858 // gen_helper_mtc0_performance5(t0);
3859 rn = "Performance5";
3862 // gen_helper_mtc0_performance6(t0);
3863 rn = "Performance6";
3866 // gen_helper_mtc0_performance7(t0);
3867 rn = "Performance7";
3893 gen_helper_mtc0_taglo(t0);
3900 gen_helper_mtc0_datalo(t0);
3913 gen_helper_mtc0_taghi(t0);
3920 gen_helper_mtc0_datahi(t0);
3931 gen_mtc0_store64(t0, offsetof(CPUState, CP0_ErrorEPC));
3942 gen_mtc0_store32(t0, offsetof(CPUState, CP0_DESAVE));
3948 /* Stop translation as we may have switched the execution mode */
3949 ctx->bstate = BS_STOP;
3954 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
3955 /* For simplicity assume that all writes can cause interrupts. */
3958 ctx->bstate = BS_STOP;
3963 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
3964 generate_exception(ctx, EXCP_RI);
3967 #if defined(TARGET_MIPS64)
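/*
 * gen_dmfc0 and gen_dmtc0 below mirror the 32-bit accessors for the
 * 64-bit DMFC0/DMTC0 instructions; the main difference is that 64-bit
 * CP0 registers (EntryLo*, Context, EntryHi, EPC, XContext, ...) are
 * moved in full width, without the 32-bit sign extension.
 */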
3968 static void gen_dmfc0 (CPUState *env, DisasContext *ctx, TCGv t0, int reg, int sel)
3970 const char *rn = "invalid";
3973 check_insn(env, ctx, ISA_MIPS64);
3979 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Index));
3983 check_insn(env, ctx, ASE_MT);
3984 gen_helper_mfc0_mvpcontrol(t0);
3988 check_insn(env, ctx, ASE_MT);
3989 gen_helper_mfc0_mvpconf0(t0);
3993 check_insn(env, ctx, ASE_MT);
3994 gen_helper_mfc0_mvpconf1(t0);
4004 gen_helper_mfc0_random(t0);
4008 check_insn(env, ctx, ASE_MT);
4009 gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEControl));
4013 check_insn(env, ctx, ASE_MT);
4014 gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEConf0));
4018 check_insn(env, ctx, ASE_MT);
4019 gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEConf1));
4023 check_insn(env, ctx, ASE_MT);
4024 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_YQMask));
4028 check_insn(env, ctx, ASE_MT);
4029 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_VPESchedule));
4033 check_insn(env, ctx, ASE_MT);
4034 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
4035 rn = "VPEScheFBack";
4038 check_insn(env, ctx, ASE_MT);
4039 gen_mfc0_load32(t0, offsetof(CPUState, CP0_VPEOpt));
4049 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryLo0));
4053 check_insn(env, ctx, ASE_MT);
4054 gen_helper_mfc0_tcstatus(t0);
4058 check_insn(env, ctx, ASE_MT);
4059 gen_helper_mfc0_tcbind(t0);
4063 check_insn(env, ctx, ASE_MT);
4064 gen_helper_dmfc0_tcrestart(t0);
4068 check_insn(env, ctx, ASE_MT);
4069 gen_helper_dmfc0_tchalt(t0);
4073 check_insn(env, ctx, ASE_MT);
4074 gen_helper_dmfc0_tccontext(t0);
4078 check_insn(env, ctx, ASE_MT);
4079 gen_helper_dmfc0_tcschedule(t0);
4083 check_insn(env, ctx, ASE_MT);
4084 gen_helper_dmfc0_tcschefback(t0);
4094 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryLo1));
4104 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_Context));
4108 // gen_helper_dmfc0_contextconfig(t0); /* SmartMIPS ASE */
4109 rn = "ContextConfig";
4118 gen_mfc0_load32(t0, offsetof(CPUState, CP0_PageMask));
4122 check_insn(env, ctx, ISA_MIPS32R2);
4123 gen_mfc0_load32(t0, offsetof(CPUState, CP0_PageGrain));
4133 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Wired));
4137 check_insn(env, ctx, ISA_MIPS32R2);
4138 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf0));
4142 check_insn(env, ctx, ISA_MIPS32R2);
4143 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf1));
4147 check_insn(env, ctx, ISA_MIPS32R2);
4148 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf2));
4152 check_insn(env, ctx, ISA_MIPS32R2);
4153 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf3));
4157 check_insn(env, ctx, ISA_MIPS32R2);
4158 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSConf4));
4168 check_insn(env, ctx, ISA_MIPS32R2);
4169 gen_mfc0_load32(t0, offsetof(CPUState, CP0_HWREna));
4179 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_BadVAddr));
4189 /* Mark as an IO operation because we read the time. */
4192 gen_helper_mfc0_count(t0);
4195 ctx->bstate = BS_STOP;
4199 /* 6,7 are implementation dependent */
4207 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EntryHi));
4217 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Compare));
4220 /* 6,7 are implementation dependent */
4228 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Status));
4232 check_insn(env, ctx, ISA_MIPS32R2);
4233 gen_mfc0_load32(t0, offsetof(CPUState, CP0_IntCtl));
4237 check_insn(env, ctx, ISA_MIPS32R2);
4238 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSCtl));
4242 check_insn(env, ctx, ISA_MIPS32R2);
4243 gen_mfc0_load32(t0, offsetof(CPUState, CP0_SRSMap));
4253 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Cause));
4263 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_EPC));
4273 gen_mfc0_load32(t0, offsetof(CPUState, CP0_PRid));
4277 check_insn(env, ctx, ISA_MIPS32R2);
4278 gen_mfc0_load32(t0, offsetof(CPUState, CP0_EBase));
4288 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config0));
4292 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config1));
4296 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config2));
4300 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config3));
4303 /* 6,7 are implementation dependent */
4305 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config6));
4309 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Config7));
4319 gen_helper_dmfc0_lladdr(t0);
4329 gen_helper_1i(dmfc0_watchlo, t0, sel);
4339 gen_helper_1i(mfc0_watchhi, t0, sel);
4349 check_insn(env, ctx, ISA_MIPS3);
4350 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_XContext));
4358 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4361 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Framemask));
4369 tcg_gen_movi_tl(t0, 0); /* unimplemented */
4370 rn = "'Diagnostic"; /* implementation dependent */
4375 gen_helper_mfc0_debug(t0); /* EJTAG support */
4379 // gen_helper_dmfc0_tracecontrol(t0); /* PDtrace support */
4380 rn = "TraceControl";
4383 // gen_helper_dmfc0_tracecontrol2(t0); /* PDtrace support */
4384 rn = "TraceControl2";
4387 // gen_helper_dmfc0_usertracedata(t0); /* PDtrace support */
4388 rn = "UserTraceData";
4391 // gen_helper_dmfc0_tracebpc(t0); /* PDtrace support */
4402 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_DEPC));
4412 gen_mfc0_load32(t0, offsetof(CPUState, CP0_Performance0));
4413 rn = "Performance0";
4416 // gen_helper_dmfc0_performance1(t0);
4417 rn = "Performance1";
4420 // gen_helper_dmfc0_performance2(t0);
4421 rn = "Performance2";
4424 // gen_helper_dmfc0_performance3(t0);
4425 rn = "Performance3";
4428 // gen_helper_dmfc0_performance4(t0);
4429 rn = "Performance4";
4432 // gen_helper_dmfc0_performance5(t0);
4433 rn = "Performance5";
4436 // gen_helper_dmfc0_performance6(t0);
4437 rn = "Performance6";
4440 // gen_helper_dmfc0_performance7(t0);
4441 rn = "Performance7";
4448 tcg_gen_movi_tl(t0, 0); /* unimplemented */
4455 tcg_gen_movi_tl(t0, 0); /* unimplemented */
4468 gen_mfc0_load32(t0, offsetof(CPUState, CP0_TagLo));
4475 gen_mfc0_load32(t0, offsetof(CPUState, CP0_DataLo));
4488 gen_mfc0_load32(t0, offsetof(CPUState, CP0_TagHi));
4495 gen_mfc0_load32(t0, offsetof(CPUState, CP0_DataHi));
4505 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
4516 gen_mfc0_load32(t0, offsetof(CPUState, CP0_DESAVE));
4526 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
4530 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
4531 generate_exception(ctx, EXCP_RI);
4534 static void gen_dmtc0 (CPUState *env, DisasContext *ctx, TCGv t0, int reg, int sel)
4536 const char *rn = "invalid";
4539 check_insn(env, ctx, ISA_MIPS64);
4548 gen_helper_mtc0_index(t0);
4552 check_insn(env, ctx, ASE_MT);
4553 gen_helper_mtc0_mvpcontrol(t0);
4557 check_insn(env, ctx, ASE_MT);
4562 check_insn(env, ctx, ASE_MT);
4577 check_insn(env, ctx, ASE_MT);
4578 gen_helper_mtc0_vpecontrol(t0);
4582 check_insn(env, ctx, ASE_MT);
4583 gen_helper_mtc0_vpeconf0(t0);
4587 check_insn(env, ctx, ASE_MT);
4588 gen_helper_mtc0_vpeconf1(t0);
4592 check_insn(env, ctx, ASE_MT);
4593 gen_helper_mtc0_yqmask(t0);
4597 check_insn(env, ctx, ASE_MT);
4598 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_VPESchedule));
4602 check_insn(env, ctx, ASE_MT);
4603 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
4604 rn = "VPEScheFBack";
4607 check_insn(env, ctx, ASE_MT);
4608 gen_helper_mtc0_vpeopt(t0);
4618 gen_helper_mtc0_entrylo0(t0);
4622 check_insn(env, ctx, ASE_MT);
4623 gen_helper_mtc0_tcstatus(t0);
4627 check_insn(env, ctx, ASE_MT);
4628 gen_helper_mtc0_tcbind(t0);
4632 check_insn(env, ctx, ASE_MT);
4633 gen_helper_mtc0_tcrestart(t0);
4637 check_insn(env, ctx, ASE_MT);
4638 gen_helper_mtc0_tchalt(t0);
4642 check_insn(env, ctx, ASE_MT);
4643 gen_helper_mtc0_tccontext(t0);
4647 check_insn(env, ctx, ASE_MT);
4648 gen_helper_mtc0_tcschedule(t0);
4652 check_insn(env, ctx, ASE_MT);
4653 gen_helper_mtc0_tcschefback(t0);
4663 gen_helper_mtc0_entrylo1(t0);
4673 gen_helper_mtc0_context(t0);
4677 // gen_helper_mtc0_contextconfig(t0); /* SmartMIPS ASE */
4678 rn = "ContextConfig";
4687 gen_helper_mtc0_pagemask(t0);
4691 check_insn(env, ctx, ISA_MIPS32R2);
4692 gen_helper_mtc0_pagegrain(t0);
4702 gen_helper_mtc0_wired(t0);
4706 check_insn(env, ctx, ISA_MIPS32R2);
4707 gen_helper_mtc0_srsconf0(t0);
4711 check_insn(env, ctx, ISA_MIPS32R2);
4712 gen_helper_mtc0_srsconf1(t0);
4716 check_insn(env, ctx, ISA_MIPS32R2);
4717 gen_helper_mtc0_srsconf2(t0);
4721 check_insn(env, ctx, ISA_MIPS32R2);
4722 gen_helper_mtc0_srsconf3(t0);
4726 check_insn(env, ctx, ISA_MIPS32R2);
4727 gen_helper_mtc0_srsconf4(t0);
4737 check_insn(env, ctx, ISA_MIPS32R2);
4738 gen_helper_mtc0_hwrena(t0);
4752 gen_helper_mtc0_count(t0);
4755 /* 6,7 are implementation dependent */
4759 /* Stop translation as we may have switched the execution mode */
4760 ctx->bstate = BS_STOP;
4765 gen_helper_mtc0_entryhi(t0);
4775 gen_helper_mtc0_compare(t0);
4778 /* 6,7 are implementation dependent */
4782 /* Stop translation as we may have switched the execution mode */
4783 ctx->bstate = BS_STOP;
4788 gen_helper_mtc0_status(t0);
4789 /* BS_STOP isn't good enough here, hflags may have changed. */
4790 gen_save_pc(ctx->pc + 4);
4791 ctx->bstate = BS_EXCP;
4795 check_insn(env, ctx, ISA_MIPS32R2);
4796 gen_helper_mtc0_intctl(t0);
4797 /* Stop translation as we may have switched the execution mode */
4798 ctx->bstate = BS_STOP;
4802 check_insn(env, ctx, ISA_MIPS32R2);
4803 gen_helper_mtc0_srsctl(t0);
4804 /* Stop translation as we may have switched the execution mode */
4805 ctx->bstate = BS_STOP;
4809 check_insn(env, ctx, ISA_MIPS32R2);
4810 gen_mtc0_store32(t0, offsetof(CPUState, CP0_SRSMap));
4811 /* Stop translation as we may have switched the execution mode */
4812 ctx->bstate = BS_STOP;
4822 gen_helper_mtc0_cause(t0);
4828 /* Stop translation as we may have switched the execution mode */
4829 ctx->bstate = BS_STOP;
4834 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_EPC));
4848 check_insn(env, ctx, ISA_MIPS32R2);
4849 gen_helper_mtc0_ebase(t0);
4859 gen_helper_mtc0_config0(t0);
4861 /* Stop translation as we may have switched the execution mode */
4862 ctx->bstate = BS_STOP;
4865 /* ignored, read only */
4869 gen_helper_mtc0_config2(t0);
4871 /* Stop translation as we may have switched the execution mode */
4872 ctx->bstate = BS_STOP;
4878 /* 6,7 are implementation dependent */
4880 rn = "Invalid config selector";
4897 gen_helper_1i(mtc0_watchlo, t0, sel);
4907 gen_helper_1i(mtc0_watchhi, t0, sel);
4917 check_insn(env, ctx, ISA_MIPS3);
4918 gen_helper_mtc0_xcontext(t0);
4926 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4929 gen_helper_mtc0_framemask(t0);
4938 rn = "Diagnostic"; /* implementation dependent */
4943 gen_helper_mtc0_debug(t0); /* EJTAG support */
4944 /* BS_STOP isn't good enough here, hflags may have changed. */
4945 gen_save_pc(ctx->pc + 4);
4946 ctx->bstate = BS_EXCP;
4950 // gen_helper_mtc0_tracecontrol(t0); /* PDtrace support */
4951 /* Stop translation as we may have switched the execution mode */
4952 ctx->bstate = BS_STOP;
4953 rn = "TraceControl";
4956 // gen_helper_mtc0_tracecontrol2(t0); /* PDtrace support */
4957 /* Stop translation as we may have switched the execution mode */
4958 ctx->bstate = BS_STOP;
4959 rn = "TraceControl2";
4962 // gen_helper_mtc0_usertracedata(t0); /* PDtrace support */
4963 /* Stop translation as we may have switched the execution mode */
4964 ctx->bstate = BS_STOP;
4965 rn = "UserTraceData";
4968 // gen_helper_mtc0_tracebpc(t0); /* PDtrace support */
4969 /* Stop translation as we may have switched the execution mode */
4970 ctx->bstate = BS_STOP;
4981 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_DEPC));
4991 gen_helper_mtc0_performance0(t0);
4992 rn = "Performance0";
4995 // gen_helper_mtc0_performance1(t0);
4996 rn = "Performance1";
4999 // gen_helper_mtc0_performance2(t0);
5000 rn = "Performance2";
5003 // gen_helper_mtc0_performance3(t0);
5004 rn = "Performance3";
5007 // gen_helper_mtc0_performance4(t0);
5008 rn = "Performance4";
5011 // gen_helper_mtc0_performance5(t0);
5012 rn = "Performance5";
5015 // gen_helper_mtc0_performance6(t0);
5016 rn = "Performance6";
5019 // gen_helper_mtc0_performance7(t0);
5020 rn = "Performance7";
5046 gen_helper_mtc0_taglo(t0);
5053 gen_helper_mtc0_datalo(t0);
5066 gen_helper_mtc0_taghi(t0);
5073 gen_helper_mtc0_datahi(t0);
5084 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
5095 gen_mtc0_store32(t0, offsetof(CPUState, CP0_DESAVE));
5101 /* Stop translation as we may have switched the execution mode */
5102 ctx->bstate = BS_STOP;
5107 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
5108 /* For simplicity assume that all writes can cause interrupts. */
5111 ctx->bstate = BS_STOP;
5116 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
5117 generate_exception(ctx, EXCP_RI);
5119 #endif /* TARGET_MIPS64 */
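/*
 * gen_mftr and gen_mttr implement the MT ASE MFTR/MTTR instructions,
 * which access registers belonging to another thread context (TC).  If
 * the target TC is bound to a different VPE without MVP privileges, or
 * its number exceeds what MVPConf0 allows, the read returns -1 and the
 * write is simply dropped.  The u and sel operands select between CP0,
 * GPR, LO/HI/ACX, DSP and FPU views of the target context.
 */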
5121 static void gen_mftr(CPUState *env, DisasContext *ctx, int rt, int rd,
5122 int u, int sel, int h)
5124 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
5125 TCGv t0 = tcg_temp_local_new();
5127 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
5128 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
5129 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
5130 tcg_gen_movi_tl(t0, -1);
5131 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
5132 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
5133 tcg_gen_movi_tl(t0, -1);
5139 gen_helper_mftc0_tcstatus(t0);
5142 gen_helper_mftc0_tcbind(t0);
5145 gen_helper_mftc0_tcrestart(t0);
5148 gen_helper_mftc0_tchalt(t0);
5151 gen_helper_mftc0_tccontext(t0);
5154 gen_helper_mftc0_tcschedule(t0);
5157 gen_helper_mftc0_tcschefback(t0);
5160 gen_mfc0(env, ctx, t0, rt, sel);
5167 gen_helper_mftc0_entryhi(t0);
5170 gen_mfc0(env, ctx, t0, rt, sel);
5176 gen_helper_mftc0_status(t0);
5179 gen_mfc0(env, ctx, t0, rt, sel);
5185 gen_helper_mftc0_debug(t0);
5188 gen_mfc0(env, ctx, t0, rt, sel);
5193 gen_mfc0(env, ctx, t0, rt, sel);
5195 } else switch (sel) {
5196 /* GPR registers. */
5198 gen_helper_1i(mftgpr, t0, rt);
5200 /* Auxiliary CPU registers */
5204 gen_helper_1i(mftlo, t0, 0);
5207 gen_helper_1i(mfthi, t0, 0);
5210 gen_helper_1i(mftacx, t0, 0);
5213 gen_helper_1i(mftlo, t0, 1);
5216 gen_helper_1i(mfthi, t0, 1);
5219 gen_helper_1i(mftacx, t0, 1);
5222 gen_helper_1i(mftlo, t0, 2);
5225 gen_helper_1i(mfthi, t0, 2);
5228 gen_helper_1i(mftacx, t0, 2);
5231 gen_helper_1i(mftlo, t0, 3);
5234 gen_helper_1i(mfthi, t0, 3);
5237 gen_helper_1i(mftacx, t0, 3);
5240 gen_helper_mftdsp(t0);
5246 /* Floating point (COP1). */
5248 /* XXX: For now we support only a single FPU context. */
5250 TCGv_i32 fp0 = tcg_temp_new_i32();
5252 gen_load_fpr32(fp0, rt);
5253 tcg_gen_ext_i32_tl(t0, fp0);
5254 tcg_temp_free_i32(fp0);
5256 TCGv_i32 fp0 = tcg_temp_new_i32();
5258 gen_load_fpr32h(fp0, rt);
5259 tcg_gen_ext_i32_tl(t0, fp0);
5260 tcg_temp_free_i32(fp0);
5264 /* XXX: For now we support only a single FPU context. */
5265 gen_helper_1i(cfc1, t0, rt);
5267 /* COP2: Not implemented. */
5274 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
5275 gen_store_gpr(t0, rd);
5281 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
5282 generate_exception(ctx, EXCP_RI);
5285 static void gen_mttr(CPUState *env, DisasContext *ctx, int rd, int rt,
5286 int u, int sel, int h)
5288 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
5289 TCGv t0 = tcg_temp_local_new();
5291 gen_load_gpr(t0, rt);
5292 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
5293 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
5294 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
5296 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
5297 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
5304 gen_helper_mttc0_tcstatus(t0);
5307 gen_helper_mttc0_tcbind(t0);
5310 gen_helper_mttc0_tcrestart(t0);
5313 gen_helper_mttc0_tchalt(t0);
5316 gen_helper_mttc0_tccontext(t0);
5319 gen_helper_mttc0_tcschedule(t0);
5322 gen_helper_mttc0_tcschefback(t0);
5325 gen_mtc0(env, ctx, t0, rd, sel);
5332 gen_helper_mttc0_entryhi(t0);
5335 gen_mtc0(env, ctx, t0, rd, sel);
5341 gen_helper_mttc0_status(t0);
5344 gen_mtc0(env, ctx, t0, rd, sel);
5350 gen_helper_mttc0_debug(t0);
5353 gen_mtc0(env, ctx, t0, rd, sel);
5358 gen_mtc0(env, ctx, t0, rd, sel);
5360 } else switch (sel) {
5361 /* GPR registers. */
5363 gen_helper_1i(mttgpr, t0, rd);
5365 /* Auxiliary CPU registers */
5369 gen_helper_1i(mttlo, t0, 0);
5372 gen_helper_1i(mtthi, t0, 0);
5375 gen_helper_1i(mttacx, t0, 0);
5378 gen_helper_1i(mttlo, t0, 1);
5381 gen_helper_1i(mtthi, t0, 1);
5384 gen_helper_1i(mttacx, t0, 1);
5387 gen_helper_1i(mttlo, t0, 2);
5390 gen_helper_1i(mtthi, t0, 2);
5393 gen_helper_1i(mttacx, t0, 2);
5396 gen_helper_1i(mttlo, t0, 3);
5399 gen_helper_1i(mtthi, t0, 3);
5402 gen_helper_1i(mttacx, t0, 3);
5405 gen_helper_mttdsp(t0);
5411 /* Floating point (COP1). */
5413 /* XXX: For now we support only a single FPU context. */
5415 TCGv_i32 fp0 = tcg_temp_new_i32();
5417 tcg_gen_trunc_tl_i32(fp0, t0);
5418 gen_store_fpr32(fp0, rd);
5419 tcg_temp_free_i32(fp0);
5421 TCGv_i32 fp0 = tcg_temp_new_i32();
5423 tcg_gen_trunc_tl_i32(fp0, t0);
5424 gen_store_fpr32h(fp0, rd);
5425 tcg_temp_free_i32(fp0);
5429 /* XXX: For now we support only a single FPU context. */
5430 gen_helper_1i(ctc1, t0, rd);
5432 /* COP2: Not implemented. */
5439 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
5445 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
5446 generate_exception(ctx, EXCP_RI);
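/*
 * gen_cp0 dispatches the COP0 major opcode: the register moves
 * (MFC0/MTC0, their 64-bit forms and the MT ASE MFTR/MTTR) and the
 * privileged control instructions TLBWI, TLBWR, TLBP, TLBR, ERET,
 * DERET and WAIT.  The TLB operations are only emitted when the CPU
 * model provides the corresponding helper.
 */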
5449 static void gen_cp0 (CPUState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
5451 const char *opn = "ldst";
5459 gen_mfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
5464 TCGv t0 = tcg_temp_new();
5466 gen_load_gpr(t0, rt);
5467 gen_mtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
5472 #if defined(TARGET_MIPS64)
5474 check_insn(env, ctx, ISA_MIPS3);
5479 gen_dmfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
5483 check_insn(env, ctx, ISA_MIPS3);
5485 TCGv t0 = tcg_temp_new();
5487 gen_load_gpr(t0, rt);
5488 gen_dmtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
5495 check_insn(env, ctx, ASE_MT);
5500 gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
5501 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
5505 check_insn(env, ctx, ASE_MT);
5506 gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
5507 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
5512 if (!env->tlb->helper_tlbwi)
5518 if (!env->tlb->helper_tlbwr)
5524 if (!env->tlb->helper_tlbp)
5530 if (!env->tlb->helper_tlbr)
5536 check_insn(env, ctx, ISA_MIPS2);
5538 ctx->bstate = BS_EXCP;
5542 check_insn(env, ctx, ISA_MIPS32);
5543 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
5545 generate_exception(ctx, EXCP_RI);
5548 ctx->bstate = BS_EXCP;
5553 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
5554         /* If we get an exception, we want to restart at the next instruction */
5556 save_cpu_state(ctx, 1);
5559 ctx->bstate = BS_EXCP;
5564 generate_exception(ctx, EXCP_RI);
5567 MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);
5569 #endif /* !CONFIG_USER_ONLY */
5571 /* CP1 Branches (before delay slot) */
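/*
 * gen_compute_branch1 handles branches on the floating point condition
 * codes: BC1F/BC1T and their "likely" forms test a single bit of FCR31,
 * while the BC1ANY2 and BC1ANY4 variants OR together two or four
 * consecutive condition codes before testing.  The result is left in
 * bcond and the branch is resolved after the delay slot, like the
 * integer branches.
 */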
5572 static void gen_compute_branch1 (CPUState *env, DisasContext *ctx, uint32_t op,
5573 int32_t cc, int32_t offset)
5575 target_ulong btarget;
5576 const char *opn = "cp1 cond branch";
5577 TCGv_i32 t0 = tcg_temp_new_i32();
5580 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
5582 btarget = ctx->pc + 4 + offset;
5586 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5587 tcg_gen_not_i32(t0, t0);
5588 tcg_gen_andi_i32(t0, t0, 1);
5589 tcg_gen_extu_i32_tl(bcond, t0);
5593 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5594 tcg_gen_not_i32(t0, t0);
5595 tcg_gen_andi_i32(t0, t0, 1);
5596 tcg_gen_extu_i32_tl(bcond, t0);
5600 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5601 tcg_gen_andi_i32(t0, t0, 1);
5602 tcg_gen_extu_i32_tl(bcond, t0);
5606 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5607 tcg_gen_andi_i32(t0, t0, 1);
5608 tcg_gen_extu_i32_tl(bcond, t0);
5611 ctx->hflags |= MIPS_HFLAG_BL;
5615 TCGv_i32 t1 = tcg_temp_new_i32();
5616 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5617 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5618 tcg_gen_or_i32(t0, t0, t1);
5619 tcg_temp_free_i32(t1);
5620 tcg_gen_not_i32(t0, t0);
5621 tcg_gen_andi_i32(t0, t0, 1);
5622 tcg_gen_extu_i32_tl(bcond, t0);
5628 TCGv_i32 t1 = tcg_temp_new_i32();
5629 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5630 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5631 tcg_gen_or_i32(t0, t0, t1);
5632 tcg_temp_free_i32(t1);
5633 tcg_gen_andi_i32(t0, t0, 1);
5634 tcg_gen_extu_i32_tl(bcond, t0);
5640 TCGv_i32 t1 = tcg_temp_new_i32();
5641 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5642 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5643 tcg_gen_or_i32(t0, t0, t1);
5644 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
5645 tcg_gen_or_i32(t0, t0, t1);
5646 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
5647 tcg_gen_or_i32(t0, t0, t1);
5648 tcg_temp_free_i32(t1);
5649 tcg_gen_not_i32(t0, t0);
5650 tcg_gen_andi_i32(t0, t0, 1);
5651 tcg_gen_extu_i32_tl(bcond, t0);
5657 TCGv_i32 t1 = tcg_temp_new_i32();
5658 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5659 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5660 tcg_gen_or_i32(t0, t0, t1);
5661 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
5662 tcg_gen_or_i32(t0, t0, t1);
5663 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
5664 tcg_gen_or_i32(t0, t0, t1);
5665 tcg_temp_free_i32(t1);
5666 tcg_gen_andi_i32(t0, t0, 1);
5667 tcg_gen_extu_i32_tl(bcond, t0);
5671 ctx->hflags |= MIPS_HFLAG_BC;
5675 generate_exception (ctx, EXCP_RI);
5678 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx, opn,
5679 ctx->hflags, btarget);
5680 ctx->btarget = btarget;
5683 tcg_temp_free_i32(t0);
5686 /* Coprocessor 1 (FPU) */
5688 #define FOP(func, fmt) (((fmt) << 21) | (func))
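/*
 * gen_cp1 implements the FPU register moves: MFC1/MTC1 for the low 32
 * bits of an FPR, CFC1/CTC1 for the control registers, DMFC1/DMTC1 on
 * 64-bit targets, and MFHC1/MTHC1 for the upper 32 bits of a 64-bit
 * FPR.
 */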
5690 static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
5692 const char *opn = "cp1 move";
5693 TCGv t0 = tcg_temp_new();
5698 TCGv_i32 fp0 = tcg_temp_new_i32();
5700 gen_load_fpr32(fp0, fs);
5701 tcg_gen_ext_i32_tl(t0, fp0);
5702 tcg_temp_free_i32(fp0);
5704 gen_store_gpr(t0, rt);
5708 gen_load_gpr(t0, rt);
5710 TCGv_i32 fp0 = tcg_temp_new_i32();
5712 tcg_gen_trunc_tl_i32(fp0, t0);
5713 gen_store_fpr32(fp0, fs);
5714 tcg_temp_free_i32(fp0);
5719 gen_helper_1i(cfc1, t0, fs);
5720 gen_store_gpr(t0, rt);
5724 gen_load_gpr(t0, rt);
5725 gen_helper_1i(ctc1, t0, fs);
5728 #if defined(TARGET_MIPS64)
5730 gen_load_fpr64(ctx, t0, fs);
5731 gen_store_gpr(t0, rt);
5735 gen_load_gpr(t0, rt);
5736 gen_store_fpr64(ctx, t0, fs);
5742 TCGv_i32 fp0 = tcg_temp_new_i32();
5744 gen_load_fpr32h(fp0, fs);
5745 tcg_gen_ext_i32_tl(t0, fp0);
5746 tcg_temp_free_i32(fp0);
5748 gen_store_gpr(t0, rt);
5752 gen_load_gpr(t0, rt);
5754 TCGv_i32 fp0 = tcg_temp_new_i32();
5756 tcg_gen_trunc_tl_i32(fp0, t0);
5757 gen_store_fpr32h(fp0, fs);
5758 tcg_temp_free_i32(fp0);
5764 generate_exception (ctx, EXCP_RI);
5767 MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);
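/*
 * gen_movci implements MOVF and MOVT: GPR rs is copied to rd only when
 * floating point condition code cc matches the tf bit of the opcode.
 */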
5773 static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
5789 l1 = gen_new_label();
5790 t0 = tcg_temp_new_i32();
5791 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5792 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5794 tcg_gen_movi_tl(cpu_gpr[rd], 0);
5796 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
5799 tcg_temp_free_i32(t0);
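/*
 * gen_movcf_{s,d,ps} are the FPU counterparts: MOVF.fmt and MOVT.fmt
 * copy fs to fd when condition code cc matches tf; the paired-single
 * form tests cc for the lower half and cc+1 for the upper half.
 */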
5802 static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
5805 TCGv_i32 t0 = tcg_temp_new_i32();
5806 int l1 = gen_new_label();
5813 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5814 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5815 gen_load_fpr32(t0, fs);
5816 gen_store_fpr32(t0, fd);
5818 tcg_temp_free_i32(t0);
5821 static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
5824 TCGv_i32 t0 = tcg_temp_new_i32();
5826 int l1 = gen_new_label();
5833 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5834 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5835 fp0 = tcg_temp_local_new_i64();
5836 gen_load_fpr64(ctx, fp0, fs);
5837 gen_store_fpr64(ctx, fp0, fd);
5838 tcg_temp_free_i64(fp0);
5840 tcg_temp_free_i32(t0);
5843 static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
5846 TCGv_i32 t0 = tcg_temp_new_i32();
5847 int l1 = gen_new_label();
5848 int l2 = gen_new_label();
5855 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5856 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5857 gen_load_fpr32(t0, fs);
5858 gen_store_fpr32(t0, fd);
5861 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc+1));
5862 tcg_gen_brcondi_i32(cond, t0, 0, l2);
5863 gen_load_fpr32h(t0, fs);
5864 gen_store_fpr32h(t0, fd);
5867 tcg_temp_free_i32(t0);
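/*
 * gen_farith decodes the fmt and function fields of a COP1 arithmetic
 * instruction, combined as FOP(func, fmt), and emits the matching
 * helper for the S, D, W, L and PS formats; e.g. FOP(0, 0x10) is ADD.S
 * and FOP(0, 0x11) is ADD.D.  The C.cond compares occupy func values
 * 48..63 and write a condition code instead of a result register.
 */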
5871 static void gen_farith (DisasContext *ctx, uint32_t op1,
5872 int ft, int fs, int fd, int cc)
5874 const char *opn = "farith";
5875 const char *condnames[] = {
5893 const char *condnames_abs[] = {
5911 enum { BINOP, CMPOP, OTHEROP } optype = OTHEROP;
5912 uint32_t func = ctx->opcode & 0x3f;
5914 switch (ctx->opcode & FOP(0x3f, 0x1f)) {
5917 TCGv_i32 fp0 = tcg_temp_new_i32();
5918 TCGv_i32 fp1 = tcg_temp_new_i32();
5920 gen_load_fpr32(fp0, fs);
5921 gen_load_fpr32(fp1, ft);
5922 gen_helper_float_add_s(fp0, fp0, fp1);
5923 tcg_temp_free_i32(fp1);
5924 gen_store_fpr32(fp0, fd);
5925 tcg_temp_free_i32(fp0);
5932 TCGv_i32 fp0 = tcg_temp_new_i32();
5933 TCGv_i32 fp1 = tcg_temp_new_i32();
5935 gen_load_fpr32(fp0, fs);
5936 gen_load_fpr32(fp1, ft);
5937 gen_helper_float_sub_s(fp0, fp0, fp1);
5938 tcg_temp_free_i32(fp1);
5939 gen_store_fpr32(fp0, fd);
5940 tcg_temp_free_i32(fp0);
5947 TCGv_i32 fp0 = tcg_temp_new_i32();
5948 TCGv_i32 fp1 = tcg_temp_new_i32();
5950 gen_load_fpr32(fp0, fs);
5951 gen_load_fpr32(fp1, ft);
5952 gen_helper_float_mul_s(fp0, fp0, fp1);
5953 tcg_temp_free_i32(fp1);
5954 gen_store_fpr32(fp0, fd);
5955 tcg_temp_free_i32(fp0);
5962 TCGv_i32 fp0 = tcg_temp_new_i32();
5963 TCGv_i32 fp1 = tcg_temp_new_i32();
5965 gen_load_fpr32(fp0, fs);
5966 gen_load_fpr32(fp1, ft);
5967 gen_helper_float_div_s(fp0, fp0, fp1);
5968 tcg_temp_free_i32(fp1);
5969 gen_store_fpr32(fp0, fd);
5970 tcg_temp_free_i32(fp0);
5977 TCGv_i32 fp0 = tcg_temp_new_i32();
5979 gen_load_fpr32(fp0, fs);
5980 gen_helper_float_sqrt_s(fp0, fp0);
5981 gen_store_fpr32(fp0, fd);
5982 tcg_temp_free_i32(fp0);
5988 TCGv_i32 fp0 = tcg_temp_new_i32();
5990 gen_load_fpr32(fp0, fs);
5991 gen_helper_float_abs_s(fp0, fp0);
5992 gen_store_fpr32(fp0, fd);
5993 tcg_temp_free_i32(fp0);
5999 TCGv_i32 fp0 = tcg_temp_new_i32();
6001 gen_load_fpr32(fp0, fs);
6002 gen_store_fpr32(fp0, fd);
6003 tcg_temp_free_i32(fp0);
6009 TCGv_i32 fp0 = tcg_temp_new_i32();
6011 gen_load_fpr32(fp0, fs);
6012 gen_helper_float_chs_s(fp0, fp0);
6013 gen_store_fpr32(fp0, fd);
6014 tcg_temp_free_i32(fp0);
6019 check_cp1_64bitmode(ctx);
6021 TCGv_i32 fp32 = tcg_temp_new_i32();
6022 TCGv_i64 fp64 = tcg_temp_new_i64();
6024 gen_load_fpr32(fp32, fs);
6025 gen_helper_float_roundl_s(fp64, fp32);
6026 tcg_temp_free_i32(fp32);
6027 gen_store_fpr64(ctx, fp64, fd);
6028 tcg_temp_free_i64(fp64);
6033 check_cp1_64bitmode(ctx);
6035 TCGv_i32 fp32 = tcg_temp_new_i32();
6036 TCGv_i64 fp64 = tcg_temp_new_i64();
6038 gen_load_fpr32(fp32, fs);
6039 gen_helper_float_truncl_s(fp64, fp32);
6040 tcg_temp_free_i32(fp32);
6041 gen_store_fpr64(ctx, fp64, fd);
6042 tcg_temp_free_i64(fp64);
6047 check_cp1_64bitmode(ctx);
6049 TCGv_i32 fp32 = tcg_temp_new_i32();
6050 TCGv_i64 fp64 = tcg_temp_new_i64();
6052 gen_load_fpr32(fp32, fs);
6053 gen_helper_float_ceill_s(fp64, fp32);
6054 tcg_temp_free_i32(fp32);
6055 gen_store_fpr64(ctx, fp64, fd);
6056 tcg_temp_free_i64(fp64);
6061 check_cp1_64bitmode(ctx);
6063 TCGv_i32 fp32 = tcg_temp_new_i32();
6064 TCGv_i64 fp64 = tcg_temp_new_i64();
6066 gen_load_fpr32(fp32, fs);
6067 gen_helper_float_floorl_s(fp64, fp32);
6068 tcg_temp_free_i32(fp32);
6069 gen_store_fpr64(ctx, fp64, fd);
6070 tcg_temp_free_i64(fp64);
6076 TCGv_i32 fp0 = tcg_temp_new_i32();
6078 gen_load_fpr32(fp0, fs);
6079 gen_helper_float_roundw_s(fp0, fp0);
6080 gen_store_fpr32(fp0, fd);
6081 tcg_temp_free_i32(fp0);
6087 TCGv_i32 fp0 = tcg_temp_new_i32();
6089 gen_load_fpr32(fp0, fs);
6090 gen_helper_float_truncw_s(fp0, fp0);
6091 gen_store_fpr32(fp0, fd);
6092 tcg_temp_free_i32(fp0);
6098 TCGv_i32 fp0 = tcg_temp_new_i32();
6100 gen_load_fpr32(fp0, fs);
6101 gen_helper_float_ceilw_s(fp0, fp0);
6102 gen_store_fpr32(fp0, fd);
6103 tcg_temp_free_i32(fp0);
6109 TCGv_i32 fp0 = tcg_temp_new_i32();
6111 gen_load_fpr32(fp0, fs);
6112 gen_helper_float_floorw_s(fp0, fp0);
6113 gen_store_fpr32(fp0, fd);
6114 tcg_temp_free_i32(fp0);
6119 gen_movcf_s(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6124 int l1 = gen_new_label();
6125 TCGv t0 = tcg_temp_new();
6126 TCGv_i32 fp0 = tcg_temp_local_new_i32();
6128 gen_load_gpr(t0, ft);
6129 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
6130 gen_load_fpr32(fp0, fs);
6131 gen_store_fpr32(fp0, fd);
6132 tcg_temp_free_i32(fp0);
6140 int l1 = gen_new_label();
6141 TCGv t0 = tcg_temp_new();
6142 TCGv_i32 fp0 = tcg_temp_local_new_i32();
6144 gen_load_gpr(t0, ft);
6145 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6146 gen_load_fpr32(fp0, fs);
6147 gen_store_fpr32(fp0, fd);
6148 tcg_temp_free_i32(fp0);
6157 TCGv_i32 fp0 = tcg_temp_new_i32();
6159 gen_load_fpr32(fp0, fs);
6160 gen_helper_float_recip_s(fp0, fp0);
6161 gen_store_fpr32(fp0, fd);
6162 tcg_temp_free_i32(fp0);
6169 TCGv_i32 fp0 = tcg_temp_new_i32();
6171 gen_load_fpr32(fp0, fs);
6172 gen_helper_float_rsqrt_s(fp0, fp0);
6173 gen_store_fpr32(fp0, fd);
6174 tcg_temp_free_i32(fp0);
6179 check_cp1_64bitmode(ctx);
6181 TCGv_i32 fp0 = tcg_temp_new_i32();
6182 TCGv_i32 fp1 = tcg_temp_new_i32();
6184 gen_load_fpr32(fp0, fs);
6185 gen_load_fpr32(fp1, fd);
6186 gen_helper_float_recip2_s(fp0, fp0, fp1);
6187 tcg_temp_free_i32(fp1);
6188 gen_store_fpr32(fp0, fd);
6189 tcg_temp_free_i32(fp0);
6194 check_cp1_64bitmode(ctx);
6196 TCGv_i32 fp0 = tcg_temp_new_i32();
6198 gen_load_fpr32(fp0, fs);
6199 gen_helper_float_recip1_s(fp0, fp0);
6200 gen_store_fpr32(fp0, fd);
6201 tcg_temp_free_i32(fp0);
6206 check_cp1_64bitmode(ctx);
6208 TCGv_i32 fp0 = tcg_temp_new_i32();
6210 gen_load_fpr32(fp0, fs);
6211 gen_helper_float_rsqrt1_s(fp0, fp0);
6212 gen_store_fpr32(fp0, fd);
6213 tcg_temp_free_i32(fp0);
6218 check_cp1_64bitmode(ctx);
6220 TCGv_i32 fp0 = tcg_temp_new_i32();
6221 TCGv_i32 fp1 = tcg_temp_new_i32();
6223 gen_load_fpr32(fp0, fs);
6224 gen_load_fpr32(fp1, ft);
6225 gen_helper_float_rsqrt2_s(fp0, fp0, fp1);
6226 tcg_temp_free_i32(fp1);
6227 gen_store_fpr32(fp0, fd);
6228 tcg_temp_free_i32(fp0);
6233 check_cp1_registers(ctx, fd);
6235 TCGv_i32 fp32 = tcg_temp_new_i32();
6236 TCGv_i64 fp64 = tcg_temp_new_i64();
6238 gen_load_fpr32(fp32, fs);
6239 gen_helper_float_cvtd_s(fp64, fp32);
6240 tcg_temp_free_i32(fp32);
6241 gen_store_fpr64(ctx, fp64, fd);
6242 tcg_temp_free_i64(fp64);
6248 TCGv_i32 fp0 = tcg_temp_new_i32();
6250 gen_load_fpr32(fp0, fs);
6251 gen_helper_float_cvtw_s(fp0, fp0);
6252 gen_store_fpr32(fp0, fd);
6253 tcg_temp_free_i32(fp0);
6258 check_cp1_64bitmode(ctx);
6260 TCGv_i32 fp32 = tcg_temp_new_i32();
6261 TCGv_i64 fp64 = tcg_temp_new_i64();
6263 gen_load_fpr32(fp32, fs);
6264 gen_helper_float_cvtl_s(fp64, fp32);
6265 tcg_temp_free_i32(fp32);
6266 gen_store_fpr64(ctx, fp64, fd);
6267 tcg_temp_free_i64(fp64);
6272 check_cp1_64bitmode(ctx);
6274 TCGv_i64 fp64 = tcg_temp_new_i64();
6275 TCGv_i32 fp32_0 = tcg_temp_new_i32();
6276 TCGv_i32 fp32_1 = tcg_temp_new_i32();
6278 gen_load_fpr32(fp32_0, fs);
6279 gen_load_fpr32(fp32_1, ft);
6280 tcg_gen_concat_i32_i64(fp64, fp32_0, fp32_1);
6281 tcg_temp_free_i32(fp32_1);
6282 tcg_temp_free_i32(fp32_0);
6283 gen_store_fpr64(ctx, fp64, fd);
6284 tcg_temp_free_i64(fp64);
6305 TCGv_i32 fp0 = tcg_temp_new_i32();
6306 TCGv_i32 fp1 = tcg_temp_new_i32();
6308 gen_load_fpr32(fp0, fs);
6309 gen_load_fpr32(fp1, ft);
6310 if (ctx->opcode & (1 << 6)) {
6312 gen_cmpabs_s(func-48, fp0, fp1, cc);
6313 opn = condnames_abs[func-48];
6315 gen_cmp_s(func-48, fp0, fp1, cc);
6316 opn = condnames[func-48];
6318 tcg_temp_free_i32(fp0);
6319 tcg_temp_free_i32(fp1);
6323 check_cp1_registers(ctx, fs | ft | fd);
6325 TCGv_i64 fp0 = tcg_temp_new_i64();
6326 TCGv_i64 fp1 = tcg_temp_new_i64();
6328 gen_load_fpr64(ctx, fp0, fs);
6329 gen_load_fpr64(ctx, fp1, ft);
6330 gen_helper_float_add_d(fp0, fp0, fp1);
6331 tcg_temp_free_i64(fp1);
6332 gen_store_fpr64(ctx, fp0, fd);
6333 tcg_temp_free_i64(fp0);
6339 check_cp1_registers(ctx, fs | ft | fd);
6341 TCGv_i64 fp0 = tcg_temp_new_i64();
6342 TCGv_i64 fp1 = tcg_temp_new_i64();
6344 gen_load_fpr64(ctx, fp0, fs);
6345 gen_load_fpr64(ctx, fp1, ft);
6346 gen_helper_float_sub_d(fp0, fp0, fp1);
6347 tcg_temp_free_i64(fp1);
6348 gen_store_fpr64(ctx, fp0, fd);
6349 tcg_temp_free_i64(fp0);
6355 check_cp1_registers(ctx, fs | ft | fd);
6357 TCGv_i64 fp0 = tcg_temp_new_i64();
6358 TCGv_i64 fp1 = tcg_temp_new_i64();
6360 gen_load_fpr64(ctx, fp0, fs);
6361 gen_load_fpr64(ctx, fp1, ft);
6362 gen_helper_float_mul_d(fp0, fp0, fp1);
6363 tcg_temp_free_i64(fp1);
6364 gen_store_fpr64(ctx, fp0, fd);
6365 tcg_temp_free_i64(fp0);
6371 check_cp1_registers(ctx, fs | ft | fd);
6373 TCGv_i64 fp0 = tcg_temp_new_i64();
6374 TCGv_i64 fp1 = tcg_temp_new_i64();
6376 gen_load_fpr64(ctx, fp0, fs);
6377 gen_load_fpr64(ctx, fp1, ft);
6378 gen_helper_float_div_d(fp0, fp0, fp1);
6379 tcg_temp_free_i64(fp1);
6380 gen_store_fpr64(ctx, fp0, fd);
6381 tcg_temp_free_i64(fp0);
6387 check_cp1_registers(ctx, fs | fd);
6389 TCGv_i64 fp0 = tcg_temp_new_i64();
6391 gen_load_fpr64(ctx, fp0, fs);
6392 gen_helper_float_sqrt_d(fp0, fp0);
6393 gen_store_fpr64(ctx, fp0, fd);
6394 tcg_temp_free_i64(fp0);
6399 check_cp1_registers(ctx, fs | fd);
6401 TCGv_i64 fp0 = tcg_temp_new_i64();
6403 gen_load_fpr64(ctx, fp0, fs);
6404 gen_helper_float_abs_d(fp0, fp0);
6405 gen_store_fpr64(ctx, fp0, fd);
6406 tcg_temp_free_i64(fp0);
6411 check_cp1_registers(ctx, fs | fd);
6413 TCGv_i64 fp0 = tcg_temp_new_i64();
6415 gen_load_fpr64(ctx, fp0, fs);
6416 gen_store_fpr64(ctx, fp0, fd);
6417 tcg_temp_free_i64(fp0);
6422 check_cp1_registers(ctx, fs | fd);
6424 TCGv_i64 fp0 = tcg_temp_new_i64();
6426 gen_load_fpr64(ctx, fp0, fs);
6427 gen_helper_float_chs_d(fp0, fp0);
6428 gen_store_fpr64(ctx, fp0, fd);
6429 tcg_temp_free_i64(fp0);
6434 check_cp1_64bitmode(ctx);
6436 TCGv_i64 fp0 = tcg_temp_new_i64();
6438 gen_load_fpr64(ctx, fp0, fs);
6439 gen_helper_float_roundl_d(fp0, fp0);
6440 gen_store_fpr64(ctx, fp0, fd);
6441 tcg_temp_free_i64(fp0);
6446 check_cp1_64bitmode(ctx);
6448 TCGv_i64 fp0 = tcg_temp_new_i64();
6450 gen_load_fpr64(ctx, fp0, fs);
6451 gen_helper_float_truncl_d(fp0, fp0);
6452 gen_store_fpr64(ctx, fp0, fd);
6453 tcg_temp_free_i64(fp0);
6458 check_cp1_64bitmode(ctx);
6460 TCGv_i64 fp0 = tcg_temp_new_i64();
6462 gen_load_fpr64(ctx, fp0, fs);
6463 gen_helper_float_ceill_d(fp0, fp0);
6464 gen_store_fpr64(ctx, fp0, fd);
6465 tcg_temp_free_i64(fp0);
6470 check_cp1_64bitmode(ctx);
6472 TCGv_i64 fp0 = tcg_temp_new_i64();
6474 gen_load_fpr64(ctx, fp0, fs);
6475 gen_helper_float_floorl_d(fp0, fp0);
6476 gen_store_fpr64(ctx, fp0, fd);
6477 tcg_temp_free_i64(fp0);
6482 check_cp1_registers(ctx, fs);
6484 TCGv_i32 fp32 = tcg_temp_new_i32();
6485 TCGv_i64 fp64 = tcg_temp_new_i64();
6487 gen_load_fpr64(ctx, fp64, fs);
6488 gen_helper_float_roundw_d(fp32, fp64);
6489 tcg_temp_free_i64(fp64);
6490 gen_store_fpr32(fp32, fd);
6491 tcg_temp_free_i32(fp32);
6496 check_cp1_registers(ctx, fs);
6498 TCGv_i32 fp32 = tcg_temp_new_i32();
6499 TCGv_i64 fp64 = tcg_temp_new_i64();
6501 gen_load_fpr64(ctx, fp64, fs);
6502 gen_helper_float_truncw_d(fp32, fp64);
6503 tcg_temp_free_i64(fp64);
6504 gen_store_fpr32(fp32, fd);
6505 tcg_temp_free_i32(fp32);
6510 check_cp1_registers(ctx, fs);
6512 TCGv_i32 fp32 = tcg_temp_new_i32();
6513 TCGv_i64 fp64 = tcg_temp_new_i64();
6515 gen_load_fpr64(ctx, fp64, fs);
6516 gen_helper_float_ceilw_d(fp32, fp64);
6517 tcg_temp_free_i64(fp64);
6518 gen_store_fpr32(fp32, fd);
6519 tcg_temp_free_i32(fp32);
6524 check_cp1_registers(ctx, fs);
6526 TCGv_i32 fp32 = tcg_temp_new_i32();
6527 TCGv_i64 fp64 = tcg_temp_new_i64();
6529 gen_load_fpr64(ctx, fp64, fs);
6530 gen_helper_float_floorw_d(fp32, fp64);
6531 tcg_temp_free_i64(fp64);
6532 gen_store_fpr32(fp32, fd);
6533 tcg_temp_free_i32(fp32);
6538 gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6543 int l1 = gen_new_label();
6544 TCGv t0 = tcg_temp_new();
6545 TCGv_i64 fp0 = tcg_temp_local_new_i64();
6547 gen_load_gpr(t0, ft);
6548 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
6549 gen_load_fpr64(ctx, fp0, fs);
6550 gen_store_fpr64(ctx, fp0, fd);
6551 tcg_temp_free_i64(fp0);
6559 int l1 = gen_new_label();
6560 TCGv t0 = tcg_temp_new();
6561 TCGv_i64 fp0 = tcg_temp_local_new_i64();
6563 gen_load_gpr(t0, ft);
6564 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6565 gen_load_fpr64(ctx, fp0, fs);
6566 gen_store_fpr64(ctx, fp0, fd);
6567 tcg_temp_free_i64(fp0);
6574 check_cp1_64bitmode(ctx);
6576 TCGv_i64 fp0 = tcg_temp_new_i64();
6578 gen_load_fpr64(ctx, fp0, fs);
6579 gen_helper_float_recip_d(fp0, fp0);
6580 gen_store_fpr64(ctx, fp0, fd);
6581 tcg_temp_free_i64(fp0);
6586 check_cp1_64bitmode(ctx);
6588 TCGv_i64 fp0 = tcg_temp_new_i64();
6590 gen_load_fpr64(ctx, fp0, fs);
6591 gen_helper_float_rsqrt_d(fp0, fp0);
6592 gen_store_fpr64(ctx, fp0, fd);
6593 tcg_temp_free_i64(fp0);
6598 check_cp1_64bitmode(ctx);
6600 TCGv_i64 fp0 = tcg_temp_new_i64();
6601 TCGv_i64 fp1 = tcg_temp_new_i64();
6603 gen_load_fpr64(ctx, fp0, fs);
6604 gen_load_fpr64(ctx, fp1, ft);
6605 gen_helper_float_recip2_d(fp0, fp0, fp1);
6606 tcg_temp_free_i64(fp1);
6607 gen_store_fpr64(ctx, fp0, fd);
6608 tcg_temp_free_i64(fp0);
6613 check_cp1_64bitmode(ctx);
6615 TCGv_i64 fp0 = tcg_temp_new_i64();
6617 gen_load_fpr64(ctx, fp0, fs);
6618 gen_helper_float_recip1_d(fp0, fp0);
6619 gen_store_fpr64(ctx, fp0, fd);
6620 tcg_temp_free_i64(fp0);
6625 check_cp1_64bitmode(ctx);
6627 TCGv_i64 fp0 = tcg_temp_new_i64();
6629 gen_load_fpr64(ctx, fp0, fs);
6630 gen_helper_float_rsqrt1_d(fp0, fp0);
6631 gen_store_fpr64(ctx, fp0, fd);
6632 tcg_temp_free_i64(fp0);
6637 check_cp1_64bitmode(ctx);
6639 TCGv_i64 fp0 = tcg_temp_new_i64();
6640 TCGv_i64 fp1 = tcg_temp_new_i64();
6642 gen_load_fpr64(ctx, fp0, fs);
6643 gen_load_fpr64(ctx, fp1, ft);
6644 gen_helper_float_rsqrt2_d(fp0, fp0, fp1);
6645 tcg_temp_free_i64(fp1);
6646 gen_store_fpr64(ctx, fp0, fd);
6647 tcg_temp_free_i64(fp0);
6668 TCGv_i64 fp0 = tcg_temp_new_i64();
6669 TCGv_i64 fp1 = tcg_temp_new_i64();
6671 gen_load_fpr64(ctx, fp0, fs);
6672 gen_load_fpr64(ctx, fp1, ft);
6673 if (ctx->opcode & (1 << 6)) {
6675 check_cp1_registers(ctx, fs | ft);
6676 gen_cmpabs_d(func-48, fp0, fp1, cc);
6677 opn = condnames_abs[func-48];
6679 check_cp1_registers(ctx, fs | ft);
6680 gen_cmp_d(func-48, fp0, fp1, cc);
6681 opn = condnames[func-48];
6683 tcg_temp_free_i64(fp0);
6684 tcg_temp_free_i64(fp1);
6688 check_cp1_registers(ctx, fs);
6690 TCGv_i32 fp32 = tcg_temp_new_i32();
6691 TCGv_i64 fp64 = tcg_temp_new_i64();
6693 gen_load_fpr64(ctx, fp64, fs);
6694 gen_helper_float_cvts_d(fp32, fp64);
6695 tcg_temp_free_i64(fp64);
6696 gen_store_fpr32(fp32, fd);
6697 tcg_temp_free_i32(fp32);
6702 check_cp1_registers(ctx, fs);
6704 TCGv_i32 fp32 = tcg_temp_new_i32();
6705 TCGv_i64 fp64 = tcg_temp_new_i64();
6707 gen_load_fpr64(ctx, fp64, fs);
6708 gen_helper_float_cvtw_d(fp32, fp64);
6709 tcg_temp_free_i64(fp64);
6710 gen_store_fpr32(fp32, fd);
6711 tcg_temp_free_i32(fp32);
6716 check_cp1_64bitmode(ctx);
6718 TCGv_i64 fp0 = tcg_temp_new_i64();
6720 gen_load_fpr64(ctx, fp0, fs);
6721 gen_helper_float_cvtl_d(fp0, fp0);
6722 gen_store_fpr64(ctx, fp0, fd);
6723 tcg_temp_free_i64(fp0);
6729 TCGv_i32 fp0 = tcg_temp_new_i32();
6731 gen_load_fpr32(fp0, fs);
6732 gen_helper_float_cvts_w(fp0, fp0);
6733 gen_store_fpr32(fp0, fd);
6734 tcg_temp_free_i32(fp0);
6739 check_cp1_registers(ctx, fd);
6741 TCGv_i32 fp32 = tcg_temp_new_i32();
6742 TCGv_i64 fp64 = tcg_temp_new_i64();
6744 gen_load_fpr32(fp32, fs);
6745 gen_helper_float_cvtd_w(fp64, fp32);
6746 tcg_temp_free_i32(fp32);
6747 gen_store_fpr64(ctx, fp64, fd);
6748 tcg_temp_free_i64(fp64);
6753 check_cp1_64bitmode(ctx);
6755 TCGv_i32 fp32 = tcg_temp_new_i32();
6756 TCGv_i64 fp64 = tcg_temp_new_i64();
6758 gen_load_fpr64(ctx, fp64, fs);
6759 gen_helper_float_cvts_l(fp32, fp64);
6760 tcg_temp_free_i64(fp64);
6761 gen_store_fpr32(fp32, fd);
6762 tcg_temp_free_i32(fp32);
6767 check_cp1_64bitmode(ctx);
6769 TCGv_i64 fp0 = tcg_temp_new_i64();
6771 gen_load_fpr64(ctx, fp0, fs);
6772 gen_helper_float_cvtd_l(fp0, fp0);
6773 gen_store_fpr64(ctx, fp0, fd);
6774 tcg_temp_free_i64(fp0);
6779 check_cp1_64bitmode(ctx);
6781 TCGv_i64 fp0 = tcg_temp_new_i64();
6783 gen_load_fpr64(ctx, fp0, fs);
6784 gen_helper_float_cvtps_pw(fp0, fp0);
6785 gen_store_fpr64(ctx, fp0, fd);
6786 tcg_temp_free_i64(fp0);
6791 check_cp1_64bitmode(ctx);
6793 TCGv_i64 fp0 = tcg_temp_new_i64();
6794 TCGv_i64 fp1 = tcg_temp_new_i64();
6796 gen_load_fpr64(ctx, fp0, fs);
6797 gen_load_fpr64(ctx, fp1, ft);
6798 gen_helper_float_add_ps(fp0, fp0, fp1);
6799 tcg_temp_free_i64(fp1);
6800 gen_store_fpr64(ctx, fp0, fd);
6801 tcg_temp_free_i64(fp0);
6806 check_cp1_64bitmode(ctx);
6808 TCGv_i64 fp0 = tcg_temp_new_i64();
6809 TCGv_i64 fp1 = tcg_temp_new_i64();
6811 gen_load_fpr64(ctx, fp0, fs);
6812 gen_load_fpr64(ctx, fp1, ft);
6813 gen_helper_float_sub_ps(fp0, fp0, fp1);
6814 tcg_temp_free_i64(fp1);
6815 gen_store_fpr64(ctx, fp0, fd);
6816 tcg_temp_free_i64(fp0);
6821 check_cp1_64bitmode(ctx);
6823 TCGv_i64 fp0 = tcg_temp_new_i64();
6824 TCGv_i64 fp1 = tcg_temp_new_i64();
6826 gen_load_fpr64(ctx, fp0, fs);
6827 gen_load_fpr64(ctx, fp1, ft);
6828 gen_helper_float_mul_ps(fp0, fp0, fp1);
6829 tcg_temp_free_i64(fp1);
6830 gen_store_fpr64(ctx, fp0, fd);
6831 tcg_temp_free_i64(fp0);
6836 check_cp1_64bitmode(ctx);
6838 TCGv_i64 fp0 = tcg_temp_new_i64();
6840 gen_load_fpr64(ctx, fp0, fs);
6841 gen_helper_float_abs_ps(fp0, fp0);
6842 gen_store_fpr64(ctx, fp0, fd);
6843 tcg_temp_free_i64(fp0);
6848 check_cp1_64bitmode(ctx);
6850 TCGv_i64 fp0 = tcg_temp_new_i64();
6852 gen_load_fpr64(ctx, fp0, fs);
6853 gen_store_fpr64(ctx, fp0, fd);
6854 tcg_temp_free_i64(fp0);
6859 check_cp1_64bitmode(ctx);
6861 TCGv_i64 fp0 = tcg_temp_new_i64();
6863 gen_load_fpr64(ctx, fp0, fs);
6864 gen_helper_float_chs_ps(fp0, fp0);
6865 gen_store_fpr64(ctx, fp0, fd);
6866 tcg_temp_free_i64(fp0);
6871 check_cp1_64bitmode(ctx);
6872 gen_movcf_ps(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6876 check_cp1_64bitmode(ctx);
6878 int l1 = gen_new_label();
6879 TCGv t0 = tcg_temp_new();
6880 TCGv_i32 fp0 = tcg_temp_local_new_i32();
6881 TCGv_i32 fph0 = tcg_temp_local_new_i32();
6883 gen_load_gpr(t0, ft);
6884 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
6885 gen_load_fpr32(fp0, fs);
6886 gen_load_fpr32h(fph0, fs);
6887 gen_store_fpr32(fp0, fd);
6888 gen_store_fpr32h(fph0, fd);
6889 tcg_temp_free_i32(fp0);
6890 tcg_temp_free_i32(fph0);
6897 check_cp1_64bitmode(ctx);
6899 int l1 = gen_new_label();
6900 TCGv t0 = tcg_temp_new();
6901 TCGv_i32 fp0 = tcg_temp_local_new_i32();
6902 TCGv_i32 fph0 = tcg_temp_local_new_i32();
6904 gen_load_gpr(t0, ft);
6905 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6906 gen_load_fpr32(fp0, fs);
6907 gen_load_fpr32h(fph0, fs);
6908 gen_store_fpr32(fp0, fd);
6909 gen_store_fpr32h(fph0, fd);
6910 tcg_temp_free_i32(fp0);
6911 tcg_temp_free_i32(fph0);
6918 check_cp1_64bitmode(ctx);
6920 TCGv_i64 fp0 = tcg_temp_new_i64();
6921 TCGv_i64 fp1 = tcg_temp_new_i64();
6923 gen_load_fpr64(ctx, fp0, ft);
6924 gen_load_fpr64(ctx, fp1, fs);
6925 gen_helper_float_addr_ps(fp0, fp0, fp1);
6926 tcg_temp_free_i64(fp1);
6927 gen_store_fpr64(ctx, fp0, fd);
6928 tcg_temp_free_i64(fp0);
6933 check_cp1_64bitmode(ctx);
6935 TCGv_i64 fp0 = tcg_temp_new_i64();
6936 TCGv_i64 fp1 = tcg_temp_new_i64();
6938 gen_load_fpr64(ctx, fp0, ft);
6939 gen_load_fpr64(ctx, fp1, fs);
6940 gen_helper_float_mulr_ps(fp0, fp0, fp1);
6941 tcg_temp_free_i64(fp1);
6942 gen_store_fpr64(ctx, fp0, fd);
6943 tcg_temp_free_i64(fp0);
6948 check_cp1_64bitmode(ctx);
6950 TCGv_i64 fp0 = tcg_temp_new_i64();
6951 TCGv_i64 fp1 = tcg_temp_new_i64();
6953 gen_load_fpr64(ctx, fp0, fs);
6954 gen_load_fpr64(ctx, fp1, fd);
6955 gen_helper_float_recip2_ps(fp0, fp0, fp1);
6956 tcg_temp_free_i64(fp1);
6957 gen_store_fpr64(ctx, fp0, fd);
6958 tcg_temp_free_i64(fp0);
6963 check_cp1_64bitmode(ctx);
6965 TCGv_i64 fp0 = tcg_temp_new_i64();
6967 gen_load_fpr64(ctx, fp0, fs);
6968 gen_helper_float_recip1_ps(fp0, fp0);
6969 gen_store_fpr64(ctx, fp0, fd);
6970 tcg_temp_free_i64(fp0);
6975 check_cp1_64bitmode(ctx);
6977 TCGv_i64 fp0 = tcg_temp_new_i64();
6979 gen_load_fpr64(ctx, fp0, fs);
6980 gen_helper_float_rsqrt1_ps(fp0, fp0);
6981 gen_store_fpr64(ctx, fp0, fd);
6982 tcg_temp_free_i64(fp0);
6987 check_cp1_64bitmode(ctx);
6989 TCGv_i64 fp0 = tcg_temp_new_i64();
6990 TCGv_i64 fp1 = tcg_temp_new_i64();
6992 gen_load_fpr64(ctx, fp0, fs);
6993 gen_load_fpr64(ctx, fp1, ft);
6994 gen_helper_float_rsqrt2_ps(fp0, fp0, fp1);
6995 tcg_temp_free_i64(fp1);
6996 gen_store_fpr64(ctx, fp0, fd);
6997 tcg_temp_free_i64(fp0);
7002 check_cp1_64bitmode(ctx);
7004 TCGv_i32 fp0 = tcg_temp_new_i32();
7006 gen_load_fpr32h(fp0, fs);
7007 gen_helper_float_cvts_pu(fp0, fp0);
7008 gen_store_fpr32(fp0, fd);
7009 tcg_temp_free_i32(fp0);
7014 check_cp1_64bitmode(ctx);
7016 TCGv_i64 fp0 = tcg_temp_new_i64();
7018 gen_load_fpr64(ctx, fp0, fs);
7019 gen_helper_float_cvtpw_ps(fp0, fp0);
7020 gen_store_fpr64(ctx, fp0, fd);
7021 tcg_temp_free_i64(fp0);
7026 check_cp1_64bitmode(ctx);
7028 TCGv_i32 fp0 = tcg_temp_new_i32();
7030 gen_load_fpr32(fp0, fs);
7031 gen_helper_float_cvts_pl(fp0, fp0);
7032 gen_store_fpr32(fp0, fd);
7033 tcg_temp_free_i32(fp0);
7038 check_cp1_64bitmode(ctx);
7040 TCGv_i32 fp0 = tcg_temp_new_i32();
7041 TCGv_i32 fp1 = tcg_temp_new_i32();
7043 gen_load_fpr32(fp0, fs);
7044 gen_load_fpr32(fp1, ft);
7045 gen_store_fpr32h(fp0, fd);
7046 gen_store_fpr32(fp1, fd);
7047 tcg_temp_free_i32(fp0);
7048 tcg_temp_free_i32(fp1);
7053 check_cp1_64bitmode(ctx);
7055 TCGv_i32 fp0 = tcg_temp_new_i32();
7056 TCGv_i32 fp1 = tcg_temp_new_i32();
7058 gen_load_fpr32(fp0, fs);
7059 gen_load_fpr32h(fp1, ft);
7060 gen_store_fpr32(fp1, fd);
7061 gen_store_fpr32h(fp0, fd);
7062 tcg_temp_free_i32(fp0);
7063 tcg_temp_free_i32(fp1);
7068 check_cp1_64bitmode(ctx);
7070 TCGv_i32 fp0 = tcg_temp_new_i32();
7071 TCGv_i32 fp1 = tcg_temp_new_i32();
7073 gen_load_fpr32h(fp0, fs);
7074 gen_load_fpr32(fp1, ft);
7075 gen_store_fpr32(fp1, fd);
7076 gen_store_fpr32h(fp0, fd);
7077 tcg_temp_free_i32(fp0);
7078 tcg_temp_free_i32(fp1);
7083 check_cp1_64bitmode(ctx);
7085 TCGv_i32 fp0 = tcg_temp_new_i32();
7086 TCGv_i32 fp1 = tcg_temp_new_i32();
7088 gen_load_fpr32h(fp0, fs);
7089 gen_load_fpr32h(fp1, ft);
7090 gen_store_fpr32(fp1, fd);
7091 gen_store_fpr32h(fp0, fd);
7092 tcg_temp_free_i32(fp0);
7093 tcg_temp_free_i32(fp1);
7113 check_cp1_64bitmode(ctx);
7115 TCGv_i64 fp0 = tcg_temp_new_i64();
7116 TCGv_i64 fp1 = tcg_temp_new_i64();
7118 gen_load_fpr64(ctx, fp0, fs);
7119 gen_load_fpr64(ctx, fp1, ft);
7120 if (ctx->opcode & (1 << 6)) {
7121 gen_cmpabs_ps(func-48, fp0, fp1, cc);
7122 opn = condnames_abs[func-48];
7123 } else {
7124 gen_cmp_ps(func-48, fp0, fp1, cc);
7125 opn = condnames[func-48];
7127 tcg_temp_free_i64(fp0);
7128 tcg_temp_free_i64(fp1);
7133 generate_exception (ctx, EXCP_RI);
7138 MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);
7141 MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);
7144 MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
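/* Note: every arithmetic case above follows the same TCG pattern:
   allocate i32/i64 temporaries, load the source FPRs with
   gen_load_fpr32/gen_load_fpr64, call the matching gen_helper_float_*
   routine, store the result back with gen_store_fpr32/gen_store_fpr64
   and free the temporaries.  check_cp1_64bitmode() guards formats that
   need a 64-bit FPU, check_cp1_registers() the even/odd pairing rules
   of a 32-bit register file. */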
7149 /* Coprocessor 3 (FPU) */
7150 static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
7151 int fd, int fs, int base, int index)
7153 const char *opn = "extended float load/store";
7155 TCGv t0 = tcg_temp_new();
7156 TCGv t1 = tcg_temp_new();
7158 if (base == 0) {
7159 gen_load_gpr(t0, index);
7160 } else if (index == 0) {
7161 gen_load_gpr(t0, base);
7162 } else {
7163 gen_load_gpr(t0, index);
7164 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
7166 /* Don't do NOP if destination is zero: we must perform the actual
7167 memory access. */
7168 save_cpu_state(ctx, 0);
7173 TCGv_i32 fp0 = tcg_temp_new_i32();
7175 tcg_gen_qemu_ld32s(t1, t0, ctx->mem_idx);
7176 tcg_gen_trunc_tl_i32(fp0, t1);
7177 gen_store_fpr32(fp0, fd);
7178 tcg_temp_free_i32(fp0);
7184 check_cp1_registers(ctx, fd);
7186 TCGv_i64 fp0 = tcg_temp_new_i64();
7188 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7189 gen_store_fpr64(ctx, fp0, fd);
7190 tcg_temp_free_i64(fp0);
7195 check_cp1_64bitmode(ctx);
7196 tcg_gen_andi_tl(t0, t0, ~0x7);
7198 TCGv_i64 fp0 = tcg_temp_new_i64();
7200 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7201 gen_store_fpr64(ctx, fp0, fd);
7202 tcg_temp_free_i64(fp0);
7209 TCGv_i32 fp0 = tcg_temp_new_i32();
7211 gen_load_fpr32(fp0, fs);
7212 tcg_gen_extu_i32_tl(t1, fp0);
7213 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
7214 tcg_temp_free_i32(fp0);
7221 check_cp1_registers(ctx, fs);
7223 TCGv_i64 fp0 = tcg_temp_new_i64();
7225 gen_load_fpr64(ctx, fp0, fs);
7226 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7227 tcg_temp_free_i64(fp0);
7233 check_cp1_64bitmode(ctx);
7234 tcg_gen_andi_tl(t0, t0, ~0x7);
7236 TCGv_i64 fp0 = tcg_temp_new_i64();
7238 gen_load_fpr64(ctx, fp0, fs);
7239 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7240 tcg_temp_free_i64(fp0);
7248 MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
7249 regnames[index], regnames[base]);
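/* Note: gen_flt3_ldst() handles the COP1X indexed loads/stores
   (LWXC1, LDXC1, LUXC1, SWXC1, SDXC1, SUXC1).  The effective address
   is GPR[base] + GPR[index], with either term dropped when its
   register number is 0; the LUXC1/SUXC1 forms also clear the low three
   address bits (tcg_gen_andi_tl(t0, t0, ~0x7)) so the 64-bit access is
   always aligned. */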
7252 static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
7253 int fd, int fr, int fs, int ft)
7255 const char *opn = "flt3_arith";
7259 check_cp1_64bitmode(ctx);
7261 TCGv t0 = tcg_temp_local_new();
7262 TCGv_i32 fp = tcg_temp_new_i32();
7263 TCGv_i32 fph = tcg_temp_new_i32();
7264 int l1 = gen_new_label();
7265 int l2 = gen_new_label();
7267 gen_load_gpr(t0, fr);
7268 tcg_gen_andi_tl(t0, t0, 0x7);
7270 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
7271 gen_load_fpr32(fp, fs);
7272 gen_load_fpr32h(fph, fs);
7273 gen_store_fpr32(fp, fd);
7274 gen_store_fpr32h(fph, fd);
7277 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
7279 #ifdef TARGET_WORDS_BIGENDIAN
7280 gen_load_fpr32(fp, fs);
7281 gen_load_fpr32h(fph, ft);
7282 gen_store_fpr32h(fp, fd);
7283 gen_store_fpr32(fph, fd);
7284 #else
7285 gen_load_fpr32h(fph, fs);
7286 gen_load_fpr32(fp, ft);
7287 gen_store_fpr32(fph, fd);
7288 gen_store_fpr32h(fp, fd);
7291 tcg_temp_free_i32(fp);
7292 tcg_temp_free_i32(fph);
7299 TCGv_i32 fp0 = tcg_temp_new_i32();
7300 TCGv_i32 fp1 = tcg_temp_new_i32();
7301 TCGv_i32 fp2 = tcg_temp_new_i32();
7303 gen_load_fpr32(fp0, fs);
7304 gen_load_fpr32(fp1, ft);
7305 gen_load_fpr32(fp2, fr);
7306 gen_helper_float_muladd_s(fp2, fp0, fp1, fp2);
7307 tcg_temp_free_i32(fp0);
7308 tcg_temp_free_i32(fp1);
7309 gen_store_fpr32(fp2, fd);
7310 tcg_temp_free_i32(fp2);
7316 check_cp1_registers(ctx, fd | fs | ft | fr);
7318 TCGv_i64 fp0 = tcg_temp_new_i64();
7319 TCGv_i64 fp1 = tcg_temp_new_i64();
7320 TCGv_i64 fp2 = tcg_temp_new_i64();
7322 gen_load_fpr64(ctx, fp0, fs);
7323 gen_load_fpr64(ctx, fp1, ft);
7324 gen_load_fpr64(ctx, fp2, fr);
7325 gen_helper_float_muladd_d(fp2, fp0, fp1, fp2);
7326 tcg_temp_free_i64(fp0);
7327 tcg_temp_free_i64(fp1);
7328 gen_store_fpr64(ctx, fp2, fd);
7329 tcg_temp_free_i64(fp2);
7334 check_cp1_64bitmode(ctx);
7336 TCGv_i64 fp0 = tcg_temp_new_i64();
7337 TCGv_i64 fp1 = tcg_temp_new_i64();
7338 TCGv_i64 fp2 = tcg_temp_new_i64();
7340 gen_load_fpr64(ctx, fp0, fs);
7341 gen_load_fpr64(ctx, fp1, ft);
7342 gen_load_fpr64(ctx, fp2, fr);
7343 gen_helper_float_muladd_ps(fp2, fp0, fp1, fp2);
7344 tcg_temp_free_i64(fp0);
7345 tcg_temp_free_i64(fp1);
7346 gen_store_fpr64(ctx, fp2, fd);
7347 tcg_temp_free_i64(fp2);
7354 TCGv_i32 fp0 = tcg_temp_new_i32();
7355 TCGv_i32 fp1 = tcg_temp_new_i32();
7356 TCGv_i32 fp2 = tcg_temp_new_i32();
7358 gen_load_fpr32(fp0, fs);
7359 gen_load_fpr32(fp1, ft);
7360 gen_load_fpr32(fp2, fr);
7361 gen_helper_float_mulsub_s(fp2, fp0, fp1, fp2);
7362 tcg_temp_free_i32(fp0);
7363 tcg_temp_free_i32(fp1);
7364 gen_store_fpr32(fp2, fd);
7365 tcg_temp_free_i32(fp2);
7371 check_cp1_registers(ctx, fd | fs | ft | fr);
7373 TCGv_i64 fp0 = tcg_temp_new_i64();
7374 TCGv_i64 fp1 = tcg_temp_new_i64();
7375 TCGv_i64 fp2 = tcg_temp_new_i64();
7377 gen_load_fpr64(ctx, fp0, fs);
7378 gen_load_fpr64(ctx, fp1, ft);
7379 gen_load_fpr64(ctx, fp2, fr);
7380 gen_helper_float_mulsub_d(fp2, fp0, fp1, fp2);
7381 tcg_temp_free_i64(fp0);
7382 tcg_temp_free_i64(fp1);
7383 gen_store_fpr64(ctx, fp2, fd);
7384 tcg_temp_free_i64(fp2);
7389 check_cp1_64bitmode(ctx);
7391 TCGv_i64 fp0 = tcg_temp_new_i64();
7392 TCGv_i64 fp1 = tcg_temp_new_i64();
7393 TCGv_i64 fp2 = tcg_temp_new_i64();
7395 gen_load_fpr64(ctx, fp0, fs);
7396 gen_load_fpr64(ctx, fp1, ft);
7397 gen_load_fpr64(ctx, fp2, fr);
7398 gen_helper_float_mulsub_ps(fp2, fp0, fp1, fp2);
7399 tcg_temp_free_i64(fp0);
7400 tcg_temp_free_i64(fp1);
7401 gen_store_fpr64(ctx, fp2, fd);
7402 tcg_temp_free_i64(fp2);
7409 TCGv_i32 fp0 = tcg_temp_new_i32();
7410 TCGv_i32 fp1 = tcg_temp_new_i32();
7411 TCGv_i32 fp2 = tcg_temp_new_i32();
7413 gen_load_fpr32(fp0, fs);
7414 gen_load_fpr32(fp1, ft);
7415 gen_load_fpr32(fp2, fr);
7416 gen_helper_float_nmuladd_s(fp2, fp0, fp1, fp2);
7417 tcg_temp_free_i32(fp0);
7418 tcg_temp_free_i32(fp1);
7419 gen_store_fpr32(fp2, fd);
7420 tcg_temp_free_i32(fp2);
7426 check_cp1_registers(ctx, fd | fs | ft | fr);
7428 TCGv_i64 fp0 = tcg_temp_new_i64();
7429 TCGv_i64 fp1 = tcg_temp_new_i64();
7430 TCGv_i64 fp2 = tcg_temp_new_i64();
7432 gen_load_fpr64(ctx, fp0, fs);
7433 gen_load_fpr64(ctx, fp1, ft);
7434 gen_load_fpr64(ctx, fp2, fr);
7435 gen_helper_float_nmuladd_d(fp2, fp0, fp1, fp2);
7436 tcg_temp_free_i64(fp0);
7437 tcg_temp_free_i64(fp1);
7438 gen_store_fpr64(ctx, fp2, fd);
7439 tcg_temp_free_i64(fp2);
7444 check_cp1_64bitmode(ctx);
7446 TCGv_i64 fp0 = tcg_temp_new_i64();
7447 TCGv_i64 fp1 = tcg_temp_new_i64();
7448 TCGv_i64 fp2 = tcg_temp_new_i64();
7450 gen_load_fpr64(ctx, fp0, fs);
7451 gen_load_fpr64(ctx, fp1, ft);
7452 gen_load_fpr64(ctx, fp2, fr);
7453 gen_helper_float_nmuladd_ps(fp2, fp0, fp1, fp2);
7454 tcg_temp_free_i64(fp0);
7455 tcg_temp_free_i64(fp1);
7456 gen_store_fpr64(ctx, fp2, fd);
7457 tcg_temp_free_i64(fp2);
7464 TCGv_i32 fp0 = tcg_temp_new_i32();
7465 TCGv_i32 fp1 = tcg_temp_new_i32();
7466 TCGv_i32 fp2 = tcg_temp_new_i32();
7468 gen_load_fpr32(fp0, fs);
7469 gen_load_fpr32(fp1, ft);
7470 gen_load_fpr32(fp2, fr);
7471 gen_helper_float_nmulsub_s(fp2, fp0, fp1, fp2);
7472 tcg_temp_free_i32(fp0);
7473 tcg_temp_free_i32(fp1);
7474 gen_store_fpr32(fp2, fd);
7475 tcg_temp_free_i32(fp2);
7481 check_cp1_registers(ctx, fd | fs | ft | fr);
7483 TCGv_i64 fp0 = tcg_temp_new_i64();
7484 TCGv_i64 fp1 = tcg_temp_new_i64();
7485 TCGv_i64 fp2 = tcg_temp_new_i64();
7487 gen_load_fpr64(ctx, fp0, fs);
7488 gen_load_fpr64(ctx, fp1, ft);
7489 gen_load_fpr64(ctx, fp2, fr);
7490 gen_helper_float_nmulsub_d(fp2, fp0, fp1, fp2);
7491 tcg_temp_free_i64(fp0);
7492 tcg_temp_free_i64(fp1);
7493 gen_store_fpr64(ctx, fp2, fd);
7494 tcg_temp_free_i64(fp2);
7499 check_cp1_64bitmode(ctx);
7501 TCGv_i64 fp0 = tcg_temp_new_i64();
7502 TCGv_i64 fp1 = tcg_temp_new_i64();
7503 TCGv_i64 fp2 = tcg_temp_new_i64();
7505 gen_load_fpr64(ctx, fp0, fs);
7506 gen_load_fpr64(ctx, fp1, ft);
7507 gen_load_fpr64(ctx, fp2, fr);
7508 gen_helper_float_nmulsub_ps(fp2, fp0, fp1, fp2);
7509 tcg_temp_free_i64(fp0);
7510 tcg_temp_free_i64(fp1);
7511 gen_store_fpr64(ctx, fp2, fd);
7512 tcg_temp_free_i64(fp2);
7518 generate_exception (ctx, EXCP_RI);
7521 MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
7522 fregnames[fs], fregnames[ft]);
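/* Note: apart from ALNV.PS, every gen_flt3_arith() case is one of the
   MIPS IV fused operations (MADD, MSUB, NMADD, NMSUB in .S, .D and .PS
   format): fr, fs and ft are loaded into temporaries, the matching
   gen_helper_float_(n)mul{add,sub}_* helper combines them, and the
   result is written back to fd. */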
7525 /* ISA extensions (ASEs) */
7526 /* MIPS16 extension to MIPS32 */
7527 /* SmartMIPS extension to MIPS32 */
7529 #if defined(TARGET_MIPS64)
7531 /* MDMX extension to MIPS64 */
7535 static void decode_opc (CPUState *env, DisasContext *ctx)
7539 uint32_t op, op1, op2;
7542 /* make sure instructions are on a word boundary */
7543 if (ctx->pc & 0x3) {
7544 env->CP0_BadVAddr = ctx->pc;
7545 generate_exception(ctx, EXCP_AdEL);
7549 /* Handle blikely not taken case */
7550 if ((ctx->hflags & MIPS_HFLAG_BMASK) == MIPS_HFLAG_BL) {
7551 int l1 = gen_new_label();
7553 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
7554 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
7555 tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
7556 gen_goto_tb(ctx, 1, ctx->pc + 4);
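/* Note: for branch-likely opcodes the delay slot is executed only when
   the branch is taken, so when bcond turned out to be 0 the code above
   clears the branch hflags and jumps straight to ctx->pc + 4, skipping
   the annulled delay slot instruction at ctx->pc. */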
7559 op = MASK_OP_MAJOR(ctx->opcode);
7560 rs = (ctx->opcode >> 21) & 0x1f;
7561 rt = (ctx->opcode >> 16) & 0x1f;
7562 rd = (ctx->opcode >> 11) & 0x1f;
7563 sa = (ctx->opcode >> 6) & 0x1f;
7564 imm = (int16_t)ctx->opcode;
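/* Note: this is the standard MIPS field split: op in bits 31:26,
   rs in 25:21, rt in 20:16, rd in 15:11, sa in 10:6, with the 16-bit
   immediate sign-extended into imm.  For example the word 0x24420001
   (addiu v0, v0, 1) decodes to op = 0x09 << 26, rs = 2, rt = 2,
   imm = 1. */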
7567 op1 = MASK_SPECIAL(ctx->opcode);
7569 case OPC_SLL: /* Arithmetic with immediate */
7570 case OPC_SRL ... OPC_SRA:
7571 gen_arith_imm(env, ctx, op1, rd, rt, sa);
7573 case OPC_MOVN: /* Conditional move */
7575 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7576 gen_cond_move(env, op1, rd, rs, rt);
7578 case OPC_ADD ... OPC_SUBU:
7579 gen_arith(env, ctx, op1, rd, rs, rt);
7581 case OPC_SLLV: /* Shifts */
7584 gen_shift(env, ctx, op1, rd, rs, rt);
7586 case OPC_SLT: /* Set on less than */
7588 gen_slt(env, op1, rd, rs, rt);
7590 case OPC_AND: /* Logic */
7594 gen_logic(env, op1, rd, rs, rt);
7596 case OPC_MULT ... OPC_DIVU:
7598 check_insn(env, ctx, INSN_VR54XX);
7599 op1 = MASK_MUL_VR54XX(ctx->opcode);
7600 gen_mul_vr54xx(ctx, op1, rd, rs, rt);
7602 gen_muldiv(ctx, op1, rs, rt);
7604 case OPC_JR ... OPC_JALR:
7605 gen_compute_branch(ctx, op1, rs, rd, sa);
7607 case OPC_TGE ... OPC_TEQ: /* Traps */
7609 gen_trap(ctx, op1, rs, rt, -1);
7611 case OPC_MFHI: /* Move from HI/LO */
7613 gen_HILO(ctx, op1, rd);
7616 case OPC_MTLO: /* Move to HI/LO */
7617 gen_HILO(ctx, op1, rs);
7619 case OPC_PMON: /* Pmon entry point, also R4010 selsl */
7620 #ifdef MIPS_STRICT_STANDARD
7621 MIPS_INVAL("PMON / selsl");
7622 generate_exception(ctx, EXCP_RI);
7623 #else
7624 gen_helper_0i(pmon, sa);
7628 generate_exception(ctx, EXCP_SYSCALL);
7631 generate_exception(ctx, EXCP_BREAK);
7634 #ifdef MIPS_STRICT_STANDARD
7636 generate_exception(ctx, EXCP_RI);
7637 #else
7638 /* Implemented as RI exception for now. */
7639 MIPS_INVAL("spim (unofficial)");
7640 generate_exception(ctx, EXCP_RI);
7648 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7649 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7650 check_cp1_enabled(ctx);
7651 gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
7652 (ctx->opcode >> 16) & 1);
7654 generate_exception_err(ctx, EXCP_CpU, 1);
7658 #if defined(TARGET_MIPS64)
7659 /* MIPS64 specific opcodes */
7661 case OPC_DSRL ... OPC_DSRA:
7663 case OPC_DSRL32 ... OPC_DSRA32:
7664 check_insn(env, ctx, ISA_MIPS3);
7666 gen_arith_imm(env, ctx, op1, rd, rt, sa);
7668 case OPC_DADD ... OPC_DSUBU:
7669 check_insn(env, ctx, ISA_MIPS3);
7671 gen_arith(env, ctx, op1, rd, rs, rt);
7676 check_insn(env, ctx, ISA_MIPS3);
7678 gen_shift(env, ctx, op1, rd, rs, rt);
7680 case OPC_DMULT ... OPC_DDIVU:
7681 check_insn(env, ctx, ISA_MIPS3);
7683 gen_muldiv(ctx, op1, rs, rt);
7686 default: /* Invalid */
7687 MIPS_INVAL("special");
7688 generate_exception(ctx, EXCP_RI);
7693 op1 = MASK_SPECIAL2(ctx->opcode);
7695 case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
7696 case OPC_MSUB ... OPC_MSUBU:
7697 check_insn(env, ctx, ISA_MIPS32);
7698 gen_muldiv(ctx, op1, rs, rt);
7701 gen_arith(env, ctx, op1, rd, rs, rt);
7705 check_insn(env, ctx, ISA_MIPS32);
7706 gen_cl(ctx, op1, rd, rs);
7709 /* XXX: not clear which exception should be raised
7710 * when in debug mode...
7711 */
7712 check_insn(env, ctx, ISA_MIPS32);
7713 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
7714 generate_exception(ctx, EXCP_DBp);
7715 } else {
7716 generate_exception(ctx, EXCP_DBp);
7720 #if defined(TARGET_MIPS64)
7723 check_insn(env, ctx, ISA_MIPS64);
7725 gen_cl(ctx, op1, rd, rs);
7728 default: /* Invalid */
7729 MIPS_INVAL("special2");
7730 generate_exception(ctx, EXCP_RI);
7735 op1 = MASK_SPECIAL3(ctx->opcode);
7739 check_insn(env, ctx, ISA_MIPS32R2);
7740 gen_bitops(ctx, op1, rt, rs, sa, rd);
7743 check_insn(env, ctx, ISA_MIPS32R2);
7744 op2 = MASK_BSHFL(ctx->opcode);
7745 gen_bshfl(ctx, op2, rt, rd);
7748 check_insn(env, ctx, ISA_MIPS32R2);
7750 TCGv t0 = tcg_temp_new();
7754 save_cpu_state(ctx, 1);
7755 gen_helper_rdhwr_cpunum(t0);
7756 gen_store_gpr(t0, rt);
7759 save_cpu_state(ctx, 1);
7760 gen_helper_rdhwr_synci_step(t0);
7761 gen_store_gpr(t0, rt);
7764 save_cpu_state(ctx, 1);
7765 gen_helper_rdhwr_cc(t0);
7766 gen_store_gpr(t0, rt);
7769 save_cpu_state(ctx, 1);
7770 gen_helper_rdhwr_ccres(t0);
7771 gen_store_gpr(t0, rt);
7774 #if defined(CONFIG_USER_ONLY)
7775 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, tls_value));
7776 gen_store_gpr(t0, rt);
7779 /* XXX: Some CPUs implement this in hardware.
7780 Not supported yet. */
7782 default: /* Invalid */
7783 MIPS_INVAL("rdhwr");
7784 generate_exception(ctx, EXCP_RI);
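/* Note: the RDHWR cases above map hardware register numbers to
   helpers: 0 = CPUNum, 1 = SYNCI_Step, 2 = CC (cycle counter),
   3 = CCRes.  The user-local register (29) is read directly from
   tls_value in user-mode emulation; in system mode it falls through to
   the reserved instruction exception so the OS can emulate it. */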
7791 check_insn(env, ctx, ASE_MT);
7793 TCGv t0 = tcg_temp_new();
7794 TCGv t1 = tcg_temp_new();
7796 gen_load_gpr(t0, rt);
7797 gen_load_gpr(t1, rs);
7798 gen_helper_fork(t0, t1);
7804 check_insn(env, ctx, ASE_MT);
7806 TCGv t0 = tcg_temp_new();
7808 save_cpu_state(ctx, 1);
7809 gen_load_gpr(t0, rs);
7810 gen_helper_yield(t0, t0);
7811 gen_store_gpr(t0, rd);
7815 #if defined(TARGET_MIPS64)
7816 case OPC_DEXTM ... OPC_DEXT:
7817 case OPC_DINSM ... OPC_DINS:
7818 check_insn(env, ctx, ISA_MIPS64R2);
7820 gen_bitops(ctx, op1, rt, rs, sa, rd);
7823 check_insn(env, ctx, ISA_MIPS64R2);
7825 op2 = MASK_DBSHFL(ctx->opcode);
7826 gen_bshfl(ctx, op2, rt, rd);
7829 default: /* Invalid */
7830 MIPS_INVAL("special3");
7831 generate_exception(ctx, EXCP_RI);
7836 op1 = MASK_REGIMM(ctx->opcode);
7838 case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
7839 case OPC_BLTZAL ... OPC_BGEZALL:
7840 gen_compute_branch(ctx, op1, rs, -1, imm << 2);
7842 case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
7844 gen_trap(ctx, op1, rs, -1, imm);
7847 check_insn(env, ctx, ISA_MIPS32R2);
7850 default: /* Invalid */
7851 MIPS_INVAL("regimm");
7852 generate_exception(ctx, EXCP_RI);
7857 check_cp0_enabled(ctx);
7858 op1 = MASK_CP0(ctx->opcode);
7864 #if defined(TARGET_MIPS64)
7868 #ifndef CONFIG_USER_ONLY
7869 gen_cp0(env, ctx, op1, rt, rd);
7870 #endif /* !CONFIG_USER_ONLY */
7872 case OPC_C0_FIRST ... OPC_C0_LAST:
7873 #ifndef CONFIG_USER_ONLY
7874 gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
7875 #endif /* !CONFIG_USER_ONLY */
7878 #ifndef CONFIG_USER_ONLY
7880 TCGv t0 = tcg_temp_new();
7882 op2 = MASK_MFMC0(ctx->opcode);
7885 check_insn(env, ctx, ASE_MT);
7886 gen_helper_dmt(t0, t0);
7887 gen_store_gpr(t0, rt);
7890 check_insn(env, ctx, ASE_MT);
7891 gen_helper_emt(t0, t0);
7892 gen_store_gpr(t0, rt);
7895 check_insn(env, ctx, ASE_MT);
7896 gen_helper_dvpe(t0, t0);
7897 gen_store_gpr(t0, rt);
7900 check_insn(env, ctx, ASE_MT);
7901 gen_helper_evpe(t0, t0);
7902 gen_store_gpr(t0, rt);
7905 check_insn(env, ctx, ISA_MIPS32R2);
7907 gen_store_gpr(t0, rt);
7908 /* Stop translation as we may have switched the execution mode */
7909 ctx->bstate = BS_STOP;
7912 check_insn(env, ctx, ISA_MIPS32R2);
7914 gen_store_gpr(t0, rt);
7915 /* Stop translation as we may have switched the execution mode */
7916 ctx->bstate = BS_STOP;
7918 default: /* Invalid */
7919 MIPS_INVAL("mfmc0");
7920 generate_exception(ctx, EXCP_RI);
7925 #endif /* !CONFIG_USER_ONLY */
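/* Note: the MFMC0 minor opcodes decoded above either control
   multithreading (DMT/EMT and DVPE/EVPE from the MT ASE) or toggle
   Status.IE (DI/EI from MIPS32R2).  DI/EI set ctx->bstate = BS_STOP
   because changing the interrupt-enable bit can invalidate assumptions
   made for the rest of the translation block. */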
7928 check_insn(env, ctx, ISA_MIPS32R2);
7929 gen_load_srsgpr(rt, rd);
7932 check_insn(env, ctx, ISA_MIPS32R2);
7933 gen_store_srsgpr(rt, rd);
7937 generate_exception(ctx, EXCP_RI);
7941 case OPC_ADDI ... OPC_LUI: /* Arithmetic with immediate opcode */
7942 gen_arith_imm(env, ctx, op, rt, rs, imm);
7944 case OPC_J ... OPC_JAL: /* Jump */
7945 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
7946 gen_compute_branch(ctx, op, rs, rt, offset);
7948 case OPC_BEQ ... OPC_BGTZ: /* Branch */
7949 case OPC_BEQL ... OPC_BGTZL:
7950 gen_compute_branch(ctx, op, rs, rt, imm << 2);
7952 case OPC_LB ... OPC_LWR: /* Load and stores */
7953 case OPC_SB ... OPC_SW:
7957 gen_ldst(ctx, op, rt, rs, imm);
7960 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
7964 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7968 /* Floating point (COP1). */
7973 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7974 check_cp1_enabled(ctx);
7975 gen_flt_ldst(ctx, op, rt, rs, imm);
7977 generate_exception_err(ctx, EXCP_CpU, 1);
7982 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7983 check_cp1_enabled(ctx);
7984 op1 = MASK_CP1(ctx->opcode);
7988 check_insn(env, ctx, ISA_MIPS32R2);
7993 gen_cp1(ctx, op1, rt, rd);
7995 #if defined(TARGET_MIPS64)
7998 check_insn(env, ctx, ISA_MIPS3);
7999 gen_cp1(ctx, op1, rt, rd);
8005 check_insn(env, ctx, ASE_MIPS3D);
8008 gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
8009 (rt >> 2) & 0x7, imm << 2);
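/* Note: for the BC1 group the condition code sits in bits 20:18 of the
   instruction, hence (rt >> 2) & 0x7 with rt being bits 20:16, while
   the low two rt bits (nd/tf) distinguish BC1F/BC1T/BC1FL/BC1TL; the
   16-bit offset is scaled by 4 like every other MIPS branch. */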
8016 gen_farith(ctx, MASK_CP1_FUNC(ctx->opcode), rt, rd, sa,
8021 generate_exception (ctx, EXCP_RI);
8025 generate_exception_err(ctx, EXCP_CpU, 1);
8035 /* COP2: Not implemented. */
8036 generate_exception_err(ctx, EXCP_CpU, 2);
8040 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8041 check_cp1_enabled(ctx);
8042 op1 = MASK_CP3(ctx->opcode);
8050 gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
8068 gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
8072 generate_exception (ctx, EXCP_RI);
8076 generate_exception_err(ctx, EXCP_CpU, 1);
8080 #if defined(TARGET_MIPS64)
8081 /* MIPS64 opcodes */
8083 case OPC_LDL ... OPC_LDR:
8084 case OPC_SDL ... OPC_SDR:
8089 check_insn(env, ctx, ISA_MIPS3);
8091 gen_ldst(ctx, op, rt, rs, imm);
8093 case OPC_DADDI ... OPC_DADDIU:
8094 check_insn(env, ctx, ISA_MIPS3);
8096 gen_arith_imm(env, ctx, op, rt, rs, imm);
8100 check_insn(env, ctx, ASE_MIPS16);
8101 /* MIPS16: Not implemented. */
8103 check_insn(env, ctx, ASE_MDMX);
8104 /* MDMX: Not implemented. */
8105 default: /* Invalid */
8106 MIPS_INVAL("major opcode");
8107 generate_exception(ctx, EXCP_RI);
8110 if (ctx->hflags & MIPS_HFLAG_BMASK) {
8111 int hflags = ctx->hflags & MIPS_HFLAG_BMASK;
8112 /* Branches completion */
8113 ctx->hflags &= ~MIPS_HFLAG_BMASK;
8114 ctx->bstate = BS_BRANCH;
8115 save_cpu_state(ctx, 0);
8116 /* FIXME: Need to clear can_do_io. */
8119 /* unconditional branch */
8120 MIPS_DEBUG("unconditional branch");
8121 gen_goto_tb(ctx, 0, ctx->btarget);
8124 /* blikely taken case */
8125 MIPS_DEBUG("blikely branch taken");
8126 gen_goto_tb(ctx, 0, ctx->btarget);
8129 /* Conditional branch */
8130 MIPS_DEBUG("conditional branch");
8132 int l1 = gen_new_label();
8134 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
8135 gen_goto_tb(ctx, 1, ctx->pc + 4);
8137 gen_goto_tb(ctx, 0, ctx->btarget);
8141 /* unconditional branch to register */
8142 MIPS_DEBUG("branch to register");
8143 tcg_gen_mov_tl(cpu_PC, btarget);
8147 MIPS_DEBUG("unknown branch");
8153 static inline void
8154 gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb,
8155 int search_pc)
8158 target_ulong pc_start;
8159 uint16_t *gen_opc_end;
8166 qemu_log("search pc %d\n", search_pc);
8169 /* Leave some spare opc slots for branch handling. */
8170 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE - 16;
8174 ctx.bstate = BS_NONE;
8175 /* Restore delay slot state from the tb context. */
8176 ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
8177 restore_cpu_state(env, &ctx);
8178 #ifdef CONFIG_USER_ONLY
8179 ctx.mem_idx = MIPS_HFLAG_UM;
8180 #else
8181 ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
8182 #endif
8184 max_insns = tb->cflags & CF_COUNT_MASK;
8185 if (max_insns == 0)
8186 max_insns = CF_COUNT_MASK;
8188 qemu_log_mask(CPU_LOG_TB_CPU, "------------------------------------------------\n");
8189 /* FIXME: This may print out stale hflags from env... */
8190 log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
8192 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
8194 while (ctx.bstate == BS_NONE) {
8195 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
8196 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
8197 if (bp->pc == ctx.pc) {
8198 save_cpu_state(&ctx, 1);
8199 ctx.bstate = BS_BRANCH;
8200 gen_helper_0i(raise_exception, EXCP_DEBUG);
8201 /* Include the breakpoint location or the tb won't
8202 * be flushed when it must be. */
8204 goto done_generating;
8210 j = gen_opc_ptr - gen_opc_buf;
8214 gen_opc_instr_start[lj++] = 0;
8216 gen_opc_pc[lj] = ctx.pc;
8217 gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
8218 gen_opc_instr_start[lj] = 1;
8219 gen_opc_icount[lj] = num_insns;
8221 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8222 gen_io_start();
8223 ctx.opcode = ldl_code(ctx.pc);
8224 decode_opc(env, &ctx);
8225 ctx.pc += 4;
8226 num_insns++;
8228 if (env->singlestep_enabled)
8229 break;
8231 if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
8232 break;
8234 if (gen_opc_ptr >= gen_opc_end)
8235 break;
8237 if (num_insns >= max_insns)
8238 break;
8243 if (tb->cflags & CF_LAST_IO)
8244 gen_io_end();
8245 if (env->singlestep_enabled) {
8246 save_cpu_state(&ctx, ctx.bstate == BS_NONE);
8247 gen_helper_0i(raise_exception, EXCP_DEBUG);
8248 } else {
8249 switch (ctx.bstate) {
8251 gen_helper_interrupt_restart();
8252 gen_goto_tb(&ctx, 0, ctx.pc);
8255 save_cpu_state(&ctx, 0);
8256 gen_goto_tb(&ctx, 0, ctx.pc);
8259 gen_helper_interrupt_restart();
8268 gen_icount_end(tb, num_insns);
8269 *gen_opc_ptr = INDEX_op_end;
8271 j = gen_opc_ptr - gen_opc_buf;
8274 gen_opc_instr_start[lj++] = 0;
8276 tb->size = ctx.pc - pc_start;
8277 tb->icount = num_insns;
8281 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
8282 qemu_log("IN: %s\n", lookup_symbol(pc_start));
8283 log_target_disas(pc_start, ctx.pc - pc_start, 0);
8286 qemu_log_mask(CPU_LOG_TB_CPU, "---------------- %d %08x\n", ctx.bstate, ctx.hflags);
8290 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
8292 gen_intermediate_code_internal(env, tb, 0);
8295 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
8297 gen_intermediate_code_internal(env, tb, 1);
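/* Note: the two wrappers above differ only in the search_pc flag:
   0 for normal translation, 1 when the gen_opc_pc/gen_opc_hflags side
   tables must be filled in so that a host PC inside the TB can later
   be mapped back to a guest PC (see gen_pc_load() below). */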
8300 static void fpu_dump_state(CPUState *env, FILE *f,
8301 int (*fpu_fprintf)(FILE *f, const char *fmt, ...),
8302 int flags)
8305 int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
8307 #define printfpr(fp) \
8308 do { \
8309 if (is_fpu64) \
8310 fpu_fprintf(f, "w:%08x d:%016lx fd:%13g fs:%13g psu: %13g\n", \
8311 (fp)->w[FP_ENDIAN_IDX], (fp)->d, (fp)->fd, \
8312 (fp)->fs[FP_ENDIAN_IDX], (fp)->fs[!FP_ENDIAN_IDX]); \
8313 else { \
8314 fpr_t tmp; \
8315 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
8316 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
8317 fpu_fprintf(f, "w:%08x d:%016lx fd:%13g fs:%13g psu:%13g\n", \
8318 tmp.w[FP_ENDIAN_IDX], tmp.d, tmp.fd, \
8319 tmp.fs[FP_ENDIAN_IDX], tmp.fs[!FP_ENDIAN_IDX]); \
8320 } \
8321 } while(0)
8324 fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%08x(0x%02x)\n",
8325 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64, env->active_fpu.fp_status,
8326 get_float_exception_flags(&env->active_fpu.fp_status));
8327 for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
8328 fpu_fprintf(f, "%3s: ", fregnames[i]);
8329 printfpr(&env->active_fpu.fpr[i]);
8335 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8336 /* Debug help: The architecture requires 32bit code to maintain proper
8337 sign-extended values on 64bit machines. */
8339 #define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
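/* Note: SIGN_EXT_P() accepts a value only if bits 63:31 are all zero
   or all one, i.e. if it is a correctly sign-extended 32-bit quantity.
   0x000000007fffffff and 0xffffffff80000000 pass the test;
   0x0000000080000000 does not. */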
8341 static void
8342 cpu_mips_check_sign_extensions (CPUState *env, FILE *f,
8343 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8344 int flags)
8348 if (!SIGN_EXT_P(env->active_tc.PC))
8349 cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
8350 if (!SIGN_EXT_P(env->active_tc.HI[0]))
8351 cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
8352 if (!SIGN_EXT_P(env->active_tc.LO[0]))
8353 cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
8354 if (!SIGN_EXT_P(env->btarget))
8355 cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);
8357 for (i = 0; i < 32; i++) {
8358 if (!SIGN_EXT_P(env->active_tc.gpr[i]))
8359 cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);
8362 if (!SIGN_EXT_P(env->CP0_EPC))
8363 cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
8364 if (!SIGN_EXT_P(env->CP0_LLAddr))
8365 cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->CP0_LLAddr);
8369 void cpu_dump_state (CPUState *env, FILE *f,
8370 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8371 int flags)
8375 cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx " LO=0x" TARGET_FMT_lx " ds %04x " TARGET_FMT_lx " %d\n",
8376 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
8377 env->hflags, env->btarget, env->bcond);
8378 for (i = 0; i < 32; i++) {
8379 if ((i & 3) == 0)
8380 cpu_fprintf(f, "GPR%02d:", i);
8381 cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
8382 if ((i & 3) == 3)
8383 cpu_fprintf(f, "\n");
8386 cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
8387 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
8388 cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
8389 env->CP0_Config0, env->CP0_Config1, env->CP0_LLAddr);
8390 if (env->hflags & MIPS_HFLAG_FPU)
8391 fpu_dump_state(env, f, cpu_fprintf, flags);
8392 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8393 cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
8397 static void mips_tcg_init(void)
8402 /* Initialize various static tables. */
8406 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
8407 for (i = 0; i < 32; i++)
8408 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
8409 offsetof(CPUState, active_tc.gpr[i]),
8411 cpu_PC = tcg_global_mem_new(TCG_AREG0,
8412 offsetof(CPUState, active_tc.PC), "PC");
8413 for (i = 0; i < MIPS_DSP_ACC; i++) {
8414 cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
8415 offsetof(CPUState, active_tc.HI[i]),
8417 cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
8418 offsetof(CPUState, active_tc.LO[i]),
8420 cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
8421 offsetof(CPUState, active_tc.ACX[i]),
8424 cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
8425 offsetof(CPUState, active_tc.DSPControl),
8427 bcond = tcg_global_mem_new(TCG_AREG0,
8428 offsetof(CPUState, bcond), "bcond");
8429 btarget = tcg_global_mem_new(TCG_AREG0,
8430 offsetof(CPUState, btarget), "btarget");
8431 hflags = tcg_global_mem_new_i32(TCG_AREG0,
8432 offsetof(CPUState, hflags), "hflags");
8434 fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
8435 offsetof(CPUState, active_fpu.fcr0),
8437 fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
8438 offsetof(CPUState, active_fpu.fcr31),
8441 /* register helpers */
8442 #define GEN_HELPER 2
8448 #include "translate_init.c"
8450 CPUMIPSState *cpu_mips_init (const char *cpu_model)
8453 const mips_def_t *def;
8455 def = cpu_mips_find_by_name(cpu_model);
8458 env = qemu_mallocz(sizeof(CPUMIPSState));
8459 env->cpu_model = def;
8462 env->cpu_model_str = cpu_model;
8468 void cpu_reset (CPUMIPSState *env)
8470 if (qemu_loglevel_mask(CPU_LOG_RESET)) {
8471 qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
8472 log_cpu_state(env, 0);
8475 memset(env, 0, offsetof(CPUMIPSState, breakpoints));
8480 #if defined(CONFIG_USER_ONLY)
8481 env->hflags = MIPS_HFLAG_UM;
8482 #else
8483 if (env->hflags & MIPS_HFLAG_BMASK) {
8484 /* If the exception was raised from a delay slot,
8485 come back to the jump. */
8486 env->CP0_ErrorEPC = env->active_tc.PC - 4;
8487 } else {
8488 env->CP0_ErrorEPC = env->active_tc.PC;
8490 env->active_tc.PC = (int32_t)0xBFC00000;
8492 /* SMP not implemented */
8493 env->CP0_EBase = 0x80000000;
8494 env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
8495 /* vectored interrupts not implemented, timer on int 7,
8496 no performance counters. */
8497 env->CP0_IntCtl = 0xe0000000;
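/* Note: 0xe0000000 sets IntCtl.IPTI (bits 31:29) to 7, matching the
   "timer on int 7" comment above: the count/compare timer interrupt is
   reported on hardware interrupt 5, i.e. Cause.IP7. */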
8501 for (i = 0; i < 7; i++) {
8502 env->CP0_WatchLo[i] = 0;
8503 env->CP0_WatchHi[i] = 0x80000000;
8505 env->CP0_WatchLo[7] = 0;
8506 env->CP0_WatchHi[7] = 0;
8508 /* Count register increments in debug mode, EJTAG version 1 */
8509 env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
8510 env->hflags = MIPS_HFLAG_CP0;
8512 env->exception_index = EXCP_NONE;
8513 cpu_mips_register(env, env->cpu_model);
8516 void gen_pc_load(CPUState *env, TranslationBlock *tb,
8517 unsigned long searched_pc, int pc_pos, void *puc)
8519 env->active_tc.PC = gen_opc_pc[pc_pos];
8520 env->hflags &= ~MIPS_HFLAG_BMASK;
8521 env->hflags |= gen_opc_hflags[pc_pos];
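/* Note: gen_pc_load() is the consumer of the search_pc bookkeeping
   done in gen_intermediate_code_internal(): it rewinds active_tc.PC to
   the instruction recorded at pc_pos and re-applies the saved
   delay-slot hflags so that an exception taken in the middle of a TB
   resumes with consistent branch state. */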