2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with this library; if not, write to the Free Software
20 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
33 #include "qemu-common.h"
39 //#define MIPS_DEBUG_DISAS
40 //#define MIPS_DEBUG_SIGN_EXTENSIONS
42 /* MIPS major opcodes */
43 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
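/* Decoding sketch (illustrative, not generated code): the major opcode sits
   in bits 31..26 of the instruction word, so dispatch typically looks like
       switch (MASK_OP_MAJOR(ctx->opcode)) {
       case OPC_SPECIAL: ... decode the function field further ...
       case OPC_REGIMM:  ... decode the rt field further ...
       }
 */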
46 /* indirect opcode tables */
47 OPC_SPECIAL = (0x00 << 26),
48 OPC_REGIMM = (0x01 << 26),
49 OPC_CP0 = (0x10 << 26),
50 OPC_CP1 = (0x11 << 26),
51 OPC_CP2 = (0x12 << 26),
52 OPC_CP3 = (0x13 << 26),
53 OPC_SPECIAL2 = (0x1C << 26),
54 OPC_SPECIAL3 = (0x1F << 26),
55 /* arithmetic with immediate */
56 OPC_ADDI = (0x08 << 26),
57 OPC_ADDIU = (0x09 << 26),
58 OPC_SLTI = (0x0A << 26),
59 OPC_SLTIU = (0x0B << 26),
60 /* logic with immediate */
61 OPC_ANDI = (0x0C << 26),
62 OPC_ORI = (0x0D << 26),
63 OPC_XORI = (0x0E << 26),
64 OPC_LUI = (0x0F << 26),
65 /* arithmetic with immediate */
66 OPC_DADDI = (0x18 << 26),
67 OPC_DADDIU = (0x19 << 26),
68 /* Jump and branches */
70 OPC_JAL = (0x03 << 26),
71 OPC_BEQ = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
72 OPC_BEQL = (0x14 << 26),
73 OPC_BNE = (0x05 << 26),
74 OPC_BNEL = (0x15 << 26),
75 OPC_BLEZ = (0x06 << 26),
76 OPC_BLEZL = (0x16 << 26),
77 OPC_BGTZ = (0x07 << 26),
78 OPC_BGTZL = (0x17 << 26),
79 OPC_JALX = (0x1D << 26), /* MIPS 16 only */
81 OPC_LDL = (0x1A << 26),
82 OPC_LDR = (0x1B << 26),
83 OPC_LB = (0x20 << 26),
84 OPC_LH = (0x21 << 26),
85 OPC_LWL = (0x22 << 26),
86 OPC_LW = (0x23 << 26),
87 OPC_LBU = (0x24 << 26),
88 OPC_LHU = (0x25 << 26),
89 OPC_LWR = (0x26 << 26),
90 OPC_LWU = (0x27 << 26),
91 OPC_SB = (0x28 << 26),
92 OPC_SH = (0x29 << 26),
93 OPC_SWL = (0x2A << 26),
94 OPC_SW = (0x2B << 26),
95 OPC_SDL = (0x2C << 26),
96 OPC_SDR = (0x2D << 26),
97 OPC_SWR = (0x2E << 26),
98 OPC_LL = (0x30 << 26),
99 OPC_LLD = (0x34 << 26),
100 OPC_LD = (0x37 << 26),
101 OPC_SC = (0x38 << 26),
102 OPC_SCD = (0x3C << 26),
103 OPC_SD = (0x3F << 26),
104 /* Floating point load/store */
105 OPC_LWC1 = (0x31 << 26),
106 OPC_LWC2 = (0x32 << 26),
107 OPC_LDC1 = (0x35 << 26),
108 OPC_LDC2 = (0x36 << 26),
109 OPC_SWC1 = (0x39 << 26),
110 OPC_SWC2 = (0x3A << 26),
111 OPC_SDC1 = (0x3D << 26),
112 OPC_SDC2 = (0x3E << 26),
113 /* MDMX ASE specific */
114 OPC_MDMX = (0x1E << 26),
115 /* Cache and prefetch */
116 OPC_CACHE = (0x2F << 26),
117 OPC_PREF = (0x33 << 26),
118 /* Reserved major opcode */
119 OPC_MAJOR3B_RESERVED = (0x3B << 26),
122 /* MIPS special opcodes */
123 #define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
127 OPC_SLL = 0x00 | OPC_SPECIAL,
128 /* NOP is SLL r0, r0, 0 */
129 /* SSNOP is SLL r0, r0, 1 */
130 /* EHB is SLL r0, r0, 3 */
131 OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
132 OPC_SRA = 0x03 | OPC_SPECIAL,
133 OPC_SLLV = 0x04 | OPC_SPECIAL,
134 OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
135 OPC_SRAV = 0x07 | OPC_SPECIAL,
136 OPC_DSLLV = 0x14 | OPC_SPECIAL,
137 OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
138 OPC_DSRAV = 0x17 | OPC_SPECIAL,
139 OPC_DSLL = 0x38 | OPC_SPECIAL,
140 OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
141 OPC_DSRA = 0x3B | OPC_SPECIAL,
142 OPC_DSLL32 = 0x3C | OPC_SPECIAL,
143 OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
144 OPC_DSRA32 = 0x3F | OPC_SPECIAL,
145 /* Multiplication / division */
146 OPC_MULT = 0x18 | OPC_SPECIAL,
147 OPC_MULTU = 0x19 | OPC_SPECIAL,
148 OPC_DIV = 0x1A | OPC_SPECIAL,
149 OPC_DIVU = 0x1B | OPC_SPECIAL,
150 OPC_DMULT = 0x1C | OPC_SPECIAL,
151 OPC_DMULTU = 0x1D | OPC_SPECIAL,
152 OPC_DDIV = 0x1E | OPC_SPECIAL,
153 OPC_DDIVU = 0x1F | OPC_SPECIAL,
154 /* 2 registers arithmetic / logic */
155 OPC_ADD = 0x20 | OPC_SPECIAL,
156 OPC_ADDU = 0x21 | OPC_SPECIAL,
157 OPC_SUB = 0x22 | OPC_SPECIAL,
158 OPC_SUBU = 0x23 | OPC_SPECIAL,
159 OPC_AND = 0x24 | OPC_SPECIAL,
160 OPC_OR = 0x25 | OPC_SPECIAL,
161 OPC_XOR = 0x26 | OPC_SPECIAL,
162 OPC_NOR = 0x27 | OPC_SPECIAL,
163 OPC_SLT = 0x2A | OPC_SPECIAL,
164 OPC_SLTU = 0x2B | OPC_SPECIAL,
165 OPC_DADD = 0x2C | OPC_SPECIAL,
166 OPC_DADDU = 0x2D | OPC_SPECIAL,
167 OPC_DSUB = 0x2E | OPC_SPECIAL,
168 OPC_DSUBU = 0x2F | OPC_SPECIAL,
170 OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
171 OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
173 OPC_TGE = 0x30 | OPC_SPECIAL,
174 OPC_TGEU = 0x31 | OPC_SPECIAL,
175 OPC_TLT = 0x32 | OPC_SPECIAL,
176 OPC_TLTU = 0x33 | OPC_SPECIAL,
177 OPC_TEQ = 0x34 | OPC_SPECIAL,
178 OPC_TNE = 0x36 | OPC_SPECIAL,
179 /* HI / LO registers load & stores */
180 OPC_MFHI = 0x10 | OPC_SPECIAL,
181 OPC_MTHI = 0x11 | OPC_SPECIAL,
182 OPC_MFLO = 0x12 | OPC_SPECIAL,
183 OPC_MTLO = 0x13 | OPC_SPECIAL,
184 /* Conditional moves */
185 OPC_MOVZ = 0x0A | OPC_SPECIAL,
186 OPC_MOVN = 0x0B | OPC_SPECIAL,
188 OPC_MOVCI = 0x01 | OPC_SPECIAL,
191 OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
192 OPC_SYSCALL = 0x0C | OPC_SPECIAL,
193 OPC_BREAK = 0x0D | OPC_SPECIAL,
194 OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
195 OPC_SYNC = 0x0F | OPC_SPECIAL,
197 OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
198 OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
199 OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
200 OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
201 OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
202 OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
203 OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
206 /* Multiplication variants of the VR54xx. */
207 #define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | (op & (0x1F << 6)))
210 OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
211 OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
212 OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
213 OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
214 OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
215 OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
216 OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
217 OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
218 OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
219 OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
220 OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
221 OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
222 OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
223 OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
226 /* REGIMM (rt field) opcodes */
227 #define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 16)))
230 OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
231 OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
232 OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
233 OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
234 OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
235 OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
236 OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
237 OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
238 OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
239 OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
240 OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
241 OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
242 OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
243 OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
244 OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,
247 /* Special2 opcodes */
248 #define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
251 /* Multiply and multiply-accumulate operations */
252 OPC_MADD = 0x00 | OPC_SPECIAL2,
253 OPC_MADDU = 0x01 | OPC_SPECIAL2,
254 OPC_MUL = 0x02 | OPC_SPECIAL2,
255 OPC_MSUB = 0x04 | OPC_SPECIAL2,
256 OPC_MSUBU = 0x05 | OPC_SPECIAL2,
258 OPC_CLZ = 0x20 | OPC_SPECIAL2,
259 OPC_CLO = 0x21 | OPC_SPECIAL2,
260 OPC_DCLZ = 0x24 | OPC_SPECIAL2,
261 OPC_DCLO = 0x25 | OPC_SPECIAL2,
263 OPC_SDBBP = 0x3F | OPC_SPECIAL2,
266 /* Special3 opcodes */
267 #define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
270 OPC_EXT = 0x00 | OPC_SPECIAL3,
271 OPC_DEXTM = 0x01 | OPC_SPECIAL3,
272 OPC_DEXTU = 0x02 | OPC_SPECIAL3,
273 OPC_DEXT = 0x03 | OPC_SPECIAL3,
274 OPC_INS = 0x04 | OPC_SPECIAL3,
275 OPC_DINSM = 0x05 | OPC_SPECIAL3,
276 OPC_DINSU = 0x06 | OPC_SPECIAL3,
277 OPC_DINS = 0x07 | OPC_SPECIAL3,
278 OPC_FORK = 0x08 | OPC_SPECIAL3,
279 OPC_YIELD = 0x09 | OPC_SPECIAL3,
280 OPC_BSHFL = 0x20 | OPC_SPECIAL3,
281 OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
282 OPC_RDHWR = 0x3B | OPC_SPECIAL3,
286 #define MASK_BSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
289 OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
290 OPC_SEB = (0x10 << 6) | OPC_BSHFL,
291 OPC_SEH = (0x18 << 6) | OPC_BSHFL,
295 #define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
298 OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
299 OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
302 /* Coprocessor 0 (rs field) */
303 #define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
306 OPC_MFC0 = (0x00 << 21) | OPC_CP0,
307 OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
308 OPC_MTC0 = (0x04 << 21) | OPC_CP0,
309 OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
310 OPC_MFTR = (0x08 << 21) | OPC_CP0,
311 OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
312 OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
313 OPC_MTTR = (0x0C << 21) | OPC_CP0,
314 OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
315 OPC_C0 = (0x10 << 21) | OPC_CP0,
316 OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
317 OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
321 #define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
324 OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
325 OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
326 OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
327 OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
328 OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
329 OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
332 /* Coprocessor 0 (with rs == C0) */
333 #define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
336 OPC_TLBR = 0x01 | OPC_C0,
337 OPC_TLBWI = 0x02 | OPC_C0,
338 OPC_TLBWR = 0x06 | OPC_C0,
339 OPC_TLBP = 0x08 | OPC_C0,
340 OPC_RFE = 0x10 | OPC_C0,
341 OPC_ERET = 0x18 | OPC_C0,
342 OPC_DERET = 0x1F | OPC_C0,
343 OPC_WAIT = 0x20 | OPC_C0,
346 /* Coprocessor 1 (rs field) */
347 #define MASK_CP1(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
350 OPC_MFC1 = (0x00 << 21) | OPC_CP1,
351 OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
352 OPC_CFC1 = (0x02 << 21) | OPC_CP1,
353 OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
354 OPC_MTC1 = (0x04 << 21) | OPC_CP1,
355 OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
356 OPC_CTC1 = (0x06 << 21) | OPC_CP1,
357 OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
358 OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
359 OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
360 OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
361 OPC_S_FMT = (0x10 << 21) | OPC_CP1, /* 16: fmt=single fp */
362 OPC_D_FMT = (0x11 << 21) | OPC_CP1, /* 17: fmt=double fp */
363 OPC_E_FMT = (0x12 << 21) | OPC_CP1, /* 18: fmt=extended fp */
364 OPC_Q_FMT = (0x13 << 21) | OPC_CP1, /* 19: fmt=quad fp */
365 OPC_W_FMT = (0x14 << 21) | OPC_CP1, /* 20: fmt=32bit fixed */
366 OPC_L_FMT = (0x15 << 21) | OPC_CP1, /* 21: fmt=64bit fixed */
367 OPC_PS_FMT = (0x16 << 21) | OPC_CP1, /* 22: fmt=paired single fp */
370 #define MASK_CP1_FUNC(op) (MASK_CP1(op) | (op & 0x3F))
371 #define MASK_BC1(op) (MASK_CP1(op) | (op & (0x3 << 16)))
374 OPC_BC1F = (0x00 << 16) | OPC_BC1,
375 OPC_BC1T = (0x01 << 16) | OPC_BC1,
376 OPC_BC1FL = (0x02 << 16) | OPC_BC1,
377 OPC_BC1TL = (0x03 << 16) | OPC_BC1,
381 OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
382 OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
386 OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
387 OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
390 #define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
393 OPC_MFC2 = (0x00 << 21) | OPC_CP2,
394 OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
395 OPC_CFC2 = (0x02 << 21) | OPC_CP2,
396 OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
397 OPC_MTC2 = (0x04 << 21) | OPC_CP2,
398 OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
399 OPC_CTC2 = (0x06 << 21) | OPC_CP2,
400 OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
401 OPC_BC2 = (0x08 << 21) | OPC_CP2,
404 #define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
407 OPC_LWXC1 = 0x00 | OPC_CP3,
408 OPC_LDXC1 = 0x01 | OPC_CP3,
409 OPC_LUXC1 = 0x05 | OPC_CP3,
410 OPC_SWXC1 = 0x08 | OPC_CP3,
411 OPC_SDXC1 = 0x09 | OPC_CP3,
412 OPC_SUXC1 = 0x0D | OPC_CP3,
413 OPC_PREFX = 0x0F | OPC_CP3,
414 OPC_ALNV_PS = 0x1E | OPC_CP3,
415 OPC_MADD_S = 0x20 | OPC_CP3,
416 OPC_MADD_D = 0x21 | OPC_CP3,
417 OPC_MADD_PS = 0x26 | OPC_CP3,
418 OPC_MSUB_S = 0x28 | OPC_CP3,
419 OPC_MSUB_D = 0x29 | OPC_CP3,
420 OPC_MSUB_PS = 0x2E | OPC_CP3,
421 OPC_NMADD_S = 0x30 | OPC_CP3,
422 OPC_NMADD_D = 0x31 | OPC_CP3,
423 OPC_NMADD_PS= 0x36 | OPC_CP3,
424 OPC_NMSUB_S = 0x38 | OPC_CP3,
425 OPC_NMSUB_D = 0x39 | OPC_CP3,
426 OPC_NMSUB_PS= 0x3E | OPC_CP3,
429 /* global register indices */
430 static TCGv_ptr cpu_env;
431 static TCGv cpu_gpr[32], cpu_PC;
432 static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
433 static TCGv cpu_dspctrl, btarget, bcond;
434 static TCGv_i32 hflags;
435 static TCGv_i32 fpu_fcr0, fpu_fcr31;
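/* These TCG globals alias fields of the CPU state structure (GPRs, PC,
   HI/LO/ACX accumulators, DSPControl, hflags and the FPU control registers),
   so generated code can access them directly rather than through explicit
   loads and stores of CPUState. */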
437 #include "gen-icount.h"
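/* The gen_helper_*i wrappers below box a compile-time immediate into a
   temporary TCGv_i32, pass it to the generated helper call and free the
   temporary again -- e.g. gen_helper_0i(raise_exception, excp) in
   generate_exception(). */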
439 #define gen_helper_0i(name, arg) do { \
440 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
441 gen_helper_##name(helper_tmp); \
442 tcg_temp_free_i32(helper_tmp); \
445 #define gen_helper_1i(name, arg1, arg2) do { \
446 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
447 gen_helper_##name(arg1, helper_tmp); \
448 tcg_temp_free_i32(helper_tmp); \
451 #define gen_helper_2i(name, arg1, arg2, arg3) do { \
452 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
453 gen_helper_##name(arg1, arg2, helper_tmp); \
454 tcg_temp_free_i32(helper_tmp); \
457 #define gen_helper_3i(name, arg1, arg2, arg3, arg4) do { \
458 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
459 gen_helper_##name(arg1, arg2, arg3, helper_tmp); \
460 tcg_temp_free_i32(helper_tmp); \
463 typedef struct DisasContext {
464 struct TranslationBlock *tb;
465 target_ulong pc, saved_pc;
467 /* Routine used to access memory */
469 uint32_t hflags, saved_hflags;
471 target_ulong btarget;
475 BS_NONE = 0, /* We go out of the TB without reaching a branch or an
476 * exception condition */
477 BS_STOP = 1, /* We want to stop translation for any reason */
478 BS_BRANCH = 2, /* We reached a branch condition */
479 BS_EXCP = 3, /* We reached an exception condition */
482 static const char *regnames[] =
483 { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
484 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
485 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
486 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };
488 static const char *regnames_HI[] =
489 { "HI0", "HI1", "HI2", "HI3", };
491 static const char *regnames_LO[] =
492 { "LO0", "LO1", "LO2", "LO3", };
494 static const char *regnames_ACX[] =
495 { "ACX0", "ACX1", "ACX2", "ACX3", };
497 static const char *fregnames[] =
498 { "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
499 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
500 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
501 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };
503 #ifdef MIPS_DEBUG_DISAS
504 #define MIPS_DEBUG(fmt, args...) \
505 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
506 TARGET_FMT_lx ": %08x " fmt "\n", \
507 ctx->pc, ctx->opcode , ##args)
508 #define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
510 #define MIPS_DEBUG(fmt, args...) do { } while(0)
511 #define LOG_DISAS(...) do { } while (0)
514 #define MIPS_INVAL(op) \
516 MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26, \
517 ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F)); \
520 /* General purpose registers moves. */
521 static inline void gen_load_gpr (TCGv t, int reg)
524 tcg_gen_movi_tl(t, 0);
526 tcg_gen_mov_tl(t, cpu_gpr[reg]);
529 static inline void gen_store_gpr (TCGv t, int reg)
532 tcg_gen_mov_tl(cpu_gpr[reg], t);
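/* GPR 0 ($zero) is hardwired: gen_load_gpr() yields the constant 0 for it
   and stores to it are discarded, so callers never need to special-case
   register 0 themselves. */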
535 /* Moves to/from ACX register. */
536 static inline void gen_load_ACX (TCGv t, int reg)
538 tcg_gen_mov_tl(t, cpu_ACX[reg]);
541 static inline void gen_store_ACX (TCGv t, int reg)
543 tcg_gen_mov_tl(cpu_ACX[reg], t);
546 /* Moves to/from shadow registers. */
547 static inline void gen_load_srsgpr (int from, int to)
549 TCGv t0 = tcg_temp_new();
552 tcg_gen_movi_tl(t0, 0);
554 TCGv_i32 t2 = tcg_temp_new_i32();
555 TCGv_ptr addr = tcg_temp_new_ptr();
557 tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
558 tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
559 tcg_gen_andi_i32(t2, t2, 0xf);
560 tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
561 tcg_gen_ext_i32_ptr(addr, t2);
562 tcg_gen_add_ptr(addr, cpu_env, addr);
564 tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
565 tcg_temp_free_ptr(addr);
566 tcg_temp_free_i32(t2);
568 gen_store_gpr(t0, to);
572 static inline void gen_store_srsgpr (int from, int to)
575 TCGv t0 = tcg_temp_new();
576 TCGv_i32 t2 = tcg_temp_new_i32();
577 TCGv_ptr addr = tcg_temp_new_ptr();
579 gen_load_gpr(t0, from);
580 tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
581 tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
582 tcg_gen_andi_i32(t2, t2, 0xf);
583 tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
584 tcg_gen_ext_i32_ptr(addr, t2);
585 tcg_gen_add_ptr(addr, cpu_env, addr);
587 tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
588 tcg_temp_free_ptr(addr);
589 tcg_temp_free_i32(t2);
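/* Shadow register sets: the generated code reads CP0_SRSCtl at execution
   time, extracts the PSS (previous shadow set) field and scales it by the
   size of one 32-entry register set to form an offset into the shadow GPR
   file inside CPUState, from which the GPR is then loaded or stored. */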
594 /* Floating point register moves. */
595 static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
597 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
600 static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
602 tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
605 static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
607 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
610 static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
612 tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
615 static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
617 if (ctx->hflags & MIPS_HFLAG_F64) {
618 tcg_gen_ld_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
620 TCGv_i32 t0 = tcg_temp_new_i32();
621 TCGv_i32 t1 = tcg_temp_new_i32();
622 gen_load_fpr32(t0, reg & ~1);
623 gen_load_fpr32(t1, reg | 1);
624 tcg_gen_concat_i32_i64(t, t0, t1);
625 tcg_temp_free_i32(t0);
626 tcg_temp_free_i32(t1);
630 static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
632 if (ctx->hflags & MIPS_HFLAG_F64) {
633 tcg_gen_st_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
635 TCGv_i64 t0 = tcg_temp_new_i64();
636 TCGv_i32 t1 = tcg_temp_new_i32();
637 tcg_gen_trunc_i64_i32(t1, t);
638 gen_store_fpr32(t1, reg & ~1);
639 tcg_gen_shri_i64(t0, t, 32);
640 tcg_gen_trunc_i64_i32(t1, t0);
641 gen_store_fpr32(t1, reg | 1);
642 tcg_temp_free_i32(t1);
643 tcg_temp_free_i64(t0);
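/* With Status.FR = 0 (MIPS_HFLAG_F64 clear) the 32 FPRs behave as 16
   even/odd pairs: a 64-bit value is split so the low word goes to the even
   register and the high word to the odd one, as done above. */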
647 static inline int get_fp_bit (int cc)
655 #define FOP_CONDS(type, fmt, bits) \
656 static inline void gen_cmp ## type ## _ ## fmt(int n, TCGv_i##bits a, \
657 TCGv_i##bits b, int cc) \
660 case 0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, a, b, cc); break;\
661 case 1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, a, b, cc); break;\
662 case 2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, a, b, cc); break;\
663 case 3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, a, b, cc); break;\
664 case 4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, a, b, cc); break;\
665 case 5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, a, b, cc); break;\
666 case 6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, a, b, cc); break;\
667 case 7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, a, b, cc); break;\
668 case 8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, a, b, cc); break;\
669 case 9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, a, b, cc); break;\
670 case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, a, b, cc); break;\
671 case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, a, b, cc); break;\
672 case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, a, b, cc); break;\
673 case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, a, b, cc); break;\
674 case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, a, b, cc); break;\
675 case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, a, b, cc); break;\
681 FOP_CONDS(abs, d, 64)
683 FOP_CONDS(abs, s, 32)
685 FOP_CONDS(abs, ps, 64)
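/* The sixteen cases in FOP_CONDS follow the c.cond.fmt condition encoding
   (F, UN, EQ, UEQ, OLT, ULT, OLE, ULE, SF, NGLE, SEQ, NGL, LT, NGE, LE,
   NGT); the "abs" instances correspond to the MIPS-3D CABS.cond.fmt
   compares. */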
689 #define OP_COND(name, cond) \
690 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0, TCGv t1) \
692 int l1 = gen_new_label(); \
693 int l2 = gen_new_label(); \
695 tcg_gen_brcond_tl(cond, t0, t1, l1); \
696 tcg_gen_movi_tl(ret, 0); \
699 tcg_gen_movi_tl(ret, 1); \
702 OP_COND(eq, TCG_COND_EQ);
703 OP_COND(ne, TCG_COND_NE);
704 OP_COND(ge, TCG_COND_GE);
705 OP_COND(geu, TCG_COND_GEU);
706 OP_COND(lt, TCG_COND_LT);
707 OP_COND(ltu, TCG_COND_LTU);
710 #define OP_CONDI(name, cond) \
711 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0, target_ulong val) \
713 int l1 = gen_new_label(); \
714 int l2 = gen_new_label(); \
716 tcg_gen_brcondi_tl(cond, t0, val, l1); \
717 tcg_gen_movi_tl(ret, 0); \
720 tcg_gen_movi_tl(ret, 1); \
723 OP_CONDI(lti, TCG_COND_LT);
724 OP_CONDI(ltiu, TCG_COND_LTU);
727 #define OP_CONDZ(name, cond) \
728 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0) \
730 int l1 = gen_new_label(); \
731 int l2 = gen_new_label(); \
733 tcg_gen_brcondi_tl(cond, t0, 0, l1); \
734 tcg_gen_movi_tl(ret, 0); \
737 tcg_gen_movi_tl(ret, 1); \
740 OP_CONDZ(gez, TCG_COND_GE);
741 OP_CONDZ(gtz, TCG_COND_GT);
742 OP_CONDZ(lez, TCG_COND_LE);
743 OP_CONDZ(ltz, TCG_COND_LT);
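/* The OP_COND/OP_CONDI/OP_CONDZ families materialise a boolean into 'ret'
   (1 if the condition holds, 0 otherwise) using a branch over two constant
   moves; e.g. gen_op_lt(cpu_gpr[rd], t0, t1) implements SLT in gen_slt()
   below. */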
746 static inline void gen_save_pc(target_ulong pc)
748 tcg_gen_movi_tl(cpu_PC, pc);
751 static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
753 LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
754 if (do_save_pc && ctx->pc != ctx->saved_pc) {
755 gen_save_pc(ctx->pc);
756 ctx->saved_pc = ctx->pc;
758 if (ctx->hflags != ctx->saved_hflags) {
759 tcg_gen_movi_i32(hflags, ctx->hflags);
760 ctx->saved_hflags = ctx->hflags;
761 switch (ctx->hflags & MIPS_HFLAG_BMASK) {
767 tcg_gen_movi_tl(btarget, ctx->btarget);
773 static inline void restore_cpu_state (CPUState *env, DisasContext *ctx)
775 ctx->saved_hflags = ctx->hflags;
776 switch (ctx->hflags & MIPS_HFLAG_BMASK) {
782 ctx->btarget = env->btarget;
788 generate_exception_err (DisasContext *ctx, int excp, int err)
790 TCGv_i32 texcp = tcg_const_i32(excp);
791 TCGv_i32 terr = tcg_const_i32(err);
792 save_cpu_state(ctx, 1);
793 gen_helper_raise_exception_err(texcp, terr);
794 tcg_temp_free_i32(terr);
795 tcg_temp_free_i32(texcp);
796 gen_helper_interrupt_restart();
801 generate_exception (DisasContext *ctx, int excp)
803 save_cpu_state(ctx, 1);
804 gen_helper_0i(raise_exception, excp);
805 gen_helper_interrupt_restart();
809 /* Address computation */
810 static inline void gen_op_addr_add (DisasContext *ctx, TCGv t0, TCGv t1)
812 tcg_gen_add_tl(t0, t0, t1);
814 #if defined(TARGET_MIPS64)
815 /* For compatibility with 32-bit code, data references in user mode
816 with Status_UX = 0 should be cast to 32 bits and sign extended.
817 See the MIPS64 PRA manual, section 4.10. */
818 if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
819 !(ctx->hflags & MIPS_HFLAG_UX)) {
820 tcg_gen_ext32s_i64(t0, t0);
825 static inline void check_cp0_enabled(DisasContext *ctx)
827 if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
828 generate_exception_err(ctx, EXCP_CpU, 1);
831 static inline void check_cp1_enabled(DisasContext *ctx)
833 if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
834 generate_exception_err(ctx, EXCP_CpU, 1);
837 /* Verify that the processor is running with COP1X instructions enabled.
838 This is associated with the nabla symbol in the MIPS32 and MIPS64 opcode tables. */
841 static inline void check_cop1x(DisasContext *ctx)
843 if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
844 generate_exception(ctx, EXCP_RI);
847 /* Verify that the processor is running with 64-bit floating-point
848 operations enabled. */
850 static inline void check_cp1_64bitmode(DisasContext *ctx)
852 if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
853 generate_exception(ctx, EXCP_RI);
857 * Verify if floating point register is valid; an operation is not defined
858 * if bit 0 of any register specification is set and the FR bit in the
859 * Status register equals zero, since the register numbers specify an
860 * even-odd pair of adjacent coprocessor general registers. When the FR bit
861 * in the Status register equals one, both even and odd register numbers
862 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
864 * Multiple 64 bit wide registers can be checked by calling
865 * check_cp1_registers(ctx, freg1 | freg2 | ... | fregN);
867 static inline void check_cp1_registers(DisasContext *ctx, int regs)
869 if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
870 generate_exception(ctx, EXCP_RI);
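/* Typical use (illustrative): a 64-bit format FPU operation would call,
   say, check_cp1_registers(ctx, fs | ft | fd) so that a single check traps
   if any of the register numbers is odd while Status.FR = 0. */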
873 /* This code generates a "reserved instruction" exception if the
874 CPU does not support the instruction set corresponding to flags. */
875 static inline void check_insn(CPUState *env, DisasContext *ctx, int flags)
877 if (unlikely(!(env->insn_flags & flags)))
878 generate_exception(ctx, EXCP_RI);
881 /* This code generates a "reserved instruction" exception if 64-bit
882 instructions are not enabled. */
883 static inline void check_mips_64(DisasContext *ctx)
885 if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
886 generate_exception(ctx, EXCP_RI);
889 /* load/store instructions. */
890 #define OP_LD(insn,fname) \
891 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
893 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
900 #if defined(TARGET_MIPS64)
906 #define OP_ST(insn,fname) \
907 static inline void op_ldst_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx) \
909 tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx); \
914 #if defined(TARGET_MIPS64)
919 #define OP_LD_ATOMIC(insn,fname) \
920 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
922 TCGv t0 = tcg_temp_new(); \
923 tcg_gen_mov_tl(t0, arg1); \
924 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
925 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_LLAddr)); \
928 OP_LD_ATOMIC(ll,ld32s);
929 #if defined(TARGET_MIPS64)
930 OP_LD_ATOMIC(lld,ld64);
934 #define OP_ST_ATOMIC(insn,fname,almask) \
935 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, TCGv arg2, DisasContext *ctx) \
937 TCGv t0 = tcg_temp_new(); \
938 int l1 = gen_new_label(); \
939 int l2 = gen_new_label(); \
940 int l3 = gen_new_label(); \
942 tcg_gen_andi_tl(t0, arg2, almask); \
943 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
944 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUState, CP0_BadVAddr)); \
945 generate_exception(ctx, EXCP_AdES); \
947 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_LLAddr)); \
948 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
950 tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx); \
951 tcg_gen_movi_tl(ret, 1); \
954 tcg_gen_movi_tl(ret, 0); \
957 OP_ST_ATOMIC(sc,st32,0x3);
958 #if defined(TARGET_MIPS64)
959 OP_ST_ATOMIC(scd,st64,0x7);
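/* LL/SC are modelled loosely: LL records the address in CP0_LLAddr, and SC
   performs the store and returns 1 when its (properly aligned) address
   still matches CP0_LLAddr, returning 0 otherwise; misaligned addresses
   raise an AdES exception.  Intervening writes are not detected here. */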
964 static void gen_ldst (DisasContext *ctx, uint32_t opc, int rt,
965 int base, int16_t offset)
967 const char *opn = "ldst";
968 TCGv t0 = tcg_temp_new();
969 TCGv t1 = tcg_temp_new();
972 tcg_gen_movi_tl(t0, offset);
973 } else if (offset == 0) {
974 gen_load_gpr(t0, base);
976 tcg_gen_movi_tl(t0, offset);
977 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
979 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
982 #if defined(TARGET_MIPS64)
984 save_cpu_state(ctx, 0);
985 op_ldst_lwu(t0, t0, ctx);
986 gen_store_gpr(t0, rt);
990 save_cpu_state(ctx, 0);
991 op_ldst_ld(t0, t0, ctx);
992 gen_store_gpr(t0, rt);
996 save_cpu_state(ctx, 0);
997 op_ldst_lld(t0, t0, ctx);
998 gen_store_gpr(t0, rt);
1002 save_cpu_state(ctx, 0);
1003 gen_load_gpr(t1, rt);
1004 op_ldst_sd(t1, t0, ctx);
1008 save_cpu_state(ctx, 1);
1009 gen_load_gpr(t1, rt);
1010 gen_helper_3i(ldl, t1, t1, t0, ctx->mem_idx);
1011 gen_store_gpr(t1, rt);
1015 save_cpu_state(ctx, 1);
1016 gen_load_gpr(t1, rt);
1017 gen_helper_2i(sdl, t1, t0, ctx->mem_idx);
1021 save_cpu_state(ctx, 1);
1022 gen_load_gpr(t1, rt);
1023 gen_helper_3i(ldr, t1, t1, t0, ctx->mem_idx);
1024 gen_store_gpr(t1, rt);
1028 save_cpu_state(ctx, 1);
1029 gen_load_gpr(t1, rt);
1030 gen_helper_2i(sdr, t1, t0, ctx->mem_idx);
1035 save_cpu_state(ctx, 0);
1036 op_ldst_lw(t0, t0, ctx);
1037 gen_store_gpr(t0, rt);
1041 save_cpu_state(ctx, 0);
1042 gen_load_gpr(t1, rt);
1043 op_ldst_sw(t1, t0, ctx);
1047 save_cpu_state(ctx, 0);
1048 op_ldst_lh(t0, t0, ctx);
1049 gen_store_gpr(t0, rt);
1053 save_cpu_state(ctx, 0);
1054 gen_load_gpr(t1, rt);
1055 op_ldst_sh(t1, t0, ctx);
1059 save_cpu_state(ctx, 0);
1060 op_ldst_lhu(t0, t0, ctx);
1061 gen_store_gpr(t0, rt);
1065 save_cpu_state(ctx, 0);
1066 op_ldst_lb(t0, t0, ctx);
1067 gen_store_gpr(t0, rt);
1071 save_cpu_state(ctx, 0);
1072 gen_load_gpr(t1, rt);
1073 op_ldst_sb(t1, t0, ctx);
1077 save_cpu_state(ctx, 0);
1078 op_ldst_lbu(t0, t0, ctx);
1079 gen_store_gpr(t0, rt);
1083 save_cpu_state(ctx, 1);
1084 gen_load_gpr(t1, rt);
1085 gen_helper_3i(lwl, t1, t1, t0, ctx->mem_idx);
1086 gen_store_gpr(t1, rt);
1090 save_cpu_state(ctx, 1);
1091 gen_load_gpr(t1, rt);
1092 gen_helper_2i(swl, t1, t0, ctx->mem_idx);
1096 save_cpu_state(ctx, 1);
1097 gen_load_gpr(t1, rt);
1098 gen_helper_3i(lwr, t1, t1, t0, ctx->mem_idx);
1099 gen_store_gpr(t1, rt);
1103 save_cpu_state(ctx, 1);
1104 gen_load_gpr(t1, rt);
1105 gen_helper_2i(swr, t1, t0, ctx->mem_idx);
1109 save_cpu_state(ctx, 0);
1110 op_ldst_ll(t0, t0, ctx);
1111 gen_store_gpr(t0, rt);
1115 MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
1120 /* Store conditional */
1121 static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
1122 int base, int16_t offset)
1124 const char *opn = "st_cond";
1127 t0 = tcg_temp_local_new();
1130 tcg_gen_movi_tl(t0, offset);
1131 } else if (offset == 0) {
1132 gen_load_gpr(t0, base);
1134 tcg_gen_movi_tl(t0, offset);
1135 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
1137 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
1140 t1 = tcg_temp_local_new();
1141 gen_load_gpr(t1, rt);
1143 #if defined(TARGET_MIPS64)
1145 save_cpu_state(ctx, 0);
1146 op_ldst_scd(t0, t1, t0, ctx);
1151 save_cpu_state(ctx, 0);
1152 op_ldst_sc(t0, t1, t0, ctx);
1156 MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
1158 gen_store_gpr(t0, rt);
1162 /* Floating point load and store */
1163 static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
1164 int base, int16_t offset)
1166 const char *opn = "flt_ldst";
1167 TCGv t0 = tcg_temp_new();
1170 tcg_gen_movi_tl(t0, offset);
1171 } else if (offset == 0) {
1172 gen_load_gpr(t0, base);
1174 tcg_gen_movi_tl(t0, offset);
1175 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
1177 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
1182 TCGv_i32 fp0 = tcg_temp_new_i32();
1184 tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
1185 tcg_gen_trunc_tl_i32(fp0, t0);
1186 gen_store_fpr32(fp0, ft);
1187 tcg_temp_free_i32(fp0);
1193 TCGv_i32 fp0 = tcg_temp_new_i32();
1194 TCGv t1 = tcg_temp_new();
1196 gen_load_fpr32(fp0, ft);
1197 tcg_gen_extu_i32_tl(t1, fp0);
1198 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
1200 tcg_temp_free_i32(fp0);
1206 TCGv_i64 fp0 = tcg_temp_new_i64();
1208 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
1209 gen_store_fpr64(ctx, fp0, ft);
1210 tcg_temp_free_i64(fp0);
1216 TCGv_i64 fp0 = tcg_temp_new_i64();
1218 gen_load_fpr64(ctx, fp0, ft);
1219 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
1220 tcg_temp_free_i64(fp0);
1226 generate_exception(ctx, EXCP_RI);
1229 MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
1234 /* Arithmetic with immediate operand */
1235 static void gen_arith_imm (CPUState *env, DisasContext *ctx, uint32_t opc,
1236 int rt, int rs, int16_t imm)
1238 target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
1239 const char *opn = "imm arith";
1241 if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
1242 /* If no destination, treat it as a NOP.
1243 For addi, we must generate the overflow exception when needed. */
1250 TCGv t0 = tcg_temp_local_new();
1251 TCGv t1 = tcg_temp_new();
1252 TCGv t2 = tcg_temp_new();
1253 int l1 = gen_new_label();
1255 gen_load_gpr(t1, rs);
1256 tcg_gen_addi_tl(t0, t1, uimm);
1257 tcg_gen_ext32s_tl(t0, t0);
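    /* Signed overflow check: t1 = ~(rs ^ imm) has its sign bit set when the
       operands agree in sign, t2 = result ^ imm has it set when the result's
       sign differs from theirs, so the sign bit of t1 & t2 flags an overflow. */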
1259 tcg_gen_xori_tl(t1, t1, ~uimm);
1260 tcg_gen_xori_tl(t2, t0, uimm);
1261 tcg_gen_and_tl(t1, t1, t2);
1263 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1265 /* operands of same sign, result different sign */
1266 generate_exception(ctx, EXCP_OVERFLOW);
1268 tcg_gen_ext32s_tl(t0, t0);
1269 gen_store_gpr(t0, rt);
1276 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1277 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
1279 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1283 #if defined(TARGET_MIPS64)
1286 TCGv t0 = tcg_temp_local_new();
1287 TCGv t1 = tcg_temp_new();
1288 TCGv t2 = tcg_temp_new();
1289 int l1 = gen_new_label();
1291 gen_load_gpr(t1, rs);
1292 tcg_gen_addi_tl(t0, t1, uimm);
1294 tcg_gen_xori_tl(t1, t1, ~uimm);
1295 tcg_gen_xori_tl(t2, t0, uimm);
1296 tcg_gen_and_tl(t1, t1, t2);
1298 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1300 /* operands of same sign, result different sign */
1301 generate_exception(ctx, EXCP_OVERFLOW);
1303 gen_store_gpr(t0, rt);
1310 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1312 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1318 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1321 /* Logic with immediate operand */
1322 static void gen_logic_imm (CPUState *env, uint32_t opc, int rt, int rs, int16_t imm)
1325 const char *opn = "imm logic";
1328 /* If no destination, treat it as a NOP. */
1332 uimm = (uint16_t)imm;
1335 if (likely(rs != 0))
1336 tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1338 tcg_gen_movi_tl(cpu_gpr[rt], 0);
1343 tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1345 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1349 if (likely(rs != 0))
1350 tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1352 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1356 tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);
1360 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1363 /* Set on less than with immediate operand */
1364 static void gen_slt_imm (CPUState *env, uint32_t opc, int rt, int rs, int16_t imm)
1366 target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
1367 const char *opn = "imm arith";
1371 /* If no destination, treat it as a NOP. */
1375 t0 = tcg_temp_new();
1376 gen_load_gpr(t0, rs);
1379 gen_op_lti(cpu_gpr[rt], t0, uimm);
1383 gen_op_ltiu(cpu_gpr[rt], t0, uimm);
1387 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1391 /* Shifts with immediate operand */
1392 static void gen_shift_imm(CPUState *env, DisasContext *ctx, uint32_t opc,
1393 int rt, int rs, int16_t imm)
1395 target_ulong uimm = ((uint16_t)imm) & 0x1f;
1396 const char *opn = "imm shift";
1400 /* If no destination, treat it as a NOP. */
1405 t0 = tcg_temp_new();
1406 gen_load_gpr(t0, rs);
1409 tcg_gen_shli_tl(t0, t0, uimm);
1410 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
1414 tcg_gen_ext32s_tl(t0, t0);
1415 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
1419 switch ((ctx->opcode >> 21) & 0x1f) {
1422 tcg_gen_ext32u_tl(t0, t0);
1423 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1425 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
1430 /* rotr is decoded as srl on non-R2 CPUs */
1431 if (env->insn_flags & ISA_MIPS32R2) {
1433 TCGv_i32 t1 = tcg_temp_new_i32();
1435 tcg_gen_trunc_tl_i32(t1, t0);
1436 tcg_gen_rotri_i32(t1, t1, uimm);
1437 tcg_gen_ext_i32_tl(cpu_gpr[rt], t1);
1438 tcg_temp_free_i32(t1);
1443 tcg_gen_ext32u_tl(t0, t0);
1444 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1446 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
1452 MIPS_INVAL("invalid srl flag");
1453 generate_exception(ctx, EXCP_RI);
1457 #if defined(TARGET_MIPS64)
1459 tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm);
1463 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
1467 switch ((ctx->opcode >> 21) & 0x1f) {
1469 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1473 /* drotr is decoded as dsrl on non-R2 CPUs */
1474 if (env->insn_flags & ISA_MIPS32R2) {
1476 tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm);
1480 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1485 MIPS_INVAL("invalid dsrl flag");
1486 generate_exception(ctx, EXCP_RI);
1491 tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm + 32);
1495 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm + 32);
1499 switch ((ctx->opcode >> 21) & 0x1f) {
1501 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
1505 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
1506 if (env->insn_flags & ISA_MIPS32R2) {
1507 tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm + 32);
1510 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
1515 MIPS_INVAL("invalid dsrl32 flag");
1516 generate_exception(ctx, EXCP_RI);
1522 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1527 static void gen_arith (CPUState *env, DisasContext *ctx, uint32_t opc,
1528 int rd, int rs, int rt)
1530 const char *opn = "arith";
1532 if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
1533 && opc != OPC_DADD && opc != OPC_DSUB) {
1534 /* If no destination, treat it as a NOP.
1535 For add & sub, we must generate the overflow exception when needed. */
1543 TCGv t0 = tcg_temp_local_new();
1544 TCGv t1 = tcg_temp_new();
1545 TCGv t2 = tcg_temp_new();
1546 int l1 = gen_new_label();
1548 gen_load_gpr(t1, rs);
1549 gen_load_gpr(t2, rt);
1550 tcg_gen_add_tl(t0, t1, t2);
1551 tcg_gen_ext32s_tl(t0, t0);
1552 tcg_gen_xor_tl(t1, t1, t2);
1553 tcg_gen_not_tl(t1, t1);
1554 tcg_gen_xor_tl(t2, t0, t2);
1555 tcg_gen_and_tl(t1, t1, t2);
1557 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1559 /* operands of same sign, result different sign */
1560 generate_exception(ctx, EXCP_OVERFLOW);
1562 gen_store_gpr(t0, rd);
1568 if (rs != 0 && rt != 0) {
1569 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1570 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1571 } else if (rs == 0 && rt != 0) {
1572 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1573 } else if (rs != 0 && rt == 0) {
1574 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1576 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1582 TCGv t0 = tcg_temp_local_new();
1583 TCGv t1 = tcg_temp_new();
1584 TCGv t2 = tcg_temp_new();
1585 int l1 = gen_new_label();
1587 gen_load_gpr(t1, rs);
1588 gen_load_gpr(t2, rt);
1589 tcg_gen_sub_tl(t0, t1, t2);
1590 tcg_gen_ext32s_tl(t0, t0);
1591 tcg_gen_xor_tl(t2, t1, t2);
1592 tcg_gen_xor_tl(t1, t0, t1);
1593 tcg_gen_and_tl(t1, t1, t2);
1595 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1597 /* operands of same sign, result different sign */
1598 generate_exception(ctx, EXCP_OVERFLOW);
1600 gen_store_gpr(t0, rd);
1606 if (rs != 0 && rt != 0) {
1607 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1608 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1609 } else if (rs == 0 && rt != 0) {
1610 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
1611 } else if (rs != 0 && rt == 0) {
1612 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1614 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1618 #if defined(TARGET_MIPS64)
1621 TCGv t0 = tcg_temp_local_new();
1622 TCGv t1 = tcg_temp_new();
1623 TCGv t2 = tcg_temp_new();
1624 int l1 = gen_new_label();
1626 gen_load_gpr(t1, rs);
1627 gen_load_gpr(t2, rt);
1628 tcg_gen_add_tl(t0, t1, t2);
1629 tcg_gen_xor_tl(t1, t1, t2);
1630 tcg_gen_not_tl(t1, t1);
1631 tcg_gen_xor_tl(t2, t0, t2);
1632 tcg_gen_and_tl(t1, t1, t2);
1634 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1636 /* operands of same sign, result different sign */
1637 generate_exception(ctx, EXCP_OVERFLOW);
1639 gen_store_gpr(t0, rd);
1645 if (rs != 0 && rt != 0) {
1646 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1647 } else if (rs == 0 && rt != 0) {
1648 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1649 } else if (rs != 0 && rt == 0) {
1650 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1652 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1658 TCGv t0 = tcg_temp_local_new();
1659 TCGv t1 = tcg_temp_new();
1660 TCGv t2 = tcg_temp_new();
1661 int l1 = gen_new_label();
1663 gen_load_gpr(t1, rs);
1664 gen_load_gpr(t2, rt);
1665 tcg_gen_sub_tl(t0, t1, t2);
1666 tcg_gen_xor_tl(t2, t1, t2);
1667 tcg_gen_xor_tl(t1, t0, t1);
1668 tcg_gen_and_tl(t1, t1, t2);
1670 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1672 /* operands of same sign, result different sign */
1673 generate_exception(ctx, EXCP_OVERFLOW);
1675 gen_store_gpr(t0, rd);
1681 if (rs != 0 && rt != 0) {
1682 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1683 } else if (rs == 0 && rt != 0) {
1684 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
1685 } else if (rs != 0 && rt == 0) {
1686 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1688 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1694 if (likely(rs != 0 && rt != 0)) {
1695 tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1696 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1698 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1703 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1706 /* Conditional move */
1707 static void gen_cond_move (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1709 const char *opn = "cond move";
1713 /* If no destination, treat it as a NOP. */
1719 l1 = gen_new_label();
1722 if (likely(rt != 0))
1723 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rt], 0, l1);
1729 if (likely(rt != 0))
1730 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rt], 0, l1);
1735 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1737 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1740 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1744 static void gen_logic (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1746 const char *opn = "logic";
1749 /* If no destination, treat it as a NOP. */
1756 if (likely(rs != 0 && rt != 0)) {
1757 tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1759 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1764 if (rs != 0 && rt != 0) {
1765 tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1766 } else if (rs == 0 && rt != 0) {
1767 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
1768 } else if (rs != 0 && rt == 0) {
1769 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
1771 tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));
1776 if (likely(rs != 0 && rt != 0)) {
1777 tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1778 } else if (rs == 0 && rt != 0) {
1779 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1780 } else if (rs != 0 && rt == 0) {
1781 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1783 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1788 if (likely(rs != 0 && rt != 0)) {
1789 tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1790 } else if (rs == 0 && rt != 0) {
1791 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1792 } else if (rs != 0 && rt == 0) {
1793 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1795 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1800 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1803 /* Set on less than */
1804 static void gen_slt (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1806 const char *opn = "slt";
1810 /* If no destination, treat it as a NOP. */
1815 t0 = tcg_temp_new();
1816 t1 = tcg_temp_new();
1817 gen_load_gpr(t0, rs);
1818 gen_load_gpr(t1, rt);
1821 gen_op_lt(cpu_gpr[rd], t0, t1);
1825 gen_op_ltu(cpu_gpr[rd], t0, t1);
1829 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1835 static void gen_shift (CPUState *env, DisasContext *ctx, uint32_t opc,
1836 int rd, int rs, int rt)
1838 const char *opn = "shifts";
1842 /* If no destination, treat it as a NOP. */
1848 t0 = tcg_temp_new();
1849 t1 = tcg_temp_new();
1850 gen_load_gpr(t0, rs);
1851 gen_load_gpr(t1, rt);
1854 tcg_gen_andi_tl(t0, t0, 0x1f);
1855 tcg_gen_shl_tl(t0, t1, t0);
1856 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1860 tcg_gen_ext32s_tl(t1, t1);
1861 tcg_gen_andi_tl(t0, t0, 0x1f);
1862 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
1866 switch ((ctx->opcode >> 6) & 0x1f) {
1868 tcg_gen_ext32u_tl(t1, t1);
1869 tcg_gen_andi_tl(t0, t0, 0x1f);
1870 tcg_gen_shr_tl(t0, t1, t0);
1871 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1875 /* rotrv is decoded as srlv on non-R2 CPUs */
1876 if (env->insn_flags & ISA_MIPS32R2) {
1877 TCGv_i32 t2 = tcg_temp_new_i32();
1878 TCGv_i32 t3 = tcg_temp_new_i32();
1880 tcg_gen_trunc_tl_i32(t2, t0);
1881 tcg_gen_trunc_tl_i32(t3, t1);
1882 tcg_gen_andi_i32(t2, t2, 0x1f);
1883 tcg_gen_rotr_i32(t2, t3, t2);
1884 tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
1885 tcg_temp_free_i32(t2);
1886 tcg_temp_free_i32(t3);
1889 tcg_gen_ext32u_tl(t1, t1);
1890 tcg_gen_andi_tl(t0, t0, 0x1f);
1891 tcg_gen_shr_tl(t0, t1, t0);
1892 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1897 MIPS_INVAL("invalid srlv flag");
1898 generate_exception(ctx, EXCP_RI);
1902 #if defined(TARGET_MIPS64)
1904 tcg_gen_andi_tl(t0, t0, 0x3f);
1905 tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);
1909 tcg_gen_andi_tl(t0, t0, 0x3f);
1910 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
1914 switch ((ctx->opcode >> 6) & 0x1f) {
1916 tcg_gen_andi_tl(t0, t0, 0x3f);
1917 tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);
1921 /* drotrv is decoded as dsrlv on non-R2 CPUs */
1922 if (env->insn_flags & ISA_MIPS32R2) {
1923 tcg_gen_andi_tl(t0, t0, 0x3f);
1924 tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);
1927 tcg_gen_andi_tl(t0, t0, 0x3f);
1928 tcg_gen_shr_tl(t0, t1, t0);
1933 MIPS_INVAL("invalid dsrlv flag");
1934 generate_exception(ctx, EXCP_RI);
1940 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1945 /* Arithmetic on HI/LO registers */
1946 static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
1948 const char *opn = "hilo";
1950 if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
1957 tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[0]);
1961 tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[0]);
1966 tcg_gen_mov_tl(cpu_HI[0], cpu_gpr[reg]);
1968 tcg_gen_movi_tl(cpu_HI[0], 0);
1973 tcg_gen_mov_tl(cpu_LO[0], cpu_gpr[reg]);
1975 tcg_gen_movi_tl(cpu_LO[0], 0);
1979 MIPS_DEBUG("%s %s", opn, regnames[reg]);
1982 static void gen_muldiv (DisasContext *ctx, uint32_t opc,
1985 const char *opn = "mul/div";
1991 #if defined(TARGET_MIPS64)
1995 t0 = tcg_temp_local_new();
1996 t1 = tcg_temp_local_new();
1999 t0 = tcg_temp_new();
2000 t1 = tcg_temp_new();
2004 gen_load_gpr(t0, rs);
2005 gen_load_gpr(t1, rt);
2009 int l1 = gen_new_label();
2010 int l2 = gen_new_label();
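            /* The branches below skip the host division for two special
               cases: a zero divisor (UNPREDICTABLE on MIPS; LO and HI are
               simply left unchanged) and INT_MIN / -1, which could trap on
               the host and is instead given LO = the dividend and HI = 0. */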
2012 tcg_gen_ext32s_tl(t0, t0);
2013 tcg_gen_ext32s_tl(t1, t1);
2014 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2015 tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
2016 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
2018 tcg_gen_mov_tl(cpu_LO[0], t0);
2019 tcg_gen_movi_tl(cpu_HI[0], 0);
2022 tcg_gen_div_tl(cpu_LO[0], t0, t1);
2023 tcg_gen_rem_tl(cpu_HI[0], t0, t1);
2024 tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
2025 tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
2032 int l1 = gen_new_label();
2034 tcg_gen_ext32u_tl(t0, t0);
2035 tcg_gen_ext32u_tl(t1, t1);
2036 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2037 tcg_gen_divu_tl(cpu_LO[0], t0, t1);
2038 tcg_gen_remu_tl(cpu_HI[0], t0, t1);
2039 tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
2040 tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
2047 TCGv_i64 t2 = tcg_temp_new_i64();
2048 TCGv_i64 t3 = tcg_temp_new_i64();
2050 tcg_gen_ext_tl_i64(t2, t0);
2051 tcg_gen_ext_tl_i64(t3, t1);
2052 tcg_gen_mul_i64(t2, t2, t3);
2053 tcg_temp_free_i64(t3);
2054 tcg_gen_trunc_i64_tl(t0, t2);
2055 tcg_gen_shri_i64(t2, t2, 32);
2056 tcg_gen_trunc_i64_tl(t1, t2);
2057 tcg_temp_free_i64(t2);
2058 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2059 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2065 TCGv_i64 t2 = tcg_temp_new_i64();
2066 TCGv_i64 t3 = tcg_temp_new_i64();
2068 tcg_gen_ext32u_tl(t0, t0);
2069 tcg_gen_ext32u_tl(t1, t1);
2070 tcg_gen_extu_tl_i64(t2, t0);
2071 tcg_gen_extu_tl_i64(t3, t1);
2072 tcg_gen_mul_i64(t2, t2, t3);
2073 tcg_temp_free_i64(t3);
2074 tcg_gen_trunc_i64_tl(t0, t2);
2075 tcg_gen_shri_i64(t2, t2, 32);
2076 tcg_gen_trunc_i64_tl(t1, t2);
2077 tcg_temp_free_i64(t2);
2078 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2079 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2083 #if defined(TARGET_MIPS64)
2086 int l1 = gen_new_label();
2087 int l2 = gen_new_label();
2089 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2090 tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
2091 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
2092 tcg_gen_mov_tl(cpu_LO[0], t0);
2093 tcg_gen_movi_tl(cpu_HI[0], 0);
2096 tcg_gen_div_i64(cpu_LO[0], t0, t1);
2097 tcg_gen_rem_i64(cpu_HI[0], t0, t1);
2104 int l1 = gen_new_label();
2106 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2107 tcg_gen_divu_i64(cpu_LO[0], t0, t1);
2108 tcg_gen_remu_i64(cpu_HI[0], t0, t1);
2114 gen_helper_dmult(t0, t1);
2118 gen_helper_dmultu(t0, t1);
2124 TCGv_i64 t2 = tcg_temp_new_i64();
2125 TCGv_i64 t3 = tcg_temp_new_i64();
2127 tcg_gen_ext_tl_i64(t2, t0);
2128 tcg_gen_ext_tl_i64(t3, t1);
2129 tcg_gen_mul_i64(t2, t2, t3);
2130 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2131 tcg_gen_add_i64(t2, t2, t3);
2132 tcg_temp_free_i64(t3);
2133 tcg_gen_trunc_i64_tl(t0, t2);
2134 tcg_gen_shri_i64(t2, t2, 32);
2135 tcg_gen_trunc_i64_tl(t1, t2);
2136 tcg_temp_free_i64(t2);
2137 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2138 tcg_gen_ext32s_tl(cpu_LO[1], t1);
2144 TCGv_i64 t2 = tcg_temp_new_i64();
2145 TCGv_i64 t3 = tcg_temp_new_i64();
2147 tcg_gen_ext32u_tl(t0, t0);
2148 tcg_gen_ext32u_tl(t1, t1);
2149 tcg_gen_extu_tl_i64(t2, t0);
2150 tcg_gen_extu_tl_i64(t3, t1);
2151 tcg_gen_mul_i64(t2, t2, t3);
2152 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2153 tcg_gen_add_i64(t2, t2, t3);
2154 tcg_temp_free_i64(t3);
2155 tcg_gen_trunc_i64_tl(t0, t2);
2156 tcg_gen_shri_i64(t2, t2, 32);
2157 tcg_gen_trunc_i64_tl(t1, t2);
2158 tcg_temp_free_i64(t2);
2159 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2160 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2166 TCGv_i64 t2 = tcg_temp_new_i64();
2167 TCGv_i64 t3 = tcg_temp_new_i64();
2169 tcg_gen_ext_tl_i64(t2, t0);
2170 tcg_gen_ext_tl_i64(t3, t1);
2171 tcg_gen_mul_i64(t2, t2, t3);
2172 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2173 tcg_gen_sub_i64(t2, t2, t3);
2174 tcg_temp_free_i64(t3);
2175 tcg_gen_trunc_i64_tl(t0, t2);
2176 tcg_gen_shri_i64(t2, t2, 32);
2177 tcg_gen_trunc_i64_tl(t1, t2);
2178 tcg_temp_free_i64(t2);
2179 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2180 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2186 TCGv_i64 t2 = tcg_temp_new_i64();
2187 TCGv_i64 t3 = tcg_temp_new_i64();
2189 tcg_gen_ext32u_tl(t0, t0);
2190 tcg_gen_ext32u_tl(t1, t1);
2191 tcg_gen_extu_tl_i64(t2, t0);
2192 tcg_gen_extu_tl_i64(t3, t1);
2193 tcg_gen_mul_i64(t2, t2, t3);
2194 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2195 tcg_gen_sub_i64(t2, t2, t3);
2196 tcg_temp_free_i64(t3);
2197 tcg_gen_trunc_i64_tl(t0, t2);
2198 tcg_gen_shri_i64(t2, t2, 32);
2199 tcg_gen_trunc_i64_tl(t1, t2);
2200 tcg_temp_free_i64(t2);
2201 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2202 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2208 generate_exception(ctx, EXCP_RI);
2211 MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
2217 static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
2218 int rd, int rs, int rt)
2220 const char *opn = "mul vr54xx";
2221 TCGv t0 = tcg_temp_new();
2222 TCGv t1 = tcg_temp_new();
2224 gen_load_gpr(t0, rs);
2225 gen_load_gpr(t1, rt);
2228 case OPC_VR54XX_MULS:
2229 gen_helper_muls(t0, t0, t1);
2232 case OPC_VR54XX_MULSU:
2233 gen_helper_mulsu(t0, t0, t1);
2236 case OPC_VR54XX_MACC:
2237 gen_helper_macc(t0, t0, t1);
2240 case OPC_VR54XX_MACCU:
2241 gen_helper_maccu(t0, t0, t1);
2244 case OPC_VR54XX_MSAC:
2245 gen_helper_msac(t0, t0, t1);
2248 case OPC_VR54XX_MSACU:
2249 gen_helper_msacu(t0, t0, t1);
2252 case OPC_VR54XX_MULHI:
2253 gen_helper_mulhi(t0, t0, t1);
2256 case OPC_VR54XX_MULHIU:
2257 gen_helper_mulhiu(t0, t0, t1);
2260 case OPC_VR54XX_MULSHI:
2261 gen_helper_mulshi(t0, t0, t1);
2264 case OPC_VR54XX_MULSHIU:
2265 gen_helper_mulshiu(t0, t0, t1);
2268 case OPC_VR54XX_MACCHI:
2269 gen_helper_macchi(t0, t0, t1);
2272 case OPC_VR54XX_MACCHIU:
2273 gen_helper_macchiu(t0, t0, t1);
2276 case OPC_VR54XX_MSACHI:
2277 gen_helper_msachi(t0, t0, t1);
2280 case OPC_VR54XX_MSACHIU:
2281 gen_helper_msachiu(t0, t0, t1);
2285 MIPS_INVAL("mul vr54xx");
2286 generate_exception(ctx, EXCP_RI);
2289 gen_store_gpr(t0, rd);
2290 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
2297 static void gen_cl (DisasContext *ctx, uint32_t opc,
2300 const char *opn = "CLx";
2308 t0 = tcg_temp_new();
2309 gen_load_gpr(t0, rs);
2312 gen_helper_clo(cpu_gpr[rd], t0);
2316 gen_helper_clz(cpu_gpr[rd], t0);
2319 #if defined(TARGET_MIPS64)
2321 gen_helper_dclo(cpu_gpr[rd], t0);
2325 gen_helper_dclz(cpu_gpr[rd], t0);
2330 MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);
2335 static void gen_trap (DisasContext *ctx, uint32_t opc,
2336 int rs, int rt, int16_t imm)
2339 TCGv t0 = tcg_temp_new();
2340 TCGv t1 = tcg_temp_new();
2343 /* Load needed operands */
2351 /* Compare two registers */
2353 gen_load_gpr(t0, rs);
2354 gen_load_gpr(t1, rt);
2364 /* Compare register to immediate */
2365 if (rs != 0 || imm != 0) {
2366 gen_load_gpr(t0, rs);
2367 tcg_gen_movi_tl(t1, (int32_t)imm);
2374 case OPC_TEQ: /* rs == rs */
2375 case OPC_TEQI: /* r0 == 0 */
2376 case OPC_TGE: /* rs >= rs */
2377 case OPC_TGEI: /* r0 >= 0 */
2378 case OPC_TGEU: /* rs >= rs unsigned */
2379 case OPC_TGEIU: /* r0 >= 0 unsigned */
2381 generate_exception(ctx, EXCP_TRAP);
2383 case OPC_TLT: /* rs < rs */
2384 case OPC_TLTI: /* r0 < 0 */
2385 case OPC_TLTU: /* rs < rs unsigned */
2386 case OPC_TLTIU: /* r0 < 0 unsigned */
2387 case OPC_TNE: /* rs != rs */
2388 case OPC_TNEI: /* r0 != 0 */
2389 /* Never trap: treat as NOP. */
2393 int l1 = gen_new_label();
2398 tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
2402 tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
2406 tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
2410 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
2414 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
2418 tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
2421 generate_exception(ctx, EXCP_TRAP);
2428 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
2430 TranslationBlock *tb;
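    /* If the destination lies in the same guest page as this TB, emit a
       goto_tb/exit_tb pair so the two TBs can be chained directly;
       otherwise control must go back to the main loop so the mapping can
       be revalidated. */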
2432 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
2435 tcg_gen_exit_tb((long)tb + n);
2442 /* Branches (before delay slot) */
2443 static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
2444 int rs, int rt, int32_t offset)
2446 target_ulong btgt = -1;
2448 int bcond_compute = 0;
2449 TCGv t0 = tcg_temp_new();
2450 TCGv t1 = tcg_temp_new();
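    /* Branches are not resolved here: this function only evaluates the
       condition into 'bcond' (when needed), records the target in
       ctx->btarget and the branch kind in ctx->hflags
       (MIPS_HFLAG_B/BC/BL/BR); the delay slot instruction is translated
       next and the branch is completed by the caller.  Link registers are
       written immediately with PC + 8, the address following the delay
       slot. */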
2452 if (ctx->hflags & MIPS_HFLAG_BMASK) {
2453 #ifdef MIPS_DEBUG_DISAS
2454 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx "\n", ctx->pc);
2456 generate_exception(ctx, EXCP_RI);
2460 /* Load needed operands */
2466 /* Compare two registers */
2468 gen_load_gpr(t0, rs);
2469 gen_load_gpr(t1, rt);
2472 btgt = ctx->pc + 4 + offset;
2486 /* Compare to zero */
2488 gen_load_gpr(t0, rs);
2491 btgt = ctx->pc + 4 + offset;
2495 /* Jump to immediate */
2496 btgt = ((ctx->pc + 4) & (int32_t)0xF0000000) | (uint32_t)offset;
2500 /* Jump to register */
2501 if (offset != 0 && offset != 16) {
2502 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
2503 others are reserved. */
2504 MIPS_INVAL("jump hint");
2505 generate_exception(ctx, EXCP_RI);
2508 gen_load_gpr(btarget, rs);
2511 MIPS_INVAL("branch/jump");
2512 generate_exception(ctx, EXCP_RI);
2515 if (bcond_compute == 0) {
2516 /* No condition to be computed */
2518 case OPC_BEQ: /* rx == rx */
2519 case OPC_BEQL: /* rx == rx likely */
2520 case OPC_BGEZ: /* 0 >= 0 */
2521 case OPC_BGEZL: /* 0 >= 0 likely */
2522 case OPC_BLEZ: /* 0 <= 0 */
2523 case OPC_BLEZL: /* 0 <= 0 likely */
2525 ctx->hflags |= MIPS_HFLAG_B;
2526 MIPS_DEBUG("balways");
2528 case OPC_BGEZAL: /* 0 >= 0 */
2529 case OPC_BGEZALL: /* 0 >= 0 likely */
2530 /* Always take and link */
2532 ctx->hflags |= MIPS_HFLAG_B;
2533 MIPS_DEBUG("balways and link");
2535 case OPC_BNE: /* rx != rx */
2536 case OPC_BGTZ: /* 0 > 0 */
2537 case OPC_BLTZ: /* 0 < 0 */
2539 MIPS_DEBUG("bnever (NOP)");
2541 case OPC_BLTZAL: /* 0 < 0 */
2542 tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
2543 MIPS_DEBUG("bnever and link");
2545 case OPC_BLTZALL: /* 0 < 0 likely */
2546 tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
2547 /* Skip the instruction in the delay slot */
2548 MIPS_DEBUG("bnever, link and skip");
2551 case OPC_BNEL: /* rx != rx likely */
2552 case OPC_BGTZL: /* 0 > 0 likely */
2553 case OPC_BLTZL: /* 0 < 0 likely */
2554 /* Skip the instruction in the delay slot */
2555 MIPS_DEBUG("bnever and skip");
2559 ctx->hflags |= MIPS_HFLAG_B;
2560 MIPS_DEBUG("j " TARGET_FMT_lx, btgt);
2564 ctx->hflags |= MIPS_HFLAG_B;
2565 MIPS_DEBUG("jal " TARGET_FMT_lx, btgt);
2568 ctx->hflags |= MIPS_HFLAG_BR;
2569 MIPS_DEBUG("jr %s", regnames[rs]);
2573 ctx->hflags |= MIPS_HFLAG_BR;
2574 MIPS_DEBUG("jalr %s, %s", regnames[rt], regnames[rs]);
2577 MIPS_INVAL("branch/jump");
2578 generate_exception(ctx, EXCP_RI);
2584 gen_op_eq(bcond, t0, t1);
2585 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx,
2586 regnames[rs], regnames[rt], btgt);
2589 gen_op_eq(bcond, t0, t1);
2590 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx,
2591 regnames[rs], regnames[rt], btgt);
2594 gen_op_ne(bcond, t0, t1);
2595 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx,
2596 regnames[rs], regnames[rt], btgt);
2599 gen_op_ne(bcond, t0, t1);
2600 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx,
2601 regnames[rs], regnames[rt], btgt);
2604 gen_op_gez(bcond, t0);
2605 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx, regnames[rs], btgt);
2608 gen_op_gez(bcond, t0);
2609 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2612 gen_op_gez(bcond, t0);
2613 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx, regnames[rs], btgt);
2617 gen_op_gez(bcond, t0);
2619 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx, regnames[rs], btgt);
2622 gen_op_gtz(bcond, t0);
2623 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx, regnames[rs], btgt);
2626 gen_op_gtz(bcond, t0);
2627 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2630 gen_op_lez(bcond, t0);
2631 MIPS_DEBUG("blez %s, " TARGET_FMT_lx, regnames[rs], btgt);
2634 gen_op_lez(bcond, t0);
2635 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2638 gen_op_ltz(bcond, t0);
2639 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx, regnames[rs], btgt);
2642 gen_op_ltz(bcond, t0);
2643 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2646 gen_op_ltz(bcond, t0);
2648 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx, regnames[rs], btgt);
2650 ctx->hflags |= MIPS_HFLAG_BC;
2653 gen_op_ltz(bcond, t0);
2655 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx, regnames[rs], btgt);
2657 ctx->hflags |= MIPS_HFLAG_BL;
2660 MIPS_INVAL("conditional branch/jump");
2661 generate_exception(ctx, EXCP_RI);
2665 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx,
2666 blink, ctx->hflags, btgt);
2668 ctx->btarget = btgt;
2670 tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + 8);
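/* The link register gets PC + 8: the return address is the
   instruction after the delay slot, not the delay slot itself. */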
2678 /* special3 bitfield operations */
2679 static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
2680 int rs, int lsb, int msb)
2682 TCGv t0 = tcg_temp_new();
2683 TCGv t1 = tcg_temp_new();
2686 gen_load_gpr(t1, rs);
2691 tcg_gen_shri_tl(t0, t1, lsb);
2693 tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);
2695 tcg_gen_ext32s_tl(t0, t0);
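/* EXT: shift the source down by lsb and keep the low msb+1 bits
   (the msb field encodes size-1), then sign-extend the 32-bit
   result, which is a no-op on 32-bit targets. */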
2698 #if defined(TARGET_MIPS64)
2700 tcg_gen_shri_tl(t0, t1, lsb);
2702 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
2706 tcg_gen_shri_tl(t0, t1, lsb + 32);
2707 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
2710 tcg_gen_shri_tl(t0, t1, lsb);
2711 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
2717 mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
2718 gen_load_gpr(t0, rt);
2719 tcg_gen_andi_tl(t0, t0, ~mask);
2720 tcg_gen_shli_tl(t1, t1, lsb);
2721 tcg_gen_andi_tl(t1, t1, mask);
2722 tcg_gen_or_tl(t0, t0, t1);
2723 tcg_gen_ext32s_tl(t0, t0);
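/* INS: clear bits msb..lsb of rt, shift rs up into that field, mask
   it, OR the two halves together and sign-extend for MIPS64. */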
2725 #if defined(TARGET_MIPS64)
2729 mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
2730 gen_load_gpr(t0, rt);
2731 tcg_gen_andi_tl(t0, t0, ~mask);
2732 tcg_gen_shli_tl(t1, t1, lsb);
2733 tcg_gen_andi_tl(t1, t1, mask);
2734 tcg_gen_or_tl(t0, t0, t1);
2739 mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
2740 gen_load_gpr(t0, rt);
2741 tcg_gen_andi_tl(t0, t0, ~mask);
2742 tcg_gen_shli_tl(t1, t1, lsb + 32);
2743 tcg_gen_andi_tl(t1, t1, mask);
2744 tcg_gen_or_tl(t0, t0, t1);
2750 mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
2751 gen_load_gpr(t0, rt);
2752 tcg_gen_andi_tl(t0, t0, ~mask);
2753 tcg_gen_shli_tl(t1, t1, lsb);
2754 tcg_gen_andi_tl(t1, t1, mask);
2755 tcg_gen_or_tl(t0, t0, t1);
2760 MIPS_INVAL("bitops");
2761 generate_exception(ctx, EXCP_RI);
2766 gen_store_gpr(t0, rt);
2771 static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
2776 /* If no destination, treat it as a NOP. */
2781 t0 = tcg_temp_new();
2782 gen_load_gpr(t0, rt);
2786 TCGv t1 = tcg_temp_new();
2788 tcg_gen_shri_tl(t1, t0, 8);
2789 tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
2790 tcg_gen_shli_tl(t0, t0, 8);
2791 tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
2792 tcg_gen_or_tl(t0, t0, t1);
2794 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
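/* WSBH swaps the bytes within each halfword: one shifted copy picks
   the high bytes, the other the low bytes (0x00FF00FF pattern), and
   the OR recombines them, e.g. 0x11223344 -> 0x22114433. */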
2798 tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
2801 tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
2803 #if defined(TARGET_MIPS64)
2806 TCGv t1 = tcg_temp_new();
2808 tcg_gen_shri_tl(t1, t0, 8);
2809 tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
2810 tcg_gen_shli_tl(t0, t0, 8);
2811 tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
2812 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
2818 TCGv t1 = tcg_temp_new();
2820 tcg_gen_shri_tl(t1, t0, 16);
2821 tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
2822 tcg_gen_shli_tl(t0, t0, 16);
2823 tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
2824 tcg_gen_or_tl(t0, t0, t1);
2825 tcg_gen_shri_tl(t1, t0, 32);
2826 tcg_gen_shli_tl(t0, t0, 32);
2827 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
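/* DSHD: swap the halfwords inside each 32-bit word first (the same
   trick with a 16-bit stride), then exchange the two words with a
   pair of 32-bit shifts. */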
2833 MIPS_INVAL("bshfl");
2834 generate_exception(ctx, EXCP_RI);
2841 #ifndef CONFIG_USER_ONLY
2842 /* CP0 (MMU and control) */
2843 static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
2845 TCGv_i32 t0 = tcg_temp_new_i32();
2847 tcg_gen_ld_i32(t0, cpu_env, off);
2848 tcg_gen_ext_i32_tl(arg, t0);
2849 tcg_temp_free_i32(t0);
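/* 32-bit CP0 registers live as i32 fields in CPUState; the load and
   store helpers here widen/narrow them with sign extension, which
   matches the sign-extended MFC0 result required on 64-bit cores. */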
2852 static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
2854 tcg_gen_ld_tl(arg, cpu_env, off);
2855 tcg_gen_ext32s_tl(arg, arg);
2858 static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
2860 TCGv_i32 t0 = tcg_temp_new_i32();
2862 tcg_gen_trunc_tl_i32(t0, arg);
2863 tcg_gen_st_i32(t0, cpu_env, off);
2864 tcg_temp_free_i32(t0);
2867 static inline void gen_mtc0_store64 (TCGv arg, target_ulong off)
2869 tcg_gen_ext32s_tl(arg, arg);
2870 tcg_gen_st_tl(arg, cpu_env, off);
2873 static void gen_mfc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
2875 const char *rn = "invalid";
2878 check_insn(env, ctx, ISA_MIPS32);
2884 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Index));
2888 check_insn(env, ctx, ASE_MT);
2889 gen_helper_mfc0_mvpcontrol(arg);
2893 check_insn(env, ctx, ASE_MT);
2894 gen_helper_mfc0_mvpconf0(arg);
2898 check_insn(env, ctx, ASE_MT);
2899 gen_helper_mfc0_mvpconf1(arg);
2909 gen_helper_mfc0_random(arg);
2913 check_insn(env, ctx, ASE_MT);
2914 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEControl));
2918 check_insn(env, ctx, ASE_MT);
2919 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf0));
2923 check_insn(env, ctx, ASE_MT);
2924 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf1));
2928 check_insn(env, ctx, ASE_MT);
2929 gen_mfc0_load64(arg, offsetof(CPUState, CP0_YQMask));
2933 check_insn(env, ctx, ASE_MT);
2934 gen_mfc0_load64(arg, offsetof(CPUState, CP0_VPESchedule));
2938 check_insn(env, ctx, ASE_MT);
2939 gen_mfc0_load64(arg, offsetof(CPUState, CP0_VPEScheFBack));
2940 rn = "VPEScheFBack";
2943 check_insn(env, ctx, ASE_MT);
2944 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEOpt));
2954 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo0));
2955 tcg_gen_ext32s_tl(arg, arg);
2959 check_insn(env, ctx, ASE_MT);
2960 gen_helper_mfc0_tcstatus(arg);
2964 check_insn(env, ctx, ASE_MT);
2965 gen_helper_mfc0_tcbind(arg);
2969 check_insn(env, ctx, ASE_MT);
2970 gen_helper_mfc0_tcrestart(arg);
2974 check_insn(env, ctx, ASE_MT);
2975 gen_helper_mfc0_tchalt(arg);
2979 check_insn(env, ctx, ASE_MT);
2980 gen_helper_mfc0_tccontext(arg);
2984 check_insn(env, ctx, ASE_MT);
2985 gen_helper_mfc0_tcschedule(arg);
2989 check_insn(env, ctx, ASE_MT);
2990 gen_helper_mfc0_tcschefback(arg);
3000 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo1));
3001 tcg_gen_ext32s_tl(arg, arg);
3011 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_Context));
3012 tcg_gen_ext32s_tl(arg, arg);
3016 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
3017 rn = "ContextConfig";
3026 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageMask));
3030 check_insn(env, ctx, ISA_MIPS32R2);
3031 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageGrain));
3041 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Wired));
3045 check_insn(env, ctx, ISA_MIPS32R2);
3046 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf0));
3050 check_insn(env, ctx, ISA_MIPS32R2);
3051 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf1));
3055 check_insn(env, ctx, ISA_MIPS32R2);
3056 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf2));
3060 check_insn(env, ctx, ISA_MIPS32R2);
3061 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf3));
3065 check_insn(env, ctx, ISA_MIPS32R2);
3066 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf4));
3076 check_insn(env, ctx, ISA_MIPS32R2);
3077 gen_mfc0_load32(arg, offsetof(CPUState, CP0_HWREna));
3087 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_BadVAddr));
3088 tcg_gen_ext32s_tl(arg, arg);
3098 /* Mark as an IO operation because we read the time. */
3101 gen_helper_mfc0_count(arg);
3104 ctx->bstate = BS_STOP;
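/* Count advances with time, so the read is treated as an I/O access
   and, when instruction counting is in use, the TB ends here to keep
   the virtual clock consistent with the executed instruction count. */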
3108 /* 6,7 are implementation dependent */
3116 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryHi));
3117 tcg_gen_ext32s_tl(arg, arg);
3127 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Compare));
3130 /* 6,7 are implementation dependent */
3138 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Status));
3142 check_insn(env, ctx, ISA_MIPS32R2);
3143 gen_mfc0_load32(arg, offsetof(CPUState, CP0_IntCtl));
3147 check_insn(env, ctx, ISA_MIPS32R2);
3148 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSCtl));
3152 check_insn(env, ctx, ISA_MIPS32R2);
3153 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSMap));
3163 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Cause));
3173 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
3174 tcg_gen_ext32s_tl(arg, arg);
3184 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PRid));
3188 check_insn(env, ctx, ISA_MIPS32R2);
3189 gen_mfc0_load32(arg, offsetof(CPUState, CP0_EBase));
3199 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config0));
3203 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config1));
3207 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config2));
3211 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config3));
3214 /* 4,5 are reserved */
3215 /* 6,7 are implementation dependent */
3217 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config6));
3221 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config7));
3231 gen_helper_mfc0_lladdr(arg);
3241 gen_helper_1i(mfc0_watchlo, arg, sel);
3251 gen_helper_1i(mfc0_watchhi, arg, sel);
3261 #if defined(TARGET_MIPS64)
3262 check_insn(env, ctx, ISA_MIPS3);
3263 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_XContext));
3264 tcg_gen_ext32s_tl(arg, arg);
3273 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3276 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Framemask));
3284 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3285 rn = "Diagnostic"; /* implementation dependent */
3290 gen_helper_mfc0_debug(arg); /* EJTAG support */
3294 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
3295 rn = "TraceControl";
3298 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
3299 rn = "TraceControl2";
3302 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
3303 rn = "UserTraceData";
3306 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
3317 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
3318 tcg_gen_ext32s_tl(arg, arg);
3328 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Performance0));
3329 rn = "Performance0";
3332 // gen_helper_mfc0_performance1(arg);
3333 rn = "Performance1";
3336 // gen_helper_mfc0_performance2(arg);
3337 rn = "Performance2";
3340 // gen_helper_mfc0_performance3(arg);
3341 rn = "Performance3";
3344 // gen_helper_mfc0_performance4(arg);
3345 rn = "Performance4";
3348 // gen_helper_mfc0_performance5(arg);
3349 rn = "Performance5";
3352 // gen_helper_mfc0_performance6(arg);
3353 rn = "Performance6";
3356 // gen_helper_mfc0_performance7(arg);
3357 rn = "Performance7";
3364 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3370 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3383 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagLo));
3390 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataLo));
3403 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagHi));
3410 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataHi));
3420 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
3421 tcg_gen_ext32s_tl(arg, arg);
3432 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DESAVE));
3442 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
3446 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
3447 generate_exception(ctx, EXCP_RI);
3450 static void gen_mtc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
3452 const char *rn = "invalid";
3455 check_insn(env, ctx, ISA_MIPS32);
3464 gen_helper_mtc0_index(arg);
3468 check_insn(env, ctx, ASE_MT);
3469 gen_helper_mtc0_mvpcontrol(arg);
3473 check_insn(env, ctx, ASE_MT);
3478 check_insn(env, ctx, ASE_MT);
3493 check_insn(env, ctx, ASE_MT);
3494 gen_helper_mtc0_vpecontrol(arg);
3498 check_insn(env, ctx, ASE_MT);
3499 gen_helper_mtc0_vpeconf0(arg);
3503 check_insn(env, ctx, ASE_MT);
3504 gen_helper_mtc0_vpeconf1(arg);
3508 check_insn(env, ctx, ASE_MT);
3509 gen_helper_mtc0_yqmask(arg);
3513 check_insn(env, ctx, ASE_MT);
3514 gen_mtc0_store64(arg, offsetof(CPUState, CP0_VPESchedule));
3518 check_insn(env, ctx, ASE_MT);
3519 gen_mtc0_store64(arg, offsetof(CPUState, CP0_VPEScheFBack));
3520 rn = "VPEScheFBack";
3523 check_insn(env, ctx, ASE_MT);
3524 gen_helper_mtc0_vpeopt(arg);
3534 gen_helper_mtc0_entrylo0(arg);
3538 check_insn(env, ctx, ASE_MT);
3539 gen_helper_mtc0_tcstatus(arg);
3543 check_insn(env, ctx, ASE_MT);
3544 gen_helper_mtc0_tcbind(arg);
3548 check_insn(env, ctx, ASE_MT);
3549 gen_helper_mtc0_tcrestart(arg);
3553 check_insn(env, ctx, ASE_MT);
3554 gen_helper_mtc0_tchalt(arg);
3558 check_insn(env, ctx, ASE_MT);
3559 gen_helper_mtc0_tccontext(arg);
3563 check_insn(env, ctx, ASE_MT);
3564 gen_helper_mtc0_tcschedule(arg);
3568 check_insn(env, ctx, ASE_MT);
3569 gen_helper_mtc0_tcschefback(arg);
3579 gen_helper_mtc0_entrylo1(arg);
3589 gen_helper_mtc0_context(arg);
3593 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
3594 rn = "ContextConfig";
3603 gen_helper_mtc0_pagemask(arg);
3607 check_insn(env, ctx, ISA_MIPS32R2);
3608 gen_helper_mtc0_pagegrain(arg);
3618 gen_helper_mtc0_wired(arg);
3622 check_insn(env, ctx, ISA_MIPS32R2);
3623 gen_helper_mtc0_srsconf0(arg);
3627 check_insn(env, ctx, ISA_MIPS32R2);
3628 gen_helper_mtc0_srsconf1(arg);
3632 check_insn(env, ctx, ISA_MIPS32R2);
3633 gen_helper_mtc0_srsconf2(arg);
3637 check_insn(env, ctx, ISA_MIPS32R2);
3638 gen_helper_mtc0_srsconf3(arg);
3642 check_insn(env, ctx, ISA_MIPS32R2);
3643 gen_helper_mtc0_srsconf4(arg);
3653 check_insn(env, ctx, ISA_MIPS32R2);
3654 gen_helper_mtc0_hwrena(arg);
3668 gen_helper_mtc0_count(arg);
3671 /* 6,7 are implementation dependent */
3679 gen_helper_mtc0_entryhi(arg);
3689 gen_helper_mtc0_compare(arg);
3692 /* 6,7 are implementation dependent */
3700 save_cpu_state(ctx, 1);
3701 gen_helper_mtc0_status(arg);
3702 /* BS_STOP isn't good enough here, hflags may have changed. */
3703 gen_save_pc(ctx->pc + 4);
3704 ctx->bstate = BS_EXCP;
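/* A Status write may change KSU/EXL/ERL and the coprocessor-usable
   bits, i.e. the cached hflags; saving the PC and ending the TB as
   BS_EXCP forces them to be recomputed before the next insn. */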
3708 check_insn(env, ctx, ISA_MIPS32R2);
3709 gen_helper_mtc0_intctl(arg);
3710 /* Stop translation as we may have switched the execution mode */
3711 ctx->bstate = BS_STOP;
3715 check_insn(env, ctx, ISA_MIPS32R2);
3716 gen_helper_mtc0_srsctl(arg);
3717 /* Stop translation as we may have switched the execution mode */
3718 ctx->bstate = BS_STOP;
3722 check_insn(env, ctx, ISA_MIPS32R2);
3723 gen_mtc0_store32(arg, offsetof(CPUState, CP0_SRSMap));
3724 /* Stop translation as we may have switched the execution mode */
3725 ctx->bstate = BS_STOP;
3735 save_cpu_state(ctx, 1);
3736 gen_helper_mtc0_cause(arg);
3746 gen_mtc0_store64(arg, offsetof(CPUState, CP0_EPC));
3760 check_insn(env, ctx, ISA_MIPS32R2);
3761 gen_helper_mtc0_ebase(arg);
3771 gen_helper_mtc0_config0(arg);
3773 /* Stop translation as we may have switched the execution mode */
3774 ctx->bstate = BS_STOP;
3777 /* ignored, read only */
3781 gen_helper_mtc0_config2(arg);
3783 /* Stop translation as we may have switched the execution mode */
3784 ctx->bstate = BS_STOP;
3787 /* ignored, read only */
3790 /* 4,5 are reserved */
3791 /* 6,7 are implementation dependent */
3801 rn = "Invalid config selector";
3818 gen_helper_1i(mtc0_watchlo, arg, sel);
3828 gen_helper_1i(mtc0_watchhi, arg, sel);
3838 #if defined(TARGET_MIPS64)
3839 check_insn(env, ctx, ISA_MIPS3);
3840 gen_helper_mtc0_xcontext(arg);
3849 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3852 gen_helper_mtc0_framemask(arg);
3861 rn = "Diagnostic"; /* implementation dependent */
3866 gen_helper_mtc0_debug(arg); /* EJTAG support */
3867 /* BS_STOP isn't good enough here, hflags may have changed. */
3868 gen_save_pc(ctx->pc + 4);
3869 ctx->bstate = BS_EXCP;
3873 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
3874 rn = "TraceControl";
3875 /* Stop translation as we may have switched the execution mode */
3876 ctx->bstate = BS_STOP;
3879 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
3880 rn = "TraceControl2";
3881 /* Stop translation as we may have switched the execution mode */
3882 ctx->bstate = BS_STOP;
3887 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
3888 rn = "UserTraceData";
3889 /* Stop translation as we may have switched the execution mode */
3890 ctx->bstate = BS_STOP;
3893 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
3894 /* Stop translation as we may have switched the execution mode */
3895 ctx->bstate = BS_STOP;
3906 gen_mtc0_store64(arg, offsetof(CPUState, CP0_DEPC));
3916 gen_helper_mtc0_performance0(arg);
3917 rn = "Performance0";
3920 // gen_helper_mtc0_performance1(arg);
3921 rn = "Performance1";
3924 // gen_helper_mtc0_performance2(arg);
3925 rn = "Performance2";
3928 // gen_helper_mtc0_performance3(arg);
3929 rn = "Performance3";
3932 // gen_helper_mtc0_performance4(arg);
3933 rn = "Performance4";
3936 // gen_helper_mtc0_performance5(arg);
3937 rn = "Performance5";
3940 // gen_helper_mtc0_performance6(arg);
3941 rn = "Performance6";
3944 // gen_helper_mtc0_performance7(arg);
3945 rn = "Performance7";
3971 gen_helper_mtc0_taglo(arg);
3978 gen_helper_mtc0_datalo(arg);
3991 gen_helper_mtc0_taghi(arg);
3998 gen_helper_mtc0_datahi(arg);
4009 gen_mtc0_store64(arg, offsetof(CPUState, CP0_ErrorEPC));
4020 gen_mtc0_store32(arg, offsetof(CPUState, CP0_DESAVE));
4026 /* Stop translation as we may have switched the execution mode */
4027 ctx->bstate = BS_STOP;
4032 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
4033 /* For simplicity assume that all writes can cause interrupts. */
4036 ctx->bstate = BS_STOP;
4041 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
4042 generate_exception(ctx, EXCP_RI);
4045 #if defined(TARGET_MIPS64)
4046 static void gen_dmfc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
4048 const char *rn = "invalid";
4051 check_insn(env, ctx, ISA_MIPS64);
4057 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Index));
4061 check_insn(env, ctx, ASE_MT);
4062 gen_helper_mfc0_mvpcontrol(arg);
4066 check_insn(env, ctx, ASE_MT);
4067 gen_helper_mfc0_mvpconf0(arg);
4071 check_insn(env, ctx, ASE_MT);
4072 gen_helper_mfc0_mvpconf1(arg);
4082 gen_helper_mfc0_random(arg);
4086 check_insn(env, ctx, ASE_MT);
4087 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEControl));
4091 check_insn(env, ctx, ASE_MT);
4092 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf0));
4096 check_insn(env, ctx, ASE_MT);
4097 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf1));
4101 check_insn(env, ctx, ASE_MT);
4102 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_YQMask));
4106 check_insn(env, ctx, ASE_MT);
4107 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_VPESchedule));
4111 check_insn(env, ctx, ASE_MT);
4112 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
4113 rn = "VPEScheFBack";
4116 check_insn(env, ctx, ASE_MT);
4117 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEOpt));
4127 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo0));
4131 check_insn(env, ctx, ASE_MT);
4132 gen_helper_mfc0_tcstatus(arg);
4136 check_insn(env, ctx, ASE_MT);
4137 gen_helper_mfc0_tcbind(arg);
4141 check_insn(env, ctx, ASE_MT);
4142 gen_helper_dmfc0_tcrestart(arg);
4146 check_insn(env, ctx, ASE_MT);
4147 gen_helper_dmfc0_tchalt(arg);
4151 check_insn(env, ctx, ASE_MT);
4152 gen_helper_dmfc0_tccontext(arg);
4156 check_insn(env, ctx, ASE_MT);
4157 gen_helper_dmfc0_tcschedule(arg);
4161 check_insn(env, ctx, ASE_MT);
4162 gen_helper_dmfc0_tcschefback(arg);
4172 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo1));
4182 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_Context));
4186 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
4187 rn = "ContextConfig";
4196 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageMask));
4200 check_insn(env, ctx, ISA_MIPS32R2);
4201 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageGrain));
4211 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Wired));
4215 check_insn(env, ctx, ISA_MIPS32R2);
4216 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf0));
4220 check_insn(env, ctx, ISA_MIPS32R2);
4221 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf1));
4225 check_insn(env, ctx, ISA_MIPS32R2);
4226 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf2));
4230 check_insn(env, ctx, ISA_MIPS32R2);
4231 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf3));
4235 check_insn(env, ctx, ISA_MIPS32R2);
4236 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf4));
4246 check_insn(env, ctx, ISA_MIPS32R2);
4247 gen_mfc0_load32(arg, offsetof(CPUState, CP0_HWREna));
4257 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_BadVAddr));
4267 /* Mark as an IO operation because we read the time. */
4270 gen_helper_mfc0_count(arg);
4273 ctx->bstate = BS_STOP;
4277 /* 6,7 are implementation dependent */
4285 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryHi));
4295 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Compare));
4298 /* 6,7 are implementation dependent */
4306 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Status));
4310 check_insn(env, ctx, ISA_MIPS32R2);
4311 gen_mfc0_load32(arg, offsetof(CPUState, CP0_IntCtl));
4315 check_insn(env, ctx, ISA_MIPS32R2);
4316 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSCtl));
4320 check_insn(env, ctx, ISA_MIPS32R2);
4321 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSMap));
4331 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Cause));
4341 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
4351 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PRid));
4355 check_insn(env, ctx, ISA_MIPS32R2);
4356 gen_mfc0_load32(arg, offsetof(CPUState, CP0_EBase));
4366 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config0));
4370 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config1));
4374 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config2));
4378 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config3));
4381 /* 6,7 are implementation dependent */
4383 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config6));
4387 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config7));
4397 gen_helper_dmfc0_lladdr(arg);
4407 gen_helper_1i(dmfc0_watchlo, arg, sel);
4417 gen_helper_1i(mfc0_watchhi, arg, sel);
4427 check_insn(env, ctx, ISA_MIPS3);
4428 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_XContext));
4436 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4439 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Framemask));
4447 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4448 rn = "Diagnostic"; /* implementation dependent */
4453 gen_helper_mfc0_debug(arg); /* EJTAG support */
4457 // gen_helper_dmfc0_tracecontrol(arg); /* PDtrace support */
4458 rn = "TraceControl";
4461 // gen_helper_dmfc0_tracecontrol2(arg); /* PDtrace support */
4462 rn = "TraceControl2";
4465 // gen_helper_dmfc0_usertracedata(arg); /* PDtrace support */
4466 rn = "UserTraceData";
4469 // gen_helper_dmfc0_tracebpc(arg); /* PDtrace support */
4480 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
4490 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Performance0));
4491 rn = "Performance0";
4494 // gen_helper_dmfc0_performance1(arg);
4495 rn = "Performance1";
4498 // gen_helper_dmfc0_performance2(arg);
4499 rn = "Performance2";
4502 // gen_helper_dmfc0_performance3(arg);
4503 rn = "Performance3";
4506 // gen_helper_dmfc0_performance4(arg);
4507 rn = "Performance4";
4510 // gen_helper_dmfc0_performance5(arg);
4511 rn = "Performance5";
4514 // gen_helper_dmfc0_performance6(arg);
4515 rn = "Performance6";
4518 // gen_helper_dmfc0_performance7(arg);
4519 rn = "Performance7";
4526 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4533 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4546 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagLo));
4553 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataLo));
4566 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagHi));
4573 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataHi));
4583 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
4594 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DESAVE));
4604 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
4608 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
4609 generate_exception(ctx, EXCP_RI);
4612 static void gen_dmtc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
4614 const char *rn = "invalid";
4617 check_insn(env, ctx, ISA_MIPS64);
4626 gen_helper_mtc0_index(arg);
4630 check_insn(env, ctx, ASE_MT);
4631 gen_helper_mtc0_mvpcontrol(arg);
4635 check_insn(env, ctx, ASE_MT);
4640 check_insn(env, ctx, ASE_MT);
4655 check_insn(env, ctx, ASE_MT);
4656 gen_helper_mtc0_vpecontrol(arg);
4660 check_insn(env, ctx, ASE_MT);
4661 gen_helper_mtc0_vpeconf0(arg);
4665 check_insn(env, ctx, ASE_MT);
4666 gen_helper_mtc0_vpeconf1(arg);
4670 check_insn(env, ctx, ASE_MT);
4671 gen_helper_mtc0_yqmask(arg);
4675 check_insn(env, ctx, ASE_MT);
4676 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_VPESchedule));
4680 check_insn(env, ctx, ASE_MT);
4681 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
4682 rn = "VPEScheFBack";
4685 check_insn(env, ctx, ASE_MT);
4686 gen_helper_mtc0_vpeopt(arg);
4696 gen_helper_mtc0_entrylo0(arg);
4700 check_insn(env, ctx, ASE_MT);
4701 gen_helper_mtc0_tcstatus(arg);
4705 check_insn(env, ctx, ASE_MT);
4706 gen_helper_mtc0_tcbind(arg);
4710 check_insn(env, ctx, ASE_MT);
4711 gen_helper_mtc0_tcrestart(arg);
4715 check_insn(env, ctx, ASE_MT);
4716 gen_helper_mtc0_tchalt(arg);
4720 check_insn(env, ctx, ASE_MT);
4721 gen_helper_mtc0_tccontext(arg);
4725 check_insn(env, ctx, ASE_MT);
4726 gen_helper_mtc0_tcschedule(arg);
4730 check_insn(env, ctx, ASE_MT);
4731 gen_helper_mtc0_tcschefback(arg);
4741 gen_helper_mtc0_entrylo1(arg);
4751 gen_helper_mtc0_context(arg);
4755 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
4756 rn = "ContextConfig";
4765 gen_helper_mtc0_pagemask(arg);
4769 check_insn(env, ctx, ISA_MIPS32R2);
4770 gen_helper_mtc0_pagegrain(arg);
4780 gen_helper_mtc0_wired(arg);
4784 check_insn(env, ctx, ISA_MIPS32R2);
4785 gen_helper_mtc0_srsconf0(arg);
4789 check_insn(env, ctx, ISA_MIPS32R2);
4790 gen_helper_mtc0_srsconf1(arg);
4794 check_insn(env, ctx, ISA_MIPS32R2);
4795 gen_helper_mtc0_srsconf2(arg);
4799 check_insn(env, ctx, ISA_MIPS32R2);
4800 gen_helper_mtc0_srsconf3(arg);
4804 check_insn(env, ctx, ISA_MIPS32R2);
4805 gen_helper_mtc0_srsconf4(arg);
4815 check_insn(env, ctx, ISA_MIPS32R2);
4816 gen_helper_mtc0_hwrena(arg);
4830 gen_helper_mtc0_count(arg);
4833 /* 6,7 are implementation dependent */
4837 /* Stop translation as we may have switched the execution mode */
4838 ctx->bstate = BS_STOP;
4843 gen_helper_mtc0_entryhi(arg);
4853 gen_helper_mtc0_compare(arg);
4856 /* 6,7 are implementation dependent */
4860 /* Stop translation as we may have switched the execution mode */
4861 ctx->bstate = BS_STOP;
4866 save_cpu_state(ctx, 1);
4867 gen_helper_mtc0_status(arg);
4868 /* BS_STOP isn't good enough here, hflags may have changed. */
4869 gen_save_pc(ctx->pc + 4);
4870 ctx->bstate = BS_EXCP;
4874 check_insn(env, ctx, ISA_MIPS32R2);
4875 gen_helper_mtc0_intctl(arg);
4876 /* Stop translation as we may have switched the execution mode */
4877 ctx->bstate = BS_STOP;
4881 check_insn(env, ctx, ISA_MIPS32R2);
4882 gen_helper_mtc0_srsctl(arg);
4883 /* Stop translation as we may have switched the execution mode */
4884 ctx->bstate = BS_STOP;
4888 check_insn(env, ctx, ISA_MIPS32R2);
4889 gen_mtc0_store32(arg, offsetof(CPUState, CP0_SRSMap));
4890 /* Stop translation as we may have switched the execution mode */
4891 ctx->bstate = BS_STOP;
4901 save_cpu_state(ctx, 1);
4902 gen_helper_mtc0_cause(arg);
4912 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
4926 check_insn(env, ctx, ISA_MIPS32R2);
4927 gen_helper_mtc0_ebase(arg);
4937 gen_helper_mtc0_config0(arg);
4939 /* Stop translation as we may have switched the execution mode */
4940 ctx->bstate = BS_STOP;
4943 /* ignored, read only */
4947 gen_helper_mtc0_config2(arg);
4949 /* Stop translation as we may have switched the execution mode */
4950 ctx->bstate = BS_STOP;
4956 /* 6,7 are implementation dependent */
4958 rn = "Invalid config selector";
4975 gen_helper_1i(mtc0_watchlo, arg, sel);
4985 gen_helper_1i(mtc0_watchhi, arg, sel);
4995 check_insn(env, ctx, ISA_MIPS3);
4996 gen_helper_mtc0_xcontext(arg);
5004 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5007 gen_helper_mtc0_framemask(arg);
5016 rn = "Diagnostic"; /* implementation dependent */
5021 gen_helper_mtc0_debug(arg); /* EJTAG support */
5022 /* BS_STOP isn't good enough here, hflags may have changed. */
5023 gen_save_pc(ctx->pc + 4);
5024 ctx->bstate = BS_EXCP;
5028 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
5029 /* Stop translation as we may have switched the execution mode */
5030 ctx->bstate = BS_STOP;
5031 rn = "TraceControl";
5034 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
5035 /* Stop translation as we may have switched the execution mode */
5036 ctx->bstate = BS_STOP;
5037 rn = "TraceControl2";
5040 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
5041 /* Stop translation as we may have switched the execution mode */
5042 ctx->bstate = BS_STOP;
5043 rn = "UserTraceData";
5046 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
5047 /* Stop translation as we may have switched the execution mode */
5048 ctx->bstate = BS_STOP;
5059 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
5069 gen_helper_mtc0_performance0(arg);
5070 rn = "Performance0";
5073 // gen_helper_mtc0_performance1(arg);
5074 rn = "Performance1";
5077 // gen_helper_mtc0_performance2(arg);
5078 rn = "Performance2";
5081 // gen_helper_mtc0_performance3(arg);
5082 rn = "Performance3";
5085 // gen_helper_mtc0_performance4(arg);
5086 rn = "Performance4";
5089 // gen_helper_mtc0_performance5(arg);
5090 rn = "Performance5";
5093 // gen_helper_mtc0_performance6(arg);
5094 rn = "Performance6";
5097 // gen_helper_mtc0_performance7(arg);
5098 rn = "Performance7";
5124 gen_helper_mtc0_taglo(arg);
5131 gen_helper_mtc0_datalo(arg);
5144 gen_helper_mtc0_taghi(arg);
5151 gen_helper_mtc0_datahi(arg);
5162 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
5173 gen_mtc0_store32(arg, offsetof(CPUState, CP0_DESAVE));
5179 /* Stop translation as we may have switched the execution mode */
5180 ctx->bstate = BS_STOP;
5185 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
5186 /* For simplicity assume that all writes can cause interrupts. */
5189 ctx->bstate = BS_STOP;
5194 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
5195 generate_exception(ctx, EXCP_RI);
5197 #endif /* TARGET_MIPS64 */
5199 static void gen_mftr(CPUState *env, DisasContext *ctx, int rt, int rd,
5200 int u, int sel, int h)
5202 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
5203 TCGv t0 = tcg_temp_local_new();
5205 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
5206 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
5207 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
5208 tcg_gen_movi_tl(t0, -1);
5209 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
5210 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
5211 tcg_gen_movi_tl(t0, -1);
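/* Targeting a TC on another VPE without MVP set, or a TC number
   beyond what MVPConf0 advertises, is unpredictable; the translator
   simply returns all ones in those cases. */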
5217 gen_helper_mftc0_tcstatus(t0);
5220 gen_helper_mftc0_tcbind(t0);
5223 gen_helper_mftc0_tcrestart(t0);
5226 gen_helper_mftc0_tchalt(t0);
5229 gen_helper_mftc0_tccontext(t0);
5232 gen_helper_mftc0_tcschedule(t0);
5235 gen_helper_mftc0_tcschefback(t0);
5238 gen_mfc0(env, ctx, t0, rt, sel);
5245 gen_helper_mftc0_entryhi(t0);
5248 gen_mfc0(env, ctx, t0, rt, sel);
5254 gen_helper_mftc0_status(t0);
5257 gen_mfc0(env, ctx, t0, rt, sel);
5263 gen_helper_mftc0_debug(t0);
5266 gen_mfc0(env, ctx, t0, rt, sel);
5271 gen_mfc0(env, ctx, t0, rt, sel);
5273 } else switch (sel) {
5274 /* GPR registers. */
5276 gen_helper_1i(mftgpr, t0, rt);
5278 /* Auxiliary CPU registers */
5282 gen_helper_1i(mftlo, t0, 0);
5285 gen_helper_1i(mfthi, t0, 0);
5288 gen_helper_1i(mftacx, t0, 0);
5291 gen_helper_1i(mftlo, t0, 1);
5294 gen_helper_1i(mfthi, t0, 1);
5297 gen_helper_1i(mftacx, t0, 1);
5300 gen_helper_1i(mftlo, t0, 2);
5303 gen_helper_1i(mfthi, t0, 2);
5306 gen_helper_1i(mftacx, t0, 2);
5309 gen_helper_1i(mftlo, t0, 3);
5312 gen_helper_1i(mfthi, t0, 3);
5315 gen_helper_1i(mftacx, t0, 3);
5318 gen_helper_mftdsp(t0);
5324 /* Floating point (COP1). */
5326 /* XXX: For now we support only a single FPU context. */
5328 TCGv_i32 fp0 = tcg_temp_new_i32();
5330 gen_load_fpr32(fp0, rt);
5331 tcg_gen_ext_i32_tl(t0, fp0);
5332 tcg_temp_free_i32(fp0);
5334 TCGv_i32 fp0 = tcg_temp_new_i32();
5336 gen_load_fpr32h(fp0, rt);
5337 tcg_gen_ext_i32_tl(t0, fp0);
5338 tcg_temp_free_i32(fp0);
5342 /* XXX: For now we support only a single FPU context. */
5343 gen_helper_1i(cfc1, t0, rt);
5345 /* COP2: Not implemented. */
5352 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
5353 gen_store_gpr(t0, rd);
5359 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
5360 generate_exception(ctx, EXCP_RI);
5363 static void gen_mttr(CPUState *env, DisasContext *ctx, int rd, int rt,
5364 int u, int sel, int h)
5366 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
5367 TCGv t0 = tcg_temp_local_new();
5369 gen_load_gpr(t0, rt);
5370 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
5371 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
5372 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
5374 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
5375 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
5382 gen_helper_mttc0_tcstatus(t0);
5385 gen_helper_mttc0_tcbind(t0);
5388 gen_helper_mttc0_tcrestart(t0);
5391 gen_helper_mttc0_tchalt(t0);
5394 gen_helper_mttc0_tccontext(t0);
5397 gen_helper_mttc0_tcschedule(t0);
5400 gen_helper_mttc0_tcschefback(t0);
5403 gen_mtc0(env, ctx, t0, rd, sel);
5410 gen_helper_mttc0_entryhi(t0);
5413 gen_mtc0(env, ctx, t0, rd, sel);
5419 gen_helper_mttc0_status(t0);
5422 gen_mtc0(env, ctx, t0, rd, sel);
5428 gen_helper_mttc0_debug(t0);
5431 gen_mtc0(env, ctx, t0, rd, sel);
5436 gen_mtc0(env, ctx, t0, rd, sel);
5438 } else switch (sel) {
5439 /* GPR registers. */
5441 gen_helper_1i(mttgpr, t0, rd);
5443 /* Auxiliary CPU registers */
5447 gen_helper_1i(mttlo, t0, 0);
5450 gen_helper_1i(mtthi, t0, 0);
5453 gen_helper_1i(mttacx, t0, 0);
5456 gen_helper_1i(mttlo, t0, 1);
5459 gen_helper_1i(mtthi, t0, 1);
5462 gen_helper_1i(mttacx, t0, 1);
5465 gen_helper_1i(mttlo, t0, 2);
5468 gen_helper_1i(mtthi, t0, 2);
5471 gen_helper_1i(mttacx, t0, 2);
5474 gen_helper_1i(mttlo, t0, 3);
5477 gen_helper_1i(mtthi, t0, 3);
5480 gen_helper_1i(mttacx, t0, 3);
5483 gen_helper_mttdsp(t0);
5489 /* Floating point (COP1). */
5491 /* XXX: For now we support only a single FPU context. */
5493 TCGv_i32 fp0 = tcg_temp_new_i32();
5495 tcg_gen_trunc_tl_i32(fp0, t0);
5496 gen_store_fpr32(fp0, rd);
5497 tcg_temp_free_i32(fp0);
5499 TCGv_i32 fp0 = tcg_temp_new_i32();
5501 tcg_gen_trunc_tl_i32(fp0, t0);
5502 gen_store_fpr32h(fp0, rd);
5503 tcg_temp_free_i32(fp0);
5507 /* XXX: For now we support only a single FPU context. */
5508 gen_helper_1i(ctc1, t0, rd);
5510 /* COP2: Not implemented. */
5517 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
5523 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
5524 generate_exception(ctx, EXCP_RI);
5527 static void gen_cp0 (CPUState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
5529 const char *opn = "ldst";
5537 gen_mfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
5542 TCGv t0 = tcg_temp_new();
5544 gen_load_gpr(t0, rt);
5545 gen_mtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
5550 #if defined(TARGET_MIPS64)
5552 check_insn(env, ctx, ISA_MIPS3);
5557 gen_dmfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
5561 check_insn(env, ctx, ISA_MIPS3);
5563 TCGv t0 = tcg_temp_new();
5565 gen_load_gpr(t0, rt);
5566 gen_dmtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
5573 check_insn(env, ctx, ASE_MT);
5578 gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
5579 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
5583 check_insn(env, ctx, ASE_MT);
5584 gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
5585 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
5590 if (!env->tlb->helper_tlbwi)
5596 if (!env->tlb->helper_tlbwr)
5602 if (!env->tlb->helper_tlbp)
5608 if (!env->tlb->helper_tlbr)
5614 check_insn(env, ctx, ISA_MIPS2);
5616 ctx->bstate = BS_EXCP;
5620 check_insn(env, ctx, ISA_MIPS32);
5621 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
5623 generate_exception(ctx, EXCP_RI);
5626 ctx->bstate = BS_EXCP;
5631 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
5632 /* If we get an exception, we want to restart at the next instruction */
5634 save_cpu_state(ctx, 1);
5637 ctx->bstate = BS_EXCP;
5642 generate_exception(ctx, EXCP_RI);
5645 MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);
5647 #endif /* !CONFIG_USER_ONLY */
5649 /* CP1 Branches (before delay slot) */
5650 static void gen_compute_branch1 (CPUState *env, DisasContext *ctx, uint32_t op,
5651 int32_t cc, int32_t offset)
5653 target_ulong btarget;
5654 const char *opn = "cp1 cond branch";
5655 TCGv_i32 t0 = tcg_temp_new_i32();
5658 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
5660 btarget = ctx->pc + 4 + offset;
5664 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5665 tcg_gen_not_i32(t0, t0);
5666 tcg_gen_andi_i32(t0, t0, 1);
5667 tcg_gen_extu_i32_tl(bcond, t0);
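/* get_fp_bit(cc) gives the position of condition code cc in FCR31
   (bit 23 for cc 0, bits 25..31 for cc 1..7); the "false" variants
   invert the bit before it is copied into bcond. */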
5671 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5672 tcg_gen_not_i32(t0, t0);
5673 tcg_gen_andi_i32(t0, t0, 1);
5674 tcg_gen_extu_i32_tl(bcond, t0);
5678 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5679 tcg_gen_andi_i32(t0, t0, 1);
5680 tcg_gen_extu_i32_tl(bcond, t0);
5684 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5685 tcg_gen_andi_i32(t0, t0, 1);
5686 tcg_gen_extu_i32_tl(bcond, t0);
5689 ctx->hflags |= MIPS_HFLAG_BL;
5693 TCGv_i32 t1 = tcg_temp_new_i32();
5694 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5695 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5696 tcg_gen_or_i32(t0, t0, t1);
5697 tcg_temp_free_i32(t1);
5698 tcg_gen_not_i32(t0, t0);
5699 tcg_gen_andi_i32(t0, t0, 1);
5700 tcg_gen_extu_i32_tl(bcond, t0);
5706 TCGv_i32 t1 = tcg_temp_new_i32();
5707 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5708 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5709 tcg_gen_or_i32(t0, t0, t1);
5710 tcg_temp_free_i32(t1);
5711 tcg_gen_andi_i32(t0, t0, 1);
5712 tcg_gen_extu_i32_tl(bcond, t0);
5718 TCGv_i32 t1 = tcg_temp_new_i32();
5719 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5720 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5721 tcg_gen_or_i32(t0, t0, t1);
5722 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
5723 tcg_gen_or_i32(t0, t0, t1);
5724 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
5725 tcg_gen_or_i32(t0, t0, t1);
5726 tcg_temp_free_i32(t1);
5727 tcg_gen_not_i32(t0, t0);
5728 tcg_gen_andi_i32(t0, t0, 1);
5729 tcg_gen_extu_i32_tl(bcond, t0);
5735 TCGv_i32 t1 = tcg_temp_new_i32();
5736 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5737 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5738 tcg_gen_or_i32(t0, t0, t1);
5739 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
5740 tcg_gen_or_i32(t0, t0, t1);
5741 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
5742 tcg_gen_or_i32(t0, t0, t1);
5743 tcg_temp_free_i32(t1);
5744 tcg_gen_andi_i32(t0, t0, 1);
5745 tcg_gen_extu_i32_tl(bcond, t0);
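/* The BC1ANY2/BC1ANY4 (MIPS-3D) forms OR two or four consecutive
   condition codes together and branch if any of them matches the
   requested true/false sense. */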
5749 ctx->hflags |= MIPS_HFLAG_BC;
5753 generate_exception (ctx, EXCP_RI);
5756 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx, opn,
5757 ctx->hflags, btarget);
5758 ctx->btarget = btarget;
5761 tcg_temp_free_i32(t0);
5764 /* Coprocessor 1 (FPU) */
5766 #define FOP(func, fmt) (((fmt) << 21) | (func))
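/* FOP() folds the fmt field (bits 25..21) and the function field
   (bits 5..0) of a COP1 opcode into one value so gen_farith can
   switch on both at once, e.g. FOP(0, 16) selects add.s. */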
5768 static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
5770 const char *opn = "cp1 move";
5771 TCGv t0 = tcg_temp_new();
5776 TCGv_i32 fp0 = tcg_temp_new_i32();
5778 gen_load_fpr32(fp0, fs);
5779 tcg_gen_ext_i32_tl(t0, fp0);
5780 tcg_temp_free_i32(fp0);
5782 gen_store_gpr(t0, rt);
5786 gen_load_gpr(t0, rt);
5788 TCGv_i32 fp0 = tcg_temp_new_i32();
5790 tcg_gen_trunc_tl_i32(fp0, t0);
5791 gen_store_fpr32(fp0, fs);
5792 tcg_temp_free_i32(fp0);
5797 gen_helper_1i(cfc1, t0, fs);
5798 gen_store_gpr(t0, rt);
5802 gen_load_gpr(t0, rt);
5803 gen_helper_1i(ctc1, t0, fs);
5806 #if defined(TARGET_MIPS64)
5808 gen_load_fpr64(ctx, t0, fs);
5809 gen_store_gpr(t0, rt);
5813 gen_load_gpr(t0, rt);
5814 gen_store_fpr64(ctx, t0, fs);
5820 TCGv_i32 fp0 = tcg_temp_new_i32();
5822 gen_load_fpr32h(fp0, fs);
5823 tcg_gen_ext_i32_tl(t0, fp0);
5824 tcg_temp_free_i32(fp0);
5826 gen_store_gpr(t0, rt);
5830 gen_load_gpr(t0, rt);
5832 TCGv_i32 fp0 = tcg_temp_new_i32();
5834 tcg_gen_trunc_tl_i32(fp0, t0);
5835 gen_store_fpr32h(fp0, fs);
5836 tcg_temp_free_i32(fp0);
5842 generate_exception (ctx, EXCP_RI);
5845 MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);
5851 static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
5867 l1 = gen_new_label();
5868 t0 = tcg_temp_new_i32();
5869 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5870 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5871 tcg_temp_free_i32(t0);
5873 tcg_gen_movi_tl(cpu_gpr[rd], 0);
5875 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
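/* MOVF/MOVT on a GPR: the brcond skips the write when the FP
   condition code does not match tf; moving from $zero is emitted as
   a plain clear of rd. */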
5880 static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
5883 TCGv_i32 t0 = tcg_temp_new_i32();
5884 int l1 = gen_new_label();
5891 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5892 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5893 gen_load_fpr32(t0, fs);
5894 gen_store_fpr32(t0, fd);
5896 tcg_temp_free_i32(t0);
5899 static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
5902 TCGv_i32 t0 = tcg_temp_new_i32();
5904 int l1 = gen_new_label();
5911 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5912 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5913 tcg_temp_free_i32(t0);
5914 fp0 = tcg_temp_new_i64();
5915 gen_load_fpr64(ctx, fp0, fs);
5916 gen_store_fpr64(ctx, fp0, fd);
5917 tcg_temp_free_i64(fp0);
5921 static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
5924 TCGv_i32 t0 = tcg_temp_new_i32();
5925 int l1 = gen_new_label();
5926 int l2 = gen_new_label();
5933 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5934 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5935 gen_load_fpr32(t0, fs);
5936 gen_store_fpr32(t0, fd);
5939 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc+1));
5940 tcg_gen_brcondi_i32(cond, t0, 0, l2);
5941 gen_load_fpr32h(t0, fs);
5942 gen_store_fpr32h(t0, fd);
5943 tcg_temp_free_i32(t0);
5948 static void gen_farith (DisasContext *ctx, uint32_t op1,
5949 int ft, int fs, int fd, int cc)
5951 const char *opn = "farith";
5952 const char *condnames[] = {
5970 const char *condnames_abs[] = {
5988 enum { BINOP, CMPOP, OTHEROP } optype = OTHEROP;
5989 uint32_t func = ctx->opcode & 0x3f;
5991 switch (ctx->opcode & FOP(0x3f, 0x1f)) {
5994 TCGv_i32 fp0 = tcg_temp_new_i32();
5995 TCGv_i32 fp1 = tcg_temp_new_i32();
5997 gen_load_fpr32(fp0, fs);
5998 gen_load_fpr32(fp1, ft);
5999 gen_helper_float_add_s(fp0, fp0, fp1);
6000 tcg_temp_free_i32(fp1);
6001 gen_store_fpr32(fp0, fd);
6002 tcg_temp_free_i32(fp0);
6009 TCGv_i32 fp0 = tcg_temp_new_i32();
6010 TCGv_i32 fp1 = tcg_temp_new_i32();
6012 gen_load_fpr32(fp0, fs);
6013 gen_load_fpr32(fp1, ft);
6014 gen_helper_float_sub_s(fp0, fp0, fp1);
6015 tcg_temp_free_i32(fp1);
6016 gen_store_fpr32(fp0, fd);
6017 tcg_temp_free_i32(fp0);
6024 TCGv_i32 fp0 = tcg_temp_new_i32();
6025 TCGv_i32 fp1 = tcg_temp_new_i32();
6027 gen_load_fpr32(fp0, fs);
6028 gen_load_fpr32(fp1, ft);
6029 gen_helper_float_mul_s(fp0, fp0, fp1);
6030 tcg_temp_free_i32(fp1);
6031 gen_store_fpr32(fp0, fd);
6032 tcg_temp_free_i32(fp0);
6039 TCGv_i32 fp0 = tcg_temp_new_i32();
6040 TCGv_i32 fp1 = tcg_temp_new_i32();
6042 gen_load_fpr32(fp0, fs);
6043 gen_load_fpr32(fp1, ft);
6044 gen_helper_float_div_s(fp0, fp0, fp1);
6045 tcg_temp_free_i32(fp1);
6046 gen_store_fpr32(fp0, fd);
6047 tcg_temp_free_i32(fp0);
6054 TCGv_i32 fp0 = tcg_temp_new_i32();
6056 gen_load_fpr32(fp0, fs);
6057 gen_helper_float_sqrt_s(fp0, fp0);
6058 gen_store_fpr32(fp0, fd);
6059 tcg_temp_free_i32(fp0);
6065 TCGv_i32 fp0 = tcg_temp_new_i32();
6067 gen_load_fpr32(fp0, fs);
6068 gen_helper_float_abs_s(fp0, fp0);
6069 gen_store_fpr32(fp0, fd);
6070 tcg_temp_free_i32(fp0);
6076 TCGv_i32 fp0 = tcg_temp_new_i32();
6078 gen_load_fpr32(fp0, fs);
6079 gen_store_fpr32(fp0, fd);
6080 tcg_temp_free_i32(fp0);
6086 TCGv_i32 fp0 = tcg_temp_new_i32();
6088 gen_load_fpr32(fp0, fs);
6089 gen_helper_float_chs_s(fp0, fp0);
6090 gen_store_fpr32(fp0, fd);
6091 tcg_temp_free_i32(fp0);
6096 check_cp1_64bitmode(ctx);
6098 TCGv_i32 fp32 = tcg_temp_new_i32();
6099 TCGv_i64 fp64 = tcg_temp_new_i64();
6101 gen_load_fpr32(fp32, fs);
6102 gen_helper_float_roundl_s(fp64, fp32);
6103 tcg_temp_free_i32(fp32);
6104 gen_store_fpr64(ctx, fp64, fd);
6105 tcg_temp_free_i64(fp64);
6110 check_cp1_64bitmode(ctx);
6112 TCGv_i32 fp32 = tcg_temp_new_i32();
6113 TCGv_i64 fp64 = tcg_temp_new_i64();
6115 gen_load_fpr32(fp32, fs);
6116 gen_helper_float_truncl_s(fp64, fp32);
6117 tcg_temp_free_i32(fp32);
6118 gen_store_fpr64(ctx, fp64, fd);
6119 tcg_temp_free_i64(fp64);
6124 check_cp1_64bitmode(ctx);
6126 TCGv_i32 fp32 = tcg_temp_new_i32();
6127 TCGv_i64 fp64 = tcg_temp_new_i64();
6129 gen_load_fpr32(fp32, fs);
6130 gen_helper_float_ceill_s(fp64, fp32);
6131 tcg_temp_free_i32(fp32);
6132 gen_store_fpr64(ctx, fp64, fd);
6133 tcg_temp_free_i64(fp64);
6138 check_cp1_64bitmode(ctx);
6140 TCGv_i32 fp32 = tcg_temp_new_i32();
6141 TCGv_i64 fp64 = tcg_temp_new_i64();
6143 gen_load_fpr32(fp32, fs);
6144 gen_helper_float_floorl_s(fp64, fp32);
6145 tcg_temp_free_i32(fp32);
6146 gen_store_fpr64(ctx, fp64, fd);
6147 tcg_temp_free_i64(fp64);
6153 TCGv_i32 fp0 = tcg_temp_new_i32();
6155 gen_load_fpr32(fp0, fs);
6156 gen_helper_float_roundw_s(fp0, fp0);
6157 gen_store_fpr32(fp0, fd);
6158 tcg_temp_free_i32(fp0);
6164 TCGv_i32 fp0 = tcg_temp_new_i32();
6166 gen_load_fpr32(fp0, fs);
6167 gen_helper_float_truncw_s(fp0, fp0);
6168 gen_store_fpr32(fp0, fd);
6169 tcg_temp_free_i32(fp0);
6175 TCGv_i32 fp0 = tcg_temp_new_i32();
6177 gen_load_fpr32(fp0, fs);
6178 gen_helper_float_ceilw_s(fp0, fp0);
6179 gen_store_fpr32(fp0, fd);
6180 tcg_temp_free_i32(fp0);
6186 TCGv_i32 fp0 = tcg_temp_new_i32();
6188 gen_load_fpr32(fp0, fs);
6189 gen_helper_float_floorw_s(fp0, fp0);
6190 gen_store_fpr32(fp0, fd);
6191 tcg_temp_free_i32(fp0);
6196 gen_movcf_s(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6201 int l1 = gen_new_label();
6205 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6207 fp0 = tcg_temp_new_i32();
6208 gen_load_fpr32(fp0, fs);
6209 gen_store_fpr32(fp0, fd);
6210 tcg_temp_free_i32(fp0);
6217 int l1 = gen_new_label();
6221 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6222 fp0 = tcg_temp_new_i32();
6223 gen_load_fpr32(fp0, fs);
6224 gen_store_fpr32(fp0, fd);
6225 tcg_temp_free_i32(fp0);
6234 TCGv_i32 fp0 = tcg_temp_new_i32();
6236 gen_load_fpr32(fp0, fs);
6237 gen_helper_float_recip_s(fp0, fp0);
6238 gen_store_fpr32(fp0, fd);
6239 tcg_temp_free_i32(fp0);
6246 TCGv_i32 fp0 = tcg_temp_new_i32();
6248 gen_load_fpr32(fp0, fs);
6249 gen_helper_float_rsqrt_s(fp0, fp0);
6250 gen_store_fpr32(fp0, fd);
6251 tcg_temp_free_i32(fp0);
6256 check_cp1_64bitmode(ctx);
6258 TCGv_i32 fp0 = tcg_temp_new_i32();
6259 TCGv_i32 fp1 = tcg_temp_new_i32();
6261 gen_load_fpr32(fp0, fs);
6262 gen_load_fpr32(fp1, fd);
6263 gen_helper_float_recip2_s(fp0, fp0, fp1);
6264 tcg_temp_free_i32(fp1);
6265 gen_store_fpr32(fp0, fd);
6266 tcg_temp_free_i32(fp0);
6271 check_cp1_64bitmode(ctx);
6273 TCGv_i32 fp0 = tcg_temp_new_i32();
6275 gen_load_fpr32(fp0, fs);
6276 gen_helper_float_recip1_s(fp0, fp0);
6277 gen_store_fpr32(fp0, fd);
6278 tcg_temp_free_i32(fp0);
6283 check_cp1_64bitmode(ctx);
6285 TCGv_i32 fp0 = tcg_temp_new_i32();
6287 gen_load_fpr32(fp0, fs);
6288 gen_helper_float_rsqrt1_s(fp0, fp0);
6289 gen_store_fpr32(fp0, fd);
6290 tcg_temp_free_i32(fp0);
6295 check_cp1_64bitmode(ctx);
6297 TCGv_i32 fp0 = tcg_temp_new_i32();
6298 TCGv_i32 fp1 = tcg_temp_new_i32();
6300 gen_load_fpr32(fp0, fs);
6301 gen_load_fpr32(fp1, ft);
6302 gen_helper_float_rsqrt2_s(fp0, fp0, fp1);
6303 tcg_temp_free_i32(fp1);
6304 gen_store_fpr32(fp0, fd);
6305 tcg_temp_free_i32(fp0);
6310 check_cp1_registers(ctx, fd);
6312 TCGv_i32 fp32 = tcg_temp_new_i32();
6313 TCGv_i64 fp64 = tcg_temp_new_i64();
6315 gen_load_fpr32(fp32, fs);
6316 gen_helper_float_cvtd_s(fp64, fp32);
6317 tcg_temp_free_i32(fp32);
6318 gen_store_fpr64(ctx, fp64, fd);
6319 tcg_temp_free_i64(fp64);
6325 TCGv_i32 fp0 = tcg_temp_new_i32();
6327 gen_load_fpr32(fp0, fs);
6328 gen_helper_float_cvtw_s(fp0, fp0);
6329 gen_store_fpr32(fp0, fd);
6330 tcg_temp_free_i32(fp0);
6335 check_cp1_64bitmode(ctx);
6337 TCGv_i32 fp32 = tcg_temp_new_i32();
6338 TCGv_i64 fp64 = tcg_temp_new_i64();
6340 gen_load_fpr32(fp32, fs);
6341 gen_helper_float_cvtl_s(fp64, fp32);
6342 tcg_temp_free_i32(fp32);
6343 gen_store_fpr64(ctx, fp64, fd);
6344 tcg_temp_free_i64(fp64);
6349 check_cp1_64bitmode(ctx);
6351 TCGv_i64 fp64 = tcg_temp_new_i64();
6352 TCGv_i32 fp32_0 = tcg_temp_new_i32();
6353 TCGv_i32 fp32_1 = tcg_temp_new_i32();
6355 gen_load_fpr32(fp32_0, fs);
6356 gen_load_fpr32(fp32_1, ft);
6357 tcg_gen_concat_i32_i64(fp64, fp32_0, fp32_1);
6358 tcg_temp_free_i32(fp32_1);
6359 tcg_temp_free_i32(fp32_0);
6360 gen_store_fpr64(ctx, fp64, fd);
6361 tcg_temp_free_i64(fp64);
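/* CVT.PS.S packs two singles into one paired-single register:
   concat_i32_i64 puts fs in the low half and ft in the high half of
   the 64-bit FPR. */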
6382 TCGv_i32 fp0 = tcg_temp_new_i32();
6383 TCGv_i32 fp1 = tcg_temp_new_i32();
6385 gen_load_fpr32(fp0, fs);
6386 gen_load_fpr32(fp1, ft);
6387 if (ctx->opcode & (1 << 6)) {
6389 gen_cmpabs_s(func-48, fp0, fp1, cc);
6390 opn = condnames_abs[func-48];
6392 gen_cmp_s(func-48, fp0, fp1, cc);
6393 opn = condnames[func-48];
6395 tcg_temp_free_i32(fp0);
6396 tcg_temp_free_i32(fp1);
6400 check_cp1_registers(ctx, fs | ft | fd);
6402 TCGv_i64 fp0 = tcg_temp_new_i64();
6403 TCGv_i64 fp1 = tcg_temp_new_i64();
6405 gen_load_fpr64(ctx, fp0, fs);
6406 gen_load_fpr64(ctx, fp1, ft);
6407 gen_helper_float_add_d(fp0, fp0, fp1);
6408 tcg_temp_free_i64(fp1);
6409 gen_store_fpr64(ctx, fp0, fd);
6410 tcg_temp_free_i64(fp0);
6416 check_cp1_registers(ctx, fs | ft | fd);
6418 TCGv_i64 fp0 = tcg_temp_new_i64();
6419 TCGv_i64 fp1 = tcg_temp_new_i64();
6421 gen_load_fpr64(ctx, fp0, fs);
6422 gen_load_fpr64(ctx, fp1, ft);
6423 gen_helper_float_sub_d(fp0, fp0, fp1);
6424 tcg_temp_free_i64(fp1);
6425 gen_store_fpr64(ctx, fp0, fd);
6426 tcg_temp_free_i64(fp0);
6432 check_cp1_registers(ctx, fs | ft | fd);
6434 TCGv_i64 fp0 = tcg_temp_new_i64();
6435 TCGv_i64 fp1 = tcg_temp_new_i64();
6437 gen_load_fpr64(ctx, fp0, fs);
6438 gen_load_fpr64(ctx, fp1, ft);
6439 gen_helper_float_mul_d(fp0, fp0, fp1);
6440 tcg_temp_free_i64(fp1);
6441 gen_store_fpr64(ctx, fp0, fd);
6442 tcg_temp_free_i64(fp0);
6448 check_cp1_registers(ctx, fs | ft | fd);
6450 TCGv_i64 fp0 = tcg_temp_new_i64();
6451 TCGv_i64 fp1 = tcg_temp_new_i64();
6453 gen_load_fpr64(ctx, fp0, fs);
6454 gen_load_fpr64(ctx, fp1, ft);
6455 gen_helper_float_div_d(fp0, fp0, fp1);
6456 tcg_temp_free_i64(fp1);
6457 gen_store_fpr64(ctx, fp0, fd);
6458 tcg_temp_free_i64(fp0);
6464 check_cp1_registers(ctx, fs | fd);
6466 TCGv_i64 fp0 = tcg_temp_new_i64();
6468 gen_load_fpr64(ctx, fp0, fs);
6469 gen_helper_float_sqrt_d(fp0, fp0);
6470 gen_store_fpr64(ctx, fp0, fd);
6471 tcg_temp_free_i64(fp0);
6476 check_cp1_registers(ctx, fs | fd);
6478 TCGv_i64 fp0 = tcg_temp_new_i64();
6480 gen_load_fpr64(ctx, fp0, fs);
6481 gen_helper_float_abs_d(fp0, fp0);
6482 gen_store_fpr64(ctx, fp0, fd);
6483 tcg_temp_free_i64(fp0);
6488 check_cp1_registers(ctx, fs | fd);
6490 TCGv_i64 fp0 = tcg_temp_new_i64();
6492 gen_load_fpr64(ctx, fp0, fs);
6493 gen_store_fpr64(ctx, fp0, fd);
6494 tcg_temp_free_i64(fp0);
6499 check_cp1_registers(ctx, fs | fd);
6501 TCGv_i64 fp0 = tcg_temp_new_i64();
6503 gen_load_fpr64(ctx, fp0, fs);
6504 gen_helper_float_chs_d(fp0, fp0);
6505 gen_store_fpr64(ctx, fp0, fd);
6506 tcg_temp_free_i64(fp0);
6511 check_cp1_64bitmode(ctx);
6513 TCGv_i64 fp0 = tcg_temp_new_i64();
6515 gen_load_fpr64(ctx, fp0, fs);
6516 gen_helper_float_roundl_d(fp0, fp0);
6517 gen_store_fpr64(ctx, fp0, fd);
6518 tcg_temp_free_i64(fp0);
6523 check_cp1_64bitmode(ctx);
6525 TCGv_i64 fp0 = tcg_temp_new_i64();
6527 gen_load_fpr64(ctx, fp0, fs);
6528 gen_helper_float_truncl_d(fp0, fp0);
6529 gen_store_fpr64(ctx, fp0, fd);
6530 tcg_temp_free_i64(fp0);
6535 check_cp1_64bitmode(ctx);
6537 TCGv_i64 fp0 = tcg_temp_new_i64();
6539 gen_load_fpr64(ctx, fp0, fs);
6540 gen_helper_float_ceill_d(fp0, fp0);
6541 gen_store_fpr64(ctx, fp0, fd);
6542 tcg_temp_free_i64(fp0);
6547 check_cp1_64bitmode(ctx);
6549 TCGv_i64 fp0 = tcg_temp_new_i64();
6551 gen_load_fpr64(ctx, fp0, fs);
6552 gen_helper_float_floorl_d(fp0, fp0);
6553 gen_store_fpr64(ctx, fp0, fd);
6554 tcg_temp_free_i64(fp0);
6559 check_cp1_registers(ctx, fs);
6561 TCGv_i32 fp32 = tcg_temp_new_i32();
6562 TCGv_i64 fp64 = tcg_temp_new_i64();
6564 gen_load_fpr64(ctx, fp64, fs);
6565 gen_helper_float_roundw_d(fp32, fp64);
6566 tcg_temp_free_i64(fp64);
6567 gen_store_fpr32(fp32, fd);
6568 tcg_temp_free_i32(fp32);
6573 check_cp1_registers(ctx, fs);
6575 TCGv_i32 fp32 = tcg_temp_new_i32();
6576 TCGv_i64 fp64 = tcg_temp_new_i64();
6578 gen_load_fpr64(ctx, fp64, fs);
6579 gen_helper_float_truncw_d(fp32, fp64);
6580 tcg_temp_free_i64(fp64);
6581 gen_store_fpr32(fp32, fd);
6582 tcg_temp_free_i32(fp32);
6587 check_cp1_registers(ctx, fs);
6589 TCGv_i32 fp32 = tcg_temp_new_i32();
6590 TCGv_i64 fp64 = tcg_temp_new_i64();
6592 gen_load_fpr64(ctx, fp64, fs);
6593 gen_helper_float_ceilw_d(fp32, fp64);
6594 tcg_temp_free_i64(fp64);
6595 gen_store_fpr32(fp32, fd);
6596 tcg_temp_free_i32(fp32);
6601 check_cp1_registers(ctx, fs);
6603 TCGv_i32 fp32 = tcg_temp_new_i32();
6604 TCGv_i64 fp64 = tcg_temp_new_i64();
6606 gen_load_fpr64(ctx, fp64, fs);
6607 gen_helper_float_floorw_d(fp32, fp64);
6608 tcg_temp_free_i64(fp64);
6609 gen_store_fpr32(fp32, fd);
6610 tcg_temp_free_i32(fp32);
6615 gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6620 int l1 = gen_new_label();
6624 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6626 fp0 = tcg_temp_new_i64();
6627 gen_load_fpr64(ctx, fp0, fs);
6628 gen_store_fpr64(ctx, fp0, fd);
6629 tcg_temp_free_i64(fp0);
6636 int l1 = gen_new_label();
6640 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6641 fp0 = tcg_temp_new_i64();
6642 gen_load_fpr64(ctx, fp0, fs);
6643 gen_store_fpr64(ctx, fp0, fd);
6644 tcg_temp_free_i64(fp0);
6651 check_cp1_64bitmode(ctx);
6653 TCGv_i64 fp0 = tcg_temp_new_i64();
6655 gen_load_fpr64(ctx, fp0, fs);
6656 gen_helper_float_recip_d(fp0, fp0);
6657 gen_store_fpr64(ctx, fp0, fd);
6658 tcg_temp_free_i64(fp0);
6663 check_cp1_64bitmode(ctx);
6665 TCGv_i64 fp0 = tcg_temp_new_i64();
6667 gen_load_fpr64(ctx, fp0, fs);
6668 gen_helper_float_rsqrt_d(fp0, fp0);
6669 gen_store_fpr64(ctx, fp0, fd);
6670 tcg_temp_free_i64(fp0);
6675 check_cp1_64bitmode(ctx);
6677 TCGv_i64 fp0 = tcg_temp_new_i64();
6678 TCGv_i64 fp1 = tcg_temp_new_i64();
6680 gen_load_fpr64(ctx, fp0, fs);
6681 gen_load_fpr64(ctx, fp1, ft);
6682 gen_helper_float_recip2_d(fp0, fp0, fp1);
6683 tcg_temp_free_i64(fp1);
6684 gen_store_fpr64(ctx, fp0, fd);
6685 tcg_temp_free_i64(fp0);
6690 check_cp1_64bitmode(ctx);
6692 TCGv_i64 fp0 = tcg_temp_new_i64();
6694 gen_load_fpr64(ctx, fp0, fs);
6695 gen_helper_float_recip1_d(fp0, fp0);
6696 gen_store_fpr64(ctx, fp0, fd);
6697 tcg_temp_free_i64(fp0);
6702 check_cp1_64bitmode(ctx);
6704 TCGv_i64 fp0 = tcg_temp_new_i64();
6706 gen_load_fpr64(ctx, fp0, fs);
6707 gen_helper_float_rsqrt1_d(fp0, fp0);
6708 gen_store_fpr64(ctx, fp0, fd);
6709 tcg_temp_free_i64(fp0);
6714 check_cp1_64bitmode(ctx);
6716 TCGv_i64 fp0 = tcg_temp_new_i64();
6717 TCGv_i64 fp1 = tcg_temp_new_i64();
6719 gen_load_fpr64(ctx, fp0, fs);
6720 gen_load_fpr64(ctx, fp1, ft);
6721 gen_helper_float_rsqrt2_d(fp0, fp0, fp1);
6722 tcg_temp_free_i64(fp1);
6723 gen_store_fpr64(ctx, fp0, fd);
6724 tcg_temp_free_i64(fp0);
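/* c.cond.d: the function field minus 0x30 indexes one of the sixteen
   FP condition codes; bit 6 of the opcode selects the "abs"
   (sign-ignoring) compare helper instead of the ordinary one.  The
   paired-single compares further down use the same scheme. */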
6745 TCGv_i64 fp0 = tcg_temp_new_i64();
6746 TCGv_i64 fp1 = tcg_temp_new_i64();
6748 gen_load_fpr64(ctx, fp0, fs);
6749 gen_load_fpr64(ctx, fp1, ft);
6750 if (ctx->opcode & (1 << 6)) {
6752 check_cp1_registers(ctx, fs | ft);
6753 gen_cmpabs_d(func-48, fp0, fp1, cc);
6754 opn = condnames_abs[func-48];
6756 check_cp1_registers(ctx, fs | ft);
6757 gen_cmp_d(func-48, fp0, fp1, cc);
6758 opn = condnames[func-48];
6760 tcg_temp_free_i64(fp0);
6761 tcg_temp_free_i64(fp1);
6765 check_cp1_registers(ctx, fs);
6767 TCGv_i32 fp32 = tcg_temp_new_i32();
6768 TCGv_i64 fp64 = tcg_temp_new_i64();
6770 gen_load_fpr64(ctx, fp64, fs);
6771 gen_helper_float_cvts_d(fp32, fp64);
6772 tcg_temp_free_i64(fp64);
6773 gen_store_fpr32(fp32, fd);
6774 tcg_temp_free_i32(fp32);
6779 check_cp1_registers(ctx, fs);
6781 TCGv_i32 fp32 = tcg_temp_new_i32();
6782 TCGv_i64 fp64 = tcg_temp_new_i64();
6784 gen_load_fpr64(ctx, fp64, fs);
6785 gen_helper_float_cvtw_d(fp32, fp64);
6786 tcg_temp_free_i64(fp64);
6787 gen_store_fpr32(fp32, fd);
6788 tcg_temp_free_i32(fp32);
6793 check_cp1_64bitmode(ctx);
6795 TCGv_i64 fp0 = tcg_temp_new_i64();
6797 gen_load_fpr64(ctx, fp0, fs);
6798 gen_helper_float_cvtl_d(fp0, fp0);
6799 gen_store_fpr64(ctx, fp0, fd);
6800 tcg_temp_free_i64(fp0);
6806 TCGv_i32 fp0 = tcg_temp_new_i32();
6808 gen_load_fpr32(fp0, fs);
6809 gen_helper_float_cvts_w(fp0, fp0);
6810 gen_store_fpr32(fp0, fd);
6811 tcg_temp_free_i32(fp0);
6816 check_cp1_registers(ctx, fd);
6818 TCGv_i32 fp32 = tcg_temp_new_i32();
6819 TCGv_i64 fp64 = tcg_temp_new_i64();
6821 gen_load_fpr32(fp32, fs);
6822 gen_helper_float_cvtd_w(fp64, fp32);
6823 tcg_temp_free_i32(fp32);
6824 gen_store_fpr64(ctx, fp64, fd);
6825 tcg_temp_free_i64(fp64);
6830 check_cp1_64bitmode(ctx);
6832 TCGv_i32 fp32 = tcg_temp_new_i32();
6833 TCGv_i64 fp64 = tcg_temp_new_i64();
6835 gen_load_fpr64(ctx, fp64, fs);
6836 gen_helper_float_cvts_l(fp32, fp64);
6837 tcg_temp_free_i64(fp64);
6838 gen_store_fpr32(fp32, fd);
6839 tcg_temp_free_i32(fp32);
6844 check_cp1_64bitmode(ctx);
6846 TCGv_i64 fp0 = tcg_temp_new_i64();
6848 gen_load_fpr64(ctx, fp0, fs);
6849 gen_helper_float_cvtd_l(fp0, fp0);
6850 gen_store_fpr64(ctx, fp0, fd);
6851 tcg_temp_free_i64(fp0);
6856 check_cp1_64bitmode(ctx);
6858 TCGv_i64 fp0 = tcg_temp_new_i64();
6860 gen_load_fpr64(ctx, fp0, fs);
6861 gen_helper_float_cvtps_pw(fp0, fp0);
6862 gen_store_fpr64(ctx, fp0, fd);
6863 tcg_temp_free_i64(fp0);
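/* Paired-single (PS) arithmetic: each 64-bit FPR holds two IEEE
   singles that are operated on in parallel.  PS operations are only
   defined for a 64-bit FPU, hence the check_cp1_64bitmode() calls. */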
6868 check_cp1_64bitmode(ctx);
6870 TCGv_i64 fp0 = tcg_temp_new_i64();
6871 TCGv_i64 fp1 = tcg_temp_new_i64();
6873 gen_load_fpr64(ctx, fp0, fs);
6874 gen_load_fpr64(ctx, fp1, ft);
6875 gen_helper_float_add_ps(fp0, fp0, fp1);
6876 tcg_temp_free_i64(fp1);
6877 gen_store_fpr64(ctx, fp0, fd);
6878 tcg_temp_free_i64(fp0);
6883 check_cp1_64bitmode(ctx);
6885 TCGv_i64 fp0 = tcg_temp_new_i64();
6886 TCGv_i64 fp1 = tcg_temp_new_i64();
6888 gen_load_fpr64(ctx, fp0, fs);
6889 gen_load_fpr64(ctx, fp1, ft);
6890 gen_helper_float_sub_ps(fp0, fp0, fp1);
6891 tcg_temp_free_i64(fp1);
6892 gen_store_fpr64(ctx, fp0, fd);
6893 tcg_temp_free_i64(fp0);
6898 check_cp1_64bitmode(ctx);
6900 TCGv_i64 fp0 = tcg_temp_new_i64();
6901 TCGv_i64 fp1 = tcg_temp_new_i64();
6903 gen_load_fpr64(ctx, fp0, fs);
6904 gen_load_fpr64(ctx, fp1, ft);
6905 gen_helper_float_mul_ps(fp0, fp0, fp1);
6906 tcg_temp_free_i64(fp1);
6907 gen_store_fpr64(ctx, fp0, fd);
6908 tcg_temp_free_i64(fp0);
6913 check_cp1_64bitmode(ctx);
6915 TCGv_i64 fp0 = tcg_temp_new_i64();
6917 gen_load_fpr64(ctx, fp0, fs);
6918 gen_helper_float_abs_ps(fp0, fp0);
6919 gen_store_fpr64(ctx, fp0, fd);
6920 tcg_temp_free_i64(fp0);
6925 check_cp1_64bitmode(ctx);
6927 TCGv_i64 fp0 = tcg_temp_new_i64();
6929 gen_load_fpr64(ctx, fp0, fs);
6930 gen_store_fpr64(ctx, fp0, fd);
6931 tcg_temp_free_i64(fp0);
6936 check_cp1_64bitmode(ctx);
6938 TCGv_i64 fp0 = tcg_temp_new_i64();
6940 gen_load_fpr64(ctx, fp0, fs);
6941 gen_helper_float_chs_ps(fp0, fp0);
6942 gen_store_fpr64(ctx, fp0, fd);
6943 tcg_temp_free_i64(fp0);
6948 check_cp1_64bitmode(ctx);
6949 gen_movcf_ps(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6953 check_cp1_64bitmode(ctx);
6955 int l1 = gen_new_label();
6959 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6960 fp0 = tcg_temp_new_i64();
6961 gen_load_fpr64(ctx, fp0, fs);
6962 gen_store_fpr64(ctx, fp0, fd);
6963 tcg_temp_free_i64(fp0);
6969 check_cp1_64bitmode(ctx);
6971 int l1 = gen_new_label();
6975 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6976 fp0 = tcg_temp_new_i64();
6977 gen_load_fpr64(ctx, fp0, fs);
6978 gen_store_fpr64(ctx, fp0, fd);
6979 tcg_temp_free_i64(fp0);
6986 check_cp1_64bitmode(ctx);
6988 TCGv_i64 fp0 = tcg_temp_new_i64();
6989 TCGv_i64 fp1 = tcg_temp_new_i64();
6991 gen_load_fpr64(ctx, fp0, ft);
6992 gen_load_fpr64(ctx, fp1, fs);
6993 gen_helper_float_addr_ps(fp0, fp0, fp1);
6994 tcg_temp_free_i64(fp1);
6995 gen_store_fpr64(ctx, fp0, fd);
6996 tcg_temp_free_i64(fp0);
7001 check_cp1_64bitmode(ctx);
7003 TCGv_i64 fp0 = tcg_temp_new_i64();
7004 TCGv_i64 fp1 = tcg_temp_new_i64();
7006 gen_load_fpr64(ctx, fp0, ft);
7007 gen_load_fpr64(ctx, fp1, fs);
7008 gen_helper_float_mulr_ps(fp0, fp0, fp1);
7009 tcg_temp_free_i64(fp1);
7010 gen_store_fpr64(ctx, fp0, fd);
7011 tcg_temp_free_i64(fp0);
7016 check_cp1_64bitmode(ctx);
7018 TCGv_i64 fp0 = tcg_temp_new_i64();
7019 TCGv_i64 fp1 = tcg_temp_new_i64();
7021 gen_load_fpr64(ctx, fp0, fs);
7022 gen_load_fpr64(ctx, fp1, fd);
7023 gen_helper_float_recip2_ps(fp0, fp0, fp1);
7024 tcg_temp_free_i64(fp1);
7025 gen_store_fpr64(ctx, fp0, fd);
7026 tcg_temp_free_i64(fp0);
7031 check_cp1_64bitmode(ctx);
7033 TCGv_i64 fp0 = tcg_temp_new_i64();
7035 gen_load_fpr64(ctx, fp0, fs);
7036 gen_helper_float_recip1_ps(fp0, fp0);
7037 gen_store_fpr64(ctx, fp0, fd);
7038 tcg_temp_free_i64(fp0);
7043 check_cp1_64bitmode(ctx);
7045 TCGv_i64 fp0 = tcg_temp_new_i64();
7047 gen_load_fpr64(ctx, fp0, fs);
7048 gen_helper_float_rsqrt1_ps(fp0, fp0);
7049 gen_store_fpr64(ctx, fp0, fd);
7050 tcg_temp_free_i64(fp0);
7055 check_cp1_64bitmode(ctx);
7057 TCGv_i64 fp0 = tcg_temp_new_i64();
7058 TCGv_i64 fp1 = tcg_temp_new_i64();
7060 gen_load_fpr64(ctx, fp0, fs);
7061 gen_load_fpr64(ctx, fp1, ft);
7062 gen_helper_float_rsqrt2_ps(fp0, fp0, fp1);
7063 tcg_temp_free_i64(fp1);
7064 gen_store_fpr64(ctx, fp0, fd);
7065 tcg_temp_free_i64(fp0);
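/* CVT.S.PU converts the upper single of a pair (gen_load_fpr32h) and
   CVT.S.PL the lower one (gen_load_fpr32); CVT.PW.PS converts both
   halves of the pair to fixed-point words in place. */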
7070 check_cp1_64bitmode(ctx);
7072 TCGv_i32 fp0 = tcg_temp_new_i32();
7074 gen_load_fpr32h(fp0, fs);
7075 gen_helper_float_cvts_pu(fp0, fp0);
7076 gen_store_fpr32(fp0, fd);
7077 tcg_temp_free_i32(fp0);
7082 check_cp1_64bitmode(ctx);
7084 TCGv_i64 fp0 = tcg_temp_new_i64();
7086 gen_load_fpr64(ctx, fp0, fs);
7087 gen_helper_float_cvtpw_ps(fp0, fp0);
7088 gen_store_fpr64(ctx, fp0, fd);
7089 tcg_temp_free_i64(fp0);
7094 check_cp1_64bitmode(ctx);
7096 TCGv_i32 fp0 = tcg_temp_new_i32();
7098 gen_load_fpr32(fp0, fs);
7099 gen_helper_float_cvts_pl(fp0, fp0);
7100 gen_store_fpr32(fp0, fd);
7101 tcg_temp_free_i32(fp0);
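/* PLL/PLU/PUL/PUU assemble a new pair: the single taken from fs
   becomes the upper half of fd and the single taken from ft becomes
   the lower half; the "L"/"U" letters in the mnemonic name which
   halves of fs and ft are used. */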
7106 check_cp1_64bitmode(ctx);
7108 TCGv_i32 fp0 = tcg_temp_new_i32();
7109 TCGv_i32 fp1 = tcg_temp_new_i32();
7111 gen_load_fpr32(fp0, fs);
7112 gen_load_fpr32(fp1, ft);
7113 gen_store_fpr32h(fp0, fd);
7114 gen_store_fpr32(fp1, fd);
7115 tcg_temp_free_i32(fp0);
7116 tcg_temp_free_i32(fp1);
7121 check_cp1_64bitmode(ctx);
7123 TCGv_i32 fp0 = tcg_temp_new_i32();
7124 TCGv_i32 fp1 = tcg_temp_new_i32();
7126 gen_load_fpr32(fp0, fs);
7127 gen_load_fpr32h(fp1, ft);
7128 gen_store_fpr32(fp1, fd);
7129 gen_store_fpr32h(fp0, fd);
7130 tcg_temp_free_i32(fp0);
7131 tcg_temp_free_i32(fp1);
7136 check_cp1_64bitmode(ctx);
7138 TCGv_i32 fp0 = tcg_temp_new_i32();
7139 TCGv_i32 fp1 = tcg_temp_new_i32();
7141 gen_load_fpr32h(fp0, fs);
7142 gen_load_fpr32(fp1, ft);
7143 gen_store_fpr32(fp1, fd);
7144 gen_store_fpr32h(fp0, fd);
7145 tcg_temp_free_i32(fp0);
7146 tcg_temp_free_i32(fp1);
7151 check_cp1_64bitmode(ctx);
7153 TCGv_i32 fp0 = tcg_temp_new_i32();
7154 TCGv_i32 fp1 = tcg_temp_new_i32();
7156 gen_load_fpr32h(fp0, fs);
7157 gen_load_fpr32h(fp1, ft);
7158 gen_store_fpr32(fp1, fd);
7159 gen_store_fpr32h(fp0, fd);
7160 tcg_temp_free_i32(fp0);
7161 tcg_temp_free_i32(fp1);
7181 check_cp1_64bitmode(ctx);
7183 TCGv_i64 fp0 = tcg_temp_new_i64();
7184 TCGv_i64 fp1 = tcg_temp_new_i64();
7186 gen_load_fpr64(ctx, fp0, fs);
7187 gen_load_fpr64(ctx, fp1, ft);
7188 if (ctx->opcode & (1 << 6)) {
7189 gen_cmpabs_ps(func-48, fp0, fp1, cc);
7190 opn = condnames_abs[func-48];
7192 gen_cmp_ps(func-48, fp0, fp1, cc);
7193 opn = condnames[func-48];
7195 tcg_temp_free_i64(fp0);
7196 tcg_temp_free_i64(fp1);
7201 generate_exception (ctx, EXCP_RI);
7206 MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);
7209 MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);
7212 MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
7217 /* Coprocessor 3 (FPU) */
7218 static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
7219 int fd, int fs, int base, int index)
7221 const char *opn = "extended float load/store";
7223 TCGv t0 = tcg_temp_new();
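/* The effective address is base + index; when either register is
   $zero only the other one is loaded, avoiding both a useless add and
   a read of cpu_gpr[0], which is never allocated as a TCG global. */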
7226 gen_load_gpr(t0, index);
7227 } else if (index == 0) {
7228 gen_load_gpr(t0, base);
7230 gen_load_gpr(t0, index);
7231 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
7233 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
7235 save_cpu_state(ctx, 0);
7240 TCGv_i32 fp0 = tcg_temp_new_i32();
7242 tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
7243 tcg_gen_trunc_tl_i32(fp0, t0);
7244 gen_store_fpr32(fp0, fd);
7245 tcg_temp_free_i32(fp0);
7251 check_cp1_registers(ctx, fd);
7253 TCGv_i64 fp0 = tcg_temp_new_i64();
7255 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7256 gen_store_fpr64(ctx, fp0, fd);
7257 tcg_temp_free_i64(fp0);
7262 check_cp1_64bitmode(ctx);
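/* LUXC1 (and SUXC1 below) access the naturally aligned doubleword
   containing the effective address, so the low three address bits
   are cleared before the memory access. */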
7263 tcg_gen_andi_tl(t0, t0, ~0x7);
7265 TCGv_i64 fp0 = tcg_temp_new_i64();
7267 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7268 gen_store_fpr64(ctx, fp0, fd);
7269 tcg_temp_free_i64(fp0);
7276 TCGv_i32 fp0 = tcg_temp_new_i32();
7277 TCGv t1 = tcg_temp_new();
7279 gen_load_fpr32(fp0, fs);
7280 tcg_gen_extu_i32_tl(t1, fp0);
7281 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
7282 tcg_temp_free_i32(fp0);
7283 tcg_temp_free(t1);
7290 check_cp1_registers(ctx, fs);
7292 TCGv_i64 fp0 = tcg_temp_new_i64();
7294 gen_load_fpr64(ctx, fp0, fs);
7295 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7296 tcg_temp_free_i64(fp0);
7302 check_cp1_64bitmode(ctx);
7303 tcg_gen_andi_tl(t0, t0, ~0x7);
7305 TCGv_i64 fp0 = tcg_temp_new_i64();
7307 gen_load_fpr64(ctx, fp0, fs);
7308 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7309 tcg_temp_free_i64(fp0);
7316 MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
7317 regnames[index], regnames[base]);
7320 static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
7321 int fd, int fr, int fs, int ft)
7323 const char *opn = "flt3_arith";
7327 check_cp1_64bitmode(ctx);
7329 TCGv t0 = tcg_temp_local_new();
7330 TCGv_i32 fp = tcg_temp_new_i32();
7331 TCGv_i32 fph = tcg_temp_new_i32();
7332 int l1 = gen_new_label();
7333 int l2 = gen_new_label();
7335 gen_load_gpr(t0, fr);
7336 tcg_gen_andi_tl(t0, t0, 0x7);
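/* ALNV.PS: only byte offsets 0 (plain copy of fs) and 4 (mix the
   upper/lower singles of fs and ft, endian-dependent) are handled;
   any other rs value leaves fd unchanged. */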
7338 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
7339 gen_load_fpr32(fp, fs);
7340 gen_load_fpr32h(fph, fs);
7341 gen_store_fpr32(fp, fd);
7342 gen_store_fpr32h(fph, fd);
7345 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
7347 #ifdef TARGET_WORDS_BIGENDIAN
7348 gen_load_fpr32(fp, fs);
7349 gen_load_fpr32h(fph, ft);
7350 gen_store_fpr32h(fp, fd);
7351 gen_store_fpr32(fph, fd);
7353 gen_load_fpr32h(fph, fs);
7354 gen_load_fpr32(fp, ft);
7355 gen_store_fpr32(fph, fd);
7356 gen_store_fpr32h(fp, fd);
7359 tcg_temp_free_i32(fp);
7360 tcg_temp_free_i32(fph);
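/* The remaining COP1X cases are the multiply-accumulate operations
   (MADD, MSUB, NMADD, NMSUB in S, D and PS formats): fs * ft is
   combined with fr by the corresponding helper and the result is
   written to fd. */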
7367 TCGv_i32 fp0 = tcg_temp_new_i32();
7368 TCGv_i32 fp1 = tcg_temp_new_i32();
7369 TCGv_i32 fp2 = tcg_temp_new_i32();
7371 gen_load_fpr32(fp0, fs);
7372 gen_load_fpr32(fp1, ft);
7373 gen_load_fpr32(fp2, fr);
7374 gen_helper_float_muladd_s(fp2, fp0, fp1, fp2);
7375 tcg_temp_free_i32(fp0);
7376 tcg_temp_free_i32(fp1);
7377 gen_store_fpr32(fp2, fd);
7378 tcg_temp_free_i32(fp2);
7384 check_cp1_registers(ctx, fd | fs | ft | fr);
7386 TCGv_i64 fp0 = tcg_temp_new_i64();
7387 TCGv_i64 fp1 = tcg_temp_new_i64();
7388 TCGv_i64 fp2 = tcg_temp_new_i64();
7390 gen_load_fpr64(ctx, fp0, fs);
7391 gen_load_fpr64(ctx, fp1, ft);
7392 gen_load_fpr64(ctx, fp2, fr);
7393 gen_helper_float_muladd_d(fp2, fp0, fp1, fp2);
7394 tcg_temp_free_i64(fp0);
7395 tcg_temp_free_i64(fp1);
7396 gen_store_fpr64(ctx, fp2, fd);
7397 tcg_temp_free_i64(fp2);
7402 check_cp1_64bitmode(ctx);
7404 TCGv_i64 fp0 = tcg_temp_new_i64();
7405 TCGv_i64 fp1 = tcg_temp_new_i64();
7406 TCGv_i64 fp2 = tcg_temp_new_i64();
7408 gen_load_fpr64(ctx, fp0, fs);
7409 gen_load_fpr64(ctx, fp1, ft);
7410 gen_load_fpr64(ctx, fp2, fr);
7411 gen_helper_float_muladd_ps(fp2, fp0, fp1, fp2);
7412 tcg_temp_free_i64(fp0);
7413 tcg_temp_free_i64(fp1);
7414 gen_store_fpr64(ctx, fp2, fd);
7415 tcg_temp_free_i64(fp2);
7422 TCGv_i32 fp0 = tcg_temp_new_i32();
7423 TCGv_i32 fp1 = tcg_temp_new_i32();
7424 TCGv_i32 fp2 = tcg_temp_new_i32();
7426 gen_load_fpr32(fp0, fs);
7427 gen_load_fpr32(fp1, ft);
7428 gen_load_fpr32(fp2, fr);
7429 gen_helper_float_mulsub_s(fp2, fp0, fp1, fp2);
7430 tcg_temp_free_i32(fp0);
7431 tcg_temp_free_i32(fp1);
7432 gen_store_fpr32(fp2, fd);
7433 tcg_temp_free_i32(fp2);
7439 check_cp1_registers(ctx, fd | fs | ft | fr);
7441 TCGv_i64 fp0 = tcg_temp_new_i64();
7442 TCGv_i64 fp1 = tcg_temp_new_i64();
7443 TCGv_i64 fp2 = tcg_temp_new_i64();
7445 gen_load_fpr64(ctx, fp0, fs);
7446 gen_load_fpr64(ctx, fp1, ft);
7447 gen_load_fpr64(ctx, fp2, fr);
7448 gen_helper_float_mulsub_d(fp2, fp0, fp1, fp2);
7449 tcg_temp_free_i64(fp0);
7450 tcg_temp_free_i64(fp1);
7451 gen_store_fpr64(ctx, fp2, fd);
7452 tcg_temp_free_i64(fp2);
7457 check_cp1_64bitmode(ctx);
7459 TCGv_i64 fp0 = tcg_temp_new_i64();
7460 TCGv_i64 fp1 = tcg_temp_new_i64();
7461 TCGv_i64 fp2 = tcg_temp_new_i64();
7463 gen_load_fpr64(ctx, fp0, fs);
7464 gen_load_fpr64(ctx, fp1, ft);
7465 gen_load_fpr64(ctx, fp2, fr);
7466 gen_helper_float_mulsub_ps(fp2, fp0, fp1, fp2);
7467 tcg_temp_free_i64(fp0);
7468 tcg_temp_free_i64(fp1);
7469 gen_store_fpr64(ctx, fp2, fd);
7470 tcg_temp_free_i64(fp2);
7477 TCGv_i32 fp0 = tcg_temp_new_i32();
7478 TCGv_i32 fp1 = tcg_temp_new_i32();
7479 TCGv_i32 fp2 = tcg_temp_new_i32();
7481 gen_load_fpr32(fp0, fs);
7482 gen_load_fpr32(fp1, ft);
7483 gen_load_fpr32(fp2, fr);
7484 gen_helper_float_nmuladd_s(fp2, fp0, fp1, fp2);
7485 tcg_temp_free_i32(fp0);
7486 tcg_temp_free_i32(fp1);
7487 gen_store_fpr32(fp2, fd);
7488 tcg_temp_free_i32(fp2);
7494 check_cp1_registers(ctx, fd | fs | ft | fr);
7496 TCGv_i64 fp0 = tcg_temp_new_i64();
7497 TCGv_i64 fp1 = tcg_temp_new_i64();
7498 TCGv_i64 fp2 = tcg_temp_new_i64();
7500 gen_load_fpr64(ctx, fp0, fs);
7501 gen_load_fpr64(ctx, fp1, ft);
7502 gen_load_fpr64(ctx, fp2, fr);
7503 gen_helper_float_nmuladd_d(fp2, fp0, fp1, fp2);
7504 tcg_temp_free_i64(fp0);
7505 tcg_temp_free_i64(fp1);
7506 gen_store_fpr64(ctx, fp2, fd);
7507 tcg_temp_free_i64(fp2);
7512 check_cp1_64bitmode(ctx);
7514 TCGv_i64 fp0 = tcg_temp_new_i64();
7515 TCGv_i64 fp1 = tcg_temp_new_i64();
7516 TCGv_i64 fp2 = tcg_temp_new_i64();
7518 gen_load_fpr64(ctx, fp0, fs);
7519 gen_load_fpr64(ctx, fp1, ft);
7520 gen_load_fpr64(ctx, fp2, fr);
7521 gen_helper_float_nmuladd_ps(fp2, fp0, fp1, fp2);
7522 tcg_temp_free_i64(fp0);
7523 tcg_temp_free_i64(fp1);
7524 gen_store_fpr64(ctx, fp2, fd);
7525 tcg_temp_free_i64(fp2);
7532 TCGv_i32 fp0 = tcg_temp_new_i32();
7533 TCGv_i32 fp1 = tcg_temp_new_i32();
7534 TCGv_i32 fp2 = tcg_temp_new_i32();
7536 gen_load_fpr32(fp0, fs);
7537 gen_load_fpr32(fp1, ft);
7538 gen_load_fpr32(fp2, fr);
7539 gen_helper_float_nmulsub_s(fp2, fp0, fp1, fp2);
7540 tcg_temp_free_i32(fp0);
7541 tcg_temp_free_i32(fp1);
7542 gen_store_fpr32(fp2, fd);
7543 tcg_temp_free_i32(fp2);
7549 check_cp1_registers(ctx, fd | fs | ft | fr);
7551 TCGv_i64 fp0 = tcg_temp_new_i64();
7552 TCGv_i64 fp1 = tcg_temp_new_i64();
7553 TCGv_i64 fp2 = tcg_temp_new_i64();
7555 gen_load_fpr64(ctx, fp0, fs);
7556 gen_load_fpr64(ctx, fp1, ft);
7557 gen_load_fpr64(ctx, fp2, fr);
7558 gen_helper_float_nmulsub_d(fp2, fp0, fp1, fp2);
7559 tcg_temp_free_i64(fp0);
7560 tcg_temp_free_i64(fp1);
7561 gen_store_fpr64(ctx, fp2, fd);
7562 tcg_temp_free_i64(fp2);
7567 check_cp1_64bitmode(ctx);
7569 TCGv_i64 fp0 = tcg_temp_new_i64();
7570 TCGv_i64 fp1 = tcg_temp_new_i64();
7571 TCGv_i64 fp2 = tcg_temp_new_i64();
7573 gen_load_fpr64(ctx, fp0, fs);
7574 gen_load_fpr64(ctx, fp1, ft);
7575 gen_load_fpr64(ctx, fp2, fr);
7576 gen_helper_float_nmulsub_ps(fp2, fp0, fp1, fp2);
7577 tcg_temp_free_i64(fp0);
7578 tcg_temp_free_i64(fp1);
7579 gen_store_fpr64(ctx, fp2, fd);
7580 tcg_temp_free_i64(fp2);
7586 generate_exception (ctx, EXCP_RI);
7589 MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
7590 fregnames[fs], fregnames[ft]);
7593 /* ISA extensions (ASEs) */
7594 /* MIPS16 extension to MIPS32 */
7595 /* SmartMIPS extension to MIPS32 */
7597 #if defined(TARGET_MIPS64)
7599 /* MDMX extension to MIPS64 */
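/* decode_opc() translates a single guest instruction.  The major
   opcode selects a handler directly; the SPECIAL, SPECIAL2, SPECIAL3,
   REGIMM and coprocessor groups are dispatched further on their minor
   function fields below. */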
7603 static void decode_opc (CPUState *env, DisasContext *ctx)
7607 uint32_t op, op1, op2;
7610 /* make sure instructions are on a word boundary */
7611 if (ctx->pc & 0x3) {
7612 env->CP0_BadVAddr = ctx->pc;
7613 generate_exception(ctx, EXCP_AdEL);
7617 /* Handle blikely not taken case */
7618 if ((ctx->hflags & MIPS_HFLAG_BMASK) == MIPS_HFLAG_BL) {
7619 int l1 = gen_new_label();
7621 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
7622 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
7623 tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
7624 gen_goto_tb(ctx, 1, ctx->pc + 4);
7627 op = MASK_OP_MAJOR(ctx->opcode);
7628 rs = (ctx->opcode >> 21) & 0x1f;
7629 rt = (ctx->opcode >> 16) & 0x1f;
7630 rd = (ctx->opcode >> 11) & 0x1f;
7631 sa = (ctx->opcode >> 6) & 0x1f;
7632 imm = (int16_t)ctx->opcode;
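/* Standard field extraction: rs = bits 25..21, rt = 20..16,
   rd = 15..11, sa = 10..6; imm is the sign-extended 16-bit immediate
   used by I-type instructions. */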
7635 op1 = MASK_SPECIAL(ctx->opcode);
7637 case OPC_SLL: /* Shift with immediate */
7640 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7642 case OPC_MOVN: /* Conditional move */
7644 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7645 gen_cond_move(env, op1, rd, rs, rt);
7647 case OPC_ADD ... OPC_SUBU:
7648 gen_arith(env, ctx, op1, rd, rs, rt);
7650 case OPC_SLLV: /* Shifts */
7653 gen_shift(env, ctx, op1, rd, rs, rt);
7655 case OPC_SLT: /* Set on less than */
7657 gen_slt(env, op1, rd, rs, rt);
7659 case OPC_AND: /* Logic*/
7663 gen_logic(env, op1, rd, rs, rt);
7665 case OPC_MULT ... OPC_DIVU:
7667 check_insn(env, ctx, INSN_VR54XX);
7668 op1 = MASK_MUL_VR54XX(ctx->opcode);
7669 gen_mul_vr54xx(ctx, op1, rd, rs, rt);
7671 gen_muldiv(ctx, op1, rs, rt);
7673 case OPC_JR ... OPC_JALR:
7674 gen_compute_branch(ctx, op1, rs, rd, sa);
7676 case OPC_TGE ... OPC_TEQ: /* Traps */
7678 gen_trap(ctx, op1, rs, rt, -1);
7680 case OPC_MFHI: /* Move from HI/LO */
7682 gen_HILO(ctx, op1, rd);
7685 case OPC_MTLO: /* Move to HI/LO */
7686 gen_HILO(ctx, op1, rs);
7688 case OPC_PMON: /* Pmon entry point, also R4010 selsl */
7689 #ifdef MIPS_STRICT_STANDARD
7690 MIPS_INVAL("PMON / selsl");
7691 generate_exception(ctx, EXCP_RI);
7693 gen_helper_0i(pmon, sa);
7697 generate_exception(ctx, EXCP_SYSCALL);
7698 ctx->bstate = BS_STOP;
7701 generate_exception(ctx, EXCP_BREAK);
7704 #ifdef MIPS_STRICT_STANDARD
7706 generate_exception(ctx, EXCP_RI);
7708 /* Implemented as RI exception for now. */
7709 MIPS_INVAL("spim (unofficial)");
7710 generate_exception(ctx, EXCP_RI);
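/* MOVCI (MOVF/MOVT) tests an FPU condition bit: bits 20..18 of the
   opcode select the condition code and bit 16 gives the sense. */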
7718 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7719 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7720 check_cp1_enabled(ctx);
7721 gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
7722 (ctx->opcode >> 16) & 1);
7724 generate_exception_err(ctx, EXCP_CpU, 1);
7728 #if defined(TARGET_MIPS64)
7729 /* MIPS64 specific opcodes */
7736 check_insn(env, ctx, ISA_MIPS3);
7738 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7740 case OPC_DADD ... OPC_DSUBU:
7741 check_insn(env, ctx, ISA_MIPS3);
7743 gen_arith(env, ctx, op1, rd, rs, rt);
7748 check_insn(env, ctx, ISA_MIPS3);
7750 gen_shift(env, ctx, op1, rd, rs, rt);
7752 case OPC_DMULT ... OPC_DDIVU:
7753 check_insn(env, ctx, ISA_MIPS3);
7755 gen_muldiv(ctx, op1, rs, rt);
7758 default: /* Invalid */
7759 MIPS_INVAL("special");
7760 generate_exception(ctx, EXCP_RI);
7765 op1 = MASK_SPECIAL2(ctx->opcode);
7767 case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
7768 case OPC_MSUB ... OPC_MSUBU:
7769 check_insn(env, ctx, ISA_MIPS32);
7770 gen_muldiv(ctx, op1, rs, rt);
7773 gen_arith(env, ctx, op1, rd, rs, rt);
7777 check_insn(env, ctx, ISA_MIPS32);
7778 gen_cl(ctx, op1, rd, rs);
7781 /* XXX: not clear which exception should be raised
7782  * when in debug mode... */
7784 check_insn(env, ctx, ISA_MIPS32);
7785 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
7786 generate_exception(ctx, EXCP_DBp);
7788 generate_exception(ctx, EXCP_DBp);
7792 #if defined(TARGET_MIPS64)
7795 check_insn(env, ctx, ISA_MIPS64);
7797 gen_cl(ctx, op1, rd, rs);
7800 default: /* Invalid */
7801 MIPS_INVAL("special2");
7802 generate_exception(ctx, EXCP_RI);
7807 op1 = MASK_SPECIAL3(ctx->opcode);
7811 check_insn(env, ctx, ISA_MIPS32R2);
7812 gen_bitops(ctx, op1, rt, rs, sa, rd);
7815 check_insn(env, ctx, ISA_MIPS32R2);
7816 op2 = MASK_BSHFL(ctx->opcode);
7817 gen_bshfl(ctx, op2, rt, rd);
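/* RDHWR exposes a small set of hardware registers to software: the
   CPU number, the SYNCI step size, the cycle counter and its
   resolution, and (for user-mode emulation) the TLS pointer. */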
7820 check_insn(env, ctx, ISA_MIPS32R2);
7822 TCGv t0 = tcg_temp_new();
7826 save_cpu_state(ctx, 1);
7827 gen_helper_rdhwr_cpunum(t0);
7828 gen_store_gpr(t0, rt);
7831 save_cpu_state(ctx, 1);
7832 gen_helper_rdhwr_synci_step(t0);
7833 gen_store_gpr(t0, rt);
7836 save_cpu_state(ctx, 1);
7837 gen_helper_rdhwr_cc(t0);
7838 gen_store_gpr(t0, rt);
7841 save_cpu_state(ctx, 1);
7842 gen_helper_rdhwr_ccres(t0);
7843 gen_store_gpr(t0, rt);
7846 #if defined(CONFIG_USER_ONLY)
7847 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, tls_value));
7848 gen_store_gpr(t0, rt);
7851 /* XXX: Some CPUs implement this in hardware.
7852 Not supported yet. */
7854 default: /* Invalid */
7855 MIPS_INVAL("rdhwr");
7856 generate_exception(ctx, EXCP_RI);
7863 check_insn(env, ctx, ASE_MT);
7865 TCGv t0 = tcg_temp_new();
7866 TCGv t1 = tcg_temp_new();
7868 gen_load_gpr(t0, rt);
7869 gen_load_gpr(t1, rs);
7870 gen_helper_fork(t0, t1);
7876 check_insn(env, ctx, ASE_MT);
7878 TCGv t0 = tcg_temp_new();
7880 save_cpu_state(ctx, 1);
7881 gen_load_gpr(t0, rs);
7882 gen_helper_yield(t0, t0);
7883 gen_store_gpr(t0, rd);
7887 #if defined(TARGET_MIPS64)
7888 case OPC_DEXTM ... OPC_DEXT:
7889 case OPC_DINSM ... OPC_DINS:
7890 check_insn(env, ctx, ISA_MIPS64R2);
7892 gen_bitops(ctx, op1, rt, rs, sa, rd);
7895 check_insn(env, ctx, ISA_MIPS64R2);
7897 op2 = MASK_DBSHFL(ctx->opcode);
7898 gen_bshfl(ctx, op2, rt, rd);
7901 default: /* Invalid */
7902 MIPS_INVAL("special3");
7903 generate_exception(ctx, EXCP_RI);
7908 op1 = MASK_REGIMM(ctx->opcode);
7910 case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
7911 case OPC_BLTZAL ... OPC_BGEZALL:
7912 gen_compute_branch(ctx, op1, rs, -1, imm << 2);
7914 case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
7916 gen_trap(ctx, op1, rs, -1, imm);
7919 check_insn(env, ctx, ISA_MIPS32R2);
7922 default: /* Invalid */
7923 MIPS_INVAL("regimm");
7924 generate_exception(ctx, EXCP_RI);
7929 check_cp0_enabled(ctx);
7930 op1 = MASK_CP0(ctx->opcode);
7936 #if defined(TARGET_MIPS64)
7940 #ifndef CONFIG_USER_ONLY
7941 gen_cp0(env, ctx, op1, rt, rd);
7942 #endif /* !CONFIG_USER_ONLY */
7944 case OPC_C0_FIRST ... OPC_C0_LAST:
7945 #ifndef CONFIG_USER_ONLY
7946 gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
7947 #endif /* !CONFIG_USER_ONLY */
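/* The MFMC0 minor opcode groups the MT ASE thread/VPE controls
   (DMT/EMT, DVPE/EVPE) with DI and EI; each returns the previous
   state in rt, and DI/EI stop translation since the execution mode
   may have changed. */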
7950 #ifndef CONFIG_USER_ONLY
7952 TCGv t0 = tcg_temp_new();
7954 op2 = MASK_MFMC0(ctx->opcode);
7957 check_insn(env, ctx, ASE_MT);
7958 gen_helper_dmt(t0, t0);
7959 gen_store_gpr(t0, rt);
7962 check_insn(env, ctx, ASE_MT);
7963 gen_helper_emt(t0, t0);
7964 gen_store_gpr(t0, rt);
7967 check_insn(env, ctx, ASE_MT);
7968 gen_helper_dvpe(t0, t0);
7969 gen_store_gpr(t0, rt);
7972 check_insn(env, ctx, ASE_MT);
7973 gen_helper_evpe(t0, t0);
7974 gen_store_gpr(t0, rt);
7977 check_insn(env, ctx, ISA_MIPS32R2);
7978 save_cpu_state(ctx, 1);
7980 gen_store_gpr(t0, rt);
7981 /* Stop translation as we may have switched the execution mode */
7982 ctx->bstate = BS_STOP;
7985 check_insn(env, ctx, ISA_MIPS32R2);
7986 save_cpu_state(ctx, 1);
7988 gen_store_gpr(t0, rt);
7989 /* Stop translation as we may have switched the execution mode */
7990 ctx->bstate = BS_STOP;
7992 default: /* Invalid */
7993 MIPS_INVAL("mfmc0");
7994 generate_exception(ctx, EXCP_RI);
7999 #endif /* !CONFIG_USER_ONLY */
8002 check_insn(env, ctx, ISA_MIPS32R2);
8003 gen_load_srsgpr(rt, rd);
8006 check_insn(env, ctx, ISA_MIPS32R2);
8007 gen_store_srsgpr(rt, rd);
8011 generate_exception(ctx, EXCP_RI);
8015 case OPC_ADDI: /* Arithmetic with immediate opcode */
8017 gen_arith_imm(env, ctx, op, rt, rs, imm);
8019 case OPC_SLTI: /* Set on less than with immediate opcode */
8021 gen_slt_imm(env, op, rt, rs, imm);
8023 case OPC_ANDI: /* Arithmetic with immediate opcode */
8027 gen_logic_imm(env, op, rt, rs, imm);
8029 case OPC_J ... OPC_JAL: /* Jump */
8030 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
8031 gen_compute_branch(ctx, op, rs, rt, offset);
8033 case OPC_BEQ ... OPC_BGTZ: /* Branch */
8034 case OPC_BEQL ... OPC_BGTZL:
8035 gen_compute_branch(ctx, op, rs, rt, imm << 2);
8037 case OPC_LB ... OPC_LWR: /* Load and stores */
8038 case OPC_SB ... OPC_SW:
8041 gen_ldst(ctx, op, rt, rs, imm);
8044 gen_st_cond(ctx, op, rt, rs, imm);
8047 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
8051 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
8055 /* Floating point (COP1). */
8060 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8061 check_cp1_enabled(ctx);
8062 gen_flt_ldst(ctx, op, rt, rs, imm);
8064 generate_exception_err(ctx, EXCP_CpU, 1);
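/* Coprocessor 1 operations are only accepted when Config1.FP reports
   an FPU and CP1 access is enabled; otherwise a coprocessor-unusable
   exception for coprocessor 1 is raised. */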
8069 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8070 check_cp1_enabled(ctx);
8071 op1 = MASK_CP1(ctx->opcode);
8075 check_insn(env, ctx, ISA_MIPS32R2);
8080 gen_cp1(ctx, op1, rt, rd);
8082 #if defined(TARGET_MIPS64)
8085 check_insn(env, ctx, ISA_MIPS3);
8086 gen_cp1(ctx, op1, rt, rd);
8092 check_insn(env, ctx, ASE_MIPS3D);
8095 gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
8096 (rt >> 2) & 0x7, imm << 2);
8103 gen_farith(ctx, MASK_CP1_FUNC(ctx->opcode), rt, rd, sa,
8108 generate_exception (ctx, EXCP_RI);
8112 generate_exception_err(ctx, EXCP_CpU, 1);
8122 /* COP2: Not implemented. */
8123 generate_exception_err(ctx, EXCP_CpU, 2);
8127 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8128 check_cp1_enabled(ctx);
8129 op1 = MASK_CP3(ctx->opcode);
8137 gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
8155 gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
8159 generate_exception (ctx, EXCP_RI);
8163 generate_exception_err(ctx, EXCP_CpU, 1);
8167 #if defined(TARGET_MIPS64)
8168 /* MIPS64 opcodes */
8170 case OPC_LDL ... OPC_LDR:
8171 case OPC_SDL ... OPC_SDR:
8175 check_insn(env, ctx, ISA_MIPS3);
8177 gen_ldst(ctx, op, rt, rs, imm);
8180 check_insn(env, ctx, ISA_MIPS3);
8182 gen_st_cond(ctx, op, rt, rs, imm);
8186 check_insn(env, ctx, ISA_MIPS3);
8188 gen_arith_imm(env, ctx, op, rt, rs, imm);
8192 check_insn(env, ctx, ASE_MIPS16);
8193 /* MIPS16: Not implemented. */
8195 check_insn(env, ctx, ASE_MDMX);
8196 /* MDMX: Not implemented. */
8197 default: /* Invalid */
8198 MIPS_INVAL("major opcode");
8199 generate_exception(ctx, EXCP_RI);
8202 if (ctx->hflags & MIPS_HFLAG_BMASK) {
8203 int hflags = ctx->hflags & MIPS_HFLAG_BMASK;
8204 /* Branch completion */
8205 ctx->hflags &= ~MIPS_HFLAG_BMASK;
8206 ctx->bstate = BS_BRANCH;
8207 save_cpu_state(ctx, 0);
8208 /* FIXME: Need to clear can_do_io. */
8211 /* unconditional branch */
8212 MIPS_DEBUG("unconditional branch");
8213 gen_goto_tb(ctx, 0, ctx->btarget);
8216 /* blikely taken case */
8217 MIPS_DEBUG("blikely branch taken");
8218 gen_goto_tb(ctx, 0, ctx->btarget);
8221 /* Conditional branch */
8222 MIPS_DEBUG("conditional branch");
8224 int l1 = gen_new_label();
8226 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
8227 gen_goto_tb(ctx, 1, ctx->pc + 4);
8229 gen_goto_tb(ctx, 0, ctx->btarget);
8233 /* unconditional branch to register */
8234 MIPS_DEBUG("branch to register");
8235 tcg_gen_mov_tl(cpu_PC, btarget);
8239 MIPS_DEBUG("unknown branch");
8246 gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb,
8250 target_ulong pc_start;
8251 uint16_t *gen_opc_end;
8258 qemu_log("search pc %d\n", search_pc);
8261 /* Leave some spare opc slots for branch handling. */
8262 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE - 16;
8266 ctx.bstate = BS_NONE;
8267 /* Restore delay slot state from the tb context. */
8268 ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
8269 restore_cpu_state(env, &ctx);
8270 #ifdef CONFIG_USER_ONLY
8271 ctx.mem_idx = MIPS_HFLAG_UM;
8273 ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
8276 max_insns = tb->cflags & CF_COUNT_MASK;
8278 max_insns = CF_COUNT_MASK;
8280 qemu_log_mask(CPU_LOG_TB_CPU, "------------------------------------------------\n");
8281 /* FIXME: This may print out stale hflags from env... */
8282 log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
8284 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
8286 while (ctx.bstate == BS_NONE) {
8287 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
8288 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
8289 if (bp->pc == ctx.pc) {
8290 save_cpu_state(&ctx, 1);
8291 ctx.bstate = BS_BRANCH;
8292 gen_helper_0i(raise_exception, EXCP_DEBUG);
8293 /* Include the breakpoint location or the tb won't
8294 * be flushed when it must be. */
8296 goto done_generating;
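/* When search_pc is set, record the guest PC, the branch-state hflags
   and the instruction count for every op slot so that gen_pc_load()
   can later map a host PC back to guest state. */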
8302 j = gen_opc_ptr - gen_opc_buf;
8306 gen_opc_instr_start[lj++] = 0;
8308 gen_opc_pc[lj] = ctx.pc;
8309 gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
8310 gen_opc_instr_start[lj] = 1;
8311 gen_opc_icount[lj] = num_insns;
8313 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8315 ctx.opcode = ldl_code(ctx.pc);
8316 decode_opc(env, &ctx);
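/* Block translation ends on single-stepping, at a page boundary,
   when the op buffer is nearly full, or once max_insns instructions
   have been translated. */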
8320 if (env->singlestep_enabled)
8323 if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
8326 if (gen_opc_ptr >= gen_opc_end)
8329 if (num_insns >= max_insns)
8335 if (tb->cflags & CF_LAST_IO)
8337 if (env->singlestep_enabled) {
8338 save_cpu_state(&ctx, ctx.bstate == BS_NONE);
8339 gen_helper_0i(raise_exception, EXCP_DEBUG);
8341 switch (ctx.bstate) {
8343 gen_helper_interrupt_restart();
8344 gen_goto_tb(&ctx, 0, ctx.pc);
8347 save_cpu_state(&ctx, 0);
8348 gen_goto_tb(&ctx, 0, ctx.pc);
8351 gen_helper_interrupt_restart();
8360 gen_icount_end(tb, num_insns);
8361 *gen_opc_ptr = INDEX_op_end;
8363 j = gen_opc_ptr - gen_opc_buf;
8366 gen_opc_instr_start[lj++] = 0;
8368 tb->size = ctx.pc - pc_start;
8369 tb->icount = num_insns;
8373 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
8374 qemu_log("IN: %s\n", lookup_symbol(pc_start));
8375 log_target_disas(pc_start, ctx.pc - pc_start, 0);
8378 qemu_log_mask(CPU_LOG_TB_CPU, "---------------- %d %08x\n", ctx.bstate, ctx.hflags);
8382 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
8384 gen_intermediate_code_internal(env, tb, 0);
8387 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
8389 gen_intermediate_code_internal(env, tb, 1);
8392 static void fpu_dump_state(CPUState *env, FILE *f,
8393 int (*fpu_fprintf)(FILE *f, const char *fmt, ...),
8397 int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
8399 #define printfpr(fp) \
8402 fpu_fprintf(f, "w:%08x d:%016" PRIx64 " fd:%13g fs:%13g psu: %13g\n", \
8403 (fp)->w[FP_ENDIAN_IDX], (fp)->d, (fp)->fd, \
8404 (fp)->fs[FP_ENDIAN_IDX], (fp)->fs[!FP_ENDIAN_IDX]); \
8407 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
8408 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
8409 fpu_fprintf(f, "w:%08x d:%016" PRIx64 " fd:%13g fs:%13g psu: %13g\n", \
8410 tmp.w[FP_ENDIAN_IDX], tmp.d, tmp.fd, \
8411 tmp.fs[FP_ENDIAN_IDX], tmp.fs[!FP_ENDIAN_IDX]); \
8416 fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%08x(0x%02x)\n",
8417 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64, env->active_fpu.fp_status,
8418 get_float_exception_flags(&env->active_fpu.fp_status));
8419 for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
8420 fpu_fprintf(f, "%3s: ", fregnames[i]);
8421 printfpr(&env->active_fpu.fpr[i]);
8427 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8428 /* Debug help: The architecture requires 32bit code to maintain proper
8429 sign-extended values on 64bit machines. */
8431 #define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
8434 cpu_mips_check_sign_extensions (CPUState *env, FILE *f,
8435 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8440 if (!SIGN_EXT_P(env->active_tc.PC))
8441 cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
8442 if (!SIGN_EXT_P(env->active_tc.HI[0]))
8443 cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
8444 if (!SIGN_EXT_P(env->active_tc.LO[0]))
8445 cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
8446 if (!SIGN_EXT_P(env->btarget))
8447 cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);
8449 for (i = 0; i < 32; i++) {
8450 if (!SIGN_EXT_P(env->active_tc.gpr[i]))
8451 cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);
8454 if (!SIGN_EXT_P(env->CP0_EPC))
8455 cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
8456 if (!SIGN_EXT_P(env->CP0_LLAddr))
8457 cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->CP0_LLAddr);
8461 void cpu_dump_state (CPUState *env, FILE *f,
8462 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8467 cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx " LO=0x" TARGET_FMT_lx " ds %04x " TARGET_FMT_lx " %d\n",
8468 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
8469 env->hflags, env->btarget, env->bcond);
8470 for (i = 0; i < 32; i++) {
8472 cpu_fprintf(f, "GPR%02d:", i);
8473 cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
8475 cpu_fprintf(f, "\n");
8478 cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
8479 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
8480 cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
8481 env->CP0_Config0, env->CP0_Config1, env->CP0_LLAddr);
8482 if (env->hflags & MIPS_HFLAG_FPU)
8483 fpu_dump_state(env, f, cpu_fprintf, flags);
8484 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8485 cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
8489 static void mips_tcg_init(void)
8494 /* Initialize various static tables. */
8498 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
8499 TCGV_UNUSED(cpu_gpr[0]);
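/* Register 0 never gets a TCG global: reads of $zero are synthesized
   as constant 0 and writes to it are discarded by the gpr load/store
   helpers. */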
8500 for (i = 1; i < 32; i++)
8501 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
8502 offsetof(CPUState, active_tc.gpr[i]),
8504 cpu_PC = tcg_global_mem_new(TCG_AREG0,
8505 offsetof(CPUState, active_tc.PC), "PC");
8506 for (i = 0; i < MIPS_DSP_ACC; i++) {
8507 cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
8508 offsetof(CPUState, active_tc.HI[i]),
8510 cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
8511 offsetof(CPUState, active_tc.LO[i]),
8513 cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
8514 offsetof(CPUState, active_tc.ACX[i]),
8517 cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
8518 offsetof(CPUState, active_tc.DSPControl),
8520 bcond = tcg_global_mem_new(TCG_AREG0,
8521 offsetof(CPUState, bcond), "bcond");
8522 btarget = tcg_global_mem_new(TCG_AREG0,
8523 offsetof(CPUState, btarget), "btarget");
8524 hflags = tcg_global_mem_new_i32(TCG_AREG0,
8525 offsetof(CPUState, hflags), "hflags");
8527 fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
8528 offsetof(CPUState, active_fpu.fcr0),
8530 fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
8531 offsetof(CPUState, active_fpu.fcr31),
8534 /* register helpers */
8535 #define GEN_HELPER 2
8541 #include "translate_init.c"
8543 CPUMIPSState *cpu_mips_init (const char *cpu_model)
8546 const mips_def_t *def;
8548 def = cpu_mips_find_by_name(cpu_model);
8551 env = qemu_mallocz(sizeof(CPUMIPSState));
8552 env->cpu_model = def;
8555 env->cpu_model_str = cpu_model;
8561 void cpu_reset (CPUMIPSState *env)
8563 if (qemu_loglevel_mask(CPU_LOG_RESET)) {
8564 qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
8565 log_cpu_state(env, 0);
8568 memset(env, 0, offsetof(CPUMIPSState, breakpoints));
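/* Only the state up to the 'breakpoints' field is cleared; the fields
   after it, including the configured cpu_model, survive the reset. */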
8573 #if defined(CONFIG_USER_ONLY)
8574 env->hflags = MIPS_HFLAG_UM;
8576 if (env->hflags & MIPS_HFLAG_BMASK) {
8577 /* If the exception was raised from a delay slot,
8578 come back to the jump. */
8579 env->CP0_ErrorEPC = env->active_tc.PC - 4;
8581 env->CP0_ErrorEPC = env->active_tc.PC;
8583 env->active_tc.PC = (int32_t)0xBFC00000;
8585 /* SMP not implemented */
8586 env->CP0_EBase = 0x80000000;
8587 env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
8588 /* vectored interrupts not implemented, timer on int 7,
8589 no performance counters. */
8590 env->CP0_IntCtl = 0xe0000000;
8594 for (i = 0; i < 7; i++) {
8595 env->CP0_WatchLo[i] = 0;
8596 env->CP0_WatchHi[i] = 0x80000000;
8598 env->CP0_WatchLo[7] = 0;
8599 env->CP0_WatchHi[7] = 0;
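/* Bit 31 of WatchHi is the M bit, flagging that another watch
   register pair follows; the last pair leaves it clear. */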
8601 /* Count register increments in debug mode, EJTAG version 1 */
8602 env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
8603 env->hflags = MIPS_HFLAG_CP0;
8605 env->exception_index = EXCP_NONE;
8606 cpu_mips_register(env, env->cpu_model);
8609 void gen_pc_load(CPUState *env, TranslationBlock *tb,
8610 unsigned long searched_pc, int pc_pos, void *puc)
8612 env->active_tc.PC = gen_opc_pc[pc_pos];
8613 env->hflags &= ~MIPS_HFLAG_BMASK;
8614 env->hflags |= gen_opc_hflags[pc_pos];