2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with this library; if not, write to the Free Software
20 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
33 #include "qemu-common.h"
39 //#define MIPS_DEBUG_DISAS
40 //#define MIPS_DEBUG_SIGN_EXTENSIONS
42 /* MIPS major opcodes */
43 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
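/* The major opcode occupies bits 31..26 of the instruction word, so e.g.
   (illustrative example, not from the original source) the word 0x24420001
   encodes "addiu v0, v0, 1" and MASK_OP_MAJOR(0x24420001) == OPC_ADDIU. */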
46 /* indirect opcode tables */
47 OPC_SPECIAL = (0x00 << 26),
48 OPC_REGIMM = (0x01 << 26),
49 OPC_CP0 = (0x10 << 26),
50 OPC_CP1 = (0x11 << 26),
51 OPC_CP2 = (0x12 << 26),
52 OPC_CP3 = (0x13 << 26),
53 OPC_SPECIAL2 = (0x1C << 26),
54 OPC_SPECIAL3 = (0x1F << 26),
55 /* arithmetic with immediate */
56 OPC_ADDI = (0x08 << 26),
57 OPC_ADDIU = (0x09 << 26),
58 OPC_SLTI = (0x0A << 26),
59 OPC_SLTIU = (0x0B << 26),
60 /* logic with immediate */
61 OPC_ANDI = (0x0C << 26),
62 OPC_ORI = (0x0D << 26),
63 OPC_XORI = (0x0E << 26),
64 OPC_LUI = (0x0F << 26),
65 /* arithmetic with immediate */
66 OPC_DADDI = (0x18 << 26),
67 OPC_DADDIU = (0x19 << 26),
68 /* Jump and branches */
70 OPC_JAL = (0x03 << 26),
71 OPC_BEQ = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
72 OPC_BEQL = (0x14 << 26),
73 OPC_BNE = (0x05 << 26),
74 OPC_BNEL = (0x15 << 26),
75 OPC_BLEZ = (0x06 << 26),
76 OPC_BLEZL = (0x16 << 26),
77 OPC_BGTZ = (0x07 << 26),
78 OPC_BGTZL = (0x17 << 26),
79 OPC_JALX = (0x1D << 26), /* MIPS 16 only */
81 OPC_LDL = (0x1A << 26),
82 OPC_LDR = (0x1B << 26),
83 OPC_LB = (0x20 << 26),
84 OPC_LH = (0x21 << 26),
85 OPC_LWL = (0x22 << 26),
86 OPC_LW = (0x23 << 26),
87 OPC_LBU = (0x24 << 26),
88 OPC_LHU = (0x25 << 26),
89 OPC_LWR = (0x26 << 26),
90 OPC_LWU = (0x27 << 26),
91 OPC_SB = (0x28 << 26),
92 OPC_SH = (0x29 << 26),
93 OPC_SWL = (0x2A << 26),
94 OPC_SW = (0x2B << 26),
95 OPC_SDL = (0x2C << 26),
96 OPC_SDR = (0x2D << 26),
97 OPC_SWR = (0x2E << 26),
98 OPC_LL = (0x30 << 26),
99 OPC_LLD = (0x34 << 26),
100 OPC_LD = (0x37 << 26),
101 OPC_SC = (0x38 << 26),
102 OPC_SCD = (0x3C << 26),
103 OPC_SD = (0x3F << 26),
104 /* Floating point load/store */
105 OPC_LWC1 = (0x31 << 26),
106 OPC_LWC2 = (0x32 << 26),
107 OPC_LDC1 = (0x35 << 26),
108 OPC_LDC2 = (0x36 << 26),
109 OPC_SWC1 = (0x39 << 26),
110 OPC_SWC2 = (0x3A << 26),
111 OPC_SDC1 = (0x3D << 26),
112 OPC_SDC2 = (0x3E << 26),
113 /* MDMX ASE specific */
114 OPC_MDMX = (0x1E << 26),
115 /* Cache and prefetch */
116 OPC_CACHE = (0x2F << 26),
117 OPC_PREF = (0x33 << 26),
118 /* Reserved major opcode */
119 OPC_MAJOR3B_RESERVED = (0x3B << 26),
122 /* MIPS special opcodes */
123 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
127 OPC_SLL = 0x00 | OPC_SPECIAL,
128 /* NOP is SLL r0, r0, 0 */
129 /* SSNOP is SLL r0, r0, 1 */
130 /* EHB is SLL r0, r0, 3 */
131 OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
132 OPC_SRA = 0x03 | OPC_SPECIAL,
133 OPC_SLLV = 0x04 | OPC_SPECIAL,
134 OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
135 OPC_SRAV = 0x07 | OPC_SPECIAL,
136 OPC_DSLLV = 0x14 | OPC_SPECIAL,
137 OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
138 OPC_DSRAV = 0x17 | OPC_SPECIAL,
139 OPC_DSLL = 0x38 | OPC_SPECIAL,
140 OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
141 OPC_DSRA = 0x3B | OPC_SPECIAL,
142 OPC_DSLL32 = 0x3C | OPC_SPECIAL,
143 OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
144 OPC_DSRA32 = 0x3F | OPC_SPECIAL,
145 /* Multiplication / division */
146 OPC_MULT = 0x18 | OPC_SPECIAL,
147 OPC_MULTU = 0x19 | OPC_SPECIAL,
148 OPC_DIV = 0x1A | OPC_SPECIAL,
149 OPC_DIVU = 0x1B | OPC_SPECIAL,
150 OPC_DMULT = 0x1C | OPC_SPECIAL,
151 OPC_DMULTU = 0x1D | OPC_SPECIAL,
152 OPC_DDIV = 0x1E | OPC_SPECIAL,
153 OPC_DDIVU = 0x1F | OPC_SPECIAL,
154 /* 2 registers arithmetic / logic */
155 OPC_ADD = 0x20 | OPC_SPECIAL,
156 OPC_ADDU = 0x21 | OPC_SPECIAL,
157 OPC_SUB = 0x22 | OPC_SPECIAL,
158 OPC_SUBU = 0x23 | OPC_SPECIAL,
159 OPC_AND = 0x24 | OPC_SPECIAL,
160 OPC_OR = 0x25 | OPC_SPECIAL,
161 OPC_XOR = 0x26 | OPC_SPECIAL,
162 OPC_NOR = 0x27 | OPC_SPECIAL,
163 OPC_SLT = 0x2A | OPC_SPECIAL,
164 OPC_SLTU = 0x2B | OPC_SPECIAL,
165 OPC_DADD = 0x2C | OPC_SPECIAL,
166 OPC_DADDU = 0x2D | OPC_SPECIAL,
167 OPC_DSUB = 0x2E | OPC_SPECIAL,
168 OPC_DSUBU = 0x2F | OPC_SPECIAL,
170 OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
171 OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
173 OPC_TGE = 0x30 | OPC_SPECIAL,
174 OPC_TGEU = 0x31 | OPC_SPECIAL,
175 OPC_TLT = 0x32 | OPC_SPECIAL,
176 OPC_TLTU = 0x33 | OPC_SPECIAL,
177 OPC_TEQ = 0x34 | OPC_SPECIAL,
178 OPC_TNE = 0x36 | OPC_SPECIAL,
179 /* HI / LO registers load & stores */
180 OPC_MFHI = 0x10 | OPC_SPECIAL,
181 OPC_MTHI = 0x11 | OPC_SPECIAL,
182 OPC_MFLO = 0x12 | OPC_SPECIAL,
183 OPC_MTLO = 0x13 | OPC_SPECIAL,
184 /* Conditional moves */
185 OPC_MOVZ = 0x0A | OPC_SPECIAL,
186 OPC_MOVN = 0x0B | OPC_SPECIAL,
188 OPC_MOVCI = 0x01 | OPC_SPECIAL,
OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
192 OPC_SYSCALL = 0x0C | OPC_SPECIAL,
193 OPC_BREAK = 0x0D | OPC_SPECIAL,
OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
195 OPC_SYNC = 0x0F | OPC_SPECIAL,
197 OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
198 OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
199 OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
200 OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
201 OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
202 OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
203 OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
206 /* Multiplication variants of the vr54xx. */
207 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
210 OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
211 OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
212 OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
213 OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
214 OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
215 OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
216 OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
217 OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
218 OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
219 OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
220 OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
221 OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
222 OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
223 OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
226 /* REGIMM (rt field) opcodes */
227 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
230 OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
231 OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
232 OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
233 OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
234 OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
235 OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
236 OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
237 OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
238 OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
239 OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
240 OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
241 OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
242 OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
243 OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
244 OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,
247 /* Special2 opcodes */
248 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
/* Multiply and multiply-accumulate operations */
252 OPC_MADD = 0x00 | OPC_SPECIAL2,
253 OPC_MADDU = 0x01 | OPC_SPECIAL2,
254 OPC_MUL = 0x02 | OPC_SPECIAL2,
255 OPC_MSUB = 0x04 | OPC_SPECIAL2,
256 OPC_MSUBU = 0x05 | OPC_SPECIAL2,
258 OPC_CLZ = 0x20 | OPC_SPECIAL2,
259 OPC_CLO = 0x21 | OPC_SPECIAL2,
260 OPC_DCLZ = 0x24 | OPC_SPECIAL2,
261 OPC_DCLO = 0x25 | OPC_SPECIAL2,
263 OPC_SDBBP = 0x3F | OPC_SPECIAL2,
266 /* Special3 opcodes */
267 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
270 OPC_EXT = 0x00 | OPC_SPECIAL3,
271 OPC_DEXTM = 0x01 | OPC_SPECIAL3,
272 OPC_DEXTU = 0x02 | OPC_SPECIAL3,
273 OPC_DEXT = 0x03 | OPC_SPECIAL3,
274 OPC_INS = 0x04 | OPC_SPECIAL3,
275 OPC_DINSM = 0x05 | OPC_SPECIAL3,
276 OPC_DINSU = 0x06 | OPC_SPECIAL3,
277 OPC_DINS = 0x07 | OPC_SPECIAL3,
278 OPC_FORK = 0x08 | OPC_SPECIAL3,
279 OPC_YIELD = 0x09 | OPC_SPECIAL3,
280 OPC_BSHFL = 0x20 | OPC_SPECIAL3,
281 OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
282 OPC_RDHWR = 0x3B | OPC_SPECIAL3,
286 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
289 OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
290 OPC_SEB = (0x10 << 6) | OPC_BSHFL,
291 OPC_SEH = (0x18 << 6) | OPC_BSHFL,
295 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
298 OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
299 OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
302 /* Coprocessor 0 (rs field) */
303 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
306 OPC_MFC0 = (0x00 << 21) | OPC_CP0,
307 OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
308 OPC_MTC0 = (0x04 << 21) | OPC_CP0,
309 OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
310 OPC_MFTR = (0x08 << 21) | OPC_CP0,
311 OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
312 OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
313 OPC_MTTR = (0x0C << 21) | OPC_CP0,
314 OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
315 OPC_C0 = (0x10 << 21) | OPC_CP0,
316 OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
317 OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
321 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
324 OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
325 OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
326 OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
327 OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
328 OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
329 OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
332 /* Coprocessor 0 (with rs == C0) */
333 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
336 OPC_TLBR = 0x01 | OPC_C0,
337 OPC_TLBWI = 0x02 | OPC_C0,
338 OPC_TLBWR = 0x06 | OPC_C0,
339 OPC_TLBP = 0x08 | OPC_C0,
340 OPC_RFE = 0x10 | OPC_C0,
341 OPC_ERET = 0x18 | OPC_C0,
342 OPC_DERET = 0x1F | OPC_C0,
343 OPC_WAIT = 0x20 | OPC_C0,
346 /* Coprocessor 1 (rs field) */
347 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
350 OPC_MFC1 = (0x00 << 21) | OPC_CP1,
351 OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
352 OPC_CFC1 = (0x02 << 21) | OPC_CP1,
353 OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
354 OPC_MTC1 = (0x04 << 21) | OPC_CP1,
355 OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
356 OPC_CTC1 = (0x06 << 21) | OPC_CP1,
357 OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
358 OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
359 OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
360 OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
361 OPC_S_FMT = (0x10 << 21) | OPC_CP1, /* 16: fmt=single fp */
362 OPC_D_FMT = (0x11 << 21) | OPC_CP1, /* 17: fmt=double fp */
363 OPC_E_FMT = (0x12 << 21) | OPC_CP1, /* 18: fmt=extended fp */
364 OPC_Q_FMT = (0x13 << 21) | OPC_CP1, /* 19: fmt=quad fp */
365 OPC_W_FMT = (0x14 << 21) | OPC_CP1, /* 20: fmt=32bit fixed */
366 OPC_L_FMT = (0x15 << 21) | OPC_CP1, /* 21: fmt=64bit fixed */
367 OPC_PS_FMT = (0x16 << 21) | OPC_CP1, /* 22: fmt=paired single fp */
370 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
371 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
374 OPC_BC1F = (0x00 << 16) | OPC_BC1,
375 OPC_BC1T = (0x01 << 16) | OPC_BC1,
376 OPC_BC1FL = (0x02 << 16) | OPC_BC1,
377 OPC_BC1TL = (0x03 << 16) | OPC_BC1,
381 OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
382 OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
386 OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
387 OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
390 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
393 OPC_MFC2 = (0x00 << 21) | OPC_CP2,
394 OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
395 OPC_CFC2 = (0x02 << 21) | OPC_CP2,
396 OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
397 OPC_MTC2 = (0x04 << 21) | OPC_CP2,
398 OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
399 OPC_CTC2 = (0x06 << 21) | OPC_CP2,
400 OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
401 OPC_BC2 = (0x08 << 21) | OPC_CP2,
404 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
407 OPC_LWXC1 = 0x00 | OPC_CP3,
408 OPC_LDXC1 = 0x01 | OPC_CP3,
409 OPC_LUXC1 = 0x05 | OPC_CP3,
410 OPC_SWXC1 = 0x08 | OPC_CP3,
411 OPC_SDXC1 = 0x09 | OPC_CP3,
412 OPC_SUXC1 = 0x0D | OPC_CP3,
413 OPC_PREFX = 0x0F | OPC_CP3,
414 OPC_ALNV_PS = 0x1E | OPC_CP3,
415 OPC_MADD_S = 0x20 | OPC_CP3,
416 OPC_MADD_D = 0x21 | OPC_CP3,
417 OPC_MADD_PS = 0x26 | OPC_CP3,
418 OPC_MSUB_S = 0x28 | OPC_CP3,
419 OPC_MSUB_D = 0x29 | OPC_CP3,
420 OPC_MSUB_PS = 0x2E | OPC_CP3,
421 OPC_NMADD_S = 0x30 | OPC_CP3,
422 OPC_NMADD_D = 0x31 | OPC_CP3,
OPC_NMADD_PS = 0x36 | OPC_CP3,
424 OPC_NMSUB_S = 0x38 | OPC_CP3,
425 OPC_NMSUB_D = 0x39 | OPC_CP3,
OPC_NMSUB_PS = 0x3E | OPC_CP3,
429 /* global register indices */
430 static TCGv_ptr cpu_env;
431 static TCGv cpu_gpr[32], cpu_PC;
432 static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
433 static TCGv cpu_dspctrl, btarget, bcond;
434 static TCGv_i32 hflags;
435 static TCGv_i32 fpu_fcr0, fpu_fcr31;
437 #include "gen-icount.h"
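/* The gen_helper_Ni wrappers below box an immediate argument into a
   temporary TCGv_i32 constant, pass it to the named TCG helper after the
   regular operands, and free the temporary again. */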
439 #define gen_helper_0i(name, arg) do { \
440 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
441 gen_helper_##name(helper_tmp); \
442 tcg_temp_free_i32(helper_tmp); \
445 #define gen_helper_1i(name, arg1, arg2) do { \
446 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
447 gen_helper_##name(arg1, helper_tmp); \
448 tcg_temp_free_i32(helper_tmp); \
451 #define gen_helper_2i(name, arg1, arg2, arg3) do { \
452 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
453 gen_helper_##name(arg1, arg2, helper_tmp); \
454 tcg_temp_free_i32(helper_tmp); \
457 #define gen_helper_3i(name, arg1, arg2, arg3, arg4) do { \
458 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
459 gen_helper_##name(arg1, arg2, arg3, helper_tmp); \
460 tcg_temp_free_i32(helper_tmp); \
463 typedef struct DisasContext {
464 struct TranslationBlock *tb;
465 target_ulong pc, saved_pc;
467 /* Routine used to access memory */
469 uint32_t hflags, saved_hflags;
471 target_ulong btarget;
475 BS_NONE = 0, /* We go out of the TB without reaching a branch or an
476 * exception condition */
477 BS_STOP = 1, /* We want to stop translation for any reason */
478 BS_BRANCH = 2, /* We reached a branch condition */
479 BS_EXCP = 3, /* We reached an exception condition */
482 static const char *regnames[] =
483 { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
484 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
485 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
486 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };
488 static const char *regnames_HI[] =
489 { "HI0", "HI1", "HI2", "HI3", };
491 static const char *regnames_LO[] =
492 { "LO0", "LO1", "LO2", "LO3", };
494 static const char *regnames_ACX[] =
495 { "ACX0", "ACX1", "ACX2", "ACX3", };
497 static const char *fregnames[] =
498 { "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
499 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
500 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
501 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };
503 #ifdef MIPS_DEBUG_DISAS
504 #define MIPS_DEBUG(fmt, args...) \
505 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
506 TARGET_FMT_lx ": %08x " fmt "\n", \
507 ctx->pc, ctx->opcode , ##args)
508 #define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
510 #define MIPS_DEBUG(fmt, args...) do { } while(0)
511 #define LOG_DISAS(...) do { } while (0)
514 #define MIPS_INVAL(op) \
516 MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26, \
517 ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F)); \
/* General purpose register moves. */
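/* GPR 0 is hard-wired to zero: loads of $zero materialize the constant 0
   and stores to $zero are dropped, so cpu_gpr[0] itself is never written. */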
521 static inline void gen_load_gpr (TCGv t, int reg)
524 tcg_gen_movi_tl(t, 0);
526 tcg_gen_mov_tl(t, cpu_gpr[reg]);
529 static inline void gen_store_gpr (TCGv t, int reg)
532 tcg_gen_mov_tl(cpu_gpr[reg], t);
535 /* Moves to/from ACX register. */
536 static inline void gen_load_ACX (TCGv t, int reg)
538 tcg_gen_mov_tl(t, cpu_ACX[reg]);
541 static inline void gen_store_ACX (TCGv t, int reg)
543 tcg_gen_mov_tl(cpu_ACX[reg], t);
546 /* Moves to/from shadow registers. */
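/* RDPGPR/WRPGPR access the previous shadow register set selected by the
   PSS field of CP0_SRSCtl; each set is 32 target_ulong wide, so the code
   below scales the set number by sizeof(target_ulong) * 32 to index the
   selected bank relative to cpu_env. */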
547 static inline void gen_load_srsgpr (int from, int to)
549 TCGv t0 = tcg_temp_new();
552 tcg_gen_movi_tl(t0, 0);
554 TCGv_i32 t2 = tcg_temp_new_i32();
555 TCGv_ptr addr = tcg_temp_new_ptr();
557 tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
558 tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
559 tcg_gen_andi_i32(t2, t2, 0xf);
560 tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
561 tcg_gen_ext_i32_ptr(addr, t2);
562 tcg_gen_add_ptr(addr, cpu_env, addr);
564 tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
565 tcg_temp_free_ptr(addr);
566 tcg_temp_free_i32(t2);
568 gen_store_gpr(t0, to);
572 static inline void gen_store_srsgpr (int from, int to)
575 TCGv t0 = tcg_temp_new();
576 TCGv_i32 t2 = tcg_temp_new_i32();
577 TCGv_ptr addr = tcg_temp_new_ptr();
579 gen_load_gpr(t0, from);
580 tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
581 tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
582 tcg_gen_andi_i32(t2, t2, 0xf);
583 tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
584 tcg_gen_ext_i32_ptr(addr, t2);
585 tcg_gen_add_ptr(addr, cpu_env, addr);
587 tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
588 tcg_temp_free_ptr(addr);
589 tcg_temp_free_i32(t2);
594 /* Floating point register moves. */
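/* Each 64-bit FPR can be accessed as a whole (fpr[reg].d) or as two 32-bit
   words: w[FP_ENDIAN_IDX] holds the single-precision (low) half and
   w[!FP_ENDIAN_IDX] the high half, with FP_ENDIAN_IDX compensating for
   host endianness. */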
595 static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
597 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
600 static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
602 tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
605 static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
607 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
610 static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
612 tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
615 static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
617 if (ctx->hflags & MIPS_HFLAG_F64) {
618 tcg_gen_ld_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
620 TCGv_i32 t0 = tcg_temp_new_i32();
621 TCGv_i32 t1 = tcg_temp_new_i32();
622 gen_load_fpr32(t0, reg & ~1);
623 gen_load_fpr32(t1, reg | 1);
624 tcg_gen_concat_i32_i64(t, t0, t1);
625 tcg_temp_free_i32(t0);
626 tcg_temp_free_i32(t1);
630 static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
632 if (ctx->hflags & MIPS_HFLAG_F64) {
633 tcg_gen_st_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
635 TCGv_i64 t0 = tcg_temp_new_i64();
636 TCGv_i32 t1 = tcg_temp_new_i32();
637 tcg_gen_trunc_i64_i32(t1, t);
638 gen_store_fpr32(t1, reg & ~1);
639 tcg_gen_shri_i64(t0, t, 32);
640 tcg_gen_trunc_i64_i32(t1, t0);
641 gen_store_fpr32(t1, reg | 1);
642 tcg_temp_free_i32(t1);
643 tcg_temp_free_i64(t0);
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
655 #define FOP_CONDS(type, fmt, bits) \
656 static inline void gen_cmp ## type ## _ ## fmt(int n, TCGv_i##bits a, \
657 TCGv_i##bits b, int cc) \
660 case 0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, a, b, cc); break;\
661 case 1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, a, b, cc); break;\
662 case 2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, a, b, cc); break;\
663 case 3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, a, b, cc); break;\
664 case 4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, a, b, cc); break;\
665 case 5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, a, b, cc); break;\
666 case 6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, a, b, cc); break;\
667 case 7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, a, b, cc); break;\
668 case 8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, a, b, cc); break;\
669 case 9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, a, b, cc); break;\
670 case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, a, b, cc); break;\
671 case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, a, b, cc); break;\
672 case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, a, b, cc); break;\
673 case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, a, b, cc); break;\
674 case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, a, b, cc); break;\
675 case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, a, b, cc); break;\
681 FOP_CONDS(abs, d, 64)
683 FOP_CONDS(abs, s, 32)
685 FOP_CONDS(abs, ps, 64)
689 #define OP_COND(name, cond) \
690 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0, TCGv t1) \
692 int l1 = gen_new_label(); \
693 int l2 = gen_new_label(); \
695 tcg_gen_brcond_tl(cond, t0, t1, l1); \
696 tcg_gen_movi_tl(ret, 0); \
699 tcg_gen_movi_tl(ret, 1); \
702 OP_COND(eq, TCG_COND_EQ);
703 OP_COND(ne, TCG_COND_NE);
704 OP_COND(ge, TCG_COND_GE);
705 OP_COND(geu, TCG_COND_GEU);
706 OP_COND(lt, TCG_COND_LT);
707 OP_COND(ltu, TCG_COND_LTU);
710 #define OP_CONDI(name, cond) \
711 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0, target_ulong val) \
713 int l1 = gen_new_label(); \
714 int l2 = gen_new_label(); \
716 tcg_gen_brcondi_tl(cond, t0, val, l1); \
717 tcg_gen_movi_tl(ret, 0); \
720 tcg_gen_movi_tl(ret, 1); \
723 OP_CONDI(lti, TCG_COND_LT);
724 OP_CONDI(ltiu, TCG_COND_LTU);
727 #define OP_CONDZ(name, cond) \
728 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0) \
730 int l1 = gen_new_label(); \
731 int l2 = gen_new_label(); \
733 tcg_gen_brcondi_tl(cond, t0, 0, l1); \
734 tcg_gen_movi_tl(ret, 0); \
737 tcg_gen_movi_tl(ret, 1); \
740 OP_CONDZ(gez, TCG_COND_GE);
741 OP_CONDZ(gtz, TCG_COND_GT);
742 OP_CONDZ(lez, TCG_COND_LE);
743 OP_CONDZ(ltz, TCG_COND_LT);
746 static inline void gen_save_pc(target_ulong pc)
748 tcg_gen_movi_tl(cpu_PC, pc);
751 static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
753 LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
754 if (do_save_pc && ctx->pc != ctx->saved_pc) {
755 gen_save_pc(ctx->pc);
756 ctx->saved_pc = ctx->pc;
758 if (ctx->hflags != ctx->saved_hflags) {
759 tcg_gen_movi_i32(hflags, ctx->hflags);
760 ctx->saved_hflags = ctx->hflags;
761 switch (ctx->hflags & MIPS_HFLAG_BMASK) {
767 tcg_gen_movi_tl(btarget, ctx->btarget);
773 static inline void restore_cpu_state (CPUState *env, DisasContext *ctx)
775 ctx->saved_hflags = ctx->hflags;
776 switch (ctx->hflags & MIPS_HFLAG_BMASK) {
782 ctx->btarget = env->btarget;
788 generate_exception_err (DisasContext *ctx, int excp, int err)
790 TCGv_i32 texcp = tcg_const_i32(excp);
791 TCGv_i32 terr = tcg_const_i32(err);
792 save_cpu_state(ctx, 1);
793 gen_helper_raise_exception_err(texcp, terr);
794 tcg_temp_free_i32(terr);
795 tcg_temp_free_i32(texcp);
799 generate_exception (DisasContext *ctx, int excp)
801 save_cpu_state(ctx, 1);
802 gen_helper_0i(raise_exception, excp);
805 /* Addresses computation */
806 static inline void gen_op_addr_add (DisasContext *ctx, TCGv t0, TCGv t1)
808 tcg_gen_add_tl(t0, t0, t1);
810 #if defined(TARGET_MIPS64)
/* For compatibility with 32-bit code, data references in user mode
   with Status_UX = 0 should be cast to 32 bits and sign-extended.
   See the MIPS64 PRA manual, section 4.10. */
814 if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
815 !(ctx->hflags & MIPS_HFLAG_UX)) {
816 tcg_gen_ext32s_i64(t0, t0);
821 static inline void check_cp0_enabled(DisasContext *ctx)
823 if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
824 generate_exception_err(ctx, EXCP_CpU, 1);
827 static inline void check_cp1_enabled(DisasContext *ctx)
829 if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
830 generate_exception_err(ctx, EXCP_CpU, 1);
833 /* Verify that the processor is running with COP1X instructions enabled.
This is associated with the nabla symbol in the MIPS32 and MIPS64
opcode tables. */
837 static inline void check_cop1x(DisasContext *ctx)
839 if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
840 generate_exception(ctx, EXCP_RI);
843 /* Verify that the processor is running with 64-bit floating-point
844 operations enabled. */
846 static inline void check_cp1_64bitmode(DisasContext *ctx)
848 if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
849 generate_exception(ctx, EXCP_RI);
853 * Verify if floating point register is valid; an operation is not defined
854 * if bit 0 of any register specification is set and the FR bit in the
855 * Status register equals zero, since the register numbers specify an
856 * even-odd pair of adjacent coprocessor general registers. When the FR bit
857 * in the Status register equals one, both even and odd register numbers
858 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
* Multiple 64-bit wide registers can be checked by calling
* check_cp1_registers(ctx, freg1 | freg2 | ... | fregN);
863 static inline void check_cp1_registers(DisasContext *ctx, int regs)
865 if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
866 generate_exception(ctx, EXCP_RI);
869 /* This code generates a "reserved instruction" exception if the
870 CPU does not support the instruction set corresponding to flags. */
871 static inline void check_insn(CPUState *env, DisasContext *ctx, int flags)
873 if (unlikely(!(env->insn_flags & flags)))
874 generate_exception(ctx, EXCP_RI);
877 /* This code generates a "reserved instruction" exception if 64-bit
878 instructions are not enabled. */
879 static inline void check_mips_64(DisasContext *ctx)
881 if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
882 generate_exception(ctx, EXCP_RI);
885 /* load/store instructions. */
886 #define OP_LD(insn,fname) \
887 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
889 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
896 #if defined(TARGET_MIPS64)
902 #define OP_ST(insn,fname) \
903 static inline void op_ldst_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx) \
905 tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx); \
910 #if defined(TARGET_MIPS64)
915 #define OP_LD_ATOMIC(insn,fname) \
916 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
918 TCGv t0 = tcg_temp_new(); \
919 tcg_gen_mov_tl(t0, arg1); \
920 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
921 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_LLAddr)); \
924 OP_LD_ATOMIC(ll,ld32s);
925 #if defined(TARGET_MIPS64)
926 OP_LD_ATOMIC(lld,ld64);
930 #define OP_ST_ATOMIC(insn,fname,almask) \
931 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, TCGv arg2, DisasContext *ctx) \
933 TCGv t0 = tcg_temp_new(); \
934 int l1 = gen_new_label(); \
935 int l2 = gen_new_label(); \
936 int l3 = gen_new_label(); \
938 tcg_gen_andi_tl(t0, arg2, almask); \
939 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
940 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUState, CP0_BadVAddr)); \
941 generate_exception(ctx, EXCP_AdES); \
943 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, CP0_LLAddr)); \
944 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
946 tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx); \
947 tcg_gen_movi_tl(ret, 1); \
950 tcg_gen_movi_tl(ret, 0); \
953 OP_ST_ATOMIC(sc,st32,0x3);
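/* SC succeeds (writing 1 to its result register) only if the store address
   is naturally aligned and still matches the address recorded in CP0_LLAddr
   by the most recent LL; a misaligned address raises an address error store
   exception, and a mismatched address simply yields 0. */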
954 #if defined(TARGET_MIPS64)
955 OP_ST_ATOMIC(scd,st64,0x7);
960 static void gen_ldst (DisasContext *ctx, uint32_t opc, int rt,
961 int base, int16_t offset)
963 const char *opn = "ldst";
964 TCGv t0 = tcg_temp_new();
965 TCGv t1 = tcg_temp_new();
968 tcg_gen_movi_tl(t0, offset);
969 } else if (offset == 0) {
970 gen_load_gpr(t0, base);
972 tcg_gen_movi_tl(t0, offset);
973 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
/* Don't do NOP if destination is zero: we must perform the actual
   memory access. */
978 #if defined(TARGET_MIPS64)
980 save_cpu_state(ctx, 0);
981 op_ldst_lwu(t0, t0, ctx);
982 gen_store_gpr(t0, rt);
986 save_cpu_state(ctx, 0);
987 op_ldst_ld(t0, t0, ctx);
988 gen_store_gpr(t0, rt);
992 save_cpu_state(ctx, 0);
993 op_ldst_lld(t0, t0, ctx);
994 gen_store_gpr(t0, rt);
998 save_cpu_state(ctx, 0);
999 gen_load_gpr(t1, rt);
1000 op_ldst_sd(t1, t0, ctx);
1004 save_cpu_state(ctx, 1);
1005 gen_load_gpr(t1, rt);
1006 gen_helper_3i(ldl, t1, t1, t0, ctx->mem_idx);
1007 gen_store_gpr(t1, rt);
1011 save_cpu_state(ctx, 1);
1012 gen_load_gpr(t1, rt);
1013 gen_helper_2i(sdl, t1, t0, ctx->mem_idx);
1017 save_cpu_state(ctx, 1);
1018 gen_load_gpr(t1, rt);
1019 gen_helper_3i(ldr, t1, t1, t0, ctx->mem_idx);
1020 gen_store_gpr(t1, rt);
1024 save_cpu_state(ctx, 1);
1025 gen_load_gpr(t1, rt);
1026 gen_helper_2i(sdr, t1, t0, ctx->mem_idx);
1031 save_cpu_state(ctx, 0);
1032 op_ldst_lw(t0, t0, ctx);
1033 gen_store_gpr(t0, rt);
1037 save_cpu_state(ctx, 0);
1038 gen_load_gpr(t1, rt);
1039 op_ldst_sw(t1, t0, ctx);
1043 save_cpu_state(ctx, 0);
1044 op_ldst_lh(t0, t0, ctx);
1045 gen_store_gpr(t0, rt);
1049 save_cpu_state(ctx, 0);
1050 gen_load_gpr(t1, rt);
1051 op_ldst_sh(t1, t0, ctx);
1055 save_cpu_state(ctx, 0);
1056 op_ldst_lhu(t0, t0, ctx);
1057 gen_store_gpr(t0, rt);
1061 save_cpu_state(ctx, 0);
1062 op_ldst_lb(t0, t0, ctx);
1063 gen_store_gpr(t0, rt);
1067 save_cpu_state(ctx, 0);
1068 gen_load_gpr(t1, rt);
1069 op_ldst_sb(t1, t0, ctx);
1073 save_cpu_state(ctx, 0);
1074 op_ldst_lbu(t0, t0, ctx);
1075 gen_store_gpr(t0, rt);
1079 save_cpu_state(ctx, 1);
1080 gen_load_gpr(t1, rt);
1081 gen_helper_3i(lwl, t1, t1, t0, ctx->mem_idx);
1082 gen_store_gpr(t1, rt);
1086 save_cpu_state(ctx, 1);
1087 gen_load_gpr(t1, rt);
1088 gen_helper_2i(swl, t1, t0, ctx->mem_idx);
1092 save_cpu_state(ctx, 1);
1093 gen_load_gpr(t1, rt);
1094 gen_helper_3i(lwr, t1, t1, t0, ctx->mem_idx);
1095 gen_store_gpr(t1, rt);
1099 save_cpu_state(ctx, 1);
1100 gen_load_gpr(t1, rt);
1101 gen_helper_2i(swr, t1, t0, ctx->mem_idx);
1105 save_cpu_state(ctx, 0);
1106 op_ldst_ll(t0, t0, ctx);
1107 gen_store_gpr(t0, rt);
1111 MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
1116 /* Store conditional */
1117 static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
1118 int base, int16_t offset)
1120 const char *opn = "st_cond";
1123 t0 = tcg_temp_local_new();
1126 tcg_gen_movi_tl(t0, offset);
1127 } else if (offset == 0) {
1128 gen_load_gpr(t0, base);
1130 tcg_gen_movi_tl(t0, offset);
1131 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
/* Don't do NOP if destination is zero: we must perform the actual
   memory access. */
1136 t1 = tcg_temp_local_new();
1137 gen_load_gpr(t1, rt);
1139 #if defined(TARGET_MIPS64)
1141 save_cpu_state(ctx, 0);
1142 op_ldst_scd(t0, t1, t0, ctx);
1147 save_cpu_state(ctx, 0);
1148 op_ldst_sc(t0, t1, t0, ctx);
1152 MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
1154 gen_store_gpr(t0, rt);
/* Floating point load and store */
1159 static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
1160 int base, int16_t offset)
1162 const char *opn = "flt_ldst";
1163 TCGv t0 = tcg_temp_new();
1166 tcg_gen_movi_tl(t0, offset);
1167 } else if (offset == 0) {
1168 gen_load_gpr(t0, base);
1170 tcg_gen_movi_tl(t0, offset);
1171 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
/* Don't do NOP if destination is zero: we must perform the actual
   memory access. */
1178 TCGv_i32 fp0 = tcg_temp_new_i32();
1180 tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
1181 tcg_gen_trunc_tl_i32(fp0, t0);
1182 gen_store_fpr32(fp0, ft);
1183 tcg_temp_free_i32(fp0);
1189 TCGv_i32 fp0 = tcg_temp_new_i32();
1190 TCGv t1 = tcg_temp_new();
1192 gen_load_fpr32(fp0, ft);
1193 tcg_gen_extu_i32_tl(t1, fp0);
1194 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
1196 tcg_temp_free_i32(fp0);
1202 TCGv_i64 fp0 = tcg_temp_new_i64();
1204 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
1205 gen_store_fpr64(ctx, fp0, ft);
1206 tcg_temp_free_i64(fp0);
1212 TCGv_i64 fp0 = tcg_temp_new_i64();
1214 gen_load_fpr64(ctx, fp0, ft);
1215 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
1216 tcg_temp_free_i64(fp0);
1222 generate_exception(ctx, EXCP_RI);
1225 MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
1230 /* Arithmetic with immediate operand */
1231 static void gen_arith_imm (CPUState *env, DisasContext *ctx, uint32_t opc,
1232 int rt, int rs, int16_t imm)
1234 target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
1235 const char *opn = "imm arith";
1237 if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
1238 /* If no destination, treat it as a NOP.
1239 For addi, we must generate the overflow exception when needed. */
1246 TCGv t0 = tcg_temp_local_new();
1247 TCGv t1 = tcg_temp_new();
1248 TCGv t2 = tcg_temp_new();
1249 int l1 = gen_new_label();
1251 gen_load_gpr(t1, rs);
1252 tcg_gen_addi_tl(t0, t1, uimm);
1253 tcg_gen_ext32s_tl(t0, t0);
1255 tcg_gen_xori_tl(t1, t1, ~uimm);
1256 tcg_gen_xori_tl(t2, t0, uimm);
1257 tcg_gen_and_tl(t1, t1, t2);
1259 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1261 /* operands of same sign, result different sign */
1262 generate_exception(ctx, EXCP_OVERFLOW);
1264 tcg_gen_ext32s_tl(t0, t0);
1265 gen_store_gpr(t0, rt);
1272 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1273 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
1275 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1279 #if defined(TARGET_MIPS64)
1282 TCGv t0 = tcg_temp_local_new();
1283 TCGv t1 = tcg_temp_new();
1284 TCGv t2 = tcg_temp_new();
1285 int l1 = gen_new_label();
1287 gen_load_gpr(t1, rs);
1288 tcg_gen_addi_tl(t0, t1, uimm);
1290 tcg_gen_xori_tl(t1, t1, ~uimm);
1291 tcg_gen_xori_tl(t2, t0, uimm);
1292 tcg_gen_and_tl(t1, t1, t2);
1294 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1296 /* operands of same sign, result different sign */
1297 generate_exception(ctx, EXCP_OVERFLOW);
1299 gen_store_gpr(t0, rt);
1306 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1308 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1314 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1317 /* Logic with immediate operand */
1318 static void gen_logic_imm (CPUState *env, uint32_t opc, int rt, int rs, int16_t imm)
1321 const char *opn = "imm logic";
1324 /* If no destination, treat it as a NOP. */
1328 uimm = (uint16_t)imm;
1331 if (likely(rs != 0))
1332 tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1334 tcg_gen_movi_tl(cpu_gpr[rt], 0);
1339 tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1341 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1345 if (likely(rs != 0))
1346 tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1348 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1352 tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);
1356 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1359 /* Set on less than with immediate operand */
1360 static void gen_slt_imm (CPUState *env, uint32_t opc, int rt, int rs, int16_t imm)
1362 target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
1363 const char *opn = "imm arith";
1367 /* If no destination, treat it as a NOP. */
1371 t0 = tcg_temp_new();
1372 gen_load_gpr(t0, rs);
1375 gen_op_lti(cpu_gpr[rt], t0, uimm);
1379 gen_op_ltiu(cpu_gpr[rt], t0, uimm);
1383 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1387 /* Shifts with immediate operand */
1388 static void gen_shift_imm(CPUState *env, DisasContext *ctx, uint32_t opc,
1389 int rt, int rs, int16_t imm)
1391 target_ulong uimm = ((uint16_t)imm) & 0x1f;
1392 const char *opn = "imm shift";
1396 /* If no destination, treat it as a NOP. */
1401 t0 = tcg_temp_new();
1402 gen_load_gpr(t0, rs);
1405 tcg_gen_shli_tl(t0, t0, uimm);
1406 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
1410 tcg_gen_ext32s_tl(t0, t0);
1411 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
1415 switch ((ctx->opcode >> 21) & 0x1f) {
1418 tcg_gen_ext32u_tl(t0, t0);
1419 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1421 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
1426 /* rotr is decoded as srl on non-R2 CPUs */
1427 if (env->insn_flags & ISA_MIPS32R2) {
1429 TCGv_i32 t1 = tcg_temp_new_i32();
1431 tcg_gen_trunc_tl_i32(t1, t0);
1432 tcg_gen_rotri_i32(t1, t1, uimm);
1433 tcg_gen_ext_i32_tl(cpu_gpr[rt], t1);
1434 tcg_temp_free_i32(t1);
1439 tcg_gen_ext32u_tl(t0, t0);
1440 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1442 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
1448 MIPS_INVAL("invalid srl flag");
1449 generate_exception(ctx, EXCP_RI);
1453 #if defined(TARGET_MIPS64)
1455 tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm);
1459 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
1463 switch ((ctx->opcode >> 21) & 0x1f) {
1465 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1469 /* drotr is decoded as dsrl on non-R2 CPUs */
1470 if (env->insn_flags & ISA_MIPS32R2) {
1472 tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm);
1476 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1481 MIPS_INVAL("invalid dsrl flag");
1482 generate_exception(ctx, EXCP_RI);
1487 tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm + 32);
1491 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm + 32);
1495 switch ((ctx->opcode >> 21) & 0x1f) {
1497 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
1501 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
1502 if (env->insn_flags & ISA_MIPS32R2) {
1503 tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm + 32);
1506 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
1511 MIPS_INVAL("invalid dsrl32 flag");
1512 generate_exception(ctx, EXCP_RI);
1518 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1523 static void gen_arith (CPUState *env, DisasContext *ctx, uint32_t opc,
1524 int rd, int rs, int rt)
1526 const char *opn = "arith";
1528 if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
1529 && opc != OPC_DADD && opc != OPC_DSUB) {
1530 /* If no destination, treat it as a NOP.
1531 For add & sub, we must generate the overflow exception when needed. */
1539 TCGv t0 = tcg_temp_local_new();
1540 TCGv t1 = tcg_temp_new();
1541 TCGv t2 = tcg_temp_new();
1542 int l1 = gen_new_label();
1544 gen_load_gpr(t1, rs);
1545 gen_load_gpr(t2, rt);
1546 tcg_gen_add_tl(t0, t1, t2);
1547 tcg_gen_ext32s_tl(t0, t0);
1548 tcg_gen_xor_tl(t1, t1, t2);
1549 tcg_gen_not_tl(t1, t1);
1550 tcg_gen_xor_tl(t2, t0, t2);
1551 tcg_gen_and_tl(t1, t1, t2);
1553 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1555 /* operands of same sign, result different sign */
1556 generate_exception(ctx, EXCP_OVERFLOW);
1558 gen_store_gpr(t0, rd);
1564 if (rs != 0 && rt != 0) {
1565 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1566 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1567 } else if (rs == 0 && rt != 0) {
1568 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1569 } else if (rs != 0 && rt == 0) {
1570 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1572 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1578 TCGv t0 = tcg_temp_local_new();
1579 TCGv t1 = tcg_temp_new();
1580 TCGv t2 = tcg_temp_new();
1581 int l1 = gen_new_label();
1583 gen_load_gpr(t1, rs);
1584 gen_load_gpr(t2, rt);
1585 tcg_gen_sub_tl(t0, t1, t2);
1586 tcg_gen_ext32s_tl(t0, t0);
1587 tcg_gen_xor_tl(t2, t1, t2);
1588 tcg_gen_xor_tl(t1, t0, t1);
1589 tcg_gen_and_tl(t1, t1, t2);
1591 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1593 /* operands of same sign, result different sign */
1594 generate_exception(ctx, EXCP_OVERFLOW);
1596 gen_store_gpr(t0, rd);
1602 if (rs != 0 && rt != 0) {
1603 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1604 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1605 } else if (rs == 0 && rt != 0) {
1606 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
1607 } else if (rs != 0 && rt == 0) {
1608 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1610 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1614 #if defined(TARGET_MIPS64)
1617 TCGv t0 = tcg_temp_local_new();
1618 TCGv t1 = tcg_temp_new();
1619 TCGv t2 = tcg_temp_new();
1620 int l1 = gen_new_label();
1622 gen_load_gpr(t1, rs);
1623 gen_load_gpr(t2, rt);
1624 tcg_gen_add_tl(t0, t1, t2);
1625 tcg_gen_xor_tl(t1, t1, t2);
1626 tcg_gen_not_tl(t1, t1);
1627 tcg_gen_xor_tl(t2, t0, t2);
1628 tcg_gen_and_tl(t1, t1, t2);
1630 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1632 /* operands of same sign, result different sign */
1633 generate_exception(ctx, EXCP_OVERFLOW);
1635 gen_store_gpr(t0, rd);
1641 if (rs != 0 && rt != 0) {
1642 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1643 } else if (rs == 0 && rt != 0) {
1644 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1645 } else if (rs != 0 && rt == 0) {
1646 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1648 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1654 TCGv t0 = tcg_temp_local_new();
1655 TCGv t1 = tcg_temp_new();
1656 TCGv t2 = tcg_temp_new();
1657 int l1 = gen_new_label();
1659 gen_load_gpr(t1, rs);
1660 gen_load_gpr(t2, rt);
1661 tcg_gen_sub_tl(t0, t1, t2);
1662 tcg_gen_xor_tl(t2, t1, t2);
1663 tcg_gen_xor_tl(t1, t0, t1);
1664 tcg_gen_and_tl(t1, t1, t2);
1666 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1668 /* operands of same sign, result different sign */
1669 generate_exception(ctx, EXCP_OVERFLOW);
1671 gen_store_gpr(t0, rd);
1677 if (rs != 0 && rt != 0) {
1678 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1679 } else if (rs == 0 && rt != 0) {
1680 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
1681 } else if (rs != 0 && rt == 0) {
1682 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1684 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1690 if (likely(rs != 0 && rt != 0)) {
1691 tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1692 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1694 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1699 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1702 /* Conditional move */
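/* MOVN writes rd only when rt != 0 and MOVZ only when rt == 0; the
   generated code branches over the move when the condition fails. */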
1703 static void gen_cond_move (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1705 const char *opn = "cond move";
/* If no destination, treat it as a NOP. */
1715 l1 = gen_new_label();
1718 if (likely(rt != 0))
1719 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rt], 0, l1);
1725 if (likely(rt != 0))
1726 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rt], 0, l1);
1731 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1733 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1736 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1740 static void gen_logic (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1742 const char *opn = "logic";
1745 /* If no destination, treat it as a NOP. */
1752 if (likely(rs != 0 && rt != 0)) {
1753 tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1755 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1760 if (rs != 0 && rt != 0) {
1761 tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1762 } else if (rs == 0 && rt != 0) {
1763 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
1764 } else if (rs != 0 && rt == 0) {
1765 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
1767 tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));
1772 if (likely(rs != 0 && rt != 0)) {
1773 tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1774 } else if (rs == 0 && rt != 0) {
1775 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1776 } else if (rs != 0 && rt == 0) {
1777 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1779 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1784 if (likely(rs != 0 && rt != 0)) {
1785 tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1786 } else if (rs == 0 && rt != 0) {
1787 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1788 } else if (rs != 0 && rt == 0) {
1789 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1791 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1796 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
/* Set on less than */
1800 static void gen_slt (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1802 const char *opn = "slt";
1806 /* If no destination, treat it as a NOP. */
1811 t0 = tcg_temp_new();
1812 t1 = tcg_temp_new();
1813 gen_load_gpr(t0, rs);
1814 gen_load_gpr(t1, rt);
1817 gen_op_lt(cpu_gpr[rd], t0, t1);
1821 gen_op_ltu(cpu_gpr[rd], t0, t1);
1825 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1831 static void gen_shift (CPUState *env, DisasContext *ctx, uint32_t opc,
1832 int rd, int rs, int rt)
1834 const char *opn = "shifts";
/* If no destination, treat it as a NOP. */
1844 t0 = tcg_temp_new();
1845 t1 = tcg_temp_new();
1846 gen_load_gpr(t0, rs);
1847 gen_load_gpr(t1, rt);
1850 tcg_gen_andi_tl(t0, t0, 0x1f);
1851 tcg_gen_shl_tl(t0, t1, t0);
1852 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1856 tcg_gen_ext32s_tl(t1, t1);
1857 tcg_gen_andi_tl(t0, t0, 0x1f);
1858 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
1862 switch ((ctx->opcode >> 6) & 0x1f) {
1864 tcg_gen_ext32u_tl(t1, t1);
1865 tcg_gen_andi_tl(t0, t0, 0x1f);
1866 tcg_gen_shr_tl(t0, t1, t0);
1867 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1871 /* rotrv is decoded as srlv on non-R2 CPUs */
1872 if (env->insn_flags & ISA_MIPS32R2) {
1873 TCGv_i32 t2 = tcg_temp_new_i32();
1874 TCGv_i32 t3 = tcg_temp_new_i32();
1876 tcg_gen_trunc_tl_i32(t2, t0);
1877 tcg_gen_trunc_tl_i32(t3, t1);
1878 tcg_gen_andi_i32(t2, t2, 0x1f);
1879 tcg_gen_rotr_i32(t2, t3, t2);
1880 tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
1881 tcg_temp_free_i32(t2);
1882 tcg_temp_free_i32(t3);
1885 tcg_gen_ext32u_tl(t1, t1);
1886 tcg_gen_andi_tl(t0, t0, 0x1f);
1887 tcg_gen_shr_tl(t0, t1, t0);
1888 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1893 MIPS_INVAL("invalid srlv flag");
1894 generate_exception(ctx, EXCP_RI);
1898 #if defined(TARGET_MIPS64)
1900 tcg_gen_andi_tl(t0, t0, 0x3f);
1901 tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);
1905 tcg_gen_andi_tl(t0, t0, 0x3f);
1906 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
1910 switch ((ctx->opcode >> 6) & 0x1f) {
1912 tcg_gen_andi_tl(t0, t0, 0x3f);
1913 tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);
1917 /* drotrv is decoded as dsrlv on non-R2 CPUs */
1918 if (env->insn_flags & ISA_MIPS32R2) {
1919 tcg_gen_andi_tl(t0, t0, 0x3f);
1920 tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);
1923 tcg_gen_andi_tl(t0, t0, 0x3f);
1924 tcg_gen_shr_tl(t0, t1, t0);
1929 MIPS_INVAL("invalid dsrlv flag");
1930 generate_exception(ctx, EXCP_RI);
1936 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1941 /* Arithmetic on HI/LO registers */
1942 static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
1944 const char *opn = "hilo";
1946 if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
1953 tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[0]);
1957 tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[0]);
1962 tcg_gen_mov_tl(cpu_HI[0], cpu_gpr[reg]);
1964 tcg_gen_movi_tl(cpu_HI[0], 0);
1969 tcg_gen_mov_tl(cpu_LO[0], cpu_gpr[reg]);
1971 tcg_gen_movi_tl(cpu_LO[0], 0);
1975 MIPS_DEBUG("%s %s", opn, regnames[reg]);
1978 static void gen_muldiv (DisasContext *ctx, uint32_t opc,
1981 const char *opn = "mul/div";
1987 #if defined(TARGET_MIPS64)
1991 t0 = tcg_temp_local_new();
1992 t1 = tcg_temp_local_new();
1995 t0 = tcg_temp_new();
1996 t1 = tcg_temp_new();
2000 gen_load_gpr(t0, rs);
2001 gen_load_gpr(t1, rt);
2005 int l1 = gen_new_label();
2006 int l2 = gen_new_label();
2008 tcg_gen_ext32s_tl(t0, t0);
2009 tcg_gen_ext32s_tl(t1, t1);
2010 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
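/* The two branches below guard the INT_MIN / -1 case, which would trap in
   the host's integer division; it is handled by setting LO = rs, HI = 0.
   Division by zero (checked above) leaves LO/HI untouched, which is
   acceptable since the architecture leaves the result UNPREDICTABLE. */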
2011 tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
2012 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
2014 tcg_gen_mov_tl(cpu_LO[0], t0);
2015 tcg_gen_movi_tl(cpu_HI[0], 0);
2018 tcg_gen_div_tl(cpu_LO[0], t0, t1);
2019 tcg_gen_rem_tl(cpu_HI[0], t0, t1);
2020 tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
2021 tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
2028 int l1 = gen_new_label();
2030 tcg_gen_ext32u_tl(t0, t0);
2031 tcg_gen_ext32u_tl(t1, t1);
2032 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2033 tcg_gen_divu_tl(cpu_LO[0], t0, t1);
2034 tcg_gen_remu_tl(cpu_HI[0], t0, t1);
2035 tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
2036 tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
2043 TCGv_i64 t2 = tcg_temp_new_i64();
2044 TCGv_i64 t3 = tcg_temp_new_i64();
2046 tcg_gen_ext_tl_i64(t2, t0);
2047 tcg_gen_ext_tl_i64(t3, t1);
2048 tcg_gen_mul_i64(t2, t2, t3);
2049 tcg_temp_free_i64(t3);
2050 tcg_gen_trunc_i64_tl(t0, t2);
2051 tcg_gen_shri_i64(t2, t2, 32);
2052 tcg_gen_trunc_i64_tl(t1, t2);
2053 tcg_temp_free_i64(t2);
2054 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2055 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2061 TCGv_i64 t2 = tcg_temp_new_i64();
2062 TCGv_i64 t3 = tcg_temp_new_i64();
2064 tcg_gen_ext32u_tl(t0, t0);
2065 tcg_gen_ext32u_tl(t1, t1);
2066 tcg_gen_extu_tl_i64(t2, t0);
2067 tcg_gen_extu_tl_i64(t3, t1);
2068 tcg_gen_mul_i64(t2, t2, t3);
2069 tcg_temp_free_i64(t3);
2070 tcg_gen_trunc_i64_tl(t0, t2);
2071 tcg_gen_shri_i64(t2, t2, 32);
2072 tcg_gen_trunc_i64_tl(t1, t2);
2073 tcg_temp_free_i64(t2);
2074 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2075 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2079 #if defined(TARGET_MIPS64)
2082 int l1 = gen_new_label();
2083 int l2 = gen_new_label();
2085 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2086 tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
2087 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
2088 tcg_gen_mov_tl(cpu_LO[0], t0);
2089 tcg_gen_movi_tl(cpu_HI[0], 0);
2092 tcg_gen_div_i64(cpu_LO[0], t0, t1);
2093 tcg_gen_rem_i64(cpu_HI[0], t0, t1);
2100 int l1 = gen_new_label();
2102 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2103 tcg_gen_divu_i64(cpu_LO[0], t0, t1);
2104 tcg_gen_remu_i64(cpu_HI[0], t0, t1);
2110 gen_helper_dmult(t0, t1);
2114 gen_helper_dmultu(t0, t1);
2120 TCGv_i64 t2 = tcg_temp_new_i64();
2121 TCGv_i64 t3 = tcg_temp_new_i64();
2123 tcg_gen_ext_tl_i64(t2, t0);
2124 tcg_gen_ext_tl_i64(t3, t1);
2125 tcg_gen_mul_i64(t2, t2, t3);
2126 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2127 tcg_gen_add_i64(t2, t2, t3);
2128 tcg_temp_free_i64(t3);
2129 tcg_gen_trunc_i64_tl(t0, t2);
2130 tcg_gen_shri_i64(t2, t2, 32);
2131 tcg_gen_trunc_i64_tl(t1, t2);
2132 tcg_temp_free_i64(t2);
2133 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2134 tcg_gen_ext32s_tl(cpu_LO[1], t1);
2140 TCGv_i64 t2 = tcg_temp_new_i64();
2141 TCGv_i64 t3 = tcg_temp_new_i64();
2143 tcg_gen_ext32u_tl(t0, t0);
2144 tcg_gen_ext32u_tl(t1, t1);
2145 tcg_gen_extu_tl_i64(t2, t0);
2146 tcg_gen_extu_tl_i64(t3, t1);
2147 tcg_gen_mul_i64(t2, t2, t3);
2148 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2149 tcg_gen_add_i64(t2, t2, t3);
2150 tcg_temp_free_i64(t3);
2151 tcg_gen_trunc_i64_tl(t0, t2);
2152 tcg_gen_shri_i64(t2, t2, 32);
2153 tcg_gen_trunc_i64_tl(t1, t2);
2154 tcg_temp_free_i64(t2);
2155 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2156 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2162 TCGv_i64 t2 = tcg_temp_new_i64();
2163 TCGv_i64 t3 = tcg_temp_new_i64();
2165 tcg_gen_ext_tl_i64(t2, t0);
2166 tcg_gen_ext_tl_i64(t3, t1);
2167 tcg_gen_mul_i64(t2, t2, t3);
2168 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2169 tcg_gen_sub_i64(t2, t2, t3);
2170 tcg_temp_free_i64(t3);
2171 tcg_gen_trunc_i64_tl(t0, t2);
2172 tcg_gen_shri_i64(t2, t2, 32);
2173 tcg_gen_trunc_i64_tl(t1, t2);
2174 tcg_temp_free_i64(t2);
2175 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2176 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2182 TCGv_i64 t2 = tcg_temp_new_i64();
2183 TCGv_i64 t3 = tcg_temp_new_i64();
2185 tcg_gen_ext32u_tl(t0, t0);
2186 tcg_gen_ext32u_tl(t1, t1);
2187 tcg_gen_extu_tl_i64(t2, t0);
2188 tcg_gen_extu_tl_i64(t3, t1);
2189 tcg_gen_mul_i64(t2, t2, t3);
2190 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2191 tcg_gen_sub_i64(t2, t2, t3);
2192 tcg_temp_free_i64(t3);
2193 tcg_gen_trunc_i64_tl(t0, t2);
2194 tcg_gen_shri_i64(t2, t2, 32);
2195 tcg_gen_trunc_i64_tl(t1, t2);
2196 tcg_temp_free_i64(t2);
2197 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2198 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2204 generate_exception(ctx, EXCP_RI);
2207 MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
2213 static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
2214 int rd, int rs, int rt)
2216 const char *opn = "mul vr54xx";
2217 TCGv t0 = tcg_temp_new();
2218 TCGv t1 = tcg_temp_new();
2220 gen_load_gpr(t0, rs);
2221 gen_load_gpr(t1, rt);
2224 case OPC_VR54XX_MULS:
2225 gen_helper_muls(t0, t0, t1);
2228 case OPC_VR54XX_MULSU:
2229 gen_helper_mulsu(t0, t0, t1);
2232 case OPC_VR54XX_MACC:
2233 gen_helper_macc(t0, t0, t1);
2236 case OPC_VR54XX_MACCU:
2237 gen_helper_maccu(t0, t0, t1);
2240 case OPC_VR54XX_MSAC:
2241 gen_helper_msac(t0, t0, t1);
2244 case OPC_VR54XX_MSACU:
2245 gen_helper_msacu(t0, t0, t1);
2248 case OPC_VR54XX_MULHI:
2249 gen_helper_mulhi(t0, t0, t1);
2252 case OPC_VR54XX_MULHIU:
2253 gen_helper_mulhiu(t0, t0, t1);
2256 case OPC_VR54XX_MULSHI:
2257 gen_helper_mulshi(t0, t0, t1);
2260 case OPC_VR54XX_MULSHIU:
2261 gen_helper_mulshiu(t0, t0, t1);
2264 case OPC_VR54XX_MACCHI:
2265 gen_helper_macchi(t0, t0, t1);
2268 case OPC_VR54XX_MACCHIU:
2269 gen_helper_macchiu(t0, t0, t1);
2272 case OPC_VR54XX_MSACHI:
2273 gen_helper_msachi(t0, t0, t1);
2276 case OPC_VR54XX_MSACHIU:
2277 gen_helper_msachiu(t0, t0, t1);
2281 MIPS_INVAL("mul vr54xx");
2282 generate_exception(ctx, EXCP_RI);
2285 gen_store_gpr(t0, rd);
2286 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
2293 static void gen_cl (DisasContext *ctx, uint32_t opc,
2296 const char *opn = "CLx";
2304 t0 = tcg_temp_new();
2305 gen_load_gpr(t0, rs);
2308 gen_helper_clo(cpu_gpr[rd], t0);
2312 gen_helper_clz(cpu_gpr[rd], t0);
2315 #if defined(TARGET_MIPS64)
2317 gen_helper_dclo(cpu_gpr[rd], t0);
2321 gen_helper_dclz(cpu_gpr[rd], t0);
2326 MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);
2331 static void gen_trap (DisasContext *ctx, uint32_t opc,
2332 int rs, int rt, int16_t imm)
2335 TCGv t0 = tcg_temp_new();
2336 TCGv t1 = tcg_temp_new();
2339 /* Load needed operands */
2347 /* Compare two registers */
2349 gen_load_gpr(t0, rs);
2350 gen_load_gpr(t1, rt);
2360 /* Compare register to immediate */
2361 if (rs != 0 || imm != 0) {
2362 gen_load_gpr(t0, rs);
2363 tcg_gen_movi_tl(t1, (int32_t)imm);
2370 case OPC_TEQ: /* rs == rs */
2371 case OPC_TEQI: /* r0 == 0 */
2372 case OPC_TGE: /* rs >= rs */
2373 case OPC_TGEI: /* r0 >= 0 */
2374 case OPC_TGEU: /* rs >= rs unsigned */
2375 case OPC_TGEIU: /* r0 >= 0 unsigned */
2377 generate_exception(ctx, EXCP_TRAP);
2379 case OPC_TLT: /* rs < rs */
2380 case OPC_TLTI: /* r0 < 0 */
2381 case OPC_TLTU: /* rs < rs unsigned */
2382 case OPC_TLTIU: /* r0 < 0 unsigned */
2383 case OPC_TNE: /* rs != rs */
2384 case OPC_TNEI: /* r0 != 0 */
2385 /* Never trap: treat as NOP. */
2389 int l1 = gen_new_label();
2394 tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
2398 tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
2402 tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
2406 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
2410 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
2414 tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
2417 generate_exception(ctx, EXCP_TRAP);
2424 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
TranslationBlock *tb;

tb = ctx->tb;
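/* Direct block chaining is only safe when the destination lies on the same
   guest page as the current TB; otherwise the generated code falls back to
   updating cpu_PC and exiting to the main loop. */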
2428 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
2431 tcg_gen_exit_tb((long)tb + n);
2438 /* Branches (before delay slot) */
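/* Branch handling is split in two: this function evaluates the condition
   into bcond (or marks the branch as always/never taken in ctx->hflags) and
   records the target, while the actual control transfer is emitted only
   after the delay slot instruction has been translated. */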
2439 static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
2440 int rs, int rt, int32_t offset)
2442 target_ulong btgt = -1;
2444 int bcond_compute = 0;
2445 TCGv t0 = tcg_temp_new();
2446 TCGv t1 = tcg_temp_new();
2448 if (ctx->hflags & MIPS_HFLAG_BMASK) {
2449 #ifdef MIPS_DEBUG_DISAS
2450 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx "\n", ctx->pc);
2452 generate_exception(ctx, EXCP_RI);
2456 /* Load needed operands */
2462 /* Compare two registers */
2464 gen_load_gpr(t0, rs);
2465 gen_load_gpr(t1, rt);
2468 btgt = ctx->pc + 4 + offset;
2482 /* Compare to zero */
2484 gen_load_gpr(t0, rs);
2487 btgt = ctx->pc + 4 + offset;
2491 /* Jump to immediate */
2492 btgt = ((ctx->pc + 4) & (int32_t)0xF0000000) | (uint32_t)offset;
2496 /* Jump to register */
2497 if (offset != 0 && offset != 16) {
2498 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
2499 others are reserved. */
2500 MIPS_INVAL("jump hint");
2501 generate_exception(ctx, EXCP_RI);
2504 gen_load_gpr(btarget, rs);
2507 MIPS_INVAL("branch/jump");
2508 generate_exception(ctx, EXCP_RI);
2511 if (bcond_compute == 0) {
2512 /* No condition to be computed */
2514 case OPC_BEQ: /* rx == rx */
2515 case OPC_BEQL: /* rx == rx likely */
2516 case OPC_BGEZ: /* 0 >= 0 */
2517 case OPC_BGEZL: /* 0 >= 0 likely */
2518 case OPC_BLEZ: /* 0 <= 0 */
2519 case OPC_BLEZL: /* 0 <= 0 likely */
2521 ctx->hflags |= MIPS_HFLAG_B;
2522 MIPS_DEBUG("balways");
2524 case OPC_BGEZAL: /* 0 >= 0 */
2525 case OPC_BGEZALL: /* 0 >= 0 likely */
2526 /* Always take and link */
2528 ctx->hflags |= MIPS_HFLAG_B;
2529 MIPS_DEBUG("balways and link");
2531 case OPC_BNE: /* rx != rx */
2532 case OPC_BGTZ: /* 0 > 0 */
2533 case OPC_BLTZ: /* 0 < 0 */
2535 MIPS_DEBUG("bnever (NOP)");
2537 case OPC_BLTZAL: /* 0 < 0 */
2538 tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
2539 MIPS_DEBUG("bnever and link");
2541 case OPC_BLTZALL: /* 0 < 0 likely */
2542 tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
2543 /* Skip the instruction in the delay slot */
2544 MIPS_DEBUG("bnever, link and skip");
2547 case OPC_BNEL: /* rx != rx likely */
2548 case OPC_BGTZL: /* 0 > 0 likely */
2549 case OPC_BLTZL: /* 0 < 0 likely */
2550 /* Skip the instruction in the delay slot */
2551 MIPS_DEBUG("bnever and skip");
2555 ctx->hflags |= MIPS_HFLAG_B;
2556 MIPS_DEBUG("j " TARGET_FMT_lx, btgt);
2560 ctx->hflags |= MIPS_HFLAG_B;
2561 MIPS_DEBUG("jal " TARGET_FMT_lx, btgt);
2564 ctx->hflags |= MIPS_HFLAG_BR;
2565 MIPS_DEBUG("jr %s", regnames[rs]);
2569 ctx->hflags |= MIPS_HFLAG_BR;
2570 MIPS_DEBUG("jalr %s, %s", regnames[rt], regnames[rs]);
2573 MIPS_INVAL("branch/jump");
2574 generate_exception(ctx, EXCP_RI);
2580 gen_op_eq(bcond, t0, t1);
2581 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx,
2582 regnames[rs], regnames[rt], btgt);
2585 gen_op_eq(bcond, t0, t1);
2586 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx,
2587 regnames[rs], regnames[rt], btgt);
2590 gen_op_ne(bcond, t0, t1);
2591 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx,
2592 regnames[rs], regnames[rt], btgt);
2595 gen_op_ne(bcond, t0, t1);
2596 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx,
2597 regnames[rs], regnames[rt], btgt);
2600 gen_op_gez(bcond, t0);
2601 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx, regnames[rs], btgt);
2604 gen_op_gez(bcond, t0);
2605 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2608 gen_op_gez(bcond, t0);
2609 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx, regnames[rs], btgt);
2613 gen_op_gez(bcond, t0);
2615 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx, regnames[rs], btgt);
2618 gen_op_gtz(bcond, t0);
2619 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx, regnames[rs], btgt);
2622 gen_op_gtz(bcond, t0);
2623 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2626 gen_op_lez(bcond, t0);
2627 MIPS_DEBUG("blez %s, " TARGET_FMT_lx, regnames[rs], btgt);
2630 gen_op_lez(bcond, t0);
2631 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2634 gen_op_ltz(bcond, t0);
2635 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx, regnames[rs], btgt);
2638 gen_op_ltz(bcond, t0);
2639 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2642 gen_op_ltz(bcond, t0);
2644 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx, regnames[rs], btgt);
2646 ctx->hflags |= MIPS_HFLAG_BC;
2649 gen_op_ltz(bcond, t0);
2651 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx, regnames[rs], btgt);
2653 ctx->hflags |= MIPS_HFLAG_BL;
2656 MIPS_INVAL("conditional branch/jump");
2657 generate_exception(ctx, EXCP_RI);
2661 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx,
2662 blink, ctx->hflags, btgt);
2664 ctx->btarget = btgt;
2666 tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + 8);
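/* In outline, the branch state recorded in hflags is:
     MIPS_HFLAG_B   unconditional branch to btarget
     MIPS_HFLAG_BC  conditional branch, condition left in bcond
     MIPS_HFLAG_BL  "likely" branch: the delay slot is nullified when
                    the condition is false
     MIPS_HFLAG_BR  branch to the register value loaded into btarget
   The delay slot instruction is translated next and the branch is
   resolved after it; link variants write the return address
   (branch pc + 8) into GPR 31, or into rd for JALR. */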
2674 /* special3 bitfield operations */
2675 static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
2676 int rs, int lsb, int msb)
2678 TCGv t0 = tcg_temp_new();
2679 TCGv t1 = tcg_temp_new();
2682 gen_load_gpr(t1, rs);
2687 tcg_gen_shri_tl(t0, t1, lsb);
2689 tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);
2691 tcg_gen_ext32s_tl(t0, t0);
2694 #if defined(TARGET_MIPS64)
2696 tcg_gen_shri_tl(t0, t1, lsb);
2698 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
2702 tcg_gen_shri_tl(t0, t1, lsb + 32);
2703 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
2706 tcg_gen_shri_tl(t0, t1, lsb);
2707 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
2713 mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
2714 gen_load_gpr(t0, rt);
2715 tcg_gen_andi_tl(t0, t0, ~mask);
2716 tcg_gen_shli_tl(t1, t1, lsb);
2717 tcg_gen_andi_tl(t1, t1, mask);
2718 tcg_gen_or_tl(t0, t0, t1);
2719 tcg_gen_ext32s_tl(t0, t0);
2721 #if defined(TARGET_MIPS64)
2725 mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
2726 gen_load_gpr(t0, rt);
2727 tcg_gen_andi_tl(t0, t0, ~mask);
2728 tcg_gen_shli_tl(t1, t1, lsb);
2729 tcg_gen_andi_tl(t1, t1, mask);
2730 tcg_gen_or_tl(t0, t0, t1);
2735 mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
2736 gen_load_gpr(t0, rt);
2737 tcg_gen_andi_tl(t0, t0, ~mask);
2738 tcg_gen_shli_tl(t1, t1, lsb + 32);
2739 tcg_gen_andi_tl(t1, t1, mask);
2740 tcg_gen_or_tl(t0, t0, t1);
2746 mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
2747 gen_load_gpr(t0, rt);
2748 tcg_gen_andi_tl(t0, t0, ~mask);
2749 tcg_gen_shli_tl(t1, t1, lsb);
2750 tcg_gen_andi_tl(t1, t1, mask);
2751 tcg_gen_or_tl(t0, t0, t1);
2756 MIPS_INVAL("bitops");
2757 generate_exception(ctx, EXCP_RI);
2762 gen_store_gpr(t0, rt);
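/* Worked example for the INS path above: with lsb = 4 and msb = 11 the
   field is msb - lsb + 1 = 8 bits wide, so

       mask = ((1 << 8) - 1) << 4 = 0x00000ff0

   rt keeps its bits outside the mask, rs is shifted left by lsb and
   masked to supply the bits inside it, and the two halves are OR'ed
   together (with a final sign-extension to 32 bits for the
   non-doubleword forms). */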
2767 static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
2772 /* If no destination, treat it as a NOP. */
2777 t0 = tcg_temp_new();
2778 gen_load_gpr(t0, rt);
2782 TCGv t1 = tcg_temp_new();
2784 tcg_gen_shri_tl(t1, t0, 8);
2785 tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
2786 tcg_gen_shli_tl(t0, t0, 8);
2787 tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
2788 tcg_gen_or_tl(t0, t0, t1);
2790 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
2794 tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
2797 tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
2799 #if defined(TARGET_MIPS64)
2802 TCGv t1 = tcg_temp_new();
2804 tcg_gen_shri_tl(t1, t0, 8);
2805 tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
2806 tcg_gen_shli_tl(t0, t0, 8);
2807 tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
2808 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
2814 TCGv t1 = tcg_temp_new();
2816 tcg_gen_shri_tl(t1, t0, 16);
2817 tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
2818 tcg_gen_shli_tl(t0, t0, 16);
2819 tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
2820 tcg_gen_or_tl(t0, t0, t1);
2821 tcg_gen_shri_tl(t1, t0, 32);
2822 tcg_gen_shli_tl(t0, t0, 32);
2823 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
2829 MIPS_INVAL("bshfl");
2830 generate_exception(ctx, EXCP_RI);
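/* The WSBH path above swaps the two bytes inside each halfword of the
   low 32 bits, e.g. 0xaabbccdd becomes 0xbbaaddcc:

       ((x >> 8) & 0x00ff00ff) | ((x << 8) & ~0x00ff00ff)

   The 64-bit variants use the same shift-and-mask pattern for all four
   halfwords (DSBH), with DSHD additionally reordering the halfwords
   across the doubleword; SEB/SEH are plain 8/16-bit sign extensions
   of rt into rd. */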
2837 #ifndef CONFIG_USER_ONLY
2838 /* CP0 (MMU and control) */
2839 static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
2841 TCGv_i32 t0 = tcg_temp_new_i32();
2843 tcg_gen_ld_i32(t0, cpu_env, off);
2844 tcg_gen_ext_i32_tl(arg, t0);
2845 tcg_temp_free_i32(t0);
2848 static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
2850 tcg_gen_ld_tl(arg, cpu_env, off);
2851 tcg_gen_ext32s_tl(arg, arg);
2854 static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
2856 TCGv_i32 t0 = tcg_temp_new_i32();
2858 tcg_gen_trunc_tl_i32(t0, arg);
2859 tcg_gen_st_i32(t0, cpu_env, off);
2860 tcg_temp_free_i32(t0);
2863 static inline void gen_mtc0_store64 (TCGv arg, target_ulong off)
2865 tcg_gen_ext32s_tl(arg, arg);
2866 tcg_gen_st_tl(arg, cpu_env, off);
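/* These helpers are the common access patterns for CP0 fields:
   32-bit fields go through an i32 temporary (extended or truncated at
   the target-long boundary), and 64-bit fields are sign-extended on
   load/store so that mfc0 on a 64-bit target still yields the
   canonical sign-extended 32-bit value. */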
2869 static void gen_mfc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
2871 const char *rn = "invalid";
2874 check_insn(env, ctx, ISA_MIPS32);
2880 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Index));
2884 check_insn(env, ctx, ASE_MT);
2885 gen_helper_mfc0_mvpcontrol(arg);
2889 check_insn(env, ctx, ASE_MT);
2890 gen_helper_mfc0_mvpconf0(arg);
2894 check_insn(env, ctx, ASE_MT);
2895 gen_helper_mfc0_mvpconf1(arg);
2905 gen_helper_mfc0_random(arg);
2909 check_insn(env, ctx, ASE_MT);
2910 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEControl));
2914 check_insn(env, ctx, ASE_MT);
2915 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf0));
2919 check_insn(env, ctx, ASE_MT);
2920 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf1));
2924 check_insn(env, ctx, ASE_MT);
2925 gen_mfc0_load64(arg, offsetof(CPUState, CP0_YQMask));
2929 check_insn(env, ctx, ASE_MT);
2930 gen_mfc0_load64(arg, offsetof(CPUState, CP0_VPESchedule));
2934 check_insn(env, ctx, ASE_MT);
2935 gen_mfc0_load64(arg, offsetof(CPUState, CP0_VPEScheFBack));
2936 rn = "VPEScheFBack";
2939 check_insn(env, ctx, ASE_MT);
2940 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEOpt));
2950 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo0));
2951 tcg_gen_ext32s_tl(arg, arg);
2955 check_insn(env, ctx, ASE_MT);
2956 gen_helper_mfc0_tcstatus(arg);
2960 check_insn(env, ctx, ASE_MT);
2961 gen_helper_mfc0_tcbind(arg);
2965 check_insn(env, ctx, ASE_MT);
2966 gen_helper_mfc0_tcrestart(arg);
2970 check_insn(env, ctx, ASE_MT);
2971 gen_helper_mfc0_tchalt(arg);
2975 check_insn(env, ctx, ASE_MT);
2976 gen_helper_mfc0_tccontext(arg);
2980 check_insn(env, ctx, ASE_MT);
2981 gen_helper_mfc0_tcschedule(arg);
2985 check_insn(env, ctx, ASE_MT);
2986 gen_helper_mfc0_tcschefback(arg);
2996 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo1));
2997 tcg_gen_ext32s_tl(arg, arg);
3007 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_Context));
3008 tcg_gen_ext32s_tl(arg, arg);
3012 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
3013 rn = "ContextConfig";
3022 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageMask));
3026 check_insn(env, ctx, ISA_MIPS32R2);
3027 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageGrain));
3037 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Wired));
3041 check_insn(env, ctx, ISA_MIPS32R2);
3042 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf0));
3046 check_insn(env, ctx, ISA_MIPS32R2);
3047 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf1));
3051 check_insn(env, ctx, ISA_MIPS32R2);
3052 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf2));
3056 check_insn(env, ctx, ISA_MIPS32R2);
3057 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf3));
3061 check_insn(env, ctx, ISA_MIPS32R2);
3062 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf4));
3072 check_insn(env, ctx, ISA_MIPS32R2);
3073 gen_mfc0_load32(arg, offsetof(CPUState, CP0_HWREna));
3083 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_BadVAddr));
3084 tcg_gen_ext32s_tl(arg, arg);
3094 /* Mark as an IO operation because we read the time. */
3097 gen_helper_mfc0_count(arg);
3100 ctx->bstate = BS_STOP;
3104 /* 6,7 are implementation dependent */
3112 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryHi));
3113 tcg_gen_ext32s_tl(arg, arg);
3123 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Compare));
3126 /* 6,7 are implementation dependent */
3134 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Status));
3138 check_insn(env, ctx, ISA_MIPS32R2);
3139 gen_mfc0_load32(arg, offsetof(CPUState, CP0_IntCtl));
3143 check_insn(env, ctx, ISA_MIPS32R2);
3144 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSCtl));
3148 check_insn(env, ctx, ISA_MIPS32R2);
3149 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSMap));
3159 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Cause));
3169 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
3170 tcg_gen_ext32s_tl(arg, arg);
3180 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PRid));
3184 check_insn(env, ctx, ISA_MIPS32R2);
3185 gen_mfc0_load32(arg, offsetof(CPUState, CP0_EBase));
3195 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config0));
3199 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config1));
3203 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config2));
3207 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config3));
3210 /* 4,5 are reserved */
3211 /* 6,7 are implementation dependent */
3213 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config6));
3217 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config7));
3227 gen_helper_mfc0_lladdr(arg);
3237 gen_helper_1i(mfc0_watchlo, arg, sel);
3247 gen_helper_1i(mfc0_watchhi, arg, sel);
3257 #if defined(TARGET_MIPS64)
3258 check_insn(env, ctx, ISA_MIPS3);
3259 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_XContext));
3260 tcg_gen_ext32s_tl(arg, arg);
3269 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3272 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Framemask));
3280 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3281 rn = "Diagnostic"; /* implementation dependent */
3286 gen_helper_mfc0_debug(arg); /* EJTAG support */
3290 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
3291 rn = "TraceControl";
3294 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
3295 rn = "TraceControl2";
3298 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
3299 rn = "UserTraceData";
3302 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
3313 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
3314 tcg_gen_ext32s_tl(arg, arg);
3324 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Performance0));
3325 rn = "Performance0";
3328 // gen_helper_mfc0_performance1(arg);
3329 rn = "Performance1";
3332 // gen_helper_mfc0_performance2(arg);
3333 rn = "Performance2";
3336 // gen_helper_mfc0_performance3(arg);
3337 rn = "Performance3";
3340 // gen_helper_mfc0_performance4(arg);
3341 rn = "Performance4";
3344 // gen_helper_mfc0_performance5(arg);
3345 rn = "Performance5";
3348 // gen_helper_mfc0_performance6(arg);
3349 rn = "Performance6";
3352 // gen_helper_mfc0_performance7(arg);
3353 rn = "Performance7";
3360 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3366 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3379 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagLo));
3386 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataLo));
3399 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagHi));
3406 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataHi));
3416 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
3417 tcg_gen_ext32s_tl(arg, arg);
3428 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DESAVE));
3438 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
3442 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
3443 generate_exception(ctx, EXCP_RI);
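/* The decode above is, in outline, an outer switch on the CP0 register
   number (0..31) with an inner switch on sel.  Simple fields are read
   straight from CPUState with the load helpers above; registers that
   need masking or have read side effects (Random, Count, the MT-ASE TC
   registers, LLAddr, WatchLo/Hi, ...) go through gen_helper_* calls,
   and selectors that are architecturally defined but unimplemented
   here just move 0 into the destination.  Count is additionally marked
   as an I/O operation because reading it observes time. */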
3446 static void gen_mtc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
3448 const char *rn = "invalid";
3451 check_insn(env, ctx, ISA_MIPS32);
3460 gen_helper_mtc0_index(arg);
3464 check_insn(env, ctx, ASE_MT);
3465 gen_helper_mtc0_mvpcontrol(arg);
3469 check_insn(env, ctx, ASE_MT);
3474 check_insn(env, ctx, ASE_MT);
3489 check_insn(env, ctx, ASE_MT);
3490 gen_helper_mtc0_vpecontrol(arg);
3494 check_insn(env, ctx, ASE_MT);
3495 gen_helper_mtc0_vpeconf0(arg);
3499 check_insn(env, ctx, ASE_MT);
3500 gen_helper_mtc0_vpeconf1(arg);
3504 check_insn(env, ctx, ASE_MT);
3505 gen_helper_mtc0_yqmask(arg);
3509 check_insn(env, ctx, ASE_MT);
3510 gen_mtc0_store64(arg, offsetof(CPUState, CP0_VPESchedule));
3514 check_insn(env, ctx, ASE_MT);
3515 gen_mtc0_store64(arg, offsetof(CPUState, CP0_VPEScheFBack));
3516 rn = "VPEScheFBack";
3519 check_insn(env, ctx, ASE_MT);
3520 gen_helper_mtc0_vpeopt(arg);
3530 gen_helper_mtc0_entrylo0(arg);
3534 check_insn(env, ctx, ASE_MT);
3535 gen_helper_mtc0_tcstatus(arg);
3539 check_insn(env, ctx, ASE_MT);
3540 gen_helper_mtc0_tcbind(arg);
3544 check_insn(env, ctx, ASE_MT);
3545 gen_helper_mtc0_tcrestart(arg);
3549 check_insn(env, ctx, ASE_MT);
3550 gen_helper_mtc0_tchalt(arg);
3554 check_insn(env, ctx, ASE_MT);
3555 gen_helper_mtc0_tccontext(arg);
3559 check_insn(env, ctx, ASE_MT);
3560 gen_helper_mtc0_tcschedule(arg);
3564 check_insn(env, ctx, ASE_MT);
3565 gen_helper_mtc0_tcschefback(arg);
3575 gen_helper_mtc0_entrylo1(arg);
3585 gen_helper_mtc0_context(arg);
3589 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
3590 rn = "ContextConfig";
3599 gen_helper_mtc0_pagemask(arg);
3603 check_insn(env, ctx, ISA_MIPS32R2);
3604 gen_helper_mtc0_pagegrain(arg);
3614 gen_helper_mtc0_wired(arg);
3618 check_insn(env, ctx, ISA_MIPS32R2);
3619 gen_helper_mtc0_srsconf0(arg);
3623 check_insn(env, ctx, ISA_MIPS32R2);
3624 gen_helper_mtc0_srsconf1(arg);
3628 check_insn(env, ctx, ISA_MIPS32R2);
3629 gen_helper_mtc0_srsconf2(arg);
3633 check_insn(env, ctx, ISA_MIPS32R2);
3634 gen_helper_mtc0_srsconf3(arg);
3638 check_insn(env, ctx, ISA_MIPS32R2);
3639 gen_helper_mtc0_srsconf4(arg);
3649 check_insn(env, ctx, ISA_MIPS32R2);
3650 gen_helper_mtc0_hwrena(arg);
3664 gen_helper_mtc0_count(arg);
3667 /* 6,7 are implementation dependent */
3675 gen_helper_mtc0_entryhi(arg);
3685 gen_helper_mtc0_compare(arg);
3688 /* 6,7 are implementation dependent */
3696 save_cpu_state(ctx, 1);
3697 gen_helper_mtc0_status(arg);
3698 /* BS_STOP isn't good enough here, hflags may have changed. */
3699 gen_save_pc(ctx->pc + 4);
3700 ctx->bstate = BS_EXCP;
3704 check_insn(env, ctx, ISA_MIPS32R2);
3705 gen_helper_mtc0_intctl(arg);
3706 /* Stop translation as we may have switched the execution mode */
3707 ctx->bstate = BS_STOP;
3711 check_insn(env, ctx, ISA_MIPS32R2);
3712 gen_helper_mtc0_srsctl(arg);
3713 /* Stop translation as we may have switched the execution mode */
3714 ctx->bstate = BS_STOP;
3718 check_insn(env, ctx, ISA_MIPS32R2);
3719 gen_mtc0_store32(arg, offsetof(CPUState, CP0_SRSMap));
3720 /* Stop translation as we may have switched the execution mode */
3721 ctx->bstate = BS_STOP;
3731 save_cpu_state(ctx, 1);
3732 gen_helper_mtc0_cause(arg);
3742 gen_mtc0_store64(arg, offsetof(CPUState, CP0_EPC));
3756 check_insn(env, ctx, ISA_MIPS32R2);
3757 gen_helper_mtc0_ebase(arg);
3767 gen_helper_mtc0_config0(arg);
3769 /* Stop translation as we may have switched the execution mode */
3770 ctx->bstate = BS_STOP;
3773 /* ignored, read only */
3777 gen_helper_mtc0_config2(arg);
3779 /* Stop translation as we may have switched the execution mode */
3780 ctx->bstate = BS_STOP;
3783 /* ignored, read only */
3786 /* 4,5 are reserved */
3787 /* 6,7 are implementation dependent */
3797 rn = "Invalid config selector";
3814 gen_helper_1i(mtc0_watchlo, arg, sel);
3824 gen_helper_1i(mtc0_watchhi, arg, sel);
3834 #if defined(TARGET_MIPS64)
3835 check_insn(env, ctx, ISA_MIPS3);
3836 gen_helper_mtc0_xcontext(arg);
3845 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3848 gen_helper_mtc0_framemask(arg);
3857 rn = "Diagnostic"; /* implementation dependent */
3862 gen_helper_mtc0_debug(arg); /* EJTAG support */
3863 /* BS_STOP isn't good enough here, hflags may have changed. */
3864 gen_save_pc(ctx->pc + 4);
3865 ctx->bstate = BS_EXCP;
3869 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
3870 rn = "TraceControl";
3871 /* Stop translation as we may have switched the execution mode */
3872 ctx->bstate = BS_STOP;
3875 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
3876 rn = "TraceControl2";
3877 /* Stop translation as we may have switched the execution mode */
3878 ctx->bstate = BS_STOP;
3883 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
3884 rn = "UserTraceData";
3885 /* Stop translation as we may have switched the execution mode */
3886 ctx->bstate = BS_STOP;
3889 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
3890 /* Stop translation as we may have switched the execution mode */
3891 ctx->bstate = BS_STOP;
3902 gen_mtc0_store64(arg, offsetof(CPUState, CP0_DEPC));
3912 gen_helper_mtc0_performance0(arg);
3913 rn = "Performance0";
3916 // gen_helper_mtc0_performance1(arg);
3917 rn = "Performance1";
3920 // gen_helper_mtc0_performance2(arg);
3921 rn = "Performance2";
3924 // gen_helper_mtc0_performance3(arg);
3925 rn = "Performance3";
3928 // gen_helper_mtc0_performance4(arg);
3929 rn = "Performance4";
3932 // gen_helper_mtc0_performance5(arg);
3933 rn = "Performance5";
3936 // gen_helper_mtc0_performance6(arg);
3937 rn = "Performance6";
3940 // gen_helper_mtc0_performance7(arg);
3941 rn = "Performance7";
3967 gen_helper_mtc0_taglo(arg);
3974 gen_helper_mtc0_datalo(arg);
3987 gen_helper_mtc0_taghi(arg);
3994 gen_helper_mtc0_datahi(arg);
4005 gen_mtc0_store64(arg, offsetof(CPUState, CP0_ErrorEPC));
4016 gen_mtc0_store32(arg, offsetof(CPUState, CP0_DESAVE));
4022 /* Stop translation as we may have switched the execution mode */
4023 ctx->bstate = BS_STOP;
4028 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
4029 /* For simplicity assume that all writes can cause interrupts. */
4032 ctx->bstate = BS_STOP;
4037 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
4038 generate_exception(ctx, EXCP_RI);
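/* Broadly, the mtc0 side mirrors the read path but must be more
   conservative about translation state: writes that can change hflags
   (Status, Debug) save the PC and end the block with BS_EXCP, writes
   that may change the execution mode or unmask interrupts end it with
   BS_STOP, and the common exit assumes, for simplicity, that any CP0
   write can raise an interrupt once icount is in use. */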
4041 #if defined(TARGET_MIPS64)
4042 static void gen_dmfc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
4044 const char *rn = "invalid";
4047 check_insn(env, ctx, ISA_MIPS64);
4053 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Index));
4057 check_insn(env, ctx, ASE_MT);
4058 gen_helper_mfc0_mvpcontrol(arg);
4062 check_insn(env, ctx, ASE_MT);
4063 gen_helper_mfc0_mvpconf0(arg);
4067 check_insn(env, ctx, ASE_MT);
4068 gen_helper_mfc0_mvpconf1(arg);
4078 gen_helper_mfc0_random(arg);
4082 check_insn(env, ctx, ASE_MT);
4083 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEControl));
4087 check_insn(env, ctx, ASE_MT);
4088 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf0));
4092 check_insn(env, ctx, ASE_MT);
4093 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf1));
4097 check_insn(env, ctx, ASE_MT);
4098 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_YQMask));
4102 check_insn(env, ctx, ASE_MT);
4103 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_VPESchedule));
4107 check_insn(env, ctx, ASE_MT);
4108 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
4109 rn = "VPEScheFBack";
4112 check_insn(env, ctx, ASE_MT);
4113 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEOpt));
4123 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo0));
4127 check_insn(env, ctx, ASE_MT);
4128 gen_helper_mfc0_tcstatus(arg);
4132 check_insn(env, ctx, ASE_MT);
4133 gen_helper_mfc0_tcbind(arg);
4137 check_insn(env, ctx, ASE_MT);
4138 gen_helper_dmfc0_tcrestart(arg);
4142 check_insn(env, ctx, ASE_MT);
4143 gen_helper_dmfc0_tchalt(arg);
4147 check_insn(env, ctx, ASE_MT);
4148 gen_helper_dmfc0_tccontext(arg);
4152 check_insn(env, ctx, ASE_MT);
4153 gen_helper_dmfc0_tcschedule(arg);
4157 check_insn(env, ctx, ASE_MT);
4158 gen_helper_dmfc0_tcschefback(arg);
4168 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo1));
4178 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_Context));
4182 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
4183 rn = "ContextConfig";
4192 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageMask));
4196 check_insn(env, ctx, ISA_MIPS32R2);
4197 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageGrain));
4207 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Wired));
4211 check_insn(env, ctx, ISA_MIPS32R2);
4212 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf0));
4216 check_insn(env, ctx, ISA_MIPS32R2);
4217 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf1));
4221 check_insn(env, ctx, ISA_MIPS32R2);
4222 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf2));
4226 check_insn(env, ctx, ISA_MIPS32R2);
4227 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf3));
4231 check_insn(env, ctx, ISA_MIPS32R2);
4232 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf4));
4242 check_insn(env, ctx, ISA_MIPS32R2);
4243 gen_mfc0_load32(arg, offsetof(CPUState, CP0_HWREna));
4253 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_BadVAddr));
4263 /* Mark as an IO operation because we read the time. */
4266 gen_helper_mfc0_count(arg);
4269 ctx->bstate = BS_STOP;
4273 /* 6,7 are implementation dependent */
4281 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryHi));
4291 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Compare));
4294 /* 6,7 are implementation dependent */
4302 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Status));
4306 check_insn(env, ctx, ISA_MIPS32R2);
4307 gen_mfc0_load32(arg, offsetof(CPUState, CP0_IntCtl));
4311 check_insn(env, ctx, ISA_MIPS32R2);
4312 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSCtl));
4316 check_insn(env, ctx, ISA_MIPS32R2);
4317 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSMap));
4327 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Cause));
4337 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
4347 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PRid));
4351 check_insn(env, ctx, ISA_MIPS32R2);
4352 gen_mfc0_load32(arg, offsetof(CPUState, CP0_EBase));
4362 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config0));
4366 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config1));
4370 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config2));
4374 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config3));
4377 /* 6,7 are implementation dependent */
4379 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config6));
4383 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config7));
4393 gen_helper_dmfc0_lladdr(arg);
4403 gen_helper_1i(dmfc0_watchlo, arg, sel);
4413 gen_helper_1i(mfc0_watchhi, arg, sel);
4423 check_insn(env, ctx, ISA_MIPS3);
4424 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_XContext));
4432 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4435 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Framemask));
4443 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4444 rn = "Diagnostic"; /* implementation dependent */
4449 gen_helper_mfc0_debug(arg); /* EJTAG support */
4453 // gen_helper_dmfc0_tracecontrol(arg); /* PDtrace support */
4454 rn = "TraceControl";
4457 // gen_helper_dmfc0_tracecontrol2(arg); /* PDtrace support */
4458 rn = "TraceControl2";
4461 // gen_helper_dmfc0_usertracedata(arg); /* PDtrace support */
4462 rn = "UserTraceData";
4465 // gen_helper_dmfc0_tracebpc(arg); /* PDtrace support */
4476 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
4486 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Performance0));
4487 rn = "Performance0";
4490 // gen_helper_dmfc0_performance1(arg);
4491 rn = "Performance1";
4494 // gen_helper_dmfc0_performance2(arg);
4495 rn = "Performance2";
4498 // gen_helper_dmfc0_performance3(arg);
4499 rn = "Performance3";
4502 // gen_helper_dmfc0_performance4(arg);
4503 rn = "Performance4";
4506 // gen_helper_dmfc0_performance5(arg);
4507 rn = "Performance5";
4510 // gen_helper_dmfc0_performance6(arg);
4511 rn = "Performance6";
4514 // gen_helper_dmfc0_performance7(arg);
4515 rn = "Performance7";
4522 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4529 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4542 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagLo));
4549 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataLo));
4562 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagHi));
4569 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataHi));
4579 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
4590 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DESAVE));
4600 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
4604 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
4605 generate_exception(ctx, EXCP_RI);
4608 static void gen_dmtc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
4610 const char *rn = "invalid";
4613 check_insn(env, ctx, ISA_MIPS64);
4622 gen_helper_mtc0_index(arg);
4626 check_insn(env, ctx, ASE_MT);
4627 gen_helper_mtc0_mvpcontrol(arg);
4631 check_insn(env, ctx, ASE_MT);
4636 check_insn(env, ctx, ASE_MT);
4651 check_insn(env, ctx, ASE_MT);
4652 gen_helper_mtc0_vpecontrol(arg);
4656 check_insn(env, ctx, ASE_MT);
4657 gen_helper_mtc0_vpeconf0(arg);
4661 check_insn(env, ctx, ASE_MT);
4662 gen_helper_mtc0_vpeconf1(arg);
4666 check_insn(env, ctx, ASE_MT);
4667 gen_helper_mtc0_yqmask(arg);
4671 check_insn(env, ctx, ASE_MT);
4672 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_VPESchedule));
4676 check_insn(env, ctx, ASE_MT);
4677 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
4678 rn = "VPEScheFBack";
4681 check_insn(env, ctx, ASE_MT);
4682 gen_helper_mtc0_vpeopt(arg);
4692 gen_helper_mtc0_entrylo0(arg);
4696 check_insn(env, ctx, ASE_MT);
4697 gen_helper_mtc0_tcstatus(arg);
4701 check_insn(env, ctx, ASE_MT);
4702 gen_helper_mtc0_tcbind(arg);
4706 check_insn(env, ctx, ASE_MT);
4707 gen_helper_mtc0_tcrestart(arg);
4711 check_insn(env, ctx, ASE_MT);
4712 gen_helper_mtc0_tchalt(arg);
4716 check_insn(env, ctx, ASE_MT);
4717 gen_helper_mtc0_tccontext(arg);
4721 check_insn(env, ctx, ASE_MT);
4722 gen_helper_mtc0_tcschedule(arg);
4726 check_insn(env, ctx, ASE_MT);
4727 gen_helper_mtc0_tcschefback(arg);
4737 gen_helper_mtc0_entrylo1(arg);
4747 gen_helper_mtc0_context(arg);
4751 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
4752 rn = "ContextConfig";
4761 gen_helper_mtc0_pagemask(arg);
4765 check_insn(env, ctx, ISA_MIPS32R2);
4766 gen_helper_mtc0_pagegrain(arg);
4776 gen_helper_mtc0_wired(arg);
4780 check_insn(env, ctx, ISA_MIPS32R2);
4781 gen_helper_mtc0_srsconf0(arg);
4785 check_insn(env, ctx, ISA_MIPS32R2);
4786 gen_helper_mtc0_srsconf1(arg);
4790 check_insn(env, ctx, ISA_MIPS32R2);
4791 gen_helper_mtc0_srsconf2(arg);
4795 check_insn(env, ctx, ISA_MIPS32R2);
4796 gen_helper_mtc0_srsconf3(arg);
4800 check_insn(env, ctx, ISA_MIPS32R2);
4801 gen_helper_mtc0_srsconf4(arg);
4811 check_insn(env, ctx, ISA_MIPS32R2);
4812 gen_helper_mtc0_hwrena(arg);
4826 gen_helper_mtc0_count(arg);
4829 /* 6,7 are implementation dependent */
4833 /* Stop translation as we may have switched the execution mode */
4834 ctx->bstate = BS_STOP;
4839 gen_helper_mtc0_entryhi(arg);
4849 gen_helper_mtc0_compare(arg);
4852 /* 6,7 are implementation dependent */
4856 /* Stop translation as we may have switched the execution mode */
4857 ctx->bstate = BS_STOP;
4862 save_cpu_state(ctx, 1);
4863 gen_helper_mtc0_status(arg);
4864 /* BS_STOP isn't good enough here, hflags may have changed. */
4865 gen_save_pc(ctx->pc + 4);
4866 ctx->bstate = BS_EXCP;
4870 check_insn(env, ctx, ISA_MIPS32R2);
4871 gen_helper_mtc0_intctl(arg);
4872 /* Stop translation as we may have switched the execution mode */
4873 ctx->bstate = BS_STOP;
4877 check_insn(env, ctx, ISA_MIPS32R2);
4878 gen_helper_mtc0_srsctl(arg);
4879 /* Stop translation as we may have switched the execution mode */
4880 ctx->bstate = BS_STOP;
4884 check_insn(env, ctx, ISA_MIPS32R2);
4885 gen_mtc0_store32(arg, offsetof(CPUState, CP0_SRSMap));
4886 /* Stop translation as we may have switched the execution mode */
4887 ctx->bstate = BS_STOP;
4897 save_cpu_state(ctx, 1);
4898 gen_helper_mtc0_cause(arg);
4908 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
4922 check_insn(env, ctx, ISA_MIPS32R2);
4923 gen_helper_mtc0_ebase(arg);
4933 gen_helper_mtc0_config0(arg);
4935 /* Stop translation as we may have switched the execution mode */
4936 ctx->bstate = BS_STOP;
4939 /* ignored, read only */
4943 gen_helper_mtc0_config2(arg);
4945 /* Stop translation as we may have switched the execution mode */
4946 ctx->bstate = BS_STOP;
4952 /* 6,7 are implementation dependent */
4954 rn = "Invalid config selector";
4971 gen_helper_1i(mtc0_watchlo, arg, sel);
4981 gen_helper_1i(mtc0_watchhi, arg, sel);
4991 check_insn(env, ctx, ISA_MIPS3);
4992 gen_helper_mtc0_xcontext(arg);
5000 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5003 gen_helper_mtc0_framemask(arg);
5012 rn = "Diagnostic"; /* implementation dependent */
5017 gen_helper_mtc0_debug(arg); /* EJTAG support */
5018 /* BS_STOP isn't good enough here, hflags may have changed. */
5019 gen_save_pc(ctx->pc + 4);
5020 ctx->bstate = BS_EXCP;
5024 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
5025 /* Stop translation as we may have switched the execution mode */
5026 ctx->bstate = BS_STOP;
5027 rn = "TraceControl";
5030 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
5031 /* Stop translation as we may have switched the execution mode */
5032 ctx->bstate = BS_STOP;
5033 rn = "TraceControl2";
5036 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
5037 /* Stop translation as we may have switched the execution mode */
5038 ctx->bstate = BS_STOP;
5039 rn = "UserTraceData";
5042 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
5043 /* Stop translation as we may have switched the execution mode */
5044 ctx->bstate = BS_STOP;
5055 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
5065 gen_helper_mtc0_performance0(arg);
5066 rn = "Performance0";
5069 // gen_helper_mtc0_performance1(arg);
5070 rn = "Performance1";
5073 // gen_helper_mtc0_performance2(arg);
5074 rn = "Performance2";
5077 // gen_helper_mtc0_performance3(arg);
5078 rn = "Performance3";
5081 // gen_helper_mtc0_performance4(arg);
5082 rn = "Performance4";
5085 // gen_helper_mtc0_performance5(arg);
5086 rn = "Performance5";
5089 // gen_helper_mtc0_performance6(arg);
5090 rn = "Performance6";
5093 // gen_helper_mtc0_performance7(arg);
5094 rn = "Performance7";
5120 gen_helper_mtc0_taglo(arg);
5127 gen_helper_mtc0_datalo(arg);
5140 gen_helper_mtc0_taghi(arg);
5147 gen_helper_mtc0_datahi(arg);
5158 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
5169 gen_mtc0_store32(arg, offsetof(CPUState, CP0_DESAVE));
5175 /* Stop translation as we may have switched the execution mode */
5176 ctx->bstate = BS_STOP;
5181 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
5182 /* For simplicity assume that all writes can cause interrupts. */
5185 ctx->bstate = BS_STOP;
5190 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
5191 generate_exception(ctx, EXCP_RI);
5193 #endif /* TARGET_MIPS64 */
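/* MT ASE cross-TC moves, roughly: the target TC comes from
   VPEControl.TargTC.  If that TC belongs to another VPE while this VPE
   is not the master (MVP bit clear), or TargTC exceeds MVPConf0.PTC,
   the access is not permitted: reads return all ones and writes are
   silently ignored.  Otherwise u selects the CP0 view (u = 0) versus
   the GPR / HI-LO-ACX / FPU / DSP views (u = 1), with sel and h
   picking the individual register, and the CP0 cases fall back to
   gen_mfc0 / gen_mtc0 when no per-TC helper is needed. */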
5195 static void gen_mftr(CPUState *env, DisasContext *ctx, int rt, int rd,
5196 int u, int sel, int h)
5198 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
5199 TCGv t0 = tcg_temp_local_new();
5201 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
5202 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
5203 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
5204 tcg_gen_movi_tl(t0, -1);
5205 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
5206 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
5207 tcg_gen_movi_tl(t0, -1);
5213 gen_helper_mftc0_tcstatus(t0);
5216 gen_helper_mftc0_tcbind(t0);
5219 gen_helper_mftc0_tcrestart(t0);
5222 gen_helper_mftc0_tchalt(t0);
5225 gen_helper_mftc0_tccontext(t0);
5228 gen_helper_mftc0_tcschedule(t0);
5231 gen_helper_mftc0_tcschefback(t0);
5234 gen_mfc0(env, ctx, t0, rt, sel);
5241 gen_helper_mftc0_entryhi(t0);
5244 gen_mfc0(env, ctx, t0, rt, sel);
5250 gen_helper_mftc0_status(t0);
5253 gen_mfc0(env, ctx, t0, rt, sel);
5259 gen_helper_mftc0_debug(t0);
5262 gen_mfc0(env, ctx, t0, rt, sel);
5267 gen_mfc0(env, ctx, t0, rt, sel);
5269 } else switch (sel) {
5270 /* GPR registers. */
5272 gen_helper_1i(mftgpr, t0, rt);
5274 /* Auxiliary CPU registers */
5278 gen_helper_1i(mftlo, t0, 0);
5281 gen_helper_1i(mfthi, t0, 0);
5284 gen_helper_1i(mftacx, t0, 0);
5287 gen_helper_1i(mftlo, t0, 1);
5290 gen_helper_1i(mfthi, t0, 1);
5293 gen_helper_1i(mftacx, t0, 1);
5296 gen_helper_1i(mftlo, t0, 2);
5299 gen_helper_1i(mfthi, t0, 2);
5302 gen_helper_1i(mftacx, t0, 2);
5305 gen_helper_1i(mftlo, t0, 3);
5308 gen_helper_1i(mfthi, t0, 3);
5311 gen_helper_1i(mftacx, t0, 3);
5314 gen_helper_mftdsp(t0);
5320 /* Floating point (COP1). */
5322 /* XXX: For now we support only a single FPU context. */
5324 TCGv_i32 fp0 = tcg_temp_new_i32();
5326 gen_load_fpr32(fp0, rt);
5327 tcg_gen_ext_i32_tl(t0, fp0);
5328 tcg_temp_free_i32(fp0);
5330 TCGv_i32 fp0 = tcg_temp_new_i32();
5332 gen_load_fpr32h(fp0, rt);
5333 tcg_gen_ext_i32_tl(t0, fp0);
5334 tcg_temp_free_i32(fp0);
5338 /* XXX: For now we support only a single FPU context. */
5339 gen_helper_1i(cfc1, t0, rt);
5341 /* COP2: Not implemented. */
5348 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
5349 gen_store_gpr(t0, rd);
5355 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
5356 generate_exception(ctx, EXCP_RI);
5359 static void gen_mttr(CPUState *env, DisasContext *ctx, int rd, int rt,
5360 int u, int sel, int h)
5362 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
5363 TCGv t0 = tcg_temp_local_new();
5365 gen_load_gpr(t0, rt);
5366 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
5367 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
5368 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
5370 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
5371 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
5378 gen_helper_mttc0_tcstatus(t0);
5381 gen_helper_mttc0_tcbind(t0);
5384 gen_helper_mttc0_tcrestart(t0);
5387 gen_helper_mttc0_tchalt(t0);
5390 gen_helper_mttc0_tccontext(t0);
5393 gen_helper_mttc0_tcschedule(t0);
5396 gen_helper_mttc0_tcschefback(t0);
5399 gen_mtc0(env, ctx, t0, rd, sel);
5406 gen_helper_mttc0_entryhi(t0);
5409 gen_mtc0(env, ctx, t0, rd, sel);
5415 gen_helper_mttc0_status(t0);
5418 gen_mtc0(env, ctx, t0, rd, sel);
5424 gen_helper_mttc0_debug(t0);
5427 gen_mtc0(env, ctx, t0, rd, sel);
5432 gen_mtc0(env, ctx, t0, rd, sel);
5434 } else switch (sel) {
5435 /* GPR registers. */
5437 gen_helper_1i(mttgpr, t0, rd);
5439 /* Auxiliary CPU registers */
5443 gen_helper_1i(mttlo, t0, 0);
5446 gen_helper_1i(mtthi, t0, 0);
5449 gen_helper_1i(mttacx, t0, 0);
5452 gen_helper_1i(mttlo, t0, 1);
5455 gen_helper_1i(mtthi, t0, 1);
5458 gen_helper_1i(mttacx, t0, 1);
5461 gen_helper_1i(mttlo, t0, 2);
5464 gen_helper_1i(mtthi, t0, 2);
5467 gen_helper_1i(mttacx, t0, 2);
5470 gen_helper_1i(mttlo, t0, 3);
5473 gen_helper_1i(mtthi, t0, 3);
5476 gen_helper_1i(mttacx, t0, 3);
5479 gen_helper_mttdsp(t0);
5485 /* Floating point (COP1). */
5487 /* XXX: For now we support only a single FPU context. */
5489 TCGv_i32 fp0 = tcg_temp_new_i32();
5491 tcg_gen_trunc_tl_i32(fp0, t0);
5492 gen_store_fpr32(fp0, rd);
5493 tcg_temp_free_i32(fp0);
5495 TCGv_i32 fp0 = tcg_temp_new_i32();
5497 tcg_gen_trunc_tl_i32(fp0, t0);
5498 gen_store_fpr32h(fp0, rd);
5499 tcg_temp_free_i32(fp0);
5503 /* XXX: For now we support only a single FPU context. */
5504 gen_helper_1i(ctc1, t0, rd);
5506 /* COP2: Not implemented. */
5513 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
5519 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
5520 generate_exception(ctx, EXCP_RI);
5523 static void gen_cp0 (CPUState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
5525 const char *opn = "ldst";
5533 gen_mfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
5538 TCGv t0 = tcg_temp_new();
5540 gen_load_gpr(t0, rt);
5541 gen_mtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
5546 #if defined(TARGET_MIPS64)
5548 check_insn(env, ctx, ISA_MIPS3);
5553 gen_dmfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
5557 check_insn(env, ctx, ISA_MIPS3);
5559 TCGv t0 = tcg_temp_new();
5561 gen_load_gpr(t0, rt);
5562 gen_dmtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
5569 check_insn(env, ctx, ASE_MT);
5574 gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
5575 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
5579 check_insn(env, ctx, ASE_MT);
5580 gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
5581 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
5586 if (!env->tlb->helper_tlbwi)
5592 if (!env->tlb->helper_tlbwr)
5598 if (!env->tlb->helper_tlbp)
5604 if (!env->tlb->helper_tlbr)
5610 check_insn(env, ctx, ISA_MIPS2);
5612 ctx->bstate = BS_EXCP;
5616 check_insn(env, ctx, ISA_MIPS32);
5617 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
5619 generate_exception(ctx, EXCP_RI);
5622 ctx->bstate = BS_EXCP;
5627 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
5628 /* If we get an exception, we want to restart at the next instruction */
5630 save_cpu_state(ctx, 1);
5633 ctx->bstate = BS_EXCP;
5638 generate_exception(ctx, EXCP_RI);
5641 MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);
5643 #endif /* !CONFIG_USER_ONLY */
5645 /* CP1 Branches (before delay slot) */
5646 static void gen_compute_branch1 (CPUState *env, DisasContext *ctx, uint32_t op,
5647 int32_t cc, int32_t offset)
5649 target_ulong btarget;
5650 const char *opn = "cp1 cond branch";
5651 TCGv_i32 t0 = tcg_temp_new_i32();
5654 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
5656 btarget = ctx->pc + 4 + offset;
5660 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5661 tcg_gen_not_i32(t0, t0);
5662 tcg_gen_andi_i32(t0, t0, 1);
5663 tcg_gen_extu_i32_tl(bcond, t0);
5667 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5668 tcg_gen_not_i32(t0, t0);
5669 tcg_gen_andi_i32(t0, t0, 1);
5670 tcg_gen_extu_i32_tl(bcond, t0);
5674 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5675 tcg_gen_andi_i32(t0, t0, 1);
5676 tcg_gen_extu_i32_tl(bcond, t0);
5680 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5681 tcg_gen_andi_i32(t0, t0, 1);
5682 tcg_gen_extu_i32_tl(bcond, t0);
5685 ctx->hflags |= MIPS_HFLAG_BL;
5689 TCGv_i32 t1 = tcg_temp_new_i32();
5690 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5691 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5692 tcg_gen_or_i32(t0, t0, t1);
5693 tcg_temp_free_i32(t1);
5694 tcg_gen_not_i32(t0, t0);
5695 tcg_gen_andi_i32(t0, t0, 1);
5696 tcg_gen_extu_i32_tl(bcond, t0);
5702 TCGv_i32 t1 = tcg_temp_new_i32();
5703 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5704 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5705 tcg_gen_or_i32(t0, t0, t1);
5706 tcg_temp_free_i32(t1);
5707 tcg_gen_andi_i32(t0, t0, 1);
5708 tcg_gen_extu_i32_tl(bcond, t0);
5714 TCGv_i32 t1 = tcg_temp_new_i32();
5715 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5716 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5717 tcg_gen_or_i32(t0, t0, t1);
5718 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
5719 tcg_gen_or_i32(t0, t0, t1);
5720 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
5721 tcg_gen_or_i32(t0, t0, t1);
5722 tcg_temp_free_i32(t1);
5723 tcg_gen_not_i32(t0, t0);
5724 tcg_gen_andi_i32(t0, t0, 1);
5725 tcg_gen_extu_i32_tl(bcond, t0);
5731 TCGv_i32 t1 = tcg_temp_new_i32();
5732 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5733 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5734 tcg_gen_or_i32(t0, t0, t1);
5735 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
5736 tcg_gen_or_i32(t0, t0, t1);
5737 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
5738 tcg_gen_or_i32(t0, t0, t1);
5739 tcg_temp_free_i32(t1);
5740 tcg_gen_andi_i32(t0, t0, 1);
5741 tcg_gen_extu_i32_tl(bcond, t0);
5745 ctx->hflags |= MIPS_HFLAG_BC;
5749 generate_exception (ctx, EXCP_RI);
5752 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx, opn,
5753 ctx->hflags, btarget);
5754 ctx->btarget = btarget;
5757 tcg_temp_free_i32(t0);
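/* In outline: each FP condition code is a single bit of FCR31 located
   by get_fp_bit(cc), so the branch condition is built by shifting that
   bit down, OR-ing in cc+1 for BC1ANY2 or cc+1..cc+3 for BC1ANY4,
   inverting the result for the "false" variants, masking to one bit
   and widening it into bcond.  The "likely" forms differ only in
   setting MIPS_HFLAG_BL instead of MIPS_HFLAG_BC so the delay slot can
   be nullified when the branch is not taken. */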
5760 /* Coprocessor 1 (FPU) */
5762 #define FOP(func, fmt) (((fmt) << 21) | (func))
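/* FOP() glues the fmt field (bits 25..21) to the function field
   (bits 5..0) so the switch in gen_farith below can match both at
   once; e.g. FOP(0, 16) is add.s, FOP(0, 17) is add.d and FOP(32, 20)
   is cvt.s.w, using the usual fmt encodings S=16, D=17, W=20, L=21,
   PS=22.  The mask FOP(0x3f, 0x1f) keeps exactly those two fields of
   the opcode. */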
5764 static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
5766 const char *opn = "cp1 move";
5767 TCGv t0 = tcg_temp_new();
5772 TCGv_i32 fp0 = tcg_temp_new_i32();
5774 gen_load_fpr32(fp0, fs);
5775 tcg_gen_ext_i32_tl(t0, fp0);
5776 tcg_temp_free_i32(fp0);
5778 gen_store_gpr(t0, rt);
5782 gen_load_gpr(t0, rt);
5784 TCGv_i32 fp0 = tcg_temp_new_i32();
5786 tcg_gen_trunc_tl_i32(fp0, t0);
5787 gen_store_fpr32(fp0, fs);
5788 tcg_temp_free_i32(fp0);
5793 gen_helper_1i(cfc1, t0, fs);
5794 gen_store_gpr(t0, rt);
5798 gen_load_gpr(t0, rt);
5799 gen_helper_1i(ctc1, t0, fs);
5802 #if defined(TARGET_MIPS64)
5804 gen_load_fpr64(ctx, t0, fs);
5805 gen_store_gpr(t0, rt);
5809 gen_load_gpr(t0, rt);
5810 gen_store_fpr64(ctx, t0, fs);
5816 TCGv_i32 fp0 = tcg_temp_new_i32();
5818 gen_load_fpr32h(fp0, fs);
5819 tcg_gen_ext_i32_tl(t0, fp0);
5820 tcg_temp_free_i32(fp0);
5822 gen_store_gpr(t0, rt);
5826 gen_load_gpr(t0, rt);
5828 TCGv_i32 fp0 = tcg_temp_new_i32();
5830 tcg_gen_trunc_tl_i32(fp0, t0);
5831 gen_store_fpr32h(fp0, fs);
5832 tcg_temp_free_i32(fp0);
5838 generate_exception (ctx, EXCP_RI);
5841 MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);
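/* The CP1 move group above, roughly: MFC1/MTC1 move the low 32 bits of
   an FPR to or from a GPR (sign-extending on the way out), DMFC1/DMTC1
   move all 64 bits, and MFHC1/MTHC1 move the upper 32 bits of a 64-bit
   FPR.  CFC1/CTC1 go through helpers because the control registers
   (FCR0, FCR31, ...) carry rounding mode, flags and enables rather
   than data. */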
5847 static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
5863 l1 = gen_new_label();
5864 t0 = tcg_temp_new_i32();
5865 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5866 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5867 tcg_temp_free_i32(t0);
5869 tcg_gen_movi_tl(cpu_gpr[rd], 0);
5871 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
5876 static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
5879 TCGv_i32 t0 = tcg_temp_new_i32();
5880 int l1 = gen_new_label();
5887 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5888 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5889 gen_load_fpr32(t0, fs);
5890 gen_store_fpr32(t0, fd);
5892 tcg_temp_free_i32(t0);
5895 static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
5898 TCGv_i32 t0 = tcg_temp_new_i32();
5900 int l1 = gen_new_label();
5907 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5908 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5909 tcg_temp_free_i32(t0);
5910 fp0 = tcg_temp_new_i64();
5911 gen_load_fpr64(ctx, fp0, fs);
5912 gen_store_fpr64(ctx, fp0, fd);
5913 tcg_temp_free_i64(fp0);
5917 static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
5920 TCGv_i32 t0 = tcg_temp_new_i32();
5921 int l1 = gen_new_label();
5922 int l2 = gen_new_label();
5929 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc));
5930 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5931 gen_load_fpr32(t0, fs);
5932 gen_store_fpr32(t0, fd);
5935 tcg_gen_andi_i32(t0, fpu_fcr31, get_fp_bit(cc+1));
5936 tcg_gen_brcondi_i32(cond, t0, 0, l2);
5937 gen_load_fpr32h(t0, fs);
5938 gen_store_fpr32h(t0, fd);
5939 tcg_temp_free_i32(t0);
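/* MOVF/MOVT and their FP counterparts above all follow the same
   pattern: test FCC[cc] in FCR31, branch over the move when the bit
   does not match tf, and otherwise copy the source (a GPR, a single, a
   double, or for the PS case the low and high singles under FCC[cc]
   and FCC[cc+1] independently). */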
5944 static void gen_farith (DisasContext *ctx, uint32_t op1,
5945 int ft, int fs, int fd, int cc)
5947 const char *opn = "farith";
5948 const char *condnames[] = {
5966 const char *condnames_abs[] = {
5984 enum { BINOP, CMPOP, OTHEROP } optype = OTHEROP;
5985 uint32_t func = ctx->opcode & 0x3f;
5987 switch (ctx->opcode & FOP(0x3f, 0x1f)) {
5990 TCGv_i32 fp0 = tcg_temp_new_i32();
5991 TCGv_i32 fp1 = tcg_temp_new_i32();
5993 gen_load_fpr32(fp0, fs);
5994 gen_load_fpr32(fp1, ft);
5995 gen_helper_float_add_s(fp0, fp0, fp1);
5996 tcg_temp_free_i32(fp1);
5997 gen_store_fpr32(fp0, fd);
5998 tcg_temp_free_i32(fp0);
6005 TCGv_i32 fp0 = tcg_temp_new_i32();
6006 TCGv_i32 fp1 = tcg_temp_new_i32();
6008 gen_load_fpr32(fp0, fs);
6009 gen_load_fpr32(fp1, ft);
6010 gen_helper_float_sub_s(fp0, fp0, fp1);
6011 tcg_temp_free_i32(fp1);
6012 gen_store_fpr32(fp0, fd);
6013 tcg_temp_free_i32(fp0);
6020 TCGv_i32 fp0 = tcg_temp_new_i32();
6021 TCGv_i32 fp1 = tcg_temp_new_i32();
6023 gen_load_fpr32(fp0, fs);
6024 gen_load_fpr32(fp1, ft);
6025 gen_helper_float_mul_s(fp0, fp0, fp1);
6026 tcg_temp_free_i32(fp1);
6027 gen_store_fpr32(fp0, fd);
6028 tcg_temp_free_i32(fp0);
6035 TCGv_i32 fp0 = tcg_temp_new_i32();
6036 TCGv_i32 fp1 = tcg_temp_new_i32();
6038 gen_load_fpr32(fp0, fs);
6039 gen_load_fpr32(fp1, ft);
6040 gen_helper_float_div_s(fp0, fp0, fp1);
6041 tcg_temp_free_i32(fp1);
6042 gen_store_fpr32(fp0, fd);
6043 tcg_temp_free_i32(fp0);
6050 TCGv_i32 fp0 = tcg_temp_new_i32();
6052 gen_load_fpr32(fp0, fs);
6053 gen_helper_float_sqrt_s(fp0, fp0);
6054 gen_store_fpr32(fp0, fd);
6055 tcg_temp_free_i32(fp0);
6061 TCGv_i32 fp0 = tcg_temp_new_i32();
6063 gen_load_fpr32(fp0, fs);
6064 gen_helper_float_abs_s(fp0, fp0);
6065 gen_store_fpr32(fp0, fd);
6066 tcg_temp_free_i32(fp0);
6072 TCGv_i32 fp0 = tcg_temp_new_i32();
6074 gen_load_fpr32(fp0, fs);
6075 gen_store_fpr32(fp0, fd);
6076 tcg_temp_free_i32(fp0);
6082 TCGv_i32 fp0 = tcg_temp_new_i32();
6084 gen_load_fpr32(fp0, fs);
6085 gen_helper_float_chs_s(fp0, fp0);
6086 gen_store_fpr32(fp0, fd);
6087 tcg_temp_free_i32(fp0);
6092 check_cp1_64bitmode(ctx);
6094 TCGv_i32 fp32 = tcg_temp_new_i32();
6095 TCGv_i64 fp64 = tcg_temp_new_i64();
6097 gen_load_fpr32(fp32, fs);
6098 gen_helper_float_roundl_s(fp64, fp32);
6099 tcg_temp_free_i32(fp32);
6100 gen_store_fpr64(ctx, fp64, fd);
6101 tcg_temp_free_i64(fp64);
6106 check_cp1_64bitmode(ctx);
6108 TCGv_i32 fp32 = tcg_temp_new_i32();
6109 TCGv_i64 fp64 = tcg_temp_new_i64();
6111 gen_load_fpr32(fp32, fs);
6112 gen_helper_float_truncl_s(fp64, fp32);
6113 tcg_temp_free_i32(fp32);
6114 gen_store_fpr64(ctx, fp64, fd);
6115 tcg_temp_free_i64(fp64);
6120 check_cp1_64bitmode(ctx);
6122 TCGv_i32 fp32 = tcg_temp_new_i32();
6123 TCGv_i64 fp64 = tcg_temp_new_i64();
6125 gen_load_fpr32(fp32, fs);
6126 gen_helper_float_ceill_s(fp64, fp32);
6127 tcg_temp_free_i32(fp32);
6128 gen_store_fpr64(ctx, fp64, fd);
6129 tcg_temp_free_i64(fp64);
6134 check_cp1_64bitmode(ctx);
6136 TCGv_i32 fp32 = tcg_temp_new_i32();
6137 TCGv_i64 fp64 = tcg_temp_new_i64();
6139 gen_load_fpr32(fp32, fs);
6140 gen_helper_float_floorl_s(fp64, fp32);
6141 tcg_temp_free_i32(fp32);
6142 gen_store_fpr64(ctx, fp64, fd);
6143 tcg_temp_free_i64(fp64);
6149 TCGv_i32 fp0 = tcg_temp_new_i32();
6151 gen_load_fpr32(fp0, fs);
6152 gen_helper_float_roundw_s(fp0, fp0);
6153 gen_store_fpr32(fp0, fd);
6154 tcg_temp_free_i32(fp0);
6160 TCGv_i32 fp0 = tcg_temp_new_i32();
6162 gen_load_fpr32(fp0, fs);
6163 gen_helper_float_truncw_s(fp0, fp0);
6164 gen_store_fpr32(fp0, fd);
6165 tcg_temp_free_i32(fp0);
6171 TCGv_i32 fp0 = tcg_temp_new_i32();
6173 gen_load_fpr32(fp0, fs);
6174 gen_helper_float_ceilw_s(fp0, fp0);
6175 gen_store_fpr32(fp0, fd);
6176 tcg_temp_free_i32(fp0);
6182 TCGv_i32 fp0 = tcg_temp_new_i32();
6184 gen_load_fpr32(fp0, fs);
6185 gen_helper_float_floorw_s(fp0, fp0);
6186 gen_store_fpr32(fp0, fd);
6187 tcg_temp_free_i32(fp0);
6192 gen_movcf_s(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6197 int l1 = gen_new_label();
6201 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6203 fp0 = tcg_temp_new_i32();
6204 gen_load_fpr32(fp0, fs);
6205 gen_store_fpr32(fp0, fd);
6206 tcg_temp_free_i32(fp0);
6213 int l1 = gen_new_label();
6217 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6218 fp0 = tcg_temp_new_i32();
6219 gen_load_fpr32(fp0, fs);
6220 gen_store_fpr32(fp0, fd);
6221 tcg_temp_free_i32(fp0);
6230 TCGv_i32 fp0 = tcg_temp_new_i32();
6232 gen_load_fpr32(fp0, fs);
6233 gen_helper_float_recip_s(fp0, fp0);
6234 gen_store_fpr32(fp0, fd);
6235 tcg_temp_free_i32(fp0);
6242 TCGv_i32 fp0 = tcg_temp_new_i32();
6244 gen_load_fpr32(fp0, fs);
6245 gen_helper_float_rsqrt_s(fp0, fp0);
6246 gen_store_fpr32(fp0, fd);
6247 tcg_temp_free_i32(fp0);
6252 check_cp1_64bitmode(ctx);
6254 TCGv_i32 fp0 = tcg_temp_new_i32();
6255 TCGv_i32 fp1 = tcg_temp_new_i32();
6257 gen_load_fpr32(fp0, fs);
6258 gen_load_fpr32(fp1, fd);
6259 gen_helper_float_recip2_s(fp0, fp0, fp1);
6260 tcg_temp_free_i32(fp1);
6261 gen_store_fpr32(fp0, fd);
6262 tcg_temp_free_i32(fp0);
6267 check_cp1_64bitmode(ctx);
6269 TCGv_i32 fp0 = tcg_temp_new_i32();
6271 gen_load_fpr32(fp0, fs);
6272 gen_helper_float_recip1_s(fp0, fp0);
6273 gen_store_fpr32(fp0, fd);
6274 tcg_temp_free_i32(fp0);
6279 check_cp1_64bitmode(ctx);
6281 TCGv_i32 fp0 = tcg_temp_new_i32();
6283 gen_load_fpr32(fp0, fs);
6284 gen_helper_float_rsqrt1_s(fp0, fp0);
6285 gen_store_fpr32(fp0, fd);
6286 tcg_temp_free_i32(fp0);
6291 check_cp1_64bitmode(ctx);
6293 TCGv_i32 fp0 = tcg_temp_new_i32();
6294 TCGv_i32 fp1 = tcg_temp_new_i32();
6296 gen_load_fpr32(fp0, fs);
6297 gen_load_fpr32(fp1, ft);
6298 gen_helper_float_rsqrt2_s(fp0, fp0, fp1);
6299 tcg_temp_free_i32(fp1);
6300 gen_store_fpr32(fp0, fd);
6301 tcg_temp_free_i32(fp0);
6306 check_cp1_registers(ctx, fd);
6308 TCGv_i32 fp32 = tcg_temp_new_i32();
6309 TCGv_i64 fp64 = tcg_temp_new_i64();
6311 gen_load_fpr32(fp32, fs);
6312 gen_helper_float_cvtd_s(fp64, fp32);
6313 tcg_temp_free_i32(fp32);
6314 gen_store_fpr64(ctx, fp64, fd);
6315 tcg_temp_free_i64(fp64);
6321 TCGv_i32 fp0 = tcg_temp_new_i32();
6323 gen_load_fpr32(fp0, fs);
6324 gen_helper_float_cvtw_s(fp0, fp0);
6325 gen_store_fpr32(fp0, fd);
6326 tcg_temp_free_i32(fp0);
6331 check_cp1_64bitmode(ctx);
6333 TCGv_i32 fp32 = tcg_temp_new_i32();
6334 TCGv_i64 fp64 = tcg_temp_new_i64();
6336 gen_load_fpr32(fp32, fs);
6337 gen_helper_float_cvtl_s(fp64, fp32);
6338 tcg_temp_free_i32(fp32);
6339 gen_store_fpr64(ctx, fp64, fd);
6340 tcg_temp_free_i64(fp64);
6345 check_cp1_64bitmode(ctx);
6347 TCGv_i64 fp64 = tcg_temp_new_i64();
6348 TCGv_i32 fp32_0 = tcg_temp_new_i32();
6349 TCGv_i32 fp32_1 = tcg_temp_new_i32();
6351 gen_load_fpr32(fp32_0, fs);
6352 gen_load_fpr32(fp32_1, ft);
6353 tcg_gen_concat_i32_i64(fp64, fp32_0, fp32_1);
6354 tcg_temp_free_i32(fp32_1);
6355 tcg_temp_free_i32(fp32_0);
6356 gen_store_fpr64(ctx, fp64, fd);
6357 tcg_temp_free_i64(fp64);
6378 TCGv_i32 fp0 = tcg_temp_new_i32();
6379 TCGv_i32 fp1 = tcg_temp_new_i32();
6381 if (ctx->opcode & (1 << 6)) {
6385 gen_load_fpr32(fp0, fs);
6386 gen_load_fpr32(fp1, ft);
6387 if (ctx->opcode & (1 << 6)) {
6388 gen_cmpabs_s(func-48, fp0, fp1, cc);
6389 opn = condnames_abs[func-48];
6391 gen_cmp_s(func-48, fp0, fp1, cc);
6392 opn = condnames[func-48];
6394 tcg_temp_free_i32(fp0);
6395 tcg_temp_free_i32(fp1);
6399 check_cp1_registers(ctx, fs | ft | fd);
6401 TCGv_i64 fp0 = tcg_temp_new_i64();
6402 TCGv_i64 fp1 = tcg_temp_new_i64();
6404 gen_load_fpr64(ctx, fp0, fs);
6405 gen_load_fpr64(ctx, fp1, ft);
6406 gen_helper_float_add_d(fp0, fp0, fp1);
6407 tcg_temp_free_i64(fp1);
6408 gen_store_fpr64(ctx, fp0, fd);
6409 tcg_temp_free_i64(fp0);
6415 check_cp1_registers(ctx, fs | ft | fd);
6417 TCGv_i64 fp0 = tcg_temp_new_i64();
6418 TCGv_i64 fp1 = tcg_temp_new_i64();
6420 gen_load_fpr64(ctx, fp0, fs);
6421 gen_load_fpr64(ctx, fp1, ft);
6422 gen_helper_float_sub_d(fp0, fp0, fp1);
6423 tcg_temp_free_i64(fp1);
6424 gen_store_fpr64(ctx, fp0, fd);
6425 tcg_temp_free_i64(fp0);
6431 check_cp1_registers(ctx, fs | ft | fd);
6433 TCGv_i64 fp0 = tcg_temp_new_i64();
6434 TCGv_i64 fp1 = tcg_temp_new_i64();
6436 gen_load_fpr64(ctx, fp0, fs);
6437 gen_load_fpr64(ctx, fp1, ft);
6438 gen_helper_float_mul_d(fp0, fp0, fp1);
6439 tcg_temp_free_i64(fp1);
6440 gen_store_fpr64(ctx, fp0, fd);
6441 tcg_temp_free_i64(fp0);
6447 check_cp1_registers(ctx, fs | ft | fd);
6449 TCGv_i64 fp0 = tcg_temp_new_i64();
6450 TCGv_i64 fp1 = tcg_temp_new_i64();
6452 gen_load_fpr64(ctx, fp0, fs);
6453 gen_load_fpr64(ctx, fp1, ft);
6454 gen_helper_float_div_d(fp0, fp0, fp1);
6455 tcg_temp_free_i64(fp1);
6456 gen_store_fpr64(ctx, fp0, fd);
6457 tcg_temp_free_i64(fp0);
6463 check_cp1_registers(ctx, fs | fd);
6465 TCGv_i64 fp0 = tcg_temp_new_i64();
6467 gen_load_fpr64(ctx, fp0, fs);
6468 gen_helper_float_sqrt_d(fp0, fp0);
6469 gen_store_fpr64(ctx, fp0, fd);
6470 tcg_temp_free_i64(fp0);
6475 check_cp1_registers(ctx, fs | fd);
6477 TCGv_i64 fp0 = tcg_temp_new_i64();
6479 gen_load_fpr64(ctx, fp0, fs);
6480 gen_helper_float_abs_d(fp0, fp0);
6481 gen_store_fpr64(ctx, fp0, fd);
6482 tcg_temp_free_i64(fp0);
6487 check_cp1_registers(ctx, fs | fd);
6489 TCGv_i64 fp0 = tcg_temp_new_i64();
6491 gen_load_fpr64(ctx, fp0, fs);
6492 gen_store_fpr64(ctx, fp0, fd);
6493 tcg_temp_free_i64(fp0);
6498 check_cp1_registers(ctx, fs | fd);
6500 TCGv_i64 fp0 = tcg_temp_new_i64();
6502 gen_load_fpr64(ctx, fp0, fs);
6503 gen_helper_float_chs_d(fp0, fp0);
6504 gen_store_fpr64(ctx, fp0, fd);
6505 tcg_temp_free_i64(fp0);
6510 check_cp1_64bitmode(ctx);
6512 TCGv_i64 fp0 = tcg_temp_new_i64();
6514 gen_load_fpr64(ctx, fp0, fs);
6515 gen_helper_float_roundl_d(fp0, fp0);
6516 gen_store_fpr64(ctx, fp0, fd);
6517 tcg_temp_free_i64(fp0);
6522 check_cp1_64bitmode(ctx);
6524 TCGv_i64 fp0 = tcg_temp_new_i64();
6526 gen_load_fpr64(ctx, fp0, fs);
6527 gen_helper_float_truncl_d(fp0, fp0);
6528 gen_store_fpr64(ctx, fp0, fd);
6529 tcg_temp_free_i64(fp0);
6534 check_cp1_64bitmode(ctx);
6536 TCGv_i64 fp0 = tcg_temp_new_i64();
6538 gen_load_fpr64(ctx, fp0, fs);
6539 gen_helper_float_ceill_d(fp0, fp0);
6540 gen_store_fpr64(ctx, fp0, fd);
6541 tcg_temp_free_i64(fp0);
6546 check_cp1_64bitmode(ctx);
6548 TCGv_i64 fp0 = tcg_temp_new_i64();
6550 gen_load_fpr64(ctx, fp0, fs);
6551 gen_helper_float_floorl_d(fp0, fp0);
6552 gen_store_fpr64(ctx, fp0, fd);
6553 tcg_temp_free_i64(fp0);
6558 check_cp1_registers(ctx, fs);
6560 TCGv_i32 fp32 = tcg_temp_new_i32();
6561 TCGv_i64 fp64 = tcg_temp_new_i64();
6563 gen_load_fpr64(ctx, fp64, fs);
6564 gen_helper_float_roundw_d(fp32, fp64);
6565 tcg_temp_free_i64(fp64);
6566 gen_store_fpr32(fp32, fd);
6567 tcg_temp_free_i32(fp32);
6572 check_cp1_registers(ctx, fs);
6574 TCGv_i32 fp32 = tcg_temp_new_i32();
6575 TCGv_i64 fp64 = tcg_temp_new_i64();
6577 gen_load_fpr64(ctx, fp64, fs);
6578 gen_helper_float_truncw_d(fp32, fp64);
6579 tcg_temp_free_i64(fp64);
6580 gen_store_fpr32(fp32, fd);
6581 tcg_temp_free_i32(fp32);
6586 check_cp1_registers(ctx, fs);
6588 TCGv_i32 fp32 = tcg_temp_new_i32();
6589 TCGv_i64 fp64 = tcg_temp_new_i64();
6591 gen_load_fpr64(ctx, fp64, fs);
6592 gen_helper_float_ceilw_d(fp32, fp64);
6593 tcg_temp_free_i64(fp64);
6594 gen_store_fpr32(fp32, fd);
6595 tcg_temp_free_i32(fp32);
6600 check_cp1_registers(ctx, fs);
6602 TCGv_i32 fp32 = tcg_temp_new_i32();
6603 TCGv_i64 fp64 = tcg_temp_new_i64();
6605 gen_load_fpr64(ctx, fp64, fs);
6606 gen_helper_float_floorw_d(fp32, fp64);
6607 tcg_temp_free_i64(fp64);
6608 gen_store_fpr32(fp32, fd);
6609 tcg_temp_free_i32(fp32);
6614 gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6619 int l1 = gen_new_label();
6623 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6625 fp0 = tcg_temp_new_i64();
6626 gen_load_fpr64(ctx, fp0, fs);
6627 gen_store_fpr64(ctx, fp0, fd);
6628 tcg_temp_free_i64(fp0);
6635 int l1 = gen_new_label();
6639 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6640 fp0 = tcg_temp_new_i64();
6641 gen_load_fpr64(ctx, fp0, fs);
6642 gen_store_fpr64(ctx, fp0, fd);
6643 tcg_temp_free_i64(fp0);
6650 check_cp1_64bitmode(ctx);
6652 TCGv_i64 fp0 = tcg_temp_new_i64();
6654 gen_load_fpr64(ctx, fp0, fs);
6655 gen_helper_float_recip_d(fp0, fp0);
6656 gen_store_fpr64(ctx, fp0, fd);
6657 tcg_temp_free_i64(fp0);
6662 check_cp1_64bitmode(ctx);
6664 TCGv_i64 fp0 = tcg_temp_new_i64();
6666 gen_load_fpr64(ctx, fp0, fs);
6667 gen_helper_float_rsqrt_d(fp0, fp0);
6668 gen_store_fpr64(ctx, fp0, fd);
6669 tcg_temp_free_i64(fp0);
6674 check_cp1_64bitmode(ctx);
6676 TCGv_i64 fp0 = tcg_temp_new_i64();
6677 TCGv_i64 fp1 = tcg_temp_new_i64();
6679 gen_load_fpr64(ctx, fp0, fs);
6680 gen_load_fpr64(ctx, fp1, ft);
6681 gen_helper_float_recip2_d(fp0, fp0, fp1);
6682 tcg_temp_free_i64(fp1);
6683 gen_store_fpr64(ctx, fp0, fd);
6684 tcg_temp_free_i64(fp0);
6689 check_cp1_64bitmode(ctx);
6691 TCGv_i64 fp0 = tcg_temp_new_i64();
6693 gen_load_fpr64(ctx, fp0, fs);
6694 gen_helper_float_recip1_d(fp0, fp0);
6695 gen_store_fpr64(ctx, fp0, fd);
6696 tcg_temp_free_i64(fp0);
6701 check_cp1_64bitmode(ctx);
6703 TCGv_i64 fp0 = tcg_temp_new_i64();
6705 gen_load_fpr64(ctx, fp0, fs);
6706 gen_helper_float_rsqrt1_d(fp0, fp0);
6707 gen_store_fpr64(ctx, fp0, fd);
6708 tcg_temp_free_i64(fp0);
6713 check_cp1_64bitmode(ctx);
6715 TCGv_i64 fp0 = tcg_temp_new_i64();
6716 TCGv_i64 fp1 = tcg_temp_new_i64();
6718 gen_load_fpr64(ctx, fp0, fs);
6719 gen_load_fpr64(ctx, fp1, ft);
6720 gen_helper_float_rsqrt2_d(fp0, fp0, fp1);
6721 tcg_temp_free_i64(fp1);
6722 gen_store_fpr64(ctx, fp0, fd);
6723 tcg_temp_free_i64(fp0);
6744 TCGv_i64 fp0 = tcg_temp_new_i64();
6745 TCGv_i64 fp1 = tcg_temp_new_i64();
6746 if (ctx->opcode & (1 << 6)) {
6749 check_cp1_registers(ctx, fs | ft);
6751 gen_load_fpr64(ctx, fp0, fs);
6752 gen_load_fpr64(ctx, fp1, ft);
6753 if (ctx->opcode & (1 << 6)) {
6754 gen_cmpabs_d(func-48, fp0, fp1, cc);
6755 opn = condnames_abs[func-48];
6757 gen_cmp_d(func-48, fp0, fp1, cc);
6758 opn = condnames[func-48];
6760 tcg_temp_free_i64(fp0);
6761 tcg_temp_free_i64(fp1);
6765 check_cp1_registers(ctx, fs);
6767 TCGv_i32 fp32 = tcg_temp_new_i32();
6768 TCGv_i64 fp64 = tcg_temp_new_i64();
6770 gen_load_fpr64(ctx, fp64, fs);
6771 gen_helper_float_cvts_d(fp32, fp64);
6772 tcg_temp_free_i64(fp64);
6773 gen_store_fpr32(fp32, fd);
6774 tcg_temp_free_i32(fp32);
6779 check_cp1_registers(ctx, fs);
6781 TCGv_i32 fp32 = tcg_temp_new_i32();
6782 TCGv_i64 fp64 = tcg_temp_new_i64();
6784 gen_load_fpr64(ctx, fp64, fs);
6785 gen_helper_float_cvtw_d(fp32, fp64);
6786 tcg_temp_free_i64(fp64);
6787 gen_store_fpr32(fp32, fd);
6788 tcg_temp_free_i32(fp32);
6793 check_cp1_64bitmode(ctx);
6795 TCGv_i64 fp0 = tcg_temp_new_i64();
6797 gen_load_fpr64(ctx, fp0, fs);
6798 gen_helper_float_cvtl_d(fp0, fp0);
6799 gen_store_fpr64(ctx, fp0, fd);
6800 tcg_temp_free_i64(fp0);
6806 TCGv_i32 fp0 = tcg_temp_new_i32();
6808 gen_load_fpr32(fp0, fs);
6809 gen_helper_float_cvts_w(fp0, fp0);
6810 gen_store_fpr32(fp0, fd);
6811 tcg_temp_free_i32(fp0);
6816 check_cp1_registers(ctx, fd);
6818 TCGv_i32 fp32 = tcg_temp_new_i32();
6819 TCGv_i64 fp64 = tcg_temp_new_i64();
6821 gen_load_fpr32(fp32, fs);
6822 gen_helper_float_cvtd_w(fp64, fp32);
6823 tcg_temp_free_i32(fp32);
6824 gen_store_fpr64(ctx, fp64, fd);
6825 tcg_temp_free_i64(fp64);
6830 check_cp1_64bitmode(ctx);
6832 TCGv_i32 fp32 = tcg_temp_new_i32();
6833 TCGv_i64 fp64 = tcg_temp_new_i64();
6835 gen_load_fpr64(ctx, fp64, fs);
6836 gen_helper_float_cvts_l(fp32, fp64);
6837 tcg_temp_free_i64(fp64);
6838 gen_store_fpr32(fp32, fd);
6839 tcg_temp_free_i32(fp32);
6844 check_cp1_64bitmode(ctx);
6846 TCGv_i64 fp0 = tcg_temp_new_i64();
6848 gen_load_fpr64(ctx, fp0, fs);
6849 gen_helper_float_cvtd_l(fp0, fp0);
6850 gen_store_fpr64(ctx, fp0, fd);
6851 tcg_temp_free_i64(fp0);
6856 check_cp1_64bitmode(ctx);
6858 TCGv_i64 fp0 = tcg_temp_new_i64();
6860 gen_load_fpr64(ctx, fp0, fs);
6861 gen_helper_float_cvtps_pw(fp0, fp0);
6862 gen_store_fpr64(ctx, fp0, fd);
6863 tcg_temp_free_i64(fp0);
6868 check_cp1_64bitmode(ctx);
6870 TCGv_i64 fp0 = tcg_temp_new_i64();
6871 TCGv_i64 fp1 = tcg_temp_new_i64();
6873 gen_load_fpr64(ctx, fp0, fs);
6874 gen_load_fpr64(ctx, fp1, ft);
6875 gen_helper_float_add_ps(fp0, fp0, fp1);
6876 tcg_temp_free_i64(fp1);
6877 gen_store_fpr64(ctx, fp0, fd);
6878 tcg_temp_free_i64(fp0);
6883 check_cp1_64bitmode(ctx);
6885 TCGv_i64 fp0 = tcg_temp_new_i64();
6886 TCGv_i64 fp1 = tcg_temp_new_i64();
6888 gen_load_fpr64(ctx, fp0, fs);
6889 gen_load_fpr64(ctx, fp1, ft);
6890 gen_helper_float_sub_ps(fp0, fp0, fp1);
6891 tcg_temp_free_i64(fp1);
6892 gen_store_fpr64(ctx, fp0, fd);
6893 tcg_temp_free_i64(fp0);
6898 check_cp1_64bitmode(ctx);
6900 TCGv_i64 fp0 = tcg_temp_new_i64();
6901 TCGv_i64 fp1 = tcg_temp_new_i64();
6903 gen_load_fpr64(ctx, fp0, fs);
6904 gen_load_fpr64(ctx, fp1, ft);
6905 gen_helper_float_mul_ps(fp0, fp0, fp1);
6906 tcg_temp_free_i64(fp1);
6907 gen_store_fpr64(ctx, fp0, fd);
6908 tcg_temp_free_i64(fp0);
6913 check_cp1_64bitmode(ctx);
6915 TCGv_i64 fp0 = tcg_temp_new_i64();
6917 gen_load_fpr64(ctx, fp0, fs);
6918 gen_helper_float_abs_ps(fp0, fp0);
6919 gen_store_fpr64(ctx, fp0, fd);
6920 tcg_temp_free_i64(fp0);
6925 check_cp1_64bitmode(ctx);
6927 TCGv_i64 fp0 = tcg_temp_new_i64();
6929 gen_load_fpr64(ctx, fp0, fs);
6930 gen_store_fpr64(ctx, fp0, fd);
6931 tcg_temp_free_i64(fp0);
6936 check_cp1_64bitmode(ctx);
6938 TCGv_i64 fp0 = tcg_temp_new_i64();
6940 gen_load_fpr64(ctx, fp0, fs);
6941 gen_helper_float_chs_ps(fp0, fp0);
6942 gen_store_fpr64(ctx, fp0, fd);
6943 tcg_temp_free_i64(fp0);
6948 check_cp1_64bitmode(ctx);
6949 gen_movcf_ps(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6953 check_cp1_64bitmode(ctx);
6955 int l1 = gen_new_label();
6959 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6960 fp0 = tcg_temp_new_i64();
6961 gen_load_fpr64(ctx, fp0, fs);
6962 gen_store_fpr64(ctx, fp0, fd);
6963 tcg_temp_free_i64(fp0);
6969 check_cp1_64bitmode(ctx);
6971 int l1 = gen_new_label();
6975 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6976 fp0 = tcg_temp_new_i64();
6977 gen_load_fpr64(ctx, fp0, fs);
6978 gen_store_fpr64(ctx, fp0, fd);
6979 tcg_temp_free_i64(fp0);
6986 check_cp1_64bitmode(ctx);
6988 TCGv_i64 fp0 = tcg_temp_new_i64();
6989 TCGv_i64 fp1 = tcg_temp_new_i64();
6991 gen_load_fpr64(ctx, fp0, ft);
6992 gen_load_fpr64(ctx, fp1, fs);
6993 gen_helper_float_addr_ps(fp0, fp0, fp1);
6994 tcg_temp_free_i64(fp1);
6995 gen_store_fpr64(ctx, fp0, fd);
6996 tcg_temp_free_i64(fp0);
7001 check_cp1_64bitmode(ctx);
7003 TCGv_i64 fp0 = tcg_temp_new_i64();
7004 TCGv_i64 fp1 = tcg_temp_new_i64();
7006 gen_load_fpr64(ctx, fp0, ft);
7007 gen_load_fpr64(ctx, fp1, fs);
7008 gen_helper_float_mulr_ps(fp0, fp0, fp1);
7009 tcg_temp_free_i64(fp1);
7010 gen_store_fpr64(ctx, fp0, fd);
7011 tcg_temp_free_i64(fp0);
7016 check_cp1_64bitmode(ctx);
7018 TCGv_i64 fp0 = tcg_temp_new_i64();
7019 TCGv_i64 fp1 = tcg_temp_new_i64();
7021 gen_load_fpr64(ctx, fp0, fs);
7022 gen_load_fpr64(ctx, fp1, ft); /* second source is ft, matching RECIP2.S/RECIP2.D above */
7023 gen_helper_float_recip2_ps(fp0, fp0, fp1);
7024 tcg_temp_free_i64(fp1);
7025 gen_store_fpr64(ctx, fp0, fd);
7026 tcg_temp_free_i64(fp0);
7031 check_cp1_64bitmode(ctx);
7033 TCGv_i64 fp0 = tcg_temp_new_i64();
7035 gen_load_fpr64(ctx, fp0, fs);
7036 gen_helper_float_recip1_ps(fp0, fp0);
7037 gen_store_fpr64(ctx, fp0, fd);
7038 tcg_temp_free_i64(fp0);
7043 check_cp1_64bitmode(ctx);
7045 TCGv_i64 fp0 = tcg_temp_new_i64();
7047 gen_load_fpr64(ctx, fp0, fs);
7048 gen_helper_float_rsqrt1_ps(fp0, fp0);
7049 gen_store_fpr64(ctx, fp0, fd);
7050 tcg_temp_free_i64(fp0);
7055 check_cp1_64bitmode(ctx);
7057 TCGv_i64 fp0 = tcg_temp_new_i64();
7058 TCGv_i64 fp1 = tcg_temp_new_i64();
7060 gen_load_fpr64(ctx, fp0, fs);
7061 gen_load_fpr64(ctx, fp1, ft);
7062 gen_helper_float_rsqrt2_ps(fp0, fp0, fp1);
7063 tcg_temp_free_i64(fp1);
7064 gen_store_fpr64(ctx, fp0, fd);
7065 tcg_temp_free_i64(fp0);
7070 check_cp1_64bitmode(ctx);
7072 TCGv_i32 fp0 = tcg_temp_new_i32();
7074 gen_load_fpr32h(fp0, fs);
7075 gen_helper_float_cvts_pu(fp0, fp0);
7076 gen_store_fpr32(fp0, fd);
7077 tcg_temp_free_i32(fp0);
7082 check_cp1_64bitmode(ctx);
7084 TCGv_i64 fp0 = tcg_temp_new_i64();
7086 gen_load_fpr64(ctx, fp0, fs);
7087 gen_helper_float_cvtpw_ps(fp0, fp0);
7088 gen_store_fpr64(ctx, fp0, fd);
7089 tcg_temp_free_i64(fp0);
7094 check_cp1_64bitmode(ctx);
7096 TCGv_i32 fp0 = tcg_temp_new_i32();
7098 gen_load_fpr32(fp0, fs);
7099 gen_helper_float_cvts_pl(fp0, fp0);
7100 gen_store_fpr32(fp0, fd);
7101 tcg_temp_free_i32(fp0);
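/*
 * PLL/PLU/PUL/PUU repack a paired-single value: the first letter names the
 * half of fs (Lower or Upper) that becomes the upper single of fd, and the
 * second letter names the half of ft that becomes the lower single of fd,
 * which is exactly the load/store-half pattern generated below.
 */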
7106 check_cp1_64bitmode(ctx);
7108 TCGv_i32 fp0 = tcg_temp_new_i32();
7109 TCGv_i32 fp1 = tcg_temp_new_i32();
7111 gen_load_fpr32(fp0, fs);
7112 gen_load_fpr32(fp1, ft);
7113 gen_store_fpr32h(fp0, fd);
7114 gen_store_fpr32(fp1, fd);
7115 tcg_temp_free_i32(fp0);
7116 tcg_temp_free_i32(fp1);
7121 check_cp1_64bitmode(ctx);
7123 TCGv_i32 fp0 = tcg_temp_new_i32();
7124 TCGv_i32 fp1 = tcg_temp_new_i32();
7126 gen_load_fpr32(fp0, fs);
7127 gen_load_fpr32h(fp1, ft);
7128 gen_store_fpr32(fp1, fd);
7129 gen_store_fpr32h(fp0, fd);
7130 tcg_temp_free_i32(fp0);
7131 tcg_temp_free_i32(fp1);
7136 check_cp1_64bitmode(ctx);
7138 TCGv_i32 fp0 = tcg_temp_new_i32();
7139 TCGv_i32 fp1 = tcg_temp_new_i32();
7141 gen_load_fpr32h(fp0, fs);
7142 gen_load_fpr32(fp1, ft);
7143 gen_store_fpr32(fp1, fd);
7144 gen_store_fpr32h(fp0, fd);
7145 tcg_temp_free_i32(fp0);
7146 tcg_temp_free_i32(fp1);
7151 check_cp1_64bitmode(ctx);
7153 TCGv_i32 fp0 = tcg_temp_new_i32();
7154 TCGv_i32 fp1 = tcg_temp_new_i32();
7156 gen_load_fpr32h(fp0, fs);
7157 gen_load_fpr32h(fp1, ft);
7158 gen_store_fpr32(fp1, fd);
7159 gen_store_fpr32h(fp0, fd);
7160 tcg_temp_free_i32(fp0);
7161 tcg_temp_free_i32(fp1);
7181 check_cp1_64bitmode(ctx);
7183 TCGv_i64 fp0 = tcg_temp_new_i64();
7184 TCGv_i64 fp1 = tcg_temp_new_i64();
7186 gen_load_fpr64(ctx, fp0, fs);
7187 gen_load_fpr64(ctx, fp1, ft);
7188 if (ctx->opcode & (1 << 6)) {
7189 gen_cmpabs_ps(func-48, fp0, fp1, cc);
7190 opn = condnames_abs[func-48];
7192 gen_cmp_ps(func-48, fp0, fp1, cc);
7193 opn = condnames[func-48];
7195 tcg_temp_free_i64(fp0);
7196 tcg_temp_free_i64(fp1);
7201 generate_exception (ctx, EXCP_RI);
7206 MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);
7209 MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);
7212 MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
7217 /* Coprocessor 3 (COP1X): indexed FPU load/store and fused arithmetic */
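/*
 * Indexed FPU loads/stores (LWXC1, LDXC1, LUXC1, SWXC1, SDXC1, SUXC1): the
 * effective address comes from two GPRs instead of a 16-bit displacement.
 * Roughly (the base == 0 test itself sits on a line elided from this listing):
 *
 *     EA = (base == 0)  ? GPR[index]
 *        : (index == 0) ? GPR[base]
 *        :                GPR[base] + GPR[index];
 *
 * LUXC1/SUXC1 additionally clear the low three address bits so the
 * doubleword access is always aligned.
 */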
7218 static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
7219 int fd, int fs, int base, int index)
7221 const char *opn = "extended float load/store";
7223 TCGv t0 = tcg_temp_new();
7233 check_cp1_registers(ctx, fd);
7237 check_cp1_64bitmode(ctx);
7242 gen_load_gpr(t0, index);
7243 } else if (index == 0) {
7244 gen_load_gpr(t0, base);
7246 gen_load_gpr(t0, index);
7247 gen_op_addr_add(ctx, t0, cpu_gpr[base]);
7249 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
7251 save_cpu_state(ctx, 0);
7255 TCGv_i32 fp0 = tcg_temp_new_i32();
7257 tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
7258 tcg_gen_trunc_tl_i32(fp0, t0);
7259 gen_store_fpr32(fp0, fd);
7260 tcg_temp_free_i32(fp0);
7266 TCGv_i64 fp0 = tcg_temp_new_i64();
7268 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7269 gen_store_fpr64(ctx, fp0, fd);
7270 tcg_temp_free_i64(fp0);
7275 tcg_gen_andi_tl(t0, t0, ~0x7);
7277 TCGv_i64 fp0 = tcg_temp_new_i64();
7279 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7280 gen_store_fpr64(ctx, fp0, fd);
7281 tcg_temp_free_i64(fp0);
7287 TCGv_i32 fp0 = tcg_temp_new_i32();
7288 TCGv t1 = tcg_temp_new();
7290 gen_load_fpr32(fp0, fs);
7291 tcg_gen_extu_i32_tl(t1, fp0);
7292 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
7293 tcg_temp_free_i32(fp0);
7301 TCGv_i64 fp0 = tcg_temp_new_i64();
7303 gen_load_fpr64(ctx, fp0, fs);
7304 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7305 tcg_temp_free_i64(fp0);
7311 tcg_gen_andi_tl(t0, t0, ~0x7);
7313 TCGv_i64 fp0 = tcg_temp_new_i64();
7315 gen_load_fpr64(ctx, fp0, fs);
7316 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7317 tcg_temp_free_i64(fp0);
7324 MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
7325 regnames[index], regnames[base]);
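/*
 * COP1X arithmetic: ALNV.PS plus the fused multiply-add family.  Each
 * MADD/MSUB/NMADD/NMSUB case below loads fs, ft and fr and hands them to one
 * helper; architecturally MADD.fmt computes fd = (fs * ft) + fr, MSUB
 * subtracts fr, and the N forms negate the final result.
 */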
7328 static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
7329 int fd, int fr, int fs, int ft)
7331 const char *opn = "flt3_arith";
7335 check_cp1_64bitmode(ctx);
7337 TCGv t0 = tcg_temp_local_new();
7338 TCGv_i32 fp = tcg_temp_new_i32();
7339 TCGv_i32 fph = tcg_temp_new_i32();
7340 int l1 = gen_new_label();
7341 int l2 = gen_new_label();
7343 gen_load_gpr(t0, fr);
7344 tcg_gen_andi_tl(t0, t0, 0x7);
7346 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
7347 gen_load_fpr32(fp, fs);
7348 gen_load_fpr32h(fph, fs);
7349 gen_store_fpr32(fp, fd);
7350 gen_store_fpr32h(fph, fd);
7353 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
7355 #ifdef TARGET_WORDS_BIGENDIAN
7356 gen_load_fpr32(fp, fs);
7357 gen_load_fpr32h(fph, ft);
7358 gen_store_fpr32h(fp, fd);
7359 gen_store_fpr32(fph, fd);
7361 gen_load_fpr32h(fph, fs);
7362 gen_load_fpr32(fp, ft);
7363 gen_store_fpr32(fph, fd);
7364 gen_store_fpr32h(fp, fd);
7367 tcg_temp_free_i32(fp);
7368 tcg_temp_free_i32(fph);
7375 TCGv_i32 fp0 = tcg_temp_new_i32();
7376 TCGv_i32 fp1 = tcg_temp_new_i32();
7377 TCGv_i32 fp2 = tcg_temp_new_i32();
7379 gen_load_fpr32(fp0, fs);
7380 gen_load_fpr32(fp1, ft);
7381 gen_load_fpr32(fp2, fr);
7382 gen_helper_float_muladd_s(fp2, fp0, fp1, fp2);
7383 tcg_temp_free_i32(fp0);
7384 tcg_temp_free_i32(fp1);
7385 gen_store_fpr32(fp2, fd);
7386 tcg_temp_free_i32(fp2);
7392 check_cp1_registers(ctx, fd | fs | ft | fr);
7394 TCGv_i64 fp0 = tcg_temp_new_i64();
7395 TCGv_i64 fp1 = tcg_temp_new_i64();
7396 TCGv_i64 fp2 = tcg_temp_new_i64();
7398 gen_load_fpr64(ctx, fp0, fs);
7399 gen_load_fpr64(ctx, fp1, ft);
7400 gen_load_fpr64(ctx, fp2, fr);
7401 gen_helper_float_muladd_d(fp2, fp0, fp1, fp2);
7402 tcg_temp_free_i64(fp0);
7403 tcg_temp_free_i64(fp1);
7404 gen_store_fpr64(ctx, fp2, fd);
7405 tcg_temp_free_i64(fp2);
7410 check_cp1_64bitmode(ctx);
7412 TCGv_i64 fp0 = tcg_temp_new_i64();
7413 TCGv_i64 fp1 = tcg_temp_new_i64();
7414 TCGv_i64 fp2 = tcg_temp_new_i64();
7416 gen_load_fpr64(ctx, fp0, fs);
7417 gen_load_fpr64(ctx, fp1, ft);
7418 gen_load_fpr64(ctx, fp2, fr);
7419 gen_helper_float_muladd_ps(fp2, fp0, fp1, fp2);
7420 tcg_temp_free_i64(fp0);
7421 tcg_temp_free_i64(fp1);
7422 gen_store_fpr64(ctx, fp2, fd);
7423 tcg_temp_free_i64(fp2);
7430 TCGv_i32 fp0 = tcg_temp_new_i32();
7431 TCGv_i32 fp1 = tcg_temp_new_i32();
7432 TCGv_i32 fp2 = tcg_temp_new_i32();
7434 gen_load_fpr32(fp0, fs);
7435 gen_load_fpr32(fp1, ft);
7436 gen_load_fpr32(fp2, fr);
7437 gen_helper_float_mulsub_s(fp2, fp0, fp1, fp2);
7438 tcg_temp_free_i32(fp0);
7439 tcg_temp_free_i32(fp1);
7440 gen_store_fpr32(fp2, fd);
7441 tcg_temp_free_i32(fp2);
7447 check_cp1_registers(ctx, fd | fs | ft | fr);
7449 TCGv_i64 fp0 = tcg_temp_new_i64();
7450 TCGv_i64 fp1 = tcg_temp_new_i64();
7451 TCGv_i64 fp2 = tcg_temp_new_i64();
7453 gen_load_fpr64(ctx, fp0, fs);
7454 gen_load_fpr64(ctx, fp1, ft);
7455 gen_load_fpr64(ctx, fp2, fr);
7456 gen_helper_float_mulsub_d(fp2, fp0, fp1, fp2);
7457 tcg_temp_free_i64(fp0);
7458 tcg_temp_free_i64(fp1);
7459 gen_store_fpr64(ctx, fp2, fd);
7460 tcg_temp_free_i64(fp2);
7465 check_cp1_64bitmode(ctx);
7467 TCGv_i64 fp0 = tcg_temp_new_i64();
7468 TCGv_i64 fp1 = tcg_temp_new_i64();
7469 TCGv_i64 fp2 = tcg_temp_new_i64();
7471 gen_load_fpr64(ctx, fp0, fs);
7472 gen_load_fpr64(ctx, fp1, ft);
7473 gen_load_fpr64(ctx, fp2, fr);
7474 gen_helper_float_mulsub_ps(fp2, fp0, fp1, fp2);
7475 tcg_temp_free_i64(fp0);
7476 tcg_temp_free_i64(fp1);
7477 gen_store_fpr64(ctx, fp2, fd);
7478 tcg_temp_free_i64(fp2);
7485 TCGv_i32 fp0 = tcg_temp_new_i32();
7486 TCGv_i32 fp1 = tcg_temp_new_i32();
7487 TCGv_i32 fp2 = tcg_temp_new_i32();
7489 gen_load_fpr32(fp0, fs);
7490 gen_load_fpr32(fp1, ft);
7491 gen_load_fpr32(fp2, fr);
7492 gen_helper_float_nmuladd_s(fp2, fp0, fp1, fp2);
7493 tcg_temp_free_i32(fp0);
7494 tcg_temp_free_i32(fp1);
7495 gen_store_fpr32(fp2, fd);
7496 tcg_temp_free_i32(fp2);
7502 check_cp1_registers(ctx, fd | fs | ft | fr);
7504 TCGv_i64 fp0 = tcg_temp_new_i64();
7505 TCGv_i64 fp1 = tcg_temp_new_i64();
7506 TCGv_i64 fp2 = tcg_temp_new_i64();
7508 gen_load_fpr64(ctx, fp0, fs);
7509 gen_load_fpr64(ctx, fp1, ft);
7510 gen_load_fpr64(ctx, fp2, fr);
7511 gen_helper_float_nmuladd_d(fp2, fp0, fp1, fp2);
7512 tcg_temp_free_i64(fp0);
7513 tcg_temp_free_i64(fp1);
7514 gen_store_fpr64(ctx, fp2, fd);
7515 tcg_temp_free_i64(fp2);
7520 check_cp1_64bitmode(ctx);
7522 TCGv_i64 fp0 = tcg_temp_new_i64();
7523 TCGv_i64 fp1 = tcg_temp_new_i64();
7524 TCGv_i64 fp2 = tcg_temp_new_i64();
7526 gen_load_fpr64(ctx, fp0, fs);
7527 gen_load_fpr64(ctx, fp1, ft);
7528 gen_load_fpr64(ctx, fp2, fr);
7529 gen_helper_float_nmuladd_ps(fp2, fp0, fp1, fp2);
7530 tcg_temp_free_i64(fp0);
7531 tcg_temp_free_i64(fp1);
7532 gen_store_fpr64(ctx, fp2, fd);
7533 tcg_temp_free_i64(fp2);
7540 TCGv_i32 fp0 = tcg_temp_new_i32();
7541 TCGv_i32 fp1 = tcg_temp_new_i32();
7542 TCGv_i32 fp2 = tcg_temp_new_i32();
7544 gen_load_fpr32(fp0, fs);
7545 gen_load_fpr32(fp1, ft);
7546 gen_load_fpr32(fp2, fr);
7547 gen_helper_float_nmulsub_s(fp2, fp0, fp1, fp2);
7548 tcg_temp_free_i32(fp0);
7549 tcg_temp_free_i32(fp1);
7550 gen_store_fpr32(fp2, fd);
7551 tcg_temp_free_i32(fp2);
7557 check_cp1_registers(ctx, fd | fs | ft | fr);
7559 TCGv_i64 fp0 = tcg_temp_new_i64();
7560 TCGv_i64 fp1 = tcg_temp_new_i64();
7561 TCGv_i64 fp2 = tcg_temp_new_i64();
7563 gen_load_fpr64(ctx, fp0, fs);
7564 gen_load_fpr64(ctx, fp1, ft);
7565 gen_load_fpr64(ctx, fp2, fr);
7566 gen_helper_float_nmulsub_d(fp2, fp0, fp1, fp2);
7567 tcg_temp_free_i64(fp0);
7568 tcg_temp_free_i64(fp1);
7569 gen_store_fpr64(ctx, fp2, fd);
7570 tcg_temp_free_i64(fp2);
7575 check_cp1_64bitmode(ctx);
7577 TCGv_i64 fp0 = tcg_temp_new_i64();
7578 TCGv_i64 fp1 = tcg_temp_new_i64();
7579 TCGv_i64 fp2 = tcg_temp_new_i64();
7581 gen_load_fpr64(ctx, fp0, fs);
7582 gen_load_fpr64(ctx, fp1, ft);
7583 gen_load_fpr64(ctx, fp2, fr);
7584 gen_helper_float_nmulsub_ps(fp2, fp0, fp1, fp2);
7585 tcg_temp_free_i64(fp0);
7586 tcg_temp_free_i64(fp1);
7587 gen_store_fpr64(ctx, fp2, fd);
7588 tcg_temp_free_i64(fp2);
7594 generate_exception (ctx, EXCP_RI);
7597 MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
7598 fregnames[fs], fregnames[ft]);
7601 /* ISA extensions (ASEs) */
7602 /* MIPS16 extension to MIPS32 */
7603 /* SmartMIPS extension to MIPS32 */
7605 #if defined(TARGET_MIPS64)
7607 /* MDMX extension to MIPS64 */
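/*
 * decode_opc translates one 32-bit instruction word: it raises AdEL on a
 * misaligned PC, finalizes a pending not-taken branch-likely, extracts the
 * standard rs/rt/rd/sa/imm fields, dispatches on the major opcode, and at
 * the bottom of the function emits the control transfer for any branch
 * whose delay slot was just translated.
 */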
7611 static void decode_opc (CPUState *env, DisasContext *ctx)
7615 uint32_t op, op1, op2;
7618 /* make sure instructions are on a word boundary */
7619 if (ctx->pc & 0x3) {
7620 env->CP0_BadVAddr = ctx->pc;
7621 generate_exception(ctx, EXCP_AdEL);
7625 /* Handle blikely not taken case */
7626 if ((ctx->hflags & MIPS_HFLAG_BMASK) == MIPS_HFLAG_BL) {
7627 int l1 = gen_new_label();
7629 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
7630 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
7631 tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
7632 gen_goto_tb(ctx, 1, ctx->pc + 4);
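/*
 * A branch-likely whose condition was false annuls its delay slot, so the
 * not-taken path above jumps straight past the slot to pc + 4 through the
 * second TB exit; the taken path resumes at label l1 (its gen_set_label()
 * is on a line elided from this listing).
 */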
7635 op = MASK_OP_MAJOR(ctx->opcode);
7636 rs = (ctx->opcode >> 21) & 0x1f;
7637 rt = (ctx->opcode >> 16) & 0x1f;
7638 rd = (ctx->opcode >> 11) & 0x1f;
7639 sa = (ctx->opcode >> 6) & 0x1f;
7640 imm = (int16_t)ctx->opcode;
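/*
 * Standard MIPS field layout being extracted above (R-type shown; I-type
 * instructions reuse bits 15..0 as the sign-extended immediate produced by
 * the (int16_t) cast):
 *
 *   31    26 25  21 20  16 15  11 10   6 5     0
 *  +--------+------+------+------+------+-------+
 *  | opcode |  rs  |  rt  |  rd  |  sa  | funct |
 *  +--------+------+------+------+------+-------+
 */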
7643 op1 = MASK_SPECIAL(ctx->opcode);
7645 case OPC_SLL: /* Shift with immediate */
7648 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7650 case OPC_MOVN: /* Conditional move */
7652 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7653 gen_cond_move(env, op1, rd, rs, rt);
7655 case OPC_ADD ... OPC_SUBU:
7656 gen_arith(env, ctx, op1, rd, rs, rt);
7658 case OPC_SLLV: /* Shifts */
7661 gen_shift(env, ctx, op1, rd, rs, rt);
7663 case OPC_SLT: /* Set on less than */
7665 gen_slt(env, op1, rd, rs, rt);
7667 case OPC_AND: /* Logic*/
7671 gen_logic(env, op1, rd, rs, rt);
7673 case OPC_MULT ... OPC_DIVU:
7675 check_insn(env, ctx, INSN_VR54XX);
7676 op1 = MASK_MUL_VR54XX(ctx->opcode);
7677 gen_mul_vr54xx(ctx, op1, rd, rs, rt);
7679 gen_muldiv(ctx, op1, rs, rt);
7681 case OPC_JR ... OPC_JALR:
7682 gen_compute_branch(ctx, op1, rs, rd, sa);
7684 case OPC_TGE ... OPC_TEQ: /* Traps */
7686 gen_trap(ctx, op1, rs, rt, -1);
7688 case OPC_MFHI: /* Move from HI/LO */
7690 gen_HILO(ctx, op1, rd);
7693 case OPC_MTLO: /* Move to HI/LO */
7694 gen_HILO(ctx, op1, rs);
7696 case OPC_PMON: /* Pmon entry point, also R4010 selsl */
7697 #ifdef MIPS_STRICT_STANDARD
7698 MIPS_INVAL("PMON / selsl");
7699 generate_exception(ctx, EXCP_RI);
7701 gen_helper_0i(pmon, sa);
7705 generate_exception(ctx, EXCP_SYSCALL);
7706 ctx->bstate = BS_STOP;
7709 generate_exception(ctx, EXCP_BREAK);
7712 #ifdef MIPS_STRICT_STANDARD
7714 generate_exception(ctx, EXCP_RI);
7716 /* Implemented as RI exception for now. */
7717 MIPS_INVAL("spim (unofficial)");
7718 generate_exception(ctx, EXCP_RI);
7726 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7727 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7728 check_cp1_enabled(ctx);
7729 gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
7730 (ctx->opcode >> 16) & 1);
7732 generate_exception_err(ctx, EXCP_CpU, 1);
7736 #if defined(TARGET_MIPS64)
7737 /* MIPS64 specific opcodes */
7744 check_insn(env, ctx, ISA_MIPS3);
7746 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7748 case OPC_DADD ... OPC_DSUBU:
7749 check_insn(env, ctx, ISA_MIPS3);
7751 gen_arith(env, ctx, op1, rd, rs, rt);
7756 check_insn(env, ctx, ISA_MIPS3);
7758 gen_shift(env, ctx, op1, rd, rs, rt);
7760 case OPC_DMULT ... OPC_DDIVU:
7761 check_insn(env, ctx, ISA_MIPS3);
7763 gen_muldiv(ctx, op1, rs, rt);
7766 default: /* Invalid */
7767 MIPS_INVAL("special");
7768 generate_exception(ctx, EXCP_RI);
7773 op1 = MASK_SPECIAL2(ctx->opcode);
7775 case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
7776 case OPC_MSUB ... OPC_MSUBU:
7777 check_insn(env, ctx, ISA_MIPS32);
7778 gen_muldiv(ctx, op1, rs, rt);
7781 gen_arith(env, ctx, op1, rd, rs, rt);
7785 check_insn(env, ctx, ISA_MIPS32);
7786 gen_cl(ctx, op1, rd, rs);
7789 /* XXX: not clear which exception should be raised
7790 * when in debug mode... */
7792 check_insn(env, ctx, ISA_MIPS32);
7793 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
7794 generate_exception(ctx, EXCP_DBp);
7796 generate_exception(ctx, EXCP_DBp);
7800 #if defined(TARGET_MIPS64)
7803 check_insn(env, ctx, ISA_MIPS64);
7805 gen_cl(ctx, op1, rd, rs);
7808 default: /* Invalid */
7809 MIPS_INVAL("special2");
7810 generate_exception(ctx, EXCP_RI);
7815 op1 = MASK_SPECIAL3(ctx->opcode);
7819 check_insn(env, ctx, ISA_MIPS32R2);
7820 gen_bitops(ctx, op1, rt, rs, sa, rd);
7823 check_insn(env, ctx, ISA_MIPS32R2);
7824 op2 = MASK_BSHFL(ctx->opcode);
7825 gen_bshfl(ctx, op2, rt, rd);
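/*
 * RDHWR exposes a few hardware registers to user mode.  The elided selector
 * cases below are, in order: 0 CPUNum, 1 SYNCI_Step, 2 CC (cycle counter),
 * 3 CCRes, and 29 ULR, the user-local/TLS pointer, which user-only builds
 * read directly from CPUState.tls_value.
 */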
7828 check_insn(env, ctx, ISA_MIPS32R2);
7830 TCGv t0 = tcg_temp_new();
7834 save_cpu_state(ctx, 1);
7835 gen_helper_rdhwr_cpunum(t0);
7836 gen_store_gpr(t0, rt);
7839 save_cpu_state(ctx, 1);
7840 gen_helper_rdhwr_synci_step(t0);
7841 gen_store_gpr(t0, rt);
7844 save_cpu_state(ctx, 1);
7845 gen_helper_rdhwr_cc(t0);
7846 gen_store_gpr(t0, rt);
7849 save_cpu_state(ctx, 1);
7850 gen_helper_rdhwr_ccres(t0);
7851 gen_store_gpr(t0, rt);
7854 #if defined(CONFIG_USER_ONLY)
7855 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, tls_value));
7856 gen_store_gpr(t0, rt);
7859 /* XXX: Some CPUs implement this in hardware.
7860 Not supported yet. */
7862 default: /* Invalid */
7863 MIPS_INVAL("rdhwr");
7864 generate_exception(ctx, EXCP_RI);
7871 check_insn(env, ctx, ASE_MT);
7873 TCGv t0 = tcg_temp_new();
7874 TCGv t1 = tcg_temp_new();
7876 gen_load_gpr(t0, rt);
7877 gen_load_gpr(t1, rs);
7878 gen_helper_fork(t0, t1);
7884 check_insn(env, ctx, ASE_MT);
7886 TCGv t0 = tcg_temp_new();
7888 save_cpu_state(ctx, 1);
7889 gen_load_gpr(t0, rs);
7890 gen_helper_yield(t0, t0);
7891 gen_store_gpr(t0, rd);
7895 #if defined(TARGET_MIPS64)
7896 case OPC_DEXTM ... OPC_DEXT:
7897 case OPC_DINSM ... OPC_DINS:
7898 check_insn(env, ctx, ISA_MIPS64R2);
7900 gen_bitops(ctx, op1, rt, rs, sa, rd);
7903 check_insn(env, ctx, ISA_MIPS64R2);
7905 op2 = MASK_DBSHFL(ctx->opcode);
7906 gen_bshfl(ctx, op2, rt, rd);
7909 default: /* Invalid */
7910 MIPS_INVAL("special3");
7911 generate_exception(ctx, EXCP_RI);
7916 op1 = MASK_REGIMM(ctx->opcode);
7918 case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
7919 case OPC_BLTZAL ... OPC_BGEZALL:
7920 gen_compute_branch(ctx, op1, rs, -1, imm << 2);
7922 case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
7924 gen_trap(ctx, op1, rs, -1, imm);
7927 check_insn(env, ctx, ISA_MIPS32R2);
7930 default: /* Invalid */
7931 MIPS_INVAL("regimm");
7932 generate_exception(ctx, EXCP_RI);
7937 check_cp0_enabled(ctx);
7938 op1 = MASK_CP0(ctx->opcode);
7944 #if defined(TARGET_MIPS64)
7948 #ifndef CONFIG_USER_ONLY
7949 gen_cp0(env, ctx, op1, rt, rd);
7950 #endif /* !CONFIG_USER_ONLY */
7952 case OPC_C0_FIRST ... OPC_C0_LAST:
7953 #ifndef CONFIG_USER_ONLY
7954 gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
7955 #endif /* !CONFIG_USER_ONLY */
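/*
 * MFMC0 encodings: the MT-ASE DMT/EMT/DVPE/EVPE forms and DI/EI.  Each
 * helper returns the previous control value in t0, which is then written
 * back to rt; DI/EI (their helper calls fall on lines elided here) also
 * stop translation because the interrupt-enable change can affect how the
 * following instructions must be executed.
 */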
7958 #ifndef CONFIG_USER_ONLY
7960 TCGv t0 = tcg_temp_new();
7962 op2 = MASK_MFMC0(ctx->opcode);
7965 check_insn(env, ctx, ASE_MT);
7966 gen_helper_dmt(t0, t0);
7967 gen_store_gpr(t0, rt);
7970 check_insn(env, ctx, ASE_MT);
7971 gen_helper_emt(t0, t0);
7972 gen_store_gpr(t0, rt);
7975 check_insn(env, ctx, ASE_MT);
7976 gen_helper_dvpe(t0, t0);
7977 gen_store_gpr(t0, rt);
7980 check_insn(env, ctx, ASE_MT);
7981 gen_helper_evpe(t0, t0);
7982 gen_store_gpr(t0, rt);
7985 check_insn(env, ctx, ISA_MIPS32R2);
7986 save_cpu_state(ctx, 1);
7988 gen_store_gpr(t0, rt);
7989 /* Stop translation as we may have switched the execution mode */
7990 ctx->bstate = BS_STOP;
7993 check_insn(env, ctx, ISA_MIPS32R2);
7994 save_cpu_state(ctx, 1);
7996 gen_store_gpr(t0, rt);
7997 /* Stop translation as we may have switched the execution mode */
7998 ctx->bstate = BS_STOP;
8000 default: /* Invalid */
8001 MIPS_INVAL("mfmc0");
8002 generate_exception(ctx, EXCP_RI);
8007 #endif /* !CONFIG_USER_ONLY */
8010 check_insn(env, ctx, ISA_MIPS32R2);
8011 gen_load_srsgpr(rt, rd);
8014 check_insn(env, ctx, ISA_MIPS32R2);
8015 gen_store_srsgpr(rt, rd);
8019 generate_exception(ctx, EXCP_RI);
8023 case OPC_ADDI: /* Arithmetic with immediate opcode */
8025 gen_arith_imm(env, ctx, op, rt, rs, imm);
8027 case OPC_SLTI: /* Set on less than with immediate opcode */
8029 gen_slt_imm(env, op, rt, rs, imm);
8031 case OPC_ANDI: /* Arithmetic with immediate opcode */
8035 gen_logic_imm(env, op, rt, rs, imm);
8037 case OPC_J ... OPC_JAL: /* Jump */
8038 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
8039 gen_compute_branch(ctx, op, rs, rt, offset);
8041 case OPC_BEQ ... OPC_BGTZ: /* Branch */
8042 case OPC_BEQL ... OPC_BGTZL:
8043 gen_compute_branch(ctx, op, rs, rt, imm << 2);
8045 case OPC_LB ... OPC_LWR: /* Load and stores */
8046 case OPC_SB ... OPC_SW:
8049 gen_ldst(ctx, op, rt, rs, imm);
8052 gen_st_cond(ctx, op, rt, rs, imm);
8055 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
8059 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
8063 /* Floating point (COP1). */
8068 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8069 check_cp1_enabled(ctx);
8070 gen_flt_ldst(ctx, op, rt, rs, imm);
8072 generate_exception_err(ctx, EXCP_CpU, 1);
8077 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8078 check_cp1_enabled(ctx);
8079 op1 = MASK_CP1(ctx->opcode);
8083 check_insn(env, ctx, ISA_MIPS32R2);
8088 gen_cp1(ctx, op1, rt, rd);
8090 #if defined(TARGET_MIPS64)
8093 check_insn(env, ctx, ISA_MIPS3);
8094 gen_cp1(ctx, op1, rt, rd);
8100 check_insn(env, ctx, ASE_MIPS3D);
8103 gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
8104 (rt >> 2) & 0x7, imm << 2);
8111 gen_farith(ctx, MASK_CP1_FUNC(ctx->opcode), rt, rd, sa,
8116 generate_exception (ctx, EXCP_RI);
8120 generate_exception_err(ctx, EXCP_CpU, 1);
8130 /* COP2: Not implemented. */
8131 generate_exception_err(ctx, EXCP_CpU, 2);
8135 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8136 check_cp1_enabled(ctx);
8137 op1 = MASK_CP3(ctx->opcode);
8145 gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
8163 gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
8167 generate_exception (ctx, EXCP_RI);
8171 generate_exception_err(ctx, EXCP_CpU, 1);
8175 #if defined(TARGET_MIPS64)
8176 /* MIPS64 opcodes */
8178 case OPC_LDL ... OPC_LDR:
8179 case OPC_SDL ... OPC_SDR:
8183 check_insn(env, ctx, ISA_MIPS3);
8185 gen_ldst(ctx, op, rt, rs, imm);
8188 check_insn(env, ctx, ISA_MIPS3);
8190 gen_st_cond(ctx, op, rt, rs, imm);
8194 check_insn(env, ctx, ISA_MIPS3);
8196 gen_arith_imm(env, ctx, op, rt, rs, imm);
8200 check_insn(env, ctx, ASE_MIPS16);
8201 /* MIPS16: Not implemented. */
8203 check_insn(env, ctx, ASE_MDMX);
8204 /* MDMX: Not implemented. */
8205 default: /* Invalid */
8206 MIPS_INVAL("major opcode");
8207 generate_exception(ctx, EXCP_RI);
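/*
 * Branch completion: if the instruction just decoded sat in a delay slot,
 * emit the actual control transfer now.  The elided case labels below
 * correspond to the branch hflags: unconditional branch (B), branch-likely
 * taken (BL), conditional branch tested via the bcond global (BC), and
 * jump-to-register via btarget (BR).
 */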
8210 if (ctx->hflags & MIPS_HFLAG_BMASK) {
8211 int hflags = ctx->hflags & MIPS_HFLAG_BMASK;
8212 /* Branches completion */
8213 ctx->hflags &= ~MIPS_HFLAG_BMASK;
8214 ctx->bstate = BS_BRANCH;
8215 save_cpu_state(ctx, 0);
8216 /* FIXME: Need to clear can_do_io. */
8219 /* unconditional branch */
8220 MIPS_DEBUG("unconditional branch");
8221 gen_goto_tb(ctx, 0, ctx->btarget);
8224 /* blikely taken case */
8225 MIPS_DEBUG("blikely branch taken");
8226 gen_goto_tb(ctx, 0, ctx->btarget);
8229 /* Conditional branch */
8230 MIPS_DEBUG("conditional branch");
8232 int l1 = gen_new_label();
8234 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
8235 gen_goto_tb(ctx, 1, ctx->pc + 4);
8237 gen_goto_tb(ctx, 0, ctx->btarget);
8241 /* unconditional branch to register */
8242 MIPS_DEBUG("branch to register");
8243 tcg_gen_mov_tl(cpu_PC, btarget);
8247 MIPS_DEBUG("unknown branch");
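/*
 * Main translation loop: decode instructions from pc_start until the block
 * state changes, a breakpoint or single-step exception is generated, the
 * guest PC crosses a page boundary, or the opcode buffer or icount budget
 * runs out, then close the TB with the appropriate exit.
 */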
8254 gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb, int search_pc)
8258 target_ulong pc_start;
8259 uint16_t *gen_opc_end;
8266 qemu_log("search pc %d\n", search_pc);
8269 /* Leave some spare opc slots for branch handling. */
8270 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE - 16;
8274 ctx.bstate = BS_NONE;
8275 /* Restore delay slot state from the tb context. */
8276 ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
8277 restore_cpu_state(env, &ctx);
8278 #ifdef CONFIG_USER_ONLY
8279 ctx.mem_idx = MIPS_HFLAG_UM;
8281 ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
8284 max_insns = tb->cflags & CF_COUNT_MASK;
8286 max_insns = CF_COUNT_MASK;
8288 qemu_log_mask(CPU_LOG_TB_CPU, "------------------------------------------------\n");
8289 /* FIXME: This may print out stale hflags from env... */
8290 log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
8292 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
8294 while (ctx.bstate == BS_NONE) {
8295 if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
8296 TAILQ_FOREACH(bp, &env->breakpoints, entry) {
8297 if (bp->pc == ctx.pc) {
8298 save_cpu_state(&ctx, 1);
8299 ctx.bstate = BS_BRANCH;
8300 gen_helper_0i(raise_exception, EXCP_DEBUG);
8301 /* Include the breakpoint location or the tb won't
8302 * be flushed when it must be. */
8304 goto done_generating;
8310 j = gen_opc_ptr - gen_opc_buf;
8314 gen_opc_instr_start[lj++] = 0;
8316 gen_opc_pc[lj] = ctx.pc;
8317 gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
8318 gen_opc_instr_start[lj] = 1;
8319 gen_opc_icount[lj] = num_insns;
8321 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8323 ctx.opcode = ldl_code(ctx.pc);
8324 decode_opc(env, &ctx);
8328 if (env->singlestep_enabled)
8331 if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
8334 if (gen_opc_ptr >= gen_opc_end)
8337 if (num_insns >= max_insns)
8343 if (tb->cflags & CF_LAST_IO)
8345 if (env->singlestep_enabled) {
8346 save_cpu_state(&ctx, ctx.bstate == BS_NONE);
8347 gen_helper_0i(raise_exception, EXCP_DEBUG);
8349 switch (ctx.bstate) {
8351 gen_helper_interrupt_restart();
8352 gen_goto_tb(&ctx, 0, ctx.pc);
8355 save_cpu_state(&ctx, 0);
8356 gen_goto_tb(&ctx, 0, ctx.pc);
8359 gen_helper_interrupt_restart();
8368 gen_icount_end(tb, num_insns);
8369 *gen_opc_ptr = INDEX_op_end;
8371 j = gen_opc_ptr - gen_opc_buf;
8374 gen_opc_instr_start[lj++] = 0;
8376 tb->size = ctx.pc - pc_start;
8377 tb->icount = num_insns;
8381 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
8382 qemu_log("IN: %s\n", lookup_symbol(pc_start));
8383 log_target_disas(pc_start, ctx.pc - pc_start, 0);
8386 qemu_log_mask(CPU_LOG_TB_CPU, "---------------- %d %08x\n", ctx.bstate, ctx.hflags);
8390 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
8392 gen_intermediate_code_internal(env, tb, 0);
8395 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
8397 gen_intermediate_code_internal(env, tb, 1);
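/*
 * fpu_dump_state prints FCR0/FCR31, the FR mode and the FP register file;
 * in FR=1 mode (is_fpu64) each 64-bit register is shown on its own, while
 * in FR=0 mode even/odd 32-bit registers are paired, which is why the loop
 * below steps by 1 or by 2.
 */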
8400 static void fpu_dump_state(CPUState *env, FILE *f,
8401 int (*fpu_fprintf)(FILE *f, const char *fmt, ...),
8405 int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
8407 #define printfpr(fp) \
8408 do { \
8409 if (is_fpu64) \
8410 fpu_fprintf(f, "w:%08x d:%016lx fd:%13g fs:%13g psu: %13g\n", \
8411 (fp)->w[FP_ENDIAN_IDX], (fp)->d, (fp)->fd, \
8412 (fp)->fs[FP_ENDIAN_IDX], (fp)->fs[!FP_ENDIAN_IDX]); \
8413 else { \
8414 fpr_t tmp; \
8415 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
8416 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
8417 fpu_fprintf(f, "w:%08x d:%016lx fd:%13g fs:%13g psu:%13g\n", \
8418 tmp.w[FP_ENDIAN_IDX], tmp.d, tmp.fd, \
8419 tmp.fs[FP_ENDIAN_IDX], tmp.fs[!FP_ENDIAN_IDX]); \
8420 } \
8421 } while(0)
8424 fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%08x(0x%02x)\n",
8425 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64, env->active_fpu.fp_status,
8426 get_float_exception_flags(&env->active_fpu.fp_status));
8427 for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
8428 fpu_fprintf(f, "%3s: ", fregnames[i]);
8429 printfpr(&env->active_fpu.fpr[i]);
8435 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8436 /* Debug help: the architecture requires 32-bit code to keep its values
8437 properly sign-extended on 64-bit machines. */
8439 #define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
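/*
 * SIGN_EXT_P(val) is true when the upper 33 bits are all zeroes or all ones,
 * i.e. val is a correctly sign-extended 32-bit quantity.  For example,
 * 0x000000007fffffff and 0xffffffff80000000 pass, while 0x0000000100000000
 * does not.
 */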
8442 cpu_mips_check_sign_extensions (CPUState *env, FILE *f,
8443 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8448 if (!SIGN_EXT_P(env->active_tc.PC))
8449 cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
8450 if (!SIGN_EXT_P(env->active_tc.HI[0]))
8451 cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
8452 if (!SIGN_EXT_P(env->active_tc.LO[0]))
8453 cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
8454 if (!SIGN_EXT_P(env->btarget))
8455 cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);
8457 for (i = 0; i < 32; i++) {
8458 if (!SIGN_EXT_P(env->active_tc.gpr[i]))
8459 cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);
8462 if (!SIGN_EXT_P(env->CP0_EPC))
8463 cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
8464 if (!SIGN_EXT_P(env->CP0_LLAddr))
8465 cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->CP0_LLAddr);
8469 void cpu_dump_state (CPUState *env, FILE *f,
8470 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8475 cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx " LO=0x" TARGET_FMT_lx " ds %04x " TARGET_FMT_lx " %d\n",
8476 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
8477 env->hflags, env->btarget, env->bcond);
8478 for (i = 0; i < 32; i++) {
8480 cpu_fprintf(f, "GPR%02d:", i);
8481 cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
8483 cpu_fprintf(f, "\n");
8486 cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
8487 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
8488 cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
8489 env->CP0_Config0, env->CP0_Config1, env->CP0_LLAddr);
8490 if (env->hflags & MIPS_HFLAG_FPU)
8491 fpu_dump_state(env, f, cpu_fprintf, flags);
8492 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8493 cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
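/*
 * mips_tcg_init registers the TCG globals that back the guest state: the
 * GPRs (gpr[0] stays unused since $zero is hard-wired), PC, the HI/LO/ACX
 * accumulators, DSPControl, the branch condition and target, hflags, and
 * the FPU control registers, each mapped onto its field in CPUState.
 */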
8497 static void mips_tcg_init(void)
8502 /* Initialize various static tables. */
8506 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
8507 TCGV_UNUSED(cpu_gpr[0]);
8508 for (i = 1; i < 32; i++)
8509 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
8510 offsetof(CPUState, active_tc.gpr[i]),
8512 cpu_PC = tcg_global_mem_new(TCG_AREG0,
8513 offsetof(CPUState, active_tc.PC), "PC");
8514 for (i = 0; i < MIPS_DSP_ACC; i++) {
8515 cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
8516 offsetof(CPUState, active_tc.HI[i]),
8518 cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
8519 offsetof(CPUState, active_tc.LO[i]),
8521 cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
8522 offsetof(CPUState, active_tc.ACX[i]),
8525 cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
8526 offsetof(CPUState, active_tc.DSPControl),
8528 bcond = tcg_global_mem_new(TCG_AREG0,
8529 offsetof(CPUState, bcond), "bcond");
8530 btarget = tcg_global_mem_new(TCG_AREG0,
8531 offsetof(CPUState, btarget), "btarget");
8532 hflags = tcg_global_mem_new_i32(TCG_AREG0,
8533 offsetof(CPUState, hflags), "hflags");
8535 fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
8536 offsetof(CPUState, active_fpu.fcr0),
8538 fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
8539 offsetof(CPUState, active_fpu.fcr31),
8542 /* register helpers */
8543 #define GEN_HELPER 2
8549 #include "translate_init.c"
8551 CPUMIPSState *cpu_mips_init (const char *cpu_model)
8554 const mips_def_t *def;
8556 def = cpu_mips_find_by_name(cpu_model);
8559 env = qemu_mallocz(sizeof(CPUMIPSState));
8560 env->cpu_model = def;
8563 env->cpu_model_str = cpu_model;
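/*
 * cpu_reset clears CPUMIPSState up to the breakpoints field and, on the
 * system-emulation path (the matching #else of the CONFIG_USER_ONLY test is
 * elided here), points ErrorEPC back at the jump if reset was taken from a
 * delay slot, parks the PC at the 0xBFC00000 boot vector, sets
 * Status.BEV|ERL, EBase, IntCtl, the watch and Debug registers, and finally
 * re-applies the per-model configuration via cpu_mips_register().
 */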
8569 void cpu_reset (CPUMIPSState *env)
8571 if (qemu_loglevel_mask(CPU_LOG_RESET)) {
8572 qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
8573 log_cpu_state(env, 0);
8576 memset(env, 0, offsetof(CPUMIPSState, breakpoints));
8581 #if defined(CONFIG_USER_ONLY)
8582 env->hflags = MIPS_HFLAG_UM;
8584 if (env->hflags & MIPS_HFLAG_BMASK) {
8585 /* If the exception was raised from a delay slot,
8586 come back to the jump. */
8587 env->CP0_ErrorEPC = env->active_tc.PC - 4;
8589 env->CP0_ErrorEPC = env->active_tc.PC;
8591 env->active_tc.PC = (int32_t)0xBFC00000;
8593 /* SMP not implemented */
8594 env->CP0_EBase = 0x80000000;
8595 env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
8596 /* vectored interrupts not implemented, timer on int 7,
8597 no performance counters. */
8598 env->CP0_IntCtl = 0xe0000000;
8602 for (i = 0; i < 7; i++) {
8603 env->CP0_WatchLo[i] = 0;
8604 env->CP0_WatchHi[i] = 0x80000000;
8606 env->CP0_WatchLo[7] = 0;
8607 env->CP0_WatchHi[7] = 0;
8609 /* Count register increments in debug mode, EJTAG version 1 */
8610 env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
8611 env->hflags = MIPS_HFLAG_CP0;
8613 env->exception_index = EXCP_NONE;
8614 cpu_mips_register(env, env->cpu_model);
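/*
 * gen_pc_load restores the guest PC and the delay-slot hflags recorded per
 * opcode at translation time (gen_opc_pc / gen_opc_hflags) when an exception
 * requires recovering the CPU state from a point inside a TB.
 */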
8617 void gen_pc_load(CPUState *env, TranslationBlock *tb,
8618 unsigned long searched_pc, int pc_pos, void *puc)
8620 env->active_tc.PC = gen_opc_pc[pc_pos];
8621 env->hflags &= ~MIPS_HFLAG_BMASK;
8622 env->hflags |= gen_opc_hflags[pc_pos];