2 * MIPS32 emulation for qemu: main translation routines.
4 * Copyright (c) 2004-2005 Jocelyn Mayer
5 * Copyright (c) 2006 Marius Groeger (FPU operations)
6 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
32 #include "qemu-common.h"
38 //#define MIPS_DEBUG_DISAS
39 //#define MIPS_DEBUG_SIGN_EXTENSIONS
41 /* MIPS major opcodes */
42 #define MASK_OP_MAJOR(op) (op & (0x3F << 26))
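/* Every enum value below is pre-shifted into bits 31..26, the field that
   MASK_OP_MAJOR extracts, so the decoder can switch directly on the masked
   instruction word. */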
45 /* indirect opcode tables */
46 OPC_SPECIAL = (0x00 << 26),
47 OPC_REGIMM = (0x01 << 26),
48 OPC_CP0 = (0x10 << 26),
49 OPC_CP1 = (0x11 << 26),
50 OPC_CP2 = (0x12 << 26),
51 OPC_CP3 = (0x13 << 26),
52 OPC_SPECIAL2 = (0x1C << 26),
53 OPC_SPECIAL3 = (0x1F << 26),
54 /* arithmetic with immediate */
55 OPC_ADDI = (0x08 << 26),
56 OPC_ADDIU = (0x09 << 26),
57 OPC_SLTI = (0x0A << 26),
58 OPC_SLTIU = (0x0B << 26),
59 /* logic with immediate */
60 OPC_ANDI = (0x0C << 26),
61 OPC_ORI = (0x0D << 26),
62 OPC_XORI = (0x0E << 26),
63 OPC_LUI = (0x0F << 26),
64 /* arithmetic with immediate */
65 OPC_DADDI = (0x18 << 26),
66 OPC_DADDIU = (0x19 << 26),
67 /* Jump and branches */
69 OPC_JAL = (0x03 << 26),
70 OPC_BEQ = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
71 OPC_BEQL = (0x14 << 26),
72 OPC_BNE = (0x05 << 26),
73 OPC_BNEL = (0x15 << 26),
74 OPC_BLEZ = (0x06 << 26),
75 OPC_BLEZL = (0x16 << 26),
76 OPC_BGTZ = (0x07 << 26),
77 OPC_BGTZL = (0x17 << 26),
78 OPC_JALX = (0x1D << 26), /* MIPS 16 only */
80 OPC_LDL = (0x1A << 26),
81 OPC_LDR = (0x1B << 26),
82 OPC_LB = (0x20 << 26),
83 OPC_LH = (0x21 << 26),
84 OPC_LWL = (0x22 << 26),
85 OPC_LW = (0x23 << 26),
86 OPC_LBU = (0x24 << 26),
87 OPC_LHU = (0x25 << 26),
88 OPC_LWR = (0x26 << 26),
89 OPC_LWU = (0x27 << 26),
90 OPC_SB = (0x28 << 26),
91 OPC_SH = (0x29 << 26),
92 OPC_SWL = (0x2A << 26),
93 OPC_SW = (0x2B << 26),
94 OPC_SDL = (0x2C << 26),
95 OPC_SDR = (0x2D << 26),
96 OPC_SWR = (0x2E << 26),
97 OPC_LL = (0x30 << 26),
98 OPC_LLD = (0x34 << 26),
99 OPC_LD = (0x37 << 26),
100 OPC_SC = (0x38 << 26),
101 OPC_SCD = (0x3C << 26),
102 OPC_SD = (0x3F << 26),
103 /* Floating point load/store */
104 OPC_LWC1 = (0x31 << 26),
105 OPC_LWC2 = (0x32 << 26),
106 OPC_LDC1 = (0x35 << 26),
107 OPC_LDC2 = (0x36 << 26),
108 OPC_SWC1 = (0x39 << 26),
109 OPC_SWC2 = (0x3A << 26),
110 OPC_SDC1 = (0x3D << 26),
111 OPC_SDC2 = (0x3E << 26),
112 /* MDMX ASE specific */
113 OPC_MDMX = (0x1E << 26),
114 /* Cache and prefetch */
115 OPC_CACHE = (0x2F << 26),
116 OPC_PREF = (0x33 << 26),
117 /* Reserved major opcode */
118 OPC_MAJOR3B_RESERVED = (0x3B << 26),
121 /* MIPS special opcodes */
122 #define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)
126 OPC_SLL = 0x00 | OPC_SPECIAL,
127 /* NOP is SLL r0, r0, 0 */
128 /* SSNOP is SLL r0, r0, 1 */
129 /* EHB is SLL r0, r0, 3 */
130 OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
131 OPC_ROTR = OPC_SRL | (1 << 21),
132 OPC_SRA = 0x03 | OPC_SPECIAL,
133 OPC_SLLV = 0x04 | OPC_SPECIAL,
134 OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
135 OPC_ROTRV = OPC_SRLV | (1 << 6),
136 OPC_SRAV = 0x07 | OPC_SPECIAL,
137 OPC_DSLLV = 0x14 | OPC_SPECIAL,
138 OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
139 OPC_DROTRV = OPC_DSRLV | (1 << 6),
140 OPC_DSRAV = 0x17 | OPC_SPECIAL,
141 OPC_DSLL = 0x38 | OPC_SPECIAL,
142 OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
143 OPC_DROTR = OPC_DSRL | (1 << 21),
144 OPC_DSRA = 0x3B | OPC_SPECIAL,
145 OPC_DSLL32 = 0x3C | OPC_SPECIAL,
146 OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
147 OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
148 OPC_DSRA32 = 0x3F | OPC_SPECIAL,
149 /* Multiplication / division */
150 OPC_MULT = 0x18 | OPC_SPECIAL,
151 OPC_MULTU = 0x19 | OPC_SPECIAL,
152 OPC_DIV = 0x1A | OPC_SPECIAL,
153 OPC_DIVU = 0x1B | OPC_SPECIAL,
154 OPC_DMULT = 0x1C | OPC_SPECIAL,
155 OPC_DMULTU = 0x1D | OPC_SPECIAL,
156 OPC_DDIV = 0x1E | OPC_SPECIAL,
157 OPC_DDIVU = 0x1F | OPC_SPECIAL,
158 /* 2 registers arithmetic / logic */
159 OPC_ADD = 0x20 | OPC_SPECIAL,
160 OPC_ADDU = 0x21 | OPC_SPECIAL,
161 OPC_SUB = 0x22 | OPC_SPECIAL,
162 OPC_SUBU = 0x23 | OPC_SPECIAL,
163 OPC_AND = 0x24 | OPC_SPECIAL,
164 OPC_OR = 0x25 | OPC_SPECIAL,
165 OPC_XOR = 0x26 | OPC_SPECIAL,
166 OPC_NOR = 0x27 | OPC_SPECIAL,
167 OPC_SLT = 0x2A | OPC_SPECIAL,
168 OPC_SLTU = 0x2B | OPC_SPECIAL,
169 OPC_DADD = 0x2C | OPC_SPECIAL,
170 OPC_DADDU = 0x2D | OPC_SPECIAL,
171 OPC_DSUB = 0x2E | OPC_SPECIAL,
172 OPC_DSUBU = 0x2F | OPC_SPECIAL,
174 OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
175 OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
177 OPC_TGE = 0x30 | OPC_SPECIAL,
178 OPC_TGEU = 0x31 | OPC_SPECIAL,
179 OPC_TLT = 0x32 | OPC_SPECIAL,
180 OPC_TLTU = 0x33 | OPC_SPECIAL,
181 OPC_TEQ = 0x34 | OPC_SPECIAL,
182 OPC_TNE = 0x36 | OPC_SPECIAL,
183 /* HI / LO registers load & stores */
184 OPC_MFHI = 0x10 | OPC_SPECIAL,
185 OPC_MTHI = 0x11 | OPC_SPECIAL,
186 OPC_MFLO = 0x12 | OPC_SPECIAL,
187 OPC_MTLO = 0x13 | OPC_SPECIAL,
188 /* Conditional moves */
189 OPC_MOVZ = 0x0A | OPC_SPECIAL,
190 OPC_MOVN = 0x0B | OPC_SPECIAL,
192 OPC_MOVCI = 0x01 | OPC_SPECIAL,
195 OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
196 OPC_SYSCALL = 0x0C | OPC_SPECIAL,
197 OPC_BREAK = 0x0D | OPC_SPECIAL,
198 OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
199 OPC_SYNC = 0x0F | OPC_SPECIAL,
201 OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
202 OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
203 OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
204 OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
205 OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
206 OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
207 OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
210 /* Multiplication variants of the vr54xx. */
211 #define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))
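/* The VR54xx multiply-accumulate variants reuse the SPECIAL MULT/MULTU
   function codes and are distinguished by the otherwise-zero sa field
   (bits 10..6), which is why that field is part of the mask. */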
214 OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
215 OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
216 OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
217 OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
218 OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
219 OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
220 OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
221 OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
222 OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
223 OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
224 OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
225 OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
226 OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
227 OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
230 /* REGIMM (rt field) opcodes */
231 #define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))
234 OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
235 OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
236 OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
237 OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
238 OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
239 OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
240 OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
241 OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
242 OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
243 OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
244 OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
245 OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
246 OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
247 OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
248 OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,
251 /* Special2 opcodes */
252 #define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)
255 /* Multiply & xxx operations */
256 OPC_MADD = 0x00 | OPC_SPECIAL2,
257 OPC_MADDU = 0x01 | OPC_SPECIAL2,
258 OPC_MUL = 0x02 | OPC_SPECIAL2,
259 OPC_MSUB = 0x04 | OPC_SPECIAL2,
260 OPC_MSUBU = 0x05 | OPC_SPECIAL2,
262 OPC_CLZ = 0x20 | OPC_SPECIAL2,
263 OPC_CLO = 0x21 | OPC_SPECIAL2,
264 OPC_DCLZ = 0x24 | OPC_SPECIAL2,
265 OPC_DCLO = 0x25 | OPC_SPECIAL2,
267 OPC_SDBBP = 0x3F | OPC_SPECIAL2,
270 /* Special3 opcodes */
271 #define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
274 OPC_EXT = 0x00 | OPC_SPECIAL3,
275 OPC_DEXTM = 0x01 | OPC_SPECIAL3,
276 OPC_DEXTU = 0x02 | OPC_SPECIAL3,
277 OPC_DEXT = 0x03 | OPC_SPECIAL3,
278 OPC_INS = 0x04 | OPC_SPECIAL3,
279 OPC_DINSM = 0x05 | OPC_SPECIAL3,
280 OPC_DINSU = 0x06 | OPC_SPECIAL3,
281 OPC_DINS = 0x07 | OPC_SPECIAL3,
282 OPC_FORK = 0x08 | OPC_SPECIAL3,
283 OPC_YIELD = 0x09 | OPC_SPECIAL3,
284 OPC_BSHFL = 0x20 | OPC_SPECIAL3,
285 OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
286 OPC_RDHWR = 0x3B | OPC_SPECIAL3,
290 #define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
293 OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
294 OPC_SEB = (0x10 << 6) | OPC_BSHFL,
295 OPC_SEH = (0x18 << 6) | OPC_BSHFL,
299 #define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))
302 OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
303 OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
306 /* Coprocessor 0 (rs field) */
307 #define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
310 OPC_MFC0 = (0x00 << 21) | OPC_CP0,
311 OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
312 OPC_MTC0 = (0x04 << 21) | OPC_CP0,
313 OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
314 OPC_MFTR = (0x08 << 21) | OPC_CP0,
315 OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
316 OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
317 OPC_MTTR = (0x0C << 21) | OPC_CP0,
318 OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
319 OPC_C0 = (0x10 << 21) | OPC_CP0,
320 OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
321 OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
325 #define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)
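/* Within the MFMC0 group, bit 5 of the encoding selects between the
   disable/clear form (0: DI, DVPE, DMT) and the enable/set form
   (1: EI, EVPE, EMT) of each operation. */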
328 OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
329 OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
330 OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
331 OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
332 OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
333 OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
336 /* Coprocessor 0 (with rs == C0) */
337 #define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)
340 OPC_TLBR = 0x01 | OPC_C0,
341 OPC_TLBWI = 0x02 | OPC_C0,
342 OPC_TLBWR = 0x06 | OPC_C0,
343 OPC_TLBP = 0x08 | OPC_C0,
344 OPC_RFE = 0x10 | OPC_C0,
345 OPC_ERET = 0x18 | OPC_C0,
346 OPC_DERET = 0x1F | OPC_C0,
347 OPC_WAIT = 0x20 | OPC_C0,
350 /* Coprocessor 1 (rs field) */
351 #define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
354 OPC_MFC1 = (0x00 << 21) | OPC_CP1,
355 OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
356 OPC_CFC1 = (0x02 << 21) | OPC_CP1,
357 OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
358 OPC_MTC1 = (0x04 << 21) | OPC_CP1,
359 OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
360 OPC_CTC1 = (0x06 << 21) | OPC_CP1,
361 OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
362 OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
363 OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
364 OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
365 OPC_S_FMT = (0x10 << 21) | OPC_CP1, /* 16: fmt=single fp */
366 OPC_D_FMT = (0x11 << 21) | OPC_CP1, /* 17: fmt=double fp */
367 OPC_E_FMT = (0x12 << 21) | OPC_CP1, /* 18: fmt=extended fp */
368 OPC_Q_FMT = (0x13 << 21) | OPC_CP1, /* 19: fmt=quad fp */
369 OPC_W_FMT = (0x14 << 21) | OPC_CP1, /* 20: fmt=32bit fixed */
370 OPC_L_FMT = (0x15 << 21) | OPC_CP1, /* 21: fmt=64bit fixed */
371 OPC_PS_FMT = (0x16 << 21) | OPC_CP1, /* 22: fmt=paired single fp */
374 #define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
375 #define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))
378 OPC_BC1F = (0x00 << 16) | OPC_BC1,
379 OPC_BC1T = (0x01 << 16) | OPC_BC1,
380 OPC_BC1FL = (0x02 << 16) | OPC_BC1,
381 OPC_BC1TL = (0x03 << 16) | OPC_BC1,
385 OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
386 OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
390 OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
391 OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
394 #define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))
397 OPC_MFC2 = (0x00 << 21) | OPC_CP2,
398 OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
399 OPC_CFC2 = (0x02 << 21) | OPC_CP2,
400 OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
401 OPC_MTC2 = (0x04 << 21) | OPC_CP2,
402 OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
403 OPC_CTC2 = (0x06 << 21) | OPC_CP2,
404 OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
405 OPC_BC2 = (0x08 << 21) | OPC_CP2,
408 #define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)
411 OPC_LWXC1 = 0x00 | OPC_CP3,
412 OPC_LDXC1 = 0x01 | OPC_CP3,
413 OPC_LUXC1 = 0x05 | OPC_CP3,
414 OPC_SWXC1 = 0x08 | OPC_CP3,
415 OPC_SDXC1 = 0x09 | OPC_CP3,
416 OPC_SUXC1 = 0x0D | OPC_CP3,
417 OPC_PREFX = 0x0F | OPC_CP3,
418 OPC_ALNV_PS = 0x1E | OPC_CP3,
419 OPC_MADD_S = 0x20 | OPC_CP3,
420 OPC_MADD_D = 0x21 | OPC_CP3,
421 OPC_MADD_PS = 0x26 | OPC_CP3,
422 OPC_MSUB_S = 0x28 | OPC_CP3,
423 OPC_MSUB_D = 0x29 | OPC_CP3,
424 OPC_MSUB_PS = 0x2E | OPC_CP3,
425 OPC_NMADD_S = 0x30 | OPC_CP3,
426 OPC_NMADD_D = 0x31 | OPC_CP3,
427 OPC_NMADD_PS= 0x36 | OPC_CP3,
428 OPC_NMSUB_S = 0x38 | OPC_CP3,
429 OPC_NMSUB_D = 0x39 | OPC_CP3,
430 OPC_NMSUB_PS= 0x3E | OPC_CP3,
433 /* global register indices */
434 static TCGv_ptr cpu_env;
435 static TCGv cpu_gpr[32], cpu_PC;
436 static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
437 static TCGv cpu_dspctrl, btarget, bcond;
438 static TCGv_i32 hflags;
439 static TCGv_i32 fpu_fcr0, fpu_fcr31;
441 #include "gen-icount.h"
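/* Wrappers for helpers that take a constant argument: the immediate is
   materialized in a temporary TCGv_i32, passed to the generated helper
   stub, and the temporary is freed again. */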
443 #define gen_helper_0i(name, arg) do { \
444 TCGv_i32 helper_tmp = tcg_const_i32(arg); \
445 gen_helper_##name(helper_tmp); \
446 tcg_temp_free_i32(helper_tmp); \
449 #define gen_helper_1i(name, arg1, arg2) do { \
450 TCGv_i32 helper_tmp = tcg_const_i32(arg2); \
451 gen_helper_##name(arg1, helper_tmp); \
452 tcg_temp_free_i32(helper_tmp); \
455 #define gen_helper_2i(name, arg1, arg2, arg3) do { \
456 TCGv_i32 helper_tmp = tcg_const_i32(arg3); \
457 gen_helper_##name(arg1, arg2, helper_tmp); \
458 tcg_temp_free_i32(helper_tmp); \
461 #define gen_helper_3i(name, arg1, arg2, arg3, arg4) do { \
462 TCGv_i32 helper_tmp = tcg_const_i32(arg4); \
463 gen_helper_##name(arg1, arg2, arg3, helper_tmp); \
464 tcg_temp_free_i32(helper_tmp); \
467 typedef struct DisasContext {
468 struct TranslationBlock *tb;
469 target_ulong pc, saved_pc;
471 int singlestep_enabled;
472 /* Routine used to access memory */
474 uint32_t hflags, saved_hflags;
476 target_ulong btarget;
480 BS_NONE = 0, /* We go out of the TB without reaching a branch or an
481 * exception condition */
482 BS_STOP = 1, /* We want to stop translation for any reason */
483 BS_BRANCH = 2, /* We reached a branch condition */
484 BS_EXCP = 3, /* We reached an exception condition */
487 static const char *regnames[] =
488 { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
489 "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
490 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
491 "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };
493 static const char *regnames_HI[] =
494 { "HI0", "HI1", "HI2", "HI3", };
496 static const char *regnames_LO[] =
497 { "LO0", "LO1", "LO2", "LO3", };
499 static const char *regnames_ACX[] =
500 { "ACX0", "ACX1", "ACX2", "ACX3", };
502 static const char *fregnames[] =
503 { "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
504 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
505 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
506 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };
508 #ifdef MIPS_DEBUG_DISAS
509 #define MIPS_DEBUG(fmt, ...) \
510 qemu_log_mask(CPU_LOG_TB_IN_ASM, \
511 TARGET_FMT_lx ": %08x " fmt "\n", \
512 ctx->pc, ctx->opcode , ## __VA_ARGS__)
513 #define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
515 #define MIPS_DEBUG(fmt, ...) do { } while(0)
516 #define LOG_DISAS(...) do { } while (0)
519 #define MIPS_INVAL(op) \
521 MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26, \
522 ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F)); \
525 /* General purpose registers moves. */
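/* GPR 0 is hard-wired to zero: loads of register 0 emit a constant 0 and
   stores to register 0 are simply discarded. */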
526 static inline void gen_load_gpr (TCGv t, int reg)
529 tcg_gen_movi_tl(t, 0);
531 tcg_gen_mov_tl(t, cpu_gpr[reg]);
534 static inline void gen_store_gpr (TCGv t, int reg)
537 tcg_gen_mov_tl(cpu_gpr[reg], t);
540 /* Moves to/from ACX register. */
541 static inline void gen_load_ACX (TCGv t, int reg)
543 tcg_gen_mov_tl(t, cpu_ACX[reg]);
546 static inline void gen_store_ACX (TCGv t, int reg)
548 tcg_gen_mov_tl(cpu_ACX[reg], t);
551 /* Moves to/from shadow registers. */
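/* The PSS field of CP0 SRSCtl selects the previous shadow register set;
   it is converted into a byte offset (set index * 32 registers *
   sizeof(target_ulong)) and used to index the shadow GPR file relative to
   cpu_env. */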
552 static inline void gen_load_srsgpr (int from, int to)
554 TCGv t0 = tcg_temp_new();
557 tcg_gen_movi_tl(t0, 0);
559 TCGv_i32 t2 = tcg_temp_new_i32();
560 TCGv_ptr addr = tcg_temp_new_ptr();
562 tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
563 tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
564 tcg_gen_andi_i32(t2, t2, 0xf);
565 tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
566 tcg_gen_ext_i32_ptr(addr, t2);
567 tcg_gen_add_ptr(addr, cpu_env, addr);
569 tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
570 tcg_temp_free_ptr(addr);
571 tcg_temp_free_i32(t2);
573 gen_store_gpr(t0, to);
577 static inline void gen_store_srsgpr (int from, int to)
580 TCGv t0 = tcg_temp_new();
581 TCGv_i32 t2 = tcg_temp_new_i32();
582 TCGv_ptr addr = tcg_temp_new_ptr();
584 gen_load_gpr(t0, from);
585 tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
586 tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
587 tcg_gen_andi_i32(t2, t2, 0xf);
588 tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
589 tcg_gen_ext_i32_ptr(addr, t2);
590 tcg_gen_add_ptr(addr, cpu_env, addr);
592 tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
593 tcg_temp_free_ptr(addr);
594 tcg_temp_free_i32(t2);
599 /* Floating point register moves. */
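/* Each 64-bit FPR is kept as two 32-bit words; FP_ENDIAN_IDX is the index
   of the word holding the low half on this host, and the *_fpr32h accessors
   touch the other (high) word.  Without MIPS_HFLAG_F64 (FR=0) a 64-bit value
   instead spans an even/odd register pair, reassembled by gen_load_fpr64 and
   gen_store_fpr64 below. */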
600 static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
602 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
605 static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
607 tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
610 static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
612 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
615 static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
617 tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
620 static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
622 if (ctx->hflags & MIPS_HFLAG_F64) {
623 tcg_gen_ld_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
625 TCGv_i32 t0 = tcg_temp_new_i32();
626 TCGv_i32 t1 = tcg_temp_new_i32();
627 gen_load_fpr32(t0, reg & ~1);
628 gen_load_fpr32(t1, reg | 1);
629 tcg_gen_concat_i32_i64(t, t0, t1);
630 tcg_temp_free_i32(t0);
631 tcg_temp_free_i32(t1);
635 static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
637 if (ctx->hflags & MIPS_HFLAG_F64) {
638 tcg_gen_st_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
640 TCGv_i64 t0 = tcg_temp_new_i64();
641 TCGv_i32 t1 = tcg_temp_new_i32();
642 tcg_gen_trunc_i64_i32(t1, t);
643 gen_store_fpr32(t1, reg & ~1);
644 tcg_gen_shri_i64(t0, t, 32);
645 tcg_gen_trunc_i64_i32(t1, t0);
646 gen_store_fpr32(t1, reg | 1);
647 tcg_temp_free_i32(t1);
648 tcg_temp_free_i64(t0);
652 static inline int get_fp_bit (int cc)
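/* FOP_CONDS expands to one gen_cmp* function per operand width; the 4-bit
   condition code of c.cond.fmt selects which comparison helper is called. */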
660 #define FOP_CONDS(type, fmt, bits) \
661 static inline void gen_cmp ## type ## _ ## fmt(int n, TCGv_i##bits a, \
662 TCGv_i##bits b, int cc) \
665 case 0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, a, b, cc); break;\
666 case 1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, a, b, cc); break;\
667 case 2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, a, b, cc); break;\
668 case 3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, a, b, cc); break;\
669 case 4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, a, b, cc); break;\
670 case 5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, a, b, cc); break;\
671 case 6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, a, b, cc); break;\
672 case 7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, a, b, cc); break;\
673 case 8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, a, b, cc); break;\
674 case 9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, a, b, cc); break;\
675 case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, a, b, cc); break;\
676 case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, a, b, cc); break;\
677 case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, a, b, cc); break;\
678 case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, a, b, cc); break;\
679 case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, a, b, cc); break;\
680 case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, a, b, cc); break;\
686 FOP_CONDS(abs, d, 64)
688 FOP_CONDS(abs, s, 32)
690 FOP_CONDS(abs, ps, 64)
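/* OP_COND/OP_CONDI/OP_CONDZ emit a two-label compare-and-set sequence:
   ret is written with 0 on the fall-through path and with 1 on the taken
   path, producing the 0/1 value used by the slt family and by the branch
   condition (bcond) computations further down. */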
694 #define OP_COND(name, cond) \
695 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0, TCGv t1) \
697 int l1 = gen_new_label(); \
698 int l2 = gen_new_label(); \
700 tcg_gen_brcond_tl(cond, t0, t1, l1); \
701 tcg_gen_movi_tl(ret, 0); \
704 tcg_gen_movi_tl(ret, 1); \
707 OP_COND(eq, TCG_COND_EQ);
708 OP_COND(ne, TCG_COND_NE);
709 OP_COND(ge, TCG_COND_GE);
710 OP_COND(geu, TCG_COND_GEU);
711 OP_COND(lt, TCG_COND_LT);
712 OP_COND(ltu, TCG_COND_LTU);
715 #define OP_CONDI(name, cond) \
716 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0, target_ulong val) \
718 int l1 = gen_new_label(); \
719 int l2 = gen_new_label(); \
721 tcg_gen_brcondi_tl(cond, t0, val, l1); \
722 tcg_gen_movi_tl(ret, 0); \
725 tcg_gen_movi_tl(ret, 1); \
728 OP_CONDI(lti, TCG_COND_LT);
729 OP_CONDI(ltiu, TCG_COND_LTU);
732 #define OP_CONDZ(name, cond) \
733 static inline void glue(gen_op_, name) (TCGv ret, TCGv t0) \
735 int l1 = gen_new_label(); \
736 int l2 = gen_new_label(); \
738 tcg_gen_brcondi_tl(cond, t0, 0, l1); \
739 tcg_gen_movi_tl(ret, 0); \
742 tcg_gen_movi_tl(ret, 1); \
745 OP_CONDZ(gez, TCG_COND_GE);
746 OP_CONDZ(gtz, TCG_COND_GT);
747 OP_CONDZ(lez, TCG_COND_LE);
748 OP_CONDZ(ltz, TCG_COND_LT);
751 static inline void gen_save_pc(target_ulong pc)
753 tcg_gen_movi_tl(cpu_PC, pc);
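/* Synchronize the lazily tracked translation state with the CPU globals:
   the PC is written back only if it moved since the last sync, and hflags
   (plus btarget when a branch is pending) only if they changed.  It is
   called before operations that may raise an exception or otherwise need an
   up-to-date CPUState. */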
756 static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
758 LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
759 if (do_save_pc && ctx->pc != ctx->saved_pc) {
760 gen_save_pc(ctx->pc);
761 ctx->saved_pc = ctx->pc;
763 if (ctx->hflags != ctx->saved_hflags) {
764 tcg_gen_movi_i32(hflags, ctx->hflags);
765 ctx->saved_hflags = ctx->hflags;
766 switch (ctx->hflags & MIPS_HFLAG_BMASK) {
772 tcg_gen_movi_tl(btarget, ctx->btarget);
778 static inline void restore_cpu_state (CPUState *env, DisasContext *ctx)
780 ctx->saved_hflags = ctx->hflags;
781 switch (ctx->hflags & MIPS_HFLAG_BMASK) {
787 ctx->btarget = env->btarget;
793 generate_exception_err (DisasContext *ctx, int excp, int err)
795 TCGv_i32 texcp = tcg_const_i32(excp);
796 TCGv_i32 terr = tcg_const_i32(err);
797 save_cpu_state(ctx, 1);
798 gen_helper_raise_exception_err(texcp, terr);
799 tcg_temp_free_i32(terr);
800 tcg_temp_free_i32(texcp);
804 generate_exception (DisasContext *ctx, int excp)
806 save_cpu_state(ctx, 1);
807 gen_helper_0i(raise_exception, excp);
810 /* Addresses computation */
811 static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
813 tcg_gen_add_tl(ret, arg0, arg1);
815 #if defined(TARGET_MIPS64)
816 /* For compatibility with 32-bit code, data references in user mode
817 with Status_UX = 0 should be cast to 32-bit and sign extended.
818 See the MIPS64 PRA manual, section 4.10. */
819 if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
820 !(ctx->hflags & MIPS_HFLAG_UX)) {
821 tcg_gen_ext32s_i64(ret, ret);
826 static inline void check_cp0_enabled(DisasContext *ctx)
828 if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
829 generate_exception_err(ctx, EXCP_CpU, 1);
832 static inline void check_cp1_enabled(DisasContext *ctx)
834 if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
835 generate_exception_err(ctx, EXCP_CpU, 1);
838 /* Verify that the processor is running with COP1X instructions enabled.
839 This is associated with the nabla symbol in the MIPS32 and MIPS64 opcode tables. */
842 static inline void check_cop1x(DisasContext *ctx)
844 if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
845 generate_exception(ctx, EXCP_RI);
848 /* Verify that the processor is running with 64-bit floating-point
849 operations enabled. */
851 static inline void check_cp1_64bitmode(DisasContext *ctx)
853 if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
854 generate_exception(ctx, EXCP_RI);
858 * Verify that a floating point register is valid; an operation is not defined
859 * if bit 0 of any register specification is set and the FR bit in the
860 * Status register equals zero, since the register numbers specify an
861 * even-odd pair of adjacent coprocessor general registers. When the FR bit
862 * in the Status register equals one, both even and odd register numbers
863 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
865 * Multiple 64 bit wide registers can be checked by calling
866 * check_cp1_registers(ctx, freg1 | freg2 | ... | fregN);
868 static inline void check_cp1_registers(DisasContext *ctx, int regs)
870 if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
871 generate_exception(ctx, EXCP_RI);
874 /* This code generates a "reserved instruction" exception if the
875 CPU does not support the instruction set corresponding to flags. */
876 static inline void check_insn(CPUState *env, DisasContext *ctx, int flags)
878 if (unlikely(!(env->insn_flags & flags)))
879 generate_exception(ctx, EXCP_RI);
882 /* This code generates a "reserved instruction" exception if 64-bit
883 instructions are not enabled. */
884 static inline void check_mips_64(DisasContext *ctx)
886 if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
887 generate_exception(ctx, EXCP_RI);
890 /* load/store instructions. */
891 #define OP_LD(insn,fname) \
892 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
894 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
901 #if defined(TARGET_MIPS64)
907 #define OP_ST(insn,fname) \
908 static inline void op_ldst_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx) \
910 tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx); \
915 #if defined(TARGET_MIPS64)
920 #ifdef CONFIG_USER_ONLY
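/* User-mode LL: there is no real exclusive monitor, so the load just
   records the address in lladdr and the loaded value in llval for the
   matching SC to validate later. */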
921 #define OP_LD_ATOMIC(insn,fname) \
922 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
924 TCGv t0 = tcg_temp_new(); \
925 tcg_gen_mov_tl(t0, arg1); \
926 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
927 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, lladdr)); \
928 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUState, llval)); \
932 #define OP_LD_ATOMIC(insn,fname) \
933 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
935 gen_helper_2i(insn, ret, arg1, ctx->mem_idx); \
938 OP_LD_ATOMIC(ll,ld32s);
939 #if defined(TARGET_MIPS64)
940 OP_LD_ATOMIC(lld,ld64);
944 #ifdef CONFIG_USER_ONLY
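/* User-mode SC: raise AdES on a misaligned address, then compare the
   address with lladdr.  If it still matches, EXCP_SC is raised so the store
   can be completed and validated outside generated code; otherwise rt is
   cleared to report failure. */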
945 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
946 static inline void op_ldst_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
948 TCGv t0 = tcg_temp_new(); \
949 int l1 = gen_new_label(); \
950 int l2 = gen_new_label(); \
952 tcg_gen_andi_tl(t0, arg2, almask); \
953 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
954 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUState, CP0_BadVAddr)); \
955 generate_exception(ctx, EXCP_AdES); \
957 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, lladdr)); \
958 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
959 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
960 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, llreg)); \
961 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUState, llnewval)); \
962 gen_helper_0i(raise_exception, EXCP_SC); \
964 tcg_gen_movi_tl(t0, 0); \
965 gen_store_gpr(t0, rt); \
969 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
970 static inline void op_ldst_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
972 TCGv t0 = tcg_temp_new(); \
973 gen_helper_3i(insn, t0, arg1, arg2, ctx->mem_idx); \
974 gen_store_gpr(t0, rt); \
978 OP_ST_ATOMIC(sc,st32,ld32s,0x3);
979 #if defined(TARGET_MIPS64)
980 OP_ST_ATOMIC(scd,st64,ld64,0x7);
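/* Effective address = GPR[base] + sign-extended 16-bit offset; the add
   (including the MIPS64 32-bit sign-extension handled by gen_op_addr_add)
   is skipped when base is r0, and a zero offset just copies the base
   register. */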
984 static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
985 int base, int16_t offset)
988 tcg_gen_movi_tl(addr, offset);
989 } else if (offset == 0) {
990 gen_load_gpr(addr, base);
992 tcg_gen_movi_tl(addr, offset);
993 gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
998 static void gen_ldst (DisasContext *ctx, uint32_t opc, int rt,
999 int base, int16_t offset)
1001 const char *opn = "ldst";
1002 TCGv t0 = tcg_temp_new();
1003 TCGv t1 = tcg_temp_new();
1005 gen_base_offset_addr(ctx, t0, base, offset);
1006 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
1009 #if defined(TARGET_MIPS64)
1011 save_cpu_state(ctx, 0);
1012 op_ldst_lwu(t0, t0, ctx);
1013 gen_store_gpr(t0, rt);
1017 save_cpu_state(ctx, 0);
1018 op_ldst_ld(t0, t0, ctx);
1019 gen_store_gpr(t0, rt);
1023 save_cpu_state(ctx, 0);
1024 op_ldst_lld(t0, t0, ctx);
1025 gen_store_gpr(t0, rt);
1029 save_cpu_state(ctx, 0);
1030 gen_load_gpr(t1, rt);
1031 op_ldst_sd(t1, t0, ctx);
1035 save_cpu_state(ctx, 1);
1036 gen_load_gpr(t1, rt);
1037 gen_helper_3i(ldl, t1, t1, t0, ctx->mem_idx);
1038 gen_store_gpr(t1, rt);
1042 save_cpu_state(ctx, 1);
1043 gen_load_gpr(t1, rt);
1044 gen_helper_2i(sdl, t1, t0, ctx->mem_idx);
1048 save_cpu_state(ctx, 1);
1049 gen_load_gpr(t1, rt);
1050 gen_helper_3i(ldr, t1, t1, t0, ctx->mem_idx);
1051 gen_store_gpr(t1, rt);
1055 save_cpu_state(ctx, 1);
1056 gen_load_gpr(t1, rt);
1057 gen_helper_2i(sdr, t1, t0, ctx->mem_idx);
1062 save_cpu_state(ctx, 0);
1063 op_ldst_lw(t0, t0, ctx);
1064 gen_store_gpr(t0, rt);
1068 save_cpu_state(ctx, 0);
1069 gen_load_gpr(t1, rt);
1070 op_ldst_sw(t1, t0, ctx);
1074 save_cpu_state(ctx, 0);
1075 op_ldst_lh(t0, t0, ctx);
1076 gen_store_gpr(t0, rt);
1080 save_cpu_state(ctx, 0);
1081 gen_load_gpr(t1, rt);
1082 op_ldst_sh(t1, t0, ctx);
1086 save_cpu_state(ctx, 0);
1087 op_ldst_lhu(t0, t0, ctx);
1088 gen_store_gpr(t0, rt);
1092 save_cpu_state(ctx, 0);
1093 op_ldst_lb(t0, t0, ctx);
1094 gen_store_gpr(t0, rt);
1098 save_cpu_state(ctx, 0);
1099 gen_load_gpr(t1, rt);
1100 op_ldst_sb(t1, t0, ctx);
1104 save_cpu_state(ctx, 0);
1105 op_ldst_lbu(t0, t0, ctx);
1106 gen_store_gpr(t0, rt);
1110 save_cpu_state(ctx, 1);
1111 gen_load_gpr(t1, rt);
1112 gen_helper_3i(lwl, t1, t1, t0, ctx->mem_idx);
1113 gen_store_gpr(t1, rt);
1117 save_cpu_state(ctx, 1);
1118 gen_load_gpr(t1, rt);
1119 gen_helper_2i(swl, t1, t0, ctx->mem_idx);
1123 save_cpu_state(ctx, 1);
1124 gen_load_gpr(t1, rt);
1125 gen_helper_3i(lwr, t1, t1, t0, ctx->mem_idx);
1126 gen_store_gpr(t1, rt);
1130 save_cpu_state(ctx, 1);
1131 gen_load_gpr(t1, rt);
1132 gen_helper_2i(swr, t1, t0, ctx->mem_idx);
1136 save_cpu_state(ctx, 1);
1137 op_ldst_ll(t0, t0, ctx);
1138 gen_store_gpr(t0, rt);
1142 MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
1147 /* Store conditional */
1148 static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
1149 int base, int16_t offset)
1151 const char *opn = "st_cond";
1154 t0 = tcg_temp_local_new();
1156 gen_base_offset_addr(ctx, t0, base, offset);
1157 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
1160 t1 = tcg_temp_local_new();
1161 gen_load_gpr(t1, rt);
1163 #if defined(TARGET_MIPS64)
1165 save_cpu_state(ctx, 0);
1166 op_ldst_scd(t1, t0, rt, ctx);
1171 save_cpu_state(ctx, 1);
1172 op_ldst_sc(t1, t0, rt, ctx);
1176 MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
1181 /* Load and store */
1182 static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
1183 int base, int16_t offset)
1185 const char *opn = "flt_ldst";
1186 TCGv t0 = tcg_temp_new();
1188 gen_base_offset_addr(ctx, t0, base, offset);
1189 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
1194 TCGv_i32 fp0 = tcg_temp_new_i32();
1196 tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
1197 tcg_gen_trunc_tl_i32(fp0, t0);
1198 gen_store_fpr32(fp0, ft);
1199 tcg_temp_free_i32(fp0);
1205 TCGv_i32 fp0 = tcg_temp_new_i32();
1206 TCGv t1 = tcg_temp_new();
1208 gen_load_fpr32(fp0, ft);
1209 tcg_gen_extu_i32_tl(t1, fp0);
1210 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
1212 tcg_temp_free_i32(fp0);
1218 TCGv_i64 fp0 = tcg_temp_new_i64();
1220 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
1221 gen_store_fpr64(ctx, fp0, ft);
1222 tcg_temp_free_i64(fp0);
1228 TCGv_i64 fp0 = tcg_temp_new_i64();
1230 gen_load_fpr64(ctx, fp0, ft);
1231 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
1232 tcg_temp_free_i64(fp0);
1238 generate_exception(ctx, EXCP_RI);
1241 MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
1246 /* Arithmetic with immediate operand */
1247 static void gen_arith_imm (CPUState *env, DisasContext *ctx, uint32_t opc,
1248 int rt, int rs, int16_t imm)
1250 target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
1251 const char *opn = "imm arith";
1253 if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
1254 /* If no destination, treat it as a NOP.
1255 For addi, we must generate the overflow exception when needed. */
1262 TCGv t0 = tcg_temp_local_new();
1263 TCGv t1 = tcg_temp_new();
1264 TCGv t2 = tcg_temp_new();
1265 int l1 = gen_new_label();
1267 gen_load_gpr(t1, rs);
1268 tcg_gen_addi_tl(t0, t1, uimm);
1269 tcg_gen_ext32s_tl(t0, t0);
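/* Signed-overflow check for ADDI: overflow occurred iff rs and the
   immediate have the same sign while the truncated result's sign differs,
   i.e. the sign bit of (rs ^ ~imm) & (result ^ imm) is set. */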
1271 tcg_gen_xori_tl(t1, t1, ~uimm);
1272 tcg_gen_xori_tl(t2, t0, uimm);
1273 tcg_gen_and_tl(t1, t1, t2);
1275 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1277 /* operands of same sign, result different sign */
1278 generate_exception(ctx, EXCP_OVERFLOW);
1280 tcg_gen_ext32s_tl(t0, t0);
1281 gen_store_gpr(t0, rt);
1288 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1289 tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
1291 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1295 #if defined(TARGET_MIPS64)
1298 TCGv t0 = tcg_temp_local_new();
1299 TCGv t1 = tcg_temp_new();
1300 TCGv t2 = tcg_temp_new();
1301 int l1 = gen_new_label();
1303 gen_load_gpr(t1, rs);
1304 tcg_gen_addi_tl(t0, t1, uimm);
1306 tcg_gen_xori_tl(t1, t1, ~uimm);
1307 tcg_gen_xori_tl(t2, t0, uimm);
1308 tcg_gen_and_tl(t1, t1, t2);
1310 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1312 /* operands of same sign, result different sign */
1313 generate_exception(ctx, EXCP_OVERFLOW);
1315 gen_store_gpr(t0, rt);
1322 tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1324 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1330 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1333 /* Logic with immediate operand */
1334 static void gen_logic_imm (CPUState *env, uint32_t opc, int rt, int rs, int16_t imm)
1337 const char *opn = "imm logic";
1340 /* If no destination, treat it as a NOP. */
1344 uimm = (uint16_t)imm;
1347 if (likely(rs != 0))
1348 tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1350 tcg_gen_movi_tl(cpu_gpr[rt], 0);
1355 tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1357 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1361 if (likely(rs != 0))
1362 tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
1364 tcg_gen_movi_tl(cpu_gpr[rt], uimm);
1368 tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);
1372 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1375 /* Set on less than with immediate operand */
1376 static void gen_slt_imm (CPUState *env, uint32_t opc, int rt, int rs, int16_t imm)
1378 target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
1379 const char *opn = "imm arith";
1383 /* If no destination, treat it as a NOP. */
1387 t0 = tcg_temp_new();
1388 gen_load_gpr(t0, rs);
1391 gen_op_lti(cpu_gpr[rt], t0, uimm);
1395 gen_op_ltiu(cpu_gpr[rt], t0, uimm);
1399 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1403 /* Shifts with immediate operand */
1404 static void gen_shift_imm(CPUState *env, DisasContext *ctx, uint32_t opc,
1405 int rt, int rs, int16_t imm)
1407 target_ulong uimm = ((uint16_t)imm) & 0x1f;
1408 const char *opn = "imm shift";
1412 /* If no destination, treat it as a NOP. */
1417 t0 = tcg_temp_new();
1418 gen_load_gpr(t0, rs);
1421 tcg_gen_shli_tl(t0, t0, uimm);
1422 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
1426 tcg_gen_ext32s_tl(t0, t0);
1427 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
1432 tcg_gen_ext32u_tl(t0, t0);
1433 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1435 tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
1441 TCGv_i32 t1 = tcg_temp_new_i32();
1443 tcg_gen_trunc_tl_i32(t1, t0);
1444 tcg_gen_rotri_i32(t1, t1, uimm);
1445 tcg_gen_ext_i32_tl(cpu_gpr[rt], t1);
1446 tcg_temp_free_i32(t1);
1450 #if defined(TARGET_MIPS64)
1452 tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm);
1456 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
1460 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
1465 tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm);
1470 tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm + 32);
1474 tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm + 32);
1478 tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
1482 tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm + 32);
1487 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
1492 static void gen_arith (CPUState *env, DisasContext *ctx, uint32_t opc,
1493 int rd, int rs, int rt)
1495 const char *opn = "arith";
1497 if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
1498 && opc != OPC_DADD && opc != OPC_DSUB) {
1499 /* If no destination, treat it as a NOP.
1500 For add & sub, we must generate the overflow exception when needed. */
1508 TCGv t0 = tcg_temp_local_new();
1509 TCGv t1 = tcg_temp_new();
1510 TCGv t2 = tcg_temp_new();
1511 int l1 = gen_new_label();
1513 gen_load_gpr(t1, rs);
1514 gen_load_gpr(t2, rt);
1515 tcg_gen_add_tl(t0, t1, t2);
1516 tcg_gen_ext32s_tl(t0, t0);
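/* Same sign-bit trick as for ADDI: overflow iff rs and rt share a sign
   and the sign of the truncated result differs. */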
1517 tcg_gen_xor_tl(t1, t1, t2);
1518 tcg_gen_not_tl(t1, t1);
1519 tcg_gen_xor_tl(t2, t0, t2);
1520 tcg_gen_and_tl(t1, t1, t2);
1522 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1524 /* operands of same sign, result different sign */
1525 generate_exception(ctx, EXCP_OVERFLOW);
1527 gen_store_gpr(t0, rd);
1533 if (rs != 0 && rt != 0) {
1534 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1535 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1536 } else if (rs == 0 && rt != 0) {
1537 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1538 } else if (rs != 0 && rt == 0) {
1539 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1541 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1547 TCGv t0 = tcg_temp_local_new();
1548 TCGv t1 = tcg_temp_new();
1549 TCGv t2 = tcg_temp_new();
1550 int l1 = gen_new_label();
1552 gen_load_gpr(t1, rs);
1553 gen_load_gpr(t2, rt);
1554 tcg_gen_sub_tl(t0, t1, t2);
1555 tcg_gen_ext32s_tl(t0, t0);
1556 tcg_gen_xor_tl(t2, t1, t2);
1557 tcg_gen_xor_tl(t1, t0, t1);
1558 tcg_gen_and_tl(t1, t1, t2);
1560 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1562 /* operands of different sign, first operand and result different sign */
1563 generate_exception(ctx, EXCP_OVERFLOW);
1565 gen_store_gpr(t0, rd);
1571 if (rs != 0 && rt != 0) {
1572 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1573 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1574 } else if (rs == 0 && rt != 0) {
1575 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
1576 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1577 } else if (rs != 0 && rt == 0) {
1578 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1580 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1584 #if defined(TARGET_MIPS64)
1587 TCGv t0 = tcg_temp_local_new();
1588 TCGv t1 = tcg_temp_new();
1589 TCGv t2 = tcg_temp_new();
1590 int l1 = gen_new_label();
1592 gen_load_gpr(t1, rs);
1593 gen_load_gpr(t2, rt);
1594 tcg_gen_add_tl(t0, t1, t2);
1595 tcg_gen_xor_tl(t1, t1, t2);
1596 tcg_gen_not_tl(t1, t1);
1597 tcg_gen_xor_tl(t2, t0, t2);
1598 tcg_gen_and_tl(t1, t1, t2);
1600 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1602 /* operands of same sign, result different sign */
1603 generate_exception(ctx, EXCP_OVERFLOW);
1605 gen_store_gpr(t0, rd);
1611 if (rs != 0 && rt != 0) {
1612 tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1613 } else if (rs == 0 && rt != 0) {
1614 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1615 } else if (rs != 0 && rt == 0) {
1616 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1618 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1624 TCGv t0 = tcg_temp_local_new();
1625 TCGv t1 = tcg_temp_new();
1626 TCGv t2 = tcg_temp_new();
1627 int l1 = gen_new_label();
1629 gen_load_gpr(t1, rs);
1630 gen_load_gpr(t2, rt);
1631 tcg_gen_sub_tl(t0, t1, t2);
1632 tcg_gen_xor_tl(t2, t1, t2);
1633 tcg_gen_xor_tl(t1, t0, t1);
1634 tcg_gen_and_tl(t1, t1, t2);
1636 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
1638 /* operands of different sign, first operand and result different sign */
1639 generate_exception(ctx, EXCP_OVERFLOW);
1641 gen_store_gpr(t0, rd);
1647 if (rs != 0 && rt != 0) {
1648 tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1649 } else if (rs == 0 && rt != 0) {
1650 tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
1651 } else if (rs != 0 && rt == 0) {
1652 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1654 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1660 if (likely(rs != 0 && rt != 0)) {
1661 tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1662 tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
1664 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1669 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1672 /* Conditional move */
1673 static void gen_cond_move (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1675 const char *opn = "cond move";
1679 /* If no destination, treat it as a NOP. */
1685 l1 = gen_new_label();
1688 if (likely(rt != 0))
1689 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rt], 0, l1);
1695 if (likely(rt != 0))
1696 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rt], 0, l1);
1701 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1703 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1706 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1710 static void gen_logic (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1712 const char *opn = "logic";
1715 /* If no destination, treat it as a NOP. */
1722 if (likely(rs != 0 && rt != 0)) {
1723 tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1725 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1730 if (rs != 0 && rt != 0) {
1731 tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1732 } else if (rs == 0 && rt != 0) {
1733 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
1734 } else if (rs != 0 && rt == 0) {
1735 tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
1737 tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));
1742 if (likely(rs != 0 && rt != 0)) {
1743 tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1744 } else if (rs == 0 && rt != 0) {
1745 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1746 } else if (rs != 0 && rt == 0) {
1747 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1749 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1754 if (likely(rs != 0 && rt != 0)) {
1755 tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
1756 } else if (rs == 0 && rt != 0) {
1757 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
1758 } else if (rs != 0 && rt == 0) {
1759 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
1761 tcg_gen_movi_tl(cpu_gpr[rd], 0);
1766 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1769 /* Set on less than */
1770 static void gen_slt (CPUState *env, uint32_t opc, int rd, int rs, int rt)
1772 const char *opn = "slt";
1776 /* If no destination, treat it as a NOP. */
1781 t0 = tcg_temp_new();
1782 t1 = tcg_temp_new();
1783 gen_load_gpr(t0, rs);
1784 gen_load_gpr(t1, rt);
1787 gen_op_lt(cpu_gpr[rd], t0, t1);
1791 gen_op_ltu(cpu_gpr[rd], t0, t1);
1795 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1801 static void gen_shift (CPUState *env, DisasContext *ctx, uint32_t opc,
1802 int rd, int rs, int rt)
1804 const char *opn = "shifts";
1808 /* If no destination, treat it as a NOP. */
1814 t0 = tcg_temp_new();
1815 t1 = tcg_temp_new();
1816 gen_load_gpr(t0, rs);
1817 gen_load_gpr(t1, rt);
1820 tcg_gen_andi_tl(t0, t0, 0x1f);
1821 tcg_gen_shl_tl(t0, t1, t0);
1822 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1826 tcg_gen_ext32s_tl(t1, t1);
1827 tcg_gen_andi_tl(t0, t0, 0x1f);
1828 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
1832 tcg_gen_ext32u_tl(t1, t1);
1833 tcg_gen_andi_tl(t0, t0, 0x1f);
1834 tcg_gen_shr_tl(t0, t1, t0);
1835 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
1840 TCGv_i32 t2 = tcg_temp_new_i32();
1841 TCGv_i32 t3 = tcg_temp_new_i32();
1843 tcg_gen_trunc_tl_i32(t2, t0);
1844 tcg_gen_trunc_tl_i32(t3, t1);
1845 tcg_gen_andi_i32(t2, t2, 0x1f);
1846 tcg_gen_rotr_i32(t2, t3, t2);
1847 tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
1848 tcg_temp_free_i32(t2);
1849 tcg_temp_free_i32(t3);
1853 #if defined(TARGET_MIPS64)
1855 tcg_gen_andi_tl(t0, t0, 0x3f);
1856 tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);
1860 tcg_gen_andi_tl(t0, t0, 0x3f);
1861 tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);
1865 tcg_gen_andi_tl(t0, t0, 0x3f);
1866 tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);
1870 tcg_gen_andi_tl(t0, t0, 0x3f);
1871 tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);
1876 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
1881 /* Arithmetic on HI/LO registers */
1882 static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
1884 const char *opn = "hilo";
1886 if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
1893 tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[0]);
1897 tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[0]);
1902 tcg_gen_mov_tl(cpu_HI[0], cpu_gpr[reg]);
1904 tcg_gen_movi_tl(cpu_HI[0], 0);
1909 tcg_gen_mov_tl(cpu_LO[0], cpu_gpr[reg]);
1911 tcg_gen_movi_tl(cpu_LO[0], 0);
1915 MIPS_DEBUG("%s %s", opn, regnames[reg]);
1918 static void gen_muldiv (DisasContext *ctx, uint32_t opc,
1921 const char *opn = "mul/div";
1927 #if defined(TARGET_MIPS64)
1931 t0 = tcg_temp_local_new();
1932 t1 = tcg_temp_local_new();
1935 t0 = tcg_temp_new();
1936 t1 = tcg_temp_new();
1940 gen_load_gpr(t0, rs);
1941 gen_load_gpr(t1, rt);
1945 int l1 = gen_new_label();
1946 int l2 = gen_new_label();
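/* DIV: a zero divisor skips the operation entirely (HI/LO are left
   untouched, the architected result being UNPREDICTABLE), and INT_MIN / -1,
   which would trap in the host division, is special-cased to LO = INT_MIN,
   HI = 0. */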
1948 tcg_gen_ext32s_tl(t0, t0);
1949 tcg_gen_ext32s_tl(t1, t1);
1950 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
1951 tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
1952 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
1954 tcg_gen_mov_tl(cpu_LO[0], t0);
1955 tcg_gen_movi_tl(cpu_HI[0], 0);
1958 tcg_gen_div_tl(cpu_LO[0], t0, t1);
1959 tcg_gen_rem_tl(cpu_HI[0], t0, t1);
1960 tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
1961 tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
1968 int l1 = gen_new_label();
1970 tcg_gen_ext32u_tl(t0, t0);
1971 tcg_gen_ext32u_tl(t1, t1);
1972 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
1973 tcg_gen_divu_tl(cpu_LO[0], t0, t1);
1974 tcg_gen_remu_tl(cpu_HI[0], t0, t1);
1975 tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
1976 tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);
1983 TCGv_i64 t2 = tcg_temp_new_i64();
1984 TCGv_i64 t3 = tcg_temp_new_i64();
1986 tcg_gen_ext_tl_i64(t2, t0);
1987 tcg_gen_ext_tl_i64(t3, t1);
1988 tcg_gen_mul_i64(t2, t2, t3);
1989 tcg_temp_free_i64(t3);
1990 tcg_gen_trunc_i64_tl(t0, t2);
1991 tcg_gen_shri_i64(t2, t2, 32);
1992 tcg_gen_trunc_i64_tl(t1, t2);
1993 tcg_temp_free_i64(t2);
1994 tcg_gen_ext32s_tl(cpu_LO[0], t0);
1995 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2001 TCGv_i64 t2 = tcg_temp_new_i64();
2002 TCGv_i64 t3 = tcg_temp_new_i64();
2004 tcg_gen_ext32u_tl(t0, t0);
2005 tcg_gen_ext32u_tl(t1, t1);
2006 tcg_gen_extu_tl_i64(t2, t0);
2007 tcg_gen_extu_tl_i64(t3, t1);
2008 tcg_gen_mul_i64(t2, t2, t3);
2009 tcg_temp_free_i64(t3);
2010 tcg_gen_trunc_i64_tl(t0, t2);
2011 tcg_gen_shri_i64(t2, t2, 32);
2012 tcg_gen_trunc_i64_tl(t1, t2);
2013 tcg_temp_free_i64(t2);
2014 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2015 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2019 #if defined(TARGET_MIPS64)
2022 int l1 = gen_new_label();
2023 int l2 = gen_new_label();
2025 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2026 tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
2027 tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
2028 tcg_gen_mov_tl(cpu_LO[0], t0);
2029 tcg_gen_movi_tl(cpu_HI[0], 0);
2032 tcg_gen_div_i64(cpu_LO[0], t0, t1);
2033 tcg_gen_rem_i64(cpu_HI[0], t0, t1);
2040 int l1 = gen_new_label();
2042 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
2043 tcg_gen_divu_i64(cpu_LO[0], t0, t1);
2044 tcg_gen_remu_i64(cpu_HI[0], t0, t1);
2050 gen_helper_dmult(t0, t1);
2054 gen_helper_dmultu(t0, t1);
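/* MADD(U)/MSUB(U): rebuild the 64-bit HI:LO accumulator with
   concat_tl_i64, add or subtract the widened 32x32 product, then split the
   result back into sign-extended LO and HI. */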
2060 TCGv_i64 t2 = tcg_temp_new_i64();
2061 TCGv_i64 t3 = tcg_temp_new_i64();
2063 tcg_gen_ext_tl_i64(t2, t0);
2064 tcg_gen_ext_tl_i64(t3, t1);
2065 tcg_gen_mul_i64(t2, t2, t3);
2066 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2067 tcg_gen_add_i64(t2, t2, t3);
2068 tcg_temp_free_i64(t3);
2069 tcg_gen_trunc_i64_tl(t0, t2);
2070 tcg_gen_shri_i64(t2, t2, 32);
2071 tcg_gen_trunc_i64_tl(t1, t2);
2072 tcg_temp_free_i64(t2);
2073 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2074 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2080 TCGv_i64 t2 = tcg_temp_new_i64();
2081 TCGv_i64 t3 = tcg_temp_new_i64();
2083 tcg_gen_ext32u_tl(t0, t0);
2084 tcg_gen_ext32u_tl(t1, t1);
2085 tcg_gen_extu_tl_i64(t2, t0);
2086 tcg_gen_extu_tl_i64(t3, t1);
2087 tcg_gen_mul_i64(t2, t2, t3);
2088 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2089 tcg_gen_add_i64(t2, t2, t3);
2090 tcg_temp_free_i64(t3);
2091 tcg_gen_trunc_i64_tl(t0, t2);
2092 tcg_gen_shri_i64(t2, t2, 32);
2093 tcg_gen_trunc_i64_tl(t1, t2);
2094 tcg_temp_free_i64(t2);
2095 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2096 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2102 TCGv_i64 t2 = tcg_temp_new_i64();
2103 TCGv_i64 t3 = tcg_temp_new_i64();
2105 tcg_gen_ext_tl_i64(t2, t0);
2106 tcg_gen_ext_tl_i64(t3, t1);
2107 tcg_gen_mul_i64(t2, t2, t3);
2108 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2109 tcg_gen_sub_i64(t2, t3, t2);
2110 tcg_temp_free_i64(t3);
2111 tcg_gen_trunc_i64_tl(t0, t2);
2112 tcg_gen_shri_i64(t2, t2, 32);
2113 tcg_gen_trunc_i64_tl(t1, t2);
2114 tcg_temp_free_i64(t2);
2115 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2116 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2122 TCGv_i64 t2 = tcg_temp_new_i64();
2123 TCGv_i64 t3 = tcg_temp_new_i64();
2125 tcg_gen_ext32u_tl(t0, t0);
2126 tcg_gen_ext32u_tl(t1, t1);
2127 tcg_gen_extu_tl_i64(t2, t0);
2128 tcg_gen_extu_tl_i64(t3, t1);
2129 tcg_gen_mul_i64(t2, t2, t3);
2130 tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
2131 tcg_gen_sub_i64(t2, t3, t2);
2132 tcg_temp_free_i64(t3);
2133 tcg_gen_trunc_i64_tl(t0, t2);
2134 tcg_gen_shri_i64(t2, t2, 32);
2135 tcg_gen_trunc_i64_tl(t1, t2);
2136 tcg_temp_free_i64(t2);
2137 tcg_gen_ext32s_tl(cpu_LO[0], t0);
2138 tcg_gen_ext32s_tl(cpu_HI[0], t1);
2144 generate_exception(ctx, EXCP_RI);
2147 MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
2153 static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
2154 int rd, int rs, int rt)
2156 const char *opn = "mul vr54xx";
2157 TCGv t0 = tcg_temp_new();
2158 TCGv t1 = tcg_temp_new();
2160 gen_load_gpr(t0, rs);
2161 gen_load_gpr(t1, rt);
2164 case OPC_VR54XX_MULS:
2165 gen_helper_muls(t0, t0, t1);
2168 case OPC_VR54XX_MULSU:
2169 gen_helper_mulsu(t0, t0, t1);
2172 case OPC_VR54XX_MACC:
2173 gen_helper_macc(t0, t0, t1);
2176 case OPC_VR54XX_MACCU:
2177 gen_helper_maccu(t0, t0, t1);
2180 case OPC_VR54XX_MSAC:
2181 gen_helper_msac(t0, t0, t1);
2184 case OPC_VR54XX_MSACU:
2185 gen_helper_msacu(t0, t0, t1);
2188 case OPC_VR54XX_MULHI:
2189 gen_helper_mulhi(t0, t0, t1);
2192 case OPC_VR54XX_MULHIU:
2193 gen_helper_mulhiu(t0, t0, t1);
2196 case OPC_VR54XX_MULSHI:
2197 gen_helper_mulshi(t0, t0, t1);
2200 case OPC_VR54XX_MULSHIU:
2201 gen_helper_mulshiu(t0, t0, t1);
2204 case OPC_VR54XX_MACCHI:
2205 gen_helper_macchi(t0, t0, t1);
2208 case OPC_VR54XX_MACCHIU:
2209 gen_helper_macchiu(t0, t0, t1);
2212 case OPC_VR54XX_MSACHI:
2213 gen_helper_msachi(t0, t0, t1);
2216 case OPC_VR54XX_MSACHIU:
2217 gen_helper_msachiu(t0, t0, t1);
2221 MIPS_INVAL("mul vr54xx");
2222 generate_exception(ctx, EXCP_RI);
2225 gen_store_gpr(t0, rd);
2226 MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
2233 static void gen_cl (DisasContext *ctx, uint32_t opc,
2236 const char *opn = "CLx";
2244 t0 = tcg_temp_new();
2245 gen_load_gpr(t0, rs);
2248 gen_helper_clo(cpu_gpr[rd], t0);
2252 gen_helper_clz(cpu_gpr[rd], t0);
2255 #if defined(TARGET_MIPS64)
2257 gen_helper_dclo(cpu_gpr[rd], t0);
2261 gen_helper_dclz(cpu_gpr[rd], t0);
2266 MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);
2271 static void gen_trap (DisasContext *ctx, uint32_t opc,
2272 int rs, int rt, int16_t imm)
2275 TCGv t0 = tcg_temp_new();
2276 TCGv t1 = tcg_temp_new();
2279 /* Load needed operands */
2287 /* Compare two registers */
2289 gen_load_gpr(t0, rs);
2290 gen_load_gpr(t1, rt);
2300 /* Compare register to immediate */
2301 if (rs != 0 || imm != 0) {
2302 gen_load_gpr(t0, rs);
2303 tcg_gen_movi_tl(t1, (int32_t)imm);
2310 case OPC_TEQ: /* rs == rs */
2311 case OPC_TEQI: /* r0 == 0 */
2312 case OPC_TGE: /* rs >= rs */
2313 case OPC_TGEI: /* r0 >= 0 */
2314 case OPC_TGEU: /* rs >= rs unsigned */
2315 case OPC_TGEIU: /* r0 >= 0 unsigned */
2317 generate_exception(ctx, EXCP_TRAP);
2319 case OPC_TLT: /* rs < rs */
2320 case OPC_TLTI: /* r0 < 0 */
2321 case OPC_TLTU: /* rs < rs unsigned */
2322 case OPC_TLTIU: /* r0 < 0 unsigned */
2323 case OPC_TNE: /* rs != rs */
2324 case OPC_TNEI: /* r0 != 0 */
2325 /* Never trap: treat as NOP. */
2329 int l1 = gen_new_label();
2334 tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
2338 tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
2342 tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
2346 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
2350 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
2354 tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
2357 generate_exception(ctx, EXCP_TRAP);
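/* Direct block chaining: when the destination stays on the same guest page
   as the current TB and single-stepping is off, exit through goto_tb slot n
   so the blocks can be linked; under single-step a debug exception is raised
   instead of chaining. */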
2364 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
2366 TranslationBlock *tb;
2368 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
2369 likely(!ctx->singlestep_enabled)) {
2372 tcg_gen_exit_tb((long)tb + n);
2375 if (ctx->singlestep_enabled) {
2376 save_cpu_state(ctx, 0);
2377 gen_helper_0i(raise_exception, EXCP_DEBUG);
2383 /* Branches (before delay slot) */
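/* The branch kind is recorded in ctx->hflags for the delay-slot handling:
   MIPS_HFLAG_B marks an unconditionally taken branch, MIPS_HFLAG_BR a jump
   through a register (target in btarget), MIPS_HFLAG_BC a conditional branch
   (condition in bcond) and MIPS_HFLAG_BL a branch-likely whose delay slot is
   annulled when the branch is not taken. */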
2384 static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
2386 int rs, int rt, int32_t offset)
2388 target_ulong btgt = -1;
2390 int bcond_compute = 0;
2391 TCGv t0 = tcg_temp_new();
2392 TCGv t1 = tcg_temp_new();
2394 if (ctx->hflags & MIPS_HFLAG_BMASK) {
2395 #ifdef MIPS_DEBUG_DISAS
2396 LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx "\n", ctx->pc);
2398 generate_exception(ctx, EXCP_RI);
2402 /* Load needed operands */
2408 /* Compare two registers */
2410 gen_load_gpr(t0, rs);
2411 gen_load_gpr(t1, rt);
2414 btgt = ctx->pc + insn_bytes + offset;
2428 /* Compare to zero */
2430 gen_load_gpr(t0, rs);
2433 btgt = ctx->pc + insn_bytes + offset;
2437 /* Jump to immediate */
2438 btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
2442 /* Jump to register */
2443 if (offset != 0 && offset != 16) {
2444 /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
2445 others are reserved. */
2446 MIPS_INVAL("jump hint");
2447 generate_exception(ctx, EXCP_RI);
2450 gen_load_gpr(btarget, rs);
2453 MIPS_INVAL("branch/jump");
2454 generate_exception(ctx, EXCP_RI);
2457 if (bcond_compute == 0) {
2458 /* No condition to be computed */
2460 case OPC_BEQ: /* rx == rx */
2461 case OPC_BEQL: /* rx == rx likely */
2462 case OPC_BGEZ: /* 0 >= 0 */
2463 case OPC_BGEZL: /* 0 >= 0 likely */
2464 case OPC_BLEZ: /* 0 <= 0 */
2465 case OPC_BLEZL: /* 0 <= 0 likely */
2467 ctx->hflags |= MIPS_HFLAG_B;
2468 MIPS_DEBUG("balways");
2470 case OPC_BGEZAL: /* 0 >= 0 */
2471 case OPC_BGEZALL: /* 0 >= 0 likely */
2472 /* Always take and link */
2474 ctx->hflags |= MIPS_HFLAG_B;
2475 MIPS_DEBUG("balways and link");
2477 case OPC_BNE: /* rx != rx */
2478 case OPC_BGTZ: /* 0 > 0 */
2479 case OPC_BLTZ: /* 0 < 0 */
2481 MIPS_DEBUG("bnever (NOP)");
2483 case OPC_BLTZAL: /* 0 < 0 */
2484 tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
2485 MIPS_DEBUG("bnever and link");
2487 case OPC_BLTZALL: /* 0 < 0 likely */
2488 tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
2489 /* Skip the instruction in the delay slot */
2490 MIPS_DEBUG("bnever, link and skip");
2493 case OPC_BNEL: /* rx != rx likely */
2494 case OPC_BGTZL: /* 0 > 0 likely */
2495 case OPC_BLTZL: /* 0 < 0 likely */
2496 /* Skip the instruction in the delay slot */
2497 MIPS_DEBUG("bnever and skip");
2501 ctx->hflags |= MIPS_HFLAG_B;
2502 MIPS_DEBUG("j " TARGET_FMT_lx, btgt);
2506 ctx->hflags |= MIPS_HFLAG_B;
2507 MIPS_DEBUG("jal " TARGET_FMT_lx, btgt);
2510 ctx->hflags |= MIPS_HFLAG_BR;
2511 MIPS_DEBUG("jr %s", regnames[rs]);
2515 ctx->hflags |= MIPS_HFLAG_BR;
2516 MIPS_DEBUG("jalr %s, %s", regnames[rt], regnames[rs]);
2519 MIPS_INVAL("branch/jump");
2520 generate_exception(ctx, EXCP_RI);
2526 gen_op_eq(bcond, t0, t1);
2527 MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx,
2528 regnames[rs], regnames[rt], btgt);
2531 gen_op_eq(bcond, t0, t1);
2532 MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx,
2533 regnames[rs], regnames[rt], btgt);
2536 gen_op_ne(bcond, t0, t1);
2537 MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx,
2538 regnames[rs], regnames[rt], btgt);
2541 gen_op_ne(bcond, t0, t1);
2542 MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx,
2543 regnames[rs], regnames[rt], btgt);
2546 gen_op_gez(bcond, t0);
2547 MIPS_DEBUG("bgez %s, " TARGET_FMT_lx, regnames[rs], btgt);
2550 gen_op_gez(bcond, t0);
2551 MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2554 gen_op_gez(bcond, t0);
2555 MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx, regnames[rs], btgt);
2559 gen_op_gez(bcond, t0);
2561 MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx, regnames[rs], btgt);
2564 gen_op_gtz(bcond, t0);
2565 MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx, regnames[rs], btgt);
2568 gen_op_gtz(bcond, t0);
2569 MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2572 gen_op_lez(bcond, t0);
2573 MIPS_DEBUG("blez %s, " TARGET_FMT_lx, regnames[rs], btgt);
2576 gen_op_lez(bcond, t0);
2577 MIPS_DEBUG("blezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2580 gen_op_ltz(bcond, t0);
2581 MIPS_DEBUG("bltz %s, " TARGET_FMT_lx, regnames[rs], btgt);
2584 gen_op_ltz(bcond, t0);
2585 MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
2588 gen_op_ltz(bcond, t0);
2590 MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx, regnames[rs], btgt);
2592 ctx->hflags |= MIPS_HFLAG_BC;
2595 gen_op_ltz(bcond, t0);
2597 MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx, regnames[rs], btgt);
2599 ctx->hflags |= MIPS_HFLAG_BL;
2602 MIPS_INVAL("conditional branch/jump");
2603 generate_exception(ctx, EXCP_RI);
2607 MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx,
2608 blink, ctx->hflags, btgt);
2610 ctx->btarget = btgt;
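/* For the linking variants, write the return address (ctx->pc + 8, the
   instruction following the delay slot) before the delay slot runs. */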
2612 tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + 8);
2620 /* special3 bitfield operations */
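/* EXT copies a field of msb+1 bits starting at bit lsb of rs into rt,
   e.g. lsb = 4, msb = 7 reads the eight bits at positions 4..11. INS
   replaces bits msb..lsb of rt with the low-order bits of rs. The
   DEXTM/DEXTU/DINSM/DINSU variants extend the size or position by 32 so
   that doubleword fields reaching into the upper word can be encoded. */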
2621 static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
2622 int rs, int lsb, int msb)
2624 TCGv t0 = tcg_temp_new();
2625 TCGv t1 = tcg_temp_new();
2628 gen_load_gpr(t1, rs);
2633 tcg_gen_shri_tl(t0, t1, lsb);
2635 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
2637 tcg_gen_ext32s_tl(t0, t0);
2640 #if defined(TARGET_MIPS64)
2642 tcg_gen_shri_tl(t0, t1, lsb);
2644 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
2648 tcg_gen_shri_tl(t0, t1, lsb + 32);
2649 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
2652 tcg_gen_shri_tl(t0, t1, lsb);
2653 tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
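/* INS: build the destination field mask (the ternary avoids an undefined
   32-bit shift for a full-width field), clear the field in rt and OR in
   the low bits of rs shifted up to lsb. */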
2659 mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
2660 gen_load_gpr(t0, rt);
2661 tcg_gen_andi_tl(t0, t0, ~mask);
2662 tcg_gen_shli_tl(t1, t1, lsb);
2663 tcg_gen_andi_tl(t1, t1, mask);
2664 tcg_gen_or_tl(t0, t0, t1);
2665 tcg_gen_ext32s_tl(t0, t0);
2667 #if defined(TARGET_MIPS64)
2671 mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
2672 gen_load_gpr(t0, rt);
2673 tcg_gen_andi_tl(t0, t0, ~mask);
2674 tcg_gen_shli_tl(t1, t1, lsb);
2675 tcg_gen_andi_tl(t1, t1, mask);
2676 tcg_gen_or_tl(t0, t0, t1);
2681 mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
2682 gen_load_gpr(t0, rt);
2683 tcg_gen_andi_tl(t0, t0, ~mask);
2684 tcg_gen_shli_tl(t1, t1, lsb + 32);
2685 tcg_gen_andi_tl(t1, t1, mask);
2686 tcg_gen_or_tl(t0, t0, t1);
2692 mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
2693 gen_load_gpr(t0, rt);
2694 tcg_gen_andi_tl(t0, t0, ~mask);
2695 tcg_gen_shli_tl(t1, t1, lsb);
2696 tcg_gen_andi_tl(t1, t1, mask);
2697 tcg_gen_or_tl(t0, t0, t1);
2702 MIPS_INVAL("bitops");
2703 generate_exception(ctx, EXCP_RI);
2708 gen_store_gpr(t0, rt);
2713 static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
2718 /* If no destination, treat it as a NOP. */
2723 t0 = tcg_temp_new();
2724 gen_load_gpr(t0, rt);
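/* WSBH swaps the two bytes within each halfword: odd bytes move down,
   even bytes move up, and the halves are ORed back together. */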
2728 TCGv t1 = tcg_temp_new();
2730 tcg_gen_shri_tl(t1, t0, 8);
2731 tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
2732 tcg_gen_shli_tl(t0, t0, 8);
2733 tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
2734 tcg_gen_or_tl(t0, t0, t1);
2736 tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
2740 tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
2743 tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
2745 #if defined(TARGET_MIPS64)
2748 TCGv t1 = tcg_temp_new();
2750 tcg_gen_shri_tl(t1, t0, 8);
2751 tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
2752 tcg_gen_shli_tl(t0, t0, 8);
2753 tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
2754 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
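/* DSHD reverses the four halfwords of the doubleword: swap the halfwords
   inside each 32-bit word, then exchange the two words. */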
2760 TCGv t1 = tcg_temp_new();
2762 tcg_gen_shri_tl(t1, t0, 16);
2763 tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
2764 tcg_gen_shli_tl(t0, t0, 16);
2765 tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
2766 tcg_gen_or_tl(t0, t0, t1);
2767 tcg_gen_shri_tl(t1, t0, 32);
2768 tcg_gen_shli_tl(t0, t0, 32);
2769 tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
2775 MIPS_INVAL("bshfl");
2776 generate_exception(ctx, EXCP_RI);
2783 #ifndef CONFIG_USER_ONLY
2784 /* CP0 (MMU and control) */
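/* Helpers for moving CP0 fields between CPUState and TCG registers.
   32-bit fields are sign-extended on the way out and truncated on the
   way in, matching the architectural MFC0/MTC0 behaviour on 64-bit
   targets. */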
2785 static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
2787 TCGv_i32 t0 = tcg_temp_new_i32();
2789 tcg_gen_ld_i32(t0, cpu_env, off);
2790 tcg_gen_ext_i32_tl(arg, t0);
2791 tcg_temp_free_i32(t0);
2794 static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
2796 tcg_gen_ld_tl(arg, cpu_env, off);
2797 tcg_gen_ext32s_tl(arg, arg);
2800 static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
2802 TCGv_i32 t0 = tcg_temp_new_i32();
2804 tcg_gen_trunc_tl_i32(t0, arg);
2805 tcg_gen_st_i32(t0, cpu_env, off);
2806 tcg_temp_free_i32(t0);
2809 static inline void gen_mtc0_store64 (TCGv arg, target_ulong off)
2811 tcg_gen_ext32s_tl(arg, arg);
2812 tcg_gen_st_tl(arg, cpu_env, off);
2815 static void gen_mfc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
2817 const char *rn = "invalid";
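/* 'rn' only names the register for the LOG_DISAS trace at the end of the
   function; unhandled register/select combinations fall through to the
   reserved instruction exception. */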
2820 check_insn(env, ctx, ISA_MIPS32);
2826 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Index));
2830 check_insn(env, ctx, ASE_MT);
2831 gen_helper_mfc0_mvpcontrol(arg);
2835 check_insn(env, ctx, ASE_MT);
2836 gen_helper_mfc0_mvpconf0(arg);
2840 check_insn(env, ctx, ASE_MT);
2841 gen_helper_mfc0_mvpconf1(arg);
2851 gen_helper_mfc0_random(arg);
2855 check_insn(env, ctx, ASE_MT);
2856 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEControl));
2860 check_insn(env, ctx, ASE_MT);
2861 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf0));
2865 check_insn(env, ctx, ASE_MT);
2866 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf1));
2870 check_insn(env, ctx, ASE_MT);
2871 gen_mfc0_load64(arg, offsetof(CPUState, CP0_YQMask));
2875 check_insn(env, ctx, ASE_MT);
2876 gen_mfc0_load64(arg, offsetof(CPUState, CP0_VPESchedule));
2880 check_insn(env, ctx, ASE_MT);
2881 gen_mfc0_load64(arg, offsetof(CPUState, CP0_VPEScheFBack));
2882 rn = "VPEScheFBack";
2885 check_insn(env, ctx, ASE_MT);
2886 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEOpt));
2896 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo0));
2897 tcg_gen_ext32s_tl(arg, arg);
2901 check_insn(env, ctx, ASE_MT);
2902 gen_helper_mfc0_tcstatus(arg);
2906 check_insn(env, ctx, ASE_MT);
2907 gen_helper_mfc0_tcbind(arg);
2911 check_insn(env, ctx, ASE_MT);
2912 gen_helper_mfc0_tcrestart(arg);
2916 check_insn(env, ctx, ASE_MT);
2917 gen_helper_mfc0_tchalt(arg);
2921 check_insn(env, ctx, ASE_MT);
2922 gen_helper_mfc0_tccontext(arg);
2926 check_insn(env, ctx, ASE_MT);
2927 gen_helper_mfc0_tcschedule(arg);
2931 check_insn(env, ctx, ASE_MT);
2932 gen_helper_mfc0_tcschefback(arg);
2942 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo1));
2943 tcg_gen_ext32s_tl(arg, arg);
2953 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_Context));
2954 tcg_gen_ext32s_tl(arg, arg);
2958 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
2959 rn = "ContextConfig";
2968 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageMask));
2972 check_insn(env, ctx, ISA_MIPS32R2);
2973 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageGrain));
2983 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Wired));
2987 check_insn(env, ctx, ISA_MIPS32R2);
2988 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf0));
2992 check_insn(env, ctx, ISA_MIPS32R2);
2993 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf1));
2997 check_insn(env, ctx, ISA_MIPS32R2);
2998 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf2));
3002 check_insn(env, ctx, ISA_MIPS32R2);
3003 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf3));
3007 check_insn(env, ctx, ISA_MIPS32R2);
3008 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf4));
3018 check_insn(env, ctx, ISA_MIPS32R2);
3019 gen_mfc0_load32(arg, offsetof(CPUState, CP0_HWREna));
3029 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_BadVAddr));
3030 tcg_gen_ext32s_tl(arg, arg);
3040 /* Mark as an IO operation because we read the time. */
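/* Count reflects the virtual time source, so the access is ordered like
   I/O and the translation block is ended after the read. */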
3043 gen_helper_mfc0_count(arg);
3046 ctx->bstate = BS_STOP;
3050 /* 6,7 are implementation dependent */
3058 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryHi));
3059 tcg_gen_ext32s_tl(arg, arg);
3069 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Compare));
3072 /* 6,7 are implementation dependent */
3080 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Status));
3084 check_insn(env, ctx, ISA_MIPS32R2);
3085 gen_mfc0_load32(arg, offsetof(CPUState, CP0_IntCtl));
3089 check_insn(env, ctx, ISA_MIPS32R2);
3090 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSCtl));
3094 check_insn(env, ctx, ISA_MIPS32R2);
3095 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSMap));
3105 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Cause));
3115 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
3116 tcg_gen_ext32s_tl(arg, arg);
3126 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PRid));
3130 check_insn(env, ctx, ISA_MIPS32R2);
3131 gen_mfc0_load32(arg, offsetof(CPUState, CP0_EBase));
3141 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config0));
3145 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config1));
3149 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config2));
3153 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config3));
3156 /* 4,5 are reserved */
3157 /* 6,7 are implementation dependent */
3159 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config6));
3163 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config7));
3173 gen_helper_mfc0_lladdr(arg);
3183 gen_helper_1i(mfc0_watchlo, arg, sel);
3193 gen_helper_1i(mfc0_watchhi, arg, sel);
3203 #if defined(TARGET_MIPS64)
3204 check_insn(env, ctx, ISA_MIPS3);
3205 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_XContext));
3206 tcg_gen_ext32s_tl(arg, arg);
3215 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3218 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Framemask));
3226 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3227 rn = "Diagnostic"; /* implementation dependent */
3232 gen_helper_mfc0_debug(arg); /* EJTAG support */
3236 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
3237 rn = "TraceControl";
3240 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
3241 rn = "TraceControl2";
3244 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
3245 rn = "UserTraceData";
3248 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
3259 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
3260 tcg_gen_ext32s_tl(arg, arg);
3270 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Performance0));
3271 rn = "Performance0";
3274 // gen_helper_mfc0_performance1(arg);
3275 rn = "Performance1";
3278 // gen_helper_mfc0_performance2(arg);
3279 rn = "Performance2";
3282 // gen_helper_mfc0_performance3(arg);
3283 rn = "Performance3";
3286 // gen_helper_mfc0_performance4(arg);
3287 rn = "Performance4";
3290 // gen_helper_mfc0_performance5(arg);
3291 rn = "Performance5";
3294 // gen_helper_mfc0_performance6(arg);
3295 rn = "Performance6";
3298 // gen_helper_mfc0_performance7(arg);
3299 rn = "Performance7";
3306 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3312 tcg_gen_movi_tl(arg, 0); /* unimplemented */
3325 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagLo));
3332 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataLo));
3345 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagHi));
3352 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataHi));
3362 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
3363 tcg_gen_ext32s_tl(arg, arg);
3374 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DESAVE));
3384 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
3388 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
3389 generate_exception(ctx, EXCP_RI);
3392 static void gen_mtc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
3394 const char *rn = "invalid";
3397 check_insn(env, ctx, ISA_MIPS32);
3406 gen_helper_mtc0_index(arg);
3410 check_insn(env, ctx, ASE_MT);
3411 gen_helper_mtc0_mvpcontrol(arg);
3415 check_insn(env, ctx, ASE_MT);
3420 check_insn(env, ctx, ASE_MT);
3435 check_insn(env, ctx, ASE_MT);
3436 gen_helper_mtc0_vpecontrol(arg);
3440 check_insn(env, ctx, ASE_MT);
3441 gen_helper_mtc0_vpeconf0(arg);
3445 check_insn(env, ctx, ASE_MT);
3446 gen_helper_mtc0_vpeconf1(arg);
3450 check_insn(env, ctx, ASE_MT);
3451 gen_helper_mtc0_yqmask(arg);
3455 check_insn(env, ctx, ASE_MT);
3456 gen_mtc0_store64(arg, offsetof(CPUState, CP0_VPESchedule));
3460 check_insn(env, ctx, ASE_MT);
3461 gen_mtc0_store64(arg, offsetof(CPUState, CP0_VPEScheFBack));
3462 rn = "VPEScheFBack";
3465 check_insn(env, ctx, ASE_MT);
3466 gen_helper_mtc0_vpeopt(arg);
3476 gen_helper_mtc0_entrylo0(arg);
3480 check_insn(env, ctx, ASE_MT);
3481 gen_helper_mtc0_tcstatus(arg);
3485 check_insn(env, ctx, ASE_MT);
3486 gen_helper_mtc0_tcbind(arg);
3490 check_insn(env, ctx, ASE_MT);
3491 gen_helper_mtc0_tcrestart(arg);
3495 check_insn(env, ctx, ASE_MT);
3496 gen_helper_mtc0_tchalt(arg);
3500 check_insn(env, ctx, ASE_MT);
3501 gen_helper_mtc0_tccontext(arg);
3505 check_insn(env, ctx, ASE_MT);
3506 gen_helper_mtc0_tcschedule(arg);
3510 check_insn(env, ctx, ASE_MT);
3511 gen_helper_mtc0_tcschefback(arg);
3521 gen_helper_mtc0_entrylo1(arg);
3531 gen_helper_mtc0_context(arg);
3535 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
3536 rn = "ContextConfig";
3545 gen_helper_mtc0_pagemask(arg);
3549 check_insn(env, ctx, ISA_MIPS32R2);
3550 gen_helper_mtc0_pagegrain(arg);
3560 gen_helper_mtc0_wired(arg);
3564 check_insn(env, ctx, ISA_MIPS32R2);
3565 gen_helper_mtc0_srsconf0(arg);
3569 check_insn(env, ctx, ISA_MIPS32R2);
3570 gen_helper_mtc0_srsconf1(arg);
3574 check_insn(env, ctx, ISA_MIPS32R2);
3575 gen_helper_mtc0_srsconf2(arg);
3579 check_insn(env, ctx, ISA_MIPS32R2);
3580 gen_helper_mtc0_srsconf3(arg);
3584 check_insn(env, ctx, ISA_MIPS32R2);
3585 gen_helper_mtc0_srsconf4(arg);
3595 check_insn(env, ctx, ISA_MIPS32R2);
3596 gen_helper_mtc0_hwrena(arg);
3610 gen_helper_mtc0_count(arg);
3613 /* 6,7 are implementation dependent */
3621 gen_helper_mtc0_entryhi(arg);
3631 gen_helper_mtc0_compare(arg);
3634 /* 6,7 are implementation dependent */
3642 save_cpu_state(ctx, 1);
3643 gen_helper_mtc0_status(arg);
3644 /* BS_STOP isn't good enough here, hflags may have changed. */
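/* A Status write can switch the operating mode and interrupt enables
   cached in the hflags, so save the PC and leave through BS_EXCP to force
   a full resynchronisation before the next block. */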
3645 gen_save_pc(ctx->pc + 4);
3646 ctx->bstate = BS_EXCP;
3650 check_insn(env, ctx, ISA_MIPS32R2);
3651 gen_helper_mtc0_intctl(arg);
3652 /* Stop translation as we may have switched the execution mode */
3653 ctx->bstate = BS_STOP;
3657 check_insn(env, ctx, ISA_MIPS32R2);
3658 gen_helper_mtc0_srsctl(arg);
3659 /* Stop translation as we may have switched the execution mode */
3660 ctx->bstate = BS_STOP;
3664 check_insn(env, ctx, ISA_MIPS32R2);
3665 gen_mtc0_store32(arg, offsetof(CPUState, CP0_SRSMap));
3666 /* Stop translation as we may have switched the execution mode */
3667 ctx->bstate = BS_STOP;
3677 save_cpu_state(ctx, 1);
3678 gen_helper_mtc0_cause(arg);
3688 gen_mtc0_store64(arg, offsetof(CPUState, CP0_EPC));
3702 check_insn(env, ctx, ISA_MIPS32R2);
3703 gen_helper_mtc0_ebase(arg);
3713 gen_helper_mtc0_config0(arg);
3715 /* Stop translation as we may have switched the execution mode */
3716 ctx->bstate = BS_STOP;
3719 /* ignored, read only */
3723 gen_helper_mtc0_config2(arg);
3725 /* Stop translation as we may have switched the execution mode */
3726 ctx->bstate = BS_STOP;
3729 /* ignored, read only */
3732 /* 4,5 are reserved */
3733 /* 6,7 are implementation dependent */
3743 rn = "Invalid config selector";
3750 gen_helper_mtc0_lladdr(arg);
3760 gen_helper_1i(mtc0_watchlo, arg, sel);
3770 gen_helper_1i(mtc0_watchhi, arg, sel);
3780 #if defined(TARGET_MIPS64)
3781 check_insn(env, ctx, ISA_MIPS3);
3782 gen_helper_mtc0_xcontext(arg);
3791 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3794 gen_helper_mtc0_framemask(arg);
3803 rn = "Diagnostic"; /* implementation dependent */
3808 gen_helper_mtc0_debug(arg); /* EJTAG support */
3809 /* BS_STOP isn't good enough here, hflags may have changed. */
3810 gen_save_pc(ctx->pc + 4);
3811 ctx->bstate = BS_EXCP;
3815 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
3816 rn = "TraceControl";
3817 /* Stop translation as we may have switched the execution mode */
3818 ctx->bstate = BS_STOP;
3821 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
3822 rn = "TraceControl2";
3823 /* Stop translation as we may have switched the execution mode */
3824 ctx->bstate = BS_STOP;
3829 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
3830 rn = "UserTraceData";
3831 /* Stop translation as we may have switched the execution mode */
3832 ctx->bstate = BS_STOP;
3835 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
3836 /* Stop translation as we may have switched the execution mode */
3837 ctx->bstate = BS_STOP;
3848 gen_mtc0_store64(arg, offsetof(CPUState, CP0_DEPC));
3858 gen_helper_mtc0_performance0(arg);
3859 rn = "Performance0";
3862 // gen_helper_mtc0_performance1(arg);
3863 rn = "Performance1";
3866 // gen_helper_mtc0_performance2(arg);
3867 rn = "Performance2";
3870 // gen_helper_mtc0_performance3(arg);
3871 rn = "Performance3";
3874 // gen_helper_mtc0_performance4(arg);
3875 rn = "Performance4";
3878 // gen_helper_mtc0_performance5(arg);
3879 rn = "Performance5";
3882 // gen_helper_mtc0_performance6(arg);
3883 rn = "Performance6";
3886 // gen_helper_mtc0_performance7(arg);
3887 rn = "Performance7";
3913 gen_helper_mtc0_taglo(arg);
3920 gen_helper_mtc0_datalo(arg);
3933 gen_helper_mtc0_taghi(arg);
3940 gen_helper_mtc0_datahi(arg);
3951 gen_mtc0_store64(arg, offsetof(CPUState, CP0_ErrorEPC));
3962 gen_mtc0_store32(arg, offsetof(CPUState, CP0_DESAVE));
3968 /* Stop translation as we may have switched the execution mode */
3969 ctx->bstate = BS_STOP;
3974 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
3975 /* For simplicity assume that all writes can cause interrupts. */
3978 ctx->bstate = BS_STOP;
3983 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
3984 generate_exception(ctx, EXCP_RI);
3987 #if defined(TARGET_MIPS64)
3988 static void gen_dmfc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
3990 const char *rn = "invalid";
3993 check_insn(env, ctx, ISA_MIPS64);
3999 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Index));
4003 check_insn(env, ctx, ASE_MT);
4004 gen_helper_mfc0_mvpcontrol(arg);
4008 check_insn(env, ctx, ASE_MT);
4009 gen_helper_mfc0_mvpconf0(arg);
4013 check_insn(env, ctx, ASE_MT);
4014 gen_helper_mfc0_mvpconf1(arg);
4024 gen_helper_mfc0_random(arg);
4028 check_insn(env, ctx, ASE_MT);
4029 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEControl));
4033 check_insn(env, ctx, ASE_MT);
4034 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf0));
4038 check_insn(env, ctx, ASE_MT);
4039 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf1));
4043 check_insn(env, ctx, ASE_MT);
4044 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_YQMask));
4048 check_insn(env, ctx, ASE_MT);
4049 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_VPESchedule));
4053 check_insn(env, ctx, ASE_MT);
4054 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
4055 rn = "VPEScheFBack";
4058 check_insn(env, ctx, ASE_MT);
4059 gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEOpt));
4069 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo0));
4073 check_insn(env, ctx, ASE_MT);
4074 gen_helper_mfc0_tcstatus(arg);
4078 check_insn(env, ctx, ASE_MT);
4079 gen_helper_mfc0_tcbind(arg);
4083 check_insn(env, ctx, ASE_MT);
4084 gen_helper_dmfc0_tcrestart(arg);
4088 check_insn(env, ctx, ASE_MT);
4089 gen_helper_dmfc0_tchalt(arg);
4093 check_insn(env, ctx, ASE_MT);
4094 gen_helper_dmfc0_tccontext(arg);
4098 check_insn(env, ctx, ASE_MT);
4099 gen_helper_dmfc0_tcschedule(arg);
4103 check_insn(env, ctx, ASE_MT);
4104 gen_helper_dmfc0_tcschefback(arg);
4114 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo1));
4124 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_Context));
4128 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
4129 rn = "ContextConfig";
4138 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageMask));
4142 check_insn(env, ctx, ISA_MIPS32R2);
4143 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageGrain));
4153 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Wired));
4157 check_insn(env, ctx, ISA_MIPS32R2);
4158 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf0));
4162 check_insn(env, ctx, ISA_MIPS32R2);
4163 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf1));
4167 check_insn(env, ctx, ISA_MIPS32R2);
4168 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf2));
4172 check_insn(env, ctx, ISA_MIPS32R2);
4173 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf3));
4177 check_insn(env, ctx, ISA_MIPS32R2);
4178 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf4));
4188 check_insn(env, ctx, ISA_MIPS32R2);
4189 gen_mfc0_load32(arg, offsetof(CPUState, CP0_HWREna));
4199 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_BadVAddr));
4209 /* Mark as an IO operation because we read the time. */
4212 gen_helper_mfc0_count(arg);
4215 ctx->bstate = BS_STOP;
4219 /* 6,7 are implementation dependent */
4227 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryHi));
4237 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Compare));
4240 /* 6,7 are implementation dependent */
4248 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Status));
4252 check_insn(env, ctx, ISA_MIPS32R2);
4253 gen_mfc0_load32(arg, offsetof(CPUState, CP0_IntCtl));
4257 check_insn(env, ctx, ISA_MIPS32R2);
4258 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSCtl));
4262 check_insn(env, ctx, ISA_MIPS32R2);
4263 gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSMap));
4273 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Cause));
4283 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
4293 gen_mfc0_load32(arg, offsetof(CPUState, CP0_PRid));
4297 check_insn(env, ctx, ISA_MIPS32R2);
4298 gen_mfc0_load32(arg, offsetof(CPUState, CP0_EBase));
4308 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config0));
4312 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config1));
4316 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config2));
4320 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config3));
4323 /* 6,7 are implementation dependent */
4325 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config6));
4329 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config7));
4339 gen_helper_dmfc0_lladdr(arg);
4349 gen_helper_1i(dmfc0_watchlo, arg, sel);
4359 gen_helper_1i(mfc0_watchhi, arg, sel);
4369 check_insn(env, ctx, ISA_MIPS3);
4370 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_XContext));
4378 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4381 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Framemask));
4389 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4390 rn = "Diagnostic"; /* implementation dependent */
4395 gen_helper_mfc0_debug(arg); /* EJTAG support */
4399 // gen_helper_dmfc0_tracecontrol(arg); /* PDtrace support */
4400 rn = "TraceControl";
4403 // gen_helper_dmfc0_tracecontrol2(arg); /* PDtrace support */
4404 rn = "TraceControl2";
4407 // gen_helper_dmfc0_usertracedata(arg); /* PDtrace support */
4408 rn = "UserTraceData";
4411 // gen_helper_dmfc0_tracebpc(arg); /* PDtrace support */
4422 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
4432 gen_mfc0_load32(arg, offsetof(CPUState, CP0_Performance0));
4433 rn = "Performance0";
4436 // gen_helper_dmfc0_performance1(arg);
4437 rn = "Performance1";
4440 // gen_helper_dmfc0_performance2(arg);
4441 rn = "Performance2";
4444 // gen_helper_dmfc0_performance3(arg);
4445 rn = "Performance3";
4448 // gen_helper_dmfc0_performance4(arg);
4449 rn = "Performance4";
4452 // gen_helper_dmfc0_performance5(arg);
4453 rn = "Performance5";
4456 // gen_helper_dmfc0_performance6(arg);
4457 rn = "Performance6";
4460 // gen_helper_dmfc0_performance7(arg);
4461 rn = "Performance7";
4468 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4475 tcg_gen_movi_tl(arg, 0); /* unimplemented */
4488 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagLo));
4495 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataLo));
4508 gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagHi));
4515 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataHi));
4525 tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
4536 gen_mfc0_load32(arg, offsetof(CPUState, CP0_DESAVE));
4546 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
4550 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
4551 generate_exception(ctx, EXCP_RI);
4554 static void gen_dmtc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
4556 const char *rn = "invalid";
4559 check_insn(env, ctx, ISA_MIPS64);
4568 gen_helper_mtc0_index(arg);
4572 check_insn(env, ctx, ASE_MT);
4573 gen_helper_mtc0_mvpcontrol(arg);
4577 check_insn(env, ctx, ASE_MT);
4582 check_insn(env, ctx, ASE_MT);
4597 check_insn(env, ctx, ASE_MT);
4598 gen_helper_mtc0_vpecontrol(arg);
4602 check_insn(env, ctx, ASE_MT);
4603 gen_helper_mtc0_vpeconf0(arg);
4607 check_insn(env, ctx, ASE_MT);
4608 gen_helper_mtc0_vpeconf1(arg);
4612 check_insn(env, ctx, ASE_MT);
4613 gen_helper_mtc0_yqmask(arg);
4617 check_insn(env, ctx, ASE_MT);
4618 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_VPESchedule));
4622 check_insn(env, ctx, ASE_MT);
4623 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
4624 rn = "VPEScheFBack";
4627 check_insn(env, ctx, ASE_MT);
4628 gen_helper_mtc0_vpeopt(arg);
4638 gen_helper_mtc0_entrylo0(arg);
4642 check_insn(env, ctx, ASE_MT);
4643 gen_helper_mtc0_tcstatus(arg);
4647 check_insn(env, ctx, ASE_MT);
4648 gen_helper_mtc0_tcbind(arg);
4652 check_insn(env, ctx, ASE_MT);
4653 gen_helper_mtc0_tcrestart(arg);
4657 check_insn(env, ctx, ASE_MT);
4658 gen_helper_mtc0_tchalt(arg);
4662 check_insn(env, ctx, ASE_MT);
4663 gen_helper_mtc0_tccontext(arg);
4667 check_insn(env, ctx, ASE_MT);
4668 gen_helper_mtc0_tcschedule(arg);
4672 check_insn(env, ctx, ASE_MT);
4673 gen_helper_mtc0_tcschefback(arg);
4683 gen_helper_mtc0_entrylo1(arg);
4693 gen_helper_mtc0_context(arg);
4697 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
4698 rn = "ContextConfig";
4707 gen_helper_mtc0_pagemask(arg);
4711 check_insn(env, ctx, ISA_MIPS32R2);
4712 gen_helper_mtc0_pagegrain(arg);
4722 gen_helper_mtc0_wired(arg);
4726 check_insn(env, ctx, ISA_MIPS32R2);
4727 gen_helper_mtc0_srsconf0(arg);
4731 check_insn(env, ctx, ISA_MIPS32R2);
4732 gen_helper_mtc0_srsconf1(arg);
4736 check_insn(env, ctx, ISA_MIPS32R2);
4737 gen_helper_mtc0_srsconf2(arg);
4741 check_insn(env, ctx, ISA_MIPS32R2);
4742 gen_helper_mtc0_srsconf3(arg);
4746 check_insn(env, ctx, ISA_MIPS32R2);
4747 gen_helper_mtc0_srsconf4(arg);
4757 check_insn(env, ctx, ISA_MIPS32R2);
4758 gen_helper_mtc0_hwrena(arg);
4772 gen_helper_mtc0_count(arg);
4775 /* 6,7 are implementation dependent */
4779 /* Stop translation as we may have switched the execution mode */
4780 ctx->bstate = BS_STOP;
4785 gen_helper_mtc0_entryhi(arg);
4795 gen_helper_mtc0_compare(arg);
4798 /* 6,7 are implementation dependent */
4802 /* Stop translation as we may have switched the execution mode */
4803 ctx->bstate = BS_STOP;
4808 save_cpu_state(ctx, 1);
4809 gen_helper_mtc0_status(arg);
4810 /* BS_STOP isn't good enough here, hflags may have changed. */
4811 gen_save_pc(ctx->pc + 4);
4812 ctx->bstate = BS_EXCP;
4816 check_insn(env, ctx, ISA_MIPS32R2);
4817 gen_helper_mtc0_intctl(arg);
4818 /* Stop translation as we may have switched the execution mode */
4819 ctx->bstate = BS_STOP;
4823 check_insn(env, ctx, ISA_MIPS32R2);
4824 gen_helper_mtc0_srsctl(arg);
4825 /* Stop translation as we may have switched the execution mode */
4826 ctx->bstate = BS_STOP;
4830 check_insn(env, ctx, ISA_MIPS32R2);
4831 gen_mtc0_store32(arg, offsetof(CPUState, CP0_SRSMap));
4832 /* Stop translation as we may have switched the execution mode */
4833 ctx->bstate = BS_STOP;
4843 save_cpu_state(ctx, 1);
4844 gen_helper_mtc0_cause(arg);
4854 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
4868 check_insn(env, ctx, ISA_MIPS32R2);
4869 gen_helper_mtc0_ebase(arg);
4879 gen_helper_mtc0_config0(arg);
4881 /* Stop translation as we may have switched the execution mode */
4882 ctx->bstate = BS_STOP;
4885 /* ignored, read only */
4889 gen_helper_mtc0_config2(arg);
4891 /* Stop translation as we may have switched the execution mode */
4892 ctx->bstate = BS_STOP;
4898 /* 6,7 are implementation dependent */
4900 rn = "Invalid config selector";
4907 gen_helper_mtc0_lladdr(arg);
4917 gen_helper_1i(mtc0_watchlo, arg, sel);
4927 gen_helper_1i(mtc0_watchhi, arg, sel);
4937 check_insn(env, ctx, ISA_MIPS3);
4938 gen_helper_mtc0_xcontext(arg);
4946 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4949 gen_helper_mtc0_framemask(arg);
4958 rn = "Diagnostic"; /* implementation dependent */
4963 gen_helper_mtc0_debug(arg); /* EJTAG support */
4964 /* BS_STOP isn't good enough here, hflags may have changed. */
4965 gen_save_pc(ctx->pc + 4);
4966 ctx->bstate = BS_EXCP;
4970 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
4971 /* Stop translation as we may have switched the execution mode */
4972 ctx->bstate = BS_STOP;
4973 rn = "TraceControl";
4976 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
4977 /* Stop translation as we may have switched the execution mode */
4978 ctx->bstate = BS_STOP;
4979 rn = "TraceControl2";
4982 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
4983 /* Stop translation as we may have switched the execution mode */
4984 ctx->bstate = BS_STOP;
4985 rn = "UserTraceData";
4988 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
4989 /* Stop translation as we may have switched the execution mode */
4990 ctx->bstate = BS_STOP;
5001 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
5011 gen_helper_mtc0_performance0(arg);
5012 rn = "Performance0";
5015 // gen_helper_mtc0_performance1(arg);
5016 rn = "Performance1";
5019 // gen_helper_mtc0_performance2(arg);
5020 rn = "Performance2";
5023 // gen_helper_mtc0_performance3(arg);
5024 rn = "Performance3";
5027 // gen_helper_mtc0_performance4(arg);
5028 rn = "Performance4";
5031 // gen_helper_mtc0_performance5(arg);
5032 rn = "Performance5";
5035 // gen_helper_mtc0_performance6(arg);
5036 rn = "Performance6";
5039 // gen_helper_mtc0_performance7(arg);
5040 rn = "Performance7";
5066 gen_helper_mtc0_taglo(arg);
5073 gen_helper_mtc0_datalo(arg);
5086 gen_helper_mtc0_taghi(arg);
5093 gen_helper_mtc0_datahi(arg);
5104 tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
5115 gen_mtc0_store32(arg, offsetof(CPUState, CP0_DESAVE));
5121 /* Stop translation as we may have switched the execution mode */
5122 ctx->bstate = BS_STOP;
5127 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
5128 /* For simplicity assume that all writes can cause interrupts. */
5131 ctx->bstate = BS_STOP;
5136 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
5137 generate_exception(ctx, EXCP_RI);
5139 #endif /* TARGET_MIPS64 */
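/* MT ASE: MFTR/MTTR access CP0, GPR and FPU state of the thread context
   selected by VPEControl.TargTC. When that TC belongs to another VPE
   without MVP rights, or its number exceeds MVPConf0.PTC, reads return -1
   and the corresponding writes have no effect. */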
5141 static void gen_mftr(CPUState *env, DisasContext *ctx, int rt, int rd,
5142 int u, int sel, int h)
5144 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
5145 TCGv t0 = tcg_temp_local_new();
5147 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
5148 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
5149 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
5150 tcg_gen_movi_tl(t0, -1);
5151 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
5152 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
5153 tcg_gen_movi_tl(t0, -1);
5159 gen_helper_mftc0_tcstatus(t0);
5162 gen_helper_mftc0_tcbind(t0);
5165 gen_helper_mftc0_tcrestart(t0);
5168 gen_helper_mftc0_tchalt(t0);
5171 gen_helper_mftc0_tccontext(t0);
5174 gen_helper_mftc0_tcschedule(t0);
5177 gen_helper_mftc0_tcschefback(t0);
5180 gen_mfc0(env, ctx, t0, rt, sel);
5187 gen_helper_mftc0_entryhi(t0);
5190 gen_mfc0(env, ctx, t0, rt, sel);
5196 gen_helper_mftc0_status(t0);
5199 gen_mfc0(env, ctx, t0, rt, sel);
5205 gen_helper_mftc0_debug(t0);
5208 gen_mfc0(env, ctx, t0, rt, sel);
5213 gen_mfc0(env, ctx, t0, rt, sel);
5215 } else switch (sel) {
5216 /* GPR registers. */
5218 gen_helper_1i(mftgpr, t0, rt);
5220 /* Auxiliary CPU registers */
5224 gen_helper_1i(mftlo, t0, 0);
5227 gen_helper_1i(mfthi, t0, 0);
5230 gen_helper_1i(mftacx, t0, 0);
5233 gen_helper_1i(mftlo, t0, 1);
5236 gen_helper_1i(mfthi, t0, 1);
5239 gen_helper_1i(mftacx, t0, 1);
5242 gen_helper_1i(mftlo, t0, 2);
5245 gen_helper_1i(mfthi, t0, 2);
5248 gen_helper_1i(mftacx, t0, 2);
5251 gen_helper_1i(mftlo, t0, 3);
5254 gen_helper_1i(mfthi, t0, 3);
5257 gen_helper_1i(mftacx, t0, 3);
5260 gen_helper_mftdsp(t0);
5266 /* Floating point (COP1). */
5268 /* XXX: For now we support only a single FPU context. */
5270 TCGv_i32 fp0 = tcg_temp_new_i32();
5272 gen_load_fpr32(fp0, rt);
5273 tcg_gen_ext_i32_tl(t0, fp0);
5274 tcg_temp_free_i32(fp0);
5276 TCGv_i32 fp0 = tcg_temp_new_i32();
5278 gen_load_fpr32h(fp0, rt);
5279 tcg_gen_ext_i32_tl(t0, fp0);
5280 tcg_temp_free_i32(fp0);
5284 /* XXX: For now we support only a single FPU context. */
5285 gen_helper_1i(cfc1, t0, rt);
5287 /* COP2: Not implemented. */
5294 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
5295 gen_store_gpr(t0, rd);
5301 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
5302 generate_exception(ctx, EXCP_RI);
5305 static void gen_mttr(CPUState *env, DisasContext *ctx, int rd, int rt,
5306 int u, int sel, int h)
5308 int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
5309 TCGv t0 = tcg_temp_local_new();
5311 gen_load_gpr(t0, rt);
5312 if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
5313 ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
5314 (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
5316 else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
5317 (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
5324 gen_helper_mttc0_tcstatus(t0);
5327 gen_helper_mttc0_tcbind(t0);
5330 gen_helper_mttc0_tcrestart(t0);
5333 gen_helper_mttc0_tchalt(t0);
5336 gen_helper_mttc0_tccontext(t0);
5339 gen_helper_mttc0_tcschedule(t0);
5342 gen_helper_mttc0_tcschefback(t0);
5345 gen_mtc0(env, ctx, t0, rd, sel);
5352 gen_helper_mttc0_entryhi(t0);
5355 gen_mtc0(env, ctx, t0, rd, sel);
5361 gen_helper_mttc0_status(t0);
5364 gen_mtc0(env, ctx, t0, rd, sel);
5370 gen_helper_mttc0_debug(t0);
5373 gen_mtc0(env, ctx, t0, rd, sel);
5378 gen_mtc0(env, ctx, t0, rd, sel);
5380 } else switch (sel) {
5381 /* GPR registers. */
5383 gen_helper_1i(mttgpr, t0, rd);
5385 /* Auxiliary CPU registers */
5389 gen_helper_1i(mttlo, t0, 0);
5392 gen_helper_1i(mtthi, t0, 0);
5395 gen_helper_1i(mttacx, t0, 0);
5398 gen_helper_1i(mttlo, t0, 1);
5401 gen_helper_1i(mtthi, t0, 1);
5404 gen_helper_1i(mttacx, t0, 1);
5407 gen_helper_1i(mttlo, t0, 2);
5410 gen_helper_1i(mtthi, t0, 2);
5413 gen_helper_1i(mttacx, t0, 2);
5416 gen_helper_1i(mttlo, t0, 3);
5419 gen_helper_1i(mtthi, t0, 3);
5422 gen_helper_1i(mttacx, t0, 3);
5425 gen_helper_mttdsp(t0);
5431 /* Floating point (COP1). */
5433 /* XXX: For now we support only a single FPU context. */
5435 TCGv_i32 fp0 = tcg_temp_new_i32();
5437 tcg_gen_trunc_tl_i32(fp0, t0);
5438 gen_store_fpr32(fp0, rd);
5439 tcg_temp_free_i32(fp0);
5441 TCGv_i32 fp0 = tcg_temp_new_i32();
5443 tcg_gen_trunc_tl_i32(fp0, t0);
5444 gen_store_fpr32h(fp0, rd);
5445 tcg_temp_free_i32(fp0);
5449 /* XXX: For now we support only a single FPU context. */
5450 gen_helper_1i(ctc1, t0, rd);
5452 /* COP2: Not implemented. */
5459 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
5465 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
5466 generate_exception(ctx, EXCP_RI);
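/* Top level CP0 dispatcher: MFC0/MTC0 (and the DMFC0/DMTC0 and MFTR/MTTR
   forms) take the CP0 register in rd and the select in the low three
   opcode bits. The TLB instructions are only emitted when the CPU model
   provides the matching helper, and ERET, DERET and WAIT all terminate
   the translation block. */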
5469 static void gen_cp0 (CPUState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
5471 const char *opn = "ldst";
5479 gen_mfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
5484 TCGv t0 = tcg_temp_new();
5486 gen_load_gpr(t0, rt);
5487 gen_mtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
5492 #if defined(TARGET_MIPS64)
5494 check_insn(env, ctx, ISA_MIPS3);
5499 gen_dmfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);
5503 check_insn(env, ctx, ISA_MIPS3);
5505 TCGv t0 = tcg_temp_new();
5507 gen_load_gpr(t0, rt);
5508 gen_dmtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
5515 check_insn(env, ctx, ASE_MT);
5520 gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
5521 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
5525 check_insn(env, ctx, ASE_MT);
5526 gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
5527 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
5532 if (!env->tlb->helper_tlbwi)
5538 if (!env->tlb->helper_tlbwr)
5544 if (!env->tlb->helper_tlbp)
5550 if (!env->tlb->helper_tlbr)
5556 check_insn(env, ctx, ISA_MIPS2);
5558 ctx->bstate = BS_EXCP;
5562 check_insn(env, ctx, ISA_MIPS32);
5563 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
5565 generate_exception(ctx, EXCP_RI);
5568 ctx->bstate = BS_EXCP;
5573 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
5574 /* If we get an exception, we want to restart at the next instruction. */
5576 save_cpu_state(ctx, 1);
5579 ctx->bstate = BS_EXCP;
5584 generate_exception(ctx, EXCP_RI);
5587 MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);
5589 #endif /* !CONFIG_USER_ONLY */
5591 /* CP1 Branches (before delay slot) */
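/* BC1F/BC1T (and the 'likely' forms) test a single FCR31 condition bit
   selected by get_fp_bit(cc); the MIPS-3D BC1ANY2/BC1ANY4 variants OR two
   or four consecutive condition bits together. The result is left in
   'bcond' and the BL/BC hflags drive the delay slot handling just as for
   integer branches. */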
5592 static void gen_compute_branch1 (CPUState *env, DisasContext *ctx, uint32_t op,
5593 int32_t cc, int32_t offset)
5595 target_ulong btarget;
5596 const char *opn = "cp1 cond branch";
5597 TCGv_i32 t0 = tcg_temp_new_i32();
5600 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
5602 btarget = ctx->pc + 4 + offset;
5606 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5607 tcg_gen_not_i32(t0, t0);
5608 tcg_gen_andi_i32(t0, t0, 1);
5609 tcg_gen_extu_i32_tl(bcond, t0);
5613 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5614 tcg_gen_not_i32(t0, t0);
5615 tcg_gen_andi_i32(t0, t0, 1);
5616 tcg_gen_extu_i32_tl(bcond, t0);
5620 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5621 tcg_gen_andi_i32(t0, t0, 1);
5622 tcg_gen_extu_i32_tl(bcond, t0);
5626 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5627 tcg_gen_andi_i32(t0, t0, 1);
5628 tcg_gen_extu_i32_tl(bcond, t0);
5631 ctx->hflags |= MIPS_HFLAG_BL;
5635 TCGv_i32 t1 = tcg_temp_new_i32();
5636 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5637 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5638 tcg_gen_or_i32(t0, t0, t1);
5639 tcg_temp_free_i32(t1);
5640 tcg_gen_not_i32(t0, t0);
5641 tcg_gen_andi_i32(t0, t0, 1);
5642 tcg_gen_extu_i32_tl(bcond, t0);
5648 TCGv_i32 t1 = tcg_temp_new_i32();
5649 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5650 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5651 tcg_gen_or_i32(t0, t0, t1);
5652 tcg_temp_free_i32(t1);
5653 tcg_gen_andi_i32(t0, t0, 1);
5654 tcg_gen_extu_i32_tl(bcond, t0);
5660 TCGv_i32 t1 = tcg_temp_new_i32();
5661 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5662 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5663 tcg_gen_or_i32(t0, t0, t1);
5664 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
5665 tcg_gen_or_i32(t0, t0, t1);
5666 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
5667 tcg_gen_or_i32(t0, t0, t1);
5668 tcg_temp_free_i32(t1);
5669 tcg_gen_not_i32(t0, t0);
5670 tcg_gen_andi_i32(t0, t0, 1);
5671 tcg_gen_extu_i32_tl(bcond, t0);
5677 TCGv_i32 t1 = tcg_temp_new_i32();
5678 tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
5679 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
5680 tcg_gen_or_i32(t0, t0, t1);
5681 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
5682 tcg_gen_or_i32(t0, t0, t1);
5683 tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
5684 tcg_gen_or_i32(t0, t0, t1);
5685 tcg_temp_free_i32(t1);
5686 tcg_gen_andi_i32(t0, t0, 1);
5687 tcg_gen_extu_i32_tl(bcond, t0);
5691 ctx->hflags |= MIPS_HFLAG_BC;
5695 generate_exception (ctx, EXCP_RI);
5698 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx, opn,
5699 ctx->hflags, btarget);
5700 ctx->btarget = btarget;
5703 tcg_temp_free_i32(t0);
5706 /* Coprocessor 1 (FPU) */
5708 #define FOP(func, fmt) (((fmt) << 21) | (func))
5710 static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
5712 const char *opn = "cp1 move";
5713 TCGv t0 = tcg_temp_new();
5718 TCGv_i32 fp0 = tcg_temp_new_i32();
5720 gen_load_fpr32(fp0, fs);
5721 tcg_gen_ext_i32_tl(t0, fp0);
5722 tcg_temp_free_i32(fp0);
5724 gen_store_gpr(t0, rt);
5728 gen_load_gpr(t0, rt);
5730 TCGv_i32 fp0 = tcg_temp_new_i32();
5732 tcg_gen_trunc_tl_i32(fp0, t0);
5733 gen_store_fpr32(fp0, fs);
5734 tcg_temp_free_i32(fp0);
5739 gen_helper_1i(cfc1, t0, fs);
5740 gen_store_gpr(t0, rt);
5744 gen_load_gpr(t0, rt);
5745 gen_helper_1i(ctc1, t0, fs);
5748 #if defined(TARGET_MIPS64)
5750 gen_load_fpr64(ctx, t0, fs);
5751 gen_store_gpr(t0, rt);
5755 gen_load_gpr(t0, rt);
5756 gen_store_fpr64(ctx, t0, fs);
5762 TCGv_i32 fp0 = tcg_temp_new_i32();
5764 gen_load_fpr32h(fp0, fs);
5765 tcg_gen_ext_i32_tl(t0, fp0);
5766 tcg_temp_free_i32(fp0);
5768 gen_store_gpr(t0, rt);
5772 gen_load_gpr(t0, rt);
5774 TCGv_i32 fp0 = tcg_temp_new_i32();
5776 tcg_gen_trunc_tl_i32(fp0, t0);
5777 gen_store_fpr32h(fp0, fs);
5778 tcg_temp_free_i32(fp0);
5784 generate_exception (ctx, EXCP_RI);
5787 MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);
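/* FP conditional moves: MOVF/MOVT and the per-format MOVCF variants test
   one FCR31 condition bit and skip the move when the bit does not match
   'tf'; the paired-single form checks cc for the lower half and cc+1 for
   the upper half. */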
5793 static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
5809 l1 = gen_new_label();
5810 t0 = tcg_temp_new_i32();
5811 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
5812 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5813 tcg_temp_free_i32(t0);
5815 tcg_gen_movi_tl(cpu_gpr[rd], 0);
5817 tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
5822 static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
5825 TCGv_i32 t0 = tcg_temp_new_i32();
5826 int l1 = gen_new_label();
5833 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
5834 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5835 gen_load_fpr32(t0, fs);
5836 gen_store_fpr32(t0, fd);
5838 tcg_temp_free_i32(t0);
5841 static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
5844 TCGv_i32 t0 = tcg_temp_new_i32();
5846 int l1 = gen_new_label();
5853 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
5854 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5855 tcg_temp_free_i32(t0);
5856 fp0 = tcg_temp_new_i64();
5857 gen_load_fpr64(ctx, fp0, fs);
5858 gen_store_fpr64(ctx, fp0, fd);
5859 tcg_temp_free_i64(fp0);
5863 static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
5866 TCGv_i32 t0 = tcg_temp_new_i32();
5867 int l1 = gen_new_label();
5868 int l2 = gen_new_label();
5875 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
5876 tcg_gen_brcondi_i32(cond, t0, 0, l1);
5877 gen_load_fpr32(t0, fs);
5878 gen_store_fpr32(t0, fd);
5881 tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
5882 tcg_gen_brcondi_i32(cond, t0, 0, l2);
5883 gen_load_fpr32h(t0, fs);
5884 gen_store_fpr32h(t0, fd);
5885 tcg_temp_free_i32(t0);
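/* FP arithmetic dispatch. The switch key combines the fmt field (S=16,
   D=17, W=20, L=21, PS=22) with the 6-bit function; functions 0x30..0x3f
   are the c.cond compares, which set an FP condition code instead of
   writing a register and use the abs variants when bit 6 of the opcode is
   set. */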
5890 static void gen_farith (DisasContext *ctx, uint32_t op1,
5891 int ft, int fs, int fd, int cc)
5893 const char *opn = "farith";
5894 const char *condnames[] = {
5912 const char *condnames_abs[] = {
5930 enum { BINOP, CMPOP, OTHEROP } optype = OTHEROP;
5931 uint32_t func = ctx->opcode & 0x3f;
5933 switch (ctx->opcode & FOP(0x3f, 0x1f)) {
5936 TCGv_i32 fp0 = tcg_temp_new_i32();
5937 TCGv_i32 fp1 = tcg_temp_new_i32();
5939 gen_load_fpr32(fp0, fs);
5940 gen_load_fpr32(fp1, ft);
5941 gen_helper_float_add_s(fp0, fp0, fp1);
5942 tcg_temp_free_i32(fp1);
5943 gen_store_fpr32(fp0, fd);
5944 tcg_temp_free_i32(fp0);
5951 TCGv_i32 fp0 = tcg_temp_new_i32();
5952 TCGv_i32 fp1 = tcg_temp_new_i32();
5954 gen_load_fpr32(fp0, fs);
5955 gen_load_fpr32(fp1, ft);
5956 gen_helper_float_sub_s(fp0, fp0, fp1);
5957 tcg_temp_free_i32(fp1);
5958 gen_store_fpr32(fp0, fd);
5959 tcg_temp_free_i32(fp0);
5966 TCGv_i32 fp0 = tcg_temp_new_i32();
5967 TCGv_i32 fp1 = tcg_temp_new_i32();
5969 gen_load_fpr32(fp0, fs);
5970 gen_load_fpr32(fp1, ft);
5971 gen_helper_float_mul_s(fp0, fp0, fp1);
5972 tcg_temp_free_i32(fp1);
5973 gen_store_fpr32(fp0, fd);
5974 tcg_temp_free_i32(fp0);
5981 TCGv_i32 fp0 = tcg_temp_new_i32();
5982 TCGv_i32 fp1 = tcg_temp_new_i32();
5984 gen_load_fpr32(fp0, fs);
5985 gen_load_fpr32(fp1, ft);
5986 gen_helper_float_div_s(fp0, fp0, fp1);
5987 tcg_temp_free_i32(fp1);
5988 gen_store_fpr32(fp0, fd);
5989 tcg_temp_free_i32(fp0);
5996 TCGv_i32 fp0 = tcg_temp_new_i32();
5998 gen_load_fpr32(fp0, fs);
5999 gen_helper_float_sqrt_s(fp0, fp0);
6000 gen_store_fpr32(fp0, fd);
6001 tcg_temp_free_i32(fp0);
6007 TCGv_i32 fp0 = tcg_temp_new_i32();
6009 gen_load_fpr32(fp0, fs);
6010 gen_helper_float_abs_s(fp0, fp0);
6011 gen_store_fpr32(fp0, fd);
6012 tcg_temp_free_i32(fp0);
6018 TCGv_i32 fp0 = tcg_temp_new_i32();
6020 gen_load_fpr32(fp0, fs);
6021 gen_store_fpr32(fp0, fd);
6022 tcg_temp_free_i32(fp0);
6028 TCGv_i32 fp0 = tcg_temp_new_i32();
6030 gen_load_fpr32(fp0, fs);
6031 gen_helper_float_chs_s(fp0, fp0);
6032 gen_store_fpr32(fp0, fd);
6033 tcg_temp_free_i32(fp0);
6038 check_cp1_64bitmode(ctx);
6040 TCGv_i32 fp32 = tcg_temp_new_i32();
6041 TCGv_i64 fp64 = tcg_temp_new_i64();
6043 gen_load_fpr32(fp32, fs);
6044 gen_helper_float_roundl_s(fp64, fp32);
6045 tcg_temp_free_i32(fp32);
6046 gen_store_fpr64(ctx, fp64, fd);
6047 tcg_temp_free_i64(fp64);
6052 check_cp1_64bitmode(ctx);
6054 TCGv_i32 fp32 = tcg_temp_new_i32();
6055 TCGv_i64 fp64 = tcg_temp_new_i64();
6057 gen_load_fpr32(fp32, fs);
6058 gen_helper_float_truncl_s(fp64, fp32);
6059 tcg_temp_free_i32(fp32);
6060 gen_store_fpr64(ctx, fp64, fd);
6061 tcg_temp_free_i64(fp64);
6066 check_cp1_64bitmode(ctx);
6068 TCGv_i32 fp32 = tcg_temp_new_i32();
6069 TCGv_i64 fp64 = tcg_temp_new_i64();
6071 gen_load_fpr32(fp32, fs);
6072 gen_helper_float_ceill_s(fp64, fp32);
6073 tcg_temp_free_i32(fp32);
6074 gen_store_fpr64(ctx, fp64, fd);
6075 tcg_temp_free_i64(fp64);
6080 check_cp1_64bitmode(ctx);
6082 TCGv_i32 fp32 = tcg_temp_new_i32();
6083 TCGv_i64 fp64 = tcg_temp_new_i64();
6085 gen_load_fpr32(fp32, fs);
6086 gen_helper_float_floorl_s(fp64, fp32);
6087 tcg_temp_free_i32(fp32);
6088 gen_store_fpr64(ctx, fp64, fd);
6089 tcg_temp_free_i64(fp64);
6095 TCGv_i32 fp0 = tcg_temp_new_i32();
6097 gen_load_fpr32(fp0, fs);
6098 gen_helper_float_roundw_s(fp0, fp0);
6099 gen_store_fpr32(fp0, fd);
6100 tcg_temp_free_i32(fp0);
6106 TCGv_i32 fp0 = tcg_temp_new_i32();
6108 gen_load_fpr32(fp0, fs);
6109 gen_helper_float_truncw_s(fp0, fp0);
6110 gen_store_fpr32(fp0, fd);
6111 tcg_temp_free_i32(fp0);
6117 TCGv_i32 fp0 = tcg_temp_new_i32();
6119 gen_load_fpr32(fp0, fs);
6120 gen_helper_float_ceilw_s(fp0, fp0);
6121 gen_store_fpr32(fp0, fd);
6122 tcg_temp_free_i32(fp0);
6128 TCGv_i32 fp0 = tcg_temp_new_i32();
6130 gen_load_fpr32(fp0, fs);
6131 gen_helper_float_floorw_s(fp0, fp0);
6132 gen_store_fpr32(fp0, fd);
6133 tcg_temp_free_i32(fp0);
6138 gen_movcf_s(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6143 int l1 = gen_new_label();
6147 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6149 fp0 = tcg_temp_new_i32();
6150 gen_load_fpr32(fp0, fs);
6151 gen_store_fpr32(fp0, fd);
6152 tcg_temp_free_i32(fp0);
6159 int l1 = gen_new_label();
6163 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6164 fp0 = tcg_temp_new_i32();
6165 gen_load_fpr32(fp0, fs);
6166 gen_store_fpr32(fp0, fd);
6167 tcg_temp_free_i32(fp0);
6176 TCGv_i32 fp0 = tcg_temp_new_i32();
6178 gen_load_fpr32(fp0, fs);
6179 gen_helper_float_recip_s(fp0, fp0);
6180 gen_store_fpr32(fp0, fd);
6181 tcg_temp_free_i32(fp0);
6188 TCGv_i32 fp0 = tcg_temp_new_i32();
6190 gen_load_fpr32(fp0, fs);
6191 gen_helper_float_rsqrt_s(fp0, fp0);
6192 gen_store_fpr32(fp0, fd);
6193 tcg_temp_free_i32(fp0);
6198 check_cp1_64bitmode(ctx);
6200 TCGv_i32 fp0 = tcg_temp_new_i32();
6201 TCGv_i32 fp1 = tcg_temp_new_i32();
6203 gen_load_fpr32(fp0, fs);
6204 gen_load_fpr32(fp1, fd);
6205 gen_helper_float_recip2_s(fp0, fp0, fp1);
6206 tcg_temp_free_i32(fp1);
6207 gen_store_fpr32(fp0, fd);
6208 tcg_temp_free_i32(fp0);
6213 check_cp1_64bitmode(ctx);
6215 TCGv_i32 fp0 = tcg_temp_new_i32();
6217 gen_load_fpr32(fp0, fs);
6218 gen_helper_float_recip1_s(fp0, fp0);
6219 gen_store_fpr32(fp0, fd);
6220 tcg_temp_free_i32(fp0);
6225 check_cp1_64bitmode(ctx);
6227 TCGv_i32 fp0 = tcg_temp_new_i32();
6229 gen_load_fpr32(fp0, fs);
6230 gen_helper_float_rsqrt1_s(fp0, fp0);
6231 gen_store_fpr32(fp0, fd);
6232 tcg_temp_free_i32(fp0);
6237 check_cp1_64bitmode(ctx);
6239 TCGv_i32 fp0 = tcg_temp_new_i32();
6240 TCGv_i32 fp1 = tcg_temp_new_i32();
6242 gen_load_fpr32(fp0, fs);
6243 gen_load_fpr32(fp1, ft);
6244 gen_helper_float_rsqrt2_s(fp0, fp0, fp1);
6245 tcg_temp_free_i32(fp1);
6246 gen_store_fpr32(fp0, fd);
6247 tcg_temp_free_i32(fp0);
6252 check_cp1_registers(ctx, fd);
6254 TCGv_i32 fp32 = tcg_temp_new_i32();
6255 TCGv_i64 fp64 = tcg_temp_new_i64();
6257 gen_load_fpr32(fp32, fs);
6258 gen_helper_float_cvtd_s(fp64, fp32);
6259 tcg_temp_free_i32(fp32);
6260 gen_store_fpr64(ctx, fp64, fd);
6261 tcg_temp_free_i64(fp64);
6267 TCGv_i32 fp0 = tcg_temp_new_i32();
6269 gen_load_fpr32(fp0, fs);
6270 gen_helper_float_cvtw_s(fp0, fp0);
6271 gen_store_fpr32(fp0, fd);
6272 tcg_temp_free_i32(fp0);
6277 check_cp1_64bitmode(ctx);
6279 TCGv_i32 fp32 = tcg_temp_new_i32();
6280 TCGv_i64 fp64 = tcg_temp_new_i64();
6282 gen_load_fpr32(fp32, fs);
6283 gen_helper_float_cvtl_s(fp64, fp32);
6284 tcg_temp_free_i32(fp32);
6285 gen_store_fpr64(ctx, fp64, fd);
6286 tcg_temp_free_i64(fp64);
6291 check_cp1_64bitmode(ctx);
6293 TCGv_i64 fp64 = tcg_temp_new_i64();
6294 TCGv_i32 fp32_0 = tcg_temp_new_i32();
6295 TCGv_i32 fp32_1 = tcg_temp_new_i32();
6297 gen_load_fpr32(fp32_0, fs);
6298 gen_load_fpr32(fp32_1, ft);
6299 tcg_gen_concat_i32_i64(fp64, fp32_0, fp32_1);
6300 tcg_temp_free_i32(fp32_1);
6301 tcg_temp_free_i32(fp32_0);
6302 gen_store_fpr64(ctx, fp64, fd);
6303 tcg_temp_free_i64(fp64);
6324 TCGv_i32 fp0 = tcg_temp_new_i32();
6325 TCGv_i32 fp1 = tcg_temp_new_i32();
6327 gen_load_fpr32(fp0, fs);
6328 gen_load_fpr32(fp1, ft);
6329 if (ctx->opcode & (1 << 6)) {
6331 gen_cmpabs_s(func-48, fp0, fp1, cc);
6332 opn = condnames_abs[func-48];
6334 gen_cmp_s(func-48, fp0, fp1, cc);
6335 opn = condnames[func-48];
6337 tcg_temp_free_i32(fp0);
6338 tcg_temp_free_i32(fp1);
6342 check_cp1_registers(ctx, fs | ft | fd);
6344 TCGv_i64 fp0 = tcg_temp_new_i64();
6345 TCGv_i64 fp1 = tcg_temp_new_i64();
6347 gen_load_fpr64(ctx, fp0, fs);
6348 gen_load_fpr64(ctx, fp1, ft);
6349 gen_helper_float_add_d(fp0, fp0, fp1);
6350 tcg_temp_free_i64(fp1);
6351 gen_store_fpr64(ctx, fp0, fd);
6352 tcg_temp_free_i64(fp0);
6358 check_cp1_registers(ctx, fs | ft | fd);
6360 TCGv_i64 fp0 = tcg_temp_new_i64();
6361 TCGv_i64 fp1 = tcg_temp_new_i64();
6363 gen_load_fpr64(ctx, fp0, fs);
6364 gen_load_fpr64(ctx, fp1, ft);
6365 gen_helper_float_sub_d(fp0, fp0, fp1);
6366 tcg_temp_free_i64(fp1);
6367 gen_store_fpr64(ctx, fp0, fd);
6368 tcg_temp_free_i64(fp0);
6374 check_cp1_registers(ctx, fs | ft | fd);
6376 TCGv_i64 fp0 = tcg_temp_new_i64();
6377 TCGv_i64 fp1 = tcg_temp_new_i64();
6379 gen_load_fpr64(ctx, fp0, fs);
6380 gen_load_fpr64(ctx, fp1, ft);
6381 gen_helper_float_mul_d(fp0, fp0, fp1);
6382 tcg_temp_free_i64(fp1);
6383 gen_store_fpr64(ctx, fp0, fd);
6384 tcg_temp_free_i64(fp0);
6390 check_cp1_registers(ctx, fs | ft | fd);
6392 TCGv_i64 fp0 = tcg_temp_new_i64();
6393 TCGv_i64 fp1 = tcg_temp_new_i64();
6395 gen_load_fpr64(ctx, fp0, fs);
6396 gen_load_fpr64(ctx, fp1, ft);
6397 gen_helper_float_div_d(fp0, fp0, fp1);
6398 tcg_temp_free_i64(fp1);
6399 gen_store_fpr64(ctx, fp0, fd);
6400 tcg_temp_free_i64(fp0);
6406 check_cp1_registers(ctx, fs | fd);
6408 TCGv_i64 fp0 = tcg_temp_new_i64();
6410 gen_load_fpr64(ctx, fp0, fs);
6411 gen_helper_float_sqrt_d(fp0, fp0);
6412 gen_store_fpr64(ctx, fp0, fd);
6413 tcg_temp_free_i64(fp0);
6418 check_cp1_registers(ctx, fs | fd);
6420 TCGv_i64 fp0 = tcg_temp_new_i64();
6422 gen_load_fpr64(ctx, fp0, fs);
6423 gen_helper_float_abs_d(fp0, fp0);
6424 gen_store_fpr64(ctx, fp0, fd);
6425 tcg_temp_free_i64(fp0);
6430 check_cp1_registers(ctx, fs | fd);
6432 TCGv_i64 fp0 = tcg_temp_new_i64();
6434 gen_load_fpr64(ctx, fp0, fs);
6435 gen_store_fpr64(ctx, fp0, fd);
6436 tcg_temp_free_i64(fp0);
6441 check_cp1_registers(ctx, fs | fd);
6443 TCGv_i64 fp0 = tcg_temp_new_i64();
6445 gen_load_fpr64(ctx, fp0, fs);
6446 gen_helper_float_chs_d(fp0, fp0);
6447 gen_store_fpr64(ctx, fp0, fd);
6448 tcg_temp_free_i64(fp0);
6453 check_cp1_64bitmode(ctx);
6455 TCGv_i64 fp0 = tcg_temp_new_i64();
6457 gen_load_fpr64(ctx, fp0, fs);
6458 gen_helper_float_roundl_d(fp0, fp0);
6459 gen_store_fpr64(ctx, fp0, fd);
6460 tcg_temp_free_i64(fp0);
6465 check_cp1_64bitmode(ctx);
6467 TCGv_i64 fp0 = tcg_temp_new_i64();
6469 gen_load_fpr64(ctx, fp0, fs);
6470 gen_helper_float_truncl_d(fp0, fp0);
6471 gen_store_fpr64(ctx, fp0, fd);
6472 tcg_temp_free_i64(fp0);
6477 check_cp1_64bitmode(ctx);
6479 TCGv_i64 fp0 = tcg_temp_new_i64();
6481 gen_load_fpr64(ctx, fp0, fs);
6482 gen_helper_float_ceill_d(fp0, fp0);
6483 gen_store_fpr64(ctx, fp0, fd);
6484 tcg_temp_free_i64(fp0);
6489 check_cp1_64bitmode(ctx);
6491 TCGv_i64 fp0 = tcg_temp_new_i64();
6493 gen_load_fpr64(ctx, fp0, fs);
6494 gen_helper_float_floorl_d(fp0, fp0);
6495 gen_store_fpr64(ctx, fp0, fd);
6496 tcg_temp_free_i64(fp0);
6501 check_cp1_registers(ctx, fs);
6503 TCGv_i32 fp32 = tcg_temp_new_i32();
6504 TCGv_i64 fp64 = tcg_temp_new_i64();
6506 gen_load_fpr64(ctx, fp64, fs);
6507 gen_helper_float_roundw_d(fp32, fp64);
6508 tcg_temp_free_i64(fp64);
6509 gen_store_fpr32(fp32, fd);
6510 tcg_temp_free_i32(fp32);
6515 check_cp1_registers(ctx, fs);
6517 TCGv_i32 fp32 = tcg_temp_new_i32();
6518 TCGv_i64 fp64 = tcg_temp_new_i64();
6520 gen_load_fpr64(ctx, fp64, fs);
6521 gen_helper_float_truncw_d(fp32, fp64);
6522 tcg_temp_free_i64(fp64);
6523 gen_store_fpr32(fp32, fd);
6524 tcg_temp_free_i32(fp32);
6529 check_cp1_registers(ctx, fs);
6531 TCGv_i32 fp32 = tcg_temp_new_i32();
6532 TCGv_i64 fp64 = tcg_temp_new_i64();
6534 gen_load_fpr64(ctx, fp64, fs);
6535 gen_helper_float_ceilw_d(fp32, fp64);
6536 tcg_temp_free_i64(fp64);
6537 gen_store_fpr32(fp32, fd);
6538 tcg_temp_free_i32(fp32);
6543 check_cp1_registers(ctx, fs);
6545 TCGv_i32 fp32 = tcg_temp_new_i32();
6546 TCGv_i64 fp64 = tcg_temp_new_i64();
6548 gen_load_fpr64(ctx, fp64, fs);
6549 gen_helper_float_floorw_d(fp32, fp64);
6550 tcg_temp_free_i64(fp64);
6551 gen_store_fpr32(fp32, fd);
6552 tcg_temp_free_i32(fp32);
6557 gen_movcf_d(ctx, fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6562 int l1 = gen_new_label();
6566 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6568 fp0 = tcg_temp_new_i64();
6569 gen_load_fpr64(ctx, fp0, fs);
6570 gen_store_fpr64(ctx, fp0, fd);
6571 tcg_temp_free_i64(fp0);
6578 int l1 = gen_new_label();
6582 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6583 fp0 = tcg_temp_new_i64();
6584 gen_load_fpr64(ctx, fp0, fs);
6585 gen_store_fpr64(ctx, fp0, fd);
6586 tcg_temp_free_i64(fp0);
6593 check_cp1_64bitmode(ctx);
6595 TCGv_i64 fp0 = tcg_temp_new_i64();
6597 gen_load_fpr64(ctx, fp0, fs);
6598 gen_helper_float_recip_d(fp0, fp0);
6599 gen_store_fpr64(ctx, fp0, fd);
6600 tcg_temp_free_i64(fp0);
6605 check_cp1_64bitmode(ctx);
6607 TCGv_i64 fp0 = tcg_temp_new_i64();
6609 gen_load_fpr64(ctx, fp0, fs);
6610 gen_helper_float_rsqrt_d(fp0, fp0);
6611 gen_store_fpr64(ctx, fp0, fd);
6612 tcg_temp_free_i64(fp0);
6617 check_cp1_64bitmode(ctx);
6619 TCGv_i64 fp0 = tcg_temp_new_i64();
6620 TCGv_i64 fp1 = tcg_temp_new_i64();
6622 gen_load_fpr64(ctx, fp0, fs);
6623 gen_load_fpr64(ctx, fp1, ft);
6624 gen_helper_float_recip2_d(fp0, fp0, fp1);
6625 tcg_temp_free_i64(fp1);
6626 gen_store_fpr64(ctx, fp0, fd);
6627 tcg_temp_free_i64(fp0);
6632 check_cp1_64bitmode(ctx);
6634 TCGv_i64 fp0 = tcg_temp_new_i64();
6636 gen_load_fpr64(ctx, fp0, fs);
6637 gen_helper_float_recip1_d(fp0, fp0);
6638 gen_store_fpr64(ctx, fp0, fd);
6639 tcg_temp_free_i64(fp0);
6644 check_cp1_64bitmode(ctx);
6646 TCGv_i64 fp0 = tcg_temp_new_i64();
6648 gen_load_fpr64(ctx, fp0, fs);
6649 gen_helper_float_rsqrt1_d(fp0, fp0);
6650 gen_store_fpr64(ctx, fp0, fd);
6651 tcg_temp_free_i64(fp0);
6656 check_cp1_64bitmode(ctx);
6658 TCGv_i64 fp0 = tcg_temp_new_i64();
6659 TCGv_i64 fp1 = tcg_temp_new_i64();
6661 gen_load_fpr64(ctx, fp0, fs);
6662 gen_load_fpr64(ctx, fp1, ft);
6663 gen_helper_float_rsqrt2_d(fp0, fp0, fp1);
6664 tcg_temp_free_i64(fp1);
6665 gen_store_fpr64(ctx, fp0, fd);
6666 tcg_temp_free_i64(fp0);
6687 TCGv_i64 fp0 = tcg_temp_new_i64();
6688 TCGv_i64 fp1 = tcg_temp_new_i64();
6690 gen_load_fpr64(ctx, fp0, fs);
6691 gen_load_fpr64(ctx, fp1, ft);
6692 if (ctx->opcode & (1 << 6)) {
6694 check_cp1_registers(ctx, fs | ft);
6695 gen_cmpabs_d(func-48, fp0, fp1, cc);
6696 opn = condnames_abs[func-48];
6698 check_cp1_registers(ctx, fs | ft);
6699 gen_cmp_d(func-48, fp0, fp1, cc);
6700 opn = condnames[func-48];
6702 tcg_temp_free_i64(fp0);
6703 tcg_temp_free_i64(fp1);
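/* FP compare above: the function field runs from 48 to 63 for C.cond.D,
   so func-48 selects one of the 16 compare conditions and the matching
   entry in condnames[]/condnames_abs[]; opcode bit 6 picks the MIPS-3D
   absolute-value compare (CABS.cond.D) variant. */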
6707 check_cp1_registers(ctx, fs);
6709 TCGv_i32 fp32 = tcg_temp_new_i32();
6710 TCGv_i64 fp64 = tcg_temp_new_i64();
6712 gen_load_fpr64(ctx, fp64, fs);
6713 gen_helper_float_cvts_d(fp32, fp64);
6714 tcg_temp_free_i64(fp64);
6715 gen_store_fpr32(fp32, fd);
6716 tcg_temp_free_i32(fp32);
6721 check_cp1_registers(ctx, fs);
6723 TCGv_i32 fp32 = tcg_temp_new_i32();
6724 TCGv_i64 fp64 = tcg_temp_new_i64();
6726 gen_load_fpr64(ctx, fp64, fs);
6727 gen_helper_float_cvtw_d(fp32, fp64);
6728 tcg_temp_free_i64(fp64);
6729 gen_store_fpr32(fp32, fd);
6730 tcg_temp_free_i32(fp32);
6735 check_cp1_64bitmode(ctx);
6737 TCGv_i64 fp0 = tcg_temp_new_i64();
6739 gen_load_fpr64(ctx, fp0, fs);
6740 gen_helper_float_cvtl_d(fp0, fp0);
6741 gen_store_fpr64(ctx, fp0, fd);
6742 tcg_temp_free_i64(fp0);
6748 TCGv_i32 fp0 = tcg_temp_new_i32();
6750 gen_load_fpr32(fp0, fs);
6751 gen_helper_float_cvts_w(fp0, fp0);
6752 gen_store_fpr32(fp0, fd);
6753 tcg_temp_free_i32(fp0);
6758 check_cp1_registers(ctx, fd);
6760 TCGv_i32 fp32 = tcg_temp_new_i32();
6761 TCGv_i64 fp64 = tcg_temp_new_i64();
6763 gen_load_fpr32(fp32, fs);
6764 gen_helper_float_cvtd_w(fp64, fp32);
6765 tcg_temp_free_i32(fp32);
6766 gen_store_fpr64(ctx, fp64, fd);
6767 tcg_temp_free_i64(fp64);
6772 check_cp1_64bitmode(ctx);
6774 TCGv_i32 fp32 = tcg_temp_new_i32();
6775 TCGv_i64 fp64 = tcg_temp_new_i64();
6777 gen_load_fpr64(ctx, fp64, fs);
6778 gen_helper_float_cvts_l(fp32, fp64);
6779 tcg_temp_free_i64(fp64);
6780 gen_store_fpr32(fp32, fd);
6781 tcg_temp_free_i32(fp32);
6786 check_cp1_64bitmode(ctx);
6788 TCGv_i64 fp0 = tcg_temp_new_i64();
6790 gen_load_fpr64(ctx, fp0, fs);
6791 gen_helper_float_cvtd_l(fp0, fp0);
6792 gen_store_fpr64(ctx, fp0, fd);
6793 tcg_temp_free_i64(fp0);
6798 check_cp1_64bitmode(ctx);
6800 TCGv_i64 fp0 = tcg_temp_new_i64();
6802 gen_load_fpr64(ctx, fp0, fs);
6803 gen_helper_float_cvtps_pw(fp0, fp0);
6804 gen_store_fpr64(ctx, fp0, fd);
6805 tcg_temp_free_i64(fp0);
6810 check_cp1_64bitmode(ctx);
6812 TCGv_i64 fp0 = tcg_temp_new_i64();
6813 TCGv_i64 fp1 = tcg_temp_new_i64();
6815 gen_load_fpr64(ctx, fp0, fs);
6816 gen_load_fpr64(ctx, fp1, ft);
6817 gen_helper_float_add_ps(fp0, fp0, fp1);
6818 tcg_temp_free_i64(fp1);
6819 gen_store_fpr64(ctx, fp0, fd);
6820 tcg_temp_free_i64(fp0);
6825 check_cp1_64bitmode(ctx);
6827 TCGv_i64 fp0 = tcg_temp_new_i64();
6828 TCGv_i64 fp1 = tcg_temp_new_i64();
6830 gen_load_fpr64(ctx, fp0, fs);
6831 gen_load_fpr64(ctx, fp1, ft);
6832 gen_helper_float_sub_ps(fp0, fp0, fp1);
6833 tcg_temp_free_i64(fp1);
6834 gen_store_fpr64(ctx, fp0, fd);
6835 tcg_temp_free_i64(fp0);
6840 check_cp1_64bitmode(ctx);
6842 TCGv_i64 fp0 = tcg_temp_new_i64();
6843 TCGv_i64 fp1 = tcg_temp_new_i64();
6845 gen_load_fpr64(ctx, fp0, fs);
6846 gen_load_fpr64(ctx, fp1, ft);
6847 gen_helper_float_mul_ps(fp0, fp0, fp1);
6848 tcg_temp_free_i64(fp1);
6849 gen_store_fpr64(ctx, fp0, fd);
6850 tcg_temp_free_i64(fp0);
6855 check_cp1_64bitmode(ctx);
6857 TCGv_i64 fp0 = tcg_temp_new_i64();
6859 gen_load_fpr64(ctx, fp0, fs);
6860 gen_helper_float_abs_ps(fp0, fp0);
6861 gen_store_fpr64(ctx, fp0, fd);
6862 tcg_temp_free_i64(fp0);
6867 check_cp1_64bitmode(ctx);
6869 TCGv_i64 fp0 = tcg_temp_new_i64();
6871 gen_load_fpr64(ctx, fp0, fs);
6872 gen_store_fpr64(ctx, fp0, fd);
6873 tcg_temp_free_i64(fp0);
6878 check_cp1_64bitmode(ctx);
6880 TCGv_i64 fp0 = tcg_temp_new_i64();
6882 gen_load_fpr64(ctx, fp0, fs);
6883 gen_helper_float_chs_ps(fp0, fp0);
6884 gen_store_fpr64(ctx, fp0, fd);
6885 tcg_temp_free_i64(fp0);
6890 check_cp1_64bitmode(ctx);
6891 gen_movcf_ps(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
6895 check_cp1_64bitmode(ctx);
6897 int l1 = gen_new_label();
6901 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
6902 fp0 = tcg_temp_new_i64();
6903 gen_load_fpr64(ctx, fp0, fs);
6904 gen_store_fpr64(ctx, fp0, fd);
6905 tcg_temp_free_i64(fp0);
6911 check_cp1_64bitmode(ctx);
6913 int l1 = gen_new_label();
6917 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
6918 fp0 = tcg_temp_new_i64();
6919 gen_load_fpr64(ctx, fp0, fs);
6920 gen_store_fpr64(ctx, fp0, fd);
6921 tcg_temp_free_i64(fp0);
6928 check_cp1_64bitmode(ctx);
6930 TCGv_i64 fp0 = tcg_temp_new_i64();
6931 TCGv_i64 fp1 = tcg_temp_new_i64();
6933 gen_load_fpr64(ctx, fp0, ft);
6934 gen_load_fpr64(ctx, fp1, fs);
6935 gen_helper_float_addr_ps(fp0, fp0, fp1);
6936 tcg_temp_free_i64(fp1);
6937 gen_store_fpr64(ctx, fp0, fd);
6938 tcg_temp_free_i64(fp0);
6943 check_cp1_64bitmode(ctx);
6945 TCGv_i64 fp0 = tcg_temp_new_i64();
6946 TCGv_i64 fp1 = tcg_temp_new_i64();
6948 gen_load_fpr64(ctx, fp0, ft);
6949 gen_load_fpr64(ctx, fp1, fs);
6950 gen_helper_float_mulr_ps(fp0, fp0, fp1);
6951 tcg_temp_free_i64(fp1);
6952 gen_store_fpr64(ctx, fp0, fd);
6953 tcg_temp_free_i64(fp0);
6958 check_cp1_64bitmode(ctx);
6960 TCGv_i64 fp0 = tcg_temp_new_i64();
6961 TCGv_i64 fp1 = tcg_temp_new_i64();
6963 gen_load_fpr64(ctx, fp0, fs);
6964 gen_load_fpr64(ctx, fp1, fd);
6965 gen_helper_float_recip2_ps(fp0, fp0, fp1);
6966 tcg_temp_free_i64(fp1);
6967 gen_store_fpr64(ctx, fp0, fd);
6968 tcg_temp_free_i64(fp0);
6973 check_cp1_64bitmode(ctx);
6975 TCGv_i64 fp0 = tcg_temp_new_i64();
6977 gen_load_fpr64(ctx, fp0, fs);
6978 gen_helper_float_recip1_ps(fp0, fp0);
6979 gen_store_fpr64(ctx, fp0, fd);
6980 tcg_temp_free_i64(fp0);
6985 check_cp1_64bitmode(ctx);
6987 TCGv_i64 fp0 = tcg_temp_new_i64();
6989 gen_load_fpr64(ctx, fp0, fs);
6990 gen_helper_float_rsqrt1_ps(fp0, fp0);
6991 gen_store_fpr64(ctx, fp0, fd);
6992 tcg_temp_free_i64(fp0);
6997 check_cp1_64bitmode(ctx);
6999 TCGv_i64 fp0 = tcg_temp_new_i64();
7000 TCGv_i64 fp1 = tcg_temp_new_i64();
7002 gen_load_fpr64(ctx, fp0, fs);
7003 gen_load_fpr64(ctx, fp1, ft);
7004 gen_helper_float_rsqrt2_ps(fp0, fp0, fp1);
7005 tcg_temp_free_i64(fp1);
7006 gen_store_fpr64(ctx, fp0, fd);
7007 tcg_temp_free_i64(fp0);
7012 check_cp1_64bitmode(ctx);
7014 TCGv_i32 fp0 = tcg_temp_new_i32();
7016 gen_load_fpr32h(fp0, fs);
7017 gen_helper_float_cvts_pu(fp0, fp0);
7018 gen_store_fpr32(fp0, fd);
7019 tcg_temp_free_i32(fp0);
7024 check_cp1_64bitmode(ctx);
7026 TCGv_i64 fp0 = tcg_temp_new_i64();
7028 gen_load_fpr64(ctx, fp0, fs);
7029 gen_helper_float_cvtpw_ps(fp0, fp0);
7030 gen_store_fpr64(ctx, fp0, fd);
7031 tcg_temp_free_i64(fp0);
7036 check_cp1_64bitmode(ctx);
7038 TCGv_i32 fp0 = tcg_temp_new_i32();
7040 gen_load_fpr32(fp0, fs);
7041 gen_helper_float_cvts_pl(fp0, fp0);
7042 gen_store_fpr32(fp0, fd);
7043 tcg_temp_free_i32(fp0);
7048 check_cp1_64bitmode(ctx);
7050 TCGv_i32 fp0 = tcg_temp_new_i32();
7051 TCGv_i32 fp1 = tcg_temp_new_i32();
7053 gen_load_fpr32(fp0, fs);
7054 gen_load_fpr32(fp1, ft);
7055 gen_store_fpr32h(fp0, fd);
7056 gen_store_fpr32(fp1, fd);
7057 tcg_temp_free_i32(fp0);
7058 tcg_temp_free_i32(fp1);
7063 check_cp1_64bitmode(ctx);
7065 TCGv_i32 fp0 = tcg_temp_new_i32();
7066 TCGv_i32 fp1 = tcg_temp_new_i32();
7068 gen_load_fpr32(fp0, fs);
7069 gen_load_fpr32h(fp1, ft);
7070 gen_store_fpr32(fp1, fd);
7071 gen_store_fpr32h(fp0, fd);
7072 tcg_temp_free_i32(fp0);
7073 tcg_temp_free_i32(fp1);
7078 check_cp1_64bitmode(ctx);
7080 TCGv_i32 fp0 = tcg_temp_new_i32();
7081 TCGv_i32 fp1 = tcg_temp_new_i32();
7083 gen_load_fpr32h(fp0, fs);
7084 gen_load_fpr32(fp1, ft);
7085 gen_store_fpr32(fp1, fd);
7086 gen_store_fpr32h(fp0, fd);
7087 tcg_temp_free_i32(fp0);
7088 tcg_temp_free_i32(fp1);
7093 check_cp1_64bitmode(ctx);
7095 TCGv_i32 fp0 = tcg_temp_new_i32();
7096 TCGv_i32 fp1 = tcg_temp_new_i32();
7098 gen_load_fpr32h(fp0, fs);
7099 gen_load_fpr32h(fp1, ft);
7100 gen_store_fpr32(fp1, fd);
7101 gen_store_fpr32h(fp0, fd);
7102 tcg_temp_free_i32(fp0);
7103 tcg_temp_free_i32(fp1);
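/* PLL.PS, PLU.PS, PUL.PS and PUU.PS above assemble a new paired-single
   value in fd from one single of fs and one single of ft, hence the mix
   of gen_load_fpr32/gen_load_fpr32h and the corresponding store calls
   for the lower and upper halves. */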
7123 check_cp1_64bitmode(ctx);
7125 TCGv_i64 fp0 = tcg_temp_new_i64();
7126 TCGv_i64 fp1 = tcg_temp_new_i64();
7128 gen_load_fpr64(ctx, fp0, fs);
7129 gen_load_fpr64(ctx, fp1, ft);
7130 if (ctx->opcode & (1 << 6)) {
7131 gen_cmpabs_ps(func-48, fp0, fp1, cc);
7132 opn = condnames_abs[func-48];
7134 gen_cmp_ps(func-48, fp0, fp1, cc);
7135 opn = condnames[func-48];
7137 tcg_temp_free_i64(fp0);
7138 tcg_temp_free_i64(fp1);
7143 generate_exception (ctx, EXCP_RI);
7148 MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);
7151 MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);
7154 MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
7159 /* Coprocessor 3 (FPU) */
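/* gen_flt3_ldst translates the indexed CP1 loads and stores (LWXC1,
   LDXC1, LUXC1, SWXC1, SDXC1, SUXC1): the effective address is
   GPR[base] + GPR[index], and the LUXC1/SUXC1 cases additionally force
   8-byte alignment by masking the low address bits. */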
7160 static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
7161 int fd, int fs, int base, int index)
7163 const char *opn = "extended float load/store";
7165 TCGv t0 = tcg_temp_new();
7168 gen_load_gpr(t0, index);
7169 } else if (index == 0) {
7170 gen_load_gpr(t0, base);
7172 gen_load_gpr(t0, index);
7173 gen_op_addr_add(ctx, t0, cpu_gpr[base], t0);
7175 /* Don't do NOP if destination is zero: we must perform the actual memory access. */
7177 save_cpu_state(ctx, 0);
7182 TCGv_i32 fp0 = tcg_temp_new_i32();
7184 tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
7185 tcg_gen_trunc_tl_i32(fp0, t0);
7186 gen_store_fpr32(fp0, fd);
7187 tcg_temp_free_i32(fp0);
7193 check_cp1_registers(ctx, fd);
7195 TCGv_i64 fp0 = tcg_temp_new_i64();
7197 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7198 gen_store_fpr64(ctx, fp0, fd);
7199 tcg_temp_free_i64(fp0);
7204 check_cp1_64bitmode(ctx);
7205 tcg_gen_andi_tl(t0, t0, ~0x7);
7207 TCGv_i64 fp0 = tcg_temp_new_i64();
7209 tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
7210 gen_store_fpr64(ctx, fp0, fd);
7211 tcg_temp_free_i64(fp0);
7218 TCGv_i32 fp0 = tcg_temp_new_i32();
7219 TCGv t1 = tcg_temp_new();
7221 gen_load_fpr32(fp0, fs);
7222 tcg_gen_extu_i32_tl(t1, fp0);
7223 tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
7224 tcg_temp_free_i32(fp0);
7232 check_cp1_registers(ctx, fs);
7234 TCGv_i64 fp0 = tcg_temp_new_i64();
7236 gen_load_fpr64(ctx, fp0, fs);
7237 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7238 tcg_temp_free_i64(fp0);
7244 check_cp1_64bitmode(ctx);
7245 tcg_gen_andi_tl(t0, t0, ~0x7);
7247 TCGv_i64 fp0 = tcg_temp_new_i64();
7249 gen_load_fpr64(ctx, fp0, fs);
7250 tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
7251 tcg_temp_free_i64(fp0);
7258 MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
7259 regnames[index], regnames[base]);
7262 static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
7263 int fd, int fr, int fs, int ft)
7265 const char *opn = "flt3_arith";
7269 check_cp1_64bitmode(ctx);
7271 TCGv t0 = tcg_temp_local_new();
7272 TCGv_i32 fp = tcg_temp_new_i32();
7273 TCGv_i32 fph = tcg_temp_new_i32();
7274 int l1 = gen_new_label();
7275 int l2 = gen_new_label();
7277 gen_load_gpr(t0, fr);
7278 tcg_gen_andi_tl(t0, t0, 0x7);
7280 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
7281 gen_load_fpr32(fp, fs);
7282 gen_load_fpr32h(fph, fs);
7283 gen_store_fpr32(fp, fd);
7284 gen_store_fpr32h(fph, fd);
7287 tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
7289 #ifdef TARGET_WORDS_BIGENDIAN
7290 gen_load_fpr32(fp, fs);
7291 gen_load_fpr32h(fph, ft);
7292 gen_store_fpr32h(fp, fd);
7293 gen_store_fpr32(fph, fd);
7295 gen_load_fpr32h(fph, fs);
7296 gen_load_fpr32(fp, ft);
7297 gen_store_fpr32(fph, fd);
7298 gen_store_fpr32h(fp, fd);
7301 tcg_temp_free_i32(fp);
7302 tcg_temp_free_i32(fph);
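/* The block above is ALNV.PS: when the low three bits of the selected
   GPR are 0, fs is copied to fd unchanged; when they are 4, fd is
   assembled from one half of fs and one half of ft, chosen according
   to the target endianness. */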
7309 TCGv_i32 fp0 = tcg_temp_new_i32();
7310 TCGv_i32 fp1 = tcg_temp_new_i32();
7311 TCGv_i32 fp2 = tcg_temp_new_i32();
7313 gen_load_fpr32(fp0, fs);
7314 gen_load_fpr32(fp1, ft);
7315 gen_load_fpr32(fp2, fr);
7316 gen_helper_float_muladd_s(fp2, fp0, fp1, fp2);
7317 tcg_temp_free_i32(fp0);
7318 tcg_temp_free_i32(fp1);
7319 gen_store_fpr32(fp2, fd);
7320 tcg_temp_free_i32(fp2);
7326 check_cp1_registers(ctx, fd | fs | ft | fr);
7328 TCGv_i64 fp0 = tcg_temp_new_i64();
7329 TCGv_i64 fp1 = tcg_temp_new_i64();
7330 TCGv_i64 fp2 = tcg_temp_new_i64();
7332 gen_load_fpr64(ctx, fp0, fs);
7333 gen_load_fpr64(ctx, fp1, ft);
7334 gen_load_fpr64(ctx, fp2, fr);
7335 gen_helper_float_muladd_d(fp2, fp0, fp1, fp2);
7336 tcg_temp_free_i64(fp0);
7337 tcg_temp_free_i64(fp1);
7338 gen_store_fpr64(ctx, fp2, fd);
7339 tcg_temp_free_i64(fp2);
7344 check_cp1_64bitmode(ctx);
7346 TCGv_i64 fp0 = tcg_temp_new_i64();
7347 TCGv_i64 fp1 = tcg_temp_new_i64();
7348 TCGv_i64 fp2 = tcg_temp_new_i64();
7350 gen_load_fpr64(ctx, fp0, fs);
7351 gen_load_fpr64(ctx, fp1, ft);
7352 gen_load_fpr64(ctx, fp2, fr);
7353 gen_helper_float_muladd_ps(fp2, fp0, fp1, fp2);
7354 tcg_temp_free_i64(fp0);
7355 tcg_temp_free_i64(fp1);
7356 gen_store_fpr64(ctx, fp2, fd);
7357 tcg_temp_free_i64(fp2);
7364 TCGv_i32 fp0 = tcg_temp_new_i32();
7365 TCGv_i32 fp1 = tcg_temp_new_i32();
7366 TCGv_i32 fp2 = tcg_temp_new_i32();
7368 gen_load_fpr32(fp0, fs);
7369 gen_load_fpr32(fp1, ft);
7370 gen_load_fpr32(fp2, fr);
7371 gen_helper_float_mulsub_s(fp2, fp0, fp1, fp2);
7372 tcg_temp_free_i32(fp0);
7373 tcg_temp_free_i32(fp1);
7374 gen_store_fpr32(fp2, fd);
7375 tcg_temp_free_i32(fp2);
7381 check_cp1_registers(ctx, fd | fs | ft | fr);
7383 TCGv_i64 fp0 = tcg_temp_new_i64();
7384 TCGv_i64 fp1 = tcg_temp_new_i64();
7385 TCGv_i64 fp2 = tcg_temp_new_i64();
7387 gen_load_fpr64(ctx, fp0, fs);
7388 gen_load_fpr64(ctx, fp1, ft);
7389 gen_load_fpr64(ctx, fp2, fr);
7390 gen_helper_float_mulsub_d(fp2, fp0, fp1, fp2);
7391 tcg_temp_free_i64(fp0);
7392 tcg_temp_free_i64(fp1);
7393 gen_store_fpr64(ctx, fp2, fd);
7394 tcg_temp_free_i64(fp2);
7399 check_cp1_64bitmode(ctx);
7401 TCGv_i64 fp0 = tcg_temp_new_i64();
7402 TCGv_i64 fp1 = tcg_temp_new_i64();
7403 TCGv_i64 fp2 = tcg_temp_new_i64();
7405 gen_load_fpr64(ctx, fp0, fs);
7406 gen_load_fpr64(ctx, fp1, ft);
7407 gen_load_fpr64(ctx, fp2, fr);
7408 gen_helper_float_mulsub_ps(fp2, fp0, fp1, fp2);
7409 tcg_temp_free_i64(fp0);
7410 tcg_temp_free_i64(fp1);
7411 gen_store_fpr64(ctx, fp2, fd);
7412 tcg_temp_free_i64(fp2);
7419 TCGv_i32 fp0 = tcg_temp_new_i32();
7420 TCGv_i32 fp1 = tcg_temp_new_i32();
7421 TCGv_i32 fp2 = tcg_temp_new_i32();
7423 gen_load_fpr32(fp0, fs);
7424 gen_load_fpr32(fp1, ft);
7425 gen_load_fpr32(fp2, fr);
7426 gen_helper_float_nmuladd_s(fp2, fp0, fp1, fp2);
7427 tcg_temp_free_i32(fp0);
7428 tcg_temp_free_i32(fp1);
7429 gen_store_fpr32(fp2, fd);
7430 tcg_temp_free_i32(fp2);
7436 check_cp1_registers(ctx, fd | fs | ft | fr);
7438 TCGv_i64 fp0 = tcg_temp_new_i64();
7439 TCGv_i64 fp1 = tcg_temp_new_i64();
7440 TCGv_i64 fp2 = tcg_temp_new_i64();
7442 gen_load_fpr64(ctx, fp0, fs);
7443 gen_load_fpr64(ctx, fp1, ft);
7444 gen_load_fpr64(ctx, fp2, fr);
7445 gen_helper_float_nmuladd_d(fp2, fp0, fp1, fp2);
7446 tcg_temp_free_i64(fp0);
7447 tcg_temp_free_i64(fp1);
7448 gen_store_fpr64(ctx, fp2, fd);
7449 tcg_temp_free_i64(fp2);
7454 check_cp1_64bitmode(ctx);
7456 TCGv_i64 fp0 = tcg_temp_new_i64();
7457 TCGv_i64 fp1 = tcg_temp_new_i64();
7458 TCGv_i64 fp2 = tcg_temp_new_i64();
7460 gen_load_fpr64(ctx, fp0, fs);
7461 gen_load_fpr64(ctx, fp1, ft);
7462 gen_load_fpr64(ctx, fp2, fr);
7463 gen_helper_float_nmuladd_ps(fp2, fp0, fp1, fp2);
7464 tcg_temp_free_i64(fp0);
7465 tcg_temp_free_i64(fp1);
7466 gen_store_fpr64(ctx, fp2, fd);
7467 tcg_temp_free_i64(fp2);
7474 TCGv_i32 fp0 = tcg_temp_new_i32();
7475 TCGv_i32 fp1 = tcg_temp_new_i32();
7476 TCGv_i32 fp2 = tcg_temp_new_i32();
7478 gen_load_fpr32(fp0, fs);
7479 gen_load_fpr32(fp1, ft);
7480 gen_load_fpr32(fp2, fr);
7481 gen_helper_float_nmulsub_s(fp2, fp0, fp1, fp2);
7482 tcg_temp_free_i32(fp0);
7483 tcg_temp_free_i32(fp1);
7484 gen_store_fpr32(fp2, fd);
7485 tcg_temp_free_i32(fp2);
7491 check_cp1_registers(ctx, fd | fs | ft | fr);
7493 TCGv_i64 fp0 = tcg_temp_new_i64();
7494 TCGv_i64 fp1 = tcg_temp_new_i64();
7495 TCGv_i64 fp2 = tcg_temp_new_i64();
7497 gen_load_fpr64(ctx, fp0, fs);
7498 gen_load_fpr64(ctx, fp1, ft);
7499 gen_load_fpr64(ctx, fp2, fr);
7500 gen_helper_float_nmulsub_d(fp2, fp0, fp1, fp2);
7501 tcg_temp_free_i64(fp0);
7502 tcg_temp_free_i64(fp1);
7503 gen_store_fpr64(ctx, fp2, fd);
7504 tcg_temp_free_i64(fp2);
7509 check_cp1_64bitmode(ctx);
7511 TCGv_i64 fp0 = tcg_temp_new_i64();
7512 TCGv_i64 fp1 = tcg_temp_new_i64();
7513 TCGv_i64 fp2 = tcg_temp_new_i64();
7515 gen_load_fpr64(ctx, fp0, fs);
7516 gen_load_fpr64(ctx, fp1, ft);
7517 gen_load_fpr64(ctx, fp2, fr);
7518 gen_helper_float_nmulsub_ps(fp2, fp0, fp1, fp2);
7519 tcg_temp_free_i64(fp0);
7520 tcg_temp_free_i64(fp1);
7521 gen_store_fpr64(ctx, fp2, fd);
7522 tcg_temp_free_i64(fp2);
7528 generate_exception (ctx, EXCP_RI);
7531 MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
7532 fregnames[fs], fregnames[ft]);
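/* handle_delay_slot is called once the instruction in a branch delay
   slot has been translated: it clears the branch hflags and, depending
   on the pending branch type, either jumps straight to the target,
   emits both the taken and not-taken paths of a conditional branch via
   gen_goto_tb, or copies btarget into the PC for register branches. */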
7535 static void handle_delay_slot (CPUState *env, DisasContext *ctx, int insn_bytes)
7538 if (ctx->hflags & MIPS_HFLAG_BMASK) {
7539 int hflags = ctx->hflags & MIPS_HFLAG_BMASK;
7540 /* Branch completion */
7541 ctx->hflags &= ~MIPS_HFLAG_BMASK;
7542 ctx->bstate = BS_BRANCH;
7543 save_cpu_state(ctx, 0);
7544 /* FIXME: Need to clear can_do_io. */
7547 /* unconditional branch */
7548 MIPS_DEBUG("unconditional branch");
7549 gen_goto_tb(ctx, 0, ctx->btarget);
7552 /* blikely taken case */
7553 MIPS_DEBUG("blikely branch taken");
7554 gen_goto_tb(ctx, 0, ctx->btarget);
7557 /* Conditional branch */
7558 MIPS_DEBUG("conditional branch");
7560 int l1 = gen_new_label();
7562 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
7563 gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
7565 gen_goto_tb(ctx, 0, ctx->btarget);
7569 /* unconditional branch to register */
7570 MIPS_DEBUG("branch to register");
7571 tcg_gen_mov_tl(cpu_PC, btarget);
7572 if (ctx->singlestep_enabled) {
7573 save_cpu_state(ctx, 0);
7574 gen_helper_0i(raise_exception, EXCP_DEBUG);
7579 MIPS_DEBUG("unknown branch");
7585 /* ISA extensions (ASEs) */
7586 /* MIPS16 extension to MIPS32 */
7587 /* SmartMIPS extension to MIPS32 */
7589 #if defined(TARGET_MIPS64)
7591 /* MDMX extension to MIPS64 */
7595 static void decode_opc (CPUState *env, DisasContext *ctx, int *is_branch)
7599 uint32_t op, op1, op2;
7602 /* make sure instructions are on a word boundary */
7603 if (ctx->pc & 0x3) {
7604 env->CP0_BadVAddr = ctx->pc;
7605 generate_exception(ctx, EXCP_AdEL);
7609 /* Handle blikely not taken case */
7610 if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
7611 int l1 = gen_new_label();
7613 MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
7614 tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
7615 tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
7616 gen_goto_tb(ctx, 1, ctx->pc + 4);
7620 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
7621 tcg_gen_debug_insn_start(ctx->pc);
7623 op = MASK_OP_MAJOR(ctx->opcode);
7624 rs = (ctx->opcode >> 21) & 0x1f;
7625 rt = (ctx->opcode >> 16) & 0x1f;
7626 rd = (ctx->opcode >> 11) & 0x1f;
7627 sa = (ctx->opcode >> 6) & 0x1f;
7628 imm = (int16_t)ctx->opcode;
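/* Field layout reminder: op is bits 31..26, rs bits 25..21, rt bits
   20..16, rd bits 15..11, sa bits 10..6, and imm is the sign-extended
   low 16 bits.  For example 0x00221821 ("addu $3, $1, $2") decodes as
   op=SPECIAL, rs=1, rt=2, rd=3, sa=0 with function code 0x21. */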
7631 op1 = MASK_SPECIAL(ctx->opcode);
7633 case OPC_SLL: /* Shift with immediate */
7635 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7638 switch ((ctx->opcode >> 21) & 0x1f) {
7640 /* rotr is decoded as srl on non-R2 CPUs */
7641 if (env->insn_flags & ISA_MIPS32R2) {
7646 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7649 generate_exception(ctx, EXCP_RI);
7653 case OPC_MOVN: /* Conditional move */
7655 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7656 gen_cond_move(env, op1, rd, rs, rt);
7658 case OPC_ADD ... OPC_SUBU:
7659 gen_arith(env, ctx, op1, rd, rs, rt);
7661 case OPC_SLLV: /* Shifts */
7663 gen_shift(env, ctx, op1, rd, rs, rt);
7666 switch ((ctx->opcode >> 6) & 0x1f) {
7668 /* rotrv is decoded as srlv on non-R2 CPUs */
7669 if (env->insn_flags & ISA_MIPS32R2) {
7674 gen_shift(env, ctx, op1, rd, rs, rt);
7677 generate_exception(ctx, EXCP_RI);
7681 case OPC_SLT: /* Set on less than */
7683 gen_slt(env, op1, rd, rs, rt);
7685 case OPC_AND: /* Logic*/
7689 gen_logic(env, op1, rd, rs, rt);
7691 case OPC_MULT ... OPC_DIVU:
7693 check_insn(env, ctx, INSN_VR54XX);
7694 op1 = MASK_MUL_VR54XX(ctx->opcode);
7695 gen_mul_vr54xx(ctx, op1, rd, rs, rt);
7697 gen_muldiv(ctx, op1, rs, rt);
7699 case OPC_JR ... OPC_JALR:
7700 gen_compute_branch(ctx, op1, 4, rs, rd, sa);
7703 case OPC_TGE ... OPC_TEQ: /* Traps */
7705 gen_trap(ctx, op1, rs, rt, -1);
7707 case OPC_MFHI: /* Move from HI/LO */
7709 gen_HILO(ctx, op1, rd);
7712 case OPC_MTLO: /* Move to HI/LO */
7713 gen_HILO(ctx, op1, rs);
7715 case OPC_PMON: /* Pmon entry point, also R4010 selsl */
7716 #ifdef MIPS_STRICT_STANDARD
7717 MIPS_INVAL("PMON / selsl");
7718 generate_exception(ctx, EXCP_RI);
7720 gen_helper_0i(pmon, sa);
7724 generate_exception(ctx, EXCP_SYSCALL);
7725 ctx->bstate = BS_STOP;
7728 generate_exception(ctx, EXCP_BREAK);
7731 #ifdef MIPS_STRICT_STANDARD
7733 generate_exception(ctx, EXCP_RI);
7735 /* Implemented as RI exception for now. */
7736 MIPS_INVAL("spim (unofficial)");
7737 generate_exception(ctx, EXCP_RI);
7745 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
7746 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
7747 check_cp1_enabled(ctx);
7748 gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
7749 (ctx->opcode >> 16) & 1);
7751 generate_exception_err(ctx, EXCP_CpU, 1);
7755 #if defined(TARGET_MIPS64)
7756 /* MIPS64 specific opcodes */
7761 check_insn(env, ctx, ISA_MIPS3);
7763 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7766 switch ((ctx->opcode >> 21) & 0x1f) {
7768 /* drotr is decoded as dsrl on non-R2 CPUs */
7769 if (env->insn_flags & ISA_MIPS32R2) {
7774 check_insn(env, ctx, ISA_MIPS3);
7776 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7779 generate_exception(ctx, EXCP_RI);
7784 switch ((ctx->opcode >> 21) & 0x1f) {
7786 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
7787 if (env->insn_flags & ISA_MIPS32R2) {
7792 check_insn(env, ctx, ISA_MIPS3);
7794 gen_shift_imm(env, ctx, op1, rd, rt, sa);
7797 generate_exception(ctx, EXCP_RI);
7801 case OPC_DADD ... OPC_DSUBU:
7802 check_insn(env, ctx, ISA_MIPS3);
7804 gen_arith(env, ctx, op1, rd, rs, rt);
7808 check_insn(env, ctx, ISA_MIPS3);
7810 gen_shift(env, ctx, op1, rd, rs, rt);
7813 switch ((ctx->opcode >> 6) & 0x1f) {
7815 /* drotrv is decoded as dsrlv on non-R2 CPUs */
7816 if (env->insn_flags & ISA_MIPS32R2) {
7821 check_insn(env, ctx, ISA_MIPS3);
7823 gen_shift(env, ctx, op1, rd, rs, rt);
7826 generate_exception(ctx, EXCP_RI);
7830 case OPC_DMULT ... OPC_DDIVU:
7831 check_insn(env, ctx, ISA_MIPS3);
7833 gen_muldiv(ctx, op1, rs, rt);
7836 default: /* Invalid */
7837 MIPS_INVAL("special");
7838 generate_exception(ctx, EXCP_RI);
7843 op1 = MASK_SPECIAL2(ctx->opcode);
7845 case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
7846 case OPC_MSUB ... OPC_MSUBU:
7847 check_insn(env, ctx, ISA_MIPS32);
7848 gen_muldiv(ctx, op1, rs, rt);
7851 gen_arith(env, ctx, op1, rd, rs, rt);
7855 check_insn(env, ctx, ISA_MIPS32);
7856 gen_cl(ctx, op1, rd, rs);
7859 /* XXX: not clear which exception should be raised
7860 * when in debug mode... */
7862 check_insn(env, ctx, ISA_MIPS32);
7863 if (!(ctx->hflags & MIPS_HFLAG_DM)) {
7864 generate_exception(ctx, EXCP_DBp);
7866 generate_exception(ctx, EXCP_DBp);
7870 #if defined(TARGET_MIPS64)
7873 check_insn(env, ctx, ISA_MIPS64);
7875 gen_cl(ctx, op1, rd, rs);
7878 default: /* Invalid */
7879 MIPS_INVAL("special2");
7880 generate_exception(ctx, EXCP_RI);
7885 op1 = MASK_SPECIAL3(ctx->opcode);
7889 check_insn(env, ctx, ISA_MIPS32R2);
7890 gen_bitops(ctx, op1, rt, rs, sa, rd);
7893 check_insn(env, ctx, ISA_MIPS32R2);
7894 op2 = MASK_BSHFL(ctx->opcode);
7895 gen_bshfl(ctx, op2, rt, rd);
7898 check_insn(env, ctx, ISA_MIPS32R2);
7900 TCGv t0 = tcg_temp_new();
7904 save_cpu_state(ctx, 1);
7905 gen_helper_rdhwr_cpunum(t0);
7906 gen_store_gpr(t0, rt);
7909 save_cpu_state(ctx, 1);
7910 gen_helper_rdhwr_synci_step(t0);
7911 gen_store_gpr(t0, rt);
7914 save_cpu_state(ctx, 1);
7915 gen_helper_rdhwr_cc(t0);
7916 gen_store_gpr(t0, rt);
7919 save_cpu_state(ctx, 1);
7920 gen_helper_rdhwr_ccres(t0);
7921 gen_store_gpr(t0, rt);
7924 #if defined(CONFIG_USER_ONLY)
7925 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, tls_value));
7926 gen_store_gpr(t0, rt);
7929 /* XXX: Some CPUs implement this in hardware.
7930 Not supported yet. */
7932 default: /* Invalid */
7933 MIPS_INVAL("rdhwr");
7934 generate_exception(ctx, EXCP_RI);
7941 check_insn(env, ctx, ASE_MT);
7943 TCGv t0 = tcg_temp_new();
7944 TCGv t1 = tcg_temp_new();
7946 gen_load_gpr(t0, rt);
7947 gen_load_gpr(t1, rs);
7948 gen_helper_fork(t0, t1);
7954 check_insn(env, ctx, ASE_MT);
7956 TCGv t0 = tcg_temp_new();
7958 save_cpu_state(ctx, 1);
7959 gen_load_gpr(t0, rs);
7960 gen_helper_yield(t0, t0);
7961 gen_store_gpr(t0, rd);
7965 #if defined(TARGET_MIPS64)
7966 case OPC_DEXTM ... OPC_DEXT:
7967 case OPC_DINSM ... OPC_DINS:
7968 check_insn(env, ctx, ISA_MIPS64R2);
7970 gen_bitops(ctx, op1, rt, rs, sa, rd);
7973 check_insn(env, ctx, ISA_MIPS64R2);
7975 op2 = MASK_DBSHFL(ctx->opcode);
7976 gen_bshfl(ctx, op2, rt, rd);
7979 default: /* Invalid */
7980 MIPS_INVAL("special3");
7981 generate_exception(ctx, EXCP_RI);
7986 op1 = MASK_REGIMM(ctx->opcode);
7988 case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
7989 case OPC_BLTZAL ... OPC_BGEZALL:
7990 gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2);
7993 case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
7995 gen_trap(ctx, op1, rs, -1, imm);
7998 check_insn(env, ctx, ISA_MIPS32R2);
8001 default: /* Invalid */
8002 MIPS_INVAL("regimm");
8003 generate_exception(ctx, EXCP_RI);
8008 check_cp0_enabled(ctx);
8009 op1 = MASK_CP0(ctx->opcode);
8015 #if defined(TARGET_MIPS64)
8019 #ifndef CONFIG_USER_ONLY
8020 gen_cp0(env, ctx, op1, rt, rd);
8021 #endif /* !CONFIG_USER_ONLY */
8023 case OPC_C0_FIRST ... OPC_C0_LAST:
8024 #ifndef CONFIG_USER_ONLY
8025 gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
8026 #endif /* !CONFIG_USER_ONLY */
8029 #ifndef CONFIG_USER_ONLY
8031 TCGv t0 = tcg_temp_new();
8033 op2 = MASK_MFMC0(ctx->opcode);
8036 check_insn(env, ctx, ASE_MT);
8037 gen_helper_dmt(t0, t0);
8038 gen_store_gpr(t0, rt);
8041 check_insn(env, ctx, ASE_MT);
8042 gen_helper_emt(t0, t0);
8043 gen_store_gpr(t0, rt);
8046 check_insn(env, ctx, ASE_MT);
8047 gen_helper_dvpe(t0, t0);
8048 gen_store_gpr(t0, rt);
8051 check_insn(env, ctx, ASE_MT);
8052 gen_helper_evpe(t0, t0);
8053 gen_store_gpr(t0, rt);
8056 check_insn(env, ctx, ISA_MIPS32R2);
8057 save_cpu_state(ctx, 1);
8059 gen_store_gpr(t0, rt);
8060 /* Stop translation as we may have switched the execution mode */
8061 ctx->bstate = BS_STOP;
8064 check_insn(env, ctx, ISA_MIPS32R2);
8065 save_cpu_state(ctx, 1);
8067 gen_store_gpr(t0, rt);
8068 /* Stop translation as we may have switched the execution mode */
8069 ctx->bstate = BS_STOP;
8071 default: /* Invalid */
8072 MIPS_INVAL("mfmc0");
8073 generate_exception(ctx, EXCP_RI);
8078 #endif /* !CONFIG_USER_ONLY */
8081 check_insn(env, ctx, ISA_MIPS32R2);
8082 gen_load_srsgpr(rt, rd);
8085 check_insn(env, ctx, ISA_MIPS32R2);
8086 gen_store_srsgpr(rt, rd);
8090 generate_exception(ctx, EXCP_RI);
8094 case OPC_ADDI: /* Arithmetic with immediate opcode */
8096 gen_arith_imm(env, ctx, op, rt, rs, imm);
8098 case OPC_SLTI: /* Set on less than with immediate opcode */
8100 gen_slt_imm(env, op, rt, rs, imm);
8102 case OPC_ANDI: /* Arithmetic with immediate opcode */
8106 gen_logic_imm(env, op, rt, rs, imm);
8108 case OPC_J ... OPC_JAL: /* Jump */
8109 offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
8110 gen_compute_branch(ctx, op, 4, rs, rt, offset);
8113 case OPC_BEQ ... OPC_BGTZ: /* Branch */
8114 case OPC_BEQL ... OPC_BGTZL:
8115 gen_compute_branch(ctx, op, 4, rs, rt, imm << 2);
8118 case OPC_LB ... OPC_LWR: /* Load and stores */
8119 case OPC_SB ... OPC_SW:
8122 gen_ldst(ctx, op, rt, rs, imm);
8125 gen_st_cond(ctx, op, rt, rs, imm);
8128 check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
8132 check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
8136 /* Floating point (COP1). */
8141 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8142 check_cp1_enabled(ctx);
8143 gen_flt_ldst(ctx, op, rt, rs, imm);
8145 generate_exception_err(ctx, EXCP_CpU, 1);
8150 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8151 check_cp1_enabled(ctx);
8152 op1 = MASK_CP1(ctx->opcode);
8156 check_insn(env, ctx, ISA_MIPS32R2);
8161 gen_cp1(ctx, op1, rt, rd);
8163 #if defined(TARGET_MIPS64)
8166 check_insn(env, ctx, ISA_MIPS3);
8167 gen_cp1(ctx, op1, rt, rd);
8173 check_insn(env, ctx, ASE_MIPS3D);
8176 gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
8177 (rt >> 2) & 0x7, imm << 2);
8185 gen_farith(ctx, MASK_CP1_FUNC(ctx->opcode), rt, rd, sa,
8190 generate_exception (ctx, EXCP_RI);
8194 generate_exception_err(ctx, EXCP_CpU, 1);
8204 /* COP2: Not implemented. */
8205 generate_exception_err(ctx, EXCP_CpU, 2);
8209 if (env->CP0_Config1 & (1 << CP0C1_FP)) {
8210 check_cp1_enabled(ctx);
8211 op1 = MASK_CP3(ctx->opcode);
8219 gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
8237 gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
8241 generate_exception (ctx, EXCP_RI);
8245 generate_exception_err(ctx, EXCP_CpU, 1);
8249 #if defined(TARGET_MIPS64)
8250 /* MIPS64 opcodes */
8252 case OPC_LDL ... OPC_LDR:
8253 case OPC_SDL ... OPC_SDR:
8257 check_insn(env, ctx, ISA_MIPS3);
8259 gen_ldst(ctx, op, rt, rs, imm);
8262 check_insn(env, ctx, ISA_MIPS3);
8264 gen_st_cond(ctx, op, rt, rs, imm);
8268 check_insn(env, ctx, ISA_MIPS3);
8270 gen_arith_imm(env, ctx, op, rt, rs, imm);
8274 check_insn(env, ctx, ASE_MIPS16);
8275 /* MIPS16: Not implemented. */
8277 check_insn(env, ctx, ASE_MDMX);
8278 /* MDMX: Not implemented. */
8279 default: /* Invalid */
8280 MIPS_INVAL("major opcode");
8281 generate_exception(ctx, EXCP_RI);
8287 gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb,
8291 target_ulong pc_start;
8292 uint16_t *gen_opc_end;
8301 qemu_log("search pc %d\n", search_pc);
8304 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
8307 ctx.singlestep_enabled = env->singlestep_enabled;
8309 ctx.bstate = BS_NONE;
8310 /* Restore delay slot state from the tb context. */
8311 ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
8312 restore_cpu_state(env, &ctx);
8313 #ifdef CONFIG_USER_ONLY
8314 ctx.mem_idx = MIPS_HFLAG_UM;
8316 ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
8319 max_insns = tb->cflags & CF_COUNT_MASK;
8321 max_insns = CF_COUNT_MASK;
8323 qemu_log_mask(CPU_LOG_TB_CPU, "------------------------------------------------\n");
8324 /* FIXME: This may print out stale hflags from env... */
8325 log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
8327 LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
8329 while (ctx.bstate == BS_NONE) {
8330 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
8331 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
8332 if (bp->pc == ctx.pc) {
8333 save_cpu_state(&ctx, 1);
8334 ctx.bstate = BS_BRANCH;
8335 gen_helper_0i(raise_exception, EXCP_DEBUG);
8336 /* Include the breakpoint location or the tb won't
8337 * be flushed when it must be. */
8339 goto done_generating;
8345 j = gen_opc_ptr - gen_opc_buf;
8349 gen_opc_instr_start[lj++] = 0;
8351 gen_opc_pc[lj] = ctx.pc;
8352 gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
8353 gen_opc_instr_start[lj] = 1;
8354 gen_opc_icount[lj] = num_insns;
8356 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
8360 if (ctx.isa_mode == 0) {
8361 ctx.opcode = ldl_code(ctx.pc);
8363 decode_opc(env, &ctx, &is_branch);
8365 generate_exception(&ctx, EXCP_RI);
8369 handle_delay_slot(env, &ctx, insn_bytes);
8371 ctx.pc += insn_bytes;
8375 /* Execute a branch and its delay slot as a single instruction.
8376 This is what GDB expects and is consistent with what the
8377 hardware does (e.g. if a delay slot instruction faults, the
8378 reported PC is the PC of the branch). */
8379 if (env->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0)
8382 if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
8385 if (gen_opc_ptr >= gen_opc_end)
8388 if (num_insns >= max_insns)
8394 if (tb->cflags & CF_LAST_IO)
8396 if (env->singlestep_enabled && ctx.bstate != BS_BRANCH) {
8397 save_cpu_state(&ctx, ctx.bstate == BS_NONE);
8398 gen_helper_0i(raise_exception, EXCP_DEBUG);
8400 switch (ctx.bstate) {
8402 gen_helper_interrupt_restart();
8403 gen_goto_tb(&ctx, 0, ctx.pc);
8406 save_cpu_state(&ctx, 0);
8407 gen_goto_tb(&ctx, 0, ctx.pc);
8410 gen_helper_interrupt_restart();
8419 gen_icount_end(tb, num_insns);
8420 *gen_opc_ptr = INDEX_op_end;
8422 j = gen_opc_ptr - gen_opc_buf;
8425 gen_opc_instr_start[lj++] = 0;
8427 tb->size = ctx.pc - pc_start;
8428 tb->icount = num_insns;
8432 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
8433 qemu_log("IN: %s\n", lookup_symbol(pc_start));
8434 log_target_disas(pc_start, ctx.pc - pc_start, 0);
8437 qemu_log_mask(CPU_LOG_TB_CPU, "---------------- %d %08x\n", ctx.bstate, ctx.hflags);
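/* The two entry points below differ only in the search_pc argument:
   0 translates the block normally, while 1 re-translates it and fills
   the gen_opc_* side tables so that a position in the generated code
   can be mapped back to a guest PC (see gen_pc_load below). */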
8441 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
8443 gen_intermediate_code_internal(env, tb, 0);
8446 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
8448 gen_intermediate_code_internal(env, tb, 1);
8451 static void fpu_dump_state(CPUState *env, FILE *f,
8452 int (*fpu_fprintf)(FILE *f, const char *fmt, ...),
8456 int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);
8458 #define printfpr(fp) \
8461 fpu_fprintf(f, "w:%08x d:%016" PRIx64 " fd:%13g fs:%13g psu: %13g\n", \
8462 (fp)->w[FP_ENDIAN_IDX], (fp)->d, (fp)->fd, \
8463 (fp)->fs[FP_ENDIAN_IDX], (fp)->fs[!FP_ENDIAN_IDX]); \
8466 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
8467 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
8468 fpu_fprintf(f, "w:%08x d:%016" PRIx64 " fd:%13g fs:%13g psu:%13g\n", \
8469 tmp.w[FP_ENDIAN_IDX], tmp.d, tmp.fd, \
8470 tmp.fs[FP_ENDIAN_IDX], tmp.fs[!FP_ENDIAN_IDX]); \
8475 fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%08x(0x%02x)\n",
8476 env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64, env->active_fpu.fp_status,
8477 get_float_exception_flags(&env->active_fpu.fp_status));
8478 for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
8479 fpu_fprintf(f, "%3s: ", fregnames[i]);
8480 printfpr(&env->active_fpu.fpr[i]);
8486 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8487 /* Debug help: The architecture requires 32bit code to maintain proper
8488 sign-extended values on 64bit machines. */
8490 #define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
8493 cpu_mips_check_sign_extensions (CPUState *env, FILE *f,
8494 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8499 if (!SIGN_EXT_P(env->active_tc.PC))
8500 cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
8501 if (!SIGN_EXT_P(env->active_tc.HI[0]))
8502 cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
8503 if (!SIGN_EXT_P(env->active_tc.LO[0]))
8504 cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
8505 if (!SIGN_EXT_P(env->btarget))
8506 cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);
8508 for (i = 0; i < 32; i++) {
8509 if (!SIGN_EXT_P(env->active_tc.gpr[i]))
8510 cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);
8513 if (!SIGN_EXT_P(env->CP0_EPC))
8514 cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
8515 if (!SIGN_EXT_P(env->lladdr))
8516 cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->lladdr);
8520 void cpu_dump_state (CPUState *env, FILE *f,
8521 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
8526 cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx " LO=0x" TARGET_FMT_lx " ds %04x " TARGET_FMT_lx " %d\n",
8527 env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
8528 env->hflags, env->btarget, env->bcond);
8529 for (i = 0; i < 32; i++) {
8531 cpu_fprintf(f, "GPR%02d:", i);
8532 cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
8534 cpu_fprintf(f, "\n");
8537 cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
8538 env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
8539 cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
8540 env->CP0_Config0, env->CP0_Config1, env->lladdr);
8541 if (env->hflags & MIPS_HFLAG_FPU)
8542 fpu_dump_state(env, f, cpu_fprintf, flags);
8543 #if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
8544 cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
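/* mips_tcg_init registers the TCG globals that back the guest state:
   the GPRs (cpu_gpr[0] is left unused because $zero is handled
   specially by gen_load_gpr/gen_store_gpr), PC, the HI/LO/ACX
   accumulators, DSPControl, the branch condition/target, hflags and
   the two FPU control registers. */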
8548 static void mips_tcg_init(void)
8553 /* Initialize various static tables. */
8557 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
8558 TCGV_UNUSED(cpu_gpr[0]);
8559 for (i = 1; i < 32; i++)
8560 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
8561 offsetof(CPUState, active_tc.gpr[i]),
8563 cpu_PC = tcg_global_mem_new(TCG_AREG0,
8564 offsetof(CPUState, active_tc.PC), "PC");
8565 for (i = 0; i < MIPS_DSP_ACC; i++) {
8566 cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
8567 offsetof(CPUState, active_tc.HI[i]),
8569 cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
8570 offsetof(CPUState, active_tc.LO[i]),
8572 cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
8573 offsetof(CPUState, active_tc.ACX[i]),
8576 cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
8577 offsetof(CPUState, active_tc.DSPControl),
8579 bcond = tcg_global_mem_new(TCG_AREG0,
8580 offsetof(CPUState, bcond), "bcond");
8581 btarget = tcg_global_mem_new(TCG_AREG0,
8582 offsetof(CPUState, btarget), "btarget");
8583 hflags = tcg_global_mem_new_i32(TCG_AREG0,
8584 offsetof(CPUState, hflags), "hflags");
8586 fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
8587 offsetof(CPUState, active_fpu.fcr0),
8589 fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
8590 offsetof(CPUState, active_fpu.fcr31),
8593 /* register helpers */
8594 #define GEN_HELPER 2
8600 #include "translate_init.c"
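/* cpu_mips_init looks up the requested CPU model by name and, if it is
   known, allocates and initialises a CPUMIPSState for it; qemu_init_vcpu
   then hooks the new CPU into the execution loop. */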
8602 CPUMIPSState *cpu_mips_init (const char *cpu_model)
8605 const mips_def_t *def;
8607 def = cpu_mips_find_by_name(cpu_model);
8610 env = qemu_mallocz(sizeof(CPUMIPSState));
8611 env->cpu_model = def;
8612 env->cpu_model_str = cpu_model;
8615 #ifndef CONFIG_USER_ONLY
8622 qemu_init_vcpu(env);
8626 void cpu_reset (CPUMIPSState *env)
8628 if (qemu_loglevel_mask(CPU_LOG_RESET)) {
8629 qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
8630 log_cpu_state(env, 0);
8633 memset(env, 0, offsetof(CPUMIPSState, breakpoints));
8636 /* Reset registers to their default values */
8637 env->CP0_PRid = env->cpu_model->CP0_PRid;
8638 env->CP0_Config0 = env->cpu_model->CP0_Config0;
8639 #ifdef TARGET_WORDS_BIGENDIAN
8640 env->CP0_Config0 |= (1 << CP0C0_BE);
8642 env->CP0_Config1 = env->cpu_model->CP0_Config1;
8643 env->CP0_Config2 = env->cpu_model->CP0_Config2;
8644 env->CP0_Config3 = env->cpu_model->CP0_Config3;
8645 env->CP0_Config6 = env->cpu_model->CP0_Config6;
8646 env->CP0_Config7 = env->cpu_model->CP0_Config7;
8647 env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
8648 << env->cpu_model->CP0_LLAddr_shift;
8649 env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
8650 env->SYNCI_Step = env->cpu_model->SYNCI_Step;
8651 env->CCRes = env->cpu_model->CCRes;
8652 env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
8653 env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
8654 env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
8655 env->current_tc = 0;
8656 env->SEGBITS = env->cpu_model->SEGBITS;
8657 env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
8658 #if defined(TARGET_MIPS64)
8659 if (env->cpu_model->insn_flags & ISA_MIPS3) {
8660 env->SEGMask |= 3ULL << 62;
8663 env->PABITS = env->cpu_model->PABITS;
8664 env->PAMask = (target_ulong)((1ULL << env->cpu_model->PABITS) - 1);
8665 env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
8666 env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
8667 env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
8668 env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
8669 env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
8670 env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
8671 env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
8672 env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
8673 env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
8674 env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
8675 env->insn_flags = env->cpu_model->insn_flags;
8677 #if defined(CONFIG_USER_ONLY)
8678 env->hflags = MIPS_HFLAG_UM;
8679 /* Enable access to the SYNCI_Step register. */
8680 env->CP0_HWREna |= (1 << 1);
8682 if (env->hflags & MIPS_HFLAG_BMASK) {
8683 /* If the exception was raised from a delay slot,
8684 come back to the jump. */
8685 env->CP0_ErrorEPC = env->active_tc.PC - 4;
8687 env->CP0_ErrorEPC = env->active_tc.PC;
8689 env->active_tc.PC = (int32_t)0xBFC00000;
8690 env->CP0_Random = env->tlb->nb_tlb - 1;
8691 env->tlb->tlb_in_use = env->tlb->nb_tlb;
8693 /* SMP not implemented */
8694 env->CP0_EBase = 0x80000000;
8695 env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
8696 /* vectored interrupts not implemented, timer on int 7,
8697 no performance counters. */
8698 env->CP0_IntCtl = 0xe0000000;
8702 for (i = 0; i < 7; i++) {
8703 env->CP0_WatchLo[i] = 0;
8704 env->CP0_WatchHi[i] = 0x80000000;
8706 env->CP0_WatchLo[7] = 0;
8707 env->CP0_WatchHi[7] = 0;
8709 /* Count register increments in debug mode, EJTAG version 1 */
8710 env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
8711 env->hflags = MIPS_HFLAG_CP0;
8713 #if defined(TARGET_MIPS64)
8714 if (env->cpu_model->insn_flags & ISA_MIPS3) {
8715 env->hflags |= MIPS_HFLAG_64;
8718 env->exception_index = EXCP_NONE;
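/* gen_pc_load maps a position in a re-translated block back to the
   guest state: it restores the PC and the branch-related hflags that
   were recorded in the gen_opc_* side tables at translation time. */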
8721 void gen_pc_load(CPUState *env, TranslationBlock *tb,
8722 unsigned long searched_pc, int pc_pos, void *puc)
8724 env->active_tc.PC = gen_opc_pc[pc_pos];
8725 env->hflags &= ~MIPS_HFLAG_BMASK;
8726 env->hflags |= gen_opc_hflags[pc_pos];