2 * Tiny Code Generator for QEMU
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/* Guest/host endianness differ?  If so, qemu_ld/qemu_st must byte swap. */
#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif
/* Printable names for each TCGReg, used only for debug dumps.
   NOTE(review): the initializer body is missing from this extract —
   restore the 32 o32 register-name strings from upstream. */
34 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
70 /* check if we really need so many registers :P */
/* Register allocation preference order.
   NOTE(review): initializer elided in this extract — do not guess the order;
   it determines allocator behaviour. */
71 static const TCGReg tcg_target_reg_alloc_order[] = {
97 static const TCGReg tcg_target_call_iarg_regs[4] = {
104 static const TCGReg tcg_target_call_oarg_regs[2] = {
109 static uint8_t *tb_ret_addr;
111 static inline uint32_t reloc_lo16_val (void *pc, tcg_target_long target)
113 return target & 0xffff;
116 static inline void reloc_lo16 (void *pc, tcg_target_long target)
118 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
119 | reloc_lo16_val(pc, target);
122 static inline uint32_t reloc_hi16_val (void *pc, tcg_target_long target)
124 return (target >> 16) & 0xffff;
127 static inline void reloc_hi16 (void *pc, tcg_target_long target)
129 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
130 | reloc_hi16_val(pc, target);
133 static inline uint32_t reloc_pc16_val (void *pc, tcg_target_long target)
137 disp = target - (tcg_target_long) pc - 4;
138 if (disp != (disp << 14) >> 14) {
142 return (disp >> 2) & 0xffff;
145 static inline void reloc_pc16 (void *pc, tcg_target_long target)
147 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
148 | reloc_pc16_val(pc, target);
151 static inline uint32_t reloc_26_val (void *pc, tcg_target_long target)
153 if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
157 return (target >> 2) & 0x3ffffff;
160 static inline void reloc_pc26 (void *pc, tcg_target_long target)
162 *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
163 | reloc_26_val(pc, target);
166 static void patch_reloc(uint8_t *code_ptr, int type,
167 tcg_target_long value, tcg_target_long addend)
172 reloc_lo16(code_ptr, value);
175 reloc_hi16(code_ptr, value);
178 reloc_pc16(code_ptr, value);
181 reloc_pc26(code_ptr, value);
/* maximum number of register used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 4;  /* o32: $a0-$a3, matching tcg_target_call_iarg_regs[] */
}
194 /* parse target specific constraints */
195 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
202 ct->ct |= TCG_CT_REG;
203 tcg_regset_set(ct->u.regs, 0xffffffff);
206 ct->ct |= TCG_CT_REG;
207 tcg_regset_clear(ct->u.regs);
208 tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
210 case 'L': /* qemu_ld output arg constraint */
211 ct->ct |= TCG_CT_REG;
212 tcg_regset_set(ct->u.regs, 0xffffffff);
213 tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
215 case 'l': /* qemu_ld input arg constraint */
216 ct->ct |= TCG_CT_REG;
217 tcg_regset_set(ct->u.regs, 0xffffffff);
218 #if defined(CONFIG_SOFTMMU)
219 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
220 # if (TARGET_LONG_BITS == 64)
221 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
225 case 'S': /* qemu_st constraint */
226 ct->ct |= TCG_CT_REG;
227 tcg_regset_set(ct->u.regs, 0xffffffff);
228 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
229 #if defined(CONFIG_SOFTMMU)
230 # if (TARGET_LONG_BITS == 32)
231 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
233 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
234 # if TARGET_LONG_BITS == 64
235 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
240 ct->ct |= TCG_CT_CONST_U16;
243 ct->ct |= TCG_CT_CONST_S16;
246 /* We are cheating a bit here, using the fact that the register
247 ZERO is also the register number 0. Hence there is no need
248 to check for const_args in each instruction. */
249 ct->ct |= TCG_CT_CONST_ZERO;
259 /* test if a constant matches the constraint */
260 static inline int tcg_target_const_match(tcg_target_long val,
261 const TCGArgConstraint *arg_ct)
265 if (ct & TCG_CT_CONST)
267 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
269 else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
271 else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
/* instruction opcodes */
enum {
    /* I-type: major opcode in bits 31:26 (MIPS32 encoding). */
    OPC_BEQ      = 0x04 << 26,
    OPC_BNE      = 0x05 << 26,
    OPC_BLEZ     = 0x06 << 26,
    OPC_BGTZ     = 0x07 << 26,
    OPC_ADDIU    = 0x09 << 26,
    OPC_SLTI     = 0x0A << 26,
    OPC_SLTIU    = 0x0B << 26,
    OPC_ANDI     = 0x0C << 26,
    OPC_ORI      = 0x0D << 26,
    OPC_XORI     = 0x0E << 26,
    OPC_LUI      = 0x0F << 26,

    /* Loads. */
    OPC_LB       = 0x20 << 26,
    OPC_LH       = 0x21 << 26,
    OPC_LW       = 0x23 << 26,
    OPC_LBU      = 0x24 << 26,
    OPC_LHU      = 0x25 << 26,
    OPC_LWU      = 0x27 << 26,

    /* Stores. */
    OPC_SB       = 0x28 << 26,
    OPC_SH       = 0x29 << 26,
    OPC_SW       = 0x2B << 26,

    /* R-type: SPECIAL major opcode, function field in bits 5:0. */
    OPC_SPECIAL  = 0x00 << 26,
    OPC_SLL      = OPC_SPECIAL | 0x00,
    OPC_SRL      = OPC_SPECIAL | 0x02,
    OPC_ROTR     = OPC_SPECIAL | (0x01 << 21) | 0x02,   /* R2: SRL with rs=1 */
    OPC_SRA      = OPC_SPECIAL | 0x03,
    OPC_SLLV     = OPC_SPECIAL | 0x04,
    OPC_SRLV     = OPC_SPECIAL | 0x06,
    OPC_ROTRV    = OPC_SPECIAL | (0x01 << 6) | 0x06,    /* R2: SRLV with sa=1 */
    OPC_SRAV     = OPC_SPECIAL | 0x07,
    OPC_JR       = OPC_SPECIAL | 0x08,
    OPC_JALR     = OPC_SPECIAL | 0x09,
    OPC_MFHI     = OPC_SPECIAL | 0x10,
    OPC_MFLO     = OPC_SPECIAL | 0x12,
    OPC_MULT     = OPC_SPECIAL | 0x18,
    OPC_MULTU    = OPC_SPECIAL | 0x19,
    OPC_DIV      = OPC_SPECIAL | 0x1A,
    OPC_DIVU     = OPC_SPECIAL | 0x1B,
    OPC_ADDU     = OPC_SPECIAL | 0x21,
    OPC_SUBU     = OPC_SPECIAL | 0x23,
    OPC_AND      = OPC_SPECIAL | 0x24,
    OPC_OR       = OPC_SPECIAL | 0x25,
    OPC_XOR      = OPC_SPECIAL | 0x26,
    OPC_NOR      = OPC_SPECIAL | 0x27,
    OPC_SLT      = OPC_SPECIAL | 0x2A,
    OPC_SLTU     = OPC_SPECIAL | 0x2B,

    /* REGIMM branches: condition encoded in the rt field. */
    OPC_REGIMM   = 0x01 << 26,
    OPC_BLTZ     = OPC_REGIMM | (0x00 << 16),
    OPC_BGEZ     = OPC_REGIMM | (0x01 << 16),

    /* SPECIAL3 (MIPS32R2 bit-manipulation ops). */
    OPC_SPECIAL3 = 0x1f << 26,
    OPC_INS      = OPC_SPECIAL3 | 0x004,
    OPC_WSBH     = OPC_SPECIAL3 | 0x0a0,
    OPC_SEB      = OPC_SPECIAL3 | 0x420,
    OPC_SEH      = OPC_SPECIAL3 | 0x620,
};
340 static inline void tcg_out_opc_reg(TCGContext *s, int opc,
341 TCGReg rd, TCGReg rs, TCGReg rt)
346 inst |= (rs & 0x1F) << 21;
347 inst |= (rt & 0x1F) << 16;
348 inst |= (rd & 0x1F) << 11;
355 static inline void tcg_out_opc_imm(TCGContext *s, int opc,
356 TCGReg rt, TCGReg rs, TCGArg imm)
361 inst |= (rs & 0x1F) << 21;
362 inst |= (rt & 0x1F) << 16;
363 inst |= (imm & 0xffff);
370 static inline void tcg_out_opc_br(TCGContext *s, int opc,
371 TCGReg rt, TCGReg rs)
373 /* We pay attention here to not modify the branch target by reading
374 the existing value and using it again. This ensure that caches and
375 memory are kept coherent during retranslation. */
376 uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);
378 tcg_out_opc_imm(s, opc, rt, rs, offset);
384 static inline void tcg_out_opc_sa(TCGContext *s, int opc,
385 TCGReg rd, TCGReg rt, TCGArg sa)
390 inst |= (rt & 0x1F) << 16;
391 inst |= (rd & 0x1F) << 11;
392 inst |= (sa & 0x1F) << 6;
397 static inline void tcg_out_nop(TCGContext *s)
402 static inline void tcg_out_mov(TCGContext *s, TCGType type,
403 TCGReg ret, TCGReg arg)
405 /* Simple reg-reg move, optimising out the 'do nothing' case */
407 tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
411 static inline void tcg_out_movi(TCGContext *s, TCGType type,
412 TCGReg reg, tcg_target_long arg)
414 if (arg == (int16_t)arg) {
415 tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
416 } else if (arg == (uint16_t)arg) {
417 tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
419 tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
420 tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
424 static inline void tcg_out_bswap16(TCGContext *s, TCGReg ret, TCGReg arg)
426 #ifdef _MIPS_ARCH_MIPS32R2
427 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
429 /* ret and arg can't be register at */
430 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
434 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
435 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
436 tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
437 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
441 static inline void tcg_out_bswap16s(TCGContext *s, TCGReg ret, TCGReg arg)
443 #ifdef _MIPS_ARCH_MIPS32R2
444 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
445 tcg_out_opc_reg(s, OPC_SEH, ret, 0, ret);
447 /* ret and arg can't be register at */
448 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
452 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
453 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
454 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
455 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
459 static inline void tcg_out_bswap32(TCGContext *s, TCGReg ret, TCGReg arg)
461 #ifdef _MIPS_ARCH_MIPS32R2
462 tcg_out_opc_reg(s, OPC_WSBH, ret, 0, arg);
463 tcg_out_opc_sa(s, OPC_ROTR, ret, ret, 16);
465 /* ret and arg must be different and can't be register at */
466 if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
470 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
472 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
473 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
475 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
476 tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
477 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
479 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
480 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
481 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
485 static inline void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
487 #ifdef _MIPS_ARCH_MIPS32R2
488 tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
490 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
491 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
495 static inline void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
497 #ifdef _MIPS_ARCH_MIPS32R2
498 tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
500 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
501 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
505 static inline void tcg_out_ldst(TCGContext *s, int opc, TCGArg arg,
506 TCGReg arg1, TCGArg arg2)
508 if (arg2 == (int16_t) arg2) {
509 tcg_out_opc_imm(s, opc, arg, arg1, arg2);
511 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
512 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
513 tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
517 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
518 TCGReg arg1, tcg_target_long arg2)
520 tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
523 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
524 TCGReg arg1, tcg_target_long arg2)
526 tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
529 static inline void tcg_out_addi(TCGContext *s, TCGReg reg, TCGArg val)
531 if (val == (int16_t)val) {
532 tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
534 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
535 tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
539 /* Helper routines for marshalling helper function arguments into
540 * the correct registers and stack.
541 * arg_num is where we want to put this argument, and is updated to be ready
542 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
543 * real registers, 4+ on stack.
545 * We provide routines for arguments which are: immediate, 32 bit
546 * value in register, 16 and 8 bit values in register (which must be zero
547 * extended before use) and 64 bit value in a lo:hi register pair.
549 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
550 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
552 if (*arg_num < 4) { \
553 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
555 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
556 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
560 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
561 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
562 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
563 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
564 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
565 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
566 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
567 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
568 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
569 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
570 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, TCGArg arg)
571 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
573 /* We don't use the macro for this one to avoid an unnecessary reg-reg
574 move when storing to the stack. */
575 static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
579 tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
581 tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
586 static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
587 TCGReg arg_low, TCGReg arg_high)
589 (*arg_num) = (*arg_num + 1) & ~1;
591 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
592 tcg_out_call_iarg_reg32(s, arg_num, arg_high);
593 tcg_out_call_iarg_reg32(s, arg_num, arg_low);
595 tcg_out_call_iarg_reg32(s, arg_num, arg_low);
596 tcg_out_call_iarg_reg32(s, arg_num, arg_high);
600 static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
601 TCGArg arg2, int label_index)
603 TCGLabel *l = &s->labels[label_index];
607 tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
610 tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
614 tcg_out_opc_br(s, OPC_BLTZ, 0, arg1);
616 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
617 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
621 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
622 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
626 tcg_out_opc_br(s, OPC_BGEZ, 0, arg1);
628 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
629 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
633 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
634 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
638 tcg_out_opc_br(s, OPC_BLEZ, 0, arg1);
640 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
641 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
645 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
646 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
650 tcg_out_opc_br(s, OPC_BGTZ, 0, arg1);
652 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
653 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
657 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
658 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
665 reloc_pc16(s->code_ptr - 4, l->u.value);
667 tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
672 /* XXX: we implement it at the target level to avoid having to
673 handle cross basic blocks temporaries */
674 static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGArg arg1,
675 TCGArg arg2, TCGArg arg3, TCGArg arg4,
682 tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
683 tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
689 tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
693 tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
697 tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
701 tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
707 label_ptr = s->code_ptr;
708 tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
713 tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
717 tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
721 tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
725 tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
729 tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
735 reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
738 static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
739 TCGArg arg1, TCGArg arg2)
744 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
745 } else if (arg2 == 0) {
746 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
748 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
749 tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
754 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
755 } else if (arg2 == 0) {
756 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
758 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
759 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
763 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
766 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
769 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
770 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
773 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
774 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
777 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
778 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
781 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
782 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
785 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
788 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
796 /* XXX: we implement it at the target level to avoid having to
797 handle cross basic blocks temporaries */
798 static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
799 TCGArg arg1, TCGArg arg2, TCGArg arg3, TCGArg arg4)
803 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
804 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
805 tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
808 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
809 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
810 tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
814 tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
818 tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
822 tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
826 tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
833 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);
838 tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
842 tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
846 tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
850 tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
856 tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
857 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
860 #if defined(CONFIG_SOFTMMU)
862 #include "../../softmmu_defs.h"
864 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
/* Slow-path load helpers indexed by access size log2 (0=8-bit .. 3=64-bit).
   NOTE(review): the initializer entries are missing from this extract. */
866 static const void * const qemu_ld_helpers[4] = {
873 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
874    uintxx_t val, int mmu_idx) */
/* Slow-path store helpers, same indexing.
   NOTE(review): initializer entries elided in this extract. */
875 static const void * const qemu_st_helpers[4] = {
/*
 * Emit a guest memory load (qemu_ld*).  Softmmu flow: inline TLB lookup,
 * fall back to a qemu_ld_helpers[] call on miss, fast-path direct load on
 * hit.  NOTE(review): this extract is missing lines from the original
 * (argument parsing, several #else/#endif and closing braces); the code
 * below is kept verbatim — comments only added.
 */
883 static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
886 TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
887 #if defined(CONFIG_SOFTMMU)
/* Patch points: label1 = TLB hit, label2 = skip-slow-path. */
888 void *label1_ptr, *label2_ptr;
890 int mem_index, s_bits;
892 # if TARGET_LONG_BITS == 64
904 #if defined(CONFIG_SOFTMMU)
905 # if TARGET_LONG_BITS == 64
907 #  if defined(TCG_TARGET_WORDS_BIGENDIAN)
/* Map lo/hi value halves onto the first/second word by host endianness. */
922 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
923 data_reg1 = data_regh;
924 data_reg2 = data_regl;
926 data_reg1 = data_regl;
927 data_reg2 = data_regh;
930 data_reg1 = data_regl;
933 #if defined(CONFIG_SOFTMMU)
/* TLB lookup: index tlb_table by page bits and compare address tags. */
934 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
935 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
936 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
937 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
938 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
939 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
940 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
/* 64-bit guest: the high address word must also match the TLB tag. */
942 # if TARGET_LONG_BITS == 64
943 label3_ptr = s->code_ptr;
944 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
947 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
948 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);
950 label1_ptr = s->code_ptr;
951 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
954 reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
956 label1_ptr = s->code_ptr;
957 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
/* Slow path: marshal (env, addr, mem_index) and call the ld helper via $t9. */
963 tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
964 # if TARGET_LONG_BITS == 64
965 tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
967 tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
969 tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
970 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
971 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
/* Extend/move the helper result in $v0 (and $v1 for 64-bit) per size. */
976 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
979 tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
982 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
985 tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
988 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
991 tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
992 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
/* Unconditional branch over the fast path. */
998 label2_ptr = s->code_ptr;
999 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
1002 /* label1: fast path */
1003 reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);
/* Host address = guest address + TLB addend (softmmu) ... */
1005 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
1006 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
1007 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
/* ... or guest address + GUEST_BASE (user mode). */
1009 if (GUEST_BASE == (int16_t)GUEST_BASE) {
1010 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
1012 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
1013 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
/* Direct load, byte-swapping via $t0 when guest/host endianness differ. */
1019 tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
1022 tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
1025 if (TCG_NEED_BSWAP) {
1026 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
1027 tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
1029 tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
1033 if (TCG_NEED_BSWAP) {
1034 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
1035 tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
1037 tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
1041 if (TCG_NEED_BSWAP) {
1042 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
1043 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
1045 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
/* 64-bit load: two words, swapped order when byte-swapping. */
1049 if (TCG_NEED_BSWAP) {
1050 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
1051 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
1052 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
1053 tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
1055 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
1056 tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
/* label2: slow path rejoins here. */
1063 #if defined(CONFIG_SOFTMMU)
1064 reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
/*
 * Emit a guest memory store (qemu_st*).  Mirrors tcg_out_qemu_ld: inline
 * TLB lookup against addr_write, helper call on miss, direct store on hit.
 * NOTE(review): this extract is missing lines from the original (closing
 * braces, #else/#endif, case labels); code kept verbatim — comments only.
 */
1068 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
1071 TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
1072 #if defined(CONFIG_SOFTMMU)
/* Patch points: label1 = TLB hit, label2 = skip-slow-path. */
1073 uint8_t *label1_ptr, *label2_ptr;
1075 int mem_index, s_bits;
1078 #if TARGET_LONG_BITS == 64
1079 # if defined(CONFIG_SOFTMMU)
1080 uint8_t *label3_ptr;
/* Operand parsing: value (one or two words), then address. */
1085 data_regl = *args++;
1087 data_regh = *args++;
1091 addr_regl = *args++;
1092 #if defined(CONFIG_SOFTMMU)
1093 # if TARGET_LONG_BITS == 64
1094 addr_regh = *args++;
1095 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
/* Map lo/hi value halves onto first/second word by host endianness. */
1110 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1111 data_reg1 = data_regh;
1112 data_reg2 = data_regl;
1114 data_reg1 = data_regl;
1115 data_reg2 = data_regh;
1118 data_reg1 = data_regl;
1122 #if defined(CONFIG_SOFTMMU)
/* TLB lookup against the write-permission tag (addr_write). */
1123 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
1124 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
1125 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
1126 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1127 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
1128 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
1129 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
/* 64-bit guest: high address word must also match. */
1131 # if TARGET_LONG_BITS == 64
1132 label3_ptr = s->code_ptr;
1133 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
1136 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1137 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);
1139 label1_ptr = s->code_ptr;
1140 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
1143 reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
1145 label1_ptr = s->code_ptr;
1146 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
/* Slow path: marshal (env, addr, value, mem_index), call st helper via $t9. */
1152 tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
1153 # if TARGET_LONG_BITS == 64
1154 tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
1156 tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
/* Value argument narrowed to the access size. */
1160 tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
1163 tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
1166 tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
1169 tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
1174 tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
1175 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
1176 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
/* Unconditional branch over the fast path. */
1179 label2_ptr = s->code_ptr;
1180 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
1183 /* label1: fast path */
1184 reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);
/* Host address = guest address + TLB addend (softmmu) ... */
1186 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
1187 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
1188 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
/* ... or guest address + GUEST_BASE (user mode). */
1190 if (GUEST_BASE == (int16_t)GUEST_BASE) {
1191 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
1193 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
1194 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
/* Direct store, byte-swapping via $t0 when endianness differs. */
1201 tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
1204 if (TCG_NEED_BSWAP) {
1205 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_T0, data_reg1, 0xffff);
1206 tcg_out_bswap16(s, TCG_REG_T0, TCG_REG_T0);
1207 tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
1209 tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
1213 if (TCG_NEED_BSWAP) {
1214 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1215 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1217 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
/* 64-bit store: two words, swapped order when byte-swapping. */
1221 if (TCG_NEED_BSWAP) {
1222 tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
1223 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1224 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1225 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
1227 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
1228 tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
/* label2: slow path rejoins here. */
1235 #if defined(CONFIG_SOFTMMU)
1236 reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
1240 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1241 const TCGArg *args, const int *const_args)
1244 case INDEX_op_exit_tb:
1245 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
1246 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
1247 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1250 case INDEX_op_goto_tb:
1251 if (s->tb_jmp_offset) {
1252 /* direct jump method */
1255 /* indirect jump method */
1256 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, (tcg_target_long)(s->tb_next + args[0]));
1257 tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
1258 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1261 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
1264 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);
1268 tcg_out_opc_reg(s, OPC_JR, 0, args[0], 0);
1272 tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
1275 case INDEX_op_mov_i32:
1276 tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
1278 case INDEX_op_movi_i32:
1279 tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
1282 case INDEX_op_ld8u_i32:
1283 tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
1285 case INDEX_op_ld8s_i32:
1286 tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
1288 case INDEX_op_ld16u_i32:
1289 tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
1291 case INDEX_op_ld16s_i32:
1292 tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
1294 case INDEX_op_ld_i32:
1295 tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
1297 case INDEX_op_st8_i32:
1298 tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
1300 case INDEX_op_st16_i32:
1301 tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
1303 case INDEX_op_st_i32:
1304 tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
1307 case INDEX_op_add_i32:
1308 if (const_args[2]) {
1309 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
1311 tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
1314 case INDEX_op_add2_i32:
1315 if (const_args[4]) {
1316 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
1318 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
1320 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
1321 if (const_args[5]) {
1322 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
1324 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
1326 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
1327 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
1329 case INDEX_op_sub_i32:
1330 if (const_args[2]) {
1331 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
1333 tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
1336 case INDEX_op_sub2_i32:
1337 if (const_args[4]) {
1338 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
1340 tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
1342 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
/* Tail of the tcg_out_op() opcode dispatch switch: arithmetic, logical,
   shift/rotate, byte-swap, sign-extend, compare/branch and guest
   load/store cases.  NOTE(review): the switch header and several
   structural lines (break; / else) are outside this view; the visible
   code is kept byte-identical. */
/* sub2 tail: subtract the constant (via ADDIU with negated immediate)
   or register low part, then subtract the borrow held in T0. */
1343 if (const_args[5]) {
1344 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
1346 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
1348 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
1349 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
/* 32-bit multiply: MULT writes HI/LO; only LO (low 32 bits) is kept. */
1351 case INDEX_op_mul_i32:
1352 tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
1353 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
/* Unsigned widening multiply: low half from LO, high half from HI. */
1355 case INDEX_op_mulu2_i32:
1356 tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
1357 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1358 tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
/* DIV/DIVU leave the quotient in LO and the remainder in HI;
   div/divu read LO, rem/remu read HI. */
1360 case INDEX_op_div_i32:
1361 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1362 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1364 case INDEX_op_divu_i32:
1365 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1366 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1368 case INDEX_op_rem_i32:
1369 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1370 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1372 case INDEX_op_remu_i32:
1373 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1374 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
/* Logical ops: use the immediate encoding (ANDI/ORI/XORI) when the
   constraint matched a constant, the 3-register form otherwise. */
1377 case INDEX_op_and_i32:
1378 if (const_args[2]) {
1379 tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
1381 tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
1384 case INDEX_op_or_i32:
1385 if (const_args[2]) {
1386 tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
1388 tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
1391 case INDEX_op_nor_i32:
1392 tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
/* not x == nor(zero, x) — MIPS has no dedicated NOT instruction. */
1394 case INDEX_op_not_i32:
1395 tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
1397 case INDEX_op_xor_i32:
1398 if (const_args[2]) {
1399 tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
1401 tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
/* Shifts: constant amount uses the sa-field form (SRA/SLL/SRL); the
   variable form passes the shift-amount register (args[2]) before the
   value register, matching tcg_out_opc_reg's operand order here. */
1405 case INDEX_op_sar_i32:
1406 if (const_args[2]) {
1407 tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
1409 tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
1412 case INDEX_op_shl_i32:
1413 if (const_args[2]) {
1414 tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
1416 tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
1419 case INDEX_op_shr_i32:
1420 if (const_args[2]) {
1421 tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
1423 tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
/* rotl implemented via MIPS32R2 rotate-right with complemented
   amount: rotl(x, n) == rotr(x, 32 - n). */
1426 case INDEX_op_rotl_i32:
1427 if (const_args[2]) {
1428 tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], 0x20 - args[2]);
1430 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, 32);
1431 tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, TCG_REG_AT, args[2]);
1432 tcg_out_opc_reg(s, OPC_ROTRV, args[0], TCG_REG_AT, args[1]);
1435 case INDEX_op_rotr_i32:
1436 if (const_args[2]) {
1437 tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], args[2]);
1439 tcg_out_opc_reg(s, OPC_ROTRV, args[0], args[2], args[1]);
1443 /* The bswap routines do not work on a non-R2 CPU. In that case
1444 we let TCG generate the corresponding code instead. */
1445 case INDEX_op_bswap16_i32:
1446 tcg_out_bswap16(s, args[0], args[1]);
1448 case INDEX_op_bswap32_i32:
1449 tcg_out_bswap32(s, args[0], args[1]);
1452 case INDEX_op_ext8s_i32:
1453 tcg_out_ext8s(s, args[0], args[1]);
1455 case INDEX_op_ext16s_i32:
1456 tcg_out_ext16s(s, args[0], args[1]);
/* deposit: single INS instruction; the immediate packs msb
   (pos + len - 1) at bit 11 and lsb (pos) at bit 6 of the encoding. */
1459 case INDEX_op_deposit_i32:
1460 tcg_out_opc_imm(s, OPC_INS, args[0], args[2],
1461 ((args[3] + args[4] - 1) << 11) | (args[3] << 6));
/* Conditional branches / setcond, plus the 64-bit-pair (*2) forms
   used because this is a 32-bit host. */
1464 case INDEX_op_brcond_i32:
1465 tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
1467 case INDEX_op_brcond2_i32:
1468 tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
1471 case INDEX_op_setcond_i32:
1472 tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
1474 case INDEX_op_setcond2_i32:
1475 tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
/* Guest memory accesses.  The third argument encodes the size as
   log2(bytes) in the low bits; bit 2 (| 4) selects sign extension
   for the sub-word loads. */
1478 case INDEX_op_qemu_ld8u:
1479 tcg_out_qemu_ld(s, args, 0);
1481 case INDEX_op_qemu_ld8s:
1482 tcg_out_qemu_ld(s, args, 0 | 4);
1484 case INDEX_op_qemu_ld16u:
1485 tcg_out_qemu_ld(s, args, 1);
1487 case INDEX_op_qemu_ld16s:
1488 tcg_out_qemu_ld(s, args, 1 | 4);
1490 case INDEX_op_qemu_ld32:
1491 tcg_out_qemu_ld(s, args, 2);
1493 case INDEX_op_qemu_ld64:
1494 tcg_out_qemu_ld(s, args, 3);
1496 case INDEX_op_qemu_st8:
1497 tcg_out_qemu_st(s, args, 0);
1499 case INDEX_op_qemu_st16:
1500 tcg_out_qemu_st(s, args, 1);
1502 case INDEX_op_qemu_st32:
1503 tcg_out_qemu_st(s, args, 2);
1505 case INDEX_op_qemu_st64:
1506 tcg_out_qemu_st(s, args, 3);
/* Operand-constraint table: one entry per supported TCG opcode, with
   one constraint string per operand (outputs first, then inputs).
   Letters seen here: "r" = any general register, "Z" = also allow the
   hard zero register, "i"/"I"/"J" = immediate variants (exact ranges
   are defined by the backend's constraint parser — not visible in this
   chunk), "C" = call-target register class, and "L"/"l"/"S" = the
   restricted classes used by the qemu_ld/qemu_st slow paths. */
1514 static const TCGTargetOpDef mips_op_defs[] = {
1515 { INDEX_op_exit_tb, { } },
1516 { INDEX_op_goto_tb, { } },
1517 { INDEX_op_call, { "C" } },
1518 { INDEX_op_jmp, { "r" } },
1519 { INDEX_op_br, { } },
1521 { INDEX_op_mov_i32, { "r", "r" } },
1522 { INDEX_op_movi_i32, { "r" } },
1523 { INDEX_op_ld8u_i32, { "r", "r" } },
1524 { INDEX_op_ld8s_i32, { "r", "r" } },
1525 { INDEX_op_ld16u_i32, { "r", "r" } },
1526 { INDEX_op_ld16s_i32, { "r", "r" } },
1527 { INDEX_op_ld_i32, { "r", "r" } },
1528 { INDEX_op_st8_i32, { "rZ", "r" } },
1529 { INDEX_op_st16_i32, { "rZ", "r" } },
1530 { INDEX_op_st_i32, { "rZ", "r" } },
1532 { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
1533 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
1534 { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
1535 { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
1536 { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
1537 { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
1538 { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
1539 { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },
1541 { INDEX_op_and_i32, { "r", "rZ", "rI" } },
1542 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
1543 { INDEX_op_not_i32, { "r", "rZ" } },
1544 { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
1545 { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },
1547 { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
1548 { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
1549 { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
1550 { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
1551 { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },
1553 { INDEX_op_bswap16_i32, { "r", "r" } },
1554 { INDEX_op_bswap32_i32, { "r", "r" } },
1556 { INDEX_op_ext8s_i32, { "r", "rZ" } },
1557 { INDEX_op_ext16s_i32, { "r", "rZ" } },
/* "0" ties the input to the output register: INS reads and writes
   the destination in place. */
1559 { INDEX_op_deposit_i32, { "r", "0", "rZ" } },
1561 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
1562 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
1563 { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },
1565 { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1566 { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1567 { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },
/* With a 32-bit guest the address fits one register; the second set
   of entries (for the elided 64-bit-guest branch) carries the address
   as a register pair, hence the extra operand. */
1569 #if TARGET_LONG_BITS == 32
1570 { INDEX_op_qemu_ld8u, { "L", "lZ" } },
1571 { INDEX_op_qemu_ld8s, { "L", "lZ" } },
1572 { INDEX_op_qemu_ld16u, { "L", "lZ" } },
1573 { INDEX_op_qemu_ld16s, { "L", "lZ" } },
1574 { INDEX_op_qemu_ld32, { "L", "lZ" } },
1575 { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },
1577 { INDEX_op_qemu_st8, { "SZ", "SZ" } },
1578 { INDEX_op_qemu_st16, { "SZ", "SZ" } },
1579 { INDEX_op_qemu_st32, { "SZ", "SZ" } },
1580 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
1582 { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
1583 { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
1584 { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
1585 { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
1586 { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
1587 { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },
1589 { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
1590 { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
1591 { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
1592 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
/* Registers saved by the prologue and restored by the epilogue, in the
   order they are laid out on the stack.  Per the MIPS O32 ABI these are
   the callee-saved registers the generated code may clobber.
   NOTE(review): entries S1..S8 sit in the elided lines between S0 and
   RA — only the first and last entries are visible here. */
1597 static int tcg_target_callee_save_regs[] = {
1598 TCG_REG_S0, /* used for the global env (TCG_AREG0) */
1607 TCG_REG_RA, /* should be last for ABI compliance */
1610 /* Generate global QEMU prologue and epilogue code */
/* Emits, in order: stack-frame setup, callee-saved register spills,
   the indirect jump into the translated block, and (fall-through)
   the epilogue that every exit_tb returns to via tb_ret_addr.
   Every JR below places one useful instruction in its branch delay
   slot — do not reorder these pairs. */
1611 static void tcg_target_qemu_prologue(TCGContext *s)
1615 /* reserve some stack space, also for TCG temps. */
/* Frame layout (from SP up): outgoing call-arg area, the saved
   callee registers, then the TCG temp buffer; rounded up to the
   target stack alignment. */
1616 frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1617 + TCG_STATIC_CALL_ARGS_SIZE
1618 + CPU_TEMP_BUF_NLONGS * sizeof(long);
1619 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
1620 ~(TCG_TARGET_STACK_ALIGN - 1);
/* Tell TCG where the temp buffer lives relative to SP. */
1621 tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1622 + TCG_STATIC_CALL_ARGS_SIZE,
1623 CPU_TEMP_BUF_NLONGS * sizeof(long));
1626 tcg_out_addi(s, TCG_REG_SP, -frame_size);
/* Spill the callee-saved registers above the call-arg area. */
1627 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1628 tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1629 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
1632 /* Call generated code */
/* Jump to the TB address passed in the second argument register;
   the delay slot copies the env pointer (first argument register)
   into TCG_AREG0 before the target executes. */
1633 tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
1634 tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
/* Epilogue entry point: exit_tb jumps back here. */
1635 tb_ret_addr = s->code_ptr;
/* Restore callee-saved registers ... */
1638 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1639 tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1640 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
/* ... and return to the caller, popping the frame in the delay slot
   (the addi executes before the jump takes effect). */
1643 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
1644 tcg_out_addi(s, TCG_REG_SP, frame_size);
/* One-time backend initialization: declare which registers are
   allocatable, which are clobbered across calls (the clobber-set body
   is in elided lines), and which are permanently reserved, then
   register the operand-constraint table. */
1647 static void tcg_target_init(TCGContext *s)
/* All 32 GPRs are nominally available for 32-bit values; the reserved
   set below removes the ones TCG must never allocate. */
1649 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
1650 tcg_regset_set(tcg_target_call_clobber_regs,
1667 tcg_regset_clear(s->reserved_regs);
1668 tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
1669 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0); /* kernel use only */
1670 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1); /* kernel use only */
1671 tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT); /* internal use */
1672 tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0); /* internal use */
1673 tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA); /* return address */
1674 tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP); /* stack pointer */
1675 tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP); /* global pointer */
/* Hand the constraint table defined above to the TCG core. */
1677 tcg_add_target_add_op_defs(mips_op_defs);