2 * Tiny Code Generator for QEMU
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
27 #if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
28 # define TCG_NEED_BSWAP 0
30 # define TCG_NEED_BSWAP 1
/* Printable names for the MIPS registers, indexed by TCG register number.
   NOTE(review): table contents elided in this view. */
34 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
70 /* check if we really need so many registers :P */
/* Order in which the register allocator hands out registers. */
71 static const TCGReg tcg_target_reg_alloc_order[] = {
/* O32 ABI: the first four integer call arguments travel in registers
   (presumably a0-a3 — contents elided here; confirm against full file). */
97 static const TCGReg tcg_target_call_iarg_regs[4] = {
/* Registers used for function return values (a lo/hi pair for 64-bit). */
104 static const TCGReg tcg_target_call_oarg_regs[2] = {
/* Address of the epilogue; exit_tb jumps here (see INDEX_op_exit_tb). */
109 static uint8_t *tb_ret_addr;
/* Value for a lo16 relocation: the low 16 bits of TARGET, to be merged
   into an instruction's immediate field. */
111 static inline uint32_t reloc_lo16_val (void *pc, tcg_target_long target)
113 return target & 0xffff;
/* Patch the 16-bit immediate field of the instruction at PC with the
   low half of TARGET, preserving the opcode bits. */
116 static inline void reloc_lo16 (void *pc, tcg_target_long target)
118 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
119 | reloc_lo16_val(pc, target);
/* Value for a hi16 relocation: bits 16..31 of TARGET.
   NOTE(review): no carry from the low half is added here — whether a
   R_MIPS_HI16/LO16 carry adjustment is needed depends on how callers
   pair these relocs; confirm against the full file. */
122 static inline uint32_t reloc_hi16_val (void *pc, tcg_target_long target)
124 return (target >> 16) & 0xffff;
/* Patch the 16-bit immediate field of the instruction at PC with the
   high half of TARGET, preserving the opcode bits. */
127 static inline void reloc_hi16 (void *pc, tcg_target_long target)
129 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
130 | reloc_hi16_val(pc, target);
/* Compute the 16-bit PC-relative branch field for a branch at PC that
   targets TARGET.  The displacement is measured from PC + 4 (the delay
   slot) and encoded in units of 4-byte instructions. */
133 static inline uint32_t reloc_pc16_val (void *pc, tcg_target_long target)
137 disp = target - (tcg_target_long) pc - 4;
/* Range check: the byte displacement must survive a signed 18-bit
   round trip (16-bit field << 2).  The out-of-range path is elided
   in this view — presumably tcg_abort(); confirm in the full file. */
138 if (disp != (disp << 14) >> 14) {
139 return (disp >> 2) & 0xffff;
/* Patch the 16-bit branch-offset field of the instruction at PC so it
   branches to TARGET, preserving the opcode bits. */
145 static inline void reloc_pc16 (void *pc, tcg_target_long target)
147 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
148 | reloc_pc16_val(pc, target);
/* Compute the 26-bit jump-target field for a J-type instruction at PC.
   The jump can only reach targets within the same 256MB region as the
   delay-slot address, hence the high-nibble comparison. */
151 static inline uint32_t reloc_26_val (void *pc, tcg_target_long target)
153 if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
/* Word-aligned target, low 26 bits of the word address. */
157 return (target >> 2) & 0x3ffffff;
/* Patch the 26-bit target field of the J-type instruction at PC so it
   jumps to TARGET, preserving the opcode bits. */
160 static inline void reloc_pc26 (void *pc, tcg_target_long target)
162 *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
163 | reloc_26_val(pc, target);
/* Dispatch entry used by common TCG code: apply relocation TYPE at
   CODE_PTR so that it refers to VALUE.  The switch's case labels are
   elided in this view — presumably R_MIPS_LO16 / R_MIPS_HI16 /
   R_MIPS_PC16 / R_MIPS_26 in that order; confirm in the full file. */
166 static void patch_reloc(uint8_t *code_ptr, int type,
167 tcg_target_long value, tcg_target_long addend)
172 reloc_lo16(code_ptr, value);
175 reloc_hi16(code_ptr, value);
178 reloc_pc16(code_ptr, value);
181 reloc_pc26(code_ptr, value);
188 /* maximum number of register used for input function arguments */
/* Body elided in this view — presumably returns 4 (O32: a0-a3). */
189 static inline int tcg_target_get_call_iarg_regs_count(int flags)
194 /* parse target specific constraints */
/* Translate a single constraint letter from the op-def tables into
   TCGArgConstraint flags and an allowed-register set.  Case labels for
   the first two arms are elided in this view — presumably 'r' (any
   register) and 'C' (call target, pinned to t9 as required by the MIPS
   PIC calling convention); confirm in the full file. */
195 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
202 ct->ct |= TCG_CT_REG;
203 tcg_regset_set(ct->u.regs, 0xffffffff);
206 ct->ct |= TCG_CT_REG;
207 tcg_regset_clear(ct->u.regs);
208 tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
210 case 'L': /* qemu_ld output arg constraint */
/* v0 is excluded: the softmmu slow path returns its value there. */
211 ct->ct |= TCG_CT_REG;
212 tcg_regset_set(ct->u.regs, 0xffffffff);
213 tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
215 case 'l': /* qemu_ld input arg constraint */
/* Exclude the argument registers the slow-path helper call will
   clobber while marshalling the guest address. */
216 ct->ct |= TCG_CT_REG;
217 tcg_regset_set(ct->u.regs, 0xffffffff);
218 #if defined(CONFIG_SOFTMMU)
219 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
220 # if (TARGET_LONG_BITS == 64)
221 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2)
225 case 'S': /* qemu_st constraint */
/* As 'l', but more registers are consumed by the store helper's
   data argument, depending on guest address width. */
226 ct->ct |= TCG_CT_REG;
227 tcg_regset_set(ct->u.regs, 0xffffffff);
228 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
229 #if defined(CONFIG_SOFTMMU)
230 # if (TARGET_LONG_BITS == 32)
231 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
233 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
234 # if TARGET_LONG_BITS == 64
235 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
/* 'I': unsigned 16-bit immediate (ANDI/ORI/XORI-class insns). */
240 ct->ct |= TCG_CT_CONST_U16;
/* 'J': signed 16-bit immediate (ADDIU-class insns). */
243 ct->ct |= TCG_CT_CONST_S16;
246 /* We are cheating a bit here, using the fact that the register
247 ZERO is also the register number 0. Hence there is no need
248 to check for const_args in each instruction. */
249 ct->ct |= TCG_CT_CONST_ZERO;
259 /* test if a constant matches the constraint */
260 static inline int tcg_target_const_match(tcg_target_long val,
261 const TCGArgConstraint *arg_ct)
/* Accept: any constant, the constant zero (maps to $zero), a value
   representable as an unsigned 16-bit immediate, or as a signed one. */
265 if (ct & TCG_CT_CONST)
267 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
269 else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
271 else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
277 /* instruction opcodes */
/* I-type major opcodes: the value occupies bits 31..26 of the insn. */
279 OPC_BEQ = 0x04 << 26,
280 OPC_BNE = 0x05 << 26,
281 OPC_ADDIU = 0x09 << 26,
282 OPC_SLTI = 0x0A << 26,
283 OPC_SLTIU = 0x0B << 26,
284 OPC_ANDI = 0x0C << 26,
285 OPC_ORI = 0x0D << 26,
286 OPC_XORI = 0x0E << 26,
287 OPC_LUI = 0x0F << 26,
/* Load/store major opcodes (several, e.g. LB/LH/LW/SB/SH/SW, are
   elided in this view). */
291 OPC_LBU = 0x24 << 26,
292 OPC_LHU = 0x25 << 26,
293 OPC_LWU = 0x27 << 26,
/* R-type instructions: SPECIAL major opcode, function in bits 5..0. */
298 OPC_SPECIAL = 0x00 << 26,
299 OPC_SLL = OPC_SPECIAL | 0x00,
300 OPC_SRL = OPC_SPECIAL | 0x02,
301 OPC_SRA = OPC_SPECIAL | 0x03,
302 OPC_SLLV = OPC_SPECIAL | 0x04,
303 OPC_SRLV = OPC_SPECIAL | 0x06,
304 OPC_SRAV = OPC_SPECIAL | 0x07,
305 OPC_JR = OPC_SPECIAL | 0x08,
306 OPC_JALR = OPC_SPECIAL | 0x09,
307 OPC_MFHI = OPC_SPECIAL | 0x10,
308 OPC_MFLO = OPC_SPECIAL | 0x12,
309 OPC_MULT = OPC_SPECIAL | 0x18,
310 OPC_MULTU = OPC_SPECIAL | 0x19,
311 OPC_DIV = OPC_SPECIAL | 0x1A,
312 OPC_DIVU = OPC_SPECIAL | 0x1B,
313 OPC_ADDU = OPC_SPECIAL | 0x21,
314 OPC_SUBU = OPC_SPECIAL | 0x23,
315 OPC_AND = OPC_SPECIAL | 0x24,
316 OPC_OR = OPC_SPECIAL | 0x25,
317 OPC_XOR = OPC_SPECIAL | 0x26,
318 OPC_NOR = OPC_SPECIAL | 0x27,
319 OPC_SLT = OPC_SPECIAL | 0x2A,
320 OPC_SLTU = OPC_SPECIAL | 0x2B,
/* MIPS32R2 sign-extension instructions (SPECIAL3 + BSHFL function). */
322 OPC_SPECIAL3 = 0x1f << 26,
323 OPC_SEB = OPC_SPECIAL3 | 0x420,
324 OPC_SEH = OPC_SPECIAL3 | 0x620,
/* Emit an R-type instruction: OPC with rs (bits 25..21), rt (20..16)
   and rd (15..11) merged in. */
330 static inline void tcg_out_opc_reg(TCGContext *s, int opc,
331 TCGReg rd, TCGReg rs, TCGReg rt)
336 inst |= (rs & 0x1F) << 21;
337 inst |= (rt & 0x1F) << 16;
338 inst |= (rd & 0x1F) << 11;
/* Emit an I-type instruction: OPC with rs (bits 25..21), rt (20..16)
   and a 16-bit immediate (15..0) merged in. */
345 static inline void tcg_out_opc_imm(TCGContext *s, int opc,
346 TCGReg rt, TCGReg rs, TCGArg imm)
351 inst |= (rs & 0x1F) << 21;
352 inst |= (rt & 0x1F) << 16;
353 inst |= (imm & 0xffff);
/* Emit a branch instruction whose offset will be filled in later by a
   relocation (reloc_pc16). */
360 static inline void tcg_out_opc_br(TCGContext *s, int opc,
361 TCGReg rt, TCGReg rs)
363 /* We pay attention here to not modify the branch target by reading
364 the existing value and using it again. This ensure that caches and
365 memory are kept coherent during retranslation. */
366 uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);
368 tcg_out_opc_imm(s, opc, rt, rs, offset);
/* Emit a shift-by-immediate instruction: OPC with rt (bits 20..16),
   rd (15..11) and the 5-bit shift amount sa (10..6) merged in. */
374 static inline void tcg_out_opc_sa(TCGContext *s, int opc,
375 TCGReg rd, TCGReg rt, TCGArg sa)
380 inst |= (rt & 0x1F) << 16;
381 inst |= (rd & 0x1F) << 11;
382 inst |= (sa & 0x1F) << 6;
/* Emit a nop, used to fill branch/jump delay slots.  Body elided in
   this view — presumably sll $0,$0,0 (the all-zero encoding). */
387 static inline void tcg_out_nop(TCGContext *s)
/* Register-to-register move, implemented as addu ret, arg, $zero. */
392 static inline void tcg_out_mov(TCGContext *s, TCGType type,
393 TCGReg ret, TCGReg arg)
395 /* Simple reg-reg move, optimising out the 'do nothing' case */
397 tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
/* Load the constant ARG into REG using the shortest sequence:
   one ADDIU for signed 16-bit values, one ORI for unsigned 16-bit
   values, otherwise LUI + ORI. */
401 static inline void tcg_out_movi(TCGContext *s, TCGType type,
402 TCGReg reg, tcg_target_long arg)
404 if (arg == (int16_t)arg) {
405 tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
406 } else if (arg == (uint16_t)arg) {
407 tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
/* General case: high half via LUI, then OR in the low half. */
409 tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
410 tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
/* Byte-swap the low 16 bits of ARG into RET (high 16 bits of RET end
   up zero).  Uses AT as scratch, so neither operand may be AT. */
414 static inline void tcg_out_bswap16(TCGContext *s, TCGReg ret, TCGReg arg)
416 /* ret and arg can't be register at */
417 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
/* AT = old high byte moved down. */
421 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
422 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0x00ff);
/* ret = old low byte moved up, then combine. */
424 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
425 tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
426 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
/* Byte-swap the low 16 bits of ARG into RET and sign-extend the
   result to 32 bits.  Uses AT as scratch, so neither operand may
   be AT. */
429 static inline void tcg_out_bswap16s(TCGContext *s, TCGReg ret, TCGReg arg)
431 /* ret and arg can't be register at */
432 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
/* AT = old high byte moved down. */
436 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
437 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff);
/* ret = old low byte placed in bits 31..24, then arithmetic shift
   right both positions and sign-extends; finally combine. */
439 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
440 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
441 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
/* Full 32-bit byte swap of ARG into RET, built from shifts/masks/ORs.
   RET accumulates partial results, so RET must differ from ARG, and
   AT is used as scratch. */
444 static inline void tcg_out_bswap32(TCGContext *s, TCGReg ret, TCGReg arg)
446 /* ret and arg must be different and can't be register at */
447 if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
/* Byte 0 -> byte 3. */
451 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
/* Byte 3 -> byte 0. */
453 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
454 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
/* Byte 1 -> byte 2. */
456 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
457 tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
458 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
/* Byte 2 -> byte 1. */
460 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
461 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
462 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
/* Sign-extend the low 8 bits of ARG into RET: a single SEB when
   building for MIPS32R2, otherwise the classic SLL/SRA pair. */
465 static inline void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
467 #ifdef _MIPS_ARCH_MIPS32R2
468 tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
470 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
471 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
/* Sign-extend the low 16 bits of ARG into RET: a single SEH when
   building for MIPS32R2, otherwise the classic SLL/SRA pair. */
475 static inline void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
477 #ifdef _MIPS_ARCH_MIPS32R2
478 tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
480 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
481 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
/* Emit a load/store OPC of register ARG at ARG1 + ARG2.  If the
   offset fits a signed 16-bit immediate use it directly; otherwise
   materialise the address in AT first. */
485 static inline void tcg_out_ldst(TCGContext *s, int opc, TCGArg arg,
486 TCGReg arg1, TCGArg arg2)
488 if (arg2 == (int16_t) arg2) {
489 tcg_out_opc_imm(s, opc, arg, arg1, arg2);
491 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
492 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
493 tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
/* Generic 32-bit load for the register allocator (word-sized on this
   32-bit target). */
497 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
498 TCGReg arg1, tcg_target_long arg2)
500 tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
/* Generic 32-bit store for the register allocator (word-sized on this
   32-bit target). */
503 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
504 TCGReg arg1, tcg_target_long arg2)
506 tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
/* Add the constant VAL to REG in place; use ADDIU if VAL fits a
   signed 16-bit immediate, otherwise go through AT. */
509 static inline void tcg_out_addi(TCGContext *s, TCGReg reg, TCGArg val)
511 if (val == (int16_t)val) {
512 tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
514 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
515 tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
519 /* Helper routines for marshalling helper function arguments into
520 * the correct registers and stack.
521 * arg_num is where we want to put this argument, and is updated to be ready
522 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
523 * real registers, 4+ on stack.
525 * We provide routines for arguments which are: immediate, 32 bit
526 * value in register, 16 and 8 bit values in register (which must be zero
527 * extended before use) and 64 bit value in a lo:hi register pair.
/* Template: GET_ARG(dst) places the (possibly masked/loaded) value in
   dst — either directly in the argument register, or in AT and then
   spilled to the O32 outgoing-argument area on the stack. */
529 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
530 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
532 if (*arg_num < 4) { \
533 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
535 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
536 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
/* 8-bit register argument: zero-extend with ANDI 0xff. */
540 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
541 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
542 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
543 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
/* 16-bit register argument: zero-extend with ANDI 0xffff. */
544 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
545 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
546 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
547 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
/* 32-bit immediate argument: materialise with tcg_out_movi. */
548 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
549 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
550 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, TCGArg arg)
551 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
553 /* We don't use the macro for this one to avoid an unnecessary reg-reg
554 move when storing to the stack. */
/* 32-bit register argument: mov into the argument register, or store
   straight to the outgoing stack slot for arg_num >= 4. */
555 static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
559 tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
561 tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
/* 64-bit argument in a lo:hi register pair.  The O32 ABI requires
   64-bit arguments to start on an even register/slot, hence the
   round-up of *arg_num; halves are emitted in target byte order. */
566 static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
567 TCGReg arg_low, TCGReg arg_high)
569 (*arg_num) = (*arg_num + 1) & ~1;
571 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
572 tcg_out_call_iarg_reg32(s, arg_num, arg_high);
573 tcg_out_call_iarg_reg32(s, arg_num, arg_low);
575 tcg_out_call_iarg_reg32(s, arg_num, arg_low);
576 tcg_out_call_iarg_reg32(s, arg_num, arg_high);
/* Emit a conditional branch to LABEL_INDEX comparing ARG1 and ARG2
   under COND.  EQ/NE map directly to BEQ/BNE; ordered comparisons are
   synthesised as SLT/SLTU into AT followed by BEQ/BNE against $zero.
   The branch is emitted last, so the relocation below patches the
   instruction at code_ptr - 4 (case labels elided in this view). */
580 static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
581 TCGArg arg2, int label_index)
583 TCGLabel *l = &s->labels[label_index];
587 tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
590 tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
/* LT / LTU: branch if arg1 < arg2. */
593 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
594 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
597 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
598 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
/* GE / GEU: branch if !(arg1 < arg2). */
601 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
602 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
605 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
606 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
/* LE / LEU: branch if !(arg2 < arg1). */
609 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
610 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
613 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
614 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
/* GT / GTU: branch if arg2 < arg1. */
617 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
618 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
621 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
622 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
/* Label already resolved: patch now; otherwise record a reloc. */
629 reloc_pc16(s->code_ptr - 4, l->u.value);
631 tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
636 /* XXX: we implement it at the target level to avoid having to
637 handle cross basic blocks temporaries */
/* 64-bit (two-word) conditional branch: (arg2:arg1) COND (arg4:arg3).
   EQ/NE decompose into two 32-bit compares; ordered conditions first
   decide on the high words, then fall through to an unsigned compare
   of the low words when the high words are equal (case labels and
   some branches elided in this view). */
638 static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGArg arg1,
639 TCGArg arg2, TCGArg arg3, TCGArg arg4,
/* NE: taken if either half differs. */
646 tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
647 tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
/* Strict high-word decision per condition. */
653 tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
657 tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
661 tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
665 tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
/* High words compared; skip the low-word test unless they are equal.
   The BNE below is patched at the end to jump past the low-word
   compare. */
671 label_ptr = s->code_ptr;
672 tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
/* Low-word comparison (unsigned for the ordered conditions). */
677 tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
681 tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
685 tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
689 tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
693 tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
/* Resolve the skip branch emitted above. */
699 reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
/* Set RET to 1 if (ARG1 COND ARG2) else 0.  EQ/NE use XOR + SLTIU/SLTU
   against 1 (with shortcuts when one operand is zero); ordered
   conditions use SLT/SLTU, inverted via XORI 1 where needed (case
   labels elided in this view). */
702 static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
703 TCGArg arg1, TCGArg arg2)
/* EQ: ret = (arg1 ^ arg2) < 1, i.e. == 0. */
708 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
709 } else if (arg2 == 0) {
710 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
712 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
713 tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
/* NE: ret = 0 < (arg1 ^ arg2). */
718 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
719 } else if (arg2 == 0) {
720 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
722 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
723 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
/* LT / LTU: direct set-less-than. */
727 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
730 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
/* GE / GEU: !(arg1 < arg2). */
733 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
734 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
737 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
738 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
/* LE / LEU: !(arg2 < arg1). */
741 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
742 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
745 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
746 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
/* GT / GTU: arg2 < arg1. */
749 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
752 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
760 /* XXX: we implement it at the target level to avoid having to
761 handle cross basic blocks temporaries */
/* 64-bit setcond: RET = (arg2:arg1) COND (arg4:arg3).  EQ/NE combine
   per-half results with AND/OR; ordered conditions combine a strict
   high-word compare with an unsigned low-word compare gated on
   high-word equality.  Clobbers AT and T0 (case labels and some lines
   elided in this view). */
762 static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
763 TCGArg arg1, TCGArg arg2, TCGArg arg3, TCGArg arg4)
/* EQ: both halves must match. */
767 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
768 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
769 tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
/* NE: either half differing suffices. */
772 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
773 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
774 tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
/* AT = strict result on the high words. */
778 tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
782 tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
786 tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
790 tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
/* T0 = high words equal; ret = low-word (unsigned) result. */
797 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);
802 tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
806 tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
810 tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
814 tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
/* ret = (high strict) OR (high equal AND low result). */
820 tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
821 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
824 #if defined(CONFIG_SOFTMMU)
826 #include "../../softmmu_defs.h"
828 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
/* Slow-path load helpers indexed by access size log2 (b/w/l/q);
   entries elided in this view. */
830 static const void * const qemu_ld_helpers[4] = {
837 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
838 uintxx_t val, int mmu_idx) */
/* Slow-path store helpers indexed by access size log2 (b/w/l/q). */
839 static const void * const qemu_st_helpers[4] = {
/* Emit a guest memory load.  With CONFIG_SOFTMMU this generates an
   inline TLB lookup with a fast path (direct load through the TLB
   addend) and a slow path (call into qemu_ld_helpers); without it the
   load goes straight through GUEST_BASE.  OPC bits: 0..1 = size log2,
   bit 2 = sign-extend (several declarations/labels elided in this
   view). */
847 static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
850 TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
851 #if defined(CONFIG_SOFTMMU)
852 void *label1_ptr, *label2_ptr;
854 int mem_index, s_bits;
856 # if TARGET_LONG_BITS == 64
868 #if defined(CONFIG_SOFTMMU)
869 # if TARGET_LONG_BITS == 64
871 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
/* Map the lo/hi value registers to memory order for 64-bit data. */
886 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
887 data_reg1 = data_regh;
888 data_reg2 = data_regl;
890 data_reg1 = data_regl;
891 data_reg2 = data_regh;
894 data_reg1 = data_regl;
897 #if defined(CONFIG_SOFTMMU)
/* TLB lookup: index the mmu_idx table by the page bits of the guest
   address, fetch the stored tag, and compare it against the masked
   address (page bits plus alignment bits for this access size). */
898 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
899 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
900 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
901 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
902 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
903 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
904 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
906 # if TARGET_LONG_BITS == 64
/* 64-bit guest address: compare both halves of the TLB tag. */
907 label3_ptr = s->code_ptr;
908 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
911 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
912 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);
914 label1_ptr = s->code_ptr;
915 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
918 reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
920 label1_ptr = s->code_ptr;
921 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
/* Slow path: marshal (env, addr[, addr_hi], mem_index) and call the
   size-appropriate load helper through t9. */
927 tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
928 # if TARGET_LONG_BITS == 64
929 tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
931 tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
933 tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
934 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
935 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
/* Post-process the helper's return value in v0 (v1:v0 for 64-bit)
   according to the access size and signedness. */
940 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
943 tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
946 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
949 tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
952 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
955 tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
956 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
/* Unconditional branch over the fast path (beq $0,$0). */
962 label2_ptr = s->code_ptr;
963 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
966 /* label1: fast path */
967 reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);
/* Host address = guest address + TLB addend. */
969 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
970 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
971 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
/* User-mode path: host address = guest address + GUEST_BASE. */
973 if (GUEST_BASE == (int16_t)GUEST_BASE) {
974 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
976 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
977 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
/* Fast-path load proper, byte-swapping when guest and host
   endianness differ (TCG_NEED_BSWAP). */
983 tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
986 tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
989 if (TCG_NEED_BSWAP) {
990 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
991 tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
993 tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
997 if (TCG_NEED_BSWAP) {
998 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
999 tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
1001 tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
1005 if (TCG_NEED_BSWAP) {
1006 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
1007 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
1009 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
/* 64-bit load: two words, halves crossed when byte-swapping. */
1013 if (TCG_NEED_BSWAP) {
1014 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
1015 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
1016 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
1017 tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
1019 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
1020 tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
/* label2: slow path rejoins here. */
1027 #if defined(CONFIG_SOFTMMU)
1028 reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
/* Emit a guest memory store.  Mirrors tcg_out_qemu_ld: inline TLB
   lookup against addr_write with fast/slow paths under CONFIG_SOFTMMU,
   otherwise a direct GUEST_BASE-relative store.  OPC = access size
   log2 (several lines elided in this view). */
1032 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
1035 TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
1036 #if defined(CONFIG_SOFTMMU)
1037 uint8_t *label1_ptr, *label2_ptr;
1039 int mem_index, s_bits;
1042 #if TARGET_LONG_BITS == 64
1043 # if defined(CONFIG_SOFTMMU)
1044 uint8_t *label3_ptr;
/* Unpack the TCG op arguments: value reg(s), then address reg(s). */
1049 data_regl = *args++;
1051 data_regh = *args++;
1055 addr_regl = *args++;
1056 #if defined(CONFIG_SOFTMMU)
1057 # if TARGET_LONG_BITS == 64
1058 addr_regh = *args++;
1059 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
/* Map the lo/hi value registers to memory order for 64-bit data. */
1074 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1075 data_reg1 = data_regh;
1076 data_reg2 = data_regl;
1078 data_reg1 = data_regl;
1079 data_reg2 = data_regh;
1082 data_reg1 = data_regl;
1086 #if defined(CONFIG_SOFTMMU)
/* TLB lookup against the write-permission tag (addr_write). */
1087 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
1088 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
1089 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
1090 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1091 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
1092 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
1093 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
1095 # if TARGET_LONG_BITS == 64
/* 64-bit guest address: compare both halves of the TLB tag. */
1096 label3_ptr = s->code_ptr;
1097 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
1100 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1101 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);
1103 label1_ptr = s->code_ptr;
1104 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
1107 reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
1109 label1_ptr = s->code_ptr;
1110 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
/* Slow path: marshal (env, addr, value, mem_index) — the value is
   zero-extended to its size by the reg8/reg16 marshallers — and call
   the size-appropriate store helper through t9. */
1116 tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
1117 # if TARGET_LONG_BITS == 64
1118 tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
1120 tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
1124 tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
1127 tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
1130 tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
1133 tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
1138 tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
1139 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
1140 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
/* Unconditional branch over the fast path (beq $0,$0). */
1143 label2_ptr = s->code_ptr;
1144 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
1147 /* label1: fast path */
1148 reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);
/* Host address = guest address + TLB addend (kept in a0 here). */
1150 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
1151 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
1152 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
/* User-mode path: host address = guest address + GUEST_BASE. */
1154 if (GUEST_BASE == (int16_t)GUEST_BASE) {
1155 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
1157 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
1158 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
/* Fast-path store proper, byte-swapping through T0 when guest and
   host endianness differ. */
1165 tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
1168 if (TCG_NEED_BSWAP) {
1169 tcg_out_bswap16(s, TCG_REG_T0, data_reg1);
1170 tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
1172 tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
1176 if (TCG_NEED_BSWAP) {
1177 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1178 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1180 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
/* 64-bit store: two words, halves crossed when byte-swapping. */
1184 if (TCG_NEED_BSWAP) {
1185 tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
1186 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1187 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1188 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
1190 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
1191 tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
/* label2: slow path rejoins here. */
1198 #if defined(CONFIG_SOFTMMU)
1199 reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
/* Main per-opcode code generator: translate one TCG op (with its
   argument array and const-argument flags) into MIPS instructions.
   Many case labels, breaks and delay-slot nops are elided in this
   view. */
1203 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1204 const TCGArg *args, const int *const_args)
1207 case INDEX_op_exit_tb:
/* Return value in v0, then jump back to the epilogue. */
1208 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
1209 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
1210 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1213 case INDEX_op_goto_tb:
1214 if (s->tb_jmp_offset) {
1215 /* direct jump method */
1218 /* indirect jump method */
/* Load the chained TB address from tb_next[] and jump to it. */
1219 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, (tcg_target_long)(s->tb_next + args[0]));
1220 tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
1221 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1224 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
/* call: target already in a register ('C' constraint pins it to t9). */
1227 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);
/* jmp: indirect jump through a register. */
1231 tcg_out_opc_reg(s, OPC_JR, 0, args[0], 0);
/* br: unconditional branch == beq $0,$0. */
1235 tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
1238 case INDEX_op_mov_i32:
1239 tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
1241 case INDEX_op_movi_i32:
1242 tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
/* Host-side loads/stores of TCG globals (env offsets, etc.). */
1245 case INDEX_op_ld8u_i32:
1246 tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
1248 case INDEX_op_ld8s_i32:
1249 tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
1251 case INDEX_op_ld16u_i32:
1252 tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
1254 case INDEX_op_ld16s_i32:
1255 tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
1257 case INDEX_op_ld_i32:
1258 tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
1260 case INDEX_op_st8_i32:
1261 tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
1263 case INDEX_op_st16_i32:
1264 tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
1266 case INDEX_op_st_i32:
1267 tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
1270 case INDEX_op_add_i32:
1271 if (const_args[2]) {
1272 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
1274 tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
1277 case INDEX_op_add2_i32:
/* 64-bit add: low half into AT, carry = (sum <u operand) via SLTU,
   then high half plus carry; AT delays the write so args[0] may
   alias an input. */
1278 if (const_args[4]) {
1279 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
1281 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
1283 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
1284 if (const_args[5]) {
1285 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
1287 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
1289 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
1290 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
1292 case INDEX_op_sub_i32:
/* Constant subtrahend becomes ADDIU of the negation. */
1293 if (const_args[2]) {
1294 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
1296 tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
1299 case INDEX_op_sub2_i32:
/* 64-bit subtract: borrow = (minuend <u difference) via SLTU. */
1300 if (const_args[4]) {
1301 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
1303 tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
1305 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
1306 if (const_args[5]) {
1307 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
1309 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
1311 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
1312 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
/* Multiply/divide go through the HI/LO special registers. */
1314 case INDEX_op_mul_i32:
1315 tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
1316 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1318 case INDEX_op_mulu2_i32:
1319 tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
1320 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1321 tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
1323 case INDEX_op_div_i32:
1324 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1325 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1327 case INDEX_op_divu_i32:
1328 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1329 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1331 case INDEX_op_rem_i32:
1332 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1333 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1335 case INDEX_op_remu_i32:
1336 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1337 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1340 case INDEX_op_and_i32:
1341 if (const_args[2]) {
1342 tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
1344 tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
1347 case INDEX_op_or_i32:
1348 if (const_args[2]) {
1349 tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
1351 tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
1354 case INDEX_op_nor_i32:
1355 tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
1357 case INDEX_op_not_i32:
/* not x == nor x, $zero. */
1358 tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
1360 case INDEX_op_xor_i32:
1361 if (const_args[2]) {
1362 tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
1364 tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
/* Shifts: immediate form encodes sa; variable form takes the count
   in rs (note the swapped operand order of the *V instructions). */
1368 case INDEX_op_sar_i32:
1369 if (const_args[2]) {
1370 tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
1372 tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
1375 case INDEX_op_shl_i32:
1376 if (const_args[2]) {
1377 tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
1379 tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
1382 case INDEX_op_shr_i32:
1383 if (const_args[2]) {
1384 tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
1386 tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
1390 case INDEX_op_ext8s_i32:
1391 tcg_out_ext8s(s, args[0], args[1]);
1393 case INDEX_op_ext16s_i32:
1394 tcg_out_ext16s(s, args[0], args[1]);
1397 case INDEX_op_brcond_i32:
1398 tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
1400 case INDEX_op_brcond2_i32:
1401 tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
1404 case INDEX_op_setcond_i32:
1405 tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
1407 case INDEX_op_setcond2_i32:
1408 tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
/* Guest memory accesses: second argument is size log2, |4 = signed. */
1411 case INDEX_op_qemu_ld8u:
1412 tcg_out_qemu_ld(s, args, 0);
1414 case INDEX_op_qemu_ld8s:
1415 tcg_out_qemu_ld(s, args, 0 | 4);
1417 case INDEX_op_qemu_ld16u:
1418 tcg_out_qemu_ld(s, args, 1);
1420 case INDEX_op_qemu_ld16s:
1421 tcg_out_qemu_ld(s, args, 1 | 4);
1423 case INDEX_op_qemu_ld32:
1424 tcg_out_qemu_ld(s, args, 2);
1426 case INDEX_op_qemu_ld64:
1427 tcg_out_qemu_ld(s, args, 3);
1429 case INDEX_op_qemu_st8:
1430 tcg_out_qemu_st(s, args, 0);
1432 case INDEX_op_qemu_st16:
1433 tcg_out_qemu_st(s, args, 1);
1435 case INDEX_op_qemu_st32:
1436 tcg_out_qemu_st(s, args, 2);
1438 case INDEX_op_qemu_st64:
1439 tcg_out_qemu_st(s, args, 3);
/* Per-opcode operand constraints for the register allocator.
   The single-letter codes are decoded by this backend's
   target_parse_constraint() (not visible in this excerpt);
   "r" is a general register and "Z" appears to admit the
   hard-wired zero register for a zero constant — TODO confirm.
   "I"/"J"/"i" look like immediate-range classes and "L"/"l"/"S"
   like qemu_ld/st-specific register classes; verify against the
   constraint parser.  */
1447 static const TCGTargetOpDef mips_op_defs[] = {
1448 { INDEX_op_exit_tb, { } },
1449 { INDEX_op_goto_tb, { } },
1450 { INDEX_op_call, { "C" } },
1451 { INDEX_op_jmp, { "r" } },
1452 { INDEX_op_br, { } },
1454 { INDEX_op_mov_i32, { "r", "r" } },
1455 { INDEX_op_movi_i32, { "r" } },
1456 { INDEX_op_ld8u_i32, { "r", "r" } },
1457 { INDEX_op_ld8s_i32, { "r", "r" } },
1458 { INDEX_op_ld16u_i32, { "r", "r" } },
1459 { INDEX_op_ld16s_i32, { "r", "r" } },
1460 { INDEX_op_ld_i32, { "r", "r" } },
1461 { INDEX_op_st8_i32, { "rZ", "r" } },
1462 { INDEX_op_st16_i32, { "rZ", "r" } },
1463 { INDEX_op_st_i32, { "rZ", "r" } },
1465 { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
1466 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
1467 { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
1468 { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
1469 { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
1470 { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
1471 { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
1472 { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },
1474 { INDEX_op_and_i32, { "r", "rZ", "rI" } },
1475 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
1476 { INDEX_op_not_i32, { "r", "rZ" } },
1477 { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
1478 { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },
1480 { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
1481 { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
1482 { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
1484 { INDEX_op_ext8s_i32, { "r", "rZ" } },
1485 { INDEX_op_ext16s_i32, { "r", "rZ" } },
1487 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
1488 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
1489 { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },
1491 { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1492 { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1493 { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },
/* 32-bit guest addresses fit in one register ...  */
1495 #if TARGET_LONG_BITS == 32
1496 { INDEX_op_qemu_ld8u, { "L", "lZ" } },
1497 { INDEX_op_qemu_ld8s, { "L", "lZ" } },
1498 { INDEX_op_qemu_ld16u, { "L", "lZ" } },
1499 { INDEX_op_qemu_ld16s, { "L", "lZ" } },
1500 { INDEX_op_qemu_ld32, { "L", "lZ" } },
1501 { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },
1503 { INDEX_op_qemu_st8, { "SZ", "SZ" } },
1504 { INDEX_op_qemu_st16, { "SZ", "SZ" } },
1505 { INDEX_op_qemu_st32, { "SZ", "SZ" } },
1506 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
/* ... while 64-bit guest addresses need a register pair, hence the
   extra address operand below.  NOTE(review): the #else separating
   the two variants is elided from this excerpt.  */
1508 { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
1509 { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
1510 { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
1511 { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
1512 { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
1513 { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },
1515 { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
1516 { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
1517 { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
1518 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
/* Registers the prologue must save and the epilogue restore
   (o32 ABI callee-saved set).  NOTE(review): the entries between
   S0 and RA (presumably S1..S8) are elided from this excerpt.  */
1523 static int tcg_target_callee_save_regs[] = {
1524 TCG_REG_S0, /* used for the global env (TCG_AREG0) */
1533 TCG_REG_RA, /* should be last for ABI compliance */
1536 /* Generate global QEMU prologue and epilogue code */
/* The prologue spills the callee-saved registers, jumps into the
   translated code, and records tb_ret_addr so exit_tb can return
   here; the epilogue restores the registers and returns to the
   original caller.  */
1537 static void tcg_target_qemu_prologue(TCGContext *s)
1541 /* reserve some stack space, also for TCG temps. */
1542 frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1543 + TCG_STATIC_CALL_ARGS_SIZE
1544 + CPU_TEMP_BUF_NLONGS * sizeof(long);
/* Round the frame up to the target's stack alignment.  */
1545 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
1546 ~(TCG_TARGET_STACK_ALIGN - 1);
/* Tell TCG where its temp buffer lives within the frame: above the
   saved registers and the outgoing-call argument area.  */
1547 tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1548 + TCG_STATIC_CALL_ARGS_SIZE,
1549 CPU_TEMP_BUF_NLONGS * sizeof(long));
/* Allocate the frame and spill each callee-saved register above the
   static call-argument area.  */
1552 tcg_out_addi(s, TCG_REG_SP, -frame_size);
1553 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1554 tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1555 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
1558 /* Call generated code */
/* jr to the TB address (second argument register); the mov that
   loads env into TCG_AREG0 executes in the jr branch delay slot,
   which is why it is emitted *after* the jump.  */
1559 tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
1560 tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
/* exit_tb jumps back to this address.  */
1561 tb_ret_addr = s->code_ptr;
/* Epilogue: reload the callee-saved registers ...  */
1564 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1565 tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1566 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
/* ... and return, popping the frame in the jr delay slot.  */
1569 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
1570 tcg_out_addi(s, TCG_REG_SP, frame_size);
/* One-time backend initialization: declare which registers exist,
   which are call-clobbered, and which TCG must never allocate.  */
1573 static void tcg_target_init(TCGContext *s)
/* All 32 GPRs are nominally available for I32 values ...  */
1575 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
/* ... minus the call-clobbered set (continuation lines elided from
   this excerpt).  */
1576 tcg_regset_set(tcg_target_call_clobber_regs,
/* Registers the allocator must leave alone.  */
1593 tcg_regset_clear(s->reserved_regs);
1594 tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
1595 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0); /* kernel use only */
1596 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1); /* kernel use only */
1597 tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT); /* internal use */
1598 tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0); /* internal use */
1599 tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA); /* return address */
1600 tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP); /* stack pointer */
1601 tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP); /* global pointer */
/* Register the operand-constraint table with the TCG core.  */
1603 tcg_add_target_add_op_defs(mips_op_defs);