/*
 * Tiny Code Generator for QEMU
 *
 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif
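/* TCG_NEED_BSWAP is non-zero when the host and guest byte orders differ; the
   qemu_ld/qemu_st paths below then byte-swap data around each guest memory
   access. */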
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {

/* check if we really need so many registers :P */
static const TCGReg tcg_target_reg_alloc_order[] = {

static const TCGReg tcg_target_call_iarg_regs[4] = {

static const TCGReg tcg_target_call_oarg_regs[2] = {

static uint8_t *tb_ret_addr;
static inline uint32_t reloc_lo16_val (void *pc, tcg_target_long target)
{
    return target & 0xffff;
}

static inline void reloc_lo16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_lo16_val(pc, target);
}

static inline uint32_t reloc_hi16_val (void *pc, tcg_target_long target)
{
    return (target >> 16) & 0xffff;
}

static inline void reloc_hi16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_hi16_val(pc, target);
}
static inline uint32_t reloc_pc16_val (void *pc, tcg_target_long target)
{
    int32_t disp;

    disp = target - (tcg_target_long) pc - 4;
    if (disp != (disp << 14) >> 14) {
        tcg_abort();
    }
    return (disp >> 2) & 0xffff;
}

static inline void reloc_pc16 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_pc16_val(pc, target);
}

static inline uint32_t reloc_26_val (void *pc, tcg_target_long target)
{
    if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
        tcg_abort();
    }
    return (target >> 2) & 0x3ffffff;
}

static inline void reloc_pc26 (void *pc, tcg_target_long target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
                       | reloc_26_val(pc, target);
}
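/*
 * Worked example (illustrative): for a branch located at address pc whose
 * target is pc + 8, reloc_pc16_val computes disp = 8 - 4 = 4 and stores
 * disp >> 2 = 1 in the 16-bit offset field.  The CPU sign-extends that field,
 * shifts it left by two and adds it to the address of the delay slot, which
 * yields (pc + 4) + 4 = pc + 8 again.
 */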
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
    reloc_lo16(code_ptr, value);
    reloc_hi16(code_ptr, value);
    reloc_pc16(code_ptr, value);
    reloc_pc26(code_ptr, value);
/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 4;
}
/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch(ct_str[0]) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        break;
    case 'C':
        ct->ct |= TCG_CT_REG;
        tcg_regset_clear(ct->u.regs);
        tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
        break;
    case 'L': /* qemu_ld output arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
        break;
    case 'l': /* qemu_ld input arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
# if (TARGET_LONG_BITS == 64)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# endif
#endif
        break;
    case 'S': /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
#if defined(CONFIG_SOFTMMU)
# if (TARGET_LONG_BITS == 32)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
# endif
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# if TARGET_LONG_BITS == 64
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
# endif
#endif
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_U16;
        break;
    case 'J':
        ct->ct |= TCG_CT_CONST_S16;
        break;
    case 'Z':
        /* We are cheating a bit here, using the fact that the register
           ZERO is also the register number 0. Hence there is no need
           to check for const_args in each instruction. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;
    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
        return 1;
    else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
        return 1;
    else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
        return 1;
    else
        return 0;
}
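/* Constraint letters, as used by the operation definitions at the end of this
   file: 'I' accepts an unsigned 16-bit immediate (the ANDI/ORI/XORI range),
   'J' a signed 16-bit immediate (the ADDIU range), and 'Z' the constant zero,
   which comes for free through the ZERO register. */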
/* instruction opcodes */
enum {
    OPC_BEQ = 0x04 << 26,
    OPC_BNE = 0x05 << 26,
    OPC_BLEZ = 0x06 << 26,
    OPC_BGTZ = 0x07 << 26,
    OPC_ADDIU = 0x09 << 26,
    OPC_SLTI = 0x0A << 26,
    OPC_SLTIU = 0x0B << 26,
    OPC_ANDI = 0x0C << 26,
    OPC_ORI = 0x0D << 26,
    OPC_XORI = 0x0E << 26,
    OPC_LUI = 0x0F << 26,
    OPC_LB = 0x20 << 26,
    OPC_LH = 0x21 << 26,
    OPC_LW = 0x23 << 26,
    OPC_LBU = 0x24 << 26,
    OPC_LHU = 0x25 << 26,
    OPC_LWU = 0x27 << 26,
    OPC_SB = 0x28 << 26,
    OPC_SH = 0x29 << 26,
    OPC_SW = 0x2B << 26,

    OPC_SPECIAL = 0x00 << 26,
    OPC_SLL = OPC_SPECIAL | 0x00,
    OPC_SRL = OPC_SPECIAL | 0x02,
    OPC_SRA = OPC_SPECIAL | 0x03,
    OPC_SLLV = OPC_SPECIAL | 0x04,
    OPC_SRLV = OPC_SPECIAL | 0x06,
    OPC_SRAV = OPC_SPECIAL | 0x07,
    OPC_JR = OPC_SPECIAL | 0x08,
    OPC_JALR = OPC_SPECIAL | 0x09,
    OPC_MFHI = OPC_SPECIAL | 0x10,
    OPC_MFLO = OPC_SPECIAL | 0x12,
    OPC_MULT = OPC_SPECIAL | 0x18,
    OPC_MULTU = OPC_SPECIAL | 0x19,
    OPC_DIV = OPC_SPECIAL | 0x1A,
    OPC_DIVU = OPC_SPECIAL | 0x1B,
    OPC_ADDU = OPC_SPECIAL | 0x21,
    OPC_SUBU = OPC_SPECIAL | 0x23,
    OPC_AND = OPC_SPECIAL | 0x24,
    OPC_OR = OPC_SPECIAL | 0x25,
    OPC_XOR = OPC_SPECIAL | 0x26,
    OPC_NOR = OPC_SPECIAL | 0x27,
    OPC_SLT = OPC_SPECIAL | 0x2A,
    OPC_SLTU = OPC_SPECIAL | 0x2B,

    OPC_REGIMM = 0x01 << 26,
    OPC_BLTZ = OPC_REGIMM | (0x00 << 16),
    OPC_BGEZ = OPC_REGIMM | (0x01 << 16),

    OPC_SPECIAL3 = 0x1f << 26,
    OPC_SEB = OPC_SPECIAL3 | 0x420,
    OPC_SEH = OPC_SPECIAL3 | 0x620,
};
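/*
 * The emitters below assemble the MIPS instruction words by hand.  R-type
 * operations are built by tcg_out_opc_reg (rs/rt/rd fields) and
 * tcg_out_opc_sa (rt/rd/sa fields for constant shifts), while I-type
 * operations are built by tcg_out_opc_imm (rs/rt plus a 16-bit immediate).
 * Each field is masked to its width before being inserted.
 */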
static inline void tcg_out_opc_reg(TCGContext *s, int opc,
                                   TCGReg rd, TCGReg rs, TCGReg rt)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    tcg_out32(s, inst);
}

static inline void tcg_out_opc_imm(TCGContext *s, int opc,
                                   TCGReg rt, TCGReg rs, TCGArg imm)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (imm & 0xffff);
    tcg_out32(s, inst);
}
static inline void tcg_out_opc_br(TCGContext *s, int opc,
                                  TCGReg rt, TCGReg rs)
{
    /* The branch target is deliberately left unmodified: the existing 16-bit
       offset is read back and emitted again unchanged.  This ensures that
       caches and memory are kept coherent during retranslation. */
    uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);

    tcg_out_opc_imm(s, opc, rt, rs, offset);
}
static inline void tcg_out_opc_sa(TCGContext *s, int opc,
                                  TCGReg rd, TCGReg rt, TCGArg sa)
{
    int32_t inst;

    inst = opc;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    inst |= (sa & 0x1F) << 6;
    tcg_out32(s, inst);
}

static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out32(s, 0);
}
static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    /* Simple reg-reg move, optimising out the 'do nothing' case */
    if (ret != arg) {
        tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
    }
}
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg reg, tcg_target_long arg)
{
    if (arg == (int16_t)arg) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
    } else if (arg == (uint16_t)arg) {
        tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
    } else {
        tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
        tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
    }
}
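/*
 * Example (illustrative): a constant that fits neither a signed nor an
 * unsigned 16-bit immediate, say 0x12345678, is materialised as the classic
 * pair
 *     lui  reg, 0x1234
 *     ori  reg, reg, 0x5678
 * whereas something like -32768 fits (int16_t) and becomes a single ADDIU
 * from $zero.
 */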
static inline void tcg_out_bswap16(TCGContext *s, TCGReg ret, TCGReg arg)
{
    /* ret and arg can't be the AT register */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0x00ff);

    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}

static inline void tcg_out_bswap16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    /* ret and arg can't be the AT register */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff);

    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}
static inline void tcg_out_bswap32(TCGContext *s, TCGReg ret, TCGReg arg)
{
    /* ret and arg must be different and can't be the AT register */
    if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
    tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}
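/*
 * Worked example (illustrative) for tcg_out_bswap32 with arg = 0x11223344:
 * SLL by 24 gives 0x44000000; the top byte is brought down with SRL/OR,
 * giving 0x44000011; the 0x33 byte is isolated with ANDI 0xff00, shifted
 * left by 8 and merged (0x44330011); finally the 0x22 byte is shifted right
 * by 8, masked to 0xff00 and merged, producing 0x44332211.
 */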
static inline void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#ifdef _MIPS_ARCH_MIPS32R2
    tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
#endif
}

static inline void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
#ifdef _MIPS_ARCH_MIPS32R2
    tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
#endif
}
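/* SEB and SEH only exist from MIPS32 release 2 onwards, hence the
   shift-left/arithmetic-shift-right fallback used on older cores. */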
static inline void tcg_out_ldst(TCGContext *s, int opc, TCGArg arg,
                                TCGReg arg1, TCGArg arg2)
{
    if (arg2 == (int16_t) arg2) {
        tcg_out_opc_imm(s, opc, arg, arg1, arg2);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
        tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
    }
}

static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
}

static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
}
static inline void tcg_out_addi(TCGContext *s, TCGReg reg, TCGArg val)
{
    if (val == (int16_t)val) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
        tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
    }
}
/* Helper routines for marshalling helper function arguments into
 * the correct registers and stack.
 * arg_num is where we want to put this argument, and is updated to be ready
 * for the next call. arg is the argument itself. Note that arg_num 0..3 are
 * real registers, 4 and up go on the stack.
 *
 * We provide routines for arguments which are: immediate, 32 bit
 * value in register, 16 and 8 bit values in register (which must be zero
 * extended before use) and 64 bit value in a lo:hi register pair.
 */
#define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM)                               \
static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM)                 \
{                                                                              \
    if (*arg_num < 4) {                                                        \
        DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
    } else {                                                                   \
        DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT);                          \
        tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num));   \
    }                                                                          \
    (*arg_num)++;                                                              \
}
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG

#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG

#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_movi(s, TCG_TYPE_I32, A, arg);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, TCGArg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
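/*
 * Example (illustrative, assuming the usual a0-a3 argument registers): with
 * *arg_num == 1, tcg_out_call_iarg_reg8(s, &arg_num, TCG_REG_S2) emits
 *     andi a1, s2, 0xff
 * and leaves *arg_num == 2.  Once *arg_num reaches 4, the zero-extended
 * value is built in AT and stored to the outgoing argument area on the
 * stack instead.
 */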
/* We don't use the macro for this one to avoid an unnecessary reg-reg
   move when storing to the stack. */
static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
                                           TCGReg arg)
{
    if (*arg_num < 4) {
        tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
    } else {
        tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
    }
    (*arg_num)++;
}
static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
                                           TCGReg arg_low, TCGReg arg_high)
{
    (*arg_num) = (*arg_num + 1) & ~1;

#if defined(TCG_TARGET_WORDS_BIGENDIAN)
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
#else
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
#endif
}
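/* Rounding arg_num up to an even value mirrors the o32 calling convention,
   where a 64-bit argument occupies an aligned (even, odd) register pair or
   an 8-byte aligned stack slot. */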
static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGArg arg1,
                           TCGArg arg2, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
    tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
    tcg_out_opc_br(s, OPC_BLTZ, 0, arg1);
    tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
    tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
    tcg_out_opc_br(s, OPC_BGEZ, 0, arg1);
    tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
    tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
    tcg_out_opc_br(s, OPC_BLEZ, 0, arg1);
    tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
    tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
    tcg_out_opc_br(s, OPC_BGTZ, 0, arg1);
    tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
    tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);

    reloc_pc16(s->code_ptr - 4, l->u.value);
    tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
/* XXX: we implement it at the target level to avoid having to
   handle cross-basic-block temporaries */
static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGArg arg1,
                            TCGArg arg2, TCGArg arg3, TCGArg arg4,
                            int label_index)
    tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
    tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
    tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
    tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
    tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
    tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);

    label_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, arg2, arg4);

    tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
    tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
    tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
    tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
    tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);

    reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGArg arg1, TCGArg arg2)
        tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
    } else if (arg2 == 0) {
        tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
        tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
        tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
    } else if (arg2 == 0) {
        tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
        tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
        tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
    tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
    tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
    tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
    tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
    tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
    tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
    tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
    tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
    tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
    tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
    tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
    tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
/* XXX: we implement it at the target level to avoid having to
   handle cross-basic-block temporaries */
static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                             TCGArg arg1, TCGArg arg2, TCGArg arg3, TCGArg arg4)
    tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
    tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
    tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
    tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
    tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
    tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
    tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
    tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
    tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
    tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
    tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);
    tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
    tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
    tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
    tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
    tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
   int mmu_idx) */
static const void * const qemu_ld_helpers[4] = {

/* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
   uintxx_t val, int mmu_idx) */
static const void * const qemu_st_helpers[4] = {
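/* Both helper tables are indexed by s_bits, i.e. log2 of the access size:
   entry 0 handles byte accesses, 1 halfword, 2 word and 3 doubleword. */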
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
    TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
#if defined(CONFIG_SOFTMMU)
    void *label1_ptr, *label2_ptr;
    int mem_index, s_bits;
# if TARGET_LONG_BITS == 64

#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
# if defined(TCG_TARGET_WORDS_BIGENDIAN)

#if defined(TCG_TARGET_WORDS_BIGENDIAN)
    data_reg1 = data_regh;
    data_reg2 = data_regl;
    data_reg1 = data_regl;
    data_reg2 = data_regh;
    data_reg1 = data_regl;

#if defined(CONFIG_SOFTMMU)
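    /* Inline TLB lookup (fast path); a sketch of what the code below emits:
       the address is hashed into a TLB index (SRL + ANDI), added to the env
       pointer to reach the tlb_table entry, and the cached tag is compared
       against the page-aligned address.  On a mismatch we branch to the
       slow-path helper call; on a match the entry's addend is added to the
       guest address and host memory is accessed directly. */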
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);

    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);

    tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
    tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
    tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
    tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
    tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
    tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
    tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);

    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);

    tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
    tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
    if (TCG_NEED_BSWAP) {
        tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
        tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
        tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
    if (TCG_NEED_BSWAP) {
        tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
        tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
        tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
    if (TCG_NEED_BSWAP) {
        tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
        tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
        tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
    if (TCG_NEED_BSWAP) {
        tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
        tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
        tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
        tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
        tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
        tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);

#if defined(CONFIG_SOFTMMU)
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
    TCGReg addr_regl, data_regl, data_regh, data_reg1, data_reg2;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
    int mem_index, s_bits;
#if TARGET_LONG_BITS == 64
# if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;

    data_regl = *args++;
    data_regh = *args++;
    addr_regl = *args++;
#if defined(CONFIG_SOFTMMU)
# if TARGET_LONG_BITS == 64
    addr_regh = *args++;
# if defined(TCG_TARGET_WORDS_BIGENDIAN)

#if defined(TCG_TARGET_WORDS_BIGENDIAN)
    data_reg1 = data_regh;
    data_reg2 = data_regl;
    data_reg1 = data_regl;
    data_reg2 = data_regh;
    data_reg1 = data_regl;

#if defined(CONFIG_SOFTMMU)
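    /* The same inline TLB lookup as in tcg_out_qemu_ld above, except that
       the comparison is made against the entry's addr_write tag. */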
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);

    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);

    tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
    tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
    tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
    tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);

    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);

    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);

    tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
    if (TCG_NEED_BSWAP) {
        tcg_out_bswap16(s, TCG_REG_T0, data_reg1);
        tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
        tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
    if (TCG_NEED_BSWAP) {
        tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
        tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
        tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
    if (TCG_NEED_BSWAP) {
        tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
        tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
        tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
        tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
        tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
        tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);

#if defined(CONFIG_SOFTMMU)
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                              const TCGArg *args, const int *const_args)
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
        tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);

    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            /* indirect jump method */
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
            tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;

        tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);

        tcg_out_opc_reg(s, OPC_JR, 0, args[0], 0);

        tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);

    case INDEX_op_mov_i32:
        tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);

    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
    case INDEX_op_st8_i32:
        tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
    case INDEX_op_st_i32:
        tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);

    case INDEX_op_add_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
            tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
    case INDEX_op_add2_i32:
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
            tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
            tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
        tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
    case INDEX_op_sub_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
            tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
    case INDEX_op_sub2_i32:
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
            tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
        tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
    case INDEX_op_mul_i32:
        tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
    case INDEX_op_mulu2_i32:
        tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
    case INDEX_op_div_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
    case INDEX_op_divu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
    case INDEX_op_rem_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
    case INDEX_op_remu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);

    case INDEX_op_and_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
            tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
    case INDEX_op_or_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
            tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
    case INDEX_op_nor_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
    case INDEX_op_not_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
    case INDEX_op_xor_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
            tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);

    case INDEX_op_sar_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
            tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
            tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
            tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);

    case INDEX_op_ext8s_i32:
        tcg_out_ext8s(s, args[0], args[1]);
    case INDEX_op_ext16s_i32:
        tcg_out_ext16s(s, args[0], args[1]);

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);

    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
static const TCGTargetOpDef mips_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "C" } },
    { INDEX_op_jmp, { "r" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
    { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
    { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },

    { INDEX_op_and_i32, { "r", "rZ", "rI" } },
    { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_not_i32, { "r", "rZ" } },
    { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
    { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },

    { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_sar_i32, { "r", "rZ", "ri" } },

    { INDEX_op_ext8s_i32, { "r", "rZ" } },
    { INDEX_op_ext16s_i32, { "r", "rZ" } },

    { INDEX_op_brcond_i32, { "rZ", "rZ" } },
    { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },

    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },

#if TARGET_LONG_BITS == 32
    { INDEX_op_qemu_ld8u, { "L", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
#else
    { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
#endif
};
static int tcg_target_callee_save_regs[] = {
    TCG_REG_S0,       /* used for the global env (TCG_AREG0) */
    TCG_REG_RA,       /* should be last for ABI compliance */
};
/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size;

    /* reserve some stack space, also for TCG temps. */
    frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                 + TCG_STATIC_CALL_ARGS_SIZE
                 + CPU_TEMP_BUF_NLONGS * sizeof(long);
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                 ~(TCG_TARGET_STACK_ALIGN - 1);
    tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                                 + TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));

    tcg_out_addi(s, TCG_REG_SP, -frame_size);
    for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    /* Call generated code */
    tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
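    /* Note the ordering of the two instructions above: the move of the env
       pointer into TCG_AREG0 sits in the delay slot of the jump into the
       generated code, so it still executes before the first translated
       instruction.  The epilogue's jr $ra / stack adjustment pair below
       relies on the same trick. */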
    tb_ret_addr = s->code_ptr;

    for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
    tcg_out_addi(s, TCG_REG_SP, frame_size);
}
static void tcg_target_init(TCGContext *s)
{
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
    tcg_regset_set(tcg_target_call_clobber_regs,

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA);   /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);   /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP);   /* global pointer */

    tcg_add_target_add_op_defs(mips_op_defs);
}