2 * Tiny Code Generator for QEMU
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
27 #if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
28 # define TCG_NEED_BSWAP 0
30 # define TCG_NEED_BSWAP 1
34 static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
70 /* check if we really need so many registers :P */
71 static const int tcg_target_reg_alloc_order[] = {
97 static const int tcg_target_call_iarg_regs[4] = {
104 static const int tcg_target_call_oarg_regs[2] = {
109 static uint8_t *tb_ret_addr;
111 static inline uint32_t reloc_lo16_val (void *pc, tcg_target_long target)
113 return target & 0xffff;
116 static inline void reloc_lo16 (void *pc, tcg_target_long target)
118 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
119 | reloc_lo16_val(pc, target);
122 static inline uint32_t reloc_hi16_val (void *pc, tcg_target_long target)
124 return (target >> 16) & 0xffff;
127 static inline void reloc_hi16 (void *pc, tcg_target_long target)
129 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
130 | reloc_hi16_val(pc, target);
133 static inline uint32_t reloc_pc16_val (void *pc, tcg_target_long target)
137 disp = target - (tcg_target_long) pc - 4;
138 if (disp != (disp << 14) >> 14) {
142 return (disp >> 2) & 0xffff;
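/* Note on the arithmetic above: a MIPS branch encodes a signed 16-bit
   offset counted in 32-bit instruction words, relative to the address of
   the delay slot (branch address + 4).  Hence the "- 4" and the ">> 2",
   and the (disp << 14) >> 14 test checks that the byte displacement fits
   in the 18 bits (a 16-bit field scaled by 4) the instruction can hold. */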
145 static inline void reloc_pc16 (void *pc, tcg_target_long target)
147 *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
148 | reloc_pc16_val(pc, target);
151 static inline uint32_t reloc_26_val (void *pc, tcg_target_long target)
153 if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
157 return (target >> 2) & 0x3ffffff;
160 static inline void reloc_pc26 (void *pc, tcg_target_long target)
162 *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
163 | reloc_26_val(pc, target);
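/* A J-type jump stores a 26-bit instruction-word index that replaces bits
   27..2 of the address, while bits 31..28 are taken from the address of the
   delay slot.  That is why reloc_26_val() checks that the target lies in the
   same 256 MB region as pc + 4. */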
166 static void patch_reloc(uint8_t *code_ptr, int type,
167 tcg_target_long value, tcg_target_long addend)
172 reloc_lo16(code_ptr, value);
175 reloc_hi16(code_ptr, value);
178 reloc_pc16(code_ptr, value);
181 reloc_pc26(code_ptr, value);
188 /* maximum number of registers used for input function arguments */
189 static inline int tcg_target_get_call_iarg_regs_count(int flags)
194 /* parse target specific constraints */
195 static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
202 ct->ct |= TCG_CT_REG;
203 tcg_regset_set(ct->u.regs, 0xffffffff);
206 ct->ct |= TCG_CT_REG;
207 tcg_regset_clear(ct->u.regs);
208 tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
210 case 'L': /* qemu_ld output arg constraint */
211 ct->ct |= TCG_CT_REG;
212 tcg_regset_set(ct->u.regs, 0xffffffff);
213 tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
215 case 'l': /* qemu_ld input arg constraint */
216 ct->ct |= TCG_CT_REG;
217 tcg_regset_set(ct->u.regs, 0xffffffff);
218 #if defined(CONFIG_SOFTMMU)
219 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
222 case 'S': /* qemu_st constraint */
223 ct->ct |= TCG_CT_REG;
224 tcg_regset_set(ct->u.regs, 0xffffffff);
225 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
226 #if defined(CONFIG_SOFTMMU)
227 # if TARGET_LONG_BITS == 64
228 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
230 tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
234 ct->ct |= TCG_CT_CONST_U16;
237 ct->ct |= TCG_CT_CONST_S16;
240 /* We are cheating a bit here, using the fact that the register
241 ZERO is also the register number 0. Hence there is no need
242 to check for const_args in each instruction. */
243 ct->ct |= TCG_CT_CONST_ZERO;
253 /* test if a constant matches the constraint */
254 static inline int tcg_target_const_match(tcg_target_long val,
255 const TCGArgConstraint *arg_ct)
259 if (ct & TCG_CT_CONST)
261 else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
263 else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
265 else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
271 /* instruction opcodes */
273 OPC_BEQ = 0x04 << 26,
274 OPC_BNE = 0x05 << 26,
275 OPC_ADDIU = 0x09 << 26,
276 OPC_SLTI = 0x0A << 26,
277 OPC_SLTIU = 0x0B << 26,
278 OPC_ANDI = 0x0C << 26,
279 OPC_ORI = 0x0D << 26,
280 OPC_XORI = 0x0E << 26,
281 OPC_LUI = 0x0F << 26,
285 OPC_LBU = 0x24 << 26,
286 OPC_LHU = 0x25 << 26,
287 OPC_LWU = 0x27 << 26,
292 OPC_SPECIAL = 0x00 << 26,
293 OPC_SLL = OPC_SPECIAL | 0x00,
294 OPC_SRL = OPC_SPECIAL | 0x02,
295 OPC_SRA = OPC_SPECIAL | 0x03,
296 OPC_SLLV = OPC_SPECIAL | 0x04,
297 OPC_SRLV = OPC_SPECIAL | 0x06,
298 OPC_SRAV = OPC_SPECIAL | 0x07,
299 OPC_JR = OPC_SPECIAL | 0x08,
300 OPC_JALR = OPC_SPECIAL | 0x09,
301 OPC_MFHI = OPC_SPECIAL | 0x10,
302 OPC_MFLO = OPC_SPECIAL | 0x12,
303 OPC_MULT = OPC_SPECIAL | 0x18,
304 OPC_MULTU = OPC_SPECIAL | 0x19,
305 OPC_DIV = OPC_SPECIAL | 0x1A,
306 OPC_DIVU = OPC_SPECIAL | 0x1B,
307 OPC_ADDU = OPC_SPECIAL | 0x21,
308 OPC_SUBU = OPC_SPECIAL | 0x23,
309 OPC_AND = OPC_SPECIAL | 0x24,
310 OPC_OR = OPC_SPECIAL | 0x25,
311 OPC_XOR = OPC_SPECIAL | 0x26,
312 OPC_NOR = OPC_SPECIAL | 0x27,
313 OPC_SLT = OPC_SPECIAL | 0x2A,
314 OPC_SLTU = OPC_SPECIAL | 0x2B,
316 OPC_SPECIAL3 = 0x1f << 26,
317 OPC_SEB = OPC_SPECIAL3 | 0x420,
318 OPC_SEH = OPC_SPECIAL3 | 0x620,
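/* The emitters below pack the classic MIPS instruction fields:
     R-type: opcode(6) rs(5) rt(5) rd(5) sa(5) funct(6)
     I-type: opcode(6) rs(5) rt(5) imm(16)
   As an illustrative example, "addu $v0, $a0, $a1" (rd = 2, rs = 4, rt = 5,
   funct = 0x21) comes out of tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0,
   TCG_REG_A0, TCG_REG_A1) as the word 0x00851021. */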
324 static inline void tcg_out_opc_reg(TCGContext *s, int opc, int rd, int rs, int rt)
329 inst |= (rs & 0x1F) << 21;
330 inst |= (rt & 0x1F) << 16;
331 inst |= (rd & 0x1F) << 11;
338 static inline void tcg_out_opc_imm(TCGContext *s, int opc, int rt, int rs, int imm)
343 inst |= (rs & 0x1F) << 21;
344 inst |= (rt & 0x1F) << 16;
345 inst |= (imm & 0xffff);
352 static inline void tcg_out_opc_br(TCGContext *s, int opc, int rt, int rs)
354 /* We take care not to modify the branch target here: the existing
355    offset is read back and re-emitted unchanged. This ensures that
356    caches and memory are kept coherent during retranslation. */
357 uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);
359 tcg_out_opc_imm(s, opc, rt, rs, offset);
365 static inline void tcg_out_opc_sa(TCGContext *s, int opc, int rd, int rt, int sa)
370 inst |= (rt & 0x1F) << 16;
371 inst |= (rd & 0x1F) << 11;
372 inst |= (sa & 0x1F) << 6;
377 static inline void tcg_out_nop(TCGContext *s)
382 static inline void tcg_out_mov(TCGContext *s, TCGType type,
383 TCGReg ret, TCGReg arg)
385 tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
388 static inline void tcg_out_movi(TCGContext *s, TCGType type,
389 TCGReg reg, tcg_target_long arg)
391 if (arg == (int16_t)arg) {
392 tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
393 } else if (arg == (uint16_t)arg) {
394 tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
396 tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
397 tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
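/* The constant loader above picks the cheapest form: a single ADDIU from
   $zero for signed 16-bit values, a single ORI from $zero for unsigned
   16-bit values, and the usual LUI + ORI pair for anything wider. */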
401 static inline void tcg_out_bswap16(TCGContext *s, int ret, int arg)
403 /* ret and arg can't be register $at, which is used as a scratch here */
404 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
408 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
409 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0x00ff);
411 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
412 tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
413 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
416 static inline void tcg_out_bswap16s(TCGContext *s, int ret, int arg)
418 /* ret and arg can't be register $at, which is used as a scratch here */
419 if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
423 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
424 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff);
426 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
427 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
428 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
431 static inline void tcg_out_bswap32(TCGContext *s, int ret, int arg)
433 /* ret and arg must be different and can't be register $at, which is used as a scratch here */
434 if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
438 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
440 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
441 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
443 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
444 tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
445 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
447 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
448 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
449 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
452 static inline void tcg_out_ext8s(TCGContext *s, int ret, int arg)
454 #ifdef _MIPS_ARCH_MIPS32R2
455 tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
457 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
458 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
462 static inline void tcg_out_ext16s(TCGContext *s, int ret, int arg)
464 #ifdef _MIPS_ARCH_MIPS32R2
465 tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
467 tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
468 tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
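/* On MIPS32R2 the SEB/SEH instructions sign-extend a byte or halfword in a
   single step; the pre-R2 fallback used above obtains the same result with a
   shift-left / arithmetic-shift-right pair. */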
472 static inline void tcg_out_ldst(TCGContext *s, int opc, int arg,
473 int arg1, tcg_target_long arg2)
475 if (arg2 == (int16_t) arg2) {
476 tcg_out_opc_imm(s, opc, arg, arg1, arg2);
478 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
479 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
480 tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
484 static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
485 TCGReg arg1, tcg_target_long arg2)
487 tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
490 static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
491 TCGReg arg1, tcg_target_long arg2)
493 tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
496 static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
498 if (val == (int16_t)val) {
499 tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
501 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
502 tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
506 static void tcg_out_brcond(TCGContext *s, TCGCond cond, int arg1,
507 int arg2, int label_index)
509 TCGLabel *l = &s->labels[label_index];
513 tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
516 tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
519 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
520 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
523 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
524 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
527 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
528 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
531 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
532 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
535 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
536 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
539 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
540 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
543 tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
544 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
547 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
548 tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
555 reloc_pc16(s->code_ptr - 4, l->u.value);
557 tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
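/* Every case above ends by emitting a branch instruction, so the label fixup
   (an immediate patch for an already-resolved label, or an R_MIPS_PC16
   relocation otherwise) is applied at s->code_ptr - 4, i.e. to the branch
   word that was just written. */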
562 /* XXX: we implement it at the target level to avoid having to
563    handle temporaries that live across basic blocks */
564 static void tcg_out_brcond2(TCGContext *s, TCGCond cond, int arg1,
565 int arg2, int arg3, int arg4, int label_index)
571 tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
572 tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
578 tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
582 tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
586 tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
590 tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
596 label_ptr = s->code_ptr;
597 tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
602 tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
606 tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
610 tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
614 tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
618 tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
624 reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
627 static void tcg_out_setcond(TCGContext *s, TCGCond cond, int ret,
633 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
634 } else if (arg2 == 0) {
635 tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
637 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
638 tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
643 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
644 } else if (arg2 == 0) {
645 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
647 tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
648 tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
652 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
655 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
658 tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
659 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
662 tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
663 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
666 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
667 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
670 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
671 tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
674 tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
677 tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
685 /* XXX: we implement it at the target level to avoid having to
686    handle temporaries that live across basic blocks */
687 static void tcg_out_setcond2(TCGContext *s, TCGCond cond, int ret,
688 int arg1, int arg2, int arg3, int arg4)
692 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
693 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
694 tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
697 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
698 tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
699 tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
703 tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
707 tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
711 tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
715 tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
722 tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);
727 tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
731 tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
735 tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
739 tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
745 tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
746 tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
749 #if defined(CONFIG_SOFTMMU)
751 #include "../../softmmu_defs.h"
753 #ifdef CONFIG_TCG_PASS_AREG0
754 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr, int mmu_idx) */
756 static const void * const qemu_ld_helpers[4] = {
763 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
764 uintxx_t val, int mmu_idx) */
765 static const void * const qemu_st_helpers[4] = {
772 /* legacy helper signature: __ld_mmu(target_ulong addr, int mmu_idx) */
774 static void *qemu_ld_helpers[4] = {
781 /* legacy helper signature: __st_mmu(target_ulong addr, uintxx_t val, int mmu_idx) */
783 static void *qemu_st_helpers[4] = {
792 static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
795 int addr_regl, addr_reg1, addr_meml;
796 int data_regl, data_regh, data_reg1, data_reg2;
797 int mem_index, s_bits;
798 #if defined(CONFIG_SOFTMMU)
799 void *label1_ptr, *label2_ptr;
802 #if TARGET_LONG_BITS == 64
803 # if defined(CONFIG_SOFTMMU)
806 int addr_regh, addr_reg2, addr_memh;
814 #if TARGET_LONG_BITS == 64
821 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
822 data_reg1 = data_regh;
823 data_reg2 = data_regl;
825 data_reg1 = data_regl;
826 data_reg2 = data_regh;
829 data_reg1 = data_regl;
832 #if TARGET_LONG_BITS == 64
833 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
834 addr_reg1 = addr_regh;
835 addr_reg2 = addr_regl;
839 addr_reg1 = addr_regl;
840 addr_reg2 = addr_regh;
845 addr_reg1 = addr_regl;
849 #if defined(CONFIG_SOFTMMU)
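    /* Soft-MMU fast path: compute the byte offset of the TLB entry from
       (addr >> TARGET_PAGE_BITS) & (CPU_TLB_SIZE - 1), scaled to the entry
       size, load the cached addr_read tag and compare it against the
       page-masked guest address.  The mask also keeps the low s_bits bits,
       so an access that is not aligned to its own size fails the compare
       and takes the slow path as well. */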
850 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
851 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
852 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
853 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
854 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
855 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
856 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
858 # if TARGET_LONG_BITS == 64
859 label3_ptr = s->code_ptr;
860 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
863 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
864 offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);
866 label1_ptr = s->code_ptr;
867 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
870 reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
872 label1_ptr = s->code_ptr;
873 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
878 sp_args = TCG_REG_A0;
879 tcg_out_mov(s, TCG_TYPE_I32, sp_args++, addr_reg1);
880 # if TARGET_LONG_BITS == 64
881 tcg_out_mov(s, TCG_TYPE_I32, sp_args++, addr_reg2);
883 tcg_out_movi(s, TCG_TYPE_I32, sp_args++, mem_index);
884 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
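    /* The o32 PIC calling convention expects the callee's address in $t9
       (hence the "C" constraint for call targets), so the helper address is
       materialised there before the JALR below. */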
885 #ifdef CONFIG_TCG_PASS_AREG0
886 /* XXX/FIXME: suboptimal and incorrect for 64-bit values on a 32-bit host */
887 tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[2],
888 tcg_target_call_iarg_regs[1]);
889 tcg_out_mov(s, TCG_TYPE_TL, tcg_target_call_iarg_regs[1],
890 tcg_target_call_iarg_regs[0]);
891 tcg_out_mov(s, TCG_TYPE_PTR, tcg_target_call_iarg_regs[0],
894 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
899 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
902 tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
905 tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
908 tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
911 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
914 tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
915 tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
921 label2_ptr = s->code_ptr;
922 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
925 /* label1: fast path */
926 reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);
928 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
929 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
930 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
932 if (GUEST_BASE == (int16_t)GUEST_BASE) {
933 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
935 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
936 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
942 tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
945 tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
948 if (TCG_NEED_BSWAP) {
949 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
950 tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
952 tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
956 if (TCG_NEED_BSWAP) {
957 tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
958 tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
960 tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
964 if (TCG_NEED_BSWAP) {
965 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
966 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
968 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
972 if (TCG_NEED_BSWAP) {
973 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
974 tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
975 tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
976 tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
978 tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
979 tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
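        /* In the byte-swapped 64-bit case above, the two 32-bit halves are
           exchanged as well: the word at offset 4 is swapped into data_reg1
           and the word at offset 0 into data_reg2. */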
986 #if defined(CONFIG_SOFTMMU)
987 reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
991 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
994 int addr_regl, addr_reg1, addr_meml;
995 int data_regl, data_regh, data_reg1, data_reg2;
996 int mem_index, s_bits;
997 #if defined(CONFIG_SOFTMMU)
998 uint8_t *label1_ptr, *label2_ptr;
1001 #if TARGET_LONG_BITS == 64
1002 # if defined(CONFIG_SOFTMMU)
1003 uint8_t *label3_ptr;
1005 int addr_regh, addr_reg2, addr_memh;
1008 data_regl = *args++;
1010 data_regh = *args++;
1011 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1012 data_reg1 = data_regh;
1013 data_reg2 = data_regl;
1015 data_reg1 = data_regl;
1016 data_reg2 = data_regh;
1019 data_reg1 = data_regl;
1023 addr_regl = *args++;
1024 #if TARGET_LONG_BITS == 64
1025 addr_regh = *args++;
1026 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
1027 addr_reg1 = addr_regh;
1028 addr_reg2 = addr_regl;
1032 addr_reg1 = addr_regl;
1033 addr_reg2 = addr_regh;
1038 addr_reg1 = addr_regl;
1044 #if defined(CONFIG_SOFTMMU)
1045 tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
1046 tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
1047 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
1048 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1049 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
1050 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
1051 tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);
1053 # if TARGET_LONG_BITS == 64
1054 label3_ptr = s->code_ptr;
1055 tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
1058 tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
1059 offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);
1061 label1_ptr = s->code_ptr;
1062 tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
1065 reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
1067 label1_ptr = s->code_ptr;
1068 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
1073 sp_args = TCG_REG_A0;
1074 tcg_out_mov(s, TCG_TYPE_I32, sp_args++, addr_reg1);
1075 # if TARGET_LONG_BITS == 64
1076 tcg_out_mov(s, TCG_TYPE_I32, sp_args++, addr_reg2);
1080 tcg_out_opc_imm(s, OPC_ANDI, sp_args++, data_reg1, 0xff);
1083 tcg_out_opc_imm(s, OPC_ANDI, sp_args++, data_reg1, 0xffff);
1086 tcg_out_mov(s, TCG_TYPE_I32, sp_args++, data_reg1);
1089 sp_args = (sp_args + 1) & ~1;
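        /* The o32 ABI passes a 64-bit argument in an aligned (even/odd)
           register pair, so the argument register number is rounded up to
           even before the two data words are moved in below. */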
1090 tcg_out_mov(s, TCG_TYPE_I32, sp_args++, data_reg1);
1091 tcg_out_mov(s, TCG_TYPE_I32, sp_args++, data_reg2);
1096 if (sp_args > TCG_REG_A3) {
1097 /* Push mem_index on the stack */
1098 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, mem_index);
1099 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 16);
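        /* Under o32 the first 16 bytes of the outgoing argument area are
           reserved for $a0-$a3, so a fifth argument such as mem_index goes
           on the stack at sp + 16. */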
1101 tcg_out_movi(s, TCG_TYPE_I32, sp_args, mem_index);
1104 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
1105 #ifdef CONFIG_TCG_PASS_AREG0
1106 /* XXX/FIXME: suboptimal and incorrect for 64-bit values on a 32-bit host */
1107 tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[3],
1108 tcg_target_call_iarg_regs[2]);
1109 tcg_out_mov(s, TCG_TYPE_I64, tcg_target_call_iarg_regs[2],
1110 tcg_target_call_iarg_regs[1]);
1111 tcg_out_mov(s, TCG_TYPE_TL, tcg_target_call_iarg_regs[1],
1112 tcg_target_call_iarg_regs[0]);
1113 tcg_out_mov(s, TCG_TYPE_PTR, tcg_target_call_iarg_regs[0],
1116 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
1119 label2_ptr = s->code_ptr;
1120 tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
1123 /* label1: fast path */
1124 reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);
1126 tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
1127 offsetof(CPUArchState, tlb_table[mem_index][0].addend));
1128 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
1130 if (GUEST_BASE == (int16_t)GUEST_BASE) {
1131 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
1133 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
1134 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
1141 tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
1144 if (TCG_NEED_BSWAP) {
1145 tcg_out_bswap16(s, TCG_REG_T0, data_reg1);
1146 tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
1148 tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
1152 if (TCG_NEED_BSWAP) {
1153 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1154 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1156 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
1160 if (TCG_NEED_BSWAP) {
1161 tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
1162 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
1163 tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
1164 tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
1166 tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
1167 tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
1174 #if defined(CONFIG_SOFTMMU)
1175 reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
1179 static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
1180 const TCGArg *args, const int *const_args)
1183 case INDEX_op_exit_tb:
1184 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
1185 tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
1186 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1189 case INDEX_op_goto_tb:
1190 if (s->tb_jmp_offset) {
1191 /* direct jump method */
1194 /* indirect jump method */
1195 tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, (tcg_target_long)(s->tb_next + args[0]));
1196 tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
1197 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
1200 s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
1203 tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);
1207 tcg_out_opc_reg(s, OPC_JR, 0, args[0], 0);
1211 tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
1214 case INDEX_op_mov_i32:
1215 tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
1217 case INDEX_op_movi_i32:
1218 tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
1221 case INDEX_op_ld8u_i32:
1222 tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
1224 case INDEX_op_ld8s_i32:
1225 tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
1227 case INDEX_op_ld16u_i32:
1228 tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
1230 case INDEX_op_ld16s_i32:
1231 tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
1233 case INDEX_op_ld_i32:
1234 tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
1236 case INDEX_op_st8_i32:
1237 tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
1239 case INDEX_op_st16_i32:
1240 tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
1242 case INDEX_op_st_i32:
1243 tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
1246 case INDEX_op_add_i32:
1247 if (const_args[2]) {
1248 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
1250 tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
1253 case INDEX_op_add2_i32:
1254 if (const_args[4]) {
1255 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
1257 tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
1259 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
1260 if (const_args[5]) {
1261 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
1263 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
1265 tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
1266 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
1268 case INDEX_op_sub_i32:
1269 if (const_args[2]) {
1270 tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
1272 tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
1275 case INDEX_op_sub2_i32:
1276 if (const_args[4]) {
1277 tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
1279 tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
1281 tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
1282 if (const_args[5]) {
1283 tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
1285 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
1287 tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
1288 tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
1290 case INDEX_op_mul_i32:
1291 tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
1292 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1294 case INDEX_op_mulu2_i32:
1295 tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
1296 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1297 tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
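        /* MULT/MULTU and DIV/DIVU deposit their results in the HI/LO register
           pair; MFLO/MFHI copy the wanted half into a general register.  The
           div/rem cases below follow the same pattern: quotient in LO,
           remainder in HI. */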
1299 case INDEX_op_div_i32:
1300 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1301 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1303 case INDEX_op_divu_i32:
1304 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1305 tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
1307 case INDEX_op_rem_i32:
1308 tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
1309 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1311 case INDEX_op_remu_i32:
1312 tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
1313 tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
1316 case INDEX_op_and_i32:
1317 if (const_args[2]) {
1318 tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
1320 tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
1323 case INDEX_op_or_i32:
1324 if (const_args[2]) {
1325 tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
1327 tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
1330 case INDEX_op_nor_i32:
1331 tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
1333 case INDEX_op_not_i32:
1334 tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
1336 case INDEX_op_xor_i32:
1337 if (const_args[2]) {
1338 tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
1340 tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
1344 case INDEX_op_sar_i32:
1345 if (const_args[2]) {
1346 tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
1348 tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
1351 case INDEX_op_shl_i32:
1352 if (const_args[2]) {
1353 tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
1355 tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
1358 case INDEX_op_shr_i32:
1359 if (const_args[2]) {
1360 tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
1362 tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
1366 case INDEX_op_ext8s_i32:
1367 tcg_out_ext8s(s, args[0], args[1]);
1369 case INDEX_op_ext16s_i32:
1370 tcg_out_ext16s(s, args[0], args[1]);
1373 case INDEX_op_brcond_i32:
1374 tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
1376 case INDEX_op_brcond2_i32:
1377 tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
1380 case INDEX_op_setcond_i32:
1381 tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
1383 case INDEX_op_setcond2_i32:
1384 tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
1387 case INDEX_op_qemu_ld8u:
1388 tcg_out_qemu_ld(s, args, 0);
1390 case INDEX_op_qemu_ld8s:
1391 tcg_out_qemu_ld(s, args, 0 | 4);
1393 case INDEX_op_qemu_ld16u:
1394 tcg_out_qemu_ld(s, args, 1);
1396 case INDEX_op_qemu_ld16s:
1397 tcg_out_qemu_ld(s, args, 1 | 4);
1399 case INDEX_op_qemu_ld32:
1400 tcg_out_qemu_ld(s, args, 2);
1402 case INDEX_op_qemu_ld64:
1403 tcg_out_qemu_ld(s, args, 3);
1405 case INDEX_op_qemu_st8:
1406 tcg_out_qemu_st(s, args, 0);
1408 case INDEX_op_qemu_st16:
1409 tcg_out_qemu_st(s, args, 1);
1411 case INDEX_op_qemu_st32:
1412 tcg_out_qemu_st(s, args, 2);
1414 case INDEX_op_qemu_st64:
1415 tcg_out_qemu_st(s, args, 3);
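/* Constraint letters used in the table below (see target_parse_constraint()
   above): "r" any general register, "C" the call target register $t9,
   "L"/"l" qemu_ld output/input registers, "S" qemu_st registers,
   "I" an unsigned 16-bit constant, "J" a signed 16-bit constant,
   "i" any constant, and "Z" the constant zero, emitted as $zero. */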
1423 static const TCGTargetOpDef mips_op_defs[] = {
1424 { INDEX_op_exit_tb, { } },
1425 { INDEX_op_goto_tb, { } },
1426 { INDEX_op_call, { "C" } },
1427 { INDEX_op_jmp, { "r" } },
1428 { INDEX_op_br, { } },
1430 { INDEX_op_mov_i32, { "r", "r" } },
1431 { INDEX_op_movi_i32, { "r" } },
1432 { INDEX_op_ld8u_i32, { "r", "r" } },
1433 { INDEX_op_ld8s_i32, { "r", "r" } },
1434 { INDEX_op_ld16u_i32, { "r", "r" } },
1435 { INDEX_op_ld16s_i32, { "r", "r" } },
1436 { INDEX_op_ld_i32, { "r", "r" } },
1437 { INDEX_op_st8_i32, { "rZ", "r" } },
1438 { INDEX_op_st16_i32, { "rZ", "r" } },
1439 { INDEX_op_st_i32, { "rZ", "r" } },
1441 { INDEX_op_add_i32, { "r", "rZ", "rJZ" } },
1442 { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
1443 { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
1444 { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
1445 { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
1446 { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
1447 { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
1448 { INDEX_op_sub_i32, { "r", "rZ", "rJZ" } },
1450 { INDEX_op_and_i32, { "r", "rZ", "rIZ" } },
1451 { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
1452 { INDEX_op_not_i32, { "r", "rZ" } },
1453 { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
1454 { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },
1456 { INDEX_op_shl_i32, { "r", "rZ", "riZ" } },
1457 { INDEX_op_shr_i32, { "r", "rZ", "riZ" } },
1458 { INDEX_op_sar_i32, { "r", "rZ", "riZ" } },
1460 { INDEX_op_ext8s_i32, { "r", "rZ" } },
1461 { INDEX_op_ext16s_i32, { "r", "rZ" } },
1463 { INDEX_op_brcond_i32, { "rZ", "rZ" } },
1464 { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
1465 { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },
1467 { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJZ", "rJZ" } },
1468 { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJZ", "rJZ" } },
1469 { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },
1471 #if TARGET_LONG_BITS == 32
1472 { INDEX_op_qemu_ld8u, { "L", "lZ" } },
1473 { INDEX_op_qemu_ld8s, { "L", "lZ" } },
1474 { INDEX_op_qemu_ld16u, { "L", "lZ" } },
1475 { INDEX_op_qemu_ld16s, { "L", "lZ" } },
1476 { INDEX_op_qemu_ld32, { "L", "lZ" } },
1477 { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },
1479 { INDEX_op_qemu_st8, { "SZ", "SZ" } },
1480 { INDEX_op_qemu_st16, { "SZ", "SZ" } },
1481 { INDEX_op_qemu_st32, { "SZ", "SZ" } },
1482 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
1484 { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
1485 { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
1486 { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
1487 { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
1488 { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
1489 { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },
1491 { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
1492 { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
1493 { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
1494 { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
1499 static int tcg_target_callee_save_regs[] = {
1500 TCG_REG_S0, /* used for the global env (TCG_AREG0) */
1510 TCG_REG_RA, /* should be last for ABI compliance */
1513 /* Generate global QEMU prologue and epilogue code */
1514 static void tcg_target_qemu_prologue(TCGContext *s)
1518 /* reserve some stack space */
1519 frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
1520 + TCG_STATIC_CALL_ARGS_SIZE;
1521 frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
1522 ~(TCG_TARGET_STACK_ALIGN - 1);
1525 tcg_out_addi(s, TCG_REG_SP, -frame_size);
1526 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1527 tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1528 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
1531 /* Call generated code */
1532 tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
1533 tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
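    /* The move of the env pointer into TCG_AREG0 sits in the delay slot of
       the JR above, so it executes before the generated code is entered. */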
1534 tb_ret_addr = s->code_ptr;
1537 for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
1538 tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
1539 TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
1542 tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
1543 tcg_out_addi(s, TCG_REG_SP, frame_size);
1546 static void tcg_target_init(TCGContext *s)
1548 tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
1549 tcg_regset_set(tcg_target_call_clobber_regs,
1566 tcg_regset_clear(s->reserved_regs);
1567 tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
1568 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0); /* kernel use only */
1569 tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1); /* kernel use only */
1570 tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT); /* internal use */
1571 tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0); /* internal use */
1572 tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA); /* return address */
1573 tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP); /* stack pointer */
1575 tcg_add_target_add_op_defs(mips_op_defs);
1576 tcg_set_frame(s, TCG_AREG0, offsetof(CPUArchState, temp_buf),
1577 CPU_TEMP_BUF_NLONGS * sizeof(long));