/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "tcg-be-null.h"

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
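/* Example: BITS(15, 8) expands to 0x0000ff00, i.e. bits 15..8 set.
   Note that n and m are used unparenthesized, so only pass plain
   integer literals or identifiers as arguments. */
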
/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif
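/* In the constraint strings below, "r" accepts any register, "ri" accepts
   a register or an immediate, and a digit such as "0" requires the input
   to be allocated in the same register as the corresponding output.
   L and S are the target specific qemu_ld/qemu_st constraints; R64 is a
   64 bit value, which needs two registers on a 32 bit host. */
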
/* TODO: documentation. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */
#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif
#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif

    { INDEX_op_brcond_i64, { R, RI } },
#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
    { INDEX_op_ext_i32_i64, { R, R } },
    { INDEX_op_extu_i32_i64, { R, R } },
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */
    { INDEX_op_qemu_ld_i32, { R, L } },
    { INDEX_op_qemu_ld_i64, { R64, L } },

    { INDEX_op_qemu_st_i32, { R, S } },
    { INDEX_op_qemu_st_i64, { R64, S } },
#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { INDEX_op_mb, { } },
    { -1 },
};

static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};

#ifdef CONFIG_DEBUG_TCG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00", "r01", "r02", "r03", "r04", "r05", "r06", "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08", "r09", "r10", "r11", "r12", "r13", "r14", "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
    "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
#endif
#endif
};
#endif

static void patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    tcg_debug_assert(type == sizeof(tcg_target_long));
    tcg_debug_assert(addend == 0);
    tcg_debug_assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
}

/* Parse target specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

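/* Each TCI instruction is encoded as one opcode byte, one length byte and
   the operands.  The length byte is written as 0 here and back-patched at
   the end of each tcg_out_* function via old_code_ptr[1]. */
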
/* Write opcode. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}

/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    tcg_debug_assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        tcg_debug_assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif /* TCG_TARGET_REG_BITS == 64 */

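/* A "register or constant" operand thus starts either with a register
   number or with the TCG_CONST marker byte followed by the immediate;
   the interpreter checks for TCG_CONST when decoding such operands. */
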
static void tci_out_label(TCGContext *s, TCGLabel *label)
{
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        tcg_debug_assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), label, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

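/* For a label that is not yet resolved, the address bytes are only
   reserved and a relocation is recorded; patch_reloc() fills in the
   absolute address later, which is why it asserts value != 0. */
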
static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_debug_assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_debug_assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    tcg_out_op_t(s, INDEX_op_call);
    tcg_out_ri(s, 1, (uintptr_t)arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_insn_offset) {
            /* Direct jump method. */
            tcg_debug_assert(args[0] < ARRAY_SIZE(s->tb_jmp_insn_offset));
            /* Align for atomic patching and thread safety */
            s->code_ptr = QEMU_ALIGN_PTR_UP(s->code_ptr, 4);
            s->tb_jmp_insn_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        tcg_debug_assert(args[0] < ARRAY_SIZE(s->tb_jmp_reset_offset));
        s->tb_jmp_reset_offset[args[0]] = tcg_current_code_size(s);
        break;
    case INDEX_op_br:
        tci_out_label(s, arg_label(args[0]));
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_debug_assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_debug_assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        tcg_debug_assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, arg_label(args[5]));
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, arg_label(args[3]));
        break;
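    /* The qemu_ld/st opcodes below have a variable number of operands:
       a 64 bit value needs two registers on a 32 bit host, and a guest
       address needs two registers when TARGET_LONG_BITS exceeds
       TCG_TARGET_REG_BITS, hence args is advanced explicitly. */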
    case INDEX_op_qemu_ld_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_r(s, *args++);
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_out_r(s, *args++);
        }
        tcg_out_r(s, *args++);
        if (TARGET_LONG_BITS > TCG_TARGET_REG_BITS) {
            tcg_out_r(s, *args++);
        }
        tcg_out_i(s, *args++);
        break;
    case INDEX_op_mb:
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        tcg_debug_assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                               TCGReg base, intptr_t ofs)
{
    return false;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    tcg_debug_assert(tcg_op_defs_max <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

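/* Note: with the TCG interpreter, tcg_qemu_tb_exec is a C function
   (see tci.c), so the prologue below has nothing to emit. */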
/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
}