/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {

static const int tcg_target_reg_alloc_order[] = {

static const int tcg_target_call_iarg_regs[6] = {

static const int tcg_target_call_oarg_regs[2] = {
static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
    return (val << ((sizeof(tcg_target_long) * 8 - bits))
            >> (sizeof(tcg_target_long) * 8 - bits)) == val;

static inline int check_fit_i32(uint32_t val, unsigned int bits)
    return ((val << (32 - bits)) >> (32 - bits)) == val;
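/* The two checks above test whether a value fits in an N-bit immediate
   field: shifting left and then back right by (width - N) bits reproduces
   the value only in that case.  check_fit_tl uses arithmetic shifts, so
   with bits == 13 it accepts exactly the -4096..4095 range of a SPARC
   simm13 field; check_fit_i32 uses unsigned shifts and thus checks that
   the high (32 - bits) bits are clear.  */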
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
        if (value != (uint32_t)value)
        *(uint32_t *)code_ptr = value;
    case R_SPARC_WDISP22:
        value -= (long)code_ptr;
        if (!check_fit_tl(value, 22))
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x3fffff) | value;
    case R_SPARC_WDISP19:
        value -= (long)code_ptr;
        if (!check_fit_tl(value, 19))
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x7ffff) | value;
/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O0);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O1);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O2);
        ct->ct |= TCG_CT_CONST_S11;
        ct->ct |= TCG_CT_CONST_S13;
/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
    if (ct & TCG_CT_CONST)
    else if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11))
    else if ((ct & TCG_CT_CONST_S13) && check_fit_tl(val, 13))
#define INSN_OP(x)  ((x) << 30)
#define INSN_OP2(x) ((x) << 22)
#define INSN_OP3(x) ((x) << 19)
#define INSN_OPF(x) ((x) << 5)
#define INSN_RD(x)  ((x) << 25)
#define INSN_RS1(x) ((x) << 14)
#define INSN_RS2(x) (x)
#define INSN_ASI(x) ((x) << 5)

#define INSN_IMM11(x) ((1 << 13) | ((x) & 0x7ff))
#define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
#define INSN_OFF19(x) (((x) >> 2) & 0x07ffff)
#define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)

#define INSN_COND(x, a) (((x) << 25) | ((a) << 29))
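/* SPARC instruction words are assembled from a few fixed fields: op in
   bits 31:30 selects the format (0 = branches/sethi, 1 = call, 2 =
   arithmetic and logic, 3 = loads and stores), rd sits in bits 29:25,
   op3 in 24:19, rs1 in 18:14, and either rs2 in 4:0 or, when bit 13 is
   set, a signed 13-bit immediate in 12:0.  Conditional branches instead
   carry the annul bit in bit 29, the condition in 28:25 and op2 in
   24:22, followed by the word displacement.  */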
#define BA         (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))

#define MOVCC_ICC  (1 << 18)
#define MOVCC_XCC  (1 << 18 | 1 << 12)

#define ARITH_ADD  (INSN_OP(2) | INSN_OP3(0x00))
#define ARITH_ADDCC (INSN_OP(2) | INSN_OP3(0x10))
#define ARITH_AND  (INSN_OP(2) | INSN_OP3(0x01))
#define ARITH_ANDN (INSN_OP(2) | INSN_OP3(0x05))
#define ARITH_OR   (INSN_OP(2) | INSN_OP3(0x02))
#define ARITH_ORCC (INSN_OP(2) | INSN_OP3(0x12))
#define ARITH_ORN  (INSN_OP(2) | INSN_OP3(0x06))
#define ARITH_XOR  (INSN_OP(2) | INSN_OP3(0x03))
#define ARITH_SUB  (INSN_OP(2) | INSN_OP3(0x04))
#define ARITH_SUBCC (INSN_OP(2) | INSN_OP3(0x14))
#define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x08))
#define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
#define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
#define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
#define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
#define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
#define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
#define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))
#define ARITH_MOVCC (INSN_OP(2) | INSN_OP3(0x2c))

#define SHIFT_SLL  (INSN_OP(2) | INSN_OP3(0x25))
#define SHIFT_SRL  (INSN_OP(2) | INSN_OP3(0x26))
#define SHIFT_SRA  (INSN_OP(2) | INSN_OP3(0x27))

#define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
#define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
#define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))

#define RDY        (INSN_OP(2) | INSN_OP3(0x28) | INSN_RS1(0))
#define WRY        (INSN_OP(2) | INSN_OP3(0x30) | INSN_RD(0))
#define JMPL       (INSN_OP(2) | INSN_OP3(0x38))
#define SAVE       (INSN_OP(2) | INSN_OP3(0x3c))
#define RESTORE    (INSN_OP(2) | INSN_OP3(0x3d))
#define SETHI      (INSN_OP(0) | INSN_OP2(0x4))
#define CALL       INSN_OP(1)
#define LDUB       (INSN_OP(3) | INSN_OP3(0x01))
#define LDSB       (INSN_OP(3) | INSN_OP3(0x09))
#define LDUH       (INSN_OP(3) | INSN_OP3(0x02))
#define LDSH       (INSN_OP(3) | INSN_OP3(0x0a))
#define LDUW       (INSN_OP(3) | INSN_OP3(0x00))
#define LDSW       (INSN_OP(3) | INSN_OP3(0x08))
#define LDX        (INSN_OP(3) | INSN_OP3(0x0b))
#define STB        (INSN_OP(3) | INSN_OP3(0x05))
#define STH        (INSN_OP(3) | INSN_OP3(0x06))
#define STW        (INSN_OP(3) | INSN_OP3(0x04))
#define STX        (INSN_OP(3) | INSN_OP3(0x0e))
#define LDUBA      (INSN_OP(3) | INSN_OP3(0x11))
#define LDSBA      (INSN_OP(3) | INSN_OP3(0x19))
#define LDUHA      (INSN_OP(3) | INSN_OP3(0x12))
#define LDSHA      (INSN_OP(3) | INSN_OP3(0x1a))
#define LDUWA      (INSN_OP(3) | INSN_OP3(0x10))
#define LDSWA      (INSN_OP(3) | INSN_OP3(0x18))
#define LDXA       (INSN_OP(3) | INSN_OP3(0x1b))
#define STBA       (INSN_OP(3) | INSN_OP3(0x15))
#define STHA       (INSN_OP(3) | INSN_OP3(0x16))
#define STWA       (INSN_OP(3) | INSN_OP3(0x14))
#define STXA       (INSN_OP(3) | INSN_OP3(0x1e))
#ifndef ASI_PRIMARY_LITTLE
#define ASI_PRIMARY_LITTLE 0x88
#endif
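/* Loads and stores in the "alternate space" forms (LDUBA, STWA, ...) take
   an ASI operand; ASI_PRIMARY_LITTLE selects the primary address space
   with byte-swapped (little-endian) access, which lets the big-endian
   host read and write little-endian guest data without separate swap
   instructions.  */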
static inline void tcg_out_arith(TCGContext *s, int rd, int rs1, int rs2,
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |

static inline void tcg_out_arithi(TCGContext *s, int rd, int rs1,
                                  uint32_t offset, int op)
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |

static void tcg_out_arithc(TCGContext *s, int rd, int rs1,
                           int val2, int val2const, int op)
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1)
              | (val2const ? INSN_IMM13(val2) : INSN_RS2(val2)));

static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
    tcg_out_arith(s, ret, arg, TCG_REG_G0, ARITH_OR);

static inline void tcg_out_sethi(TCGContext *s, int ret, uint32_t arg)
    tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));

static inline void tcg_out_movi_imm13(TCGContext *s, int ret, uint32_t arg)
    tcg_out_arithi(s, ret, TCG_REG_G0, arg, ARITH_OR);

static inline void tcg_out_movi_imm32(TCGContext *s, int ret, uint32_t arg)
    if (check_fit_tl(arg, 13))
        tcg_out_movi_imm13(s, ret, arg);
        tcg_out_sethi(s, ret, arg);
            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
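/* A 32-bit constant that does not fit in a simm13 is built in at most two
   instructions: SETHI fills bits 31:10 of the destination and clears the
   low ten bits, and an OR with a 13-bit immediate supplies bits 9:0.  */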
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
    /* All 32-bit constants, as well as 64-bit constants with
       no high bits set go through movi_imm32.  */
    if (TCG_TARGET_REG_BITS == 32
        || type == TCG_TYPE_I32
        || (arg & ~(tcg_target_long)0xffffffff) == 0) {
        tcg_out_movi_imm32(s, ret, arg);
    } else if (check_fit_tl(arg, 13)) {
        /* A 13-bit constant sign-extended to 64-bits.  */
        tcg_out_movi_imm13(s, ret, arg);
    } else if (check_fit_tl(arg, 32)) {
        /* A 32-bit constant sign-extended to 64-bits.  */
        tcg_out_sethi(s, ret, ~arg);
        tcg_out_arithi(s, ret, ret, (arg & 0x3ff) | -0x400, ARITH_XOR);
    } else {
        tcg_out_movi_imm32(s, TCG_REG_I4, arg >> (TCG_TARGET_REG_BITS / 2));
        tcg_out_arithi(s, TCG_REG_I4, TCG_REG_I4, 32, SHIFT_SLLX);
        tcg_out_movi_imm32(s, ret, arg);
        tcg_out_arith(s, ret, ret, TCG_REG_I4, ARITH_OR);
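/* For a full 64-bit constant, tcg_out_movi builds the high 32 bits in the
   scratch register %i4, shifts them into place with SLLX, builds the low
   32 bits in the destination and ORs the halves together.  The
   sign-extended 32-bit case uses SETHI on the complement followed by an
   XOR with a negative immediate, so the upper 32 bits come out all-ones
   without an extra instruction.  */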
static inline void tcg_out_ld_raw(TCGContext *s, int ret,
    tcg_out_sethi(s, ret, arg);
    tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));

static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
    if (!check_fit_tl(arg, 10))
        tcg_out_movi(s, TCG_TYPE_PTR, ret, arg & ~0x3ffULL);
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_out32(s, LDX | INSN_RD(ret) | INSN_RS1(ret) |
                  INSN_IMM13(arg & 0x3ff));
        tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
                  INSN_IMM13(arg & 0x3ff));

static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset, int op)
    if (check_fit_tl(offset, 13))
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(addr) |
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |

static inline void tcg_out_ldst_asi(TCGContext *s, int ret, int addr,
                                    int offset, int op, int asi)
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
    tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
              INSN_ASI(asi) | INSN_RS2(addr));

static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
    if (type == TCG_TYPE_I32)
        tcg_out_ldst(s, ret, arg1, arg2, LDUW);
        tcg_out_ldst(s, ret, arg1, arg2, LDX);

static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
    if (type == TCG_TYPE_I32)
        tcg_out_ldst(s, arg, arg1, arg2, STW);
        tcg_out_ldst(s, arg, arg1, arg2, STX);

static inline void tcg_out_sety(TCGContext *s, int rs)
    tcg_out32(s, WRY | INSN_RS1(TCG_REG_G0) | INSN_RS2(rs));

static inline void tcg_out_rdy(TCGContext *s, int rd)
    tcg_out32(s, RDY | INSN_RD(rd));

static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
        if (check_fit_tl(val, 13))
            tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, val);
            tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_ADD);
static inline void tcg_out_andi(TCGContext *s, int reg, tcg_target_long val)
        if (check_fit_tl(val, 13))
            tcg_out_arithi(s, reg, reg, val, ARITH_AND);
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, val);
            tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_AND);

static void tcg_out_div32(TCGContext *s, int rd, int rs1,
                          int val2, int val2const, int uns)
    /* Load Y with the sign/zero extension of RS1 to 64-bits.  */
        tcg_out_sety(s, TCG_REG_G0);
        tcg_out_arithi(s, TCG_REG_I5, rs1, 31, SHIFT_SRA);
        tcg_out_sety(s, TCG_REG_I5);

    tcg_out_arithc(s, rd, rs1, val2, val2const,
                   uns ? ARITH_UDIV : ARITH_SDIV);
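    /* The 32-bit SDIV/UDIV instructions divide the 64-bit quantity formed
       by Y:rs1, which is why Y is written first: zero for an unsigned
       divide, or the sign bits of rs1 (rs1 >> 31) for a signed one.  */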
static inline void tcg_out_nop(TCGContext *s)
    tcg_out_sethi(s, TCG_REG_G0, 0);
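    /* SETHI with a zero immediate and %g0 as destination is the canonical
       SPARC nop encoding.  */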
/* Note: OPC must already contain the condition and annul bits, i.e. it
   must be built with INSN_COND().  */
static void tcg_out_branch_i32(TCGContext *s, int opc, int label_index)
    TCGLabel *l = &s->labels[label_index];
        val = l->u.value - (tcg_target_long)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | opc | INSN_OP2(0x2)
                      | INSN_OFF22(l->u.value - (unsigned long)s->code_ptr)));
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP22, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | opc | INSN_OP2(0x2) | 0));
#if TCG_TARGET_REG_BITS == 64
/* As above, OPC must already contain the condition and annul bits.  */
static void tcg_out_branch_i64(TCGContext *s, int opc, int label_index)
    TCGLabel *l = &s->labels[label_index];
        val = l->u.value - (tcg_target_long)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | opc | INSN_OP2(0x1) |
                      (0x5 << 19) |
                      INSN_OFF19(l->u.value - (unsigned long)s->code_ptr)));
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP19, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | opc | INSN_OP2(0x1) |
                      (0x5 << 19) | 0));
static const uint8_t tcg_cond_to_bcond[10] = {
    [TCG_COND_EQ] = COND_E,
    [TCG_COND_NE] = COND_NE,
    [TCG_COND_LT] = COND_L,
    [TCG_COND_GE] = COND_GE,
    [TCG_COND_LE] = COND_LE,
    [TCG_COND_GT] = COND_G,
    [TCG_COND_LTU] = COND_CS,
    [TCG_COND_GEU] = COND_CC,
    [TCG_COND_LEU] = COND_LEU,
    [TCG_COND_GTU] = COND_GU,

static void tcg_out_cmp(TCGContext *s, TCGArg c1, TCGArg c2, int c2const)
    tcg_out_arithc(s, TCG_REG_G0, c1, c2, c2const, ARITH_SUBCC);
static void tcg_out_brcond_i32(TCGContext *s, TCGCond cond,
                               TCGArg arg1, TCGArg arg2, int const_arg2,
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_branch_i32(s, INSN_COND(tcg_cond_to_bcond[cond], 0), label_index);
#if TCG_TARGET_REG_BITS == 64
static void tcg_out_brcond_i64(TCGContext *s, TCGCond cond,
                               TCGArg arg1, TCGArg arg2, int const_arg2,
    tcg_out_cmp(s, arg1, arg2, const_arg2);
    tcg_out_branch_i64(s, INSN_COND(tcg_cond_to_bcond[cond], 0), label_index);
static void tcg_out_brcond2_i32(TCGContext *s, TCGCond cond,
                                TCGArg al, TCGArg ah,
                                TCGArg bl, int blconst,
                                TCGArg bh, int bhconst, int label_dest)
    int cc, label_next = gen_new_label();

    tcg_out_cmp(s, ah, bh, bhconst);

    /* Note that we fill one of the delay slots with the second compare.  */
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_next);
        tcg_out_cmp(s, al, bl, blconst);
        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_EQ], 0);
        tcg_out_branch_i32(s, cc, label_dest);

        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_dest);
        tcg_out_cmp(s, al, bl, blconst);
        tcg_out_branch_i32(s, cc, label_dest);

        /* ??? One could fairly easily special-case 64-bit unsigned
           compares against 32-bit zero-extended constants.  For instance,
           we know that (unsigned)AH < 0 is false and need not emit it.
           Similarly, (unsigned)AH > 0 being true implies AH != 0, so the
           second branch will never be taken.  */
        cc = INSN_COND(tcg_cond_to_bcond[cond], 0);
        tcg_out_branch_i32(s, cc, label_dest);

        cc = INSN_COND(tcg_cond_to_bcond[TCG_COND_NE], 0);
        tcg_out_branch_i32(s, cc, label_next);
        tcg_out_cmp(s, al, bl, blconst);
        cc = INSN_COND(tcg_cond_to_bcond[tcg_unsigned_cond(cond)], 0);
        tcg_out_branch_i32(s, cc, label_dest);

    tcg_out_label(s, label_next, (tcg_target_long)s->code_ptr);
static void tcg_out_setcond_i32(TCGContext *s, TCGCond cond, TCGArg ret,
                                TCGArg c1, TCGArg c2, int c2const)

    /* For 32-bit comparisons, we can play games with ADDX/SUBX.  */
            tcg_out_arithc(s, ret, c1, c2, c2const, ARITH_XOR);
        c1 = TCG_REG_G0, c2 = ret, c2const = 0;
        cond = (cond == TCG_COND_EQ ? TCG_COND_LEU : TCG_COND_LTU);

        if (c2const && c2 != 0) {
            tcg_out_movi_imm13(s, TCG_REG_I5, c2);
        t = c1, c1 = c2, c2 = t, c2const = 0;
        cond = tcg_swap_cond(cond);

        tcg_out_cmp(s, c1, c2, c2const);
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
        tcg_out_movi_imm13(s, ret, 0);
        tcg_out32 (s, ARITH_MOVCC | INSN_RD(ret)
                   | INSN_RS1(tcg_cond_to_bcond[cond])
                   | MOVCC_ICC | INSN_IMM11(1));
        tcg_out_branch_i32(s, INSN_COND(tcg_cond_to_bcond[cond], 1), t);
        tcg_out_movi_imm13(s, ret, 1);
        tcg_out_movi_imm13(s, ret, 0);
        tcg_out_label(s, t, (tcg_target_long)s->code_ptr);

    tcg_out_cmp(s, c1, c2, c2const);
    if (cond == TCG_COND_LTU) {
        tcg_out_arithi(s, ret, TCG_REG_G0, 0, ARITH_ADDX);
        tcg_out_arithi(s, ret, TCG_REG_G0, -1, ARITH_SUBX);
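    /* SUBCC sets the carry flag exactly when the unsigned subtraction
       borrowed, i.e. when c1 < c2 unsigned.  ADDX %g0 + 0 + carry then
       materializes LTU directly as 0 or 1, while SUBX %g0 - (-1) - carry
       computes 1 - carry, covering the complementary conditions (GEU and
       the EQ case rewritten above as a compare against zero).  */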
#if TCG_TARGET_REG_BITS == 64
static void tcg_out_setcond_i64(TCGContext *s, TCGCond cond, TCGArg ret,
                                TCGArg c1, TCGArg c2, int c2const)
    tcg_out_cmp(s, c1, c2, c2const);
    tcg_out_movi_imm13(s, ret, 0);
    tcg_out32 (s, ARITH_MOVCC | INSN_RD(ret)
               | INSN_RS1(tcg_cond_to_bcond[cond])
               | MOVCC_XCC | INSN_IMM11(1));

static void tcg_out_setcond2_i32(TCGContext *s, TCGCond cond, TCGArg ret,
                                 TCGArg al, TCGArg ah,
                                 TCGArg bl, int blconst,
                                 TCGArg bh, int bhconst)

        tcg_out_setcond_i32(s, TCG_COND_EQ, TCG_REG_I5, al, bl, blconst);
        tcg_out_setcond_i32(s, TCG_COND_EQ, ret, ah, bh, bhconst);
        tcg_out_arith(s, ret, ret, TCG_REG_I5, ARITH_AND);
        tcg_out_setcond_i32(s, TCG_COND_NE, TCG_REG_I5, al, bl, blconst);
        tcg_out_setcond_i32(s, TCG_COND_NE, ret, ah, bh, bhconst);
        tcg_out_arith(s, ret, ret, TCG_REG_I5, ARITH_OR);

        lab = gen_new_label();

        tcg_out_cmp(s, ah, bh, bhconst);
        tcg_out_branch_i32(s, INSN_COND(tcg_cond_to_bcond[cond], 1), lab);
        tcg_out_movi_imm13(s, ret, 1);
        tcg_out_branch_i32(s, INSN_COND(COND_NE, 1), lab);
        tcg_out_movi_imm13(s, ret, 0);

        tcg_out_setcond_i32(s, tcg_unsigned_cond(cond), ret, al, bl, blconst);

        tcg_out_label(s, lab, (tcg_target_long)s->code_ptr);
/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
    tcg_out32(s, SAVE | INSN_RD(TCG_REG_O6) | INSN_RS1(TCG_REG_O6) |
              INSN_IMM13(-TCG_TARGET_STACK_MINFRAME));
    tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I0) |
              INSN_RS2(TCG_REG_G0));
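    /* SAVE allocates a register window plus the minimal stack frame, and
       the JMPL then jumps to the translated-code pointer that the caller
       passed in %o0 (visible as %i0 after the window shift); its delay
       slot gets a nop.  */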
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static const void * const qemu_ld_helpers[4] = {

static const void * const qemu_st_helpers[4] = {

#if TARGET_LONG_BITS == 32
#define TARGET_LD_OP LDUW
#define TARGET_LD_OP LDX

#if defined(CONFIG_SOFTMMU)
#if HOST_LONG_BITS == 32
#define TARGET_ADDEND_LD_OP LDUW
#define TARGET_ADDEND_LD_OP LDX

#define HOST_LD_OP LDX
#define HOST_ST_OP STX
#define HOST_SLL_OP SHIFT_SLLX
#define HOST_SRA_OP SHIFT_SRAX

#define HOST_LD_OP LDUW
#define HOST_ST_OP STW
#define HOST_SLL_OP SHIFT_SLL
#define HOST_SRA_OP SHIFT_SRA
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;

#if defined(CONFIG_SOFTMMU)
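    /* TLB lookup: the address is shifted and masked to index
       tlb_table[mem_index], the entry's recorded address is compared
       against the access address (the low s_bits are kept in the mask so
       that unaligned accesses fail the compare), and on a mismatch the
       code falls through to a call of the qemu_ld helper; on a hit the
       entry's addend is added to form the host address.  */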
    /* srl addr_reg, x, arg1 */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,

    /* and addr_reg, x, arg0 */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),

    /* and arg1, x, arg1 */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_read));

    /* add env, arg1, arg1 */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);
    label1_ptr = (uint32_t *)s->code_ptr;

    /* mov (delay slot) */
    tcg_out_mov(s, arg0, addr_reg);

    tcg_out_movi(s, TCG_TYPE_I32, arg1, mem_index);

    /* XXX: move that code to the end of the TB */
    /* qemu_ld_helper[s_bits](arg0, arg1) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_ld_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)

    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    /* data_reg = sign_extend(arg0) */
        /* sll arg0, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, arg0, (int)sizeof(tcg_target_long) * 8 - 8,
        /* sra data_reg, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 8, HOST_SRA_OP);

        /* sll arg0, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, arg0,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SLL_OP);
        /* sra data_reg, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SRA_OP);

        /* sll arg0, 32, data_reg */
        tcg_out_arithi(s, data_reg, arg0, 32, HOST_SLL_OP);
        /* sra data_reg, 32, data_reg */
        tcg_out_arithi(s, data_reg, data_reg, 32, HOST_SRA_OP);
        tcg_out_mov(s, data_reg, arg0);

    label2_ptr = (uint32_t *)s->code_ptr;

    /* nop (delay slot) */

#if TARGET_LONG_BITS == 32
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                                            (unsigned long)label1_ptr));

    /* ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_read), TARGET_ADDEND_LD_OP);
#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);

        /* ldub [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUB);

        /* ldsb [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSB);

#ifdef TARGET_WORDS_BIGENDIAN
        /* lduh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUH);
        /* lduha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUHA, ASI_PRIMARY_LITTLE);

#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSH);
        /* ldsha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSHA, ASI_PRIMARY_LITTLE);

#ifdef TARGET_WORDS_BIGENDIAN
        /* lduw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUW);
        /* lduwa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUWA, ASI_PRIMARY_LITTLE);

#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSW);
        /* ldswa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSWA, ASI_PRIMARY_LITTLE);

#ifdef TARGET_WORDS_BIGENDIAN
        /* ldx [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDX);
        /* ldxa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDXA, ASI_PRIMARY_LITTLE);
952 *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
953 INSN_OFF22((unsigned long)s->code_ptr -
954 (unsigned long)label2_ptr));
958 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
961 int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
962 #if defined(CONFIG_SOFTMMU)
963 uint32_t *label1_ptr, *label2_ptr;
976 #if defined(CONFIG_SOFTMMU)
977 /* srl addr_reg, x, arg1 */
978 tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,
981 /* and addr_reg, x, arg0 */
982 tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
985 /* and arg1, x, arg1 */
986 tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
988 /* add arg1, x, arg1 */
989 tcg_out_addi(s, arg1, offsetof(CPUState,
990 tlb_table[mem_index][0].addr_write));
992 /* add env, arg1, arg1 */
993 tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);
995 /* ld [arg1], arg2 */
996 tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
997 INSN_RS2(TCG_REG_G0));
999 /* subcc arg0, arg2, %g0 */
1000 tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);
    /* will become:
       be,pt %xcc label1 */
    label1_ptr = (uint32_t *)s->code_ptr;

    /* mov (delay slot) */
    tcg_out_mov(s, arg0, addr_reg);

    tcg_out_mov(s, arg1, data_reg);

    tcg_out_movi(s, TCG_TYPE_I32, arg2, mem_index);

    /* XXX: move that code to the end of the TB */
    /* qemu_st_helper[s_bits](arg0, arg1, arg2) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_st_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)

    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_ST_OP);
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                 sizeof(long), HOST_LD_OP);

    label2_ptr = (uint32_t *)s->code_ptr;

    /* nop (delay slot) */

#if TARGET_LONG_BITS == 32
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));
    /* be,pt %xcc label1 */
    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x1) |
                   (0x5 << 19) | INSN_OFF19((unsigned long)s->code_ptr -
                                            (unsigned long)label1_ptr));

    /* ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_write), TARGET_ADDEND_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);
        /* stb data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STB);

#ifdef TARGET_WORDS_BIGENDIAN
        /* sth data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STH);
        /* stha data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STHA, ASI_PRIMARY_LITTLE);

#ifdef TARGET_WORDS_BIGENDIAN
        /* stw data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STW);
        /* stwa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STWA, ASI_PRIMARY_LITTLE);

#ifdef TARGET_WORDS_BIGENDIAN
        /* stx data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STX);
        /* stxa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STXA, ASI_PRIMARY_LITTLE);

#if defined(CONFIG_SOFTMMU)
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
static inline void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                              const int *const_args)
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I0, args[0]);
        tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I7) |
        tcg_out32(s, RESTORE | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_G0) |
                  INSN_RS2(TCG_REG_G0));

    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out_sethi(s, TCG_REG_I5, args[0] & 0xffffe000);
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_IMM13((args[0] & 0x1fff)));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            /* indirect jump method */
            tcg_out_ld_ptr(s, TCG_REG_I5, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;

        tcg_out32(s, CALL | ((((tcg_target_ulong)args[0]
                               - (tcg_target_ulong)s->code_ptr) >> 2)
            tcg_out_ld_ptr(s, TCG_REG_I5,
                           (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_O7) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
           global registers */
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                     sizeof(long), HOST_ST_OP);
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - TCG_STATIC_CALL_ARGS_SIZE -
                     sizeof(long), HOST_LD_OP);
        tcg_out_branch_i32(s, INSN_COND(COND_A, 0), args[0]);
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);

#if TCG_TARGET_REG_BITS == 64
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32):    \
        glue(glue(case INDEX_op_, x), _i64)
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32)
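/* OP_32_64(ld8u) expands to the case labels for both INDEX_op_ld8u_i32
   and INDEX_op_ld8u_i64 on 64-bit hosts, and to the _i32 label alone on
   32-bit hosts, so a single switch arm can serve both operand widths.  */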
        tcg_out_ldst(s, args[0], args[1], args[2], LDUB);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSB);
        tcg_out_ldst(s, args[0], args[1], args[2], LDUH);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSH);
    case INDEX_op_ld_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_ld32u_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDUW);
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
    case INDEX_op_st_i32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_st32_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STW);

    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_mul_i32:

    case INDEX_op_div_i32:
        tcg_out_div32(s, args[0], args[1], args[2], const_args[2], 0);
    case INDEX_op_divu_i32:
        tcg_out_div32(s, args[0], args[1], args[2], const_args[2], 1);

    case INDEX_op_rem_i32:
    case INDEX_op_remu_i32:
        tcg_out_div32(s, TCG_REG_I5, args[1], args[2], const_args[2],
                      opc == INDEX_op_remu_i32);
        tcg_out_arithc(s, TCG_REG_I5, TCG_REG_I5, args[2], const_args[2],
        tcg_out_arith(s, args[0], args[1], TCG_REG_I5, ARITH_SUB);

    case INDEX_op_brcond_i32:
        tcg_out_brcond_i32(s, args[2], args[0], args[1], const_args[1],
    case INDEX_op_setcond_i32:
        tcg_out_setcond_i32(s, args[3], args[0], args[1],
                            args[2], const_args[2]);

#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2_i32(s, args[4], args[0], args[1],
                            args[2], const_args[2],
                            args[3], const_args[3], args[5]);
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2_i32(s, args[5], args[0], args[1], args[2],
                             args[3], const_args[3],
                             args[4], const_args[4]);
    case INDEX_op_add2_i32:
        tcg_out_arithc(s, args[0], args[2], args[4], const_args[4],
        tcg_out_arithc(s, args[1], args[3], args[5], const_args[5],
    case INDEX_op_sub2_i32:
        tcg_out_arithc(s, args[0], args[2], args[4], const_args[4],
        tcg_out_arithc(s, args[1], args[3], args[5], const_args[5],
    case INDEX_op_mulu2_i32:
        tcg_out_arithc(s, args[0], args[2], args[3], const_args[3],
        tcg_out_rdy(s, args[1]);

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDSW);
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDX);
    case INDEX_op_st_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STX);
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i64:

    case INDEX_op_rem_i64:
    case INDEX_op_remu_i64:
        tcg_out_arithc(s, TCG_REG_I5, args[1], args[2], const_args[2],
                       opc == INDEX_op_rem_i64 ? ARITH_SDIVX : ARITH_UDIVX);
        tcg_out_arithc(s, TCG_REG_I5, TCG_REG_I5, args[2], const_args[2],
        tcg_out_arith(s, args[0], args[1], TCG_REG_I5, ARITH_SUB);

    case INDEX_op_ext32s_i64:
        if (const_args[1]) {
            tcg_out_movi(s, TCG_TYPE_I64, args[0], (int32_t)args[1]);
            tcg_out_arithi(s, args[0], args[1], 0, SHIFT_SRA);
    case INDEX_op_ext32u_i64:
        if (const_args[1]) {
            tcg_out_movi_imm32(s, args[0], args[1]);
            tcg_out_arithi(s, args[0], args[1], 0, SHIFT_SRL);

    case INDEX_op_brcond_i64:
        tcg_out_brcond_i64(s, args[2], args[0], args[1], const_args[1],
    case INDEX_op_setcond_i64:
        tcg_out_setcond_i64(s, args[3], args[0], args[1],
                            args[2], const_args[2]);

    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);

        tcg_out_arithc(s, args[0], args[1], args[2], const_args[2], c);
        tcg_out_arithc(s, args[0], TCG_REG_G0, args[1], const_args[1], c);

        fprintf(stderr, "unknown opcode 0x%x\n", opc);
static const TCGTargetOpDef sparc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "rJ" } },
    { INDEX_op_mul_i32, { "r", "r", "rJ" } },
    { INDEX_op_div_i32, { "r", "r", "rJ" } },
    { INDEX_op_divu_i32, { "r", "r", "rJ" } },
    { INDEX_op_rem_i32, { "r", "r", "rJ" } },
    { INDEX_op_remu_i32, { "r", "r", "rJ" } },
    { INDEX_op_sub_i32, { "r", "r", "rJ" } },
    { INDEX_op_and_i32, { "r", "r", "rJ" } },
    { INDEX_op_andc_i32, { "r", "r", "rJ" } },
    { INDEX_op_or_i32, { "r", "r", "rJ" } },
    { INDEX_op_orc_i32, { "r", "r", "rJ" } },
    { INDEX_op_xor_i32, { "r", "r", "rJ" } },

    { INDEX_op_shl_i32, { "r", "r", "rJ" } },
    { INDEX_op_shr_i32, { "r", "r", "rJ" } },
    { INDEX_op_sar_i32, { "r", "r", "rJ" } },

    { INDEX_op_neg_i32, { "r", "rJ" } },
    { INDEX_op_not_i32, { "r", "rJ" } },

    { INDEX_op_brcond_i32, { "r", "rJ" } },
    { INDEX_op_setcond_i32, { "r", "r", "rJ" } },

#if TCG_TARGET_REG_BITS == 32
    { INDEX_op_brcond2_i32, { "r", "r", "rJ", "rJ" } },
    { INDEX_op_setcond2_i32, { "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_add2_i32, { "r", "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "r", "r", "rJ", "rJ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "r", "rJ" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32, { "r", "L" } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },
    { INDEX_op_qemu_ld64, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },

    { INDEX_op_add_i64, { "r", "r", "rJ" } },
    { INDEX_op_mul_i64, { "r", "r", "rJ" } },
    { INDEX_op_div_i64, { "r", "r", "rJ" } },
    { INDEX_op_divu_i64, { "r", "r", "rJ" } },
    { INDEX_op_rem_i64, { "r", "r", "rJ" } },
    { INDEX_op_remu_i64, { "r", "r", "rJ" } },
    { INDEX_op_sub_i64, { "r", "r", "rJ" } },
    { INDEX_op_and_i64, { "r", "r", "rJ" } },
    { INDEX_op_andc_i64, { "r", "r", "rJ" } },
    { INDEX_op_or_i64, { "r", "r", "rJ" } },
    { INDEX_op_orc_i64, { "r", "r", "rJ" } },
    { INDEX_op_xor_i64, { "r", "r", "rJ" } },

    { INDEX_op_shl_i64, { "r", "r", "rJ" } },
    { INDEX_op_shr_i64, { "r", "r", "rJ" } },
    { INDEX_op_sar_i64, { "r", "r", "rJ" } },

    { INDEX_op_neg_i64, { "r", "rJ" } },
    { INDEX_op_not_i64, { "r", "rJ" } },

    { INDEX_op_ext32s_i64, { "r", "ri" } },
    { INDEX_op_ext32u_i64, { "r", "ri" } },

    { INDEX_op_brcond_i64, { "r", "rJ" } },
    { INDEX_op_setcond_i64, { "r", "r", "rJ" } },
void tcg_target_init(TCGContext *s)
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
#if TCG_TARGET_REG_BITS == 64
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_G0);
#if TCG_TARGET_REG_BITS == 64
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I4); // for internal use
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I5); // for internal use
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I7);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O7);
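    /* The reservations above keep the register allocator away from %g0
       (the hardwired zero), %i5 and, on 64-bit hosts, %i4 (both used as
       scratch registers by this backend), %i6/%o6 (frame and stack
       pointer) and %i7/%o7 (return-address registers).  */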
    tcg_add_target_add_op_defs(sparc_op_defs);