/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
static const int tcg_target_reg_alloc_order[] = {
static const int tcg_target_call_iarg_regs[6] = {
static const int tcg_target_call_oarg_regs[2] = {

/* Test whether VAL fits in an immediate field of BITS bits: shift the high
   bits out and back in again, and check that nothing was lost. */
static inline int check_fit_tl(tcg_target_long val, unsigned int bits)
    return (val << ((sizeof(tcg_target_long) * 8 - bits))
            >> (sizeof(tcg_target_long) * 8 - bits)) == val;

static inline int check_fit_i32(uint32_t val, unsigned int bits)
    return ((val << (32 - bits)) >> (32 - bits)) == val;
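
/* patch_reloc rewrites an already-emitted instruction once the value of a
   relocation target is known: R_SPARC_32 stores an absolute 32-bit value,
   while R_SPARC_WDISP22 rewrites the low 22 bits of a branch with the
   PC-relative displacement counted in 32-bit words. */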
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
        if (value != (uint32_t)value)
            tcg_abort();
        *(uint32_t *)code_ptr = value;
    case R_SPARC_WDISP22:
        value -= (long)code_ptr;
        value >>= 2;    /* the displacement field counts instructions, not bytes */
        if (!check_fit_tl(value, 22))
            tcg_abort();
        *(uint32_t *)code_ptr = ((*(uint32_t *)code_ptr) & ~0x3fffff) | value;

/* maximum number of registers used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)

/* parse target specific constraints */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        /* exclude %o0-%o2, which the qemu_ld/st code sequences use for the
           address, data and mem_index (and as helper-call arguments) */
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O0);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O1);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_O2);
        ct->ct |= TCG_CT_CONST_S11;
        ct->ct |= TCG_CT_CONST_S13;

/* test if a constant matches the constraint */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
    if (ct & TCG_CT_CONST)
    else if ((ct & TCG_CT_CONST_S11) && check_fit_tl(val, 11))
    else if ((ct & TCG_CT_CONST_S13) && check_fit_tl(val, 13))
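
/* The macros below assemble the fields of a 32-bit SPARC instruction word:
   op (bits 31:30) selects the format, op2/op3 select the opcode, rd is the
   destination register, rs1/rs2 the sources, and INSN_IMM13 sets the i bit
   together with a signed 13-bit immediate.  For illustration, the instruction
   "add %o1, 5, %o0" would be emitted as
       ARITH_ADD | INSN_RD(8) | INSN_RS1(9) | INSN_IMM13(5)
   since %o0 is register 8 and %o1 is register 9. */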
#define INSN_OP(x)  ((x) << 30)
#define INSN_OP2(x) ((x) << 22)
#define INSN_OP3(x) ((x) << 19)
#define INSN_OPF(x) ((x) << 5)
#define INSN_RD(x)  ((x) << 25)
#define INSN_RS1(x) ((x) << 14)
#define INSN_RS2(x) (x)
#define INSN_ASI(x) ((x) << 5)

#define INSN_IMM13(x) ((1 << 13) | ((x) & 0x1fff))
#define INSN_OFF22(x) (((x) >> 2) & 0x3fffff)

#define INSN_COND(x, a) (((x) << 25) | ((a) << 29))

#define BA (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2))

#define ARITH_ADD (INSN_OP(2) | INSN_OP3(0x00))
#define ARITH_AND (INSN_OP(2) | INSN_OP3(0x01))
#define ARITH_OR (INSN_OP(2) | INSN_OP3(0x02))
#define ARITH_ORCC (INSN_OP(2) | INSN_OP3(0x12))
#define ARITH_XOR (INSN_OP(2) | INSN_OP3(0x03))
#define ARITH_SUB (INSN_OP(2) | INSN_OP3(0x04))
#define ARITH_SUBCC (INSN_OP(2) | INSN_OP3(0x14))
#define ARITH_ADDX (INSN_OP(2) | INSN_OP3(0x08))
#define ARITH_SUBX (INSN_OP(2) | INSN_OP3(0x0c))
#define ARITH_UMUL (INSN_OP(2) | INSN_OP3(0x0a))
#define ARITH_UDIV (INSN_OP(2) | INSN_OP3(0x0e))
#define ARITH_SDIV (INSN_OP(2) | INSN_OP3(0x0f))
#define ARITH_MULX (INSN_OP(2) | INSN_OP3(0x09))
#define ARITH_UDIVX (INSN_OP(2) | INSN_OP3(0x0d))
#define ARITH_SDIVX (INSN_OP(2) | INSN_OP3(0x2d))

#define SHIFT_SLL (INSN_OP(2) | INSN_OP3(0x25))
#define SHIFT_SRL (INSN_OP(2) | INSN_OP3(0x26))
#define SHIFT_SRA (INSN_OP(2) | INSN_OP3(0x27))

#define SHIFT_SLLX (INSN_OP(2) | INSN_OP3(0x25) | (1 << 12))
#define SHIFT_SRLX (INSN_OP(2) | INSN_OP3(0x26) | (1 << 12))
#define SHIFT_SRAX (INSN_OP(2) | INSN_OP3(0x27) | (1 << 12))

#define WRY (INSN_OP(2) | INSN_OP3(0x30))
#define JMPL (INSN_OP(2) | INSN_OP3(0x38))
#define SAVE (INSN_OP(2) | INSN_OP3(0x3c))
#define RESTORE (INSN_OP(2) | INSN_OP3(0x3d))
#define SETHI (INSN_OP(0) | INSN_OP2(0x4))
#define CALL INSN_OP(1)
#define LDUB (INSN_OP(3) | INSN_OP3(0x01))
#define LDSB (INSN_OP(3) | INSN_OP3(0x09))
#define LDUH (INSN_OP(3) | INSN_OP3(0x02))
#define LDSH (INSN_OP(3) | INSN_OP3(0x0a))
#define LDUW (INSN_OP(3) | INSN_OP3(0x00))
#define LDSW (INSN_OP(3) | INSN_OP3(0x08))
#define LDX (INSN_OP(3) | INSN_OP3(0x0b))
#define STB (INSN_OP(3) | INSN_OP3(0x05))
#define STH (INSN_OP(3) | INSN_OP3(0x06))
#define STW (INSN_OP(3) | INSN_OP3(0x04))
#define STX (INSN_OP(3) | INSN_OP3(0x0e))
#define LDUBA (INSN_OP(3) | INSN_OP3(0x11))
#define LDSBA (INSN_OP(3) | INSN_OP3(0x19))
#define LDUHA (INSN_OP(3) | INSN_OP3(0x12))
#define LDSHA (INSN_OP(3) | INSN_OP3(0x1a))
#define LDUWA (INSN_OP(3) | INSN_OP3(0x10))
#define LDSWA (INSN_OP(3) | INSN_OP3(0x18))
#define LDXA (INSN_OP(3) | INSN_OP3(0x1b))
#define STBA (INSN_OP(3) | INSN_OP3(0x15))
#define STHA (INSN_OP(3) | INSN_OP3(0x16))
#define STWA (INSN_OP(3) | INSN_OP3(0x14))
#define STXA (INSN_OP(3) | INSN_OP3(0x1e))
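
/* The *A opcodes are the "load/store from alternate space" forms; combined
   with ASI_PRIMARY_LITTLE they perform byte-swapped (little-endian) accesses,
   which the qemu_ld/st paths below use for little-endian guests on this
   big-endian host. */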

#ifndef ASI_PRIMARY_LITTLE
#define ASI_PRIMARY_LITTLE 0x88
#endif

static inline void tcg_out_arith(TCGContext *s, int rd, int rs1, int rs2,
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |

static inline void tcg_out_arithi(TCGContext *s, int rd, int rs1,
                                  uint32_t offset, int op)
    tcg_out32(s, op | INSN_RD(rd) | INSN_RS1(rs1) |

static inline void tcg_out_mov(TCGContext *s, int ret, int arg)
    tcg_out_arith(s, ret, arg, TCG_REG_G0, ARITH_OR);

static inline void tcg_out_sethi(TCGContext *s, int ret, uint32_t arg)
    tcg_out32(s, SETHI | INSN_RD(ret) | ((arg & 0xfffffc00) >> 10));

static inline void tcg_out_movi_imm13(TCGContext *s, int ret, uint32_t arg)
    tcg_out_arithi(s, ret, TCG_REG_G0, arg, ARITH_OR);

static inline void tcg_out_movi_imm32(TCGContext *s, int ret, uint32_t arg)
    if (check_fit_tl(arg, 12))
        tcg_out_movi_imm13(s, ret, arg);
        tcg_out_sethi(s, ret, arg);
            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
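
/* tcg_out_movi synthesises an arbitrary constant: a value that fits in a
   signed 13-bit immediate becomes a single "or %g0, imm, rd"; other 32-bit
   values use sethi (upper 22 bits) plus or (low 10 bits); a full 64-bit value
   additionally builds its high half in the scratch register %i4, shifts it
   left by 32 and ors it with the low half. */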
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    if (!check_fit_tl(arg, 32) && (arg & ~0xffffffffULL) != 0) {
        tcg_out_movi_imm32(s, TCG_REG_I4, arg >> 32);
        tcg_out_arithi(s, TCG_REG_I4, TCG_REG_I4, 32, SHIFT_SLLX);
        tcg_out_movi_imm32(s, ret, arg);
        tcg_out_arith(s, ret, ret, TCG_REG_I4, ARITH_OR);
    } else if (check_fit_tl(arg, 12))
        tcg_out_movi_imm13(s, ret, arg);
        tcg_out_sethi(s, ret, arg);
            tcg_out_arithi(s, ret, ret, arg & 0x3ff, ARITH_OR);
        tcg_out_movi_imm32(s, ret, arg);

static inline void tcg_out_ld_raw(TCGContext *s, int ret,
    tcg_out_sethi(s, ret, arg);
    tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));

static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
    if (!check_fit_tl(arg, 10))
        tcg_out_movi(s, TCG_TYPE_PTR, ret, arg & ~0x3ffULL);
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    tcg_out32(s, LDX | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));
#else
    tcg_out32(s, LDUW | INSN_RD(ret) | INSN_RS1(ret) |
              INSN_IMM13(arg & 0x3ff));
#endif

static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset, int op)
    if (check_fit_tl(offset, 13))
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(addr) |
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
        tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |

static inline void tcg_out_ldst_asi(TCGContext *s, int ret, int addr,
                                    int offset, int op, int asi)
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, offset);
    tcg_out32(s, op | INSN_RD(ret) | INSN_RS1(TCG_REG_I5) |
              INSN_ASI(asi) | INSN_RS2(addr));

static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
    if (type == TCG_TYPE_I32)
        tcg_out_ldst(s, ret, arg1, arg2, LDUW);
        tcg_out_ldst(s, ret, arg1, arg2, LDX);

static inline void tcg_out_st(TCGContext *s, TCGType type, int arg,
                              int arg1, tcg_target_long arg2)
    if (type == TCG_TYPE_I32)
        tcg_out_ldst(s, arg, arg1, arg2, STW);
        tcg_out_ldst(s, arg, arg1, arg2, STX);

static inline void tcg_out_sety(TCGContext *s, tcg_target_long val)
    if (val == 0 || val == -1)
        tcg_out32(s, WRY | INSN_IMM13(val));
        fprintf(stderr, "unimplemented sety %ld\n", (long)val);

static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
    if (check_fit_tl(val, 13))
        tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I5, val);
        tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_ADD);

static inline void tcg_out_andi(TCGContext *s, int reg, tcg_target_long val)
    if (check_fit_tl(val, 13))
        tcg_out_arithi(s, reg, reg, val, ARITH_AND);
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, val);
        tcg_out_arith(s, reg, reg, TCG_REG_I5, ARITH_AND);

static inline void tcg_out_nop(TCGContext *s)
    tcg_out_sethi(s, TCG_REG_G0, 0);
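
/* Branch emission: if the label already has a resolved address, emit a Bicc
   with the 22-bit word displacement filled in; otherwise record an
   R_SPARC_WDISP22 relocation and emit the branch with a zero displacement for
   patch_reloc to fix up once the label is known. */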
static void tcg_out_branch(TCGContext *s, int opc, int label_index)
    TCGLabel *l = &s->labels[label_index];

        val = l->u.value - (tcg_target_long)s->code_ptr;
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2)
                      | INSN_OFF22(l->u.value - (unsigned long)s->code_ptr)));
        tcg_out_reloc(s, s->code_ptr, R_SPARC_WDISP22, label_index, 0);
        tcg_out32(s, (INSN_OP(0) | INSN_COND(opc, 0) | INSN_OP2(0x2) | 0));

static const uint8_t tcg_cond_to_bcond[10] = {
    [TCG_COND_EQ] = COND_E,
    [TCG_COND_NE] = COND_NE,
    [TCG_COND_LT] = COND_L,
    [TCG_COND_GE] = COND_GE,
    [TCG_COND_LE] = COND_LE,
    [TCG_COND_GT] = COND_G,
    [TCG_COND_LTU] = COND_CS,
    [TCG_COND_GEU] = COND_CC,
    [TCG_COND_LEU] = COND_LEU,
    [TCG_COND_GTU] = COND_GU,

static void tcg_out_brcond(TCGContext *s, int cond,
                           TCGArg arg1, TCGArg arg2, int const_arg2,
    if (const_arg2 && arg2 == 0)
        /* orcc %g0, r, %g0 */
        tcg_out_arith(s, TCG_REG_G0, TCG_REG_G0, arg1, ARITH_ORCC);
        /* subcc r1, r2, %g0 */
        tcg_out_arith(s, TCG_REG_G0, arg1, arg2, ARITH_SUBCC);
    tcg_out_branch(s, tcg_cond_to_bcond[cond], label_index);

/* Generate global QEMU prologue and epilogue code */
void tcg_target_qemu_prologue(TCGContext *s)
    tcg_out32(s, SAVE | INSN_RD(TCG_REG_O6) | INSN_RS1(TCG_REG_O6) |
              INSN_IMM13(-TCG_TARGET_STACK_MINFRAME));
    tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I0) |
              INSN_RS2(TCG_REG_G0));
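
/* The save above allocates a register window and a minimal stack frame; the
   jmpl then jumps to the translated-block pointer that the caller passed as
   its first argument (%o0, visible as %i0 after the window shift).  The
   matching return path, a jmpl through %i7 followed by a restore, is emitted
   by INDEX_op_exit_tb below. */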

#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

static const void * const qemu_ld_helpers[4] = {

static const void * const qemu_st_helpers[4] = {

#if TARGET_LONG_BITS == 32
#define TARGET_LD_OP LDUW
#else
#define TARGET_LD_OP LDX
#endif

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
#define HOST_LD_OP LDX
#define HOST_ST_OP STX
#define HOST_SLL_OP SHIFT_SLLX
#define HOST_SRA_OP SHIFT_SRAX
#else
#define HOST_LD_OP LDUW
#define HOST_ST_OP STW
#define HOST_SLL_OP SHIFT_SLL
#define HOST_SRA_OP SHIFT_SRA
#endif
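
/* Software-MMU load path.  The generated fast path computes the TLB index
   from the guest address, compares the stored tag (addr_read) against the
   page-aligned address, and on a hit adds the cached "addend" to form the
   host address; on a miss it falls through to a call of the appropriate
   __ld*_mmu-style helper from softmmu_defs.h, with the address and mem_index
   as arguments. */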
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;

#if defined(CONFIG_SOFTMMU)
    /* srl addr_reg, x, arg1 */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,

    /* and addr_reg, x, arg0 */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),

    /* and arg1, x, arg1 */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_read));

    /* add env, arg1, arg1 */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);

    label1_ptr = (uint32_t *)s->code_ptr;

    /* mov (delay slot) */
    tcg_out_mov(s, arg0, addr_reg);

    tcg_out_movi(s, TCG_TYPE_I32, arg1, mem_index);

    /* XXX: move that code to the end of the TB */
    /* qemu_ld_helper[s_bits](arg0, arg1) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_ld_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)

    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - sizeof(long), HOST_ST_OP);
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - sizeof(long), HOST_LD_OP);

    /* data_reg = sign_extend(arg0) */
        /* sll arg0, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, arg0, (int)sizeof(tcg_target_long) * 8 - 8,
        /* sra data_reg, 24/56, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 8, HOST_SRA_OP);
        /* sll arg0, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, arg0,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SLL_OP);
        /* sra data_reg, 16/48, data_reg */
        tcg_out_arithi(s, data_reg, data_reg,
                       (int)sizeof(tcg_target_long) * 8 - 16, HOST_SRA_OP);
        /* sll arg0, 32, data_reg */
        tcg_out_arithi(s, data_reg, arg0, 32, HOST_SLL_OP);
        /* sra data_reg, 32, data_reg */
        tcg_out_arithi(s, data_reg, data_reg, 32, HOST_SRA_OP);
        tcg_out_mov(s, data_reg, arg0);

    label2_ptr = (uint32_t *)s->code_ptr;

    /* nop (delay slot) */

    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));

    /* ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_read), HOST_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);

        /* ldub [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUB);
        /* ldsb [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSB);
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUH);
        /* lduha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUHA, ASI_PRIMARY_LITTLE);
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsh [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSH);
        /* ldsha [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSHA, ASI_PRIMARY_LITTLE);
#ifdef TARGET_WORDS_BIGENDIAN
        /* lduw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDUW);
        /* lduwa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDUWA, ASI_PRIMARY_LITTLE);
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldsw [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDSW);
        /* ldswa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDSWA, ASI_PRIMARY_LITTLE);
#ifdef TARGET_WORDS_BIGENDIAN
        /* ldx [arg0], data_reg */
        tcg_out_ldst(s, data_reg, arg0, 0, LDX);
        /* ldxa [arg0] ASI_PRIMARY_LITTLE, data_reg */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, LDXA, ASI_PRIMARY_LITTLE);

#if defined(CONFIG_SOFTMMU)
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));
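
/* The store path below mirrors tcg_out_qemu_ld: the same TLB probe is emitted
   against addr_write instead of addr_read, and the slow path passes the data
   value and mem_index as the second and third helper arguments. */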
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
    int addr_reg, data_reg, arg0, arg1, arg2, mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;

#if defined(CONFIG_SOFTMMU)
    /* srl addr_reg, x, arg1 */
    tcg_out_arithi(s, arg1, addr_reg, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS,

    /* and addr_reg, x, arg0 */
    tcg_out_arithi(s, arg0, addr_reg, TARGET_PAGE_MASK | ((1 << s_bits) - 1),

    /* and arg1, x, arg1 */
    tcg_out_andi(s, arg1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);

    /* add arg1, x, arg1 */
    tcg_out_addi(s, arg1, offsetof(CPUState,
                                   tlb_table[mem_index][0].addr_write));

    /* add env, arg1, arg1 */
    tcg_out_arith(s, arg1, TCG_AREG0, arg1, ARITH_ADD);

    /* ld [arg1], arg2 */
    tcg_out32(s, TARGET_LD_OP | INSN_RD(arg2) | INSN_RS1(arg1) |
              INSN_RS2(TCG_REG_G0));

    /* subcc arg0, arg2, %g0 */
    tcg_out_arith(s, TCG_REG_G0, arg0, arg2, ARITH_SUBCC);

    label1_ptr = (uint32_t *)s->code_ptr;

    /* mov (delay slot) */
    tcg_out_mov(s, arg0, addr_reg);

    tcg_out_mov(s, arg1, data_reg);

    tcg_out_movi(s, TCG_TYPE_I32, arg2, mem_index);

    /* XXX: move that code to the end of the TB */
    /* qemu_st_helper[s_bits](arg0, arg1, arg2) */
    tcg_out32(s, CALL | ((((tcg_target_ulong)qemu_st_helpers[s_bits]
                           - (tcg_target_ulong)s->code_ptr) >> 2)

    /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
       global registers */
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - sizeof(long), HOST_ST_OP);
    tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                 TCG_TARGET_CALL_STACK_OFFSET - sizeof(long), HOST_LD_OP);

    label2_ptr = (uint32_t *)s->code_ptr;

    /* nop (delay slot) */

    *label1_ptr = (INSN_OP(0) | INSN_COND(COND_E, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label1_ptr));

    /* ld [arg1 + x], arg1 */
    tcg_out_ldst(s, arg1, arg1, offsetof(CPUTLBEntry, addend) -
                 offsetof(CPUTLBEntry, addr_write), HOST_LD_OP);

#if TARGET_LONG_BITS == 32
    /* and addr_reg, x, arg0 */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_I5, 0xffffffff);
    tcg_out_arith(s, arg0, addr_reg, TCG_REG_I5, ARITH_AND);
    /* add arg0, arg1, arg0 */
    tcg_out_arith(s, arg0, arg0, arg1, ARITH_ADD);
    /* add addr_reg, arg1, arg0 */
    tcg_out_arith(s, arg0, addr_reg, arg1, ARITH_ADD);

        /* stb data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STB);
#ifdef TARGET_WORDS_BIGENDIAN
        /* sth data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STH);
        /* stha data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STHA, ASI_PRIMARY_LITTLE);
#ifdef TARGET_WORDS_BIGENDIAN
        /* stw data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STW);
        /* stwa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STWA, ASI_PRIMARY_LITTLE);
#ifdef TARGET_WORDS_BIGENDIAN
        /* stx data_reg, [arg0] */
        tcg_out_ldst(s, data_reg, arg0, 0, STX);
        /* stxa data_reg, [arg0] ASI_PRIMARY_LITTLE */
        tcg_out_ldst_asi(s, data_reg, arg0, 0, STXA, ASI_PRIMARY_LITTLE);

#if defined(CONFIG_SOFTMMU)
    *label2_ptr = (INSN_OP(0) | INSN_COND(COND_A, 0) | INSN_OP2(0x2) |
                   INSN_OFF22((unsigned long)s->code_ptr -
                              (unsigned long)label2_ptr));

static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
    case INDEX_op_exit_tb:
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_I0, args[0]);
        tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I7) |
        tcg_out32(s, RESTORE | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_G0) |
                  INSN_RS2(TCG_REG_G0));
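
    /* goto_tb: with the direct-jump method the destination address is encoded
       in a sethi/jmpl pair and its offset is recorded in tb_jmp_offset so the
       pair can be repointed later when TBs are chained; otherwise the target
       is loaded indirectly from the tb_next[] array and jumped through %i5. */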
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_out_sethi(s, TCG_REG_I5, args[0] & 0xffffe000);
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_IMM13((args[0] & 0x1fff)));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            /* indirect jump method */
            tcg_out_ld_ptr(s, TCG_REG_I5, (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_G0) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        tcg_out32(s, CALL | ((((tcg_target_ulong)args[0]
                               - (tcg_target_ulong)s->code_ptr) >> 2)
            tcg_out_ld_ptr(s, TCG_REG_I5,
                           (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, JMPL | INSN_RD(TCG_REG_O7) | INSN_RS1(TCG_REG_I5) |
                      INSN_RS2(TCG_REG_G0));
        /* Store AREG0 in stack to avoid ugly glibc bugs that mangle
           global registers */
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - sizeof(long), HOST_ST_OP);
        tcg_out_ldst(s, TCG_AREG0, TCG_REG_CALL_STACK,
                     TCG_TARGET_CALL_STACK_OFFSET - sizeof(long), HOST_LD_OP);
        tcg_out_branch(s, COND_A, args[0]);
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32:)    \
        glue(glue(case INDEX_op_, x), _i64:)
#else
#define OP_32_64(x)                             \
        glue(glue(case INDEX_op_, x), _i32:)
#endif
        tcg_out_ldst(s, args[0], args[1], args[2], LDUB);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSB);
        tcg_out_ldst(s, args[0], args[1], args[2], LDUH);
        tcg_out_ldst(s, args[0], args[1], args[2], LDSH);
    case INDEX_op_ld_i32:
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    case INDEX_op_ld32u_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDUW);
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
    case INDEX_op_st_i32:
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    case INDEX_op_st32_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_div2_i32:
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
    case INDEX_op_divu2_i32:
#if defined(__sparc_v9__) || defined(__sparc_v8plus__)
    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],

    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
    case INDEX_op_qemu_ld32s:
        tcg_out_qemu_ld(s, args, 2 | 4);
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    case INDEX_op_movi_i64:
        tcg_out_movi(s, TCG_TYPE_I64, args[0], args[1]);
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDSW);
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], LDX);
    case INDEX_op_st_i64:
        tcg_out_ldst(s, args[0], args[1], args[2], STX);
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_div2_i64:
    case INDEX_op_divu2_i64:
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, args[2], args[0], args[1], const_args[1],
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);

        if (const_args[2]) {
            tcg_out_arithi(s, args[0], args[1], args[2], c);
            tcg_out_arith(s, args[0], args[1], args[2], c);

        fprintf(stderr, "unknown opcode 0x%x\n", opc);
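
/* Operand constraint strings used below: "r" is any register, "ri" a register
   or immediate, "rJ" a register or a constant that fits the signed 13-bit
   immediate field (TCG_CT_CONST_S13 above), "L" a register excluding %o0-%o2
   (reserved by the qemu_ld/st code sequences), and "0"/"1" tie an output to
   the same register as input 0 or 1. */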
static const TCGTargetOpDef sparc_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "ri" } },
    { INDEX_op_jmp, { "ri" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },

    { INDEX_op_add_i32, { "r", "r", "rJ" } },
    { INDEX_op_mul_i32, { "r", "r", "rJ" } },
    { INDEX_op_div2_i32, { "r", "r", "0", "1", "r" } },
    { INDEX_op_divu2_i32, { "r", "r", "0", "1", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "rJ" } },
    { INDEX_op_and_i32, { "r", "r", "rJ" } },
    { INDEX_op_or_i32, { "r", "r", "rJ" } },
    { INDEX_op_xor_i32, { "r", "r", "rJ" } },

    { INDEX_op_shl_i32, { "r", "r", "rJ" } },
    { INDEX_op_shr_i32, { "r", "r", "rJ" } },
    { INDEX_op_sar_i32, { "r", "r", "rJ" } },

    { INDEX_op_brcond_i32, { "r", "ri" } },

    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L" } },

    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },

#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    { INDEX_op_mov_i64, { "r", "r" } },
    { INDEX_op_movi_i64, { "r" } },
    { INDEX_op_ld8u_i64, { "r", "r" } },
    { INDEX_op_ld8s_i64, { "r", "r" } },
    { INDEX_op_ld16u_i64, { "r", "r" } },
    { INDEX_op_ld16s_i64, { "r", "r" } },
    { INDEX_op_ld32u_i64, { "r", "r" } },
    { INDEX_op_ld32s_i64, { "r", "r" } },
    { INDEX_op_ld_i64, { "r", "r" } },
    { INDEX_op_st8_i64, { "r", "r" } },
    { INDEX_op_st16_i64, { "r", "r" } },
    { INDEX_op_st32_i64, { "r", "r" } },
    { INDEX_op_st_i64, { "r", "r" } },
    { INDEX_op_qemu_ld64, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L" } },

    { INDEX_op_add_i64, { "r", "r", "rJ" } },
    { INDEX_op_mul_i64, { "r", "r", "rJ" } },
    { INDEX_op_div2_i64, { "r", "r", "0", "1", "r" } },
    { INDEX_op_divu2_i64, { "r", "r", "0", "1", "r" } },
    { INDEX_op_sub_i64, { "r", "r", "rJ" } },
    { INDEX_op_and_i64, { "r", "r", "rJ" } },
    { INDEX_op_or_i64, { "r", "r", "rJ" } },
    { INDEX_op_xor_i64, { "r", "r", "rJ" } },

    { INDEX_op_shl_i64, { "r", "r", "rJ" } },
    { INDEX_op_shr_i64, { "r", "r", "rJ" } },
    { INDEX_op_sar_i64, { "r", "r", "rJ" } },

    { INDEX_op_brcond_i64, { "r", "ri" } },
#endif
};
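
/* Register usage notes for tcg_target_init: %g0 is hard-wired to zero, %i5
   (and %i4 on sparc64) are kept as scratch registers for the code generator
   itself (see tcg_out_movi and tcg_out_ldst), and %i6/%i7/%o6/%o7 hold the
   frame pointer, return addresses and stack pointer, so all of these are
   marked reserved. */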
void tcg_target_init(TCGContext *s)
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0, 0xffffffff);
#endif
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_G0);
#if defined(__sparc_v9__) && !defined(__sparc_v8plus__)
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I4); /* scratch for 64-bit movi */
#endif
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I5); /* scratch for the code generator */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_I7);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O6);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_O7);
    tcg_add_target_add_op_defs(sparc_op_defs);
}