/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "tcg-be-null.h"

/* TODO list:
 * - See TODO comments in code.
 */

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

/* Bitfield n...m (in 32 bit value). */
#define BITS(n, m) (((0xffffffffU << (31 - n)) >> (31 - n + m)) << m)
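/* For example, BITS(7, 4) expands to ((0xffffffffU << 24) >> 28) << 4,
   i.e. 0x000000f0, a mask covering bits 7..4.  Note that the macro
   arguments are used unparenthesized, so only pass simple values. */
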
/* Macros used in tcg_target_op_defs. */
#define R       "r"
#define RI      "ri"
#if TCG_TARGET_REG_BITS == 32
# define R64    "r", "r"
#else
# define R64    "r"
#endif
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
# define L      "L", "L"
# define S      "S", "S"
#else
# define L      "L"
# define S      "S"
#endif

/* Operand constraints for each opcode: output operands first, then inputs.
   "r" accepts any register, "ri" a register or immediate; the L and S
   constraints are parsed in target_parse_constraint below. */
static const TCGTargetOpDef tcg_target_op_defs[] = {
    { INDEX_op_exit_tb, { NULL } },
    { INDEX_op_goto_tb, { NULL } },
    { INDEX_op_br, { NULL } },

    { INDEX_op_ld8u_i32, { R, R } },
    { INDEX_op_ld8s_i32, { R, R } },
    { INDEX_op_ld16u_i32, { R, R } },
    { INDEX_op_ld16s_i32, { R, R } },
    { INDEX_op_ld_i32, { R, R } },
    { INDEX_op_st8_i32, { R, R } },
    { INDEX_op_st16_i32, { R, R } },
    { INDEX_op_st_i32, { R, R } },

    { INDEX_op_add_i32, { R, RI, RI } },
    { INDEX_op_sub_i32, { R, RI, RI } },
    { INDEX_op_mul_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i32
    { INDEX_op_div_i32, { R, R, R } },
    { INDEX_op_divu_i32, { R, R, R } },
    { INDEX_op_rem_i32, { R, R, R } },
    { INDEX_op_remu_i32, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i32
    { INDEX_op_div2_i32, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i32, { R, R, "0", "1", R } },
#endif
    /* TODO: Does R, RI, RI result in faster code than R, R, RI?
       If both operands are constants, we can optimize. */
    { INDEX_op_and_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i32
    { INDEX_op_andc_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i32
    { INDEX_op_eqv_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i32
    { INDEX_op_nand_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i32
    { INDEX_op_nor_i32, { R, RI, RI } },
#endif
    { INDEX_op_or_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i32
    { INDEX_op_orc_i32, { R, RI, RI } },
#endif
    { INDEX_op_xor_i32, { R, RI, RI } },
    { INDEX_op_shl_i32, { R, RI, RI } },
    { INDEX_op_shr_i32, { R, RI, RI } },
    { INDEX_op_sar_i32, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i32
    { INDEX_op_rotl_i32, { R, RI, RI } },
    { INDEX_op_rotr_i32, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i32
    { INDEX_op_deposit_i32, { R, "0", R } },
#endif

    { INDEX_op_brcond_i32, { R, RI } },

    { INDEX_op_setcond_i32, { R, R, RI } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_setcond_i64, { R, R, RI } },
#endif /* TCG_TARGET_REG_BITS == 64 */

#if TCG_TARGET_REG_BITS == 32
    /* TODO: Support R, R, R, R, RI, RI? Will it be faster? */
    { INDEX_op_add2_i32, { R, R, R, R, R, R } },
    { INDEX_op_sub2_i32, { R, R, R, R, R, R } },
    { INDEX_op_brcond2_i32, { R, R, RI, RI } },
    { INDEX_op_mulu2_i32, { R, R, R, R } },
    { INDEX_op_setcond2_i32, { R, R, R, RI, RI } },
#endif

#if TCG_TARGET_HAS_not_i32
    { INDEX_op_not_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i32
    { INDEX_op_neg_i32, { R, R } },
#endif

#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_ld8u_i64, { R, R } },
    { INDEX_op_ld8s_i64, { R, R } },
    { INDEX_op_ld16u_i64, { R, R } },
    { INDEX_op_ld16s_i64, { R, R } },
    { INDEX_op_ld32u_i64, { R, R } },
    { INDEX_op_ld32s_i64, { R, R } },
    { INDEX_op_ld_i64, { R, R } },

    { INDEX_op_st8_i64, { R, R } },
    { INDEX_op_st16_i64, { R, R } },
    { INDEX_op_st32_i64, { R, R } },
    { INDEX_op_st_i64, { R, R } },

    { INDEX_op_add_i64, { R, RI, RI } },
    { INDEX_op_sub_i64, { R, RI, RI } },
    { INDEX_op_mul_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_div_i64
    { INDEX_op_div_i64, { R, R, R } },
    { INDEX_op_divu_i64, { R, R, R } },
    { INDEX_op_rem_i64, { R, R, R } },
    { INDEX_op_remu_i64, { R, R, R } },
#elif TCG_TARGET_HAS_div2_i64
    { INDEX_op_div2_i64, { R, R, "0", "1", R } },
    { INDEX_op_divu2_i64, { R, R, "0", "1", R } },
#endif
    { INDEX_op_and_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_andc_i64
    { INDEX_op_andc_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_eqv_i64
    { INDEX_op_eqv_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nand_i64
    { INDEX_op_nand_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_nor_i64
    { INDEX_op_nor_i64, { R, RI, RI } },
#endif
    { INDEX_op_or_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_orc_i64
    { INDEX_op_orc_i64, { R, RI, RI } },
#endif
    { INDEX_op_xor_i64, { R, RI, RI } },
    { INDEX_op_shl_i64, { R, RI, RI } },
    { INDEX_op_shr_i64, { R, RI, RI } },
    { INDEX_op_sar_i64, { R, RI, RI } },
#if TCG_TARGET_HAS_rot_i64
    { INDEX_op_rotl_i64, { R, RI, RI } },
    { INDEX_op_rotr_i64, { R, RI, RI } },
#endif
#if TCG_TARGET_HAS_deposit_i64
    { INDEX_op_deposit_i64, { R, "0", R } },
#endif
    { INDEX_op_brcond_i64, { R, RI } },

#if TCG_TARGET_HAS_ext8s_i64
    { INDEX_op_ext8s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i64
    { INDEX_op_ext16s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32s_i64
    { INDEX_op_ext32s_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i64
    { INDEX_op_ext8u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i64
    { INDEX_op_ext16u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_ext32u_i64
    { INDEX_op_ext32u_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap16_i64
    { INDEX_op_bswap16_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i64
    { INDEX_op_bswap32_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap64_i64
    { INDEX_op_bswap64_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_not_i64
    { INDEX_op_not_i64, { R, R } },
#endif
#if TCG_TARGET_HAS_neg_i64
    { INDEX_op_neg_i64, { R, R } },
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

    { INDEX_op_qemu_ld8u, { R, L } },
    { INDEX_op_qemu_ld8s, { R, L } },
    { INDEX_op_qemu_ld16u, { R, L } },
    { INDEX_op_qemu_ld16s, { R, L } },
    { INDEX_op_qemu_ld32, { R, L } },
#if TCG_TARGET_REG_BITS == 64
    { INDEX_op_qemu_ld32u, { R, L } },
    { INDEX_op_qemu_ld32s, { R, L } },
#endif
    { INDEX_op_qemu_ld64, { R64, L } },

    { INDEX_op_qemu_st8, { R, S } },
    { INDEX_op_qemu_st16, { R, S } },
    { INDEX_op_qemu_st32, { R, S } },
    { INDEX_op_qemu_st64, { R64, S } },

#if TCG_TARGET_HAS_ext8s_i32
    { INDEX_op_ext8s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16s_i32
    { INDEX_op_ext16s_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext8u_i32
    { INDEX_op_ext8u_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_ext16u_i32
    { INDEX_op_ext16u_i32, { R, R } },
#endif

#if TCG_TARGET_HAS_bswap16_i32
    { INDEX_op_bswap16_i32, { R, R } },
#endif
#if TCG_TARGET_HAS_bswap32_i32
    { INDEX_op_bswap32_i32, { R, R } },
#endif

    { -1 },
};

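/* Reading an entry above: { INDEX_op_add_i32, { R, RI, RI } } expands to
   { INDEX_op_add_i32, { "r", "ri", "ri" } }, i.e. the output must be a
   register while either input may be a register or an immediate. */
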
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
    TCG_REG_R11,
    TCG_REG_R12,
    TCG_REG_R13,
    TCG_REG_R14,
    TCG_REG_R15,
#endif
};

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_R0,
    TCG_REG_R1,
    TCG_REG_R2,
    TCG_REG_R3,
#if 0 /* used for TCG_REG_CALL_STACK */
    TCG_REG_R4,
#endif
    TCG_REG_R5,
#if TCG_TARGET_REG_BITS == 32
    /* 32 bit hosts need 2 * MAX_OPC_PARAM_IARGS registers. */
    TCG_REG_R6,
    TCG_REG_R7,
#if TCG_TARGET_NB_REGS >= 16
    TCG_REG_R8,
    TCG_REG_R9,
    TCG_REG_R10,
#else
# error Too few input registers available
#endif
#endif
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_R0,
#if TCG_TARGET_REG_BITS == 32
    TCG_REG_R1
#endif
};
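
/* Note (an inference from the layout above): on 32 bit hosts a 64 bit
   return value occupies the register pair R0:R1, which is why R1 only
   appears as an output register when TCG_TARGET_REG_BITS == 32. */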

#ifndef NDEBUG
static const char *const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "r00",
    "r01",
    "r02",
    "r03",
    "r04",
    "r05",
    "r06",
    "r07",
#if TCG_TARGET_NB_REGS >= 16
    "r08",
    "r09",
    "r10",
    "r11",
    "r12",
    "r13",
    "r14",
    "r15",
#if TCG_TARGET_NB_REGS >= 32
    "r16",
    "r17",
    "r18",
    "r19",
    "r20",
    "r21",
    "r22",
    "r23",
    "r24",
    "r25",
    "r26",
    "r27",
    "r28",
    "r29",
    "r30",
    "r31"
#endif
#endif
};
#endif

static void patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    /* tcg_out_reloc always uses the same type, addend. */
    assert(type == sizeof(tcg_target_long));
    assert(addend == 0);
    assert(value != 0);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_patch32(code_ptr, value);
    } else {
        tcg_patch64(code_ptr, value);
    }
}

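/* Illustrative note: TCI branch targets are absolute host addresses, so a
   relocation is simply the resolved label value written back, pointer sized,
   into the space that tci_out_label (below) reserved while the label was
   still unresolved. */
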
/* Parse target specific constraints. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
    case 'L':                   /* qemu_ld constraint */
    case 'S':                   /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, BIT(TCG_TARGET_NB_REGS) - 1);
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
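
/* For example, when the "ri" constraint string is parsed, 'r' is handled
   above and accepts any of the TCG_TARGET_NB_REGS registers, while 'i'
   (any immediate) is consumed by the common TCG layer rather than by this
   function.  L and S behave exactly like 'r' here because the interpreter
   has no special addressing requirements. */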

#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
/* Show current bytecode. Used by tcg interpreter. */
void tci_disas(uint8_t opc)
{
    const TCGOpDef *def = &tcg_op_defs[opc];
    fprintf(stderr, "TCG %s %u, %u, %u\n",
            def->name, def->nb_oargs, def->nb_iargs, def->nb_cargs);
}
#endif

/* Write value (native size). */
static void tcg_out_i(TCGContext *s, tcg_target_ulong v)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_out32(s, v);
    } else {
        tcg_out64(s, v);
    }
}

/* Write opcode.  The second byte is a placeholder for the total length of
   the bytecode instruction; each emitter patches it afterwards via
   old_code_ptr[1]. */
static void tcg_out_op_t(TCGContext *s, TCGOpcode op)
{
    tcg_out8(s, op);
    tcg_out8(s, 0);
}

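/* A sketch of the resulting bytecode format (derived from the emitters in
   this file, host byte order assumed).  For instance, "add_i32 r2, r3, $imm"
   with a constant second input is laid out as:

     byte 0      INDEX_op_add_i32
     byte 1      total instruction length in bytes (patched afterwards)
     byte 2      output register number (2)
     byte 3      first input register number (3)
     byte 4      TCG_CONST marker
     bytes 5..8  the 32 bit constant

   The interpreter decodes each instruction in exactly this order. */
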
/* Write register. */
static void tcg_out_r(TCGContext *s, TCGArg t0)
{
    assert(t0 < TCG_TARGET_NB_REGS);
    tcg_out8(s, t0);
}

/* Write register or constant (native size). */
static void tcg_out_ri(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out_i(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

/* Write register or constant (32 bit). */
static void tcg_out_ri32(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out32(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}

#if TCG_TARGET_REG_BITS == 64
/* Write register or constant (64 bit). */
static void tcg_out_ri64(TCGContext *s, int const_arg, TCGArg arg)
{
    if (const_arg) {
        assert(const_arg == 1);
        tcg_out8(s, TCG_CONST);
        tcg_out64(s, arg);
    } else {
        tcg_out_r(s, arg);
    }
}
#endif

/* Write label.  A label whose value is already known is written as an
   absolute address; otherwise a relocation is recorded and pointer sized
   space is reserved for patch_reloc to fill in later. */
static void tci_out_label(TCGContext *s, TCGArg arg)
{
    TCGLabel *label = &s->labels[arg];
    if (label->has_value) {
        tcg_out_i(s, label->u.value);
        assert(label->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr, sizeof(tcg_target_ulong), arg, 0);
        s->code_ptr += sizeof(tcg_target_ulong);
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_ld_i32);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_ld_i64);
        tcg_out_r(s, ret);
        tcg_out_r(s, arg1);
        assert(arg2 == (int32_t)arg2);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    assert(ret != arg);
#if TCG_TARGET_REG_BITS == 32
    tcg_out_op_t(s, INDEX_op_mov_i32);
#else
    tcg_out_op_t(s, INDEX_op_mov_i64);
#endif
    tcg_out_r(s, ret);
    tcg_out_r(s, arg);
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg t0, tcg_target_long arg)
{
    uint8_t *old_code_ptr = s->code_ptr;
    uint32_t arg32 = arg;
    if (type == TCG_TYPE_I32 || arg == arg32) {
        /* Constants that are unchanged by zero extension from 32 bits
           are emitted as movi_i32, even for 64 bit types, to keep the
           bytecode short. */
        tcg_out_op_t(s, INDEX_op_movi_i32);
        tcg_out_r(s, t0);
        tcg_out32(s, arg32);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_movi_i64);
        tcg_out_r(s, t0);
        tcg_out64(s, arg);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

/* Write a call to ARG: the target address is emitted as a TCG_CONST
   immediate, which the interpreter reads back and invokes. */
static inline void tcg_out_call(TCGContext *s, tcg_insn_unit *arg)
{
    tcg_out_ri(s, 1, (uintptr_t)arg);
}

static void tcg_out_op(TCGContext *s, TCGOpcode opc, const TCGArg *args,
                       const int *const_args)
{
    uint8_t *old_code_ptr = s->code_ptr;

    tcg_out_op_t(s, opc);

    switch (opc) {
    case INDEX_op_exit_tb:
        tcg_out64(s, args[0]);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* Direct jump method. */
            assert(args[0] < ARRAY_SIZE(s->tb_jmp_offset));
            s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
            tcg_out32(s, 0);
        } else {
            /* Indirect jump method. */
            TODO();
        }
        assert(args[0] < ARRAY_SIZE(s->tb_next_offset));
        s->tb_next_offset[args[0]] = tcg_current_code_size(s);
        break;
    case INDEX_op_br:
        tci_out_label(s, args[0]);
        break;
    case INDEX_op_setcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_setcond2_i32:
        /* setcond2_i32 cond, t0, t1_low, t1_high, t2_low, t2_high */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out_ri32(s, const_args[4], args[4]);
        tcg_out8(s, args[5]);   /* condition */
        break;
#elif TCG_TARGET_REG_BITS == 64
    case INDEX_op_setcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        tcg_out8(s, args[3]);   /* condition */
        break;
#endif
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        assert(args[2] == (int32_t)args[2]);
        tcg_out32(s, args[2]);
        break;
    case INDEX_op_add_i32:
    case INDEX_op_sub_i32:
    case INDEX_op_mul_i32:
    case INDEX_op_and_i32:
    case INDEX_op_andc_i32:     /* Optional (TCG_TARGET_HAS_andc_i32). */
    case INDEX_op_eqv_i32:      /* Optional (TCG_TARGET_HAS_eqv_i32). */
    case INDEX_op_nand_i32:     /* Optional (TCG_TARGET_HAS_nand_i32). */
    case INDEX_op_nor_i32:      /* Optional (TCG_TARGET_HAS_nor_i32). */
    case INDEX_op_or_i32:
    case INDEX_op_orc_i32:      /* Optional (TCG_TARGET_HAS_orc_i32). */
    case INDEX_op_xor_i32:
    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_rotl_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
    case INDEX_op_rotr_i32:     /* Optional (TCG_TARGET_HAS_rot_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i32:  /* Optional (TCG_TARGET_HAS_deposit_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;

#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_add_i64:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i64:
    case INDEX_op_andc_i64:     /* Optional (TCG_TARGET_HAS_andc_i64). */
    case INDEX_op_eqv_i64:      /* Optional (TCG_TARGET_HAS_eqv_i64). */
    case INDEX_op_nand_i64:     /* Optional (TCG_TARGET_HAS_nand_i64). */
    case INDEX_op_nor_i64:      /* Optional (TCG_TARGET_HAS_nor_i64). */
    case INDEX_op_or_i64:
    case INDEX_op_orc_i64:      /* Optional (TCG_TARGET_HAS_orc_i64). */
    case INDEX_op_xor_i64:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
    case INDEX_op_rotr_i64:     /* Optional (TCG_TARGET_HAS_rot_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out_ri64(s, const_args[2], args[2]);
        break;
    case INDEX_op_deposit_i64:  /* Optional (TCG_TARGET_HAS_deposit_i64). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        assert(args[3] <= UINT8_MAX);
        tcg_out8(s, args[3]);
        assert(args[4] <= UINT8_MAX);
        tcg_out8(s, args[4]);
        break;
    case INDEX_op_div_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_divu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_rem_i64:      /* Optional (TCG_TARGET_HAS_div_i64). */
    case INDEX_op_remu_i64:     /* Optional (TCG_TARGET_HAS_div_i64). */
        TODO();
        break;
    case INDEX_op_div2_i64:     /* Optional (TCG_TARGET_HAS_div2_i64). */
    case INDEX_op_divu2_i64:    /* Optional (TCG_TARGET_HAS_div2_i64). */
        TODO();
        break;
    case INDEX_op_brcond_i64:
        tcg_out_r(s, args[0]);
        tcg_out_ri64(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_bswap16_i64:  /* Optional (TCG_TARGET_HAS_bswap16_i64). */
    case INDEX_op_bswap32_i64:  /* Optional (TCG_TARGET_HAS_bswap32_i64). */
    case INDEX_op_bswap64_i64:  /* Optional (TCG_TARGET_HAS_bswap64_i64). */
    case INDEX_op_not_i64:      /* Optional (TCG_TARGET_HAS_not_i64). */
    case INDEX_op_neg_i64:      /* Optional (TCG_TARGET_HAS_neg_i64). */
    case INDEX_op_ext8s_i64:    /* Optional (TCG_TARGET_HAS_ext8s_i64). */
    case INDEX_op_ext8u_i64:    /* Optional (TCG_TARGET_HAS_ext8u_i64). */
    case INDEX_op_ext16s_i64:   /* Optional (TCG_TARGET_HAS_ext16s_i64). */
    case INDEX_op_ext16u_i64:   /* Optional (TCG_TARGET_HAS_ext16u_i64). */
    case INDEX_op_ext32s_i64:   /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    case INDEX_op_ext32u_i64:   /* Optional (TCG_TARGET_HAS_ext32u_i64). */
#endif /* TCG_TARGET_REG_BITS == 64 */
    case INDEX_op_neg_i32:      /* Optional (TCG_TARGET_HAS_neg_i32). */
    case INDEX_op_not_i32:      /* Optional (TCG_TARGET_HAS_not_i32). */
    case INDEX_op_ext8s_i32:    /* Optional (TCG_TARGET_HAS_ext8s_i32). */
    case INDEX_op_ext16s_i32:   /* Optional (TCG_TARGET_HAS_ext16s_i32). */
    case INDEX_op_ext8u_i32:    /* Optional (TCG_TARGET_HAS_ext8u_i32). */
    case INDEX_op_ext16u_i32:   /* Optional (TCG_TARGET_HAS_ext16u_i32). */
    case INDEX_op_bswap16_i32:  /* Optional (TCG_TARGET_HAS_bswap16_i32). */
    case INDEX_op_bswap32_i32:  /* Optional (TCG_TARGET_HAS_bswap32_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        break;
    case INDEX_op_div_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_divu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_rem_i32:      /* Optional (TCG_TARGET_HAS_div_i32). */
    case INDEX_op_remu_i32:     /* Optional (TCG_TARGET_HAS_div_i32). */
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        break;
    case INDEX_op_div2_i32:     /* Optional (TCG_TARGET_HAS_div2_i32). */
    case INDEX_op_divu2_i32:    /* Optional (TCG_TARGET_HAS_div2_i32). */
        TODO();
        break;
#if TCG_TARGET_REG_BITS == 32
    case INDEX_op_add2_i32:
    case INDEX_op_sub2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        tcg_out_r(s, args[4]);
        tcg_out_r(s, args[5]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_ri32(s, const_args[2], args[2]);
        tcg_out_ri32(s, const_args[3], args[3]);
        tcg_out8(s, args[4]);   /* condition */
        tci_out_label(s, args[5]);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_r(s, args[0]);
        tcg_out_r(s, args[1]);
        tcg_out_r(s, args[2]);
        tcg_out_r(s, args[3]);
        break;
#endif
    case INDEX_op_brcond_i32:
        tcg_out_r(s, args[0]);
        tcg_out_ri32(s, const_args[1], args[1]);
        tcg_out8(s, args[2]);   /* condition */
        tci_out_label(s, args[3]);
        break;
    case INDEX_op_qemu_ld8u:
    case INDEX_op_qemu_ld8s:
    case INDEX_op_qemu_ld16u:
    case INDEX_op_qemu_ld16s:
    case INDEX_op_qemu_ld32:
#if TCG_TARGET_REG_BITS == 64
    case INDEX_op_qemu_ld32s:
    case INDEX_op_qemu_ld32u:
#endif
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st8:
    case INDEX_op_qemu_st16:
    case INDEX_op_qemu_st32:
        tcg_out_r(s, *args++);
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_qemu_st64:
        tcg_out_r(s, *args++);
#if TCG_TARGET_REG_BITS == 32
        tcg_out_r(s, *args++);
#endif
        tcg_out_r(s, *args++);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        tcg_out_r(s, *args++);
#endif
#ifdef CONFIG_SOFTMMU
        tcg_out_i(s, *args);
#endif
        break;
    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        tcg_abort();
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg, TCGReg arg1,
                       intptr_t arg2)
{
    uint8_t *old_code_ptr = s->code_ptr;
    if (type == TCG_TYPE_I32) {
        tcg_out_op_t(s, INDEX_op_st_i32);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
    } else {
        assert(type == TCG_TYPE_I64);
#if TCG_TARGET_REG_BITS == 64
        tcg_out_op_t(s, INDEX_op_st_i64);
        tcg_out_r(s, arg);
        tcg_out_r(s, arg1);
        tcg_out32(s, arg2);
#else
        TODO();
#endif
    }
    old_code_ptr[1] = s->code_ptr - old_code_ptr;
}

/* Test if a constant matches the constraint. */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    /* No need to return 0 or 1, 0 or != 0 is good enough. */
    return arg_ct->ct & TCG_CT_CONST;
}

static void tcg_target_init(TCGContext *s)
{
#if defined(CONFIG_DEBUG_TCG_INTERPRETER)
    const char *envval = getenv("DEBUG_TCG");
    if (envval) {
        qemu_set_log(strtol(envval, NULL, 0));
    }
#endif

    /* The current code uses uint8_t for tcg operations. */
    assert(ARRAY_SIZE(tcg_op_defs) <= UINT8_MAX);

    /* Registers available for 32 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* Registers available for 64 bit operations. */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I64], 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);
    /* TODO: Which registers should be set here? */
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
                     BIT(TCG_TARGET_NB_REGS) - 1);

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_CALL_STACK);
    tcg_add_target_add_op_defs(tcg_target_op_defs);

    /* We use negative offsets from "sp" so that we can distinguish
       stores that might pretend to be call arguments. */
    tcg_set_frame(s, TCG_REG_CALL_STACK,
                  -CPU_TEMP_BUF_NLONGS * sizeof(long),
                  CPU_TEMP_BUF_NLONGS * sizeof(long));
}

/* Generate global QEMU prologue and epilogue code. */
static inline void tcg_target_qemu_prologue(TCGContext *s)
{
    /* The interpreter executes bytecode rather than native code, so
       there is no prologue or epilogue to generate here. */
}