[qemu.git] / tci.c
/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 4
# error Fix needed, number of supported input arguments changed!
#endif
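
/* All helper calls are dispatched through one generic function pointer type.
   With MAX_OPC_PARAM_IARGS == 4 input arguments, a 32 bit host passes every
   (possibly 64 bit) argument as two tcg_target_ulong halves, which is why
   the 32 bit variant takes eight parameters. */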
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

/* TCI can optionally use a global register variable for env. */
#if !defined(AREG0)
CPUArchState *env;
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

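/* Interpreter register file: one slot for each TCG target register,
   indexed by TCGReg. */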
static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    tci_reg[index] = value;
}

static void tci_write_reg8s(TCGReg index, int8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16s(TCGReg index, int16_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16(TCGReg index, uint16_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

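/* The helpers below decode operands from the bytecode stream: a register
   operand is a single register index byte, while a constant is stored
   inline in host byte order at its natural size. */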
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

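/* A guest address may be wider than a host register; in that case it is
   encoded as two register operands, low half first. */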
/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}

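/* TCG_CONST is a pseudo register index used by the TCI code generator to
   mark an immediate operand; the constant value follows inline in the
   bytecode stream. */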
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static target_ulong tci_read_label(uint8_t **tb_ptr)
{
    target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

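/* Every TCI instruction starts with one opcode byte and one byte holding
   the total instruction size, followed by the operand bytes; the size byte
   is only checked when assertions are enabled. */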
/* Interpret pseudo code in tb. */
tcg_target_ulong tcg_qemu_tb_exec(CPUArchState *cpustate, uint8_t *tb_ptr)
{
    tcg_target_ulong next_tb = 0;

    env = cpustate;
    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    assert(tb_ptr);

    for (;;) {
#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
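        /* Helper call: the function pointer is read as a register-or-constant
           operand, the arguments are taken from fixed interpreter registers,
           and the (up to 64 bit) result is returned in R0 (plus R1 for the
           high half on 32 bit hosts). */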
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_jmp:
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
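        /* The rotate expansions below assume a shift count 0 < t2 < 32;
           a count of 0 would shift by the full word width, which is
           undefined behaviour in C. */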
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2)));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2)));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
        case INDEX_op_rotr_i64:
            TODO();
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
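        /* Guest memory accesses: with CONFIG_SOFTMMU they go through the MMU
           helpers, which take the memory index read from the bytecode stream;
           in user mode the guest address is dereferenced directly (offset by
           GUEST_BASE) and tswap* corrects for a differing guest byte order. */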
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stb_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stw_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stl_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stq_mmu(env, taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}
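
/* Rough usage sketch (the real call site lives in cpu-exec.c and may differ
   in detail):

       next_tb = tcg_qemu_tb_exec(env, tb->tc_ptr);

   The return value is whatever the generated code passed to the exit_tb
   opcode, typically a translation block pointer used for block chaining,
   or 0. */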