qemu.git / tci.c
1 /*
2  * Tiny Code Interpreter for QEMU
3  *
4  * Copyright (c) 2009, 2011 Stefan Weil
5  *
6  * This program is free software: you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License as published by
8  * the Free Software Foundation, either version 2 of the License, or
9  * (at your option) any later version.
10  *
11  * This program is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  * GNU General Public License for more details.
15  *
16  * You should have received a copy of the GNU General Public License
17  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
18  */
19
20 #include "config.h"
21
22 /* Defining NDEBUG disables assertions (which makes the code faster). */
23 #if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
24 # define NDEBUG
25 #endif
26
27 #include "qemu-common.h"
28 #include "exec-all.h"           /* MAX_OPC_PARAM_IARGS */
29 #include "tcg-op.h"
30
31 /* Marker for missing code. */
32 #define TODO() \
33     do { \
34         fprintf(stderr, "TODO %s:%u: %s()\n", \
35                 __FILE__, __LINE__, __func__); \
36         tcg_abort(); \
37     } while (0)
38
39 #if MAX_OPC_PARAM_IARGS != 5
40 # error Fix needed, number of supported input arguments changed!
41 #endif
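/* Helper functions are called through this generic function pointer type.
   On 32-bit hosts each of the five input arguments may occupy a register
   pair, hence the ten parameters; the 64-bit result is likewise returned
   in two registers (see INDEX_op_call below). */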
42 #if TCG_TARGET_REG_BITS == 32
43 typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
44                                     tcg_target_ulong, tcg_target_ulong,
45                                     tcg_target_ulong, tcg_target_ulong,
46                                     tcg_target_ulong, tcg_target_ulong,
47                                     tcg_target_ulong, tcg_target_ulong);
48 #else
49 typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
50                                     tcg_target_ulong, tcg_target_ulong,
51                                     tcg_target_ulong);
52 #endif
53
54 /* TCI can optionally use a global register variable for env. */
55 #if !defined(AREG0)
56 CPUArchState *env;
57 #endif
58
59 /* Targets which don't use GETPC also don't need tci_tb_ptr,
60    which makes them a little faster. */
61 #if defined(GETPC)
62 uintptr_t tci_tb_ptr;
63 #endif
64
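/* Register file of the interpreter: one native-size slot per TCG target
   register, indexed by TCGReg. TCG_AREG0 holds the CPU state pointer and
   is initialized in tcg_qemu_tb_exec(). */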
65 static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];
66
67 static tcg_target_ulong tci_read_reg(TCGReg index)
68 {
69     assert(index < ARRAY_SIZE(tci_reg));
70     return tci_reg[index];
71 }
72
73 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
74 static int8_t tci_read_reg8s(TCGReg index)
75 {
76     return (int8_t)tci_read_reg(index);
77 }
78 #endif
79
80 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
81 static int16_t tci_read_reg16s(TCGReg index)
82 {
83     return (int16_t)tci_read_reg(index);
84 }
85 #endif
86
87 #if TCG_TARGET_REG_BITS == 64
88 static int32_t tci_read_reg32s(TCGReg index)
89 {
90     return (int32_t)tci_read_reg(index);
91 }
92 #endif
93
94 static uint8_t tci_read_reg8(TCGReg index)
95 {
96     return (uint8_t)tci_read_reg(index);
97 }
98
99 static uint16_t tci_read_reg16(TCGReg index)
100 {
101     return (uint16_t)tci_read_reg(index);
102 }
103
104 static uint32_t tci_read_reg32(TCGReg index)
105 {
106     return (uint32_t)tci_read_reg(index);
107 }
108
109 #if TCG_TARGET_REG_BITS == 64
110 static uint64_t tci_read_reg64(TCGReg index)
111 {
112     return tci_read_reg(index);
113 }
114 #endif
115
116 static void tci_write_reg(TCGReg index, tcg_target_ulong value)
117 {
118     assert(index < ARRAY_SIZE(tci_reg));
119     assert(index != TCG_AREG0);
120     tci_reg[index] = value;
121 }
122
123 static void tci_write_reg8s(TCGReg index, int8_t value)
124 {
125     tci_write_reg(index, value);
126 }
127
128 static void tci_write_reg16s(TCGReg index, int16_t value)
129 {
130     tci_write_reg(index, value);
131 }
132
133 #if TCG_TARGET_REG_BITS == 64
134 static void tci_write_reg32s(TCGReg index, int32_t value)
135 {
136     tci_write_reg(index, value);
137 }
138 #endif
139
140 static void tci_write_reg8(TCGReg index, uint8_t value)
141 {
142     tci_write_reg(index, value);
143 }
144
145 static void tci_write_reg16(TCGReg index, uint16_t value)
146 {
147     tci_write_reg(index, value);
148 }
149
150 static void tci_write_reg32(TCGReg index, uint32_t value)
151 {
152     tci_write_reg(index, value);
153 }
154
155 #if TCG_TARGET_REG_BITS == 32
156 static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
157                             uint64_t value)
158 {
159     tci_write_reg(low_index, value);
160     tci_write_reg(high_index, value >> 32);
161 }
162 #elif TCG_TARGET_REG_BITS == 64
163 static void tci_write_reg64(TCGReg index, uint64_t value)
164 {
165     tci_write_reg(index, value);
166 }
167 #endif
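/* On 32-bit hosts a 64 bit value occupies a pair of 32 bit registers:
   tci_write_reg64() stores the low half in low_index and the high half
   in high_index. */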
168
169 #if TCG_TARGET_REG_BITS == 32
170 /* Create a 64 bit value from two 32 bit values. */
171 static uint64_t tci_uint64(uint32_t high, uint32_t low)
172 {
173     return ((uint64_t)high << 32) + low;
174 }
175 #endif
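/* Operand encoding: a register operand is a single byte (the TCGReg
   index); a constant operand is stored in-line in host byte order.
   Each tci_read_* helper below fetches one operand and advances *tb_ptr
   past it. */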
176
177 /* Read constant (native size) from bytecode. */
178 static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
179 {
180     tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
181     *tb_ptr += sizeof(value);
182     return value;
183 }
184
185 /* Read constant (32 bit) from bytecode. */
186 static uint32_t tci_read_i32(uint8_t **tb_ptr)
187 {
188     uint32_t value = *(uint32_t *)(*tb_ptr);
189     *tb_ptr += sizeof(value);
190     return value;
191 }
192
193 #if TCG_TARGET_REG_BITS == 64
194 /* Read constant (64 bit) from bytecode. */
195 static uint64_t tci_read_i64(uint8_t **tb_ptr)
196 {
197     uint64_t value = *(uint64_t *)(*tb_ptr);
198     *tb_ptr += sizeof(value);
199     return value;
200 }
201 #endif
202
203 /* Read indexed register (native size) from bytecode. */
204 static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
205 {
206     tcg_target_ulong value = tci_read_reg(**tb_ptr);
207     *tb_ptr += 1;
208     return value;
209 }
210
211 /* Read indexed register (8 bit) from bytecode. */
212 static uint8_t tci_read_r8(uint8_t **tb_ptr)
213 {
214     uint8_t value = tci_read_reg8(**tb_ptr);
215     *tb_ptr += 1;
216     return value;
217 }
218
219 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
220 /* Read indexed register (8 bit signed) from bytecode. */
221 static int8_t tci_read_r8s(uint8_t **tb_ptr)
222 {
223     int8_t value = tci_read_reg8s(**tb_ptr);
224     *tb_ptr += 1;
225     return value;
226 }
227 #endif
228
229 /* Read indexed register (16 bit) from bytecode. */
230 static uint16_t tci_read_r16(uint8_t **tb_ptr)
231 {
232     uint16_t value = tci_read_reg16(**tb_ptr);
233     *tb_ptr += 1;
234     return value;
235 }
236
237 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
238 /* Read indexed register (16 bit signed) from bytecode. */
239 static int16_t tci_read_r16s(uint8_t **tb_ptr)
240 {
241     int16_t value = tci_read_reg16s(**tb_ptr);
242     *tb_ptr += 1;
243     return value;
244 }
245 #endif
246
247 /* Read indexed register (32 bit) from bytecode. */
248 static uint32_t tci_read_r32(uint8_t **tb_ptr)
249 {
250     uint32_t value = tci_read_reg32(**tb_ptr);
251     *tb_ptr += 1;
252     return value;
253 }
254
255 #if TCG_TARGET_REG_BITS == 32
256 /* Read two indexed registers (2 * 32 bit) from bytecode. */
257 static uint64_t tci_read_r64(uint8_t **tb_ptr)
258 {
259     uint32_t low = tci_read_r32(tb_ptr);
260     return tci_uint64(tci_read_r32(tb_ptr), low);
261 }
262 #elif TCG_TARGET_REG_BITS == 64
263 /* Read indexed register (32 bit signed) from bytecode. */
264 static int32_t tci_read_r32s(uint8_t **tb_ptr)
265 {
266     int32_t value = tci_read_reg32s(**tb_ptr);
267     *tb_ptr += 1;
268     return value;
269 }
270
271 /* Read indexed register (64 bit) from bytecode. */
272 static uint64_t tci_read_r64(uint8_t **tb_ptr)
273 {
274     uint64_t value = tci_read_reg64(**tb_ptr);
275     *tb_ptr += 1;
276     return value;
277 }
278 #endif
279
280 /* Read indexed register(s) with target address from bytecode. */
281 static target_ulong tci_read_ulong(uint8_t **tb_ptr)
282 {
283     target_ulong taddr = tci_read_r(tb_ptr);
284 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
285     taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
286 #endif
287     return taddr;
288 }
289
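/* A register-or-constant operand starts with one register byte; the
   reserved index TCG_CONST signals that an immediate of the requested
   size follows in the bytecode instead of a register value. */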
290 /* Read indexed register or constant (native size) from bytecode. */
291 static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
292 {
293     tcg_target_ulong value;
294     TCGReg r = **tb_ptr;
295     *tb_ptr += 1;
296     if (r == TCG_CONST) {
297         value = tci_read_i(tb_ptr);
298     } else {
299         value = tci_read_reg(r);
300     }
301     return value;
302 }
303
304 /* Read indexed register or constant (32 bit) from bytecode. */
305 static uint32_t tci_read_ri32(uint8_t **tb_ptr)
306 {
307     uint32_t value;
308     TCGReg r = **tb_ptr;
309     *tb_ptr += 1;
310     if (r == TCG_CONST) {
311         value = tci_read_i32(tb_ptr);
312     } else {
313         value = tci_read_reg32(r);
314     }
315     return value;
316 }
317
318 #if TCG_TARGET_REG_BITS == 32
319 /* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
320 static uint64_t tci_read_ri64(uint8_t **tb_ptr)
321 {
322     uint32_t low = tci_read_ri32(tb_ptr);
323     return tci_uint64(tci_read_ri32(tb_ptr), low);
324 }
325 #elif TCG_TARGET_REG_BITS == 64
326 /* Read indexed register or constant (64 bit) from bytecode. */
327 static uint64_t tci_read_ri64(uint8_t **tb_ptr)
328 {
329     uint64_t value;
330     TCGReg r = **tb_ptr;
331     *tb_ptr += 1;
332     if (r == TCG_CONST) {
333         value = tci_read_i64(tb_ptr);
334     } else {
335         value = tci_read_reg64(r);
336     }
337     return value;
338 }
339 #endif
340
341 static target_ulong tci_read_label(uint8_t **tb_ptr)
342 {
343     target_ulong label = tci_read_i(tb_ptr);
344     assert(label != 0);
345     return label;
346 }
347
348 static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
349 {
350     bool result = false;
351     int32_t i0 = u0;
352     int32_t i1 = u1;
353     switch (condition) {
354     case TCG_COND_EQ:
355         result = (u0 == u1);
356         break;
357     case TCG_COND_NE:
358         result = (u0 != u1);
359         break;
360     case TCG_COND_LT:
361         result = (i0 < i1);
362         break;
363     case TCG_COND_GE:
364         result = (i0 >= i1);
365         break;
366     case TCG_COND_LE:
367         result = (i0 <= i1);
368         break;
369     case TCG_COND_GT:
370         result = (i0 > i1);
371         break;
372     case TCG_COND_LTU:
373         result = (u0 < u1);
374         break;
375     case TCG_COND_GEU:
376         result = (u0 >= u1);
377         break;
378     case TCG_COND_LEU:
379         result = (u0 <= u1);
380         break;
381     case TCG_COND_GTU:
382         result = (u0 > u1);
383         break;
384     default:
385         TODO();
386     }
387     return result;
388 }
389
390 static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
391 {
392     bool result = false;
393     int64_t i0 = u0;
394     int64_t i1 = u1;
395     switch (condition) {
396     case TCG_COND_EQ:
397         result = (u0 == u1);
398         break;
399     case TCG_COND_NE:
400         result = (u0 != u1);
401         break;
402     case TCG_COND_LT:
403         result = (i0 < i1);
404         break;
405     case TCG_COND_GE:
406         result = (i0 >= i1);
407         break;
408     case TCG_COND_LE:
409         result = (i0 <= i1);
410         break;
411     case TCG_COND_GT:
412         result = (i0 > i1);
413         break;
414     case TCG_COND_LTU:
415         result = (u0 < u1);
416         break;
417     case TCG_COND_GEU:
418         result = (u0 >= u1);
419         break;
420     case TCG_COND_LEU:
421         result = (u0 <= u1);
422         break;
423     case TCG_COND_GTU:
424         result = (u0 > u1);
425         break;
426     default:
427         TODO();
428     }
429     return result;
430 }
431
432 /* Interpret pseudo code in tb. */
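/* Every bytecode instruction starts with a one byte opcode and a one byte
   total instruction size, followed by its operands. In debug builds the
   size byte is used to assert that each handler consumed exactly the
   operands it was given. */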
433 tcg_target_ulong tcg_qemu_tb_exec(CPUArchState *cpustate, uint8_t *tb_ptr)
434 {
435     tcg_target_ulong next_tb = 0;
436
437     env = cpustate;
438     tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
439     assert(tb_ptr);
440
441     for (;;) {
442 #if defined(GETPC)
443         tci_tb_ptr = (uintptr_t)tb_ptr;
444 #endif
445         TCGOpcode opc = tb_ptr[0];
446 #if !defined(NDEBUG)
447         uint8_t op_size = tb_ptr[1];
448         uint8_t *old_code_ptr = tb_ptr;
449 #endif
450         tcg_target_ulong t0;
451         tcg_target_ulong t1;
452         tcg_target_ulong t2;
453         tcg_target_ulong label;
454         TCGCond condition;
455         target_ulong taddr;
456 #ifndef CONFIG_SOFTMMU
457         tcg_target_ulong host_addr;
458 #endif
459         uint8_t tmp8;
460         uint16_t tmp16;
461         uint32_t tmp32;
462         uint64_t tmp64;
463 #if TCG_TARGET_REG_BITS == 32
464         uint64_t v64;
465 #endif
466
467         /* Skip opcode and size entry. */
468         tb_ptr += 2;
469
470         switch (opc) {
471         case INDEX_op_end:
472         case INDEX_op_nop:
473             break;
474         case INDEX_op_nop1:
475         case INDEX_op_nop2:
476         case INDEX_op_nop3:
477         case INDEX_op_nopn:
478         case INDEX_op_discard:
479             TODO();
480             break;
481         case INDEX_op_set_label:
482             TODO();
483             break;
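        /* Helper call: the function pointer is a register-or-constant
           operand; arguments are taken from the fixed registers R0-R3 and
           R5 upwards (R4 is not used), and the result is written back to
           R0 (plus R1 for the high half on 32-bit hosts). */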
484         case INDEX_op_call:
485             t0 = tci_read_ri(&tb_ptr);
486 #if TCG_TARGET_REG_BITS == 32
487             tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
488                                           tci_read_reg(TCG_REG_R1),
489                                           tci_read_reg(TCG_REG_R2),
490                                           tci_read_reg(TCG_REG_R3),
491                                           tci_read_reg(TCG_REG_R5),
492                                           tci_read_reg(TCG_REG_R6),
493                                           tci_read_reg(TCG_REG_R7),
494                                           tci_read_reg(TCG_REG_R8),
495                                           tci_read_reg(TCG_REG_R9),
496                                           tci_read_reg(TCG_REG_R10));
497             tci_write_reg(TCG_REG_R0, tmp64);
498             tci_write_reg(TCG_REG_R1, tmp64 >> 32);
499 #else
500             tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
501                                           tci_read_reg(TCG_REG_R1),
502                                           tci_read_reg(TCG_REG_R2),
503                                           tci_read_reg(TCG_REG_R3),
504                                           tci_read_reg(TCG_REG_R5));
505             tci_write_reg(TCG_REG_R0, tmp64);
506 #endif
507             break;
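        /* Branch targets are absolute addresses within the bytecode;
           tci_read_label() asserts that the target has been resolved
           (non-zero) before it is used. */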
508         case INDEX_op_jmp:
509         case INDEX_op_br:
510             label = tci_read_label(&tb_ptr);
511             assert(tb_ptr == old_code_ptr + op_size);
512             tb_ptr = (uint8_t *)label;
513             continue;
514         case INDEX_op_setcond_i32:
515             t0 = *tb_ptr++;
516             t1 = tci_read_r32(&tb_ptr);
517             t2 = tci_read_ri32(&tb_ptr);
518             condition = *tb_ptr++;
519             tci_write_reg32(t0, tci_compare32(t1, t2, condition));
520             break;
521 #if TCG_TARGET_REG_BITS == 32
522         case INDEX_op_setcond2_i32:
523             t0 = *tb_ptr++;
524             tmp64 = tci_read_r64(&tb_ptr);
525             v64 = tci_read_ri64(&tb_ptr);
526             condition = *tb_ptr++;
527             tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
528             break;
529 #elif TCG_TARGET_REG_BITS == 64
530         case INDEX_op_setcond_i64:
531             t0 = *tb_ptr++;
532             t1 = tci_read_r64(&tb_ptr);
533             t2 = tci_read_ri64(&tb_ptr);
534             condition = *tb_ptr++;
535             tci_write_reg64(t0, tci_compare64(t1, t2, condition));
536             break;
537 #endif
538         case INDEX_op_mov_i32:
539             t0 = *tb_ptr++;
540             t1 = tci_read_r32(&tb_ptr);
541             tci_write_reg32(t0, t1);
542             break;
543         case INDEX_op_movi_i32:
544             t0 = *tb_ptr++;
545             t1 = tci_read_i32(&tb_ptr);
546             tci_write_reg32(t0, t1);
547             break;
548
549             /* Load/store operations (32 bit). */
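            /* Each load decodes: destination register, base register,
               32 bit offset; each store decodes: value register, base
               register, 32 bit offset. The host address is base + offset. */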
550
551         case INDEX_op_ld8u_i32:
552             t0 = *tb_ptr++;
553             t1 = tci_read_r(&tb_ptr);
554             t2 = tci_read_i32(&tb_ptr);
555             tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
556             break;
557         case INDEX_op_ld8s_i32:
558         case INDEX_op_ld16u_i32:
559             TODO();
560             break;
561         case INDEX_op_ld16s_i32:
562             TODO();
563             break;
564         case INDEX_op_ld_i32:
565             t0 = *tb_ptr++;
566             t1 = tci_read_r(&tb_ptr);
567             t2 = tci_read_i32(&tb_ptr);
568             tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
569             break;
570         case INDEX_op_st8_i32:
571             t0 = tci_read_r8(&tb_ptr);
572             t1 = tci_read_r(&tb_ptr);
573             t2 = tci_read_i32(&tb_ptr);
574             *(uint8_t *)(t1 + t2) = t0;
575             break;
576         case INDEX_op_st16_i32:
577             t0 = tci_read_r16(&tb_ptr);
578             t1 = tci_read_r(&tb_ptr);
579             t2 = tci_read_i32(&tb_ptr);
580             *(uint16_t *)(t1 + t2) = t0;
581             break;
582         case INDEX_op_st_i32:
583             t0 = tci_read_r32(&tb_ptr);
584             t1 = tci_read_r(&tb_ptr);
585             t2 = tci_read_i32(&tb_ptr);
586             *(uint32_t *)(t1 + t2) = t0;
587             break;
588
589             /* Arithmetic operations (32 bit). */
590
591         case INDEX_op_add_i32:
592             t0 = *tb_ptr++;
593             t1 = tci_read_ri32(&tb_ptr);
594             t2 = tci_read_ri32(&tb_ptr);
595             tci_write_reg32(t0, t1 + t2);
596             break;
597         case INDEX_op_sub_i32:
598             t0 = *tb_ptr++;
599             t1 = tci_read_ri32(&tb_ptr);
600             t2 = tci_read_ri32(&tb_ptr);
601             tci_write_reg32(t0, t1 - t2);
602             break;
603         case INDEX_op_mul_i32:
604             t0 = *tb_ptr++;
605             t1 = tci_read_ri32(&tb_ptr);
606             t2 = tci_read_ri32(&tb_ptr);
607             tci_write_reg32(t0, t1 * t2);
608             break;
609 #if TCG_TARGET_HAS_div_i32
610         case INDEX_op_div_i32:
611             t0 = *tb_ptr++;
612             t1 = tci_read_ri32(&tb_ptr);
613             t2 = tci_read_ri32(&tb_ptr);
614             tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
615             break;
616         case INDEX_op_divu_i32:
617             t0 = *tb_ptr++;
618             t1 = tci_read_ri32(&tb_ptr);
619             t2 = tci_read_ri32(&tb_ptr);
620             tci_write_reg32(t0, t1 / t2);
621             break;
622         case INDEX_op_rem_i32:
623             t0 = *tb_ptr++;
624             t1 = tci_read_ri32(&tb_ptr);
625             t2 = tci_read_ri32(&tb_ptr);
626             tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
627             break;
628         case INDEX_op_remu_i32:
629             t0 = *tb_ptr++;
630             t1 = tci_read_ri32(&tb_ptr);
631             t2 = tci_read_ri32(&tb_ptr);
632             tci_write_reg32(t0, t1 % t2);
633             break;
634 #elif TCG_TARGET_HAS_div2_i32
635         case INDEX_op_div2_i32:
636         case INDEX_op_divu2_i32:
637             TODO();
638             break;
639 #endif
640         case INDEX_op_and_i32:
641             t0 = *tb_ptr++;
642             t1 = tci_read_ri32(&tb_ptr);
643             t2 = tci_read_ri32(&tb_ptr);
644             tci_write_reg32(t0, t1 & t2);
645             break;
646         case INDEX_op_or_i32:
647             t0 = *tb_ptr++;
648             t1 = tci_read_ri32(&tb_ptr);
649             t2 = tci_read_ri32(&tb_ptr);
650             tci_write_reg32(t0, t1 | t2);
651             break;
652         case INDEX_op_xor_i32:
653             t0 = *tb_ptr++;
654             t1 = tci_read_ri32(&tb_ptr);
655             t2 = tci_read_ri32(&tb_ptr);
656             tci_write_reg32(t0, t1 ^ t2);
657             break;
658
659             /* Shift/rotate operations (32 bit). */
660
661         case INDEX_op_shl_i32:
662             t0 = *tb_ptr++;
663             t1 = tci_read_ri32(&tb_ptr);
664             t2 = tci_read_ri32(&tb_ptr);
665             tci_write_reg32(t0, t1 << t2);
666             break;
667         case INDEX_op_shr_i32:
668             t0 = *tb_ptr++;
669             t1 = tci_read_ri32(&tb_ptr);
670             t2 = tci_read_ri32(&tb_ptr);
671             tci_write_reg32(t0, t1 >> t2);
672             break;
673         case INDEX_op_sar_i32:
674             t0 = *tb_ptr++;
675             t1 = tci_read_ri32(&tb_ptr);
676             t2 = tci_read_ri32(&tb_ptr);
677             tci_write_reg32(t0, ((int32_t)t1 >> t2));
678             break;
679 #if TCG_TARGET_HAS_rot_i32
680         case INDEX_op_rotl_i32:
681             t0 = *tb_ptr++;
682             t1 = tci_read_ri32(&tb_ptr);
683             t2 = tci_read_ri32(&tb_ptr);
684             tci_write_reg32(t0, (t1 << t2) | (t1 >> ((32 - t2) & 31)));
685             break;
686         case INDEX_op_rotr_i32:
687             t0 = *tb_ptr++;
688             t1 = tci_read_ri32(&tb_ptr);
689             t2 = tci_read_ri32(&tb_ptr);
690             tci_write_reg32(t0, (t1 >> t2) | (t1 << ((32 - t2) & 31)));
691             break;
692 #endif
693         case INDEX_op_brcond_i32:
694             t0 = tci_read_r32(&tb_ptr);
695             t1 = tci_read_ri32(&tb_ptr);
696             condition = *tb_ptr++;
697             label = tci_read_label(&tb_ptr);
698             if (tci_compare32(t0, t1, condition)) {
699                 assert(tb_ptr == old_code_ptr + op_size);
700                 tb_ptr = (uint8_t *)label;
701                 continue;
702             }
703             break;
704 #if TCG_TARGET_REG_BITS == 32
705         case INDEX_op_add2_i32:
706             t0 = *tb_ptr++;
707             t1 = *tb_ptr++;
708             tmp64 = tci_read_r64(&tb_ptr);
709             tmp64 += tci_read_r64(&tb_ptr);
710             tci_write_reg64(t1, t0, tmp64);
711             break;
712         case INDEX_op_sub2_i32:
713             t0 = *tb_ptr++;
714             t1 = *tb_ptr++;
715             tmp64 = tci_read_r64(&tb_ptr);
716             tmp64 -= tci_read_r64(&tb_ptr);
717             tci_write_reg64(t1, t0, tmp64);
718             break;
719         case INDEX_op_brcond2_i32:
720             tmp64 = tci_read_r64(&tb_ptr);
721             v64 = tci_read_ri64(&tb_ptr);
722             condition = *tb_ptr++;
723             label = tci_read_label(&tb_ptr);
724             if (tci_compare64(tmp64, v64, condition)) {
725                 assert(tb_ptr == old_code_ptr + op_size);
726                 tb_ptr = (uint8_t *)label;
727                 continue;
728             }
729             break;
730         case INDEX_op_mulu2_i32:
731             t0 = *tb_ptr++;
732             t1 = *tb_ptr++;
733             t2 = tci_read_r32(&tb_ptr);
734             tmp64 = tci_read_r32(&tb_ptr);
735             tci_write_reg64(t1, t0, t2 * tmp64);
736             break;
737 #endif /* TCG_TARGET_REG_BITS == 32 */
738 #if TCG_TARGET_HAS_ext8s_i32
739         case INDEX_op_ext8s_i32:
740             t0 = *tb_ptr++;
741             t1 = tci_read_r8s(&tb_ptr);
742             tci_write_reg32(t0, t1);
743             break;
744 #endif
745 #if TCG_TARGET_HAS_ext16s_i32
746         case INDEX_op_ext16s_i32:
747             t0 = *tb_ptr++;
748             t1 = tci_read_r16s(&tb_ptr);
749             tci_write_reg32(t0, t1);
750             break;
751 #endif
752 #if TCG_TARGET_HAS_ext8u_i32
753         case INDEX_op_ext8u_i32:
754             t0 = *tb_ptr++;
755             t1 = tci_read_r8(&tb_ptr);
756             tci_write_reg32(t0, t1);
757             break;
758 #endif
759 #if TCG_TARGET_HAS_ext16u_i32
760         case INDEX_op_ext16u_i32:
761             t0 = *tb_ptr++;
762             t1 = tci_read_r16(&tb_ptr);
763             tci_write_reg32(t0, t1);
764             break;
765 #endif
766 #if TCG_TARGET_HAS_bswap16_i32
767         case INDEX_op_bswap16_i32:
768             t0 = *tb_ptr++;
769             t1 = tci_read_r16(&tb_ptr);
770             tci_write_reg32(t0, bswap16(t1));
771             break;
772 #endif
773 #if TCG_TARGET_HAS_bswap32_i32
774         case INDEX_op_bswap32_i32:
775             t0 = *tb_ptr++;
776             t1 = tci_read_r32(&tb_ptr);
777             tci_write_reg32(t0, bswap32(t1));
778             break;
779 #endif
780 #if TCG_TARGET_HAS_not_i32
781         case INDEX_op_not_i32:
782             t0 = *tb_ptr++;
783             t1 = tci_read_r32(&tb_ptr);
784             tci_write_reg32(t0, ~t1);
785             break;
786 #endif
787 #if TCG_TARGET_HAS_neg_i32
788         case INDEX_op_neg_i32:
789             t0 = *tb_ptr++;
790             t1 = tci_read_r32(&tb_ptr);
791             tci_write_reg32(t0, -t1);
792             break;
793 #endif
794 #if TCG_TARGET_REG_BITS == 64
795         case INDEX_op_mov_i64:
796             t0 = *tb_ptr++;
797             t1 = tci_read_r64(&tb_ptr);
798             tci_write_reg64(t0, t1);
799             break;
800         case INDEX_op_movi_i64:
801             t0 = *tb_ptr++;
802             t1 = tci_read_i64(&tb_ptr);
803             tci_write_reg64(t0, t1);
804             break;
805
806             /* Load/store operations (64 bit). */
807
808         case INDEX_op_ld8u_i64:
809             t0 = *tb_ptr++;
810             t1 = tci_read_r(&tb_ptr);
811             t2 = tci_read_i32(&tb_ptr);
812             tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
813             break;
814         case INDEX_op_ld8s_i64:
815         case INDEX_op_ld16u_i64:
816         case INDEX_op_ld16s_i64:
817             TODO();
818             break;
819         case INDEX_op_ld32u_i64:
820             t0 = *tb_ptr++;
821             t1 = tci_read_r(&tb_ptr);
822             t2 = tci_read_i32(&tb_ptr);
823             tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
824             break;
825         case INDEX_op_ld32s_i64:
826             t0 = *tb_ptr++;
827             t1 = tci_read_r(&tb_ptr);
828             t2 = tci_read_i32(&tb_ptr);
829             tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
830             break;
831         case INDEX_op_ld_i64:
832             t0 = *tb_ptr++;
833             t1 = tci_read_r(&tb_ptr);
834             t2 = tci_read_i32(&tb_ptr);
835             tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
836             break;
837         case INDEX_op_st8_i64:
838             t0 = tci_read_r8(&tb_ptr);
839             t1 = tci_read_r(&tb_ptr);
840             t2 = tci_read_i32(&tb_ptr);
841             *(uint8_t *)(t1 + t2) = t0;
842             break;
843         case INDEX_op_st16_i64:
844             t0 = tci_read_r16(&tb_ptr);
845             t1 = tci_read_r(&tb_ptr);
846             t2 = tci_read_i32(&tb_ptr);
847             *(uint16_t *)(t1 + t2) = t0;
848             break;
849         case INDEX_op_st32_i64:
850             t0 = tci_read_r32(&tb_ptr);
851             t1 = tci_read_r(&tb_ptr);
852             t2 = tci_read_i32(&tb_ptr);
853             *(uint32_t *)(t1 + t2) = t0;
854             break;
855         case INDEX_op_st_i64:
856             t0 = tci_read_r64(&tb_ptr);
857             t1 = tci_read_r(&tb_ptr);
858             t2 = tci_read_i32(&tb_ptr);
859             *(uint64_t *)(t1 + t2) = t0;
860             break;
861
862             /* Arithmetic operations (64 bit). */
863
864         case INDEX_op_add_i64:
865             t0 = *tb_ptr++;
866             t1 = tci_read_ri64(&tb_ptr);
867             t2 = tci_read_ri64(&tb_ptr);
868             tci_write_reg64(t0, t1 + t2);
869             break;
870         case INDEX_op_sub_i64:
871             t0 = *tb_ptr++;
872             t1 = tci_read_ri64(&tb_ptr);
873             t2 = tci_read_ri64(&tb_ptr);
874             tci_write_reg64(t0, t1 - t2);
875             break;
876         case INDEX_op_mul_i64:
877             t0 = *tb_ptr++;
878             t1 = tci_read_ri64(&tb_ptr);
879             t2 = tci_read_ri64(&tb_ptr);
880             tci_write_reg64(t0, t1 * t2);
881             break;
882 #if TCG_TARGET_HAS_div_i64
883         case INDEX_op_div_i64:
884         case INDEX_op_divu_i64:
885         case INDEX_op_rem_i64:
886         case INDEX_op_remu_i64:
887             TODO();
888             break;
889 #elif TCG_TARGET_HAS_div2_i64
890         case INDEX_op_div2_i64:
891         case INDEX_op_divu2_i64:
892             TODO();
893             break;
894 #endif
895         case INDEX_op_and_i64:
896             t0 = *tb_ptr++;
897             t1 = tci_read_ri64(&tb_ptr);
898             t2 = tci_read_ri64(&tb_ptr);
899             tci_write_reg64(t0, t1 & t2);
900             break;
901         case INDEX_op_or_i64:
902             t0 = *tb_ptr++;
903             t1 = tci_read_ri64(&tb_ptr);
904             t2 = tci_read_ri64(&tb_ptr);
905             tci_write_reg64(t0, t1 | t2);
906             break;
907         case INDEX_op_xor_i64:
908             t0 = *tb_ptr++;
909             t1 = tci_read_ri64(&tb_ptr);
910             t2 = tci_read_ri64(&tb_ptr);
911             tci_write_reg64(t0, t1 ^ t2);
912             break;
913
914             /* Shift/rotate operations (64 bit). */
915
916         case INDEX_op_shl_i64:
917             t0 = *tb_ptr++;
918             t1 = tci_read_ri64(&tb_ptr);
919             t2 = tci_read_ri64(&tb_ptr);
920             tci_write_reg64(t0, t1 << t2);
921             break;
922         case INDEX_op_shr_i64:
923             t0 = *tb_ptr++;
924             t1 = tci_read_ri64(&tb_ptr);
925             t2 = tci_read_ri64(&tb_ptr);
926             tci_write_reg64(t0, t1 >> t2);
927             break;
928         case INDEX_op_sar_i64:
929             t0 = *tb_ptr++;
930             t1 = tci_read_ri64(&tb_ptr);
931             t2 = tci_read_ri64(&tb_ptr);
932             tci_write_reg64(t0, ((int64_t)t1 >> t2));
933             break;
934 #if TCG_TARGET_HAS_rot_i64
935         case INDEX_op_rotl_i64:
936         case INDEX_op_rotr_i64:
937             TODO();
938             break;
939 #endif
940         case INDEX_op_brcond_i64:
941             t0 = tci_read_r64(&tb_ptr);
942             t1 = tci_read_ri64(&tb_ptr);
943             condition = *tb_ptr++;
944             label = tci_read_label(&tb_ptr);
945             if (tci_compare64(t0, t1, condition)) {
946                 assert(tb_ptr == old_code_ptr + op_size);
947                 tb_ptr = (uint8_t *)label;
948                 continue;
949             }
950             break;
951 #if TCG_TARGET_HAS_ext8u_i64
952         case INDEX_op_ext8u_i64:
953             t0 = *tb_ptr++;
954             t1 = tci_read_r8(&tb_ptr);
955             tci_write_reg64(t0, t1);
956             break;
957 #endif
958 #if TCG_TARGET_HAS_ext8s_i64
959         case INDEX_op_ext8s_i64:
960             t0 = *tb_ptr++;
961             t1 = tci_read_r8s(&tb_ptr);
962             tci_write_reg64(t0, t1);
963             break;
964 #endif
965 #if TCG_TARGET_HAS_ext16s_i64
966         case INDEX_op_ext16s_i64:
967             t0 = *tb_ptr++;
968             t1 = tci_read_r16s(&tb_ptr);
969             tci_write_reg64(t0, t1);
970             break;
971 #endif
972 #if TCG_TARGET_HAS_ext16u_i64
973         case INDEX_op_ext16u_i64:
974             t0 = *tb_ptr++;
975             t1 = tci_read_r16(&tb_ptr);
976             tci_write_reg64(t0, t1);
977             break;
978 #endif
979 #if TCG_TARGET_HAS_ext32s_i64
980         case INDEX_op_ext32s_i64:
981             t0 = *tb_ptr++;
982             t1 = tci_read_r32s(&tb_ptr);
983             tci_write_reg64(t0, t1);
984             break;
985 #endif
986 #if TCG_TARGET_HAS_ext32u_i64
987         case INDEX_op_ext32u_i64:
988             t0 = *tb_ptr++;
989             t1 = tci_read_r32(&tb_ptr);
990             tci_write_reg64(t0, t1);
991             break;
992 #endif
993 #if TCG_TARGET_HAS_bswap16_i64
994         case INDEX_op_bswap16_i64:
995             TODO();
996             t0 = *tb_ptr++;
997             t1 = tci_read_r16(&tb_ptr);
998             tci_write_reg64(t0, bswap16(t1));
999             break;
1000 #endif
1001 #if TCG_TARGET_HAS_bswap32_i64
1002         case INDEX_op_bswap32_i64:
1003             t0 = *tb_ptr++;
1004             t1 = tci_read_r32(&tb_ptr);
1005             tci_write_reg64(t0, bswap32(t1));
1006             break;
1007 #endif
1008 #if TCG_TARGET_HAS_bswap64_i64
1009         case INDEX_op_bswap64_i64:
1010             t0 = *tb_ptr++;
1011             t1 = tci_read_r64(&tb_ptr);
1012             tci_write_reg64(t0, bswap64(t1));
1013             break;
1014 #endif
1015 #if TCG_TARGET_HAS_not_i64
1016         case INDEX_op_not_i64:
1017             t0 = *tb_ptr++;
1018             t1 = tci_read_r64(&tb_ptr);
1019             tci_write_reg64(t0, ~t1);
1020             break;
1021 #endif
1022 #if TCG_TARGET_HAS_neg_i64
1023         case INDEX_op_neg_i64:
1024             t0 = *tb_ptr++;
1025             t1 = tci_read_r64(&tb_ptr);
1026             tci_write_reg64(t0, -t1);
1027             break;
1028 #endif
1029 #endif /* TCG_TARGET_REG_BITS == 64 */
1030
1031             /* QEMU specific operations. */
1032
1033 #if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
1034         case INDEX_op_debug_insn_start:
1035             TODO();
1036             break;
1037 #else
1038         case INDEX_op_debug_insn_start:
1039             TODO();
1040             break;
1041 #endif
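        /* exit_tb returns the 64 bit value stored after the opcode to the
           caller of tcg_qemu_tb_exec(); goto_tb adds a signed 32 bit
           displacement (measured from the end of the instruction) to
           tb_ptr and continues interpreting. */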
1042         case INDEX_op_exit_tb:
1043             next_tb = *(uint64_t *)tb_ptr;
1044             goto exit;
1045             break;
1046         case INDEX_op_goto_tb:
1047             t0 = tci_read_i32(&tb_ptr);
1048             assert(tb_ptr == old_code_ptr + op_size);
1049             tb_ptr += (int32_t)t0;
1050             continue;
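        /* Guest memory access: with CONFIG_SOFTMMU the access is done by
           the MMU helpers (helper_ldb_mmu() etc.), which take the memory
           index that follows in the bytecode; without softmmu the guest
           address is used directly as a host address offset by GUEST_BASE,
           and tswap16/32/64() corrects for a guest/host endianness
           difference. */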
1051         case INDEX_op_qemu_ld8u:
1052             t0 = *tb_ptr++;
1053             taddr = tci_read_ulong(&tb_ptr);
1054 #ifdef CONFIG_SOFTMMU
1055             tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
1056 #else
1057             host_addr = (tcg_target_ulong)taddr;
1058             assert(taddr == host_addr);
1059             tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
1060 #endif
1061             tci_write_reg8(t0, tmp8);
1062             break;
1063         case INDEX_op_qemu_ld8s:
1064             t0 = *tb_ptr++;
1065             taddr = tci_read_ulong(&tb_ptr);
1066 #ifdef CONFIG_SOFTMMU
1067             tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
1068 #else
1069             host_addr = (tcg_target_ulong)taddr;
1070             assert(taddr == host_addr);
1071             tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
1072 #endif
1073             tci_write_reg8s(t0, tmp8);
1074             break;
1075         case INDEX_op_qemu_ld16u:
1076             t0 = *tb_ptr++;
1077             taddr = tci_read_ulong(&tb_ptr);
1078 #ifdef CONFIG_SOFTMMU
1079             tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
1080 #else
1081             host_addr = (tcg_target_ulong)taddr;
1082             assert(taddr == host_addr);
1083             tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
1084 #endif
1085             tci_write_reg16(t0, tmp16);
1086             break;
1087         case INDEX_op_qemu_ld16s:
1088             t0 = *tb_ptr++;
1089             taddr = tci_read_ulong(&tb_ptr);
1090 #ifdef CONFIG_SOFTMMU
1091             tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
1092 #else
1093             host_addr = (tcg_target_ulong)taddr;
1094             assert(taddr == host_addr);
1095             tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
1096 #endif
1097             tci_write_reg16s(t0, tmp16);
1098             break;
1099 #if TCG_TARGET_REG_BITS == 64
1100         case INDEX_op_qemu_ld32u:
1101             t0 = *tb_ptr++;
1102             taddr = tci_read_ulong(&tb_ptr);
1103 #ifdef CONFIG_SOFTMMU
1104             tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
1105 #else
1106             host_addr = (tcg_target_ulong)taddr;
1107             assert(taddr == host_addr);
1108             tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
1109 #endif
1110             tci_write_reg32(t0, tmp32);
1111             break;
1112         case INDEX_op_qemu_ld32s:
1113             t0 = *tb_ptr++;
1114             taddr = tci_read_ulong(&tb_ptr);
1115 #ifdef CONFIG_SOFTMMU
1116             tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
1117 #else
1118             host_addr = (tcg_target_ulong)taddr;
1119             assert(taddr == host_addr);
1120             tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
1121 #endif
1122             tci_write_reg32s(t0, tmp32);
1123             break;
1124 #endif /* TCG_TARGET_REG_BITS == 64 */
1125         case INDEX_op_qemu_ld32:
1126             t0 = *tb_ptr++;
1127             taddr = tci_read_ulong(&tb_ptr);
1128 #ifdef CONFIG_SOFTMMU
1129             tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
1130 #else
1131             host_addr = (tcg_target_ulong)taddr;
1132             assert(taddr == host_addr);
1133             tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
1134 #endif
1135             tci_write_reg32(t0, tmp32);
1136             break;
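        /* On 32-bit hosts the 64 bit load result is written to a register
           pair: t0 receives the low half and t1 the high half. */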
1137         case INDEX_op_qemu_ld64:
1138             t0 = *tb_ptr++;
1139 #if TCG_TARGET_REG_BITS == 32
1140             t1 = *tb_ptr++;
1141 #endif
1142             taddr = tci_read_ulong(&tb_ptr);
1143 #ifdef CONFIG_SOFTMMU
1144             tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
1145 #else
1146             host_addr = (tcg_target_ulong)taddr;
1147             assert(taddr == host_addr);
1148             tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
1149 #endif
1150             tci_write_reg(t0, tmp64);
1151 #if TCG_TARGET_REG_BITS == 32
1152             tci_write_reg(t1, tmp64 >> 32);
1153 #endif
1154             break;
1155         case INDEX_op_qemu_st8:
1156             t0 = tci_read_r8(&tb_ptr);
1157             taddr = tci_read_ulong(&tb_ptr);
1158 #ifdef CONFIG_SOFTMMU
1159             t2 = tci_read_i(&tb_ptr);
1160             helper_stb_mmu(env, taddr, t0, t2);
1161 #else
1162             host_addr = (tcg_target_ulong)taddr;
1163             assert(taddr == host_addr);
1164             *(uint8_t *)(host_addr + GUEST_BASE) = t0;
1165 #endif
1166             break;
1167         case INDEX_op_qemu_st16:
1168             t0 = tci_read_r16(&tb_ptr);
1169             taddr = tci_read_ulong(&tb_ptr);
1170 #ifdef CONFIG_SOFTMMU
1171             t2 = tci_read_i(&tb_ptr);
1172             helper_stw_mmu(env, taddr, t0, t2);
1173 #else
1174             host_addr = (tcg_target_ulong)taddr;
1175             assert(taddr == host_addr);
1176             *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
1177 #endif
1178             break;
1179         case INDEX_op_qemu_st32:
1180             t0 = tci_read_r32(&tb_ptr);
1181             taddr = tci_read_ulong(&tb_ptr);
1182 #ifdef CONFIG_SOFTMMU
1183             t2 = tci_read_i(&tb_ptr);
1184             helper_stl_mmu(env, taddr, t0, t2);
1185 #else
1186             host_addr = (tcg_target_ulong)taddr;
1187             assert(taddr == host_addr);
1188             *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
1189 #endif
1190             break;
1191         case INDEX_op_qemu_st64:
1192             tmp64 = tci_read_r64(&tb_ptr);
1193             taddr = tci_read_ulong(&tb_ptr);
1194 #ifdef CONFIG_SOFTMMU
1195             t2 = tci_read_i(&tb_ptr);
1196             helper_stq_mmu(env, taddr, tmp64, t2);
1197 #else
1198             host_addr = (tcg_target_ulong)taddr;
1199             assert(taddr == host_addr);
1200             *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
1201 #endif
1202             break;
1203         default:
1204             TODO();
1205             break;
1206         }
1207         assert(tb_ptr == old_code_ptr + op_size);
1208     }
1209 exit:
1210     return next_tb;
1211 }