/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
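/*
 * Type of an entry point for a TCG helper call.  On 32-bit hosts each
 * 64-bit helper argument is passed as a pair of tcg_target_ulong values,
 * so twice as many parameters are needed.
 */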
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

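/*
 * Bytecode operand readers.  Each helper below consumes one operand from
 * the instruction stream and advances *tb_ptr past it.
 */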
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}

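/*
 * "ri" operands encode either a register or an immediate: a register index
 * equal to TCG_CONST means that a constant of the requested size follows
 * inline in the bytecode instead of a register number.
 */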
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif

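/*
 * Branch targets are encoded as absolute host addresses within the
 * generated bytecode, so taking a branch simply reloads tb_ptr.
 */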
static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

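/*
 * Guest load/store wrappers.  With SOFTMMU the access goes through the TCG
 * memory helpers (TLB lookup plus possible slow path); in user mode the
 * guest address is translated directly with g2h() and accessed on the host.
 */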
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif

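/*
 * Each TCI instruction starts with a one-byte opcode and a one-byte total
 * length, followed by its operands; the length byte lets debug builds
 * assert that every operand was consumed.
 */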
/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
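        /*
         * Helper calls: the function pointer is read from the bytecode and
         * the arguments come from fixed registers.  On 32-bit hosts each
         * 64-bit argument is split across a register pair and the 64-bit
         * result is returned in R0/R1; otherwise the result goes to R0.
         */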
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

        /* Load/store operations (32 bit). */

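        /*
         * Host loads/stores at register + signed 32-bit offset.  These are
         * used for accesses to statically allocated state such as the CPU
         * env structure or the interpreter's temporary stack buffer.
         */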
        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
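        /*
         * deposit dst, t1, t2, pos, len: build a len-bit mask at bit
         * position pos and replace that field of t1 with the low bits of t2.
         */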
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

        /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

        /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

        /* QEMU specific operations. */

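        /*
         * exit_tb returns the 64-bit value stored inline in the bytecode to
         * the caller of tcg_qemu_tb_exec; goto_tb adds an aligned 32-bit
         * displacement to tb_ptr, which can be patched for block chaining.
         */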
        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
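        /*
         * Guest memory accesses: the target address may occupy one or two
         * registers (when TARGET_LONG_BITS exceeds the host register width),
         * and oi packs the MemOp with the mmu index for the qemu_ld/st
         * helpers.
         */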
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}