/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
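/*
 * Common prototype used to invoke helpers from INDEX_op_call.  Each of the
 * MAX_OPC_PARAM_IARGS integer arguments may be 64 bit, so the 32-bit host
 * variant provides twice as many tcg_target_ulong parameters (12 instead of
 * 6) to cover the case where every argument needs a register pair.
 */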
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg16(tcg_target_ulong *regs, TCGReg index, uint16_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

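/*
 * On a 32-bit host a 64-bit TCG value lives in a pair of 32-bit registers.
 * tci_write_reg64() stores the low and high halves into the two indexed
 * registers, and tci_uint64() below recombines such a pair into one value.
 */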
#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(const uint8_t **tb_ptr)
{
    uint64_t value = *(const uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}

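/*
 * "ri" operands are either a register or an immediate.  The operand starts
 * with a single register index byte; the reserved index TCG_CONST indicates
 * that a constant of the appropriate width follows in the instruction stream
 * instead.
 */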
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs,
                              const uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif

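/*
 * Branch targets are stored as absolute host addresses of the destination
 * bytecode.  A value of zero would indicate a label that was never resolved
 * by the code generator, hence the assertion below.
 */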
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

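/*
 * Evaluate a TCGCond comparison.  These helpers back the setcond and brcond
 * opcodes below; the signed predicates simply reinterpret the unsigned
 * operands as signed values.
 */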
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

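/*
 * Guest memory accessors used by the qemu_ld/st opcodes.  With softmmu the
 * access goes through the MMU helpers, passing the memory-operation index
 * (oi) and the current bytecode address as the return address for unwinding;
 * in user mode the guest address is converted with g2h() and dereferenced
 * directly.
 */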
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif

/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

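    /*
     * Dispatch loop.  Every TCI instruction starts with a one-byte opcode
     * followed by a one-byte total length (used only for assertions);
     * register operands are single index bytes, while immediates, labels and
     * memory-operation indexes are embedded inline at their natural size.
     */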
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
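        /*
         * Call into a helper.  The target address is a register-or-constant
         * operand; the arguments were loaded into the fixed registers named
         * below by the generated code, and the 64-bit result is returned in
         * R0 (the R0/R1 pair on a 32-bit host).
         */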
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11),
                                          tci_read_reg(regs, TCG_REG_R12));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
            TODO();
            break;
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
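        /*
         * deposit dst, t1, t2, pos, len: insert the low 'len' bits of t2
         * into t1 starting at bit 'pos'.  The mask below is built as
         * ((1 << len) - 1) << pos.
         */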
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
            TODO();
            break;
        case INDEX_op_ld16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg16(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

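        /*
         * exit_tb returns a 64-bit value taken directly from the instruction
         * stream.  goto_tb reads a 32-bit displacement that is kept aligned
         * and read atomically so that translation-block chaining can patch
         * it concurrently, then continues interpreting at the new address.
         */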
        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = qatomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
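        /*
         * Guest load/store.  The guest address is read with tci_read_ulong()
         * (one or two registers, depending on the guest/host word sizes),
         * followed by a TCGMemOpIdx immediate; its MemOp bits select the
         * access size, signedness and byte order handled below.
         */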
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}