/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
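
/*
 * Helpers are invoked through this signature (see INDEX_op_call below).
 * On a 32-bit host twice as many parameters are needed, so that each of
 * the up to six arguments can be passed as two 32-bit halves.
 */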
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg16(tcg_target_ulong *regs, TCGReg index, uint16_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}

/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif

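/* Read a branch target (pointer into the bytecode) from bytecode. */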
static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

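/* Evaluate a 32-bit comparison for a TCG condition code. */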
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

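/* Evaluate a 64-bit comparison for a TCG condition code. */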
static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

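/*
 * Guest memory access helpers.  With CONFIG_SOFTMMU the access goes
 * through the MMU/TLB helper functions; otherwise the guest address is
 * translated with g2h() and accessed directly in host memory.
 */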
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
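    /* TCG_REG_CALL_STACK points just past tcg_temps[]; stack slots are
       addressed with negative offsets from it. */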
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
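        /* Each op is encoded as a one-byte opcode and a one-byte total
           length, followed by its operands. */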
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
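            /* The helper address is a register-or-constant operand;
               arguments come from fixed registers and the 64-bit result
               goes back to R0 (the R0/R1 pair on 32-bit hosts). */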
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11),
                                          tci_read_reg(regs, TCG_REG_R12));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
            TODO();
            break;
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
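            /* Insert the low tmp8 bits of t2 into t1 at bit offset tmp16. */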
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
            TODO();
            break;
        case INDEX_op_ld16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg16(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
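            /* Insert the low tmp8 bits of t2 into t1 at bit offset tmp16. */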
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
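            /* The 64-bit value returned to the caller of
               tcg_qemu_tb_exec() is encoded in the bytecode that follows. */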
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld_i32:
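            /* Operands: destination register, guest address, and a
               TCGMemOpIdx describing size, sign and endianness. */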
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}