/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif

#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
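/*
 * Helper calls are dispatched through this generic function pointer type.
 * Every argument slot is a tcg_target_ulong, so a 32-bit host needs twice
 * as many parameters as a 64-bit host to pass the same MAX_OPC_PARAM_IARGS
 * (6) helper inputs, each 64-bit value occupying two slots.
 */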
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int8_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int16_t)tci_read_reg(regs, index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif

static uint8_t tci_read_reg8(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint8_t)tci_read_reg(regs, index);
}

static uint16_t tci_read_reg16(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint16_t)tci_read_reg(regs, index);
}

static uint32_t tci_read_reg32(const tcg_target_ulong *regs, TCGReg index)
{
    return (uint32_t)tci_read_reg(regs, index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif

static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void
tci_write_reg32s(tcg_target_ulong *regs, TCGReg index, int32_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

static void tci_write_reg8(tcg_target_ulong *regs, TCGReg index, uint8_t value)
{
    tci_write_reg(regs, index, value);
}

static void
tci_write_reg32(tcg_target_ulong *regs, TCGReg index, uint32_t value)
{
    tci_write_reg(regs, index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void
tci_write_reg64(tcg_target_ulong *regs, TCGReg index, uint64_t value)
{
    tci_write_reg(regs, index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif

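/*
 * The helpers below decode operands from the TCI bytecode stream.  Each
 * instruction starts with an opcode byte and a total-length byte (see the
 * interpreter loop); register operands are single index bytes, constants
 * are stored inline in host byte order.
 *
 * Illustrative example (not a dump of real output): an "add_i32 r2, r0, r1"
 * operation would be encoded roughly as the bytes
 *     INDEX_op_add_i32, <length>, 2, 0, 1
 * i.e. opcode, length, destination register index, then two
 * register-or-constant operands that here are plain register indices.
 */
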
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(regs, tb_ptr);
    return tci_uint64(tci_read_r32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}

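/*
 * "ri" operands hold either a register index or the reserved index
 * TCG_CONST; in the latter case the constant value itself follows inline
 * in the bytecode.
 */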
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong
tci_read_ri(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(regs, r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(regs, r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(regs, tb_ptr);
    return tci_uint64(tci_read_ri32(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(const tcg_target_ulong *regs, uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(regs, r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}

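/*
 * Evaluate a TCG comparison condition; i0/i1 are the signed views of the
 * unsigned operands u0/u1, so signed and unsigned conditions share one
 * helper.
 */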
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

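/*
 * Guest memory access macros: with CONFIG_SOFTMMU they go through the TCG
 * MMU helpers (which perform the TLB lookup); in user mode the guest
 * address is translated directly with g2h() and accessed with the
 * unaligned load/store helpers.  They expect the local variables taddr
 * (and, for softmmu, env, oi and tb_ptr) to be in scope at the expansion
 * site.
 */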
#ifdef CONFIG_SOFTMMU
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, oi, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, oi, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif

/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

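    /*
     * Dispatch loop: each iteration decodes one TCI instruction (opcode
     * byte, length byte, operands) and executes it.  Branch opcodes reload
     * tb_ptr and "continue"; INDEX_op_exit_tb leaves the loop via "goto exit".
     */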
    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
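        /*
         * call: the helper address is a register-or-constant operand;
         * arguments are read from a fixed list of registers (two slots per
         * 64-bit value on 32-bit hosts) and the 64-bit result is written
         * back to R0 (R0/R1 pair on 32-bit hosts).
         */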
        case INDEX_op_call:
            t0 = tci_read_ri(regs, &tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11),
                                          tci_read_reg(regs, TCG_REG_R12));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(regs, &tb_ptr);
            t2 = tci_read_ri32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
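        /*
         * deposit dst, t1, t2, pos, len: insert the low "len" bits of t2
         * into t1 at bit position "pos"; tmp32/tmp64 below is the field
         * mask built from the two immediate bytes.
         */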
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_ri32(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(regs, &tb_ptr);
            tmp64 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg32(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(regs, &tb_ptr);
            t2 = tci_read_ri64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_ri64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
            break;
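        /*
         * goto_tb: the 32-bit jump offset is stored 4-byte aligned and is
         * read atomically, since it can be patched when translation blocks
         * are chained.
         */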
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = atomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
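        /*
         * Guest memory accesses: the operand is a guest address (one or
         * two registers, see tci_read_ulong) followed by a TCGMemOpIdx
         * selecting size, signedness and endianness.
         */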
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            TODO();
            break;
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}