/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "tcg.h"
#include "exec/helper-proto.h"

int gen_new_label(void);
static inline void tcg_gen_op0(TCGOpcode opc)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
}

static inline void tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
}

static inline void tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
}

static inline void tcg_gen_op1i(TCGOpcode opc, TCGArg arg1)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = arg1;
}

static inline void tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
}

static inline void tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
}

static inline void tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}

static inline void tcg_gen_op2ii(TCGOpcode opc, TCGArg arg1, TCGArg arg2)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = arg1;
    *tcg_ctx.gen_opparam_ptr++ = arg2;
}
static inline void tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
}

static inline void tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
}

static inline void tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 arg1,
                                    TCGv_i32 arg2, TCGArg arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
}

static inline void tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 arg1,
                                    TCGv_i64 arg2, TCGArg arg3)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
}

static inline void tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *tcg_ctx.gen_opparam_ptr++ = offset;
}

static inline void tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                                       TCGv_ptr base, TCGArg offset)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_PTR(base);
    *tcg_ctx.gen_opparam_ptr++ = offset;
}
static inline void tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
}

static inline void tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
}

static inline void tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                     TCGArg arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}

static inline void tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                     TCGArg arg3, TCGArg arg4)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = arg3;
    *tcg_ctx.gen_opparam_ptr++ = arg4;
}
static inline void tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
}

static inline void tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
}

static inline void tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGv_i32 arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGv_i64 arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 arg1,
                                     TCGv_i32 arg2, TCGv_i32 arg3,
                                     TCGArg arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}

static inline void tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 arg1,
                                     TCGv_i64 arg2, TCGv_i64 arg3,
                                     TCGArg arg4, TCGArg arg5)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = arg4;
    *tcg_ctx.gen_opparam_ptr++ = arg5;
}
static inline void tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                   TCGv_i32 arg3, TCGv_i32 arg4, TCGv_i32 arg5,
                                   TCGv_i32 arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg6);
}

static inline void tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                   TCGv_i64 arg3, TCGv_i64 arg4, TCGv_i64 arg5,
                                   TCGv_i64 arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg6);
}

static inline void tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 arg1, TCGv_i32 arg2,
                                    TCGv_i32 arg3, TCGv_i32 arg4,
                                    TCGv_i32 arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg5);
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 arg1, TCGv_i64 arg2,
                                    TCGv_i64 arg3, TCGv_i64 arg4,
                                    TCGv_i64 arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg5);
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 arg1,
                                     TCGv_i32 arg2, TCGv_i32 arg3,
                                     TCGv_i32 arg4, TCGArg arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}

static inline void tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 arg1,
                                     TCGv_i64 arg2, TCGv_i64 arg3,
                                     TCGv_i64 arg4, TCGArg arg5, TCGArg arg6)
{
    *tcg_ctx.gen_opc_ptr++ = opc;
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg1);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg2);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg3);
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(arg4);
    *tcg_ctx.gen_opparam_ptr++ = arg5;
    *tcg_ctx.gen_opparam_ptr++ = arg6;
}
static inline void tcg_add_param_i32(TCGv_i32 val)
{
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(val);
}

static inline void tcg_add_param_i64(TCGv_i64 val)
{
#if TCG_TARGET_REG_BITS == 32
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_LOW(val));
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I32(TCGV_HIGH(val));
#else
    *tcg_ctx.gen_opparam_ptr++ = GET_TCGV_I64(val);
#endif
}

static inline void gen_set_label(int n)
{
    tcg_gen_op1i(INDEX_op_set_label, n);
}

static inline void tcg_gen_br(int label)
{
    tcg_gen_op1i(INDEX_op_br, label);
}
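/*
 * Usage sketch (illustrative only, assuming a live TCGv_i32 temporary r0):
 * labels obtained from gen_new_label() are consumed by the brcond/br ops
 * and later resolved by gen_set_label().
 *
 *     int skip = gen_new_label();
 *     tcg_gen_brcondi_i32(TCG_COND_EQ, r0, 0, skip);  // skip the add if r0 == 0
 *     tcg_gen_addi_i32(r0, r0, 1);
 *     gen_set_label(skip);                            // branch target
 */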
static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (!TCGV_EQUAL_I32(ret, arg))
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
}

static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg);
}

/* A version of dh_sizemask from def-helper.h that doesn't rely on
   preprocessor magic.  */
static inline int tcg_gen_sizemask(int n, int is_64bit, int is_signed)
{
    return (is_64bit << n*2) | (is_signed << (n*2 + 1));
}
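/*
 * Example: for a helper of type int32_t f(int32_t, int32_t), where the
 * return value (slot 0) and both arguments (slots 1 and 2) are 32-bit
 * and signed, the mask is built exactly as the div/rem fallbacks below do:
 *
 *     int sizemask = 0;
 *     sizemask |= tcg_gen_sizemask(0, 0, 1);   // return value
 *     sizemask |= tcg_gen_sizemask(1, 0, 1);   // first argument
 *     sizemask |= tcg_gen_sizemask(2, 0, 1);   // second argument
 */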
/* helper calls */
static inline void tcg_gen_helperN(void *func, int flags, int sizemask,
                                   TCGArg ret, int nargs, TCGArg *args)
{
    tcg_gen_callN(&tcg_ctx, func, flags, sizemask, ret, nargs, args);
}

/* Note: Both tcg_gen_helper32() and tcg_gen_helper64() are currently
   reserved for helpers in tcg-runtime.c.  These helpers all do not read
   globals and do not have side effects, hence the call to tcg_gen_callN()
   with TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS.  This may need
   to be adjusted if these functions start to be used with other helpers. */
static inline void tcg_gen_helper32(void *func, int sizemask, TCGv_i32 ret,
                                    TCGv_i32 a, TCGv_i32 b)
{
    TCGArg args[2];
    args[0] = GET_TCGV_I32(a);
    args[1] = GET_TCGV_I32(b);
    tcg_gen_callN(&tcg_ctx, func,
                  TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS,
                  sizemask, GET_TCGV_I32(ret), 2, args);
}

static inline void tcg_gen_helper64(void *func, int sizemask, TCGv_i64 ret,
                                    TCGv_i64 a, TCGv_i64 b)
{
    TCGArg args[2];
    args[0] = GET_TCGV_I64(a);
    args[1] = GET_TCGV_I64(b);
    tcg_gen_callN(&tcg_ctx, func,
                  TCG_CALL_NO_READ_GLOBALS | TCG_CALL_NO_SIDE_EFFECTS,
                  sizemask, GET_TCGV_I64(ret), 2, args);
}
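/*
 * Illustrative sketch: a two-argument helper that does read globals or has
 * side effects should go through tcg_gen_helperN() with explicit flags
 * rather than the two wrappers above.  helper_foo_i32 is a hypothetical
 * name used only for this example.
 *
 *     TCGArg args[2] = { GET_TCGV_I32(a), GET_TCGV_I32(b) };
 *     int sizemask = 0;
 *     sizemask |= tcg_gen_sizemask(0, 0, 1);
 *     sizemask |= tcg_gen_sizemask(1, 0, 1);
 *     sizemask |= tcg_gen_sizemask(2, 0, 1);
 *     tcg_gen_helperN(helper_foo_i32, 0, sizemask,
 *                     GET_TCGV_I32(ret), 2, args);
 */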
/* 32 bit ops */

static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}

static inline void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}

static inline void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}

static inline void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}

static inline void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}
static inline void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

static inline void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_add_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}

static inline void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg1);
    tcg_gen_sub_i32(ret, t0, arg2);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}

static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCGV_EQUAL_I32(arg1, arg2)) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
    }
}

static inline void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_xor_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
static inline void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_shr_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_sar_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
static inline void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1,
                                      TCGv_i32 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_index);
    }
}

static inline void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1,
                                       int32_t arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_brcond_i32(cond, arg1, t0, label_index);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                                       TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

static inline void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                                        TCGv_i32 arg1, int32_t arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_setcond_i32(cond, ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}

static inline void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    TCGv_i32 t0 = tcg_const_i32(arg2);
    tcg_gen_mul_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
static inline void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 0, 1);
        sizemask |= tcg_gen_sizemask(1, 0, 1);
        sizemask |= tcg_gen_sizemask(2, 0, 1);
        tcg_gen_helper32(helper_div_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 0, 1);
        sizemask |= tcg_gen_sizemask(1, 0, 1);
        sizemask |= tcg_gen_sizemask(2, 0, 1);
        tcg_gen_helper32(helper_rem_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(helper_divu_i32, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 32-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 0, 0);
        sizemask |= tcg_gen_sizemask(1, 0, 0);
        sizemask |= tcg_gen_sizemask(2, 0, 0);
        tcg_gen_helper32(helper_remu_i32, sizemask, ret, arg1, arg2);
    }
}
#if TCG_TARGET_REG_BITS == 32

static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (!TCGV_EQUAL_I64(ret, arg)) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    /* Sign-extend the high half from the loaded low half.  */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
    /* since arg2 and ret have different types, they cannot be the
       same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                  tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_add2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace add2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}

static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op6_i32(INDEX_op_sub2_i32, TCGV_LOW(ret), TCGV_HIGH(ret),
                    TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                    TCGV_HIGH(arg2));
    /* Allow the optimizer room to replace sub2 with two moves.  */
    tcg_gen_op0(INDEX_op_nop);
}

static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}

static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
    tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
}
/* XXX: use generic code when basic block handling is OK or CPU
   specific code (x86) */
static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(helper_shl_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
}

static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(helper_shr_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
}

static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(helper_sar_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
}
static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
                                      TCGv_i64 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1), TCGV_LOW(arg2),
                          TCGV_HIGH(arg2), cond, label_index);
    }
}

static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(TCGV_LOW(ret), 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(TCGV_LOW(ret), 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
    }
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, TCGV_LOW(t0), TCGV_HIGH(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        tcg_debug_assert(TCG_TARGET_HAS_muluh_i32);
        tcg_gen_op3_i32(INDEX_op_mul_i32, TCGV_LOW(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_op3_i32(INDEX_op_muluh_i32, TCGV_HIGH(t0),
                        TCGV_LOW(arg1), TCGV_LOW(arg2));
    }

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(helper_div_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and signed.  */
    sizemask |= tcg_gen_sizemask(0, 1, 1);
    sizemask |= tcg_gen_sizemask(1, 1, 1);
    sizemask |= tcg_gen_sizemask(2, 1, 1);

    tcg_gen_helper64(helper_rem_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and unsigned.  */
    sizemask |= tcg_gen_sizemask(0, 1, 0);
    sizemask |= tcg_gen_sizemask(1, 1, 0);
    sizemask |= tcg_gen_sizemask(2, 1, 0);

    tcg_gen_helper64(helper_divu_i64, sizemask, ret, arg1, arg2);
}

static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    int sizemask = 0;
    /* Return value and both arguments are 64-bit and unsigned.  */
    sizemask |= tcg_gen_sizemask(0, 1, 0);
    sizemask |= tcg_gen_sizemask(1, 1, 0);
    sizemask |= tcg_gen_sizemask(2, 1, 0);

    tcg_gen_helper64(helper_remu_i64, sizemask, ret, arg1, arg2);
}
#else

static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (!TCGV_EQUAL_I64(ret, arg))
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
}

static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg);
}

static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
}

static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
}

static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                   tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
                                    tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
}

static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
}
static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
}

static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
}

static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    }
}

static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    }
}

static inline void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCGV_EQUAL_I64(arg1, arg2)) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    }
}

static inline void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
}

static inline void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
}

static inline void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_shr_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
}

static inline void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sar_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1,
                                      TCGv_i64 arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond, label_index);
    }
}

static inline void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
    }
}

static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
}
static inline void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 1, 1);
        sizemask |= tcg_gen_sizemask(1, 1, 1);
        sizemask |= tcg_gen_sizemask(2, 1, 1);
        tcg_gen_helper64(helper_div_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 1, 1);
        sizemask |= tcg_gen_sizemask(1, 1, 1);
        sizemask |= tcg_gen_sizemask(2, 1, 1);
        tcg_gen_helper64(helper_rem_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 1, 0);
        sizemask |= tcg_gen_sizemask(1, 1, 0);
        sizemask |= tcg_gen_sizemask(2, 1, 0);
        tcg_gen_helper64(helper_divu_i64, sizemask, ret, arg1, arg2);
    }
}

static inline void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 1, 0);
        sizemask |= tcg_gen_sizemask(1, 1, 0);
        sizemask |= tcg_gen_sizemask(2, 1, 0);
        tcg_gen_helper64(helper_remu_i64, sizemask, ret, arg1, arg2);
    }
}

#endif /* TCG_TARGET_REG_BITS == 32 */
static inline void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_add_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg1);
    tcg_gen_sub_i64(ret, t0, arg2);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1,
                                       int64_t arg2, int label_index)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(label_index);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, label_index);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                                        TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_setcond_i64(cond, ret, arg1, t0);
    tcg_temp_free_i64(t0);
}

static inline void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    TCGv_i64 t0 = tcg_const_i64(arg2);
    tcg_gen_mul_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}


/***************************************/
/* optional operations */
static inline void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

static inline void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

static inline void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

static inline void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
static inline void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_shli_i32(t0, arg, 24);

        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
#if TCG_TARGET_REG_BITS == 32
static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_trunc_shr_i64_i32(TCGv_i32 ret, TCGv_i64 arg,
                                             unsigned int count)
{
    tcg_debug_assert(count < 64);
    if (count >= 32) {
        tcg_gen_shri_i32(ret, TCGV_HIGH(arg), count - 32);
    } else if (count == 0) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, count);
        tcg_gen_mov_i32(ret, TCGV_LOW(t));
        tcg_temp_free_i64(t);
    }
}

static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), arg);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Note: we assume the six high bytes are set to zero */
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
}

/* Note: we assume the four high bytes are set to zero */
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
}

static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGv_i32 t0, t1;
    t0 = tcg_temp_new_i32();
    t1 = tcg_temp_new_i32();

    tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
    tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
    tcg_gen_mov_i32(TCGV_LOW(ret), t1);
    tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}
#else

static inline void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

static inline void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

static inline void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

static inline void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

static inline void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

static inline void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

static inline void tcg_gen_trunc_shr_i64_i32(TCGv_i32 ret, TCGv_i64 arg,
                                             unsigned int count)
{
    tcg_debug_assert(count < 64);
    if (TCG_TARGET_HAS_trunc_shr_i32) {
        tcg_gen_op3i_i32(INDEX_op_trunc_shr_i32, ret,
                         MAKE_TCGV_I32(GET_TCGV_I64(arg)), count);
    } else if (count == 0) {
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    } else {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, count);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}

/* Note: we assume the target supports move between 32 and 64 bit
   registers */
static inline void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_ext32u_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}

/* Note: we assume the target supports move between 32 and 64 bit
   registers */
static inline void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    tcg_gen_ext32s_i64(ret, MAKE_TCGV_I64(GET_TCGV_I32(arg)));
}

/* Note: we assume the six high bytes are set to zero */
static inline void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
static inline void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shli_i64(t0, arg, 56);

        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

#endif
static inline void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_neg_i32) {
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_const_i32(0);
        tcg_gen_sub_i32(ret, t0, arg);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_neg_i64) {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_const_i64(0);
        tcg_gen_sub_i64(ret, t0, arg);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
    } else {
        tcg_gen_xori_i32(ret, arg, -1);
    }
}

static inline void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
#else
    tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
#endif
}

static inline void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, arg);
}

static inline void tcg_gen_discard_i64(TCGv_i64 arg)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
#else
    tcg_gen_op1_i64(INDEX_op_discard, arg);
#endif
}
1910 static inline void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1912 if (TCG_TARGET_HAS_andc_i32) {
1913 tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
1915 TCGv_i32 t0 = tcg_temp_new_i32();
1916 tcg_gen_not_i32(t0, arg2);
1917 tcg_gen_and_i32(ret, arg1, t0);
1918 tcg_temp_free_i32(t0);
1922 static inline void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1924 #if TCG_TARGET_REG_BITS == 64
1925 if (TCG_TARGET_HAS_andc_i64) {
1926 tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1928 TCGv_i64 t0 = tcg_temp_new_i64();
1929 tcg_gen_not_i64(t0, arg2);
1930 tcg_gen_and_i64(ret, arg1, t0);
1931 tcg_temp_free_i64(t0);
1934 tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1935 tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1939 static inline void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1941 if (TCG_TARGET_HAS_eqv_i32) {
1942 tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
1944 tcg_gen_xor_i32(ret, arg1, arg2);
1945 tcg_gen_not_i32(ret, ret);
1949 static inline void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1951 #if TCG_TARGET_REG_BITS == 64
1952 if (TCG_TARGET_HAS_eqv_i64) {
1953 tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1955 tcg_gen_xor_i64(ret, arg1, arg2);
1956 tcg_gen_not_i64(ret, ret);
1959 tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1960 tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1964 static inline void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
1966 if (TCG_TARGET_HAS_nand_i32) {
1967 tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
1969 tcg_gen_and_i32(ret, arg1, arg2);
1970 tcg_gen_not_i32(ret, ret);
1974 static inline void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1976 #if TCG_TARGET_REG_BITS == 64
1977 if (TCG_TARGET_HAS_nand_i64) {
1978 tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1980 tcg_gen_and_i64(ret, arg1, arg2);
1981 tcg_gen_not_i64(ret, ret);
1984 tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1985 tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
static inline void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

static inline void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
#else
    tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

static inline void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

static inline void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
#if TCG_TARGET_REG_BITS == 64
    if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
#else
    tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
#endif
}

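/* Rotates.  Without a native rotate opcode, rotl(x, n) is expanded as
   (x << n) | (x >> (width - n)); the immediate variants special-case
   n == 0, since the complementary shift by the full word width is not
   defined. */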
static inline void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;

        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        TCGv_i32 t0 = tcg_const_i32(arg2);
        tcg_gen_rotl_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;

        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

static inline void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

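/* Deposit: replace the len-bit field of arg1 starting at bit ofs with the
   low len bits of arg2, e.g. deposit(0xAABBCCDD, 0x11, 8, 8) == 0xAABB11DD.
   Without a suitable deposit opcode this is expanded with shift/and/or. */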
static inline void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1,
                                       TCGv_i32 arg2, unsigned int ofs,
                                       unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}

static inline void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                       TCGv_i64 arg2, unsigned int ofs,
                                       unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

#if TCG_TARGET_REG_BITS == 32
    if (ofs >= 32) {
        tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                            TCGV_LOW(arg2), ofs - 32, len);
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        return;
    }
    if (ofs + len <= 32) {
        tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                            TCGV_LOW(arg2), ofs, len);
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        return;
    }
#endif

    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}

static inline void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low,
                                          TCGv_i32 high)
{
#if TCG_TARGET_REG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(dest), low);
    tcg_gen_mov_i32(TCGV_HIGH(dest), high);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
#endif
}

static inline void tcg_gen_concat32_i64(TCGv_i64 dest, TCGv_i64 low,
                                        TCGv_i64 high)
{
    tcg_gen_deposit_i64(dest, low, high, 32, 32);
}

static inline void tcg_gen_trunc_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    tcg_gen_trunc_shr_i64_i32(ret, arg, 0);
}

static inline void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    tcg_gen_trunc_shr_i64_i32(lo, arg, 0);
    tcg_gen_trunc_shr_i64_i32(hi, arg, 32);
}

static inline void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

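/* Conditional move: ret = (c1 cond c2) ? v1 : v2.  The fallback turns the
   setcond result into an all-ones/all-zeroes mask with neg and blends the
   two values using and/andc/or. */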
static inline void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret,
                                       TCGv_i32 c1, TCGv_i32 c2,
                                       TCGv_i32 v1, TCGv_i32 v2)
{
    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret,
                                       TCGv_i64 c1, TCGv_i64 c2,
                                       TCGv_i64 v1, TCGv_i64 v2)
{
#if TCG_TARGET_REG_BITS == 32
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                     TCGV_LOW(c1), TCGV_HIGH(c1),
                     TCGV_LOW(c2), TCGV_HIGH(c2), cond);

    if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_movi_i32(t1, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                            TCGV_LOW(v1), TCGV_LOW(v2));
        tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                            TCGV_HIGH(v1), TCGV_HIGH(v2));
    } else {
        tcg_gen_neg_i32(t0, t0);

        tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

        tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
#else
    if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
#endif
}

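/* Double-word arithmetic: the generators below produce a two-word result
   in (rl, rh).  When the backend lacks the fused opcode, the i32 versions
   fall back to widening the operands to 64 bits, while the i64 versions
   compute the carry/high part explicitly or call a runtime helper.  The
   nop emitted after the fused opcode leaves room for the optimizer to
   rewrite it as two moves. */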
static inline void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                                    TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
        /* Allow the optimizer room to replace add2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                                    TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
        /* Allow the optimizer room to replace sub2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh,
                                     TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh,
                                     TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
        /* Allow the optimizer room to replace muls2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                                    TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
        /* Allow the optimizer room to replace add2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                                    TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
        /* Allow the optimizer room to replace sub2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

static inline void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh,
                                     TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
        /* Allow the optimizer room to replace mulu2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and unsigned.  */
        sizemask |= tcg_gen_sizemask(0, 1, 0);
        sizemask |= tcg_gen_sizemask(1, 1, 0);
        sizemask |= tcg_gen_sizemask(2, 1, 0);
        tcg_gen_mul_i64(t0, arg1, arg2);
        tcg_gen_helper64(helper_muluh_i64, sizemask, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

static inline void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh,
                                     TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
        /* Allow the optimizer room to replace muls2 with two moves.  */
        tcg_gen_op0(INDEX_op_nop);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        int sizemask = 0;
        /* Return value and both arguments are 64-bit and signed.  */
        sizemask |= tcg_gen_sizemask(0, 1, 1);
        sizemask |= tcg_gen_sizemask(1, 1, 1);
        sizemask |= tcg_gen_sizemask(2, 1, 1);
        tcg_gen_mul_i64(t0, arg1, arg2);
        tcg_gen_helper64(helper_mulsh_i64, sizemask, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

/***************************************/
/* QEMU specific operations.  Their types depend on the QEMU CPU type.  */

#ifndef TARGET_LONG_BITS
#error must include QEMU headers
#endif

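/* The TCGv type and the helpers below are sized to the guest's natural
   register width: for a 32-bit guest they alias the _i32 variants, for a
   64-bit guest the _i64 variants. */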
#if TARGET_LONG_BITS == 32
#define TCGv TCGv_i32
#define tcg_temp_new() tcg_temp_new_i32()
#define tcg_global_reg_new tcg_global_reg_new_i32
#define tcg_global_mem_new tcg_global_mem_new_i32
#define tcg_temp_local_new() tcg_temp_local_new_i32()
#define tcg_temp_free tcg_temp_free_i32
#define TCGV_UNUSED(x) TCGV_UNUSED_I32(x)
#define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I32(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I32(a, b)
#define tcg_add_param_tl tcg_add_param_i32
#define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i32
#define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i32
#else
#define TCGv TCGv_i64
#define tcg_temp_new() tcg_temp_new_i64()
#define tcg_global_reg_new tcg_global_reg_new_i64
#define tcg_global_mem_new tcg_global_mem_new_i64
#define tcg_temp_local_new() tcg_temp_local_new_i64()
#define tcg_temp_free tcg_temp_free_i64
#define TCGV_UNUSED(x) TCGV_UNUSED_I64(x)
#define TCGV_IS_UNUSED(x) TCGV_IS_UNUSED_I64(x)
#define TCGV_EQUAL(a, b) TCGV_EQUAL_I64(a, b)
#define tcg_add_param_tl tcg_add_param_i64
#define tcg_gen_qemu_ld_tl tcg_gen_qemu_ld_i64
#define tcg_gen_qemu_st_tl tcg_gen_qemu_st_i64
#endif

/* debug info: write the PC of the corresponding QEMU CPU instruction */
static inline void tcg_gen_debug_insn_start(uint64_t pc)
{
    /* XXX: must really use a 32 bit size for TCGArg in all cases */
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    tcg_gen_op2ii(INDEX_op_debug_insn_start,
                  (uint32_t)(pc), (uint32_t)(pc >> 32));
#else
    tcg_gen_op1i(INDEX_op_debug_insn_start, pc);
#endif
}

static inline void tcg_gen_exit_tb(uintptr_t val)
{
    tcg_gen_op1i(INDEX_op_exit_tb, val);
}

static inline void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

void tcg_gen_qemu_ld_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_st_i32(TCGv_i32, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_ld_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);
void tcg_gen_qemu_st_i64(TCGv_i64, TCGv, TCGArg, TCGMemOp);

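/* Legacy guest load/store helpers: each wrapper below forwards to the
   TCGMemOp based interface above, with MO_TE* selecting the guest's
   endianness and the size/signedness encoded in the old-style names. */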
static inline void tcg_gen_qemu_ld8u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_UB);
}

static inline void tcg_gen_qemu_ld8s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_SB);
}

static inline void tcg_gen_qemu_ld16u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TEUW);
}

static inline void tcg_gen_qemu_ld16s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TESW);
}

static inline void tcg_gen_qemu_ld32u(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TEUL);
}

static inline void tcg_gen_qemu_ld32s(TCGv ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_tl(ret, addr, mem_index, MO_TESL);
}

static inline void tcg_gen_qemu_ld64(TCGv_i64 ret, TCGv addr, int mem_index)
{
    tcg_gen_qemu_ld_i64(ret, addr, mem_index, MO_TEQ);
}

static inline void tcg_gen_qemu_st8(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_UB);
}

static inline void tcg_gen_qemu_st16(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_TEUW);
}

static inline void tcg_gen_qemu_st32(TCGv arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_st_tl(arg, addr, mem_index, MO_TEUL);
}

static inline void tcg_gen_qemu_st64(TCGv_i64 arg, TCGv addr, int mem_index)
{
    tcg_gen_qemu_st_i64(arg, addr, mem_index, MO_TEQ);
}

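/* Map the *_tl op names onto the i32 or i64 implementations according to
   TARGET_LONG_BITS, so front ends can emit ops on target-long values
   without duplicating code for 32-bit and 64-bit guests. */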
#if TARGET_LONG_BITS == 64
#define tcg_gen_movi_tl tcg_gen_movi_i64
#define tcg_gen_mov_tl tcg_gen_mov_i64
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i64
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i64
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i64
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i64
#define tcg_gen_ld32u_tl tcg_gen_ld32u_i64
#define tcg_gen_ld32s_tl tcg_gen_ld32s_i64
#define tcg_gen_ld_tl tcg_gen_ld_i64
#define tcg_gen_st8_tl tcg_gen_st8_i64
#define tcg_gen_st16_tl tcg_gen_st16_i64
#define tcg_gen_st32_tl tcg_gen_st32_i64
#define tcg_gen_st_tl tcg_gen_st_i64
#define tcg_gen_add_tl tcg_gen_add_i64
#define tcg_gen_addi_tl tcg_gen_addi_i64
#define tcg_gen_sub_tl tcg_gen_sub_i64
#define tcg_gen_neg_tl tcg_gen_neg_i64
#define tcg_gen_subfi_tl tcg_gen_subfi_i64
#define tcg_gen_subi_tl tcg_gen_subi_i64
#define tcg_gen_and_tl tcg_gen_and_i64
#define tcg_gen_andi_tl tcg_gen_andi_i64
#define tcg_gen_or_tl tcg_gen_or_i64
#define tcg_gen_ori_tl tcg_gen_ori_i64
#define tcg_gen_xor_tl tcg_gen_xor_i64
#define tcg_gen_xori_tl tcg_gen_xori_i64
#define tcg_gen_not_tl tcg_gen_not_i64
#define tcg_gen_shl_tl tcg_gen_shl_i64
#define tcg_gen_shli_tl tcg_gen_shli_i64
#define tcg_gen_shr_tl tcg_gen_shr_i64
#define tcg_gen_shri_tl tcg_gen_shri_i64
#define tcg_gen_sar_tl tcg_gen_sar_i64
#define tcg_gen_sari_tl tcg_gen_sari_i64
#define tcg_gen_brcond_tl tcg_gen_brcond_i64
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i64
#define tcg_gen_setcond_tl tcg_gen_setcond_i64
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i64
#define tcg_gen_mul_tl tcg_gen_mul_i64
#define tcg_gen_muli_tl tcg_gen_muli_i64
#define tcg_gen_div_tl tcg_gen_div_i64
#define tcg_gen_rem_tl tcg_gen_rem_i64
#define tcg_gen_divu_tl tcg_gen_divu_i64
#define tcg_gen_remu_tl tcg_gen_remu_i64
#define tcg_gen_discard_tl tcg_gen_discard_i64
#define tcg_gen_trunc_tl_i32 tcg_gen_trunc_i64_i32
#define tcg_gen_trunc_i64_tl tcg_gen_mov_i64
#define tcg_gen_extu_i32_tl tcg_gen_extu_i32_i64
#define tcg_gen_ext_i32_tl tcg_gen_ext_i32_i64
#define tcg_gen_extu_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext_tl_i64 tcg_gen_mov_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i64
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i64
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i64
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i64
#define tcg_gen_ext32u_tl tcg_gen_ext32u_i64
#define tcg_gen_ext32s_tl tcg_gen_ext32s_i64
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i64
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i64
#define tcg_gen_bswap64_tl tcg_gen_bswap64_i64
#define tcg_gen_concat_tl_i64 tcg_gen_concat32_i64
#define tcg_gen_extr_i64_tl tcg_gen_extr32_i64
#define tcg_gen_andc_tl tcg_gen_andc_i64
#define tcg_gen_eqv_tl tcg_gen_eqv_i64
#define tcg_gen_nand_tl tcg_gen_nand_i64
#define tcg_gen_nor_tl tcg_gen_nor_i64
#define tcg_gen_orc_tl tcg_gen_orc_i64
#define tcg_gen_rotl_tl tcg_gen_rotl_i64
#define tcg_gen_rotli_tl tcg_gen_rotli_i64
#define tcg_gen_rotr_tl tcg_gen_rotr_i64
#define tcg_gen_rotri_tl tcg_gen_rotri_i64
#define tcg_gen_deposit_tl tcg_gen_deposit_i64
#define tcg_const_tl tcg_const_i64
#define tcg_const_local_tl tcg_const_local_i64
#define tcg_gen_movcond_tl tcg_gen_movcond_i64
#define tcg_gen_add2_tl tcg_gen_add2_i64
#define tcg_gen_sub2_tl tcg_gen_sub2_i64
#define tcg_gen_mulu2_tl tcg_gen_mulu2_i64
#define tcg_gen_muls2_tl tcg_gen_muls2_i64
#else
#define tcg_gen_movi_tl tcg_gen_movi_i32
#define tcg_gen_mov_tl tcg_gen_mov_i32
#define tcg_gen_ld8u_tl tcg_gen_ld8u_i32
#define tcg_gen_ld8s_tl tcg_gen_ld8s_i32
#define tcg_gen_ld16u_tl tcg_gen_ld16u_i32
#define tcg_gen_ld16s_tl tcg_gen_ld16s_i32
#define tcg_gen_ld32u_tl tcg_gen_ld_i32
#define tcg_gen_ld32s_tl tcg_gen_ld_i32
#define tcg_gen_ld_tl tcg_gen_ld_i32
#define tcg_gen_st8_tl tcg_gen_st8_i32
#define tcg_gen_st16_tl tcg_gen_st16_i32
#define tcg_gen_st32_tl tcg_gen_st_i32
#define tcg_gen_st_tl tcg_gen_st_i32
#define tcg_gen_add_tl tcg_gen_add_i32
#define tcg_gen_addi_tl tcg_gen_addi_i32
#define tcg_gen_sub_tl tcg_gen_sub_i32
#define tcg_gen_neg_tl tcg_gen_neg_i32
#define tcg_gen_subfi_tl tcg_gen_subfi_i32
#define tcg_gen_subi_tl tcg_gen_subi_i32
#define tcg_gen_and_tl tcg_gen_and_i32
#define tcg_gen_andi_tl tcg_gen_andi_i32
#define tcg_gen_or_tl tcg_gen_or_i32
#define tcg_gen_ori_tl tcg_gen_ori_i32
#define tcg_gen_xor_tl tcg_gen_xor_i32
#define tcg_gen_xori_tl tcg_gen_xori_i32
#define tcg_gen_not_tl tcg_gen_not_i32
#define tcg_gen_shl_tl tcg_gen_shl_i32
#define tcg_gen_shli_tl tcg_gen_shli_i32
#define tcg_gen_shr_tl tcg_gen_shr_i32
#define tcg_gen_shri_tl tcg_gen_shri_i32
#define tcg_gen_sar_tl tcg_gen_sar_i32
#define tcg_gen_sari_tl tcg_gen_sari_i32
#define tcg_gen_brcond_tl tcg_gen_brcond_i32
#define tcg_gen_brcondi_tl tcg_gen_brcondi_i32
#define tcg_gen_setcond_tl tcg_gen_setcond_i32
#define tcg_gen_setcondi_tl tcg_gen_setcondi_i32
#define tcg_gen_mul_tl tcg_gen_mul_i32
#define tcg_gen_muli_tl tcg_gen_muli_i32
#define tcg_gen_div_tl tcg_gen_div_i32
#define tcg_gen_rem_tl tcg_gen_rem_i32
#define tcg_gen_divu_tl tcg_gen_divu_i32
#define tcg_gen_remu_tl tcg_gen_remu_i32
#define tcg_gen_discard_tl tcg_gen_discard_i32
#define tcg_gen_trunc_tl_i32 tcg_gen_mov_i32
#define tcg_gen_trunc_i64_tl tcg_gen_trunc_i64_i32
#define tcg_gen_extu_i32_tl tcg_gen_mov_i32
#define tcg_gen_ext_i32_tl tcg_gen_mov_i32
#define tcg_gen_extu_tl_i64 tcg_gen_extu_i32_i64
#define tcg_gen_ext_tl_i64 tcg_gen_ext_i32_i64
#define tcg_gen_ext8u_tl tcg_gen_ext8u_i32
#define tcg_gen_ext8s_tl tcg_gen_ext8s_i32
#define tcg_gen_ext16u_tl tcg_gen_ext16u_i32
#define tcg_gen_ext16s_tl tcg_gen_ext16s_i32
#define tcg_gen_ext32u_tl tcg_gen_mov_i32
#define tcg_gen_ext32s_tl tcg_gen_mov_i32
#define tcg_gen_bswap16_tl tcg_gen_bswap16_i32
#define tcg_gen_bswap32_tl tcg_gen_bswap32_i32
#define tcg_gen_concat_tl_i64 tcg_gen_concat_i32_i64
#define tcg_gen_extr_i64_tl tcg_gen_extr_i64_i32
#define tcg_gen_andc_tl tcg_gen_andc_i32
#define tcg_gen_eqv_tl tcg_gen_eqv_i32
#define tcg_gen_nand_tl tcg_gen_nand_i32
#define tcg_gen_nor_tl tcg_gen_nor_i32
#define tcg_gen_orc_tl tcg_gen_orc_i32
#define tcg_gen_rotl_tl tcg_gen_rotl_i32
#define tcg_gen_rotli_tl tcg_gen_rotli_i32
#define tcg_gen_rotr_tl tcg_gen_rotr_i32
#define tcg_gen_rotri_tl tcg_gen_rotri_i32
#define tcg_gen_deposit_tl tcg_gen_deposit_i32
#define tcg_const_tl tcg_const_i32
#define tcg_const_local_tl tcg_const_local_i32
#define tcg_gen_movcond_tl tcg_gen_movcond_i32
#define tcg_gen_add2_tl tcg_gen_add2_i32
#define tcg_gen_sub2_tl tcg_gen_sub2_i32
#define tcg_gen_mulu2_tl tcg_gen_mulu2_i32
#define tcg_gen_muls2_tl tcg_gen_muls2_i32
#endif

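/* Pointer-sized operations: host pointers are handled with the i32 or i64
   ops depending on the host word size, with TCGV_PTR_TO_NAT converting a
   TCGv_ptr to its native-width representation. */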
#if UINTPTR_MAX == UINT32_MAX
# define tcg_gen_ld_ptr(R, A, O) \
    tcg_gen_ld_i32(TCGV_PTR_TO_NAT(R), (A), (O))
# define tcg_gen_discard_ptr(A) \
    tcg_gen_discard_i32(TCGV_PTR_TO_NAT(A))
# define tcg_gen_add_ptr(R, A, B) \
    tcg_gen_add_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
# define tcg_gen_addi_ptr(R, A, B) \
    tcg_gen_addi_i32(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
# define tcg_gen_ext_i32_ptr(R, A) \
    tcg_gen_mov_i32(TCGV_PTR_TO_NAT(R), (A))
#else
# define tcg_gen_ld_ptr(R, A, O) \
    tcg_gen_ld_i64(TCGV_PTR_TO_NAT(R), (A), (O))
# define tcg_gen_discard_ptr(A) \
    tcg_gen_discard_i64(TCGV_PTR_TO_NAT(A))
# define tcg_gen_add_ptr(R, A, B) \
    tcg_gen_add_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), TCGV_PTR_TO_NAT(B))
# define tcg_gen_addi_ptr(R, A, B) \
    tcg_gen_addi_i64(TCGV_PTR_TO_NAT(R), TCGV_PTR_TO_NAT(A), (B))
# define tcg_gen_ext_i32_ptr(R, A) \
    tcg_gen_ext_i32_i64(TCGV_PTR_TO_NAT(R), (A))
#endif /* UINTPTR_MAX == UINT32_MAX */