]> Git Repo - qemu.git/blob - tcg/tcg-op.c
Merge remote-tracking branch 'remotes/gkurz/tags/for-upstream' into staging
[qemu.git] / tcg / tcg-op.c
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24
25 #include "qemu/osdep.h"
26 #include "qemu-common.h"
27 #include "cpu.h"
28 #include "exec/exec-all.h"
29 #include "tcg.h"
30 #include "tcg-op.h"
31 #include "trace-tcg.h"
32 #include "trace/mem.h"
33
34 /* Reduce the number of ifdefs below.  This assumes that all uses of
35    TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
36    the compiler can eliminate.  */
37 #if TCG_TARGET_REG_BITS == 64
38 extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
39 extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
40 #define TCGV_LOW  TCGV_LOW_link_error
41 #define TCGV_HIGH TCGV_HIGH_link_error
42 #endif
43
44 /* Note that this is optimized for sequential allocation during translate.
45    Up to and including filling in the forward link immediately.  We'll do
46    proper termination of the end of the list after we finish translation.  */
47
/* Append one op to the op stream.
   opc: the opcode; args: index of the op's first argument in
   ctx->gen_opparam_buf (filled in by the tcg_gen_opN callers below).
   gen_op_buf[0] acts as a list sentinel: its .prev is kept pointing at
   the most recently emitted op, so the tail is reachable in O(1).
   The .next link of the new op is filled in optimistically with the
   presumed next slot; the list is properly terminated only after
   translation finishes (see comment above).  */
static void tcg_emit_op(TCGContext *ctx, TCGOpcode opc, int args)
{
    int oi = ctx->gen_next_op_idx;   /* slot for this op */
    int ni = oi + 1;                 /* presumed following op */
    int pi = oi - 1;                 /* previous op (0 is the sentinel) */

    tcg_debug_assert(oi < OPC_BUF_SIZE);
    ctx->gen_op_buf[0].prev = oi;    /* sentinel tracks the list tail */
    ctx->gen_next_op_idx = ni;

    ctx->gen_op_buf[oi] = (TCGOp){
        .opc = opc,
        .args = args,
        .prev = pi,
        .next = ni
    };
}
65
66 void tcg_gen_op1(TCGContext *ctx, TCGOpcode opc, TCGArg a1)
67 {
68     int pi = ctx->gen_next_parm_idx;
69
70     tcg_debug_assert(pi + 1 <= OPPARAM_BUF_SIZE);
71     ctx->gen_next_parm_idx = pi + 1;
72     ctx->gen_opparam_buf[pi] = a1;
73
74     tcg_emit_op(ctx, opc, pi);
75 }
76
77 void tcg_gen_op2(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2)
78 {
79     int pi = ctx->gen_next_parm_idx;
80
81     tcg_debug_assert(pi + 2 <= OPPARAM_BUF_SIZE);
82     ctx->gen_next_parm_idx = pi + 2;
83     ctx->gen_opparam_buf[pi + 0] = a1;
84     ctx->gen_opparam_buf[pi + 1] = a2;
85
86     tcg_emit_op(ctx, opc, pi);
87 }
88
89 void tcg_gen_op3(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
90                  TCGArg a2, TCGArg a3)
91 {
92     int pi = ctx->gen_next_parm_idx;
93
94     tcg_debug_assert(pi + 3 <= OPPARAM_BUF_SIZE);
95     ctx->gen_next_parm_idx = pi + 3;
96     ctx->gen_opparam_buf[pi + 0] = a1;
97     ctx->gen_opparam_buf[pi + 1] = a2;
98     ctx->gen_opparam_buf[pi + 2] = a3;
99
100     tcg_emit_op(ctx, opc, pi);
101 }
102
103 void tcg_gen_op4(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
104                  TCGArg a2, TCGArg a3, TCGArg a4)
105 {
106     int pi = ctx->gen_next_parm_idx;
107
108     tcg_debug_assert(pi + 4 <= OPPARAM_BUF_SIZE);
109     ctx->gen_next_parm_idx = pi + 4;
110     ctx->gen_opparam_buf[pi + 0] = a1;
111     ctx->gen_opparam_buf[pi + 1] = a2;
112     ctx->gen_opparam_buf[pi + 2] = a3;
113     ctx->gen_opparam_buf[pi + 3] = a4;
114
115     tcg_emit_op(ctx, opc, pi);
116 }
117
118 void tcg_gen_op5(TCGContext *ctx, TCGOpcode opc, TCGArg a1,
119                  TCGArg a2, TCGArg a3, TCGArg a4, TCGArg a5)
120 {
121     int pi = ctx->gen_next_parm_idx;
122
123     tcg_debug_assert(pi + 5 <= OPPARAM_BUF_SIZE);
124     ctx->gen_next_parm_idx = pi + 5;
125     ctx->gen_opparam_buf[pi + 0] = a1;
126     ctx->gen_opparam_buf[pi + 1] = a2;
127     ctx->gen_opparam_buf[pi + 2] = a3;
128     ctx->gen_opparam_buf[pi + 3] = a4;
129     ctx->gen_opparam_buf[pi + 4] = a5;
130
131     tcg_emit_op(ctx, opc, pi);
132 }
133
134 void tcg_gen_op6(TCGContext *ctx, TCGOpcode opc, TCGArg a1, TCGArg a2,
135                  TCGArg a3, TCGArg a4, TCGArg a5, TCGArg a6)
136 {
137     int pi = ctx->gen_next_parm_idx;
138
139     tcg_debug_assert(pi + 6 <= OPPARAM_BUF_SIZE);
140     ctx->gen_next_parm_idx = pi + 6;
141     ctx->gen_opparam_buf[pi + 0] = a1;
142     ctx->gen_opparam_buf[pi + 1] = a2;
143     ctx->gen_opparam_buf[pi + 2] = a3;
144     ctx->gen_opparam_buf[pi + 3] = a4;
145     ctx->gen_opparam_buf[pi + 4] = a5;
146     ctx->gen_opparam_buf[pi + 5] = a6;
147
148     tcg_emit_op(ctx, opc, pi);
149 }
150
/* Emit a memory barrier op of the given type.  Barriers are currently
   emitted only for user-mode emulation; in system mode (CONFIG_USER_ONLY
   not defined) they are suppressed until MTTCG support lands, per the
   TODO below.  */
void tcg_gen_mb(TCGBar mb_type)
{
    bool emit_barriers = true;

#ifndef CONFIG_USER_ONLY
    /* TODO: When MTTCG is available for system mode, we will check
     * the following condition and enable emit_barriers
     * (qemu_tcg_mttcg_enabled() && smp_cpus > 1)
     */
    emit_barriers = false;
#endif

    if (emit_barriers) {
        tcg_gen_op1(&tcg_ctx, INDEX_op_mb, mb_type);
    }
}
167
168 /* 32 bit ops */
169
170 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
171 {
172     /* some cases can be optimized here */
173     if (arg2 == 0) {
174         tcg_gen_mov_i32(ret, arg1);
175     } else {
176         TCGv_i32 t0 = tcg_const_i32(arg2);
177         tcg_gen_add_i32(ret, arg1, t0);
178         tcg_temp_free_i32(t0);
179     }
180 }
181
182 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
183 {
184     if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
185         /* Don't recurse with tcg_gen_neg_i32.  */
186         tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
187     } else {
188         TCGv_i32 t0 = tcg_const_i32(arg1);
189         tcg_gen_sub_i32(ret, t0, arg2);
190         tcg_temp_free_i32(t0);
191     }
192 }
193
194 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
195 {
196     /* some cases can be optimized here */
197     if (arg2 == 0) {
198         tcg_gen_mov_i32(ret, arg1);
199     } else {
200         TCGv_i32 t0 = tcg_const_i32(arg2);
201         tcg_gen_sub_i32(ret, arg1, t0);
202         tcg_temp_free_i32(t0);
203     }
204 }
205
/* ret = arg1 & arg2, arg2 an immediate.  Masks matching a zero/unsigned
   extension are strength-reduced to the corresponding op when the
   target supports it.  */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    TCGv_i32 t0;
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0 */
        tcg_gen_movi_i32(ret, 0);
        return;
    case 0xffffffffu:
        /* x & ~0 == x */
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xffu:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }
    /* General case: materialize the constant and emit a plain AND.  */
    t0 = tcg_const_i32(arg2);
    tcg_gen_and_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
235
236 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
237 {
238     /* Some cases can be optimized here.  */
239     if (arg2 == -1) {
240         tcg_gen_movi_i32(ret, -1);
241     } else if (arg2 == 0) {
242         tcg_gen_mov_i32(ret, arg1);
243     } else {
244         TCGv_i32 t0 = tcg_const_i32(arg2);
245         tcg_gen_or_i32(ret, arg1, t0);
246         tcg_temp_free_i32(t0);
247     }
248 }
249
250 void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
251 {
252     /* Some cases can be optimized here.  */
253     if (arg2 == 0) {
254         tcg_gen_mov_i32(ret, arg1);
255     } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
256         /* Don't recurse with tcg_gen_not_i32.  */
257         tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
258     } else {
259         TCGv_i32 t0 = tcg_const_i32(arg2);
260         tcg_gen_xor_i32(ret, arg1, t0);
261         tcg_temp_free_i32(t0);
262     }
263 }
264
265 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
266 {
267     tcg_debug_assert(arg2 < 32);
268     if (arg2 == 0) {
269         tcg_gen_mov_i32(ret, arg1);
270     } else {
271         TCGv_i32 t0 = tcg_const_i32(arg2);
272         tcg_gen_shl_i32(ret, arg1, t0);
273         tcg_temp_free_i32(t0);
274     }
275 }
276
277 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
278 {
279     tcg_debug_assert(arg2 < 32);
280     if (arg2 == 0) {
281         tcg_gen_mov_i32(ret, arg1);
282     } else {
283         TCGv_i32 t0 = tcg_const_i32(arg2);
284         tcg_gen_shr_i32(ret, arg1, t0);
285         tcg_temp_free_i32(t0);
286     }
287 }
288
289 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
290 {
291     tcg_debug_assert(arg2 < 32);
292     if (arg2 == 0) {
293         tcg_gen_mov_i32(ret, arg1);
294     } else {
295         TCGv_i32 t0 = tcg_const_i32(arg2);
296         tcg_gen_sar_i32(ret, arg1, t0);
297         tcg_temp_free_i32(t0);
298     }
299 }
300
301 void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
302 {
303     if (cond == TCG_COND_ALWAYS) {
304         tcg_gen_br(l);
305     } else if (cond != TCG_COND_NEVER) {
306         tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
307     }
308 }
309
310 void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
311 {
312     if (cond == TCG_COND_ALWAYS) {
313         tcg_gen_br(l);
314     } else if (cond != TCG_COND_NEVER) {
315         TCGv_i32 t0 = tcg_const_i32(arg2);
316         tcg_gen_brcond_i32(cond, arg1, t0, l);
317         tcg_temp_free_i32(t0);
318     }
319 }
320
321 void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
322                          TCGv_i32 arg1, TCGv_i32 arg2)
323 {
324     if (cond == TCG_COND_ALWAYS) {
325         tcg_gen_movi_i32(ret, 1);
326     } else if (cond == TCG_COND_NEVER) {
327         tcg_gen_movi_i32(ret, 0);
328     } else {
329         tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
330     }
331 }
332
333 void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
334                           TCGv_i32 arg1, int32_t arg2)
335 {
336     TCGv_i32 t0 = tcg_const_i32(arg2);
337     tcg_gen_setcond_i32(cond, ret, arg1, t0);
338     tcg_temp_free_i32(t0);
339 }
340
341 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
342 {
343     TCGv_i32 t0 = tcg_const_i32(arg2);
344     tcg_gen_mul_i32(ret, arg1, t0);
345     tcg_temp_free_i32(t0);
346 }
347
/* Signed division, ret = arg1 / arg2.  Uses a native div op if the
   target has one; else div2, which takes the dividend split across two
   registers (low = arg1, high = sign extension of arg1) and produces
   quotient and remainder; else an out-of-line helper.  */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Sign-extend arg1 into t0 to form the high half of the dividend.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        /* Outputs: ret = quotient, t0 = remainder (discarded).  */
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
361
/* Signed remainder, ret = arg1 % arg2.  Falls back, in order, to:
   native rem op; div + mul + sub (r = a - (a/b)*b); div2 (remainder is
   its second output); helper call.  */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* r = arg1 - (arg1 / arg2) * arg2 */
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* t0 = sign extension of arg1 (high half of dividend).
           Outputs: t0 = quotient (discarded), ret = remainder.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
381
/* Unsigned division, ret = arg1 / arg2.  Same strategy as the signed
   variant, except the high half of the double-width dividend for div2
   is zero rather than a sign extension.  */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Zero high half; outputs: ret = quotient, t0 = remainder.  */
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
395
/* Unsigned remainder, ret = arg1 % arg2.  Fallback chain mirrors
   tcg_gen_rem_i32, using the unsigned ops and a zero high half.  */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* r = arg1 - (arg1 / arg2) * arg2 */
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        /* Zero high half; outputs: t0 = quotient (discarded),
           ret = remainder.  */
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
415
416 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
417 {
418     if (TCG_TARGET_HAS_andc_i32) {
419         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
420     } else {
421         TCGv_i32 t0 = tcg_temp_new_i32();
422         tcg_gen_not_i32(t0, arg2);
423         tcg_gen_and_i32(ret, arg1, t0);
424         tcg_temp_free_i32(t0);
425     }
426 }
427
428 void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
429 {
430     if (TCG_TARGET_HAS_eqv_i32) {
431         tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
432     } else {
433         tcg_gen_xor_i32(ret, arg1, arg2);
434         tcg_gen_not_i32(ret, ret);
435     }
436 }
437
438 void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
439 {
440     if (TCG_TARGET_HAS_nand_i32) {
441         tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
442     } else {
443         tcg_gen_and_i32(ret, arg1, arg2);
444         tcg_gen_not_i32(ret, ret);
445     }
446 }
447
448 void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
449 {
450     if (TCG_TARGET_HAS_nor_i32) {
451         tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
452     } else {
453         tcg_gen_or_i32(ret, arg1, arg2);
454         tcg_gen_not_i32(ret, ret);
455     }
456 }
457
458 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
459 {
460     if (TCG_TARGET_HAS_orc_i32) {
461         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
462     } else {
463         TCGv_i32 t0 = tcg_temp_new_i32();
464         tcg_gen_not_i32(t0, arg2);
465         tcg_gen_or_i32(ret, arg1, t0);
466         tcg_temp_free_i32(t0);
467     }
468 }
469
/* Rotate left by a variable amount: ret = rotl(arg1, arg2).
   Fallback expands to (arg1 << arg2) | (arg1 >> (32 - arg2)).
   NOTE(review): the fallback's behavior for arg2 == 0 depends on the
   backend's handling of a shift by 32 in the second term — presumably
   callers guarantee/targets tolerate this; confirm before relying on
   rot by 0 through this path.  */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        /* t1 = 32 - arg2, the complementary right-shift count.  */
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
487
488 void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
489 {
490     tcg_debug_assert(arg2 < 32);
491     /* some cases can be optimized here */
492     if (arg2 == 0) {
493         tcg_gen_mov_i32(ret, arg1);
494     } else if (TCG_TARGET_HAS_rot_i32) {
495         TCGv_i32 t0 = tcg_const_i32(arg2);
496         tcg_gen_rotl_i32(ret, arg1, t0);
497         tcg_temp_free_i32(t0);
498     } else {
499         TCGv_i32 t0, t1;
500         t0 = tcg_temp_new_i32();
501         t1 = tcg_temp_new_i32();
502         tcg_gen_shli_i32(t0, arg1, arg2);
503         tcg_gen_shri_i32(t1, arg1, 32 - arg2);
504         tcg_gen_or_i32(ret, t0, t1);
505         tcg_temp_free_i32(t0);
506         tcg_temp_free_i32(t1);
507     }
508 }
509
/* Rotate right by a variable amount: ret = rotr(arg1, arg2).
   Fallback mirrors tcg_gen_rotl_i32 with the shift directions swapped:
   (arg1 >> arg2) | (arg1 << (32 - arg2)).  */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        /* t1 = 32 - arg2, the complementary left-shift count.  */
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
527
528 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
529 {
530     tcg_debug_assert(arg2 < 32);
531     /* some cases can be optimized here */
532     if (arg2 == 0) {
533         tcg_gen_mov_i32(ret, arg1);
534     } else {
535         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
536     }
537 }
538
/* Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
   writing the result to RET; the other bits of ARG1 are preserved.  */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Full-width deposit replaces arg1 entirely.  This also makes the
       mask computation below safe: len == 32 never reaches it, so the
       shift 1u << len is always < 32 bits.  */
    if (ofs == 0 && len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    mask = (1u << len) - 1;
    t1 = tcg_temp_new_i32();

    if (ofs + len < 32) {
        /* Mask then shift the field into position.  */
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches bit 31: the shift itself discards high bits,
           so no pre-mask is needed.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    /* Clear the destination field in arg1, then merge.  */
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);

    tcg_temp_free_i32(t1);
}
572
/* ret = (c1 cond c2) ? v1 : v2.  */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Branch-free expansion: t0 = -(c1 cond c2) is all-ones when
           the condition holds, all-zeros otherwise; then
           ret = (v1 & t0) | (v2 & ~t0).  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
594
595 void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
596                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
597 {
598     if (TCG_TARGET_HAS_add2_i32) {
599         tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
600     } else {
601         TCGv_i64 t0 = tcg_temp_new_i64();
602         TCGv_i64 t1 = tcg_temp_new_i64();
603         tcg_gen_concat_i32_i64(t0, al, ah);
604         tcg_gen_concat_i32_i64(t1, bl, bh);
605         tcg_gen_add_i64(t0, t0, t1);
606         tcg_gen_extr_i64_i32(rl, rh, t0);
607         tcg_temp_free_i64(t0);
608         tcg_temp_free_i64(t1);
609     }
610 }
611
612 void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
613                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
614 {
615     if (TCG_TARGET_HAS_sub2_i32) {
616         tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
617     } else {
618         TCGv_i64 t0 = tcg_temp_new_i64();
619         TCGv_i64 t1 = tcg_temp_new_i64();
620         tcg_gen_concat_i32_i64(t0, al, ah);
621         tcg_gen_concat_i32_i64(t1, bl, bh);
622         tcg_gen_sub_i64(t0, t0, t1);
623         tcg_gen_extr_i64_i32(rl, rh, t0);
624         tcg_temp_free_i64(t0);
625         tcg_temp_free_i64(t1);
626     }
627 }
628
/* Unsigned widening multiply: (rh:rl) = arg1 * arg2 (full 64-bit
   product of two 32-bit unsigned values).  */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Low half via plain mul, high half via muluh.  The low result
           goes through a temp so rl may alias arg1/arg2.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        /* Widen both operands to 64 bits and multiply there.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
650
/* Signed widening multiply: (rh:rl) = arg1 * arg2 (full 64-bit product
   of two 32-bit signed values).  */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Low half via plain mul, high half via mulsh; temp so that
           rl may alias the inputs.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Compute the unsigned full product, then fix up the high half:
           for each negative input, subtract the other operand from the
           high word (signed = unsigned - (a<0 ? b : 0)<<32 - (b<0 ? a : 0)<<32).
           sign(x) & y selects y exactly when x is negative.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend and multiply in 64 bits.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
690
691 void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
692 {
693     if (TCG_TARGET_HAS_ext8s_i32) {
694         tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
695     } else {
696         tcg_gen_shli_i32(ret, arg, 24);
697         tcg_gen_sari_i32(ret, ret, 24);
698     }
699 }
700
701 void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
702 {
703     if (TCG_TARGET_HAS_ext16s_i32) {
704         tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
705     } else {
706         tcg_gen_shli_i32(ret, arg, 16);
707         tcg_gen_sari_i32(ret, ret, 16);
708     }
709 }
710
711 void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
712 {
713     if (TCG_TARGET_HAS_ext8u_i32) {
714         tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
715     } else {
716         tcg_gen_andi_i32(ret, arg, 0xffu);
717     }
718 }
719
720 void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
721 {
722     if (TCG_TARGET_HAS_ext16u_i32) {
723         tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
724     } else {
725         tcg_gen_andi_i32(ret, arg, 0xffffu);
726     }
727 }
728
/* Byte-swap the low 16 bits of arg into ret.
   Note: we assume the two high bytes are set to zero -- the fallback
   relies on this, since (arg >> 8) would otherwise leak bits 16..31
   into the result.  */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        /* t0 = low byte moved up; ret = second byte moved down.  */
        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}
744
/* Byte-swap all four bytes of arg into ret.  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        /* Assemble the result one byte at a time:
           byte 0 -> 24, byte 1 -> 16, byte 2 -> 8, byte 3 -> 0.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        /* byte 0 to the top */
        tcg_gen_shli_i32(t0, arg, 24);

        /* byte 1 to position 16..23 */
        tcg_gen_andi_i32(t1, arg, 0x0000ff00);
        tcg_gen_shli_i32(t1, t1, 8);
        tcg_gen_or_i32(t0, t0, t1);

        /* byte 2 to position 8..15 */
        tcg_gen_shri_i32(t1, arg, 8);
        tcg_gen_andi_i32(t1, t1, 0x0000ff00);
        tcg_gen_or_i32(t0, t0, t1);

        /* byte 3 to the bottom */
        tcg_gen_shri_i32(t1, arg, 24);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
770
771 /* 64-bit ops */
772
773 #if TCG_TARGET_REG_BITS == 32
774 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
775
/* Discard both 32-bit halves of a 64-bit temp (32-bit host).  */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
781
/* Copy a 64-bit value half by half (32-bit host).  */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
}
787
/* Load a 64-bit immediate by splitting it across the two halves
   (32-bit host).  */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
793
/* Load a zero-extended byte into a 64-bit temp (32-bit host):
   low half gets the byte, high half is zero.  */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
799
800 void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
801 {
802     tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
803     tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), 31);
804 }
805
/* Load a zero-extended 16-bit value into a 64-bit temp (32-bit host).  */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
811
/* Load a sign-extended 16-bit value into a 64-bit temp (32-bit host):
   high half is filled from the sign bit of the loaded low half.  */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
817
/* Load a zero-extended 32-bit value into a 64-bit temp (32-bit host).  */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
823
/* Load a sign-extended 32-bit value into a 64-bit temp (32-bit host):
   high half is filled from the sign bit of the loaded low half.  */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
829
/* Load a full 64-bit value as two 32-bit loads (32-bit host).  Which
   half lives at the lower address depends on host endianness.  */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
842
/* Store a full 64-bit value as two 32-bit stores (32-bit host),
   mirroring the host-endian layout used by tcg_gen_ld_i64.  */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
853
/* 64-bit AND as two independent 32-bit ANDs (32-bit host).  */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
859
/* 64-bit OR as two independent 32-bit ORs (32-bit host).  */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
865
/* 64-bit XOR as two independent 32-bit XORs (32-bit host).  */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
871
/* 64-bit shift left on a 32-bit host: no inline expansion here,
   always routed through an out-of-line helper.  */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
876
/* 64-bit logical shift right on a 32-bit host: helper call.  */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
881
/* 64-bit arithmetic shift right on a 32-bit host: helper call.  */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
886
/* 64-bit multiply on a 32-bit host, schoolbook style:
   full 64-bit product of the low halves, plus the two 32x32->32
   cross products added into the high word.  (The high*high term
   only affects bits >= 64 and is dropped.)  Result goes through
   temp t0 so ret may alias an input.  */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    /* low(arg1) * low(arg2), full 64-bit result.  */
    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Cross terms contribute only to the high word.  */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
#endif /* TCG_TARGET_REG_BITS == 32 */
908
909 void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
910 {
911     /* some cases can be optimized here */
912     if (arg2 == 0) {
913         tcg_gen_mov_i64(ret, arg1);
914     } else {
915         TCGv_i64 t0 = tcg_const_i64(arg2);
916         tcg_gen_add_i64(ret, arg1, t0);
917         tcg_temp_free_i64(t0);
918     }
919 }
920
921 void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
922 {
923     if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
924         /* Don't recurse with tcg_gen_neg_i64.  */
925         tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
926     } else {
927         TCGv_i64 t0 = tcg_const_i64(arg1);
928         tcg_gen_sub_i64(ret, t0, arg2);
929         tcg_temp_free_i64(t0);
930     }
931 }
932
933 void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
934 {
935     /* some cases can be optimized here */
936     if (arg2 == 0) {
937         tcg_gen_mov_i64(ret, arg1);
938     } else {
939         TCGv_i64 t0 = tcg_const_i64(arg2);
940         tcg_gen_sub_i64(ret, arg1, t0);
941         tcg_temp_free_i64(t0);
942     }
943 }
944
/* ret = arg1 & arg2, arg2 an immediate.  On a 32-bit host the mask is
   split across the two halves; otherwise masks matching an unsigned
   extension are strength-reduced when the target supports it.  */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    TCGv_i64 t0;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0 */
        tcg_gen_movi_i64(ret, 0);
        return;
    case 0xffffffffffffffffull:
        /* x & ~0 == x */
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xffull:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffu:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffull:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }
    /* General case: materialize the constant and emit a plain AND.  */
    t0 = tcg_const_i64(arg2);
    tcg_gen_and_i64(ret, arg1, t0);
    tcg_temp_free_i64(t0);
}
987
/* Emit ret = arg1 | arg2 for a compile-time constant.
   OR with all-ones yields a constant -1; OR with zero is a move.  */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: OR each half independently.  */
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1006
/* Emit ret = arg1 ^ arg2 for a compile-time constant.
   XOR with zero is a move; XOR with all-ones is bitwise NOT.  */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: XOR each half independently.  */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_xor_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1026
/* Common worker for 64-bit constant shifts on 32-bit hosts, where the
   value lives in a low/high register pair.
   @c:     shift count, must be < 64
   @right: shift right if true, left if false
   @arith: arithmetic (sign-propagating) right shift if true
   Three cases: c == 0 is a pair move; c >= 32 moves one half into the
   other (shifted by c - 32) and fills the vacated half; 0 < c < 32
   shifts both halves and ORs the c bits that cross the 32-bit seam.  */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                /* High half becomes pure sign bits.  */
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        if (right) {
            /* t0 = bits of the high half that move into the low half.  */
            tcg_gen_shli_i32(t0, TCGV_HIGH(arg1), 32 - c);
            if (arith) {
                tcg_gen_sari_i32(t1, TCGV_HIGH(arg1), c);
            } else {
                tcg_gen_shri_i32(t1, TCGV_HIGH(arg1), c);
            }
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t0);
            tcg_gen_mov_i32(TCGV_HIGH(ret), t1);
        } else {
            /* t0 = bits of the low half that move into the high half.  */
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            /* Note: ret can be the same as arg1, so we use t1 */
            tcg_gen_shli_i32(t1, TCGV_LOW(arg1), c);
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t0);
            tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1075
1076 void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1077 {
1078     tcg_debug_assert(arg2 < 64);
1079     if (TCG_TARGET_REG_BITS == 32) {
1080         tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
1081     } else if (arg2 == 0) {
1082         tcg_gen_mov_i64(ret, arg1);
1083     } else {
1084         TCGv_i64 t0 = tcg_const_i64(arg2);
1085         tcg_gen_shl_i64(ret, arg1, t0);
1086         tcg_temp_free_i64(t0);
1087     }
1088 }
1089
1090 void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1091 {
1092     tcg_debug_assert(arg2 < 64);
1093     if (TCG_TARGET_REG_BITS == 32) {
1094         tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
1095     } else if (arg2 == 0) {
1096         tcg_gen_mov_i64(ret, arg1);
1097     } else {
1098         TCGv_i64 t0 = tcg_const_i64(arg2);
1099         tcg_gen_shr_i64(ret, arg1, t0);
1100         tcg_temp_free_i64(t0);
1101     }
1102 }
1103
1104 void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1105 {
1106     tcg_debug_assert(arg2 < 64);
1107     if (TCG_TARGET_REG_BITS == 32) {
1108         tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
1109     } else if (arg2 == 0) {
1110         tcg_gen_mov_i64(ret, arg1);
1111     } else {
1112         TCGv_i64 t0 = tcg_const_i64(arg2);
1113         tcg_gen_sar_i64(ret, arg1, t0);
1114         tcg_temp_free_i64(t0);
1115     }
1116 }
1117
/* Emit a conditional branch to label @l on (arg1 cond arg2).
   ALWAYS folds to an unconditional branch; NEVER emits nothing.  */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* Compare both 32-bit halves with the double-word branch op.  */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}
1133
/* Conditional branch against an immediate: materialize the constant
   only when the condition is not trivially ALWAYS/NEVER.  */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_brcond_i64(cond, arg1, t0, l);
        tcg_temp_free_i64(t0);
    }
}
1144
/* Set ret to 1 if (arg1 cond arg2), else 0.
   ALWAYS/NEVER fold to constant 1/0 without emitting a compare.  */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* Double-word compare into the low half; result is 0/1,
               so the high half is always zero.  */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1163
1164 void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1165                           TCGv_i64 arg1, int64_t arg2)
1166 {
1167     TCGv_i64 t0 = tcg_const_i64(arg2);
1168     tcg_gen_setcond_i64(cond, ret, arg1, t0);
1169     tcg_temp_free_i64(t0);
1170 }
1171
1172 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1173 {
1174     TCGv_i64 t0 = tcg_const_i64(arg2);
1175     tcg_gen_mul_i64(ret, arg1, t0);
1176     tcg_temp_free_i64(t0);
1177 }
1178
/* Signed 64-bit division, ret = arg1 / arg2.  Uses the native div op,
   the 128/64 div2 op, or a helper call, in order of preference.  */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 divides the high:low pair; t0 supplies the sign
           extension of arg1 as the high word.  Quotient lands in ret,
           remainder in t0 (discarded here).  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1192
/* Signed 64-bit remainder, ret = arg1 % arg2.  Falls back to
   rem = arg1 - (arg1 / arg2) * arg2 when only division exists.  */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Synthesize: t0 = arg1 / arg2; ret = arg1 - t0 * arg2.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 with sign-extended high word; quotient goes to t0
           (discarded), remainder to ret.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1212
/* Unsigned 64-bit division, ret = arg1 / arg2.  */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* Unsigned div2: the high word of the dividend pair is zero.
           Quotient lands in ret, remainder in t0 (discarded).  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1226
/* Unsigned 64-bit remainder, ret = arg1 % arg2.  Mirrors
   tcg_gen_rem_i64 but with the unsigned ops and a zero high word.  */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Synthesize: t0 = arg1 / arg2; ret = arg1 - t0 * arg2.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* Quotient to t0 (discarded), remainder to ret.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1246
/* Sign-extend the low 8 bits of arg into ret.  Without a native op,
   fall back to shift-left/shift-right-arithmetic by 56.  */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        /* High half is a copy of the sign bit.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1259
/* Sign-extend the low 16 bits of arg into ret.  */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        /* High half is a copy of the sign bit.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Shift pair fallback.  */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1272
/* Sign-extend the low 32 bits of arg into ret.  */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy low half; high half is a copy of the sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Shift pair fallback.  */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1285
/* Zero-extend the low 8 bits of arg into ret.  */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        /* andi recognizes the 0xff mask, but won't recurse back here
           because TCG_TARGET_HAS_ext8u_i64 is false on this path.  */
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1297
/* Zero-extend the low 16 bits of arg into ret.  */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        /* Mask fallback; cannot recurse (capability is false here).  */
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1309
/* Zero-extend the low 32 bits of arg into ret.  */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy low half, clear high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        /* Mask fallback; cannot recurse (capability is false here).  */
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1321
/* Byte-swap the low 16 bits of arg into ret.
   Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        /* Swap the two low bytes: (byte0 << 8) | byte1.  */
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}
1340
/* Byte-swap the low 32 bits of arg into ret.
   Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        /* Assemble the result one byte lane at a time in t0.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 3 (ext32u drops bits shifted above bit 31).  */
        tcg_gen_shli_i64(t0, arg, 24);
        tcg_gen_ext32u_i64(t0, t0);

        /* Byte 1 -> byte 2.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1371
/* Byte-swap all 64 bits of arg into ret.  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves.  Temporaries
           are needed because ret may alias arg.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        /* Accumulate each byte lane into t0, low byte first.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* Byte 0 -> byte 7.  */
        tcg_gen_shli_i64(t0, arg, 56);

        /* Byte 1 -> byte 6.  */
        tcg_gen_andi_i64(t1, arg, 0x0000ff00);
        tcg_gen_shli_i64(t1, t1, 40);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 2 -> byte 5.  */
        tcg_gen_andi_i64(t1, arg, 0x00ff0000);
        tcg_gen_shli_i64(t1, t1, 24);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 3 -> byte 4.  */
        tcg_gen_andi_i64(t1, arg, 0xff000000);
        tcg_gen_shli_i64(t1, t1, 8);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 4 -> byte 3.  */
        tcg_gen_shri_i64(t1, arg, 8);
        tcg_gen_andi_i64(t1, t1, 0xff000000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 5 -> byte 2.  */
        tcg_gen_shri_i64(t1, arg, 24);
        tcg_gen_andi_i64(t1, t1, 0x00ff0000);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 6 -> byte 1.  */
        tcg_gen_shri_i64(t1, arg, 40);
        tcg_gen_andi_i64(t1, t1, 0x0000ff00);
        tcg_gen_or_i64(t0, t0, t1);

        /* Byte 7 -> byte 0.  */
        tcg_gen_shri_i64(t1, arg, 56);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1423
/* Bitwise NOT: ret = ~arg.  xori with -1 is the generic fallback;
   tcg_gen_xori_i64 will not recurse back (not-capability is false).  */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
1435
/* AND-with-complement: ret = arg1 & ~arg2.  */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        /* Temp needed so arg2 is inverted before ret (maybe == arg2)
           is written.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1450
/* Equivalence (XNOR): ret = ~(arg1 ^ arg2).  */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1463
/* NAND: ret = ~(arg1 & arg2).  */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1476
/* NOR: ret = ~(arg1 | arg2).  */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1489
/* OR-with-complement: ret = arg1 | ~arg2.  */
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        /* Temp needed so arg2 is inverted before ret (maybe == arg2)
           is written.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1504
/* Rotate left by a variable count:
   ret = (arg1 << arg2) | (arg1 >> (64 - arg2)).  */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Synthesize from two shifts and an OR.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1521
/* Rotate left by a constant count in [0, 64).  */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
{
    tcg_debug_assert(arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_rotl_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else {
        /* Constant count: both shift amounts are known, so synthesize
           with immediate shifts (arg2 != 0 guarantees valid counts).  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1543
/* Rotate right by a variable count:
   ret = (arg1 >> arg2) | (arg1 << (64 - arg2)).  */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Synthesize from two shifts and an OR.  */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_new_i64();
        t1 = tcg_temp_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1560
1561 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
1562 {
1563     tcg_debug_assert(arg2 < 64);
1564     /* some cases can be optimized here */
1565     if (arg2 == 0) {
1566         tcg_gen_mov_i64(ret, arg1);
1567     } else {
1568         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
1569     }
1570 }
1571
/* Deposit @len bits of arg2 into arg1 at bit offset @ofs:
   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs).  */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Replacing the entire word is just a move.  */
    if (ofs == 0 && len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* If the field lies entirely in one 32-bit half, deposit there
           and copy the untouched half.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    /* Generic mask-and-merge.  len == 64 was handled above, so the
       shift below is well-defined.  */
    mask = (1ull << len) - 1;
    t1 = tcg_temp_new_i64();

    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* Field reaches bit 63: the shift itself discards high bits.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);

    tcg_temp_free_i64(t1);
}
1620
/* Conditional move: ret = (c1 cond c2) ? v1 : v2.  */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* t0 = 0/1 result of the double-word compare.  */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half on t0 != 0.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* No movcond: turn 0/1 into a 0/-1 mask and blend with
               (v1 & mask) | (v2 & ~mask) on each half.  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Same mask-and-blend trick at 64 bits.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1668
/* 128-bit add: (rh:rl) = (ah:al) + (bh:bl).  */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        /* Low sum in t0; carry-out is (t0 < al) unsigned.  Temps keep
           the inputs intact in case rl/rh alias them.  */
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1686
/* 128-bit subtract: (rh:rl) = (ah:al) - (bh:bl).  */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        /* Low difference in t0; borrow is (al < bl) unsigned.  */
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1704
/* Unsigned 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Low via mul, high via muluh; buffer the low part in case
           rl aliases an input.  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* High half comes from a helper call.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1723
/* Signed 64x64 -> 128 multiply: (rh:rl) = arg1 * arg2.  */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /* Do the unsigned multiply, then correct the high half: for a
           negative argN, the unsigned product counts the other operand
           2^64 times too many, so subtract it once from rh.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* High half comes from a helper call.  */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
1760
1761 /* Size changing operations.  */
1762
/* Extract the low 32 bits of a 64-bit value into an i32.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrl_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* On a 64-bit host i32 and i64 share registers; reinterpret
           the i64 temp index as an i32 and move.  */
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(arg)));
    }
}
1774
/* Extract the high 32 bits of a 64-bit value into an i32.  */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extrh_i64_i32,
                    GET_TCGV_I32(ret), GET_TCGV_I64(arg));
    } else {
        /* Shift down in a temp, then reinterpret the temp index as
           an i32 (i32/i64 share registers on 64-bit hosts).  */
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, MAKE_TCGV_I32(GET_TCGV_I64(t)));
        tcg_temp_free_i64(t);
    }
}
1789
/* Zero-extend an i32 into an i64.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy into the low half, clear the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_extu_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1800
/* Sign-extend an i32 into an i64.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Copy into the low half; high half replicates the sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(&tcg_ctx, INDEX_op_ext_i32_i64,
                    GET_TCGV_I64(ret), GET_TCGV_I32(arg));
    }
}
1811
/* Build a 64-bit value from two 32-bit halves: dest = high:low.  */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* dest is already a register pair: just move both halves.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
1837
1838 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
1839 {
1840     if (TCG_TARGET_REG_BITS == 32) {
1841         tcg_gen_mov_i32(lo, TCGV_LOW(arg));
1842         tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
1843     } else {
1844         tcg_gen_extrl_i64_i32(lo, arg);
1845         tcg_gen_extrh_i64_i32(hi, arg);
1846     }
1847 }
1848
/* Split a 64-bit value into its two 32-bit halves, each widened to
   64 bits: lo = low 32 bits zero-extended, hi = high 32 bits.
   NOTE(review): lo is written before hi is computed from arg, so this
   appears to assume lo does not alias arg -- confirm with callers.  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
1854
1855 /* QEMU specific operations.  */
1856
/* Emit a direct-jump slot for chaining to another translation block.
   idx selects which of the TB's exit slots is used.  */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= 1);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx.goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx.goto_tb_issue_mask |= 1 << idx;
#endif
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
1868
1869 static inline TCGMemOp tcg_canonicalize_memop(TCGMemOp op, bool is64, bool st)
1870 {
1871     /* Trigger the asserts within as early as possible.  */
1872     (void)get_alignment_bits(op);
1873
1874     switch (op & MO_SIZE) {
1875     case MO_8:
1876         op &= ~MO_BSWAP;
1877         break;
1878     case MO_16:
1879         break;
1880     case MO_32:
1881         if (!is64) {
1882             op &= ~MO_SIGN;
1883         }
1884         break;
1885     case MO_64:
1886         if (!is64) {
1887             tcg_abort();
1888         }
1889         break;
1890     }
1891     if (st) {
1892         op &= ~MO_SIGN;
1893     }
1894     return op;
1895 }
1896
/* Emit a guest memory load/store op with a 32-bit data operand.
   The operand count of the emitted op depends on how the guest address
   width (TARGET_LONG_BITS) maps onto host registers
   (TCG_TARGET_REG_BITS): a 64-bit address on a 32-bit host is passed
   as a low/high register pair.  */
static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    /* Pack memop flags and the mmu index into one op argument.  */
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    tcg_gen_op3i_i32(opc, val, addr, oi);
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* 64-bit guest address split across two 32-bit host registers.  */
        tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I32(val), GET_TCGV_I64(addr), oi);
    }
#endif
}
1911
/* Emit a guest memory load/store op with a 64-bit data operand.
   Both the value and the address may each need to be passed as a
   low/high pair of 32-bit registers on a 32-bit host, giving four
   distinct operand layouts.  */
static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
                         TCGMemOp memop, TCGArg idx)
{
    /* Pack memop flags and the mmu index into one op argument.  */
    TCGMemOpIdx oi = make_memop_idx(memop, idx);
#if TARGET_LONG_BITS == 32
    if (TCG_TARGET_REG_BITS == 32) {
        /* Split value, single-register address.  */
        tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
    } else {
        tcg_gen_op3(&tcg_ctx, opc, GET_TCGV_I64(val), GET_TCGV_I32(addr), oi);
    }
#else
    if (TCG_TARGET_REG_BITS == 32) {
        /* Both value and address split into 32-bit pairs.  */
        tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
                         TCGV_LOW(addr), TCGV_HIGH(addr), oi);
    } else {
        tcg_gen_op3i_i64(opc, val, addr, oi);
    }
#endif
}
1931
/* Generate a guest load into a 32-bit value.  The memop is
   canonicalized (redundant flags stripped) first, the access is then
   reported to the memory-tracing hooks, and finally the op is emitted;
   this ordering is fixed.  */
void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 0);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 0));
    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
}
1939
/* Generate a guest store of a 32-bit value.  Mirrors
   tcg_gen_qemu_ld_i32: canonicalize (as a store, so sign flags are
   dropped), trace, then emit.  */
void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 0, 1);
    trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
                               addr, trace_mem_get_info(memop, 1));
    gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
}
1947
1948 void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1949 {
1950     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
1951         tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
1952         if (memop & MO_SIGN) {
1953             tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
1954         } else {
1955             tcg_gen_movi_i32(TCGV_HIGH(val), 0);
1956         }
1957         return;
1958     }
1959
1960     memop = tcg_canonicalize_memop(memop, 1, 0);
1961     trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
1962                                addr, trace_mem_get_info(memop, 0));
1963     gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
1964 }
1965
1966 void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, TCGMemOp memop)
1967 {
1968     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
1969         tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
1970         return;
1971     }
1972
1973     memop = tcg_canonicalize_memop(memop, 1, 1);
1974     trace_guest_mem_before_tcg(tcg_ctx.cpu, tcg_ctx.tcg_env,
1975                                addr, trace_mem_get_info(memop, 1));
1976     gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
1977 }
This page took 0.127267 seconds and 4 git commands to generate.