static TCGv_i64 cpu_bndl[4];
static TCGv_i64 cpu_bndu[4];
-static TCGv_i32 cpu_tmp3_i32;
static TCGv_i64 cpu_tmp1_i64;
#include "exec/gen-icount.h"
TCGv_ptr ptr0;
TCGv_ptr ptr1;
TCGv_i32 tmp2_i32;
+ TCGv_i32 tmp3_i32;
sigjmp_buf jmpbuf;
} DisasContext;
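
The field added here replaces the file-scope temporary removed above: every generator already receives the DisasContext, so the scratch value can live there instead of in a TCG global. A minimal standalone sketch of the same pattern in plain C; the names Ctx, scratch, and ctx_init are illustrative only and not QEMU API.

/* Sketch only: a scratch slot moves from file scope into the context it is
 * used with, and is set up where that context is initialised. */
#include <stdio.h>
#include <stdlib.h>

typedef struct Ctx {
    int *scratch;                 /* was: static int *scratch; at file scope */
} Ctx;

static void ctx_init(Ctx *c)
{
    c->scratch = malloc(sizeof(*c->scratch));   /* cf. dc->tmp3_i32 = tcg_temp_new_i32(); */
}

static void use_ctx(Ctx *c, int v)
{
    *c->scratch = v;              /* cf. s->tmp3_i32 instead of cpu_tmp3_i32 */
    printf("%d\n", *c->scratch);
}

int main(void)
{
    Ctx c;
    ctx_init(&c);
    use_ctx(&c, 42);
    free(c.scratch);
    return 0;
}
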
tcg_gen_trunc_tl_i32(s->tmp2_i32, cpu_regs[R_EDX]);
tcg_gen_andi_i32(s->tmp2_i32, s->tmp2_i32, 0xffff);
- tcg_gen_trunc_tl_i32(cpu_tmp3_i32, s->T0);
- gen_helper_out_func(ot, s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T0);
+ gen_helper_out_func(ot, s->tmp2_i32, s->tmp3_i32);
gen_op_movl_T0_Dshift(s, ot);
gen_op_add_reg_T0(s, s->aflag, R_ESI);
gen_bpt_io(s, s->tmp2_i32, ot);
if (s->cc_op == CC_OP_DYNAMIC) {
oldop = cpu_cc_op;
} else {
- tcg_gen_movi_i32(cpu_tmp3_i32, s->cc_op);
- oldop = cpu_tmp3_i32;
+ tcg_gen_movi_i32(s->tmp3_i32, s->cc_op);
+ oldop = s->tmp3_i32;
}
/* Conditionally store the CC_OP value. */
#ifdef TARGET_X86_64
case MO_32:
tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
- tcg_gen_trunc_tl_i32(cpu_tmp3_i32, s->T1);
+ tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1);
if (is_right) {
- tcg_gen_rotr_i32(s->tmp2_i32, s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_rotr_i32(s->tmp2_i32, s->tmp2_i32, s->tmp3_i32);
} else {
- tcg_gen_rotl_i32(s->tmp2_i32, s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_rotl_i32(s->tmp2_i32, s->tmp2_i32, s->tmp3_i32);
}
tcg_gen_extu_i32_tl(s->T0, s->tmp2_i32);
break;
t1 = tcg_temp_new_i32();
tcg_gen_trunc_tl_i32(t1, s->T1);
tcg_gen_movi_i32(s->tmp2_i32, CC_OP_ADCOX);
- tcg_gen_movi_i32(cpu_tmp3_i32, CC_OP_EFLAGS);
+ tcg_gen_movi_i32(s->tmp3_i32, CC_OP_EFLAGS);
tcg_gen_movcond_i32(TCG_COND_NE, cpu_cc_op, t1, t0,
- s->tmp2_i32, cpu_tmp3_i32);
+ s->tmp2_i32, s->tmp3_i32);
tcg_temp_free_i32(t0);
tcg_temp_free_i32(t1);
switch (ot) {
default:
tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
- tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EDX]);
- tcg_gen_mulu2_i32(s->tmp2_i32, cpu_tmp3_i32,
- s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_trunc_tl_i32(s->tmp3_i32, cpu_regs[R_EDX]);
+ tcg_gen_mulu2_i32(s->tmp2_i32, s->tmp3_i32,
+ s->tmp2_i32, s->tmp3_i32);
tcg_gen_extu_i32_tl(cpu_regs[s->vex_v], s->tmp2_i32);
- tcg_gen_extu_i32_tl(cpu_regs[reg], cpu_tmp3_i32);
+ tcg_gen_extu_i32_tl(cpu_regs[reg], s->tmp3_i32);
break;
#ifdef TARGET_X86_64
case MO_64:
default:
case MO_32:
tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
- tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EAX]);
- tcg_gen_mulu2_i32(s->tmp2_i32, cpu_tmp3_i32,
- s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_trunc_tl_i32(s->tmp3_i32, cpu_regs[R_EAX]);
+ tcg_gen_mulu2_i32(s->tmp2_i32, s->tmp3_i32,
+ s->tmp2_i32, s->tmp3_i32);
tcg_gen_extu_i32_tl(cpu_regs[R_EAX], s->tmp2_i32);
- tcg_gen_extu_i32_tl(cpu_regs[R_EDX], cpu_tmp3_i32);
+ tcg_gen_extu_i32_tl(cpu_regs[R_EDX], s->tmp3_i32);
tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
tcg_gen_mov_tl(cpu_cc_src, cpu_regs[R_EDX]);
set_cc_op(s, CC_OP_MULL);
default:
case MO_32:
tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
- tcg_gen_trunc_tl_i32(cpu_tmp3_i32, cpu_regs[R_EAX]);
- tcg_gen_muls2_i32(s->tmp2_i32, cpu_tmp3_i32,
- s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_trunc_tl_i32(s->tmp3_i32, cpu_regs[R_EAX]);
+ tcg_gen_muls2_i32(s->tmp2_i32, s->tmp3_i32,
+ s->tmp2_i32, s->tmp3_i32);
tcg_gen_extu_i32_tl(cpu_regs[R_EAX], s->tmp2_i32);
- tcg_gen_extu_i32_tl(cpu_regs[R_EDX], cpu_tmp3_i32);
+ tcg_gen_extu_i32_tl(cpu_regs[R_EDX], s->tmp3_i32);
tcg_gen_sari_i32(s->tmp2_i32, s->tmp2_i32, 31);
tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[R_EAX]);
- tcg_gen_sub_i32(s->tmp2_i32, s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_sub_i32(s->tmp2_i32, s->tmp2_i32, s->tmp3_i32);
tcg_gen_extu_i32_tl(cpu_cc_src, s->tmp2_i32);
set_cc_op(s, CC_OP_MULL);
break;
#endif
case MO_32:
tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
- tcg_gen_trunc_tl_i32(cpu_tmp3_i32, s->T1);
- tcg_gen_muls2_i32(s->tmp2_i32, cpu_tmp3_i32,
- s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1);
+ tcg_gen_muls2_i32(s->tmp2_i32, s->tmp3_i32,
+ s->tmp2_i32, s->tmp3_i32);
tcg_gen_extu_i32_tl(cpu_regs[reg], s->tmp2_i32);
tcg_gen_sari_i32(s->tmp2_i32, s->tmp2_i32, 31);
tcg_gen_mov_tl(cpu_cc_dst, cpu_regs[reg]);
- tcg_gen_sub_i32(s->tmp2_i32, s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_sub_i32(s->tmp2_i32, s->tmp2_i32, s->tmp3_i32);
tcg_gen_extu_i32_tl(cpu_cc_src, s->tmp2_i32);
break;
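
In both signed-multiply sequences above, tmp3_i32 ends up holding the high half of the 64-bit product while tmp2_i32 is reused: after the sari by 31 it holds the sign-extension of the low half, and the subtraction leaves cc_src = sign_ext(low) - high, which is zero exactly when the product fits in 32 bits. A small host-side illustration of that check in plain C (not TCG; the function name is made up for this example):

#include <stdint.h>
#include <stdio.h>

/* Nonzero when the signed 32x32 product does not fit in 32 bits, mirroring
 * the sari/sub sequence above: sign_ext(low half) minus high half. */
static int32_t imul32_overflow_word(int32_t a, int32_t b)
{
    int64_t prod = (int64_t)a * b;
    int32_t lo = (int32_t)prod;
    int32_t hi = (int32_t)(prod >> 32);
    int32_t sign = lo >> 31;      /* arithmetic shift, as tcg_gen_sari_i32 produces */
    return sign - hi;             /* cf. tcg_gen_sub_i32 into cpu_cc_src */
}

int main(void)
{
    /* 3*4 fits in 32 bits; 0x40000000*4 does not. */
    printf("%d %d\n", imul32_overflow_word(3, 4) != 0,
                      imul32_overflow_word(0x40000000, 4) != 0);
    return 0;
}
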
default:
gen_io_start();
}
tcg_gen_movi_i32(s->tmp2_i32, val);
- tcg_gen_trunc_tl_i32(cpu_tmp3_i32, s->T1);
- gen_helper_out_func(ot, s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1);
+ gen_helper_out_func(ot, s->tmp2_i32, s->tmp3_i32);
gen_bpt_io(s, s->tmp2_i32, ot);
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
gen_io_end();
gen_io_start();
}
tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);
- tcg_gen_trunc_tl_i32(cpu_tmp3_i32, s->T1);
- gen_helper_out_func(ot, s->tmp2_i32, cpu_tmp3_i32);
+ tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1);
+ gen_helper_out_func(ot, s->tmp2_i32, s->tmp3_i32);
gen_bpt_io(s, s->tmp2_i32, ot);
if (tb_cflags(s->base.tb) & CF_USE_ICOUNT) {
gen_io_end();
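
With the temporary in the DisasContext, the converted OUT paths above all follow the same shape; condensed here with comments for orientation. These are the QEMU-internal calls taken from the hunks above, not a standalone program, and the port setup differs slightly per variant (masked EDX, immediate, or T0).

    tcg_gen_trunc_tl_i32(s->tmp2_i32, s->T0);         /* port number */
    tcg_gen_trunc_tl_i32(s->tmp3_i32, s->T1);         /* value to write */
    gen_helper_out_func(ot, s->tmp2_i32, s->tmp3_i32);
    gen_bpt_io(s, s->tmp2_i32, ot);                   /* I/O breakpoint check on the port */
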
dc->tmp0 = tcg_temp_new();
cpu_tmp1_i64 = tcg_temp_new_i64();
dc->tmp2_i32 = tcg_temp_new_i32();
- cpu_tmp3_i32 = tcg_temp_new_i32();
+ dc->tmp3_i32 = tcg_temp_new_i32();
dc->tmp4 = tcg_temp_new();
dc->ptr0 = tcg_temp_new_ptr();
dc->ptr1 = tcg_temp_new_ptr();