     }
 }
-#define TCG_CT_CONST_IS32 0x100
-#define TCG_CT_CONST_AIMM 0x200
-#define TCG_CT_CONST_LIMM 0x400
-#define TCG_CT_CONST_ZERO 0x800
-#define TCG_CT_CONST_MONE 0x1000
+#define TCG_CT_CONST_AIMM 0x100
+#define TCG_CT_CONST_LIMM 0x200
+#define TCG_CT_CONST_ZERO 0x400
+#define TCG_CT_CONST_MONE 0x800
 /* parse target specific constraints */
 static int target_parse_constraint(TCGArgConstraint *ct,
         tcg_regset_reset_reg(ct->u.regs, TCG_REG_X3);
 #endif
         break;
-    case 'w': /* The operand should be considered 32-bit. */
-        ct->ct |= TCG_CT_CONST_IS32;
-        break;
     case 'A': /* Valid for arithmetic immediate (positive or negative). */
         ct->ct |= TCG_CT_CONST_AIMM;
         break;
     return (val & (val - 1)) == 0;
 }
-static int tcg_target_const_match(tcg_target_long val,
+static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                   const TCGArgConstraint *arg_ct)
 {
     int ct = arg_ct->ct;
     if (ct & TCG_CT_CONST) {
         return 1;
     }
-    if (ct & TCG_CT_CONST_IS32) {
+    if (type == TCG_TYPE_I32) {
         val = (int32_t)val;
     }
     if ((ct & TCG_CT_CONST_AIMM) && (is_aimm(val) || is_aimm(-val))) {
     }
     tcg_out_insn_3503(s, insn, ext, rh, ah, bh);
-    if (rl != orig_rl) {
-        tcg_out_movr(s, ext, orig_rl, rl);
-    }
+    tcg_out_mov(s, ext, orig_rl, rl);
 }
 #ifdef CONFIG_SOFTMMU
     reloc_pc19(lb->label_ptr[0], (intptr_t)s->code_ptr);
-    tcg_out_movr(s, TCG_TYPE_I64, TCG_REG_X0, TCG_AREG0);
-    tcg_out_movr(s, TARGET_LONG_BITS == 64, TCG_REG_X1, lb->addrlo_reg);
+    tcg_out_mov(s, TCG_TYPE_I64, TCG_REG_X0, TCG_AREG0);
+    tcg_out_mov(s, TARGET_LONG_BITS == 64, TCG_REG_X1, lb->addrlo_reg);
     tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_X2, lb->mem_index);
     tcg_out_adr(s, TCG_REG_X3, (intptr_t)lb->raddr);
     tcg_out_call(s, (intptr_t)qemu_ld_helpers[opc & ~MO_SIGN]);
     if (opc & MO_SIGN) {
         tcg_out_sxt(s, TCG_TYPE_I64, size, lb->datalo_reg, TCG_REG_X0);
     } else {
-        tcg_out_movr(s, TCG_TYPE_I64, lb->datalo_reg, TCG_REG_X0);
+        tcg_out_mov(s, size == MO_64, lb->datalo_reg, TCG_REG_X0);
     }
     tcg_out_goto(s, (intptr_t)lb->raddr);
     reloc_pc19(lb->label_ptr[0], (intptr_t)s->code_ptr);
-    tcg_out_movr(s, TCG_TYPE_I64, TCG_REG_X0, TCG_AREG0);
-    tcg_out_movr(s, TARGET_LONG_BITS == 64, TCG_REG_X1, lb->addrlo_reg);
-    tcg_out_movr(s, size == MO_64, TCG_REG_X2, lb->datalo_reg);
+    tcg_out_mov(s, TCG_TYPE_I64, TCG_REG_X0, TCG_AREG0);
+    tcg_out_mov(s, TARGET_LONG_BITS == 64, TCG_REG_X1, lb->addrlo_reg);
+    tcg_out_mov(s, size == MO_64, TCG_REG_X2, lb->datalo_reg);
     tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_X3, lb->mem_index);
     tcg_out_adr(s, TCG_REG_X4, (intptr_t)lb->raddr);
     tcg_out_call(s, (intptr_t)qemu_st_helpers[opc]);
     tcg_out_goto(s, (intptr_t)lb->raddr);
 }
-static void add_qemu_ldst_label(TCGContext *s, int is_ld, TCGMemOp opc,
+static void add_qemu_ldst_label(TCGContext *s, bool is_ld, TCGMemOp opc,
                                 TCGReg data_reg, TCGReg addr_reg,
                                 int mem_index,
                                 uint8_t *raddr, uint8_t *label_ptr)
     tcg_out_tlb_read(s, addr_reg, s_bits, &label_ptr, mem_index, 1);
     tcg_out_qemu_ld_direct(s, memop, data_reg, addr_reg, TCG_REG_X1);
-    add_qemu_ldst_label(s, 1, memop, data_reg, addr_reg,
+    add_qemu_ldst_label(s, true, memop, data_reg, addr_reg,
                         mem_index, s->code_ptr, label_ptr);
 #else /* !CONFIG_SOFTMMU */
     tcg_out_qemu_ld_direct(s, memop, data_reg, addr_reg,
     tcg_out_tlb_read(s, addr_reg, s_bits, &label_ptr, mem_index, 0);
     tcg_out_qemu_st_direct(s, memop, data_reg, addr_reg, TCG_REG_X1);
-    add_qemu_ldst_label(s, 0, memop, data_reg, addr_reg,
+    add_qemu_ldst_label(s, false, memop, data_reg, addr_reg,
                         mem_index, s->code_ptr, label_ptr);
 #else /* !CONFIG_SOFTMMU */
     tcg_out_qemu_st_direct(s, memop, data_reg, addr_reg,
     { INDEX_op_st32_i64, { "rZ", "r" } },
     { INDEX_op_st_i64, { "rZ", "r" } },
-    { INDEX_op_add_i32, { "r", "r", "rwA" } },
+    { INDEX_op_add_i32, { "r", "r", "rA" } },
     { INDEX_op_add_i64, { "r", "r", "rA" } },
-    { INDEX_op_sub_i32, { "r", "r", "rwA" } },
+    { INDEX_op_sub_i32, { "r", "r", "rA" } },
     { INDEX_op_sub_i64, { "r", "r", "rA" } },
     { INDEX_op_mul_i32, { "r", "r", "r" } },
     { INDEX_op_mul_i64, { "r", "r", "r" } },
     { INDEX_op_rem_i64, { "r", "r", "r" } },
     { INDEX_op_remu_i32, { "r", "r", "r" } },
     { INDEX_op_remu_i64, { "r", "r", "r" } },
-    { INDEX_op_and_i32, { "r", "r", "rwL" } },
+    { INDEX_op_and_i32, { "r", "r", "rL" } },
     { INDEX_op_and_i64, { "r", "r", "rL" } },
-    { INDEX_op_or_i32, { "r", "r", "rwL" } },
+    { INDEX_op_or_i32, { "r", "r", "rL" } },
     { INDEX_op_or_i64, { "r", "r", "rL" } },
-    { INDEX_op_xor_i32, { "r", "r", "rwL" } },
+    { INDEX_op_xor_i32, { "r", "r", "rL" } },
     { INDEX_op_xor_i64, { "r", "r", "rL" } },
-    { INDEX_op_andc_i32, { "r", "r", "rwL" } },
+    { INDEX_op_andc_i32, { "r", "r", "rL" } },
     { INDEX_op_andc_i64, { "r", "r", "rL" } },
-    { INDEX_op_orc_i32, { "r", "r", "rwL" } },
+    { INDEX_op_orc_i32, { "r", "r", "rL" } },
     { INDEX_op_orc_i64, { "r", "r", "rL" } },
-    { INDEX_op_eqv_i32, { "r", "r", "rwL" } },
+    { INDEX_op_eqv_i32, { "r", "r", "rL" } },
     { INDEX_op_eqv_i64, { "r", "r", "rL" } },
     { INDEX_op_neg_i32, { "r", "r" } },
     { INDEX_op_rotl_i64, { "r", "r", "ri" } },
     { INDEX_op_rotr_i64, { "r", "r", "ri" } },
-    { INDEX_op_brcond_i32, { "r", "rwA" } },
+    { INDEX_op_brcond_i32, { "r", "rA" } },
     { INDEX_op_brcond_i64, { "r", "rA" } },
-    { INDEX_op_setcond_i32, { "r", "r", "rwA" } },
+    { INDEX_op_setcond_i32, { "r", "r", "rA" } },
     { INDEX_op_setcond_i64, { "r", "r", "rA" } },
-    { INDEX_op_movcond_i32, { "r", "r", "rwA", "rZ", "rZ" } },
+    { INDEX_op_movcond_i32, { "r", "r", "rA", "rZ", "rZ" } },
     { INDEX_op_movcond_i64, { "r", "r", "rA", "rZ", "rZ" } },
     { INDEX_op_qemu_ld_i32, { "r", "l" } },
     { INDEX_op_deposit_i32, { "r", "0", "rZ" } },
     { INDEX_op_deposit_i64, { "r", "0", "rZ" } },
-    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rwA", "rwMZ" } },
+    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rA", "rMZ" } },
     { INDEX_op_add2_i64, { "r", "r", "rZ", "rZ", "rA", "rMZ" } },
-    { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rwA", "rwMZ" } },
+    { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rA", "rMZ" } },
     { INDEX_op_sub2_i64, { "r", "r", "rZ", "rZ", "rA", "rMZ" } },
     { INDEX_op_muluh_i64, { "r", "r", "r" } },