int condexec_cond;
int thumb;
int sctlr_b;
- TCGMemOp be_data;
+ MemOp be_data;
#if !defined(CONFIG_USER_ONLY)
int user;
#endif
* ie A64 LDX*, LDAX*, A32/T32 LDREX*, LDAEX*.
*/
bool is_ldex;
+ /* True if AccType_UNPRIV should be used for LDTR et al */
+ bool unpriv;
/* True if v8.3-PAuth is active. */
bool pauth_active;
/* True with v8.5-BTI and SCTLR_ELx.BT* set. */
bool bt;
+ /* True if any CP15 access is trapped by HSTR_EL2 */
+ bool hstr_active;
/*
* >= 0, a copy of PSTATE.BTYPE, which will be 0 without v8.5-BTI.
* < 0, set by the current instruction.
bool value_global;
} DisasCompare;
-void unallocated_encoding(DisasContext *s);
-
/* Share the TCG temporaries common between 32 and 64 bit modes. */
extern TCGv_i32 cpu_NF, cpu_ZF, cpu_CF, cpu_VF;
extern TCGv_i64 cpu_exclusive_addr;
* exceptions can only be routed to ELs above 1, so we target the higher of
* 1 or the current EL.
*/
- return (s->mmu_idx == ARMMMUIdx_S1SE0 && s->secure_routed_to_el3)
+ return (s->mmu_idx == ARMMMUIdx_SE10_0 && s->secure_routed_to_el3)
? 3 : MAX(1, s->current_el);
}
extern const GVecGen3 mla_op[4];
extern const GVecGen3 mls_op[4];
extern const GVecGen3 cmtst_op[4];
+extern const GVecGen3 sshl_op[4];
+extern const GVecGen3 ushl_op[4];
extern const GVecGen2i ssra_op[4];
extern const GVecGen2i usra_op[4];
extern const GVecGen2i sri_op[4];
extern const GVecGen4 uqsub_op[4];
extern const GVecGen4 sqsub_op[4];
void gen_cmtst_i64(TCGv_i64 d, TCGv_i64 a, TCGv_i64 b);
+void gen_ushl_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b);
+void gen_sshl_i32(TCGv_i32 d, TCGv_i32 a, TCGv_i32 b);
+void gen_ushl_i64(TCGv_i64 d, TCGv_i64 a, TCGv_i64 b);
+void gen_sshl_i64(TCGv_i64 d, TCGv_i64 a, TCGv_i64 b);
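For context, a minimal sketch of how a translator typically consumes the exported sshl_op/ushl_op tables through the generic gvec expander. The register-offset helpers (vec_full_reg_offset, vec_full_reg_size) and the decode variables (rd, rn, rm, size, u, is_q) are assumed from the A64 translator and are illustrative only, not part of this hunk:

    /* Hypothetical AdvSIMD three-same expansion: pick the signed or
     * unsigned variable-shift expander for the given element size. */
    tcg_gen_gvec_3(vec_full_reg_offset(s, rd),
                   vec_full_reg_offset(s, rn),
                   vec_full_reg_offset(s, rm),
                   is_q ? 16 : 8, vec_full_reg_size(s),
                   u ? &ushl_op[size] : &sshl_op[size]);

The gvec machinery then expands the operation using whichever callbacks the chosen table registers; the per-element gen_ushl_*/gen_sshl_* helpers declared above can also be called directly, e.g. for scalar forms of the same instructions.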
/*
* Forward to the isar_feature_* tests given a DisasContext pointer.