1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
/* A minimal symbol that names a Thumb function is flagged as
   "special": target flag 1 of the msymbol records the Thumb bit.

   MSYMBOL_SET_SPECIAL marks the symbol as Thumb;
   MSYMBOL_IS_SPECIAL queries that mark.  */

#define MSYMBOL_SET_SPECIAL(sym) MSYMBOL_TARGET_FLAG_1 (sym) = 1
#define MSYMBOL_IS_SPECIAL(sym) MSYMBOL_TARGET_FLAG_1 (sym)
/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

/* One mapping symbol, marking where a section switches between ARM
   code, Thumb code and data.  Per its use in arm_find_mapping_symbol
   it has at least a `value' (section-relative address) and a `type'
   member; the member list is elided in this listing.  */
struct arm_mapping_symbol

typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

/* Per-objfile mapping-symbol data: one vector of mapping symbols per
   BFD section, indexed by section index (see arm_find_mapping_symbol).
   Remaining members, if any, are elided in this listing.  */
struct arm_per_objfile

  VEC(arm_mapping_symbol_s) **section_maps;
/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.
   (Initializer elided in this listing.)  */
static const char *const fp_model_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
/* User-visible spelling of the current FP model setting.  */
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.
   (Initializer elided in this listing.)  */
static const char *const arm_abi_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  (Initializer elided in this
   listing.)  */
static const char *const arm_mode_strings[] =

/* Values are "auto", "arm" or "thumb", as tested by strcmp in
   arm_pc_is_thumb below.  */
static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode has been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;
/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  (The alias entries themselves are
   elided in this listing; only the category comments remain.)  */

} arm_register_aliases[] = {
  /* Basic register numbers.  */
  /* Synonyms (argument and variable registers).  */
  /* Other platform-specific names for r9.  */
  /* Names used by GCC (not listed in the ARM EABI).  */
  /* A special name from the older ATPCS.  */

/* Canonical GDB names for the core ARM register set; the trailing
   comments give each entry's GDB register number.  */
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */
/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc(char *, int,
					struct cmd_list_element *);
static void set_disassembly_style (void);

/* FPA extended-format conversion helpers.
   NOTE(review): the trailing parameters of these two prototypes are
   elided in this listing.  */
static void convert_from_extended (const struct floatformat *, const void *,
static void convert_to_extended (const struct floatformat *, void *,

/* Forward declarations for the NEON quad-register accessors defined
   later in the file.  */
static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

/* Size in bytes of the Thumb instruction whose first halfword is
   INST1; per its use in thumb_analyze_prologue, a result of 4 denotes
   a 32-bit Thumb-2 encoding.  */
static int thumb_insn_size (unsigned short inst1);
/* Cached results of prologue analysis, shared by the ARM frame
   unwinders.  NOTE(review): several member declarations are elided in
   this listing; only their comments and saved_regs remain.  */
struct arm_prologue_cache

  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  /* The register used to hold the frame pointer for this frame.  */

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;

/* Forward declaration; the ARM-mode prologue scanner is defined later
   in the file.  */
static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);
/* Architecture version for displaced stepping.  This affects the
   behaviour of certain instructions, and really should not be
   hard-wired.  */
#define DISPLACED_STEPPING_ARCH_VERSION		5
/* Addresses of Thumb code carry the Thumb bit: bit 0 is set.  These
   helpers test, set, and strip that bit.  */
#define IS_THUMB_ADDR(a)	((a) & 1)
#define MAKE_THUMB_ADDR(a)	((a) | 1)
#define UNMAKE_THUMB_ADDR(a)	((a) & ~1)
/* Set to true if the 32-bit mode is in use.  */

/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.
   NOTE(review): M-profile (is_m) targets take a different branch; the
   actual return statements are elided in this listing.  */

arm_psr_thumb_bit (struct gdbarch *gdbarch)

  if (gdbarch_tdep (gdbarch)->is_m)

/* Determine if FRAME is executing in Thumb mode.  (Braces and the
   cpsr declaration are elided in this listing.)  */

arm_frame_is_thumb (struct frame_info *frame)

  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  /* Nonzero T bit means this frame is executing Thumb code.  */
  return (cpsr & t_bit) != 0;
/* Callback for VEC_lower_bound: order mapping symbols by their
   section-relative address.  */

arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
			     const struct arm_mapping_symbol *rhs)

  return lhs->value < rhs->value;
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.
   NOTE(review): braces and some declarations/guards are elided in
   this listing.  */

arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)

  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);

      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Key on the section-relative offset of MEMADDR.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),

      data = objfile_data (sec->objfile, arm_objfile_data_key);

      /* Look up the sorted per-section vector of mapping symbols.  */
      map = data->section_maps[sec->the_bfd_section->index];
      if (!VEC_empty (arm_mapping_symbol_s, map))
	  struct arm_mapping_symbol *map_sym;

	  idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				 arm_compare_mapping_symbols);

	  /* VEC_lower_bound finds the earliest ordered insertion
	     point.  If the following symbol starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (idx < VEC_length (arm_mapping_symbol_s, map))
	      map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
	      if (map_sym->value == map_key.value)
		  *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;

	  /* Otherwise fall back to the preceding symbol, which covers
	     MEMADDR.  */
	  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
	  *start = map_sym->value + obj_section_addr (sec);
	  return map_sym->type;
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.
   NOTE(review): braces, some declarations (sym, type), and the
   individual return statements for several checks are elided in this
   listing.  */

arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)

  struct bound_minimal_symbol sym;

  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */

      fprintf_unfiltered (gdb_stdlog,
			  "displaced: check mode of %.8lx instead of %.8lx\n",
			  (unsigned long) dsc->insn_addr,
			  (unsigned long) memaddr);
      memaddr = dsc->insn_addr;

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
  if (strcmp (arm_force_mode_string, "thumb") == 0)

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
/* Remove useless bits from addresses in a running program.
   NOTE(review): braces and the condition selecting between the two
   visible return statements are elided in this listing.  */

arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)

  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && (val & 0xfffffff0) == 0xfffffff0)

    /* Thumb addresses: drop the Thumb bit.  */
    return UNMAKE_THUMB_ADDR (val);

  /* ARM addresses: word-align and drop the high mode bits.  */
  return (val & 0x03fffffc);
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.
   NOTE(review): braces and the return statements after each name
   check are elided in this listing.  */

skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)

  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)

      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
      if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)

      /* Internal functions related to thread-local storage.  */
      if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
      if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)

  /* If we run against a stripped glibc, we may be unable to identify
     special functions by name.  Check for one important case,
     __aeabi_read_tp, by comparing the *code* against the default
     implementation (this is hand-written ARM assembler in glibc).  */

      && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	 == 0xe3e00a0f /* mov r0, #0xffff0fff */
      && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	 == 0xe240f01f) /* sub pc, r0, #31 */
/* Bit-field helpers for decoding ARM/Thumb instructions.  */

/* A mask covering bit positions 0..HI inclusive.  */
#define submask(hi) ((1L << ((hi) + 1)) - 1)

/* The single bit of VAL at position POS (0 or 1).  */
#define bit(val, pos) (((val) >> (pos)) & 1)

/* Bits FIRST..LAST (inclusive) of VAL, right-justified.  */
#define bits(val, first, last) (((val) >> (first)) & submask ((last) - (first)))

/* Bits FIRST..LAST of VAL as a signed long, treating bit LAST as the
   sign bit.  */
#define sbits(val, first, last) \
  ((long) (bits (val, first, last) \
	   | ((long) bit (val, last) * ~ submask ((last) - (first)))))

/* Destination of an ARM-mode branch at ADDR: the signed 24-bit offset
   in INSN, scaled by 4, plus the 8-byte pipeline offset.  */
#define BranchDest(addr, insn) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (insn, 0, 23) << 2)))

/* Immediate of a movw/movt in Thumb encoding T; OP1 and OP2 are the
   first and second 16-bit halfwords of the instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(op1, op2) \
  ((bits ((op1), 0, 3) << 12) \
   | (bits ((op1), 10, 10) << 11) \
   | (bits ((op2), 12, 14) << 8) \
   | bits ((op2), 0, 7))

/* Immediate of a movw/movt in ARM encoding A; OP is the 32-bit
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(op) \
  ((bits ((op), 16, 19) << 12) \
   | bits ((op), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   NOTE(review): the dispatch (switch/if) selecting among the return
   expressions below is elided in this listing; only the return
   expressions themselves are shown.  */

thumb_expand_immediate (unsigned int imm)

  unsigned int count = imm >> 7;

      /* Byte duplicated into halves: 0x00XY00XY.  */
      return (imm & 0xff) | ((imm & 0xff) << 16);

      /* Byte duplicated into the high bytes: 0xXY00XY00.  */
      return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);

      /* Byte duplicated into all four positions: 0xXYXYXYXY.  */
      return (imm & 0xff) | ((imm & 0xff) << 8)
	     | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);

  /* Otherwise: 8-bit value 1bcdefgh shifted left by (32 - COUNT).  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.
   NOTE(review): the `return 1;' body of each test and the final
   `return 0;' are elided in this listing.  */

thumb_instruction_changes_pc (unsigned short inst)

  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.
   NOTE(review): braces and the return statements inside each branch
   are elided in this listing.  */

thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)

  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)

      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)

      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)

	  /* SUBS PC, LR, #imm8.  */

      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)

	  /* Conditional branch.  */

  if ((inst1 & 0xfe50) == 0xe810)

      /* Load multiple or RFE.  */

      if (bit (inst1, 7) && !bit (inst1, 8))

      else if (!bit (inst1, 7) && bit (inst1, 8))

      else if (bit (inst1, 7) && bit (inst1, 8))

      else if (!bit (inst1, 7) && !bit (inst1, 8))

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)

      /* MOV PC or MOVS PC.  */

  /* Loads whose destination register field selects the PC.  */
  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)

      if (bits (inst1, 0, 3) == 15)

      if ((inst2 & 0x0fc0) == 0x0000)

  /* NOTE(review): these two appear to match the Thumb-2 table-branch
     encodings — confirm against the ARM ARM.  */
  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

thumb_instruction_restores_sp (unsigned short insn)

  /* Any of: a direct SP restore from the frame pointer, an immediate
     SP adjustment, or a register pop.  */
  return (insn == 0x46bd  /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000  /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.

   NOTE(review): this listing elides many structural lines of the
   function — braces, several local declarations (i, insn, regs[],
   offset, mask, regno, addr, nextpc), `break' statements, and some
   statement continuations.  The code lines below are reproduced
   verbatim from what is visible.  */

thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)

  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);

  struct pv_area *stack;
  struct cleanup *back_to;

  CORE_ADDR unrecognized_pc = 0;

  /* Start with every register holding its symbolic entry value.  */
  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
  back_to = make_cleanup_free_pv_area (stack);

  /* Walk the instruction stream, simulating each prologue-relevant
     instruction on the symbolic register/stack state.  */
  while (start < limit)

      insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */

	  if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);

	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))

		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
		pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);

      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */

	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],

      else if (thumb_instruction_restores_sp (insn))

	  /* Don't scan past the epilogue.  */

      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))

	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);

      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */

	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];

      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */

	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (pv_area_store_would_trash (stack, addr))

	  pv_area_store (stack, addr, 4, regs[regno]);

      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */

	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (pv_area_store_would_trash (stack, addr))

	  pv_area_store (stack, addr, 4, regs[rd]);

      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */

      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */

      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */

      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */

      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are
	   necessary.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */

	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);

      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */

	  unsigned short inst2;

	  inst2 = read_memory_unsigned_integer (start + 2, 2,
						byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)

	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */

	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      /* Reassemble the branch offset from its scattered
		 fields, then undo the J1/J2 inversion.  */
	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))

	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!}, { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))

	      pv_t addr = regs[bits (insn, 0, 3)];

	      if (pv_area_store_would_trash (stack, addr))

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))

		    addr = pv_add_constant (addr, -4);
		    pv_area_store (stack, addr, 4, regs[regno]);

	      regs[bits (insn, 0, 3)] = addr;

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2, [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))

	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno1]);
	      pv_area_store (stack, pv_add_constant (addr, 4),

	      regs[bits (insn, 0, 3)] = addr;

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))

	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno]);

	      regs[bits (insn, 0, 3)] = addr;

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))

	      int regno = bits (inst2, 12, 15);

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (pv_area_store_would_trash (stack, addr))

	      pv_area_store (stack, addr, 4, regs[regno]);

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!], { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2, [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */

	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)

	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)

	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)

	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)

	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */

	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */

		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);

	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)

	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */

	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (inst2, 0, 11);
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */

	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (inst2, 0, 7) << 2;
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);

	  else if (thumb2_instruction_changes_pc (insn, inst2))

	      /* Don't scan past anything that might change control flow.  */

	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;

      else if (thumb_instruction_changes_pc (insn))

	  /* Don't scan past anything that might change control flow.  */

	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;

    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

      /* No cache to fill in: just release the symbolic stack area.  */
      do_cleanups (back_to);
      return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))

      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;

  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))

      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;

      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;

  /* Record where each register was saved on the stack.  */
  for (i = 0; i < 16; i++)
    if (pv_area_find_reg (stack, gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  do_cleanups (back_to);
  return unrecognized_pc;
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *DESTREG, and set the size of
   instructions for loading symbol in *OFFSET.  Return 0 if instructions
   are not recognized.
   NOTE(review): braces, the is_thumb dispatch, the *offset
   assignments, the ARM-mode `insn' declaration, and the return
   statements are elided in this listing.  */

arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)

  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

      /* Thumb-mode variants.  */
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */

	  *destreg = bits (insn1, 8, 10);

	  /* PC-relative literal load; the pool slot holds the
	     guard's address.  */
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */

	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* Re-read INSN1/INSN2 as the following instruction pair
	     (assignment targets elided in this listing).  */
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)

	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);

	  /* Combine the movw/movt halves.  */
	  address = (high << 16 | low);

      /* ARM-mode variants (target of elided assignment to `insn').  */
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */

	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);

      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */

	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  /* Re-read the following instruction (assignment target
	     elided in this listing).  */
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */

	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);

	  /* Combine the movw/movt halves.  */
	  address = (high << 16 | low);
1267 /* Try to skip a sequence of instructions used for stack protector. If PC
1268 points to the first instruction of this sequence, return the address of
1269 first instruction after this sequence, otherwise, return original PC.
1271 On arm, this sequence of instructions is composed of mainly three steps,
1272 Step 1: load symbol __stack_chk_guard,
1273 Step 2: load from address of __stack_chk_guard,
1274 Step 3: store it to somewhere else.
1276 Usually, instructions on step 2 and step 3 are the same on various ARM
1277 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1278 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1279 instructions in step 1 vary from different ARM architectures. On ARMv7,
1282 movw Rn, #:lower16:__stack_chk_guard
1283 movt Rn, #:upper16:__stack_chk_guard
1290 .word __stack_chk_guard
1292 Since ldr/str is a very popular instruction, we can't use them as
1293 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1294 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1295 stripped, as the 'fingerprint' of a stack protector code sequence. */
/* NOTE(review): source appears elided here (braces/return statements
   missing from this view); code kept byte-identical.  */
1298 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1300 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1301 unsigned int basereg;
1302 struct bound_minimal_symbol stack_chk_guard;
1304 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1307 /* Try to parse the instructions in Step 1.  */
1308 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1313 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1314 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1315 Otherwise, this sequence cannot be for stack protector. */
1316 if (stack_chk_guard.minsym == NULL
1317 || strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1318 "__stack_chk_guard",
1319 strlen ("__stack_chk_guard")) != 0)
/* Thumb-mode verification of steps 2 and 3 (16-bit encodings).  */
1324 unsigned int destreg;
1326 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1328 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1329 if ((insn & 0xf800) != 0x6800)
/* The load must be based on the register that received the guard
   address in step 1.  */
1331 if (bits (insn, 3, 5) != basereg)
1333 destreg = bits (insn, 0, 2);
1335 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1336 byte_order_for_code);
1337 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1338 if ((insn & 0xf800) != 0x6000)
1340 if (destreg != bits (insn, 0, 2))
/* ARM-mode verification of steps 2 and 3 (32-bit A1 encodings).  */
1345 unsigned int destreg;
1347 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1349 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1350 if ((insn & 0x0e500000) != 0x04100000)
1352 if (bits (insn, 16, 19) != basereg)
1354 destreg = bits (insn, 12, 15);
1355 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1356 insn = read_memory_unsigned_integer (pc + offset + 4,
1357 4, byte_order_for_code);
1358 if ((insn & 0x0e500000) != 0x04000000)
1360 if (bits (insn, 12, 15) != destreg)
1363 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
/* on ARM mode (4 bytes per instruction).  */
1366 return pc + offset + 4;
1368 return pc + offset + 8;
1371 /* Advance the PC across any function entry prologue instructions to
1372 reach some "real" code.
1374 The APCS (ARM Procedure Call Standard) defines the following
1378 [stmfd sp!, {a1,a2,a3,a4}]
1379 stmfd sp!, {...,fp,ip,lr,pc}
1380 [stfe f7, [sp, #-12]!]
1381 [stfe f6, [sp, #-12]!]
1382 [stfe f5, [sp, #-12]!]
1383 [stfe f4, [sp, #-12]!]
1384 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
/* NOTE(review): chunk appears elided (braces and some statements not
   visible); code kept byte-identical.  */
1387 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1389 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1391 CORE_ADDR func_addr, limit_pc;
1393 /* See if we can determine the end of the prologue via the symbol table.
1394 If so, then return either PC, or the PC after the prologue, whichever
1396 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1398 CORE_ADDR post_prologue_pc
1399 = skip_prologue_using_sal (gdbarch, func_addr);
1400 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
/* Also step over any stack-protector guard-load sequence that
   follows the line-table prologue end.  */
1402 if (post_prologue_pc)
1404 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1407 /* GCC always emits a line note before the prologue and another
1408 one after, even if the two are at the same address or on the
1409 same line. Take advantage of this so that we do not need to
1410 know every instruction that might appear in the prologue. We
1411 will have producer information for most binaries; if it is
1412 missing (e.g. for -gstabs), assume the GNU tools. */
1413 if (post_prologue_pc
1415 || COMPUNIT_PRODUCER (cust) == NULL
1416 || strncmp (COMPUNIT_PRODUCER (cust), "GNU ",
1417 sizeof ("GNU ") - 1) == 0
1418 || strncmp (COMPUNIT_PRODUCER (cust), "clang ",
1419 sizeof ("clang ") - 1) == 0))
1420 return post_prologue_pc;
1422 if (post_prologue_pc != 0)
1424 CORE_ADDR analyzed_limit;
1426 /* For non-GCC compilers, make sure the entire line is an
1427 acceptable prologue; GDB will round this function's
1428 return value up to the end of the following line so we
1429 can not skip just part of a line (and we do not want to).
1431 RealView does not treat the prologue specially, but does
1432 associate prologue code with the opening brace; so this
1433 lets us skip the first line if we think it is the opening
1435 if (arm_pc_is_thumb (gdbarch, func_addr))
1436 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1437 post_prologue_pc, NULL);
1439 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1440 post_prologue_pc, NULL);
1442 if (analyzed_limit != post_prologue_pc)
1445 return post_prologue_pc;
1449 /* Can't determine prologue from the symbol table, need to examine
1452 /* Find an upper limit on the function prologue using the debug
1453 information. If the debug information could not be used to provide
1454 that bound, then use an arbitrary large number as the upper bound. */
1455 /* Like arm_scan_prologue, stop no later than pc + 64. */
1456 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1458 limit_pc = pc + 64; /* Magic. */
1461 /* Check if this is Thumb code. */
1462 if (arm_pc_is_thumb (gdbarch, pc))
1463 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1465 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1469 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1470 This function decodes a Thumb function prologue to determine:
1471 1) the size of the stack frame
1472 2) which registers are saved on it
1473 3) the offsets of saved regs
1474 4) the offset from the stack pointer to the frame pointer
1476 A typical Thumb function prologue would create this stack frame
1477 (offsets relative to FP)
1478 old SP -> 24 stack parameters
1481 R7 -> 0 local variables (16 bytes)
1482 SP -> -12 additional stack space (12 bytes)
1483 The frame size would thus be 36 bytes, and the frame offset would be
1484 12 bytes. The frame register is R7.
1486 The comments for thumb_skip_prolog() describe the algorithm we use
1487 to detect the end of the prolog. */
/* NOTE(review): chunk appears elided (braces not visible); code kept
   byte-identical.  */
1491 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1492 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1494 CORE_ADDR prologue_start;
1495 CORE_ADDR prologue_end;
1497 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1500 /* See comment in arm_scan_prologue for an explanation of
/* the 64-byte scan limit below.  */
1502 if (prologue_end > prologue_start + 64)
1504 prologue_end = prologue_start + 64;
1508 /* We're in the boondocks: we have no idea where the start of the
/* function is, so no prologue range can be derived from symbols.  */
/* Never scan past the PC we are unwinding from.  */
1512 prologue_end = min (prologue_end, prev_pc);
1514 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1517 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
/* NOTE(review): chunk appears elided (case labels, braces and return
   statements not visible); code kept byte-identical.  Decoding follows
   the A32 opcode space split on bits 25-27.  */
1520 arm_instruction_changes_pc (uint32_t this_instr)
1522 if (bits (this_instr, 28, 31) == INST_NV)
1523 /* Unconditional instructions. */
1524 switch (bits (this_instr, 24, 27))
1528 /* Branch with Link and change to Thumb. */
1533 /* Coprocessor register transfer. */
1534 if (bits (this_instr, 12, 15) == 15)
1535 error (_("Invalid update to pc in instruction"));
1541 switch (bits (this_instr, 25, 27))
1544 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1546 /* Multiplies and extra load/stores. */
1547 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1548 /* Neither multiplies nor extension load/stores are allowed
/* to modify PC.  */
1552 /* Otherwise, miscellaneous instructions. */
1554 /* BX <reg>, BXJ <reg>, BLX <reg> */
1555 if (bits (this_instr, 4, 27) == 0x12fff1
1556 || bits (this_instr, 4, 27) == 0x12fff2
1557 || bits (this_instr, 4, 27) == 0x12fff3)
1560 /* Other miscellaneous instructions are unpredictable if they
/* target the PC, so treat them as not changing it.  */
1564 /* Data processing instruction. Fall through. */
/* Data processing with Rd == PC changes control flow.  */
1567 if (bits (this_instr, 12, 15) == 15)
1574 /* Media instructions and architecturally undefined instructions. */
1575 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
/* Single load/store: only a load (bit 20) into PC changes flow.  */
1579 if (bit (this_instr, 20) == 0)
1583 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1589 /* Load/store multiple. */
1590 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1596 /* Branch and branch with link. */
1601 /* Coprocessor transfers or SWIs can not affect PC. */
1605 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1609 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
/* otherwise.  Conditional (non-NV) instructions only; each mask below
   matches one epilogue idiom that writes SP.  */
1613 arm_instruction_restores_sp (unsigned int insn)
1615 if (bits (insn, 28, 31) != INST_NV)
1617 if ((insn & 0x0df0f000) == 0x0080d000
1618 /* ADD SP (register or immediate). */
1619 || (insn & 0x0df0f000) == 0x0040d000
1620 /* SUB SP (register or immediate). */
1621 || (insn & 0x0ffffff0) == 0x01a0d000
/* MOV SP, <reg>.  */
1623 || (insn & 0x0fff0000) == 0x08bd0000
/* LDM SP!, {...} (pop).  */
1625 || (insn & 0x0fff0000) == 0x049d0000)
1626 /* POP of a single register. */
1633 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1634 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1635 fill it in. Return the first address not recognized as a prologue
1638 We recognize all the instructions typically found in ARM prologues,
1639 plus harmless instructions which can be skipped (either for analysis
1640 purposes, or a more restrictive set that can be skipped when finding
1641 the end of the prologue). */
/* NOTE(review): chunk appears elided (braces, some statements and the
   loop increment are not visible); code kept byte-identical.  */
1644 arm_analyze_prologue (struct gdbarch *gdbarch,
1645 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1646 struct arm_prologue_cache *cache)
1648 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1649 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1651 CORE_ADDR offset, current_pc;
1652 pv_t regs[ARM_FPS_REGNUM];
1653 struct pv_area *stack;
1654 struct cleanup *back_to;
1655 CORE_ADDR unrecognized_pc = 0;
1657 /* Search the prologue looking for instructions that set up the
1658 frame pointer, adjust the stack pointer, and save registers.
1660 Be careful, however, and if it doesn't look like a prologue,
1661 don't try to scan it. If, for instance, a frameless function
1662 begins with stmfd sp!, then we will tell ourselves there is
1663 a frame, which will confuse stack traceback, as well as "finish"
1664 and other operations that rely on a knowledge of the stack
/* Symbolically execute the prologue: each register starts out holding
   its own entry value.  */
1667 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1668 regs[regno] = pv_register (regno, 0);
1669 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1670 back_to = make_cleanup_free_pv_area (stack);
1672 for (current_pc = prologue_start;
1673 current_pc < prologue_end;
1677 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1679 if (insn == 0xe1a0c00d) /* mov ip, sp */
1681 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1684 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1687 unsigned imm = insn & 0xff; /* immediate value */
1688 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1689 int rd = bits (insn, 12, 15);
/* Decode the A32 modified-immediate: imm8 rotated right by 2*rot.  */
1690 imm = (imm >> rot) | (imm << (32 - rot));
1691 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1694 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1695 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1697 unsigned imm = insn & 0xff; /* immediate value */
1698 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1699 int rd = bits (insn, 12, 15);
1700 imm = (imm >> rot) | (imm << (32 - rot));
1701 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1704 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
/* [sp, #-4]! -- push of a single register.  */
1707 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1709 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1710 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1711 regs[bits (insn, 12, 15)]);
1714 else if ((insn & 0xffff0000) == 0xe92d0000)
1715 /* stmfd sp!, {..., fp, ip, lr, pc}
1717 stmfd sp!, {a1, a2, a3, a4} */
1719 int mask = insn & 0xffff;
1721 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1724 /* Calculate offsets of saved registers. */
/* STM stores lowest register at lowest address, so walk downward.  */
1725 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1726 if (mask & (1 << regno))
1729 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1730 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1733 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1734 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1735 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1737 /* No need to add this to saved_regs -- it's just an arg reg. */
1740 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1741 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1742 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1744 /* No need to add this to saved_regs -- it's just an arg reg. */
1747 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
/* {...} with Rn based on SP -- arg spill, not a save.  */
1749 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1751 /* No need to add this to saved_regs -- it's just arg regs. */
1754 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1756 unsigned imm = insn & 0xff; /* immediate value */
1757 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1758 imm = (imm >> rot) | (imm << (32 - rot));
1759 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1761 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1763 unsigned imm = insn & 0xff; /* immediate value */
1764 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1765 imm = (imm >> rot) | (imm << (32 - rot));
1766 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1768 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
/* [sp, #-12]! -- FPA register push; only meaningful if this target
   actually has FPA registers.  */
1770 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1772 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1775 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1776 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1777 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1779 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
/* [sp!] -- FPA store-multiple.  */
1781 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1783 int n_saved_fp_regs;
1784 unsigned int fp_start_reg, fp_bound_reg;
1786 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
/* N0/N1 bits jointly encode the register count (1-4).  */
1789 if ((insn & 0x800) == 0x800) /* N0 is set */
1791 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1792 n_saved_fp_regs = 3;
1794 n_saved_fp_regs = 1;
1798 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1799 n_saved_fp_regs = 2;
1801 n_saved_fp_regs = 4;
1804 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1805 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1806 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1808 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
/* NOTE(review): fp_start_reg is incremented both here and in the
   for-update above, so only every other FPA register appears to be
   recorded -- verify against the full file whether this is intended.  */
1809 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1810 regs[fp_start_reg++]);
1813 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1815 /* Allow some special function calls when skipping the
1816 prologue; GCC generates these before storing arguments to
/* the stack.  */
1818 CORE_ADDR dest = BranchDest (current_pc, insn);
1820 if (skip_prologue_function (gdbarch, dest, 0))
1825 else if ((insn & 0xf0000000) != 0xe0000000)
1826 break; /* Condition not true, exit early. */
1827 else if (arm_instruction_changes_pc (insn))
1828 /* Don't scan past anything that might change control flow. */
1830 else if (arm_instruction_restores_sp (insn))
1832 /* Don't scan past the epilogue. */
1835 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1836 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1837 /* Ignore block loads from the stack, potentially copying
1838 parameters from memory. */
1840 else if ((insn & 0xfc500000) == 0xe4100000
1841 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1842 /* Similarly ignore single loads from the stack. */
1844 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1845 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1846 register instead of the stack. */
1850 /* The optimizer might shove anything into the prologue, if
1851 we build up cache (cache != NULL) from scanning prologue,
1852 we just skip what we don't recognize and scan further to
1853 make cache as complete as possible. However, if we skip
1854 prologue, we'll stop immediately on unrecognized
/* instruction.  */
1856 unrecognized_pc = current_pc;
1864 if (unrecognized_pc == 0)
1865 unrecognized_pc = current_pc;
/* If CACHE was requested, derive frame register/size and saved-register
   offsets from the symbolic state.  */
1869 int framereg, framesize;
1871 /* The frame size is just the distance from the frame register
1872 to the original stack pointer. */
1873 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1875 /* Frame pointer is fp. */
1876 framereg = ARM_FP_REGNUM;
1877 framesize = -regs[ARM_FP_REGNUM].k;
1881 /* Try the stack pointer... this is a bit desperate. */
1882 framereg = ARM_SP_REGNUM;
1883 framesize = -regs[ARM_SP_REGNUM].k;
1886 cache->framereg = framereg;
1887 cache->framesize = framesize;
1889 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1890 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1891 cache->saved_regs[regno].addr = offset;
1895 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1896 paddress (gdbarch, unrecognized_pc));
1898 do_cleanups (back_to);
1899 return unrecognized_pc;
/* Scan the prologue of the function containing THIS_FRAME's PC and fill
   in CACHE (frame register, frame size, saved-register offsets).
   NOTE(review): chunk appears elided (braces and some statements not
   visible); code kept byte-identical.  */
1903 arm_scan_prologue (struct frame_info *this_frame,
1904 struct arm_prologue_cache *cache)
1906 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1907 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1909 CORE_ADDR prologue_start, prologue_end, current_pc;
1910 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1911 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
/* NOTE(review): regs/stack/back_to below appear unused in the visible
   code (the analysis is delegated to arm_analyze_prologue) -- possibly
   leftovers; confirm against the full file.  */
1912 pv_t regs[ARM_FPS_REGNUM];
1913 struct pv_area *stack;
1914 struct cleanup *back_to;
1917 /* Assume there is no frame until proven otherwise. */
1918 cache->framereg = ARM_SP_REGNUM;
1919 cache->framesize = 0;
1921 /* Check for Thumb prologue. */
1922 if (arm_frame_is_thumb (this_frame))
1924 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1928 /* Find the function prologue. If we can't find the function in
1929 the symbol table, peek in the stack frame to find the PC. */
1930 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1933 /* One way to find the end of the prologue (which works well
1934 for unoptimized code) is to do the following:
1936 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1939 prologue_end = prev_pc;
1940 else if (sal.end < prologue_end)
1941 prologue_end = sal.end;
1943 This mechanism is very accurate so long as the optimizer
1944 doesn't move any instructions from the function body into the
1945 prologue. If this happens, sal.end will be the last
1946 instruction in the first hunk of prologue code just before
1947 the first instruction that the scheduler has moved from
1948 the body to the prologue.
1950 In order to make sure that we scan all of the prologue
1951 instructions, we use a slightly less accurate mechanism which
1952 may scan more than necessary. To help compensate for this
1953 lack of accuracy, the prologue scanning loop below contains
1954 several clauses which'll cause the loop to terminate early if
1955 an implausible prologue instruction is encountered.
1961 is a suitable endpoint since it accounts for the largest
1962 possible prologue plus up to five instructions inserted by
/* the scheduler.  */
1965 if (prologue_end > prologue_start + 64)
1967 prologue_end = prologue_start + 64; /* See above. */
1972 /* We have no symbol information. Our only option is to assume this
1973 function has a standard stack frame and the normal frame register.
1974 Then, we can find the value of our frame pointer on entrance to
1975 the callee (or at the present moment if this is the innermost frame).
1976 The value stored there should be the address of the stmfd + 8. */
1977 CORE_ADDR frame_loc;
1978 LONGEST return_value;
1980 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1981 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
/* Saved PC points 8 bytes past the stmfd (ARM-mode PC offset).  */
1985 prologue_start = gdbarch_addr_bits_remove
1986 (gdbarch, return_value) - 8;
1987 prologue_end = prologue_start + 64; /* See above. */
/* Never scan past the point we are unwinding from.  */
1991 if (prev_pc < prologue_end)
1992 prologue_end = prev_pc;
1994 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Allocate and populate an arm_prologue_cache for THIS_FRAME by scanning
   its prologue, then convert the saved-register offsets produced by the
   scan into absolute addresses.  */
1997 static struct arm_prologue_cache *
1998 arm_make_prologue_cache (struct frame_info *this_frame)
2001 struct arm_prologue_cache *cache;
2002 CORE_ADDR unwound_fp;
2004 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2005 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2007 arm_scan_prologue (this_frame, cache);
2009 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
/* A zero frame register means we hit a wall; leave prev_sp at 0 so the
   stop_reason hook reports UNWIND_OUTERMOST.  */
2010 if (unwound_fp == 0)
2013 cache->prev_sp = unwound_fp + cache->framesize;
2015 /* Calculate actual addresses of saved registers using offsets
2016 determined by arm_scan_prologue. */
2017 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2018 if (trad_frame_addr_p (cache->saved_regs, reg))
2019 cache->saved_regs[reg].addr += cache->prev_sp;
2024 /* Implementation of the stop_reason hook for arm_prologue frames. */
2026 static enum unwind_stop_reason
2027 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2030 struct arm_prologue_cache *cache;
/* Build the prologue cache lazily on first use.  */
2033 if (*this_cache == NULL)
2034 *this_cache = arm_make_prologue_cache (this_frame);
2035 cache = *this_cache;
2037 /* This is meant to halt the backtrace at "_start". */
2038 pc = get_frame_pc (this_frame);
2039 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2040 return UNWIND_OUTERMOST;
2042 /* If we've hit a wall, stop. */
2043 if (cache->prev_sp == 0)
2044 return UNWIND_OUTERMOST;
2046 return UNWIND_NO_REASON;
2049 /* Our frame ID for a normal frame is the current function's starting PC
2050 and the caller's SP when we were called. */
2053 arm_prologue_this_id (struct frame_info *this_frame,
2055 struct frame_id *this_id)
2057 struct arm_prologue_cache *cache;
/* Build the prologue cache lazily on first use.  */
2061 if (*this_cache == NULL)
2062 *this_cache = arm_make_prologue_cache (this_frame);
2063 cache = *this_cache;
2065 /* Use function start address as part of the frame ID. If we cannot
2066 identify the start address (due to missing symbol information),
2067 fall back to just using the current PC. */
2068 pc = get_frame_pc (this_frame);
2069 func = get_frame_func (this_frame);
2073 id = frame_id_build (cache->prev_sp, func);
/* Unwind register PREV_REGNUM for the frame previous to THIS_FRAME,
   using the prologue-scan cache.  PC, SP, and CPSR need special
   reconstruction; everything else comes from the saved-register map.  */
2077 static struct value *
2078 arm_prologue_prev_register (struct frame_info *this_frame,
2082 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2083 struct arm_prologue_cache *cache;
2085 if (*this_cache == NULL)
2086 *this_cache = arm_make_prologue_cache (this_frame);
2087 cache = *this_cache;
2089 /* If we are asked to unwind the PC, then we need to return the LR
2090 instead. The prologue may save PC, but it will point into this
2091 frame's prologue, not the next frame's resume location. Also
2092 strip the saved T bit. A valid LR may have the low bit set, but
2093 a valid PC never does. */
2094 if (prev_regnum == ARM_PC_REGNUM)
2098 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2099 return frame_unwind_got_constant (this_frame, prev_regnum,
2100 arm_addr_bits_remove (gdbarch, lr));
2103 /* SP is generally not saved to the stack, but this frame is
2104 identified by the next frame's stack pointer at the time of the call.
2105 The value was already reconstructed into PREV_SP. */
2106 if (prev_regnum == ARM_SP_REGNUM)
2107 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2109 /* The CPSR may have been changed by the call instruction and by the
2110 called function. The only bit we can reconstruct is the T bit,
2111 by checking the low bit of LR as of the call. This is a reliable
2112 indicator of Thumb-ness except for some ARM v4T pre-interworking
2113 Thumb code, which could get away with a clear low bit as long as
2114 the called function did not use bx. Guess that all other
2115 bits are unchanged; the condition flags are presumably lost,
2116 but the processor status is likely valid. */
2117 if (prev_regnum == ARM_PS_REGNUM)
2120 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2122 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2123 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2124 if (IS_THUMB_ADDR (lr))
2128 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
/* All other registers: read from the saved-register addresses computed
   by the prologue scan.  */
2131 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
/* Prologue-scan based unwinder; registered as the fallback (sniffed by
   default_frame_sniffer).  */
2135 struct frame_unwind arm_prologue_unwind = {
2137 arm_prologue_unwind_stop_reason,
2138 arm_prologue_this_id,
2139 arm_prologue_prev_register,
2141 default_frame_sniffer
2144 /* Maintain a list of ARM exception table entries per objfile, similar to the
2145 list of mapping symbols. We only cache entries for standard ARM-defined
2146 personality routines; the cache will contain only the frame unwinding
2147 instructions associated with the entry (not the descriptors). */
/* Per-objfile key under which the arm_exidx_data cache is stored.  */
2149 static const struct objfile_data *arm_exidx_data_key;
/* One cached .ARM.exidx entry: function start address (section-relative)
   plus its normalized unwind instructions.  NOTE(review): member
   declarations are elided in this view.  */
2151 struct arm_exidx_entry
2156 typedef struct arm_exidx_entry arm_exidx_entry_s;
2157 DEF_VEC_O(arm_exidx_entry_s);
/* Per-objfile cache: one vector of entries per BFD section.  */
2159 struct arm_exidx_data
2161 VEC(arm_exidx_entry_s) **section_maps;
/* Free the per-section entry vectors when the objfile goes away (the
   struct itself lives on the objfile obstack).  */
2165 arm_exidx_data_free (struct objfile *objfile, void *arg)
2167 struct arm_exidx_data *data = arg;
2170 for (i = 0; i < objfile->obfd->section_count; i++)
2171 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
/* Ordering predicate for VEC_lower_bound: entries sort by start
   address.  */
2175 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2176 const struct arm_exidx_entry *rhs)
2178 return lhs->addr < rhs->addr;
/* Return the allocated (SEC_ALLOC) section of OBJFILE whose VMA range
   contains VMA, or fall through if none does.  NOTE(review): the
   return statements are elided in this view.  */
2181 static struct obj_section *
2182 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2184 struct obj_section *osect;
2186 ALL_OBJFILE_OSECTIONS (objfile, osect)
2187 if (bfd_get_section_flags (objfile->obfd,
2188 osect->the_bfd_section) & SEC_ALLOC)
2190 bfd_vma start, size;
2191 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2192 size = bfd_get_section_size (osect->the_bfd_section);
2194 if (start <= vma && vma < start + size)
2201 /* Parse contents of exception table and exception index sections
2202 of OBJFILE, and fill in the exception table entry cache.
2204 For each entry that refers to a standard ARM-defined personality
2205 routine, extract the frame unwinding instructions (from either
2206 the index or the table section). The unwinding instructions
2208 - extracting them from the rest of the table data
2209 - converting to host endianness
2210 - appending the implicit 0xb0 ("Finish") code
2212 The extracted and normalized instructions are stored for later
2213 retrieval by the arm_find_exidx_entry routine. */
/* NOTE(review): chunk appears elided (braces and some statements not
   visible); code kept byte-identical.  */
2216 arm_exidx_new_objfile (struct objfile *objfile)
2218 struct cleanup *cleanups;
2219 struct arm_exidx_data *data;
2220 asection *exidx, *extab;
2221 bfd_vma exidx_vma = 0, extab_vma = 0;
2222 bfd_size_type exidx_size = 0, extab_size = 0;
2223 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2226 /* If we've already touched this file, do nothing. */
2227 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2229 cleanups = make_cleanup (null_cleanup, NULL);
2231 /* Read contents of exception table and index. */
2232 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2235 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2236 exidx_size = bfd_get_section_size (exidx);
2237 exidx_data = xmalloc (exidx_size);
2238 make_cleanup (xfree, exidx_data);
2240 if (!bfd_get_section_contents (objfile->obfd, exidx,
2241 exidx_data, 0, exidx_size))
2243 do_cleanups (cleanups);
2248 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2251 extab_vma = bfd_section_vma (objfile->obfd, extab);
2252 extab_size = bfd_get_section_size (extab);
2253 extab_data = xmalloc (extab_size);
2254 make_cleanup (xfree, extab_data);
2256 if (!bfd_get_section_contents (objfile->obfd, extab,
2257 extab_data, 0, extab_size))
2259 do_cleanups (cleanups);
2264 /* Allocate exception table data structure. */
2265 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2266 set_objfile_data (objfile, arm_exidx_data_key, data);
2267 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2268 objfile->obfd->section_count,
2269 VEC(arm_exidx_entry_s) *);
2271 /* Fill in exception table. */
/* Each index entry is two 32-bit words: a prel31 offset to the function
   and either an inline entry, EXIDX_CANTUNWIND, or a prel31 pointer
   into .ARM.extab.  */
2272 for (i = 0; i < exidx_size / 8; i++)
2274 struct arm_exidx_entry new_exidx_entry;
2275 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2276 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2277 bfd_vma addr = 0, word = 0;
2278 int n_bytes = 0, n_words = 0;
2279 struct obj_section *sec;
2280 gdb_byte *entry = NULL;
2282 /* Extract address of start of function. */
/* Sign-extend the prel31 field, then make it absolute.  */
2283 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2284 idx += exidx_vma + i * 8;
2286 /* Find section containing function and compute section offset. */
2287 sec = arm_obj_section_from_vma (objfile, idx);
2290 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2292 /* Determine address of exception table entry. */
2295 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2297 else if ((val & 0xff000000) == 0x80000000)
2299 /* Exception table entry embedded in .ARM.exidx
2300 -- must be short form. */
2304 else if (!(val & 0x80000000))
2306 /* Exception table entry in .ARM.extab. */
2307 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2308 addr += exidx_vma + i * 8 + 4;
2310 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2312 word = bfd_h_get_32 (objfile->obfd,
2313 extab_data + addr - extab_vma);
/* Decode the first extab word: short form, long form with word
   count, or a prel31 pointer to a custom personality routine.  */
2316 if ((word & 0xff000000) == 0x80000000)
2321 else if ((word & 0xff000000) == 0x81000000
2322 || (word & 0xff000000) == 0x82000000)
2326 n_words = ((word >> 16) & 0xff);
2328 else if (!(word & 0x80000000))
2331 struct obj_section *pers_sec;
2332 int gnu_personality = 0;
2334 /* Custom personality routine. */
2335 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2336 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2338 /* Check whether we've got one of the variants of the
2339 GNU personality routines. */
2340 pers_sec = arm_obj_section_from_vma (objfile, pers);
2343 static const char *personality[] =
2345 "__gcc_personality_v0",
2346 "__gxx_personality_v0",
2347 "__gcj_personality_v0",
2348 "__gnu_objc_personality_v0",
2352 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2355 for (k = 0; personality[k]; k++)
2356 if (lookup_minimal_symbol_by_pc_name
2357 (pc, personality[k], objfile))
2359 gnu_personality = 1;
2364 /* If so, the next word contains a word count in the high
2365 byte, followed by the same unwind instructions as the
2366 pre-defined forms. */
2368 && addr + 4 <= extab_vma + extab_size)
2370 word = bfd_h_get_32 (objfile->obfd,
2371 extab_data + addr - extab_vma);
2374 n_words = ((word >> 24) & 0xff);
2380 /* Sanity check address. */
2382 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2383 n_words = n_bytes = 0;
2385 /* The unwind instructions reside in WORD (only the N_BYTES least
2386 significant bytes are valid), followed by N_WORDS words in the
2387 extab section starting at ADDR. */
2388 if (n_bytes || n_words)
2390 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2391 n_bytes + n_words * 4 + 1);
/* Copy the inline bytes, most significant first.  */
2394 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2398 word = bfd_h_get_32 (objfile->obfd,
2399 extab_data + addr - extab_vma);
/* Unpack each extab word big-endian-wise into the byte stream.  */
2402 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2403 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2404 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2405 *p++ = (gdb_byte) (word & 0xff);
2408 /* Implied "Finish" to terminate the list. */
2412 /* Push entry onto vector. They are guaranteed to always
2413 appear in order of increasing addresses. */
2414 new_exidx_entry.addr = idx;
2415 new_exidx_entry.entry = entry;
2416 VEC_safe_push (arm_exidx_entry_s,
2417 data->section_maps[sec->the_bfd_section->index],
2421 do_cleanups (cleanups);
2424 /* Search for the exception table entry covering MEMADDR. If one is found,
2425 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2426 set *START to the start of the region covered by this entry. */
/* The per-objfile section maps consulted here were built in order of
   increasing address, which is what makes the VEC_lower_bound binary
   search below valid.  */
2429 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2431 struct obj_section *sec;
2433 sec = find_pc_section (memaddr);
2436 struct arm_exidx_data *data;
2437 VEC(arm_exidx_entry_s) *map;
/* The map stores section-relative addresses, so convert MEMADDR into
   a section-relative key before searching.  */
2438 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2441 data = objfile_data (sec->objfile, arm_exidx_data_key);
2444 map = data->section_maps[sec->the_bfd_section->index];
2445 if (!VEC_empty (arm_exidx_entry_s, map))
2447 struct arm_exidx_entry *map_sym;
2449 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2450 arm_compare_exidx_entries);
2452 /* VEC_lower_bound finds the earliest ordered insertion
2453 point. If the following symbol starts at this exact
2454 address, we use that; otherwise, the preceding
2455 exception table entry covers this address. */
2456 if (idx < VEC_length (arm_exidx_entry_s, map))
2458 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2459 if (map_sym->addr == map_key.addr)
/* Exact match: report the absolute region start to the caller.  */
2462 *start = map_sym->addr + obj_section_addr (sec);
2463 return map_sym->entry;
/* Otherwise the preceding entry (idx - 1) covers MEMADDR.  */
2469 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2471 *start = map_sym->addr + obj_section_addr (sec);
2472 return map_sym->entry;
2481 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2482 instruction list from the ARM exception table entry ENTRY, allocate and
2483 return a prologue cache structure describing how to unwind this frame.
2485 Return NULL if the unwinding instruction list contains a "spare",
2486 "reserved" or "refuse to unwind" instruction as defined in section
2487 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2488 for the ARM Architecture" document. */
2490 static struct arm_prologue_cache *
2491 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
/* VSP below is the EHABI "virtual stack pointer": it tracks where SP
   would be as each unwind opcode notionally pops registers.  */
2496 struct arm_prologue_cache *cache;
2497 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2498 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2504 /* Whenever we reload SP, we actually have to retrieve its
2505 actual value in the current frame. */
2508 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2510 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2511 vsp = get_frame_register_unsigned (this_frame, reg);
2515 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2516 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2522 /* Decode next unwind instruction. */
/* Opcode 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
2525 if ((insn & 0xc0) == 0)
2527 int offset = insn & 0x3f;
2528 vsp += (offset << 2) + 4;
/* Opcode 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
2530 else if ((insn & 0xc0) == 0x40)
2532 int offset = insn & 0x3f;
2533 vsp -= (offset << 2) + 4;
/* Opcode 1000iiii iiiiiiii: pop r4-r15 under a 12-bit mask.  */
2535 else if ((insn & 0xf0) == 0x80)
2537 int mask = ((insn & 0xf) << 8) | *entry++;
2540 /* The special case of an all-zero mask identifies
2541 "Refuse to unwind". We return NULL to fall back
2542 to the prologue analyzer. */
2546 /* Pop registers r4..r15 under mask. */
2547 for (i = 0; i < 12; i++)
2548 if (mask & (1 << i))
2550 cache->saved_regs[4 + i].addr = vsp;
2554 /* Special-case popping SP -- we need to reload vsp. */
2555 if (mask & (1 << (ARM_SP_REGNUM - 4)))
/* Opcode 1001nnnn: set vsp from register nnnn.  */
2558 else if ((insn & 0xf0) == 0x90)
2560 int reg = insn & 0xf;
2562 /* Reserved cases. */
2563 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2566 /* Set SP from another register and mark VSP for reload. */
2567 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
/* Opcode 1010xnnn: pop r4-r[4+nnn], optionally (x) also LR.  */
2570 else if ((insn & 0xf0) == 0xa0)
2572 int count = insn & 0x7;
2573 int pop_lr = (insn & 0x8) != 0;
2576 /* Pop r4..r[4+count]. */
2577 for (i = 0; i <= count; i++)
2579 cache->saved_regs[4 + i].addr = vsp;
2583 /* If indicated by flag, pop LR as well. */
2586 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
/* Opcode 10110000: "Finish" -- end of the unwind sequence.  */
2590 else if (insn == 0xb0)
2592 /* We could only have updated PC by popping into it; if so, it
2593 will show up as address. Otherwise, copy LR into PC. */
2594 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2595 cache->saved_regs[ARM_PC_REGNUM]
2596 = cache->saved_regs[ARM_LR_REGNUM];
/* Opcode 10110001: pop r0-r3 under a 4-bit mask.  */
2601 else if (insn == 0xb1)
2603 int mask = *entry++;
2606 /* All-zero mask and mask >= 16 is "spare". */
2607 if (mask == 0 || mask >= 16)
2610 /* Pop r0..r3 under mask. */
2611 for (i = 0; i < 4; i++)
2612 if (mask & (1 << i))
2614 cache->saved_regs[i].addr = vsp;
/* Opcode 10110010 + ULEB128: vsp += 0x204 + (uleb128 << 2),
   used for large stack adjustments.  */
2618 else if (insn == 0xb2)
2620 ULONGEST offset = 0;
2625 offset |= (*entry & 0x7f) << shift;
2628 while (*entry++ & 0x80);
2630 vsp += 0x204 + (offset << 2);
/* Opcode 10110011 sssscccc: pop VFP D[ssss]-D[ssss+cccc] (FSTMFDX form).  */
2632 else if (insn == 0xb3)
2634 int start = *entry >> 4;
2635 int count = (*entry++) & 0xf;
2638 /* Only registers D0..D15 are valid here. */
2639 if (start + count >= 16)
2642 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2643 for (i = 0; i <= count; i++)
2645 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2649 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2652 else if ((insn & 0xf8) == 0xb8)
2654 int count = insn & 0x7;
2657 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2658 for (i = 0; i <= count; i++)
2660 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2664 /* Add an extra 4 bytes for FSTMFDX-style stack. */
/* Opcode 11000110 sssscccc: pop iWMMXt WR[ssss]-WR[ssss+cccc].  */
2667 else if (insn == 0xc6)
2669 int start = *entry >> 4;
2670 int count = (*entry++) & 0xf;
2673 /* Only registers WR0..WR15 are valid. */
2674 if (start + count >= 16)
2677 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2678 for (i = 0; i <= count; i++)
2680 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
/* Opcode 11000111: pop iWMMXt control registers under a 4-bit mask.  */
2684 else if (insn == 0xc7)
2686 int mask = *entry++;
2689 /* All-zero mask and mask >= 16 is "spare". */
2690 if (mask == 0 || mask >= 16)
2693 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2694 for (i = 0; i < 4; i++)
2695 if (mask & (1 << i))
2697 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2701 else if ((insn & 0xf8) == 0xc0)
2703 int count = insn & 0x7;
2706 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2707 for (i = 0; i <= count; i++)
2709 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
/* Opcode 11001000 sssscccc: pop high VFP registers D16 and up.  */
2713 else if (insn == 0xc8)
2715 int start = *entry >> 4;
2716 int count = (*entry++) & 0xf;
2719 /* Only registers D0..D31 are valid. */
2720 if (start + count >= 16)
2723 /* Pop VFP double-precision registers
2724 D[16+start]..D[16+start+count]. */
2725 for (i = 0; i <= count; i++)
2727 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
/* Opcode 11001001 sssscccc: pop VFP D[ssss]-D[ssss+cccc] (VPUSH form).  */
2731 else if (insn == 0xc9)
2733 int start = *entry >> 4;
2734 int count = (*entry++) & 0xf;
2737 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2738 for (i = 0; i <= count; i++)
2740 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2744 else if ((insn & 0xf8) == 0xd0)
2746 int count = insn & 0x7;
2749 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2750 for (i = 0; i <= count; i++)
2752 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2758 /* Everything else is "spare". */
2763 /* If we restore SP from a register, assume this was the frame register.
2764 Otherwise just fall back to SP as frame register. */
2765 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2766 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2768 cache->framereg = ARM_SP_REGNUM;
2770 /* Determine offset to previous frame. */
2772 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2774 /* We already got the previous SP. */
2775 cache->prev_sp = vsp;
2780 /* Unwinding via ARM exception table entries. Note that the sniffer
2781 already computes a filled-in prologue cache, which is then used
2782 with the same arm_prologue_this_id and arm_prologue_prev_register
2783 routines also used for prologue-parsing based unwinding. */
2786 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2787 struct frame_info *this_frame,
2788 void **this_prologue_cache)
2790 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2791 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2792 CORE_ADDR addr_in_block, exidx_region, func_start;
2793 struct arm_prologue_cache *cache;
2796 /* See if we have an ARM exception table entry covering this address. */
2797 addr_in_block = get_frame_address_in_block (this_frame);
2798 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2802 /* The ARM exception table does not describe unwind information
2803 for arbitrary PC values, but is guaranteed to be correct only
2804 at call sites. We have to decide here whether we want to use
2805 ARM exception table information for this frame, or fall back
2806 to using prologue parsing. (Note that if we have DWARF CFI,
2807 this sniffer isn't even called -- CFI is always preferred.)
2809 Before we make this decision, however, we check whether we
2810 actually have *symbol* information for the current frame.
2811 If not, prologue parsing would not work anyway, so we might
2812 as well use the exception table and hope for the best. */
2813 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2817 /* If the next frame is "normal", we are at a call site in this
2818 frame, so exception information is guaranteed to be valid. */
2819 if (get_next_frame (this_frame)
2820 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2823 /* We also assume exception information is valid if we're currently
2824 blocked in a system call. The system library is supposed to
2825 ensure this, so that e.g. pthread cancellation works. */
2826 if (arm_frame_is_thumb (this_frame))
/* Thumb: the preceding 16-bit instruction would be an SVC
   (0xdfxx) if we are blocked in a system call.  */
2830 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2831 byte_order_for_code, &insn)
2832 && (insn & 0xff00) == 0xdf00 /* svc */)
/* ARM: likewise, the preceding 32-bit instruction would have
   0xf in bits 24-27 if it was an SVC.  */
2839 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2840 byte_order_for_code, &insn)
2841 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2845 /* Bail out if we don't know that exception information is valid. */
2849 /* The ARM exception index does not mark the *end* of the region
2850 covered by the entry, and some functions will not have any entry.
2851 To correctly recognize the end of the covered region, the linker
2852 should have inserted dummy records with a CANTUNWIND marker.
2854 Unfortunately, current versions of GNU ld do not reliably do
2855 this, and thus we may have found an incorrect entry above.
2856 As a (temporary) sanity check, we only use the entry if it
2857 lies *within* the bounds of the function. Note that this check
2858 might reject perfectly valid entries that just happen to cover
2859 multiple functions; therefore this check ought to be removed
2860 once the linker is fixed. */
2861 if (func_start > exidx_region)
2865 /* Decode the list of unwinding instructions into a prologue cache.
2866 Note that this may fail due to e.g. a "refuse to unwind" code. */
2867 cache = arm_exidx_fill_cache (this_frame, entry);
2871 *this_prologue_cache = cache;
/* Unwinder descriptor for exidx-based unwinding: the sniffer builds the
   prologue cache, which is then consumed by the shared prologue-based
   this_id/prev_register callbacks.  */
2875 struct frame_unwind arm_exidx_unwind = {
2877 default_frame_unwind_stop_reason,
2878 arm_prologue_this_id,
2879 arm_prologue_prev_register,
2881 arm_exidx_unwind_sniffer
2884 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2885 trampoline, return the target PC. Otherwise return 0.
2887 void call0a (char c, short s, int i, long l) {}
2891 (*pointer_to_call0a) (c, s, i, l);
2894 Instead of calling a stub library function _call_via_xx (xx is
2895 the register name), GCC may inline the trampoline in the object
2896 file as below (register r2 has the address of call0a).
2899 .type main, %function
2908 The trampoline 'bx r2' doesn't belong to main. */
2911 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2913 /* The heuristics of recognizing such trampoline is that FRAME is
2914 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2915 if (arm_frame_is_thumb (frame))
2919 if (target_read_memory (pc, buf, 2) == 0)
2921 struct gdbarch *gdbarch = get_frame_arch (frame);
2922 enum bfd_endian byte_order_for_code
2923 = gdbarch_byte_order_for_code (gdbarch);
2925 = extract_unsigned_integer (buf, 2, byte_order_for_code);
/* 'bx <Rm>' encodes Rm in bits 3..6; the branch target is the
   value of that register.  */
2927 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2930 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2932 /* Clear the LSB so that gdb core sets step-resume
2933 breakpoint at the right address. */
2934 return UNMAKE_THUMB_ADDR (dest);
/* Build a minimal prologue cache for a stub frame (e.g. a PLT entry):
   a stub saves no registers and does not move SP, so the previous SP
   is simply this frame's current SP.  */
2942 static struct arm_prologue_cache *
2943 arm_make_stub_cache (struct frame_info *this_frame)
2945 struct arm_prologue_cache *cache;
2947 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2948 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2950 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2955 /* Our frame ID for a stub frame is the current SP and LR. */
/* NOTE(review): the ID is actually built from PREV_SP and the frame PC
   below; the "LR" wording above appears stale -- confirm against the
   original comment.  */
2958 arm_stub_this_id (struct frame_info *this_frame,
2960 struct frame_id *this_id)
2962 struct arm_prologue_cache *cache;
2964 if (*this_cache == NULL)
2965 *this_cache = arm_make_stub_cache (this_frame);
2966 cache = *this_cache;
2968 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
/* Sniffer for the stub unwinder: accept PLT entries, code whose memory
   is unreadable, and Thumb 'bx Rm' trampolines without symbols.  */
2972 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2973 struct frame_info *this_frame,
2974 void **this_prologue_cache)
2976 CORE_ADDR addr_in_block;
2978 CORE_ADDR pc, start_addr;
2981 addr_in_block = get_frame_address_in_block (this_frame);
2982 pc = get_frame_pc (this_frame);
2983 if (in_plt_section (addr_in_block)
2984 /* We also use the stub winder if the target memory is unreadable
2985 to avoid having the prologue unwinder trying to read it. */
2986 || target_read_memory (pc, dummy, 4) != 0)
/* No symbol info for PC, but it is a 'bx Rm' trampoline: use the
   stub unwinder as well.  */
2989 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2990 && arm_skip_bx_reg (this_frame, pc) != 0)
/* Unwinder descriptor for stub (PLT/trampoline) frames.  */
2996 struct frame_unwind arm_stub_unwind = {
2998 default_frame_unwind_stop_reason,
3000 arm_prologue_prev_register,
3002 arm_stub_unwind_sniffer
3005 /* Put here the code to store, into CACHE->saved_regs, the addresses
3006 of the saved registers of frame described by THIS_FRAME. CACHE is
/* Build the prologue cache for an M-profile hardware exception frame,
   based on the fixed 8-word stack frame layout pushed on exception
   entry (ARMv7-M ARM, B1.5.6).  */
3009 static struct arm_prologue_cache *
3010 arm_m_exception_cache (struct frame_info *this_frame)
3012 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3013 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3014 struct arm_prologue_cache *cache;
3015 CORE_ADDR unwound_sp;
3018 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3019 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3021 unwound_sp = get_frame_register_unsigned (this_frame,
3024 /* The hardware saves eight 32-bit words, comprising xPSR,
3025 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3026 "B1.5.6 Exception entry behavior" in
3027 "ARMv7-M Architecture Reference Manual". */
3028 cache->saved_regs[0].addr = unwound_sp;
3029 cache->saved_regs[1].addr = unwound_sp + 4;
3030 cache->saved_regs[2].addr = unwound_sp + 8;
3031 cache->saved_regs[3].addr = unwound_sp + 12;
3032 cache->saved_regs[12].addr = unwound_sp + 16;
3033 cache->saved_regs[14].addr = unwound_sp + 20;
3034 cache->saved_regs[15].addr = unwound_sp + 24;
3035 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3037 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3038 aligner between the top of the 32-byte stack frame and the
3039 previous context's stack pointer. */
3040 cache->prev_sp = unwound_sp + 32;
3041 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3042 && (xpsr & (1 << 9)) != 0)
3043 cache->prev_sp += 4;
3048 /* Implementation of function hook 'this_id' in
3049 'struct frame_unwind'. */
3052 arm_m_exception_this_id (struct frame_info *this_frame,
3054 struct frame_id *this_id)
3056 struct arm_prologue_cache *cache;
3058 if (*this_cache == NULL)
3059 *this_cache = arm_m_exception_cache (this_frame);
3060 cache = *this_cache;
3062 /* Our frame ID for a stub frame is the current SP and LR. */
3063 *this_id = frame_id_build (cache->prev_sp,
3064 get_frame_pc (this_frame));
3067 /* Implementation of function hook 'prev_register' in
3068 'struct frame_unwind'. */
3070 static struct value *
3071 arm_m_exception_prev_register (struct frame_info *this_frame,
3075 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3076 struct arm_prologue_cache *cache;
3078 if (*this_cache == NULL)
3079 *this_cache = arm_m_exception_cache (this_frame);
3080 cache = *this_cache;
3082 /* The value was already reconstructed into PREV_SP. */
3083 if (prev_regnum == ARM_SP_REGNUM)
3084 return frame_unwind_got_constant (this_frame, prev_regnum,
/* All other registers come from the saved-register addresses set up
   by arm_m_exception_cache.  */
3087 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3091 /* Implementation of function hook 'sniffer' in
3092 'struct frame_unwind'. */
3095 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3096 struct frame_info *this_frame,
3097 void **this_prologue_cache)
3099 CORE_ADDR this_pc = get_frame_pc (this_frame);
3101 /* No need to check is_m; this sniffer is only registered for
3102 M-profile architectures. */
3104 /* Exception frames return to one of these magic PCs. Other values
3105 are not defined as of v7-M. See details in "B1.5.8 Exception
3106 return behavior" in "ARMv7-M Architecture Reference Manual". */
3107 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3108 || this_pc == 0xfffffffd)
3114 /* Frame unwinder for M-profile exceptions. */
3116 struct frame_unwind arm_m_exception_unwind =
3119 default_frame_unwind_stop_reason,
3120 arm_m_exception_this_id,
3121 arm_m_exception_prev_register,
3123 arm_m_exception_unwind_sniffer
/* Frame-base callback for normal frames: the base is the previous SP
   minus the frame size recorded in the prologue cache.  */
3127 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3129 struct arm_prologue_cache *cache;
3131 if (*this_cache == NULL)
3132 *this_cache = arm_make_prologue_cache (this_frame);
3133 cache = *this_cache;
3135 return cache->prev_sp - cache->framesize;
/* Frame-base descriptor using arm_normal_frame_base for the frame,
   locals and args bases alike.  */
3138 struct frame_base arm_normal_base = {
3139 &arm_prologue_unwind,
3140 arm_normal_frame_base,
3141 arm_normal_frame_base,
3142 arm_normal_frame_base
3145 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3146 dummy frame. The frame ID's base needs to match the TOS value
3147 saved by save_dummy_frame_tos() and returned from
3148 arm_push_dummy_call, and the PC needs to match the dummy frame's
3151 static struct frame_id
3152 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3154 return frame_id_build (get_frame_register_unsigned (this_frame,
3156 get_frame_pc (this_frame));
3159 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3160 be used to construct the previous frame's ID, after looking up the
3161 containing function). */
3164 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3167 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
/* Strip the Thumb bit (and any other non-address bits) before use.  */
3168 return arm_addr_bits_remove (gdbarch, pc);
/* Hook for gdbarch_unwind_sp: the previous frame's SP is simply SP as
   unwound from THIS_FRAME.  */
3172 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3174 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
/* DWARF CFI register hook: reconstruct PC (from LR, stripping the Thumb
   bit) and the CPSR T bit, which DWARF CFI does not track directly.  */
3177 static struct value *
3178 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3181 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3183 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3188 /* The PC is normally copied from the return column, which
3189 describes saves of LR. However, that version may have an
3190 extra bit set to indicate Thumb state. The bit is not
3192 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3193 return frame_unwind_got_constant (this_frame, regnum,
3194 arm_addr_bits_remove (gdbarch, lr));
3197 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3198 cpsr = get_frame_register_unsigned (this_frame, regnum);
3199 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3200 if (IS_THUMB_ADDR (lr))
3204 return frame_unwind_got_constant (this_frame, regnum, cpsr);
/* Only PC and CPSR are registered with this function; anything else
   is a caller bug.  */
3207 internal_error (__FILE__, __LINE__,
3208 _("Unexpected register %d"), regnum);
/* Initialize DWARF frame-state rules for ARM registers: PC/CPSR use the
   custom reconstruction function above; SP is defined by the CFA.  */
3213 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3214 struct dwarf2_frame_state_reg *reg,
3215 struct frame_info *this_frame)
3221 reg->how = DWARF2_FRAME_REG_FN;
3222 reg->loc.fn = arm_dwarf2_prev_register;
3225 reg->how = DWARF2_FRAME_REG_CFA;
3230 /* Return true if we are in the function's epilogue, i.e. after the
3231 instruction that destroyed the function's stack frame. */
3234 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3236 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3237 unsigned int insn, insn2;
3238 int found_return = 0, found_stack_adjust = 0;
3239 CORE_ADDR func_start, func_end;
3243 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3246 /* The epilogue is a sequence of instructions along the following lines:
3248 - add stack frame size to SP or FP
3249 - [if frame pointer used] restore SP from FP
3250 - restore registers from SP [may include PC]
3251 - a return-type instruction [if PC wasn't already restored]
3253 In a first pass, we scan forward from the current PC and verify the
3254 instructions we find as compatible with this sequence, ending in a
3257 However, this is not sufficient to distinguish indirect function calls
3258 within a function from indirect tail calls in the epilogue in some cases.
3259 Therefore, if we didn't already find any SP-changing instruction during
3260 forward scan, we add a backward scanning heuristic to ensure we actually
3261 are in the epilogue. */
3264 while (scan_pc < func_end && !found_return)
3266 if (target_read_memory (scan_pc, buf, 2))
3270 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3272 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3274 else if (insn == 0x46f7) /* mov pc, lr */
3276 else if (thumb_instruction_restores_sp (insn))
3278 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3281 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3283 if (target_read_memory (scan_pc, buf, 2))
3287 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3289 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3291 if (insn2 & 0x8000) /* <registers> include PC. */
3294 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3295 && (insn2 & 0x0fff) == 0x0b04)
3297 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3300 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3301 && (insn2 & 0x0e00) == 0x0a00)
3313 /* Since any instruction in the epilogue sequence, with the possible
3314 exception of return itself, updates the stack pointer, we need to
3315 scan backwards for at most one instruction. Try either a 16-bit or
3316 a 32-bit instruction. This is just a heuristic, so we do not worry
3317 too much about false positives. */
3319 if (pc - 4 < func_start)
3321 if (target_read_memory (pc - 4, buf, 4))
/* Interpret the 4 bytes before PC either as one 32-bit Thumb-2
   instruction (insn:insn2) or as a trailing 16-bit one (insn2).  */
3324 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3325 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3327 if (thumb_instruction_restores_sp (insn2))
3328 found_stack_adjust = 1;
3329 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3330 found_stack_adjust = 1;
3331 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3332 && (insn2 & 0x0fff) == 0x0b04)
3333 found_stack_adjust = 1;
3334 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3335 && (insn2 & 0x0e00) == 0x0a00)
3336 found_stack_adjust = 1;
3338 return found_stack_adjust;
3341 /* Return true if we are in the function's epilogue, i.e. after the
3342 instruction that destroyed the function's stack frame. */
3345 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3347 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3350 CORE_ADDR func_start, func_end;
/* Thumb code has its own, more involved scanner.  */
3352 if (arm_pc_is_thumb (gdbarch, pc))
3353 return thumb_in_function_epilogue_p (gdbarch, pc);
3355 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3358 /* We are in the epilogue if the previous instruction was a stack
3359 adjustment and the next instruction is a possible return (bx, mov
3360 pc, or pop). We could have to scan backwards to find the stack
3361 adjustment, or forwards to find the return, but this is a decent
3362 approximation. First scan forwards. */
3365 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3366 if (bits (insn, 28, 31) != INST_NV)
/* BX <Rm>.  */
3368 if ((insn & 0x0ffffff0) == 0x012fff10)
/* MOV PC, <Rm>.  */
3371 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3374 else if ((insn & 0x0fff0000) == 0x08bd0000
3375 && (insn & 0x0000c000) != 0)
3376 /* POP (LDMIA), including PC or LR. */
3383 /* Scan backwards. This is just a heuristic, so do not worry about
3384 false positives from mode changes. */
3386 if (pc < func_start + 4)
3389 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3390 if (arm_instruction_restores_sp (insn))
3397 /* When arguments must be pushed onto the stack, they go on in reverse
3398 order. The code below implements a FILO (stack) to do this. */
/* Singly-linked list node; PREV points at the item pushed before this
   one.  (Other members are declared nearby.)  */
3403 struct stack_item *prev;
/* Push a copy of the LEN bytes at CONTENTS onto the stack whose current
   top is PREV; returns the new top.  The node and its data buffer are
   heap-allocated and owned by the list (freed by pop_stack_item).  */
3407 static struct stack_item *
3408 push_stack_item (struct stack_item *prev, const void *contents, int len)
3410 struct stack_item *si;
3411 si = xmalloc (sizeof (struct stack_item));
3412 si->data = xmalloc (len);
3415 memcpy (si->data, contents, len);
/* Pop the top item SI off the stack, freeing it, and return the new
   top.  */
3419 static struct stack_item *
3420 pop_stack_item (struct stack_item *si)
3422 struct stack_item *dead = si;
3430 /* Return the alignment (in bytes) of the given type. */
3433 arm_type_align (struct type *t)
3439 t = check_typedef (t);
3440 switch (TYPE_CODE (t))
3443 /* Should never happen. */
3444 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
/* Scalar types are aligned to their own size.  */
3448 case TYPE_CODE_ENUM:
3452 case TYPE_CODE_RANGE:
3454 case TYPE_CODE_CHAR:
3455 case TYPE_CODE_BOOL:
3456 return TYPE_LENGTH (t)
3458 case TYPE_CODE_ARRAY:
3459 case TYPE_CODE_COMPLEX:
3460 /* TODO: What about vector types? */
3461 return arm_type_align (TYPE_TARGET_TYPE (t));
/* Aggregates: alignment is derived from the field alignments.  */
3463 case TYPE_CODE_STRUCT:
3464 case TYPE_CODE_UNION:
3466 for (n = 0; n < TYPE_NFIELDS (t); n++)
3468 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3476 /* Possible base types for a candidate for passing and returning in
/* VFP registers per the AAPCS VFP variant (single, double, 64/128-bit
   vector, or unknown) -- enumerators are declared nearby.  */
3479 enum arm_vfp_cprc_base_type
3488 /* The length of one element of base type B. */
3491 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3495 case VFP_CPRC_SINGLE:
3497 case VFP_CPRC_DOUBLE:
3499 case VFP_CPRC_VEC64:
3501 case VFP_CPRC_VEC128:
/* VFP_CPRC_UNKNOWN (or a corrupted value) is a caller bug.  */
3504 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3509 /* The character ('s', 'd' or 'q') for the type of VFP register used
3510 for passing base type B. */
3513 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3517 case VFP_CPRC_SINGLE:
3519 case VFP_CPRC_DOUBLE:
3521 case VFP_CPRC_VEC64:
3523 case VFP_CPRC_VEC128:
/* VFP_CPRC_UNKNOWN (or a corrupted value) is a caller bug.  */
3526 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3531 /* Determine whether T may be part of a candidate for passing and
3532 returning in VFP registers, ignoring the limit on the total number
3533 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3534 classification of the first valid component found; if it is not
3535 VFP_CPRC_UNKNOWN, all components must have the same classification
3536 as *BASE_TYPE. If it is found that T contains a type not permitted
3537 for passing and returning in VFP registers, a type differently
3538 classified from *BASE_TYPE, or two types differently classified
3539 from each other, return -1, otherwise return the total number of
3540 base-type elements found (possibly 0 in an empty structure or
3541 array). Vector types are not currently supported, matching the
3542 generic AAPCS support. */
3545 arm_vfp_cprc_sub_candidate (struct type *t,
3546 enum arm_vfp_cprc_base_type *base_type)
3548 t = check_typedef (t);
3549 switch (TYPE_CODE (t))
/* Plain float/double: classify by size (4 = single, 8 = double).  */
3552 switch (TYPE_LENGTH (t))
3555 if (*base_type == VFP_CPRC_UNKNOWN)
3556 *base_type = VFP_CPRC_SINGLE;
3557 else if (*base_type != VFP_CPRC_SINGLE)
3562 if (*base_type == VFP_CPRC_UNKNOWN)
3563 *base_type = VFP_CPRC_DOUBLE;
3564 else if (*base_type != VFP_CPRC_DOUBLE)
3573 case TYPE_CODE_COMPLEX:
3574 /* Arguments of complex T where T is one of the types float or
3575 double get treated as if they are implemented as:
/* ...a two-member struct, i.e. two elements of the component type
   (8 bytes -> 2 singles, 16 bytes -> 2 doubles).  */
3584 switch (TYPE_LENGTH (t))
3587 if (*base_type == VFP_CPRC_UNKNOWN)
3588 *base_type = VFP_CPRC_SINGLE;
3589 else if (*base_type != VFP_CPRC_SINGLE)
3594 if (*base_type == VFP_CPRC_UNKNOWN)
3595 *base_type = VFP_CPRC_DOUBLE;
3596 else if (*base_type != VFP_CPRC_DOUBLE)
3605 case TYPE_CODE_ARRAY:
/* Classify the element type, then derive the element count from the
   total array length.  */
3609 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3612 if (TYPE_LENGTH (t) == 0)
3614 gdb_assert (count == 0);
3617 else if (count == 0)
3619 unitlen = arm_vfp_cprc_unit_length (*base_type);
3620 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3621 return TYPE_LENGTH (t) / unitlen;
3625 case TYPE_CODE_STRUCT:
/* A struct's element count is the sum over its fields; the struct
   must also be exactly the size of its elements (no padding).  */
3630 for (i = 0; i < TYPE_NFIELDS (t); i++)
3632 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3634 if (sub_count == -1)
3638 if (TYPE_LENGTH (t) == 0)
3640 gdb_assert (count == 0);
3643 else if (count == 0)
3645 unitlen = arm_vfp_cprc_unit_length (*base_type);
3646 if (TYPE_LENGTH (t) != unitlen * count)
3651 case TYPE_CODE_UNION:
/* A union's element count is the maximum over its members.  */
3656 for (i = 0; i < TYPE_NFIELDS (t); i++)
3658 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3660 if (sub_count == -1)
3662 count = (count > sub_count ? count : sub_count);
3664 if (TYPE_LENGTH (t) == 0)
3666 gdb_assert (count == 0);
3669 else if (count == 0)
3671 unitlen = arm_vfp_cprc_unit_length (*base_type);
3672 if (TYPE_LENGTH (t) != unitlen * count)
3684 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3685 if passed to or returned from a non-variadic function with the VFP
3686 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3687 *BASE_TYPE to the base type for T and *COUNT to the number of
3688 elements of that base type before returning. */
3691 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3694 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3695 int c = arm_vfp_cprc_sub_candidate (t, &b);
/* The AAPCS limits a CPRC to between 1 and 4 base-type elements.  */
3696 if (c <= 0 || c > 4)
3703 /* Return 1 if the VFP ABI should be used for passing arguments to and
3704 returning values from a function of type FUNC_TYPE, 0
3708 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3710 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3711 /* Variadic functions always use the base ABI. Assume that functions
3712 without debug info are not variadic. */
3713 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3715 /* The VFP ABI is only supported as a variant of AAPCS. */
3716 if (tdep->arm_abi != ARM_ABI_AAPCS)
/* Within AAPCS, the VFP variant applies only when the float model
   is hard-float VFP.  */
3718 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
/* We currently only support passing parameters in integer registers, which
   conforms with GCC's default model, and VFP argument passing following
   the VFP variant of AAPCS.  Several other variants exist and
   we should probably support some of them based on the selected ABI.

   NOTE(review): this extract appears to be missing lines (declarations,
   braces and several statements are elided) -- compare against the
   upstream arm-tdep.c before relying on it.  */

arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
		     struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
		     struct value **args, CORE_ADDR sp, int struct_return,
		     CORE_ADDR struct_addr)
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct stack_item *si = NULL;
  /* Bitmask of VFP single registers s0..s15 still free for argument
     passing; bit N set means sN is available.  */
  unsigned vfp_regs_free = (1 << 16) - 1;

  /* Determine the type of this function and whether the VFP ABI
     is in use.  */
  ftype = check_typedef (value_type (function));
  if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
    ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
  use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);

  /* Set the return address.  For the ARM, the return breakpoint is
     always at BP_ADDR.  */
  if (arm_pc_is_thumb (gdbarch, bp_addr))
  regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);

  /* Walk through the list of args and determine how large a temporary
     stack is required.  Need to take care here as structs may be
     passed on the stack, and we have to push them.  */
  argreg = ARM_A1_REGNUM;

  /* The struct_return pointer occupies the first parameter
     passing register.  */
      fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
			  gdbarch_register_name (gdbarch, argreg),
			  paddress (gdbarch, struct_addr));
  regcache_cooked_write_unsigned (regcache, argreg, struct_addr);

  for (argnum = 0; argnum < nargs; argnum++)
      struct type *arg_type;
      struct type *target_type;
      enum type_code typecode;
      const bfd_byte *val;
      enum arm_vfp_cprc_base_type vfp_base_type;
      /* Non-zero while this argument may still be placed in a core
	 register; cleared once it is known to be a VFP candidate.  */
      int may_use_core_reg = 1;

      arg_type = check_typedef (value_type (args[argnum]));
      len = TYPE_LENGTH (arg_type);
      target_type = TYPE_TARGET_TYPE (arg_type);
      typecode = TYPE_CODE (arg_type);
      val = value_contents (args[argnum]);

      align = arm_type_align (arg_type);
      /* Round alignment up to a whole number of words.  */
      align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
      /* Different ABIs have different maximum alignments.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
	/* The APCS ABI only requires word alignment.  */
	align = INT_REGISTER_SIZE;
      /* The AAPCS requires at most doubleword alignment.  */
      if (align > INT_REGISTER_SIZE * 2)
	align = INT_REGISTER_SIZE * 2;

      /* Is this argument a VFP co-processor register candidate?  */
	  && arm_vfp_call_candidate (arg_type, &vfp_base_type,
	  /* Because this is a CPRC it cannot go in a core register or
	     cause a core register to be skipped for alignment.
	     Either it goes in VFP registers and the rest of this loop
	     iteration is skipped for this argument, or it goes on the
	     stack (and the stack alignment code is correct for this
	     case).  */
	  may_use_core_reg = 0;

	  unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
	  shift = unit_length / 4;
	  mask = (1 << (shift * vfp_base_count)) - 1;
	  /* Look for a contiguous run of free VFP registers large
	     enough to hold the whole candidate.  */
	  for (regno = 0; regno < 16; regno += shift)
	    if (((vfp_regs_free >> regno) & mask) == mask)
	      vfp_regs_free &= ~(mask << regno);
	      reg_scaled = regno / shift;
	      reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
	      for (i = 0; i < vfp_base_count; i++)
		  if (reg_char == 'q')
		    arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
					 val + i * unit_length);
		      xsnprintf (name_buf, sizeof (name_buf), "%c%d",
				 reg_char, reg_scaled + i);
		      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
		      regcache_cooked_write (regcache, regnum,
					     val + i * unit_length);
	      /* This CPRC could not go in VFP registers, so all VFP
		 registers are now marked as used.  */

      /* Push stack padding for doubleword alignment.  */
      if (nstack & (align - 1))
	  si = push_stack_item (si, val, INT_REGISTER_SIZE);
	  nstack += INT_REGISTER_SIZE;

      /* Doubleword aligned quantities must go in even register pairs.  */
      if (may_use_core_reg
	  && argreg <= ARM_LAST_ARG_REGNUM
	  && align > INT_REGISTER_SIZE

      /* If the argument is a pointer to a function, and it is a
	 Thumb function, create a LOCAL copy of the value and set
	 the THUMB bit in it.  */
      if (TYPE_CODE_PTR == typecode
	  && target_type != NULL
	  && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
	  CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
	  if (arm_pc_is_thumb (gdbarch, regval))
	      bfd_byte *copy = alloca (len);
	      store_unsigned_integer (copy, len, byte_order,
				      MAKE_THUMB_ADDR (regval));

      /* Copy the argument to general registers or the stack in
	 register-sized pieces.  Large arguments are split between
	 registers and stack.  */
	  int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;

	  if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
	      /* The argument is being passed in a general purpose
		 register.  */
		= extract_unsigned_integer (val, partial_len, byte_order);
	      if (byte_order == BFD_ENDIAN_BIG)
		regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
		fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
				    gdbarch_register_name
				    phex (regval, INT_REGISTER_SIZE));
	      regcache_cooked_write_unsigned (regcache, argreg, regval);
	      /* Push the arguments onto the stack.  */
		fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
	      si = push_stack_item (si, val, INT_REGISTER_SIZE);
	      nstack += INT_REGISTER_SIZE;

  /* If we have an odd number of words to push, then decrement the stack
     by one word now, so first stack argument will be dword aligned.  */

      write_memory (sp, si->data, si->len);
      si = pop_stack_item (si);

  /* Finally, update the SP register.  */
  regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3956 /* Always align the frame to an 8-byte boundary. This is required on
3957 some platforms and harmless on the rest. */
3960 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3962 /* Align the stack to eight bytes. */
3963 return sp & ~ (CORE_ADDR) 7;
/* Print the names of the FPS exception flags set in the low five bits
   of FLAGS (invalid op, divide by zero, overflow, underflow, inexact),
   followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  if (flags & (1 << 0))
    fputs_filtered ("IVO ", file);
  if (flags & (1 << 1))
    fputs_filtered ("DVZ ", file);
  if (flags & (1 << 2))
    fputs_filtered ("OFL ", file);
  if (flags & (1 << 3))
    fputs_filtered ("UFL ", file);
  if (flags & (1 << 4))
    fputs_filtered ("INX ", file);
  fputc_filtered ('\n', file);
}
3982 /* Print interesting information about the floating point processor
3983 (if present) or emulator. */
3985 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3986 struct frame_info *frame, const char *args)
3988 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3991 type = (status >> 24) & 127;
3992 if (status & (1 << 31))
3993 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3995 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3996 /* i18n: [floating point unit] mask */
3997 fputs_filtered (_("mask: "), file);
3998 print_fpu_flags (file, status >> 16);
3999 /* i18n: [floating point unit] flags */
4000 fputs_filtered (_("flags: "), file);
4001 print_fpu_flags (file, status);
4004 /* Construct the ARM extended floating point type. */
4005 static struct type *
4006 arm_ext_type (struct gdbarch *gdbarch)
4008 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4010 if (!tdep->arm_ext_type)
4012 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4013 floatformats_arm_ext);
4015 return tdep->arm_ext_type;
4018 static struct type *
4019 arm_neon_double_type (struct gdbarch *gdbarch)
4021 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4023 if (tdep->neon_double_type == NULL)
4025 struct type *t, *elem;
4027 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4029 elem = builtin_type (gdbarch)->builtin_uint8;
4030 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4031 elem = builtin_type (gdbarch)->builtin_uint16;
4032 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4033 elem = builtin_type (gdbarch)->builtin_uint32;
4034 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4035 elem = builtin_type (gdbarch)->builtin_uint64;
4036 append_composite_type_field (t, "u64", elem);
4037 elem = builtin_type (gdbarch)->builtin_float;
4038 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4039 elem = builtin_type (gdbarch)->builtin_double;
4040 append_composite_type_field (t, "f64", elem);
4042 TYPE_VECTOR (t) = 1;
4043 TYPE_NAME (t) = "neon_d";
4044 tdep->neon_double_type = t;
4047 return tdep->neon_double_type;
4050 /* FIXME: The vector types are not correctly ordered on big-endian
4051 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4052 bits of d0 - regardless of what unit size is being held in d0. So
4053 the offset of the first uint8 in d0 is 7, but the offset of the
4054 first float is 4. This code works as-is for little-endian
4057 static struct type *
4058 arm_neon_quad_type (struct gdbarch *gdbarch)
4060 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4062 if (tdep->neon_quad_type == NULL)
4064 struct type *t, *elem;
4066 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4068 elem = builtin_type (gdbarch)->builtin_uint8;
4069 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4070 elem = builtin_type (gdbarch)->builtin_uint16;
4071 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4072 elem = builtin_type (gdbarch)->builtin_uint32;
4073 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4074 elem = builtin_type (gdbarch)->builtin_uint64;
4075 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4076 elem = builtin_type (gdbarch)->builtin_float;
4077 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4078 elem = builtin_type (gdbarch)->builtin_double;
4079 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4081 TYPE_VECTOR (t) = 1;
4082 TYPE_NAME (t) = "neon_q";
4083 tdep->neon_quad_type = t;
4086 return tdep->neon_quad_type;
4089 /* Return the GDB type object for the "standard" data type of data in
4092 static struct type *
4093 arm_register_type (struct gdbarch *gdbarch, int regnum)
4095 int num_regs = gdbarch_num_regs (gdbarch);
4097 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4098 && regnum >= num_regs && regnum < num_regs + 32)
4099 return builtin_type (gdbarch)->builtin_float;
4101 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4102 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4103 return arm_neon_quad_type (gdbarch);
4105 /* If the target description has register information, we are only
4106 in this function so that we can override the types of
4107 double-precision registers for NEON. */
4108 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4110 struct type *t = tdesc_register_type (gdbarch, regnum);
4112 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4113 && TYPE_CODE (t) == TYPE_CODE_FLT
4114 && gdbarch_tdep (gdbarch)->have_neon)
4115 return arm_neon_double_type (gdbarch);
4120 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4122 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4123 return builtin_type (gdbarch)->builtin_void;
4125 return arm_ext_type (gdbarch);
4127 else if (regnum == ARM_SP_REGNUM)
4128 return builtin_type (gdbarch)->builtin_data_ptr;
4129 else if (regnum == ARM_PC_REGNUM)
4130 return builtin_type (gdbarch)->builtin_func_ptr;
4131 else if (regnum >= ARRAY_SIZE (arm_register_names))
4132 /* These registers are only supported on targets which supply
4133 an XML description. */
4134 return builtin_type (gdbarch)->builtin_int0;
4136 return builtin_type (gdbarch)->builtin_uint32;
4139 /* Map a DWARF register REGNUM onto the appropriate GDB register
4143 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4145 /* Core integer regs. */
4146 if (reg >= 0 && reg <= 15)
4149 /* Legacy FPA encoding. These were once used in a way which
4150 overlapped with VFP register numbering, so their use is
4151 discouraged, but GDB doesn't support the ARM toolchain
4152 which used them for VFP. */
4153 if (reg >= 16 && reg <= 23)
4154 return ARM_F0_REGNUM + reg - 16;
4156 /* New assignments for the FPA registers. */
4157 if (reg >= 96 && reg <= 103)
4158 return ARM_F0_REGNUM + reg - 96;
4160 /* WMMX register assignments. */
4161 if (reg >= 104 && reg <= 111)
4162 return ARM_WCGR0_REGNUM + reg - 104;
4164 if (reg >= 112 && reg <= 127)
4165 return ARM_WR0_REGNUM + reg - 112;
4167 if (reg >= 192 && reg <= 199)
4168 return ARM_WC0_REGNUM + reg - 192;
4170 /* VFP v2 registers. A double precision value is actually
4171 in d1 rather than s2, but the ABI only defines numbering
4172 for the single precision registers. This will "just work"
4173 in GDB for little endian targets (we'll read eight bytes,
4174 starting in s0 and then progressing to s1), but will be
4175 reversed on big endian targets with VFP. This won't
4176 be a problem for the new Neon quad registers; you're supposed
4177 to use DW_OP_piece for those. */
4178 if (reg >= 64 && reg <= 95)
4182 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4183 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4187 /* VFP v3 / Neon registers. This range is also used for VFP v2
4188 registers, except that it now describes d0 instead of s0. */
4189 if (reg >= 256 && reg <= 287)
4193 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4194 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4201 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4203 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4206 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4208 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4209 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4211 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4212 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4214 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4215 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4217 if (reg < NUM_GREGS)
4218 return SIM_ARM_R0_REGNUM + reg;
4221 if (reg < NUM_FREGS)
4222 return SIM_ARM_FP0_REGNUM + reg;
4225 if (reg < NUM_SREGS)
4226 return SIM_ARM_FPS_REGNUM + reg;
4229 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4232 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4233 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4234 It is thought that this is is the floating-point register format on
4235 little-endian systems. */
4238 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4239 void *dbl, int endianess)
4243 if (endianess == BFD_ENDIAN_BIG)
4244 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4246 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4248 floatformat_from_doublest (fmt, &d, dbl);
4252 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4257 floatformat_to_doublest (fmt, ptr, &d);
4258 if (endianess == BFD_ENDIAN_BIG)
4259 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4261 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4266 condition_true (unsigned long cond, unsigned long status_reg)
4268 if (cond == INST_AL || cond == INST_NV)
4274 return ((status_reg & FLAG_Z) != 0);
4276 return ((status_reg & FLAG_Z) == 0);
4278 return ((status_reg & FLAG_C) != 0);
4280 return ((status_reg & FLAG_C) == 0);
4282 return ((status_reg & FLAG_N) != 0);
4284 return ((status_reg & FLAG_N) == 0);
4286 return ((status_reg & FLAG_V) != 0);
4288 return ((status_reg & FLAG_V) == 0);
4290 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4292 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4294 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4296 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4298 return (((status_reg & FLAG_Z) == 0)
4299 && (((status_reg & FLAG_N) == 0)
4300 == ((status_reg & FLAG_V) == 0)));
4302 return (((status_reg & FLAG_Z) != 0)
4303 || (((status_reg & FLAG_N) == 0)
4304 != ((status_reg & FLAG_V) == 0)));
4309 static unsigned long
4310 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4311 unsigned long pc_val, unsigned long status_reg)
4313 unsigned long res, shift;
4314 int rm = bits (inst, 0, 3);
4315 unsigned long shifttype = bits (inst, 5, 6);
4319 int rs = bits (inst, 8, 11);
4320 shift = (rs == 15 ? pc_val + 8
4321 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4324 shift = bits (inst, 7, 11);
4326 res = (rm == ARM_PC_REGNUM
4327 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4328 : get_frame_register_unsigned (frame, rm));
4333 res = shift >= 32 ? 0 : res << shift;
4337 res = shift >= 32 ? 0 : res >> shift;
4343 res = ((res & 0x80000000L)
4344 ? ~((~res) >> shift) : res >> shift);
4347 case 3: /* ROR/RRX */
4350 res = (res >> 1) | (carry ? 0x80000000L : 0);
4352 res = (res >> shift) | (res << (32 - shift));
4356 return res & 0xffffffff;
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int nbits;

  /* Kernighan's method: each iteration clears the rightmost set bit,
     so the loop runs once per set bit.  */
  for (nbits = 0; val != 0; nbits++)
    val &= val - 1;		/* Delete rightmost 1-bit in val.  */

  return nbits;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  */

static int
thumb_insn_size (unsigned short inst1)
{
  /* 32-bit Thumb-2 instructions begin with the halfword prefixes
     0b11101, 0b11110 or 0b11111; every other leading halfword is a
     complete 16-bit instruction.  */
  if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
    return 4;
  else
    return 2;
}
/* Advance the IT execution state ITSTATE past one instruction and
   return the new state; the result is zero once the IT block is
   exhausted.  */

static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Preserve IT[7:5], the first three bits of the condition.  Shift
     the upcoming condition flags left by one bit.  */
  itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* If we have finished the IT block, clear the state.  */
  if ((itstate & 0x0f) == 0)
    itstate = 0;

  return itstate;
}
/* Find the next PC after the current instruction executes.  In some
   cases we can not statically determine the answer (see the IT state
   handling in this function); in that case, a breakpoint may be
   inserted in addition to the returned PC, which will be used to set
   another breakpoint by our caller.

   NOTE(review): this extract appears to be missing lines (braces,
   declarations and `else' arms are elided) -- compare against the
   upstream arm-tdep.c before relying on it.  */

thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip over any instructions the IT condition disables.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	      inst1 = read_memory_unsigned_integer (pc, 2,
						    byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	  return MAKE_THUMB_ADDR (pc);
      else if (itstate != 0)
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);

	      return MAKE_THUMB_ADDR (pc);
	  else if ((itstate & 0x0f) == 0x08)
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      arm_insert_single_step_breakpoint (gdbarch, aspace,
						 MAKE_THUMB_ADDR (pc));
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      return MAKE_THUMB_ADDR (pc);
  else if (itstate & 0x0f)
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	/* Advance to the next instruction.  All the 32-bit
	   instructions share a common prefix.  */
	return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));

      /* Otherwise, handle the instruction normally.  */

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
      nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	  struct gdbarch_tdep *tdep;
	  tdep = gdbarch_tdep (gdbarch);

	  /* Let the OS-specific code predict the PC after a system
	     call, if it knows how.  */
	  if (tdep->syscall_next_pc != NULL)
	    nextpc = tdep->syscall_next_pc (frame);
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
      unsigned short inst2;
      inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	      /* B, BL, BLX: reassemble the branch offset from the
		 split immediate fields and the inverted J bits.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
      else if ((inst1 & 0xfe50) == 0xe810)
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	      /* LDMIA or POP.  */
	      if (!bit (inst2, 15))
	      offset = bitcount (inst2) * 4 - 4;
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	      /* LDMDB.  */
	      if (!bit (inst2, 15))
	  else if (bit (inst1, 7) && bit (inst1, 8))
	      /* RFEIA.  */
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	      /* RFEDB.  */

	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	  /* MOV PC or MOVS PC.  */
	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	  /* LDR with PC destination.  */
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = get_frame_register_unsigned (frame, rn);
	  if (rn == ARM_PC_REGNUM)
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
		base += bits (inst2, 0, 11);
		base -= bits (inst2, 0, 11);
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	      if (bit (inst2, 10))
		    base += bits (inst2, 0, 7);
		    base -= bits (inst2, 0, 7);
	  else if ((inst2 & 0x0fc0) == 0x0000)
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += get_frame_register_unsigned (frame, rm) << shift;

	    nextpc = get_frame_memory_unsigned (frame, base, 4);
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	  /* TBB: table branch, byte offsets.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
	  nextpc = pc_val + length;
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	  /* TBH: table branch, halfword offsets.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
	  nextpc = pc_val + length;
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
      if (bits (inst1, 3, 6) == 0x0f)
      nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
      nextpc = MAKE_THUMB_ADDR (nextpc);
  else if ((inst1 & 0xf500) == 0xb100)
      /* CBNZ or CBZ: compare-and-branch on (non-)zero.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
/* Get the raw next address.  PC is the current program counter, in
   FRAME, which is assumed to be executing in ARM mode.

   The value returned has the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address.

   NOTE(review): this extract appears to be missing lines (braces,
   case labels, declarations and `else' arms are elided) -- compare
   against the upstream arm-tdep.c before relying on it.  */

arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
  struct gdbarch *gdbarch = get_frame_arch (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val;
  unsigned long this_instr;
  unsigned long status;

  pc_val = (unsigned long) pc;
  this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* Condition field 0b1111 selects the unconditional instruction
     space (BLX immediate, coprocessor ops, ...).  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
	/* Branch with Link and change to Thumb.  */
	nextpc = BranchDest (pc, this_instr);
	nextpc |= bit (this_instr, 24) << 1;
	nextpc = MAKE_THUMB_ADDR (nextpc);
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
  else if (condition_true (bits (this_instr, 28, 31), status))
      switch (bits (this_instr, 24, 27))
	case 0x1:		/* data processing */
	    unsigned long operand1, operand2, result = 0;

	    /* Only instructions whose destination is the PC can
	       change the flow of control.  */
	    if (bits (this_instr, 12, 15) != 15)

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  : get_frame_register_unsigned (frame, rn));

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = ((rn == ARM_PC_REGNUM)
			: get_frame_register_unsigned (frame, rn));

	    if (bit (this_instr, 25))
		/* Operand 2 is a rotated 8-bit immediate.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (frame, this_instr, c,

	    /* Emulate the data-processing opcode to compute the value
	       written to the PC.  */
	    switch (bits (this_instr, 21, 24))
		result = operand1 & operand2;
		result = operand1 ^ operand2;
		result = operand1 - operand2;
		result = operand2 - operand1;
		result = operand1 + operand2;
		result = operand1 + operand2 + c;
		result = operand1 - operand2 + c;
		result = operand2 - operand1 + c;
	      case 0xb:		/* tst, teq, cmp, cmn */
		result = (unsigned long) nextpc;
		result = operand1 | operand2;
		/* Always step into a function.  */
		result = operand1 & ~operand2;

	    /* In 26-bit APCS the bottom two bits of the result are
	       ignored, and we always end up in ARM state.  */
	    nextpc = arm_addr_bits_remove (gdbarch, result);

	case 0x5:		/* data transfer */
	    if (bit (this_instr, 20))
		/* Load operation...  */
		if (bits (this_instr, 12, 15) == 15)
		    /* ...whose destination is the PC.  */
		    if (bit (this_instr, 22))
		      error (_("Invalid update to pc in instruction"));

		    /* byte write to PC */
		    rn = bits (this_instr, 16, 19);
		    base = ((rn == ARM_PC_REGNUM)
			    : get_frame_register_unsigned (frame, rn));

		    if (bit (this_instr, 24))
			/* Pre-indexed: apply the offset before the
			   load.  */
			int c = (status & FLAG_C) ? 1 : 0;
			unsigned long offset =
			  (bit (this_instr, 25)
			   ? shifted_reg_val (frame, this_instr, c, pc_val, status)
			   : bits (this_instr, 0, 11));

			if (bit (this_instr, 23))
		      (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,

	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	      /* LDM...  */
	      if (bit (this_instr, 15))
		  /* ...with the PC in the register list.  */
		  unsigned long rn_val
		    = get_frame_register_unsigned (frame,
						   bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		      /* Increment: PC is loaded from the top of the
			 transferred block.  */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
		  else if (bit (this_instr, 24))
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	    nextpc = BranchDest (pc, this_instr);

	case 0xe:		/* coproc ops */
	    struct gdbarch_tdep *tdep;
	    tdep = gdbarch_tdep (gdbarch);

	    /* SWI: let the OS-specific code predict the PC after a
	       system call, if it knows how.  */
	    if (tdep->syscall_next_pc != NULL)
	      nextpc = tdep->syscall_next_pc (frame);

	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5038 /* Determine next PC after current instruction executes. Will call either
5039 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5040 loop is detected. */
/* FRAME supplies registers and the current ARM/Thumb execution state;
   PC is the address of the instruction about to execute.  Dispatches to
   the Thumb or ARM raw decoder accordingly.  */
5043 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5047 if (arm_frame_is_thumb (frame))
5048 nextpc = thumb_get_next_pc_raw (frame, pc);
/* Otherwise decode as a 32-bit ARM instruction.  (The `else' line is
   elided in this excerpt.)  */
5050 nextpc = arm_get_next_pc_raw (frame, pc);
5055 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5056 of the appropriate mode (as encoded in the PC value), even if this
5057 differs from what would be expected according to the symbol tables. */
/* Temporarily forces arm_override_mode from the Thumb bit encoded in PC,
   so the breakpoint chosen matches the actual execution mode; the cleanup
   restores the previous override value.  */
5060 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5061 struct address_space *aspace,
5064 struct cleanup *old_chain
5065 = make_cleanup_restore_integer (&arm_override_mode);
/* Record the mode from the PC's Thumb bit, then strip that bit before
   inserting the breakpoint at the real address.  */
5067 arm_override_mode = IS_THUMB_ADDR (pc);
5068 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5070 insert_single_step_breakpoint (gdbarch, aspace, pc);
5072 do_cleanups (old_chain);
5075 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5076 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5077 is found, attempt to step through it. A breakpoint is placed at the end of
/* Thumb/Thumb-2 variant.  Returns 0 when the sequence cannot be handled
   and the caller should fall back to ordinary single-stepping; the
   success path is elided in this excerpt.  */
5081 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5083 struct gdbarch *gdbarch = get_frame_arch (frame);
5084 struct address_space *aspace = get_frame_address_space (frame);
5085 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5086 CORE_ADDR pc = get_frame_pc (frame);
/* breaks[0] is the address just past the sequence; breaks[1] (optional)
   is the destination of a conditional branch inside the sequence.  */
5087 CORE_ADDR breaks[2] = {-1, -1};
5089 unsigned short insn1, insn2;
5092 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5093 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5094 ULONGEST status, itstate;
5096 /* We currently do not support atomic sequences within an IT block. */
5097 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
/* Reassemble the ITSTATE field from its two CPSR bit groups.  */
5098 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5102 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5103 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
/* LDREX{,B,H,D} are 32-bit encodings; a 16-bit insn cannot start one.  */
5105 if (thumb_insn_size (insn1) != 4)
5108 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
/* Match the LDREX (0xe850) and LDREXB/H/D (0xe8d0 with op field 0x0040)
   encoding groups.  */
5110 if (!((insn1 & 0xfff0) == 0xe850
5111 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5114 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5116 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5118 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5121 if (thumb_insn_size (insn1) != 4)
5123 /* Assume that there is at most one conditional branch in the
5124 atomic sequence. If a conditional branch is found, put a
5125 breakpoint in its destination address. */
5126 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5128 if (last_breakpoint > 0)
5129 return 0; /* More than one conditional branch found,
5130 fallback to the standard code. */
5132 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5136 /* We do not support atomic sequences that use any *other*
5137 instructions but conditional branches to change the PC.
5138 Fall back to standard code to avoid losing control of
5140 else if (thumb_instruction_changes_pc (insn1))
5145 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5148 /* Assume that there is at most one conditional branch in the
5149 atomic sequence. If a conditional branch is found, put a
5150 breakpoint in its destination address. */
5151 if ((insn1 & 0xf800) == 0xf000
5152 && (insn2 & 0xd000) == 0x8000
5153 && (insn1 & 0x0380) != 0x0380)
5155 int sign, j1, j2, imm1, imm2;
5156 unsigned int offset;
/* Decode the Thumb-2 conditional-branch immediate (S:J2:J1:imm6:imm11,
   shifted left one).  */
5158 sign = sbits (insn1, 10, 10);
5159 imm1 = bits (insn1, 0, 5);
5160 imm2 = bits (insn2, 0, 10);
5161 j1 = bit (insn2, 13);
5162 j2 = bit (insn2, 11);
5164 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5165 offset += (imm1 << 12) + (imm2 << 1);
5167 if (last_breakpoint > 0)
5168 return 0; /* More than one conditional branch found,
5169 fallback to the standard code. */
5171 breaks[1] = loc + offset;
5175 /* We do not support atomic sequences that use any *other*
5176 instructions but conditional branches to change the PC.
5177 Fall back to standard code to avoid losing control of
5179 else if (thumb2_instruction_changes_pc (insn1, insn2))
5182 /* If we find a strex{,b,h,d}, we're done. */
5183 if ((insn1 & 0xfff0) == 0xe840
5184 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5189 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5190 if (insn_count == atomic_sequence_length)
5193 /* Insert a breakpoint right after the end of the atomic sequence. */
5196 /* Check for duplicated breakpoints. Check also for a breakpoint
5197 placed (branch instruction's destination) anywhere in sequence. */
5199 && (breaks[1] == breaks[0]
5200 || (breaks[1] >= pc && breaks[1] < loc)))
5201 last_breakpoint = 0;
5203 /* Effectively inserts the breakpoints. */
5204 for (index = 0; index <= last_breakpoint; index++)
/* Tag the addresses as Thumb so the right breakpoint kind is used.  */
5205 arm_insert_single_step_breakpoint (gdbarch, aspace,
5206 MAKE_THUMB_ADDR (breaks[index]));
/* ARM-mode counterpart of thumb_deal_with_atomic_sequence_raw: detect a
   LDREX..STREX sequence at the current PC and step over it by placing
   breakpoints past the sequence (and at any single conditional-branch
   destination inside it).  Returns 0 to fall back to ordinary
   single-stepping.  */
5212 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5214 struct gdbarch *gdbarch = get_frame_arch (frame);
5215 struct address_space *aspace = get_frame_address_space (frame);
5216 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5217 CORE_ADDR pc = get_frame_pc (frame);
/* breaks[0]: address past the sequence; breaks[1]: optional branch
   destination within the sequence.  */
5218 CORE_ADDR breaks[2] = {-1, -1};
5223 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5224 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5226 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5227 Note that we do not currently support conditionally executed atomic
5229 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
/* Mask matches the LDREX/LDREXB/LDREXH/LDREXD encoding family with the
   condition field forced to AL.  */
5231 if ((insn & 0xff9000f0) != 0xe1900090)
5234 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5236 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5238 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5241 /* Assume that there is at most one conditional branch in the atomic
5242 sequence. If a conditional branch is found, put a breakpoint in
5243 its destination address. */
5244 if (bits (insn, 24, 27) == 0xa)
5246 if (last_breakpoint > 0)
5247 return 0; /* More than one conditional branch found, fallback
5248 to the standard single-step code. */
/* BranchDest expects the branch's own address; LOC has already been
   advanced past the instruction, hence the -4.  */
5250 breaks[1] = BranchDest (loc - 4, insn);
5254 /* We do not support atomic sequences that use any *other* instructions
5255 but conditional branches to change the PC. Fall back to standard
5256 code to avoid losing control of execution. */
5257 else if (arm_instruction_changes_pc (insn))
5260 /* If we find a strex{,b,h,d}, we're done. */
5261 if ((insn & 0xff9000f0) == 0xe1800090)
5265 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5266 if (insn_count == atomic_sequence_length)
5269 /* Insert a breakpoint right after the end of the atomic sequence. */
5272 /* Check for duplicated breakpoints. Check also for a breakpoint
5273 placed (branch instruction's destination) anywhere in sequence. */
5275 && (breaks[1] == breaks[0]
5276 || (breaks[1] >= pc && breaks[1] < loc)))
5277 last_breakpoint = 0;
5279 /* Effectively inserts the breakpoints. */
5280 for (index = 0; index <= last_breakpoint; index++)
5281 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
/* Dispatch atomic-sequence handling on the frame's execution mode.
   Returns the raw helper's result (0 = not handled, fall back to normal
   single-stepping).  */
5287 arm_deal_with_atomic_sequence (struct frame_info *frame)
5289 if (arm_frame_is_thumb (frame))
5290 return thumb_deal_with_atomic_sequence_raw (frame);
5292 return arm_deal_with_atomic_sequence_raw (frame);
5295 /* single_step() is called just before we want to resume the inferior,
5296 if we want to single-step it but there is no hardware or kernel
5297 single-step support. We find the target of the coming instruction
5298 and breakpoint it. */
5301 arm_software_single_step (struct frame_info *frame)
5303 struct gdbarch *gdbarch = get_frame_arch (frame);
5304 struct address_space *aspace = get_frame_address_space (frame);
/* ldrex/strex sequences must be stepped over as a unit; if one was
   detected and handled, the breakpoints are already placed.  */
5307 if (arm_deal_with_atomic_sequence (frame))
/* Otherwise compute the next PC and breakpoint it, honouring the
   Thumb bit encoded in the returned address.  */
5310 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5311 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5316 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5317 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5318 NULL if an error occurs. BUF is freed. */
5321 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5322 int old_len, int new_len)
5325 int bytes_to_read = new_len - old_len;
5327 new_buf = xmalloc (new_len);
/* The old contents occupy the tail of the new buffer; the newly exposed
   prefix is then filled from target memory below the old start.  */
5328 memcpy (new_buf + bytes_to_read, buf, old_len);
5330 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5338 /* An IT block is at most the 2-byte IT instruction followed by
5339 four 4-byte instructions. The furthest back we must search to
5340 find an IT block that affects the current instruction is thus
5341 2 + 3 * 4 == 14 bytes. */
5342 #define MAX_IT_BLOCK_PREFIX 14
5344 /* Use a quick scan if there are more than this many bytes of
/* instructions (continuation of the comment above; the original
   closing line is elided in this excerpt).  */
5346 #define IT_SCAN_THRESHOLD 32
5348 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5349 A breakpoint in an IT block may not be hit, depending on the
/* Returns the (possibly moved) breakpoint address.  BPADDR is the
   requested breakpoint location.  */
5352 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5356 CORE_ADDR boundary, func_start;
5358 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5359 int i, any, last_it, last_it_count;
5361 /* If we are using BKPT breakpoints, none of this is necessary. */
5362 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5365 /* ARM mode does not have this problem. */
5366 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5369 /* We are setting a breakpoint in Thumb code that could potentially
5370 contain an IT block. The first step is to find how much Thumb
5371 code there is; we do not need to read outside of known Thumb
5373 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5375 /* Thumb-2 code must have mapping symbols to have a chance. */
5378 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
/* Never scan back past the start of the containing function: an IT
   block cannot span a function boundary.  */
5380 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5381 && func_start > boundary)
5382 boundary = func_start;
5384 /* Search for a candidate IT instruction. We have to do some fancy
5385 footwork to distinguish a real IT instruction from the second
5386 half of a 32-bit instruction, but there is no need for that if
5387 there's no candidate. */
5388 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX)
5390 /* No room for an IT instruction. */
5393 buf = xmalloc (buf_len);
5394 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5397 for (i = 0; i < buf_len; i += 2)
5399 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
/* 0xbfXY with Y != 0 is the IT instruction encoding (Y == 0 would be
   a hint such as NOP).  */
5400 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5412 /* OK, the code bytes before this instruction contain at least one
5413 halfword which resembles an IT instruction. We know that it's
5414 Thumb code, but there are still two possibilities. Either the
5415 halfword really is an IT instruction, or it is the second half of
5416 a 32-bit Thumb instruction. The only way we can tell is to
5417 scan forwards from a known instruction boundary. */
5418 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5422 /* There's a lot of code before this instruction. Start with an
5423 optimistic search; it's easy to recognize halfwords that can
5424 not be the start of a 32-bit instruction, and use that to
5425 lock on to the instruction boundaries. */
5426 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5429 buf_len = IT_SCAN_THRESHOLD;
/* NOTE(review): `sizeof (buf)' here appears to operate on a pointer,
   yielding the pointer size rather than an element count — verify the
   intended scan bound against upstream GDB.  */
5432 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5434 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5435 if (thumb_insn_size (inst1) == 2)
5442 /* At this point, if DEFINITE, BUF[I] is the first place we
5443 are sure that we know the instruction boundaries, and it is far
5444 enough from BPADDR that we could not miss an IT instruction
5445 affecting BPADDR. If ! DEFINITE, give up - start from a
5449 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5453 buf_len = bpaddr - boundary;
5459 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5462 buf_len = bpaddr - boundary;
5466 /* Scan forwards. Find the last IT instruction before BPADDR. */
5471 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5473 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
/* The low bits of the IT mask encode how many following instructions
   the block covers.  */
5478 else if (inst1 & 0x0002)
5480 else if (inst1 & 0x0004)
5485 i += thumb_insn_size (inst1);
5491 /* There wasn't really an IT instruction after all. */
5494 if (last_it_count < 1)
5495 /* It was too far away. */
5498 /* This really is a trouble spot. Move the breakpoint to the IT
5500 return bpaddr - buf_len + last_it;
5503 /* ARM displaced stepping support.
5505 Generally ARM displaced stepping works as follows:
5507 1. When an instruction is to be single-stepped, it is first decoded by
5508 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5509 Depending on the type of instruction, it is then copied to a scratch
5510 location, possibly in a modified form. The copy_* set of functions
5511 performs such modification, as necessary. A breakpoint is placed after
5512 the modified instruction in the scratch space to return control to GDB.
5513 Note in particular that instructions which modify the PC will no longer
5514 do so after modification.
5516 2. The instruction is single-stepped, by setting the PC to the scratch
5517 location address, and resuming. Control returns to GDB when the
5520 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5521 function used for the current instruction. This function's job is to
5522 put the CPU/memory state back to what it would have been if the
5523 instruction had been executed unmodified in its original location. */
5525 /* NOP instruction (mov r0, r0). */
5526 #define ARM_NOP 0xe1a00000
/* 16-bit Thumb equivalent (mov r0, r0).  */
5527 #define THUMB_NOP 0x4600
5529 /* Helper for register reads for displaced stepping. In particular, this
5530 returns the PC as it would be seen by the instruction at its original
/* REGS is the regcache; DSC carries the original instruction address and
   mode.  For REGNO == PC the value is synthesized from dsc->insn_addr
   plus the pipeline offset, not read from the regcache.  */
5534 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5538 CORE_ADDR from = dsc->insn_addr;
5540 if (regno == ARM_PC_REGNUM)
5542 /* Compute pipeline offset:
5543 - When executing an ARM instruction, PC reads as the address of the
5544 current instruction plus 8.
5545 - When executing a Thumb instruction, PC reads as the address of the
5546 current instruction plus 4. */
5553 if (debug_displaced)
5554 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5555 (unsigned long) from);
5556 return (ULONGEST) from;
/* Ordinary registers come straight from the regcache.  */
5560 regcache_cooked_read_unsigned (regs, regno, &ret);
5561 if (debug_displaced)
5562 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5563 regno, (unsigned long) ret);
/* Return nonzero if the inferior is currently executing in ARM (not
   Thumb) state, i.e. the CPSR T bit is clear.  */
5569 displaced_in_arm_mode (struct regcache *regs)
5572 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5574 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5576 return (ps & t_bit) == 0;
5579 /* Write to the PC as from a branch instruction. */
/* A plain branch cannot change instruction set; the destination is
   aligned for the current mode (4 bytes for ARM, 2 for Thumb).  The
   mode test between the two writes is elided in this excerpt.  */
5582 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5586 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5587 architecture versions < 6. */
5588 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5589 val & ~(ULONGEST) 0x3);
/* Thumb destination: only clear bit 0.  */
5591 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5592 val & ~(ULONGEST) 0x1);
5595 /* Write to the PC as from a branch-exchange instruction. */
/* Interworking semantics: bit 0 of VAL selects Thumb; a word-misaligned
   ARM destination is unpredictable and is forced to ARM/word-aligned
   with a warning.  */
5598 bx_write_pc (struct regcache *regs, ULONGEST val)
5601 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5603 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
/* Bit 0 set: switch to Thumb and clear the Thumb bit from the PC.  */
5607 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5608 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
/* Bits 1:0 == 00: word-aligned ARM destination.  */
5610 else if ((val & 2) == 0)
5612 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5613 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5617 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5618 mode, align dest to 4 bytes). */
5619 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5620 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5621 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5625 /* Write to the PC as if from a load instruction. */
/* From ARMv5T onward, loading the PC behaves like BX (may switch
   instruction set); earlier architectures treat it as a plain branch.  */
5628 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5631 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5632 bx_write_pc (regs, val);
5634 branch_write_pc (regs, dsc, val);
5637 /* Write to the PC as if from an ALU instruction. */
/* In ARM state on ARMv7+, ALU writes to the PC interwork like BX;
   Thumb-state ALU writes (and older architectures) branch without
   changing instruction set.  */
5640 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5643 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5644 bx_write_pc (regs, val);
5646 branch_write_pc (regs, dsc, val);
5649 /* Helper for writing to registers for displaced stepping. Writing to the PC
5650 has varying effects depending on the instruction which does the write:
5651 this is controlled by the WRITE_PC argument. */
5654 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5655 int regno, ULONGEST val, enum pc_write_style write_pc)
5657 if (regno == ARM_PC_REGNUM)
5659 if (debug_displaced)
5660 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5661 (unsigned long) val);
/* Dispatch to the PC-write helper matching the originating
   instruction class.  */
5664 case BRANCH_WRITE_PC:
5665 branch_write_pc (regs, dsc, val);
5669 bx_write_pc (regs, val);
5673 load_write_pc (regs, dsc, val);
5677 alu_write_pc (regs, dsc, val);
/* CANNOT_WRITE_PC marks register writes used only for scratch setup;
   reaching here with the PC indicates a decoding bug.  */
5680 case CANNOT_WRITE_PC:
5681 warning (_("Instruction wrote to PC in an unexpected way when "
5682 "single-stepping"));
5686 internal_error (__FILE__, __LINE__,
5687 _("Invalid argument to displaced_write_reg"));
/* Record that the stepped instruction changed the PC, so the fixup
   phase will not advance it again.  */
5690 dsc->wrote_to_pc = 1;
5694 if (debug_displaced)
5695 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5696 regno, (unsigned long) val);
5697 regcache_cooked_write_unsigned (regs, regno, val);
5701 /* This function is used to concisely determine if an instruction INSN
5702 references PC. Register fields of interest in INSN should have the
5703 corresponding fields of BITMASK set to 0b1111. The function
5704 returns 1 if any of these fields in INSN reference the PC
5705 (also 0b1111, r15), else it returns 0. */
5708 insn_references_pc (uint32_t insn, uint32_t bitmask)
5710 uint32_t lowbit = 1;
5712 while (bitmask != 0)
/* Find the lowest set bit of the remaining mask: the start of the
   next 4-bit register field.  */
5716 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
/* A field equal to 0b1111 names r15 (the PC).  */
5722 mask = lowbit * 0xf;
5724 if ((insn & mask) == mask)
5733 /* The simplest copy function. Many instructions have the same effect no
5734 matter what address they are executed at: in those cases, use this. */
/* INAME is a short opcode/class name used only for debug output.  The
   instruction is placed in the scratch buffer unchanged.  */
5737 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5738 const char *iname, struct displaced_step_closure *dsc)
5740 if (debug_displaced)
5741 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5742 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5745 dsc->modinsn[0] = insn;
/* Copy a 32-bit Thumb-2 instruction (two halfwords INSN1/INSN2) to the
   scratch space without modification.  INAME is for debug output only.  */
5751 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5752 uint16_t insn2, const char *iname,
5753 struct displaced_step_closure *dsc)
5755 if (debug_displaced)
5756 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5757 "opcode/class '%s' unmodified\n", insn1, insn2,
5760 dsc->modinsn[0] = insn1;
5761 dsc->modinsn[1] = insn2;
5767 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
/* modification — the 16-bit analogue of arm_copy_unmodified.  */
5770 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5772 struct displaced_step_closure *dsc)
5774 if (debug_displaced)
5775 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5776 "opcode/class '%s' unmodified\n", insn,
5779 dsc->modinsn[0] = insn;
5784 /* Preload instructions with immediate offset. */
/* Cleanup: restore r0 (and r1 for the register-offset form, flagged by
   dsc->u.preload.immed == 0) from the temporaries saved at copy time.  */
5787 cleanup_preload (struct gdbarch *gdbarch,
5788 struct regcache *regs, struct displaced_step_closure *dsc)
5790 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5791 if (!dsc->u.preload.immed)
5792 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
/* Common setup for immediate-offset preload copies: save r0, substitute
   RN's value into r0, and register the cleanup that restores it.  */
5796 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5797 struct displaced_step_closure *dsc, unsigned int rn)
5800 /* Preload instructions:
5802 {pli/pld} [rn, #+/-imm]
5804 {pli/pld} [r0, #+/-imm]. */
5806 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5807 rn_val = displaced_read_reg (regs, dsc, rn);
5808 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
/* Immediate form: cleanup only needs to restore r0.  */
5809 dsc->u.preload.immed = 1;
5811 dsc->cleanup = &cleanup_preload;
/* Copy an ARM PLD/PLI with immediate offset for displaced stepping.
   Only needed when Rn is the PC; otherwise the insn is copied as-is.  */
5815 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5816 struct displaced_step_closure *dsc)
5818 unsigned int rn = bits (insn, 16, 19);
/* 0x000f0000 selects the Rn field (bits 16-19).  */
5820 if (!insn_references_pc (insn, 0x000f0000ul))
5821 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5823 if (debug_displaced)
5824 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5825 (unsigned long) insn);
/* Clear the Rn field, rewriting the base register to r0.  */
5827 dsc->modinsn[0] = insn & 0xfff0ffff;
5829 install_preload (gdbarch, regs, dsc, rn);
/* Copy a Thumb-2 PLD/PLI (literal form, base register == PC) for
   displaced stepping.  */
5835 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5836 struct regcache *regs, struct displaced_step_closure *dsc)
5838 unsigned int rn = bits (insn1, 0, 3);
5839 unsigned int u_bit = bit (insn1, 7);
5840 int imm12 = bits (insn2, 0, 11);
5843 if (rn != ARM_PC_REGNUM)
5844 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5846 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3,
5847 and PLD (literal) Encoding T1. */
5848 if (debug_displaced)
5849 fprintf_unfiltered (gdb_stdlog,
5850 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5851 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5857 /* Rewrite instruction {pli/pld} PC imm12 into:
5858 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5862 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5864 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5865 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
/* The synthesized PC value (insn address + pipeline offset) becomes the
   base; the literal offset is materialized in r1.  */
5867 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5869 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5870 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
/* Register form: cleanup must restore both r0 and r1.  */
5871 dsc->u.preload.immed = 0;
5873 /* {pli/pld} [r0, r1] */
5874 dsc->modinsn[0] = insn1 & 0xfff0;
5875 dsc->modinsn[1] = 0xf001;
5878 dsc->cleanup = &cleanup_preload;
5882 /* Preload instructions with register offset. */
/* Save r0/r1, substitute RN into r0 and RM into r1, and register the
   cleanup that restores both.  */
5885 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5886 struct displaced_step_closure *dsc, unsigned int rn,
5889 ULONGEST rn_val, rm_val;
5891 /* Preload register-offset instructions:
5893 {pli/pld} [rn, rm {, shift}]
5895 {pli/pld} [r0, r1 {, shift}]. */
5897 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5898 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5899 rn_val = displaced_read_reg (regs, dsc, rn);
5900 rm_val = displaced_read_reg (regs, dsc, rm);
5901 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5902 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
/* Register form: cleanup restores both r0 and r1.  */
5903 dsc->u.preload.immed = 0;
5905 dsc->cleanup = &cleanup_preload;
/* Copy an ARM register-offset PLD/PLI for displaced stepping; only
   needed when Rn or Rm is the PC.  */
5909 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5910 struct regcache *regs,
5911 struct displaced_step_closure *dsc)
5913 unsigned int rn = bits (insn, 16, 19);
5914 unsigned int rm = bits (insn, 0, 3);
/* 0x000f000f covers both the Rn (bits 16-19) and Rm (bits 0-3) fields.  */
5917 if (!insn_references_pc (insn, 0x000f000ful))
5918 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5920 if (debug_displaced)
5921 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5922 (unsigned long) insn);
/* Rewrite Rn to r0 (field cleared) and Rm to r1 (| 0x1).  */
5924 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5926 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5930 /* Copy/cleanup coprocessor load and store instructions. */
/* Cleanup: capture the (possibly written-back) base from r0, restore r0,
   and if the original insn used writeback, propagate the new base value
   into the real Rn — using LOAD_WRITE_PC semantics in case Rn is the
   PC.  */
5933 cleanup_copro_load_store (struct gdbarch *gdbarch,
5934 struct regcache *regs,
5935 struct displaced_step_closure *dsc)
5937 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5939 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5941 if (dsc->u.ldst.writeback)
5942 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
/* Common setup for coprocessor load/store copies: save r0, substitute
   RN's (word-aligned) value into r0, and record writeback state for the
   cleanup.  */
5946 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5947 struct displaced_step_closure *dsc,
5948 int writeback, unsigned int rn)
5952 /* Coprocessor load/store instructions:
5954 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5956 {stc/stc2} [r0, #+/-imm].
5958 ldc/ldc2 are handled identically. */
5960 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5961 rn_val = displaced_read_reg (regs, dsc, rn);
5962 /* PC should be 4-byte aligned. */
5963 rn_val = rn_val & 0xfffffffc;
5964 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5966 dsc->u.ldst.writeback = writeback;
5967 dsc->u.ldst.rn = rn;
5969 dsc->cleanup = &cleanup_copro_load_store;
/* Copy an ARM coprocessor load/store (LDC/STC family) for displaced
   stepping; only needed when the base register Rn is the PC.  */
5973 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5974 struct regcache *regs,
5975 struct displaced_step_closure *dsc)
5977 unsigned int rn = bits (insn, 16, 19);
5979 if (!insn_references_pc (insn, 0x000f0000ul))
5980 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5982 if (debug_displaced)
5983 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5984 "load/store insn %.8lx\n", (unsigned long) insn);
/* Clear the Rn field, rewriting the base register to r0.  */
5986 dsc->modinsn[0] = insn & 0xfff0ffff;
/* Bit 25 is the writeback (W) bit for this encoding.  */
5988 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
/* Thumb-2 counterpart of arm_copy_copro_load_store; only PC-relative
   (literal) forms need rewriting.  */
5994 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5995 uint16_t insn2, struct regcache *regs,
5996 struct displaced_step_closure *dsc)
5998 unsigned int rn = bits (insn1, 0, 3);
6000 if (rn != ARM_PC_REGNUM)
6001 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6002 "copro load/store", dsc);
6004 if (debug_displaced)
6005 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6006 "load/store insn %.4x%.4x\n", insn1, insn2);
/* Clear the Rn field in the first halfword, rewriting the base to r0.  */
6008 dsc->modinsn[0] = insn1 & 0xfff0;
6009 dsc->modinsn[1] = insn2;
6012 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6013 doesn't support writeback, so pass 0. */
6014 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6019 /* Clean up branch instructions (actually perform the branch, by setting
/* the PC/LR from the state recorded in dsc->u.branch, but only when the
   recorded condition evaluates true against the current flags).  */
6023 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6024 struct displaced_step_closure *dsc)
6026 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6027 int branch_taken = condition_true (dsc->u.branch.cond, status);
/* Exchanging branches (BX/BLX) may switch instruction set, so they use
   BX_WRITE_PC semantics.  */
6028 enum pc_write_style write_pc = dsc->u.branch.exchange
6029 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6034 if (dsc->u.branch.link)
6036 /* The value of LR should be the next insn of current one. In order
6037 not to confuse logic handling later insn `bx lr', if current insn mode
6038 is Thumb, the bit 0 of LR value should be set to 1. */
6039 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6042 next_insn_addr |= 0x1;
6044 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6048 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6051 /* Copy B/BL/BLX instructions with immediate destinations. */
/* Records condition, link and exchange flags plus the computed absolute
   destination in dsc->u.branch; the scratch insn itself is a NOP and the
   real branch happens in cleanup_branch.  */
6054 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6055 struct displaced_step_closure *dsc,
6056 unsigned int cond, int exchange, int link, long offset)
6058 /* Implement "BL<cond> <label>" as:
6060 Preparation: cond <- instruction condition
6061 Insn: mov r0, r0 (nop)
6062 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6064 B<cond> similar, but don't set r14 in cleanup. */
6066 dsc->u.branch.cond = cond;
6067 dsc->u.branch.link = link;
6068 dsc->u.branch.exchange = exchange;
6070 dsc->u.branch.dest = dsc->insn_addr;
6071 if (link && exchange)
6072 /* For BLX, offset is computed from the Align (PC, 4). */
6073 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
/* Pipeline offset: +4 for Thumb, +8 for ARM (mode test elided in this
   excerpt).  */
6076 dsc->u.branch.dest += 4 + offset;
6078 dsc->u.branch.dest += 8 + offset;
6080 dsc->cleanup = &cleanup_branch;
/* Decode an ARM B/BL/BLX(immediate) and hand off to install_b_bl_blx.  */
6083 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6084 struct regcache *regs, struct displaced_step_closure *dsc)
6086 unsigned int cond = bits (insn, 28, 31);
/* Condition field 0xf marks the unconditional BLX(immediate) encoding.  */
6087 int exchange = (cond == 0xf);
6088 int link = exchange || bit (insn, 24);
6091 if (debug_displaced)
6092 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6093 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6094 (unsigned long) insn);
6096 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6097 then arrange the switch into Thumb mode. */
/* For BLX, bit 24 (H) supplies the halfword offset bit.  */
6098 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6100 offset = bits (insn, 0, 23) << 2;
/* Sign-extend the 26-bit branch offset.  */
6102 if (bit (offset, 25))
6103 offset = offset | ~0x3ffffff;
6105 dsc->modinsn[0] = ARM_NOP;
6107 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
/* Decode a 32-bit Thumb-2 B/BL/BLX(immediate) (encodings T3/T4 for B,
   T1 for BL/BLX) and hand off to install_b_bl_blx.  */
6112 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6113 uint16_t insn2, struct regcache *regs,
6114 struct displaced_step_closure *dsc)
6116 int link = bit (insn2, 14);
6117 int exchange = link && !bit (insn2, 12);
/* S/J1/J2 combine into the top offset bits; I1/I2 are the decoded forms
   used by the BL/BLX (T1) encoding.  */
6120 int j1 = bit (insn2, 13);
6121 int j2 = bit (insn2, 11);
6122 int s = sbits (insn1, 10, 10);
6123 int i1 = !(j1 ^ bit (insn1, 10));
6124 int i2 = !(j2 ^ bit (insn1, 10));
6126 if (!link && !exchange) /* B */
6128 offset = (bits (insn2, 0, 10) << 1);
6129 if (bit (insn2, 12)) /* Encoding T4 */
6131 offset |= (bits (insn1, 0, 9) << 12)
6137 else /* Encoding T3 */
6139 offset |= (bits (insn1, 0, 5) << 12)
/* Encoding T3 carries an explicit condition field.  */
6143 cond = bits (insn1, 6, 9);
6148 offset = (bits (insn1, 0, 9) << 12);
6149 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
/* BLX targets are word-aligned (imm10H << 2); BL targets are
   halfword-aligned (imm11 << 1).  */
6150 offset |= exchange ?
6151 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6154 if (debug_displaced)
6155 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6156 "%.4x %.4x with offset %.8lx\n",
6157 link ? (exchange) ? "blx" : "bl" : "b",
6158 insn1, insn2, offset);
6160 dsc->modinsn[0] = THUMB_NOP;
6162 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6166 /* Copy B Thumb instructions. */
/* Handles the 16-bit conditional (T1) and unconditional (T2) Thumb
   branch encodings; the scratch insn is a NOP and cleanup_branch
   performs the actual branch.  */
6168 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6169 struct displaced_step_closure *dsc)
6171 unsigned int cond = 0;
6173 unsigned short bit_12_15 = bits (insn, 12, 15);
6174 CORE_ADDR from = dsc->insn_addr;
/* 0xd: conditional branch, encoding T1 (imm8).  */
6176 if (bit_12_15 == 0xd)
6178 /* offset = SignExtend (imm8:0, 32) */
6179 offset = sbits ((insn << 1), 0, 8);
6180 cond = bits (insn, 8, 11);
6182 else if (bit_12_15 == 0xe) /* Encoding T2 */
6184 offset = sbits ((insn << 1), 0, 11);
6188 if (debug_displaced)
6189 fprintf_unfiltered (gdb_stdlog,
6190 "displaced: copying b immediate insn %.4x "
6191 "with offset %d\n", insn, offset);
6193 dsc->u.branch.cond = cond;
6194 dsc->u.branch.link = 0;
6195 dsc->u.branch.exchange = 0;
/* Destination is relative to the insn address plus the Thumb pipeline
   offset of 4.  */
6196 dsc->u.branch.dest = from + 4 + offset;
6198 dsc->modinsn[0] = THUMB_NOP;
6200 dsc->cleanup = &cleanup_branch;
6205 /* Copy BX/BLX with register-specified destinations. */
/* Reads the destination from RM at copy time and stages it in
   dsc->u.branch for cleanup_branch; exchange is always set since BX/BLX
   may switch instruction set.  */
6208 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6209 struct displaced_step_closure *dsc, int link,
6210 unsigned int cond, unsigned int rm)
6212 /* Implement {BX,BLX}<cond> <reg>" as:
6214 Preparation: cond <- instruction condition
6215 Insn: mov r0, r0 (nop)
6216 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6218 Don't set r14 in cleanup for BX. */
6220 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6222 dsc->u.branch.cond = cond;
6223 dsc->u.branch.link = link;
6225 dsc->u.branch.exchange = 1;
6227 dsc->cleanup = &cleanup_branch;
/* Displaced-stepping copy of an ARM BX/BLX (register) instruction:
   decode the condition, link bit and target register, substitute an
   ARM NOP, and delegate the branch itself to install_bx_blx_reg.  */
6231 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6232 struct regcache *regs, struct displaced_step_closure *dsc)
6234 unsigned int cond = bits (insn, 28, 31);
6237 int link = bit (insn, 5);
6238 unsigned int rm = bits (insn, 0, 3);
6240 if (debug_displaced)
6241 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6242 (unsigned long) insn);
6244 dsc->modinsn[0] = ARM_NOP;
6246 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
/* Displaced-stepping copy of a 16-bit Thumb BX/BLX (register)
   instruction.  Thumb has no condition field in this encoding, so the
   branch is installed unconditionally (INST_AL).  */
6251 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6252 struct regcache *regs,
6253 struct displaced_step_closure *dsc)
6255 int link = bit (insn, 7);
6256 unsigned int rm = bits (insn, 3, 6);
6258 if (debug_displaced)
6259 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6260 (unsigned short) insn);
6262 dsc->modinsn[0] = THUMB_NOP;
6264 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6270 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
/* Cleanup: the modified insn left its result in r0.  Restore the r0/r1
   scratch registers from tmp[], then move the result into the real
   destination register; ALU_WRITE_PC applies the architectural rules
   if that destination is the PC.  */
6273 cleanup_alu_imm (struct gdbarch *gdbarch,
6274 struct regcache *regs, struct displaced_step_closure *dsc)
6276 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6277 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6278 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6279 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Displaced-stepping copy of an ARM data-processing instruction with
   an immediate operand.  Instructions that do not reference the PC
   (mask 0x000ff000 covers the Rd and Rn fields) are run unmodified;
   otherwise the operands are remapped onto scratch registers r0/r1 and
   cleanup_alu_imm moves the result back.  */
6283 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6284 struct displaced_step_closure *dsc)
6286 unsigned int rn = bits (insn, 16, 19);
6287 unsigned int rd = bits (insn, 12, 15);
6288 unsigned int op = bits (insn, 21, 24);
/* Opcode 0xd is MOV, which has no Rn operand.  */
6289 int is_mov = (op == 0xd);
6290 ULONGEST rd_val, rn_val;
6292 if (!insn_references_pc (insn, 0x000ff000ul))
6293 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6295 if (debug_displaced)
6296 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6297 "%.8lx\n", is_mov ? "move" : "ALU",
6298 (unsigned long) insn);
6300 /* Instruction is of form:
6302 <op><cond> rd, [rn,] #imm
6306 Preparation: tmp1, tmp2 <- r0, r1;
6308 Insn: <op><cond> r0, r1, #imm
6309 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6312 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6313 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6314 rn_val = displaced_read_reg (regs, dsc, rn);
6315 rd_val = displaced_read_reg (regs, dsc, rd);
6316 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6317 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
/* Clear the Rd/Rn fields (bits 12-19) so the insn targets r0; for
   non-MOV also set the Rn field (bit 16) to point at r1.  */
6321 dsc->modinsn[0] = insn & 0xfff00fff;
6323 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6325 dsc->cleanup = &cleanup_alu_imm;
/* Displaced-stepping copy of a 32-bit Thumb-2 data-processing
   (immediate) instruction.  Only MOV (op == 0x2, rn == 0xf) reaches
   this routine, per the gdb_assert below; if neither Rm nor Rd is the
   PC the instruction is simply run unmodified.  */
6331 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6332 uint16_t insn2, struct regcache *regs,
6333 struct displaced_step_closure *dsc)
6335 unsigned int op = bits (insn1, 5, 8);
6336 unsigned int rn, rm, rd;
6337 ULONGEST rd_val, rn_val;
6339 rn = bits (insn1, 0, 3); /* Rn */
6340 rm = bits (insn2, 0, 3); /* Rm */
6341 rd = bits (insn2, 8, 11); /* Rd */
6343 /* This routine is only called for instruction MOV. */
6344 gdb_assert (op == 0x2 && rn == 0xf);
6346 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6347 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6349 if (debug_displaced)
6350 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6351 "ALU", insn1, insn2);
6353 /* Instruction is of form:
6355 <op><cond> rd, [rn,] #imm
6359 Preparation: tmp1, tmp2 <- r0, r1;
6361 Insn: <op><cond> r0, r1, #imm
6362 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6365 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6366 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6367 rn_val = displaced_read_reg (regs, dsc, rn);
6368 rd_val = displaced_read_reg (regs, dsc, rd);
6369 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6370 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
/* Second halfword: clear Rd/Rm fields and substitute r0 / r1.  */
6373 dsc->modinsn[0] = insn1;
6374 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6377 dsc->cleanup = &cleanup_alu_imm;
6382 /* Copy/cleanup arithmetic/logic insns with register RHS. */
/* Cleanup: fetch the result the modified insn computed into r0,
   restore the three scratch registers r0-r2 from tmp[], then write the
   result into the true destination (honouring PC-write semantics).  */
6385 cleanup_alu_reg (struct gdbarch *gdbarch,
6386 struct regcache *regs, struct displaced_step_closure *dsc)
6391 rd_val = displaced_read_reg (regs, dsc, 0);
6393 for (i = 0; i < 3; i++)
6394 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6396 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Shared worker for ARM/Thumb register-operand ALU copy routines:
   save r0-r2, load them with the values of RD/RN/RM so the rewritten
   insn can use the low registers, and schedule cleanup_alu_reg.  */
6400 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6401 struct displaced_step_closure *dsc,
6402 unsigned int rd, unsigned int rn, unsigned int rm)
6404 ULONGEST rd_val, rn_val, rm_val;
6406 /* Instruction is of form:
6408 <op><cond> rd, [rn,] rm [, <shift>]
6412 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6413 r0, r1, r2 <- rd, rn, rm
6414 Insn: <op><cond> r0, r1, r2 [, <shift>]
6415 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6418 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6419 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6420 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6421 rd_val = displaced_read_reg (regs, dsc, rd);
6422 rn_val = displaced_read_reg (regs, dsc, rn);
6423 rm_val = displaced_read_reg (regs, dsc, rm);
6424 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6425 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6426 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6429 dsc->cleanup = &cleanup_alu_reg;
/* Displaced-stepping copy of an ARM data-processing instruction with a
   register operand.  PC-free instructions (mask 0x000ff00f covers Rd,
   Rn and Rm) run unmodified; otherwise the register fields are
   rewritten to r0/r1/r2 and install_alu_reg sets up the scratch
   registers and cleanup.  */
6433 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6434 struct displaced_step_closure *dsc)
6436 unsigned int op = bits (insn, 21, 24);
/* Opcode 0xd is MOV, which has no Rn operand.  */
6437 int is_mov = (op == 0xd);
6439 if (!insn_references_pc (insn, 0x000ff00ful))
6440 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6442 if (debug_displaced)
6443 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6444 is_mov ? "move" : "ALU", (unsigned long) insn);
/* Clear Rd/Rn/Rm fields; set Rm to r2 (| 0x2), and for non-MOV also
   set Rn to r1 (| 0x10000).  */
6447 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6449 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6451 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
/* Displaced-stepping copy of a 16-bit Thumb data-processing
   (hi-register) instruction.  If neither decoded register is the PC
   the instruction runs unmodified; otherwise the register fields are
   rewritten onto low scratch registers and install_alu_reg handles the
   save/restore.  */
6457 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6458 struct regcache *regs,
6459 struct displaced_step_closure *dsc)
6461 unsigned rn, rm, rd;
/* Hi-register encoding: Rm in bits 3-6, Rd/Rn split across bit 7 and
   bits 0-2.  */
6463 rd = bits (insn, 3, 6);
6464 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6467 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6468 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6470 if (debug_displaced)
6471 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6472 "ALU", (unsigned short) insn);
6474 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6476 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6481 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
/* Cleanup: restore the four scratch registers r0-r3 from tmp[], then
   write the result (left in r0 by the modified insn) into the true
   destination register with PC-write semantics.  */
6484 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6485 struct regcache *regs,
6486 struct displaced_step_closure *dsc)
6488 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6491 for (i = 0; i < 4; i++)
6492 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6494 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
/* Shared worker for ALU instructions whose second operand is a
   register shifted by a register: save r0-r3, load them with the
   values of RD/RN/RM/RS, and schedule cleanup_alu_shifted_reg.  */
6498 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6499 struct displaced_step_closure *dsc,
6500 unsigned int rd, unsigned int rn, unsigned int rm,
6504 ULONGEST rd_val, rn_val, rm_val, rs_val;
6506 /* Instruction is of form:
6508 <op><cond> rd, [rn,] rm, <shift> rs
6512 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6513 r0, r1, r2, r3 <- rd, rn, rm, rs
6514 Insn: <op><cond> r0, r1, r2, <shift> r3
6516 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6520 for (i = 0; i < 4; i++)
6521 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6523 rd_val = displaced_read_reg (regs, dsc, rd);
6524 rn_val = displaced_read_reg (regs, dsc, rn);
6525 rm_val = displaced_read_reg (regs, dsc, rm);
6526 rs_val = displaced_read_reg (regs, dsc, rs);
6527 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6528 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6529 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6530 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6532 dsc->cleanup = &cleanup_alu_shifted_reg;
/* Displaced-stepping copy of an ARM data-processing instruction whose
   second operand is a register shifted by a register.  PC-free
   instructions (mask 0x000fff0f covers Rd, Rn, Rs, Rm) run
   unmodified; otherwise the register fields are remapped onto
   r0-r3.  */
6536 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6537 struct regcache *regs,
6538 struct displaced_step_closure *dsc)
6540 unsigned int op = bits (insn, 21, 24);
/* Opcode 0xd is MOV, which has no Rn operand.  */
6541 int is_mov = (op == 0xd);
6542 unsigned int rd, rn, rm, rs;
6544 if (!insn_references_pc (insn, 0x000fff0ful))
6545 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6547 if (debug_displaced)
6548 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6549 "%.8lx\n", is_mov ? "move" : "ALU",
6550 (unsigned long) insn);
6552 rn = bits (insn, 16, 19);
6553 rm = bits (insn, 0, 3);
6554 rs = bits (insn, 8, 11);
6555 rd = bits (insn, 12, 15);
/* Substitute r3 for Rs, r2 for Rm (| 0x302); for non-MOV also point
   Rn at r1 (| 0x10000).  */
6558 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6560 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6562 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6567 /* Clean up load instructions. */
/* Cleanup after a rewritten load: recover the loaded value(s) from the
   scratch registers (r0, and r1 for a doubleword transfer), restore
   the scratch registers from tmp[], apply any base-register writeback,
   and finally store the loaded value into the real destination with
   LOAD_WRITE_PC semantics in case the destination was the PC.  */
6570 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6571 struct displaced_step_closure *dsc)
6573 ULONGEST rt_val, rt_val2 = 0, rn_val;
6575 rt_val = displaced_read_reg (regs, dsc, 0);
6576 if (dsc->u.ldst.xfersize == 8)
6577 rt_val2 = displaced_read_reg (regs, dsc, 1);
6578 rn_val = displaced_read_reg (regs, dsc, 2);
6580 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6581 if (dsc->u.ldst.xfersize > 4)
6582 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6583 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
/* r3 was only used (for the offset register) in the non-immediate
   form.  */
6584 if (!dsc->u.ldst.immed)
6585 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6587 /* Handle register writeback. */
6588 if (dsc->u.ldst.writeback)
6589 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6590 /* Put result in right place. */
6591 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6592 if (dsc->u.ldst.xfersize == 8)
6593 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6596 /* Clean up store instructions. */
/* Cleanup after a rewritten store: restore the scratch registers
   (r0-r3, plus r4 when it was used for a PC-store fixup sequence) and
   apply any base-register writeback.  */
6599 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6600 struct displaced_step_closure *dsc)
6602 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6604 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6605 if (dsc->u.ldst.xfersize > 4)
6606 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6607 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6608 if (!dsc->u.ldst.immed)
6609 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6610 if (!dsc->u.ldst.restore_r4)
6611 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6614 if (dsc->u.ldst.writeback)
6615 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6618 /* Copy "extra" load/store instructions. These are halfword/doubleword
6619 transfers, which have a different encoding to byte/word transfers. */
/* NOTE(review): "unpriveleged" is a long-standing misspelling of
   "unprivileged"; it is kept here because it is the parameter name and
   part of the debug log string.  */
6622 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6623 struct regcache *regs, struct displaced_step_closure *dsc)
6625 unsigned int op1 = bits (insn, 20, 24);
6626 unsigned int op2 = bits (insn, 5, 6);
6627 unsigned int rt = bits (insn, 12, 15);
6628 unsigned int rn = bits (insn, 16, 19);
6629 unsigned int rm = bits (insn, 0, 3);
/* Per-opcode tables: whether the instruction is a load, and its
   transfer size in bytes (8 == doubleword, uses Rt and Rt+1).  */
6630 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6631 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6632 int immed = (op1 & 0x4) != 0;
6634 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6636 if (!insn_references_pc (insn, 0x000ff00ful))
6637 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6639 if (debug_displaced)
6640 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6641 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6642 (unsigned long) insn);
/* Fold op1/op2 into an index for the load[]/bytesize[] tables.  */
6645 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6647 internal_error (__FILE__, __LINE__,
6648 _("copy_extra_ld_st: instruction decode error"));
6650 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6651 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6652 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6654 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6656 rt_val = displaced_read_reg (regs, dsc, rt);
6657 if (bytesize[opcode] == 8)
6658 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6659 rn_val = displaced_read_reg (regs, dsc, rn);
6661 rm_val = displaced_read_reg (regs, dsc, rm);
/* Map Rt(,Rt+1)/Rn/Rm onto r0(,r1)/r2/r3 for the rewritten insn.  */
6663 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6664 if (bytesize[opcode] == 8)
6665 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6666 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6668 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6671 dsc->u.ldst.xfersize = bytesize[opcode];
6672 dsc->u.ldst.rn = rn;
6673 dsc->u.ldst.immed = immed;
/* Writeback happens for post-indexed (P == 0) or pre-indexed with W
   set (W == 1) addressing.  */
6674 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6675 dsc->u.ldst.restore_r4 = 0;
6678 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6680 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6681 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6683 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6685 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6686 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6688 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6693 /* Copy byte/half word/word loads and stores. */
/* Shared worker for single-register load/store copy routines: save
   the scratch registers, load them with Rt/Rn (and Rm for the
   register-offset form), record the transfer parameters in DSC, and
   schedule the load or store cleanup routine.  */
6696 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6697 struct displaced_step_closure *dsc, int load,
6698 int immed, int writeback, int size, int usermode,
6699 int rt, int rm, int rn)
6701 ULONGEST rt_val, rn_val, rm_val = 0;
6703 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6704 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6706 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6708 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6710 rt_val = displaced_read_reg (regs, dsc, rt);
6711 rn_val = displaced_read_reg (regs, dsc, rn);
6713 rm_val = displaced_read_reg (regs, dsc, rm);
6715 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6716 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6718 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6720 dsc->u.ldst.xfersize = size;
6721 dsc->u.ldst.rn = rn;
6722 dsc->u.ldst.immed = immed;
6723 dsc->u.ldst.writeback = writeback;
6725 /* To write PC we can do:
6727 Before this sequence of instructions:
6728 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6729 r2 is the Rn value got from displaced_read_reg.
6731 Insn1: push {pc} Write address of STR instruction + offset on stack
6732 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6733 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6734 = addr(Insn1) + offset - addr(Insn3) - 8
6736 Insn4: add r4, r4, #8 r4 = offset - 8
6737 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6739 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6741 Otherwise we don't know what value to write for PC, since the offset is
6742 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6743 of this can be found in Section "Saving from r15" in
6744 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6746 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
/* Displaced-stepping copy of a Thumb-2 PC-relative (literal) load of
   SIZE bytes.  The PC-relative form is rewritten as a register-offset
   load "LDR r0, [r2, r3]" where r2 holds the (word-aligned) PC value
   and r3 the 12-bit immediate offset; cleanup_load moves the result
   into the real Rt.  */
6751 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6752 uint16_t insn2, struct regcache *regs,
6753 struct displaced_step_closure *dsc, int size)
6755 unsigned int u_bit = bit (insn1, 7);
6756 unsigned int rt = bits (insn2, 12, 15);
6757 int imm12 = bits (insn2, 0, 11);
6760 if (debug_displaced)
6761 fprintf_unfiltered (gdb_stdlog,
6762 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6763 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6769 /* Rewrite instruction LDR Rt imm12 into:
6771 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6775 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6778 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6779 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6780 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6782 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
/* Literal loads use Align(PC, 4) as the base.  */
6784 pc_val = pc_val & 0xfffffffc;
6786 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6787 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6791 dsc->u.ldst.xfersize = size;
6792 dsc->u.ldst.immed = 0;
6793 dsc->u.ldst.writeback = 0;
6794 dsc->u.ldst.restore_r4 = 0;
6796 /* LDR R0, R2, R3 */
6797 dsc->modinsn[0] = 0xf852;
6798 dsc->modinsn[1] = 0x3;
6801 dsc->cleanup = &cleanup_load;
/* Displaced-stepping copy of a Thumb-2 word load, immediate or
   register offset.  Instructions that reference neither the PC in Rt
   nor in Rn run unmodified; otherwise the registers are remapped onto
   r0/r2 (and r3 for the register-offset form) via
   install_load_store.  */
6807 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6808 uint16_t insn2, struct regcache *regs,
6809 struct displaced_step_closure *dsc,
6810 int writeback, int immed)
6812 unsigned int rt = bits (insn2, 12, 15);
6813 unsigned int rn = bits (insn1, 0, 3);
6814 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6815 /* In LDR (register), there is also a register Rm, which is not allowed to
6816 be PC, so we don't have to check it. */
6818 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6819 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6822 if (debug_displaced)
6823 fprintf_unfiltered (gdb_stdlog,
6824 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6825 rt, rn, insn1, insn2);
6827 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6830 dsc->u.ldst.restore_r4 = 0;
6833 /* ldr[b]<cond> rt, [rn, #imm], etc.
6835 ldr[b]<cond> r0, [r2, #imm]. */
6837 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6838 dsc->modinsn[1] = insn2 & 0x0fff;
6841 /* ldr[b]<cond> rt, [rn, rm], etc.
6843 ldr[b]<cond> r0, [r2, r3]. */
6845 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6846 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
/* Displaced-stepping copy of ARM single-register loads and stores
   (LDR/STR/LDRB/STRB and their user-mode T variants).  PC-free
   instructions run unmodified.  Otherwise the register fields are
   remapped onto scratch registers; a store of the PC additionally
   needs a five-instruction prologue (using r4) to compute the correct
   stored PC value, since the store offset from PC is
   implementation-defined.  */
6856 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6857 struct regcache *regs,
6858 struct displaced_step_closure *dsc,
6859 int load, int size, int usermode)
6861 int immed = !bit (insn, 25);
/* Writeback for post-indexed (P == 0) or pre-indexed with W == 1.  */
6862 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6863 unsigned int rt = bits (insn, 12, 15);
6864 unsigned int rn = bits (insn, 16, 19);
6865 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6867 if (!insn_references_pc (insn, 0x000ff00ful))
6868 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6870 if (debug_displaced)
6871 fprintf_unfiltered (gdb_stdlog,
6872 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6873 load ? (size == 1 ? "ldrb" : "ldr")
6874 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6876 (unsigned long) insn);
6878 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6879 usermode, rt, rm, rn);
/* Simple case: anything but "store PC" needs no fixup sequence.  */
6881 if (load || rt != ARM_PC_REGNUM)
6883 dsc->u.ldst.restore_r4 = 0;
6886 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6888 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6889 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6891 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6893 {ldr,str}[b]<cond> r0, [r2, r3]. */
6894 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6898 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6899 dsc->u.ldst.restore_r4 = 1;
/* See the "To write PC" commentary in install_load_store: this
   sequence auto-detects the PC store offset at run time.  */
6900 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6901 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6902 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6903 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6904 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6908 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6910 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6915 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6920 /* Cleanup LDM instructions with fully-populated register list. This is an
6921 unfortunate corner case: it's impossible to implement correctly by modifying
6922 the instruction. The issue is as follows: we have an instruction,
6926 which we must rewrite to avoid loading PC. A possible solution would be to
6927 do the load in two halves, something like (with suitable cleanup
6931 ldm[id][ab] r8!, {r0-r7}
6933 ldm[id][ab] r8, {r7-r14}
6936 but at present there's no suitable place for <temp>, since the scratch space
6937 is overwritten before the cleanup routine is called. For now, we simply
6938 emulate the instruction. */
/* Emulate an LDM with all sixteen registers in the list: walk the
   register mask in memory order (ascending for increment, descending
   for decrement), load each register directly from memory, and apply
   writeback manually.  The original insn was replaced by a NOP.  */
6941 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6942 struct displaced_step_closure *dsc)
6944 int inc = dsc->u.block.increment;
6945 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6946 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6947 uint32_t regmask = dsc->u.block.regmask;
6948 int regno = inc ? 0 : 15;
6949 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6950 int exception_return = dsc->u.block.load && dsc->u.block.user
6951 && (regmask & 0x8000) != 0;
6952 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6953 int do_transfer = condition_true (dsc->u.block.cond, status);
6954 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6959 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6960 sensible we can do here. Complain loudly. */
6961 if (exception_return)
6962 error (_("Cannot single-step exception return"));
6964 /* We don't handle any stores here for now. */
6965 gdb_assert (dsc->u.block.load != 0);
6967 if (debug_displaced)
6968 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6969 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6970 dsc->u.block.increment ? "inc" : "dec",
6971 dsc->u.block.before ? "before" : "after");
/* Skip to the next set bit in the mask, in transfer order.  */
6978 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6981 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6984 xfer_addr += bump_before;
6986 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6987 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6989 xfer_addr += bump_after;
6991 regmask &= ~(1 << regno);
6994 if (dsc->u.block.writeback)
6995 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6999 /* Clean up an STM which included the PC in the register list. */
/* The STM was run as-is from the scratch area, so the PC slot in
   memory holds scratch_base plus an architecture-dependent offset.
   Locate that slot (PC is the highest-numbered register, so it is
   stored last/highest), read the stored value to discover the offset,
   and rewrite the slot with the original insn address plus the same
   offset.  */
7002 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
7003 struct displaced_step_closure *dsc)
7005 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7006 int store_executed = condition_true (dsc->u.block.cond, status);
7007 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7008 CORE_ADDR stm_insn_addr;
7011 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7013 /* If condition code fails, there's nothing else to do. */
7014 if (!store_executed)
7017 if (dsc->u.block.increment)
7019 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7021 if (dsc->u.block.before)
7026 pc_stored_at = dsc->u.block.xfer_addr;
7028 if (dsc->u.block.before)
7032 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7033 stm_insn_addr = dsc->scratch_base;
7034 offset = pc_val - stm_insn_addr;
7036 if (debug_displaced)
7037 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7038 "STM instruction\n", offset);
7040 /* Rewrite the stored PC to the proper value for the non-displaced original
7042 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7043 dsc->insn_addr + offset);
7046 /* Clean up an LDM which includes the PC in the register list. We clumped all
7047 the registers in the transferred list into a contiguous range r0...rX (to
7048 avoid loading PC directly and losing control of the debugged program), so we
7049 must undo that here. */
7052 cleanup_block_load_pc (struct gdbarch *gdbarch,
7053 struct regcache *regs,
7054 struct displaced_step_closure *dsc)
7056 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7057 int load_executed = condition_true (dsc->u.block.cond, status);
7058 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7059 unsigned int regs_loaded = bitcount (mask);
7060 unsigned int num_to_shuffle = regs_loaded, clobbered;
7062 /* The method employed here will fail if the register list is fully populated
7063 (we need to avoid loading PC directly). */
7064 gdb_assert (num_to_shuffle < 16);
/* The modified insn loaded into r0..r(N-1); CLOBBERED tracks which of
   those scratch registers still need restoring from tmp[].  */
7069 clobbered = (1 << num_to_shuffle) - 1;
/* Walk the original register list from the top (PC downwards), moving
   each loaded value from its temporary low register to its real
   destination.  */
7071 while (num_to_shuffle > 0)
7073 if ((mask & (1 << write_reg)) != 0)
7075 unsigned int read_reg = num_to_shuffle - 1;
7077 if (read_reg != write_reg)
7079 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7080 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7081 if (debug_displaced)
7082 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7083 "loaded register r%d to r%d\n"), read_reg,
7086 else if (debug_displaced)
7087 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7088 "r%d already in the right place\n"),
7091 clobbered &= ~(1 << write_reg);
7099 /* Restore any registers we scribbled over. */
7100 for (write_reg = 0; clobbered != 0; write_reg++)
7102 if ((clobbered & (1 << write_reg)) != 0)
7104 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7106 if (debug_displaced)
7107 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7108 "clobbered register r%d\n"), write_reg);
7109 clobbered &= ~(1 << write_reg);
7113 /* Perform register writeback manually. */
7114 if (dsc->u.block.writeback)
7116 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7118 if (dsc->u.block.increment)
7119 new_rn_val += regs_loaded * 4;
7121 new_rn_val -= regs_loaded * 4;
7123 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7128 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7129 in user-level code (in particular exception return, ldm rn, {...pc}^). */
/* Displaced-stepping copy of ARM LDM/STM.  Three strategies:
   - no PC involvement: run unmodified;
   - LDM including PC: rewrite the list to a contiguous r0..rX chunk
     and shuffle in cleanup_block_load_pc (or, for a full 16-register
     list, emulate entirely in cleanup_block_load_all);
   - STM including PC: run as-is and patch the stored PC value in
     cleanup_block_store_pc.  */
7132 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7133 struct regcache *regs,
7134 struct displaced_step_closure *dsc)
7136 int load = bit (insn, 20);
7137 int user = bit (insn, 22);
7138 int increment = bit (insn, 23);
7139 int before = bit (insn, 24);
7140 int writeback = bit (insn, 21);
7141 int rn = bits (insn, 16, 19);
7143 /* Block transfers which don't mention PC can be run directly
7145 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7146 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7148 if (rn == ARM_PC_REGNUM)
7150 warning (_("displaced: Unpredictable LDM or STM with "
7151 "base register r15"));
7152 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7155 if (debug_displaced)
7156 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7157 "%.8lx\n", (unsigned long) insn);
7159 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7160 dsc->u.block.rn = rn;
7162 dsc->u.block.load = load;
7163 dsc->u.block.user = user;
7164 dsc->u.block.increment = increment;
7165 dsc->u.block.before = before;
7166 dsc->u.block.writeback = writeback;
7167 dsc->u.block.cond = bits (insn, 28, 31);
7169 dsc->u.block.regmask = insn & 0xffff;
7173 if ((insn & 0xffff) == 0xffff)
7175 /* LDM with a fully-populated register list. This case is
7176 particularly tricky. Implement for now by fully emulating the
7177 instruction (which might not behave perfectly in all cases, but
7178 these instructions should be rare enough for that not to matter
7180 dsc->modinsn[0] = ARM_NOP;
7182 dsc->cleanup = &cleanup_block_load_all;
7186 /* LDM of a list of registers which includes PC. Implement by
7187 rewriting the list of registers to be transferred into a
7188 contiguous chunk r0...rX before doing the transfer, then shuffling
7189 registers into the correct places in the cleanup routine. */
7190 unsigned int regmask = insn & 0xffff;
7191 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7192 unsigned int to = 0, from = 0, i, new_rn;
/* Save the low registers the rewritten list will clobber.  */
7194 for (i = 0; i < num_in_list; i++)
7195 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7197 /* Writeback makes things complicated. We need to avoid clobbering
7198 the base register with one of the registers in our modified
7199 register list, but just using a different register can't work in
7202 ldm r14!, {r0-r13,pc}
7204 which would need to be rewritten as:
7208 but that can't work, because there's no free register for N.
7210 Solve this by turning off the writeback bit, and emulating
7211 writeback manually in the cleanup routine. */
7216 new_regmask = (1 << num_in_list) - 1;
7218 if (debug_displaced)
7219 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7220 "{..., pc}: original reg list %.4x, modified "
7221 "list %.4x\n"), rn, writeback ? "!" : "",
7222 (int) insn & 0xffff, new_regmask);
7224 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7226 dsc->cleanup = &cleanup_block_load_pc;
7231 /* STM of a list of registers which includes PC. Run the instruction
7232 as-is, but out of line: this will store the wrong value for the PC,
7233 so we must manually fix up the memory in the cleanup routine.
7234 Doing things this way has the advantage that we can auto-detect
7235 the offset of the PC write (which is architecture-dependent) in
7236 the cleanup routine. */
7237 dsc->modinsn[0] = insn;
7239 dsc->cleanup = &cleanup_block_store_pc;
/* Displaced-stepping copy of Thumb-2 LDM/STM; the Thumb-2 counterpart
   of arm_copy_block_xfer, with the same three strategies.  Thumb-2
   block transfers are unconditional (INST_AL) and bit 13 of the
   register mask is always zero.  */
7246 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7247 struct regcache *regs,
7248 struct displaced_step_closure *dsc)
7250 int rn = bits (insn1, 0, 3);
7251 int load = bit (insn1, 4);
7252 int writeback = bit (insn1, 5);
7254 /* Block transfers which don't mention PC can be run directly
7256 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7257 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7259 if (rn == ARM_PC_REGNUM)
7261 warning (_("displaced: Unpredictable LDM or STM with "
7262 "base register r15"));
7263 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7264 "unpredictable ldm/stm", dsc);
7267 if (debug_displaced)
7268 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7269 "%.4x%.4x\n", insn1, insn2);
7271 /* Clear bit 13, since it should be always zero. */
7272 dsc->u.block.regmask = (insn2 & 0xdfff);
7273 dsc->u.block.rn = rn;
7275 dsc->u.block.load = load;
7276 dsc->u.block.user = 0;
7277 dsc->u.block.increment = bit (insn1, 7);
7278 dsc->u.block.before = bit (insn1, 8);
7279 dsc->u.block.writeback = writeback;
7280 dsc->u.block.cond = INST_AL;
7281 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7285 if (dsc->u.block.regmask == 0xffff)
7287 /* This branch is impossible to happen. */
7292 unsigned int regmask = dsc->u.block.regmask;
7293 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7294 unsigned int to = 0, from = 0, i, new_rn;
/* Save the low registers the rewritten list will clobber.  */
7296 for (i = 0; i < num_in_list; i++)
7297 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
/* Rewrite the transfer list to the contiguous chunk r0..r(N-1);
   cleanup_block_load_pc shuffles values to their real targets.  */
7302 new_regmask = (1 << num_in_list) - 1;
7304 if (debug_displaced)
7305 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7306 "{..., pc}: original reg list %.4x, modified "
7307 "list %.4x\n"), rn, writeback ? "!" : "",
7308 (int) dsc->u.block.regmask, new_regmask);
7310 dsc->modinsn[0] = insn1;
7311 dsc->modinsn[1] = (new_regmask & 0xffff);
7314 dsc->cleanup = &cleanup_block_load_pc;
/* STM including PC: run as-is and patch the stored PC value in
   cleanup_block_store_pc.  */
7319 dsc->modinsn[0] = insn1;
7320 dsc->modinsn[1] = insn2;
7322 dsc->cleanup = &cleanup_block_store_pc;
7327 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7328 for Linux, where some SVC instructions must be treated specially. */
/* Cleanup after an SVC executed out of line: resume at the insn
   following the original SVC (insn_addr + insn_size).  */
7331 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7332 struct displaced_step_closure *dsc)
7334 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7336 if (debug_displaced)
7337 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7338 "%.8lx\n", (unsigned long) resume_addr);
7340 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7344 /* Common copy routine for svc instruciton. */
7347 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7348 struct displaced_step_closure *dsc)
7350 /* Preparation: none.
7351 Insn: unmodified svc.
7352 Cleanup: pc <- insn_addr + insn_size. */
7354 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7356 dsc->wrote_to_pc = 1;
7358 /* Allow OS-specific code to override SVC handling. */
7359 if (dsc->u.svc.copy_svc_os)
7360 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7363 dsc->cleanup = &cleanup_svc;
7369 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7370 struct regcache *regs, struct displaced_step_closure *dsc)
7373 if (debug_displaced)
7374 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7375 (unsigned long) insn);
7377 dsc->modinsn[0] = insn;
7379 return install_svc (gdbarch, regs, dsc);
7383 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7384 struct regcache *regs, struct displaced_step_closure *dsc)
7387 if (debug_displaced)
7388 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7391 dsc->modinsn[0] = insn;
7393 return install_svc (gdbarch, regs, dsc);
7396 /* Copy undefined instructions. */
7399 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7400 struct displaced_step_closure *dsc)
7402 if (debug_displaced)
7403 fprintf_unfiltered (gdb_stdlog,
7404 "displaced: copying undefined insn %.8lx\n",
7405 (unsigned long) insn);
7407 dsc->modinsn[0] = insn;
7413 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7414 struct displaced_step_closure *dsc)
7417 if (debug_displaced)
7418 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7419 "%.4x %.4x\n", (unsigned short) insn1,
7420 (unsigned short) insn2);
7422 dsc->modinsn[0] = insn1;
7423 dsc->modinsn[1] = insn2;
7429 /* Copy unpredictable instructions. */
7432 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7433 struct displaced_step_closure *dsc)
7435 if (debug_displaced)
7436 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7437 "%.8lx\n", (unsigned long) insn);
7439 dsc->modinsn[0] = insn;
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
                              struct regcache *regs,
                              struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
                                dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
        return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
        return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
        /* Unallocated memory hint.  */
        return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
        return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
        /* pld/pldw reg.  */
        return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
        return arm_copy_unpred (gdbarch, insn, dsc);
      default:
        return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode the unconditional (cond == 0xf) instruction space.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
                          struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
        {
        case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
          /* stc/stc2.  */
          return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

        case 0x2:
          return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

        default:
          return arm_copy_undef (gdbarch, insn, dsc);
        }

    case 0x9:
      {
        int rn_f = (bits (insn, 16, 19) == 0xf);
        switch ((insn & 0xe00000) >> 21)
          {
          case 0x1: case 0x3:
            /* ldc/ldc2 imm (undefined for rn == pc).  */
            return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
                        : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

          case 0x2:
            return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

          case 0x4: case 0x5: case 0x6: case 0x7:
            /* ldc/ldc2 lit (undefined for rn != pc).  */
            return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
                        : arm_copy_undef (gdbarch, insn, dsc);

          default:
            return arm_copy_undef (gdbarch, insn, dsc);
          }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
        /* ldc/ldc2 lit.  */
        return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
        return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
        return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
        return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
        return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
                          struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
        return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
        return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
        /* Not really supported.  */
        return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
        return arm_copy_bx_blx_reg (gdbarch, insn,
                                    regs, dsc);  /* blx register.  */
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
        return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
        /* Not really supported.  */
        return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through for other OP values.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode the data-processing / miscellaneous encoding space.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
                    struct regcache *regs,
                    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
        return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
        return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
        return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
        return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
        return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
        return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
        return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
        return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
        return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
        return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
        /* 2nd arg means "unprivileged".  */
        return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
                                     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode load/store word and unsigned-byte instructions.  The three trailing
   arguments to arm_copy_ldr_str_ldrb_strb are: load flag, transfer size in
   bytes, and user-mode (xxxT) flag.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
                             struct regcache *regs,
                             struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
           || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
           || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
           || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
           || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
           || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
           || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
           || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode the media-instruction encoding space.  None of these modify the PC,
   so all are copied unmodified or flagged undefined.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
                  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
                                  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
        {
          if (bits (insn, 12, 15) == 0xf)
            return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
          else
            return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
        }
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
        return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
        {
          if (bits (insn, 0, 3) == 0xf)
            return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
          else
            return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
        }
      else
        return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
        return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
        return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Dispatch between branch and block-transfer instructions based on bit 25.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
                        struct regcache *regs,
                        struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}
/* Decode extension-register (VFP/Neon) load/store instructions.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
                          struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
         zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
                            uint16_t insn2, struct regcache *regs,
                            struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf)  /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                        "dp (shift reg)", dsc);
}
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
                             uint16_t insn2, struct regcache *regs,
                             struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                          "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                          "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                          "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                          "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7885 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7886 struct regcache *regs, struct displaced_step_closure *dsc)
7888 unsigned int op1 = bits (insn, 20, 25);
7889 int op = bit (insn, 4);
7890 unsigned int coproc = bits (insn, 8, 11);
7891 unsigned int rn = bits (insn, 16, 19);
7893 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7894 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7895 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7896 && (coproc & 0xe) != 0xa)
7898 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7899 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7900 && (coproc & 0xe) != 0xa)
7901 /* ldc/ldc2 imm/lit. */
7902 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7903 else if ((op1 & 0x3e) == 0x00)
7904 return arm_copy_undef (gdbarch, insn, dsc);
7905 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7906 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7907 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7908 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7909 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7910 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7911 else if ((op1 & 0x30) == 0x20 && !op)
7913 if ((coproc & 0xe) == 0xa)
7914 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7916 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7918 else if ((op1 & 0x30) == 0x20 && op)
7919 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7920 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7921 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7922 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7923 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7924 else if ((op1 & 0x30) == 0x30)
7925 return arm_copy_svc (gdbarch, insn, regs, dsc);
7927 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
/* Decode the SVC/coprocessor encoding space (32-bit Thumb).  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
                         uint16_t insn2, struct regcache *regs,
                         struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
        return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                            "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
                                            dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
        return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
        {
          /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
          if ((coproc & 0xe) == 0xa)
            return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
                                                dsc);
          else  /* coproc is not 101x.  */
            {
              if (bit_4 == 0)  /* STC/STC2.  */
                return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                    "stc/stc2", dsc);
              else  /* LDC/LDC2 {literal, immediate}.  */
                return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
                                                     regs, dsc);
            }
        }
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
7974 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7975 struct displaced_step_closure *dsc, int rd)
7981 Preparation: Rd <- PC
7987 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7988 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7992 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7993 struct displaced_step_closure *dsc,
7994 int rd, unsigned int imm)
7997 /* Encoding T2: ADDS Rd, #imm */
7998 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8000 install_pc_relative (gdbarch, regs, dsc, rd);
8006 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8007 struct regcache *regs,
8008 struct displaced_step_closure *dsc)
8010 unsigned int rd = bits (insn, 8, 10);
8011 unsigned int imm8 = bits (insn, 0, 7);
8013 if (debug_displaced)
8014 fprintf_unfiltered (gdb_stdlog,
8015 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8018 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8022 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8023 uint16_t insn2, struct regcache *regs,
8024 struct displaced_step_closure *dsc)
8026 unsigned int rd = bits (insn2, 8, 11);
8027 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8028 extract raw immediate encoding rather than computing immediate. When
8029 generating ADD or SUB instruction, we can simply perform OR operation to
8030 set immediate into ADD. */
8031 unsigned int imm_3_8 = insn2 & 0x70ff;
8032 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8034 if (debug_displaced)
8035 fprintf_unfiltered (gdb_stdlog,
8036 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8037 rd, imm_i, imm_3_8, insn1, insn2);
8039 if (bit (insn1, 7)) /* Encoding T2 */
8041 /* Encoding T3: SUB Rd, Rd, #imm */
8042 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8043 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8045 else /* Encoding T3 */
8047 /* Encoding T3: ADD Rd, Rd, #imm */
8048 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8049 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8053 install_pc_relative (gdbarch, regs, dsc, rd);
8059 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8060 struct regcache *regs,
8061 struct displaced_step_closure *dsc)
8063 unsigned int rt = bits (insn1, 8, 10);
8065 int imm8 = (bits (insn1, 0, 7) << 2);
8066 CORE_ADDR from = dsc->insn_addr;
8072 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8074 Insn: LDR R0, [R2, R3];
8075 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8077 if (debug_displaced)
8078 fprintf_unfiltered (gdb_stdlog,
8079 "displaced: copying thumb ldr r%d [pc #%d]\n"
8082 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8083 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8084 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8085 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8086 /* The assembler calculates the required value of the offset from the
8087 Align(PC,4) value of this instruction to the label. */
8088 pc = pc & 0xfffffffc;
8090 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8091 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8094 dsc->u.ldst.xfersize = 4;
8096 dsc->u.ldst.immed = 0;
8097 dsc->u.ldst.writeback = 0;
8098 dsc->u.ldst.restore_r4 = 0;
8100 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8102 dsc->cleanup = &cleanup_load;
8107 /* Copy Thumb cbnz/cbz insruction. */
8110 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8111 struct regcache *regs,
8112 struct displaced_step_closure *dsc)
8114 int non_zero = bit (insn1, 11);
8115 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8116 CORE_ADDR from = dsc->insn_addr;
8117 int rn = bits (insn1, 0, 2);
8118 int rn_val = displaced_read_reg (regs, dsc, rn);
8120 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8121 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8122 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8123 condition is false, let it be, cleanup_branch will do nothing. */
8124 if (dsc->u.branch.cond)
8126 dsc->u.branch.cond = INST_AL;
8127 dsc->u.branch.dest = from + 4 + imm5;
8130 dsc->u.branch.dest = from + 2;
8132 dsc->u.branch.link = 0;
8133 dsc->u.branch.exchange = 0;
8135 if (debug_displaced)
8136 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8137 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8138 rn, rn_val, insn1, dsc->u.branch.dest);
8140 dsc->modinsn[0] = THUMB_NOP;
8142 dsc->cleanup = &cleanup_branch;
8146 /* Copy Table Branch Byte/Halfword */
8148 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8149 uint16_t insn2, struct regcache *regs,
8150 struct displaced_step_closure *dsc)
8152 ULONGEST rn_val, rm_val;
8153 int is_tbh = bit (insn2, 4);
8154 CORE_ADDR halfwords = 0;
8155 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8157 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8158 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8164 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8165 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8171 target_read_memory (rn_val + rm_val, buf, 1);
8172 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8175 if (debug_displaced)
8176 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8177 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8178 (unsigned int) rn_val, (unsigned int) rm_val,
8179 (unsigned int) halfwords);
8181 dsc->u.branch.cond = INST_AL;
8182 dsc->u.branch.link = 0;
8183 dsc->u.branch.exchange = 0;
8184 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8186 dsc->cleanup = &cleanup_branch;
8192 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8193 struct displaced_step_closure *dsc)
8196 int val = displaced_read_reg (regs, dsc, 7);
8197 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8200 val = displaced_read_reg (regs, dsc, 8);
8201 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8204 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8209 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8210 struct regcache *regs,
8211 struct displaced_step_closure *dsc)
8213 dsc->u.block.regmask = insn1 & 0x00ff;
8215 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8218 (1) register list is full, that is, r0-r7 are used.
8219 Prepare: tmp[0] <- r8
8221 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8222 MOV r8, r7; Move value of r7 to r8;
8223 POP {r7}; Store PC value into r7.
8225 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8227 (2) register list is not full, supposing there are N registers in
8228 register list (except PC, 0 <= N <= 7).
8229 Prepare: for each i, 0 - N, tmp[i] <- ri.
8231 POP {r0, r1, ...., rN};
8233 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8234 from tmp[] properly.
8236 if (debug_displaced)
8237 fprintf_unfiltered (gdb_stdlog,
8238 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8239 dsc->u.block.regmask, insn1);
8241 if (dsc->u.block.regmask == 0xff)
8243 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8245 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8246 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8247 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8250 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8254 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8255 unsigned int new_regmask, bit = 1;
8256 unsigned int to = 0, from = 0, i, new_rn;
8258 for (i = 0; i < num_in_list + 1; i++)
8259 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8261 new_regmask = (1 << (num_in_list + 1)) - 1;
8263 if (debug_displaced)
8264 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8265 "{..., pc}: original reg list %.4x,"
8266 " modified list %.4x\n"),
8267 (int) dsc->u.block.regmask, new_regmask);
8269 dsc->u.block.regmask |= 0x8000;
8270 dsc->u.block.writeback = 0;
8271 dsc->u.block.cond = INST_AL;
8273 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8275 dsc->cleanup = &cleanup_block_load_pc;
/* Decode and prepare a displaced copy of a 16-bit Thumb instruction.
   Raises an internal error on decode failure.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
                                    struct regcache *regs,
                                    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (imm), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
                                         "shift/add/sub/mov/cmp",
                                         dsc);
      break;
    case 4:
      switch (op_bit_10_11)
        {
        case 0: /* Data-processing */
          err = thumb_copy_unmodified_16bit (gdbarch, insn1,
                                             "data-processing",
                                             dsc);
          break;
        case 1: /* Special data instructions and branch and exchange.  */
          {
            unsigned short op = bits (insn1, 7, 9);
            if (op == 6 || op == 7)  /* BX or BLX */
              err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
            else if (bits (insn1, 6, 7) != 0)  /* ADD/MOV/CMP high registers.  */
              err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
                                                 dsc);
          }
          break;
        default: /* LDR (literal) */
          err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
        }
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
        err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
        switch (bits (insn1, 8, 11))
          {
          case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
            err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
            break;
          case 12: case 13: /* POP */
            if (bit (insn1, 8)) /* PC is in register list.  */
              err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
            break;
          case 15: /* If-Then, and hints */
            if (bits (insn1, 0, 3))
              /* If-Then makes up to four following instructions conditional.
                 IT instruction itself is not conditional, so handle it as a
                 common unmodified instruction.  */
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
                                                 dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
            break;
          default:
            err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
          }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
        err = thumb_copy_b (gdbarch, insn1, dsc);
      else
        err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
                    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
/* Decode 32-bit Thumb load and memory-hint instructions.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
                                 uint16_t insn1, uint16_t insn2,
                                 struct regcache *regs,
                                 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
        {
          if (rn == 0xf)
            /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
            return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "pli/pld", dsc);
        }
      else
        {
          if (rn == 0xf) /* LDRB/LDRSB (literal) */
            return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
                                             1);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "ldrb{reg, immediate}/ldrbt",
                                                dsc);
        }

    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
        return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                            "pld/unalloc memhint", dsc);
      else
        {
          if (rn == 0xf)
            return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
                                             2);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "ldrh/ldrht", dsc);
        }

    case 2: /* Load word */
      {
        int insn2_bit_8_11 = bits (insn2, 8, 11);

        if (rn == 0xf)
          return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
        else if (op1 == 0x1) /* Encoding T3 */
          return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
                                           0, 1);
        else /* op1 == 0x0 */
          {
            if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
              /* LDR (immediate) */
              return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
                                               dsc, bit (insn2, 8), 1);
            else if (insn2_bit_8_11 == 0xe) /* LDRT */
              return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                  "ldrt", dsc);
            else
              /* LDR (register) */
              return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
                                               dsc, 0, 0);
          }
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
    }

  return 0;
}
/* Decode a 32-bit Thumb-2 instruction (halfwords INSN1:INSN2) being
   displaced-stepped and prepare a PC-safe copy of it in DSC,
   dispatching on the major opcode fields of the first halfword.
   Encodings that cannot read or write the PC are copied unmodified.
   Raises an internal error if the bit pattern matches no known
   encoding group.  */
8466 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8467 uint16_t insn2, struct regcache *regs,
8468 struct displaced_step_closure *dsc)
8471 unsigned short op = bit (insn2, 15);
8472 unsigned int op1 = bits (insn1, 11, 12);
8478 switch (bits (insn1, 9, 10))
8483 /* Load/store {dual, exclusive}, table branch. */
8484 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8485 && bits (insn2, 5, 7) == 0)
8486 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8489 /* PC is not allowed to be used in load/store {dual, exclusive}
8491 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8492 "load/store dual/ex", dsc);
8494 else /* load/store multiple */
8496 switch (bits (insn1, 7, 8))
8498 case 0: case 3: /* SRS, RFE */
8499 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8502 case 1: case 2: /* LDM/STM/PUSH/POP */
8503 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8510 /* Data-processing (shift register). */
8511 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8514 default: /* Coprocessor instructions. */
8515 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8520 case 2: /* op1 = 2 */
8521 if (op) /* Branch and misc control. */
8523 if (bit (insn2, 14) /* BLX/BL */
8524 || bit (insn2, 12) /* Unconditional branch */
8525 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8526 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8528 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8533 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8535 int op = bits (insn1, 4, 8);
8536 int rn = bits (insn1, 0, 3);
/* ADR / ADDW / SUBW with Rn == PC are PC-relative and need a
   rewritten copy; everything else in this group is PC-safe.  */
8537 if ((op == 0 || op == 0xa) && rn == 0xf)
8538 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8541 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8544 else /* Data processing (modified immediate) */
8545 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8549 case 3: /* op1 = 3 */
8550 switch (bits (insn1, 9, 10))
8554 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8556 else /* NEON Load/Store and Store single data item */
8557 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8558 "neon elt/struct load/store",
8561 case 1: /* op1 = 3, bits (9, 10) == 1 */
8562 switch (bits (insn1, 7, 8))
8564 case 0: case 1: /* Data processing (register) */
8565 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8568 case 2: /* Multiply and absolute difference */
8569 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8570 "mul/mua/diff", dsc);
8572 case 3: /* Long multiply and divide */
8573 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8578 default: /* Coprocessor instructions */
8579 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
/* A nonzero ERR here means no decoder claimed the instruction.  */
8588 internal_error (__FILE__, __LINE__,
8589 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
/* Read the Thumb instruction at FROM and dispatch to the 16-bit or
   32-bit displaced-stepping decoder, recording the instruction size
   in DSC so the fixup phase can advance the PC correctly.  */
8594 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8595 CORE_ADDR to, struct regcache *regs,
8596 struct displaced_step_closure *dsc)
8598 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8600 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8602 if (debug_displaced)
8603 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8604 "at %.8lx\n", insn1, (unsigned long) from);
/* The first halfword alone determines whether this is a 16-bit or a
   32-bit Thumb instruction.  */
8607 dsc->insn_size = thumb_insn_size (insn1);
8608 if (thumb_insn_size (insn1) == 4)
8611 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8612 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8615 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
/* Top-level displaced-stepping decoder: initialize the closure DSC,
   hand Thumb mode off to thumb_process_displaced_insn, otherwise
   decode the ARM instruction at FROM by its major opcode field and
   fill DSC with a PC-safe copy.  */
8619 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8620 CORE_ADDR to, struct regcache *regs,
8621 struct displaced_step_closure *dsc)
8624 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8627 /* Most displaced instructions use a 1-instruction scratch space, so set this
8628 here and override below if/when necessary. */
8630 dsc->insn_addr = from;
8631 dsc->scratch_base = to;
8632 dsc->cleanup = NULL;
8633 dsc->wrote_to_pc = 0;
8635 if (!displaced_in_arm_mode (regs))
8636 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8640 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8641 if (debug_displaced)
8642 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8643 "at %.8lx\n", (unsigned long) insn,
8644 (unsigned long) from);
/* The unconditional (NV-condition) space has its own decode table.  */
8646 if ((insn & 0xf0000000) == 0xf0000000)
8647 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
/* Dispatch on instruction bits 25-27 combined with bit 4.  */
8648 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8650 case 0x0: case 0x1: case 0x2: case 0x3:
8651 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8654 case 0x4: case 0x5: case 0x6:
8655 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8659 err = arm_decode_media (gdbarch, insn, dsc);
8662 case 0x8: case 0x9: case 0xa: case 0xb:
8663 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8666 case 0xc: case 0xd: case 0xe: case 0xf:
8667 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
/* A nonzero ERR here means no decoder claimed the instruction.  */
8672 internal_error (__FILE__, __LINE__,
8673 _("arm_process_displaced_insn: Instruction decode error"));
8676 /* Actually set up the scratch space for a displaced instruction. */
/* Writes DSC's modified instruction(s) to the scratch area at TO,
   followed by the architecture breakpoint that stops the inferior
   after the single displaced instruction has executed.  */
8679 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8680 CORE_ADDR to, struct displaced_step_closure *dsc)
8682 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8683 unsigned int i, len, offset;
8684 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
/* Scratch-slot width: Thumb modified instructions are emitted as
   halfwords, ARM ones as words.  */
8685 int size = dsc->is_thumb? 2 : 4;
8686 const gdb_byte *bkp_insn;
8689 /* Poke modified instruction(s). */
8690 for (i = 0; i < dsc->numinsns; i++)
8692 if (debug_displaced)
8694 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8696 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8699 fprintf_unfiltered (gdb_stdlog, "%.4x",
8700 (unsigned short)dsc->modinsn[i]);
8702 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8703 (unsigned long) to + offset);
8706 write_memory_unsigned_integer (to + offset, size,
8707 byte_order_for_code,
8712 /* Choose the correct breakpoint instruction. */
8715 bkp_insn = tdep->thumb_breakpoint;
8716 len = tdep->thumb_breakpoint_size;
8720 bkp_insn = tdep->arm_breakpoint;
8721 len = tdep->arm_breakpoint_size;
8724 /* Put breakpoint afterwards. */
8725 write_memory (to + offset, bkp_insn, len);
8727 if (debug_displaced)
8728 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8729 paddress (gdbarch, from), paddress (gdbarch, to));
8732 /* Entry point for copying an instruction into scratch space for displaced
/* Implements the gdbarch displaced_step_copy_insn hook.  The returned
   closure is heap-allocated; the generic displaced-stepping machinery
   owns and frees it after the fixup phase.  */
8735 struct displaced_step_closure *
8736 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8737 CORE_ADDR from, CORE_ADDR to,
8738 struct regcache *regs)
8740 struct displaced_step_closure *dsc
8741 = xmalloc (sizeof (struct displaced_step_closure))
8742 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8743 arm_displaced_init_closure (gdbarch, from, to, dsc);
8748 /* Entry point for cleaning things up after a displaced instruction has been
/* Implements the gdbarch displaced_step_fixup hook: run the
   per-instruction cleanup installed by the decoder (if any), then, if
   the instruction itself did not write the PC, advance the PC past the
   original instruction.  */
8752 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8753 struct displaced_step_closure *dsc,
8754 CORE_ADDR from, CORE_ADDR to,
8755 struct regcache *regs)
8758 dsc->cleanup (gdbarch, regs, dsc);
8760 if (!dsc->wrote_to_pc)
8761 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8762 dsc->insn_addr + dsc->insn_size);
8766 #include "bfd-in2.h"
8767 #include "libcoff.h"
/* Disassemble one instruction at MEMADDR, selecting ARM or Thumb
   decoding via arm_pc_is_thumb.  Thumb decoding is forced by handing
   the opcodes library a fake COFF Thumb symbol (built once, lazily,
   in function-static storage).  */
8770 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8772 struct gdbarch *gdbarch = info->application_data;
8774 if (arm_pc_is_thumb (gdbarch, memaddr))
8776 static asymbol *asym;
8777 static combined_entry_type ce;
8778 static struct coff_symbol_struct csym;
8779 static struct bfd fake_bfd;
8780 static bfd_target fake_target;
8782 if (csym.native == NULL)
8784 /* Create a fake symbol vector containing a Thumb symbol.
8785 This is solely so that the code in print_insn_little_arm()
8786 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8787 the presence of a Thumb symbol and switch to decoding
8788 Thumb instructions. */
8790 fake_target.flavour = bfd_target_coff_flavour;
8791 fake_bfd.xvec = &fake_target;
8792 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8794 csym.symbol.the_bfd = &fake_bfd;
8795 csym.symbol.name = "fake";
8796 asym = (asymbol *) & csym;
/* Strip the Thumb bit before handing the address to opcodes.  */
8799 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8800 info->symbols = &asym;
8803 info->symbols = NULL;
8805 if (info->endian == BFD_ENDIAN_BIG)
8806 return print_insn_big_arm (memaddr, info);
8808 return print_insn_little_arm (memaddr, info);
8811 /* The following define instruction sequences that will cause ARM
8812 cpu's to take an undefined instruction trap. These are used to
8813 signal a breakpoint to GDB.
8815 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8816 modes. A different instruction is required for each mode. The ARM
8817 cpu's can also be big or little endian. Thus four different
8818 instructions are needed to support all cases.
8820 Note: ARMv4 defines several new instructions that will take the
8821 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8822 not in fact add the new instructions. The new undefined
8823 instructions in ARMv4 are all instructions that had no defined
8824 behaviour in earlier chips. There is no guarantee that they will
8825 raise an exception, but may be treated as NOP's. In practice, it
8826 may only be safe to rely on instructions matching:
8828 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8829 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8830 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8832 Even this may only be true if the condition predicate is true. The
8833 following use a condition predicate of ALWAYS so it is always TRUE.
8835 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8836 and NetBSD all use a software interrupt rather than an undefined
8837 instruction to force a trap. This can be handled by the
8838 abi-specific code during establishment of the gdbarch vector. */
8840 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8841 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8842 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8843 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8845 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8846 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8847 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8848 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8850 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8851 the program counter value to determine whether a 16-bit or 32-bit
8852 breakpoint should be used. It returns a pointer to a string of
8853 bytes that encode a breakpoint instruction, stores the length of
8854 the string to *lenptr, and adjusts the program counter (if
8855 necessary) to point to the actual memory location where the
8856 breakpoint should be inserted. */
8858 static const unsigned char *
8859 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8861 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8862 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8864 if (arm_pc_is_thumb (gdbarch, *pcptr))
/* Clear the Thumb bit so the breakpoint lands on the real address.  */
8866 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8868 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8869 check whether we are replacing a 32-bit instruction. */
8870 if (tdep->thumb2_breakpoint != NULL)
8873 if (target_read_memory (*pcptr, buf, 2) == 0)
8875 unsigned short inst1;
8876 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8877 if (thumb_insn_size (inst1) == 4)
8879 *lenptr = tdep->thumb2_breakpoint_size;
8880 return tdep->thumb2_breakpoint;
/* Default: 16-bit Thumb breakpoint (also used if the memory read
   above fails).  */
8885 *lenptr = tdep->thumb_breakpoint_size;
8886 return tdep->thumb_breakpoint;
8890 *lenptr = tdep->arm_breakpoint_size;
8891 return tdep->arm_breakpoint;
/* Implement the remote_breakpoint_from_pc gdbarch hook: like
   arm_breakpoint_from_pc, but reports the breakpoint "kind" used by
   the remote protocol, remapping a 4-byte Thumb-2 breakpoint to the
   protocol's distinct Thumb-2 kind value.  */
8896 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8899 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8901 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8902 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8903 that this is not confused with a 32-bit ARM breakpoint. */
8907 /* Extract from an array REGBUF containing the (raw) register state a
8908 function return value of type TYPE, and copy that, in virtual
8909 format, into VALBUF. */
/* Handles three cases: FPA float returns in F0, integer-like returns
   in r0 (and r1 for 8-byte values), and struct/union returns spread
   across consecutive core registers starting at r0.  */
8912 arm_extract_return_value (struct type *type, struct regcache *regs,
8915 struct gdbarch *gdbarch = get_regcache_arch (regs);
8916 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8918 if (TYPE_CODE_FLT == TYPE_CODE (type))
8920 switch (gdbarch_tdep (gdbarch)->fp_model)
8924 /* The value is in register F0 in internal format. We need to
8925 extract the raw value and then convert it to the desired
8927 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8929 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8930 convert_from_extended (floatformat_from_type (type), tmpbuf,
8931 valbuf, gdbarch_byte_order (gdbarch));
8935 case ARM_FLOAT_SOFT_FPA:
8936 case ARM_FLOAT_SOFT_VFP:
8937 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8938 not using the VFP ABI code. */
8940 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8941 if (TYPE_LENGTH (type) > 4)
8942 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8943 valbuf + INT_REGISTER_SIZE);
8947 internal_error (__FILE__, __LINE__,
8948 _("arm_extract_return_value: "
8949 "Floating point model not supported"));
8953 else if (TYPE_CODE (type) == TYPE_CODE_INT
8954 || TYPE_CODE (type) == TYPE_CODE_CHAR
8955 || TYPE_CODE (type) == TYPE_CODE_BOOL
8956 || TYPE_CODE (type) == TYPE_CODE_PTR
8957 || TYPE_CODE (type) == TYPE_CODE_REF
8958 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8960 /* If the type is a plain integer, then the access is
8961 straight-forward. Otherwise we have to play around a bit
8963 int len = TYPE_LENGTH (type);
8964 int regno = ARM_A1_REGNUM;
8969 /* By using store_unsigned_integer we avoid having to do
8970 anything special for small big-endian values. */
8971 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8972 store_unsigned_integer (valbuf,
8973 (len > INT_REGISTER_SIZE
8974 ? INT_REGISTER_SIZE : len),
8976 len -= INT_REGISTER_SIZE;
8977 valbuf += INT_REGISTER_SIZE;
8982 /* For a structure or union the behaviour is as if the value had
8983 been stored to word-aligned memory and then loaded into
8984 registers with 32-bit load instruction(s). */
8985 int len = TYPE_LENGTH (type);
8986 int regno = ARM_A1_REGNUM;
8987 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8991 regcache_cooked_read (regs, regno++, tmpbuf);
8992 memcpy (valbuf, tmpbuf,
8993 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8994 len -= INT_REGISTER_SIZE;
8995 valbuf += INT_REGISTER_SIZE;
9001 /* Will a function return an aggregate type in memory or in a
9002 register? Return 0 if an aggregate type can be returned in a
9003 register, 1 if it must be returned in memory. */
9006 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9009 enum type_code code;
9011 CHECK_TYPEDEF (type);
9013 /* In the ARM ABI, "integer" like aggregate types are returned in
9014 registers. For an aggregate type to be integer like, its size
9015 must be less than or equal to INT_REGISTER_SIZE and the
9016 offset of each addressable subfield must be zero. Note that bit
9017 fields are not addressable, and all addressable subfields of
9018 unions always start at offset zero.
9020 This function is based on the behaviour of GCC 2.95.1.
9021 See: gcc/arm.c: arm_return_in_memory() for details.
9023 Note: All versions of GCC before GCC 2.95.2 do not set up the
9024 parameters correctly for a function returning the following
9025 structure: struct { float f;}; This should be returned in memory,
9026 not a register. Richard Earnshaw sent me a patch, but I do not
9027 know of any way to detect if a function like the above has been
9028 compiled with the correct calling convention. */
9030 /* All aggregate types that won't fit in a register must be returned
9032 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9037 /* The AAPCS says all aggregates not larger than a word are returned
9039 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9042 /* The only aggregate types that can be returned in a register are
9043 structs and unions. Arrays must be returned in memory. */
9044 code = TYPE_CODE (type);
9045 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9050 /* Assume all other aggregate types can be returned in a register.
9051 Run a check for structures, unions and arrays. */
9054 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9057 /* Need to check if this struct/union is "integer" like. For
9058 this to be true, its size must be less than or equal to
9059 INT_REGISTER_SIZE and the offset of each addressable
9060 subfield must be zero. Note that bit fields are not
9061 addressable, and unions always start at offset zero. If any
9062 of the subfields is a floating point type, the struct/union
9063 cannot be an integer type. */
9065 /* For each field in the object, check:
9066 1) Is it FP? --> yes, nRc = 1;
9067 2) Is it addressable (bitpos != 0) and
9068 not packed (bitsize == 0)?
9072 for (i = 0; i < TYPE_NFIELDS (type); i++)
9074 enum type_code field_type_code;
9075 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9078 /* Is it a floating point type field? */
9079 if (field_type_code == TYPE_CODE_FLT)
9085 /* If bitpos != 0, then we have to care about it. */
9086 if (TYPE_FIELD_BITPOS (type, i) != 0)
9088 /* Bitfields are not addressable. If the field bitsize is
9089 zero, then the field is not packed. Hence it cannot be
9090 a bitfield or any other packed type. */
9091 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9103 /* Write into appropriate registers a function return value of type
9104 TYPE, given in virtual format. */
/* Mirror of arm_extract_return_value: FPA floats go to F0, integer
   values to r0 (and r1 for 8-byte values), struct/union contents to
   consecutive core registers starting at r0.  */
9107 arm_store_return_value (struct type *type, struct regcache *regs,
9108 const gdb_byte *valbuf)
9110 struct gdbarch *gdbarch = get_regcache_arch (regs);
9111 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9113 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9115 gdb_byte buf[MAX_REGISTER_SIZE];
9117 switch (gdbarch_tdep (gdbarch)->fp_model)
9121 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9122 gdbarch_byte_order (gdbarch));
9123 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9126 case ARM_FLOAT_SOFT_FPA:
9127 case ARM_FLOAT_SOFT_VFP:
9128 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9129 not using the VFP ABI code. */
9131 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9132 if (TYPE_LENGTH (type) > 4)
9133 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9134 valbuf + INT_REGISTER_SIZE);
9138 internal_error (__FILE__, __LINE__,
9139 _("arm_store_return_value: Floating "
9140 "point model not supported"));
9144 else if (TYPE_CODE (type) == TYPE_CODE_INT
9145 || TYPE_CODE (type) == TYPE_CODE_CHAR
9146 || TYPE_CODE (type) == TYPE_CODE_BOOL
9147 || TYPE_CODE (type) == TYPE_CODE_PTR
9148 || TYPE_CODE (type) == TYPE_CODE_REF
9149 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9151 if (TYPE_LENGTH (type) <= 4)
9153 /* Values of one word or less are zero/sign-extended and
9155 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9156 LONGEST val = unpack_long (type, valbuf);
9158 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9159 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9163 /* Integral values greater than one word are stored in consecutive
9164 registers starting with r0. This will always be a multiple of
9165 the register size. */
9166 int len = TYPE_LENGTH (type);
9167 int regno = ARM_A1_REGNUM;
9171 regcache_cooked_write (regs, regno++, valbuf);
9172 len -= INT_REGISTER_SIZE;
9173 valbuf += INT_REGISTER_SIZE;
9179 /* For a structure or union the behaviour is as if the value had
9180 been stored to word-aligned memory and then loaded into
9181 registers with 32-bit load instruction(s). */
9182 int len = TYPE_LENGTH (type);
9183 int regno = ARM_A1_REGNUM;
9184 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9188 memcpy (tmpbuf, valbuf,
9189 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9190 regcache_cooked_write (regs, regno++, tmpbuf);
9191 len -= INT_REGISTER_SIZE;
9192 valbuf += INT_REGISTER_SIZE;
9198 /* Handle function return values. */
/* Implement the gdbarch return_value hook: decide between the
   register and struct (memory) return conventions, and read/write the
   value via READBUF/WRITEBUF when requested.  VFP-ABI candidates are
   handled first, in VFP registers s/d/q.  */
9200 static enum return_value_convention
9201 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9202 struct type *valtype, struct regcache *regcache,
9203 gdb_byte *readbuf, const gdb_byte *writebuf)
9205 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9206 struct type *func_type = function ? value_type (function) : NULL;
9207 enum arm_vfp_cprc_base_type vfp_base_type;
9210 if (arm_vfp_abi_for_function (gdbarch, func_type)
9211 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9213 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9214 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9216 for (i = 0; i < vfp_base_count; i++)
/* Quad registers have no single raw register; use the helpers
   that access the two underlying double registers.  */
9218 if (reg_char == 'q')
9221 arm_neon_quad_write (gdbarch, regcache, i,
9222 writebuf + i * unit_length);
9225 arm_neon_quad_read (gdbarch, regcache, i,
9226 readbuf + i * unit_length);
9233 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9234 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9237 regcache_cooked_write (regcache, regnum,
9238 writebuf + i * unit_length);
9240 regcache_cooked_read (regcache, regnum,
9241 readbuf + i * unit_length);
9244 return RETURN_VALUE_REGISTER_CONVENTION;
9247 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9248 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9249 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9251 if (tdep->struct_return == pcc_struct_return
9252 || arm_return_in_memory (gdbarch, valtype))
9253 return RETURN_VALUE_STRUCT_CONVENTION;
9256 /* AAPCS returns complex types longer than a register in memory. */
9257 if (tdep->arm_abi != ARM_ABI_APCS
9258 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9259 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9260 return RETURN_VALUE_STRUCT_CONVENTION;
9263 arm_store_return_value (valtype, regcache, writebuf);
9266 arm_extract_return_value (valtype, regcache, readbuf);
9268 return RETURN_VALUE_REGISTER_CONVENTION;
/* Implement gdbarch_get_longjmp_target: fetch the saved PC out of the
   jmp_buf whose address is in r0, using the per-tdep slot index
   (jb_pc) and element size (jb_elt_size).  Stores the PC through PC
   on success.  */
9273 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9275 struct gdbarch *gdbarch = get_frame_arch (frame);
9276 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9277 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9279 gdb_byte buf[INT_REGISTER_SIZE];
9281 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9283 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9287 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9291 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9292 return the target PC. Otherwise return 0. */
9295 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9299 CORE_ADDR start_addr;
9301 /* Find the starting address and name of the function containing the PC. */
9302 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9304 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9306 start_addr = arm_skip_bx_reg (frame, pc);
9307 if (start_addr != 0)
9313 /* If PC is in a Thumb call or return stub, return the address of the
9314 target PC, which is in a register. The thunk functions are called
9315 _call_via_xx, where x is the register name. The possible names
9316 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9317 functions, named __ARM_call_via_r[0-7]. */
9318 if (strncmp (name, "_call_via_", 10) == 0
9319 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9321 /* Use the name suffix to determine which register contains the
9323 static char *table[15] =
9324 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9325 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
/* The register name is the last two characters of the stub name.  */
9328 int offset = strlen (name) - 2;
9330 for (regno = 0; regno <= 14; regno++)
9331 if (strcmp (&name[offset], table[regno]) == 0)
9332 return get_frame_register_unsigned (frame, regno);
9335 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9336 non-interworking calls to foo. We could decode the stubs
9337 to find the target but it's easier to use the symbol table. */
9338 namelen = strlen (name);
9339 if (name[0] == '_' && name[1] == '_'
9340 && ((namelen > 2 + strlen ("_from_thumb")
9341 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9342 strlen ("_from_thumb")) == 0)
9343 || (namelen > 2 + strlen ("_from_arm")
9344 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9345 strlen ("_from_arm")) == 0)))
9348 int target_len = namelen - 2;
9349 struct bound_minimal_symbol minsym;
9350 struct objfile *objfile;
9351 struct obj_section *sec;
/* Strip the "__" prefix and the "_from_{arm,thumb}" suffix to
   recover the target symbol name.  */
9353 if (name[namelen - 1] == 'b')
9354 target_len -= strlen ("_from_thumb");
9356 target_len -= strlen ("_from_arm");
9358 target_name = alloca (target_len + 1);
9359 memcpy (target_name, name + 2, target_len);
9360 target_name[target_len] = '\0';
9362 sec = find_pc_section (pc);
9363 objfile = (sec == NULL) ? NULL : sec->objfile;
9364 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9365 if (minsym.minsym != NULL)
9366 return BMSYMBOL_VALUE_ADDRESS (minsym);
9371 return 0; /* not a stub */
9375 set_arm_command (char *args, int from_tty)
9377 printf_unfiltered (_("\
9378 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9379 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
/* Handler for the "show arm" prefix command: list the current values
   of all "show arm" subcommands.  */
9383 show_arm_command (char *args, int from_tty)
9385 cmd_show_list (showarmcmdlist, from_tty, "");
/* Re-select the current gdbarch after an ARM-specific setting (ABI,
   FP model, disassembly style, ...) changes.  No-op when the current
   architecture is not ARM.  */
9389 arm_update_current_architecture (void)
9391 struct gdbarch_info info;
9393 /* If the current architecture is not ARM, we have nothing to do. */
9394 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9397 /* Update the architecture. */
9398 gdbarch_info_init (&info);
9400 if (!gdbarch_update_p (info))
9401 internal_error (__FILE__, __LINE__, _("could not update architecture"))
/* "set arm fpu" handler: translate the string chosen by the user
   (current_fp_model) into the corresponding enum value and refresh
   the architecture.  The loop must find a match, since the command
   only accepts the enumerated strings.  */
9405 set_fp_model_sfunc (char *args, int from_tty,
9406 struct cmd_list_element *c)
9410 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9411 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9413 arm_fp_model = fp_model;
9417 if (fp_model == ARM_FLOAT_LAST)
9418 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9421 arm_update_current_architecture ();
/* "show arm fpu" handler: print the selected FP model; when it is
   "auto" and the current architecture is ARM, also show the model
   actually in effect.  */
9425 show_fp_model (struct ui_file *file, int from_tty,
9426 struct cmd_list_element *c, const char *value)
9428 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9430 if (arm_fp_model == ARM_FLOAT_AUTO
9431 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9432 fprintf_filtered (file, _("\
9433 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9434 fp_model_strings[tdep->fp_model]);
9436 fprintf_filtered (file, _("\
9437 The current ARM floating point model is \"%s\".\n"),
9438 fp_model_strings[arm_fp_model]);
/* "set arm abi" handler: translate the string chosen by the user
   (arm_abi_string) into the corresponding enum value and refresh the
   architecture.  */
9442 arm_set_abi (char *args, int from_tty,
9443 struct cmd_list_element *c)
9447 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9448 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9450 arm_abi_global = arm_abi;
9454 if (arm_abi == ARM_ABI_LAST)
9455 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9458 arm_update_current_architecture ();
/* "show arm abi" handler: print the selected ABI; when it is "auto"
   and the current architecture is ARM, also show the ABI actually in
   effect.  */
9462 arm_show_abi (struct ui_file *file, int from_tty,
9463 struct cmd_list_element *c, const char *value)
9465 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9467 if (arm_abi_global == ARM_ABI_AUTO
9468 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9469 fprintf_filtered (file, _("\
9470 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9471 arm_abi_strings[tdep->arm_abi]);
9473 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
/* "show arm fallback-mode" handler: report the execution mode assumed
   when no symbol information is available.  */
9478 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9479 struct cmd_list_element *c, const char *value)
9481 fprintf_filtered (file,
9482 _("The current execution mode assumed "
9483 "(when symbols are unavailable) is \"%s\".\n"),
9484 arm_fallback_mode_string);
9488 arm_show_force_mode (struct ui_file *file, int from_tty,
9489 struct cmd_list_element *c, const char *value)
9491 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9493 fprintf_filtered (file,
9494 _("The current execution mode assumed "
9495 "(even when symbols are available) is \"%s\".\n"),
9496 arm_force_mode_string);
9499 /* If the user changes the register disassembly style used for info
9500 register and other commands, we have to also switch the style used
9501 in opcodes for disassembly output. This function is run in the "set
9502 arm disassembly" command, and does that. */
9505 set_disassembly_style_sfunc (char *args, int from_tty,
9506 struct cmd_list_element *c)
9508 set_disassembly_style ();
9511 /* Return the ARM register name corresponding to register I. */
/* Pseudo registers appear after the NUM_REGS raw registers: first 32
   VFP single-precision views (s0-s31), then 16 NEON quad views
   (q0-q15), when the tdep provides them.  */
9513 arm_register_name (struct gdbarch *gdbarch, int i)
9515 const int num_regs = gdbarch_num_regs (gdbarch);
9517 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9518 && i >= num_regs && i < num_regs + 32)
9520 static const char *const vfp_pseudo_names[] = {
9521 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9522 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9523 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9524 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9527 return vfp_pseudo_names[i - num_regs];
9530 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9531 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9533 static const char *const neon_pseudo_names[] = {
9534 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9535 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9538 return neon_pseudo_names[i - num_regs - 32];
9541 if (i >= ARRAY_SIZE (arm_register_names))
9542 /* These registers are only supported on targets which supply
9543 an XML description. */
9546 return arm_register_names[i];
/* Propagate the user's chosen register-name style (the
   disassembly_style setting) into the opcodes disassembler, so "info
   registers" and disassembly output agree.  */
9550 set_disassembly_style (void)
9554 /* Find the style that the user wants. */
9555 for (current = 0; current < num_disassembly_options; current++)
9556 if (disassembly_style == valid_disassembly_styles[current])
/* The setting was chosen from this very table, so a match must
   have been found.  */
9558 gdb_assert (current < num_disassembly_options);
9560 /* Synchronize the disassembler. */
9561 set_arm_regname_option (current);
9564 /* Test whether the coff symbol specific value corresponds to a Thumb
9568 coff_sym_is_thumb (int val)
9570 return (val == C_THUMBEXT
9571 || val == C_THUMBSTAT
9572 || val == C_THUMBEXTFUNC
9573 || val == C_THUMBSTATFUNC
9574 || val == C_THUMBLABEL);
9577 /* arm_coff_make_msymbol_special()
9578 arm_elf_make_msymbol_special()
9580 These functions test whether the COFF or ELF symbol corresponds to
9581 an address in thumb code, and set a "special" bit in a minimal
9582 symbol to indicate that it does. */
/* ELF variant: the branch type stored in the ELF symbol tells us
   whether the target is Thumb.  */
9585 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9587 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9588 == ST_BRANCH_TO_THUMB)
9589 MSYMBOL_SET_SPECIAL (msym);
/* COFF variant: VAL is the symbol's storage class; mark the minimal
   symbol as Thumb when the class is one of the Thumb classes.  */
9593 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9595 if (coff_sym_is_thumb (val))
9596 MSYMBOL_SET_SPECIAL (msym);
/* Per-objfile data destructor: free the mapping-symbol vector of every
   section.  The arm_per_objfile struct itself is obstack-allocated
   and freed with the objfile.  */
9600 arm_objfile_data_free (struct objfile *objfile, void *arg)
9602 struct arm_per_objfile *data = arg;
9605 for (i = 0; i < objfile->obfd->section_count; i++)
9606 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
/* Record an ARM ELF mapping symbol ($a, $t or $d) in the per-objfile
   per-section vectors, kept sorted by value so that lookups can
   bisect.  Other $-symbols are ignored.  */
9610 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9613 const char *name = bfd_asymbol_name (sym);
9614 struct arm_per_objfile *data;
9615 VEC(arm_mapping_symbol_s) **map_p;
9616 struct arm_mapping_symbol new_map_sym;
9618 gdb_assert (name[0] == '$');
9619 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
/* Create the per-objfile data lazily on first mapping symbol.  */
9622 data = objfile_data (objfile, arm_objfile_data_key);
9625 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9626 struct arm_per_objfile);
9627 set_objfile_data (objfile, arm_objfile_data_key, data);
9628 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9629 objfile->obfd->section_count,
9630 VEC(arm_mapping_symbol_s) *);
9632 map_p = &data->section_maps[bfd_get_section (sym)->index];
9634 new_map_sym.value = sym->value;
9635 new_map_sym.type = name[1];
9637 /* Assume that most mapping symbols appear in order of increasing
9638 value. If they were randomly distributed, it would be faster to
9639 always push here and then sort at first use. */
9640 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9642 struct arm_mapping_symbol *prev_map_sym;
9644 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9645 if (prev_map_sym->value >= sym->value)
/* Out-of-order symbol: insert at its sorted position.  */
9648 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9649 arm_compare_mapping_symbols);
9650 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9655 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
/* gdbarch write_pc hook: set the PC, and keep the Thumb (T) bit of the
   status register consistent with the mode of the target address.  */
9659 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9661 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9662 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9664 /* If necessary, set the T bit. */
9667 ULONGEST val, t_bit;
9668 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9669 t_bit = arm_psr_thumb_bit (gdbarch);
/* Set the T bit when PC is Thumb code; the else-arm (not fully visible
   in this listing) clears it for ARM code.  */
9670 if (arm_pc_is_thumb (gdbarch, pc))
9671 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9674 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9679 /* Read the contents of a NEON quad register, by reading from two
9680 double registers. This is used to implement the quad pseudo
9681 registers, and for argument passing in case the quad registers are
9682 missing; vectors are passed in quad registers when using the VFP
9683 ABI, even if a NEON unit is not present. REGNUM is the index of
9684 the quad register, in [0, 15]. */
9686 static enum register_status
9687 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9688 int regnum, gdb_byte *buf)
9691 gdb_byte reg_buf[8];
9692 int offset, double_regnum;
9693 enum register_status status;
/* Map qN onto d(2N): look up the raw register number by name so the
   mapping survives target descriptions with unusual numbering.  */
9695 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9696 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9699 /* d0 is always the least significant half of q0. */
9700 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
/* Read the two 8-byte halves, propagating any invalid-register
   status to the caller.  */
9705 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9706 if (status != REG_VALID)
9708 memcpy (buf + offset, reg_buf, 8);
9710 offset = 8 - offset;
9711 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9712 if (status != REG_VALID)
9714 memcpy (buf + offset, reg_buf, 8);
/* gdbarch pseudo_register_read hook.  Pseudo REGNUM (relative to the
   raw-register count) is either a NEON quad register (q0-q15) or a VFP
   single-precision register (s0-s31) synthesized from the doubles.  */
9719 static enum register_status
9720 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9721 int regnum, gdb_byte *buf)
9723 const int num_regs = gdbarch_num_regs (gdbarch);
9725 gdb_byte reg_buf[8];
9726 int offset, double_regnum;
9728 gdb_assert (regnum >= num_regs);
/* After rebasing, pseudo indices [32, 48) are the quad registers.  */
9731 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9732 /* Quad-precision register. */
9733 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9736 enum register_status status;
9738 /* Single-precision register. */
9739 gdb_assert (regnum < 32);
9741 /* s0 is always the least significant half of d0. */
9742 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9743 offset = (regnum & 1) ? 0 : 4;
9745 offset = (regnum & 1) ? 4 : 0;
/* s(2N) and s(2N+1) live in the two halves of d(N).  */
9747 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9748 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9751 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9752 if (status == REG_VALID)
9753 memcpy (buf, reg_buf + offset, 4);
9758 /* Store the contents of BUF to a NEON quad register, by writing to
9759 two double registers. This is used to implement the quad pseudo
9760 registers, and for argument passing in case the quad registers are
9761 missing; vectors are passed in quad registers when using the VFP
9762 ABI, even if a NEON unit is not present. REGNUM is the index
9763 of the quad register, in [0, 15]. */
9766 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9767 int regnum, const gdb_byte *buf)
9770 int offset, double_regnum;
/* Map qN onto d(2N), by name, mirroring arm_neon_quad_read.  */
9772 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9773 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9776 /* d0 is always the least significant half of q0. */
9777 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
/* Write each 8-byte half into its double register.  */
9782 regcache_raw_write (regcache, double_regnum, buf + offset);
9783 offset = 8 - offset;
9784 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
/* gdbarch pseudo_register_write hook: counterpart of arm_pseudo_read.
   Writes a quad register via arm_neon_quad_write, or patches one half
   of a double register for a single-precision pseudo register.  */
9788 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9789 int regnum, const gdb_byte *buf)
9791 const int num_regs = gdbarch_num_regs (gdbarch);
9793 gdb_byte reg_buf[8];
9794 int offset, double_regnum;
9796 gdb_assert (regnum >= num_regs);
9799 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9800 /* Quad-precision register. */
9801 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9804 /* Single-precision register. */
9805 gdb_assert (regnum < 32);
9807 /* s0 is always the least significant half of d0. */
9808 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9809 offset = (regnum & 1) ? 0 : 4;
9811 offset = (regnum & 1) ? 4 : 0;
9813 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9814 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
/* Read-modify-write: preserve the other half of the double.  */
9817 regcache_raw_read (regcache, double_regnum, reg_buf);
9818 memcpy (reg_buf + offset, buf, 4);
9819 regcache_raw_write (regcache, double_regnum, reg_buf);
/* user-reg read callback for the register aliases registered in
   arm_gdbarch_init; BATON points at the aliased register number.  */
9823 static struct value *
9824 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9826 const int *reg_p = baton;
9827 return value_of_register (*reg_p, frame);
/* OS ABI sniffer for ARM ELF binaries.  Returns GDB_OSABI_UNKNOWN
   unless the e_ident OSABI field gives us something to go on.  */
9830 static enum gdb_osabi
9831 arm_elf_osabi_sniffer (bfd *abfd)
9833 unsigned int elfosabi;
9834 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9836 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9838 if (elfosabi == ELFOSABI_ARM)
9839 /* GNU tools use this value. Check note sections in this case,
/* Scan the sections for an ABI-tag note to refine the guess.  */
9841 bfd_map_over_sections (abfd,
9842 generic_elf_osabi_sniff_abi_tag_sections,
9845 /* Anything else will be handled by the generic ELF sniffer. */
/* gdbarch register_reggroup_p hook: like the default, but forces the
   FPS status register into the float group despite its integer type.  */
9850 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9851 struct reggroup *group)
9853 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9854 this, FPS register belongs to save_regroup, restore_reggroup, and
9855 all_reggroup, of course. */
9856 if (regnum == ARM_FPS_REGNUM)
9857 return (group == float_reggroup
9858 || group == save_reggroup
9859 || group == restore_reggroup
9860 || group == all_reggroup);
9862 return default_register_reggroup_p (gdbarch, regnum, group);
9866 /* For backward-compatibility we allow two 'g' packet lengths with
9867 the remote protocol depending on whether FPA registers are
9868 supplied. M-profile targets do not have FPA registers, but some
9869 stubs already exist in the wild which use a 'g' packet which
9870 supplies them albeit with dummy values. The packet format which
9871 includes FPA registers should be considered deprecated for
9872 M-profile targets. */
/* Register the candidate 'g'-packet sizes and matching target
   descriptions for M-profile targets; other profiles get no guess.  */
9875 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9877 if (gdbarch_tdep (gdbarch)->is_m)
9879 /* If we know from the executable this is an M-profile target,
9880 cater for remote targets whose register set layout is the
9881 same as the FPA layout. */
9882 register_remote_g_packet_guess (gdbarch,
9883 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9884 (16 * INT_REGISTER_SIZE)
9885 + (8 * FP_REGISTER_SIZE)
9886 + (2 * INT_REGISTER_SIZE),
9887 tdesc_arm_with_m_fpa_layout);
9889 /* The regular M-profile layout. */
9890 register_remote_g_packet_guess (gdbarch,
9891 /* r0-r12,sp,lr,pc; xpsr */
9892 (16 * INT_REGISTER_SIZE)
9893 + INT_REGISTER_SIZE,
9896 /* M-profile plus M4F VFP. */
9897 register_remote_g_packet_guess (gdbarch,
9898 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9899 (16 * INT_REGISTER_SIZE)
9900 + (16 * VFP_REGISTER_SIZE)
9901 + (2 * INT_REGISTER_SIZE),
9902 tdesc_arm_with_m_vfp_d16);
9905 /* Otherwise we don't have a useful guess. */
9909 /* Initialize the current architecture based on INFO. If possible,
9910 re-use an architecture from ARCHES, which is a list of
9911 architectures already created during this debugging session.
9913 Called e.g. at program startup, when reading a core file, and when
9914 reading a binary file. */
9916 static struct gdbarch *
9917 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9919 struct gdbarch_tdep *tdep;
9920 struct gdbarch *gdbarch;
9921 struct gdbarch_list *best_arch;
9922 enum arm_abi_kind arm_abi = arm_abi_global;
9923 enum arm_float_model fp_model = arm_fp_model;
9924 struct tdesc_arch_data *tdesc_data = NULL;
9926 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9928 int have_fpa_registers = 1;
9929 const struct target_desc *tdesc = info.target_desc;
9931 /* If we have an object to base this architecture on, try to determine
/* Step 1: sniff the ABI and float model from the BFD, unless the user
   forced them via "set arm abi" / "set arm fpu".  */
9934 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9936 int ei_osabi, e_flags;
9938 switch (bfd_get_flavour (info.abfd))
9940 case bfd_target_aout_flavour:
9941 /* Assume it's an old APCS-style ABI. */
9942 arm_abi = ARM_ABI_APCS;
9945 case bfd_target_coff_flavour:
9946 /* Assume it's an old APCS-style ABI. */
9948 arm_abi = ARM_ABI_APCS;
9951 case bfd_target_elf_flavour:
9952 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9953 e_flags = elf_elfheader (info.abfd)->e_flags;
9955 if (ei_osabi == ELFOSABI_ARM)
9957 /* GNU tools used to use this value, but do not for EABI
9958 objects. There's nowhere to tag an EABI version
9959 anyway, so assume APCS. */
9960 arm_abi = ARM_ABI_APCS;
9962 else if (ei_osabi == ELFOSABI_NONE)
9964 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9965 int attr_arch, attr_profile;
9969 case EF_ARM_EABI_UNKNOWN:
9970 /* Assume GNU tools. */
9971 arm_abi = ARM_ABI_APCS;
9974 case EF_ARM_EABI_VER4:
9975 case EF_ARM_EABI_VER5:
9976 arm_abi = ARM_ABI_AAPCS;
9977 /* EABI binaries default to VFP float ordering.
9978 They may also contain build attributes that can
9979 be used to identify if the VFP argument-passing
9981 if (fp_model == ARM_FLOAT_AUTO)
/* Consult the Tag_ABI_VFP_args build attribute.  */
9984 switch (bfd_elf_get_obj_attr_int (info.abfd,
9988 case AEABI_VFP_args_base:
9989 /* "The user intended FP parameter/result
9990 passing to conform to AAPCS, base
9992 fp_model = ARM_FLOAT_SOFT_VFP;
9994 case AEABI_VFP_args_vfp:
9995 /* "The user intended FP parameter/result
9996 passing to conform to AAPCS, VFP
9998 fp_model = ARM_FLOAT_VFP;
10000 case AEABI_VFP_args_toolchain:
10001 /* "The user intended FP parameter/result
10002 passing to conform to tool chain-specific
10003 conventions" - we don't know any such
10004 conventions, so leave it as "auto". */
10006 case AEABI_VFP_args_compatible:
10007 /* "Code is compatible with both the base
10008 and VFP variants; the user did not permit
10009 non-variadic functions to pass FP
10010 parameters/results" - leave it as
10014 /* Attribute value not mentioned in the
10015 November 2012 ABI, so leave it as
10020 fp_model = ARM_FLOAT_SOFT_VFP;
10026 /* Leave it as "auto". */
10027 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10032 /* Detect M-profile programs. This only works if the
10033 executable file includes build attributes; GCC does
10034 copy them to the executable, but e.g. RealView does
10036 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10038 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10040 Tag_CPU_arch_profile);
10041 /* GCC specifies the profile for v6-M; RealView only
10042 specifies the profile for architectures starting with
10043 V7 (as opposed to architectures with a tag
10044 numerically greater than TAG_CPU_ARCH_V7). */
10045 if (!tdesc_has_registers (tdesc)
10046 && (attr_arch == TAG_CPU_ARCH_V6_M
10047 || attr_arch == TAG_CPU_ARCH_V6S_M
10048 || attr_profile == 'M'))
/* Legacy (pre-EABI) ELF: fall back to the soft/VFP float e_flags.  */
10053 if (fp_model == ARM_FLOAT_AUTO)
10055 int e_flags = elf_elfheader (info.abfd)->e_flags;
10057 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10060 /* Leave it as "auto". Strictly speaking this case
10061 means FPA, but almost nobody uses that now, and
10062 many toolchains fail to set the appropriate bits
10063 for the floating-point model they use. */
10065 case EF_ARM_SOFT_FLOAT:
10066 fp_model = ARM_FLOAT_SOFT_FPA;
10068 case EF_ARM_VFP_FLOAT:
10069 fp_model = ARM_FLOAT_VFP;
10071 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10072 fp_model = ARM_FLOAT_SOFT_VFP;
/* BE8 binaries hold little-endian code in a big-endian image.  */
10077 if (e_flags & EF_ARM_BE8)
10078 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10083 /* Leave it as "auto". */
/* Step 2: validate any target description against the register sets
   we know how to handle, collecting tdesc_data mappings as we go.  */
10088 /* Check any target description for validity. */
10089 if (tdesc_has_registers (tdesc))
10091 /* For most registers we require GDB's default names; but also allow
10092 the numeric names for sp / lr / pc, as a convenience. */
10093 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10094 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10095 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10097 const struct tdesc_feature *feature;
10100 feature = tdesc_find_feature (tdesc,
10101 "org.gnu.gdb.arm.core");
10102 if (feature == NULL)
10104 feature = tdesc_find_feature (tdesc,
10105 "org.gnu.gdb.arm.m-profile");
10106 if (feature == NULL)
10112 tdesc_data = tdesc_data_alloc ();
10115 for (i = 0; i < ARM_SP_REGNUM; i++)
10116 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10117 arm_register_names[i]);
10118 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10121 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10124 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
/* M-profile exposes "xpsr" where A/R-profile exposes "cpsr".  */
10128 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10129 ARM_PS_REGNUM, "xpsr");
10131 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10132 ARM_PS_REGNUM, "cpsr");
10136 tdesc_data_cleanup (tdesc_data);
10140 feature = tdesc_find_feature (tdesc,
10141 "org.gnu.gdb.arm.fpa");
10142 if (feature != NULL)
10145 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10146 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10147 arm_register_names[i]);
10150 tdesc_data_cleanup (tdesc_data);
10155 have_fpa_registers = 0;
10157 feature = tdesc_find_feature (tdesc,
10158 "org.gnu.gdb.xscale.iwmmxt");
10159 if (feature != NULL)
10161 static const char *const iwmmxt_names[] = {
10162 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10163 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10164 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10165 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10169 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10171 &= tdesc_numbered_register (feature, tdesc_data, i,
10172 iwmmxt_names[i - ARM_WR0_REGNUM]);
10174 /* Check for the control registers, but do not fail if they
10176 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10177 tdesc_numbered_register (feature, tdesc_data, i,
10178 iwmmxt_names[i - ARM_WR0_REGNUM]);
10180 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10182 &= tdesc_numbered_register (feature, tdesc_data, i,
10183 iwmmxt_names[i - ARM_WR0_REGNUM]);
10187 tdesc_data_cleanup (tdesc_data);
10192 /* If we have a VFP unit, check whether the single precision registers
10193 are present. If not, then we will synthesize them as pseudo
10195 feature = tdesc_find_feature (tdesc,
10196 "org.gnu.gdb.arm.vfp");
10197 if (feature != NULL)
10199 static const char *const vfp_double_names[] = {
10200 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10201 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10202 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10203 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10206 /* Require the double precision registers. There must be either
10209 for (i = 0; i < 32; i++)
10211 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10213 vfp_double_names[i]);
/* Accept a D16 unit: 16 doubles present is also valid.  */
10217 if (!valid_p && i == 16)
10220 /* Also require FPSCR. */
10221 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10222 ARM_FPSCR_REGNUM, "fpscr");
10225 tdesc_data_cleanup (tdesc_data);
/* No "s0" in the description => synthesize s0-s31 as pseudos.  */
10229 if (tdesc_unnumbered_register (feature, "s0") == 0)
10230 have_vfp_pseudos = 1;
10232 have_vfp_registers = 1;
10234 /* If we have VFP, also check for NEON. The architecture allows
10235 NEON without VFP (integer vector operations only), but GDB
10236 does not support that. */
10237 feature = tdesc_find_feature (tdesc,
10238 "org.gnu.gdb.arm.neon");
10239 if (feature != NULL)
10241 /* NEON requires 32 double-precision registers. */
10244 tdesc_data_cleanup (tdesc_data);
10248 /* If there are quad registers defined by the stub, use
10249 their type; otherwise (normally) provide them with
10250 the default type. */
10251 if (tdesc_unnumbered_register (feature, "q0") == 0)
10252 have_neon_pseudos = 1;
/* Step 3: re-use an existing gdbarch when all discriminators match.  */
10259 /* If there is already a candidate, use it. */
10260 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10262 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10264 if (arm_abi != ARM_ABI_AUTO
10265 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10268 if (fp_model != ARM_FLOAT_AUTO
10269 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10272 /* There are various other properties in tdep that we do not
10273 need to check here: those derived from a target description,
10274 since gdbarches with a different target description are
10275 automatically disqualified. */
10277 /* Do check is_m, though, since it might come from the binary. */
10278 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10281 /* Found a match. */
10285 if (best_arch != NULL)
10287 if (tdesc_data != NULL)
10288 tdesc_data_cleanup (tdesc_data);
10289 return best_arch->gdbarch;
/* Step 4: no match -- build a fresh gdbarch + tdep.  */
10292 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10293 gdbarch = gdbarch_alloc (&info, tdep);
10295 /* Record additional information about the architecture we are defining.
10296 These are gdbarch discriminators, like the OSABI. */
10297 tdep->arm_abi = arm_abi;
10298 tdep->fp_model = fp_model;
10300 tdep->have_fpa_registers = have_fpa_registers;
10301 tdep->have_vfp_registers = have_vfp_registers;
10302 tdep->have_vfp_pseudos = have_vfp_pseudos;
10303 tdep->have_neon_pseudos = have_neon_pseudos;
10304 tdep->have_neon = have_neon;
10306 arm_register_g_packet_guesses (gdbarch);
/* Select breakpoint instruction encodings by the code byte order
   (which may differ from the data byte order, e.g. BE8).  */
10309 switch (info.byte_order_for_code)
10311 case BFD_ENDIAN_BIG:
10312 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10313 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10314 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10315 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10319 case BFD_ENDIAN_LITTLE:
10320 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10321 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10322 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10323 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10328 internal_error (__FILE__, __LINE__,
10329 _("arm_gdbarch_init: bad byte order for float format"));
10332 /* On ARM targets char defaults to unsigned. */
10333 set_gdbarch_char_signed (gdbarch, 0);
10335 /* Note: for displaced stepping, this includes the breakpoint, and one word
10336 of additional scratch space. This setting isn't used for anything beside
10337 displaced stepping at present. */
10338 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10340 /* This should be low enough for everything. */
10341 tdep->lowest_pc = 0x20;
10342 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10344 /* The default, for both APCS and AAPCS, is to return small
10345 structures in registers. */
10346 tdep->struct_return = reg_struct_return;
10348 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10349 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10351 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10353 /* Frame handling. */
10354 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10355 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10356 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10358 frame_base_set_default (gdbarch, &arm_normal_base);
10360 /* Address manipulation. */
10361 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10363 /* Advance PC across function entry code. */
10364 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10366 /* Detect whether PC is in function epilogue. */
10367 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10369 /* Skip trampolines. */
10370 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10372 /* The stack grows downward. */
10373 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10375 /* Breakpoint manipulation. */
10376 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10377 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10378 arm_remote_breakpoint_from_pc);
10380 /* Information about registers, etc. */
10381 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10382 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10383 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10384 set_gdbarch_register_type (gdbarch, arm_register_type);
10385 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10387 /* This "info float" is FPA-specific. Use the generic version if we
10388 do not have FPA. */
10389 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10390 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10392 /* Internal <-> external register number maps. */
10393 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10394 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10396 set_gdbarch_register_name (gdbarch, arm_register_name);
10398 /* Returning results. */
10399 set_gdbarch_return_value (gdbarch, arm_return_value);
10402 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10404 /* Minsymbol frobbing. */
10405 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10406 set_gdbarch_coff_make_msymbol_special (gdbarch,
10407 arm_coff_make_msymbol_special);
10408 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10410 /* Thumb-2 IT block support. */
10411 set_gdbarch_adjust_breakpoint_address (gdbarch,
10412 arm_adjust_breakpoint_address);
10414 /* Virtual tables. */
10415 set_gdbarch_vbit_in_delta (gdbarch, 1);
10417 /* Hook in the ABI-specific overrides, if they have been registered. */
10418 gdbarch_init_osabi (info, gdbarch);
10420 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10422 /* Add some default predicates. */
/* NOTE(review): append order determines unwinder priority -- the
   M-profile exception sniffer is registered first deliberately.  */
10424 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10425 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10426 dwarf2_append_unwinders (gdbarch);
10427 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10428 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10430 /* Now we have tuned the configuration, set a few final things,
10431 based on what the OS ABI has told us. */
10433 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10434 binaries are always marked. */
10435 if (tdep->arm_abi == ARM_ABI_AUTO)
10436 tdep->arm_abi = ARM_ABI_APCS;
10438 /* Watchpoints are not steppable. */
10439 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10441 /* We used to default to FPA for generic ARM, but almost nobody
10442 uses that now, and we now provide a way for the user to force
10443 the model. So default to the most useful variant. */
10444 if (tdep->fp_model == ARM_FLOAT_AUTO)
10445 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10447 if (tdep->jb_pc >= 0)
10448 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10450 /* Floating point sizes and format. */
10451 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
/* FPA doubles are IEEE but with the two 32-bit words swapped on
   little-endian targets; use the mixed-endian format for them.  */
10452 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10454 set_gdbarch_double_format
10455 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10456 set_gdbarch_long_double_format
10457 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10461 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10462 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10465 if (have_vfp_pseudos)
10467 /* NOTE: These are the only pseudo registers used by
10468 the ARM target at the moment. If more are added, a
10469 little more care in numbering will be needed. */
10471 int num_pseudos = 32;
10472 if (have_neon_pseudos)
10474 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10475 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10476 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10481 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10483 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10485 /* Override tdesc_register_type to adjust the types of VFP
10486 registers for NEON. */
10487 set_gdbarch_register_type (gdbarch, arm_register_type);
10490 /* Add standard register aliases. We add aliases even for those
10491 names which are used by the current architecture - it's simpler,
10492 and does no harm, since nothing ever lists user registers. */
10493 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10494 user_reg_add (gdbarch, arm_register_aliases[i].name,
10495 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
/* gdbarch dump hook for "maint print architecture": print the
   ARM-specific tdep fields.  */
10501 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10503 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10508 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10509 (unsigned long) tdep->lowest_pc);
10512 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
/* Module initializer: register the gdbarch, objfile hooks, OS ABI
   sniffer, target descriptions, and all "set/show arm" commands.  */
10515 _initialize_arm_tdep (void)
10517 struct ui_file *stb;
10519 struct cmd_list_element *new_set, *new_show;
10520 const char *setname;
10521 const char *setdesc;
10522 const char *const *regnames;
10524 static char *helptext;
10525 char regdesc[1024], *rdptr = regdesc;
10526 size_t rest = sizeof (regdesc);
10528 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10530 arm_objfile_data_key
10531 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10533 /* Add ourselves to objfile event chain. */
10534 observer_attach_new_objfile (arm_exidx_new_objfile);
10536 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10538 /* Register an ELF OS ABI sniffer for ARM binaries. */
10539 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10540 bfd_target_elf_flavour,
10541 arm_elf_osabi_sniffer);
10543 /* Initialize the standard target descriptions. */
10544 initialize_tdesc_arm_with_m ();
10545 initialize_tdesc_arm_with_m_fpa_layout ();
10546 initialize_tdesc_arm_with_m_vfp_d16 ();
10547 initialize_tdesc_arm_with_iwmmxt ();
10548 initialize_tdesc_arm_with_vfpv2 ();
10549 initialize_tdesc_arm_with_vfpv3 ();
10550 initialize_tdesc_arm_with_neon ();
10552 /* Get the number of possible sets of register names defined in opcodes. */
10553 num_disassembly_options = get_arm_regname_num_options ();
10555 /* Add root prefix command for all "set arm"/"show arm" commands. */
10556 add_prefix_cmd ("arm", no_class, set_arm_command,
10557 _("Various ARM-specific commands."),
10558 &setarmcmdlist, "set arm ", 0, &setlist);
10560 add_prefix_cmd ("arm", no_class, show_arm_command,
10561 _("Various ARM-specific commands."),
10562 &showarmcmdlist, "show arm ", 0, &showlist);
10564 /* Sync the opcode insn printer with our register viewer. */
10565 parse_arm_disassembler_option ("reg-names-std");
10567 /* Initialize the array that will be passed to
10568 add_setshow_enum_cmd(). */
10569 valid_disassembly_styles
10570 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10571 for (i = 0; i < num_disassembly_options; i++)
/* NOTE(review): "®names" below looks like a mojibake of "&regnames"
   ("&reg" collapsed to the registered-trademark sign) -- the source
   encoding should be repaired.  */
10573 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10574 valid_disassembly_styles[i] = setname;
10575 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10578 /* When we find the default names, tell the disassembler to use
10580 if (!strcmp (setname, "std"))
10582 disassembly_style = setname;
10583 set_arm_regname_option (i);
10586 /* Mark the end of valid options. */
10587 valid_disassembly_styles[num_disassembly_options] = NULL;
10589 /* Create the help text. */
10590 stb = mem_fileopen ();
10591 fprintf_unfiltered (stb, "%s%s%s",
10592 _("The valid values are:\n"),
10594 _("The default is \"std\"."));
10595 helptext = ui_file_xstrdup (stb, NULL);
10596 ui_file_delete (stb);
10598 add_setshow_enum_cmd("disassembler", no_class,
10599 valid_disassembly_styles, &disassembly_style,
10600 _("Set the disassembly style."),
10601 _("Show the disassembly style."),
10603 set_disassembly_style_sfunc,
10604 NULL, /* FIXME: i18n: The disassembly style is
10606 &setarmcmdlist, &showarmcmdlist);
10608 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10609 _("Set usage of ARM 32-bit mode."),
10610 _("Show usage of ARM 32-bit mode."),
10611 _("When off, a 26-bit PC will be used."),
10613 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10615 &setarmcmdlist, &showarmcmdlist);
10617 /* Add a command to allow the user to force the FPU model. */
/* NOTE(review): "¤t_fp_model" below looks like a mojibake of
   "&current_fp_model"; also "typefrom" in the help string is missing a
   space ("type from") -- it is runtime text, so left untouched here.  */
10618 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10619 _("Set the floating point type."),
10620 _("Show the floating point type."),
10621 _("auto - Determine the FP typefrom the OS-ABI.\n\
10622 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10623 fpa - FPA co-processor (GCC compiled).\n\
10624 softvfp - Software FP with pure-endian doubles.\n\
10625 vfp - VFP co-processor."),
10626 set_fp_model_sfunc, show_fp_model,
10627 &setarmcmdlist, &showarmcmdlist);
10629 /* Add a command to allow the user to force the ABI. */
10630 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10632 _("Show the ABI."),
10633 NULL, arm_set_abi, arm_show_abi,
10634 &setarmcmdlist, &showarmcmdlist);
10636 /* Add two commands to allow the user to force the assumed
10638 add_setshow_enum_cmd ("fallback-mode", class_support,
10639 arm_mode_strings, &arm_fallback_mode_string,
10640 _("Set the mode assumed when symbols are unavailable."),
10641 _("Show the mode assumed when symbols are unavailable."),
10642 NULL, NULL, arm_show_fallback_mode,
10643 &setarmcmdlist, &showarmcmdlist);
10644 add_setshow_enum_cmd ("force-mode", class_support,
10645 arm_mode_strings, &arm_force_mode_string,
10646 _("Set the mode assumed even when symbols are available."),
10647 _("Show the mode assumed even when symbols are available."),
10648 NULL, NULL, arm_show_force_mode,
10649 &setarmcmdlist, &showarmcmdlist);
10651 /* Debugging flag. */
10652 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10653 _("Set ARM debugging."),
10654 _("Show ARM debugging."),
10655 _("When on, arm-specific debugging is enabled."),
10657 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10658 &setdebuglist, &showdebuglist);
10661 /* ARM-reversible process record data structures. */
/* Instruction widths used by the process-record decoder.  */
10663 #define ARM_INSN_SIZE_BYTES 4
10664 #define THUMB_INSN_SIZE_BYTES 2
10665 #define THUMB2_INSN_SIZE_BYTES 4
10668 /* Position of the bit within a 32-bit ARM instruction
10669 that defines whether the instruction is a load or store. */
10670 #define INSN_S_L_BIT_NUM 20
/* Allocate REGS for LENGTH register records copied from RECORD_BUF.
   NOTE(review): "®S[0]" below looks like a mojibake of "&REGS[0]"
   ("&REG" collapsed to the registered-trademark sign) -- the source
   encoding should be repaired.  */
10672 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10675 unsigned int reg_len = LENGTH; \
10678 REGS = XNEWVEC (uint32_t, reg_len); \
10679 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
/* Allocate MEMS for LENGTH memory records copied from RECORD_BUF.  */
10684 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10687 unsigned int mem_len = LENGTH; \
10690 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10691 memcpy(&MEMS->len, &RECORD_BUF[0], \
10692 sizeof(struct arm_mem_r) * LENGTH); \
10697 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10698 #define INSN_RECORDED(ARM_RECORD) \
10699 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10701 /* ARM memory record structure. */
10704 uint32_t len; /* Record length. */
10705 uint32_t addr; /* Memory address. */
10708 /* ARM instruction record contains opcode of current insn
10709 and execution state (before entry to decode_insn()),
10710 contains list of to-be-modified registers and
10711 memory blocks (on return from decode_insn()). */
10713 typedef struct insn_decode_record_t
10715 struct gdbarch *gdbarch;
10716 struct regcache *regcache;
10717 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10718 uint32_t arm_insn; /* Should accommodate thumb. */
10719 uint32_t cond; /* Condition code. */
10720 uint32_t opcode; /* Insn opcode. */
10721 uint32_t decode; /* Insn decode bits. */
10722 uint32_t mem_rec_count; /* No of mem records. */
10723 uint32_t reg_rec_count; /* No of reg records. */
10724 uint32_t *arm_regs; /* Registers to be saved for this record. */
10725 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10726 } insn_decode_record;
10729 /* Checks ARM SBZ and SBO mandatory fields. */
/* Tests the LEN bits of INSN starting at bit BIT_NUM against the
   mandatory should-be-one (SBO nonzero) / should-be-zero (SBO zero)
   encoding constraint.  NOTE(review): the return-type line and the
   rest of the body are elided from this listing.  */
10732 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10734 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
/* Result codes shared by the arm_record_* decoders.  */
10753 enum arm_record_result
10755 ARM_RECORD_SUCCESS = 0,
10756 ARM_RECORD_FAILURE = 1
/* NOTE(review): only the closing line of the arm_record_strx_t
   typedef is visible here; its enumerators (the STRH/STRD selectors
   used below) are elided from this listing.  */
10763 } arm_record_strx_t;
/* Record the side effects of the ARM "misc" store forms (STRH/STRD
   and friends) for process record.  Registers the insn modifies go
   into RECORD_BUF and {length, address} pairs for the memory it
   overwrites go into RECORD_BUF_MEM; counts are left in
   ARM_INSN_R->reg_rec_count / mem_rec_count.  STR_TYPE selects
   between the STRH and STRD record layouts.
   NOTE(review): this listing is elided (gaps in the embedded line
   numbers); braces, else lines and the return are missing from
   view.  */
10774 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10775 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10778 struct regcache *reg_cache = arm_insn_r->regcache;
10779 ULONGEST u_regval[2]= {0};
10781 uint32_t reg_src1 = 0, reg_src2 = 0;
10782 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10783 uint32_t opcode1 = 0;
10785 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10786 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10787 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
/* Opcode (bits 21..24) values 14 and 10: immediate offset forms.  */
10790 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10792 /* 1) Handle misc store, immediate offset. */
10793 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10794 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10795 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10796 regcache_raw_read_unsigned (reg_cache, reg_src1,
10798 if (ARM_PC_REGNUM == reg_src1)
10800 /* If R15 was used as Rn, hence current PC+8. */
10801 u_regval[0] = u_regval[0] + 8;
10803 offset_8 = (immed_high << 4) | immed_low;
10804 /* Calculate target store address. */
10805 if (14 == arm_insn_r->opcode)
10807 tgt_mem_addr = u_regval[0] + offset_8;
10811 tgt_mem_addr = u_regval[0] - offset_8;
10813 if (ARM_RECORD_STRH == str_type)
10815 record_buf_mem[0] = 2;
10816 record_buf_mem[1] = tgt_mem_addr;
10817 arm_insn_r->mem_rec_count = 1;
10819 else if (ARM_RECORD_STRD == str_type)
/* STRD overwrites two consecutive words.  */
10821 record_buf_mem[0] = 4;
10822 record_buf_mem[1] = tgt_mem_addr;
10823 record_buf_mem[2] = 4;
10824 record_buf_mem[3] = tgt_mem_addr + 4;
10825 arm_insn_r->mem_rec_count = 2;
10828 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10830 /* 2) Store, register offset. */
/* reg_src1 is Rm (bits 0..3), read into u_regval[0]; reg_src2 is Rn
   (bits 16..19), read into u_regval[1].  */
10832 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10834 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10835 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10836 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10837 if (15 == reg_src2)
10839 /* If R15 was used as Rn, hence current PC+8. */
/* NOTE(review): the PC test is on Rn (reg_src2) but the +8 is applied
   to u_regval[0], which holds Rm's value; u_regval[1] looks like the
   intended target -- confirm against upstream.  */
10840 u_regval[0] = u_regval[0] + 8;
10842 /* Calculate target store address, Rn +/- Rm, register offset. */
10843 if (12 == arm_insn_r->opcode)
10845 tgt_mem_addr = u_regval[0] + u_regval[1];
10849 tgt_mem_addr = u_regval[1] - u_regval[0];
10851 if (ARM_RECORD_STRH == str_type)
10853 record_buf_mem[0] = 2;
10854 record_buf_mem[1] = tgt_mem_addr;
10855 arm_insn_r->mem_rec_count = 1;
10857 else if (ARM_RECORD_STRD == str_type)
10859 record_buf_mem[0] = 4;
10860 record_buf_mem[1] = tgt_mem_addr;
10861 record_buf_mem[2] = 4;
10862 record_buf_mem[3] = tgt_mem_addr + 4;
10863 arm_insn_r->mem_rec_count = 2;
10866 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10867 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10869 /* 3) Store, immediate pre-indexed. */
10870 /* 5) Store, immediate post-indexed. */
10871 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10872 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10873 offset_8 = (immed_high << 4) | immed_low;
10874 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10875 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10876 /* Calculate target store address, Rn +/- Rm, register offset. */
10877 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10879 tgt_mem_addr = u_regval[0] + offset_8;
10883 tgt_mem_addr = u_regval[0] - offset_8;
10885 if (ARM_RECORD_STRH == str_type)
10887 record_buf_mem[0] = 2;
10888 record_buf_mem[1] = tgt_mem_addr;
10889 arm_insn_r->mem_rec_count = 1;
10891 else if (ARM_RECORD_STRD == str_type)
10893 record_buf_mem[0] = 4;
10894 record_buf_mem[1] = tgt_mem_addr;
10895 record_buf_mem[2] = 4;
10896 record_buf_mem[3] = tgt_mem_addr + 4;
10897 arm_insn_r->mem_rec_count = 2;
10899 /* Record Rn also as it changes. */
10900 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10901 arm_insn_r->reg_rec_count = 1;
10903 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10904 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10906 /* 4) Store, register pre-indexed. */
10907 /* 6) Store, register post -indexed. */
10908 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10909 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10910 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10911 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10912 /* Calculate target store address, Rn +/- Rm, register offset. */
10913 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10915 tgt_mem_addr = u_regval[0] + u_regval[1];
10919 tgt_mem_addr = u_regval[1] - u_regval[0];
10921 if (ARM_RECORD_STRH == str_type)
10923 record_buf_mem[0] = 2;
10924 record_buf_mem[1] = tgt_mem_addr;
10925 arm_insn_r->mem_rec_count = 1;
10927 else if (ARM_RECORD_STRD == str_type)
10929 record_buf_mem[0] = 4;
10930 record_buf_mem[1] = tgt_mem_addr;
10931 record_buf_mem[2] = 4;
10932 record_buf_mem[3] = tgt_mem_addr + 4;
10933 arm_insn_r->mem_rec_count = 2;
10935 /* Record Rn also as it changes. */
10936 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10937 arm_insn_r->reg_rec_count = 1;
10942 /* Handling ARM extension space insns. */
/* Record the registers/memory touched by ARM "extension space"
   instructions: the unconditional space (BLX(1), PLD, coprocessor),
   the arithmetic space (MUL/MLA and long multiplies), the control
   space (MRS/MSR, BX, CLZ, BLX(2), saturating add/sub, BKPT, signed
   multiplies) and the load/store space (SWP/SWPB, STRH/LDRD/STRD,
   misc loads).  Results are committed via REG_ALLOC/MEM_ALLOC at the
   end.  NOTE(review): this listing is elided; many braces, else
   lines and return statements are missing from view.  */
10945 arm_record_extension_space (insn_decode_record *arm_insn_r)
10947 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10948 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10949 uint32_t record_buf[8], record_buf_mem[8];
10950 uint32_t reg_src1 = 0;
10951 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10952 struct regcache *reg_cache = arm_insn_r->regcache;
10953 ULONGEST u_regval = 0;
10955 gdb_assert (!INSN_RECORDED(arm_insn_r));
10956 /* Handle unconditional insn extension space. */
10958 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10959 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10960 if (arm_insn_r->cond)
10962 /* PLD has no affect on architectural state, it just affects
10964 if (5 == ((opcode1 & 0xE0) >> 5))
/* BLX(1): CPSR (Thumb bit) and the return address in LR change.  */
10967 record_buf[0] = ARM_PS_REGNUM;
10968 record_buf[1] = ARM_LR_REGNUM;
10969 arm_insn_r->reg_rec_count = 2;
10971 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10975 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10976 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10979 /* Undefined instruction on ARM V5; need to handle if later
10980 versions define it. */
10983 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10984 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10985 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10987 /* Handle arithmetic insn extension space. */
10988 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10989 && !INSN_RECORDED(arm_insn_r))
10991 /* Handle MLA(S) and MUL(S). */
10992 if (0 <= insn_op1 && 3 >= insn_op1)
10994 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10995 record_buf[1] = ARM_PS_REGNUM;
10996 arm_insn_r->reg_rec_count = 2;
10998 else if (4 <= insn_op1 && 15 >= insn_op1)
11000 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11001 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11002 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11003 record_buf[2] = ARM_PS_REGNUM;
11004 arm_insn_r->reg_rec_count = 3;
11008 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11009 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11010 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11012 /* Handle control insn extension space. */
11014 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11015 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11017 if (!bit (arm_insn_r->arm_insn,25))
11019 if (!bits (arm_insn_r->arm_insn, 4, 7))
11021 if ((0 == insn_op1) || (2 == insn_op1))
/* MRS: copies CPSR/SPSR into the destination register.  */
11024 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11025 arm_insn_r->reg_rec_count = 1;
11027 else if (1 == insn_op1)
11029 /* CSPR is going to be changed. */
11030 record_buf[0] = ARM_PS_REGNUM;
11031 arm_insn_r->reg_rec_count = 1;
11033 else if (3 == insn_op1)
11035 /* SPSR is going to be changed. */
11036 /* We need to get SPSR value, which is yet to be done. */
11037 printf_unfiltered (_("Process record does not support "
11038 "instruction 0x%0x at address %s.\n"),
11039 arm_insn_r->arm_insn,
11040 paddress (arm_insn_r->gdbarch,
11041 arm_insn_r->this_addr));
11045 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
/* BX: may toggle the Thumb bit, so CPSR changes.  */
11050 record_buf[0] = ARM_PS_REGNUM;
11051 arm_insn_r->reg_rec_count = 1;
11053 else if (3 == insn_op1)
/* CLZ: writes only the destination register.  */
11056 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11057 arm_insn_r->reg_rec_count = 1;
11060 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
/* BLX(2): CPSR and LR change.  */
11063 record_buf[0] = ARM_PS_REGNUM;
11064 record_buf[1] = ARM_LR_REGNUM;
11065 arm_insn_r->reg_rec_count = 2;
11067 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11069 /* QADD, QSUB, QDADD, QDSUB */
11070 record_buf[0] = ARM_PS_REGNUM;
11071 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11072 arm_insn_r->reg_rec_count = 2;
11074 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
/* BKPT: CPSR and LR are recorded; SPSR restore is unimplemented, so
   a diagnostic is printed as well.  */
11077 record_buf[0] = ARM_PS_REGNUM;
11078 record_buf[1] = ARM_LR_REGNUM;
11079 arm_insn_r->reg_rec_count = 2;
11081 /* Save SPSR also;how? */
11082 printf_unfiltered (_("Process record does not support "
11083 "instruction 0x%0x at address %s.\n"),
11084 arm_insn_r->arm_insn,
11085 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11088 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11089 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11090 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11091 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11094 if (0 == insn_op1 || 1 == insn_op1)
11096 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11097 /* We dont do optimization for SMULW<y> where we
11099 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11100 record_buf[1] = ARM_PS_REGNUM;
11101 arm_insn_r->reg_rec_count = 2;
11103 else if (2 == insn_op1)
/* SMLAL<x><y>: writes both RdLo (bits 12..15) and RdHi (16..19).  */
11106 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11107 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11108 arm_insn_r->reg_rec_count = 2;
11110 else if (3 == insn_op1)
/* SMUL<x><y>: writes the destination register only.  */
11113 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11114 arm_insn_r->reg_rec_count = 1;
11120 /* MSR : immediate form. */
11123 /* CSPR is going to be changed. */
11124 record_buf[0] = ARM_PS_REGNUM;
11125 arm_insn_r->reg_rec_count = 1;
11127 else if (3 == insn_op1)
11129 /* SPSR is going to be changed. */
11130 /* we need to get SPSR value, which is yet to be done */
11131 printf_unfiltered (_("Process record does not support "
11132 "instruction 0x%0x at address %s.\n"),
11133 arm_insn_r->arm_insn,
11134 paddress (arm_insn_r->gdbarch,
11135 arm_insn_r->this_addr));
11141 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11142 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11143 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11145 /* Handle load/store insn extension space. */
11147 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11148 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11149 && !INSN_RECORDED(arm_insn_r))
11154 /* These insn, changes register and memory as well. */
11155 /* SWP or SWPB insn. */
11156 /* Get memory address given by Rn. */
11157 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11158 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11159 /* SWP insn ?, swaps word. */
11160 if (8 == arm_insn_r->opcode)
11162 record_buf_mem[0] = 4;
11166 /* SWPB insn, swaps only byte. */
11167 record_buf_mem[0] = 1;
11169 record_buf_mem[1] = u_regval;
11170 arm_insn_r->mem_rec_count = 1;
11171 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11172 arm_insn_r->reg_rec_count = 1;
11174 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
/* STRH: delegate the memory-record bookkeeping to arm_record_strx.  */
11177 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11180 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
/* LDRD: loads a consecutive register pair Rt, Rt+1.  */
11183 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11184 record_buf[1] = record_buf[0] + 1;
11185 arm_insn_r->reg_rec_count = 2;
11187 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
/* STRD: also delegated to arm_record_strx.  */
11190 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11193 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11195 /* LDRH, LDRSB, LDRSH. */
11196 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11197 arm_insn_r->reg_rec_count = 1;
11202 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11203 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11204 && !INSN_RECORDED(arm_insn_r))
11207 /* Handle coprocessor insn extension space. */
11210 /* To be done for ARMv5 and later; as of now we return -1. */
/* NOTE(review): the message below prints "instruction x%0x" whereas
   the other diagnostics in this file use "0x%0x"; looks like a
   dropped "0" -- confirm against upstream before relying on it.  */
11212 printf_unfiltered (_("Process record does not support instruction x%0x "
11213 "at address %s.\n"),arm_insn_r->arm_insn,
11214 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11217 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11218 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11223 /* Handling opcode 000 insns. */
/* Record side effects of ARM instructions in the top-level opcode-000
   group: multiplies (MUL/MLA and long forms), misc loads, MSR,
   SWP/SWPB, BLX/BX, BKPT, CLZ, MRS, and plain data-processing insns.
   Results are committed via REG_ALLOC/MEM_ALLOC at the end.
   NOTE(review): this listing is elided; braces, else lines and the
   return are missing from view.  */
11226 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11228 struct regcache *reg_cache = arm_insn_r->regcache;
11229 uint32_t record_buf[8], record_buf_mem[8];
11230 ULONGEST u_regval[2] = {0};
11232 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11233 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11234 uint32_t opcode1 = 0;
11236 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11237 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11238 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11240 /* Data processing insn /multiply insn. */
11241 if (9 == arm_insn_r->decode
11242 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11243 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11245 /* Handle multiply instructions. */
11246 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11247 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11249 /* Handle MLA and MUL. */
11250 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11251 record_buf[1] = ARM_PS_REGNUM;
11252 arm_insn_r->reg_rec_count = 2;
11254 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11256 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11257 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11258 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11259 record_buf[2] = ARM_PS_REGNUM;
11260 arm_insn_r->reg_rec_count = 3;
11263 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11264 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11266 /* Handle misc load insns, as 20th bit (L = 1). */
11267 /* LDR insn has a capability to do branching, if
11268 MOV LR, PC is precceded by LDR insn having Rn as R15
11269 in that case, it emulates branch and link insn, and hence we
11270 need to save CSPR and PC as well. I am not sure this is right
11271 place; as opcode = 010 LDR insn make this happen, if R15 was
11273 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11274 if (15 != reg_dest)
11276 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11277 arm_insn_r->reg_rec_count = 1;
/* Rd == PC: the load branches, so CPSR must be recorded too.  */
11281 record_buf[0] = reg_dest;
11282 record_buf[1] = ARM_PS_REGNUM;
11283 arm_insn_r->reg_rec_count = 2;
11286 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11287 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11288 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11289 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11291 /* Handle MSR insn. */
11292 if (9 == arm_insn_r->opcode)
11294 /* CSPR is going to be changed. */
11295 record_buf[0] = ARM_PS_REGNUM;
11296 arm_insn_r->reg_rec_count = 1;
11300 /* SPSR is going to be changed. */
11301 /* How to read SPSR value? */
11302 printf_unfiltered (_("Process record does not support instruction "
11303 "0x%0x at address %s.\n"),
11304 arm_insn_r->arm_insn,
11305 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11309 else if (9 == arm_insn_r->decode
11310 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11311 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11313 /* Handling SWP, SWPB. */
11314 /* These insn, changes register and memory as well. */
11315 /* SWP or SWPB insn. */
11317 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11318 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11319 /* SWP insn ?, swaps word. */
11320 if (8 == arm_insn_r->opcode)
11322 record_buf_mem[0] = 4;
11326 /* SWPB insn, swaps only byte. */
11327 record_buf_mem[0] = 1;
11329 record_buf_mem[1] = u_regval[0];
11330 arm_insn_r->mem_rec_count = 1;
11331 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11332 arm_insn_r->reg_rec_count = 1;
11334 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11335 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11337 /* Handle BLX, branch and link/exchange. */
11338 if (9 == arm_insn_r->opcode)
11340 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11341 and R14 stores the return address. */
11342 record_buf[0] = ARM_PS_REGNUM;
11343 record_buf[1] = ARM_LR_REGNUM;
11344 arm_insn_r->reg_rec_count = 2;
11347 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11349 /* Handle enhanced software breakpoint insn, BKPT. */
11350 /* CPSR is changed to be executed in ARM state, disabling normal
11351 interrupts, entering abort mode. */
11352 /* According to high vector configuration PC is set. */
11353 /* user hit breakpoint and type reverse, in
11354 that case, we need to go back with previous CPSR and
11355 Program Counter. */
11356 record_buf[0] = ARM_PS_REGNUM;
11357 record_buf[1] = ARM_LR_REGNUM;
11358 arm_insn_r->reg_rec_count = 2;
11360 /* Save SPSR also; how? */
11361 printf_unfiltered (_("Process record does not support instruction "
11362 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11363 paddress (arm_insn_r->gdbarch,
11364 arm_insn_r->this_addr));
11367 else if (11 == arm_insn_r->decode
11368 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11370 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11372 /* Handle str(x) insn */
11373 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11376 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11377 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11379 /* Handle BX, branch and link/exchange. */
11380 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11381 record_buf[0] = ARM_PS_REGNUM;
11382 arm_insn_r->reg_rec_count = 1;
11384 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11385 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11386 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11388 /* Count leading zeros: CLZ. */
11389 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11390 arm_insn_r->reg_rec_count = 1;
11392 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11393 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11394 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11395 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11398 /* Handle MRS insn. */
11399 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11400 arm_insn_r->reg_rec_count = 1;
11402 else if (arm_insn_r->opcode <= 15)
11404 /* Normal data processing insns. */
11405 /* Out of 11 shifter operands mode, all the insn modifies destination
11406 register, which is specified by 13-16 decode. */
11407 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11408 record_buf[1] = ARM_PS_REGNUM;
11409 arm_insn_r->reg_rec_count = 2;
11416 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11417 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11421 /* Handling opcode 001 insns. */
/* Record side effects of ARM data-processing (immediate) insns
   (top-level opcode 001): MSR immediate forms plus ordinary immediate
   data processing, which writes Rd and CPSR.  NOTE(review): this
   listing is elided; some lines (braces, the SPSR diagnostic, the
   return) are missing from view.  */
11424 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11426 uint32_t record_buf[8], record_buf_mem[8];
11428 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11429 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11431 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11432 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11433 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11436 /* Handle MSR insn. */
11437 if (9 == arm_insn_r->opcode)
11439 /* CSPR is going to be changed. */
11440 record_buf[0] = ARM_PS_REGNUM;
11441 arm_insn_r->reg_rec_count = 1;
11445 /* SPSR is going to be changed. */
11448 else if (arm_insn_r->opcode <= 15)
11450 /* Normal data processing insns. */
11451 /* Out of 11 shifter operands mode, all the insn modifies destination
11452 register, which is specified by 13-16 decode. */
11453 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11454 record_buf[1] = ARM_PS_REGNUM;
11455 arm_insn_r->reg_rec_count = 2;
11462 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11463 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11467 /* Handle ARM mode instructions with opcode 010. */
/* Record side effects of immediate-offset load/store insns (opcode
   010).  Loads (L bit set) record the destination register, CPSR when
   Rt is PC, and the base on writeback; stores compute the affected
   address from Rn +/- imm12 and record the 1- or 4-byte memory span,
   plus the base on writeback.  NOTE(review): this listing is elided;
   braces and the declaration of u_regval are missing from view.  */
11470 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11472 struct regcache *reg_cache = arm_insn_r->regcache;
11474 uint32_t reg_base , reg_dest;
11475 uint32_t offset_12, tgt_mem_addr;
11476 uint32_t record_buf[8], record_buf_mem[8];
11477 unsigned char wback;
11480 /* Calculate wback. */
/* Writeback happens for post-indexed (P == 0) or pre-indexed with
   W == 1.  */
11481 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11482 || (bit (arm_insn_r->arm_insn, 21) == 1);
11484 arm_insn_r->reg_rec_count = 0;
11485 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11487 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11489 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11492 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11493 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11495 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11496 preceeds a LDR instruction having R15 as reg_base, it
11497 emulates a branch and link instruction, and hence we need to save
11498 CPSR and PC as well. */
11499 if (ARM_PC_REGNUM == reg_dest)
11500 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11502 /* If wback is true, also save the base register, which is going to be
11505 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11509 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11511 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11512 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11514 /* Handle bit U. */
11515 if (bit (arm_insn_r->arm_insn, 23))
11517 /* U == 1: Add the offset. */
11518 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11522 /* U == 0: subtract the offset. */
11523 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11526 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11528 if (bit (arm_insn_r->arm_insn, 22))
11530 /* STRB and STRBT: 1 byte. */
11531 record_buf_mem[0] = 1;
11535 /* STR and STRT: 4 bytes. */
11536 record_buf_mem[0] = 4;
11539 /* Handle bit P. */
/* P == 1: offset applied before the access; P == 0: store goes to
   the unmodified base address.  */
11540 if (bit (arm_insn_r->arm_insn, 24))
11541 record_buf_mem[1] = tgt_mem_addr;
11543 record_buf_mem[1] = (uint32_t) u_regval;
11545 arm_insn_r->mem_rec_count = 1;
11547 /* If wback is true, also save the base register, which is going to be
11550 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11553 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11554 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11558 /* Handling opcode 011 insns. */
/* Record side effects of register-offset / scaled-register load/store
   insns (opcode 011).  Loads record the destination (plus CPSR when
   Rd is PC).  Stores compute the effective address from
   Rn +/- (optionally shifted) Rm, record the 1- or 4-byte memory span
   written, and record Rn for the pre/post-indexed writeback forms.
   NOTE(review): this listing is elided; switch-case labels, braces,
   else lines and the declarations of s_word are missing from view.  */
11561 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11563 struct regcache *reg_cache = arm_insn_r->regcache;
11565 uint32_t shift_imm = 0;
11566 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11567 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11568 uint32_t record_buf[8], record_buf_mem[8];
11571 ULONGEST u_regval[2];
11573 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11574 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11576 /* Handle enhanced store insns and LDRD DSP insn,
11577 order begins according to addressing modes for store insns
11581 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11583 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11584 /* LDR insn has a capability to do branching, if
11585 MOV LR, PC is precedded by LDR insn having Rn as R15
11586 in that case, it emulates branch and link insn, and hence we
11587 need to save CSPR and PC as well. */
11588 if (15 != reg_dest)
11590 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11591 arm_insn_r->reg_rec_count = 1;
11595 record_buf[0] = reg_dest;
11596 record_buf[1] = ARM_PS_REGNUM;
11597 arm_insn_r->reg_rec_count = 2;
/* Bits 4..11 all zero means plain (unshifted) register offset.  */
11602 if (! bits (arm_insn_r->arm_insn, 4, 11))
11604 /* Store insn, register offset and register pre-indexed,
11605 register post-indexed. */
/* reg_src1 is Rm (read into u_regval[0]); reg_src2 is Rn (read into
   u_regval[1]).  */
11607 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11609 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11610 regcache_raw_read_unsigned (reg_cache, reg_src1
11612 regcache_raw_read_unsigned (reg_cache, reg_src2
11614 if (15 == reg_src2)
11616 /* If R15 was used as Rn, hence current PC+8. */
11617 /* Pre-indexed mode doesnt reach here ; illegal insn. */
/* NOTE(review): as in arm_record_strx, the PC test is on Rn
   (reg_src2) but the +8 is applied to u_regval[0], which holds Rm's
   value -- confirm against upstream.  */
11618 u_regval[0] = u_regval[0] + 8;
11620 /* Calculate target store address, Rn +/- Rm, register offset. */
11622 if (bit (arm_insn_r->arm_insn, 23))
11624 tgt_mem_addr = u_regval[0] + u_regval[1];
11628 tgt_mem_addr = u_regval[1] - u_regval[0];
11631 switch (arm_insn_r->opcode)
/* Word-sized store opcodes (case labels elided in this listing).  */
11645 record_buf_mem[0] = 4;
/* Byte-sized store opcodes.  */
11660 record_buf_mem[0] = 1;
11664 gdb_assert_not_reached ("no decoding pattern found");
11667 record_buf_mem[1] = tgt_mem_addr;
11668 arm_insn_r->mem_rec_count = 1;
11670 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11671 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11672 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11673 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11674 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11675 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11678 /* Rn is going to be changed in pre-indexed mode and
11679 post-indexed mode as well. */
11680 record_buf[0] = reg_src2;
11681 arm_insn_r->reg_rec_count = 1;
11686 /* Store insn, scaled register offset; scaled pre-indexed. */
/* offset_12 first holds the 2-bit shift type (LSL/LSR/ASR/ROR).  */
11687 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11689 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11691 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11692 /* Get shift_imm. */
11693 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11694 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11695 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11696 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11697 /* Offset_12 used as shift. */
11701 /* Offset_12 used as index. */
11702 offset_12 = u_regval[0] << shift_imm;
11706 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
/* ASR with shift_imm == 0 encodes an arithmetic shift by 32: the
   result is all ones or all zeros depending on Rm's sign bit.  */
11712 if (bit (u_regval[0], 31))
11714 offset_12 = 0xFFFFFFFF;
11723 /* This is arithmetic shift. */
11724 offset_12 = s_word >> shift_imm;
/* ROR with shift_imm == 0 is RRX: rotate right with extend through
   the carry flag.  */
11731 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11733 /* Get C flag value and shift it by 31. */
11734 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11735 | (u_regval[0]) >> 1);
/* NOTE(review): the rotate below uses sizeof(uint32_t) (== 4) as the
   complementary shift count; a 32-bit ROR would normally use
   (32 - shift_imm) -- confirm against upstream.  */
11739 offset_12 = (u_regval[0] >> shift_imm) \
11741 (sizeof(uint32_t) - shift_imm));
11746 gdb_assert_not_reached ("no decoding pattern found");
11750 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11752 if (bit (arm_insn_r->arm_insn, 23))
11754 tgt_mem_addr = u_regval[1] + offset_12;
11758 tgt_mem_addr = u_regval[1] - offset_12;
11761 switch (arm_insn_r->opcode)
11775 record_buf_mem[0] = 4;
11790 record_buf_mem[0] = 1;
11794 gdb_assert_not_reached ("no decoding pattern found");
11797 record_buf_mem[1] = tgt_mem_addr;
11798 arm_insn_r->mem_rec_count = 1;
11800 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11801 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11802 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11803 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11804 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11805 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11808 /* Rn is going to be changed in register scaled pre-indexed
11809 mode,and scaled post indexed mode. */
11810 record_buf[0] = reg_src2;
11811 arm_insn_r->reg_rec_count = 1;
11816 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11817 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11821 /* Handle ARM mode instructions with opcode 100. */
/* Record side effects of block transfer insns (LDM/STM, opcode 100).
   Loads record every register in the list (plus the base on writeback
   and CPSR); stores record the covered memory range whose start is
   derived from the addressing mode (DA/IA/DB/IB), plus the base on
   writeback.  NOTE(review): this listing is elided; the case labels
   of the addressing-mode switch and the declarations of wback and
   u_regval are missing from view.  */
11824 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11826 struct regcache *reg_cache = arm_insn_r->regcache;
11827 uint32_t register_count = 0, register_bits;
11828 uint32_t reg_base, addr_mode;
11829 uint32_t record_buf[24], record_buf_mem[48];
11833 /* Fetch the list of registers. */
11834 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11835 arm_insn_r->reg_rec_count = 0;
11837 /* Fetch the base register that contains the address we are loading data
11839 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11841 /* Calculate wback. */
11842 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11844 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11846 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11848 /* Find out which registers are going to be loaded from memory. */
11849 while (register_bits)
11851 if (register_bits & 0x00000001)
11852 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11853 register_bits = register_bits >> 1;
11858 /* If wback is true, also save the base register, which is going to be
11861 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11863 /* Save the CPSR register. */
11864 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11868 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11870 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11872 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11874 /* Find out how many registers are going to be stored to memory. */
11875 while (register_bits)
11877 if (register_bits & 0x00000001)
11879 register_bits = register_bits >> 1;
11884 /* STMDA (STMED): Decrement after. */
11886 record_buf_mem[1] = (uint32_t) u_regval
11887 - register_count * INT_REGISTER_SIZE + 4;
11889 /* STM (STMIA, STMEA): Increment after. */
11891 record_buf_mem[1] = (uint32_t) u_regval;
11893 /* STMDB (STMFD): Decrement before. */
11895 record_buf_mem[1] = (uint32_t) u_regval
11896 - register_count * INT_REGISTER_SIZE;
11898 /* STMIB (STMFA): Increment before. */
11900 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11903 gdb_assert_not_reached ("no decoding pattern found");
/* One contiguous memory record covering the whole register block.  */
11907 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11908 arm_insn_r->mem_rec_count = 1;
11910 /* If wback is true, also save the base register, which is going to be
11913 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11916 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11917 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11921 /* Handling opcode 101 insns. */
/* Record side effects of B/BL (opcode 101).  Only BL (bit 24 set)
   modifies architectural state, by writing the return address into
   LR; plain B records nothing.  */
11924 arm_record_b_bl (insn_decode_record *arm_insn_r)
11926 uint32_t record_buf[8];
11928 /* Handle B, BL, BLX(1) insns. */
11929 /* B simply branches so we do nothing here. */
11930 /* Note: BLX(1) doesnt fall here but instead it falls into
11931 extension space. */
11932 if (bit (arm_insn_r->arm_insn, 24))
11934 record_buf[0] = ARM_LR_REGNUM;
11935 arm_insn_r->reg_rec_count = 1;
11938 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11943 /* Handling opcode 110 insns. */
/* Fallback record handler: reports that the instruction at this
   address is not supported by process record.  */
11946 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11948 printf_unfiltered (_("Process record does not support instruction "
11949 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11950 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr))
11955 /* Record handler for vector data transfer instructions. */
/* Record handler for 8/16/32-bit transfers between the ARM core and the
   VFP/Advanced SIMD extension registers (VMOV, VMRS, VMSR, VDUP).
   The destination register(s) to save are selected from bit L (20),
   bit C (8) and field A (bits 21..23) of the instruction.  */
11958 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11960 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11961 uint32_t record_buf[4];
11963 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
11964 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
/* reg_v and bits_a both read bits 21..23: bits_a is used for decoding,
   reg_v is reused below (combined with bit 7) as a D-register index.  */
11965 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11966 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11967 bit_l = bit (arm_insn_r->arm_insn, 20);
11968 bit_c = bit (arm_insn_r->arm_insn, 8);
11970 /* Handle VMOV instruction. */
11971 if (bit_l && bit_c)
11973 record_buf[0] = reg_t;
11974 arm_insn_r->reg_rec_count = 1;
11976 else if (bit_l && !bit_c)
11978 /* Handle VMOV instruction. */
11979 if (bits_a == 0x00)
11981 if (bit (arm_insn_r->arm_insn, 20))
11982 record_buf[0] = reg_t;
/* Destination is a VFP single register, numbered after the core set.  */
11984 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
11987 arm_insn_r->reg_rec_count = 1;
11989 /* Handle VMRS instruction. */
11990 else if (bits_a == 0x07)
/* VMRS with Rt == 15 transfers FPSCR flags into APSR (CPSR).  */
11993 reg_t = ARM_PS_REGNUM;
11995 record_buf[0] = reg_t;
11996 arm_insn_r->reg_rec_count = 1;
11999 else if (!bit_l && !bit_c)
12001 /* Handle VMOV instruction. */
12002 if (bits_a == 0x00)
12004 if (bit (arm_insn_r->arm_insn, 20))
12005 record_buf[0] = reg_t;
12007 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
12010 arm_insn_r->reg_rec_count = 1;
12012 /* Handle VMSR instruction. */
12013 else if (bits_a == 0x07)
12015 record_buf[0] = ARM_FPSCR_REGNUM;
12016 arm_insn_r->reg_rec_count = 1;
12019 else if (!bit_l && bit_c)
12021 /* Handle VMOV instruction. */
12022 if (!(bits_a & 0x04))
12024 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12026 arm_insn_r->reg_rec_count = 1;
12028 /* Handle VDUP instruction. */
/* Q-register VDUP (bit 21 set) writes a D-register pair.  */
12031 if (bit (arm_insn_r->arm_insn, 21))
12033 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12034 record_buf[0] = reg_v + ARM_D0_REGNUM;
12035 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12036 arm_insn_r->reg_rec_count = 2;
12040 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12041 record_buf[0] = reg_v + ARM_D0_REGNUM;
12042 arm_insn_r->reg_rec_count = 1;
12047 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12051 /* Record handler for extension register load/store instructions. */
/* Record handler for extension register load/store instructions
   (VMOV between two core regs and extension regs, VSTM/VPUSH, VLDM,
   VSTR, VLDR).  Registers written are recorded via REG_ALLOC and
   memory stores via MEM_ALLOC as (length, address) pairs.

   Fixes vs. previous revision:
   - The VSTM/VSTR immediate offset is imm8 words, i.e. imm8 << 2
     bytes (imm32 = ZeroExtend(imm8:'00')); it was wrongly shifted
     by 24, producing bogus store addresses.
   - Memory records were emitted as (address, length), the reverse of
     the (length, address) convention used by every other handler in
     this file and expected by the MEM_ALLOC consumers.  */
12054 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12056 uint32_t opcode, single_reg;
12057 uint8_t op_vldm_vstm;
12058 uint32_t record_buf[8], record_buf_mem[128];
12059 ULONGEST u_regval = 0;
12061 struct regcache *reg_cache = arm_insn_r->regcache;
12062 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12064 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12065 single_reg = bit (arm_insn_r->arm_insn, 8);
12066 op_vldm_vstm = opcode & 0x1b;
12068 /* Handle VMOV instructions. */
12069 if ((opcode & 0x1e) == 0x04)
12071 if (bit (arm_insn_r->arm_insn, 4))
/* Transfer to two ARM core registers.  */
12073 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12074 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12075 arm_insn_r->reg_rec_count = 2;
12079 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12080 | bit (arm_insn_r->arm_insn, 5);
/* Transfer to a pair of single-precision registers.  */
12084 record_buf[0] = num_regs + reg_m;
12085 record_buf[1] = num_regs + reg_m + 1;
12086 arm_insn_r->reg_rec_count = 2;
/* Transfer to one double-precision register.  */
12090 record_buf[0] = reg_m + ARM_D0_REGNUM;
12091 arm_insn_r->reg_rec_count = 1;
12095 /* Handle VSTM and VPUSH instructions. */
12096 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12097 || op_vldm_vstm == 0x12)
12099 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12100 uint32_t memory_index = 0;
12102 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12103 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12104 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
/* imm32 = ZeroExtend(imm8:'00'): the offset is imm8 words.  */
12105 imm_off32 = imm_off8 << 2;
12106 memory_count = imm_off8;
12108 if (bit (arm_insn_r->arm_insn, 23))
12109 start_address = u_regval;
12111 start_address = u_regval - imm_off32;
12113 if (bit (arm_insn_r->arm_insn, 21))
/* Writeback: the base register is modified too.  */
12115 record_buf[0] = reg_rn;
12116 arm_insn_r->reg_rec_count = 1;
12119 while (memory_count > 0)
/* Memory records are (length, address) pairs.  */
12123 record_buf_mem[memory_index] = 4;
12124 record_buf_mem[memory_index + 1] = start_address;
12125 start_address = start_address + 4;
12126 memory_index = memory_index + 2;
12130 record_buf_mem[memory_index] = 4;
12131 record_buf_mem[memory_index + 1] = start_address;
12132 record_buf_mem[memory_index + 2] = 4;
12133 record_buf_mem[memory_index + 3] = start_address + 4;
12134 start_address = start_address + 8;
12135 memory_index = memory_index + 4;
12139 arm_insn_r->mem_rec_count = (memory_index >> 1);
12141 /* Handle VLDM instructions. */
12142 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12143 || op_vldm_vstm == 0x13)
12145 uint32_t reg_count, reg_vd;
12146 uint32_t reg_index = 0;
12148 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12149 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12152 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12154 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12156 if (bit (arm_insn_r->arm_insn, 21))
/* Writeback: record the base register.  */
12157 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12159 while (reg_count > 0)
12162 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12164 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12168 arm_insn_r->reg_rec_count = reg_index;
12170 /* VSTR Vector store register. */
12171 else if ((opcode & 0x13) == 0x10)
12173 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12174 uint32_t memory_index = 0;
12176 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12177 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12178 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
/* imm32 = ZeroExtend(imm8:'00'): the offset is imm8 words.  */
12179 imm_off32 = imm_off8 << 2;
12180 memory_count = imm_off8;
12182 if (bit (arm_insn_r->arm_insn, 23))
12183 start_address = u_regval + imm_off32;
12185 start_address = u_regval - imm_off32;
/* Memory records are (length, address) pairs.  */
12189 record_buf_mem[memory_index] = 4;
12190 record_buf_mem[memory_index + 1] = start_address;
12191 arm_insn_r->mem_rec_count = 1;
12195 record_buf_mem[memory_index] = 4;
12196 record_buf_mem[memory_index + 1] = start_address;
12197 record_buf_mem[memory_index + 2] = 4;
12198 record_buf_mem[memory_index + 3] = start_address + 4;
12199 arm_insn_r->mem_rec_count = 2;
12202 /* VLDR Vector load register. */
12203 else if ((opcode & 0x13) == 0x11)
12205 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12209 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12210 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12214 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12215 record_buf[0] = num_regs + reg_vd;
12217 arm_insn_r->reg_rec_count = 1;
12220 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12221 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12225 /* Record handler for arm/thumb mode VFP data processing instructions. */
/* Record handler for ARM/Thumb VFP data-processing instructions
   (VMLA/VMLS, VNMLA/VNMLS/VNMUL, VMUL, VADD/VSUB, VDIV, VMOV imm,
   VNEG/VABS, VSQRT, VCVT, VCMP, ...).  The instruction type selects
   which destination registers (S/D regs, or FPSCR for compares) are
   saved via REG_ALLOC.

   Fixes vs. previous revision:
   - opc1 was masked with "& 0x04", collapsing it to {0, 4} and making
     every comparison against 0x01/0x02/0x03/0x08/0x0b dead code.  The
     intent is to mask OFF the D bit: "& ~0x04".
   - The VDIV branch tested opc1 == 0x0b, duplicating the later generic
     branch and leaving the latter unreachable; VDIV is opc1 = 0b1x00,
     i.e. 0x08 once the D bit is masked off.  */
12228 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12230 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12231 uint32_t record_buf[4];
12232 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12233 enum insn_types curr_insn_type = INSN_INV;
12235 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12236 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12237 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12238 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12239 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12240 bit_d = bit (arm_insn_r->arm_insn, 22);
/* Mask off the "D" bit so opc1 can be compared against the opcode
   values below.  */
12241 opc1 = opc1 & ~0x04;
12243 /* Handle VMLA, VMLS. */
12246 if (bit (arm_insn_r->arm_insn, 10))
12248 if (bit (arm_insn_r->arm_insn, 6))
12249 curr_insn_type = INSN_T0;
12251 curr_insn_type = INSN_T1;
12256 curr_insn_type = INSN_T1;
12258 curr_insn_type = INSN_T2;
12261 /* Handle VNMLA, VNMLS, VNMUL. */
12262 else if (opc1 == 0x01)
12265 curr_insn_type = INSN_T1;
12267 curr_insn_type = INSN_T2;
12270 else if (opc1 == 0x02 && !(opc3 & 0x01))
12272 if (bit (arm_insn_r->arm_insn, 10))
12274 if (bit (arm_insn_r->arm_insn, 6))
12275 curr_insn_type = INSN_T0;
12277 curr_insn_type = INSN_T1;
12282 curr_insn_type = INSN_T1;
12284 curr_insn_type = INSN_T2;
12287 /* Handle VADD, VSUB. */
12288 else if (opc1 == 0x03)
12290 if (!bit (arm_insn_r->arm_insn, 9))
12292 if (bit (arm_insn_r->arm_insn, 6))
12293 curr_insn_type = INSN_T0;
12295 curr_insn_type = INSN_T1;
12300 curr_insn_type = INSN_T1;
12302 curr_insn_type = INSN_T2;
/* Handle VDIV (opc1 = 0b1x00, D bit already masked off).  */
12306 else if (opc1 == 0x08)
12309 curr_insn_type = INSN_T1;
12311 curr_insn_type = INSN_T2;
12313 /* Handle all other vfp data processing instructions. */
12314 else if (opc1 == 0x0b)
12317 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12319 if (bit (arm_insn_r->arm_insn, 4))
12321 if (bit (arm_insn_r->arm_insn, 6))
12322 curr_insn_type = INSN_T0;
12324 curr_insn_type = INSN_T1;
12329 curr_insn_type = INSN_T1;
12331 curr_insn_type = INSN_T2;
12334 /* Handle VNEG and VABS. */
12335 else if ((opc2 == 0x01 && opc3 == 0x01)
12336 || (opc2 == 0x00 && opc3 == 0x03))
12338 if (!bit (arm_insn_r->arm_insn, 11))
12340 if (bit (arm_insn_r->arm_insn, 6))
12341 curr_insn_type = INSN_T0;
12343 curr_insn_type = INSN_T1;
12348 curr_insn_type = INSN_T1;
12350 curr_insn_type = INSN_T2;
12353 /* Handle VSQRT. */
12354 else if (opc2 == 0x01 && opc3 == 0x03)
12357 curr_insn_type = INSN_T1;
12359 curr_insn_type = INSN_T2;
12362 else if (opc2 == 0x07 && opc3 == 0x03)
12365 curr_insn_type = INSN_T1;
12367 curr_insn_type = INSN_T2;
12369 else if (opc3 & 0x01)
12372 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12374 if (!bit (arm_insn_r->arm_insn, 18))
12375 curr_insn_type = INSN_T2;
12379 curr_insn_type = INSN_T1;
12381 curr_insn_type = INSN_T2;
12385 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12388 curr_insn_type = INSN_T1;
12390 curr_insn_type = INSN_T2;
12392 /* Handle VCVTB, VCVTT. */
12393 else if ((opc2 & 0x0e) == 0x02)
12394 curr_insn_type = INSN_T2;
12395 /* Handle VCMP, VCMPE. */
12396 else if ((opc2 & 0x0e) == 0x04)
12397 curr_insn_type = INSN_T3;
12401 switch (curr_insn_type)
/* INSN_T0: double-precision pair of D registers.  */
12404 reg_vd = reg_vd | (bit_d << 4);
12405 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12406 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12407 arm_insn_r->reg_rec_count = 2;
/* INSN_T1: single D register destination.  */
12411 reg_vd = reg_vd | (bit_d << 4);
12412 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12413 arm_insn_r->reg_rec_count = 1;
/* INSN_T2: single-precision destination.  */
12417 reg_vd = (reg_vd << 1) | bit_d;
12418 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12419 arm_insn_r->reg_rec_count = 1;
/* INSN_T3: compares write only FPSCR.  */
12423 record_buf[0] = ARM_FPSCR_REGNUM;
12424 arm_insn_r->reg_rec_count = 1;
12428 gdb_assert_not_reached ("no decoding pattern found");
12432 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12436 /* Handling opcode 110 insns. */
/* Record handler for ARM coprocessor / Advanced SIMD / VFP load-store
   and register-transfer encodings (opcode 110): dispatches to the
   extension register ld/st recorder where applicable, records the two
   destination core registers for MRRC-style transfers, and reports the
   rest as unsupported.  */
12439 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12441 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12443 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12444 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12445 op1_ebit = bit (arm_insn_r->arm_insn, 20);
/* Coprocessor numbers 10/11 are the VFP/Advanced SIMD extension.  */
12447 if ((coproc & 0x0e) == 0x0a)
12449 /* Handle extension register ld/st instructions. */
12451 return arm_record_exreg_ld_st_insn (arm_insn_r);
12453 /* 64-bit transfers between arm core and extension registers. */
12454 if ((op1 & 0x3e) == 0x04)
12455 return arm_record_exreg_ld_st_insn (arm_insn_r);
12459 /* Handle coprocessor ld/st instructions. */
12464 return arm_record_unsupported_insn (arm_insn_r);
12467 return arm_record_unsupported_insn (arm_insn_r);
12470 /* Move to coprocessor from two arm core registers. */
12472 return arm_record_unsupported_insn (arm_insn_r);
12474 /* Move to two arm core registers from coprocessor. */
12479 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12480 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12481 arm_insn_r->reg_rec_count = 2;
12483 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12487 return arm_record_unsupported_insn (arm_insn_r);
12490 /* Handling opcode 111 insns. */
/* Record handler for opcode 111: SWI/SVC system calls, VFP
   data-processing, Advanced SIMD/VFP core<->extension transfers, and
   generic coprocessor operations.  */
12493 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12495 uint32_t op, op1_sbit, op1_ebit, coproc;
12496 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12497 struct regcache *reg_cache = arm_insn_r->regcache;
12498 ULONGEST u_regval = 0;
12500 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12501 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12502 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12503 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12504 op = bit (arm_insn_r->arm_insn, 4);
12506 /* Handle arm SWI/SVC system call instructions. */
12509 if (tdep->arm_syscall_record != NULL)
12511 ULONGEST svc_operand, svc_number;
/* OABI encodes the syscall number in the SVC immediate (biased by
   0x900000); EABI uses SVC #0 with the number in r7.  */
12513 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12515 if (svc_operand) /* OABI. */
12516 svc_number = svc_operand - 0x900000;
12518 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12520 return tdep->arm_syscall_record (reg_cache, svc_number);
12524 printf_unfiltered (_("no syscall record support\n"));
/* Coprocessor numbers 10/11 are the VFP/Advanced SIMD extension.  */
12529 if ((coproc & 0x0e) == 0x0a)
12531 /* VFP data-processing instructions. */
12532 if (!op1_sbit && !op)
12533 return arm_record_vfp_data_proc_insn (arm_insn_r);
12535 /* Advanced SIMD, VFP instructions. */
12536 if (!op1_sbit && op)
12537 return arm_record_vdata_transfer_insn (arm_insn_r);
12541 /* Coprocessor data operations. */
12542 if (!op1_sbit && !op)
12543 return arm_record_unsupported_insn (arm_insn_r);
12545 /* Move to Coprocessor from ARM core register. */
12546 if (!op1_sbit && !op1_ebit && op)
12547 return arm_record_unsupported_insn (arm_insn_r);
12549 /* Move to arm core register from coprocessor. */
12550 if (!op1_sbit && op1_ebit && op)
12552 uint32_t record_buf[1];
12554 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
/* MRC with Rt == 15 writes the condition flags instead of PC.  */
12555 if (record_buf[0] == 15)
12556 record_buf[0] = ARM_PS_REGNUM;
12558 arm_insn_r->reg_rec_count = 1;
12559 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12565 return arm_record_unsupported_insn (arm_insn_r);
12568 /* Handling opcode 000 insns. */
/* Thumb opcode 000 (shift by immediate, add/subtract register or
   immediate): the destination register is bits 0..2 and the flags are
   updated, so record CPSR and Rd.  */
12571 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12573 uint32_t record_buf[8];
12574 uint32_t reg_src1 = 0;
12576 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12578 record_buf[0] = ARM_PS_REGNUM;
12579 record_buf[1] = reg_src1;
12580 thumb_insn_r->reg_rec_count = 2;
12582 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12588 /* Handling opcode 001 insns. */
/* Thumb opcode 001 (MOV/CMP/ADD/SUB with 8-bit immediate): the
   destination register is bits 8..10 and the flags are updated, so
   record CPSR and Rd.  */
12591 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12593 uint32_t record_buf[8];
12594 uint32_t reg_src1 = 0;
12596 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12598 record_buf[0] = ARM_PS_REGNUM;
12599 record_buf[1] = reg_src1;
12600 thumb_insn_r->reg_rec_count = 2;
12602 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12607 /* Handling opcode 010 insns. */
/* Thumb opcode 010: load/store with register offset, PC-relative load,
   BX/BLX, and the data-processing / special data-processing formats.
   Records either the loaded register, the stored memory span
   (as a (length, address) pair), or CPSR + destination.  */
12610 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12612 struct regcache *reg_cache = thumb_insn_r->regcache;
12613 uint32_t record_buf[8], record_buf_mem[8];
12615 uint32_t reg_src1 = 0, reg_src2 = 0;
12616 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12618 ULONGEST u_regval[2] = {0};
12620 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12622 if (bit (thumb_insn_r->arm_insn, 12))
12624 /* Handle load/store register offset. */
12625 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12626 if (opcode2 >= 12 && opcode2 <= 15)
12628 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12629 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12630 record_buf[0] = reg_src1;
12631 thumb_insn_r->reg_rec_count = 1;
12633 else if (opcode2 >= 8 && opcode2 <= 10)
12635 /* STR(2), STRB(2), STRH(2) . */
/* Effective address is Rn + Rm; record_buf_mem[0] is the access
   width in bytes, [1] the address.  */
12636 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12637 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12638 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12639 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12641 record_buf_mem[0] = 4; /* STR (2). */
12642 else if (10 == opcode2)
12643 record_buf_mem[0] = 1; /* STRB (2). */
12644 else if (9 == opcode2)
12645 record_buf_mem[0] = 2; /* STRH (2). */
12646 record_buf_mem[1] = u_regval[0] + u_regval[1];
12647 thumb_insn_r->mem_rec_count = 1;
12650 else if (bit (thumb_insn_r->arm_insn, 11))
12652 /* Handle load from literal pool. */
/* LDR(3): PC-relative load into Rd (bits 8..10).  */
12654 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12655 record_buf[0] = reg_src1;
12656 thumb_insn_r->reg_rec_count = 1;
12660 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12661 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12662 if ((3 == opcode2) && (!opcode3))
12664 /* Branch with exchange. */
12665 record_buf[0] = ARM_PS_REGNUM;
12666 thumb_insn_r->reg_rec_count = 1;
12670 /* Format 8; special data processing insns. */
12671 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12672 record_buf[0] = ARM_PS_REGNUM;
12673 record_buf[1] = reg_src1;
12674 thumb_insn_r->reg_rec_count = 2;
12679 /* Format 5; data processing insns. */
12680 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
/* Bit 7 is the H1 flag selecting a high register (r8-r15).  */
12681 if (bit (thumb_insn_r->arm_insn, 7))
12683 reg_src1 = reg_src1 + 8;
12685 record_buf[0] = ARM_PS_REGNUM;
12686 record_buf[1] = reg_src1;
12687 thumb_insn_r->reg_rec_count = 2;
12690 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12691 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12697 /* Handling opcode 001 insns. */
/* Thumb opcode 011: word load/store with 5-bit immediate offset.
   A load records destination Rd; a store records the 4-byte span at
   Rn + imm5*4 as a (length, address) pair.  */
12700 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12702 struct regcache *reg_cache = thumb_insn_r->regcache;
12703 uint32_t record_buf[8], record_buf_mem[8];
12705 uint32_t reg_src1 = 0;
12706 uint32_t opcode = 0, immed_5 = 0;
12708 ULONGEST u_regval = 0;
12710 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12715 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12716 record_buf[0] = reg_src1;
12717 thumb_insn_r->reg_rec_count = 1;
/* Store: the immediate is in words, so the offset is imm5 * 4.  */
12722 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12723 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12724 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12725 record_buf_mem[0] = 4;
12726 record_buf_mem[1] = u_regval + (immed_5 * 4);
12727 thumb_insn_r->mem_rec_count = 1;
12730 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12731 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12737 /* Handling opcode 100 insns. */
/* Thumb opcode 100: halfword load/store with immediate offset and
   SP-relative load/store.  Loads record the destination register;
   stores record the affected memory span as (length, address).  */
12740 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12742 struct regcache *reg_cache = thumb_insn_r->regcache;
12743 uint32_t record_buf[8], record_buf_mem[8];
12745 uint32_t reg_src1 = 0;
12746 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12748 ULONGEST u_regval = 0;
12750 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
/* SP-relative load: destination is bits 8..10.  */
12755 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12756 record_buf[0] = reg_src1;
12757 thumb_insn_r->reg_rec_count = 1;
12759 else if (1 == opcode)
/* Halfword load: destination is bits 0..2.  */
12762 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12763 record_buf[0] = reg_src1;
12764 thumb_insn_r->reg_rec_count = 1;
12766 else if (2 == opcode)
/* SP-relative store: 4 bytes at SP + imm8*4.  */
12769 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12770 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12771 record_buf_mem[0] = 4;
12772 record_buf_mem[1] = u_regval + (immed_8 * 4);
12773 thumb_insn_r->mem_rec_count = 1;
12775 else if (0 == opcode)
/* Halfword store: 2 bytes at Rn + imm5*2.  */
12778 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12779 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12780 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12781 record_buf_mem[0] = 2;
12782 record_buf_mem[1] = u_regval + (immed_5 * 2);
12783 thumb_insn_r->mem_rec_count = 1;
12786 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12787 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12793 /* Handling opcode 101 insns. */
/* Thumb opcode 101 (miscellaneous): POP, PUSH, BKPT, and SP-adjusting
   ADD/SUB.  POP records the popped registers plus CPSR and SP; PUSH
   records the written stack span (as (length, address) pairs) plus SP;
   BKPT records CPSR and LR but SPSR saving is still unimplemented.  */
12796 thumb_record_misc (insn_decode_record *thumb_insn_r)
12798 struct regcache *reg_cache = thumb_insn_r->regcache;
12800 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12801 uint32_t register_bits = 0, register_count = 0;
12802 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12803 uint32_t record_buf[24], record_buf_mem[48];
12806 ULONGEST u_regval = 0;
12808 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12809 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12810 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
/* POP: every register named in the 8-bit list is overwritten.  */
12815 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12816 while (register_bits)
12818 if (register_bits & 0x00000001)
12819 record_buf[index++] = register_count;
12820 register_bits = register_bits >> 1;
12823 record_buf[index++] = ARM_PS_REGNUM;
12824 record_buf[index++] = ARM_SP_REGNUM;
12825 thumb_insn_r->reg_rec_count = index;
12827 else if (10 == opcode2)
/* PUSH: count the registers in the list (bit 8 adds LR), then record
   the stack span they will overwrite and the updated SP.  */
12830 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12831 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12832 while (register_bits)
12834 if (register_bits & 0x00000001)
12836 register_bits = register_bits >> 1;
12838 start_address = u_regval - \
12839 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12840 thumb_insn_r->mem_rec_count = register_count;
12841 while (register_count)
12843 record_buf_mem[(register_count * 2) - 1] = start_address;
12844 record_buf_mem[(register_count * 2) - 2] = 4;
12845 start_address = start_address + 4;
12848 record_buf[0] = ARM_SP_REGNUM;
12849 thumb_insn_r->reg_rec_count = 1;
12851 else if (0x1E == opcode1)
12854 /* Handle enhanced software breakpoint insn, BKPT. */
12855 /* CPSR is changed to be executed in ARM state, disabling normal
12856 interrupts, entering abort mode. */
12857 /* According to high vector configuration PC is set. */
12858 /* User hits breakpoint and type reverse, in that case, we need to go back with
12859 previous CPSR and Program Counter. */
12860 record_buf[0] = ARM_PS_REGNUM;
12861 record_buf[1] = ARM_LR_REGNUM;
12862 thumb_insn_r->reg_rec_count = 2;
12863 /* We need to save SPSR value, which is not yet done. */
12864 printf_unfiltered (_("Process record does not support instruction "
12865 "0x%0x at address %s.\n"),
12866 thumb_insn_r->arm_insn,
12867 paddress (thumb_insn_r->gdbarch,
12868 thumb_insn_r->this_addr));
12871 else if ((0 == opcode) || (1 == opcode))
12873 /* ADD(5), ADD(6). */
12874 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12875 record_buf[0] = reg_src1;
12876 thumb_insn_r->reg_rec_count = 1;
12878 else if (2 == opcode)
12880 /* ADD(7), SUB(4). */
/* These adjust SP itself, so SP is the only register changed.  */
12881 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12882 record_buf[0] = ARM_SP_REGNUM;
12883 thumb_insn_r->reg_rec_count = 1;
12886 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12887 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12893 /* Handling opcode 110 insns. */
/* Thumb opcode 110: LDMIA/STMIA multiple load/store, SWI/SVC system
   calls, and conditional branches.  LDMIA records the loaded registers
   plus the (write-back) base; STMIA records the written memory spans;
   SVC defers to the target's syscall recorder.  */
12896 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12898 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12899 struct regcache *reg_cache = thumb_insn_r->regcache;
12901 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12902 uint32_t reg_src1 = 0;
12903 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12904 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12905 uint32_t record_buf[24], record_buf_mem[48];
12907 ULONGEST u_regval = 0;
12909 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12910 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
/* LDMIA: each register in the 8-bit list is overwritten, plus the
   base register Rn (bits 8..10) due to write-back.  */
12916 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12918 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12919 while (register_bits)
12921 if (register_bits & 0x00000001)
12922 record_buf[index++] = register_count;
12923 register_bits = register_bits >> 1;
12926 record_buf[index++] = reg_src1;
12927 thumb_insn_r->reg_rec_count = index;
12929 else if (0 == opcode2)
12931 /* It handles both STMIA. */
/* Count the registers, then record one 4-byte span per register
   starting at the base address (increment-after).  */
12932 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12934 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12935 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12936 while (register_bits)
12938 if (register_bits & 0x00000001)
12940 register_bits = register_bits >> 1;
12942 start_address = u_regval;
12943 thumb_insn_r->mem_rec_count = register_count;
12944 while (register_count)
12946 record_buf_mem[(register_count * 2) - 1] = start_address;
12947 record_buf_mem[(register_count * 2) - 2] = 4;
12948 start_address = start_address + 4;
12952 else if (0x1F == opcode1)
12954 /* Handle arm syscall insn. */
/* Thumb SVC is always #0; the syscall number lives in r7 (EABI).  */
12955 if (tdep->arm_syscall_record != NULL)
12957 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12958 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12962 printf_unfiltered (_("no syscall record support\n"));
12967 /* B (1), conditional branch is automatically taken care in process_record,
12968 as PC is saved there. */
12970 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12971 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12977 /* Handling opcode 111 insns. */
/* Thumb opcode 111: unconditional branch and the two-instruction
   BL/BLX sequence.  The second half (H = 2/3) clobbers LR; the first
   half (H = 1) clobbers CPSR and LR.  */
12980 thumb_record_branch (insn_decode_record *thumb_insn_r)
12982 uint32_t record_buf[8];
12983 uint32_t bits_h = 0;
12985 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12987 if (2 == bits_h || 3 == bits_h)
/* BL/BLX second half: LR receives the return address.  */
12990 record_buf[0] = ARM_LR_REGNUM;
12991 thumb_insn_r->reg_rec_count = 1;
12993 else if (1 == bits_h)
/* BL/BLX first half.  */
12996 record_buf[0] = ARM_PS_REGNUM;
12997 record_buf[1] = ARM_LR_REGNUM;
12998 thumb_insn_r->reg_rec_count = 2;
13001 /* B(2) is automatically taken care in process_record, as PC is
13004 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13009 /* Handler for thumb2 load/store multiple instructions. */
/* Thumb-2 load/store multiple handler: RFE/SRS (op 0/3), and
   LDM/LDMDB vs. STM/STMDB (op 1/2).  Loads record the listed
   registers plus base and CPSR; stores record the written memory
   spans as (length, address) pairs plus base and CPSR.  */
13012 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
13014 struct regcache *reg_cache = thumb2_insn_r->regcache;
13016 uint32_t reg_rn, op;
13017 uint32_t register_bits = 0, register_count = 0;
13018 uint32_t index = 0, start_address = 0;
13019 uint32_t record_buf[24], record_buf_mem[48];
13021 ULONGEST u_regval = 0;
13023 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13024 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13026 if (0 == op || 3 == op)
13028 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13030 /* Handle RFE instruction. */
13031 record_buf[0] = ARM_PS_REGNUM;
13032 thumb2_insn_r->reg_rec_count = 1;
13036 /* Handle SRS instruction after reading banked SP. */
13037 return arm_record_unsupported_insn (thumb2_insn_r);
13040 else if (1 == op || 2 == op)
13042 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13044 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13045 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13046 while (register_bits)
13048 if (register_bits & 0x00000001)
13049 record_buf[index++] = register_count;
13052 register_bits = register_bits >> 1;
13054 record_buf[index++] = reg_rn;
13055 record_buf[index++] = ARM_PS_REGNUM;
13056 thumb2_insn_r->reg_rec_count = index;
13060 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13061 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13062 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13063 while (register_bits)
13065 if (register_bits & 0x00000001)
13068 register_bits = register_bits >> 1;
/* STMIA/STMEA (increment after): first store is at the base.  */
13074 start_address = u_regval;
/* STMDB/STMFD (decrement before): first store is at
   base - 4 * register_count.  */
13079 start_address = u_regval - register_count * 4;
13082 thumb2_insn_r->mem_rec_count = register_count;
13083 while (register_count)
13085 record_buf_mem[register_count * 2 - 1] = start_address;
13086 record_buf_mem[register_count * 2 - 2] = 4;
13087 start_address = start_address + 4;
13090 record_buf[0] = reg_rn;
13091 record_buf[1] = ARM_PS_REGNUM;
13092 thumb2_insn_r->reg_rec_count = 2;
13096 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13098 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13100 return ARM_RECORD_SUCCESS;
13103 /* Handler for thumb2 load/store (dual/exclusive) and table branch
/* Thumb-2 handler for load/store dual, load/store exclusive, and table
   branch (TBB/TBH) instructions.  Loads record the one or two
   destination registers plus CPSR; stores record the written memory
   spans and, for STREX*, the status register Rd.  */
13107 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13109 struct regcache *reg_cache = thumb2_insn_r->regcache;
13111 uint32_t reg_rd, reg_rn, offset_imm;
13112 uint32_t reg_dest1, reg_dest2;
13113 uint32_t address, offset_addr;
13114 uint32_t record_buf[8], record_buf_mem[8];
13115 uint32_t op1, op2, op3;
13118 ULONGEST u_regval[2];
13120 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13121 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13122 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13124 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
/* Load forms: TBB/TBH (op1=1, op2=1, op3=0/1) change only the PC and
   are excluded here.  */
13126 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13128 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13129 record_buf[0] = reg_dest1;
13130 record_buf[1] = ARM_PS_REGNUM;
13131 thumb2_insn_r->reg_rec_count = 2;
/* Dual loads and LDREXD also write a second destination (bits 8..11).  */
13134 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13136 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13137 record_buf[2] = reg_dest2;
13138 thumb2_insn_r->reg_rec_count = 3;
13143 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13144 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13146 if (0 == op1 && 0 == op2)
13148 /* Handle STREX. */
13149 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13150 address = u_regval[0] + (offset_imm * 4);
13151 record_buf_mem[0] = 4;
13152 record_buf_mem[1] = address;
13153 thumb2_insn_r->mem_rec_count = 1;
/* STREX also writes the exclusive-status result register.  */
13154 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13155 record_buf[0] = reg_rd;
13156 thumb2_insn_r->reg_rec_count = 1;
13158 else if (1 == op1 && 0 == op2)
13160 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13161 record_buf[0] = reg_rd;
13162 thumb2_insn_r->reg_rec_count = 1;
13163 address = u_regval[0];
13164 record_buf_mem[1] = address;
13168 /* Handle STREXB. */
13169 record_buf_mem[0] = 1;
13170 thumb2_insn_r->mem_rec_count = 1;
13174 /* Handle STREXH. */
13175 record_buf_mem[0] = 2 ;
13176 thumb2_insn_r->mem_rec_count = 1;
13180 /* Handle STREXD. */
13181 address = u_regval[0];
13182 record_buf_mem[0] = 4;
13183 record_buf_mem[2] = 4;
13184 record_buf_mem[3] = address + 4;
13185 thumb2_insn_r->mem_rec_count = 2;
/* Store dual (STRD): offset is imm8 words; pre-indexed forms apply it
   before the access.  */
13190 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13192 if (bit (thumb2_insn_r->arm_insn, 24))
13194 if (bit (thumb2_insn_r->arm_insn, 23))
13195 offset_addr = u_regval[0] + (offset_imm * 4);
13197 offset_addr = u_regval[0] - (offset_imm * 4);
13199 address = offset_addr;
13202 address = u_regval[0];
13204 record_buf_mem[0] = 4;
13205 record_buf_mem[1] = address;
13206 record_buf_mem[2] = 4;
13207 record_buf_mem[3] = address + 4;
13208 thumb2_insn_r->mem_rec_count = 2;
/* Write-back updates the base register.  */
13209 record_buf[0] = reg_rn;
13210 thumb2_insn_r->reg_rec_count = 1;
13214 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13216 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13218 return ARM_RECORD_SUCCESS;
13221 /* Handler for thumb2 data processing (shift register and modified immediate)
/* Thumb-2 data-processing (shifted register / modified immediate):
   TST/TEQ/CMN/CMP forms (op 0/4/8/13 with Rd == 15) update only the
   flags; everything else writes Rd and the flags.  */
13225 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13227 uint32_t reg_rd, op;
13228 uint32_t record_buf[8];
13230 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13231 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13233 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13235 record_buf[0] = ARM_PS_REGNUM;
13236 thumb2_insn_r->reg_rec_count = 1;
13240 record_buf[0] = reg_rd;
13241 record_buf[1] = ARM_PS_REGNUM;
13242 thumb2_insn_r->reg_rec_count = 2;
13245 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13247 return ARM_RECORD_SUCCESS;
13250 /* Generic handler for thumb2 instructions which effect destination and PS
/* Generic Thumb-2 handler for instructions whose only effects are the
   destination register (bits 8..11) and the status flags.  */
13254 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13257 uint32_t record_buf[8];
13259 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13261 record_buf[0] = reg_rd;
13262 record_buf[1] = ARM_PS_REGNUM;
13263 thumb2_insn_r->reg_rec_count = 2;
13265 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13267 return ARM_RECORD_SUCCESS;
13270 /* Handler for thumb2 branch and miscellaneous control instructions. */
/* Thumb-2 branch and miscellaneous control: MSR (records CPSR), and
   BL/BLX (record CPSR and LR).  Plain branches need nothing here since
   the PC is saved by the common record path.  */
13273 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13275 uint32_t op, op1, op2;
13276 uint32_t record_buf[8];
13278 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13279 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13280 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13282 /* Handle MSR insn. */
13283 if (!(op1 & 0x2) && 0x38 == op)
13287 /* CPSR is going to be changed. */
13288 record_buf[0] = ARM_PS_REGNUM;
13289 thumb2_insn_r->reg_rec_count = 1;
13293 arm_record_unsupported_insn(thumb2_insn_r);
/* Handle BL/BLX: LR gets the return address and flags may change.  */
13297 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13300 record_buf[0] = ARM_PS_REGNUM;
13301 record_buf[1] = ARM_LR_REGNUM;
13302 thumb2_insn_r->reg_rec_count = 2;
13305 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13307 return ARM_RECORD_SUCCESS;
13310 /* Handler for thumb2 store single data item instructions. */
13313 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13315 struct regcache *reg_cache = thumb2_insn_r->regcache;
13317 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13318 uint32_t address, offset_addr;
13319 uint32_t record_buf[8], record_buf_mem[8];
13322 ULONGEST u_regval[2];
13324 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13325 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13326 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13327 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13329 if (bit (thumb2_insn_r->arm_insn, 23))
13332 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13333 offset_addr = u_regval[0] + offset_imm;
13334 address = offset_addr;
13339 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13341 /* Handle STRB (register). */
13342 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13343 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13344 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13345 offset_addr = u_regval[1] << shift_imm;
13346 address = u_regval[0] + offset_addr;
13350 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13351 if (bit (thumb2_insn_r->arm_insn, 10))
13353 if (bit (thumb2_insn_r->arm_insn, 9))
13354 offset_addr = u_regval[0] + offset_imm;
13356 offset_addr = u_regval[0] - offset_imm;
13358 address = offset_addr;
13361 address = u_regval[0];
13367 /* Store byte instructions. */
13370 record_buf_mem[0] = 1;
13372 /* Store half word instructions. */
13375 record_buf_mem[0] = 2;
13377 /* Store word instructions. */
13380 record_buf_mem[0] = 4;
13384 gdb_assert_not_reached ("no decoding pattern found");
13388 record_buf_mem[1] = address;
13389 thumb2_insn_r->mem_rec_count = 1;
13390 record_buf[0] = reg_rn;
13391 thumb2_insn_r->reg_rec_count = 1;
13393 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13395 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13397 return ARM_RECORD_SUCCESS;
13400 /* Handler for thumb2 load memory hints instructions. */
13403 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13405 uint32_t record_buf[8];
13406 uint32_t reg_rt, reg_rn;
13408 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13409 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13411 if (ARM_PC_REGNUM != reg_rt)
13413 record_buf[0] = reg_rt;
13414 record_buf[1] = reg_rn;
13415 record_buf[2] = ARM_PS_REGNUM;
13416 thumb2_insn_r->reg_rec_count = 3;
13418 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13420 return ARM_RECORD_SUCCESS;
13423 return ARM_RECORD_FAILURE;
13426 /* Handler for thumb2 load word instructions. */
13429 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13431 uint32_t opcode1 = 0, opcode2 = 0;
13432 uint32_t record_buf[8];
13434 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13435 record_buf[1] = ARM_PS_REGNUM;
13436 thumb2_insn_r->reg_rec_count = 2;
13438 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13440 return ARM_RECORD_SUCCESS;
13443 /* Handler for thumb2 long multiply, long multiply accumulate, and
13444 divide instructions. */
13447 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13449 uint32_t opcode1 = 0, opcode2 = 0;
13450 uint32_t record_buf[8];
13451 uint32_t reg_src1 = 0;
13453 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13454 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13456 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13458 /* Handle SMULL, UMULL, SMULAL. */
13459 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13460 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13461 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13462 record_buf[2] = ARM_PS_REGNUM;
13463 thumb2_insn_r->reg_rec_count = 3;
13465 else if (1 == opcode1 || 3 == opcode2)
13467 /* Handle SDIV and UDIV. */
13468 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13469 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13470 record_buf[2] = ARM_PS_REGNUM;
13471 thumb2_insn_r->reg_rec_count = 3;
13474 return ARM_RECORD_FAILURE;
13476 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13478 return ARM_RECORD_SUCCESS;
13481 /* Record handler for thumb32 coprocessor instructions. */
13484 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13486 if (bit (thumb2_insn_r->arm_insn, 25))
13487 return arm_record_coproc_data_proc (thumb2_insn_r);
13489 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13492 /* Record handler for advance SIMD structure load/store instructions. */
13495 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13497 struct regcache *reg_cache = thumb2_insn_r->regcache;
13498 uint32_t l_bit, a_bit, b_bits;
13499 uint32_t record_buf[128], record_buf_mem[128];
13500 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13501 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13504 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13505 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13506 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13507 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13508 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13509 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13510 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13511 f_esize = 8 * f_ebytes;
13512 f_elem = 8 / f_ebytes;
13516 ULONGEST u_regval = 0;
13517 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13518 address = u_regval;
13523 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13525 if (b_bits == 0x07)
13527 else if (b_bits == 0x0a)
13529 else if (b_bits == 0x06)
13531 else if (b_bits == 0x02)
13536 for (index_r = 0; index_r < bf_regs; index_r++)
13538 for (index_e = 0; index_e < f_elem; index_e++)
13540 record_buf_mem[index_m++] = f_ebytes;
13541 record_buf_mem[index_m++] = address;
13542 address = address + f_ebytes;
13543 thumb2_insn_r->mem_rec_count += 1;
13548 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13550 if (b_bits == 0x09 || b_bits == 0x08)
13552 else if (b_bits == 0x03)
13557 for (index_r = 0; index_r < bf_regs; index_r++)
13558 for (index_e = 0; index_e < f_elem; index_e++)
13560 for (loop_t = 0; loop_t < 2; loop_t++)
13562 record_buf_mem[index_m++] = f_ebytes;
13563 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13564 thumb2_insn_r->mem_rec_count += 1;
13566 address = address + (2 * f_ebytes);
13570 else if ((b_bits & 0x0e) == 0x04)
13572 for (index_e = 0; index_e < f_elem; index_e++)
13574 for (loop_t = 0; loop_t < 3; loop_t++)
13576 record_buf_mem[index_m++] = f_ebytes;
13577 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13578 thumb2_insn_r->mem_rec_count += 1;
13580 address = address + (3 * f_ebytes);
13584 else if (!(b_bits & 0x0e))
13586 for (index_e = 0; index_e < f_elem; index_e++)
13588 for (loop_t = 0; loop_t < 4; loop_t++)
13590 record_buf_mem[index_m++] = f_ebytes;
13591 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13592 thumb2_insn_r->mem_rec_count += 1;
13594 address = address + (4 * f_ebytes);
13600 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13602 if (bft_size == 0x00)
13604 else if (bft_size == 0x01)
13606 else if (bft_size == 0x02)
13612 if (!(b_bits & 0x0b) || b_bits == 0x08)
13613 thumb2_insn_r->mem_rec_count = 1;
13615 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13616 thumb2_insn_r->mem_rec_count = 2;
13618 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13619 thumb2_insn_r->mem_rec_count = 3;
13621 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13622 thumb2_insn_r->mem_rec_count = 4;
13624 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13626 record_buf_mem[index_m] = f_ebytes;
13627 record_buf_mem[index_m] = address + (index_m * f_ebytes);
13636 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13637 thumb2_insn_r->reg_rec_count = 1;
13639 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13640 thumb2_insn_r->reg_rec_count = 2;
13642 else if ((b_bits & 0x0e) == 0x04)
13643 thumb2_insn_r->reg_rec_count = 3;
13645 else if (!(b_bits & 0x0e))
13646 thumb2_insn_r->reg_rec_count = 4;
13651 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13652 thumb2_insn_r->reg_rec_count = 1;
13654 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13655 thumb2_insn_r->reg_rec_count = 2;
13657 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13658 thumb2_insn_r->reg_rec_count = 3;
13660 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13661 thumb2_insn_r->reg_rec_count = 4;
13663 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13664 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13668 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13670 record_buf[index_r] = reg_rn;
13671 thumb2_insn_r->reg_rec_count += 1;
13674 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13676 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13681 /* Decodes thumb2 instruction type and invokes its record handler. */
13683 static unsigned int
13684 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13686 uint32_t op, op1, op2;
13688 op = bit (thumb2_insn_r->arm_insn, 15);
13689 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13690 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13694 if (!(op2 & 0x64 ))
13696 /* Load/store multiple instruction. */
13697 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13699 else if (!((op2 & 0x64) ^ 0x04))
13701 /* Load/store (dual/exclusive) and table branch instruction. */
13702 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13704 else if (!((op2 & 0x20) ^ 0x20))
13706 /* Data-processing (shifted register). */
13707 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13709 else if (op2 & 0x40)
13711 /* Co-processor instructions. */
13712 return thumb2_record_coproc_insn (thumb2_insn_r);
13715 else if (op1 == 0x02)
13719 /* Branches and miscellaneous control instructions. */
13720 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13722 else if (op2 & 0x20)
13724 /* Data-processing (plain binary immediate) instruction. */
13725 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13729 /* Data-processing (modified immediate). */
13730 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13733 else if (op1 == 0x03)
13735 if (!(op2 & 0x71 ))
13737 /* Store single data item. */
13738 return thumb2_record_str_single_data (thumb2_insn_r);
13740 else if (!((op2 & 0x71) ^ 0x10))
13742 /* Advanced SIMD or structure load/store instructions. */
13743 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13745 else if (!((op2 & 0x67) ^ 0x01))
13747 /* Load byte, memory hints instruction. */
13748 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13750 else if (!((op2 & 0x67) ^ 0x03))
13752 /* Load halfword, memory hints instruction. */
13753 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13755 else if (!((op2 & 0x67) ^ 0x05))
13757 /* Load word instruction. */
13758 return thumb2_record_ld_word (thumb2_insn_r);
13760 else if (!((op2 & 0x70) ^ 0x20))
13762 /* Data-processing (register) instruction. */
13763 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13765 else if (!((op2 & 0x78) ^ 0x30))
13767 /* Multiply, multiply accumulate, abs diff instruction. */
13768 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13770 else if (!((op2 & 0x78) ^ 0x38))
13772 /* Long multiply, long multiply accumulate, and divide. */
13773 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13775 else if (op2 & 0x40)
13777 /* Co-processor instructions. */
13778 return thumb2_record_coproc_insn (thumb2_insn_r);
13785 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13786 and positive val on fauilure. */
13789 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13791 gdb_byte buf[insn_size];
13793 memset (&buf[0], 0, insn_size);
13795 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13797 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13799 gdbarch_byte_order_for_code (insn_record->gdbarch));
13803 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13805 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13809 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13810 uint32_t insn_size)
13813 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13814 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13816 arm_record_data_proc_misc_ld_str, /* 000. */
13817 arm_record_data_proc_imm, /* 001. */
13818 arm_record_ld_st_imm_offset, /* 010. */
13819 arm_record_ld_st_reg_offset, /* 011. */
13820 arm_record_ld_st_multiple, /* 100. */
13821 arm_record_b_bl, /* 101. */
13822 arm_record_asimd_vfp_coproc, /* 110. */
13823 arm_record_coproc_data_proc /* 111. */
13826 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13827 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13829 thumb_record_shift_add_sub, /* 000. */
13830 thumb_record_add_sub_cmp_mov, /* 001. */
13831 thumb_record_ld_st_reg_offset, /* 010. */
13832 thumb_record_ld_st_imm_offset, /* 011. */
13833 thumb_record_ld_st_stack, /* 100. */
13834 thumb_record_misc, /* 101. */
13835 thumb_record_ldm_stm_swi, /* 110. */
13836 thumb_record_branch /* 111. */
13839 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13840 uint32_t insn_id = 0;
13842 if (extract_arm_insn (arm_record, insn_size))
13846 printf_unfiltered (_("Process record: error reading memory at "
13847 "addr %s len = %d.\n"),
13848 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13852 else if (ARM_RECORD == record_type)
13854 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13855 insn_id = bits (arm_record->arm_insn, 25, 27);
13856 ret = arm_record_extension_space (arm_record);
13857 /* If this insn has fallen into extension space
13858 then we need not decode it anymore. */
13859 if (ret != -1 && !INSN_RECORDED(arm_record))
13861 ret = arm_handle_insn[insn_id] (arm_record);
13864 else if (THUMB_RECORD == record_type)
13866 /* As thumb does not have condition codes, we set negative. */
13867 arm_record->cond = -1;
13868 insn_id = bits (arm_record->arm_insn, 13, 15);
13869 ret = thumb_handle_insn[insn_id] (arm_record);
13871 else if (THUMB2_RECORD == record_type)
13873 /* As thumb does not have condition codes, we set negative. */
13874 arm_record->cond = -1;
13876 /* Swap first half of 32bit thumb instruction with second half. */
13877 arm_record->arm_insn
13878 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13880 insn_id = thumb2_record_decode_insn_handler (arm_record);
13882 if (insn_id != ARM_RECORD_SUCCESS)
13884 arm_record_unsupported_insn (arm_record);
13890 /* Throw assertion. */
13891 gdb_assert_not_reached ("not a valid instruction, could not decode");
13898 /* Cleans up local record registers and memory allocations. */
13901 deallocate_reg_mem (insn_decode_record *record)
13903 xfree (record->arm_regs);
13904 xfree (record->arm_mems);
13908 /* Parse the current instruction and record the values of the registers and
13909 memory that will be changed in current instruction to record_arch_list".
13910 Return -1 if something is wrong. */
/* NOTE(review): this function continues past the end of this excerpt; the
   trailing cleanup/return lines are not visible here.  */
13913 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13914 CORE_ADDR insn_addr)
13917 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13918 uint32_t no_of_rec = 0;
13919 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13920 ULONGEST t_bit = 0, insn_id = 0;
13922 ULONGEST u_regval = 0;
13924 insn_decode_record arm_record;
/* Zero the scratch record so pointer members start NULL for the final
   deallocate call.  */
13926 memset (&arm_record, 0, sizeof (insn_decode_record));
13927 arm_record.regcache = regcache;
13928 arm_record.this_addr = insn_addr;
13929 arm_record.gdbarch = gdbarch;
13932 if (record_debug > 1)
13934 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13936 paddress (gdbarch, arm_record.this_addr));
/* Probe-read two bytes first; enough to classify the instruction.  */
13939 if (extract_arm_insn (&arm_record, 2))
13943 printf_unfiltered (_("Process record: error reading memory at "
13944 "addr %s len = %d.\n"),
13945 paddress (arm_record.gdbarch,
13946 arm_record.this_addr), 2);
13951 /* Check the insn, whether it is thumb or arm one. */
13953 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13954 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13957 if (!(u_regval & t_bit))
13959 /* We are decoding arm insn. */
13960 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13964 insn_id = bits (arm_record.arm_insn, 11, 15);
13965 /* is it thumb2 insn? */
/* Top-5-bit prefixes 0b11101/0b11110/0b11111 mark a 32-bit Thumb-2 insn.  */
13966 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13968 ret = decode_insn (&arm_record, THUMB2_RECORD,
13969 THUMB2_INSN_SIZE_BYTES);
13973 /* We are decoding thumb insn. */
13974 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13980 /* Record registers. */
/* PC always changes; record it unconditionally.  */
13981 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13982 if (arm_record.arm_regs)
13984 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13986 if (record_full_arch_list_add_reg
13987 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13991 /* Record memories. */
13992 if (arm_record.arm_mems)
13994 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13996 if (record_full_arch_list_add_mem
13997 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13998 arm_record.arm_mems[no_of_rec].len))
14003 if (record_full_arch_list_add_end ())
14008 deallocate_reg_mem (&arm_record);