1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
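/* Illustrative usage (a sketch, not code from this file): when an
   objfile's symbols are read in, ARM-specific symbol hooks elsewhere in
   this file mark each minimal symbol that refers to Thumb code with

     MSYMBOL_SET_SPECIAL (msym);

   and arm_pc_is_thumb below then tests the same bit with
   MSYMBOL_IS_SPECIAL (sym.minsym) when it has no better information.  */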
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data *arm_objfile_data_key;
struct arm_mapping_symbol
{
  bfd_vma value;
  char type;
};
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
92 DEF_VEC_O(arm_mapping_symbol_s);
struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings[] =
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings[] =
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings[] =
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode = -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
162 } arm_register_aliases[] = {
163 /* Basic register numbers. */
180 /* Synonyms (argument and variable registers). */
193 /* Other platform-specific names for r9. */
199 /* Names used by GCC (not listed in the ARM EABI). */
201 /* A special name from the older ATPCS. */
205 static const char *const arm_register_names[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style;
/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element *);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat *, const void *,
228 static void convert_to_extended (const struct floatformat *, void *,
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, const gdb_byte *buf);
238 static int thumb_insn_size (unsigned short inst1);
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */
  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 CORE_ADDR prologue_start,
262 CORE_ADDR prologue_end,
263 struct arm_prologue_cache *cache);
/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
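/* For example: a Thumb function whose first instruction is at 0x8000 is
   reached through the address 0x8001, so IS_THUMB_ADDR (0x8001) is 1,
   UNMAKE_THUMB_ADDR (0x8001) recovers 0x8000, and MAKE_THUMB_ADDR (0x8000)
   rebuilds the value that belongs in the PC or LR.  */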
/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
static ULONGEST
arm_psr_thumb_bit (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    return XPSR_T;
  else
    return CPSR_T;
}
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info *frame)
297 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
305 return (cpsr & t_bit) != 0;
308 /* Callback for VEC_lower_bound. */
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 const struct arm_mapping_symbol *rhs)
314 return lhs->value < rhs->value;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
322 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
324 struct obj_section *sec;
326 /* If there are mapping symbols, consult them. */
327 sec = find_pc_section (memaddr);
330 struct arm_per_objfile *data;
331 VEC(arm_mapping_symbol_s) *map;
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
336 data = objfile_data (sec->objfile, arm_objfile_data_key);
339 map = data->section_maps[sec->the_bfd_section->index];
340 if (!VEC_empty (arm_mapping_symbol_s, map))
342 struct arm_mapping_symbol *map_sym;
344 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
345 arm_compare_mapping_symbols);
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx < VEC_length (arm_mapping_symbol_s, map))
353 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
354 if (map_sym->value == map_key.value)
357 *start = map_sym->value + obj_section_addr (sec);
358 return map_sym->type;
364 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
366 *start = map_sym->value + obj_section_addr (sec);
367 return map_sym->type;
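/* As an illustration (hypothetical values): if a section's map contains
   the mapping symbols { 0x00 -> 'a', 0x40 -> 't', 0x80 -> 'd' }, a MEMADDR
   at section offset 0x44 falls between the second and third entries;
   VEC_lower_bound returns the index of the 0x80 entry, so the preceding
   't' symbol is used, the function returns 't' (Thumb code), and *START
   is set to the section address plus 0x40.  */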
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
383 struct bound_minimal_symbol sym;
385 struct displaced_step_closure* dsc
386 = get_displaced_step_closure_by_addr(memaddr);
  /* If we are checking the mode of a displaced instruction in the copy
     area, the mode should be determined by the instruction at the
     original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }
  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode it will be executed). */
439 if (target_has_registers)
440 return arm_frame_is_thumb (get_current_frame ());
  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
446 /* Remove useless bits from addresses in a running program. */
448 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch)->is_m
453 && (val & 0xfffffff0) == 0xfffffff0)
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
}
462 /* Return 1 if PC is the start of a compiler helper function which
463 can be safely ignored during prologue skipping. IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   was called.  */

static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
469 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
470 struct bound_minimal_symbol msym;
472 msym = lookup_minimal_symbol_by_pc (pc);
473 if (msym.minsym != NULL
474 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
475 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
477 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
481 if (strstr (name, "_from_thumb") != NULL)
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
487 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
489 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
492 /* Internal functions related to thread-local storage. */
493 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
495 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
  if (!is_thumb
      && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
509 == 0xe240f01f) /* sub pc, r0, #31 */
516 /* Support routines for instruction parsing. */
517 #define submask(x) ((1L << ((x) + 1)) - 1)
518 #define bit(obj,st) (((obj) >> (st)) & 1)
519 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
520 #define sbits(obj,st,fn) \
521 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
522 #define BranchDest(addr,instr) \
523 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
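/* A worked example (illustrative): for the A32 instruction 0xeb000001
   (BL with a 24-bit immediate of 1) located at 0x8000,
   sbits (0xeb000001, 0, 23) is 1, so
   BranchDest (0x8000, 0xeb000001) = 0x8000 + 8 + (1 << 2) = 0x800c.
   A negative case: 0xebfffffe has an immediate of -2, giving
   0x8000 + 8 + (-2 << 2) = 0x8000, i.e. a branch to itself.  */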
/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16 bits of the instruction, and INSN2 is the second 16 bits of
   the instruction.  */
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
534 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
535 the 32-bit instruction. */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
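/* Worked example (illustrative): the Thumb-2 encoding of
   "movw r0, #0x1234" is the halfword pair insn1 = 0xf241, insn2 = 0x2034,
   and EXTRACT_MOVW_MOVT_IMM_T reassembles
   (0x1 << 12) | (0x0 << 11) | (0x2 << 8) | 0x34 = 0x1234.
   The A32 encoding of the same instruction is 0xe3010234, and
   EXTRACT_MOVW_MOVT_IMM_A yields (0x1 << 12) | 0x234 = 0x1234.  */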
540 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
543 thumb_expand_immediate (unsigned int imm)
545 unsigned int count = imm >> 7;
553 return (imm & 0xff) | ((imm & 0xff) << 16);
555 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
557 return (imm & 0xff) | ((imm & 0xff) << 8)
558 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
561 return (0x80 | (imm & 0x7f)) << (32 - count);
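/* Worked examples (illustrative): for imm = 0x1ab, count = 3, so the byte
   0xab is placed in bytes 0 and 2 of the result: 0x00ab00ab.  For
   imm = 0x4ff, count = 9 (>= 8), so the value 0x80 | 0x7f = 0xff is
   rotated right by 9, i.e. shifted left by 32 - 9 = 23 bits, giving
   0x7f800000.  Both match the ThumbExpandImm pseudocode in the ARM ARM.  */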
564 /* Return 1 if the 16-bit Thumb instruction INST might change
565 control flow, 0 otherwise. */
568 thumb_instruction_changes_pc (unsigned short inst)
570 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
573 if ((inst & 0xf000) == 0xd000) /* conditional branch */
576 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
579 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
582 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
585 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
591 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592 might change control flow, 0 otherwise. */
595 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
597 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
599 /* Branches and miscellaneous control instructions. */
601 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
606 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
608 /* SUBS PC, LR, #imm8. */
611 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
613 /* Conditional branch. */
620 if ((inst1 & 0xfe50) == 0xe810)
622 /* Load multiple or RFE. */
624 if (bit (inst1, 7) && !bit (inst1, 8))
630 else if (!bit (inst1, 7) && bit (inst1, 8))
636 else if (bit (inst1, 7) && bit (inst1, 8))
641 else if (!bit (inst1, 7) && !bit (inst1, 8))
650 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
652 /* MOV PC or MOVS PC. */
656 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
659 if (bits (inst1, 0, 3) == 15)
665 if ((inst2 & 0x0fc0) == 0x0000)
671 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
677 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
686 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
687 epilogue, 0 otherwise. */
690 thumb_instruction_restores_sp (unsigned short insn)
692 return (insn == 0x46bd /* mov sp, r7 */
693 || (insn & 0xff80) == 0xb000 /* add sp, imm */
694 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
704 thumb_analyze_prologue (struct gdbarch *gdbarch,
705 CORE_ADDR start, CORE_ADDR limit,
706 struct arm_prologue_cache *cache)
708 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
709 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
712 struct pv_area *stack;
713 struct cleanup *back_to;
715 CORE_ADDR unrecognized_pc = 0;
717 for (i = 0; i < 16; i++)
718 regs[i] = pv_register (i, 0);
719 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
720 back_to = make_cleanup_free_pv_area (stack);
722 while (start < limit)
726 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
728 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
733 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask = (insn & 0xff) | ((insn & 0x100) << 6);
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
742 if (mask & (1 << regno))
744 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
746 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
749 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
751 offset = (insn & 0x7f) << 2; /* get scaled offset */
752 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
755 else if (thumb_instruction_restores_sp (insn))
757 /* Don't scan past the epilogue. */
760 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
763 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
767 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
771 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
773 && pv_is_constant (regs[bits (insn, 3, 5)]))
774 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
775 regs[bits (insn, 6, 8)]);
776 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs[bits (insn, 3, 6)]))
779 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
780 int rm = bits (insn, 3, 6);
781 regs[rd] = pv_add (regs[rd], regs[rm]);
783 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
785 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
786 int src_reg = (insn & 0x78) >> 3;
787 regs[dst_reg] = regs[src_reg];
789 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno = (insn >> 8) & 0x7;
797 offset = (insn & 0xff) << 2;
798 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
800 if (pv_area_store_would_trash (stack, addr))
803 pv_area_store (stack, addr, 4, regs[regno]);
805 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
807 int rd = bits (insn, 0, 2);
808 int rn = bits (insn, 3, 5);
811 offset = bits (insn, 6, 10) << 2;
812 addr = pv_add_constant (regs[rn], offset);
814 if (pv_area_store_would_trash (stack, addr))
817 pv_area_store (stack, addr, 4, regs[rd]);
819 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
822 /* Ignore stores of argument registers to the stack. */
824 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
829 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
832 /* Similarly ignore single loads from the stack. */
834 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
839 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
842 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
843 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant;
849 loc = start + 4 + bits (insn, 0, 7) * 4;
850 constant = read_memory_unsigned_integer (loc, 4, byte_order);
851 regs[bits (insn, 8, 10)] = pv_constant (constant);
853 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
855 unsigned short inst2;
857 inst2 = read_memory_unsigned_integer (start + 2, 2,
858 byte_order_for_code);
860 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
866 int j1, j2, imm1, imm2;
868 imm1 = sbits (insn, 0, 10);
869 imm2 = bits (inst2, 0, 10);
870 j1 = bit (inst2, 13);
871 j2 = bit (inst2, 11);
873 offset = ((imm1 << 12) + (imm2 << 1));
874 offset ^= ((!j2) << 22) | ((!j1) << 23);
876 nextpc = start + 4 + offset;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2, 12) == 0)
879 nextpc = nextpc & 0xfffffffc;
881 if (!skip_prologue_function (gdbarch, nextpc,
882 bit (inst2, 12) != 0))
886 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
888 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
890 pv_t addr = regs[bits (insn, 0, 3)];
893 if (pv_area_store_would_trash (stack, addr))
896 /* Calculate offsets of saved registers. */
897 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
898 if (inst2 & (1 << regno))
900 addr = pv_add_constant (addr, -4);
901 pv_area_store (stack, addr, 4, regs[regno]);
905 regs[bits (insn, 0, 3)] = addr;
908 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
910 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
912 int regno1 = bits (inst2, 12, 15);
913 int regno2 = bits (inst2, 8, 11);
914 pv_t addr = regs[bits (insn, 0, 3)];
916 offset = inst2 & 0xff;
918 addr = pv_add_constant (addr, offset);
920 addr = pv_add_constant (addr, -offset);
922 if (pv_area_store_would_trash (stack, addr))
925 pv_area_store (stack, addr, 4, regs[regno1]);
926 pv_area_store (stack, pv_add_constant (addr, 4),
930 regs[bits (insn, 0, 3)] = addr;
933 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2 & 0x0c00) == 0x0c00
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 int regno = bits (inst2, 12, 15);
938 pv_t addr = regs[bits (insn, 0, 3)];
940 offset = inst2 & 0xff;
942 addr = pv_add_constant (addr, offset);
944 addr = pv_add_constant (addr, -offset);
946 if (pv_area_store_would_trash (stack, addr))
949 pv_area_store (stack, addr, 4, regs[regno]);
952 regs[bits (insn, 0, 3)] = addr;
955 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 int regno = bits (inst2, 12, 15);
961 offset = inst2 & 0xfff;
962 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
964 if (pv_area_store_would_trash (stack, addr))
967 pv_area_store (stack, addr, 4, regs[regno]);
970 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Ignore stores of argument registers to the stack. */
981 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
983 && (inst2 & 0x8000) == 0x0000
984 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
989 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore dual loads from the stack. */
995 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2 & 0x0d00) == 0x0c00
997 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
998 /* Similarly ignore single loads from the stack. */
1001 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1003 /* Similarly ignore single loads from the stack. */
1006 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2 & 0x8000) == 0x0000)
1009 unsigned int imm = ((bits (insn, 10, 10) << 11)
1010 | (bits (inst2, 12, 14) << 8)
1011 | bits (inst2, 0, 7));
1013 regs[bits (inst2, 8, 11)]
1014 = pv_add_constant (regs[bits (insn, 0, 3)],
1015 thumb_expand_immediate (imm));
1018 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2 & 0x8000) == 0x0000)
1021 unsigned int imm = ((bits (insn, 10, 10) << 11)
1022 | (bits (inst2, 12, 14) << 8)
1023 | bits (inst2, 0, 7));
1025 regs[bits (inst2, 8, 11)]
1026 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1029 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2 & 0x8000) == 0x0000)
1032 unsigned int imm = ((bits (insn, 10, 10) << 11)
1033 | (bits (inst2, 12, 14) << 8)
1034 | bits (inst2, 0, 7));
1036 regs[bits (inst2, 8, 11)]
1037 = pv_add_constant (regs[bits (insn, 0, 3)],
1038 - (CORE_ADDR) thumb_expand_immediate (imm));
1041 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2 & 0x8000) == 0x0000)
1044 unsigned int imm = ((bits (insn, 10, 10) << 11)
1045 | (bits (inst2, 12, 14) << 8)
1046 | bits (inst2, 0, 7));
1048 regs[bits (inst2, 8, 11)]
1049 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1052 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1054 unsigned int imm = ((bits (insn, 10, 10) << 11)
1055 | (bits (inst2, 12, 14) << 8)
1056 | bits (inst2, 0, 7));
1058 regs[bits (inst2, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm));
1062 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1067 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1070 else if (insn == 0xea5f /* mov.w Rd,Rm */
1071 && (inst2 & 0xf0f0) == 0)
1073 int dst_reg = (inst2 & 0x0f00) >> 8;
1074 int src_reg = inst2 & 0xf;
1075 regs[dst_reg] = regs[src_reg];
1078 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1080 /* Constant pool loads. */
1081 unsigned int constant;
1084 offset = bits (inst2, 0, 11);
1086 loc = start + 4 + offset;
1088 loc = start + 4 - offset;
1090 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1091 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1094 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1096 /* Constant pool loads. */
1097 unsigned int constant;
1100 offset = bits (inst2, 0, 7) << 2;
1102 loc = start + 4 + offset;
1104 loc = start + 4 - offset;
1106 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1107 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1109 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1110 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1113 else if (thumb2_instruction_changes_pc (insn, inst2))
1115 /* Don't scan past anything that might change control flow. */
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc = start;
1127 else if (thumb_instruction_changes_pc (insn))
1129 /* Don't scan past anything that might change control flow. */
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc = start;
1143 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch, start));
1146 if (unrecognized_pc == 0)
1147 unrecognized_pc = start;
1151 do_cleanups (back_to);
1152 return unrecognized_pc;
1155 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache->framereg = ARM_FP_REGNUM;
1159 cache->framesize = -regs[ARM_FP_REGNUM].k;
1161 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache->framereg = THUMB_FP_REGNUM;
1165 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache->framereg = ARM_SP_REGNUM;
1171 cache->framesize = -regs[ARM_SP_REGNUM].k;
1174 for (i = 0; i < 16; i++)
1175 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1176 cache->saved_regs[i].addr = offset;
1178 do_cleanups (back_to);
1179 return unrecognized_pc;
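/* A worked illustration of the scan above (hypothetical but representative
   Thumb code):

	push	{r4, r5, r6, r7, lr}	@ encoded 0xb5f0
	sub	sp, #12			@ encoded 0xb083
	add	r7, sp, #0		@ encoded 0xaf00

   The push lowers the tracked SP by 20 bytes and records r4-r7 and lr at
   offsets -20 .. -4 from the entry SP, the sub lowers SP by another 12,
   and the add makes r7 a copy of SP.  With a non-NULL CACHE the result is
   framereg = THUMB_FP_REGNUM (r7) and framesize = 32, and the saved
   register offsets are later rebased against PREV_SP by
   arm_make_prologue_cache.  */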
1183 /* Try to analyze the instructions starting from PC, which load symbol
1184 __stack_chk_guard. Return the address of instruction after loading this
1185 symbol, set the dest register number to *BASEREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if instructions are
   not recognized.  */
1190 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1191 unsigned int *destreg, int *offset)
1193 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1195 unsigned int low, high, address;
1200 unsigned short insn1
1201 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1203 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1205 *destreg = bits (insn1, 8, 10);
1207 address = bits (insn1, 0, 7);
1209 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1211 unsigned short insn2
1212 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1214 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1217 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1219 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1221 /* movt Rd, #const */
1222 if ((insn1 & 0xfbc0) == 0xf2c0)
1224 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1225 *destreg = bits (insn2, 8, 11);
1227 address = (high << 16 | low);
1234 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1236 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1238 address = bits (insn, 0, 11);
1239 *destreg = bits (insn, 12, 15);
1242 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1244 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1247 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1249 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1251 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1252 *destreg = bits (insn, 12, 15);
1254 address = (high << 16 | low);
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence; otherwise, return the
   original PC.

   On arm, this sequence of instructions is composed of mainly three steps:
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, the instructions in step 2 and step 3 are the same on the
   various ARM architectures.  In step 2, it is one instruction,
   'ldr Rx, [Rn, #0]', and in step 3, it is also one instruction,
   'str Rx, [r7, #immd]'.  However, the instructions used in step 1 vary
   across ARM architectures.  On ARMv7, movw/movt are used to load the
   address of the symbol __stack_chk_guard:

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   while older architectures load the address from a literal pool:

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str are very common instructions, we can't use them alone as
   the 'fingerprint' or 'signature' of the stack protector sequence.
   Instead we use the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */
1293 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1295 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1296 unsigned int basereg;
1297 struct bound_minimal_symbol stack_chk_guard;
1299 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1302 /* Try to parse the instructions in Step 1. */
1303 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1308 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* If the symbol's name doesn't start with '__stack_chk_guard', this
     instruction sequence is not for the stack protector.  If the symbol
     has been stripped, we conservatively assume this sequence is for the
     stack protector.  */
1312 if (stack_chk_guard.minsym
1313 && strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1314 "__stack_chk_guard",
1315 strlen ("__stack_chk_guard")) != 0)
1320 unsigned int destreg;
1322 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1324 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1325 if ((insn & 0xf800) != 0x6800)
1327 if (bits (insn, 3, 5) != basereg)
1329 destreg = bits (insn, 0, 2);
1331 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1332 byte_order_for_code);
1333 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1334 if ((insn & 0xf800) != 0x6000)
1336 if (destreg != bits (insn, 0, 2))
1341 unsigned int destreg;
1343 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1345 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1346 if ((insn & 0x0e500000) != 0x04100000)
1348 if (bits (insn, 16, 19) != basereg)
1350 destreg = bits (insn, 12, 15);
1351 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1352 insn = read_memory_unsigned_integer (pc + offset + 4,
1353 4, byte_order_for_code);
1354 if ((insn & 0x0e500000) != 0x04000000)
1356 if (bits (insn, 12, 15) != destreg)
  /* The two ldr/str instructions together are 4 bytes long on Thumb-2,
     but 8 bytes in ARM mode.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
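/* An illustrative Thumb-2 instance of the sequence described above
   (register choices and the frame offset are hypothetical):

	movw	r3, #:lower16:__stack_chk_guard		@ step 1, 4 bytes
	movt	r3, #:upper16:__stack_chk_guard		@ step 1, 4 bytes
	ldr	r3, [r3, #0]				@ step 2, 2 bytes
	str	r3, [r7, #12]				@ step 3, 2 bytes

   arm_analyze_load_stack_chk_guard recognizes the movw/movt pair and
   reports its 8-byte size via OFFSET; the checks above then match the two
   16-bit ldr/str encodings, so the function returns PC + 8 + 4, the
   address just past the whole sequence.  */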
1367 /* Advance the PC across any function entry prologue instructions to
1368 reach some "real" code.
   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
1375 stmfd sp!, {...,fp,ip,lr,pc}
1376 [stfe f7, [sp, #-12]!]
1377 [stfe f6, [sp, #-12]!]
1378 [stfe f5, [sp, #-12]!]
1379 [stfe f4, [sp, #-12]!]
1380 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1383 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1385 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1388 CORE_ADDR func_addr, limit_pc;
1390 /* See if we can determine the end of the prologue via the symbol table.
1391 If so, then return either PC, or the PC after the prologue, whichever
1393 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1395 CORE_ADDR post_prologue_pc
1396 = skip_prologue_using_sal (gdbarch, func_addr);
1397 struct symtab *s = find_pc_symtab (func_addr);
1399 if (post_prologue_pc)
1401 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1404 /* GCC always emits a line note before the prologue and another
1405 one after, even if the two are at the same address or on the
1406 same line. Take advantage of this so that we do not need to
1407 know every instruction that might appear in the prologue. We
1408 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
1410 if (post_prologue_pc
1412 || s->producer == NULL
1413 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1414 || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1415 return post_prologue_pc;
1417 if (post_prologue_pc != 0)
1419 CORE_ADDR analyzed_limit;
1421 /* For non-GCC compilers, make sure the entire line is an
1422 acceptable prologue; GDB will round this function's
1423 return value up to the end of the following line so we
1424 can not skip just part of a line (and we do not want to).
1426 RealView does not treat the prologue specially, but does
1427 associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
1430 if (arm_pc_is_thumb (gdbarch, func_addr))
1431 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1432 post_prologue_pc, NULL);
1434 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1435 post_prologue_pc, NULL);
1437 if (analyzed_limit != post_prologue_pc)
1440 return post_prologue_pc;
  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */
1447 /* Find an upper limit on the function prologue using the debug
1448 information. If the debug information could not be used to provide
1449 that bound, then use an arbitrary large number as the upper bound. */
1450 /* Like arm_scan_prologue, stop no later than pc + 64. */
1451 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1453 limit_pc = pc + 64; /* Magic. */
1456 /* Check if this is Thumb code. */
1457 if (arm_pc_is_thumb (gdbarch, pc))
1458 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1460 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1462 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1464 /* "mov ip, sp" is no longer a required part of the prologue. */
1465 if (inst == 0xe1a0c00d) /* mov ip, sp */
1468 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1471 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1474 /* Some prologues begin with "str lr, [sp, #-4]!". */
1475 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1478 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1481 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1484 /* Any insns after this point may float into the code, if it makes
1485 for better instruction scheduling, so we skip them only if we
1486 find them, but still consider the function to be frame-ful. */
1488 /* We may have either one sfmfd instruction here, or several stfe
	 insns, depending on the version of floating point code we
	 support.  */
1491 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1494 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1497 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1500 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1503 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1504 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1505 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1508 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1509 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1510 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1513 /* Un-recognized instruction; stop scanning. */
1517 return skip_pc; /* End of prologue. */
1521 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1522 This function decodes a Thumb function prologue to determine:
1523 1) the size of the stack frame
1524 2) which registers are saved on it
1525 3) the offsets of saved regs
1526 4) the offset from the stack pointer to the frame pointer
1528 A typical Thumb function prologue would create this stack frame
1529 (offsets relative to FP)
     old SP ->	24  stack parameters
		20  LR
		16  R7
1533 R7 -> 0 local variables (16 bytes)
1534 SP -> -12 additional stack space (12 bytes)
1535 The frame size would thus be 36 bytes, and the frame offset would be
1536 12 bytes. The frame register is R7.
1538 The comments for thumb_skip_prolog() describe the algorithm we use
1539 to detect the end of the prolog. */
1543 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1544 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1546 CORE_ADDR prologue_start;
1547 CORE_ADDR prologue_end;
1549 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1552 /* See comment in arm_scan_prologue for an explanation of
1554 if (prologue_end > prologue_start + 64)
1556 prologue_end = prologue_start + 64;
1560 /* We're in the boondocks: we have no idea where the start of the
1564 prologue_end = min (prologue_end, prev_pc);
1566 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1569 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1572 arm_instruction_changes_pc (uint32_t this_instr)
1574 if (bits (this_instr, 28, 31) == INST_NV)
1575 /* Unconditional instructions. */
1576 switch (bits (this_instr, 24, 27))
1580 /* Branch with Link and change to Thumb. */
1585 /* Coprocessor register transfer. */
1586 if (bits (this_instr, 12, 15) == 15)
1587 error (_("Invalid update to pc in instruction"));
1593 switch (bits (this_instr, 25, 27))
1596 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1598 /* Multiplies and extra load/stores. */
1599 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;
1604 /* Otherwise, miscellaneous instructions. */
1606 /* BX <reg>, BXJ <reg>, BLX <reg> */
1607 if (bits (this_instr, 4, 27) == 0x12fff1
1608 || bits (this_instr, 4, 27) == 0x12fff2
1609 || bits (this_instr, 4, 27) == 0x12fff3)
	  /* Other miscellaneous instructions are unpredictable if they
	     modify PC.  */
	  return 0;
1616 /* Data processing instruction. Fall through. */
1619 if (bits (this_instr, 12, 15) == 15)
1626 /* Media instructions and architecturally undefined instructions. */
1627 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1631 if (bit (this_instr, 20) == 0)
1635 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1641 /* Load/store multiple. */
1642 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1648 /* Branch and branch with link. */
1653 /* Coprocessor transfers or SWIs can not affect PC. */
1657 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1661 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1662 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.
1666 We recognize all the instructions typically found in ARM prologues,
1667 plus harmless instructions which can be skipped (either for analysis
1668 purposes, or a more restrictive set that can be skipped when finding
1669 the end of the prologue). */
1672 arm_analyze_prologue (struct gdbarch *gdbarch,
1673 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1674 struct arm_prologue_cache *cache)
1676 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1677 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1679 CORE_ADDR offset, current_pc;
1680 pv_t regs[ARM_FPS_REGNUM];
1681 struct pv_area *stack;
1682 struct cleanup *back_to;
1683 int framereg, framesize;
1684 CORE_ADDR unrecognized_pc = 0;
1686 /* Search the prologue looking for instructions that set up the
1687 frame pointer, adjust the stack pointer, and save registers.
1689 Be careful, however, and if it doesn't look like a prologue,
1690 don't try to scan it. If, for instance, a frameless function
1691 begins with stmfd sp!, then we will tell ourselves there is
1692 a frame, which will confuse stack traceback, as well as "finish"
1693 and other operations that rely on a knowledge of the stack
1696 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1697 regs[regno] = pv_register (regno, 0);
1698 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1699 back_to = make_cleanup_free_pv_area (stack);
1701 for (current_pc = prologue_start;
1702 current_pc < prologue_end;
1706 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1708 if (insn == 0xe1a0c00d) /* mov ip, sp */
1710 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1713 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1714 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1716 unsigned imm = insn & 0xff; /* immediate value */
1717 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1718 int rd = bits (insn, 12, 15);
1719 imm = (imm >> rot) | (imm << (32 - rot));
1720 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1723 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1724 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1726 unsigned imm = insn & 0xff; /* immediate value */
1727 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1728 int rd = bits (insn, 12, 15);
1729 imm = (imm >> rot) | (imm << (32 - rot));
1730 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1733 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1736 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1738 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1739 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1740 regs[bits (insn, 12, 15)]);
1743 else if ((insn & 0xffff0000) == 0xe92d0000)
1744 /* stmfd sp!, {..., fp, ip, lr, pc}
1746 stmfd sp!, {a1, a2, a3, a4} */
1748 int mask = insn & 0xffff;
1750 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1753 /* Calculate offsets of saved registers. */
1754 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1755 if (mask & (1 << regno))
1758 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1759 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1762 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1763 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1764 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1766 /* No need to add this to saved_regs -- it's just an arg reg. */
1769 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1770 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1771 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1773 /* No need to add this to saved_regs -- it's just an arg reg. */
1776 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1778 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1780 /* No need to add this to saved_regs -- it's just arg regs. */
1783 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1785 unsigned imm = insn & 0xff; /* immediate value */
1786 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1787 imm = (imm >> rot) | (imm << (32 - rot));
1788 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1790 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1792 unsigned imm = insn & 0xff; /* immediate value */
1793 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1794 imm = (imm >> rot) | (imm << (32 - rot));
1795 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1797 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1799 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1801 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1804 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1805 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1806 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1808 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1810 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1812 int n_saved_fp_regs;
1813 unsigned int fp_start_reg, fp_bound_reg;
1815 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1818 if ((insn & 0x800) == 0x800) /* N0 is set */
1820 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1821 n_saved_fp_regs = 3;
1823 n_saved_fp_regs = 1;
1827 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1828 n_saved_fp_regs = 2;
1830 n_saved_fp_regs = 4;
1833 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1834 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1835 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1837 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1838 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1839 regs[fp_start_reg++]);
1842 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1844 /* Allow some special function calls when skipping the
1845 prologue; GCC generates these before storing arguments to
1847 CORE_ADDR dest = BranchDest (current_pc, insn);
1849 if (skip_prologue_function (gdbarch, dest, 0))
1854 else if ((insn & 0xf0000000) != 0xe0000000)
1855 break; /* Condition not true, exit early. */
1856 else if (arm_instruction_changes_pc (insn))
1857 /* Don't scan past anything that might change control flow. */
1859 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1860 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1861 /* Ignore block loads from the stack, potentially copying
1862 parameters from memory. */
1864 else if ((insn & 0xfc500000) == 0xe4100000
1865 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1866 /* Similarly ignore single loads from the stack. */
1868 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1869 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1870 register instead of the stack. */
1874 /* The optimizer might shove anything into the prologue,
1875 so we just skip what we don't recognize. */
1876 unrecognized_pc = current_pc;
1881 if (unrecognized_pc == 0)
1882 unrecognized_pc = current_pc;
1884 /* The frame size is just the distance from the frame register
1885 to the original stack pointer. */
1886 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1888 /* Frame pointer is fp. */
1889 framereg = ARM_FP_REGNUM;
1890 framesize = -regs[ARM_FP_REGNUM].k;
1894 /* Try the stack pointer... this is a bit desperate. */
1895 framereg = ARM_SP_REGNUM;
1896 framesize = -regs[ARM_SP_REGNUM].k;
1901 cache->framereg = framereg;
1902 cache->framesize = framesize;
1904 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1905 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1906 cache->saved_regs[regno].addr = offset;
1910 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1911 paddress (gdbarch, unrecognized_pc));
1913 do_cleanups (back_to);
1914 return unrecognized_pc;
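/* As a worked illustration (hypothetical code) of the scan above,
   consider the classic APCS prologue:

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #16

   The stmfd stores PC, LR, IP and FP at entry-SP - 4, -8, -12 and -16 and
   leaves SP at entry-SP - 16; "sub fp, ip, #4" therefore makes FP equal to
   entry-SP - 4, and the final sub drops SP to entry-SP - 32.  The analysis
   thus reports framereg = ARM_FP_REGNUM and framesize = 4, and records the
   four saved registers at offsets -16 .. -4 from the entry SP, which
   arm_make_prologue_cache later rebases against PREV_SP.  */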
1918 arm_scan_prologue (struct frame_info *this_frame,
1919 struct arm_prologue_cache *cache)
1921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1922 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1924 CORE_ADDR prologue_start, prologue_end, current_pc;
1925 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1926 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1927 pv_t regs[ARM_FPS_REGNUM];
1928 struct pv_area *stack;
1929 struct cleanup *back_to;
1932 /* Assume there is no frame until proven otherwise. */
1933 cache->framereg = ARM_SP_REGNUM;
1934 cache->framesize = 0;
1936 /* Check for Thumb prologue. */
1937 if (arm_frame_is_thumb (this_frame))
1939 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1943 /* Find the function prologue. If we can't find the function in
1944 the symbol table, peek in the stack frame to find the PC. */
1945 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1948 /* One way to find the end of the prologue (which works well
1949 for unoptimized code) is to do the following:
1951 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1954 prologue_end = prev_pc;
1955 else if (sal.end < prologue_end)
1956 prologue_end = sal.end;
1958 This mechanism is very accurate so long as the optimizer
1959 doesn't move any instructions from the function body into the
1960 prologue. If this happens, sal.end will be the last
1961 instruction in the first hunk of prologue code just before
1962 the first instruction that the scheduler has moved from
1963 the body to the prologue.
1965 In order to make sure that we scan all of the prologue
1966 instructions, we use a slightly less accurate mechanism which
1967 may scan more than necessary. To help compensate for this
1968 lack of accuracy, the prologue scanning loop below contains
1969 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The expression

	      prologue_start + 64

	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */
1980 if (prologue_end > prologue_start + 64)
1982 prologue_end = prologue_start + 64; /* See above. */
1987 /* We have no symbol information. Our only option is to assume this
1988 function has a standard stack frame and the normal frame register.
1989 Then, we can find the value of our frame pointer on entrance to
1990 the callee (or at the present moment if this is the innermost frame).
1991 The value stored there should be the address of the stmfd + 8. */
1992 CORE_ADDR frame_loc;
1993 LONGEST return_value;
1995 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1996 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
2000 prologue_start = gdbarch_addr_bits_remove
2001 (gdbarch, return_value) - 8;
2002 prologue_end = prologue_start + 64; /* See above. */
2006 if (prev_pc < prologue_end)
2007 prologue_end = prev_pc;
2009 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2012 static struct arm_prologue_cache *
2013 arm_make_prologue_cache (struct frame_info *this_frame)
2016 struct arm_prologue_cache *cache;
2017 CORE_ADDR unwound_fp;
2019 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2020 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2022 arm_scan_prologue (this_frame, cache);
2024 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2025 if (unwound_fp == 0)
2028 cache->prev_sp = unwound_fp + cache->framesize;
2030 /* Calculate actual addresses of saved registers using offsets
2031 determined by arm_scan_prologue. */
2032 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2033 if (trad_frame_addr_p (cache->saved_regs, reg))
2034 cache->saved_regs[reg].addr += cache->prev_sp;
2039 /* Our frame ID for a normal frame is the current function's starting PC
2040 and the caller's SP when we were called. */
2043 arm_prologue_this_id (struct frame_info *this_frame,
2045 struct frame_id *this_id)
2047 struct arm_prologue_cache *cache;
2051 if (*this_cache == NULL)
2052 *this_cache = arm_make_prologue_cache (this_frame);
2053 cache = *this_cache;
2055 /* This is meant to halt the backtrace at "_start". */
2056 pc = get_frame_pc (this_frame);
2057 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2060 /* If we've hit a wall, stop. */
2061 if (cache->prev_sp == 0)
2064 /* Use function start address as part of the frame ID. If we cannot
2065 identify the start address (due to missing symbol information),
2066 fall back to just using the current PC. */
2067 func = get_frame_func (this_frame);
2071 id = frame_id_build (cache->prev_sp, func);
2075 static struct value *
2076 arm_prologue_prev_register (struct frame_info *this_frame,
2080 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2081 struct arm_prologue_cache *cache;
2083 if (*this_cache == NULL)
2084 *this_cache = arm_make_prologue_cache (this_frame);
2085 cache = *this_cache;
2087 /* If we are asked to unwind the PC, then we need to return the LR
2088 instead. The prologue may save PC, but it will point into this
2089 frame's prologue, not the next frame's resume location. Also
2090 strip the saved T bit. A valid LR may have the low bit set, but
2091 a valid PC never does. */
2092 if (prev_regnum == ARM_PC_REGNUM)
2096 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2097 return frame_unwind_got_constant (this_frame, prev_regnum,
2098 arm_addr_bits_remove (gdbarch, lr));
2101 /* SP is generally not saved to the stack, but this frame is
2102 identified by the next frame's stack pointer at the time of the call.
2103 The value was already reconstructed into PREV_SP. */
2104 if (prev_regnum == ARM_SP_REGNUM)
2105 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2107 /* The CPSR may have been changed by the call instruction and by the
2108 called function. The only bit we can reconstruct is the T bit,
2109 by checking the low bit of LR as of the call. This is a reliable
2110 indicator of Thumb-ness except for some ARM v4T pre-interworking
2111 Thumb code, which could get away with a clear low bit as long as
2112 the called function did not use bx. Guess that all other
2113 bits are unchanged; the condition flags are presumably lost,
2114 but the processor status is likely valid. */
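/* Illustrative example (added for exposition, not part of the original
   sources): if the unwound LR reads 0x00010235, its low bit is set, so
   the reconstruction below reports the caller as Thumb and sets the T
   bit in the returned CPSR; an LR of 0x00010234 would instead clear it,
   marking the caller as executing in ARM state.  */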
2115 if (prev_regnum == ARM_PS_REGNUM)
2118 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2120 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2121 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2122 if (IS_THUMB_ADDR (lr))
2126 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2129 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2133 struct frame_unwind arm_prologue_unwind = {
2135 default_frame_unwind_stop_reason,
2136 arm_prologue_this_id,
2137 arm_prologue_prev_register,
2139 default_frame_sniffer
2142 /* Maintain a list of ARM exception table entries per objfile, similar to the
2143 list of mapping symbols. We only cache entries for standard ARM-defined
2144 personality routines; the cache will contain only the frame unwinding
2145 instructions associated with the entry (not the descriptors). */
2147 static const struct objfile_data *arm_exidx_data_key;
2149 struct arm_exidx_entry
2154 typedef struct arm_exidx_entry arm_exidx_entry_s;
2155 DEF_VEC_O(arm_exidx_entry_s);
2157 struct arm_exidx_data
2159 VEC(arm_exidx_entry_s) **section_maps;
2163 arm_exidx_data_free (struct objfile *objfile, void *arg)
2165 struct arm_exidx_data *data = arg;
2168 for (i = 0; i < objfile->obfd->section_count; i++)
2169 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2173 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2174 const struct arm_exidx_entry *rhs)
2176 return lhs->addr < rhs->addr;
2179 static struct obj_section *
2180 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2182 struct obj_section *osect;
2184 ALL_OBJFILE_OSECTIONS (objfile, osect)
2185 if (bfd_get_section_flags (objfile->obfd,
2186 osect->the_bfd_section) & SEC_ALLOC)
2188 bfd_vma start, size;
2189 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2190 size = bfd_get_section_size (osect->the_bfd_section);
2192 if (start <= vma && vma < start + size)
2199 /* Parse contents of exception table and exception index sections
2200 of OBJFILE, and fill in the exception table entry cache.
2202 For each entry that refers to a standard ARM-defined personality
2203 routine, extract the frame unwinding instructions (from either
2204 the index or the table section). The unwinding instructions are normalized by:
2206 - extracting them from the rest of the table data
2207 - converting to host endianness
2208 - appending the implicit 0xb0 ("Finish") code
2210 The extracted and normalized instructions are stored for later
2211 retrieval by the arm_find_exidx_entry routine. */
2214 arm_exidx_new_objfile (struct objfile *objfile)
2216 struct cleanup *cleanups;
2217 struct arm_exidx_data *data;
2218 asection *exidx, *extab;
2219 bfd_vma exidx_vma = 0, extab_vma = 0;
2220 bfd_size_type exidx_size = 0, extab_size = 0;
2221 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2224 /* If we've already touched this file, do nothing. */
2225 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2227 cleanups = make_cleanup (null_cleanup, NULL);
2229 /* Read contents of exception table and index. */
2230 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2233 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2234 exidx_size = bfd_get_section_size (exidx);
2235 exidx_data = xmalloc (exidx_size);
2236 make_cleanup (xfree, exidx_data);
2238 if (!bfd_get_section_contents (objfile->obfd, exidx,
2239 exidx_data, 0, exidx_size))
2241 do_cleanups (cleanups);
2246 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2249 extab_vma = bfd_section_vma (objfile->obfd, extab);
2250 extab_size = bfd_get_section_size (extab);
2251 extab_data = xmalloc (extab_size);
2252 make_cleanup (xfree, extab_data);
2254 if (!bfd_get_section_contents (objfile->obfd, extab,
2255 extab_data, 0, extab_size))
2257 do_cleanups (cleanups);
2262 /* Allocate exception table data structure. */
2263 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2264 set_objfile_data (objfile, arm_exidx_data_key, data);
2265 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2266 objfile->obfd->section_count,
2267 VEC(arm_exidx_entry_s) *);
2269 /* Fill in exception table. */
2270 for (i = 0; i < exidx_size / 8; i++)
2272 struct arm_exidx_entry new_exidx_entry;
2273 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2274 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2275 bfd_vma addr = 0, word = 0;
2276 int n_bytes = 0, n_words = 0;
2277 struct obj_section *sec;
2278 gdb_byte *entry = NULL;
2280 /* Extract address of start of function. */
2281 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2282 idx += exidx_vma + i * 8;
2284 /* Find section containing function and compute section offset. */
2285 sec = arm_obj_section_from_vma (objfile, idx);
2288 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2290 /* Determine address of exception table entry. */
2293 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2295 else if ((val & 0xff000000) == 0x80000000)
2297 /* Exception table entry embedded in .ARM.exidx
2298 -- must be short form. */
2302 else if (!(val & 0x80000000))
2304 /* Exception table entry in .ARM.extab. */
2305 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2306 addr += exidx_vma + i * 8 + 4;
2308 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2310 word = bfd_h_get_32 (objfile->obfd,
2311 extab_data + addr - extab_vma);
2314 if ((word & 0xff000000) == 0x80000000)
2319 else if ((word & 0xff000000) == 0x81000000
2320 || (word & 0xff000000) == 0x82000000)
2324 n_words = ((word >> 16) & 0xff);
2326 else if (!(word & 0x80000000))
2329 struct obj_section *pers_sec;
2330 int gnu_personality = 0;
2332 /* Custom personality routine. */
2333 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2334 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2336 /* Check whether we've got one of the variants of the
2337 GNU personality routines. */
2338 pers_sec = arm_obj_section_from_vma (objfile, pers);
2341 static const char *personality[] =
2343 "__gcc_personality_v0",
2344 "__gxx_personality_v0",
2345 "__gcj_personality_v0",
2346 "__gnu_objc_personality_v0",
2350 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2353 for (k = 0; personality[k]; k++)
2354 if (lookup_minimal_symbol_by_pc_name
2355 (pc, personality[k], objfile))
2357 gnu_personality = 1;
2362 /* If so, the next word contains a word count in the high
2363 byte, followed by the same unwind instructions as the
2364 pre-defined forms. */
2366 && addr + 4 <= extab_vma + extab_size)
2368 word = bfd_h_get_32 (objfile->obfd,
2369 extab_data + addr - extab_vma);
2372 n_words = ((word >> 24) & 0xff);
2378 /* Sanity check address. */
2380 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2381 n_words = n_bytes = 0;
2383 /* The unwind instructions reside in WORD (only the N_BYTES least
2384 significant bytes are valid), followed by N_WORDS words in the
2385 extab section starting at ADDR. */
2386 if (n_bytes || n_words)
2388 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2389 n_bytes + n_words * 4 + 1);
2392 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2396 word = bfd_h_get_32 (objfile->obfd,
2397 extab_data + addr - extab_vma);
2400 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2401 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2402 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2403 *p++ = (gdb_byte) (word & 0xff);
2406 /* Implied "Finish" to terminate the list. */
2410 /* Push entry onto vector. They are guaranteed to always
2411 appear in order of increasing addresses. */
2412 new_exidx_entry.addr = idx;
2413 new_exidx_entry.entry = entry;
2414 VEC_safe_push (arm_exidx_entry_s,
2415 data->section_maps[sec->the_bfd_section->index],
2419 do_cleanups (cleanups);
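/* Illustrative sketch (added for exposition, not part of the original
   sources): the first word of each .ARM.exidx entry, and a table-entry
   pointer of the form handled above, are "prel31" values: 31-bit signed
   offsets relative to the place where they are stored.  A minimal
   standalone decoder, assuming ordinary two's-complement wrap-around
   arithmetic on bfd_vma, would look like:

     static bfd_vma
     example_prel31_to_vma (bfd_vma field, bfd_vma place)
     {
       bfd_vma offset = ((field & 0x7fffffff) ^ 0x40000000) - 0x40000000;
       return place + offset;
     }

   This is the same ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000
   computation applied to IDX and VAL in the loop above.  */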
2422 /* Search for the exception table entry covering MEMADDR. If one is found,
2423 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2424 set *START to the start of the region covered by this entry. */
2427 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2429 struct obj_section *sec;
2431 sec = find_pc_section (memaddr);
2434 struct arm_exidx_data *data;
2435 VEC(arm_exidx_entry_s) *map;
2436 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2439 data = objfile_data (sec->objfile, arm_exidx_data_key);
2442 map = data->section_maps[sec->the_bfd_section->index];
2443 if (!VEC_empty (arm_exidx_entry_s, map))
2445 struct arm_exidx_entry *map_sym;
2447 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2448 arm_compare_exidx_entries);
2450 /* VEC_lower_bound finds the earliest ordered insertion
2451 point. If the following symbol starts at this exact
2452 address, we use that; otherwise, the preceding
2453 exception table entry covers this address. */
2454 if (idx < VEC_length (arm_exidx_entry_s, map))
2456 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2457 if (map_sym->addr == map_key.addr)
2460 *start = map_sym->addr + obj_section_addr (sec);
2461 return map_sym->entry;
2467 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2469 *start = map_sym->addr + obj_section_addr (sec);
2470 return map_sym->entry;
2479 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2480 instruction list from the ARM exception table entry ENTRY, allocate and
2481 return a prologue cache structure describing how to unwind this frame.
2483 Return NULL if the unwinding instruction list contains a "spare",
2484 "reserved" or "refuse to unwind" instruction as defined in section
2485 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2486 for the ARM Architecture" document. */
2488 static struct arm_prologue_cache *
2489 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2494 struct arm_prologue_cache *cache;
2495 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2496 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2502 /* Whenever we reload SP, we actually have to retrieve its
2503 actual value in the current frame. */
2506 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2508 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2509 vsp = get_frame_register_unsigned (this_frame, reg);
2513 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2514 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2520 /* Decode next unwind instruction. */
2523 if ((insn & 0xc0) == 0)
2525 int offset = insn & 0x3f;
2526 vsp += (offset << 2) + 4;
2528 else if ((insn & 0xc0) == 0x40)
2530 int offset = insn & 0x3f;
2531 vsp -= (offset << 2) + 4;
2533 else if ((insn & 0xf0) == 0x80)
2535 int mask = ((insn & 0xf) << 8) | *entry++;
2538 /* The special case of an all-zero mask identifies
2539 "Refuse to unwind". We return NULL to fall back
2540 to the prologue analyzer. */
2544 /* Pop registers r4..r15 under mask. */
2545 for (i = 0; i < 12; i++)
2546 if (mask & (1 << i))
2548 cache->saved_regs[4 + i].addr = vsp;
2552 /* Special-case popping SP -- we need to reload vsp. */
2553 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2556 else if ((insn & 0xf0) == 0x90)
2558 int reg = insn & 0xf;
2560 /* Reserved cases. */
2561 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2564 /* Set SP from another register and mark VSP for reload. */
2565 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2568 else if ((insn & 0xf0) == 0xa0)
2570 int count = insn & 0x7;
2571 int pop_lr = (insn & 0x8) != 0;
2574 /* Pop r4..r[4+count]. */
2575 for (i = 0; i <= count; i++)
2577 cache->saved_regs[4 + i].addr = vsp;
2581 /* If indicated by flag, pop LR as well. */
2584 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2588 else if (insn == 0xb0)
2590 /* We could only have updated PC by popping into it; if so, it
2591 will show up as an address. Otherwise, copy LR into PC. */
2592 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2593 cache->saved_regs[ARM_PC_REGNUM]
2594 = cache->saved_regs[ARM_LR_REGNUM];
2599 else if (insn == 0xb1)
2601 int mask = *entry++;
2604 /* All-zero mask and mask >= 16 is "spare". */
2605 if (mask == 0 || mask >= 16)
2608 /* Pop r0..r3 under mask. */
2609 for (i = 0; i < 4; i++)
2610 if (mask & (1 << i))
2612 cache->saved_regs[i].addr = vsp;
2616 else if (insn == 0xb2)
2618 ULONGEST offset = 0;
2623 offset |= (*entry & 0x7f) << shift;
2626 while (*entry++ & 0x80);
2628 vsp += 0x204 + (offset << 2);
2630 else if (insn == 0xb3)
2632 int start = *entry >> 4;
2633 int count = (*entry++) & 0xf;
2636 /* Only registers D0..D15 are valid here. */
2637 if (start + count >= 16)
2640 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2641 for (i = 0; i <= count; i++)
2643 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2647 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2650 else if ((insn & 0xf8) == 0xb8)
2652 int count = insn & 0x7;
2655 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2656 for (i = 0; i <= count; i++)
2658 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2662 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2665 else if (insn == 0xc6)
2667 int start = *entry >> 4;
2668 int count = (*entry++) & 0xf;
2671 /* Only registers WR0..WR15 are valid. */
2672 if (start + count >= 16)
2675 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2676 for (i = 0; i <= count; i++)
2678 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2682 else if (insn == 0xc7)
2684 int mask = *entry++;
2687 /* All-zero mask and mask >= 16 is "spare". */
2688 if (mask == 0 || mask >= 16)
2691 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2692 for (i = 0; i < 4; i++)
2693 if (mask & (1 << i))
2695 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2699 else if ((insn & 0xf8) == 0xc0)
2701 int count = insn & 0x7;
2704 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2705 for (i = 0; i <= count; i++)
2707 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2711 else if (insn == 0xc8)
2713 int start = *entry >> 4;
2714 int count = (*entry++) & 0xf;
2717 /* Only registers D0..D31 are valid. */
2718 if (start + count >= 16)
2721 /* Pop VFP double-precision registers
2722 D[16+start]..D[16+start+count]. */
2723 for (i = 0; i <= count; i++)
2725 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2729 else if (insn == 0xc9)
2731 int start = *entry >> 4;
2732 int count = (*entry++) & 0xf;
2735 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2736 for (i = 0; i <= count; i++)
2738 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2742 else if ((insn & 0xf8) == 0xd0)
2744 int count = insn & 0x7;
2747 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2748 for (i = 0; i <= count; i++)
2750 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2756 /* Everything else is "spare". */
2761 /* If we restore SP from a register, assume this was the frame register.
2762 Otherwise just fall back to SP as frame register. */
2763 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2764 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2766 cache->framereg = ARM_SP_REGNUM;
2768 /* Determine offset to previous frame. */
2770 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2772 /* We already got the previous SP. */
2773 cache->prev_sp = vsp;
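/* Worked example (added for exposition, not part of the original
   sources): consider the normalized instruction list { 0xa8, 0xb0 },
   which corresponds to a prologue of "push {r4, lr}".  Starting with
   VSP equal to this frame's SP, the decoder above processes it as:

     0xa8 : pop {r4, r14} -> r4 saved at [VSP], LR saved at [VSP + 4],
                             VSP += 8
     0xb0 : finish        -> PC was not popped, so the return address is
                             taken from the saved LR, and decoding stops

   leaving PREV_SP = SP + 8, with the two stacked words describing where
   r4 and the return address can be recovered.  */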
2778 /* Unwinding via ARM exception table entries. Note that the sniffer
2779 already computes a filled-in prologue cache, which is then used
2780 with the same arm_prologue_this_id and arm_prologue_prev_register
2781 routines also used for prologue-parsing based unwinding. */
2784 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2785 struct frame_info *this_frame,
2786 void **this_prologue_cache)
2788 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2789 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2790 CORE_ADDR addr_in_block, exidx_region, func_start;
2791 struct arm_prologue_cache *cache;
2794 /* See if we have an ARM exception table entry covering this address. */
2795 addr_in_block = get_frame_address_in_block (this_frame);
2796 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2800 /* The ARM exception table does not describe unwind information
2801 for arbitrary PC values, but is guaranteed to be correct only
2802 at call sites. We have to decide here whether we want to use
2803 ARM exception table information for this frame, or fall back
2804 to using prologue parsing. (Note that if we have DWARF CFI,
2805 this sniffer isn't even called -- CFI is always preferred.)
2807 Before we make this decision, however, we check whether we
2808 actually have *symbol* information for the current frame.
2809 If not, prologue parsing would not work anyway, so we might
2810 as well use the exception table and hope for the best. */
2811 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2815 /* If the next frame is "normal", we are at a call site in this
2816 frame, so exception information is guaranteed to be valid. */
2817 if (get_next_frame (this_frame)
2818 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2821 /* We also assume exception information is valid if we're currently
2822 blocked in a system call. The system library is supposed to
2823 ensure this, so that e.g. pthread cancellation works. */
2824 if (arm_frame_is_thumb (this_frame))
2828 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2829 byte_order_for_code, &insn)
2830 && (insn & 0xff00) == 0xdf00 /* svc */)
2837 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2838 byte_order_for_code, &insn)
2839 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2843 /* Bail out if we don't know that exception information is valid. */
2847 /* The ARM exception index does not mark the *end* of the region
2848 covered by the entry, and some functions will not have any entry.
2849 To correctly recognize the end of the covered region, the linker
2850 should have inserted dummy records with a CANTUNWIND marker.
2852 Unfortunately, current versions of GNU ld do not reliably do
2853 this, and thus we may have found an incorrect entry above.
2854 As a (temporary) sanity check, we only use the entry if it
2855 lies *within* the bounds of the function. Note that this check
2856 might reject perfectly valid entries that just happen to cover
2857 multiple functions; therefore this check ought to be removed
2858 once the linker is fixed. */
2859 if (func_start > exidx_region)
2863 /* Decode the list of unwinding instructions into a prologue cache.
2864 Note that this may fail due to e.g. a "refuse to unwind" code. */
2865 cache = arm_exidx_fill_cache (this_frame, entry);
2869 *this_prologue_cache = cache;
2873 struct frame_unwind arm_exidx_unwind = {
2875 default_frame_unwind_stop_reason,
2876 arm_prologue_this_id,
2877 arm_prologue_prev_register,
2879 arm_exidx_unwind_sniffer
2882 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2883 trampoline, return the target PC. Otherwise return 0.
2885 void call0a (char c, short s, int i, long l) {}
2889 (*pointer_to_call0a) (c, s, i, l);
2892 Instead of calling a stub library function _call_via_xx (xx is
2893 the register name), GCC may inline the trampoline in the object
2894 file as below (register r2 has the address of call0a).
2897 .type main, %function
2906 The trampoline 'bx r2' doesn't belong to main. */
2909 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2911 /* The heuristic for recognizing such a trampoline is that FRAME is
2912 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2913 if (arm_frame_is_thumb (frame))
2917 if (target_read_memory (pc, buf, 2) == 0)
2919 struct gdbarch *gdbarch = get_frame_arch (frame);
2920 enum bfd_endian byte_order_for_code
2921 = gdbarch_byte_order_for_code (gdbarch);
2923 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2925 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2928 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2930 /* Clear the LSB so that the GDB core sets the step-resume
2931 breakpoint at the right address. */
2932 return UNMAKE_THUMB_ADDR (dest);
2940 static struct arm_prologue_cache *
2941 arm_make_stub_cache (struct frame_info *this_frame)
2943 struct arm_prologue_cache *cache;
2945 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2946 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2948 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2953 /* Our frame ID for a stub frame is the current SP and PC. */
2956 arm_stub_this_id (struct frame_info *this_frame,
2958 struct frame_id *this_id)
2960 struct arm_prologue_cache *cache;
2962 if (*this_cache == NULL)
2963 *this_cache = arm_make_stub_cache (this_frame);
2964 cache = *this_cache;
2966 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2970 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2971 struct frame_info *this_frame,
2972 void **this_prologue_cache)
2974 CORE_ADDR addr_in_block;
2976 CORE_ADDR pc, start_addr;
2979 addr_in_block = get_frame_address_in_block (this_frame);
2980 pc = get_frame_pc (this_frame);
2981 if (in_plt_section (addr_in_block)
2982 /* We also use the stub unwinder if the target memory is unreadable
2983 to avoid having the prologue unwinder try to read it. */
2984 || target_read_memory (pc, dummy, 4) != 0)
2987 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2988 && arm_skip_bx_reg (this_frame, pc) != 0)
2994 struct frame_unwind arm_stub_unwind = {
2996 default_frame_unwind_stop_reason,
2998 arm_prologue_prev_register,
3000 arm_stub_unwind_sniffer
3003 /* Put here the code to store, into CACHE->saved_regs, the addresses
3004 of the saved registers of the frame described by THIS_FRAME. The filled-in CACHE is returned. */
3007 static struct arm_prologue_cache *
3008 arm_m_exception_cache (struct frame_info *this_frame)
3010 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3011 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3012 struct arm_prologue_cache *cache;
3013 CORE_ADDR unwound_sp;
3016 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3017 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3019 unwound_sp = get_frame_register_unsigned (this_frame,
3022 /* The hardware saves eight 32-bit words, comprising xPSR,
3023 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3024 "B1.5.6 Exception entry behavior" in
3025 "ARMv7-M Architecture Reference Manual". */
3026 cache->saved_regs[0].addr = unwound_sp;
3027 cache->saved_regs[1].addr = unwound_sp + 4;
3028 cache->saved_regs[2].addr = unwound_sp + 8;
3029 cache->saved_regs[3].addr = unwound_sp + 12;
3030 cache->saved_regs[12].addr = unwound_sp + 16;
3031 cache->saved_regs[14].addr = unwound_sp + 20;
3032 cache->saved_regs[15].addr = unwound_sp + 24;
3033 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3035 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3036 aligner between the top of the 32-byte stack frame and the
3037 previous context's stack pointer. */
3038 cache->prev_sp = unwound_sp + 32;
3039 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3040 && (xpsr & (1 << 9)) != 0)
3041 cache->prev_sp += 4;
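/* Worked example (added for exposition, not part of the original
   sources): if the word at UNWOUND_SP + 28 reads 0x01000200, bit 9 of
   the stacked xPSR is set, so the hardware inserted a four-byte aligner
   and the pre-exception SP was UNWOUND_SP + 36; with bit 9 clear it
   would simply be UNWOUND_SP + 32.  */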
3046 /* Implementation of function hook 'this_id' in
3047 'struct frame_unwind'. */
3050 arm_m_exception_this_id (struct frame_info *this_frame,
3052 struct frame_id *this_id)
3054 struct arm_prologue_cache *cache;
3056 if (*this_cache == NULL)
3057 *this_cache = arm_m_exception_cache (this_frame);
3058 cache = *this_cache;
3060 /* Our frame ID for an exception frame is the unwound SP and the current PC. */
3061 *this_id = frame_id_build (cache->prev_sp,
3062 get_frame_pc (this_frame));
3065 /* Implementation of function hook 'prev_register' in
3066 'struct frame_unwind'. */
3068 static struct value *
3069 arm_m_exception_prev_register (struct frame_info *this_frame,
3073 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3074 struct arm_prologue_cache *cache;
3076 if (*this_cache == NULL)
3077 *this_cache = arm_m_exception_cache (this_frame);
3078 cache = *this_cache;
3080 /* The value was already reconstructed into PREV_SP. */
3081 if (prev_regnum == ARM_SP_REGNUM)
3082 return frame_unwind_got_constant (this_frame, prev_regnum,
3085 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3089 /* Implementation of function hook 'sniffer' in
3090 'struct frame_unwind'. */
3093 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3094 struct frame_info *this_frame,
3095 void **this_prologue_cache)
3097 CORE_ADDR this_pc = get_frame_pc (this_frame);
3099 /* No need to check is_m; this sniffer is only registered for
3100 M-profile architectures. */
3102 /* Exception frames return to one of these magic PCs. Other values
3103 are not defined as of v7-M. See details in "B1.5.8 Exception
3104 return behavior" in "ARMv7-M Architecture Reference Manual". */
3105 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3106 || this_pc == 0xfffffffd)
3112 /* Frame unwinder for M-profile exceptions. */
3114 struct frame_unwind arm_m_exception_unwind =
3117 default_frame_unwind_stop_reason,
3118 arm_m_exception_this_id,
3119 arm_m_exception_prev_register,
3121 arm_m_exception_unwind_sniffer
3125 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3127 struct arm_prologue_cache *cache;
3129 if (*this_cache == NULL)
3130 *this_cache = arm_make_prologue_cache (this_frame);
3131 cache = *this_cache;
3133 return cache->prev_sp - cache->framesize;
3136 struct frame_base arm_normal_base = {
3137 &arm_prologue_unwind,
3138 arm_normal_frame_base,
3139 arm_normal_frame_base,
3140 arm_normal_frame_base
3143 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3144 dummy frame. The frame ID's base needs to match the TOS value
3145 saved by save_dummy_frame_tos() and returned from
3146 arm_push_dummy_call, and the PC needs to match the dummy frame's
3149 static struct frame_id
3150 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3152 return frame_id_build (get_frame_register_unsigned (this_frame,
3154 get_frame_pc (this_frame));
3157 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3158 be used to construct the previous frame's ID, after looking up the
3159 containing function). */
3162 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3165 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3166 return arm_addr_bits_remove (gdbarch, pc);
3170 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3172 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3175 static struct value *
3176 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3179 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3181 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3186 /* The PC is normally copied from the return column, which
3187 describes saves of LR. However, that version may have an
3188 extra bit set to indicate Thumb state. The bit is not part of the PC. */
3190 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3191 return frame_unwind_got_constant (this_frame, regnum,
3192 arm_addr_bits_remove (gdbarch, lr));
3195 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3196 cpsr = get_frame_register_unsigned (this_frame, regnum);
3197 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3198 if (IS_THUMB_ADDR (lr))
3202 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3205 internal_error (__FILE__, __LINE__,
3206 _("Unexpected register %d"), regnum);
3211 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3212 struct dwarf2_frame_state_reg *reg,
3213 struct frame_info *this_frame)
3219 reg->how = DWARF2_FRAME_REG_FN;
3220 reg->loc.fn = arm_dwarf2_prev_register;
3223 reg->how = DWARF2_FRAME_REG_CFA;
3228 /* Return true if we are in the function's epilogue, i.e. after the
3229 instruction that destroyed the function's stack frame. */
3232 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3234 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3235 unsigned int insn, insn2;
3236 int found_return = 0, found_stack_adjust = 0;
3237 CORE_ADDR func_start, func_end;
3241 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3244 /* The epilogue is a sequence of instructions along the following lines:
3246 - add stack frame size to SP or FP
3247 - [if frame pointer used] restore SP from FP
3248 - restore registers from SP [may include PC]
3249 - a return-type instruction [if PC wasn't already restored]
3251 In a first pass, we scan forward from the current PC and verify the
3252 instructions we find as compatible with this sequence, ending in a
3255 However, this is not sufficient to distinguish indirect function calls
3256 within a function from indirect tail calls in the epilogue in some cases.
3257 Therefore, if we didn't already find any SP-changing instruction during
3258 forward scan, we add a backward scanning heuristic to ensure we actually
3259 are in the epilogue. */
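/* Illustrative example (added for exposition, not part of the original
   sources): a typical Thumb epilogue accepted by the forward scan is

     add  sp, #16        @ releases locals; matches
                         @ thumb_instruction_restores_sp
     pop  {r4, r7, pc}   @ pop including PC (0xbd90); sets FOUND_RETURN

   Exact instruction selection depends on the compiler and options.  */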
3262 while (scan_pc < func_end && !found_return)
3264 if (target_read_memory (scan_pc, buf, 2))
3268 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3270 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3272 else if (insn == 0x46f7) /* mov pc, lr */
3274 else if (thumb_instruction_restores_sp (insn))
3276 found_stack_adjust = 1;
3277 if ((insn & 0xfe00) == 0xbd00) /* pop <registers, PC> */
3280 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3282 if (target_read_memory (scan_pc, buf, 2))
3286 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3288 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3290 found_stack_adjust = 1;
3291 if (insn2 & 0x8000) /* <registers> include PC. */
3294 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3295 && (insn2 & 0x0fff) == 0x0b04)
3297 found_stack_adjust = 1;
3298 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3301 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3302 && (insn2 & 0x0e00) == 0x0a00)
3303 found_stack_adjust = 1;
3314 /* Since any instruction in the epilogue sequence, with the possible
3315 exception of return itself, updates the stack pointer, we need to
3316 scan backwards for at most one instruction. Try either a 16-bit or
3317 a 32-bit instruction. This is just a heuristic, so we do not worry
3318 too much about false positives. */
3320 if (!found_stack_adjust)
3322 if (pc - 4 < func_start)
3324 if (target_read_memory (pc - 4, buf, 4))
3327 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3328 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3330 if (thumb_instruction_restores_sp (insn2))
3331 found_stack_adjust = 1;
3332 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3333 found_stack_adjust = 1;
3334 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3335 && (insn2 & 0x0fff) == 0x0b04)
3336 found_stack_adjust = 1;
3337 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3338 && (insn2 & 0x0e00) == 0x0a00)
3339 found_stack_adjust = 1;
3342 return found_stack_adjust;
3345 /* Return true if we are in the function's epilogue, i.e. after the
3346 instruction that destroyed the function's stack frame. */
3349 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3351 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3353 int found_return, found_stack_adjust;
3354 CORE_ADDR func_start, func_end;
3356 if (arm_pc_is_thumb (gdbarch, pc))
3357 return thumb_in_function_epilogue_p (gdbarch, pc);
3359 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3362 /* We are in the epilogue if the previous instruction was a stack
3363 adjustment and the next instruction is a possible return (bx, mov
3364 pc, or pop). We could have to scan backwards to find the stack
3365 adjustment, or forwards to find the return, but this is a decent
3366 approximation. First scan forwards. */
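/* Illustrative example (added for exposition, not part of the original
   sources): a typical ARM epilogue is

     add  sp, sp, #24        @ 0xe28dd018; matches the backward-scan
                             @ "ADD SP" pattern further down
     pop  {r4, r5, r11, pc}  @ 0xe8bd8830; LDMIA sp! including PC,
                             @ matched by the return check just below

   Exact instruction selection depends on the compiler and options.  */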
3369 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3370 if (bits (insn, 28, 31) != INST_NV)
3372 if ((insn & 0x0ffffff0) == 0x012fff10)
3375 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3378 else if ((insn & 0x0fff0000) == 0x08bd0000
3379 && (insn & 0x0000c000) != 0)
3380 /* POP (LDMIA), including PC or LR. */
3387 /* Scan backwards. This is just a heuristic, so do not worry about
3388 false positives from mode changes. */
3390 if (pc < func_start + 4)
3393 found_stack_adjust = 0;
3394 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3395 if (bits (insn, 28, 31) != INST_NV)
3397 if ((insn & 0x0df0f000) == 0x0080d000)
3398 /* ADD SP (register or immediate). */
3399 found_stack_adjust = 1;
3400 else if ((insn & 0x0df0f000) == 0x0040d000)
3401 /* SUB SP (register or immediate). */
3402 found_stack_adjust = 1;
3403 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3405 found_stack_adjust = 1;
3406 else if ((insn & 0x0fff0000) == 0x08bd0000)
3408 found_stack_adjust = 1;
3409 else if ((insn & 0x0fff0000) == 0x049d0000)
3410 /* POP of a single register. */
3411 found_stack_adjust = 1;
3414 if (found_stack_adjust)
3421 /* When arguments must be pushed onto the stack, they go on in reverse
3422 order. The code below implements a FILO (stack) to do this. */
3427 struct stack_item *prev;
3431 static struct stack_item *
3432 push_stack_item (struct stack_item *prev, const void *contents, int len)
3434 struct stack_item *si;
3435 si = xmalloc (sizeof (struct stack_item));
3436 si->data = xmalloc (len);
3439 memcpy (si->data, contents, len);
3443 static struct stack_item *
3444 pop_stack_item (struct stack_item *si)
3446 struct stack_item *dead = si;
3454 /* Return the alignment (in bytes) of the given type. */
3457 arm_type_align (struct type *t)
3463 t = check_typedef (t);
3464 switch (TYPE_CODE (t))
3467 /* Should never happen. */
3468 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3472 case TYPE_CODE_ENUM:
3476 case TYPE_CODE_RANGE:
3478 case TYPE_CODE_CHAR:
3479 case TYPE_CODE_BOOL:
3480 return TYPE_LENGTH (t);
3482 case TYPE_CODE_ARRAY:
3483 case TYPE_CODE_COMPLEX:
3484 /* TODO: What about vector types? */
3485 return arm_type_align (TYPE_TARGET_TYPE (t));
3487 case TYPE_CODE_STRUCT:
3488 case TYPE_CODE_UNION:
3490 for (n = 0; n < TYPE_NFIELDS (t); n++)
3492 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3500 /* Possible base types for a candidate for passing and returning in
3503 enum arm_vfp_cprc_base_type
3512 /* The length of one element of base type B. */
3515 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3519 case VFP_CPRC_SINGLE:
3521 case VFP_CPRC_DOUBLE:
3523 case VFP_CPRC_VEC64:
3525 case VFP_CPRC_VEC128:
3528 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3533 /* The character ('s', 'd' or 'q') for the type of VFP register used
3534 for passing base type B. */
3537 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3541 case VFP_CPRC_SINGLE:
3543 case VFP_CPRC_DOUBLE:
3545 case VFP_CPRC_VEC64:
3547 case VFP_CPRC_VEC128:
3550 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3555 /* Determine whether T may be part of a candidate for passing and
3556 returning in VFP registers, ignoring the limit on the total number
3557 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3558 classification of the first valid component found; if it is not
3559 VFP_CPRC_UNKNOWN, all components must have the same classification
3560 as *BASE_TYPE. If it is found that T contains a type not permitted
3561 for passing and returning in VFP registers, a type differently
3562 classified from *BASE_TYPE, or two types differently classified
3563 from each other, return -1, otherwise return the total number of
3564 base-type elements found (possibly 0 in an empty structure or
3565 array). Vectors and complex types are not currently supported,
3566 matching the generic AAPCS support. */
3569 arm_vfp_cprc_sub_candidate (struct type *t,
3570 enum arm_vfp_cprc_base_type *base_type)
3572 t = check_typedef (t);
3573 switch (TYPE_CODE (t))
3576 switch (TYPE_LENGTH (t))
3579 if (*base_type == VFP_CPRC_UNKNOWN)
3580 *base_type = VFP_CPRC_SINGLE;
3581 else if (*base_type != VFP_CPRC_SINGLE)
3586 if (*base_type == VFP_CPRC_UNKNOWN)
3587 *base_type = VFP_CPRC_DOUBLE;
3588 else if (*base_type != VFP_CPRC_DOUBLE)
3597 case TYPE_CODE_ARRAY:
3601 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3604 if (TYPE_LENGTH (t) == 0)
3606 gdb_assert (count == 0);
3609 else if (count == 0)
3611 unitlen = arm_vfp_cprc_unit_length (*base_type);
3612 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3613 return TYPE_LENGTH (t) / unitlen;
3617 case TYPE_CODE_STRUCT:
3622 for (i = 0; i < TYPE_NFIELDS (t); i++)
3624 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3626 if (sub_count == -1)
3630 if (TYPE_LENGTH (t) == 0)
3632 gdb_assert (count == 0);
3635 else if (count == 0)
3637 unitlen = arm_vfp_cprc_unit_length (*base_type);
3638 if (TYPE_LENGTH (t) != unitlen * count)
3643 case TYPE_CODE_UNION:
3648 for (i = 0; i < TYPE_NFIELDS (t); i++)
3650 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3652 if (sub_count == -1)
3654 count = (count > sub_count ? count : sub_count);
3656 if (TYPE_LENGTH (t) == 0)
3658 gdb_assert (count == 0);
3661 else if (count == 0)
3663 unitlen = arm_vfp_cprc_unit_length (*base_type);
3664 if (TYPE_LENGTH (t) != unitlen * count)
3676 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3677 if passed to or returned from a non-variadic function with the VFP
3678 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3679 *BASE_TYPE to the base type for T and *COUNT to the number of
3680 elements of that base type before returning. */
3683 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3686 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3687 int c = arm_vfp_cprc_sub_candidate (t, &b);
3688 if (c <= 0 || c > 4)
3695 /* Return 1 if the VFP ABI should be used for passing arguments to and
3696 returning values from a function of type FUNC_TYPE, 0
3700 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3702 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3703 /* Variadic functions always use the base ABI. Assume that functions
3704 without debug info are not variadic. */
3705 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3707 /* The VFP ABI is only supported as a variant of AAPCS. */
3708 if (tdep->arm_abi != ARM_ABI_AAPCS)
3710 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
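/* Worked example (added for exposition, not part of the original
   sources): with the VFP variant of the AAPCS in effect,

     struct xyz { float x, y, z; };      -- CPRC: 3 x VFP_CPRC_SINGLE
     struct dd  { double a, b; };        -- CPRC: 2 x VFP_CPRC_DOUBLE
     struct mix { float f; double d; };  -- not a CPRC (mixed base types)

   The first two pass the candidate test above (count 3 in s0-s2 and
   count 2 in d0-d1 respectively, register allocation permitting), while
   the third yields -1 from arm_vfp_cprc_sub_candidate and is therefore
   passed by the base integer-register rules instead.  */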
3713 /* We currently only support passing parameters in integer registers, which
3714 conforms with GCC's default model, and VFP argument passing following
3715 the VFP variant of AAPCS. Several other variants exist and
3716 we should probably support some of them based on the selected ABI. */
3719 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3720 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3721 struct value **args, CORE_ADDR sp, int struct_return,
3722 CORE_ADDR struct_addr)
3724 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3728 struct stack_item *si = NULL;
3731 unsigned vfp_regs_free = (1 << 16) - 1;
3733 /* Determine the type of this function and whether the VFP ABI
3735 ftype = check_typedef (value_type (function));
3736 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3737 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3738 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3740 /* Set the return address. For the ARM, the return breakpoint is
3741 always at BP_ADDR. */
3742 if (arm_pc_is_thumb (gdbarch, bp_addr))
3744 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3746 /* Walk through the list of args and determine how large a temporary
3747 stack is required. Need to take care here as structs may be
3748 passed on the stack, and we have to push them. */
3751 argreg = ARM_A1_REGNUM;
3754 /* The struct_return pointer occupies the first parameter
3755 passing register. */
3759 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3760 gdbarch_register_name (gdbarch, argreg),
3761 paddress (gdbarch, struct_addr));
3762 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3766 for (argnum = 0; argnum < nargs; argnum++)
3769 struct type *arg_type;
3770 struct type *target_type;
3771 enum type_code typecode;
3772 const bfd_byte *val;
3774 enum arm_vfp_cprc_base_type vfp_base_type;
3776 int may_use_core_reg = 1;
3778 arg_type = check_typedef (value_type (args[argnum]));
3779 len = TYPE_LENGTH (arg_type);
3780 target_type = TYPE_TARGET_TYPE (arg_type);
3781 typecode = TYPE_CODE (arg_type);
3782 val = value_contents (args[argnum]);
3784 align = arm_type_align (arg_type);
3785 /* Round alignment up to a whole number of words. */
3786 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3787 /* Different ABIs have different maximum alignments. */
3788 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3790 /* The APCS ABI only requires word alignment. */
3791 align = INT_REGISTER_SIZE;
3795 /* The AAPCS requires at most doubleword alignment. */
3796 if (align > INT_REGISTER_SIZE * 2)
3797 align = INT_REGISTER_SIZE * 2;
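/* Example (exposition only, not from the original sources): a char
   argument has alignment 1, which the rounding above turns into one
   word (4); a double or long long yields 8, which this AAPCS branch
   leaves capped at two words.  */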
3801 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3809 /* Because this is a CPRC it cannot go in a core register or
3810 cause a core register to be skipped for alignment.
3811 Either it goes in VFP registers and the rest of this loop
3812 iteration is skipped for this argument, or it goes on the
3813 stack (and the stack alignment code is correct for this case). */
3815 may_use_core_reg = 0;
3817 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3818 shift = unit_length / 4;
3819 mask = (1 << (shift * vfp_base_count)) - 1;
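/* Example (exposition only, not from the original sources): for a CPRC
   made of two doubles, UNIT_LENGTH is 8, so SHIFT is 2 and MASK is 0xf;
   the loop below then scans for four consecutive free single-precision
   slots starting at s0, s2, s4, ..., i.e. the first free consecutive
   double pair (d0/d1, then d1/d2, and so on).  */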
3820 for (regno = 0; regno < 16; regno += shift)
3821 if (((vfp_regs_free >> regno) & mask) == mask)
3830 vfp_regs_free &= ~(mask << regno);
3831 reg_scaled = regno / shift;
3832 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3833 for (i = 0; i < vfp_base_count; i++)
3837 if (reg_char == 'q')
3838 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3839 val + i * unit_length);
3842 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3843 reg_char, reg_scaled + i);
3844 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3846 regcache_cooked_write (regcache, regnum,
3847 val + i * unit_length);
3854 /* This CPRC could not go in VFP registers, so all VFP
3855 registers are now marked as used. */
3860 /* Push stack padding for doubleword alignment. */
3861 if (nstack & (align - 1))
3863 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3864 nstack += INT_REGISTER_SIZE;
3867 /* Doubleword aligned quantities must go in even register pairs. */
3868 if (may_use_core_reg
3869 && argreg <= ARM_LAST_ARG_REGNUM
3870 && align > INT_REGISTER_SIZE
3874 /* If the argument is a pointer to a function, and it is a
3875 Thumb function, create a LOCAL copy of the value and set
3876 the THUMB bit in it. */
3877 if (TYPE_CODE_PTR == typecode
3878 && target_type != NULL
3879 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3881 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3882 if (arm_pc_is_thumb (gdbarch, regval))
3884 bfd_byte *copy = alloca (len);
3885 store_unsigned_integer (copy, len, byte_order,
3886 MAKE_THUMB_ADDR (regval));
3891 /* Copy the argument to general registers or the stack in
3892 register-sized pieces. Large arguments are split between
3893 registers and stack. */
3896 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3898 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3900 /* The argument is being passed in a general purpose
3903 = extract_unsigned_integer (val, partial_len, byte_order);
3904 if (byte_order == BFD_ENDIAN_BIG)
3905 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3907 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3909 gdbarch_register_name
3911 phex (regval, INT_REGISTER_SIZE));
3912 regcache_cooked_write_unsigned (regcache, argreg, regval);
3917 /* Push the arguments onto the stack. */
3919 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3921 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3922 nstack += INT_REGISTER_SIZE;
3929 /* If we have an odd number of words to push, then decrement the stack
3930 by one word now, so first stack argument will be dword aligned. */
3937 write_memory (sp, si->data, si->len);
3938 si = pop_stack_item (si);
3941 /* Finally, update the SP register. */
3942 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3948 /* Always align the frame to an 8-byte boundary. This is required on
3949 some platforms and harmless on the rest. */
3952 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3954 /* Align the stack to eight bytes. */
3955 return sp & ~ (CORE_ADDR) 7;
3959 print_fpu_flags (struct ui_file *file, int flags)
3961 if (flags & (1 << 0))
3962 fputs_filtered ("IVO ", file);
3963 if (flags & (1 << 1))
3964 fputs_filtered ("DVZ ", file);
3965 if (flags & (1 << 2))
3966 fputs_filtered ("OFL ", file);
3967 if (flags & (1 << 3))
3968 fputs_filtered ("UFL ", file);
3969 if (flags & (1 << 4))
3970 fputs_filtered ("INX ", file);
3971 fputc_filtered ('\n', file);
3974 /* Print interesting information about the floating point processor
3975 (if present) or emulator. */
3977 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3978 struct frame_info *frame, const char *args)
3980 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3983 type = (status >> 24) & 127;
3984 if (status & (1 << 31))
3985 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3987 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3988 /* i18n: [floating point unit] mask */
3989 fputs_filtered (_("mask: "), file);
3990 print_fpu_flags (file, status >> 16);
3991 /* i18n: [floating point unit] flags */
3992 fputs_filtered (_("flags: "), file);
3993 print_fpu_flags (file, status);
3996 /* Construct the ARM extended floating point type. */
3997 static struct type *
3998 arm_ext_type (struct gdbarch *gdbarch)
4000 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4002 if (!tdep->arm_ext_type)
4004 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4005 floatformats_arm_ext);
4007 return tdep->arm_ext_type;
4010 static struct type *
4011 arm_neon_double_type (struct gdbarch *gdbarch)
4013 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4015 if (tdep->neon_double_type == NULL)
4017 struct type *t, *elem;
4019 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4021 elem = builtin_type (gdbarch)->builtin_uint8;
4022 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4023 elem = builtin_type (gdbarch)->builtin_uint16;
4024 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4025 elem = builtin_type (gdbarch)->builtin_uint32;
4026 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4027 elem = builtin_type (gdbarch)->builtin_uint64;
4028 append_composite_type_field (t, "u64", elem);
4029 elem = builtin_type (gdbarch)->builtin_float;
4030 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4031 elem = builtin_type (gdbarch)->builtin_double;
4032 append_composite_type_field (t, "f64", elem);
4034 TYPE_VECTOR (t) = 1;
4035 TYPE_NAME (t) = "neon_d";
4036 tdep->neon_double_type = t;
4039 return tdep->neon_double_type;
4042 /* FIXME: The vector types are not correctly ordered on big-endian
4043 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4044 bits of d0 - regardless of what unit size is being held in d0. So
4045 the offset of the first uint8 in d0 is 7, but the offset of the
4046 first float is 4. This code works as-is for little-endian
4049 static struct type *
4050 arm_neon_quad_type (struct gdbarch *gdbarch)
4052 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4054 if (tdep->neon_quad_type == NULL)
4056 struct type *t, *elem;
4058 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4060 elem = builtin_type (gdbarch)->builtin_uint8;
4061 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4062 elem = builtin_type (gdbarch)->builtin_uint16;
4063 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4064 elem = builtin_type (gdbarch)->builtin_uint32;
4065 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4066 elem = builtin_type (gdbarch)->builtin_uint64;
4067 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4068 elem = builtin_type (gdbarch)->builtin_float;
4069 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4070 elem = builtin_type (gdbarch)->builtin_double;
4071 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4073 TYPE_VECTOR (t) = 1;
4074 TYPE_NAME (t) = "neon_q";
4075 tdep->neon_quad_type = t;
4078 return tdep->neon_quad_type;
4081 /* Return the GDB type object for the "standard" data type of data in
4084 static struct type *
4085 arm_register_type (struct gdbarch *gdbarch, int regnum)
4087 int num_regs = gdbarch_num_regs (gdbarch);
4089 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4090 && regnum >= num_regs && regnum < num_regs + 32)
4091 return builtin_type (gdbarch)->builtin_float;
4093 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4094 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4095 return arm_neon_quad_type (gdbarch);
4097 /* If the target description has register information, we are only
4098 in this function so that we can override the types of
4099 double-precision registers for NEON. */
4100 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4102 struct type *t = tdesc_register_type (gdbarch, regnum);
4104 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4105 && TYPE_CODE (t) == TYPE_CODE_FLT
4106 && gdbarch_tdep (gdbarch)->have_neon)
4107 return arm_neon_double_type (gdbarch);
4112 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4114 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4115 return builtin_type (gdbarch)->builtin_void;
4117 return arm_ext_type (gdbarch);
4119 else if (regnum == ARM_SP_REGNUM)
4120 return builtin_type (gdbarch)->builtin_data_ptr;
4121 else if (regnum == ARM_PC_REGNUM)
4122 return builtin_type (gdbarch)->builtin_func_ptr;
4123 else if (regnum >= ARRAY_SIZE (arm_register_names))
4124 /* These registers are only supported on targets which supply
4125 an XML description. */
4126 return builtin_type (gdbarch)->builtin_int0;
4128 return builtin_type (gdbarch)->builtin_uint32;
4131 /* Map a DWARF register REGNUM onto the appropriate GDB register
4135 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4137 /* Core integer regs. */
4138 if (reg >= 0 && reg <= 15)
4141 /* Legacy FPA encoding. These were once used in a way which
4142 overlapped with VFP register numbering, so their use is
4143 discouraged, but GDB doesn't support the ARM toolchain
4144 which used them for VFP. */
4145 if (reg >= 16 && reg <= 23)
4146 return ARM_F0_REGNUM + reg - 16;
4148 /* New assignments for the FPA registers. */
4149 if (reg >= 96 && reg <= 103)
4150 return ARM_F0_REGNUM + reg - 96;
4152 /* WMMX register assignments. */
4153 if (reg >= 104 && reg <= 111)
4154 return ARM_WCGR0_REGNUM + reg - 104;
4156 if (reg >= 112 && reg <= 127)
4157 return ARM_WR0_REGNUM + reg - 112;
4159 if (reg >= 192 && reg <= 199)
4160 return ARM_WC0_REGNUM + reg - 192;
4162 /* VFP v2 registers. A double precision value is actually
4163 in d1 rather than s2, but the ABI only defines numbering
4164 for the single precision registers. This will "just work"
4165 in GDB for little endian targets (we'll read eight bytes,
4166 starting in s0 and then progressing to s1), but will be
4167 reversed on big endian targets with VFP. This won't
4168 be a problem for the new Neon quad registers; you're supposed
4169 to use DW_OP_piece for those. */
4170 if (reg >= 64 && reg <= 95)
4174 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4175 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4179 /* VFP v3 / Neon registers. This range is also used for VFP v2
4180 registers, except that it now describes d0 instead of s0. */
4181 if (reg >= 256 && reg <= 287)
4185 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4186 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4193 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4195 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4198 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4200 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4201 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4203 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4204 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4206 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4207 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4209 if (reg < NUM_GREGS)
4210 return SIM_ARM_R0_REGNUM + reg;
4213 if (reg < NUM_FREGS)
4214 return SIM_ARM_FP0_REGNUM + reg;
4217 if (reg < NUM_SREGS)
4218 return SIM_ARM_FPS_REGNUM + reg;
4221 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4224 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4225 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4226 It is thought that this is the floating-point register format on
4227 little-endian systems. */
4230 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4231 void *dbl, int endianess)
4235 if (endianess == BFD_ENDIAN_BIG)
4236 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4238 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4240 floatformat_from_doublest (fmt, &d, dbl);
4244 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4249 floatformat_to_doublest (fmt, ptr, &d);
4250 if (endianess == BFD_ENDIAN_BIG)
4251 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4253 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4258 condition_true (unsigned long cond, unsigned long status_reg)
4260 if (cond == INST_AL || cond == INST_NV)
4266 return ((status_reg & FLAG_Z) != 0);
4268 return ((status_reg & FLAG_Z) == 0);
4270 return ((status_reg & FLAG_C) != 0);
4272 return ((status_reg & FLAG_C) == 0);
4274 return ((status_reg & FLAG_N) != 0);
4276 return ((status_reg & FLAG_N) == 0);
4278 return ((status_reg & FLAG_V) != 0);
4280 return ((status_reg & FLAG_V) == 0);
4282 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4284 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4286 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4288 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4290 return (((status_reg & FLAG_Z) == 0)
4291 && (((status_reg & FLAG_N) == 0)
4292 == ((status_reg & FLAG_V) == 0)));
4294 return (((status_reg & FLAG_Z) != 0)
4295 || (((status_reg & FLAG_N) == 0)
4296 != ((status_reg & FLAG_V) == 0)));
4301 static unsigned long
4302 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4303 unsigned long pc_val, unsigned long status_reg)
4305 unsigned long res, shift;
4306 int rm = bits (inst, 0, 3);
4307 unsigned long shifttype = bits (inst, 5, 6);
4311 int rs = bits (inst, 8, 11);
4312 shift = (rs == 15 ? pc_val + 8
4313 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4316 shift = bits (inst, 7, 11);
4318 res = (rm == ARM_PC_REGNUM
4319 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4320 : get_frame_register_unsigned (frame, rm));
4325 res = shift >= 32 ? 0 : res << shift;
4329 res = shift >= 32 ? 0 : res >> shift;
4335 res = ((res & 0x80000000L)
4336 ? ~((~res) >> shift) : res >> shift);
4339 case 3: /* ROR/RRX */
4342 res = (res >> 1) | (carry ? 0x80000000L : 0);
4344 res = (res >> shift) | (res << (32 - shift));
4348 return res & 0xffffffff;
4351 /* Return number of 1-bits in VAL. */
4354 bitcount (unsigned long val)
4357 for (nbits = 0; val != 0; nbits++)
4358 val &= val - 1; /* Delete rightmost 1-bit in val. */
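/* Example (exposition only, not from the original sources): VAL = 0x28
   (binary 101000) takes two iterations, 0x28 -> 0x20 -> 0, so two set
   bits are counted.  */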
4362 /* Return the size in bytes of the complete Thumb instruction whose
4363 first halfword is INST1. */
4366 thumb_insn_size (unsigned short inst1)
4368 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4375 thumb_advance_itstate (unsigned int itstate)
4377 /* Preserve IT[7:5], the first three bits of the condition. Shift
4378 the upcoming condition flags left by one bit. */
4379 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
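/* For example, advancing itstate 0x2a yields (0x2a & 0xe0)
   | ((0x2a << 1) & 0x1f) == 0x34; its low four bits are still nonzero,
   so the IT block is not yet finished. */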
4381 /* If we have finished the IT block, clear the state. */
4382 if ((itstate & 0x0f) == 0)
4388 /* Find the next PC after the current instruction executes. In some
4389 cases we can not statically determine the answer (see the IT state
4390 handling in this function); in that case, a breakpoint may be
4391 inserted in addition to the returned PC, which will be used to set
4392 another breakpoint by our caller. */
4395 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4397 struct gdbarch *gdbarch = get_frame_arch (frame);
4398 struct address_space *aspace = get_frame_address_space (frame);
4399 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4400 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4401 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4402 unsigned short inst1;
4403 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4404 unsigned long offset;
4405 ULONGEST status, itstate;
4407 nextpc = MAKE_THUMB_ADDR (nextpc);
4408 pc_val = MAKE_THUMB_ADDR (pc_val);
4410 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4412 /* Thumb-2 conditional execution support. There are eight bits in
4413 the CPSR which describe conditional execution state. Once
4414 reconstructed (they're in a funny order), the low five bits
4415 describe the low bit of the condition for each instruction and
4416 how many instructions remain. The high three bits describe the
4417 base condition. One of the low four bits will be set if an IT
4418 block is active. These bits read as zero on earlier processors. */
4420 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4421 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
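/* The expression above gathers CPSR bits [15:10] into ITSTATE[7:2] and
   CPSR bits [26:25] into ITSTATE[1:0], which is where the ARMv7 xPSR
   keeps the IT bits. */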
4423 /* If-Then handling. On GNU/Linux, where this routine is used, we
4424 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4425 can disable execution of the undefined instruction. So we might
4426 miss the breakpoint if we set it on a skipped conditional
4427 instruction. Because conditional instructions can change the
4428 flags, affecting the execution of further instructions, we may
4429 need to set two breakpoints. */
4431 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4433 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4435 /* An IT instruction. Because this instruction does not
4436 modify the flags, we can accurately predict the next
4437 executed instruction. */
4438 itstate = inst1 & 0x00ff;
4439 pc += thumb_insn_size (inst1);
4441 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4443 inst1 = read_memory_unsigned_integer (pc, 2,
4444 byte_order_for_code);
4445 pc += thumb_insn_size (inst1);
4446 itstate = thumb_advance_itstate (itstate);
4449 return MAKE_THUMB_ADDR (pc);
4451 else if (itstate != 0)
4453 /* We are in a conditional block. Check the condition. */
4454 if (! condition_true (itstate >> 4, status))
4456 /* Advance to the next executed instruction. */
4457 pc += thumb_insn_size (inst1);
4458 itstate = thumb_advance_itstate (itstate);
4460 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4462 inst1 = read_memory_unsigned_integer (pc, 2,
4463 byte_order_for_code);
4464 pc += thumb_insn_size (inst1);
4465 itstate = thumb_advance_itstate (itstate);
4468 return MAKE_THUMB_ADDR (pc);
4470 else if ((itstate & 0x0f) == 0x08)
4472 /* This is the last instruction of the conditional
4473 block, and it is executed. We can handle it normally
4474 because the following instruction is not conditional,
4475 and we must handle it normally because it is
4476 permitted to branch. Fall through. */
4482 /* There are conditional instructions after this one.
4483 If this instruction modifies the flags, then we can
4484 not predict what the next executed instruction will
4485 be. Fortunately, this instruction is architecturally
4486 forbidden to branch; we know it will fall through.
4487 Start by skipping past it. */
4488 pc += thumb_insn_size (inst1);
4489 itstate = thumb_advance_itstate (itstate);
4491 /* Set a breakpoint on the following instruction. */
4492 gdb_assert ((itstate & 0x0f) != 0);
4493 arm_insert_single_step_breakpoint (gdbarch, aspace,
4494 MAKE_THUMB_ADDR (pc));
4495 cond_negated = (itstate >> 4) & 1;
4497 /* Skip all following instructions with the same
4498 condition. If there is a later instruction in the IT
4499 block with the opposite condition, set the other
4500 breakpoint there. If not, then set a breakpoint on
4501 the instruction after the IT block. */
4504 inst1 = read_memory_unsigned_integer (pc, 2,
4505 byte_order_for_code);
4506 pc += thumb_insn_size (inst1);
4507 itstate = thumb_advance_itstate (itstate);
4509 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4511 return MAKE_THUMB_ADDR (pc);
4515 else if (itstate & 0x0f)
4517 /* We are in a conditional block. Check the condition. */
4518 int cond = itstate >> 4;
4520 if (! condition_true (cond, status))
4521 /* Advance to the next instruction. All the 32-bit
4522 instructions share a common prefix. */
4523 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4525 /* Otherwise, handle the instruction normally. */
4528 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4532 /* Fetch the saved PC from the stack. It's stored above
4533 all of the other registers. */
4534 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4535 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4536 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4538 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4540 unsigned long cond = bits (inst1, 8, 11);
4541 if (cond == 0x0f) /* 0x0f = SWI */
4543 struct gdbarch_tdep *tdep;
4544 tdep = gdbarch_tdep (gdbarch);
4546 if (tdep->syscall_next_pc != NULL)
4547 nextpc = tdep->syscall_next_pc (frame);
4550 else if (cond != 0x0f && condition_true (cond, status))
4551 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4553 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4555 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4557 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4559 unsigned short inst2;
4560 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4562 /* Default to the next instruction. */
4564 nextpc = MAKE_THUMB_ADDR (nextpc);
4566 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4568 /* Branches and miscellaneous control instructions. */
4570 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4573 int j1, j2, imm1, imm2;
4575 imm1 = sbits (inst1, 0, 10);
4576 imm2 = bits (inst2, 0, 10);
4577 j1 = bit (inst2, 13);
4578 j2 = bit (inst2, 11);
4580 offset = ((imm1 << 12) + (imm2 << 1));
4581 offset ^= ((!j2) << 22) | ((!j1) << 23);
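/* The XOR implements I1 = NOT(J1 EOR S) and I2 = NOT(J2 EOR S) from the
   32-bit branch encodings: the sign-extended IMM1 already propagates S
   into bits 22 and 23, so flipping those bits with !J2 and !J1 produces
   the I bits. */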
4583 nextpc = pc_val + offset;
4584 /* For BLX make sure to clear the low bits. */
4585 if (bit (inst2, 12) == 0)
4586 nextpc = nextpc & 0xfffffffc;
4588 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4590 /* SUBS PC, LR, #imm8. */
4591 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4592 nextpc -= inst2 & 0x00ff;
4594 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4596 /* Conditional branch. */
4597 if (condition_true (bits (inst1, 6, 9), status))
4599 int sign, j1, j2, imm1, imm2;
4601 sign = sbits (inst1, 10, 10);
4602 imm1 = bits (inst1, 0, 5);
4603 imm2 = bits (inst2, 0, 10);
4604 j1 = bit (inst2, 13);
4605 j2 = bit (inst2, 11);
4607 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4608 offset += (imm1 << 12) + (imm2 << 1);
4610 nextpc = pc_val + offset;
4614 else if ((inst1 & 0xfe50) == 0xe810)
4616 /* Load multiple or RFE. */
4617 int rn, offset, load_pc = 1;
4619 rn = bits (inst1, 0, 3);
4620 if (bit (inst1, 7) && !bit (inst1, 8))
4623 if (!bit (inst2, 15))
4625 offset = bitcount (inst2) * 4 - 4;
4627 else if (!bit (inst1, 7) && bit (inst1, 8))
4630 if (!bit (inst2, 15))
4634 else if (bit (inst1, 7) && bit (inst1, 8))
4639 else if (!bit (inst1, 7) && !bit (inst1, 8))
4649 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4650 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4653 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4655 /* MOV PC or MOVS PC. */
4656 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4657 nextpc = MAKE_THUMB_ADDR (nextpc);
4659 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4663 int rn, load_pc = 1;
4665 rn = bits (inst1, 0, 3);
4666 base = get_frame_register_unsigned (frame, rn);
4667 if (rn == ARM_PC_REGNUM)
4669 base = (base + 4) & ~(CORE_ADDR) 0x3;
4671 base += bits (inst2, 0, 11);
4673 base -= bits (inst2, 0, 11);
4675 else if (bit (inst1, 7))
4676 base += bits (inst2, 0, 11);
4677 else if (bit (inst2, 11))
4679 if (bit (inst2, 10))
4682 base += bits (inst2, 0, 7);
4684 base -= bits (inst2, 0, 7);
4687 else if ((inst2 & 0x0fc0) == 0x0000)
4689 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4690 base += get_frame_register_unsigned (frame, rm) << shift;
4697 nextpc = get_frame_memory_unsigned (frame, base, 4);
4699 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4702 CORE_ADDR tbl_reg, table, offset, length;
4704 tbl_reg = bits (inst1, 0, 3);
4705 if (tbl_reg == 0x0f)
4706 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4708 table = get_frame_register_unsigned (frame, tbl_reg);
4710 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4711 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4712 nextpc = pc_val + length;
4714 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4717 CORE_ADDR tbl_reg, table, offset, length;
4719 tbl_reg = bits (inst1, 0, 3);
4720 if (tbl_reg == 0x0f)
4721 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4723 table = get_frame_register_unsigned (frame, tbl_reg);
4725 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4726 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4727 nextpc = pc_val + length;
4730 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4732 if (bits (inst1, 3, 6) == 0x0f)
4733 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4735 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4737 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4739 if (bits (inst1, 3, 6) == 0x0f)
4742 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4744 nextpc = MAKE_THUMB_ADDR (nextpc);
4746 else if ((inst1 & 0xf500) == 0xb100)
4749 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4750 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4752 if (bit (inst1, 11) && reg != 0)
4753 nextpc = pc_val + imm;
4754 else if (!bit (inst1, 11) && reg == 0)
4755 nextpc = pc_val + imm;
4760 /* Get the raw next address. PC is the current program counter, in
4761 FRAME, which is assumed to be executing in ARM mode.
4763 The value returned has the execution state of the next instruction
4764 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4765 in Thumb state, and gdbarch_addr_bits_remove () to get the plain memory address. */
4769 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4771 struct gdbarch *gdbarch = get_frame_arch (frame);
4772 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4773 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4774 unsigned long pc_val;
4775 unsigned long this_instr;
4776 unsigned long status;
4779 pc_val = (unsigned long) pc;
4780 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4782 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4783 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4785 if (bits (this_instr, 28, 31) == INST_NV)
4786 switch (bits (this_instr, 24, 27))
4791 /* Branch with Link and change to Thumb. */
4792 nextpc = BranchDest (pc, this_instr);
4793 nextpc |= bit (this_instr, 24) << 1;
4794 nextpc = MAKE_THUMB_ADDR (nextpc);
4800 /* Coprocessor register transfer. */
4801 if (bits (this_instr, 12, 15) == 15)
4802 error (_("Invalid update to pc in instruction"));
4805 else if (condition_true (bits (this_instr, 28, 31), status))
4807 switch (bits (this_instr, 24, 27))
4810 case 0x1: /* data processing */
4814 unsigned long operand1, operand2, result = 0;
4818 if (bits (this_instr, 12, 15) != 15)
4821 if (bits (this_instr, 22, 25) == 0
4822 && bits (this_instr, 4, 7) == 9) /* multiply */
4823 error (_("Invalid update to pc in instruction"));
4825 /* BX <reg>, BLX <reg> */
4826 if (bits (this_instr, 4, 27) == 0x12fff1
4827 || bits (this_instr, 4, 27) == 0x12fff3)
4829 rn = bits (this_instr, 0, 3);
4830 nextpc = ((rn == ARM_PC_REGNUM)
4832 : get_frame_register_unsigned (frame, rn));
4837 /* Multiply into PC. */
4838 c = (status & FLAG_C) ? 1 : 0;
4839 rn = bits (this_instr, 16, 19);
4840 operand1 = ((rn == ARM_PC_REGNUM)
4842 : get_frame_register_unsigned (frame, rn));
4844 if (bit (this_instr, 25))
4846 unsigned long immval = bits (this_instr, 0, 7);
4847 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4848 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4851 else /* operand 2 is a shifted register. */
4852 operand2 = shifted_reg_val (frame, this_instr, c,
4855 switch (bits (this_instr, 21, 24))
4858 result = operand1 & operand2;
4862 result = operand1 ^ operand2;
4866 result = operand1 - operand2;
4870 result = operand2 - operand1;
4874 result = operand1 + operand2;
4878 result = operand1 + operand2 + c;
4882 result = operand1 - operand2 + c;
4886 result = operand2 - operand1 + c;
4892 case 0xb: /* tst, teq, cmp, cmn */
4893 result = (unsigned long) nextpc;
4897 result = operand1 | operand2;
4901 /* Always step into a function. */
4906 result = operand1 & ~operand2;
4914 /* In 26-bit APCS the bottom two bits of the result are
4915 ignored, and we always end up in ARM state. */
4917 nextpc = arm_addr_bits_remove (gdbarch, result);
4925 case 0x5: /* data transfer */
4928 if (bit (this_instr, 20))
4931 if (bits (this_instr, 12, 15) == 15)
4937 if (bit (this_instr, 22))
4938 error (_("Invalid update to pc in instruction"));
4940 /* byte write to PC */
4941 rn = bits (this_instr, 16, 19);
4942 base = ((rn == ARM_PC_REGNUM)
4944 : get_frame_register_unsigned (frame, rn));
4946 if (bit (this_instr, 24))
4949 int c = (status & FLAG_C) ? 1 : 0;
4950 unsigned long offset =
4951 (bit (this_instr, 25)
4952 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4953 : bits (this_instr, 0, 11));
4955 if (bit (this_instr, 23))
4961 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4968 case 0x9: /* block transfer */
4969 if (bit (this_instr, 20))
4972 if (bit (this_instr, 15))
4976 unsigned long rn_val
4977 = get_frame_register_unsigned (frame,
4978 bits (this_instr, 16, 19));
4980 if (bit (this_instr, 23))
4983 unsigned long reglist = bits (this_instr, 0, 14);
4984 offset = bitcount (reglist) * 4;
4985 if (bit (this_instr, 24)) /* pre */
4988 else if (bit (this_instr, 24))
4992 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
4999 case 0xb: /* branch & link */
5000 case 0xa: /* branch */
5002 nextpc = BranchDest (pc, this_instr);
5008 case 0xe: /* coproc ops */
5012 struct gdbarch_tdep *tdep;
5013 tdep = gdbarch_tdep (gdbarch);
5015 if (tdep->syscall_next_pc != NULL)
5016 nextpc = tdep->syscall_next_pc (frame);
5022 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5030 /* Determine next PC after current instruction executes. Will call either
5031 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5032 loop is detected. */
5035 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5039 if (arm_frame_is_thumb (frame))
5040 nextpc = thumb_get_next_pc_raw (frame, pc);
5042 nextpc = arm_get_next_pc_raw (frame, pc);
5047 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5048 of the appropriate mode (as encoded in the PC value), even if this
5049 differs from what would be expected according to the symbol tables. */
5052 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5053 struct address_space *aspace,
5056 struct cleanup *old_chain
5057 = make_cleanup_restore_integer (&arm_override_mode);
5059 arm_override_mode = IS_THUMB_ADDR (pc);
5060 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5062 insert_single_step_breakpoint (gdbarch, aspace, pc);
5064 do_cleanups (old_chain);
5067 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5068 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5069 is found, attempt to step through it. A breakpoint is placed at the end of the sequence. */
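/* A typical sequence of this kind is an atomic increment, roughly:

       1: ldrex   r1, [r0]
          adds    r1, r1, #1
          strex   r2, r1, [r0]
          cmp     r2, #0
          bne     1b

   A debug trap taken between the LDREX and the STREX can clear the
   exclusive monitor, so the STREX could keep failing if we single-stepped
   through the loop one instruction at a time; stepping over the whole
   sequence avoids that. */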
5073 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5075 struct gdbarch *gdbarch = get_frame_arch (frame);
5076 struct address_space *aspace = get_frame_address_space (frame);
5077 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5078 CORE_ADDR pc = get_frame_pc (frame);
5079 CORE_ADDR breaks[2] = {-1, -1};
5081 unsigned short insn1, insn2;
5084 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5085 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5086 ULONGEST status, itstate;
5088 /* We currently do not support atomic sequences within an IT block. */
5089 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5090 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5094 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5095 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5097 if (thumb_insn_size (insn1) != 4)
5100 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5102 if (!((insn1 & 0xfff0) == 0xe850
5103 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5106 /* Assume that no atomic sequence is longer than "atomic_sequence_length" instructions. */
5108 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5110 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5113 if (thumb_insn_size (insn1) != 4)
5115 /* Assume that there is at most one conditional branch in the
5116 atomic sequence. If a conditional branch is found, put a
5117 breakpoint at its destination address. */
5118 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5120 if (last_breakpoint > 0)
5121 return 0; /* More than one conditional branch found,
5122 fall back to the standard code. */
5124 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5128 /* We do not support atomic sequences that use any *other*
5129 instructions but conditional branches to change the PC.
5130 Fall back to standard code to avoid losing control of execution. */
5132 else if (thumb_instruction_changes_pc (insn1))
5137 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5140 /* Assume that there is at most one conditional branch in the
5141 atomic sequence. If a conditional branch is found, put a
5142 breakpoint at its destination address. */
5143 if ((insn1 & 0xf800) == 0xf000
5144 && (insn2 & 0xd000) == 0x8000
5145 && (insn1 & 0x0380) != 0x0380)
5147 int sign, j1, j2, imm1, imm2;
5148 unsigned int offset;
5150 sign = sbits (insn1, 10, 10);
5151 imm1 = bits (insn1, 0, 5);
5152 imm2 = bits (insn2, 0, 10);
5153 j1 = bit (insn2, 13);
5154 j2 = bit (insn2, 11);
5156 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5157 offset += (imm1 << 12) + (imm2 << 1);
5159 if (last_breakpoint > 0)
5160 return 0; /* More than one conditional branch found,
5161 fall back to the standard code. */
5163 breaks[1] = loc + offset;
5167 /* We do not support atomic sequences that use any *other*
5168 instructions but conditional branches to change the PC.
5169 Fall back to standard code to avoid losing control of execution. */
5171 else if (thumb2_instruction_changes_pc (insn1, insn2))
5174 /* If we find a strex{,b,h,d}, we're done. */
5175 if ((insn1 & 0xfff0) == 0xe840
5176 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5181 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5182 if (insn_count == atomic_sequence_length)
5185 /* Insert a breakpoint right after the end of the atomic sequence. */
5188 /* Check for duplicated breakpoints. Check also for a breakpoint
5189 placed (branch instruction's destination) anywhere in sequence. */
5191 && (breaks[1] == breaks[0]
5192 || (breaks[1] >= pc && breaks[1] < loc)))
5193 last_breakpoint = 0;
5195 /* Effectively inserts the breakpoints. */
5196 for (index = 0; index <= last_breakpoint; index++)
5197 arm_insert_single_step_breakpoint (gdbarch, aspace,
5198 MAKE_THUMB_ADDR (breaks[index]));
5204 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5206 struct gdbarch *gdbarch = get_frame_arch (frame);
5207 struct address_space *aspace = get_frame_address_space (frame);
5208 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5209 CORE_ADDR pc = get_frame_pc (frame);
5210 CORE_ADDR breaks[2] = {-1, -1};
5215 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5216 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5218 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5219 Note that we do not currently support conditionally executed atomic instructions. */
5221 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5223 if ((insn & 0xff9000f0) != 0xe1900090)
5226 /* Assume that no atomic sequence is longer than "atomic_sequence_length" instructions. */
5228 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5230 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5233 /* Assume that there is at most one conditional branch in the atomic
5234 sequence. If a conditional branch is found, put a breakpoint at
5235 its destination address. */
5236 if (bits (insn, 24, 27) == 0xa)
5238 if (last_breakpoint > 0)
5239 return 0; /* More than one conditional branch found, fall back
5240 to the standard single-step code. */
5242 breaks[1] = BranchDest (loc - 4, insn);
5246 /* We do not support atomic sequences that use any *other* instructions
5247 but conditional branches to change the PC. Fall back to standard
5248 code to avoid losing control of execution. */
5249 else if (arm_instruction_changes_pc (insn))
5252 /* If we find a strex{,b,h,d}, we're done. */
5253 if ((insn & 0xff9000f0) == 0xe1800090)
5257 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5258 if (insn_count == atomic_sequence_length)
5261 /* Insert a breakpoint right after the end of the atomic sequence. */
5264 /* Check for duplicated breakpoints. Check also for a breakpoint
5265 placed (branch instruction's destination) anywhere in sequence. */
5267 && (breaks[1] == breaks[0]
5268 || (breaks[1] >= pc && breaks[1] < loc)))
5269 last_breakpoint = 0;
5271 /* Effectively inserts the breakpoints. */
5272 for (index = 0; index <= last_breakpoint; index++)
5273 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5279 arm_deal_with_atomic_sequence (struct frame_info *frame)
5281 if (arm_frame_is_thumb (frame))
5282 return thumb_deal_with_atomic_sequence_raw (frame);
5284 return arm_deal_with_atomic_sequence_raw (frame);
5287 /* single_step() is called just before we want to resume the inferior,
5288 if we want to single-step it but there is no hardware or kernel
5289 single-step support. We find the target of the coming instruction
5290 and breakpoint it. */
5293 arm_software_single_step (struct frame_info *frame)
5295 struct gdbarch *gdbarch = get_frame_arch (frame);
5296 struct address_space *aspace = get_frame_address_space (frame);
5299 if (arm_deal_with_atomic_sequence (frame))
5302 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5303 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5308 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5309 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5310 NULL if an error occurs. BUF is freed. */
5313 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5314 int old_len, int new_len)
5317 int bytes_to_read = new_len - old_len;
5319 new_buf = xmalloc (new_len);
5320 memcpy (new_buf + bytes_to_read, buf, old_len);
5322 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5330 /* An IT block is at most the 2-byte IT instruction followed by
5331 four 4-byte instructions. The furthest back we must search to
5332 find an IT block that affects the current instruction is thus
5333 2 + 3 * 4 == 14 bytes. */
5334 #define MAX_IT_BLOCK_PREFIX 14
5336 /* Use a quick scan if there are more than this many bytes of instructions. */
5338 #define IT_SCAN_THRESHOLD 32
5340 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5341 A breakpoint in an IT block may not be hit, depending on the condition flags. */
5344 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5348 CORE_ADDR boundary, func_start;
5350 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5351 int i, any, last_it, last_it_count;
5353 /* If we are using BKPT breakpoints, none of this is necessary. */
5354 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5357 /* ARM mode does not have this problem. */
5358 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5361 /* We are setting a breakpoint in Thumb code that could potentially
5362 contain an IT block. The first step is to find how much Thumb
5363 code there is; we do not need to read outside of known Thumb
5365 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5367 /* Thumb-2 code must have mapping symbols to have a chance. */
5370 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5372 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5373 && func_start > boundary)
5374 boundary = func_start;
5376 /* Search for a candidate IT instruction. We have to do some fancy
5377 footwork to distinguish a real IT instruction from the second
5378 half of a 32-bit instruction, but there is no need for that if
5379 there's no candidate. */
5380 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5382 /* No room for an IT instruction. */
5385 buf = xmalloc (buf_len);
5386 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5389 for (i = 0; i < buf_len; i += 2)
5391 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5392 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5404 /* OK, the code bytes before this instruction contain at least one
5405 halfword which resembles an IT instruction. We know that it's
5406 Thumb code, but there are still two possibilities. Either the
5407 halfword really is an IT instruction, or it is the second half of
5408 a 32-bit Thumb instruction. The only way we can tell is to
5409 scan forwards from a known instruction boundary. */
5410 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5414 /* There's a lot of code before this instruction. Start with an
5415 optimistic search; it's easy to recognize halfwords that can
5416 not be the start of a 32-bit instruction, and use that to
5417 lock on to the instruction boundaries. */
5418 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5421 buf_len = IT_SCAN_THRESHOLD;
5424 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5426 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5427 if (thumb_insn_size (inst1) == 2)
5434 /* At this point, if DEFINITE, BUF[I] is the first place we
5435 are sure that we know the instruction boundaries, and it is far
5436 enough from BPADDR that we could not miss an IT instruction
5437 affecting BPADDR. If ! DEFINITE, give up - start from a
5441 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5445 buf_len = bpaddr - boundary;
5451 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5454 buf_len = bpaddr - boundary;
5458 /* Scan forwards. Find the last IT instruction before BPADDR. */
5463 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5465 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5470 else if (inst1 & 0x0002)
5472 else if (inst1 & 0x0004)
5477 i += thumb_insn_size (inst1);
5483 /* There wasn't really an IT instruction after all. */
5486 if (last_it_count < 1)
5487 /* It was too far away. */
5490 /* This really is a trouble spot. Move the breakpoint to the IT instruction. */
5492 return bpaddr - buf_len + last_it;
5495 /* ARM displaced stepping support.
5497 Generally ARM displaced stepping works as follows:
5499 1. When an instruction is to be single-stepped, it is first decoded by
5500 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5501 Depending on the type of instruction, it is then copied to a scratch
5502 location, possibly in a modified form. The copy_* set of functions
5503 performs such modification, as necessary. A breakpoint is placed after
5504 the modified instruction in the scratch space to return control to GDB.
5505 Note in particular that instructions which modify the PC will no longer
5506 do so after modification.
5508 2. The instruction is single-stepped, by setting the PC to the scratch
5509 location address, and resuming. Control returns to GDB when the
5512 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5513 function used for the current instruction. This function's job is to
5514 put the CPU/memory state back to what it would have been if the
5515 instruction had been executed unmodified in its original location. */
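/* As a concrete instance of the scheme above (see arm_copy_preload and
   cleanup_preload below): a PC-relative "pld [pc, #imm]" is copied to the
   scratch area as "pld [r0, #imm]", with r0 temporarily loaded with the
   value the PC would have had at the original location; the cleanup
   routine then restores r0. */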
5517 /* NOP instruction (mov r0, r0). */
5518 #define ARM_NOP 0xe1a00000
5519 #define THUMB_NOP 0x4600
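/* 0x4600 is the 16-bit Thumb encoding of "mov r0, r0", mirroring the ARM
   NOP above. */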
5521 /* Helper for register reads for displaced stepping. In particular, this
5522 returns the PC as it would be seen by the instruction at its original location. */
5526 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5530 CORE_ADDR from = dsc->insn_addr;
5532 if (regno == ARM_PC_REGNUM)
5534 /* Compute pipeline offset:
5535 - When executing an ARM instruction, PC reads as the address of the
5536 current instruction plus 8.
5537 - When executing a Thumb instruction, PC reads as the address of the
5538 current instruction plus 4. */
5545 if (debug_displaced)
5546 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5547 (unsigned long) from);
5548 return (ULONGEST) from;
5552 regcache_cooked_read_unsigned (regs, regno, &ret);
5553 if (debug_displaced)
5554 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5555 regno, (unsigned long) ret);
5561 displaced_in_arm_mode (struct regcache *regs)
5564 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5566 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5568 return (ps & t_bit) == 0;
5571 /* Write to the PC as from a branch instruction. */
5574 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5578 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5579 architecture versions < 6. */
5580 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5581 val & ~(ULONGEST) 0x3);
5583 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5584 val & ~(ULONGEST) 0x1);
5587 /* Write to the PC as from a branch-exchange instruction. */
5590 bx_write_pc (struct regcache *regs, ULONGEST val)
5593 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5595 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5599 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5600 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5602 else if ((val & 2) == 0)
5604 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5605 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5609 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5610 mode, align dest to 4 bytes). */
5611 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5612 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5613 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5617 /* Write to the PC as if from a load instruction. */
5620 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5623 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5624 bx_write_pc (regs, val);
5626 branch_write_pc (regs, dsc, val);
5629 /* Write to the PC as if from an ALU instruction. */
5632 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5635 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5636 bx_write_pc (regs, val);
5638 branch_write_pc (regs, dsc, val);
5641 /* Helper for writing to registers for displaced stepping. Writing to the PC
5642 has varying effects depending on the instruction which does the write:
5643 this is controlled by the WRITE_PC argument. */
5646 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5647 int regno, ULONGEST val, enum pc_write_style write_pc)
5649 if (regno == ARM_PC_REGNUM)
5651 if (debug_displaced)
5652 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5653 (unsigned long) val);
5656 case BRANCH_WRITE_PC:
5657 branch_write_pc (regs, dsc, val);
5661 bx_write_pc (regs, val);
5665 load_write_pc (regs, dsc, val);
5669 alu_write_pc (regs, dsc, val);
5672 case CANNOT_WRITE_PC:
5673 warning (_("Instruction wrote to PC in an unexpected way when "
5674 "single-stepping"));
5678 internal_error (__FILE__, __LINE__,
5679 _("Invalid argument to displaced_write_reg"));
5682 dsc->wrote_to_pc = 1;
5686 if (debug_displaced)
5687 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5688 regno, (unsigned long) val);
5689 regcache_cooked_write_unsigned (regs, regno, val);
5693 /* This function is used to concisely determine if an instruction INSN
5694 references PC. Register fields of interest in INSN should have the
5695 corresponding fields of BITMASK set to 0b1111. The function
5696 returns 1 if any of these fields in INSN reference the PC
5697 (also 0b1111, r15), else it returns 0. */
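/* For example, insn_references_pc (insn, 0x000f0000ul) tests only the Rn
   field in bits [19:16] and is nonzero exactly when that field is 0b1111,
   i.e. when Rn is the PC; this is how arm_copy_preload below decides
   whether the instruction needs rewriting. */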
5700 insn_references_pc (uint32_t insn, uint32_t bitmask)
5702 uint32_t lowbit = 1;
5704 while (bitmask != 0)
5708 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5714 mask = lowbit * 0xf;
5716 if ((insn & mask) == mask)
5725 /* The simplest copy function. Many instructions have the same effect no
5726 matter what address they are executed at: in those cases, use this. */
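/* A plain register-to-register data-processing instruction such as
   "add r1, r2, r3" is one example: it reads and writes only general
   registers, so it can run unchanged from the scratch area. */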
5729 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5730 const char *iname, struct displaced_step_closure *dsc)
5732 if (debug_displaced)
5733 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5734 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5737 dsc->modinsn[0] = insn;
5743 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5744 uint16_t insn2, const char *iname,
5745 struct displaced_step_closure *dsc)
5747 if (debug_displaced)
5748 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5749 "opcode/class '%s' unmodified\n", insn1, insn2,
5752 dsc->modinsn[0] = insn1;
5753 dsc->modinsn[1] = insn2;
5759 /* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without any modification. */
5762 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5764 struct displaced_step_closure *dsc)
5766 if (debug_displaced)
5767 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5768 "opcode/class '%s' unmodified\n", insn,
5771 dsc->modinsn[0] = insn;
5776 /* Preload instructions with immediate offset. */
5779 cleanup_preload (struct gdbarch *gdbarch,
5780 struct regcache *regs, struct displaced_step_closure *dsc)
5782 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5783 if (!dsc->u.preload.immed)
5784 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5788 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5789 struct displaced_step_closure *dsc, unsigned int rn)
5792 /* Preload instructions:
5794 {pli/pld} [rn, #+/-imm]
5796 {pli/pld} [r0, #+/-imm]. */
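/* For instance, an ARM "pld [pc, #16]" at address A is executed from the
   scratch area as "pld [r0, #16]", with r0 temporarily holding A + 8
   (the value displaced_read_reg returns for the PC in ARM mode). */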
5798 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5799 rn_val = displaced_read_reg (regs, dsc, rn);
5800 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5801 dsc->u.preload.immed = 1;
5803 dsc->cleanup = &cleanup_preload;
5807 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5808 struct displaced_step_closure *dsc)
5810 unsigned int rn = bits (insn, 16, 19);
5812 if (!insn_references_pc (insn, 0x000f0000ul))
5813 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5815 if (debug_displaced)
5816 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5817 (unsigned long) insn);
5819 dsc->modinsn[0] = insn & 0xfff0ffff;
5821 install_preload (gdbarch, regs, dsc, rn);
5827 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5828 struct regcache *regs, struct displaced_step_closure *dsc)
5830 unsigned int rn = bits (insn1, 0, 3);
5831 unsigned int u_bit = bit (insn1, 7);
5832 int imm12 = bits (insn2, 0, 11);
5835 if (rn != ARM_PC_REGNUM)
5836 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5838 /* PC may only be used in PLI (immediate, literal) Encoding T3 and
5839 PLD (literal) Encoding T1. */
5840 if (debug_displaced)
5841 fprintf_unfiltered (gdb_stdlog,
5842 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5843 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5849 /* Rewrite instruction {pli/pld} PC imm12 into:
5850 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5854 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5856 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5857 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5859 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5861 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5862 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5863 dsc->u.preload.immed = 0;
5865 /* {pli/pld} [r0, r1] */
5866 dsc->modinsn[0] = insn1 & 0xfff0;
5867 dsc->modinsn[1] = 0xf001;
5870 dsc->cleanup = &cleanup_preload;
5874 /* Preload instructions with register offset. */
5877 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5878 struct displaced_step_closure *dsc, unsigned int rn,
5881 ULONGEST rn_val, rm_val;
5883 /* Preload register-offset instructions:
5885 {pli/pld} [rn, rm {, shift}]
5887 {pli/pld} [r0, r1 {, shift}]. */
5889 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5890 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5891 rn_val = displaced_read_reg (regs, dsc, rn);
5892 rm_val = displaced_read_reg (regs, dsc, rm);
5893 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5894 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5895 dsc->u.preload.immed = 0;
5897 dsc->cleanup = &cleanup_preload;
5901 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5902 struct regcache *regs,
5903 struct displaced_step_closure *dsc)
5905 unsigned int rn = bits (insn, 16, 19);
5906 unsigned int rm = bits (insn, 0, 3);
5909 if (!insn_references_pc (insn, 0x000f000ful))
5910 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5912 if (debug_displaced)
5913 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5914 (unsigned long) insn);
5916 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5918 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5922 /* Copy/cleanup coprocessor load and store instructions. */
5925 cleanup_copro_load_store (struct gdbarch *gdbarch,
5926 struct regcache *regs,
5927 struct displaced_step_closure *dsc)
5929 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5931 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5933 if (dsc->u.ldst.writeback)
5934 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5938 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5939 struct displaced_step_closure *dsc,
5940 int writeback, unsigned int rn)
5944 /* Coprocessor load/store instructions:
5946 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5948 {stc/stc2} [r0, #+/-imm].
5950 ldc/ldc2 are handled identically. */
5952 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5953 rn_val = displaced_read_reg (regs, dsc, rn);
5954 /* PC should be 4-byte aligned. */
5955 rn_val = rn_val & 0xfffffffc;
5956 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5958 dsc->u.ldst.writeback = writeback;
5959 dsc->u.ldst.rn = rn;
5961 dsc->cleanup = &cleanup_copro_load_store;
5965 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5966 struct regcache *regs,
5967 struct displaced_step_closure *dsc)
5969 unsigned int rn = bits (insn, 16, 19);
5971 if (!insn_references_pc (insn, 0x000f0000ul))
5972 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5974 if (debug_displaced)
5975 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5976 "load/store insn %.8lx\n", (unsigned long) insn);
5978 dsc->modinsn[0] = insn & 0xfff0ffff;
5980 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5986 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5987 uint16_t insn2, struct regcache *regs,
5988 struct displaced_step_closure *dsc)
5990 unsigned int rn = bits (insn1, 0, 3);
5992 if (rn != ARM_PC_REGNUM)
5993 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5994 "copro load/store", dsc);
5996 if (debug_displaced)
5997 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5998 "load/store insn %.4x%.4x\n", insn1, insn2);
6000 dsc->modinsn[0] = insn1 & 0xfff0;
6001 dsc->modinsn[1] = insn2;
6004 /* This function is called for copying LDC/LDC2/VLDR instructions, which
6005 don't support writeback, so pass 0. */
6006 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6011 /* Clean up branch instructions (actually perform the branch, by setting the PC). */
6015 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6016 struct displaced_step_closure *dsc)
6018 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6019 int branch_taken = condition_true (dsc->u.branch.cond, status);
6020 enum pc_write_style write_pc = dsc->u.branch.exchange
6021 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6026 if (dsc->u.branch.link)
6028 /* LR should hold the address of the instruction following the current
6029 one. To avoid confusing logic that later handles a `bx lr', set bit 0
6030 of the LR value when the current instruction is Thumb. */
6031 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6034 next_insn_addr |= 0x1;
6036 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6040 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6043 /* Copy B/BL/BLX instructions with immediate destinations. */
6046 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6047 struct displaced_step_closure *dsc,
6048 unsigned int cond, int exchange, int link, long offset)
6050 /* Implement "BL<cond> <label>" as:
6052 Preparation: cond <- instruction condition
6053 Insn: mov r0, r0 (nop)
6054 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6056 B<cond> similar, but don't set r14 in cleanup. */
6058 dsc->u.branch.cond = cond;
6059 dsc->u.branch.link = link;
6060 dsc->u.branch.exchange = exchange;
6062 dsc->u.branch.dest = dsc->insn_addr;
6063 if (link && exchange)
6064 /* For BLX, offset is computed from the Align (PC, 4). */
6065 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6068 dsc->u.branch.dest += 4 + offset;
6070 dsc->u.branch.dest += 8 + offset;
6072 dsc->cleanup = &cleanup_branch;
6075 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6076 struct regcache *regs, struct displaced_step_closure *dsc)
6078 unsigned int cond = bits (insn, 28, 31);
6079 int exchange = (cond == 0xf);
6080 int link = exchange || bit (insn, 24);
6083 if (debug_displaced)
6084 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6085 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6086 (unsigned long) insn);
6088 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6089 then arrange the switch into Thumb mode. */
6090 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6092 offset = bits (insn, 0, 23) << 2;
6094 if (bit (offset, 25))
6095 offset = offset | ~0x3ffffff;
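/* OFFSET is now the sign-extended, byte-scaled displacement; for example
   an imm24 of 0xffffff (without BLX's extra bits) works out to -4. */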
6097 dsc->modinsn[0] = ARM_NOP;
6099 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6104 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6105 uint16_t insn2, struct regcache *regs,
6106 struct displaced_step_closure *dsc)
6108 int link = bit (insn2, 14);
6109 int exchange = link && !bit (insn2, 12);
6112 int j1 = bit (insn2, 13);
6113 int j2 = bit (insn2, 11);
6114 int s = sbits (insn1, 10, 10);
6115 int i1 = !(j1 ^ bit (insn1, 10));
6116 int i2 = !(j2 ^ bit (insn1, 10));
6118 if (!link && !exchange) /* B */
6120 offset = (bits (insn2, 0, 10) << 1);
6121 if (bit (insn2, 12)) /* Encoding T4 */
6123 offset |= (bits (insn1, 0, 9) << 12)
6129 else /* Encoding T3 */
6131 offset |= (bits (insn1, 0, 5) << 12)
6135 cond = bits (insn1, 6, 9);
6140 offset = (bits (insn1, 0, 9) << 12);
6141 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6142 offset |= exchange ?
6143 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6146 if (debug_displaced)
6147 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6148 "%.4x %.4x with offset %.8lx\n",
6149 link ? (exchange) ? "blx" : "bl" : "b",
6150 insn1, insn2, offset);
6152 dsc->modinsn[0] = THUMB_NOP;
6154 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6158 /* Copy B Thumb instructions. */
6160 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6161 struct displaced_step_closure *dsc)
6163 unsigned int cond = 0;
6165 unsigned short bit_12_15 = bits (insn, 12, 15);
6166 CORE_ADDR from = dsc->insn_addr;
6168 if (bit_12_15 == 0xd)
6170 /* offset = SignExtend (imm8:0, 32) */
6171 offset = sbits ((insn << 1), 0, 8);
6172 cond = bits (insn, 8, 11);
6174 else if (bit_12_15 == 0xe) /* Encoding T2 */
6176 offset = sbits ((insn << 1), 0, 11);
6180 if (debug_displaced)
6181 fprintf_unfiltered (gdb_stdlog,
6182 "displaced: copying b immediate insn %.4x "
6183 "with offset %d\n", insn, offset);
6185 dsc->u.branch.cond = cond;
6186 dsc->u.branch.link = 0;
6187 dsc->u.branch.exchange = 0;
6188 dsc->u.branch.dest = from + 4 + offset;
6190 dsc->modinsn[0] = THUMB_NOP;
6192 dsc->cleanup = &cleanup_branch;
6197 /* Copy BX/BLX with register-specified destinations. */
6200 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6201 struct displaced_step_closure *dsc, int link,
6202 unsigned int cond, unsigned int rm)
6204 /* Implement "{BX,BLX}<cond> <reg>" as:
6206 Preparation: cond <- instruction condition
6207 Insn: mov r0, r0 (nop)
6208 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6210 Don't set r14 in cleanup for BX. */
6212 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6214 dsc->u.branch.cond = cond;
6215 dsc->u.branch.link = link;
6217 dsc->u.branch.exchange = 1;
6219 dsc->cleanup = &cleanup_branch;
6223 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6224 struct regcache *regs, struct displaced_step_closure *dsc)
6226 unsigned int cond = bits (insn, 28, 31);
6229 int link = bit (insn, 5);
6230 unsigned int rm = bits (insn, 0, 3);
6232 if (debug_displaced)
6233 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6234 (unsigned long) insn);
6236 dsc->modinsn[0] = ARM_NOP;
6238 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6243 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6244 struct regcache *regs,
6245 struct displaced_step_closure *dsc)
6247 int link = bit (insn, 7);
6248 unsigned int rm = bits (insn, 3, 6);
6250 if (debug_displaced)
6251 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6252 (unsigned short) insn);
6254 dsc->modinsn[0] = THUMB_NOP;
6256 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6262 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6265 cleanup_alu_imm (struct gdbarch *gdbarch,
6266 struct regcache *regs, struct displaced_step_closure *dsc)
6268 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6269 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6270 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6271 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6275 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6276 struct displaced_step_closure *dsc)
6278 unsigned int rn = bits (insn, 16, 19);
6279 unsigned int rd = bits (insn, 12, 15);
6280 unsigned int op = bits (insn, 21, 24);
6281 int is_mov = (op == 0xd);
6282 ULONGEST rd_val, rn_val;
6284 if (!insn_references_pc (insn, 0x000ff000ul))
6285 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6287 if (debug_displaced)
6288 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6289 "%.8lx\n", is_mov ? "move" : "ALU",
6290 (unsigned long) insn);
6292 /* Instruction is of form:
6294 <op><cond> rd, [rn,] #imm
6298 Preparation: tmp1, tmp2 <- r0, r1;
6300 Insn: <op><cond> r0, r1, #imm
6301 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
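For example, "add r2, pc, #4" is executed as "add r0, r1, #4" with r1
holding the value the PC would have had at the original location and r0
holding the old contents of r2; the cleanup then copies r0 back into r2
and restores r0/r1. */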
6304 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6305 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6306 rn_val = displaced_read_reg (regs, dsc, rn);
6307 rd_val = displaced_read_reg (regs, dsc, rd);
6308 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6309 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6313 dsc->modinsn[0] = insn & 0xfff00fff;
6315 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6317 dsc->cleanup = &cleanup_alu_imm;
6323 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6324 uint16_t insn2, struct regcache *regs,
6325 struct displaced_step_closure *dsc)
6327 unsigned int op = bits (insn1, 5, 8);
6328 unsigned int rn, rm, rd;
6329 ULONGEST rd_val, rn_val;
6331 rn = bits (insn1, 0, 3); /* Rn */
6332 rm = bits (insn2, 0, 3); /* Rm */
6333 rd = bits (insn2, 8, 11); /* Rd */
6335 /* This routine is only called for instruction MOV. */
6336 gdb_assert (op == 0x2 && rn == 0xf);
6338 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6339 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6341 if (debug_displaced)
6342 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6343 "ALU", insn1, insn2);
6345 /* Instruction is of form:
6347 <op><cond> rd, [rn,] #imm
6351 Preparation: tmp1, tmp2 <- r0, r1;
6353 Insn: <op><cond> r0, r1, #imm
6354 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6357 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6358 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6359 rn_val = displaced_read_reg (regs, dsc, rn);
6360 rd_val = displaced_read_reg (regs, dsc, rd);
6361 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6362 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6365 dsc->modinsn[0] = insn1;
6366 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6369 dsc->cleanup = &cleanup_alu_imm;
6374 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6377 cleanup_alu_reg (struct gdbarch *gdbarch,
6378 struct regcache *regs, struct displaced_step_closure *dsc)
6383 rd_val = displaced_read_reg (regs, dsc, 0);
6385 for (i = 0; i < 3; i++)
6386 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6388 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6392 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6393 struct displaced_step_closure *dsc,
6394 unsigned int rd, unsigned int rn, unsigned int rm)
6396 ULONGEST rd_val, rn_val, rm_val;
6398 /* Instruction is of form:
6400 <op><cond> rd, [rn,] rm [, <shift>]
6404 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6405 r0, r1, r2 <- rd, rn, rm
6406 Insn: <op><cond> r0, r1, r2 [, <shift>]
6407 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6410 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6411 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6412 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6413 rd_val = displaced_read_reg (regs, dsc, rd);
6414 rn_val = displaced_read_reg (regs, dsc, rn);
6415 rm_val = displaced_read_reg (regs, dsc, rm);
6416 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6417 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6418 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6421 dsc->cleanup = &cleanup_alu_reg;
6425 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6426 struct displaced_step_closure *dsc)
6428 unsigned int op = bits (insn, 21, 24);
6429 int is_mov = (op == 0xd);
6431 if (!insn_references_pc (insn, 0x000ff00ful))
6432 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6434 if (debug_displaced)
6435 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6436 is_mov ? "move" : "ALU", (unsigned long) insn);
6439 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6441 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6443 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6449 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6450 struct regcache *regs,
6451 struct displaced_step_closure *dsc)
6453 unsigned rn, rm, rd;
6455 rd = bits (insn, 3, 6);
6456 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6459 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6460 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6462 if (debug_displaced)
6463 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6464 "ALU", (unsigned short) insn);
6466 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6468 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6473 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6476 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6477 struct regcache *regs,
6478 struct displaced_step_closure *dsc)
6480 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6483 for (i = 0; i < 4; i++)
6484 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6486 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6490 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6491 struct displaced_step_closure *dsc,
6492 unsigned int rd, unsigned int rn, unsigned int rm,
6496 ULONGEST rd_val, rn_val, rm_val, rs_val;
6498 /* Instruction is of form:
6500 <op><cond> rd, [rn,] rm, <shift> rs
6504 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6505 r0, r1, r2, r3 <- rd, rn, rm, rs
6506 Insn: <op><cond> r0, r1, r2, <shift> r3
6508 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6512 for (i = 0; i < 4; i++)
6513 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6515 rd_val = displaced_read_reg (regs, dsc, rd);
6516 rn_val = displaced_read_reg (regs, dsc, rn);
6517 rm_val = displaced_read_reg (regs, dsc, rm);
6518 rs_val = displaced_read_reg (regs, dsc, rs);
6519 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6520 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6521 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6522 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6524 dsc->cleanup = &cleanup_alu_shifted_reg;
6528 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6529 struct regcache *regs,
6530 struct displaced_step_closure *dsc)
6532 unsigned int op = bits (insn, 21, 24);
6533 int is_mov = (op == 0xd);
6534 unsigned int rd, rn, rm, rs;
6536 if (!insn_references_pc (insn, 0x000fff0ful))
6537 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6539 if (debug_displaced)
6540 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6541 "%.8lx\n", is_mov ? "move" : "ALU",
6542 (unsigned long) insn);
6544 rn = bits (insn, 16, 19);
6545 rm = bits (insn, 0, 3);
6546 rs = bits (insn, 8, 11);
6547 rd = bits (insn, 12, 15);
6550 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6552 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6554 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6559 /* Clean up load instructions. */
6562 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6563 struct displaced_step_closure *dsc)
6565 ULONGEST rt_val, rt_val2 = 0, rn_val;
6567 rt_val = displaced_read_reg (regs, dsc, 0);
6568 if (dsc->u.ldst.xfersize == 8)
6569 rt_val2 = displaced_read_reg (regs, dsc, 1);
6570 rn_val = displaced_read_reg (regs, dsc, 2);
6572 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6573 if (dsc->u.ldst.xfersize > 4)
6574 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6575 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6576 if (!dsc->u.ldst.immed)
6577 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6579 /* Handle register writeback. */
6580 if (dsc->u.ldst.writeback)
6581 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6582 /* Put result in right place. */
6583 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6584 if (dsc->u.ldst.xfersize == 8)
6585 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6588 /* Clean up store instructions. */
6591 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6592 struct displaced_step_closure *dsc)
6594 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6596 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6597 if (dsc->u.ldst.xfersize > 4)
6598 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6599 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6600 if (!dsc->u.ldst.immed)
6601 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6602 if (!dsc->u.ldst.restore_r4)
6603 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6606 if (dsc->u.ldst.writeback)
6607 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6610 /* Copy "extra" load/store instructions. These are halfword/doubleword
6611 transfers, which have a different encoding to byte/word transfers. */
6614 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6615 struct regcache *regs, struct displaced_step_closure *dsc)
6617 unsigned int op1 = bits (insn, 20, 24);
6618 unsigned int op2 = bits (insn, 5, 6);
6619 unsigned int rt = bits (insn, 12, 15);
6620 unsigned int rn = bits (insn, 16, 19);
6621 unsigned int rm = bits (insn, 0, 3);
6622 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6623 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
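/* opcode, computed below from op2 and the low bits of op1, indexes these
   tables: load[] says whether the decoded form is a load, and bytesize[]
   gives its transfer size in bytes (1 = byte, 2 = halfword, 8 = doubleword). */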
6624 int immed = (op1 & 0x4) != 0;
6626 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6628 if (!insn_references_pc (insn, 0x000ff00ful))
6629 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6631 if (debug_displaced)
6632 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6633 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6634 (unsigned long) insn);
6636 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6639 internal_error (__FILE__, __LINE__,
6640 _("copy_extra_ld_st: instruction decode error"));
6642 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6643 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6644 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6646 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6648 rt_val = displaced_read_reg (regs, dsc, rt);
6649 if (bytesize[opcode] == 8)
6650 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6651 rn_val = displaced_read_reg (regs, dsc, rn);
6653 rm_val = displaced_read_reg (regs, dsc, rm);
6655 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6656 if (bytesize[opcode] == 8)
6657 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6658 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6660 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6663 dsc->u.ldst.xfersize = bytesize[opcode];
6664 dsc->u.ldst.rn = rn;
6665 dsc->u.ldst.immed = immed;
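/* Writeback happens for post-indexed addressing (P, bit 24, clear) and for
   pre-indexed addressing with W (bit 21) set. */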
6666 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6667 dsc->u.ldst.restore_r4 = 0;
6670 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6672 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6673 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6675 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6677 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6678 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6680 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6685 /* Copy byte/halfword/word loads and stores. */
6688 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6689 struct displaced_step_closure *dsc, int load,
6690 int immed, int writeback, int size, int usermode,
6691 int rt, int rm, int rn)
6693 ULONGEST rt_val, rn_val, rm_val = 0;
6695 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6696 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6698 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6700 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6702 rt_val = displaced_read_reg (regs, dsc, rt);
6703 rn_val = displaced_read_reg (regs, dsc, rn);
6705 rm_val = displaced_read_reg (regs, dsc, rm);
6707 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6708 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6710 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6712 dsc->u.ldst.xfersize = size;
6713 dsc->u.ldst.rn = rn;
6714 dsc->u.ldst.immed = immed;
6715 dsc->u.ldst.writeback = writeback;
6717 /* To write PC we can do:
6719 Before this sequence of instructions:
6720 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
6721 r2 is the Rn value obtained from displaced_read_reg.
6723 Insn1: push {pc} Write address of STR instruction + offset on stack
6724 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6725 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6726 = addr(Insn1) + offset - addr(Insn3) - 8 = offset - 16
6728 Insn4: add r4, r4, #8 r4 = offset - 8
6729 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8 = from + offset
6731 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6733 Otherwise we don't know what value to write for PC, since the offset is
6734 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6735 of this can be found in Section "Saving from r15" in
6736 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
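/* For instance, on a core where STR of the PC stores PC + 8, the pop in
   Insn2 reads addr(Insn1) + 8, and the sequence above leaves r0 = from + 8,
   exactly the value the original STR at from would have stored. */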
6738 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6743 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6744 uint16_t insn2, struct regcache *regs,
6745 struct displaced_step_closure *dsc, int size)
6747 unsigned int u_bit = bit (insn1, 7);
6748 unsigned int rt = bits (insn2, 12, 15);
6749 int imm12 = bits (insn2, 0, 11);
6752 if (debug_displaced)
6753 fprintf_unfiltered (gdb_stdlog,
6754 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6755 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6761 /* Rewrite instruction LDR Rt imm12 into:
6763 Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc, r3 <- imm12
6767 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3]. */
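/* For example, assuming the add (U = 1) form, ldr.w r9, [pc, #44] is executed
   out of line as ldr r0, [r2, r3] with r2 = Align(PC, 4) and r3 = 44; the
   cleanup then moves the loaded value from r0 into r9. */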
6770 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6771 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6772 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6774 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6776 pc_val = pc_val & 0xfffffffc;
6778 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6779 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6783 dsc->u.ldst.xfersize = size;
6784 dsc->u.ldst.immed = 0;
6785 dsc->u.ldst.writeback = 0;
6786 dsc->u.ldst.restore_r4 = 0;
6788 /* LDR R0, [R2, R3] */
6789 dsc->modinsn[0] = 0xf852;
6790 dsc->modinsn[1] = 0x3;
6793 dsc->cleanup = &cleanup_load;
6799 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6800 uint16_t insn2, struct regcache *regs,
6801 struct displaced_step_closure *dsc,
6802 int writeback, int immed)
6804 unsigned int rt = bits (insn2, 12, 15);
6805 unsigned int rn = bits (insn1, 0, 3);
6806 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6807 /* In LDR (register), there is also a register Rm, which is not allowed to
6808 be PC, so we don't have to check it. */
6810 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6811 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6814 if (debug_displaced)
6815 fprintf_unfiltered (gdb_stdlog,
6816 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6817 rt, rn, insn1, insn2);
6819 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6822 dsc->u.ldst.restore_r4 = 0;
6825 /* ldr[b]<cond> rt, [rn, #imm], etc.
6827 ldr[b]<cond> r0, [r2, #imm]. */
6829 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6830 dsc->modinsn[1] = insn2 & 0x0fff;
6833 /* ldr[b]<cond> rt, [rn, rm], etc.
6835 ldr[b]<cond> r0, [r2, r3]. */
6837 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6838 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6848 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6849 struct regcache *regs,
6850 struct displaced_step_closure *dsc,
6851 int load, int size, int usermode)
6853 int immed = !bit (insn, 25);
6854 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6855 unsigned int rt = bits (insn, 12, 15);
6856 unsigned int rn = bits (insn, 16, 19);
6857 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6859 if (!insn_references_pc (insn, 0x000ff00ful))
6860 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6862 if (debug_displaced)
6863 fprintf_unfiltered (gdb_stdlog,
6864 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6865 load ? (size == 1 ? "ldrb" : "ldr")
6866 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6868 (unsigned long) insn);
6870 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6871 usermode, rt, rm, rn);
6873 if (load || rt != ARM_PC_REGNUM)
6875 dsc->u.ldst.restore_r4 = 0;
6878 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6880 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6881 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6883 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6885 {ldr,str}[b]<cond> r0, [r2, r3]. */
6886 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6890 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6891 dsc->u.ldst.restore_r4 = 1;
6892 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6893 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6894 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6895 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6896 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6900 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6902 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6907 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6912 /* Cleanup LDM instructions with fully-populated register list. This is an
6913 unfortunate corner case: it's impossible to implement correctly by modifying
6914 the instruction. The issue is as follows: we have an instruction,
6918 which we must rewrite to avoid loading PC. A possible solution would be to
6919 do the load in two halves, something like (with suitable cleanup
6923 ldm[id][ab] r8!, {r0-r7}
6925 ldm[id][ab] r8, {r7-r14}
6928 but at present there's no suitable place for <temp>, since the scratch space
6929 is overwritten before the cleanup routine is called. For now, we simply
6930 emulate the instruction. */
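/* Concretely, the out-of-line copy is just a NOP; the cleanup below reads
   each transferred word from memory itself and writes it directly into the
   corresponding register, honouring the increment/decrement and before/after
   addressing of the original instruction, and performs any writeback
   manually. */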
6933 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6934 struct displaced_step_closure *dsc)
6936 int inc = dsc->u.block.increment;
6937 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6938 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6939 uint32_t regmask = dsc->u.block.regmask;
6940 int regno = inc ? 0 : 15;
6941 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6942 int exception_return = dsc->u.block.load && dsc->u.block.user
6943 && (regmask & 0x8000) != 0;
6944 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6945 int do_transfer = condition_true (dsc->u.block.cond, status);
6946 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6951 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6952 sensible we can do here. Complain loudly. */
6953 if (exception_return)
6954 error (_("Cannot single-step exception return"));
6956 /* We don't handle any stores here for now. */
6957 gdb_assert (dsc->u.block.load != 0);
6959 if (debug_displaced)
6960 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6961 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6962 dsc->u.block.increment ? "inc" : "dec",
6963 dsc->u.block.before ? "before" : "after");
6970 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6973 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6976 xfer_addr += bump_before;
6978 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6979 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6981 xfer_addr += bump_after;
6983 regmask &= ~(1 << regno);
6986 if (dsc->u.block.writeback)
6987 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6991 /* Clean up an STM which included the PC in the register list. */
6994 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6995 struct displaced_step_closure *dsc)
6997 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6998 int store_executed = condition_true (dsc->u.block.cond, status);
6999 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7000 CORE_ADDR stm_insn_addr;
7003 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7005 /* If condition code fails, there's nothing else to do. */
7006 if (!store_executed)
7009 if (dsc->u.block.increment)
7011 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7013 if (dsc->u.block.before)
7018 pc_stored_at = dsc->u.block.xfer_addr;
7020 if (dsc->u.block.before)
7024 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7025 stm_insn_addr = dsc->scratch_base;
7026 offset = pc_val - stm_insn_addr;
7028 if (debug_displaced)
7029 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7030 "STM instruction\n", offset);
7032 /* Rewrite the stored PC to the proper value for the non-displaced original instruction. */
7034 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7035 dsc->insn_addr + offset);
7038 /* Clean up an LDM which includes the PC in the register list. We clumped all
7039 the registers in the transferred list into a contiguous range r0...rX (to
7040 avoid loading PC directly and losing control of the debugged program), so we
7041 must undo that here. */
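/* For example, ldm r4, {r1, r7, pc} is executed out of line as
   ldm r4, {r0, r1, r2}; this cleanup then moves r2 into the PC, r1 into r7
   and r0 into r1, and restores the remaining clobbered registers (here r0
   and r2) from dsc->tmp[]. */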
7044 cleanup_block_load_pc (struct gdbarch *gdbarch,
7045 struct regcache *regs,
7046 struct displaced_step_closure *dsc)
7048 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7049 int load_executed = condition_true (dsc->u.block.cond, status);
7050 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7051 unsigned int regs_loaded = bitcount (mask);
7052 unsigned int num_to_shuffle = regs_loaded, clobbered;
7054 /* The method employed here will fail if the register list is fully populated
7055 (we need to avoid loading PC directly). */
7056 gdb_assert (num_to_shuffle < 16);
7061 clobbered = (1 << num_to_shuffle) - 1;
7063 while (num_to_shuffle > 0)
7065 if ((mask & (1 << write_reg)) != 0)
7067 unsigned int read_reg = num_to_shuffle - 1;
7069 if (read_reg != write_reg)
7071 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7072 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7073 if (debug_displaced)
7074 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7075 "loaded register r%d to r%d\n"), read_reg,
7078 else if (debug_displaced)
7079 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7080 "r%d already in the right place\n"),
7083 clobbered &= ~(1 << write_reg);
7091 /* Restore any registers we scribbled over. */
7092 for (write_reg = 0; clobbered != 0; write_reg++)
7094 if ((clobbered & (1 << write_reg)) != 0)
7096 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7098 if (debug_displaced)
7099 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7100 "clobbered register r%d\n"), write_reg);
7101 clobbered &= ~(1 << write_reg);
7105 /* Perform register writeback manually. */
7106 if (dsc->u.block.writeback)
7108 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7110 if (dsc->u.block.increment)
7111 new_rn_val += regs_loaded * 4;
7113 new_rn_val -= regs_loaded * 4;
7115 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7120 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7121 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7124 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7125 struct regcache *regs,
7126 struct displaced_step_closure *dsc)
7128 int load = bit (insn, 20);
7129 int user = bit (insn, 22);
7130 int increment = bit (insn, 23);
7131 int before = bit (insn, 24);
7132 int writeback = bit (insn, 21);
7133 int rn = bits (insn, 16, 19);
7135 /* Block transfers which don't mention PC can be run directly out-of-line. */
7137 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7138 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7140 if (rn == ARM_PC_REGNUM)
7142 warning (_("displaced: Unpredictable LDM or STM with "
7143 "base register r15"));
7144 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7147 if (debug_displaced)
7148 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7149 "%.8lx\n", (unsigned long) insn);
7151 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7152 dsc->u.block.rn = rn;
7154 dsc->u.block.load = load;
7155 dsc->u.block.user = user;
7156 dsc->u.block.increment = increment;
7157 dsc->u.block.before = before;
7158 dsc->u.block.writeback = writeback;
7159 dsc->u.block.cond = bits (insn, 28, 31);
7161 dsc->u.block.regmask = insn & 0xffff;
7165 if ((insn & 0xffff) == 0xffff)
7167 /* LDM with a fully-populated register list. This case is
7168 particularly tricky. Implement for now by fully emulating the
7169 instruction (which might not behave perfectly in all cases, but
7170 these instructions should be rare enough for that not to matter in practice). */
7172 dsc->modinsn[0] = ARM_NOP;
7174 dsc->cleanup = &cleanup_block_load_all;
7178 /* LDM of a list of registers which includes PC. Implement by
7179 rewriting the list of registers to be transferred into a
7180 contiguous chunk r0...rX before doing the transfer, then shuffling
7181 registers into the correct places in the cleanup routine. */
7182 unsigned int regmask = insn & 0xffff;
7183 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7184 unsigned int to = 0, from = 0, i, new_rn;
7186 for (i = 0; i < num_in_list; i++)
7187 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7189 /* Writeback makes things complicated. We need to avoid clobbering
7190 the base register with one of the registers in our modified
7191 register list, but just using a different register can't work in
7194 ldm r14!, {r0-r13,pc}
7196 which would need to be rewritten as:
7200 but that can't work, because there's no free register for N.
7202 Solve this by turning off the writeback bit, and emulating
7203 writeback manually in the cleanup routine. */
7208 new_regmask = (1 << num_in_list) - 1;
7210 if (debug_displaced)
7211 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7212 "{..., pc}: original reg list %.4x, modified "
7213 "list %.4x\n"), rn, writeback ? "!" : "",
7214 (int) insn & 0xffff, new_regmask);
7216 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7218 dsc->cleanup = &cleanup_block_load_pc;
7223 /* STM of a list of registers which includes PC. Run the instruction
7224 as-is, but out of line: this will store the wrong value for the PC,
7225 so we must manually fix up the memory in the cleanup routine.
7226 Doing things this way has the advantage that we can auto-detect
7227 the offset of the PC write (which is architecture-dependent) in
7228 the cleanup routine. */
7229 dsc->modinsn[0] = insn;
7231 dsc->cleanup = &cleanup_block_store_pc;
7238 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7239 struct regcache *regs,
7240 struct displaced_step_closure *dsc)
7242 int rn = bits (insn1, 0, 3);
7243 int load = bit (insn1, 4);
7244 int writeback = bit (insn1, 5);
7246 /* Block transfers which don't mention PC can be run directly out-of-line. */
7248 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7249 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7251 if (rn == ARM_PC_REGNUM)
7253 warning (_("displaced: Unpredictable LDM or STM with "
7254 "base register r15"));
7255 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7256 "unpredictable ldm/stm", dsc);
7259 if (debug_displaced)
7260 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7261 "%.4x%.4x\n", insn1, insn2);
7263 /* Clear bit 13, since it should always be zero. */
7264 dsc->u.block.regmask = (insn2 & 0xdfff);
7265 dsc->u.block.rn = rn;
7267 dsc->u.block.load = load;
7268 dsc->u.block.user = 0;
7269 dsc->u.block.increment = bit (insn1, 7);
7270 dsc->u.block.before = bit (insn1, 8);
7271 dsc->u.block.writeback = writeback;
7272 dsc->u.block.cond = INST_AL;
7273 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7277 if (dsc->u.block.regmask == 0xffff)
7279 /* This case should be impossible to reach, since bit 13 was cleared above. */
7284 unsigned int regmask = dsc->u.block.regmask;
7285 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7286 unsigned int to = 0, from = 0, i, new_rn;
7288 for (i = 0; i < num_in_list; i++)
7289 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7294 new_regmask = (1 << num_in_list) - 1;
7296 if (debug_displaced)
7297 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7298 "{..., pc}: original reg list %.4x, modified "
7299 "list %.4x\n"), rn, writeback ? "!" : "",
7300 (int) dsc->u.block.regmask, new_regmask);
7302 dsc->modinsn[0] = insn1;
7303 dsc->modinsn[1] = (new_regmask & 0xffff);
7306 dsc->cleanup = &cleanup_block_load_pc;
7311 dsc->modinsn[0] = insn1;
7312 dsc->modinsn[1] = insn2;
7314 dsc->cleanup = &cleanup_block_store_pc;
7319 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7320 for Linux, where some SVC instructions must be treated specially. */
7323 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7324 struct displaced_step_closure *dsc)
7326 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7328 if (debug_displaced)
7329 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7330 "%.8lx\n", (unsigned long) resume_addr);
7332 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7336 /* Common copy routine for svc instructions. */
7339 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7340 struct displaced_step_closure *dsc)
7342 /* Preparation: none.
7343 Insn: unmodified svc.
7344 Cleanup: pc <- insn_addr + insn_size. */
7346 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next instruction. */
7348 dsc->wrote_to_pc = 1;
7350 /* Allow OS-specific code to override SVC handling. */
7351 if (dsc->u.svc.copy_svc_os)
7352 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7355 dsc->cleanup = &cleanup_svc;
7361 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7362 struct regcache *regs, struct displaced_step_closure *dsc)
7365 if (debug_displaced)
7366 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7367 (unsigned long) insn);
7369 dsc->modinsn[0] = insn;
7371 return install_svc (gdbarch, regs, dsc);
7375 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7376 struct regcache *regs, struct displaced_step_closure *dsc)
7379 if (debug_displaced)
7380 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7383 dsc->modinsn[0] = insn;
7385 return install_svc (gdbarch, regs, dsc);
7388 /* Copy undefined instructions. */
7391 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7392 struct displaced_step_closure *dsc)
7394 if (debug_displaced)
7395 fprintf_unfiltered (gdb_stdlog,
7396 "displaced: copying undefined insn %.8lx\n",
7397 (unsigned long) insn);
7399 dsc->modinsn[0] = insn;
7405 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7406 struct displaced_step_closure *dsc)
7409 if (debug_displaced)
7410 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7411 "%.4x %.4x\n", (unsigned short) insn1,
7412 (unsigned short) insn2);
7414 dsc->modinsn[0] = insn1;
7415 dsc->modinsn[1] = insn2;
7421 /* Copy unpredictable instructions. */
7424 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7425 struct displaced_step_closure *dsc)
7427 if (debug_displaced)
7428 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7429 "%.8lx\n", (unsigned long) insn);
7431 dsc->modinsn[0] = insn;
7436 /* The decode_* functions are instruction decoding helpers. They mostly follow
7437 the presentation in the ARM ARM. */
7440 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7441 struct regcache *regs,
7442 struct displaced_step_closure *dsc)
7444 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7445 unsigned int rn = bits (insn, 16, 19);
7447 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7448 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7449 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7450 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7451 else if ((op1 & 0x60) == 0x20)
7452 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7453 else if ((op1 & 0x71) == 0x40)
7454 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7456 else if ((op1 & 0x77) == 0x41)
7457 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7458 else if ((op1 & 0x77) == 0x45)
7459 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7460 else if ((op1 & 0x77) == 0x51)
7463 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7465 return arm_copy_unpred (gdbarch, insn, dsc);
7467 else if ((op1 & 0x77) == 0x55)
7468 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7469 else if (op1 == 0x57)
7472 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7473 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7474 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7475 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7476 default: return arm_copy_unpred (gdbarch, insn, dsc);
7478 else if ((op1 & 0x63) == 0x43)
7479 return arm_copy_unpred (gdbarch, insn, dsc);
7480 else if ((op2 & 0x1) == 0x0)
7481 switch (op1 & ~0x80)
7484 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7486 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7487 case 0x71: case 0x75:
7489 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7490 case 0x63: case 0x67: case 0x73: case 0x77:
7491 return arm_copy_unpred (gdbarch, insn, dsc);
7493 return arm_copy_undef (gdbarch, insn, dsc);
7496 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7500 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7501 struct regcache *regs,
7502 struct displaced_step_closure *dsc)
7504 if (bit (insn, 27) == 0)
7505 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7506 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7507 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7510 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7513 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7515 case 0x4: case 0x5: case 0x6: case 0x7:
7516 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7519 switch ((insn & 0xe00000) >> 21)
7521 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7523 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7526 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7529 return arm_copy_undef (gdbarch, insn, dsc);
7534 int rn_f = (bits (insn, 16, 19) == 0xf);
7535 switch ((insn & 0xe00000) >> 21)
7538 /* ldc/ldc2 imm (undefined for rn == pc). */
7539 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7540 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7543 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7545 case 0x4: case 0x5: case 0x6: case 0x7:
7546 /* ldc/ldc2 lit (undefined for rn != pc). */
7547 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7548 : arm_copy_undef (gdbarch, insn, dsc);
7551 return arm_copy_undef (gdbarch, insn, dsc);
7556 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7559 if (bits (insn, 16, 19) == 0xf)
7561 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7563 return arm_copy_undef (gdbarch, insn, dsc);
7567 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7569 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7573 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7575 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7578 return arm_copy_undef (gdbarch, insn, dsc);
7582 /* Decode miscellaneous instructions in dp/misc encoding space. */
7585 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7586 struct regcache *regs,
7587 struct displaced_step_closure *dsc)
7589 unsigned int op2 = bits (insn, 4, 6);
7590 unsigned int op = bits (insn, 21, 22);
7591 unsigned int op1 = bits (insn, 16, 19);
7596 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7599 if (op == 0x1) /* bx. */
7600 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7602 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7604 return arm_copy_undef (gdbarch, insn, dsc);
7608 /* Not really supported. */
7609 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7611 return arm_copy_undef (gdbarch, insn, dsc);
7615 return arm_copy_bx_blx_reg (gdbarch, insn,
7616 regs, dsc); /* blx register. */
7618 return arm_copy_undef (gdbarch, insn, dsc);
7621 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7625 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7627 /* Not really supported. */
7628 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7631 return arm_copy_undef (gdbarch, insn, dsc);
7636 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7637 struct regcache *regs,
7638 struct displaced_step_closure *dsc)
7641 switch (bits (insn, 20, 24))
7644 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7647 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7649 case 0x12: case 0x16:
7650 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7653 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7657 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7659 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7660 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7661 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7662 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7663 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7664 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7665 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7666 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7667 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7668 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7669 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7670 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7671 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7672 /* 2nd arg means "unprivileged". */
7673 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7677 /* Should be unreachable. */
7682 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7683 struct regcache *regs,
7684 struct displaced_step_closure *dsc)
7686 int a = bit (insn, 25), b = bit (insn, 4);
7687 uint32_t op1 = bits (insn, 20, 24);
7688 int rn_f = bits (insn, 16, 19) == 0xf;
7690 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7691 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7692 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7693 else if ((!a && (op1 & 0x17) == 0x02)
7694 || (a && (op1 & 0x17) == 0x02 && !b))
7695 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7696 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7697 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7698 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7699 else if ((!a && (op1 & 0x17) == 0x03)
7700 || (a && (op1 & 0x17) == 0x03 && !b))
7701 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7702 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7703 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7704 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7705 else if ((!a && (op1 & 0x17) == 0x06)
7706 || (a && (op1 & 0x17) == 0x06 && !b))
7707 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7708 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7709 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7710 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7711 else if ((!a && (op1 & 0x17) == 0x07)
7712 || (a && (op1 & 0x17) == 0x07 && !b))
7713 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7715 /* Should be unreachable. */
7720 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7721 struct displaced_step_closure *dsc)
7723 switch (bits (insn, 20, 24))
7725 case 0x00: case 0x01: case 0x02: case 0x03:
7726 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7728 case 0x04: case 0x05: case 0x06: case 0x07:
7729 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7731 case 0x08: case 0x09: case 0x0a: case 0x0b:
7732 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7733 return arm_copy_unmodified (gdbarch, insn,
7734 "decode/pack/unpack/saturate/reverse", dsc);
7737 if (bits (insn, 5, 7) == 0) /* op2. */
7739 if (bits (insn, 12, 15) == 0xf)
7740 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7742 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7745 return arm_copy_undef (gdbarch, insn, dsc);
7747 case 0x1a: case 0x1b:
7748 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7749 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7751 return arm_copy_undef (gdbarch, insn, dsc);
7753 case 0x1c: case 0x1d:
7754 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7756 if (bits (insn, 0, 3) == 0xf)
7757 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7759 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7762 return arm_copy_undef (gdbarch, insn, dsc);
7764 case 0x1e: case 0x1f:
7765 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7766 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7768 return arm_copy_undef (gdbarch, insn, dsc);
7771 /* Should be unreachable. */
7776 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7777 struct regcache *regs,
7778 struct displaced_step_closure *dsc)
7781 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7783 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7787 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7788 struct regcache *regs,
7789 struct displaced_step_closure *dsc)
7791 unsigned int opcode = bits (insn, 20, 24);
7795 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7796 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7798 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7799 case 0x12: case 0x16:
7800 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7802 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7803 case 0x13: case 0x17:
7804 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7806 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7807 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7808 /* Note: no writeback for these instructions. Bit 25 will always be
7809 zero though (via caller), so the following works OK. */
7810 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7813 /* Should be unreachable. */
7817 /* Decode shifted register instructions. */
7820 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7821 uint16_t insn2, struct regcache *regs,
7822 struct displaced_step_closure *dsc)
7824 /* PC is only allowed to be used in the MOV instruction. */
7826 unsigned int op = bits (insn1, 5, 8);
7827 unsigned int rn = bits (insn1, 0, 3);
7829 if (op == 0x2 && rn == 0xf) /* MOV */
7830 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7832 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7833 "dp (shift reg)", dsc);
7837 /* Decode extension register load/store. Exactly the same as
7838 arm_decode_ext_reg_ld_st. */
7841 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7842 uint16_t insn2, struct regcache *regs,
7843 struct displaced_step_closure *dsc)
7845 unsigned int opcode = bits (insn1, 4, 8);
7849 case 0x04: case 0x05:
7850 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7851 "vfp/neon vmov", dsc);
7853 case 0x08: case 0x0c: /* 01x00 */
7854 case 0x0a: case 0x0e: /* 01x10 */
7855 case 0x12: case 0x16: /* 10x10 */
7856 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7857 "vfp/neon vstm/vpush", dsc);
7859 case 0x09: case 0x0d: /* 01x01 */
7860 case 0x0b: case 0x0f: /* 01x11 */
7861 case 0x13: case 0x17: /* 10x11 */
7862 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7863 "vfp/neon vldm/vpop", dsc);
7865 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7866 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7868 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7869 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7872 /* Should be unreachable. */
7877 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7878 struct regcache *regs, struct displaced_step_closure *dsc)
7880 unsigned int op1 = bits (insn, 20, 25);
7881 int op = bit (insn, 4);
7882 unsigned int coproc = bits (insn, 8, 11);
7883 unsigned int rn = bits (insn, 16, 19);
7885 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7886 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7887 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7888 && (coproc & 0xe) != 0xa)
7890 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7891 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7892 && (coproc & 0xe) != 0xa)
7893 /* ldc/ldc2 imm/lit. */
7894 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7895 else if ((op1 & 0x3e) == 0x00)
7896 return arm_copy_undef (gdbarch, insn, dsc);
7897 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7898 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7899 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7900 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7901 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7902 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7903 else if ((op1 & 0x30) == 0x20 && !op)
7905 if ((coproc & 0xe) == 0xa)
7906 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7908 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7910 else if ((op1 & 0x30) == 0x20 && op)
7911 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7912 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7913 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7914 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7915 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7916 else if ((op1 & 0x30) == 0x30)
7917 return arm_copy_svc (gdbarch, insn, regs, dsc);
7919 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7923 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7924 uint16_t insn2, struct regcache *regs,
7925 struct displaced_step_closure *dsc)
7927 unsigned int coproc = bits (insn2, 8, 11);
7928 unsigned int op1 = bits (insn1, 4, 9);
7929 unsigned int bit_5_8 = bits (insn1, 5, 8);
7930 unsigned int bit_9 = bit (insn1, 9);
7931 unsigned int bit_4 = bit (insn1, 4);
7932 unsigned int rn = bits (insn1, 0, 3);
7937 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7938 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7940 else if (bit_5_8 == 0) /* UNDEFINED. */
7941 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7944 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
7945 if ((coproc & 0xe) == 0xa)
7946 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7948 else /* coproc is not 101x. */
7950 if (bit_4 == 0) /* STC/STC2. */
7951 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7953 else /* LDC/LDC2 {literal, immediate}. */
7954 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7960 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7966 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7967 struct displaced_step_closure *dsc, int rd)
7973 Preparation: Rd <- PC
7979 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7980 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7984 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7985 struct displaced_step_closure *dsc,
7986 int rd, unsigned int imm)
7989 /* Encoding T2: ADDS Rd, #imm */
7990 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7992 install_pc_relative (gdbarch, regs, dsc, rd);
7998 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7999 struct regcache *regs,
8000 struct displaced_step_closure *dsc)
8002 unsigned int rd = bits (insn, 8, 10);
8003 unsigned int imm8 = bits (insn, 0, 7);
8005 if (debug_displaced)
8006 fprintf_unfiltered (gdb_stdlog,
8007 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8010 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8014 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8015 uint16_t insn2, struct regcache *regs,
8016 struct displaced_step_closure *dsc)
8018 unsigned int rd = bits (insn2, 8, 11);
8019 /* The immediate is encoded the same way in ADR, ADD and SUB, so simply
8020 extract the raw immediate encoding rather than computing the immediate
8021 value. When generating the ADD or SUB instruction, the immediate can
8022 then be ORed directly into the encoding. */
8023 unsigned int imm_3_8 = insn2 & 0x70ff;
8024 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
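/* The 32-bit immediate is split as i:imm3:imm8, with i in bit 10 of the
   first halfword and imm3/imm8 in bits 14-12 and 7-0 of the second; the two
   masks above keep those fields in place so they can be ORed directly into
   the generated ADD/SUB encoding. */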
8026 if (debug_displaced)
8027 fprintf_unfiltered (gdb_stdlog,
8028 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8029 rd, imm_i, imm_3_8, insn1, insn2);
8031 if (bit (insn1, 7)) /* ADR Encoding T2 (SUB form) */
8033 /* Rewrite as SUB Rd, Rd, #imm (SUB immediate, encoding T3). */
8034 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8035 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8037 else /* ADR Encoding T3 (ADD form) */
8039 /* Rewrite as ADD Rd, Rd, #imm (ADD immediate, encoding T3). */
8040 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8041 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8045 install_pc_relative (gdbarch, regs, dsc, rd);
8051 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8052 struct regcache *regs,
8053 struct displaced_step_closure *dsc)
8055 unsigned int rt = bits (insn1, 8, 10);
8057 int imm8 = (bits (insn1, 0, 7) << 2);
8058 CORE_ADDR from = dsc->insn_addr;
8064 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8066 Insn: LDR R0, [R2, R3];
8067 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
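/* For example, ldr r5, [pc, #16] at address FROM becomes ldr r0, [r2, r3]
   with r2 = Align(FROM + 4, 4) and r3 = 16, and the cleanup copies the
   loaded value from r0 into r5. */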
8069 if (debug_displaced)
8070 fprintf_unfiltered (gdb_stdlog,
8071 "displaced: copying thumb ldr r%d [pc #%d]\n"
8074 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8075 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8076 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8077 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8078 /* The assembler calculates the required value of the offset from the
8079 Align(PC,4) value of this instruction to the label. */
8080 pc = pc & 0xfffffffc;
8082 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8083 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8086 dsc->u.ldst.xfersize = 4;
8088 dsc->u.ldst.immed = 0;
8089 dsc->u.ldst.writeback = 0;
8090 dsc->u.ldst.restore_r4 = 0;
8092 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
8094 dsc->cleanup = &cleanup_load;
8099 /* Copy Thumb cbnz/cbz instruction. */
8102 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8103 struct regcache *regs,
8104 struct displaced_step_closure *dsc)
8106 int non_zero = bit (insn1, 11);
8107 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8108 CORE_ADDR from = dsc->insn_addr;
8109 int rn = bits (insn1, 0, 2);
8110 int rn_val = displaced_read_reg (regs, dsc, rn);
8112 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8113 /* CBNZ and CBZ do not affect the condition flags. If the condition is
8114 true, set it to INST_AL so cleanup_branch knows the branch is taken;
8115 otherwise leave it as-is and cleanup_branch will do nothing. */
8116 if (dsc->u.branch.cond)
8118 dsc->u.branch.cond = INST_AL;
8119 dsc->u.branch.dest = from + 4 + imm5;
8122 dsc->u.branch.dest = from + 2;
8124 dsc->u.branch.link = 0;
8125 dsc->u.branch.exchange = 0;
8127 if (debug_displaced)
8128 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8129 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8130 rn, rn_val, insn1, dsc->u.branch.dest);
8132 dsc->modinsn[0] = THUMB_NOP;
8134 dsc->cleanup = &cleanup_branch;
8138 /* Copy Table Branch Byte/Halfword */
8140 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8141 uint16_t insn2, struct regcache *regs,
8142 struct displaced_step_closure *dsc)
8144 ULONGEST rn_val, rm_val;
8145 int is_tbh = bit (insn2, 4);
8146 CORE_ADDR halfwords = 0;
8147 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8149 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8150 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8156 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8157 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8163 target_read_memory (rn_val + rm_val, buf, 1);
8164 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8167 if (debug_displaced)
8168 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8169 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8170 (unsigned int) rn_val, (unsigned int) rm_val,
8171 (unsigned int) halfwords);
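/* TBB/TBH branches forward by twice the table entry, relative to the address
   of the tbb/tbh instruction plus 4 (the Thumb PC value), which is what the
   dest computation below expresses. */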
8173 dsc->u.branch.cond = INST_AL;
8174 dsc->u.branch.link = 0;
8175 dsc->u.branch.exchange = 0;
8176 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8178 dsc->cleanup = &cleanup_branch;
8184 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8185 struct displaced_step_closure *dsc)
8188 int val = displaced_read_reg (regs, dsc, 7);
8189 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8192 val = displaced_read_reg (regs, dsc, 8);
8193 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8196 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8201 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8202 struct regcache *regs,
8203 struct displaced_step_closure *dsc)
8205 dsc->u.block.regmask = insn1 & 0x00ff;
8207 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
8210 (1) The register list is full, that is, r0-r7 are all used.
8211 Prepare: tmp[0] <- r8
8213 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8214 MOV r8, r7; move the value of r7 to r8
8215 POP {r7}; store the PC value into r7
8217 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
8219 (2) The register list is not full; suppose it contains N registers
8220 (not counting PC, 0 <= N <= 7).
8221 Prepare: for each i in 0 .. N, tmp[i] <- ri.
8223 POP {r0, r1, ...., rN}; PC's slot in the reglist goes to rN
8225 Cleanup: set the registers in the original reglist from r0 - rN, then
8226 restore r0 - rN from tmp[].
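For example, pop {r1, r4, pc} (mask 0x12) is rewritten as pop {r0, r1, r2};
cleanup_block_load_pc then moves r2 into the PC, r1 into r4 and r0 into r1,
and restores the clobbered r0 and r2 from tmp[].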
8228 if (debug_displaced)
8229 fprintf_unfiltered (gdb_stdlog,
8230 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8231 dsc->u.block.regmask, insn1);
8233 if (dsc->u.block.regmask == 0xff)
8235 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8237 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8238 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8239 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8242 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8246 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8247 unsigned int new_regmask, bit = 1;
8248 unsigned int to = 0, from = 0, i, new_rn;
8250 for (i = 0; i < num_in_list + 1; i++)
8251 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8253 new_regmask = (1 << (num_in_list + 1)) - 1;
8255 if (debug_displaced)
8256 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8257 "{..., pc}: original reg list %.4x,"
8258 " modified list %.4x\n"),
8259 (int) dsc->u.block.regmask, new_regmask);
8261 dsc->u.block.regmask |= 0x8000;
8262 dsc->u.block.writeback = 0;
8263 dsc->u.block.cond = INST_AL;
8265 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8267 dsc->cleanup = &cleanup_block_load_pc;
8274 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8275 struct regcache *regs,
8276 struct displaced_step_closure *dsc)
8278 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8279 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8282 /* 16-bit thumb instructions. */
8283 switch (op_bit_12_15)
8285 /* Shift (immediate), add, subtract, move and compare. */
8286 case 0: case 1: case 2: case 3:
8287 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8288 "shift/add/sub/mov/cmp",
8292 switch (op_bit_10_11)
8294 case 0: /* Data-processing */
8295 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8299 case 1: /* Special data instructions and branch and exchange. */
8301 unsigned short op = bits (insn1, 7, 9);
8302 if (op == 6 || op == 7) /* BX or BLX */
8303 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8304 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8305 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8307 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8311 default: /* LDR (literal) */
8312 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8315 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8316 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8319 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8320 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8321 else /* Generate SP-relative address */
8322 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8324 case 11: /* Misc 16-bit instructions */
8326 switch (bits (insn1, 8, 11))
8328 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8329 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8331 case 12: case 13: /* POP */
8332 if (bit (insn1, 8)) /* PC is in register list. */
8333 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8335 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8337 case 15: /* If-Then, and hints */
8338 if (bits (insn1, 0, 3))
8339 /* If-Then makes up to four following instructions conditional.
8340 The IT instruction itself is not conditional, so handle it as an
8341 ordinary unmodified instruction. */
8342 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8345 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8348 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8353 if (op_bit_10_11 < 2) /* Store multiple registers */
8354 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8355 else /* Load multiple registers */
8356 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8358 case 13: /* Conditional branch and supervisor call */
8359 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8360 err = thumb_copy_b (gdbarch, insn1, dsc);
8362 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8364 case 14: /* Unconditional branch */
8365 err = thumb_copy_b (gdbarch, insn1, dsc);
8372 internal_error (__FILE__, __LINE__,
8373 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8377 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8378 uint16_t insn1, uint16_t insn2,
8379 struct regcache *regs,
8380 struct displaced_step_closure *dsc)
8382 int rt = bits (insn2, 12, 15);
8383 int rn = bits (insn1, 0, 3);
8384 int op1 = bits (insn1, 7, 8);
8387 switch (bits (insn1, 5, 6))
8389 case 0: /* Load byte and memory hints */
8390 if (rt == 0xf) /* PLD/PLI */
8393 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8394 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8396 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8401 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8402 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8405 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8406 "ldrb{reg, immediate}/ldrbt",
8411 case 1: /* Load halfword and memory hints. */
8412 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8413 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8414 "pld/unalloc memhint", dsc);
8418 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8421 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8425 case 2: /* Load word */
8427 int insn2_bit_8_11 = bits (insn2, 8, 11);
8430 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8431 else if (op1 == 0x1) /* Encoding T3 */
8432 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8434 else /* op1 == 0x0 */
8436 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8437 /* LDR (immediate) */
8438 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8439 dsc, bit (insn2, 8), 1);
8440 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8441 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8444 /* LDR (register) */
8445 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8451 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8458 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8459 uint16_t insn2, struct regcache *regs,
8460 struct displaced_step_closure *dsc)
8463 unsigned short op = bit (insn2, 15);
8464 unsigned int op1 = bits (insn1, 11, 12);
8470 switch (bits (insn1, 9, 10))
8475 /* Load/store {dual, exclusive}, table branch. */
8476 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8477 && bits (insn2, 5, 7) == 0)
8478 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8481 /* PC is not allowed to be used in load/store {dual, exclusive} instructions. */
8483 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8484 "load/store dual/ex", dsc);
8486 else /* load/store multiple */
8488 switch (bits (insn1, 7, 8))
8490 case 0: case 3: /* SRS, RFE */
8491 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8494 case 1: case 2: /* LDM/STM/PUSH/POP */
8495 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8502 /* Data-processing (shift register). */
8503 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8506 default: /* Coprocessor instructions. */
8507 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8512 case 2: /* op1 = 2 */
8513 if (op) /* Branch and misc control. */
8515 if (bit (insn2, 14) /* BLX/BL */
8516 || bit (insn2, 12) /* Unconditional branch */
8517 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8518 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8520 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8525 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8527 int op = bits (insn1, 4, 8);
8528 int rn = bits (insn1, 0, 3);
8529 if ((op == 0 || op == 0xa) && rn == 0xf)
8530 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8533 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8536 else /* Data processing (modified immediate) */
8537 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8541 case 3: /* op1 = 3 */
8542 switch (bits (insn1, 9, 10))
8546 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8548 else /* NEON Load/Store and Store single data item */
8549 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8550 "neon elt/struct load/store",
8553 case 1: /* op1 = 3, bits (9, 10) == 1 */
8554 switch (bits (insn1, 7, 8))
8556 case 0: case 1: /* Data processing (register) */
8557 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8560 case 2: /* Multiply and absolute difference */
8561 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8562 "mul/mua/diff", dsc);
8564 case 3: /* Long multiply and divide */
8565 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8570 default: /* Coprocessor instructions */
8571 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8580 internal_error (__FILE__, __LINE__,
8581 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8586 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8587 CORE_ADDR to, struct regcache *regs,
8588 struct displaced_step_closure *dsc)
8590 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8592 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8594 if (debug_displaced)
8595 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8596 "at %.8lx\n", insn1, (unsigned long) from);
8599 dsc->insn_size = thumb_insn_size (insn1);
8600 if (thumb_insn_size (insn1) == 4)
8603 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8604 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8607 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
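/* For reference (hedged summary of the Thumb-2 encoding rule relied on
   above, not a quotation from the architecture manual): the first
   halfword of a 32-bit Thumb-2 instruction has bits [15:11] equal to
   0b11101, 0b11110 or 0b11111; any other value denotes a 16-bit
   instruction.  That is the distinction thumb_insn_size, defined
   elsewhere in this file, makes when choosing between the 16-bit and
   32-bit decoders here.  */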
8611 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8612 CORE_ADDR to, struct regcache *regs,
8613 struct displaced_step_closure *dsc)
8616 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8619 /* Most displaced instructions use a 1-instruction scratch space, so set this
8620 here and override below if/when necessary. */
8622 dsc->insn_addr = from;
8623 dsc->scratch_base = to;
8624 dsc->cleanup = NULL;
8625 dsc->wrote_to_pc = 0;
8627 if (!displaced_in_arm_mode (regs))
8628 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8632 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8633 if (debug_displaced)
8634 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8635 "at %.8lx\n", (unsigned long) insn,
8636 (unsigned long) from);
8638 if ((insn & 0xf0000000) == 0xf0000000)
8639 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8640 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8642 case 0x0: case 0x1: case 0x2: case 0x3:
8643 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8646 case 0x4: case 0x5: case 0x6:
8647 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8651 err = arm_decode_media (gdbarch, insn, dsc);
8654 case 0x8: case 0x9: case 0xa: case 0xb:
8655 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8658 case 0xc: case 0xd: case 0xe: case 0xf:
8659 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8664 internal_error (__FILE__, __LINE__,
8665 _("arm_process_displaced_insn: Instruction decode error"));
8668 /* Actually set up the scratch space for a displaced instruction. */
8671 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8672 CORE_ADDR to, struct displaced_step_closure *dsc)
8674 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8675 unsigned int i, len, offset;
8676 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8677 int size = dsc->is_thumb ? 2 : 4;
8678 const gdb_byte *bkp_insn;
8681 /* Poke modified instruction(s). */
8682 for (i = 0; i < dsc->numinsns; i++)
8684 if (debug_displaced)
8686 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8688 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8691 fprintf_unfiltered (gdb_stdlog, "%.4x",
8692 (unsigned short) dsc->modinsn[i]);
8694 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8695 (unsigned long) to + offset);
8698 write_memory_unsigned_integer (to + offset, size,
8699 byte_order_for_code,
8704 /* Choose the correct breakpoint instruction. */
8707 bkp_insn = tdep->thumb_breakpoint;
8708 len = tdep->thumb_breakpoint_size;
8712 bkp_insn = tdep->arm_breakpoint;
8713 len = tdep->arm_breakpoint_size;
8716 /* Put breakpoint afterwards. */
8717 write_memory (to + offset, bkp_insn, len);
8719 if (debug_displaced)
8720 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8721 paddress (gdbarch, from), paddress (gdbarch, to));
8724 /* Entry point for copying an instruction into scratch space for displaced
8727 struct displaced_step_closure *
8728 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8729 CORE_ADDR from, CORE_ADDR to,
8730 struct regcache *regs)
8732 struct displaced_step_closure *dsc
8733 = xmalloc (sizeof (struct displaced_step_closure));
8734 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8735 arm_displaced_init_closure (gdbarch, from, to, dsc);
8740 /* Entry point for cleaning things up after a displaced instruction has been
8744 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8745 struct displaced_step_closure *dsc,
8746 CORE_ADDR from, CORE_ADDR to,
8747 struct regcache *regs)
8750 dsc->cleanup (gdbarch, regs, dsc);
8752 if (!dsc->wrote_to_pc)
8753 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8754 dsc->insn_addr + dsc->insn_size);
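/* Hedged sketch (kept out of the build) of how GDB's displaced-stepping
   core is expected to drive the two entry points above; the real driver
   lives outside this file and the names used here are illustrative.  */
#if 0
static void
arm_example_displaced_step (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs)
{
  struct displaced_step_closure *dsc;

  /* Copy and adjust the instruction at FROM into the scratch area TO.  */
  dsc = arm_displaced_step_copy_insn (gdbarch, from, to, regs);

  /* ... the inferior is single-stepped over the copy at TO ...  */

  /* Apply the architecture-specific fix-ups (PC, cleanups).  */
  arm_displaced_step_fixup (gdbarch, dsc, from, to, regs);
}
#endif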
8758 #include "bfd-in2.h"
8759 #include "libcoff.h"
8762 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8764 struct gdbarch *gdbarch = info->application_data;
8766 if (arm_pc_is_thumb (gdbarch, memaddr))
8768 static asymbol *asym;
8769 static combined_entry_type ce;
8770 static struct coff_symbol_struct csym;
8771 static struct bfd fake_bfd;
8772 static bfd_target fake_target;
8774 if (csym.native == NULL)
8776 /* Create a fake symbol vector containing a Thumb symbol.
8777 This is solely so that the code in print_insn_little_arm()
8778 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8779 the presence of a Thumb symbol and switch to decoding
8780 Thumb instructions. */
8782 fake_target.flavour = bfd_target_coff_flavour;
8783 fake_bfd.xvec = &fake_target;
8784 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8786 csym.symbol.the_bfd = &fake_bfd;
8787 csym.symbol.name = "fake";
8788 asym = (asymbol *) & csym;
8791 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8792 info->symbols = &asym;
8795 info->symbols = NULL;
8797 if (info->endian == BFD_ENDIAN_BIG)
8798 return print_insn_big_arm (memaddr, info);
8800 return print_insn_little_arm (memaddr, info);
8803 /* The following define instruction sequences that will cause ARM
8804 CPUs to take an undefined instruction trap. These are used to
8805 signal a breakpoint to GDB.
8807 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8808 modes. A different instruction is required for each mode. The ARM
8809 CPUs can also be big or little endian. Thus four different
8810 instructions are needed to support all cases.
8812 Note: ARMv4 defines several new instructions that will take the
8813 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8814 not in fact add the new instructions. The new undefined
8815 instructions in ARMv4 are all instructions that had no defined
8816 behaviour in earlier chips. There is no guarantee that they will
8817 raise an exception, but may be treated as NOPs. In practice, it
8818 may only be safe to rely on instructions matching:
8820 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8821 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8822 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8824 Even this may only be true if the condition predicate is true. The
8825 following use a condition predicate of ALWAYS so it is always TRUE.
8827 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8828 and NetBSD all use a software interrupt rather than an undefined
8829 instruction to force a trap. This can be handled by the
8830 abi-specific code during establishment of the gdbarch vector. */
8832 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8833 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8834 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8835 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8837 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8838 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8839 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8840 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
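/* A minimal worked example (hypothetical helper, kept out of the build)
   showing that the little-endian ARM breakpoint bytes above assemble to
   the word 0xE7FFDEFE, which matches the "always undefined" pattern
   documented in the comment above (cond = 1110, bits 27-25 = 011,
   bit 4 = 1).  */
#if 0
static uint32_t
arm_example_le_breakpoint_word (void)
{
  static const gdb_byte bkpt[] = ARM_LE_BREAKPOINT;

  return (bkpt[0]
	  | (bkpt[1] << 8)
	  | ((uint32_t) bkpt[2] << 16)
	  | ((uint32_t) bkpt[3] << 24));
}
#endif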
8842 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8843 the program counter value to determine whether a 16-bit or 32-bit
8844 breakpoint should be used. It returns a pointer to a string of
8845 bytes that encode a breakpoint instruction, stores the length of
8846 the string to *lenptr, and adjusts the program counter (if
8847 necessary) to point to the actual memory location where the
8848 breakpoint should be inserted. */
8850 static const unsigned char *
8851 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8853 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8854 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8856 if (arm_pc_is_thumb (gdbarch, *pcptr))
8858 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8860 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8861 check whether we are replacing a 32-bit instruction. */
8862 if (tdep->thumb2_breakpoint != NULL)
8865 if (target_read_memory (*pcptr, buf, 2) == 0)
8867 unsigned short inst1;
8868 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8869 if (thumb_insn_size (inst1) == 4)
8871 *lenptr = tdep->thumb2_breakpoint_size;
8872 return tdep->thumb2_breakpoint;
8877 *lenptr = tdep->thumb_breakpoint_size;
8878 return tdep->thumb_breakpoint;
8882 *lenptr = tdep->arm_breakpoint_size;
8883 return tdep->arm_breakpoint;
8888 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8891 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8893 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8894 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8895 that this is not confused with a 32-bit ARM breakpoint. */
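/* For reference (hedged summary of the remote-protocol convention the
   code above relies on): breakpoint "kind" 2 denotes a 16-bit Thumb
   breakpoint, 3 the 32-bit Thumb-2 breakpoint, and 4 a 32-bit ARM
   breakpoint, which is why a Thumb address reported with kind 4 is
   remapped here.  */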
8899 /* Extract from an array REGBUF containing the (raw) register state a
8900 function return value of type TYPE, and copy that, in virtual
8901 format, into VALBUF. */
8904 arm_extract_return_value (struct type *type, struct regcache *regs,
8907 struct gdbarch *gdbarch = get_regcache_arch (regs);
8908 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8910 if (TYPE_CODE_FLT == TYPE_CODE (type))
8912 switch (gdbarch_tdep (gdbarch)->fp_model)
8916 /* The value is in register F0 in internal format. We need to
8917 extract the raw value and then convert it to the desired
8919 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8921 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8922 convert_from_extended (floatformat_from_type (type), tmpbuf,
8923 valbuf, gdbarch_byte_order (gdbarch));
8927 case ARM_FLOAT_SOFT_FPA:
8928 case ARM_FLOAT_SOFT_VFP:
8929 /* ARM_FLOAT_VFP can arise if this is a variadic function, so it is
8930 not using the VFP ABI code. */
8932 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8933 if (TYPE_LENGTH (type) > 4)
8934 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8935 valbuf + INT_REGISTER_SIZE);
8939 internal_error (__FILE__, __LINE__,
8940 _("arm_extract_return_value: "
8941 "Floating point model not supported"));
8945 else if (TYPE_CODE (type) == TYPE_CODE_INT
8946 || TYPE_CODE (type) == TYPE_CODE_CHAR
8947 || TYPE_CODE (type) == TYPE_CODE_BOOL
8948 || TYPE_CODE (type) == TYPE_CODE_PTR
8949 || TYPE_CODE (type) == TYPE_CODE_REF
8950 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8952 /* If the type is a plain integer, then the access is
8953 straight-forward. Otherwise we have to play around a bit
8955 int len = TYPE_LENGTH (type);
8956 int regno = ARM_A1_REGNUM;
8961 /* By using store_unsigned_integer we avoid having to do
8962 anything special for small big-endian values. */
8963 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8964 store_unsigned_integer (valbuf,
8965 (len > INT_REGISTER_SIZE
8966 ? INT_REGISTER_SIZE : len),
8968 len -= INT_REGISTER_SIZE;
8969 valbuf += INT_REGISTER_SIZE;
8974 /* For a structure or union the behaviour is as if the value had
8975 been stored to word-aligned memory and then loaded into
8976 registers with 32-bit load instruction(s). */
8977 int len = TYPE_LENGTH (type);
8978 int regno = ARM_A1_REGNUM;
8979 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8983 regcache_cooked_read (regs, regno++, tmpbuf);
8984 memcpy (valbuf, tmpbuf,
8985 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8986 len -= INT_REGISTER_SIZE;
8987 valbuf += INT_REGISTER_SIZE;
8993 /* Will a function return an aggregate type in memory or in a
8994 register? Return 0 if an aggregate type can be returned in a
8995 register, 1 if it must be returned in memory. */
8998 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9001 enum type_code code;
9003 CHECK_TYPEDEF (type);
9005 /* In the ARM ABI, "integer" like aggregate types are returned in
9006 registers. For an aggregate type to be integer like, its size
9007 must be less than or equal to INT_REGISTER_SIZE and the
9008 offset of each addressable subfield must be zero. Note that bit
9009 fields are not addressable, and all addressable subfields of
9010 unions always start at offset zero.
9012 This function is based on the behaviour of GCC 2.95.1.
9013 See: gcc/arm.c: arm_return_in_memory() for details.
9015 Note: All versions of GCC before GCC 2.95.2 do not set up the
9016 parameters correctly for a function returning the following
9017 structure: struct { float f;}; This should be returned in memory,
9018 not a register. Richard Earnshaw sent me a patch, but I do not
9019 know of any way to detect if a function like the above has been
9020 compiled with the correct calling convention. */
9022 /* All aggregate types that won't fit in a register must be returned
9024 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9029 /* The AAPCS says all aggregates not larger than a word are returned
9031 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9034 /* The only aggregate types that can be returned in a register are
9035 structs and unions. Arrays must be returned in memory. */
9036 code = TYPE_CODE (type);
9037 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9042 /* Assume all other aggregate types can be returned in a register.
9043 Run a check for structures, unions and arrays. */
9046 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9049 /* Need to check if this struct/union is "integer" like. For
9050 this to be true, its size must be less than or equal to
9051 INT_REGISTER_SIZE and the offset of each addressable
9052 subfield must be zero. Note that bit fields are not
9053 addressable, and unions always start at offset zero. If any
9054 of the subfields is a floating point type, the struct/union
9055 cannot be an integer type. */
9057 /* For each field in the object, check:
9058 1) Is it FP? --> yes, nRc = 1;
9059 2) Is it addressable (bitpos != 0) and
9060 not packed (bitsize == 0)?
9064 for (i = 0; i < TYPE_NFIELDS (type); i++)
9066 enum type_code field_type_code;
9067 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9070 /* Is it a floating point type field? */
9071 if (field_type_code == TYPE_CODE_FLT)
9077 /* If bitpos != 0, then we have to care about it. */
9078 if (TYPE_FIELD_BITPOS (type, i) != 0)
9080 /* Bitfields are not addressable. If the field bitsize is
9081 zero, then the field is not packed. Hence it cannot be
9082 a bitfield or any other packed type. */
9083 if (TYPE_FIELD_BITSIZE (type, i) == 0)
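/* Worked examples for the APCS "integer like" rule described above
   (hypothetical types, illustration only; they are not used anywhere
   in this file).  */
#if 0
struct arm_example_reg_1 { int a; };		/* One word at offset 0: register.  */
union arm_example_reg_2 { int a; char c; };	/* All members at offset 0: register.  */
struct arm_example_mem_1 { short a; char b; };	/* 'b' addressable at a non-zero offset: memory.  */
struct arm_example_mem_2 { float f; };		/* Floating-point member: memory.  */
struct arm_example_mem_3 { int a; int b; };	/* Larger than INT_REGISTER_SIZE: memory (any ABI).  */
#endif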
9095 /* Write into appropriate registers a function return value of type
9096 TYPE, given in virtual format. */
9099 arm_store_return_value (struct type *type, struct regcache *regs,
9100 const gdb_byte *valbuf)
9102 struct gdbarch *gdbarch = get_regcache_arch (regs);
9103 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9105 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9107 gdb_byte buf[MAX_REGISTER_SIZE];
9109 switch (gdbarch_tdep (gdbarch)->fp_model)
9113 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9114 gdbarch_byte_order (gdbarch));
9115 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9118 case ARM_FLOAT_SOFT_FPA:
9119 case ARM_FLOAT_SOFT_VFP:
9120 /* ARM_FLOAT_VFP can arise if this is a variadic function, so it is
9121 not using the VFP ABI code. */
9123 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9124 if (TYPE_LENGTH (type) > 4)
9125 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9126 valbuf + INT_REGISTER_SIZE);
9130 internal_error (__FILE__, __LINE__,
9131 _("arm_store_return_value: Floating "
9132 "point model not supported"));
9136 else if (TYPE_CODE (type) == TYPE_CODE_INT
9137 || TYPE_CODE (type) == TYPE_CODE_CHAR
9138 || TYPE_CODE (type) == TYPE_CODE_BOOL
9139 || TYPE_CODE (type) == TYPE_CODE_PTR
9140 || TYPE_CODE (type) == TYPE_CODE_REF
9141 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9143 if (TYPE_LENGTH (type) <= 4)
9145 /* Values of one word or less are zero/sign-extended and
9147 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9148 LONGEST val = unpack_long (type, valbuf);
9150 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9151 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9155 /* Integral values greater than one word are stored in consecutive
9156 registers starting with r0. This will always be a multiple of
9157 the register size. */
9158 int len = TYPE_LENGTH (type);
9159 int regno = ARM_A1_REGNUM;
9163 regcache_cooked_write (regs, regno++, valbuf);
9164 len -= INT_REGISTER_SIZE;
9165 valbuf += INT_REGISTER_SIZE;
9171 /* For a structure or union the behaviour is as if the value had
9172 been stored to word-aligned memory and then loaded into
9173 registers with 32-bit load instruction(s). */
9174 int len = TYPE_LENGTH (type);
9175 int regno = ARM_A1_REGNUM;
9176 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9180 memcpy (tmpbuf, valbuf,
9181 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9182 regcache_cooked_write (regs, regno++, tmpbuf);
9183 len -= INT_REGISTER_SIZE;
9184 valbuf += INT_REGISTER_SIZE;
9190 /* Handle function return values. */
9192 static enum return_value_convention
9193 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9194 struct type *valtype, struct regcache *regcache,
9195 gdb_byte *readbuf, const gdb_byte *writebuf)
9197 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9198 struct type *func_type = function ? value_type (function) : NULL;
9199 enum arm_vfp_cprc_base_type vfp_base_type;
9202 if (arm_vfp_abi_for_function (gdbarch, func_type)
9203 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9205 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9206 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9208 for (i = 0; i < vfp_base_count; i++)
9210 if (reg_char == 'q')
9213 arm_neon_quad_write (gdbarch, regcache, i,
9214 writebuf + i * unit_length);
9217 arm_neon_quad_read (gdbarch, regcache, i,
9218 readbuf + i * unit_length);
9225 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9226 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9229 regcache_cooked_write (regcache, regnum,
9230 writebuf + i * unit_length);
9232 regcache_cooked_read (regcache, regnum,
9233 readbuf + i * unit_length);
9236 return RETURN_VALUE_REGISTER_CONVENTION;
9239 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9240 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9241 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9243 if (tdep->struct_return == pcc_struct_return
9244 || arm_return_in_memory (gdbarch, valtype))
9245 return RETURN_VALUE_STRUCT_CONVENTION;
9248 /* AAPCS returns complex types longer than a register in memory. */
9249 if (tdep->arm_abi != ARM_ABI_APCS
9250 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9251 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9252 return RETURN_VALUE_STRUCT_CONVENTION;
9255 arm_store_return_value (valtype, regcache, writebuf);
9258 arm_extract_return_value (valtype, regcache, readbuf);
9260 return RETURN_VALUE_REGISTER_CONVENTION;
9265 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9267 struct gdbarch *gdbarch = get_frame_arch (frame);
9268 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9269 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9271 gdb_byte buf[INT_REGISTER_SIZE];
9273 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9275 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9279 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9283 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9284 return the target PC. Otherwise return 0. */
9287 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9291 CORE_ADDR start_addr;
9293 /* Find the starting address and name of the function containing the PC. */
9294 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9296 /* Trampoline 'bx reg' doesn't belong to any function. Do the
9298 start_addr = arm_skip_bx_reg (frame, pc);
9299 if (start_addr != 0)
9305 /* If PC is in a Thumb call or return stub, return the address of the
9306 target PC, which is in a register. The thunk functions are called
9307 _call_via_xx, where xx is the register name. The possible names
9308 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9309 functions, named __ARM_call_via_r[0-7]. */
9310 if (strncmp (name, "_call_via_", 10) == 0
9311 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9313 /* Use the name suffix to determine which register contains the
9315 static char *table[15] =
9316 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9317 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9320 int offset = strlen (name) - 2;
9322 for (regno = 0; regno <= 14; regno++)
9323 if (strcmp (&name[offset], table[regno]) == 0)
9324 return get_frame_register_unsigned (frame, regno);
9327 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9328 non-interworking calls to foo. We could decode the stubs
9329 to find the target but it's easier to use the symbol table. */
9330 namelen = strlen (name);
9331 if (name[0] == '_' && name[1] == '_'
9332 && ((namelen > 2 + strlen ("_from_thumb")
9333 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9334 strlen ("_from_thumb")) == 0)
9335 || (namelen > 2 + strlen ("_from_arm")
9336 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9337 strlen ("_from_arm")) == 0)))
9340 int target_len = namelen - 2;
9341 struct bound_minimal_symbol minsym;
9342 struct objfile *objfile;
9343 struct obj_section *sec;
9345 if (name[namelen - 1] == 'b')
9346 target_len -= strlen ("_from_thumb");
9348 target_len -= strlen ("_from_arm");
9350 target_name = alloca (target_len + 1);
9351 memcpy (target_name, name + 2, target_len);
9352 target_name[target_len] = '\0';
9354 sec = find_pc_section (pc);
9355 objfile = (sec == NULL) ? NULL : sec->objfile;
9356 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9357 if (minsym.minsym != NULL)
9358 return BMSYMBOL_VALUE_ADDRESS (minsym);
9363 return 0; /* not a stub */
9367 set_arm_command (char *args, int from_tty)
9369 printf_unfiltered (_("\
9370 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9371 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9375 show_arm_command (char *args, int from_tty)
9377 cmd_show_list (showarmcmdlist, from_tty, "");
9381 arm_update_current_architecture (void)
9383 struct gdbarch_info info;
9385 /* If the current architecture is not ARM, we have nothing to do. */
9386 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9389 /* Update the architecture. */
9390 gdbarch_info_init (&info);
9392 if (!gdbarch_update_p (info))
9393 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9397 set_fp_model_sfunc (char *args, int from_tty,
9398 struct cmd_list_element *c)
9400 enum arm_float_model fp_model;
9402 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9403 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9405 arm_fp_model = fp_model;
9409 if (fp_model == ARM_FLOAT_LAST)
9410 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9413 arm_update_current_architecture ();
9417 show_fp_model (struct ui_file *file, int from_tty,
9418 struct cmd_list_element *c, const char *value)
9420 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9422 if (arm_fp_model == ARM_FLOAT_AUTO
9423 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9424 fprintf_filtered (file, _("\
9425 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9426 fp_model_strings[tdep->fp_model]);
9428 fprintf_filtered (file, _("\
9429 The current ARM floating point model is \"%s\".\n"),
9430 fp_model_strings[arm_fp_model]);
9434 arm_set_abi (char *args, int from_tty,
9435 struct cmd_list_element *c)
9437 enum arm_abi_kind arm_abi;
9439 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9440 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9442 arm_abi_global = arm_abi;
9446 if (arm_abi == ARM_ABI_LAST)
9447 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9450 arm_update_current_architecture ();
9454 arm_show_abi (struct ui_file *file, int from_tty,
9455 struct cmd_list_element *c, const char *value)
9457 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9459 if (arm_abi_global == ARM_ABI_AUTO
9460 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9461 fprintf_filtered (file, _("\
9462 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9463 arm_abi_strings[tdep->arm_abi]);
9465 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9470 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9471 struct cmd_list_element *c, const char *value)
9473 fprintf_filtered (file,
9474 _("The current execution mode assumed "
9475 "(when symbols are unavailable) is \"%s\".\n"),
9476 arm_fallback_mode_string);
9480 arm_show_force_mode (struct ui_file *file, int from_tty,
9481 struct cmd_list_element *c, const char *value)
9483 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9485 fprintf_filtered (file,
9486 _("The current execution mode assumed "
9487 "(even when symbols are available) is \"%s\".\n"),
9488 arm_force_mode_string);
9491 /* If the user changes the register disassembly style used for info
9492 register and other commands, we have to also switch the style used
9493 in opcodes for disassembly output. This function is run in the "set
9494 arm disassembly" command, and does that. */
9497 set_disassembly_style_sfunc (char *args, int from_tty,
9498 struct cmd_list_element *c)
9500 set_disassembly_style ();
9503 /* Return the ARM register name corresponding to register I. */
9505 arm_register_name (struct gdbarch *gdbarch, int i)
9507 const int num_regs = gdbarch_num_regs (gdbarch);
9509 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9510 && i >= num_regs && i < num_regs + 32)
9512 static const char *const vfp_pseudo_names[] = {
9513 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9514 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9515 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9516 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9519 return vfp_pseudo_names[i - num_regs];
9522 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9523 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9525 static const char *const neon_pseudo_names[] = {
9526 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9527 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9530 return neon_pseudo_names[i - num_regs - 32];
9533 if (i >= ARRAY_SIZE (arm_register_names))
9534 /* These registers are only supported on targets which supply
9535 an XML description. */
9538 return arm_register_names[i];
9542 set_disassembly_style (void)
9546 /* Find the style that the user wants. */
9547 for (current = 0; current < num_disassembly_options; current++)
9548 if (disassembly_style == valid_disassembly_styles[current])
9550 gdb_assert (current < num_disassembly_options);
9552 /* Synchronize the disassembler. */
9553 set_arm_regname_option (current);
9556 /* Test whether the coff symbol specific value corresponds to a Thumb
9560 coff_sym_is_thumb (int val)
9562 return (val == C_THUMBEXT
9563 || val == C_THUMBSTAT
9564 || val == C_THUMBEXTFUNC
9565 || val == C_THUMBSTATFUNC
9566 || val == C_THUMBLABEL);
9569 /* arm_coff_make_msymbol_special()
9570 arm_elf_make_msymbol_special()
9572 These functions test whether the COFF or ELF symbol corresponds to
9573 an address in thumb code, and set a "special" bit in a minimal
9574 symbol to indicate that it does. */
9577 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9579 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9580 == ST_BRANCH_TO_THUMB)
9581 MSYMBOL_SET_SPECIAL (msym);
9585 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9587 if (coff_sym_is_thumb (val))
9588 MSYMBOL_SET_SPECIAL (msym);
9592 arm_objfile_data_free (struct objfile *objfile, void *arg)
9594 struct arm_per_objfile *data = arg;
9597 for (i = 0; i < objfile->obfd->section_count; i++)
9598 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9602 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9605 const char *name = bfd_asymbol_name (sym);
9606 struct arm_per_objfile *data;
9607 VEC(arm_mapping_symbol_s) **map_p;
9608 struct arm_mapping_symbol new_map_sym;
9610 gdb_assert (name[0] == '$');
9611 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9614 data = objfile_data (objfile, arm_objfile_data_key);
9617 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9618 struct arm_per_objfile);
9619 set_objfile_data (objfile, arm_objfile_data_key, data);
9620 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9621 objfile->obfd->section_count,
9622 VEC(arm_mapping_symbol_s) *);
9624 map_p = &data->section_maps[bfd_get_section (sym)->index];
9626 new_map_sym.value = sym->value;
9627 new_map_sym.type = name[1];
9629 /* Assume that most mapping symbols appear in order of increasing
9630 value. If they were randomly distributed, it would be faster to
9631 always push here and then sort at first use. */
9632 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9634 struct arm_mapping_symbol *prev_map_sym;
9636 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9637 if (prev_map_sym->value >= sym->value)
9640 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9641 arm_compare_mapping_symbols);
9642 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9647 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
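/* For reference (hedged summary of the ARM ELF convention handled
   above): the mapping symbols are "$a" for the start of a sequence of
   ARM instructions, "$t" for Thumb instructions and "$d" for literal
   data, which is why only those three names are recorded here.  */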
9651 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9653 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9654 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9656 /* If necessary, set the T bit. */
9659 ULONGEST val, t_bit;
9660 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9661 t_bit = arm_psr_thumb_bit (gdbarch);
9662 if (arm_pc_is_thumb (gdbarch, pc))
9663 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9666 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9671 /* Read the contents of a NEON quad register, by reading from two
9672 double registers. This is used to implement the quad pseudo
9673 registers, and for argument passing in case the quad registers are
9674 missing; vectors are passed in quad registers when using the VFP
9675 ABI, even if a NEON unit is not present. REGNUM is the index of
9676 the quad register, in [0, 15]. */
9678 static enum register_status
9679 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9680 int regnum, gdb_byte *buf)
9683 gdb_byte reg_buf[8];
9684 int offset, double_regnum;
9685 enum register_status status;
9687 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9688 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9691 /* d0 is always the least significant half of q0. */
9692 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9697 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9698 if (status != REG_VALID)
9700 memcpy (buf + offset, reg_buf, 8);
9702 offset = 8 - offset;
9703 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9704 if (status != REG_VALID)
9706 memcpy (buf + offset, reg_buf, 8);
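/* A small worked example of the layout handled above: q1 is built from
   d2 (the least significant half) and d3, so on a little-endian target
   d2 fills bytes 0-7 of BUF and d3 bytes 8-15, while on a big-endian
   target the two halves land in the opposite order.  (Sketch of the
   offset arithmetic only.)  */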
9711 static enum register_status
9712 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9713 int regnum, gdb_byte *buf)
9715 const int num_regs = gdbarch_num_regs (gdbarch);
9717 gdb_byte reg_buf[8];
9718 int offset, double_regnum;
9720 gdb_assert (regnum >= num_regs);
9723 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9724 /* Quad-precision register. */
9725 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9728 enum register_status status;
9730 /* Single-precision register. */
9731 gdb_assert (regnum < 32);
9733 /* s0 is always the least significant half of d0. */
9734 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9735 offset = (regnum & 1) ? 0 : 4;
9737 offset = (regnum & 1) ? 4 : 0;
9739 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9740 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9743 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9744 if (status == REG_VALID)
9745 memcpy (buf, reg_buf + offset, 4);
9750 /* Store the contents of BUF to a NEON quad register, by writing to
9751 two double registers. This is used to implement the quad pseudo
9752 registers, and for argument passing in case the quad registers are
9753 missing; vectors are passed in quad registers when using the VFP
9754 ABI, even if a NEON unit is not present. REGNUM is the index
9755 of the quad register, in [0, 15]. */
9758 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9759 int regnum, const gdb_byte *buf)
9762 int offset, double_regnum;
9764 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9765 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9768 /* d0 is always the least significant half of q0. */
9769 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9774 regcache_raw_write (regcache, double_regnum, buf + offset);
9775 offset = 8 - offset;
9776 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9780 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9781 int regnum, const gdb_byte *buf)
9783 const int num_regs = gdbarch_num_regs (gdbarch);
9785 gdb_byte reg_buf[8];
9786 int offset, double_regnum;
9788 gdb_assert (regnum >= num_regs);
9791 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9792 /* Quad-precision register. */
9793 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9796 /* Single-precision register. */
9797 gdb_assert (regnum < 32);
9799 /* s0 is always the least significant half of d0. */
9800 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9801 offset = (regnum & 1) ? 0 : 4;
9803 offset = (regnum & 1) ? 4 : 0;
9805 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9806 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9809 regcache_raw_read (regcache, double_regnum, reg_buf);
9810 memcpy (reg_buf + offset, buf, 4);
9811 regcache_raw_write (regcache, double_regnum, reg_buf);
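/* Hedged sketch (hypothetical helper, kept out of the build) of the
   single-precision mapping used by arm_pseudo_read and arm_pseudo_write
   above: pseudo s<N> lives in d<N/2>, at byte offset 0 or 4 depending
   on N's parity and the target byte order.  */
#if 0
static int
arm_example_sreg_offset (struct gdbarch *gdbarch, int sreg)
{
  /* Pseudo s<N> is stored in double register d<N/2>.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    return (sreg & 1) ? 0 : 4;
  else
    return (sreg & 1) ? 4 : 0;
}
#endif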
9815 static struct value *
9816 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9818 const int *reg_p = baton;
9819 return value_of_register (*reg_p, frame);
9822 static enum gdb_osabi
9823 arm_elf_osabi_sniffer (bfd *abfd)
9825 unsigned int elfosabi;
9826 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9828 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9830 if (elfosabi == ELFOSABI_ARM)
9831 /* GNU tools use this value. Check note sections in this case,
9833 bfd_map_over_sections (abfd,
9834 generic_elf_osabi_sniff_abi_tag_sections,
9837 /* Anything else will be handled by the generic ELF sniffer. */
9842 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9843 struct reggroup *group)
9845 /* FPS register's type is INT, but belongs to float_reggroup. Besides
9846 this, the FPS register belongs to save_reggroup, restore_reggroup, and
9847 all_reggroup, of course. */
9848 if (regnum == ARM_FPS_REGNUM)
9849 return (group == float_reggroup
9850 || group == save_reggroup
9851 || group == restore_reggroup
9852 || group == all_reggroup);
9854 return default_register_reggroup_p (gdbarch, regnum, group);
9858 /* For backward-compatibility we allow two 'g' packet lengths with
9859 the remote protocol depending on whether FPA registers are
9860 supplied. M-profile targets do not have FPA registers, but some
9861 stubs already exist in the wild which use a 'g' packet which
9862 supplies them albeit with dummy values. The packet format which
9863 includes FPA registers should be considered deprecated for
9864 M-profile targets. */
9867 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9869 if (gdbarch_tdep (gdbarch)->is_m)
9871 /* If we know from the executable this is an M-profile target,
9872 cater for remote targets whose register set layout is the
9873 same as the FPA layout. */
9874 register_remote_g_packet_guess (gdbarch,
9875 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9876 (16 * INT_REGISTER_SIZE)
9877 + (8 * FP_REGISTER_SIZE)
9878 + (2 * INT_REGISTER_SIZE),
9879 tdesc_arm_with_m_fpa_layout);
9881 /* The regular M-profile layout. */
9882 register_remote_g_packet_guess (gdbarch,
9883 /* r0-r12,sp,lr,pc; xpsr */
9884 (16 * INT_REGISTER_SIZE)
9885 + INT_REGISTER_SIZE,
9888 /* M-profile plus M4F VFP. */
9889 register_remote_g_packet_guess (gdbarch,
9890 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9891 (16 * INT_REGISTER_SIZE)
9892 + (16 * VFP_REGISTER_SIZE)
9893 + (2 * INT_REGISTER_SIZE),
9894 tdesc_arm_with_m_vfp_d16);
9897 /* Otherwise we don't have a useful guess. */
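/* Worked sizes for the three guesses registered above, assuming the
   usual sizes on this target (INT_REGISTER_SIZE == 4,
   FP_REGISTER_SIZE == 12, VFP_REGISTER_SIZE == 8); illustration only,
   kept out of the build.  */
#if 0
enum
{
  arm_example_g_len_m_fpa = 16 * 4 + 8 * 12 + 2 * 4,	/* 168 bytes.  */
  arm_example_g_len_m     = 16 * 4 + 4,			/* 68 bytes.   */
  arm_example_g_len_m4f   = 16 * 4 + 16 * 8 + 2 * 4	/* 200 bytes.  */
};
#endif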
9901 /* Initialize the current architecture based on INFO. If possible,
9902 re-use an architecture from ARCHES, which is a list of
9903 architectures already created during this debugging session.
9905 Called e.g. at program startup, when reading a core file, and when
9906 reading a binary file. */
9908 static struct gdbarch *
9909 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9911 struct gdbarch_tdep *tdep;
9912 struct gdbarch *gdbarch;
9913 struct gdbarch_list *best_arch;
9914 enum arm_abi_kind arm_abi = arm_abi_global;
9915 enum arm_float_model fp_model = arm_fp_model;
9916 struct tdesc_arch_data *tdesc_data = NULL;
9918 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9920 int have_fpa_registers = 1;
9921 const struct target_desc *tdesc = info.target_desc;
9923 /* If we have an object to base this architecture on, try to determine
9926 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9928 int ei_osabi, e_flags;
9930 switch (bfd_get_flavour (info.abfd))
9932 case bfd_target_aout_flavour:
9933 /* Assume it's an old APCS-style ABI. */
9934 arm_abi = ARM_ABI_APCS;
9937 case bfd_target_coff_flavour:
9938 /* Assume it's an old APCS-style ABI. */
9940 arm_abi = ARM_ABI_APCS;
9943 case bfd_target_elf_flavour:
9944 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9945 e_flags = elf_elfheader (info.abfd)->e_flags;
9947 if (ei_osabi == ELFOSABI_ARM)
9949 /* GNU tools used to use this value, but do not for EABI
9950 objects. There's nowhere to tag an EABI version
9951 anyway, so assume APCS. */
9952 arm_abi = ARM_ABI_APCS;
9954 else if (ei_osabi == ELFOSABI_NONE)
9956 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9957 int attr_arch, attr_profile;
9961 case EF_ARM_EABI_UNKNOWN:
9962 /* Assume GNU tools. */
9963 arm_abi = ARM_ABI_APCS;
9966 case EF_ARM_EABI_VER4:
9967 case EF_ARM_EABI_VER5:
9968 arm_abi = ARM_ABI_AAPCS;
9969 /* EABI binaries default to VFP float ordering.
9970 They may also contain build attributes that can
9971 be used to identify if the VFP argument-passing
9973 if (fp_model == ARM_FLOAT_AUTO)
9976 switch (bfd_elf_get_obj_attr_int (info.abfd,
9981 /* "The user intended FP parameter/result
9982 passing to conform to AAPCS, base
9984 fp_model = ARM_FLOAT_SOFT_VFP;
9987 /* "The user intended FP parameter/result
9988 passing to conform to AAPCS, VFP
9990 fp_model = ARM_FLOAT_VFP;
9993 /* "The user intended FP parameter/result
9994 passing to conform to tool chain-specific
9995 conventions" - we don't know any such
9996 conventions, so leave it as "auto". */
9999 /* Attribute value not mentioned in the
10000 October 2008 ABI, so leave it as
10005 fp_model = ARM_FLOAT_SOFT_VFP;
10011 /* Leave it as "auto". */
10012 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10017 /* Detect M-profile programs. This only works if the
10018 executable file includes build attributes; GCC does
10019 copy them to the executable, but e.g. RealView does
10021 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10023 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10025 Tag_CPU_arch_profile);
10026 /* GCC specifies the profile for v6-M; RealView only
10027 specifies the profile for architectures starting with
10028 V7 (as opposed to architectures with a tag
10029 numerically greater than TAG_CPU_ARCH_V7). */
10030 if (!tdesc_has_registers (tdesc)
10031 && (attr_arch == TAG_CPU_ARCH_V6_M
10032 || attr_arch == TAG_CPU_ARCH_V6S_M
10033 || attr_profile == 'M'))
10038 if (fp_model == ARM_FLOAT_AUTO)
10040 int e_flags = elf_elfheader (info.abfd)->e_flags;
10042 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10045 /* Leave it as "auto". Strictly speaking this case
10046 means FPA, but almost nobody uses that now, and
10047 many toolchains fail to set the appropriate bits
10048 for the floating-point model they use. */
10050 case EF_ARM_SOFT_FLOAT:
10051 fp_model = ARM_FLOAT_SOFT_FPA;
10053 case EF_ARM_VFP_FLOAT:
10054 fp_model = ARM_FLOAT_VFP;
10056 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10057 fp_model = ARM_FLOAT_SOFT_VFP;
10062 if (e_flags & EF_ARM_BE8)
10063 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10068 /* Leave it as "auto". */
10073 /* Check any target description for validity. */
10074 if (tdesc_has_registers (tdesc))
10076 /* For most registers we require GDB's default names; but also allow
10077 the numeric names for sp / lr / pc, as a convenience. */
10078 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10079 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10080 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10082 const struct tdesc_feature *feature;
10085 feature = tdesc_find_feature (tdesc,
10086 "org.gnu.gdb.arm.core");
10087 if (feature == NULL)
10089 feature = tdesc_find_feature (tdesc,
10090 "org.gnu.gdb.arm.m-profile");
10091 if (feature == NULL)
10097 tdesc_data = tdesc_data_alloc ();
10100 for (i = 0; i < ARM_SP_REGNUM; i++)
10101 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10102 arm_register_names[i]);
10103 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10106 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10109 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10113 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10114 ARM_PS_REGNUM, "xpsr");
10116 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10117 ARM_PS_REGNUM, "cpsr");
10121 tdesc_data_cleanup (tdesc_data);
10125 feature = tdesc_find_feature (tdesc,
10126 "org.gnu.gdb.arm.fpa");
10127 if (feature != NULL)
10130 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10131 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10132 arm_register_names[i]);
10135 tdesc_data_cleanup (tdesc_data);
10140 have_fpa_registers = 0;
10142 feature = tdesc_find_feature (tdesc,
10143 "org.gnu.gdb.xscale.iwmmxt");
10144 if (feature != NULL)
10146 static const char *const iwmmxt_names[] = {
10147 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10148 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10149 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10150 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10154 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10156 &= tdesc_numbered_register (feature, tdesc_data, i,
10157 iwmmxt_names[i - ARM_WR0_REGNUM]);
10159 /* Check for the control registers, but do not fail if they
10161 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10162 tdesc_numbered_register (feature, tdesc_data, i,
10163 iwmmxt_names[i - ARM_WR0_REGNUM]);
10165 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10167 &= tdesc_numbered_register (feature, tdesc_data, i,
10168 iwmmxt_names[i - ARM_WR0_REGNUM]);
10172 tdesc_data_cleanup (tdesc_data);
10177 /* If we have a VFP unit, check whether the single precision registers
10178 are present. If not, then we will synthesize them as pseudo
10180 feature = tdesc_find_feature (tdesc,
10181 "org.gnu.gdb.arm.vfp");
10182 if (feature != NULL)
10184 static const char *const vfp_double_names[] = {
10185 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10186 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10187 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10188 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10191 /* Require the double precision registers. There must be either
10194 for (i = 0; i < 32; i++)
10196 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10198 vfp_double_names[i]);
10202 if (!valid_p && i == 16)
10205 /* Also require FPSCR. */
10206 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10207 ARM_FPSCR_REGNUM, "fpscr");
10210 tdesc_data_cleanup (tdesc_data);
10214 if (tdesc_unnumbered_register (feature, "s0") == 0)
10215 have_vfp_pseudos = 1;
10217 have_vfp_registers = 1;
10219 /* If we have VFP, also check for NEON. The architecture allows
10220 NEON without VFP (integer vector operations only), but GDB
10221 does not support that. */
10222 feature = tdesc_find_feature (tdesc,
10223 "org.gnu.gdb.arm.neon");
10224 if (feature != NULL)
10226 /* NEON requires 32 double-precision registers. */
10229 tdesc_data_cleanup (tdesc_data);
10233 /* If there are quad registers defined by the stub, use
10234 their type; otherwise (normally) provide them with
10235 the default type. */
10236 if (tdesc_unnumbered_register (feature, "q0") == 0)
10237 have_neon_pseudos = 1;
10244 /* If there is already a candidate, use it. */
10245 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10247 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10249 if (arm_abi != ARM_ABI_AUTO
10250 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10253 if (fp_model != ARM_FLOAT_AUTO
10254 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10257 /* There are various other properties in tdep that we do not
10258 need to check here: those derived from a target description,
10259 since gdbarches with a different target description are
10260 automatically disqualified. */
10262 /* Do check is_m, though, since it might come from the binary. */
10263 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10266 /* Found a match. */
10270 if (best_arch != NULL)
10272 if (tdesc_data != NULL)
10273 tdesc_data_cleanup (tdesc_data);
10274 return best_arch->gdbarch;
10277 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10278 gdbarch = gdbarch_alloc (&info, tdep);
10280 /* Record additional information about the architecture we are defining.
10281 These are gdbarch discriminators, like the OSABI. */
10282 tdep->arm_abi = arm_abi;
10283 tdep->fp_model = fp_model;
10285 tdep->have_fpa_registers = have_fpa_registers;
10286 tdep->have_vfp_registers = have_vfp_registers;
10287 tdep->have_vfp_pseudos = have_vfp_pseudos;
10288 tdep->have_neon_pseudos = have_neon_pseudos;
10289 tdep->have_neon = have_neon;
10291 arm_register_g_packet_guesses (gdbarch);
10294 switch (info.byte_order_for_code)
10296 case BFD_ENDIAN_BIG:
10297 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10298 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10299 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10300 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10304 case BFD_ENDIAN_LITTLE:
10305 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10306 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10307 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10308 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10313 internal_error (__FILE__, __LINE__,
10314 _("arm_gdbarch_init: bad byte order for float format"));
10317 /* On ARM targets char defaults to unsigned. */
10318 set_gdbarch_char_signed (gdbarch, 0);
10320 /* Note: for displaced stepping, this includes the breakpoint, and one word
10321 of additional scratch space. This setting isn't used for anything besides
10322 displaced stepping at present. */
10323 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10325 /* This should be low enough for everything. */
10326 tdep->lowest_pc = 0x20;
10327 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10329 /* The default, for both APCS and AAPCS, is to return small
10330 structures in registers. */
10331 tdep->struct_return = reg_struct_return;
10333 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10334 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10336 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10338 /* Frame handling. */
10339 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10340 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10341 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10343 frame_base_set_default (gdbarch, &arm_normal_base);
10345 /* Address manipulation. */
10346 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10348 /* Advance PC across function entry code. */
10349 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10351 /* Detect whether PC is in function epilogue. */
10352 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10354 /* Skip trampolines. */
10355 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10357 /* The stack grows downward. */
10358 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10360 /* Breakpoint manipulation. */
10361 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10362 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10363 arm_remote_breakpoint_from_pc);
10365 /* Information about registers, etc. */
10366 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10367 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10368 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10369 set_gdbarch_register_type (gdbarch, arm_register_type);
10370 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10372 /* This "info float" is FPA-specific. Use the generic version if we
10373 do not have FPA. */
10374 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10375 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10377 /* Internal <-> external register number maps. */
10378 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10379 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10381 set_gdbarch_register_name (gdbarch, arm_register_name);
10383 /* Returning results. */
10384 set_gdbarch_return_value (gdbarch, arm_return_value);
10387 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10389 /* Minsymbol frobbing. */
10390 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10391 set_gdbarch_coff_make_msymbol_special (gdbarch,
10392 arm_coff_make_msymbol_special);
10393 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10395 /* Thumb-2 IT block support. */
10396 set_gdbarch_adjust_breakpoint_address (gdbarch,
10397 arm_adjust_breakpoint_address);
10399 /* Virtual tables. */
10400 set_gdbarch_vbit_in_delta (gdbarch, 1);
10402 /* Hook in the ABI-specific overrides, if they have been registered. */
10403 gdbarch_init_osabi (info, gdbarch);
10405 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10407 /* Add some default predicates. */
10409 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10410 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10411 dwarf2_append_unwinders (gdbarch);
10412 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10413 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10415 /* Now we have tuned the configuration, set a few final things,
10416 based on what the OS ABI has told us. */
10418 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10419 binaries are always marked. */
10420 if (tdep->arm_abi == ARM_ABI_AUTO)
10421 tdep->arm_abi = ARM_ABI_APCS;
10423 /* Watchpoints are not steppable. */
10424 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10426 /* We used to default to FPA for generic ARM, but almost nobody
10427 uses that now, and we now provide a way for the user to force
10428 the model. So default to the most useful variant. */
10429 if (tdep->fp_model == ARM_FLOAT_AUTO)
10430 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10432 if (tdep->jb_pc >= 0)
10433 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10435 /* Floating point sizes and format. */
10436 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10437 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10439 set_gdbarch_double_format
10440 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10441 set_gdbarch_long_double_format
10442 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10446 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10447 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10450 if (have_vfp_pseudos)
10452 /* NOTE: These are the only pseudo registers used by
10453 the ARM target at the moment. If more are added, a
10454 little more care in numbering will be needed. */
10456 int num_pseudos = 32;
10457 if (have_neon_pseudos)
10459 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10460 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10461 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10466 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10468 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10470 /* Override tdesc_register_type to adjust the types of VFP
10471 registers for NEON. */
10472 set_gdbarch_register_type (gdbarch, arm_register_type);
10475 /* Add standard register aliases. We add aliases even for those
10476 names which are used by the current architecture - it's simpler,
10477 and does no harm, since nothing ever lists user registers. */
10478 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10479 user_reg_add (gdbarch, arm_register_aliases[i].name,
10480 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10486 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10488 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10493 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10494 (unsigned long) tdep->lowest_pc);
10497 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10500 _initialize_arm_tdep (void)
10502 struct ui_file *stb;
10504 struct cmd_list_element *new_set, *new_show;
10505 const char *setname;
10506 const char *setdesc;
10507 const char *const *regnames;
10509 static char *helptext;
10510 char regdesc[1024], *rdptr = regdesc;
10511 size_t rest = sizeof (regdesc);
10513 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10515 arm_objfile_data_key
10516 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10518 /* Add ourselves to objfile event chain. */
10519 observer_attach_new_objfile (arm_exidx_new_objfile);
10521 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10523 /* Register an ELF OS ABI sniffer for ARM binaries. */
10524 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10525 bfd_target_elf_flavour,
10526 arm_elf_osabi_sniffer);
10528 /* Initialize the standard target descriptions. */
10529 initialize_tdesc_arm_with_m ();
10530 initialize_tdesc_arm_with_m_fpa_layout ();
10531 initialize_tdesc_arm_with_m_vfp_d16 ();
10532 initialize_tdesc_arm_with_iwmmxt ();
10533 initialize_tdesc_arm_with_vfpv2 ();
10534 initialize_tdesc_arm_with_vfpv3 ();
10535 initialize_tdesc_arm_with_neon ();
10537 /* Get the number of possible sets of register names defined in opcodes. */
10538 num_disassembly_options = get_arm_regname_num_options ();
10540 /* Add root prefix command for all "set arm"/"show arm" commands. */
10541 add_prefix_cmd ("arm", no_class, set_arm_command,
10542 _("Various ARM-specific commands."),
10543 &setarmcmdlist, "set arm ", 0, &setlist);
10545 add_prefix_cmd ("arm", no_class, show_arm_command,
10546 _("Various ARM-specific commands."),
10547 &showarmcmdlist, "show arm ", 0, &showlist);
10549 /* Sync the opcode insn printer with our register viewer. */
10550 parse_arm_disassembler_option ("reg-names-std");
10552 /* Initialize the array that will be passed to
10553 add_setshow_enum_cmd(). */
10554 valid_disassembly_styles
10555 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10556 for (i = 0; i < num_disassembly_options; i++)
10558 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10559 valid_disassembly_styles[i] = setname;
10560 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10563 /* When we find the default names, tell the disassembler to use
10565 if (!strcmp (setname, "std"))
10567 disassembly_style = setname;
10568 set_arm_regname_option (i);
10571 /* Mark the end of valid options. */
10572 valid_disassembly_styles[num_disassembly_options] = NULL;
10574 /* Create the help text. */
10575 stb = mem_fileopen ();
10576 fprintf_unfiltered (stb, "%s%s%s",
10577 _("The valid values are:\n"),
10579 _("The default is \"std\"."));
10580 helptext = ui_file_xstrdup (stb, NULL);
10581 ui_file_delete (stb);
10583 add_setshow_enum_cmd("disassembler", no_class,
10584 valid_disassembly_styles, &disassembly_style,
10585 _("Set the disassembly style."),
10586 _("Show the disassembly style."),
10588 set_disassembly_style_sfunc,
10589 NULL, /* FIXME: i18n: The disassembly style is
10591 &setarmcmdlist, &showarmcmdlist);
10593 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10594 _("Set usage of ARM 32-bit mode."),
10595 _("Show usage of ARM 32-bit mode."),
10596 _("When off, a 26-bit PC will be used."),
10598 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10600 &setarmcmdlist, &showarmcmdlist);
10602 /* Add a command to allow the user to force the FPU model. */
10603 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10604 _("Set the floating point type."),
10605 _("Show the floating point type."),
10606 _("auto - Determine the FP typefrom the OS-ABI.\n\
10607 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10608 fpa - FPA co-processor (GCC compiled).\n\
10609 softvfp - Software FP with pure-endian doubles.\n\
10610 vfp - VFP co-processor."),
10611 set_fp_model_sfunc, show_fp_model,
10612 &setarmcmdlist, &showarmcmdlist);
10614 /* Add a command to allow the user to force the ABI. */
10615 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10617 _("Show the ABI."),
10618 NULL, arm_set_abi, arm_show_abi,
10619 &setarmcmdlist, &showarmcmdlist);
10621 /* Add two commands to allow the user to force the assumed execution mode. */
10623 add_setshow_enum_cmd ("fallback-mode", class_support,
10624 arm_mode_strings, &arm_fallback_mode_string,
10625 _("Set the mode assumed when symbols are unavailable."),
10626 _("Show the mode assumed when symbols are unavailable."),
10627 NULL, NULL, arm_show_fallback_mode,
10628 &setarmcmdlist, &showarmcmdlist);
10629 add_setshow_enum_cmd ("force-mode", class_support,
10630 arm_mode_strings, &arm_force_mode_string,
10631 _("Set the mode assumed even when symbols are available."),
10632 _("Show the mode assumed even when symbols are available."),
10633 NULL, NULL, arm_show_force_mode,
10634 &setarmcmdlist, &showarmcmdlist);
10636 /* Debugging flag. */
10637 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10638 _("Set ARM debugging."),
10639 _("Show ARM debugging."),
10640 _("When on, arm-specific debugging is enabled."),
10642 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10643 &setdebuglist, &showdebuglist);
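/* As a rough illustration of the commands registered above, a session
   might contain (values are examples only; see the help strings and
   enum arrays above for the accepted values):

     (gdb) set arm fpu vfp
     (gdb) set arm abi auto
     (gdb) set arm disassembler std
     (gdb) set arm fallback-mode auto
     (gdb) set debug arm on
*/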
10646 /* ARM-reversible process record data structures. */
10648 #define ARM_INSN_SIZE_BYTES 4
10649 #define THUMB_INSN_SIZE_BYTES 2
10650 #define THUMB2_INSN_SIZE_BYTES 4
10653 #define INSN_S_L_BIT_NUM 20
10655 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10658 unsigned int reg_len = LENGTH; \
10661 REGS = XNEWVEC (uint32_t, reg_len); \
10662 memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * LENGTH); \
10667 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10670 unsigned int mem_len = LENGTH; \
10673 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10674 memcpy(&MEMS->len, &RECORD_BUF[0], \
10675 sizeof(struct arm_mem_r) * LENGTH); \
10680 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
10681 #define INSN_RECORDED(ARM_RECORD) \
10682 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
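/* Layout note (illustrative sketch, mirroring the usage in the decoders
   below): the RECORD_BUF passed to MEM_ALLOC holds consecutive
   {length, address} pairs matching struct arm_mem_r, and the count is
   the number of pairs, e.g.:

     uint32_t record_buf_mem[4];

     record_buf_mem[0] = 4;          // First block: 4 bytes ...
     record_buf_mem[1] = 0x8000;     // ... at (made-up) address 0x8000.
     record_buf_mem[2] = 4;          // Second block: 4 bytes ...
     record_buf_mem[3] = 0x8004;     // ... at address 0x8004.
     arm_insn_r->mem_rec_count = 2;  // Two pairs recorded.
*/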
10684 /* ARM memory record structure. */
10687 uint32_t len; /* Record length. */
10688 uint32_t addr; /* Memory address. */
10691 /* An ARM instruction record contains the opcode and execution state
10692 of the current insn (before entry to decode_insn ()), and the
10693 list of to-be-modified registers and
10694 memory blocks (on return from decode_insn ()). */
10696 typedef struct insn_decode_record_t
10698 struct gdbarch *gdbarch;
10699 struct regcache *regcache;
10700 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10701 uint32_t arm_insn; /* Should accommodate thumb. */
10702 uint32_t cond; /* Condition code. */
10703 uint32_t opcode; /* Insn opcode. */
10704 uint32_t decode; /* Insn decode bits. */
10705 uint32_t mem_rec_count; /* No of mem records. */
10706 uint32_t reg_rec_count; /* No of reg records. */
10707 uint32_t *arm_regs; /* Registers to be saved for this record. */
10708 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10709 } insn_decode_record;
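/* Illustrative sketch of how the decoders below fill in such a record;
   the instruction and helper name are made up and do not exist in this
   file:

     static int
     example_record_movs_r0 (insn_decode_record *arm_insn_r)
     {
       uint32_t record_buf[2];

       // A flag-setting data-processing insn (e.g. MOVS r0, #1) writes
       // R0 and updates the flags, so both R0 and CPSR must be saved
       // before the insn executes.
       record_buf[0] = 0;
       record_buf[1] = ARM_PS_REGNUM;
       arm_insn_r->reg_rec_count = 2;

       REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
                  record_buf);
       return 0;
     }
*/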
10712 /* Checks ARM SBZ and SBO mandatory fields. */
10715 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10717 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
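/* For example, the CLZ case in arm_record_data_proc_misc_ld_str below
   calls sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1): per the bit
   arithmetic above this examines bits 16-19 of the insn and, since the
   last argument is 1, requires them to be all ones (SBO); a last
   argument of 0 instead requires the field to be all zeros (SBZ).  */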
10736 enum arm_record_result
10738 ARM_RECORD_SUCCESS = 0,
10739 ARM_RECORD_FAILURE = 1
10746 } arm_record_strx_t;
10757 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10758 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10761 struct regcache *reg_cache = arm_insn_r->regcache;
10762 ULONGEST u_regval[2]= {0};
10764 uint32_t reg_src1 = 0, reg_src2 = 0;
10765 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10766 uint32_t opcode1 = 0;
10768 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10769 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10770 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10773 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10775 /* 1) Handle misc store, immediate offset. */
10776 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10777 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10778 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10779 regcache_raw_read_unsigned (reg_cache, reg_src1,
10781 if (ARM_PC_REGNUM == reg_src1)
10783 /* R15 was used as Rn, so the value read is the current PC+8. */
10784 u_regval[0] = u_regval[0] + 8;
10786 offset_8 = (immed_high << 4) | immed_low;
10787 /* Calculate target store address. */
10788 if (14 == arm_insn_r->opcode)
10790 tgt_mem_addr = u_regval[0] + offset_8;
10794 tgt_mem_addr = u_regval[0] - offset_8;
10796 if (ARM_RECORD_STRH == str_type)
10798 record_buf_mem[0] = 2;
10799 record_buf_mem[1] = tgt_mem_addr;
10800 arm_insn_r->mem_rec_count = 1;
10802 else if (ARM_RECORD_STRD == str_type)
10804 record_buf_mem[0] = 4;
10805 record_buf_mem[1] = tgt_mem_addr;
10806 record_buf_mem[2] = 4;
10807 record_buf_mem[3] = tgt_mem_addr + 4;
10808 arm_insn_r->mem_rec_count = 2;
10811 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10813 /* 2) Store, register offset. */
10815 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10817 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10818 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10819 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10820 if (15 == reg_src2)
10822 /* R15 was used as Rn, so its value is the current PC+8. */
10823 u_regval[1] = u_regval[1] + 8;
10825 /* Calculate target store address, Rn +/- Rm, register offset. */
10826 if (12 == arm_insn_r->opcode)
10828 tgt_mem_addr = u_regval[0] + u_regval[1];
10832 tgt_mem_addr = u_regval[1] - u_regval[0];
10834 if (ARM_RECORD_STRH == str_type)
10836 record_buf_mem[0] = 2;
10837 record_buf_mem[1] = tgt_mem_addr;
10838 arm_insn_r->mem_rec_count = 1;
10840 else if (ARM_RECORD_STRD == str_type)
10842 record_buf_mem[0] = 4;
10843 record_buf_mem[1] = tgt_mem_addr;
10844 record_buf_mem[2] = 4;
10845 record_buf_mem[3] = tgt_mem_addr + 4;
10846 arm_insn_r->mem_rec_count = 2;
10849 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10850 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10852 /* 3) Store, immediate pre-indexed. */
10853 /* 5) Store, immediate post-indexed. */
10854 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10855 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10856 offset_8 = (immed_high << 4) | immed_low;
10857 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10858 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10859 /* Calculate target store address, Rn +/- Rm, register offset. */
10860 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10862 tgt_mem_addr = u_regval[0] + offset_8;
10866 tgt_mem_addr = u_regval[0] - offset_8;
10868 if (ARM_RECORD_STRH == str_type)
10870 record_buf_mem[0] = 2;
10871 record_buf_mem[1] = tgt_mem_addr;
10872 arm_insn_r->mem_rec_count = 1;
10874 else if (ARM_RECORD_STRD == str_type)
10876 record_buf_mem[0] = 4;
10877 record_buf_mem[1] = tgt_mem_addr;
10878 record_buf_mem[2] = 4;
10879 record_buf_mem[3] = tgt_mem_addr + 4;
10880 arm_insn_r->mem_rec_count = 2;
10882 /* Record Rn also as it changes. */
10883 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10884 arm_insn_r->reg_rec_count = 1;
10886 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10887 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10889 /* 4) Store, register pre-indexed. */
10890 /* 6) Store, register post -indexed. */
10891 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10892 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10893 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10894 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10895 /* Calculate target store address, Rn +/- Rm, register offset. */
10896 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10898 tgt_mem_addr = u_regval[0] + u_regval[1];
10902 tgt_mem_addr = u_regval[1] - u_regval[0];
10904 if (ARM_RECORD_STRH == str_type)
10906 record_buf_mem[0] = 2;
10907 record_buf_mem[1] = tgt_mem_addr;
10908 arm_insn_r->mem_rec_count = 1;
10910 else if (ARM_RECORD_STRD == str_type)
10912 record_buf_mem[0] = 4;
10913 record_buf_mem[1] = tgt_mem_addr;
10914 record_buf_mem[2] = 4;
10915 record_buf_mem[3] = tgt_mem_addr + 4;
10916 arm_insn_r->mem_rec_count = 2;
10918 /* Record Rn also as it changes. */
10919 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10920 arm_insn_r->reg_rec_count = 1;
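/* For example (illustrative encoding), a pre-indexed "strd r2, r3,
   [r0, r1]!" falls into case 4/6 above when this function is called
   with ARM_RECORD_STRD: it records two 4-byte blocks at r0+r1 and
   r0+r1+4, plus r0 itself because of the write-back.  */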
10925 /* Handling ARM extension space insns. */
10928 arm_record_extension_space (insn_decode_record *arm_insn_r)
10930 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10931 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10932 uint32_t record_buf[8], record_buf_mem[8];
10933 uint32_t reg_src1 = 0;
10934 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10935 struct regcache *reg_cache = arm_insn_r->regcache;
10936 ULONGEST u_regval = 0;
10938 gdb_assert (!INSN_RECORDED(arm_insn_r));
10939 /* Handle unconditional insn extension space. */
10941 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10942 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10943 if (arm_insn_r->cond)
10945 /* PLD has no effect on architectural state; it just affects the caches. */
10947 if (5 == ((opcode1 & 0xE0) >> 5))
10950 record_buf[0] = ARM_PS_REGNUM;
10951 record_buf[1] = ARM_LR_REGNUM;
10952 arm_insn_r->reg_rec_count = 2;
10954 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10958 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10959 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10962 /* Undefined instruction on ARM V5; need to handle if later
10963 versions define it. */
10966 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10967 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10968 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10970 /* Handle arithmetic insn extension space. */
10971 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10972 && !INSN_RECORDED(arm_insn_r))
10974 /* Handle MLA(S) and MUL(S). */
10975 if (0 <= insn_op1 && 3 >= insn_op1)
10977 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10978 record_buf[1] = ARM_PS_REGNUM;
10979 arm_insn_r->reg_rec_count = 2;
10981 else if (4 <= insn_op1 && 15 >= insn_op1)
10983 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10984 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10985 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10986 record_buf[2] = ARM_PS_REGNUM;
10987 arm_insn_r->reg_rec_count = 3;
10991 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10992 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10993 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10995 /* Handle control insn extension space. */
10997 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10998 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11000 if (!bit (arm_insn_r->arm_insn,25))
11002 if (!bits (arm_insn_r->arm_insn, 4, 7))
11004 if ((0 == insn_op1) || (2 == insn_op1))
11007 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11008 arm_insn_r->reg_rec_count = 1;
11010 else if (1 == insn_op1)
11012 /* CPSR is going to be changed. */
11013 record_buf[0] = ARM_PS_REGNUM;
11014 arm_insn_r->reg_rec_count = 1;
11016 else if (3 == insn_op1)
11018 /* SPSR is going to be changed. */
11019 /* We need to get SPSR value, which is yet to be done. */
11020 printf_unfiltered (_("Process record does not support "
11021 "instruction 0x%0x at address %s.\n"),
11022 arm_insn_r->arm_insn,
11023 paddress (arm_insn_r->gdbarch,
11024 arm_insn_r->this_addr));
11028 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11033 record_buf[0] = ARM_PS_REGNUM;
11034 arm_insn_r->reg_rec_count = 1;
11036 else if (3 == insn_op1)
11039 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11040 arm_insn_r->reg_rec_count = 1;
11043 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11046 record_buf[0] = ARM_PS_REGNUM;
11047 record_buf[1] = ARM_LR_REGNUM;
11048 arm_insn_r->reg_rec_count = 2;
11050 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11052 /* QADD, QSUB, QDADD, QDSUB */
11053 record_buf[0] = ARM_PS_REGNUM;
11054 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11055 arm_insn_r->reg_rec_count = 2;
11057 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11060 record_buf[0] = ARM_PS_REGNUM;
11061 record_buf[1] = ARM_LR_REGNUM;
11062 arm_insn_r->reg_rec_count = 2;
11064 /* Save SPSR also; how? */
11065 printf_unfiltered (_("Process record does not support "
11066 "instruction 0x%0x at address %s.\n"),
11067 arm_insn_r->arm_insn,
11068 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11071 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
11072 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11073 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11074 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11077 if (0 == insn_op1 || 1 == insn_op1)
11079 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11080 /* We don't do optimization for SMULW<y> where we
11082 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11083 record_buf[1] = ARM_PS_REGNUM;
11084 arm_insn_r->reg_rec_count = 2;
11086 else if (2 == insn_op1)
11089 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11090 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11091 arm_insn_r->reg_rec_count = 2;
11093 else if (3 == insn_op1)
11096 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11097 arm_insn_r->reg_rec_count = 1;
11103 /* MSR : immediate form. */
11106 /* CPSR is going to be changed. */
11107 record_buf[0] = ARM_PS_REGNUM;
11108 arm_insn_r->reg_rec_count = 1;
11110 else if (3 == insn_op1)
11112 /* SPSR is going to be changed. */
11113 /* We need to get the SPSR value, which is yet to be done. */
11114 printf_unfiltered (_("Process record does not support "
11115 "instruction 0x%0x at address %s.\n"),
11116 arm_insn_r->arm_insn,
11117 paddress (arm_insn_r->gdbarch,
11118 arm_insn_r->this_addr));
11124 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11125 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11126 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11128 /* Handle load/store insn extension space. */
11130 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11131 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11132 && !INSN_RECORDED(arm_insn_r))
11137 /* These insns change both a register and memory. */
11138 /* SWP or SWPB insn. */
11139 /* Get memory address given by Rn. */
11140 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11141 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11142 /* SWP insn? It swaps a word. */
11143 if (8 == arm_insn_r->opcode)
11145 record_buf_mem[0] = 4;
11149 /* SWPB insn swaps only a byte. */
11150 record_buf_mem[0] = 1;
11152 record_buf_mem[1] = u_regval;
11153 arm_insn_r->mem_rec_count = 1;
11154 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11155 arm_insn_r->reg_rec_count = 1;
11157 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11160 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11163 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11166 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11167 record_buf[1] = record_buf[0] + 1;
11168 arm_insn_r->reg_rec_count = 2;
11170 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11173 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11176 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11178 /* LDRH, LDRSB, LDRSH. */
11179 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11180 arm_insn_r->reg_rec_count = 1;
11185 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11186 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11187 && !INSN_RECORDED(arm_insn_r))
11190 /* Handle coprocessor insn extension space. */
11193 /* To be done for ARMv5 and later; as of now we return -1. */
11195 printf_unfiltered (_("Process record does not support instruction 0x%0x "
11196 "at address %s.\n"),arm_insn_r->arm_insn,
11197 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11200 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11201 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11206 /* Handling opcode 000 insns. */
11209 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11211 struct regcache *reg_cache = arm_insn_r->regcache;
11212 uint32_t record_buf[8], record_buf_mem[8];
11213 ULONGEST u_regval[2] = {0};
11215 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11216 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11217 uint32_t opcode1 = 0;
11219 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11220 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11221 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11223 /* Data processing insn /multiply insn. */
11224 if (9 == arm_insn_r->decode
11225 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11226 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11228 /* Handle multiply instructions. */
11229 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11230 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11232 /* Handle MLA and MUL. */
11233 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11234 record_buf[1] = ARM_PS_REGNUM;
11235 arm_insn_r->reg_rec_count = 2;
11237 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11239 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11240 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11241 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11242 record_buf[2] = ARM_PS_REGNUM;
11243 arm_insn_r->reg_rec_count = 3;
11246 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11247 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11249 /* Handle misc load insns, as 20th bit (L = 1). */
11250 /* LDR insn has a capability to do branching: if
11251 MOV LR, PC is preceded by an LDR insn having Rn as R15,
11252 it emulates a branch and link insn, and hence we
11253 need to save CPSR and PC as well. I am not sure this is the right
11254 place, as an opcode = 010 LDR insn makes this happen if R15 was used. */
11256 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11257 if (15 != reg_dest)
11259 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11260 arm_insn_r->reg_rec_count = 1;
11264 record_buf[0] = reg_dest;
11265 record_buf[1] = ARM_PS_REGNUM;
11266 arm_insn_r->reg_rec_count = 2;
11269 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11270 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11271 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11272 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11274 /* Handle MSR insn. */
11275 if (9 == arm_insn_r->opcode)
11277 /* CPSR is going to be changed. */
11278 record_buf[0] = ARM_PS_REGNUM;
11279 arm_insn_r->reg_rec_count = 1;
11283 /* SPSR is going to be changed. */
11284 /* How to read SPSR value? */
11285 printf_unfiltered (_("Process record does not support instruction "
11286 "0x%0x at address %s.\n"),
11287 arm_insn_r->arm_insn,
11288 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11292 else if (9 == arm_insn_r->decode
11293 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11294 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11296 /* Handling SWP, SWPB. */
11297 /* These insns change both a register and memory. */
11298 /* SWP or SWPB insn. */
11300 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11301 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11302 /* SWP insn? It swaps a word. */
11303 if (8 == arm_insn_r->opcode)
11305 record_buf_mem[0] = 4;
11309 /* SWPB insn swaps only a byte. */
11310 record_buf_mem[0] = 1;
11312 record_buf_mem[1] = u_regval[0];
11313 arm_insn_r->mem_rec_count = 1;
11314 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11315 arm_insn_r->reg_rec_count = 1;
11317 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11318 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11320 /* Handle BLX, branch and link/exchange. */
11321 if (9 == arm_insn_r->opcode)
11323 /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm,
11324 and R14 stores the return address. */
11325 record_buf[0] = ARM_PS_REGNUM;
11326 record_buf[1] = ARM_LR_REGNUM;
11327 arm_insn_r->reg_rec_count = 2;
11330 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11332 /* Handle enhanced software breakpoint insn, BKPT. */
11333 /* CPSR is changed so that execution continues in ARM state, with normal
11334 interrupts disabled, entering abort mode. */
11335 /* The PC is set according to the high vector configuration. */
11336 /* If the user hits a breakpoint and types reverse, we need
11337 to go back with the previous CPSR and
11338 Program Counter. */
11339 record_buf[0] = ARM_PS_REGNUM;
11340 record_buf[1] = ARM_LR_REGNUM;
11341 arm_insn_r->reg_rec_count = 2;
11343 /* Save SPSR also; how? */
11344 printf_unfiltered (_("Process record does not support instruction "
11345 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11346 paddress (arm_insn_r->gdbarch,
11347 arm_insn_r->this_addr));
11350 else if (11 == arm_insn_r->decode
11351 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11353 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11355 /* Handle str(x) insn */
11356 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11359 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11360 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11362 /* Handle BX, branch and link/exchange. */
11363 /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm. */
11364 record_buf[0] = ARM_PS_REGNUM;
11365 arm_insn_r->reg_rec_count = 1;
11367 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11368 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11369 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11371 /* Count leading zeros: CLZ. */
11372 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11373 arm_insn_r->reg_rec_count = 1;
11375 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11376 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11377 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11378 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11381 /* Handle MRS insn. */
11382 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11383 arm_insn_r->reg_rec_count = 1;
11385 else if (arm_insn_r->opcode <= 15)
11387 /* Normal data processing insns. */
11388 /* In all 11 shifter-operand modes the insn modifies the destination
11389 register, which is specified by bits 12-15. */
11390 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11391 record_buf[1] = ARM_PS_REGNUM;
11392 arm_insn_r->reg_rec_count = 2;
11399 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11400 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11404 /* Handling opcode 001 insns. */
11407 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11409 uint32_t record_buf[8], record_buf_mem[8];
11411 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11412 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11414 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11415 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11416 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11419 /* Handle MSR insn. */
11420 if (9 == arm_insn_r->opcode)
11422 /* CPSR is going to be changed. */
11423 record_buf[0] = ARM_PS_REGNUM;
11424 arm_insn_r->reg_rec_count = 1;
11428 /* SPSR is going to be changed. */
11431 else if (arm_insn_r->opcode <= 15)
11433 /* Normal data processing insns. */
11434 /* In all 11 shifter-operand modes the insn modifies the destination
11435 register, which is specified by bits 12-15. */
11436 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11437 record_buf[1] = ARM_PS_REGNUM;
11438 arm_insn_r->reg_rec_count = 2;
11445 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11446 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11450 /* Handling opcode 010 insns. */
11453 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11455 struct regcache *reg_cache = arm_insn_r->regcache;
11457 uint32_t reg_src1 = 0 , reg_dest = 0;
11458 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11459 uint32_t record_buf[8], record_buf_mem[8];
11461 ULONGEST u_regval = 0;
11463 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11464 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11466 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11468 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11469 /* LDR insn has a capability to do branching, if
11470 MOV LR, PC is preceded by an LDR insn having Rn as R15;
11471 in that case, it emulates a branch and link insn, and hence we
11472 need to save CPSR and PC as well. */
11473 if (ARM_PC_REGNUM != reg_dest)
11475 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11476 arm_insn_r->reg_rec_count = 1;
11480 record_buf[0] = reg_dest;
11481 record_buf[1] = ARM_PS_REGNUM;
11482 arm_insn_r->reg_rec_count = 2;
11487 /* Store, immediate offset, immediate pre-indexed,
11488 immediate post-indexed. */
11489 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11490 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11491 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11493 if (bit (arm_insn_r->arm_insn, 23))
11495 tgt_mem_addr = u_regval + offset_12;
11499 tgt_mem_addr = u_regval - offset_12;
11502 switch (arm_insn_r->opcode)
11516 record_buf_mem[0] = 4;
11531 record_buf_mem[0] = 1;
11535 gdb_assert_not_reached ("no decoding pattern found");
11538 record_buf_mem[1] = tgt_mem_addr;
11539 arm_insn_r->mem_rec_count = 1;
11541 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11542 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11543 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11544 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11545 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11546 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11549 /* We are handling pre-indexed or post-indexed mode,
11550 where Rn is going to be changed. */
11551 record_buf[0] = reg_src1;
11552 arm_insn_r->reg_rec_count = 1;
11556 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11557 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11561 /* Handling opcode 011 insns. */
11564 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11566 struct regcache *reg_cache = arm_insn_r->regcache;
11568 uint32_t shift_imm = 0;
11569 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11570 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11571 uint32_t record_buf[8], record_buf_mem[8];
11574 ULONGEST u_regval[2];
11576 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11577 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11579 /* Handle enhanced store insns and LDRD DSP insn,
11580 order begins according to addressing modes for store insns
11584 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11586 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11587 /* LDR insn has a capability to do branching, if
11588 MOV LR, PC is preceded by an LDR insn having Rn as R15;
11589 in that case, it emulates a branch and link insn, and hence we
11590 need to save CPSR and PC as well. */
11591 if (15 != reg_dest)
11593 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11594 arm_insn_r->reg_rec_count = 1;
11598 record_buf[0] = reg_dest;
11599 record_buf[1] = ARM_PS_REGNUM;
11600 arm_insn_r->reg_rec_count = 2;
11605 if (! bits (arm_insn_r->arm_insn, 4, 11))
11607 /* Store insn, register offset and register pre-indexed,
11608 register post-indexed. */
11610 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11612 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11613 regcache_raw_read_unsigned (reg_cache, reg_src1
11615 regcache_raw_read_unsigned (reg_cache, reg_src2
11617 if (15 == reg_src2)
11619 /* R15 was used as Rn, so its value is the current PC+8. */
11620 /* Pre-indexed mode doesn't reach here; illegal insn. */
11621 u_regval[1] = u_regval[1] + 8;
11623 /* Calculate target store address, Rn +/- Rm, register offset. */
11625 if (bit (arm_insn_r->arm_insn, 23))
11627 tgt_mem_addr = u_regval[0] + u_regval[1];
11631 tgt_mem_addr = u_regval[1] - u_regval[0];
11634 switch (arm_insn_r->opcode)
11648 record_buf_mem[0] = 4;
11663 record_buf_mem[0] = 1;
11667 gdb_assert_not_reached ("no decoding pattern found");
11670 record_buf_mem[1] = tgt_mem_addr;
11671 arm_insn_r->mem_rec_count = 1;
11673 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11674 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11675 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11676 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11677 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11678 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11681 /* Rn is going to be changed in pre-indexed mode and
11682 post-indexed mode as well. */
11683 record_buf[0] = reg_src2;
11684 arm_insn_r->reg_rec_count = 1;
11689 /* Store insn, scaled register offset; scaled pre-indexed. */
11690 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11692 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11694 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11695 /* Get shift_imm. */
11696 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11697 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11698 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11699 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11700 /* Offset_12 used as shift. */
11704 /* Offset_12 used as index. */
11705 offset_12 = u_regval[0] << shift_imm;
11709 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11715 if (bit (u_regval[0], 31))
11717 offset_12 = 0xFFFFFFFF;
11726 /* This is arithmetic shift. */
11727 offset_12 = s_word >> shift_imm;
11734 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11736 /* Get C flag value and shift it by 31. */
11737 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11738 | (u_regval[0]) >> 1);
11742 offset_12 = (u_regval[0] >> shift_imm)
11743 | (u_regval[0] <<
11744 (sizeof(uint32_t) - shift_imm));
11749 gdb_assert_not_reached ("no decoding pattern found");
11753 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11755 if (bit (arm_insn_r->arm_insn, 23))
11757 tgt_mem_addr = u_regval[1] + offset_12;
11761 tgt_mem_addr = u_regval[1] - offset_12;
11764 switch (arm_insn_r->opcode)
11778 record_buf_mem[0] = 4;
11793 record_buf_mem[0] = 1;
11797 gdb_assert_not_reached ("no decoding pattern found");
11800 record_buf_mem[1] = tgt_mem_addr;
11801 arm_insn_r->mem_rec_count = 1;
11803 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11804 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11805 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11806 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11807 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11808 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11811 /* Rn is going to be changed in register scaled pre-indexed
11812 mode, and scaled post-indexed mode. */
11813 record_buf[0] = reg_src2;
11814 arm_insn_r->reg_rec_count = 1;
11819 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11820 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11824 /* Handling opcode 100 insns. */
11827 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11829 struct regcache *reg_cache = arm_insn_r->regcache;
11831 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11832 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11833 uint32_t start_address = 0, index = 0;
11834 uint32_t record_buf[24], record_buf_mem[48];
11836 ULONGEST u_regval[2] = {0};
11838 /* This mode is exclusively for load and store multiple. */
11839 /* Handle increment after/before and decrement after/before mode;
11840 Rn is changing depending on W bit, but as of now we store Rn too
11841 without optimization. */
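/* For instance (illustrative values), "stmdb sp!, {r4-r7, lr}" with
   SP = 0x1000 stores five words; the decrement-before case below
   computes start_address = 0x1000 - 5*4 = 0xfec and records five
   4-byte blocks at 0xfec, 0xff0, ..., 0xffc, plus SP itself because
   of the write-back.  */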
11843 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11845 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11847 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11849 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11854 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11858 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11859 while (register_bits)
11861 if (register_bits & 0x00000001)
11862 record_buf[index++] = register_count;
11863 register_bits = register_bits >> 1;
11867 /* Extra space for Base Register and CPSR; without optimization. */
11868 record_buf[index++] = reg_src1;
11869 record_buf[index++] = ARM_PS_REGNUM;
11870 arm_insn_r->reg_rec_count = index;
11874 /* It handles both STM(1) and STM(2). */
11875 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11877 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11879 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11880 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11881 while (register_bits)
11883 if (register_bits & 0x00000001)
11885 register_bits = register_bits >> 1;
11890 /* Decrement after. */
11892 start_address = (u_regval[0]) - (register_count * 4) + 4;
11893 arm_insn_r->mem_rec_count = register_count;
11894 while (register_count)
11896 record_buf_mem[(register_count * 2) - 1] = start_address;
11897 record_buf_mem[(register_count * 2) - 2] = 4;
11898 start_address = start_address + 4;
11903 /* Increment after. */
11905 start_address = u_regval[0];
11906 arm_insn_r->mem_rec_count = register_count;
11907 while (register_count)
11909 record_buf_mem[(register_count * 2) - 1] = start_address;
11910 record_buf_mem[(register_count * 2) - 2] = 4;
11911 start_address = start_address + 4;
11916 /* Decrement before. */
11919 start_address = (u_regval[0]) - (register_count * 4);
11920 arm_insn_r->mem_rec_count = register_count;
11921 while (register_count)
11923 record_buf_mem[(register_count * 2) - 1] = start_address;
11924 record_buf_mem[(register_count * 2) - 2] = 4;
11925 start_address = start_address + 4;
11930 /* Increment before. */
11932 start_address = u_regval[0] + 4;
11933 arm_insn_r->mem_rec_count = register_count;
11934 while (register_count)
11936 record_buf_mem[(register_count * 2) - 1] = start_address;
11937 record_buf_mem[(register_count * 2) - 2] = 4;
11938 start_address = start_address + 4;
11944 gdb_assert_not_reached ("no decoding pattern found");
11948 /* Base register also changes; based on condition and W bit. */
11949 /* We save it anyway without optimization. */
11950 record_buf[0] = reg_src1;
11951 arm_insn_r->reg_rec_count = 1;
11954 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11955 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11959 /* Handling opcode 101 insns. */
11962 arm_record_b_bl (insn_decode_record *arm_insn_r)
11964 uint32_t record_buf[8];
11966 /* Handle B, BL, BLX(1) insns. */
11967 /* B simply branches so we do nothing here. */
11968 /* Note: BLX(1) doesn't fall here but instead it falls into
11969 extension space. */
11970 if (bit (arm_insn_r->arm_insn, 24))
11972 record_buf[0] = ARM_LR_REGNUM;
11973 arm_insn_r->reg_rec_count = 1;
11976 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11981 /* Handling opcode 110 insns. */
11984 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11986 printf_unfiltered (_("Process record does not support instruction "
11987 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11988 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11993 /* Handling opcode 111 insns. */
11996 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11998 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11999 struct regcache *reg_cache = arm_insn_r->regcache;
12000 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12001 ULONGEST u_regval = 0;
12003 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12005 /* Handle arm SWI/SVC system call instructions. */
12006 if (15 == arm_insn_r->opcode)
12008 if (tdep->arm_syscall_record != NULL)
12010 ULONGEST svc_operand, svc_number;
12012 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12014 if (svc_operand) /* OABI. */
12015 svc_number = svc_operand - 0x900000;
12017 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
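/* For example (illustrative): an EABI "svc 0" has a zero operand, so
   the syscall number is read from r7 above, while a legacy OABI
   "swi 0x900004" yields svc_number = 0x900004 - 0x900000 = 4.  */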
12019 ret = tdep->arm_syscall_record (reg_cache, svc_number);
12023 printf_unfiltered (_("no syscall record support\n"));
12029 arm_record_unsupported_insn (arm_insn_r);
12036 /* Handling opcode 000 insns. */
12039 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12041 uint32_t record_buf[8];
12042 uint32_t reg_src1 = 0;
12044 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12046 record_buf[0] = ARM_PS_REGNUM;
12047 record_buf[1] = reg_src1;
12048 thumb_insn_r->reg_rec_count = 2;
12050 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12056 /* Handling opcode 001 insns. */
12059 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12061 uint32_t record_buf[8];
12062 uint32_t reg_src1 = 0;
12064 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12066 record_buf[0] = ARM_PS_REGNUM;
12067 record_buf[1] = reg_src1;
12068 thumb_insn_r->reg_rec_count = 2;
12070 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12075 /* Handling opcode 010 insns. */
12078 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12080 struct regcache *reg_cache = thumb_insn_r->regcache;
12081 uint32_t record_buf[8], record_buf_mem[8];
12083 uint32_t reg_src1 = 0, reg_src2 = 0;
12084 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12086 ULONGEST u_regval[2] = {0};
12088 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12090 if (bit (thumb_insn_r->arm_insn, 12))
12092 /* Handle load/store register offset. */
12093 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12094 if (opcode2 >= 12 && opcode2 <= 15)
12096 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12097 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12098 record_buf[0] = reg_src1;
12099 thumb_insn_r->reg_rec_count = 1;
12101 else if (opcode2 >= 8 && opcode2 <= 10)
12103 /* STR(2), STRB(2), STRH(2) . */
12104 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12105 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12106 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12107 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12109 record_buf_mem[0] = 4; /* STR (2). */
12110 else if (10 == opcode2)
12111 record_buf_mem[0] = 1; /* STRB (2). */
12112 else if (9 == opcode2)
12113 record_buf_mem[0] = 2; /* STRH (2). */
12114 record_buf_mem[1] = u_regval[0] + u_regval[1];
12115 thumb_insn_r->mem_rec_count = 1;
12118 else if (bit (thumb_insn_r->arm_insn, 11))
12120 /* Handle load from literal pool. */
12122 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12123 record_buf[0] = reg_src1;
12124 thumb_insn_r->reg_rec_count = 1;
12128 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12129 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12130 if ((3 == opcode2) && (!opcode3))
12132 /* Branch with exchange. */
12133 record_buf[0] = ARM_PS_REGNUM;
12134 thumb_insn_r->reg_rec_count = 1;
12138 /* Format 8; special data processing insns. */
12139 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12140 record_buf[0] = ARM_PS_REGNUM;
12141 record_buf[1] = reg_src1;
12142 thumb_insn_r->reg_rec_count = 2;
12147 /* Format 5; data processing insns. */
12148 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12149 if (bit (thumb_insn_r->arm_insn, 7))
12151 reg_src1 = reg_src1 + 8;
12153 record_buf[0] = ARM_PS_REGNUM;
12154 record_buf[1] = reg_src1;
12155 thumb_insn_r->reg_rec_count = 2;
12158 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12159 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12165 /* Handling opcode 001 insns. */
12168 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12170 struct regcache *reg_cache = thumb_insn_r->regcache;
12171 uint32_t record_buf[8], record_buf_mem[8];
12173 uint32_t reg_src1 = 0;
12174 uint32_t opcode = 0, immed_5 = 0;
12176 ULONGEST u_regval = 0;
12178 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12183 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12184 record_buf[0] = reg_src1;
12185 thumb_insn_r->reg_rec_count = 1;
12190 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12191 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12192 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12193 record_buf_mem[0] = 4;
12194 record_buf_mem[1] = u_regval + (immed_5 * 4);
12195 thumb_insn_r->mem_rec_count = 1;
12198 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12199 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12205 /* Handling opcode 100 insns. */
12208 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12210 struct regcache *reg_cache = thumb_insn_r->regcache;
12211 uint32_t record_buf[8], record_buf_mem[8];
12213 uint32_t reg_src1 = 0;
12214 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12216 ULONGEST u_regval = 0;
12218 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12223 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12224 record_buf[0] = reg_src1;
12225 thumb_insn_r->reg_rec_count = 1;
12227 else if (1 == opcode)
12230 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12231 record_buf[0] = reg_src1;
12232 thumb_insn_r->reg_rec_count = 1;
12234 else if (2 == opcode)
12237 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12238 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12239 record_buf_mem[0] = 4;
12240 record_buf_mem[1] = u_regval + (immed_8 * 4);
12241 thumb_insn_r->mem_rec_count = 1;
12243 else if (0 == opcode)
12246 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12247 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12248 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12249 record_buf_mem[0] = 2;
12250 record_buf_mem[1] = u_regval + (immed_5 * 2);
12251 thumb_insn_r->mem_rec_count = 1;
12254 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12255 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12261 /* Handling opcode 101 insns. */
12264 thumb_record_misc (insn_decode_record *thumb_insn_r)
12266 struct regcache *reg_cache = thumb_insn_r->regcache;
12268 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12269 uint32_t register_bits = 0, register_count = 0;
12270 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12271 uint32_t record_buf[24], record_buf_mem[48];
12274 ULONGEST u_regval = 0;
12276 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12277 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12278 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12283 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12284 while (register_bits)
12286 if (register_bits & 0x00000001)
12287 record_buf[index++] = register_count;
12288 register_bits = register_bits >> 1;
12291 record_buf[index++] = ARM_PS_REGNUM;
12292 record_buf[index++] = ARM_SP_REGNUM;
12293 thumb_insn_r->reg_rec_count = index;
12295 else if (10 == opcode2)
12298 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12299 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12300 while (register_bits)
12302 if (register_bits & 0x00000001)
12304 register_bits = register_bits >> 1;
12306 start_address = u_regval - \
12307 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12308 thumb_insn_r->mem_rec_count = register_count;
12309 while (register_count)
12311 record_buf_mem[(register_count * 2) - 1] = start_address;
12312 record_buf_mem[(register_count * 2) - 2] = 4;
12313 start_address = start_address + 4;
12316 record_buf[0] = ARM_SP_REGNUM;
12317 thumb_insn_r->reg_rec_count = 1;
12319 else if (0x1E == opcode1)
12322 /* Handle enhanced software breakpoint insn, BKPT. */
12323 /* CPSR is changed so that execution continues in ARM state, with normal
12324 interrupts disabled, entering abort mode. */
12325 /* The PC is set according to the high vector configuration. */
12326 /* If the user hits a breakpoint and types reverse, we need to go back with
12327 the previous CPSR and Program Counter. */
12328 record_buf[0] = ARM_PS_REGNUM;
12329 record_buf[1] = ARM_LR_REGNUM;
12330 thumb_insn_r->reg_rec_count = 2;
12331 /* We need to save SPSR value, which is not yet done. */
12332 printf_unfiltered (_("Process record does not support instruction "
12333 "0x%0x at address %s.\n"),
12334 thumb_insn_r->arm_insn,
12335 paddress (thumb_insn_r->gdbarch,
12336 thumb_insn_r->this_addr));
12339 else if ((0 == opcode) || (1 == opcode))
12341 /* ADD(5), ADD(6). */
12342 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12343 record_buf[0] = reg_src1;
12344 thumb_insn_r->reg_rec_count = 1;
12346 else if (2 == opcode)
12348 /* ADD(7), SUB(4). */
12349 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12350 record_buf[0] = ARM_SP_REGNUM;
12351 thumb_insn_r->reg_rec_count = 1;
12354 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12355 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12361 /* Handling opcode 110 insns. */
12364 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12366 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12367 struct regcache *reg_cache = thumb_insn_r->regcache;
12369 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12370 uint32_t reg_src1 = 0;
12371 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12372 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12373 uint32_t record_buf[24], record_buf_mem[48];
12375 ULONGEST u_regval = 0;
12377 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12378 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12384 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12386 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12387 while (register_bits)
12389 if (register_bits & 0x00000001)
12390 record_buf[index++] = register_count;
12391 register_bits = register_bits >> 1;
12394 record_buf[index++] = reg_src1;
12395 thumb_insn_r->reg_rec_count = index;
12397 else if (0 == opcode2)
12399 /* This handles STMIA. */
12400 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12402 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12403 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12404 while (register_bits)
12406 if (register_bits & 0x00000001)
12408 register_bits = register_bits >> 1;
12410 start_address = u_regval;
12411 thumb_insn_r->mem_rec_count = register_count;
12412 while (register_count)
12414 record_buf_mem[(register_count * 2) - 1] = start_address;
12415 record_buf_mem[(register_count * 2) - 2] = 4;
12416 start_address = start_address + 4;
12420 else if (0x1F == opcode1)
12422 /* Handle arm syscall insn. */
12423 if (tdep->arm_syscall_record != NULL)
12425 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12426 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12430 printf_unfiltered (_("no syscall record support\n"));
12435 /* B (1), the conditional branch, is automatically taken care of in process_record,
12436 as PC is saved there. */
12438 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12439 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12445 /* Handling opcode 111 insns. */
12448 thumb_record_branch (insn_decode_record *thumb_insn_r)
12450 uint32_t record_buf[8];
12451 uint32_t bits_h = 0;
12453 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12455 if (2 == bits_h || 3 == bits_h)
12458 record_buf[0] = ARM_LR_REGNUM;
12459 thumb_insn_r->reg_rec_count = 1;
12461 else if (1 == bits_h)
12464 record_buf[0] = ARM_PS_REGNUM;
12465 record_buf[1] = ARM_LR_REGNUM;
12466 thumb_insn_r->reg_rec_count = 2;
12469 /* B(2) is automatically taken care of in process_record, as PC is saved there. */
12472 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12477 /* Handler for thumb2 load/store multiple instructions. */
12480 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12482 struct regcache *reg_cache = thumb2_insn_r->regcache;
12484 uint32_t reg_rn, op;
12485 uint32_t register_bits = 0, register_count = 0;
12486 uint32_t index = 0, start_address = 0;
12487 uint32_t record_buf[24], record_buf_mem[48];
12489 ULONGEST u_regval = 0;
12491 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12492 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12494 if (0 == op || 3 == op)
12496 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12498 /* Handle RFE instruction. */
12499 record_buf[0] = ARM_PS_REGNUM;
12500 thumb2_insn_r->reg_rec_count = 1;
12504 /* Handle SRS instruction after reading banked SP. */
12505 return arm_record_unsupported_insn (thumb2_insn_r);
12508 else if (1 == op || 2 == op)
12510 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12512 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12513 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12514 while (register_bits)
12516 if (register_bits & 0x00000001)
12517 record_buf[index++] = register_count;
12520 register_bits = register_bits >> 1;
12522 record_buf[index++] = reg_rn;
12523 record_buf[index++] = ARM_PS_REGNUM;
12524 thumb2_insn_r->reg_rec_count = index;
12528 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12529 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12530 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12531 while (register_bits)
12533 if (register_bits & 0x00000001)
12536 register_bits = register_bits >> 1;
12541 /* Start address calculation for STM/STMIA/STMEA. */
12542 start_address = u_regval;
12546 /* Start address calculation for STMDB/STMFD. */
12547 start_address = u_regval - register_count * 4;
12550 thumb2_insn_r->mem_rec_count = register_count;
12551 while (register_count)
12553 record_buf_mem[register_count * 2 - 1] = start_address;
12554 record_buf_mem[register_count * 2 - 2] = 4;
12555 start_address = start_address + 4;
12558 record_buf[0] = reg_rn;
12559 record_buf[1] = ARM_PS_REGNUM;
12560 thumb2_insn_r->reg_rec_count = 2;
12564 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12566 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12568 return ARM_RECORD_SUCCESS;
12571 /* Handler for thumb2 load/store (dual/exclusive) and table branch instructions. */
12575 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12577 struct regcache *reg_cache = thumb2_insn_r->regcache;
12579 uint32_t reg_rd, reg_rn, offset_imm;
12580 uint32_t reg_dest1, reg_dest2;
12581 uint32_t address, offset_addr;
12582 uint32_t record_buf[8], record_buf_mem[8];
12583 uint32_t op1, op2, op3;
12586 ULONGEST u_regval[2];
12588 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12589 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12590 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12592 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12594 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12596 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12597 record_buf[0] = reg_dest1;
12598 record_buf[1] = ARM_PS_REGNUM;
12599 thumb2_insn_r->reg_rec_count = 2;
12602 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12604 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12605 record_buf[2] = reg_dest2;
12606 thumb2_insn_r->reg_rec_count = 3;
12611 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12612 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12614 if (0 == op1 && 0 == op2)
12616 /* Handle STREX. */
12617 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12618 address = u_regval[0] + (offset_imm * 4);
12619 record_buf_mem[0] = 4;
12620 record_buf_mem[1] = address;
12621 thumb2_insn_r->mem_rec_count = 1;
12622 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12623 record_buf[0] = reg_rd;
12624 thumb2_insn_r->reg_rec_count = 1;
12626 else if (1 == op1 && 0 == op2)
12628 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12629 record_buf[0] = reg_rd;
12630 thumb2_insn_r->reg_rec_count = 1;
12631 address = u_regval[0];
12632 record_buf_mem[1] = address;
12636 /* Handle STREXB. */
12637 record_buf_mem[0] = 1;
12638 thumb2_insn_r->mem_rec_count = 1;
12642 /* Handle STREXH. */
12643 record_buf_mem[0] = 2;
12644 thumb2_insn_r->mem_rec_count = 1;
12648 /* Handle STREXD. */
12649 address = u_regval[0];
12650 record_buf_mem[0] = 4;
12651 record_buf_mem[2] = 4;
12652 record_buf_mem[3] = address + 4;
12653 thumb2_insn_r->mem_rec_count = 2;
12658 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12660 if (bit (thumb2_insn_r->arm_insn, 24))
12662 if (bit (thumb2_insn_r->arm_insn, 23))
12663 offset_addr = u_regval[0] + (offset_imm * 4);
12665 offset_addr = u_regval[0] - (offset_imm * 4);
12667 address = offset_addr;
12670 address = u_regval[0];
12672 record_buf_mem[0] = 4;
12673 record_buf_mem[1] = address;
12674 record_buf_mem[2] = 4;
12675 record_buf_mem[3] = address + 4;
12676 thumb2_insn_r->mem_rec_count = 2;
12677 record_buf[0] = reg_rn;
12678 thumb2_insn_r->reg_rec_count = 1;
12682 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12684 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12686 return ARM_RECORD_SUCCESS;
12689 /* Handler for thumb2 data processing (shift register and modified immediate) instructions. */
12693 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12695 uint32_t reg_rd, op;
12696 uint32_t record_buf[8];
12698 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12699 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12701 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12703 record_buf[0] = ARM_PS_REGNUM;
12704 thumb2_insn_r->reg_rec_count = 1;
12708 record_buf[0] = reg_rd;
12709 record_buf[1] = ARM_PS_REGNUM;
12710 thumb2_insn_r->reg_rec_count = 2;
12713 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12715 return ARM_RECORD_SUCCESS;
12718 /* Generic handler for thumb2 instructions which affect the destination and PS registers. */
12722 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12725 uint32_t record_buf[8];
12727 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12729 record_buf[0] = reg_rd;
12730 record_buf[1] = ARM_PS_REGNUM;
12731 thumb2_insn_r->reg_rec_count = 2;
12733 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12735 return ARM_RECORD_SUCCESS;
12738 /* Handler for thumb2 branch and miscellaneous control instructions. */
12741 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12743 uint32_t op, op1, op2;
12744 uint32_t record_buf[8];
12746 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12747 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12748 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12750 /* Handle MSR insn. */
12751 if (!(op1 & 0x2) && 0x38 == op)
12755 /* CPSR is going to be changed. */
12756 record_buf[0] = ARM_PS_REGNUM;
12757 thumb2_insn_r->reg_rec_count = 1;
12761 arm_record_unsupported_insn(thumb2_insn_r);
12765 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
record_buf[0] = ARM_PS_REGNUM;
record_buf[1] = ARM_LR_REGNUM;
thumb2_insn_r->reg_rec_count = 2;
REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
return ARM_RECORD_SUCCESS;

/* Handler for thumb2 store single data item instructions. */
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
struct regcache *reg_cache = thumb2_insn_r->regcache;
uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
uint32_t address, offset_addr;
uint32_t record_buf[8], record_buf_mem[8];
ULONGEST u_regval[2];
op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
if (bit (thumb2_insn_r->arm_insn, 23))
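/* Bit 23 set selects the 12-bit immediate encodings (STRB, STRH and
   STR with imm12); the offset is always added to the base register.  */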
offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
offset_addr = u_regval[0] + offset_imm;
address = offset_addr;
if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
/* Handle STRB (register). */
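/* Register-offset form: the offset is Rm shifted left by the two-bit
   immediate in bits 4-5.  */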
reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
offset_addr = u_regval[1] << shift_imm;
address = u_regval[0] + offset_addr;
offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
if (bit (thumb2_insn_r->arm_insn, 10))
if (bit (thumb2_insn_r->arm_insn, 9))
offset_addr = u_regval[0] + offset_imm;
offset_addr = u_regval[0] - offset_imm;
address = offset_addr;
address = u_regval[0];
/* Store byte instructions. */
record_buf_mem[0] = 1;
/* Store half word instructions. */
record_buf_mem[0] = 2;
/* Store word instructions. */
record_buf_mem[0] = 4;
gdb_assert_not_reached ("no decoding pattern found");
record_buf_mem[1] = address;
thumb2_insn_r->mem_rec_count = 1;
record_buf[0] = reg_rn;
thumb2_insn_r->reg_rec_count = 1;
REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
return ARM_RECORD_SUCCESS;

/* Handler for thumb2 load memory hints instructions. */
thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
uint32_t record_buf[8];
uint32_t reg_rt, reg_rn;
reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
if (ARM_PC_REGNUM != reg_rt)
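/* Loads with writeback can update the base register, so Rt, Rn and
   the status register are all recorded conservatively here.  */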
record_buf[0] = reg_rt;
record_buf[1] = reg_rn;
record_buf[2] = ARM_PS_REGNUM;
thumb2_insn_r->reg_rec_count = 3;
REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
return ARM_RECORD_SUCCESS;
return ARM_RECORD_FAILURE;

/* Handler for thumb2 load word instructions. */
thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
uint32_t opcode1 = 0, opcode2 = 0;
uint32_t record_buf[8];
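/* Record the destination register (bits 12-15) and, conservatively,
   the status register.  */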
record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
record_buf[1] = ARM_PS_REGNUM;
thumb2_insn_r->reg_rec_count = 2;
REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
return ARM_RECORD_SUCCESS;

/* Handler for thumb2 long multiply, long multiply accumulate, and
   divide instructions. */
thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
uint32_t opcode1 = 0, opcode2 = 0;
uint32_t record_buf[8];
uint32_t reg_src1 = 0;
opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
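/* opcode1 (bits 20-22): values 0, 2 and 4-6 select the long multiply
   and long multiply-accumulate forms; values 1 and 3 select SDIV and
   UDIV.  */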
if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
/* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
record_buf[2] = ARM_PS_REGNUM;
thumb2_insn_r->reg_rec_count = 3;
else if (1 == opcode1 || 3 == opcode1)
/* Handle SDIV and UDIV. */
record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
record_buf[2] = ARM_PS_REGNUM;
thumb2_insn_r->reg_rec_count = 3;
return ARM_RECORD_FAILURE;
REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
return ARM_RECORD_SUCCESS;

/* Decodes thumb2 instruction type and invokes its record handler. */
static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
uint32_t op, op1, op2;
op = bit (thumb2_insn_r->arm_insn, 15);
op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
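/* After the halfword swap performed in decode_insn, op1 (bits 27-28)
   holds the major opcode field of the leading halfword, op2 (bits
   20-26) selects the encoding group within it, and op (bit 15) of the
   second halfword distinguishes the branch/control forms when
   op1 == 0x02.  */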
if (!(op2 & 0x64))
/* Load/store multiple instruction. */
return thumb2_record_ld_st_multiple (thumb2_insn_r);
else if (!((op2 & 0x64) ^ 0x04))
/* Load/store (dual/exclusive) and table branch instruction. */
return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
else if (!((op2 & 0x20) ^ 0x20))
/* Data-processing (shifted register). */
return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
else if (op2 & 0x40)
/* Co-processor instructions. */
arm_record_unsupported_insn (thumb2_insn_r);
else if (op1 == 0x02)
/* Branches and miscellaneous control instructions. */
return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
else if (op2 & 0x20)
/* Data-processing (plain binary immediate) instruction. */
return thumb2_record_ps_dest_generic (thumb2_insn_r);
/* Data-processing (modified immediate). */
return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
else if (op1 == 0x03)
if (!(op2 & 0x71))
/* Store single data item. */
return thumb2_record_str_single_data (thumb2_insn_r);
else if (!((op2 & 0x71) ^ 0x10))
/* Advanced SIMD or structure load/store instructions. */
return arm_record_unsupported_insn (thumb2_insn_r);
else if (!((op2 & 0x67) ^ 0x01))
/* Load byte, memory hints instruction. */
return thumb2_record_ld_mem_hints (thumb2_insn_r);
else if (!((op2 & 0x67) ^ 0x03))
/* Load halfword, memory hints instruction. */
return thumb2_record_ld_mem_hints (thumb2_insn_r);
else if (!((op2 & 0x67) ^ 0x05))
/* Load word instruction. */
return thumb2_record_ld_word (thumb2_insn_r);
else if (!((op2 & 0x70) ^ 0x20))
/* Data-processing (register) instruction. */
return thumb2_record_ps_dest_generic (thumb2_insn_r);
else if (!((op2 & 0x78) ^ 0x30))
/* Multiply, multiply accumulate, abs diff instruction. */
return thumb2_record_ps_dest_generic (thumb2_insn_r);
else if (!((op2 & 0x78) ^ 0x38))
/* Long multiply, long multiply accumulate, and divide. */
return thumb2_record_lmul_lmla_div (thumb2_insn_r);
else if (op2 & 0x40)
/* Co-processor instructions. */
return arm_record_unsupported_insn (thumb2_insn_r);

/* Extracts an arm/thumb/thumb2 insn depending on the size, and returns 0 on
   success and a positive value on failure. */
extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
gdb_byte buf[insn_size];
memset (&buf[0], 0, insn_size);
if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
gdbarch_byte_order (insn_record->gdbarch));
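/* The value is converted using the target byte order; for 32-bit
   thumb2 instructions the two halfwords are swapped afterwards in
   decode_insn.  */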
typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);

/* Decode arm/thumb insn depending on condition codes and opcodes; and
   dispatch it.  */
decode_insn (insn_decode_record *arm_record, record_type_t record_type,
uint32_t insn_size)
/* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm instruction. */
static const sti_arm_hdl_fp_t arm_handle_insn[8] =
arm_record_data_proc_misc_ld_str, /* 000. */
arm_record_data_proc_imm, /* 001. */
arm_record_ld_st_imm_offset, /* 010. */
arm_record_ld_st_reg_offset, /* 011. */
arm_record_ld_st_multiple, /* 100. */
arm_record_b_bl, /* 101. */
arm_record_unsupported_insn, /* 110. */
arm_record_coproc_data_proc /* 111. */
/* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb instruction. */
static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
thumb_record_shift_add_sub, /* 000. */
thumb_record_add_sub_cmp_mov, /* 001. */
thumb_record_ld_st_reg_offset, /* 010. */
thumb_record_ld_st_imm_offset, /* 011. */
thumb_record_ld_st_stack, /* 100. */
thumb_record_misc, /* 101. */
thumb_record_ldm_stm_swi, /* 110. */
thumb_record_branch /* 111. */
uint32_t ret = 0; /* return value: negative: failure, 0: success. */
uint32_t insn_id = 0;
if (extract_arm_insn (arm_record, insn_size))
printf_unfiltered (_("Process record: error reading memory at "
"addr %s len = %d.\n"),
paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
else if (ARM_RECORD == record_type)
arm_record->cond = bits (arm_record->arm_insn, 28, 31);
insn_id = bits (arm_record->arm_insn, 25, 27);
ret = arm_record_extension_space (arm_record);
/* If this insn has fallen into extension space
   then we need not decode it anymore. */
if (ret != -1 && !INSN_RECORDED (arm_record))
ret = arm_handle_insn[insn_id] (arm_record);
else if (THUMB_RECORD == record_type)
/* As thumb does not have condition codes, we set cond to -1. */
arm_record->cond = -1;
insn_id = bits (arm_record->arm_insn, 13, 15);
ret = thumb_handle_insn[insn_id] (arm_record);
else if (THUMB2_RECORD == record_type)
/* As thumb does not have condition codes, we set cond to -1. */
arm_record->cond = -1;
/* Swap the first half of the 32-bit thumb instruction with the second half. */
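/* In the little-endian case the leading halfword ends up in the low
   16 bits after extraction, while the decode masks in
   thumb2_record_decode_insn_handler expect it in bits 16-31; the
   rotate by 16 puts it there.  */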
arm_record->arm_insn
= (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
insn_id = thumb2_record_decode_insn_handler (arm_record);
if (insn_id != ARM_RECORD_SUCCESS)
arm_record_unsupported_insn (arm_record);
/* Throw assertion. */
gdb_assert_not_reached ("not a valid instruction, could not decode");

/* Cleans up local record registers and memory allocations. */
deallocate_reg_mem (insn_decode_record *record)
xfree (record->arm_regs);
xfree (record->arm_mems);

/* Parse the current instruction and record the values of the registers and
   memory that will be changed by the current instruction to
   "record_arch_list".  Return -1 if something is wrong. */
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
CORE_ADDR insn_addr)
enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
uint32_t no_of_rec = 0;
uint32_t ret = 0; /* return value: -1: record failure; 0: success. */
ULONGEST t_bit = 0, insn_id = 0;
ULONGEST u_regval = 0;
insn_decode_record arm_record;
memset (&arm_record, 0, sizeof (insn_decode_record));
arm_record.regcache = regcache;
arm_record.this_addr = insn_addr;
arm_record.gdbarch = gdbarch;
if (record_debug > 1)
fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
paddress (gdbarch, arm_record.this_addr));
if (extract_arm_insn (&arm_record, 2))
printf_unfiltered (_("Process record: error reading memory at "
"addr %s len = %d.\n"),
paddress (arm_record.gdbarch,
arm_record.this_addr), 2);
/* Check whether the instruction is a thumb or an arm one. */
t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
if (!(u_regval & t_bit))
/* We are decoding arm insn. */
ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
insn_id = bits (arm_record.arm_insn, 11, 15);
/* Is it a 32-bit thumb2 instruction?  Bits 15-11 are 0b11101, 0b11110
   or 0b11111 for the 32-bit encodings. */
if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
ret = decode_insn (&arm_record, THUMB2_RECORD,
THUMB2_INSN_SIZE_BYTES);
/* We are decoding thumb insn. */
ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
/* Record registers. */
record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
if (arm_record.arm_regs)
for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
if (record_full_arch_list_add_reg
(arm_record.regcache, arm_record.arm_regs[no_of_rec]))
/* Record memories. */
if (arm_record.arm_mems)
for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
if (record_full_arch_list_add_mem
((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
arm_record.arm_mems[no_of_rec].len))
if (record_full_arch_list_add_end ())
deallocate_reg_mem (&arm_record);