1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988, 1989, 1991, 1992, 1993, 1995, 1996, 1998, 1999, 2000,
4 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
5 Free Software Foundation, Inc.
7 This file is part of GDB.
9 This program is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 3 of the License, or
12 (at your option) any later version.
14 This program is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper () */
29 #include "gdb_string.h"
30 #include "dis-asm.h" /* For register styles. */
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
43 #include "target-descriptions.h"
44 #include "user-regs.h"
47 #include "gdb/sim-arm.h"
50 #include "coff/internal.h"
53 #include "gdb_assert.h"
56 #include "features/arm-with-m.c"
60 /* Macros for setting and testing a bit in a minimal symbol that marks
61 it as a Thumb function. The MSB of the minimal symbol's "info" field
62 is used for this purpose.
64 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
65 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
67 #define MSYMBOL_SET_SPECIAL(msym) \
68 MSYMBOL_TARGET_FLAG_1 (msym) = 1
70 #define MSYMBOL_IS_SPECIAL(msym) \
71 MSYMBOL_TARGET_FLAG_1 (msym)
73 /* Per-objfile data used for mapping symbols. */
74 static const struct objfile_data *arm_objfile_data_key;
76 struct arm_mapping_symbol
81 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
82 DEF_VEC_O(arm_mapping_symbol_s);
84 struct arm_per_objfile
86 VEC(arm_mapping_symbol_s) **section_maps;
89 /* The list of available "set arm ..." and "show arm ..." commands. */
90 static struct cmd_list_element *setarmcmdlist = NULL;
91 static struct cmd_list_element *showarmcmdlist = NULL;
93 /* The type of floating-point to use. Keep this in sync with enum
94 arm_float_model, and the help string in _initialize_arm_tdep. */
95 static const char *fp_model_strings[] =
105 /* A variable that can be configured by the user. */
106 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
107 static const char *current_fp_model = "auto";
109 /* The ABI to use. Keep this in sync with arm_abi_kind. */
110 static const char *arm_abi_strings[] =
118 /* A variable that can be configured by the user. */
119 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
120 static const char *arm_abi_string = "auto";
122 /* The execution mode to assume. */
123 static const char *arm_mode_strings[] =
130 static const char *arm_fallback_mode_string = "auto";
131 static const char *arm_force_mode_string = "auto";
133 /* Number of different reg name sets (options). */
134 static int num_disassembly_options;
136 /* The standard register names, and all the valid aliases for them. */
141 } arm_register_aliases[] = {
142 /* Basic register numbers. */
159 /* Synonyms (argument and variable registers). */
172 /* Other platform-specific names for r9. */
180 /* Names used by GCC (not listed in the ARM EABI). */
183 /* A special name from the older ATPCS. */
187 static const char *const arm_register_names[] =
188 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
189 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
190 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
191 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
192 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
193 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
194 "fps", "cpsr" }; /* 24 25 */
196 /* Valid register name styles. */
197 static const char **valid_disassembly_styles;
199 /* Disassembly style to use. Default to "std" register names. */
200 static const char *disassembly_style;
202 /* This is used to keep the bfd arch_info in sync with the disassembly
203 style. */
204 static void set_disassembly_style_sfunc(char *, int,
205 struct cmd_list_element *);
206 static void set_disassembly_style (void);
208 static void convert_from_extended (const struct floatformat *, const void *,
210 static void convert_to_extended (const struct floatformat *, void *,
213 static void arm_neon_quad_read (struct gdbarch *gdbarch,
214 struct regcache *regcache,
215 int regnum, gdb_byte *buf);
216 static void arm_neon_quad_write (struct gdbarch *gdbarch,
217 struct regcache *regcache,
218 int regnum, const gdb_byte *buf);
220 struct arm_prologue_cache
222 /* The stack pointer at the time this frame was created; i.e. the
223 caller's stack pointer when this function was called. It is used
224 to identify this frame. */
227 /* The frame base for this frame is just prev_sp - frame size.
228 FRAMESIZE is the distance from the frame pointer to the
229 initial stack pointer. */
233 /* The register used to hold the frame pointer for this frame. */
236 /* Saved register offsets. */
237 struct trad_frame_saved_reg *saved_regs;
240 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
241 CORE_ADDR prologue_start,
242 CORE_ADDR prologue_end,
243 struct arm_prologue_cache *cache);
245 /* Architecture version for displaced stepping. This affects the behaviour of
246 certain instructions, and really should not be hard-wired. */
248 #define DISPLACED_STEPPING_ARCH_VERSION 5
250 /* Addresses for calling Thumb functions have bit 0 set.
251 Here are some macros to test, set, or clear bit 0 of addresses. */
252 #define IS_THUMB_ADDR(addr) ((addr) & 1)
253 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
254 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
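/* Illustrative note (added, not part of the original source): for an even
   code address such as 0x8000, MAKE_THUMB_ADDR (0x8000) yields 0x8001,
   IS_THUMB_ADDR (0x8001) is 1, and UNMAKE_THUMB_ADDR (0x8001) recovers
   0x8000. ARM-mode addresses keep bit 0 clear. */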
256 /* Set to true if the 32-bit mode is in use. */
260 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
263 arm_psr_thumb_bit (struct gdbarch *gdbarch)
265 if (gdbarch_tdep (gdbarch)->is_m)
271 /* Determine if FRAME is executing in Thumb mode. */
274 arm_frame_is_thumb (struct frame_info *frame)
277 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
279 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
280 directly (from a signal frame or dummy frame) or by interpreting
281 the saved LR (from a prologue or DWARF frame). So consult it and
282 trust the unwinders. */
283 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
285 return (cpsr & t_bit) != 0;
288 /* Callback for VEC_lower_bound. */
291 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
292 const struct arm_mapping_symbol *rhs)
294 return lhs->value < rhs->value;
297 /* Search for the mapping symbol covering MEMADDR. If one is found,
298 return its type. Otherwise, return 0. If START is non-NULL,
299 set *START to the location of the mapping symbol. */
302 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
304 struct obj_section *sec;
306 /* If there are mapping symbols, consult them. */
307 sec = find_pc_section (memaddr);
310 struct arm_per_objfile *data;
311 VEC(arm_mapping_symbol_s) *map;
312 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
316 data = objfile_data (sec->objfile, arm_objfile_data_key);
319 map = data->section_maps[sec->the_bfd_section->index];
320 if (!VEC_empty (arm_mapping_symbol_s, map))
322 struct arm_mapping_symbol *map_sym;
324 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
325 arm_compare_mapping_symbols);
327 /* VEC_lower_bound finds the earliest ordered insertion
328 point. If the following symbol starts at this exact
329 address, we use that; otherwise, the preceding
330 mapping symbol covers this address. */
331 if (idx < VEC_length (arm_mapping_symbol_s, map))
333 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
334 if (map_sym->value == map_key.value)
337 *start = map_sym->value + obj_section_addr (sec);
338 return map_sym->type;
344 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
346 *start = map_sym->value + obj_section_addr (sec);
347 return map_sym->type;
356 static CORE_ADDR arm_get_next_pc_raw (struct frame_info *frame,
357 CORE_ADDR pc, int insert_bkpt);
359 /* Determine if the program counter specified in MEMADDR is in a Thumb
360 function. This function should be called for addresses unrelated to
361 any executing frame; otherwise, prefer arm_frame_is_thumb. */
364 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
366 struct obj_section *sec;
367 struct minimal_symbol *sym;
370 /* If bit 0 of the address is set, assume this is a Thumb address. */
371 if (IS_THUMB_ADDR (memaddr))
374 /* If the user wants to override the symbol table, let them. */
375 if (strcmp (arm_force_mode_string, "arm") == 0)
377 if (strcmp (arm_force_mode_string, "thumb") == 0)
380 /* ARM v6-M and v7-M are always in Thumb mode. */
381 if (gdbarch_tdep (gdbarch)->is_m)
384 /* If there are mapping symbols, consult them. */
385 type = arm_find_mapping_symbol (memaddr, NULL);
389 /* Thumb functions have a "special" bit set in minimal symbols. */
390 sym = lookup_minimal_symbol_by_pc (memaddr);
391 if (sym)
392 return (MSYMBOL_IS_SPECIAL (sym));
394 /* If the user wants to override the fallback mode, let them. */
395 if (strcmp (arm_fallback_mode_string, "arm") == 0)
397 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
400 /* If we couldn't find any symbol, but we're talking to a running
401 target, then trust the current value of $cpsr. This lets
402 "display/i $pc" always show the correct mode (though if there is
403 a symbol table we will not reach here, so it still may not be
404 displayed in the mode it will be executed).
406 As a further heuristic, if we detect that we are doing a single-step we
407 see what state executing the current instruction ends up with us being
408 in. */
409 if (target_has_registers)
411 struct frame_info *current_frame = get_current_frame ();
412 CORE_ADDR current_pc = get_frame_pc (current_frame);
413 int is_thumb = arm_frame_is_thumb (current_frame);
415 if (memaddr == current_pc)
419 struct gdbarch *gdbarch = get_frame_arch (current_frame);
420 next_pc = arm_get_next_pc_raw (current_frame, current_pc, FALSE);
421 if (memaddr == gdbarch_addr_bits_remove (gdbarch, next_pc))
422 return IS_THUMB_ADDR (next_pc);
428 /* Otherwise we're out of luck; we assume ARM. */
432 /* Remove useless bits from addresses in a running program. */
434 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
437 return UNMAKE_THUMB_ADDR (val);
439 return (val & 0x03fffffc);
442 /* When reading symbols, we need to zap the low bit of the address,
443 which may be set to 1 for Thumb functions. */
445 arm_smash_text_address (struct gdbarch *gdbarch, CORE_ADDR val)
450 /* Return 1 if PC is the start of a compiler helper function which
451 can be safely ignored during prologue skipping. */
453 skip_prologue_function (CORE_ADDR pc)
455 struct minimal_symbol *msym;
458 msym = lookup_minimal_symbol_by_pc (pc);
459 if (msym == NULL || SYMBOL_VALUE_ADDRESS (msym) != pc)
462 name = SYMBOL_LINKAGE_NAME (msym);
466 /* The GNU linker's Thumb call stub to foo is named
467 __foo_from_thumb. */
468 if (strstr (name, "_from_thumb") != NULL)
471 /* On soft-float targets, __truncdfsf2 is called to convert promoted
472 arguments to their argument types in non-prototyped
473 functions. */
474 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
476 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
479 /* Internal functions related to thread-local storage. */
480 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
482 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
488 /* Support routines for instruction parsing. */
489 #define submask(x) ((1L << ((x) + 1)) - 1)
490 #define bit(obj,st) (((obj) >> (st)) & 1)
491 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
492 #define sbits(obj,st,fn) \
493 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
494 #define BranchDest(addr,instr) \
495 ((CORE_ADDR) (((long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
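/* Worked example (added for illustration, not part of the original source):
   for a BL instruction 0xEB000001 fetched at address 0x8000,
   bits (0xEB000001, 0, 23) is 1 and bit 23 is clear, so
   sbits (0xEB000001, 0, 23) is 1; BranchDest then yields
   0x8000 + 8 + (1 << 2) == 0x800C, the pipeline-adjusted branch target. */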
497 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
500 thumb_expand_immediate (unsigned int imm)
502 unsigned int count = imm >> 7;
510 return (imm & 0xff) | ((imm & 0xff) << 16);
512 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
514 return (imm & 0xff) | ((imm & 0xff) << 8)
515 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
518 return (0x80 | (imm & 0x7f)) << (32 - count);
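/* Worked examples (added for illustration, not part of the original source):
   thumb_expand_immediate (0x0AB) has count == 1, so the low byte is returned
   unmodified as 0x000000AB; thumb_expand_immediate (0x4AB) has count == 9,
   so the result is (0x80 | 0x2B) << (32 - 9) == 0xAB << 23 == 0x55800000,
   i.e. the 8-bit value 0xAB rotated right by 9 bits. */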
521 /* Return 1 if the 16-bit Thumb instruction INST might change
522 control flow, 0 otherwise. */
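/* For illustration (added, not part of the original source): 0xBD10
   (pop {r4, pc}) matches the 0xbd00 pattern below and therefore changes the
   PC, while 0xB410 (push {r4}) matches none of the patterns and does not. */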
525 thumb_instruction_changes_pc (unsigned short inst)
527 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
530 if ((inst & 0xf000) == 0xd000) /* conditional branch */
533 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
536 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
539 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
545 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
546 might change control flow, 0 otherwise. */
549 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
551 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
553 /* Branches and miscellaneous control instructions. */
555 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
560 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
562 /* SUBS PC, LR, #imm8. */
565 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
567 /* Conditional branch. */
574 if ((inst1 & 0xfe50) == 0xe810)
576 /* Load multiple or RFE. */
578 if (bit (inst1, 7) && !bit (inst1, 8))
584 else if (!bit (inst1, 7) && bit (inst1, 8))
590 else if (bit (inst1, 7) && bit (inst1, 8))
595 else if (!bit (inst1, 7) && !bit (inst1, 8))
604 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
606 /* MOV PC or MOVS PC. */
610 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
613 if (bits (inst1, 0, 3) == 15)
619 if ((inst2 & 0x0fc0) == 0x0000)
625 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
631 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
640 /* Analyze a Thumb prologue, looking for a recognizable stack frame
641 and frame pointer. Scan until we encounter a store that could
642 clobber the stack frame unexpectedly, or an unknown instruction.
643 Return the last address which is definitely safe to skip for an
644 initial breakpoint. */
647 thumb_analyze_prologue (struct gdbarch *gdbarch,
648 CORE_ADDR start, CORE_ADDR limit,
649 struct arm_prologue_cache *cache)
651 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
652 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
655 struct pv_area *stack;
656 struct cleanup *back_to;
658 CORE_ADDR unrecognized_pc = 0;
660 for (i = 0; i < 16; i++)
661 regs[i] = pv_register (i, 0);
662 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
663 back_to = make_cleanup_free_pv_area (stack);
665 while (start < limit)
669 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
671 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
676 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
679 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
680 whether to save LR (R14). */
681 mask = (insn & 0xff) | ((insn & 0x100) << 6);
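/* Worked example (added for illustration, not part of the original source):
   push {r4, r5, r7, lr} encodes as 0xB5B0; insn & 0xff == 0xB0 selects
   R4, R5 and R7, and (insn & 0x100) << 6 moves the LR bit up to bit 14,
   giving mask == 0x40B0. */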
683 /* Calculate offsets of saved R0-R7 and LR. */
684 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
685 if (mask & (1 << regno))
687 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
689 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
692 else if ((insn & 0xff00) == 0xb000) /* add sp, #simm OR
693 sub sp, #simm */
695 offset = (insn & 0x7f) << 2; /* get scaled offset */
696 if (insn & 0x80) /* Check for SUB. */
697 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
700 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
703 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
704 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
706 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
707 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
708 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
710 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
711 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
712 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
714 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
715 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
716 && pv_is_constant (regs[bits (insn, 3, 5)]))
717 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
718 regs[bits (insn, 6, 8)]);
719 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
720 && pv_is_constant (regs[bits (insn, 3, 6)]))
722 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
723 int rm = bits (insn, 3, 6);
724 regs[rd] = pv_add (regs[rd], regs[rm]);
726 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
728 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
729 int src_reg = (insn & 0x78) >> 3;
730 regs[dst_reg] = regs[src_reg];
732 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
734 /* Handle stores to the stack. Normally pushes are used,
735 but with GCC -mtpcs-frame, there may be other stores
736 in the prologue to create the frame. */
737 int regno = (insn >> 8) & 0x7;
740 offset = (insn & 0xff) << 2;
741 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
743 if (pv_area_store_would_trash (stack, addr))
746 pv_area_store (stack, addr, 4, regs[regno]);
748 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
750 int rd = bits (insn, 0, 2);
751 int rn = bits (insn, 3, 5);
754 offset = bits (insn, 6, 10) << 2;
755 addr = pv_add_constant (regs[rn], offset);
757 if (pv_area_store_would_trash (stack, addr))
760 pv_area_store (stack, addr, 4, regs[rd]);
762 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
763 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 /* Ignore stores of argument registers to the stack. */
767 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 /* Ignore block loads from the stack, potentially copying
770 parameters from memory. */
772 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
773 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
774 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
775 /* Similarly ignore single loads from the stack. */
777 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
778 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
779 /* Skip register copies, i.e. saves to another register
780 instead of the stack. */
782 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
783 /* Recognize constant loads; even with small stacks these are necessary
784 on Thumb. */
785 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
786 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
788 /* Constant pool loads, for the same reason. */
789 unsigned int constant;
792 loc = start + 4 + bits (insn, 0, 7) * 4;
793 constant = read_memory_unsigned_integer (loc, 4, byte_order);
794 regs[bits (insn, 8, 10)] = pv_constant (constant);
796 else if ((insn & 0xe000) == 0xe000)
798 unsigned short inst2;
800 inst2 = read_memory_unsigned_integer (start + 2, 2,
801 byte_order_for_code);
803 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
805 /* BL, BLX. Allow some special function calls when
806 skipping the prologue; GCC generates these before
807 storing arguments to the stack. */
809 int j1, j2, imm1, imm2;
811 imm1 = sbits (insn, 0, 10);
812 imm2 = bits (inst2, 0, 10);
813 j1 = bit (inst2, 13);
814 j2 = bit (inst2, 11);
816 offset = ((imm1 << 12) + (imm2 << 1));
817 offset ^= ((!j2) << 22) | ((!j1) << 23);
819 nextpc = start + 4 + offset;
820 /* For BLX make sure to clear the low bits. */
821 if (bit (inst2, 12) == 0)
822 nextpc = nextpc & 0xfffffffc;
824 if (!skip_prologue_function (nextpc))
828 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!}, { registers } */
829 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
831 pv_t addr = regs[bits (insn, 0, 3)];
834 if (pv_area_store_would_trash (stack, addr))
837 /* Calculate offsets of saved registers. */
838 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
839 if (inst2 & (1 << regno))
841 addr = pv_add_constant (addr, -4);
842 pv_area_store (stack, addr, 4, regs[regno]);
846 regs[bits (insn, 0, 3)] = addr;
849 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2, [Rn, #+/-imm]{!} */
850 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
852 int regno1 = bits (inst2, 12, 15);
853 int regno2 = bits (inst2, 8, 11);
854 pv_t addr = regs[bits (insn, 0, 3)];
856 offset = inst2 & 0xff;
858 addr = pv_add_constant (addr, offset);
860 addr = pv_add_constant (addr, -offset);
862 if (pv_area_store_would_trash (stack, addr))
865 pv_area_store (stack, addr, 4, regs[regno1]);
866 pv_area_store (stack, pv_add_constant (addr, 4),
870 regs[bits (insn, 0, 3)] = addr;
873 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
874 && (inst2 & 0x0c00) == 0x0c00
875 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
877 int regno = bits (inst2, 12, 15);
878 pv_t addr = regs[bits (insn, 0, 3)];
880 offset = inst2 & 0xff;
882 addr = pv_add_constant (addr, offset);
884 addr = pv_add_constant (addr, -offset);
886 if (pv_area_store_would_trash (stack, addr))
889 pv_area_store (stack, addr, 4, regs[regno]);
892 regs[bits (insn, 0, 3)] = addr;
895 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
896 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
898 int regno = bits (inst2, 12, 15);
901 offset = inst2 & 0xfff;
902 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
904 if (pv_area_store_would_trash (stack, addr))
907 pv_area_store (stack, addr, 4, regs[regno]);
910 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
911 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
912 /* Ignore stores of argument registers to the stack. */
915 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
916 && (inst2 & 0x0d00) == 0x0c00
917 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
918 /* Ignore stores of argument registers to the stack. */
921 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!], { registers } */
922 && (inst2 & 0x8000) == 0x0000
923 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
924 /* Ignore block loads from the stack, potentially copying
925 parameters from memory. */
928 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2, [Rn, #+/-imm] */
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
930 /* Similarly ignore dual loads from the stack. */
933 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
934 && (inst2 & 0x0d00) == 0x0c00
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
936 /* Similarly ignore single loads from the stack. */
939 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
940 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
941 /* Similarly ignore single loads from the stack. */
944 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
945 && (inst2 & 0x8000) == 0x0000)
947 unsigned int imm = ((bits (insn, 10, 10) << 11)
948 | (bits (inst2, 12, 14) << 8)
949 | bits (inst2, 0, 7));
951 regs[bits (inst2, 8, 11)]
952 = pv_add_constant (regs[bits (insn, 0, 3)],
953 thumb_expand_immediate (imm));
956 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
957 && (inst2 & 0x8000) == 0x0000)
959 unsigned int imm = ((bits (insn, 10, 10) << 11)
960 | (bits (inst2, 12, 14) << 8)
961 | bits (inst2, 0, 7));
963 regs[bits (inst2, 8, 11)]
964 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
967 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
968 && (inst2 & 0x8000) == 0x0000)
970 unsigned int imm = ((bits (insn, 10, 10) << 11)
971 | (bits (inst2, 12, 14) << 8)
972 | bits (inst2, 0, 7));
974 regs[bits (inst2, 8, 11)]
975 = pv_add_constant (regs[bits (insn, 0, 3)],
976 - (CORE_ADDR) thumb_expand_immediate (imm));
979 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
980 && (inst2 & 0x8000) == 0x0000)
982 unsigned int imm = ((bits (insn, 10, 10) << 11)
983 | (bits (inst2, 12, 14) << 8)
984 | bits (inst2, 0, 7));
986 regs[bits (inst2, 8, 11)]
987 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
990 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
992 unsigned int imm = ((bits (insn, 10, 10) << 11)
993 | (bits (inst2, 12, 14) << 8)
994 | bits (inst2, 0, 7));
996 regs[bits (inst2, 8, 11)]
997 = pv_constant (thumb_expand_immediate (imm));
1000 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1002 unsigned int imm = ((bits (insn, 0, 3) << 12)
1003 | (bits (insn, 10, 10) << 11)
1004 | (bits (inst2, 12, 14) << 8)
1005 | bits (inst2, 0, 7));
1007 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1010 else if (insn == 0xea5f /* mov.w Rd,Rm */
1011 && (inst2 & 0xf0f0) == 0)
1013 int dst_reg = (inst2 & 0x0f00) >> 8;
1014 int src_reg = inst2 & 0xf;
1015 regs[dst_reg] = regs[src_reg];
1018 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1020 /* Constant pool loads. */
1021 unsigned int constant;
1024 offset = bits (insn, 0, 11);
1026 loc = start + 4 + offset;
1028 loc = start + 4 - offset;
1030 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1031 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1034 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1036 /* Constant pool loads. */
1037 unsigned int constant;
1040 offset = bits (insn, 0, 7) << 2;
1042 loc = start + 4 + offset;
1044 loc = start + 4 - offset;
1046 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1047 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1049 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1050 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1053 else if (thumb2_instruction_changes_pc (insn, inst2))
1055 /* Don't scan past anything that might change control flow. */
1060 /* The optimizer might shove anything into the prologue,
1061 so we just skip what we don't recognize. */
1062 unrecognized_pc = start;
1067 else if (thumb_instruction_changes_pc (insn))
1069 /* Don't scan past anything that might change control flow. */
1074 /* The optimizer might shove anything into the prologue,
1075 so we just skip what we don't recognize. */
1076 unrecognized_pc = start;
1083 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1084 paddress (gdbarch, start));
1086 if (unrecognized_pc == 0)
1087 unrecognized_pc = start;
1091 do_cleanups (back_to);
1092 return unrecognized_pc;
1095 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1097 /* Frame pointer is fp. Frame size is constant. */
1098 cache->framereg = ARM_FP_REGNUM;
1099 cache->framesize = -regs[ARM_FP_REGNUM].k;
1101 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1103 /* Frame pointer is r7. Frame size is constant. */
1104 cache->framereg = THUMB_FP_REGNUM;
1105 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1107 else if (pv_is_register (regs[ARM_SP_REGNUM], ARM_SP_REGNUM))
1109 /* Try the stack pointer... this is a bit desperate. */
1110 cache->framereg = ARM_SP_REGNUM;
1111 cache->framesize = -regs[ARM_SP_REGNUM].k;
1115 /* We're just out of luck. We don't know where the frame is. */
1116 cache->framereg = -1;
1117 cache->framesize = 0;
1120 for (i = 0; i < 16; i++)
1121 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1122 cache->saved_regs[i].addr = offset;
1124 do_cleanups (back_to);
1125 return unrecognized_pc;
1128 /* Advance the PC across any function entry prologue instructions to
1129 reach some "real" code.
1131 The APCS (ARM Procedure Call Standard) defines the following
1132 prologue:
1134 mov ip, sp
1135 [stmfd sp!, {a1,a2,a3,a4}]
1136 stmfd sp!, {...,fp,ip,lr,pc}
1137 [stfe f7, [sp, #-12]!]
1138 [stfe f6, [sp, #-12]!]
1139 [stfe f5, [sp, #-12]!]
1140 [stfe f4, [sp, #-12]!]
1141 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn */
1144 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1146 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1149 CORE_ADDR func_addr, limit_pc;
1150 struct symtab_and_line sal;
1152 /* See if we can determine the end of the prologue via the symbol table.
1153 If so, then return either PC, or the PC after the prologue, whichever
1154 is greater. */
1155 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1157 CORE_ADDR post_prologue_pc
1158 = skip_prologue_using_sal (gdbarch, func_addr);
1159 struct symtab *s = find_pc_symtab (func_addr);
1161 /* GCC always emits a line note before the prologue and another
1162 one after, even if the two are at the same address or on the
1163 same line. Take advantage of this so that we do not need to
1164 know every instruction that might appear in the prologue. We
1165 will have producer information for most binaries; if it is
1166 missing (e.g. for -gstabs), assume the GNU tools. */
1167 if (post_prologue_pc
1168 && (s == NULL
1169 || s->producer == NULL
1170 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0))
1171 return post_prologue_pc;
1173 if (post_prologue_pc != 0)
1175 CORE_ADDR analyzed_limit;
1177 /* For non-GCC compilers, make sure the entire line is an
1178 acceptable prologue; GDB will round this function's
1179 return value up to the end of the following line so we
1180 can not skip just part of a line (and we do not want to).
1182 RealView does not treat the prologue specially, but does
1183 associate prologue code with the opening brace; so this
1184 lets us skip the first line if we think it is the opening
1185 brace. */
1186 if (arm_pc_is_thumb (gdbarch, func_addr))
1187 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1188 post_prologue_pc, NULL);
1190 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1191 post_prologue_pc, NULL);
1193 if (analyzed_limit != post_prologue_pc)
1194 return func_addr;
1196 return post_prologue_pc;
1200 /* Can't determine prologue from the symbol table, need to examine
1201 instructions. */
1203 /* Find an upper limit on the function prologue using the debug
1204 information. If the debug information could not be used to provide
1205 that bound, then use an arbitrary large number as the upper bound. */
1206 /* Like arm_scan_prologue, stop no later than pc + 64. */
1207 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1208 if (limit_pc == 0)
1209 limit_pc = pc + 64; /* Magic. */
1212 /* Check if this is Thumb code. */
1213 if (arm_pc_is_thumb (gdbarch, pc))
1214 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1216 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1218 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1220 /* "mov ip, sp" is no longer a required part of the prologue. */
1221 if (inst == 0xe1a0c00d) /* mov ip, sp */
1224 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1227 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1230 /* Some prologues begin with "str lr, [sp, #-4]!". */
1231 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1234 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1237 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1240 /* Any insns after this point may float into the code, if it makes
1241 for better instruction scheduling, so we skip them only if we
1242 find them, but still consider the function to be frame-ful. */
1244 /* We may have either one sfmfd instruction here, or several stfe
1245 insns, depending on the version of floating point code we
1246 support. */
1247 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1250 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1253 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1256 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1259 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1260 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1261 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1264 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1265 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1266 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1269 /* Un-recognized instruction; stop scanning. */
1273 return skip_pc; /* End of prologue */
1277 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1278 This function decodes a Thumb function prologue to determine:
1279 1) the size of the stack frame
1280 2) which registers are saved on it
1281 3) the offsets of saved regs
1282 4) the offset from the stack pointer to the frame pointer
1284 A typical Thumb function prologue would create this stack frame
1285 (offsets relative to FP)
1286 old SP -> 24 stack parameters
1287 20 LR
1288 16 R7
1289 R7 -> 0 local variables (16 bytes)
1290 SP -> -12 additional stack space (12 bytes)
1291 The frame size would thus be 36 bytes, and the frame offset would be
1292 12 bytes. The frame register is R7.
1294 The comments for thumb_analyze_prologue() describe the algorithm we use
1295 to detect the end of the prologue. */
1299 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1300 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1302 CORE_ADDR prologue_start;
1303 CORE_ADDR prologue_end;
1304 CORE_ADDR current_pc;
1306 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1309 /* See comment in arm_scan_prologue for an explanation of
1310 this heuristic. */
1311 if (prologue_end > prologue_start + 64)
1313 prologue_end = prologue_start + 64;
1317 /* We're in the boondocks: we have no idea where the start of the
1318 function is. */
1321 prologue_end = min (prologue_end, prev_pc);
1323 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1326 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1329 arm_instruction_changes_pc (uint32_t this_instr)
1331 if (bits (this_instr, 28, 31) == INST_NV)
1332 /* Unconditional instructions. */
1333 switch (bits (this_instr, 24, 27))
1337 /* Branch with Link and change to Thumb. */
1342 /* Coprocessor register transfer. */
1343 if (bits (this_instr, 12, 15) == 15)
1344 error (_("Invalid update to pc in instruction"));
1350 switch (bits (this_instr, 25, 27))
1353 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1355 /* Multiplies and extra load/stores. */
1356 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1357 /* Neither multiplies nor extension load/stores are allowed
1358 to modify PC. */
1361 /* Otherwise, miscellaneous instructions. */
1363 /* BX <reg>, BXJ <reg>, BLX <reg> */
1364 if (bits (this_instr, 4, 27) == 0x12fff1
1365 || bits (this_instr, 4, 27) == 0x12fff2
1366 || bits (this_instr, 4, 27) == 0x12fff3)
1369 /* Other miscellaneous instructions are unpredictable if they
1370 modify PC. */
1373 /* Data processing instruction. Fall through. */
1376 if (bits (this_instr, 12, 15) == 15)
1383 /* Media instructions and architecturally undefined instructions. */
1384 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1388 if (bit (this_instr, 20) == 0)
1392 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1398 /* Load/store multiple. */
1399 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1405 /* Branch and branch with link. */
1410 /* Coprocessor transfers or SWIs can not affect PC. */
1414 internal_error (__FILE__, __LINE__, "bad value in switch");
1418 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1419 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1420 fill it in. Return the first address not recognized as a prologue
1421 instruction.
1423 We recognize all the instructions typically found in ARM prologues,
1424 plus harmless instructions which can be skipped (either for analysis
1425 purposes, or a more restrictive set that can be skipped when finding
1426 the end of the prologue). */
1429 arm_analyze_prologue (struct gdbarch *gdbarch,
1430 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1431 struct arm_prologue_cache *cache)
1433 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1434 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1436 CORE_ADDR offset, current_pc;
1437 pv_t regs[ARM_FPS_REGNUM];
1438 struct pv_area *stack;
1439 struct cleanup *back_to;
1440 int framereg, framesize;
1441 CORE_ADDR unrecognized_pc = 0;
1443 /* Search the prologue looking for instructions that set up the
1444 frame pointer, adjust the stack pointer, and save registers.
1446 Be careful, however, and if it doesn't look like a prologue,
1447 don't try to scan it. If, for instance, a frameless function
1448 begins with stmfd sp!, then we will tell ourselves there is
1449 a frame, which will confuse stack traceback, as well as "finish"
1450 and other operations that rely on a knowledge of the stack
1451 traceback. */
1453 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1454 regs[regno] = pv_register (regno, 0);
1455 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1456 back_to = make_cleanup_free_pv_area (stack);
1458 for (current_pc = prologue_start;
1459 current_pc < prologue_end;
1463 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1465 if (insn == 0xe1a0c00d) /* mov ip, sp */
1467 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1470 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1471 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1473 unsigned imm = insn & 0xff; /* immediate value */
1474 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1475 int rd = bits (insn, 12, 15);
1476 imm = (imm >> rot) | (imm << (32 - rot));
1477 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
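/* Worked example (added for illustration, not part of the original source):
   "add rd, rn, #0x400" encodes its operand as imm8 == 0x01 with rotate
   field 0xB, so rot == 22 and the rotation above turns imm into
   1 ror 22 == 1 << 10 == 0x400. */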
1480 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1481 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1483 unsigned imm = insn & 0xff; /* immediate value */
1484 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1485 int rd = bits (insn, 12, 15);
1486 imm = (imm >> rot) | (imm << (32 - rot));
1487 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1490 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd, [sp, #-4]! */
1492 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1494 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1495 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1496 regs[bits (insn, 12, 15)]);
1499 else if ((insn & 0xffff0000) == 0xe92d0000)
1500 /* stmfd sp!, {..., fp, ip, lr, pc}
1501 or
1502 stmfd sp!, {a1, a2, a3, a4} */
1504 int mask = insn & 0xffff;
1506 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1509 /* Calculate offsets of saved registers. */
1510 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1511 if (mask & (1 << regno))
1513 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1514 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1517 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1518 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1519 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1521 /* No need to add this to saved_regs -- it's just an arg reg. */
1524 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1525 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1526 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1528 /* No need to add this to saved_regs -- it's just an arg reg. */
1531 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn, { registers } */
1532 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1534 /* No need to add this to saved_regs -- it's just arg regs. */
1537 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1539 unsigned imm = insn & 0xff; /* immediate value */
1540 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1541 imm = (imm >> rot) | (imm << (32 - rot));
1542 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1544 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1546 unsigned imm = insn & 0xff; /* immediate value */
1547 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1548 imm = (imm >> rot) | (imm << (32 - rot));
1549 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1551 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?, [sp, -#c]! */
1552 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1554 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1557 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1558 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1559 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1561 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4, [sp!] */
1562 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1564 int n_saved_fp_regs;
1565 unsigned int fp_start_reg, fp_bound_reg;
1567 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1570 if ((insn & 0x800) == 0x800) /* N0 is set */
1572 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1573 n_saved_fp_regs = 3;
1575 n_saved_fp_regs = 1;
1579 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1580 n_saved_fp_regs = 2;
1582 n_saved_fp_regs = 4;
1585 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1586 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1587 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1589 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1590 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1591 regs[fp_start_reg++]);
1594 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1596 /* Allow some special function calls when skipping the
1597 prologue; GCC generates these before storing arguments to
1598 the stack. */
1599 CORE_ADDR dest = BranchDest (current_pc, insn);
1601 if (skip_prologue_function (dest))
1606 else if ((insn & 0xf0000000) != 0xe0000000)
1607 break; /* Condition not true, exit early */
1608 else if (arm_instruction_changes_pc (insn))
1609 /* Don't scan past anything that might change control flow. */
1611 else if ((insn & 0xfe500000) == 0xe8100000) /* ldm */
1613 /* Ignore block loads from the stack, potentially copying
1614 parameters from memory. */
1615 if (pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1620 else if ((insn & 0xfc500000) == 0xe4100000)
1622 /* Similarly ignore single loads from the stack. */
1623 if (pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1628 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1629 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1630 register instead of the stack. */
1634 /* The optimizer might shove anything into the prologue,
1635 so we just skip what we don't recognize. */
1636 unrecognized_pc = current_pc;
1641 if (unrecognized_pc == 0)
1642 unrecognized_pc = current_pc;
1644 /* The frame size is just the distance from the frame register
1645 to the original stack pointer. */
1646 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1648 /* Frame pointer is fp. */
1649 framereg = ARM_FP_REGNUM;
1650 framesize = -regs[ARM_FP_REGNUM].k;
1652 else if (pv_is_register (regs[ARM_SP_REGNUM], ARM_SP_REGNUM))
1654 /* Try the stack pointer... this is a bit desperate. */
1655 framereg = ARM_SP_REGNUM;
1656 framesize = -regs[ARM_SP_REGNUM].k;
1660 /* We're just out of luck. We don't know where the frame is. */
1667 cache->framereg = framereg;
1668 cache->framesize = framesize;
1670 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1671 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1672 cache->saved_regs[regno].addr = offset;
1676 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1677 paddress (gdbarch, unrecognized_pc));
1679 do_cleanups (back_to);
1680 return unrecognized_pc;
1684 arm_scan_prologue (struct frame_info *this_frame,
1685 struct arm_prologue_cache *cache)
1687 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1688 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1690 CORE_ADDR prologue_start, prologue_end, current_pc;
1691 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1692 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1693 pv_t regs[ARM_FPS_REGNUM];
1694 struct pv_area *stack;
1695 struct cleanup *back_to;
1698 /* Assume there is no frame until proven otherwise. */
1699 cache->framereg = ARM_SP_REGNUM;
1700 cache->framesize = 0;
1702 /* Check for Thumb prologue. */
1703 if (arm_frame_is_thumb (this_frame))
1705 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1709 /* Find the function prologue. If we can't find the function in
1710 the symbol table, peek in the stack frame to find the PC. */
1711 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1714 /* One way to find the end of the prologue (which works well
1715 for unoptimized code) is to do the following:
1717 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1719 if (sal.line == 0)
1720 prologue_end = prev_pc;
1721 else if (sal.end < prologue_end)
1722 prologue_end = sal.end;
1724 This mechanism is very accurate so long as the optimizer
1725 doesn't move any instructions from the function body into the
1726 prologue. If this happens, sal.end will be the last
1727 instruction in the first hunk of prologue code just before
1728 the first instruction that the scheduler has moved from
1729 the body to the prologue.
1731 In order to make sure that we scan all of the prologue
1732 instructions, we use a slightly less accurate mechanism which
1733 may scan more than necessary. To help compensate for this
1734 lack of accuracy, the prologue scanning loop below contains
1735 several clauses which will cause the loop to terminate early if
1736 an implausible prologue instruction is encountered.
1738 The expression
1740 prologue_start + 64
1742 is a suitable endpoint since it accounts for the largest
1743 possible prologue plus up to five instructions inserted by
1744 the scheduler. */
1746 if (prologue_end > prologue_start + 64)
1748 prologue_end = prologue_start + 64; /* See above. */
1753 /* We have no symbol information. Our only option is to assume this
1754 function has a standard stack frame and the normal frame register.
1755 Then, we can find the value of our frame pointer on entrance to
1756 the callee (or at the present moment if this is the innermost frame).
1757 The value stored there should be the address of the stmfd + 8. */
1758 CORE_ADDR frame_loc;
1759 LONGEST return_value;
1761 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1762 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1766 prologue_start = gdbarch_addr_bits_remove
1767 (gdbarch, return_value) - 8;
1768 prologue_end = prologue_start + 64; /* See above. */
1772 if (prev_pc < prologue_end)
1773 prologue_end = prev_pc;
1775 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1778 static struct arm_prologue_cache *
1779 arm_make_prologue_cache (struct frame_info *this_frame)
1782 struct arm_prologue_cache *cache;
1783 CORE_ADDR unwound_fp;
1785 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1786 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1788 arm_scan_prologue (this_frame, cache);
1790 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1791 if (unwound_fp == 0)
1794 cache->prev_sp = unwound_fp + cache->framesize;
1796 /* Calculate actual addresses of saved registers using offsets
1797 determined by arm_scan_prologue. */
1798 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1799 if (trad_frame_addr_p (cache->saved_regs, reg))
1800 cache->saved_regs[reg].addr += cache->prev_sp;
1805 /* Our frame ID for a normal frame is the current function's starting PC
1806 and the caller's SP when we were called. */
1809 arm_prologue_this_id (struct frame_info *this_frame,
1811 struct frame_id *this_id)
1813 struct arm_prologue_cache *cache;
1817 if (*this_cache == NULL)
1818 *this_cache = arm_make_prologue_cache (this_frame);
1819 cache = *this_cache;
1821 /* This is meant to halt the backtrace at "_start". */
1822 pc = get_frame_pc (this_frame);
1823 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1826 /* If we've hit a wall, stop. */
1827 if (cache->prev_sp == 0)
1830 func = get_frame_func (this_frame);
1831 id = frame_id_build (cache->prev_sp, func);
1835 static struct value *
1836 arm_prologue_prev_register (struct frame_info *this_frame,
1840 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1841 struct arm_prologue_cache *cache;
1843 if (*this_cache == NULL)
1844 *this_cache = arm_make_prologue_cache (this_frame);
1845 cache = *this_cache;
1847 /* If we are asked to unwind the PC, then we need to return the LR
1848 instead. The prologue may save PC, but it will point into this
1849 frame's prologue, not the next frame's resume location. Also
1850 strip the saved T bit. A valid LR may have the low bit set, but
1851 a valid PC never does. */
1852 if (prev_regnum == ARM_PC_REGNUM)
1856 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1857 return frame_unwind_got_constant (this_frame, prev_regnum,
1858 arm_addr_bits_remove (gdbarch, lr));
1861 /* SP is generally not saved to the stack, but this frame is
1862 identified by the next frame's stack pointer at the time of the call.
1863 The value was already reconstructed into PREV_SP. */
1864 if (prev_regnum == ARM_SP_REGNUM)
1865 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1867 /* The CPSR may have been changed by the call instruction and by the
1868 called function. The only bit we can reconstruct is the T bit,
1869 by checking the low bit of LR as of the call. This is a reliable
1870 indicator of Thumb-ness except for some ARM v4T pre-interworking
1871 Thumb code, which could get away with a clear low bit as long as
1872 the called function did not use bx. Guess that all other
1873 bits are unchanged; the condition flags are presumably lost,
1874 but the processor status is likely valid. */
1875 if (prev_regnum == ARM_PS_REGNUM)
1878 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1880 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1881 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1882 if (IS_THUMB_ADDR (lr))
1886 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1889 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1893 struct frame_unwind arm_prologue_unwind = {
1895 arm_prologue_this_id,
1896 arm_prologue_prev_register,
1898 default_frame_sniffer
1901 static struct arm_prologue_cache *
1902 arm_make_stub_cache (struct frame_info *this_frame)
1904 struct arm_prologue_cache *cache;
1906 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1907 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1909 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
1914 /* Our frame ID for a stub frame is the current SP and LR. */
1917 arm_stub_this_id (struct frame_info *this_frame,
1919 struct frame_id *this_id)
1921 struct arm_prologue_cache *cache;
1923 if (*this_cache == NULL)
1924 *this_cache = arm_make_stub_cache (this_frame);
1925 cache = *this_cache;
1927 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
1931 arm_stub_unwind_sniffer (const struct frame_unwind *self,
1932 struct frame_info *this_frame,
1933 void **this_prologue_cache)
1935 CORE_ADDR addr_in_block;
1938 addr_in_block = get_frame_address_in_block (this_frame);
1939 if (in_plt_section (addr_in_block, NULL)
1940 /* We also use the stub unwinder if the target memory is unreadable
1941 to avoid having the prologue unwinder trying to read it. */
1942 || target_read_memory (get_frame_pc (this_frame), dummy, 4) != 0)
1948 struct frame_unwind arm_stub_unwind = {
1951 arm_prologue_prev_register,
1953 arm_stub_unwind_sniffer
1957 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
1959 struct arm_prologue_cache *cache;
1961 if (*this_cache == NULL)
1962 *this_cache = arm_make_prologue_cache (this_frame);
1963 cache = *this_cache;
1965 return cache->prev_sp - cache->framesize;
1968 struct frame_base arm_normal_base = {
1969 &arm_prologue_unwind,
1970 arm_normal_frame_base,
1971 arm_normal_frame_base,
1972 arm_normal_frame_base
1975 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
1976 dummy frame. The frame ID's base needs to match the TOS value
1977 saved by save_dummy_frame_tos() and returned from
1978 arm_push_dummy_call, and the PC needs to match the dummy frame's
1979 breakpoint. */
1981 static struct frame_id
1982 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
1984 return frame_id_build (get_frame_register_unsigned (this_frame, ARM_SP_REGNUM),
1985 get_frame_pc (this_frame));
1988 /* Given THIS_FRAME, find the previous frame's resume PC (which will
1989 be used to construct the previous frame's ID, after looking up the
1990 containing function). */
1993 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
1996 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
1997 return arm_addr_bits_remove (gdbarch, pc);
2001 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2003 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
2006 static struct value *
2007 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2010 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2012 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2017 /* The PC is normally copied from the return column, which
2018 describes saves of LR. However, that version may have an
2019 extra bit set to indicate Thumb state. The bit is not
2020 part of the PC. */
2021 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2022 return frame_unwind_got_constant (this_frame, regnum,
2023 arm_addr_bits_remove (gdbarch, lr));
2026 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2027 cpsr = get_frame_register_unsigned (this_frame, regnum);
2028 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2029 if (IS_THUMB_ADDR (lr))
2033 return frame_unwind_got_constant (this_frame, regnum, cpsr);
2036 internal_error (__FILE__, __LINE__,
2037 _("Unexpected register %d"), regnum);
2042 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
2043 struct dwarf2_frame_state_reg *reg,
2044 struct frame_info *this_frame)
2050 reg->how = DWARF2_FRAME_REG_FN;
2051 reg->loc.fn = arm_dwarf2_prev_register;
2054 reg->how = DWARF2_FRAME_REG_CFA;
2059 /* Return true if we are in the function's epilogue, i.e. after the
2060 instruction that destroyed the function's stack frame. */
2063 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
2065 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2066 unsigned int insn, insn2;
2067 int found_return = 0, found_stack_adjust = 0;
2068 CORE_ADDR func_start, func_end;
2072 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
2075 /* The epilogue is a sequence of instructions along the following lines:
2077 - add stack frame size to SP or FP
2078 - [if frame pointer used] restore SP from FP
2079 - restore registers from SP [may include PC]
2080 - a return-type instruction [if PC wasn't already restored]
2082 In a first pass, we scan forward from the current PC and verify the
2083 instructions we find as compatible with this sequence, ending in a
2086 However, this is not sufficient to distinguish indirect function calls
2087 within a function from indirect tail calls in the epilogue in some cases.
2088 Therefore, if we didn't already find any SP-changing instruction during
2089 forward scan, we add a backward scanning heuristic to ensure we actually
2090 are in the epilogue. */
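/* For illustration (added, not part of the original source), a typical
   Thumb epilogue accepted by the forward scan below is:

       add  sp, #16            ; deallocate locals (stack adjustment)
       pop  {r4, r5, r7, pc}   ; restore callee-saved registers and return
   */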
2093 while (scan_pc < func_end && !found_return)
2095 if (target_read_memory (scan_pc, buf, 2))
2099 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
2101 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2103 else if (insn == 0x46f7) /* mov pc, lr */
2105 else if (insn == 0x46bd) /* mov sp, r7 */
2106 found_stack_adjust = 1;
2107 else if ((insn & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
2108 found_stack_adjust = 1;
2109 else if ((insn & 0xfe00) == 0xbc00) /* pop <registers> */
2111 found_stack_adjust = 1;
2112 if (insn & 0x0100) /* <registers> include PC. */
2115 else if ((insn & 0xe000) == 0xe000) /* 32-bit Thumb-2 instruction */
2117 if (target_read_memory (scan_pc, buf, 2))
2121 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2123 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
2125 found_stack_adjust = 1;
2126 if (insn2 & 0x8000) /* <registers> include PC. */
2129 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
2130 && (insn2 & 0x0fff) == 0x0b04)
2132 found_stack_adjust = 1;
2133 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
2136 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
2137 && (insn2 & 0x0e00) == 0x0a00)
2138 found_stack_adjust = 1;
2149 /* Since any instruction in the epilogue sequence, with the possible
2150 exception of return itself, updates the stack pointer, we need to
2151 scan backwards for at most one instruction. Try either a 16-bit or
2152 a 32-bit instruction. This is just a heuristic, so we do not worry
2153 too much about false positives. */
2155 if (!found_stack_adjust)
2157 if (pc - 4 < func_start)
2159 if (target_read_memory (pc - 4, buf, 4))
2162 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
2163 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
2165 if (insn2 == 0x46bd) /* mov sp, r7 */
2166 found_stack_adjust = 1;
2167 else if ((insn2 & 0xff00) == 0xb000) /* add sp, imm or sub sp, imm */
2168 found_stack_adjust = 1;
2169 else if ((insn2 & 0xff00) == 0xbc00) /* pop <registers> without PC */
2170 found_stack_adjust = 1;
2171 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
2172 found_stack_adjust = 1;
2173 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
2174 && (insn2 & 0x0fff) == 0x0b04)
2175 found_stack_adjust = 1;
2176 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
2177 && (insn2 & 0x0e00) == 0x0a00)
2178 found_stack_adjust = 1;
2181 return found_stack_adjust;
2184 /* Return true if we are in the function's epilogue, i.e. after the
2185 instruction that destroyed the function's stack frame. */
2188 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
2190 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2192 int found_return, found_stack_adjust;
2193 CORE_ADDR func_start, func_end;
2195 if (arm_pc_is_thumb (gdbarch, pc))
2196 return thumb_in_function_epilogue_p (gdbarch, pc);
2198 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
2201 /* We are in the epilogue if the previous instruction was a stack
2202 adjustment and the next instruction is a possible return (bx, mov
2203 pc, or pop). We could have to scan backwards to find the stack
2204 adjustment, or forwards to find the return, but this is a decent
2205 approximation. First scan forwards. */
2208 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
2209 if (bits (insn, 28, 31) != INST_NV)
2211 if ((insn & 0x0ffffff0) == 0x012fff10)
2214 else if ((insn & 0x0ffffff0) == 0x01a0f000)
2217 else if ((insn & 0x0fff0000) == 0x08bd0000
2218 && (insn & 0x0000c000) != 0)
2219 /* POP (LDMIA), including PC or LR. */
2226 /* Scan backwards. This is just a heuristic, so do not worry about
2227 false positives from mode changes. */
2229 if (pc < func_start + 4)
2232 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
2233 if (bits (insn, 28, 31) != INST_NV)
2235 if ((insn & 0x0df0f000) == 0x0080d000)
2236 /* ADD SP (register or immediate). */
2237 found_stack_adjust = 1;
2238 else if ((insn & 0x0df0f000) == 0x0040d000)
2239 /* SUB SP (register or immediate). */
2240 found_stack_adjust = 1;
2241 else if ((insn & 0x0ffffff0) == 0x01a0d000)
2244 else if ((insn & 0x0fff0000) == 0x08bd0000)
2246 found_stack_adjust = 1;
2249 if (found_stack_adjust)
2256 /* When arguments must be pushed onto the stack, they go on in reverse
2257 order. The code below implements a FILO (stack) to do this. */
2262 struct stack_item *prev;
2266 static struct stack_item *
2267 push_stack_item (struct stack_item *prev, const void *contents, int len)
2269 struct stack_item *si;
2270 si = xmalloc (sizeof (struct stack_item));
2271 si->data = xmalloc (len);
2274 memcpy (si->data, contents, len);
2278 static struct stack_item *
2279 pop_stack_item (struct stack_item *si)
2281 struct stack_item *dead = si;
2289 /* Return the alignment (in bytes) of the given type. */
2292 arm_type_align (struct type *t)
2298 t = check_typedef (t);
2299 switch (TYPE_CODE (t))
2302 /* Should never happen. */
2303 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
2307 case TYPE_CODE_ENUM:
2311 case TYPE_CODE_RANGE:
2312 case TYPE_CODE_BITSTRING:
2314 case TYPE_CODE_CHAR:
2315 case TYPE_CODE_BOOL:
2316 return TYPE_LENGTH (t);
2318 case TYPE_CODE_ARRAY:
2319 case TYPE_CODE_COMPLEX:
2320 /* TODO: What about vector types? */
2321 return arm_type_align (TYPE_TARGET_TYPE (t));
2323 case TYPE_CODE_STRUCT:
2324 case TYPE_CODE_UNION:
2326 for (n = 0; n < TYPE_NFIELDS (t); n++)
2328 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
2336 /* Possible base types for a candidate for passing and returning in
2339 enum arm_vfp_cprc_base_type
2348 /* The length of one element of base type B. */
2351 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
2355 case VFP_CPRC_SINGLE:
2357 case VFP_CPRC_DOUBLE:
2359 case VFP_CPRC_VEC64:
2361 case VFP_CPRC_VEC128:
2364 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
2369 /* The character ('s', 'd' or 'q') for the type of VFP register used
2370 for passing base type B. */
2373 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
2377 case VFP_CPRC_SINGLE:
2379 case VFP_CPRC_DOUBLE:
2381 case VFP_CPRC_VEC64:
2383 case VFP_CPRC_VEC128:
2386 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
2391 /* Determine whether T may be part of a candidate for passing and
2392 returning in VFP registers, ignoring the limit on the total number
2393 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
2394 classification of the first valid component found; if it is not
2395 VFP_CPRC_UNKNOWN, all components must have the same classification
2396 as *BASE_TYPE. If it is found that T contains a type not permitted
2397 for passing and returning in VFP registers, a type differently
2398 classified from *BASE_TYPE, or two types differently classified
2399 from each other, return -1, otherwise return the total number of
2400 base-type elements found (possibly 0 in an empty structure or
2401 array). Vectors and complex types are not currently supported,
2402 matching the generic AAPCS support. */
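/* For example (illustrative, not a type from this file): for
   "struct { float x; float y; }" both fields classify as VFP_CPRC_SINGLE,
   so this function returns 2 with *BASE_TYPE set to VFP_CPRC_SINGLE.
   Adding an "int" member would make it return -1, since an integer field
   is not permitted in a VFP candidate.  */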
2405 arm_vfp_cprc_sub_candidate (struct type *t,
2406 enum arm_vfp_cprc_base_type *base_type)
2408 t = check_typedef (t);
2409 switch (TYPE_CODE (t))
2412 switch (TYPE_LENGTH (t))
2415 if (*base_type == VFP_CPRC_UNKNOWN)
2416 *base_type = VFP_CPRC_SINGLE;
2417 else if (*base_type != VFP_CPRC_SINGLE)
2422 if (*base_type == VFP_CPRC_UNKNOWN)
2423 *base_type = VFP_CPRC_DOUBLE;
2424 else if (*base_type != VFP_CPRC_DOUBLE)
2433 case TYPE_CODE_ARRAY:
2437 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
2440 if (TYPE_LENGTH (t) == 0)
2442 gdb_assert (count == 0);
2445 else if (count == 0)
2447 unitlen = arm_vfp_cprc_unit_length (*base_type);
2448 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
2449 return TYPE_LENGTH (t) / unitlen;
2453 case TYPE_CODE_STRUCT:
2458 for (i = 0; i < TYPE_NFIELDS (t); i++)
2460 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
2462 if (sub_count == -1)
2466 if (TYPE_LENGTH (t) == 0)
2468 gdb_assert (count == 0);
2471 else if (count == 0)
2473 unitlen = arm_vfp_cprc_unit_length (*base_type);
2474 if (TYPE_LENGTH (t) != unitlen * count)
2479 case TYPE_CODE_UNION:
2484 for (i = 0; i < TYPE_NFIELDS (t); i++)
2486 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
2488 if (sub_count == -1)
2490 count = (count > sub_count ? count : sub_count);
2492 if (TYPE_LENGTH (t) == 0)
2494 gdb_assert (count == 0);
2497 else if (count == 0)
2499 unitlen = arm_vfp_cprc_unit_length (*base_type);
2500 if (TYPE_LENGTH (t) != unitlen * count)
2512 /* Determine whether T is a VFP co-processor register candidate (CPRC)
2513 if passed to or returned from a non-variadic function with the VFP
2514 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
2515 *BASE_TYPE to the base type for T and *COUNT to the number of
2516 elements of that base type before returning. */
2519 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
2522 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
2523 int c = arm_vfp_cprc_sub_candidate (t, &b);
2524 if (c <= 0 || c > 4)
2531 /* Return 1 if the VFP ABI should be used for passing arguments to and
2532 returning values from a function of type FUNC_TYPE, 0
2536 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
2538 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
2539 /* Variadic functions always use the base ABI. Assume that functions
2540 without debug info are not variadic. */
2541 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
2543 /* The VFP ABI is only supported as a variant of AAPCS. */
2544 if (tdep->arm_abi != ARM_ABI_AAPCS)
2546 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
2549 /* We currently only support passing parameters in integer registers, which
2550 conforms with GCC's default model, and VFP argument passing following
2551 the VFP variant of AAPCS. Several other variants exist and
2552 we should probably support some of them based on the selected ABI. */
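/* Hypothetical example (not from this file): for a call such as
   func (1, 2.5) with the VFP variant of AAPCS in effect, the int goes in
   r0 and the double in d0, because the double is a VFP candidate and does
   not consume a core argument register.  Under the base AAPCS the same
   double would instead occupy the aligned pair r2/r3, leaving r1 unused
   for alignment.  */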
2555 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
2556 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
2557 struct value **args, CORE_ADDR sp, int struct_return,
2558 CORE_ADDR struct_addr)
2560 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2564 struct stack_item *si = NULL;
2567 unsigned vfp_regs_free = (1 << 16) - 1;
2569 /* Determine the type of this function and whether the VFP ABI
2571 ftype = check_typedef (value_type (function));
2572 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
2573 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
2574 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
2576 /* Set the return address. For the ARM, the return breakpoint is
2577 always at BP_ADDR. */
2578 if (arm_pc_is_thumb (gdbarch, bp_addr))
2580 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
2582 /* Walk through the list of args and determine how large a temporary
2583 stack is required. Need to take care here as structs may be
2584 passed on the stack, and we have to push them. */
2587 argreg = ARM_A1_REGNUM;
2590 /* The struct_return pointer occupies the first parameter
2591 passing register. */
2595 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
2596 gdbarch_register_name (gdbarch, argreg),
2597 paddress (gdbarch, struct_addr));
2598 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
2602 for (argnum = 0; argnum < nargs; argnum++)
2605 struct type *arg_type;
2606 struct type *target_type;
2607 enum type_code typecode;
2608 const bfd_byte *val;
2610 enum arm_vfp_cprc_base_type vfp_base_type;
2612 int may_use_core_reg = 1;
2614 arg_type = check_typedef (value_type (args[argnum]));
2615 len = TYPE_LENGTH (arg_type);
2616 target_type = TYPE_TARGET_TYPE (arg_type);
2617 typecode = TYPE_CODE (arg_type);
2618 val = value_contents (args[argnum]);
2620 align = arm_type_align (arg_type);
2621 /* Round alignment up to a whole number of words. */
2622 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
2623 /* Different ABIs have different maximum alignments. */
2624 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
2626 /* The APCS ABI only requires word alignment. */
2627 align = INT_REGISTER_SIZE;
2631 /* The AAPCS requires at most doubleword alignment. */
2632 if (align > INT_REGISTER_SIZE * 2)
2633 align = INT_REGISTER_SIZE * 2;
2637 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
2645 /* Because this is a CPRC it cannot go in a core register or
2646 cause a core register to be skipped for alignment.
2647 Either it goes in VFP registers and the rest of this loop
2648 iteration is skipped for this argument, or it goes on the
2649 stack (and the stack alignment code is correct for this
2651 may_use_core_reg = 0;
2653 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
2654 shift = unit_length / 4;
2655 mask = (1 << (shift * vfp_base_count)) - 1;
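/* Sketch of the arithmetic above (illustrative): for a homogeneous
   aggregate of two doubles, unit_length is 8, shift is 2 and mask is 0xf,
   so the loop below looks for four consecutive free single-precision
   slots starting at an even index, i.e. a free pair of D registers such
   as d0/d1.  */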
2656 for (regno = 0; regno < 16; regno += shift)
2657 if (((vfp_regs_free >> regno) & mask) == mask)
2666 vfp_regs_free &= ~(mask << regno);
2667 reg_scaled = regno / shift;
2668 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
2669 for (i = 0; i < vfp_base_count; i++)
2673 if (reg_char == 'q')
2674 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
2675 val + i * unit_length);
2678 sprintf (name_buf, "%c%d", reg_char, reg_scaled + i);
2679 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
2681 regcache_cooked_write (regcache, regnum,
2682 val + i * unit_length);
2689 /* This CPRC could not go in VFP registers, so all VFP
2690 registers are now marked as used. */
2695 /* Push stack padding for doubleword alignment. */
2696 if (nstack & (align - 1))
2698 si = push_stack_item (si, val, INT_REGISTER_SIZE);
2699 nstack += INT_REGISTER_SIZE;
2702 /* Doubleword aligned quantities must go in even register pairs. */
2703 if (may_use_core_reg
2704 && argreg <= ARM_LAST_ARG_REGNUM
2705 && align > INT_REGISTER_SIZE
2709 /* If the argument is a pointer to a function, and it is a
2710 Thumb function, create a LOCAL copy of the value and set
2711 the THUMB bit in it. */
2712 if (TYPE_CODE_PTR == typecode
2713 && target_type != NULL
2714 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
2716 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
2717 if (arm_pc_is_thumb (gdbarch, regval))
2719 bfd_byte *copy = alloca (len);
2720 store_unsigned_integer (copy, len, byte_order,
2721 MAKE_THUMB_ADDR (regval));
2726 /* Copy the argument to general registers or the stack in
2727 register-sized pieces. Large arguments are split between
2728 registers and stack. */
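/* For instance (illustrative), an 8-byte argument that arrives when only
   r3 is still free is passed half in r3 and half in the first stack slot;
   the loop below performs that split one word at a time.  */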
2731 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
2733 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
2735 /* The argument is being passed in a general purpose
2738 = extract_unsigned_integer (val, partial_len, byte_order);
2739 if (byte_order == BFD_ENDIAN_BIG)
2740 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
2742 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
2744 gdbarch_register_name
2746 phex (regval, INT_REGISTER_SIZE));
2747 regcache_cooked_write_unsigned (regcache, argreg, regval);
2752 /* Push the arguments onto the stack. */
2754 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
2756 si = push_stack_item (si, val, INT_REGISTER_SIZE);
2757 nstack += INT_REGISTER_SIZE;
2764 /* If we have an odd number of words to push, then decrement the stack
2765 by one word now, so first stack argument will be dword aligned. */
2772 write_memory (sp, si->data, si->len);
2773 si = pop_stack_item (si);
2776 /* Finally, update the SP register. */
2777 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
2783 /* Always align the frame to an 8-byte boundary. This is required on
2784 some platforms and harmless on the rest. */
2787 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
2789 /* Align the stack to eight bytes. */
2790 return sp & ~ (CORE_ADDR) 7;
2794 print_fpu_flags (int flags)
2796 if (flags & (1 << 0))
2797 fputs ("IVO ", stdout);
2798 if (flags & (1 << 1))
2799 fputs ("DVZ ", stdout);
2800 if (flags & (1 << 2))
2801 fputs ("OFL ", stdout);
2802 if (flags & (1 << 3))
2803 fputs ("UFL ", stdout);
2804 if (flags & (1 << 4))
2805 fputs ("INX ", stdout);
2809 /* Print interesting information about the floating point processor
2810 (if present) or emulator. */
2812 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
2813 struct frame_info *frame, const char *args)
2815 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
2818 type = (status >> 24) & 127;
2819 if (status & (1 << 31))
2820 printf (_("Hardware FPU type %d\n"), type);
2822 printf (_("Software FPU type %d\n"), type);
2823 /* i18n: [floating point unit] mask */
2824 fputs (_("mask: "), stdout);
2825 print_fpu_flags (status >> 16);
2826 /* i18n: [floating point unit] flags */
2827 fputs (_("flags: "), stdout);
2828 print_fpu_flags (status);
2831 /* Construct the ARM extended floating point type. */
2832 static struct type *
2833 arm_ext_type (struct gdbarch *gdbarch)
2835 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
2837 if (!tdep->arm_ext_type)
2839 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
2840 floatformats_arm_ext);
2842 return tdep->arm_ext_type;
2845 static struct type *
2846 arm_neon_double_type (struct gdbarch *gdbarch)
2848 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
2850 if (tdep->neon_double_type == NULL)
2852 struct type *t, *elem;
2854 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
2856 elem = builtin_type (gdbarch)->builtin_uint8;
2857 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
2858 elem = builtin_type (gdbarch)->builtin_uint16;
2859 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
2860 elem = builtin_type (gdbarch)->builtin_uint32;
2861 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
2862 elem = builtin_type (gdbarch)->builtin_uint64;
2863 append_composite_type_field (t, "u64", elem);
2864 elem = builtin_type (gdbarch)->builtin_float;
2865 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
2866 elem = builtin_type (gdbarch)->builtin_double;
2867 append_composite_type_field (t, "f64", elem);
2869 TYPE_VECTOR (t) = 1;
2870 TYPE_NAME (t) = "neon_d";
2871 tdep->neon_double_type = t;
2874 return tdep->neon_double_type;
2877 /* FIXME: The vector types are not correctly ordered on big-endian
2878 targets. Just as s0 is the low bits of d0, d0[0] is also the low
2879 bits of d0 - regardless of what unit size is being held in d0. So
2880 the offset of the first uint8 in d0 is 7, but the offset of the
2881 first float is 4. This code works as-is for little-endian
2884 static struct type *
2885 arm_neon_quad_type (struct gdbarch *gdbarch)
2887 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
2889 if (tdep->neon_quad_type == NULL)
2891 struct type *t, *elem;
2893 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
2895 elem = builtin_type (gdbarch)->builtin_uint8;
2896 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
2897 elem = builtin_type (gdbarch)->builtin_uint16;
2898 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
2899 elem = builtin_type (gdbarch)->builtin_uint32;
2900 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
2901 elem = builtin_type (gdbarch)->builtin_uint64;
2902 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
2903 elem = builtin_type (gdbarch)->builtin_float;
2904 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
2905 elem = builtin_type (gdbarch)->builtin_double;
2906 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
2908 TYPE_VECTOR (t) = 1;
2909 TYPE_NAME (t) = "neon_q";
2910 tdep->neon_quad_type = t;
2913 return tdep->neon_quad_type;
2916 /* Return the GDB type object for the "standard" data type of data in
2919 static struct type *
2920 arm_register_type (struct gdbarch *gdbarch, int regnum)
2922 int num_regs = gdbarch_num_regs (gdbarch);
2924 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
2925 && regnum >= num_regs && regnum < num_regs + 32)
2926 return builtin_type (gdbarch)->builtin_float;
2928 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
2929 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
2930 return arm_neon_quad_type (gdbarch);
2932 /* If the target description has register information, we are only
2933 in this function so that we can override the types of
2934 double-precision registers for NEON. */
2935 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
2937 struct type *t = tdesc_register_type (gdbarch, regnum);
2939 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
2940 && TYPE_CODE (t) == TYPE_CODE_FLT
2941 && gdbarch_tdep (gdbarch)->have_neon)
2942 return arm_neon_double_type (gdbarch);
2947 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
2949 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
2950 return builtin_type (gdbarch)->builtin_void;
2952 return arm_ext_type (gdbarch);
2954 else if (regnum == ARM_SP_REGNUM)
2955 return builtin_type (gdbarch)->builtin_data_ptr;
2956 else if (regnum == ARM_PC_REGNUM)
2957 return builtin_type (gdbarch)->builtin_func_ptr;
2958 else if (regnum >= ARRAY_SIZE (arm_register_names))
2959 /* These registers are only supported on targets which supply
2960 an XML description. */
2961 return builtin_type (gdbarch)->builtin_int0;
2963 return builtin_type (gdbarch)->builtin_uint32;
2966 /* Map a DWARF register REGNUM onto the appropriate GDB register
2970 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
2972 /* Core integer regs. */
2973 if (reg >= 0 && reg <= 15)
2976 /* Legacy FPA encoding. These were once used in a way which
2977 overlapped with VFP register numbering, so their use is
2978 discouraged, but GDB doesn't support the ARM toolchain
2979 which used them for VFP. */
2980 if (reg >= 16 && reg <= 23)
2981 return ARM_F0_REGNUM + reg - 16;
2983 /* New assignments for the FPA registers. */
2984 if (reg >= 96 && reg <= 103)
2985 return ARM_F0_REGNUM + reg - 96;
2987 /* WMMX register assignments. */
2988 if (reg >= 104 && reg <= 111)
2989 return ARM_WCGR0_REGNUM + reg - 104;
2991 if (reg >= 112 && reg <= 127)
2992 return ARM_WR0_REGNUM + reg - 112;
2994 if (reg >= 192 && reg <= 199)
2995 return ARM_WC0_REGNUM + reg - 192;
2997 /* VFP v2 registers. A double precision value is actually
2998 in d1 rather than s2, but the ABI only defines numbering
2999 for the single precision registers. This will "just work"
3000 in GDB for little endian targets (we'll read eight bytes,
3001 starting in s0 and then progressing to s1), but will be
3002 reversed on big endian targets with VFP. This won't
3003 be a problem for the new Neon quad registers; you're supposed
3004 to use DW_OP_piece for those. */
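/* For example, DWARF register 64 resolves below to GDB's "s0" and 95 to
   "s31"; the VFP v3 / Neon range handled further down maps 256 to "d0".  */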
3005 if (reg >= 64 && reg <= 95)
3009 sprintf (name_buf, "s%d", reg - 64);
3010 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3014 /* VFP v3 / Neon registers. This range is also used for VFP v2
3015 registers, except that it now describes d0 instead of s0. */
3016 if (reg >= 256 && reg <= 287)
3020 sprintf (name_buf, "d%d", reg - 256);
3021 return user_reg_map_name_to_regnum (gdbarch, name_buf,
3028 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
3030 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
3033 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
3035 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
3036 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
3038 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
3039 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
3041 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
3042 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
3044 if (reg < NUM_GREGS)
3045 return SIM_ARM_R0_REGNUM + reg;
3048 if (reg < NUM_FREGS)
3049 return SIM_ARM_FP0_REGNUM + reg;
3052 if (reg < NUM_SREGS)
3053 return SIM_ARM_FPS_REGNUM + reg;
3056 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
3059 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
3060 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
3061 It is thought that this is the floating-point register format on
3062 little-endian systems. */
3065 convert_from_extended (const struct floatformat *fmt, const void *ptr,
3066 void *dbl, int endianess)
3070 if (endianess == BFD_ENDIAN_BIG)
3071 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
3073 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
3075 floatformat_from_doublest (fmt, &d, dbl);
3079 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
3084 floatformat_to_doublest (fmt, ptr, &d);
3085 if (endianess == BFD_ENDIAN_BIG)
3086 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
3088 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
3093 condition_true (unsigned long cond, unsigned long status_reg)
3095 if (cond == INST_AL || cond == INST_NV)
3101 return ((status_reg & FLAG_Z) != 0);
3103 return ((status_reg & FLAG_Z) == 0);
3105 return ((status_reg & FLAG_C) != 0);
3107 return ((status_reg & FLAG_C) == 0);
3109 return ((status_reg & FLAG_N) != 0);
3111 return ((status_reg & FLAG_N) == 0);
3113 return ((status_reg & FLAG_V) != 0);
3115 return ((status_reg & FLAG_V) == 0);
3117 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
3119 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
3121 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
3123 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
3125 return (((status_reg & FLAG_Z) == 0)
3126 && (((status_reg & FLAG_N) == 0)
3127 == ((status_reg & FLAG_V) == 0)));
3129 return (((status_reg & FLAG_Z) != 0)
3130 || (((status_reg & FLAG_N) == 0)
3131 != ((status_reg & FLAG_V) == 0)));
3136 static unsigned long
3137 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
3138 unsigned long pc_val, unsigned long status_reg)
3140 unsigned long res, shift;
3141 int rm = bits (inst, 0, 3);
3142 unsigned long shifttype = bits (inst, 5, 6);
3146 int rs = bits (inst, 8, 11);
3147 shift = (rs == 15 ? pc_val + 8
3148 : get_frame_register_unsigned (frame, rs)) & 0xFF;
3151 shift = bits (inst, 7, 11);
3154 ? (pc_val + (bit (inst, 4) ? 12 : 8))
3155 : get_frame_register_unsigned (frame, rm));
3160 res = shift >= 32 ? 0 : res << shift;
3164 res = shift >= 32 ? 0 : res >> shift;
3170 res = ((res & 0x80000000L)
3171 ? ~((~res) >> shift) : res >> shift);
3174 case 3: /* ROR/RRX */
3177 res = (res >> 1) | (carry ? 0x80000000L : 0);
3179 res = (res >> shift) | (res << (32 - shift));
3183 return res & 0xffffffff;
3186 /* Return number of 1-bits in VAL. */
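/* For instance, bitcount (0xb0) == 3.  The Thumb "pop {rlist, pc}"
   handling later in this file uses this count to locate the saved PC
   above the other popped registers on the stack.  */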
3189 bitcount (unsigned long val)
3192 for (nbits = 0; val != 0; nbits++)
3193 val &= val - 1; /* Delete the rightmost 1-bit in VAL. */
3197 /* Return the size in bytes of the complete Thumb instruction whose
3198 first halfword is INST1. */
3201 thumb_insn_size (unsigned short inst1)
3203 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
3210 thumb_advance_itstate (unsigned int itstate)
3212 /* Preserve IT[7:5], the first three bits of the condition. Shift
3213 the upcoming condition flags left by one bit. */
3214 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
3216 /* If we have finished the IT block, clear the state. */
3217 if ((itstate & 0x0f) == 0)
3223 /* Find the next PC after the current instruction executes. In some
3224 cases we can not statically determine the answer (see the IT state
3225 handling in this function); in that case, a breakpoint may be
3226 inserted in addition to the returned PC, which will be used to set
3227 another breakpoint by our caller. */
3230 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc, int insert_bkpt)
3232 struct gdbarch *gdbarch = get_frame_arch (frame);
3233 struct address_space *aspace = get_frame_address_space (frame);
3234 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3235 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3236 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
3237 unsigned short inst1;
3238 CORE_ADDR nextpc = pc + 2; /* default is next instruction */
3239 unsigned long offset;
3240 ULONGEST status, itstate;
3242 nextpc = MAKE_THUMB_ADDR (nextpc);
3243 pc_val = MAKE_THUMB_ADDR (pc_val);
3245 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
3247 /* Thumb-2 conditional execution support. There are eight bits in
3248 the CPSR which describe conditional execution state. Once
3249 reconstructed (they're in a funny order), the low five bits
3250 describe the low bit of the condition for each instruction and
3251 how many instructions remain. The high three bits describe the
3252 base condition. One of the low four bits will be set if an IT
3253 block is active. These bits read as zero on earlier
3255 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
3256 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
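/* As a reading of the expression above (illustrative): CPSR bits 26:25
   hold IT[1:0] and bits 15:10 hold IT[7:2], so the two shifted terms
   reassemble the eight ITSTATE bits into architectural order, with the
   base condition in bits 7:5.  */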
3258 /* If-Then handling. On GNU/Linux, where this routine is used, we
3259 use an undefined instruction as a breakpoint. Unlike BKPT, IT
3260 can disable execution of the undefined instruction. So we might
3261 miss the breakpoint if we set it on a skipped conditional
3262 instruction. Because conditional instructions can change the
3263 flags, affecting the execution of further instructions, we may
3264 need to set two breakpoints. */
3266 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
3268 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
3270 /* An IT instruction. Because this instruction does not
3271 modify the flags, we can accurately predict the next
3272 executed instruction. */
3273 itstate = inst1 & 0x00ff;
3274 pc += thumb_insn_size (inst1);
3276 while (itstate != 0 && ! condition_true (itstate >> 4, status))
3278 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
3279 pc += thumb_insn_size (inst1);
3280 itstate = thumb_advance_itstate (itstate);
3283 return MAKE_THUMB_ADDR (pc);
3285 else if (itstate != 0)
3287 /* We are in a conditional block. Check the condition. */
3288 if (! condition_true (itstate >> 4, status))
3290 /* Advance to the next executed instruction. */
3291 pc += thumb_insn_size (inst1);
3292 itstate = thumb_advance_itstate (itstate);
3294 while (itstate != 0 && ! condition_true (itstate >> 4, status))
3296 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
3297 pc += thumb_insn_size (inst1);
3298 itstate = thumb_advance_itstate (itstate);
3301 return MAKE_THUMB_ADDR (pc);
3303 else if ((itstate & 0x0f) == 0x08)
3305 /* This is the last instruction of the conditional
3306 block, and it is executed. We can handle it normally
3307 because the following instruction is not conditional,
3308 and we must handle it normally because it is
3309 permitted to branch. Fall through. */
3315 /* There are conditional instructions after this one.
3316 If this instruction modifies the flags, then we can
3317 not predict what the next executed instruction will
3318 be. Fortunately, this instruction is architecturally
3319 forbidden to branch; we know it will fall through.
3320 Start by skipping past it. */
3321 pc += thumb_insn_size (inst1);
3322 itstate = thumb_advance_itstate (itstate);
3324 /* Set a breakpoint on the following instruction. */
3325 gdb_assert ((itstate & 0x0f) != 0);
3327 insert_single_step_breakpoint (gdbarch, aspace, pc);
3328 cond_negated = (itstate >> 4) & 1;
3330 /* Skip all following instructions with the same
3331 condition. If there is a later instruction in the IT
3332 block with the opposite condition, set the other
3333 breakpoint there. If not, then set a breakpoint on
3334 the instruction after the IT block. */
3337 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
3338 pc += thumb_insn_size (inst1);
3339 itstate = thumb_advance_itstate (itstate);
3341 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
3343 return MAKE_THUMB_ADDR (pc);
3347 else if (itstate & 0x0f)
3349 /* We are in a conditional block. Check the condition. */
3350 int cond = itstate >> 4;
3352 if (! condition_true (cond, status))
3354 /* Advance to the next instruction. All the 32-bit
3355 instructions share a common prefix. */
3356 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
3357 return MAKE_THUMB_ADDR (pc + 4);
3359 return MAKE_THUMB_ADDR (pc + 2);
3362 /* Otherwise, handle the instruction normally. */
3365 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
3369 /* Fetch the saved PC from the stack. It's stored above
3370 all of the other registers. */
3371 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
3372 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
3373 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
3375 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
3377 unsigned long cond = bits (inst1, 8, 11);
3378 if (cond == 0x0f) /* 0x0f = SWI */
3380 struct gdbarch_tdep *tdep;
3381 tdep = gdbarch_tdep (gdbarch);
3383 if (tdep->syscall_next_pc != NULL)
3384 nextpc = tdep->syscall_next_pc (frame);
3387 else if (cond != 0x0f && condition_true (cond, status))
3388 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
3390 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
3392 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
3394 else if ((inst1 & 0xe000) == 0xe000) /* 32-bit instruction */
3396 unsigned short inst2;
3397 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
3399 /* Default to the next instruction. */
3401 nextpc = MAKE_THUMB_ADDR (nextpc);
3403 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
3405 /* Branches and miscellaneous control instructions. */
3407 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
3410 int j1, j2, imm1, imm2;
3412 imm1 = sbits (inst1, 0, 10);
3413 imm2 = bits (inst2, 0, 10);
3414 j1 = bit (inst2, 13);
3415 j2 = bit (inst2, 11);
3417 offset = ((imm1 << 12) + (imm2 << 1));
3418 offset ^= ((!j2) << 22) | ((!j1) << 23);
3420 nextpc = pc_val + offset;
3421 /* For BLX make sure to clear the low bits. */
3422 if (bit (inst2, 12) == 0)
3423 nextpc = nextpc & 0xfffffffc;
3425 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
3427 /* SUBS PC, LR, #imm8. */
3428 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
3429 nextpc -= inst2 & 0x00ff;
3431 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
3433 /* Conditional branch. */
3434 if (condition_true (bits (inst1, 6, 9), status))
3436 int sign, j1, j2, imm1, imm2;
3438 sign = sbits (inst1, 10, 10);
3439 imm1 = bits (inst1, 0, 5);
3440 imm2 = bits (inst2, 0, 10);
3441 j1 = bit (inst2, 13);
3442 j2 = bit (inst2, 11);
3444 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
3445 offset += (imm1 << 12) + (imm2 << 1);
3447 nextpc = pc_val + offset;
3451 else if ((inst1 & 0xfe50) == 0xe810)
3453 /* Load multiple or RFE. */
3454 int rn, offset, load_pc = 1;
3456 rn = bits (inst1, 0, 3);
3457 if (bit (inst1, 7) && !bit (inst1, 8))
3460 if (!bit (inst2, 15))
3462 offset = bitcount (inst2) * 4 - 4;
3464 else if (!bit (inst1, 7) && bit (inst1, 8))
3467 if (!bit (inst2, 15))
3471 else if (bit (inst1, 7) && bit (inst1, 8))
3476 else if (!bit (inst1, 7) && !bit (inst1, 8))
3486 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
3487 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
3490 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
3492 /* MOV PC or MOVS PC. */
3493 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
3494 nextpc = MAKE_THUMB_ADDR (nextpc);
3496 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
3500 int rn, load_pc = 1;
3502 rn = bits (inst1, 0, 3);
3503 base = get_frame_register_unsigned (frame, rn);
3506 base = (base + 4) & ~(CORE_ADDR) 0x3;
3508 base += bits (inst2, 0, 11);
3510 base -= bits (inst2, 0, 11);
3512 else if (bit (inst1, 7))
3513 base += bits (inst2, 0, 11);
3514 else if (bit (inst2, 11))
3516 if (bit (inst2, 10))
3519 base += bits (inst2, 0, 7);
3521 base -= bits (inst2, 0, 7);
3524 else if ((inst2 & 0x0fc0) == 0x0000)
3526 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
3527 base += get_frame_register_unsigned (frame, rm) << shift;
3534 nextpc = get_frame_memory_unsigned (frame, base, 4);
3536 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
3539 CORE_ADDR tbl_reg, table, offset, length;
3541 tbl_reg = bits (inst1, 0, 3);
3542 if (tbl_reg == 0x0f)
3543 table = pc + 4; /* Regcache copy of PC isn't right yet. */
3545 table = get_frame_register_unsigned (frame, tbl_reg);
3547 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
3548 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
3549 nextpc = pc_val + length;
3551 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
3554 CORE_ADDR tbl_reg, table, offset, length;
3556 tbl_reg = bits (inst1, 0, 3);
3557 if (tbl_reg == 0x0f)
3558 table = pc + 4; /* Regcache copy of PC isn't right yet. */
3560 table = get_frame_register_unsigned (frame, tbl_reg);
3562 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
3563 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
3564 nextpc = pc_val + length;
3567 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
3569 if (bits (inst1, 3, 6) == 0x0f)
3572 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
3574 else if ((inst1 & 0xf500) == 0xb100)
3577 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
3578 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
3580 if (bit (inst1, 11) && reg != 0)
3581 nextpc = pc_val + imm;
3582 else if (!bit (inst1, 11) && reg == 0)
3583 nextpc = pc_val + imm;
3588 /* Get the raw next address. PC is the current program counter, in
3589 FRAME. INSERT_BKPT should be TRUE if we want a breakpoint set on
3590 the alternative next instruction if there are two options.
3592 The value returned has the execution state of the next instruction
3593 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
3594 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
3598 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc, int insert_bkpt)
3600 struct gdbarch *gdbarch = get_frame_arch (frame);
3601 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3602 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3603 unsigned long pc_val;
3604 unsigned long this_instr;
3605 unsigned long status;
3608 if (arm_frame_is_thumb (frame))
3609 return thumb_get_next_pc_raw (frame, pc, insert_bkpt);
3611 pc_val = (unsigned long) pc;
3612 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3614 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
3615 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
3617 if (bits (this_instr, 28, 31) == INST_NV)
3618 switch (bits (this_instr, 24, 27))
3623 /* Branch with Link and change to Thumb. */
3624 nextpc = BranchDest (pc, this_instr);
3625 nextpc |= bit (this_instr, 24) << 1;
3626 nextpc = MAKE_THUMB_ADDR (nextpc);
3632 /* Coprocessor register transfer. */
3633 if (bits (this_instr, 12, 15) == 15)
3634 error (_("Invalid update to pc in instruction"));
3637 else if (condition_true (bits (this_instr, 28, 31), status))
3639 switch (bits (this_instr, 24, 27))
3642 case 0x1: /* data processing */
3646 unsigned long operand1, operand2, result = 0;
3650 if (bits (this_instr, 12, 15) != 15)
3653 if (bits (this_instr, 22, 25) == 0
3654 && bits (this_instr, 4, 7) == 9) /* multiply */
3655 error (_("Invalid update to pc in instruction"));
3657 /* BX <reg>, BLX <reg> */
3658 if (bits (this_instr, 4, 27) == 0x12fff1
3659 || bits (this_instr, 4, 27) == 0x12fff3)
3661 rn = bits (this_instr, 0, 3);
3662 nextpc = (rn == 15) ? pc_val + 8
3663 : get_frame_register_unsigned (frame, rn);
3667 /* Multiply into PC */
3668 c = (status & FLAG_C) ? 1 : 0;
3669 rn = bits (this_instr, 16, 19);
3670 operand1 = (rn == 15) ? pc_val + 8
3671 : get_frame_register_unsigned (frame, rn);
3673 if (bit (this_instr, 25))
3675 unsigned long immval = bits (this_instr, 0, 7);
3676 unsigned long rotate = 2 * bits (this_instr, 8, 11);
3677 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
3680 else /* operand 2 is a shifted register */
3681 operand2 = shifted_reg_val (frame, this_instr, c, pc_val, status);
3683 switch (bits (this_instr, 21, 24))
3686 result = operand1 & operand2;
3690 result = operand1 ^ operand2;
3694 result = operand1 - operand2;
3698 result = operand2 - operand1;
3702 result = operand1 + operand2;
3706 result = operand1 + operand2 + c;
3710 result = operand1 - operand2 + c;
3714 result = operand2 - operand1 + c;
3720 case 0xb: /* tst, teq, cmp, cmn */
3721 result = (unsigned long) nextpc;
3725 result = operand1 | operand2;
3729 /* Always step into a function. */
3734 result = operand1 & ~operand2;
3742 /* In 26-bit APCS the bottom two bits of the result are
3743 ignored, and we always end up in ARM state. */
3745 nextpc = arm_addr_bits_remove (gdbarch, result);
3753 case 0x5: /* data transfer */
3756 if (bit (this_instr, 20))
3759 if (bits (this_instr, 12, 15) == 15)
3765 if (bit (this_instr, 22))
3766 error (_("Invalid update to pc in instruction"));
3768 /* byte write to PC */
3769 rn = bits (this_instr, 16, 19);
3770 base = (rn == 15) ? pc_val + 8
3771 : get_frame_register_unsigned (frame, rn);
3772 if (bit (this_instr, 24))
3775 int c = (status & FLAG_C) ? 1 : 0;
3776 unsigned long offset =
3777 (bit (this_instr, 25)
3778 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
3779 : bits (this_instr, 0, 11));
3781 if (bit (this_instr, 23))
3786 nextpc = (CORE_ADDR) read_memory_integer ((CORE_ADDR) base,
3793 case 0x9: /* block transfer */
3794 if (bit (this_instr, 20))
3797 if (bit (this_instr, 15))
3802 if (bit (this_instr, 23))
3805 unsigned long reglist = bits (this_instr, 0, 14);
3806 offset = bitcount (reglist) * 4;
3807 if (bit (this_instr, 24)) /* pre */
3810 else if (bit (this_instr, 24))
3814 unsigned long rn_val =
3815 get_frame_register_unsigned (frame,
3816 bits (this_instr, 16, 19));
3818 (CORE_ADDR) read_memory_integer ((CORE_ADDR) (rn_val
3826 case 0xb: /* branch & link */
3827 case 0xa: /* branch */
3829 nextpc = BranchDest (pc, this_instr);
3835 case 0xe: /* coproc ops */
3839 struct gdbarch_tdep *tdep;
3840 tdep = gdbarch_tdep (gdbarch);
3842 if (tdep->syscall_next_pc != NULL)
3843 nextpc = tdep->syscall_next_pc (frame);
3849 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
3858 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
3860 struct gdbarch *gdbarch = get_frame_arch (frame);
3862 gdbarch_addr_bits_remove (gdbarch,
3863 arm_get_next_pc_raw (frame, pc, TRUE));
3865 error (_("Infinite loop detected"));
3869 /* single_step() is called just before we want to resume the inferior,
3870 if we want to single-step it but there is no hardware or kernel
3871 single-step support. We find the target of the coming instruction
3872 and breakpoint it. */
3875 arm_software_single_step (struct frame_info *frame)
3877 struct gdbarch *gdbarch = get_frame_arch (frame);
3878 struct address_space *aspace = get_frame_address_space (frame);
3880 /* NOTE: This may insert the wrong breakpoint instruction when
3881 single-stepping over a mode-changing instruction, if the
3882 CPSR heuristics are used. */
3884 CORE_ADDR next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
3885 insert_single_step_breakpoint (gdbarch, aspace, next_pc);
3890 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
3891 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
3892 NULL if an error occurs. BUF is freed. */
3895 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
3896 int old_len, int new_len)
3898 gdb_byte *new_buf, *middle;
3899 int bytes_to_read = new_len - old_len;
3901 new_buf = xmalloc (new_len);
3902 memcpy (new_buf + bytes_to_read, buf, old_len);
3904 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
3912 /* An IT block is at most the 2-byte IT instruction followed by
3913 four 4-byte instructions. The furthest back we must search to
3914 find an IT block that affects the current instruction is thus
3915 2 + 3 * 4 == 14 bytes. */
3916 #define MAX_IT_BLOCK_PREFIX 14
3918 /* Use a quick scan if there are more than this many bytes of
3920 #define IT_SCAN_THRESHOLD 32
3922 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
3923 A breakpoint in an IT block may not be hit, depending on the
3926 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
3930 CORE_ADDR boundary, func_start;
3931 int buf_len, buf2_len;
3932 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
3933 int i, any, last_it, last_it_count;
3935 /* If we are using BKPT breakpoints, none of this is necessary. */
3936 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
3939 /* ARM mode does not have this problem. */
3940 if (!arm_pc_is_thumb (gdbarch, bpaddr))
3943 /* We are setting a breakpoint in Thumb code that could potentially
3944 contain an IT block. The first step is to find how much Thumb
3945 code there is; we do not need to read outside of known Thumb
3947 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
3949 /* Thumb-2 code must have mapping symbols to have a chance. */
3952 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
3954 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
3955 && func_start > boundary)
3956 boundary = func_start;
3958 /* Search for a candidate IT instruction. We have to do some fancy
3959 footwork to distinguish a real IT instruction from the second
3960 half of a 32-bit instruction, but there is no need for that if
3961 there's no candidate. */
3962 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
3964 /* No room for an IT instruction. */
3967 buf = xmalloc (buf_len);
3968 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
3971 for (i = 0; i < buf_len; i += 2)
3973 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
3974 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
3986 /* OK, the code bytes before this instruction contain at least one
3987 halfword which resembles an IT instruction. We know that it's
3988 Thumb code, but there are still two possibilities. Either the
3989 halfword really is an IT instruction, or it is the second half of
3990 a 32-bit Thumb instruction. The only way we can tell is to
3991 scan forwards from a known instruction boundary. */
3992 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
3996 /* There's a lot of code before this instruction. Start with an
3997 optimistic search; it's easy to recognize halfwords that can
3998 not be the start of a 32-bit instruction, and use that to
3999 lock on to the instruction boundaries. */
4000 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4003 buf_len = IT_SCAN_THRESHOLD;
4006 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4008 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4009 if (thumb_insn_size (inst1) == 2)
4016 /* At this point, if DEFINITE, BUF[I] is the first place we
4017 are sure that we know the instruction boundaries, and it is far
4018 enough from BPADDR that we could not miss an IT instruction
4019 affecting BPADDR. If ! DEFINITE, give up - start from a
4023 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4026 buf_len = bpaddr - boundary;
4032 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4035 buf_len = bpaddr - boundary;
4039 /* Scan forwards. Find the last IT instruction before BPADDR. */
4044 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4046 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4051 else if (inst1 & 0x0002)
4053 else if (inst1 & 0x0004)
4058 i += thumb_insn_size (inst1);
4064 /* There wasn't really an IT instruction after all. */
4067 if (last_it_count < 1)
4068 /* It was too far away. */
4071 /* This really is a trouble spot. Move the breakpoint to the IT
4073 return bpaddr - buf_len + last_it;
4076 /* ARM displaced stepping support.
4078 Generally ARM displaced stepping works as follows:
4080 1. When an instruction is to be single-stepped, it is first decoded by
4081 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
4082 Depending on the type of instruction, it is then copied to a scratch
4083 location, possibly in a modified form. The copy_* set of functions
4084 performs such modification, as necessary. A breakpoint is placed after
4085 the modified instruction in the scratch space to return control to GDB.
4086 Note in particular that instructions which modify the PC will no longer
4087 do so after modification.
4089 2. The instruction is single-stepped, by setting the PC to the scratch
4090 location address, and resuming. Control returns to GDB when the
4093 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4094 function used for the current instruction. This function's job is to
4095 put the CPU/memory state back to what it would have been if the
4096 instruction had been executed unmodified in its original location. */
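/* As a concrete illustration of the scheme above: a PC-relative branch
   such as "b <label>" cannot usefully run out of line, so copy_b_bl_blx
   below copies it as a NOP into the scratch space and records the
   computed destination; after the single step, cleanup_branch writes that
   destination into the PC (and the return address into LR, for BL/BLX)
   just as the original instruction would have done in place.  */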
4098 /* NOP instruction (mov r0, r0). */
4099 #define ARM_NOP 0xe1a00000
4101 /* Helper for register reads for displaced stepping. In particular, this
4102 returns the PC as it would be seen by the instruction at its original
4106 displaced_read_reg (struct regcache *regs, CORE_ADDR from, int regno)
4112 if (debug_displaced)
4113 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4114 (unsigned long) from + 8);
4115 return (ULONGEST) from + 8; /* Pipeline offset. */
4119 regcache_cooked_read_unsigned (regs, regno, &ret);
4120 if (debug_displaced)
4121 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4122 regno, (unsigned long) ret);
4128 displaced_in_arm_mode (struct regcache *regs)
4131 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4133 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4135 return (ps & t_bit) == 0;
4138 /* Write to the PC as from a branch instruction. */
4141 branch_write_pc (struct regcache *regs, ULONGEST val)
4143 if (displaced_in_arm_mode (regs))
4144 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4145 architecture versions < 6. */
4146 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & ~(ULONGEST) 0x3);
4148 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & ~(ULONGEST) 0x1);
4151 /* Write to the PC as from a branch-exchange instruction. */
4154 bx_write_pc (struct regcache *regs, ULONGEST val)
4157 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4159 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4163 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4164 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4166 else if ((val & 2) == 0)
4168 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4169 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4173 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4174 mode, align dest to 4 bytes). */
4175 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4176 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4177 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4181 /* Write to the PC as if from a load instruction. */
4184 load_write_pc (struct regcache *regs, ULONGEST val)
4186 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4187 bx_write_pc (regs, val);
4189 branch_write_pc (regs, val);
4192 /* Write to the PC as if from an ALU instruction. */
4195 alu_write_pc (struct regcache *regs, ULONGEST val)
4197 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && displaced_in_arm_mode (regs))
4198 bx_write_pc (regs, val);
4200 branch_write_pc (regs, val);
4203 /* Helper for writing to registers for displaced stepping. Writing to the PC
4204 has varying effects depending on the instruction which does the write:
4205 this is controlled by the WRITE_PC argument. */
4208 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4209 int regno, ULONGEST val, enum pc_write_style write_pc)
4213 if (debug_displaced)
4214 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4215 (unsigned long) val);
4218 case BRANCH_WRITE_PC:
4219 branch_write_pc (regs, val);
4223 bx_write_pc (regs, val);
4227 load_write_pc (regs, val);
4231 alu_write_pc (regs, val);
4234 case CANNOT_WRITE_PC:
4235 warning (_("Instruction wrote to PC in an unexpected way when "
4236 "single-stepping"));
4240 internal_error (__FILE__, __LINE__,
4241 _("Invalid argument to displaced_write_reg"));
4244 dsc->wrote_to_pc = 1;
4248 if (debug_displaced)
4249 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4250 regno, (unsigned long) val);
4251 regcache_cooked_write_unsigned (regs, regno, val);
4255 /* This function is used to concisely determine if an instruction INSN
4256 references PC. Register fields of interest in INSN should have the
4257 corresponding fields of BITMASK set to 0b1111. The function returns 1
4258 if any of these fields in INSN reference the PC (also 0b1111, r15), else it
4262 insn_references_pc (uint32_t insn, uint32_t bitmask)
4264 uint32_t lowbit = 1;
4266 while (bitmask != 0)
4270 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4276 mask = lowbit * 0xf;
4278 if ((insn & mask) == mask)
4287 /* The simplest copy function. Many instructions have the same effect no
4288 matter what address they are executed at: in those cases, use this. */
4291 copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4292 const char *iname, struct displaced_step_closure *dsc)
4294 if (debug_displaced)
4295 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4296 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4299 dsc->modinsn[0] = insn;
4304 /* Preload instructions with immediate offset. */
4307 cleanup_preload (struct gdbarch *gdbarch,
4308 struct regcache *regs, struct displaced_step_closure *dsc)
4310 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4311 if (!dsc->u.preload.immed)
4312 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4316 copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4317 struct displaced_step_closure *dsc)
4319 unsigned int rn = bits (insn, 16, 19);
4321 CORE_ADDR from = dsc->insn_addr;
4323 if (!insn_references_pc (insn, 0x000f0000ul))
4324 return copy_unmodified (gdbarch, insn, "preload", dsc);
4326 if (debug_displaced)
4327 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4328 (unsigned long) insn);
4330 /* Preload instructions:
4332 {pli/pld} [rn, #+/-imm]
4334 {pli/pld} [r0, #+/-imm]. */
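/* Worked example (hypothetical instruction): "pld [pc, #64]" (0xf5dff040)
   becomes "pld [r0, #64]" once the Rn field is cleared below, with r0
   temporarily holding the value the PC would have had at the original
   location, so the preload still touches the intended address.  */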
4336 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
4337 rn_val = displaced_read_reg (regs, from, rn);
4338 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4340 dsc->u.preload.immed = 1;
4342 dsc->modinsn[0] = insn & 0xfff0ffff;
4344 dsc->cleanup = &cleanup_preload;
4349 /* Preload instructions with register offset. */
4352 copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4353 struct displaced_step_closure *dsc)
4355 unsigned int rn = bits (insn, 16, 19);
4356 unsigned int rm = bits (insn, 0, 3);
4357 ULONGEST rn_val, rm_val;
4358 CORE_ADDR from = dsc->insn_addr;
4360 if (!insn_references_pc (insn, 0x000f000ful))
4361 return copy_unmodified (gdbarch, insn, "preload reg", dsc);
4363 if (debug_displaced)
4364 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4365 (unsigned long) insn);
4367 /* Preload register-offset instructions:
4369 {pli/pld} [rn, rm {, shift}]
4371 {pli/pld} [r0, r1 {, shift}]. */
4373 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
4374 dsc->tmp[1] = displaced_read_reg (regs, from, 1);
4375 rn_val = displaced_read_reg (regs, from, rn);
4376 rm_val = displaced_read_reg (regs, from, rm);
4377 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4378 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4380 dsc->u.preload.immed = 0;
4382 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4384 dsc->cleanup = &cleanup_preload;
4389 /* Copy/cleanup coprocessor load and store instructions. */
4392 cleanup_copro_load_store (struct gdbarch *gdbarch,
4393 struct regcache *regs,
4394 struct displaced_step_closure *dsc)
4396 ULONGEST rn_val = displaced_read_reg (regs, dsc->insn_addr, 0);
4398 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4400 if (dsc->u.ldst.writeback)
4401 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4405 copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4406 struct regcache *regs,
4407 struct displaced_step_closure *dsc)
4409 unsigned int rn = bits (insn, 16, 19);
4411 CORE_ADDR from = dsc->insn_addr;
4413 if (!insn_references_pc (insn, 0x000f0000ul))
4414 return copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4416 if (debug_displaced)
4417 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4418 "load/store insn %.8lx\n", (unsigned long) insn);
4420 /* Coprocessor load/store instructions:
4422 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4424 {stc/stc2} [r0, #+/-imm].
4426 ldc/ldc2 are handled identically. */
4428 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
4429 rn_val = displaced_read_reg (regs, from, rn);
4430 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4432 dsc->u.ldst.writeback = bit (insn, 25);
4433 dsc->u.ldst.rn = rn;
4435 dsc->modinsn[0] = insn & 0xfff0ffff;
4437 dsc->cleanup = &cleanup_copro_load_store;
4442 /* Clean up branch instructions (actually perform the branch, by setting PC).  */
4446 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4447 struct displaced_step_closure *dsc)
4449 ULONGEST from = dsc->insn_addr;
4450 uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
4451 int branch_taken = condition_true (dsc->u.branch.cond, status);
4452 enum pc_write_style write_pc = dsc->u.branch.exchange
4453 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4458 if (dsc->u.branch.link)
4460 ULONGEST pc = displaced_read_reg (regs, from, 15);
4461 displaced_write_reg (regs, dsc, 14, pc - 4, CANNOT_WRITE_PC);
4464 displaced_write_reg (regs, dsc, 15, dsc->u.branch.dest, write_pc);
4467 /* Copy B/BL/BLX instructions with immediate destinations. */
4470 copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4471 struct regcache *regs, struct displaced_step_closure *dsc)
4473 unsigned int cond = bits (insn, 28, 31);
4474 int exchange = (cond == 0xf);
4475 int link = exchange || bit (insn, 24);
4476 CORE_ADDR from = dsc->insn_addr;
4479 if (debug_displaced)
4480 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4481 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4482 (unsigned long) insn);
4484 /* Implement "BL<cond> <label>" as:
4486 Preparation: cond <- instruction condition
4487 Insn: mov r0, r0 (nop)
4488 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4490 B<cond> similar, but don't set r14 in cleanup. */
4493 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4494 then arrange the switch into Thumb mode. */
4495 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4497 offset = bits (insn, 0, 23) << 2;
4499 if (bit (offset, 25))
4500 offset = offset | ~0x3ffffff;
4502 dsc->u.branch.cond = cond;
4503 dsc->u.branch.link = link;
4504 dsc->u.branch.exchange = exchange;
4505 dsc->u.branch.dest = from + 8 + offset;
4507 dsc->modinsn[0] = ARM_NOP;
4509 dsc->cleanup = &cleanup_branch;
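  /* The destination computed above is relative to the original location: an
     ARM-mode branch is taken relative to the instruction's address plus 8,
     hence dest = from + 8 + offset.  As an illustrative check (not from the
     original sources): "b ." encodes imm24 = 0xfffffe, giving
     offset = (0xfffffe << 2) sign-extended = -8, so dest = from and the
     branch-to-self is reproduced faithfully at cleanup time.  */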
4514 /* Copy BX/BLX with register-specified destinations. */
4517 copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
4518 struct regcache *regs, struct displaced_step_closure *dsc)
4520 unsigned int cond = bits (insn, 28, 31);
4523 int link = bit (insn, 5);
4524 unsigned int rm = bits (insn, 0, 3);
4525 CORE_ADDR from = dsc->insn_addr;
4527 if (debug_displaced)
4528 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s register insn "
4529 "%.8lx\n", (link) ? "blx" : "bx", (unsigned long) insn);
4531 /* Implement "{BX,BLX}<cond> <reg>" as:
4533 Preparation: cond <- instruction condition
4534 Insn: mov r0, r0 (nop)
4535 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
4537 Don't set r14 in cleanup for BX. */
4539 dsc->u.branch.dest = displaced_read_reg (regs, from, rm);
4541 dsc->u.branch.cond = cond;
4542 dsc->u.branch.link = link;
4543 dsc->u.branch.exchange = 1;
4545 dsc->modinsn[0] = ARM_NOP;
4547 dsc->cleanup = &cleanup_branch;
4552 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
4555 cleanup_alu_imm (struct gdbarch *gdbarch,
4556 struct regcache *regs, struct displaced_step_closure *dsc)
4558 ULONGEST rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
4559 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4560 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4561 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
4565 copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4566 struct displaced_step_closure *dsc)
4568 unsigned int rn = bits (insn, 16, 19);
4569 unsigned int rd = bits (insn, 12, 15);
4570 unsigned int op = bits (insn, 21, 24);
4571 int is_mov = (op == 0xd);
4572 ULONGEST rd_val, rn_val;
4573 CORE_ADDR from = dsc->insn_addr;
4575 if (!insn_references_pc (insn, 0x000ff000ul))
4576 return copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
4578 if (debug_displaced)
4579 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
4580 "%.8lx\n", is_mov ? "move" : "ALU",
4581 (unsigned long) insn);
4583 /* Instruction is of form:
4585 <op><cond> rd, [rn,] #imm
4589 Preparation: tmp1, tmp2 <- r0, r1; r0, r1 <- rd, rn
4591 Insn: <op><cond> r0, r1, #imm
4592 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2.  */
4595 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
4596 dsc->tmp[1] = displaced_read_reg (regs, from, 1);
4597 rn_val = displaced_read_reg (regs, from, rn);
4598 rd_val = displaced_read_reg (regs, from, rd);
4599 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
4600 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
4604 dsc->modinsn[0] = insn & 0xfff00fff;
4606 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
4608 dsc->cleanup = &cleanup_alu_imm;
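  /* In the modified instruction, (insn & 0xfff00fff) clears both the Rd
     (bits 12-15) and Rn (bits 16-19) fields, i.e. both become r0; for
     non-MOV opcodes "| 0x10000" then selects r1 as Rn.  Illustrative example
     (hedged, not from the original sources): "add r2, pc, #4" would execute
     out of line as "add r0, r1, #4" with r1 holding the PC value read above,
     and cleanup_alu_imm copies the r0 result back into r2.  */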
4613 /* Copy/cleanup arithmetic/logic insns with register RHS. */
4616 cleanup_alu_reg (struct gdbarch *gdbarch,
4617 struct regcache *regs, struct displaced_step_closure *dsc)
4622 rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
4624 for (i = 0; i < 3; i++)
4625 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
4627 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
4631 copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4632 struct displaced_step_closure *dsc)
4634 unsigned int rn = bits (insn, 16, 19);
4635 unsigned int rm = bits (insn, 0, 3);
4636 unsigned int rd = bits (insn, 12, 15);
4637 unsigned int op = bits (insn, 21, 24);
4638 int is_mov = (op == 0xd);
4639 ULONGEST rd_val, rn_val, rm_val;
4640 CORE_ADDR from = dsc->insn_addr;
4642 if (!insn_references_pc (insn, 0x000ff00ful))
4643 return copy_unmodified (gdbarch, insn, "ALU reg", dsc);
4645 if (debug_displaced)
4646 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
4647 is_mov ? "move" : "ALU", (unsigned long) insn);
4649 /* Instruction is of form:
4651 <op><cond> rd, [rn,] rm [, <shift>]
4655 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
4656 r0, r1, r2 <- rd, rn, rm
4657 Insn: <op><cond> r0, r1, r2 [, <shift>]
4658 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3.  */
4661 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
4662 dsc->tmp[1] = displaced_read_reg (regs, from, 1);
4663 dsc->tmp[2] = displaced_read_reg (regs, from, 2);
4664 rd_val = displaced_read_reg (regs, from, rd);
4665 rn_val = displaced_read_reg (regs, from, rn);
4666 rm_val = displaced_read_reg (regs, from, rm);
4667 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
4668 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
4669 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
4673 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
4675 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
4677 dsc->cleanup = &cleanup_alu_reg;
4682 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
4685 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
4686 struct regcache *regs,
4687 struct displaced_step_closure *dsc)
4689 ULONGEST rd_val = displaced_read_reg (regs, dsc->insn_addr, 0);
4692 for (i = 0; i < 4; i++)
4693 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
4695 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
4699 copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
4700 struct regcache *regs, struct displaced_step_closure *dsc)
4702 unsigned int rn = bits (insn, 16, 19);
4703 unsigned int rm = bits (insn, 0, 3);
4704 unsigned int rd = bits (insn, 12, 15);
4705 unsigned int rs = bits (insn, 8, 11);
4706 unsigned int op = bits (insn, 21, 24);
4707 int is_mov = (op == 0xd), i;
4708 ULONGEST rd_val, rn_val, rm_val, rs_val;
4709 CORE_ADDR from = dsc->insn_addr;
4711 if (!insn_references_pc (insn, 0x000fff0ful))
4712 return copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
4714 if (debug_displaced)
4715 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
4716 "%.8lx\n", is_mov ? "move" : "ALU",
4717 (unsigned long) insn);
4719 /* Instruction is of form:
4721 <op><cond> rd, [rn,] rm, <shift> rs
4725 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
4726 r0, r1, r2, r3 <- rd, rn, rm, rs
4727 Insn: <op><cond> r0, r1, r2, <shift> r3
4729 Cleanup: rd <- r0; r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4.  */
4733 for (i = 0; i < 4; i++)
4734 dsc->tmp[i] = displaced_read_reg (regs, from, i);
4736 rd_val = displaced_read_reg (regs, from, rd);
4737 rn_val = displaced_read_reg (regs, from, rn);
4738 rm_val = displaced_read_reg (regs, from, rm);
4739 rs_val = displaced_read_reg (regs, from, rs);
4740 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
4741 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
4742 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
4743 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
4747 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
4749 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
4751 dsc->cleanup = &cleanup_alu_shifted_reg;
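  /* The "| 0x302" places r3 in the Rs field (bits 8-11) and r2 in the Rm
     field (bits 0-3); the mask already cleared Rd and Rn to r0, and the
     non-MOV variant selects r1 as Rn via "| 0x10302", matching the register
     assignments made just above.  */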
4756 /* Clean up load instructions. */
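/* Register convention assumed by cleanup_load and cleanup_store below (and
   established by copy_extra_ld_st and copy_ldr_str_ldrb_strb): the transfer
   register Rt is moved to r0 (and Rt2 to r1 for doubleword transfers), the
   base Rn to r2, and the index Rm to r3 for register-offset forms; r4 is
   only borrowed when a PC store has to be synthesized.  The original
   register values are saved in dsc->tmp[] and restored by the cleanups.  */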
4759 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
4760 struct displaced_step_closure *dsc)
4762 ULONGEST rt_val, rt_val2 = 0, rn_val;
4763 CORE_ADDR from = dsc->insn_addr;
4765 rt_val = displaced_read_reg (regs, from, 0);
4766 if (dsc->u.ldst.xfersize == 8)
4767 rt_val2 = displaced_read_reg (regs, from, 1);
4768 rn_val = displaced_read_reg (regs, from, 2);
4770 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4771 if (dsc->u.ldst.xfersize > 4)
4772 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4773 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
4774 if (!dsc->u.ldst.immed)
4775 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
4777 /* Handle register writeback. */
4778 if (dsc->u.ldst.writeback)
4779 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
4780 /* Put result in right place. */
4781 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
4782 if (dsc->u.ldst.xfersize == 8)
4783 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
4786 /* Clean up store instructions. */
4789 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
4790 struct displaced_step_closure *dsc)
4792 CORE_ADDR from = dsc->insn_addr;
4793 ULONGEST rn_val = displaced_read_reg (regs, from, 2);
4795 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4796 if (dsc->u.ldst.xfersize > 4)
4797 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4798 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
4799 if (!dsc->u.ldst.immed)
4800 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
4801 if (!dsc->u.ldst.restore_r4)
4802 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
4805 if (dsc->u.ldst.writeback)
4806 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
4809 /* Copy "extra" load/store instructions. These are halfword/doubleword
4810 transfers, which have a different encoding to byte/word transfers. */
4813 copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
4814 struct regcache *regs, struct displaced_step_closure *dsc)
4816 unsigned int op1 = bits (insn, 20, 24);
4817 unsigned int op2 = bits (insn, 5, 6);
4818 unsigned int rt = bits (insn, 12, 15);
4819 unsigned int rn = bits (insn, 16, 19);
4820 unsigned int rm = bits (insn, 0, 3);
4821 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
4822 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
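  /* The two tables above are indexed by the opcode value computed below:
     index = op2 * 4 + L + 2 * imm22 - 4, where op2 is bits 6:5, L is the
     load bit (bit 20) and imm22 is the immediate-form bit (bit 22).  As an
     illustrative check: ldrh with an immediate offset (op2 = 01, L = 1)
     yields index 3, a 2-byte load; strd (op2 = 11, L = 0) yields index 8 or
     10, an 8-byte store.  */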
4823 int immed = (op1 & 0x4) != 0;
4825 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
4826 CORE_ADDR from = dsc->insn_addr;
4828 if (!insn_references_pc (insn, 0x000ff00ful))
4829 return copy_unmodified (gdbarch, insn, "extra load/store", dsc);
4831 if (debug_displaced)
4832 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
4833 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
4834 (unsigned long) insn);
4836 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
4839 internal_error (__FILE__, __LINE__,
4840 _("copy_extra_ld_st: instruction decode error"));
4842 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
4843 dsc->tmp[1] = displaced_read_reg (regs, from, 1);
4844 dsc->tmp[2] = displaced_read_reg (regs, from, 2);
4846 dsc->tmp[3] = displaced_read_reg (regs, from, 3);
4848 rt_val = displaced_read_reg (regs, from, rt);
4849 if (bytesize[opcode] == 8)
4850 rt_val2 = displaced_read_reg (regs, from, rt + 1);
4851 rn_val = displaced_read_reg (regs, from, rn);
4853 rm_val = displaced_read_reg (regs, from, rm);
4855 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
4856 if (bytesize[opcode] == 8)
4857 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
4858 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
4860 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
4863 dsc->u.ldst.xfersize = bytesize[opcode];
4864 dsc->u.ldst.rn = rn;
4865 dsc->u.ldst.immed = immed;
4866 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
4867 dsc->u.ldst.restore_r4 = 0;
4870 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
4872 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
4873 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
4875 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
4877 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
4878 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
4880 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
4885 /* Copy byte/word loads and stores. */
4888 copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
4889 struct regcache *regs,
4890 struct displaced_step_closure *dsc, int load, int byte,
4893 int immed = !bit (insn, 25);
4894 unsigned int rt = bits (insn, 12, 15);
4895 unsigned int rn = bits (insn, 16, 19);
4896 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
4897 ULONGEST rt_val, rn_val, rm_val = 0;
4898 CORE_ADDR from = dsc->insn_addr;
4900 if (!insn_references_pc (insn, 0x000ff00ful))
4901 return copy_unmodified (gdbarch, insn, "load/store", dsc);
4903 if (debug_displaced)
4904 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s%s insn %.8lx\n",
4905 load ? (byte ? "ldrb" : "ldr")
4906 : (byte ? "strb" : "str"), usermode ? "t" : "",
4907 (unsigned long) insn);
4909 dsc->tmp[0] = displaced_read_reg (regs, from, 0);
4910 dsc->tmp[2] = displaced_read_reg (regs, from, 2);
4912 dsc->tmp[3] = displaced_read_reg (regs, from, 3);
4914 dsc->tmp[4] = displaced_read_reg (regs, from, 4);
4916 rt_val = displaced_read_reg (regs, from, rt);
4917 rn_val = displaced_read_reg (regs, from, rn);
4919 rm_val = displaced_read_reg (regs, from, rm);
4921 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
4922 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
4924 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
4927 dsc->u.ldst.xfersize = byte ? 1 : 4;
4928 dsc->u.ldst.rn = rn;
4929 dsc->u.ldst.immed = immed;
4930 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
4932 /* To write PC we can do:
4934 scratch+0: str pc, temp (*temp = scratch + 8 + offset)
4935 scratch+4: ldr r4, temp
4936 scratch+8: sub r4, r4, pc (r4 = scratch + 8 + offset - scratch - 8 - 8)
4937 scratch+12: add r4, r4, #8 (r4 = offset)
4938 scratch+16: add r0, r0, r4
4939 scratch+20: str r0, [r2, #imm] (or str r0, [r2, r3])
4942 Otherwise we don't know what value to write for PC, since the offset is
4943 architecture-dependent (sometimes PC+8, sometimes PC+12). */
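  /* Put another way: r0 already holds the value the original instruction
     would read for the PC (its own address plus 8).  The str/ldr/sub/add
     prologue measures how many bytes beyond +8 this particular core actually
     stores for "str pc" and adds that difference to r0, so the value finally
     written to memory matches what the unmodified instruction would have
     stored at its original location.  */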
4945 if (load || rt != 15)
4947 dsc->u.ldst.restore_r4 = 0;
4950 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
4952 {ldr,str}[b]<cond> r0, [r2, #imm]. */
4953 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
4955 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
4957 {ldr,str}[b]<cond> r0, [r2, r3]. */
4958 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
4962 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
4963 dsc->u.ldst.restore_r4 = 1;
4965 dsc->modinsn[0] = 0xe58ff014; /* str pc, [pc, #20]. */
4966 dsc->modinsn[1] = 0xe59f4010; /* ldr r4, [pc, #16]. */
4967 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
4968 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
4969 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
4973 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
4975 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
4977 dsc->modinsn[6] = 0x0; /* breakpoint location. */
4978 dsc->modinsn[7] = 0x0; /* scratch space. */
4983 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
4988 /* Cleanup LDM instructions with fully-populated register list. This is an
4989 unfortunate corner case: it's impossible to implement correctly by modifying
4990 the instruction. The issue is as follows: we have an instruction,
4994 which we must rewrite to avoid loading PC. A possible solution would be to
4995 do the load in two halves, something like (with suitable cleanup afterwards):
4999 ldm[id][ab] r8!, {r0-r7}
5001 ldm[id][ab] r8, {r7-r14}
5004 but at present there's no suitable place for <temp>, since the scratch space
5005 is overwritten before the cleanup routine is called. For now, we simply
5006 emulate the instruction. */
5009 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5010 struct displaced_step_closure *dsc)
5012 ULONGEST from = dsc->insn_addr;
5013 int inc = dsc->u.block.increment;
5014 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5015 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5016 uint32_t regmask = dsc->u.block.regmask;
5017 int regno = inc ? 0 : 15;
5018 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5019 int exception_return = dsc->u.block.load && dsc->u.block.user
5020 && (regmask & 0x8000) != 0;
5021 uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
5022 int do_transfer = condition_true (dsc->u.block.cond, status);
5023 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5028 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5029 sensible we can do here. Complain loudly. */
5030 if (exception_return)
5031 error (_("Cannot single-step exception return"));
5033 /* We don't handle any stores here for now. */
5034 gdb_assert (dsc->u.block.load != 0);
5036 if (debug_displaced)
5037 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5038 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5039 dsc->u.block.increment ? "inc" : "dec",
5040 dsc->u.block.before ? "before" : "after");
5047 while (regno <= 15 && (regmask & (1 << regno)) == 0)
5050 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5053 xfer_addr += bump_before;
5055 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5056 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5058 xfer_addr += bump_after;
5060 regmask &= ~(1 << regno);
5063 if (dsc->u.block.writeback)
5064 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5068 /* Clean up an STM which included the PC in the register list. */
5071 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5072 struct displaced_step_closure *dsc)
5074 ULONGEST from = dsc->insn_addr;
5075 uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
5076 int store_executed = condition_true (dsc->u.block.cond, status);
5077 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5078 CORE_ADDR stm_insn_addr;
5081 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5083 /* If condition code fails, there's nothing else to do. */
5084 if (!store_executed)
5087 if (dsc->u.block.increment)
5089 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5091 if (dsc->u.block.before)
5096 pc_stored_at = dsc->u.block.xfer_addr;
5098 if (dsc->u.block.before)
5102 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5103 stm_insn_addr = dsc->scratch_base;
5104 offset = pc_val - stm_insn_addr;
5106 if (debug_displaced)
5107 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5108 "STM instruction\n", offset);
5110 /* Rewrite the stored PC to the proper value for the non-displaced original instruction.  */
5112 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5113 dsc->insn_addr + offset);
5116 /* Clean up an LDM which includes the PC in the register list. We clumped all
5117 the registers in the transferred list into a contiguous range r0...rX (to
5118 avoid loading PC directly and losing control of the debugged program), so we
5119 must undo that here. */
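/* Illustrative example (hedged, not from the original sources): if the
   original instruction was "ldm r7, {r1, r4, pc}", copy_block_xfer below
   rewrites it as "ldm r7, {r0, r1, r2}", a contiguous list of the same
   length, with any writeback emulated in this cleanup instead.  The cleanup
   then moves r2 into the PC, r1 into r4 and r0 into r1, and finally restores
   r0 and r2, which were merely borrowed, from dsc->tmp[].  */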
5122 cleanup_block_load_pc (struct gdbarch *gdbarch,
5123 struct regcache *regs,
5124 struct displaced_step_closure *dsc)
5126 ULONGEST from = dsc->insn_addr;
5127 uint32_t status = displaced_read_reg (regs, from, ARM_PS_REGNUM);
5128 int load_executed = condition_true (dsc->u.block.cond, status), i;
5129 unsigned int mask = dsc->u.block.regmask, write_reg = 15;
5130 unsigned int regs_loaded = bitcount (mask);
5131 unsigned int num_to_shuffle = regs_loaded, clobbered;
5133 /* The method employed here will fail if the register list is fully populated
5134 (we need to avoid loading PC directly). */
5135 gdb_assert (num_to_shuffle < 16);
5140 clobbered = (1 << num_to_shuffle) - 1;
5142 while (num_to_shuffle > 0)
5144 if ((mask & (1 << write_reg)) != 0)
5146 unsigned int read_reg = num_to_shuffle - 1;
5148 if (read_reg != write_reg)
5150 ULONGEST rval = displaced_read_reg (regs, from, read_reg);
5151 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5152 if (debug_displaced)
5153 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5154 "loaded register r%d to r%d\n"), read_reg,
5157 else if (debug_displaced)
5158 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5159 "r%d already in the right place\n"),
5162 clobbered &= ~(1 << write_reg);
5170 /* Restore any registers we scribbled over. */
5171 for (write_reg = 0; clobbered != 0; write_reg++)
5173 if ((clobbered & (1 << write_reg)) != 0)
5175 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5177 if (debug_displaced)
5178 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5179 "clobbered register r%d\n"), write_reg);
5180 clobbered &= ~(1 << write_reg);
5184 /* Perform register writeback manually. */
5185 if (dsc->u.block.writeback)
5187 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5189 if (dsc->u.block.increment)
5190 new_rn_val += regs_loaded * 4;
5192 new_rn_val -= regs_loaded * 4;
5194 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5199 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5200 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5203 copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5204 struct displaced_step_closure *dsc)
5206 int load = bit (insn, 20);
5207 int user = bit (insn, 22);
5208 int increment = bit (insn, 23);
5209 int before = bit (insn, 24);
5210 int writeback = bit (insn, 21);
5211 int rn = bits (insn, 16, 19);
5212 CORE_ADDR from = dsc->insn_addr;
5214 /* Block transfers which don't mention PC can be run directly out-of-line. */
5215 if (rn != 15 && (insn & 0x8000) == 0)
5216 return copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5220 warning (_("displaced: Unpredictable LDM or STM with base register r15"));
5221 return copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5224 if (debug_displaced)
5225 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5226 "%.8lx\n", (unsigned long) insn);
5228 dsc->u.block.xfer_addr = displaced_read_reg (regs, from, rn);
5229 dsc->u.block.rn = rn;
5231 dsc->u.block.load = load;
5232 dsc->u.block.user = user;
5233 dsc->u.block.increment = increment;
5234 dsc->u.block.before = before;
5235 dsc->u.block.writeback = writeback;
5236 dsc->u.block.cond = bits (insn, 28, 31);
5238 dsc->u.block.regmask = insn & 0xffff;
5242 if ((insn & 0xffff) == 0xffff)
5244 /* LDM with a fully-populated register list. This case is
5245 particularly tricky. Implement for now by fully emulating the
5246 instruction (which might not behave perfectly in all cases, but
5247 these instructions should be rare enough for that not to matter too much).  */
5249 dsc->modinsn[0] = ARM_NOP;
5251 dsc->cleanup = &cleanup_block_load_all;
5255 /* LDM of a list of registers which includes PC. Implement by
5256 rewriting the list of registers to be transferred into a
5257 contiguous chunk r0...rX before doing the transfer, then shuffling
5258 registers into the correct places in the cleanup routine. */
5259 unsigned int regmask = insn & 0xffff;
5260 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
5261 unsigned int to = 0, from = 0, i, new_rn;
5263 for (i = 0; i < num_in_list; i++)
5264 dsc->tmp[i] = displaced_read_reg (regs, from, i);
5266 /* Writeback makes things complicated. We need to avoid clobbering
5267 the base register with one of the registers in our modified
5268 register list, but just using a different register can't work in all cases, e.g.:
5271 ldm r14!, {r0-r13,pc}
5273 which would need to be rewritten as ldm rN!, {r0-r14},
5277 but that can't work, because there's no free register for N.
5279 Solve this by turning off the writeback bit, and emulating
5280 writeback manually in the cleanup routine. */
5285 new_regmask = (1 << num_in_list) - 1;
5287 if (debug_displaced)
5288 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
5289 "{..., pc}: original reg list %.4x, modified "
5290 "list %.4x\n"), rn, writeback ? "!" : "",
5291 (int) insn & 0xffff, new_regmask);
5293 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
5295 dsc->cleanup = &cleanup_block_load_pc;
5300 /* STM of a list of registers which includes PC. Run the instruction
5301 as-is, but out of line: this will store the wrong value for the PC,
5302 so we must manually fix up the memory in the cleanup routine.
5303 Doing things this way has the advantage that we can auto-detect
5304 the offset of the PC write (which is architecture-dependent) in
5305 the cleanup routine. */
5306 dsc->modinsn[0] = insn;
5308 dsc->cleanup = &cleanup_block_store_pc;
5314 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
5315 for Linux, where some SVC instructions must be treated specially. */
5318 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
5319 struct displaced_step_closure *dsc)
5321 CORE_ADDR from = dsc->insn_addr;
5322 CORE_ADDR resume_addr = from + 4;
5324 if (debug_displaced)
5325 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
5326 "%.8lx\n", (unsigned long) resume_addr);
5328 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
5332 copy_svc (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
5333 struct regcache *regs, struct displaced_step_closure *dsc)
5335 CORE_ADDR from = dsc->insn_addr;
5337 /* Allow OS-specific code to override SVC handling. */
5338 if (dsc->u.svc.copy_svc_os)
5339 return dsc->u.svc.copy_svc_os (gdbarch, insn, to, regs, dsc);
5341 if (debug_displaced)
5342 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
5343 (unsigned long) insn);
5345 /* Preparation: none.
5346 Insn: unmodified svc.
5347 Cleanup: pc <- insn_addr + 4. */
5349 dsc->modinsn[0] = insn;
5351 dsc->cleanup = &cleanup_svc;
5352 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next instruction.  */
5354 dsc->wrote_to_pc = 1;
5359 /* Copy undefined instructions. */
5362 copy_undef (struct gdbarch *gdbarch, uint32_t insn,
5363 struct displaced_step_closure *dsc)
5365 if (debug_displaced)
5366 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn %.8lx\n",
5367 (unsigned long) insn);
5369 dsc->modinsn[0] = insn;
5374 /* Copy unpredictable instructions. */
5377 copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
5378 struct displaced_step_closure *dsc)
5380 if (debug_displaced)
5381 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
5382 "%.8lx\n", (unsigned long) insn);
5384 dsc->modinsn[0] = insn;
5389 /* The decode_* functions are instruction decoding helpers. They mostly follow
5390 the presentation in the ARM ARM. */
5393 decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
5394 struct regcache *regs,
5395 struct displaced_step_closure *dsc)
5397 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
5398 unsigned int rn = bits (insn, 16, 19);
5400 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
5401 return copy_unmodified (gdbarch, insn, "cps", dsc);
5402 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
5403 return copy_unmodified (gdbarch, insn, "setend", dsc);
5404 else if ((op1 & 0x60) == 0x20)
5405 return copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
5406 else if ((op1 & 0x71) == 0x40)
5407 return copy_unmodified (gdbarch, insn, "neon elt/struct load/store", dsc);
5408 else if ((op1 & 0x77) == 0x41)
5409 return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
5410 else if ((op1 & 0x77) == 0x45)
5411 return copy_preload (gdbarch, insn, regs, dsc); /* pli. */
5412 else if ((op1 & 0x77) == 0x51)
5415 return copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
5417 return copy_unpred (gdbarch, insn, dsc);
5419 else if ((op1 & 0x77) == 0x55)
5420 return copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
5421 else if (op1 == 0x57)
5424 case 0x1: return copy_unmodified (gdbarch, insn, "clrex", dsc);
5425 case 0x4: return copy_unmodified (gdbarch, insn, "dsb", dsc);
5426 case 0x5: return copy_unmodified (gdbarch, insn, "dmb", dsc);
5427 case 0x6: return copy_unmodified (gdbarch, insn, "isb", dsc);
5428 default: return copy_unpred (gdbarch, insn, dsc);
5430 else if ((op1 & 0x63) == 0x43)
5431 return copy_unpred (gdbarch, insn, dsc);
5432 else if ((op2 & 0x1) == 0x0)
5433 switch (op1 & ~0x80)
5436 return copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
5438 return copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
5439 case 0x71: case 0x75:
5441 return copy_preload_reg (gdbarch, insn, regs, dsc);
5442 case 0x63: case 0x67: case 0x73: case 0x77:
5443 return copy_unpred (gdbarch, insn, dsc);
5445 return copy_undef (gdbarch, insn, dsc);
5448 return copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
5452 decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
5453 struct regcache *regs, struct displaced_step_closure *dsc)
5455 if (bit (insn, 27) == 0)
5456 return decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
5457 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
5458 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
5461 return copy_unmodified (gdbarch, insn, "srs", dsc);
5464 return copy_unmodified (gdbarch, insn, "rfe", dsc);
5466 case 0x4: case 0x5: case 0x6: case 0x7:
5467 return copy_b_bl_blx (gdbarch, insn, regs, dsc);
5470 switch ((insn & 0xe00000) >> 21)
5472 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
5474 return copy_copro_load_store (gdbarch, insn, regs, dsc);
5477 return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
5480 return copy_undef (gdbarch, insn, dsc);
5485 int rn_f = (bits (insn, 16, 19) == 0xf);
5486 switch ((insn & 0xe00000) >> 21)
5489 /* ldc/ldc2 imm (undefined for rn == pc). */
5490 return rn_f ? copy_undef (gdbarch, insn, dsc)
5491 : copy_copro_load_store (gdbarch, insn, regs, dsc);
5494 return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
5496 case 0x4: case 0x5: case 0x6: case 0x7:
5497 /* ldc/ldc2 lit (undefined for rn != pc). */
5498 return rn_f ? copy_copro_load_store (gdbarch, insn, regs, dsc)
5499 : copy_undef (gdbarch, insn, dsc);
5502 return copy_undef (gdbarch, insn, dsc);
5507 return copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
5510 if (bits (insn, 16, 19) == 0xf)
5512 return copy_copro_load_store (gdbarch, insn, regs, dsc);
5514 return copy_undef (gdbarch, insn, dsc);
5518 return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
5520 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
5524 return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
5526 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
5529 return copy_undef (gdbarch, insn, dsc);
5533 /* Decode miscellaneous instructions in dp/misc encoding space. */
5536 decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
5537 struct regcache *regs, struct displaced_step_closure *dsc)
5539 unsigned int op2 = bits (insn, 4, 6);
5540 unsigned int op = bits (insn, 21, 22);
5541 unsigned int op1 = bits (insn, 16, 19);
5546 return copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
5549 if (op == 0x1) /* bx. */
5550 return copy_bx_blx_reg (gdbarch, insn, regs, dsc);
5552 return copy_unmodified (gdbarch, insn, "clz", dsc);
5554 return copy_undef (gdbarch, insn, dsc);
5558 /* Not really supported. */
5559 return copy_unmodified (gdbarch, insn, "bxj", dsc);
5561 return copy_undef (gdbarch, insn, dsc);
5565 return copy_bx_blx_reg (gdbarch, insn, regs, dsc); /* blx register. */
5567 return copy_undef (gdbarch, insn, dsc);
5570 return copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
5574 return copy_unmodified (gdbarch, insn, "bkpt", dsc);
5576 /* Not really supported. */
5577 return copy_unmodified (gdbarch, insn, "smc", dsc);
5580 return copy_undef (gdbarch, insn, dsc);
5585 decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5586 struct displaced_step_closure *dsc)
5589 switch (bits (insn, 20, 24))
5592 return copy_unmodified (gdbarch, insn, "movw", dsc);
5595 return copy_unmodified (gdbarch, insn, "movt", dsc);
5597 case 0x12: case 0x16:
5598 return copy_unmodified (gdbarch, insn, "msr imm", dsc);
5601 return copy_alu_imm (gdbarch, insn, regs, dsc);
5605 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
5607 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
5608 return copy_alu_reg (gdbarch, insn, regs, dsc);
5609 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
5610 return copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
5611 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
5612 return decode_miscellaneous (gdbarch, insn, regs, dsc);
5613 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
5614 return copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
5615 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
5616 return copy_unmodified (gdbarch, insn, "mul/mla", dsc);
5617 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
5618 return copy_unmodified (gdbarch, insn, "synch", dsc);
5619 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
5620 /* 2nd arg means "unprivileged". */
5621 return copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
5625 /* Should be unreachable. */
5630 decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
5631 struct regcache *regs,
5632 struct displaced_step_closure *dsc)
5634 int a = bit (insn, 25), b = bit (insn, 4);
5635 uint32_t op1 = bits (insn, 20, 24);
5636 int rn_f = bits (insn, 16, 19) == 0xf;
5638 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
5639 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
5640 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 0);
5641 else if ((!a && (op1 & 0x17) == 0x02)
5642 || (a && (op1 & 0x17) == 0x02 && !b))
5643 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 0, 1);
5644 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
5645 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
5646 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 0);
5647 else if ((!a && (op1 & 0x17) == 0x03)
5648 || (a && (op1 & 0x17) == 0x03 && !b))
5649 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 0, 1);
5650 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
5651 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
5652 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
5653 else if ((!a && (op1 & 0x17) == 0x06)
5654 || (a && (op1 & 0x17) == 0x06 && !b))
5655 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
5656 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
5657 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
5658 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
5659 else if ((!a && (op1 & 0x17) == 0x07)
5660 || (a && (op1 & 0x17) == 0x07 && !b))
5661 return copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
5663 /* Should be unreachable. */
5668 decode_media (struct gdbarch *gdbarch, uint32_t insn,
5669 struct displaced_step_closure *dsc)
5671 switch (bits (insn, 20, 24))
5673 case 0x00: case 0x01: case 0x02: case 0x03:
5674 return copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
5676 case 0x04: case 0x05: case 0x06: case 0x07:
5677 return copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
5679 case 0x08: case 0x09: case 0x0a: case 0x0b:
5680 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
5681 return copy_unmodified (gdbarch, insn,
5682 "decode/pack/unpack/saturate/reverse", dsc);
5685 if (bits (insn, 5, 7) == 0) /* op2. */
5687 if (bits (insn, 12, 15) == 0xf)
5688 return copy_unmodified (gdbarch, insn, "usad8", dsc);
5690 return copy_unmodified (gdbarch, insn, "usada8", dsc);
5693 return copy_undef (gdbarch, insn, dsc);
5695 case 0x1a: case 0x1b:
5696 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
5697 return copy_unmodified (gdbarch, insn, "sbfx", dsc);
5699 return copy_undef (gdbarch, insn, dsc);
5701 case 0x1c: case 0x1d:
5702 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
5704 if (bits (insn, 0, 3) == 0xf)
5705 return copy_unmodified (gdbarch, insn, "bfc", dsc);
5707 return copy_unmodified (gdbarch, insn, "bfi", dsc);
5710 return copy_undef (gdbarch, insn, dsc);
5712 case 0x1e: case 0x1f:
5713 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
5714 return copy_unmodified (gdbarch, insn, "ubfx", dsc);
5716 return copy_undef (gdbarch, insn, dsc);
5719 /* Should be unreachable. */
5724 decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
5725 struct regcache *regs, struct displaced_step_closure *dsc)
5728 return copy_b_bl_blx (gdbarch, insn, regs, dsc);
5730 return copy_block_xfer (gdbarch, insn, regs, dsc);
5734 decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
5735 struct regcache *regs, struct displaced_step_closure *dsc)
5737 unsigned int opcode = bits (insn, 20, 24);
5741 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
5742 return copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
5744 case 0x08: case 0x0a: case 0x0c: case 0x0e:
5745 case 0x12: case 0x16:
5746 return copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
5748 case 0x09: case 0x0b: case 0x0d: case 0x0f:
5749 case 0x13: case 0x17:
5750 return copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
5752 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
5753 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
5754 /* Note: no writeback for these instructions. Bit 25 will always be
5755 zero though (via caller), so the following works OK. */
5756 return copy_copro_load_store (gdbarch, insn, regs, dsc);
5759 /* Should be unreachable. */
5764 decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
5765 struct regcache *regs, struct displaced_step_closure *dsc)
5767 unsigned int op1 = bits (insn, 20, 25);
5768 int op = bit (insn, 4);
5769 unsigned int coproc = bits (insn, 8, 11);
5770 unsigned int rn = bits (insn, 16, 19);
5772 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
5773 return decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
5774 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
5775 && (coproc & 0xe) != 0xa)
5777 return copy_copro_load_store (gdbarch, insn, regs, dsc);
5778 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
5779 && (coproc & 0xe) != 0xa)
5780 /* ldc/ldc2 imm/lit. */
5781 return copy_copro_load_store (gdbarch, insn, regs, dsc);
5782 else if ((op1 & 0x3e) == 0x00)
5783 return copy_undef (gdbarch, insn, dsc);
5784 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
5785 return copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
5786 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
5787 return copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
5788 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
5789 return copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
5790 else if ((op1 & 0x30) == 0x20 && !op)
5792 if ((coproc & 0xe) == 0xa)
5793 return copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
5795 return copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
5797 else if ((op1 & 0x30) == 0x20 && op)
5798 return copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
5799 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
5800 return copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
5801 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
5802 return copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
5803 else if ((op1 & 0x30) == 0x30)
5804 return copy_svc (gdbarch, insn, to, regs, dsc);
5806 return copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
5810 arm_process_displaced_insn (struct gdbarch *gdbarch, uint32_t insn,
5811 CORE_ADDR from, CORE_ADDR to, struct regcache *regs,
5812 struct displaced_step_closure *dsc)
5816 if (!displaced_in_arm_mode (regs))
5817 error (_("Displaced stepping is only supported in ARM mode"));
5819 /* Most displaced instructions use a 1-instruction scratch space, so set this
5820 here and override below if/when necessary.  */
dsc->numinsns = 1;
5822 dsc->insn_addr = from;
5823 dsc->scratch_base = to;
5824 dsc->cleanup = NULL;
5825 dsc->wrote_to_pc = 0;
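  /* Dispatch note: instructions whose condition field is 0xf are
     "unconditional" and are decoded separately; everything else is switched
     below on a 4-bit key built from instruction bits 27-25 (key bits 3-1)
     and bit 4 (key bit 0), mirroring the top-level encoding table in the
     ARM ARM.  */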
5827 if ((insn & 0xf0000000) == 0xf0000000)
5828 err = decode_unconditional (gdbarch, insn, regs, dsc);
5829 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
5831 case 0x0: case 0x1: case 0x2: case 0x3:
5832 err = decode_dp_misc (gdbarch, insn, regs, dsc);
5835 case 0x4: case 0x5: case 0x6:
5836 err = decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
5840 err = decode_media (gdbarch, insn, dsc);
5843 case 0x8: case 0x9: case 0xa: case 0xb:
5844 err = decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
5847 case 0xc: case 0xd: case 0xe: case 0xf:
5848 err = decode_svc_copro (gdbarch, insn, to, regs, dsc);
5853 internal_error (__FILE__, __LINE__,
5854 _("arm_process_displaced_insn: Instruction decode error"));
5857 /* Actually set up the scratch space for a displaced instruction. */
5860 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
5861 CORE_ADDR to, struct displaced_step_closure *dsc)
5863 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
5865 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5867 /* Poke modified instruction(s). */
5868 for (i = 0; i < dsc->numinsns; i++)
5870 if (debug_displaced)
5871 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn %.8lx at "
5872 "%.8lx\n", (unsigned long) dsc->modinsn[i],
5873 (unsigned long) to + i * 4);
5874 write_memory_unsigned_integer (to + i * 4, 4, byte_order_for_code,
5878 /* Put breakpoint afterwards. */
5879 write_memory (to + dsc->numinsns * 4, tdep->arm_breakpoint,
5880 tdep->arm_breakpoint_size);
5882 if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
5884 paddress (gdbarch, from), paddress (gdbarch, to));
5887 /* Entry point for copying an instruction into scratch space for displaced stepping.  */
5890 struct displaced_step_closure *
5891 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
5892 CORE_ADDR from, CORE_ADDR to,
5893 struct regcache *regs)
5895 struct displaced_step_closure *dsc
5896 = xmalloc (sizeof (struct displaced_step_closure));
5897 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5898 uint32_t insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
5900 if (debug_displaced)
5901 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
5902 "at %.8lx\n", (unsigned long) insn,
5903 (unsigned long) from);
5905 arm_process_displaced_insn (gdbarch, insn, from, to, regs, dsc);
5906 arm_displaced_init_closure (gdbarch, from, to, dsc);
5911 /* Entry point for cleaning things up after a displaced instruction has been single-stepped.  */
5915 arm_displaced_step_fixup (struct gdbarch *gdbarch,
5916 struct displaced_step_closure *dsc,
5917 CORE_ADDR from, CORE_ADDR to,
5918 struct regcache *regs)
5921 dsc->cleanup (gdbarch, regs, dsc);
5923 if (!dsc->wrote_to_pc)
5924 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, dsc->insn_addr + 4);
5927 #include "bfd-in2.h"
5928 #include "libcoff.h"
5931 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
5933 struct gdbarch *gdbarch = info->application_data;
5935 if (arm_pc_is_thumb (gdbarch, memaddr))
5937 static asymbol *asym;
5938 static combined_entry_type ce;
5939 static struct coff_symbol_struct csym;
5940 static struct bfd fake_bfd;
5941 static bfd_target fake_target;
5943 if (csym.native == NULL)
5945 /* Create a fake symbol vector containing a Thumb symbol.
5946 This is solely so that the code in print_insn_little_arm()
5947 and print_insn_big_arm() in opcodes/arm-dis.c will detect
5948 the presence of a Thumb symbol and switch to decoding
5949 Thumb instructions. */
5951 fake_target.flavour = bfd_target_coff_flavour;
5952 fake_bfd.xvec = &fake_target;
5953 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
5955 csym.symbol.the_bfd = &fake_bfd;
5956 csym.symbol.name = "fake";
5957 asym = (asymbol *) & csym;
5960 memaddr = UNMAKE_THUMB_ADDR (memaddr);
5961 info->symbols = &asym;
5964 info->symbols = NULL;
5966 if (info->endian == BFD_ENDIAN_BIG)
5967 return print_insn_big_arm (memaddr, info);
5969 return print_insn_little_arm (memaddr, info);
5972 /* The following define instruction sequences that will cause ARM
5973 CPUs to take an undefined instruction trap. These are used to
5974 signal a breakpoint to GDB.
5976 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
5977 modes. A different instruction is required for each mode. The ARM
5978 CPUs can also be big or little endian. Thus four different
5979 instructions are needed to support all cases.
5981 Note: ARMv4 defines several new instructions that will take the
5982 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
5983 not in fact add the new instructions. The new undefined
5984 instructions in ARMv4 are all instructions that had no defined
5985 behaviour in earlier chips. There is no guarantee that they will
5986 raise an exception, but they may be treated as NOPs. In practice, it
5987 may only be safe to rely on instructions matching:
5989 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
5990 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
5991 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
5993 Even this may only be true if the condition predicate is true. The
5994 following use a condition predicate of ALWAYS so it is always TRUE.
5996 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
5997 and NetBSD all use a software interrupt rather than an undefined
5998 instruction to force a trap. This can be handled by the
5999 ABI-specific code during establishment of the gdbarch vector. */
6001 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
6002 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
6003 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
6004 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
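/* For reference: the two ARM byte sequences above both encode the word
   0xe7ffdefe, which matches the undefined-instruction bit pattern given in
   the comment above (condition ALWAYS, bits 27-25 = 011, bit 4 = 1); the two
   Thumb sequences both encode the halfword 0xbebe.  */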
6006 static const char arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
6007 static const char arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
6008 static const char arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
6009 static const char arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
6011 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
6012 the program counter value to determine whether a 16-bit or 32-bit
6013 breakpoint should be used. It returns a pointer to a string of
6014 bytes that encode a breakpoint instruction, stores the length of
6015 the string to *lenptr, and adjusts the program counter (if
6016 necessary) to point to the actual memory location where the
6017 breakpoint should be inserted. */
6019 static const unsigned char *
6020 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
6022 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6023 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
6025 if (arm_pc_is_thumb (gdbarch, *pcptr))
6027 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
6029 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
6030 check whether we are replacing a 32-bit instruction. */
6031 if (tdep->thumb2_breakpoint != NULL)
6034 if (target_read_memory (*pcptr, buf, 2) == 0)
6036 unsigned short inst1;
6037 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
6038 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
6040 *lenptr = tdep->thumb2_breakpoint_size;
6041 return tdep->thumb2_breakpoint;
6046 *lenptr = tdep->thumb_breakpoint_size;
6047 return tdep->thumb_breakpoint;
6051 *lenptr = tdep->arm_breakpoint_size;
6052 return tdep->arm_breakpoint;
6057 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
6060 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6062 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
6064 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
6065 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
6066 that this is not confused with a 32-bit ARM breakpoint.  */
*kindptr = 3;
6070 /* Extract from an array REGBUF containing the (raw) register state a
6071 function return value of type TYPE, and copy that, in virtual
6072 format, into VALBUF. */
6075 arm_extract_return_value (struct type *type, struct regcache *regs,
6078 struct gdbarch *gdbarch = get_regcache_arch (regs);
6079 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6081 if (TYPE_CODE_FLT == TYPE_CODE (type))
6083 switch (gdbarch_tdep (gdbarch)->fp_model)
6087 /* The value is in register F0 in internal format. We need to
6088 extract the raw value and then convert it to the desired format.  */
6090 bfd_byte tmpbuf[FP_REGISTER_SIZE];
6092 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
6093 convert_from_extended (floatformat_from_type (type), tmpbuf,
6094 valbuf, gdbarch_byte_order (gdbarch));
6098 case ARM_FLOAT_SOFT_FPA:
6099 case ARM_FLOAT_SOFT_VFP:
6100 /* ARM_FLOAT_VFP can arise if this is a variadic function so
6101 not using the VFP ABI code. */
6103 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
6104 if (TYPE_LENGTH (type) > 4)
6105 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
6106 valbuf + INT_REGISTER_SIZE);
6111 (__FILE__, __LINE__,
6112 _("arm_extract_return_value: Floating point model not supported"));
6116 else if (TYPE_CODE (type) == TYPE_CODE_INT
6117 || TYPE_CODE (type) == TYPE_CODE_CHAR
6118 || TYPE_CODE (type) == TYPE_CODE_BOOL
6119 || TYPE_CODE (type) == TYPE_CODE_PTR
6120 || TYPE_CODE (type) == TYPE_CODE_REF
6121 || TYPE_CODE (type) == TYPE_CODE_ENUM)
6123 /* If the type is a plain integer, then the access is
6124 straightforward. Otherwise we have to play around a bit more. */
6125 int len = TYPE_LENGTH (type);
6126 int regno = ARM_A1_REGNUM;
6131 /* By using store_unsigned_integer we avoid having to do
6132 anything special for small big-endian values. */
6133 regcache_cooked_read_unsigned (regs, regno++, &tmp);
6134 store_unsigned_integer (valbuf,
6135 (len > INT_REGISTER_SIZE
6136 ? INT_REGISTER_SIZE : len),
6138 len -= INT_REGISTER_SIZE;
6139 valbuf += INT_REGISTER_SIZE;
6144 /* For a structure or union the behaviour is as if the value had
6145 been stored to word-aligned memory and then loaded into
6146 registers with 32-bit load instruction(s). */
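	  /* Illustrative example: a 6-byte structure arrives with bytes 0-3
	     in r0 and bytes 4-5 in the leading bytes of r1's memory image;
	     the loop below copies INT_REGISTER_SIZE bytes per register except
	     on the final partial word, where only the remaining "len" bytes
	     are taken from tmpbuf.  */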
6147 int len = TYPE_LENGTH (type);
6148 int regno = ARM_A1_REGNUM;
6149 bfd_byte tmpbuf[INT_REGISTER_SIZE];
6153 regcache_cooked_read (regs, regno++, tmpbuf);
6154 memcpy (valbuf, tmpbuf,
6155 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
6156 len -= INT_REGISTER_SIZE;
6157 valbuf += INT_REGISTER_SIZE;
6163 /* Will a function return an aggregate type in memory or in a
6164 register? Return 0 if an aggregate type can be returned in a
6165 register, 1 if it must be returned in memory. */
6168 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
6171 enum type_code code;
6173 CHECK_TYPEDEF (type);
6175 /* In the ARM ABI, "integer" like aggregate types are returned in
6176 registers. For an aggregate type to be integer like, its size
6177 must be less than or equal to INT_REGISTER_SIZE and the
6178 offset of each addressable subfield must be zero. Note that bit
6179 fields are not addressable, and all addressable subfields of
6180 unions always start at offset zero.
6182 This function is based on the behaviour of GCC 2.95.1.
6183 See: gcc/arm.c: arm_return_in_memory() for details.
6185 Note: All versions of GCC before GCC 2.95.2 do not set up the
6186 parameters correctly for a function returning the following
6187 structure: struct { float f;}; This should be returned in memory,
6188 not a register. Richard Earnshaw sent me a patch, but I do not
6189 know of any way to detect if a function like the above has been
6190 compiled with the correct calling convention. */
6192 /* All aggregate types that won't fit in a register must be returned in memory.  */
6194 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
6199 /* The AAPCS says all aggregates not larger than a word are returned in a register.  */
6201 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
6204 /* The only aggregate types that can be returned in a register are
6205 structs and unions. Arrays must be returned in memory. */
6206 code = TYPE_CODE (type);
6207 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
6212 /* Assume all other aggregate types can be returned in a register.
6213 Run a check for structures, unions and arrays. */
6216 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
6219 /* Need to check if this struct/union is "integer" like. For
6220 this to be true, its size must be less than or equal to
6221 INT_REGISTER_SIZE and the offset of each addressable
6222 subfield must be zero. Note that bit fields are not
6223 addressable, and unions always start at offset zero. If any
6224 of the subfields is a floating point type, the struct/union
6225 cannot be an integer type. */
6227 /* For each field in the object, check:
6228 1) Is it FP? --> yes, nRc = 1;
6229 2) Is it addressable (bitpos != 0) and
6230 not packed (bitsize == 0)?  */
6234 for (i = 0; i < TYPE_NFIELDS (type); i++)
6236 enum type_code field_type_code;
6237 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type, i)));
6239 /* Is it a floating point type field? */
6240 if (field_type_code == TYPE_CODE_FLT)
6246 /* If bitpos != 0, then we have to care about it. */
6247 if (TYPE_FIELD_BITPOS (type, i) != 0)
6249 /* Bitfields are not addressable. If the field bitsize is
6250 zero, then the field is not packed. Hence it cannot be
6251 a bitfield or any other packed type. */
6252 if (TYPE_FIELD_BITSIZE (type, i) == 0)
6264 /* Write into appropriate registers a function return value of type
6265 TYPE, given in virtual format. */
6268 arm_store_return_value (struct type *type, struct regcache *regs,
6269 const gdb_byte *valbuf)
6271 struct gdbarch *gdbarch = get_regcache_arch (regs);
6272 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6274 if (TYPE_CODE (type) == TYPE_CODE_FLT)
6276 char buf[MAX_REGISTER_SIZE];
6278 switch (gdbarch_tdep (gdbarch)->fp_model)
6282 convert_to_extended (floatformat_from_type (type), buf, valbuf,
6283 gdbarch_byte_order (gdbarch));
6284 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
6287 case ARM_FLOAT_SOFT_FPA:
6288 case ARM_FLOAT_SOFT_VFP:
6289 /* ARM_FLOAT_VFP can arise if this is a variadic function, so it is
6290 not using the VFP ABI code. */
6292 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
6293 if (TYPE_LENGTH (type) > 4)
6294 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
6295 valbuf + INT_REGISTER_SIZE);
6300 (__FILE__, __LINE__,
6301 _("arm_store_return_value: Floating point model not supported"));
6305 else if (TYPE_CODE (type) == TYPE_CODE_INT
6306 || TYPE_CODE (type) == TYPE_CODE_CHAR
6307 || TYPE_CODE (type) == TYPE_CODE_BOOL
6308 || TYPE_CODE (type) == TYPE_CODE_PTR
6309 || TYPE_CODE (type) == TYPE_CODE_REF
6310 || TYPE_CODE (type) == TYPE_CODE_ENUM)
6312 if (TYPE_LENGTH (type) <= 4)
6314 /* Values of one word or less are zero/sign-extended and returned in a register.  */
6316 bfd_byte tmpbuf[INT_REGISTER_SIZE];
6317 LONGEST val = unpack_long (type, valbuf);
6319 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
6320 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
6324 /* Integral values greater than one word are stored in consecutive
6325 registers starting with r0. This will always be a multiple of
6326 the register size.  */
6327 int len = TYPE_LENGTH (type);
6328 int regno = ARM_A1_REGNUM;
6332 regcache_cooked_write (regs, regno++, valbuf);
6333 len -= INT_REGISTER_SIZE;
6334 valbuf += INT_REGISTER_SIZE;
6340 /* For a structure or union the behaviour is as if the value had
6341 been stored to word-aligned memory and then loaded into
6342 registers with 32-bit load instruction(s). */
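/* For example (illustrative only): a 6-byte struct occupies all of r0
   plus the first two bytes, in memory order, of r1; any remaining
   bytes of r1 are left undefined.  */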
6343 int len = TYPE_LENGTH (type);
6344 int regno = ARM_A1_REGNUM;
6345 bfd_byte tmpbuf[INT_REGISTER_SIZE];
6349 memcpy (tmpbuf, valbuf,
6350 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
6351 regcache_cooked_write (regs, regno++, tmpbuf);
6352 len -= INT_REGISTER_SIZE;
6353 valbuf += INT_REGISTER_SIZE;
6359 /* Handle function return values. */
6361 static enum return_value_convention
6362 arm_return_value (struct gdbarch *gdbarch, struct type *func_type,
6363 struct type *valtype, struct regcache *regcache,
6364 gdb_byte *readbuf, const gdb_byte *writebuf)
6366 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6367 enum arm_vfp_cprc_base_type vfp_base_type;
6370 if (arm_vfp_abi_for_function (gdbarch, func_type)
6371 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
6373 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
6374 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
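/* A VFP candidate is returned in up to four consecutive registers
   named s0..., d0... or q0..., depending on its base type; quad
   registers need the helper routines below because each one spans
   two raw D registers.  */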
6376 for (i = 0; i < vfp_base_count; i++)
6378 if (reg_char == 'q')
6381 arm_neon_quad_write (gdbarch, regcache, i,
6382 writebuf + i * unit_length);
6385 arm_neon_quad_read (gdbarch, regcache, i,
6386 readbuf + i * unit_length);
6393 sprintf (name_buf, "%c%d", reg_char, i);
6394 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
6397 regcache_cooked_write (regcache, regnum,
6398 writebuf + i * unit_length);
6400 regcache_cooked_read (regcache, regnum,
6401 readbuf + i * unit_length);
6404 return RETURN_VALUE_REGISTER_CONVENTION;
6407 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
6408 || TYPE_CODE (valtype) == TYPE_CODE_UNION
6409 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
6411 if (tdep->struct_return == pcc_struct_return
6412 || arm_return_in_memory (gdbarch, valtype))
6413 return RETURN_VALUE_STRUCT_CONVENTION;
6417 arm_store_return_value (valtype, regcache, writebuf);
6420 arm_extract_return_value (valtype, regcache, readbuf);
6422 return RETURN_VALUE_REGISTER_CONVENTION;
6427 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
6429 struct gdbarch *gdbarch = get_frame_arch (frame);
6430 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
6431 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6433 char buf[INT_REGISTER_SIZE];
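/* The jmp_buf pointer is the first argument to longjmp, so it arrives
   in r0; the saved PC lives tdep->jb_pc slots of tdep->jb_elt_size
   bytes into that buffer, as configured by the OS-specific code.  */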
6435 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
6437 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
6441 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
6445 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
6446 return the target PC. Otherwise return 0. */
6449 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
6453 CORE_ADDR start_addr;
6455 /* Find the starting address and name of the function containing the PC. */
6456 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
6459 /* If PC is in a Thumb call or return stub, return the address of the
6460 target PC, which is in a register. The thunk functions are called
6461 _call_via_xx, where xx is the register name.  The possible names
6462 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
6463 functions, named __ARM_call_via_r[0-7]. */
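/* For example, a call through r3 is routed via a stub named
   "_call_via_r3", so the real target is simply the current value
   of r3.  */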
6464 if (strncmp (name, "_call_via_", 10) == 0
6465 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
6467 /* Use the name suffix to determine which register contains the target PC.  */
6469 static char *table[15] =
6470 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
6471 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
6474 int offset = strlen (name) - 2;
6476 for (regno = 0; regno <= 14; regno++)
6477 if (strcmp (&name[offset], table[regno]) == 0)
6478 return get_frame_register_unsigned (frame, regno);
6481 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
6482 non-interworking calls to foo. We could decode the stubs
6483 to find the target but it's easier to use the symbol table. */
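/* For example, a stub named "__foo_from_thumb" is handled by stripping
   the leading "__" and the trailing "_from_thumb" to recover "foo",
   which is then looked up in the minimal symbol table below.  */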
6484 namelen = strlen (name);
6485 if (name[0] == '_' && name[1] == '_'
6486 && ((namelen > 2 + strlen ("_from_thumb")
6487 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
6488 strlen ("_from_thumb")) == 0)
6489 || (namelen > 2 + strlen ("_from_arm")
6490 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
6491 strlen ("_from_arm")) == 0)))
6494 int target_len = namelen - 2;
6495 struct minimal_symbol *minsym;
6496 struct objfile *objfile;
6497 struct obj_section *sec;
6499 if (name[namelen - 1] == 'b')
6500 target_len -= strlen ("_from_thumb");
6502 target_len -= strlen ("_from_arm");
6504 target_name = alloca (target_len + 1);
6505 memcpy (target_name, name + 2, target_len);
6506 target_name[target_len] = '\0';
6508 sec = find_pc_section (pc);
6509 objfile = (sec == NULL) ? NULL : sec->objfile;
6510 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
6512 return SYMBOL_VALUE_ADDRESS (minsym);
6517 return 0; /* not a stub */
6521 set_arm_command (char *args, int from_tty)
6523 printf_unfiltered (_("\
6524 \"set arm\" must be followed by an apporpriate subcommand.\n"));
6525 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
6529 show_arm_command (char *args, int from_tty)
6531 cmd_show_list (showarmcmdlist, from_tty, "");
6535 arm_update_current_architecture (void)
6537 struct gdbarch_info info;
6539 /* If the current architecture is not ARM, we have nothing to do. */
6540 if (gdbarch_bfd_arch_info (target_gdbarch)->arch != bfd_arch_arm)
6543 /* Update the architecture. */
6544 gdbarch_info_init (&info);
6546 if (!gdbarch_update_p (info))
6547 internal_error (__FILE__, __LINE__, "could not update architecture");
6551 set_fp_model_sfunc (char *args, int from_tty,
6552 struct cmd_list_element *c)
6554 enum arm_float_model fp_model;
6556 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
6557 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
6559 arm_fp_model = fp_model;
6563 if (fp_model == ARM_FLOAT_LAST)
6564 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
6567 arm_update_current_architecture ();
6571 show_fp_model (struct ui_file *file, int from_tty,
6572 struct cmd_list_element *c, const char *value)
6574 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
6576 if (arm_fp_model == ARM_FLOAT_AUTO
6577 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
6578 fprintf_filtered (file, _("\
6579 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
6580 fp_model_strings[tdep->fp_model]);
6582 fprintf_filtered (file, _("\
6583 The current ARM floating point model is \"%s\".\n"),
6584 fp_model_strings[arm_fp_model]);
6588 arm_set_abi (char *args, int from_tty,
6589 struct cmd_list_element *c)
6591 enum arm_abi_kind arm_abi;
6593 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
6594 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
6596 arm_abi_global = arm_abi;
6600 if (arm_abi == ARM_ABI_LAST)
6601 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
6604 arm_update_current_architecture ();
6608 arm_show_abi (struct ui_file *file, int from_tty,
6609 struct cmd_list_element *c, const char *value)
6611 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
6613 if (arm_abi_global == ARM_ABI_AUTO
6614 && gdbarch_bfd_arch_info (target_gdbarch)->arch == bfd_arch_arm)
6615 fprintf_filtered (file, _("\
6616 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
6617 arm_abi_strings[tdep->arm_abi]);
6619 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
6624 arm_show_fallback_mode (struct ui_file *file, int from_tty,
6625 struct cmd_list_element *c, const char *value)
6627 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
6629 fprintf_filtered (file, _("\
6630 The current execution mode assumed (when symbols are unavailable) is \"%s\".\n"),
6631 arm_fallback_mode_string);
6635 arm_show_force_mode (struct ui_file *file, int from_tty,
6636 struct cmd_list_element *c, const char *value)
6638 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch);
6640 fprintf_filtered (file, _("\
6641 The current execution mode assumed (even when symbols are available) is \"%s\".\n"),
6642 arm_force_mode_string);
6645 /* If the user changes the register disassembly style used for info
6646 register and other commands, we have to also switch the style used
6647 in opcodes for disassembly output.  This function is run by the
6648 "set arm disassembler" command, and does that.  */
6651 set_disassembly_style_sfunc (char *args, int from_tty,
6652 struct cmd_list_element *c)
6654 set_disassembly_style ();
6657 /* Return the ARM register name corresponding to register I. */
6659 arm_register_name (struct gdbarch *gdbarch, int i)
6661 const int num_regs = gdbarch_num_regs (gdbarch);
6663 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
6664 && i >= num_regs && i < num_regs + 32)
6666 static const char *const vfp_pseudo_names[] = {
6667 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
6668 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
6669 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
6670 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
6673 return vfp_pseudo_names[i - num_regs];
6676 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
6677 && i >= num_regs + 32 && i < num_regs + 32 + 16)
6679 static const char *const neon_pseudo_names[] = {
6680 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
6681 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
6684 return neon_pseudo_names[i - num_regs - 32];
6687 if (i >= ARRAY_SIZE (arm_register_names))
6688 /* These registers are only supported on targets which supply
6689 an XML description. */
6692 return arm_register_names[i];
6696 set_disassembly_style (void)
6700 /* Find the style that the user wants. */
6701 for (current = 0; current < num_disassembly_options; current++)
6702 if (disassembly_style == valid_disassembly_styles[current])
6704 gdb_assert (current < num_disassembly_options);
6706 /* Synchronize the disassembler. */
6707 set_arm_regname_option (current);
6710 /* Test whether the coff symbol specific value corresponds to a Thumb function.  */
6714 coff_sym_is_thumb (int val)
6716 return (val == C_THUMBEXT
6717 || val == C_THUMBSTAT
6718 || val == C_THUMBEXTFUNC
6719 || val == C_THUMBSTATFUNC
6720 || val == C_THUMBLABEL);
6723 /* arm_coff_make_msymbol_special()
6724 arm_elf_make_msymbol_special()
6726 These functions test whether the COFF or ELF symbol corresponds to
6727 an address in thumb code, and set a "special" bit in a minimal
6728 symbol to indicate that it does. */
6731 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
6733 /* Thumb symbols are of type STT_LOPROC (synonymous with STT_ARM_TFUNC).  */
6735 if (ELF_ST_TYPE (((elf_symbol_type *)sym)->internal_elf_sym.st_info)
6737 MSYMBOL_SET_SPECIAL (msym);
6741 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
6743 if (coff_sym_is_thumb (val))
6744 MSYMBOL_SET_SPECIAL (msym);
6748 arm_objfile_data_free (struct objfile *objfile, void *arg)
6750 struct arm_per_objfile *data = arg;
6753 for (i = 0; i < objfile->obfd->section_count; i++)
6754 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
6758 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
6761 const char *name = bfd_asymbol_name (sym);
6762 struct arm_per_objfile *data;
6763 VEC(arm_mapping_symbol_s) **map_p;
6764 struct arm_mapping_symbol new_map_sym;
6766 gdb_assert (name[0] == '$');
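/* ARM ELF mapping symbols: "$a" marks the start of ARM code, "$t" the
   start of Thumb code and "$d" the start of literal data; anything
   else is ignored here.  */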
6767 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
6770 data = objfile_data (objfile, arm_objfile_data_key);
6773 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
6774 struct arm_per_objfile);
6775 set_objfile_data (objfile, arm_objfile_data_key, data);
6776 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
6777 objfile->obfd->section_count,
6778 VEC(arm_mapping_symbol_s) *);
6780 map_p = &data->section_maps[bfd_get_section (sym)->index];
6782 new_map_sym.value = sym->value;
6783 new_map_sym.type = name[1];
6785 /* Assume that most mapping symbols appear in order of increasing
6786 value. If they were randomly distributed, it would be faster to
6787 always push here and then sort at first use. */
6788 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
6790 struct arm_mapping_symbol *prev_map_sym;
6792 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
6793 if (prev_map_sym->value >= sym->value)
6796 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
6797 arm_compare_mapping_symbols);
6798 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
6803 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
6807 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
6809 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6810 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
6812 /* If necessary, set the T bit. */
6815 ULONGEST val, t_bit;
6816 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
6817 t_bit = arm_psr_thumb_bit (gdbarch);
6818 if (arm_pc_is_thumb (gdbarch, pc))
6819 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
6822 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
6827 /* Read the contents of a NEON quad register, by reading from two
6828 double registers. This is used to implement the quad pseudo
6829 registers, and for argument passing in case the quad registers are
6830 missing; vectors are passed in quad registers when using the VFP
6831 ABI, even if a NEON unit is not present. REGNUM is the index of
6832 the quad register, in [0, 15]. */
6835 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
6836 int regnum, gdb_byte *buf)
6839 gdb_byte reg_buf[8];
6840 int offset, double_regnum;
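/* Quad register qN overlaps double registers d(2N) and d(2N+1), hence
   the first underlying raw register name is formed from regnum * 2.  */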
6842 sprintf (name_buf, "d%d", regnum << 1);
6843 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
6846 /* d0 is always the least significant half of q0. */
6847 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
6852 regcache_raw_read (regcache, double_regnum, reg_buf);
6853 memcpy (buf + offset, reg_buf, 8);
6855 offset = 8 - offset;
6856 regcache_raw_read (regcache, double_regnum + 1, reg_buf);
6857 memcpy (buf + offset, reg_buf, 8);
6861 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
6862 int regnum, gdb_byte *buf)
6864 const int num_regs = gdbarch_num_regs (gdbarch);
6866 gdb_byte reg_buf[8];
6867 int offset, double_regnum;
6869 gdb_assert (regnum >= num_regs);
6872 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
6873 /* Quad-precision register. */
6874 arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
6877 /* Single-precision register. */
6878 gdb_assert (regnum < 32);
6880 /* s0 is always the least significant half of d0. */
6881 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
6882 offset = (regnum & 1) ? 0 : 4;
6884 offset = (regnum & 1) ? 4 : 0;
6886 sprintf (name_buf, "d%d", regnum >> 1);
6887 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
6890 regcache_raw_read (regcache, double_regnum, reg_buf);
6891 memcpy (buf, reg_buf + offset, 4);
6895 /* Store the contents of BUF to a NEON quad register, by writing to
6896 two double registers. This is used to implement the quad pseudo
6897 registers, and for argument passing in case the quad registers are
6898 missing; vectors are passed in quad registers when using the VFP
6899 ABI, even if a NEON unit is not present. REGNUM is the index
6900 of the quad register, in [0, 15]. */
6903 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
6904 int regnum, const gdb_byte *buf)
6907 gdb_byte reg_buf[8];
6908 int offset, double_regnum;
6910 sprintf (name_buf, "d%d", regnum << 1);
6911 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
6914 /* d0 is always the least significant half of q0. */
6915 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
6920 regcache_raw_write (regcache, double_regnum, buf + offset);
6921 offset = 8 - offset;
6922 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
6926 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
6927 int regnum, const gdb_byte *buf)
6929 const int num_regs = gdbarch_num_regs (gdbarch);
6931 gdb_byte reg_buf[8];
6932 int offset, double_regnum;
6934 gdb_assert (regnum >= num_regs);
6937 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
6938 /* Quad-precision register. */
6939 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
6942 /* Single-precision register. */
6943 gdb_assert (regnum < 32);
6945 /* s0 is always the least significant half of d0. */
6946 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
6947 offset = (regnum & 1) ? 0 : 4;
6949 offset = (regnum & 1) ? 4 : 0;
6951 sprintf (name_buf, "d%d", regnum >> 1);
6952 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
6955 regcache_raw_read (regcache, double_regnum, reg_buf);
6956 memcpy (reg_buf + offset, buf, 4);
6957 regcache_raw_write (regcache, double_regnum, reg_buf);
6961 static struct value *
6962 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
6964 const int *reg_p = baton;
6965 return value_of_register (*reg_p, frame);
6968 static enum gdb_osabi
6969 arm_elf_osabi_sniffer (bfd *abfd)
6971 unsigned int elfosabi;
6972 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
6974 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
6976 if (elfosabi == ELFOSABI_ARM)
6977 /* GNU tools use this value.  Check note sections in this case, as well.  */
6979 bfd_map_over_sections (abfd,
6980 generic_elf_osabi_sniff_abi_tag_sections,
6983 /* Anything else will be handled by the generic ELF sniffer. */
6988 /* Initialize the current architecture based on INFO. If possible,
6989 re-use an architecture from ARCHES, which is a list of
6990 architectures already created during this debugging session.
6992 Called e.g. at program startup, when reading a core file, and when
6993 reading a binary file. */
6995 static struct gdbarch *
6996 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
6998 struct gdbarch_tdep *tdep;
6999 struct gdbarch *gdbarch;
7000 struct gdbarch_list *best_arch;
7001 enum arm_abi_kind arm_abi = arm_abi_global;
7002 enum arm_float_model fp_model = arm_fp_model;
7003 struct tdesc_arch_data *tdesc_data = NULL;
7005 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
7007 int have_fpa_registers = 1;
7008 const struct target_desc *tdesc = info.target_desc;
7010 /* If we have an object to base this architecture on, try to determine its ABI.  */
7013 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
7015 int ei_osabi, e_flags;
7017 switch (bfd_get_flavour (info.abfd))
7019 case bfd_target_aout_flavour:
7020 /* Assume it's an old APCS-style ABI. */
7021 arm_abi = ARM_ABI_APCS;
7024 case bfd_target_coff_flavour:
7025 /* Assume it's an old APCS-style ABI. */
7027 arm_abi = ARM_ABI_APCS;
7030 case bfd_target_elf_flavour:
7031 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
7032 e_flags = elf_elfheader (info.abfd)->e_flags;
7034 if (ei_osabi == ELFOSABI_ARM)
7036 /* GNU tools used to use this value, but do not for EABI
7037 objects. There's nowhere to tag an EABI version
7038 anyway, so assume APCS. */
7039 arm_abi = ARM_ABI_APCS;
7041 else if (ei_osabi == ELFOSABI_NONE)
7043 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
7044 int attr_arch, attr_profile;
7048 case EF_ARM_EABI_UNKNOWN:
7049 /* Assume GNU tools. */
7050 arm_abi = ARM_ABI_APCS;
7053 case EF_ARM_EABI_VER4:
7054 case EF_ARM_EABI_VER5:
7055 arm_abi = ARM_ABI_AAPCS;
7056 /* EABI binaries default to VFP float ordering.
7057 They may also contain build attributes that can
7058 be used to identify if the VFP argument-passing ABI is in use.  */
7060 if (fp_model == ARM_FLOAT_AUTO)
7063 switch (bfd_elf_get_obj_attr_int (info.abfd,
7068 /* "The user intended FP parameter/result
7069 passing to conform to AAPCS, base variant".  */
7071 fp_model = ARM_FLOAT_SOFT_VFP;
7074 /* "The user intended FP parameter/result
7075 passing to conform to AAPCS, VFP variant".  */
7077 fp_model = ARM_FLOAT_VFP;
7080 /* "The user intended FP parameter/result
7081 passing to conform to tool chain-specific
7082 conventions" - we don't know any such
7083 conventions, so leave it as "auto". */
7086 /* Attribute value not mentioned in the
7087 October 2008 ABI, so leave it as "auto".  */
7092 fp_model = ARM_FLOAT_SOFT_VFP;
7098 /* Leave it as "auto". */
7099 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
7104 /* Detect M-profile programs. This only works if the
7105 executable file includes build attributes; GCC does
7106 copy them to the executable, but e.g. RealView does not.  */
7108 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
7110 attr_profile = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
7111 Tag_CPU_arch_profile);
7112 /* GCC specifies the profile for v6-M; RealView only
7113 specifies the profile for architectures starting with
7114 V7 (as opposed to architectures with a tag
7115 numerically greater than TAG_CPU_ARCH_V7). */
7116 if (!tdesc_has_registers (tdesc)
7117 && (attr_arch == TAG_CPU_ARCH_V6_M
7118 || attr_arch == TAG_CPU_ARCH_V6S_M
7119 || attr_profile == 'M'))
7120 tdesc = tdesc_arm_with_m;
7124 if (fp_model == ARM_FLOAT_AUTO)
7126 int e_flags = elf_elfheader (info.abfd)->e_flags;
7128 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
7131 /* Leave it as "auto". Strictly speaking this case
7132 means FPA, but almost nobody uses that now, and
7133 many toolchains fail to set the appropriate bits
7134 for the floating-point model they use. */
7136 case EF_ARM_SOFT_FLOAT:
7137 fp_model = ARM_FLOAT_SOFT_FPA;
7139 case EF_ARM_VFP_FLOAT:
7140 fp_model = ARM_FLOAT_VFP;
7142 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
7143 fp_model = ARM_FLOAT_SOFT_VFP;
7148 if (e_flags & EF_ARM_BE8)
7149 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
7154 /* Leave it as "auto". */
7159 /* Check any target description for validity. */
7160 if (tdesc_has_registers (tdesc))
7162 /* For most registers we require GDB's default names; but also allow
7163 the numeric names for sp / lr / pc, as a convenience. */
7164 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
7165 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
7166 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
7168 const struct tdesc_feature *feature;
7171 feature = tdesc_find_feature (tdesc,
7172 "org.gnu.gdb.arm.core");
7173 if (feature == NULL)
7175 feature = tdesc_find_feature (tdesc,
7176 "org.gnu.gdb.arm.m-profile");
7177 if (feature == NULL)
7183 tdesc_data = tdesc_data_alloc ();
7186 for (i = 0; i < ARM_SP_REGNUM; i++)
7187 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
7188 arm_register_names[i]);
7189 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
7192 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
7195 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
7199 valid_p &= tdesc_numbered_register (feature, tdesc_data,
7200 ARM_PS_REGNUM, "xpsr");
7202 valid_p &= tdesc_numbered_register (feature, tdesc_data,
7203 ARM_PS_REGNUM, "cpsr");
7207 tdesc_data_cleanup (tdesc_data);
7211 feature = tdesc_find_feature (tdesc,
7212 "org.gnu.gdb.arm.fpa");
7213 if (feature != NULL)
7216 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
7217 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
7218 arm_register_names[i]);
7221 tdesc_data_cleanup (tdesc_data);
7226 have_fpa_registers = 0;
7228 feature = tdesc_find_feature (tdesc,
7229 "org.gnu.gdb.xscale.iwmmxt");
7230 if (feature != NULL)
7232 static const char *const iwmmxt_names[] = {
7233 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
7234 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
7235 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
7236 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
7240 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
7242 &= tdesc_numbered_register (feature, tdesc_data, i,
7243 iwmmxt_names[i - ARM_WR0_REGNUM]);
7245 /* Check for the control registers, but do not fail if they are missing.  */
7247 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
7248 tdesc_numbered_register (feature, tdesc_data, i,
7249 iwmmxt_names[i - ARM_WR0_REGNUM]);
7251 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
7253 &= tdesc_numbered_register (feature, tdesc_data, i,
7254 iwmmxt_names[i - ARM_WR0_REGNUM]);
7258 tdesc_data_cleanup (tdesc_data);
7263 /* If we have a VFP unit, check whether the single precision registers
7264 are present.  If not, then we will synthesize them as pseudo registers.  */
7266 feature = tdesc_find_feature (tdesc,
7267 "org.gnu.gdb.arm.vfp");
7268 if (feature != NULL)
7270 static const char *const vfp_double_names[] = {
7271 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
7272 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
7273 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
7274 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
7277 /* Require the double precision registers.  There must be either 16 or 32.  */
7280 for (i = 0; i < 32; i++)
7282 valid_p &= tdesc_numbered_register (feature, tdesc_data,
7284 vfp_double_names[i]);
7289 if (!valid_p && i != 16)
7291 tdesc_data_cleanup (tdesc_data);
7295 if (tdesc_unnumbered_register (feature, "s0") == 0)
7296 have_vfp_pseudos = 1;
7298 have_vfp_registers = 1;
7300 /* If we have VFP, also check for NEON. The architecture allows
7301 NEON without VFP (integer vector operations only), but GDB
7302 does not support that. */
7303 feature = tdesc_find_feature (tdesc,
7304 "org.gnu.gdb.arm.neon");
7305 if (feature != NULL)
7307 /* NEON requires 32 double-precision registers. */
7310 tdesc_data_cleanup (tdesc_data);
7314 /* If there are quad registers defined by the stub, use
7315 their type; otherwise (normally) provide them with
7316 the default type. */
7317 if (tdesc_unnumbered_register (feature, "q0") == 0)
7318 have_neon_pseudos = 1;
7325 /* If there is already a candidate, use it. */
7326 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
7328 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
7330 if (arm_abi != ARM_ABI_AUTO
7331 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
7334 if (fp_model != ARM_FLOAT_AUTO
7335 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
7338 /* There are various other properties in tdep that we do not
7339 need to check here: those derived from a target description,
7340 since gdbarches with a different target description are
7341 automatically disqualified. */
7343 /* Do check is_m, though, since it might come from the binary. */
7344 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
7347 /* Found a match. */
7351 if (best_arch != NULL)
7353 if (tdesc_data != NULL)
7354 tdesc_data_cleanup (tdesc_data);
7355 return best_arch->gdbarch;
7358 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
7359 gdbarch = gdbarch_alloc (&info, tdep);
7361 /* Record additional information about the architecture we are defining.
7362 These are gdbarch discriminators, like the OSABI. */
7363 tdep->arm_abi = arm_abi;
7364 tdep->fp_model = fp_model;
7366 tdep->have_fpa_registers = have_fpa_registers;
7367 tdep->have_vfp_registers = have_vfp_registers;
7368 tdep->have_vfp_pseudos = have_vfp_pseudos;
7369 tdep->have_neon_pseudos = have_neon_pseudos;
7370 tdep->have_neon = have_neon;
7373 switch (info.byte_order_for_code)
7375 case BFD_ENDIAN_BIG:
7376 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
7377 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
7378 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
7379 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
7383 case BFD_ENDIAN_LITTLE:
7384 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
7385 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
7386 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
7387 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
7392 internal_error (__FILE__, __LINE__,
7393 _("arm_gdbarch_init: bad byte order for float format"));
7396 /* On ARM targets char defaults to unsigned. */
7397 set_gdbarch_char_signed (gdbarch, 0);
7399 /* Note: for displaced stepping, this includes the breakpoint, and one word
7400 of additional scratch space.  This setting isn't used for anything besides
7401 displaced stepping at present. */
7402 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
7404 /* This should be low enough for everything. */
7405 tdep->lowest_pc = 0x20;
7406 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
7408 /* The default, for both APCS and AAPCS, is to return small
7409 structures in registers. */
7410 tdep->struct_return = reg_struct_return;
7412 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
7413 set_gdbarch_frame_align (gdbarch, arm_frame_align);
7415 set_gdbarch_write_pc (gdbarch, arm_write_pc);
7417 /* Frame handling. */
7418 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
7419 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
7420 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
7422 frame_base_set_default (gdbarch, &arm_normal_base);
7424 /* Address manipulation. */
7425 set_gdbarch_smash_text_address (gdbarch, arm_smash_text_address);
7426 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
7428 /* Advance PC across function entry code. */
7429 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
7431 /* Detect whether PC is in function epilogue. */
7432 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
7434 /* Skip trampolines. */
7435 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
7437 /* The stack grows downward. */
7438 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
7440 /* Breakpoint manipulation. */
7441 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
7442 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
7443 arm_remote_breakpoint_from_pc);
7445 /* Information about registers, etc. */
7446 set_gdbarch_deprecated_fp_regnum (gdbarch, ARM_FP_REGNUM); /* ??? */
7447 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
7448 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
7449 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
7450 set_gdbarch_register_type (gdbarch, arm_register_type);
7452 /* This "info float" is FPA-specific.  Use the generic version if we do not have FPA.  */
7454 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
7455 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
7457 /* Internal <-> external register number maps. */
7458 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
7459 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
7461 set_gdbarch_register_name (gdbarch, arm_register_name);
7463 /* Returning results. */
7464 set_gdbarch_return_value (gdbarch, arm_return_value);
7467 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
7469 /* Minsymbol frobbing. */
7470 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
7471 set_gdbarch_coff_make_msymbol_special (gdbarch,
7472 arm_coff_make_msymbol_special);
7473 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
7475 /* Thumb-2 IT block support. */
7476 set_gdbarch_adjust_breakpoint_address (gdbarch,
7477 arm_adjust_breakpoint_address);
7479 /* Virtual tables. */
7480 set_gdbarch_vbit_in_delta (gdbarch, 1);
7482 /* Hook in the ABI-specific overrides, if they have been registered. */
7483 gdbarch_init_osabi (info, gdbarch);
7485 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
7487 /* Add some default predicates. */
7488 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
7489 dwarf2_append_unwinders (gdbarch);
7490 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
7492 /* Now we have tuned the configuration, set a few final things,
7493 based on what the OS ABI has told us. */
7495 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
7496 binaries are always marked. */
7497 if (tdep->arm_abi == ARM_ABI_AUTO)
7498 tdep->arm_abi = ARM_ABI_APCS;
7500 /* We used to default to FPA for generic ARM, but almost nobody
7501 uses that now, and we now provide a way for the user to force
7502 the model. So default to the most useful variant. */
7503 if (tdep->fp_model == ARM_FLOAT_AUTO)
7504 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
7506 if (tdep->jb_pc >= 0)
7507 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
7509 /* Floating point sizes and format. */
7510 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
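/* FPA and soft-FPA pass doubles in the mixed-endian "word-swapped"
   layout (little-endian bytes within each 32-bit word, most
   significant word at the lower address); all other models use plain
   IEEE doubles.  */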
7511 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
7513 set_gdbarch_double_format
7514 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
7515 set_gdbarch_long_double_format
7516 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
7520 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
7521 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
7524 if (have_vfp_pseudos)
7526 /* NOTE: These are the only pseudo registers used by
7527 the ARM target at the moment. If more are added, a
7528 little more care in numbering will be needed. */
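/* The 32 single-precision S pseudo registers come first, followed
   (when NEON pseudo registers are needed) by the 16 quad Q pseudo
   registers; arm_register_name and arm_pseudo_read/write above rely
   on that ordering.  */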
7530 int num_pseudos = 32;
7531 if (have_neon_pseudos)
7533 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
7534 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
7535 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
7540 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
7542 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
7544 /* Override tdesc_register_type to adjust the types of VFP
7545 registers for NEON. */
7546 set_gdbarch_register_type (gdbarch, arm_register_type);
7549 /* Add standard register aliases. We add aliases even for those
7550 names which are used by the current architecture - it's simpler,
7551 and does no harm, since nothing ever lists user registers. */
7552 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
7553 user_reg_add (gdbarch, arm_register_aliases[i].name,
7554 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
7560 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
7562 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7567 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
7568 (unsigned long) tdep->lowest_pc);
7571 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
7574 _initialize_arm_tdep (void)
7576 struct ui_file *stb;
7578 struct cmd_list_element *new_set, *new_show;
7579 const char *setname;
7580 const char *setdesc;
7581 const char *const *regnames;
7583 static char *helptext;
7584 char regdesc[1024], *rdptr = regdesc;
7585 size_t rest = sizeof (regdesc);
7587 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
7589 arm_objfile_data_key
7590 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
7592 /* Register an ELF OS ABI sniffer for ARM binaries. */
7593 gdbarch_register_osabi_sniffer (bfd_arch_arm,
7594 bfd_target_elf_flavour,
7595 arm_elf_osabi_sniffer);
7597 /* Initialize the standard target descriptions. */
7598 initialize_tdesc_arm_with_m ();
7600 /* Get the number of possible sets of register names defined in opcodes. */
7601 num_disassembly_options = get_arm_regname_num_options ();
7603 /* Add root prefix command for all "set arm"/"show arm" commands. */
7604 add_prefix_cmd ("arm", no_class, set_arm_command,
7605 _("Various ARM-specific commands."),
7606 &setarmcmdlist, "set arm ", 0, &setlist);
7608 add_prefix_cmd ("arm", no_class, show_arm_command,
7609 _("Various ARM-specific commands."),
7610 &showarmcmdlist, "show arm ", 0, &showlist);
7612 /* Sync the opcode insn printer with our register viewer. */
7613 parse_arm_disassembler_option ("reg-names-std");
7615 /* Initialize the array that will be passed to
7616 add_setshow_enum_cmd(). */
7617 valid_disassembly_styles
7618 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
7619 for (i = 0; i < num_disassembly_options; i++)
7621 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
7622 valid_disassembly_styles[i] = setname;
7623 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
7626 /* When we find the default names, tell the disassembler to use them.  */
7628 if (!strcmp (setname, "std"))
7630 disassembly_style = setname;
7631 set_arm_regname_option (i);
7634 /* Mark the end of valid options. */
7635 valid_disassembly_styles[num_disassembly_options] = NULL;
7637 /* Create the help text. */
7638 stb = mem_fileopen ();
7639 fprintf_unfiltered (stb, "%s%s%s",
7640 _("The valid values are:\n"),
7642 _("The default is \"std\"."));
7643 helptext = ui_file_xstrdup (stb, NULL);
7644 ui_file_delete (stb);
7646 add_setshow_enum_cmd("disassembler", no_class,
7647 valid_disassembly_styles, &disassembly_style,
7648 _("Set the disassembly style."),
7649 _("Show the disassembly style."),
7651 set_disassembly_style_sfunc,
7652 NULL, /* FIXME: i18n: The disassembly style is \"%s\". */
7653 &setarmcmdlist, &showarmcmdlist);
7655 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
7656 _("Set usage of ARM 32-bit mode."),
7657 _("Show usage of ARM 32-bit mode."),
7658 _("When off, a 26-bit PC will be used."),
7660 NULL, /* FIXME: i18n: Usage of ARM 32-bit mode is %s. */
7661 &setarmcmdlist, &showarmcmdlist);
7663 /* Add a command to allow the user to force the FPU model. */
7664 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
7665 _("Set the floating point type."),
7666 _("Show the floating point type."),
7667 _("auto - Determine the FP typefrom the OS-ABI.\n\
7668 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
7669 fpa - FPA co-processor (GCC compiled).\n\
7670 softvfp - Software FP with pure-endian doubles.\n\
7671 vfp - VFP co-processor."),
7672 set_fp_model_sfunc, show_fp_model,
7673 &setarmcmdlist, &showarmcmdlist);
7675 /* Add a command to allow the user to force the ABI. */
7676 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
7679 NULL, arm_set_abi, arm_show_abi,
7680 &setarmcmdlist, &showarmcmdlist);
7682 /* Add two commands to allow the user to force the assumed execution mode.  */
7684 add_setshow_enum_cmd ("fallback-mode", class_support,
7685 arm_mode_strings, &arm_fallback_mode_string,
7686 _("Set the mode assumed when symbols are unavailable."),
7687 _("Show the mode assumed when symbols are unavailable."),
7688 NULL, NULL, arm_show_fallback_mode,
7689 &setarmcmdlist, &showarmcmdlist);
7690 add_setshow_enum_cmd ("force-mode", class_support,
7691 arm_mode_strings, &arm_force_mode_string,
7692 _("Set the mode assumed even when symbols are available."),
7693 _("Show the mode assumed even when symbols are available."),
7694 NULL, NULL, arm_show_force_mode,
7695 &setarmcmdlist, &showarmcmdlist);
7697 /* Debugging flag. */
7698 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
7699 _("Set ARM debugging."),
7700 _("Show ARM debugging."),
7701 _("When on, arm-specific debugging is enabled."),
7703 NULL, /* FIXME: i18n: "ARM debugging is %s."  */
7704 &setdebuglist, &showdebuglist);