1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data *arm_objfile_data_key;
86 struct arm_mapping_symbol
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
92 DEF_VEC_O(arm_mapping_symbol_s);
94 struct arm_per_objfile
96 VEC(arm_mapping_symbol_s) **section_maps;
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings[] =
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings[] =
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings[] =
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode = -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
162 } arm_register_aliases[] = {
163 /* Basic register numbers. */
180 /* Synonyms (argument and variable registers). */
193 /* Other platform-specific names for r9. */
199 /* Names used by GCC (not listed in the ARM EABI). */
201 /* A special name from the older ATPCS. */
205 static const char *const arm_register_names[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style;
220 /* This is used to keep the bfd arch_info in sync with the disassembly
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element *);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat *, const void *,
228 static void convert_to_extended (const struct floatformat *, void *,
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, const gdb_byte *buf);
238 static int thumb_insn_size (unsigned short inst1);
240 struct arm_prologue_cache
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
253 /* The register used to hold the frame pointer for this frame. */
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg *saved_regs;
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 CORE_ADDR prologue_start,
262 CORE_ADDR prologue_end,
263 struct arm_prologue_cache *cache);
265 /* Architecture version for displaced stepping. This effects the behaviour of
266 certain instructions, and really should not be hard-wired. */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
276 /* Set to true if the 32-bit mode is in use. */
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283 arm_psr_thumb_bit (struct gdbarch *gdbarch)
285 if (gdbarch_tdep (gdbarch)->is_m)
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info *frame)
297 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
305 return (cpsr & t_bit) != 0;
308 /* Callback for VEC_lower_bound. */
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 const struct arm_mapping_symbol *rhs)
314 return lhs->value < rhs->value;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
322 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
324 struct obj_section *sec;
326 /* If there are mapping symbols, consult them. */
327 sec = find_pc_section (memaddr);
330 struct arm_per_objfile *data;
331 VEC(arm_mapping_symbol_s) *map;
332 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
336 data = objfile_data (sec->objfile, arm_objfile_data_key);
339 map = data->section_maps[sec->the_bfd_section->index];
340 if (!VEC_empty (arm_mapping_symbol_s, map))
342 struct arm_mapping_symbol *map_sym;
344 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
345 arm_compare_mapping_symbols);
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx < VEC_length (arm_mapping_symbol_s, map))
353 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
354 if (map_sym->value == map_key.value)
357 *start = map_sym->value + obj_section_addr (sec);
358 return map_sym->type;
364 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
366 *start = map_sym->value + obj_section_addr (sec);
367 return map_sym->type;
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
383 struct bound_minimal_symbol sym;
385 struct displaced_step_closure* dsc
386 = get_displaced_step_closure_by_addr(memaddr);
388 /* If checking the mode of displaced instruction in copy area, the mode
389 should be determined by instruction on the original address. */
393 fprintf_unfiltered (gdb_stdlog,
394 "displaced: check mode of %.8lx instead of %.8lx\n",
395 (unsigned long) dsc->insn_addr,
396 (unsigned long) memaddr);
397 memaddr = dsc->insn_addr;
400 /* If bit 0 of the address is set, assume this is a Thumb address. */
401 if (IS_THUMB_ADDR (memaddr))
404 /* Respect internal mode override if active. */
405 if (arm_override_mode != -1)
406 return arm_override_mode;
408 /* If the user wants to override the symbol table, let him. */
409 if (strcmp (arm_force_mode_string, "arm") == 0)
411 if (strcmp (arm_force_mode_string, "thumb") == 0)
414 /* ARM v6-M and v7-M are always in Thumb mode. */
415 if (gdbarch_tdep (gdbarch)->is_m)
418 /* If there are mapping symbols, consult them. */
419 type = arm_find_mapping_symbol (memaddr, NULL);
423 /* Thumb functions have a "special" bit set in minimal symbols. */
424 sym = lookup_minimal_symbol_by_pc (memaddr);
426 return (MSYMBOL_IS_SPECIAL (sym.minsym));
428 /* If the user wants to override the fallback mode, let them. */
429 if (strcmp (arm_fallback_mode_string, "arm") == 0)
431 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode it will be executed). */
439 if (target_has_registers)
440 return arm_frame_is_thumb (get_current_frame ());
442 /* Otherwise we're out of luck; we assume ARM. */
446 /* Remove useless bits from addresses in a running program. */
448 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch)->is_m
453 && (val & 0xfffffff0) == 0xfffffff0)
457 return UNMAKE_THUMB_ADDR (val);
459 return (val & 0x03fffffc);
462 /* Return 1 if PC is the start of a compiler helper function which
463 can be safely ignored during prologue skipping. IS_THUMB is true
464 if the function is known to be a Thumb function due to the way it
467 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
469 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
470 struct bound_minimal_symbol msym;
472 msym = lookup_minimal_symbol_by_pc (pc);
473 if (msym.minsym != NULL
474 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
475 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
477 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
479 /* The GNU linker's Thumb call stub to foo is named
481 if (strstr (name, "_from_thumb") != NULL)
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 arguments to their argument types in non-prototyped
487 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
489 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
492 /* Internal functions related to thread-local storage. */
493 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
495 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
506 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
509 == 0xe240f01f) /* sub pc, r0, #31 */
516 /* Support routines for instruction parsing. */
517 #define submask(x) ((1L << ((x) + 1)) - 1)
518 #define bit(obj,st) (((obj) >> (st)) & 1)
519 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
520 #define sbits(obj,st,fn) \
521 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
522 #define BranchDest(addr,instr) \
523 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
525 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
526 the first 16-bit of instruction, and INSN2 is the second 16-bit of
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
534 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
535 the 32-bit instruction. */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
540 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
543 thumb_expand_immediate (unsigned int imm)
545 unsigned int count = imm >> 7;
553 return (imm & 0xff) | ((imm & 0xff) << 16);
555 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
557 return (imm & 0xff) | ((imm & 0xff) << 8)
558 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
561 return (0x80 | (imm & 0x7f)) << (32 - count);
564 /* Return 1 if the 16-bit Thumb instruction INST might change
565 control flow, 0 otherwise. */
568 thumb_instruction_changes_pc (unsigned short inst)
570 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
573 if ((inst & 0xf000) == 0xd000) /* conditional branch */
576 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
579 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
582 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
585 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
591 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592 might change control flow, 0 otherwise. */
595 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
597 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
599 /* Branches and miscellaneous control instructions. */
601 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
606 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
608 /* SUBS PC, LR, #imm8. */
611 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
613 /* Conditional branch. */
620 if ((inst1 & 0xfe50) == 0xe810)
622 /* Load multiple or RFE. */
624 if (bit (inst1, 7) && !bit (inst1, 8))
630 else if (!bit (inst1, 7) && bit (inst1, 8))
636 else if (bit (inst1, 7) && bit (inst1, 8))
641 else if (!bit (inst1, 7) && !bit (inst1, 8))
650 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
652 /* MOV PC or MOVS PC. */
656 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
659 if (bits (inst1, 0, 3) == 15)
665 if ((inst2 & 0x0fc0) == 0x0000)
671 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
677 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
686 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
687 epilogue, 0 otherwise. */
690 thumb_instruction_restores_sp (unsigned short insn)
692 return (insn == 0x46bd /* mov sp, r7 */
693 || (insn & 0xff80) == 0xb000 /* add sp, imm */
694 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
704 thumb_analyze_prologue (struct gdbarch *gdbarch,
705 CORE_ADDR start, CORE_ADDR limit,
706 struct arm_prologue_cache *cache)
708 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
709 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
712 struct pv_area *stack;
713 struct cleanup *back_to;
715 CORE_ADDR unrecognized_pc = 0;
717 for (i = 0; i < 16; i++)
718 regs[i] = pv_register (i, 0);
719 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
720 back_to = make_cleanup_free_pv_area (stack);
722 while (start < limit)
726 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
728 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
733 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask = (insn & 0xff) | ((insn & 0x100) << 6);
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
742 if (mask & (1 << regno))
744 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
746 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
749 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
751 offset = (insn & 0x7f) << 2; /* get scaled offset */
752 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
755 else if (thumb_instruction_restores_sp (insn))
757 /* Don't scan past the epilogue. */
760 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
763 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
767 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
771 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
773 && pv_is_constant (regs[bits (insn, 3, 5)]))
774 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
775 regs[bits (insn, 6, 8)]);
776 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs[bits (insn, 3, 6)]))
779 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
780 int rm = bits (insn, 3, 6);
781 regs[rd] = pv_add (regs[rd], regs[rm]);
783 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
785 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
786 int src_reg = (insn & 0x78) >> 3;
787 regs[dst_reg] = regs[src_reg];
789 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno = (insn >> 8) & 0x7;
797 offset = (insn & 0xff) << 2;
798 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
800 if (pv_area_store_would_trash (stack, addr))
803 pv_area_store (stack, addr, 4, regs[regno]);
805 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
807 int rd = bits (insn, 0, 2);
808 int rn = bits (insn, 3, 5);
811 offset = bits (insn, 6, 10) << 2;
812 addr = pv_add_constant (regs[rn], offset);
814 if (pv_area_store_would_trash (stack, addr))
817 pv_area_store (stack, addr, 4, regs[rd]);
819 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
822 /* Ignore stores of argument registers to the stack. */
824 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
829 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
832 /* Similarly ignore single loads from the stack. */
834 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
839 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
842 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
843 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant;
849 loc = start + 4 + bits (insn, 0, 7) * 4;
850 constant = read_memory_unsigned_integer (loc, 4, byte_order);
851 regs[bits (insn, 8, 10)] = pv_constant (constant);
853 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
855 unsigned short inst2;
857 inst2 = read_memory_unsigned_integer (start + 2, 2,
858 byte_order_for_code);
860 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
866 int j1, j2, imm1, imm2;
868 imm1 = sbits (insn, 0, 10);
869 imm2 = bits (inst2, 0, 10);
870 j1 = bit (inst2, 13);
871 j2 = bit (inst2, 11);
873 offset = ((imm1 << 12) + (imm2 << 1));
874 offset ^= ((!j2) << 22) | ((!j1) << 23);
876 nextpc = start + 4 + offset;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2, 12) == 0)
879 nextpc = nextpc & 0xfffffffc;
881 if (!skip_prologue_function (gdbarch, nextpc,
882 bit (inst2, 12) != 0))
886 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
888 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
890 pv_t addr = regs[bits (insn, 0, 3)];
893 if (pv_area_store_would_trash (stack, addr))
896 /* Calculate offsets of saved registers. */
897 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
898 if (inst2 & (1 << regno))
900 addr = pv_add_constant (addr, -4);
901 pv_area_store (stack, addr, 4, regs[regno]);
905 regs[bits (insn, 0, 3)] = addr;
908 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
910 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
912 int regno1 = bits (inst2, 12, 15);
913 int regno2 = bits (inst2, 8, 11);
914 pv_t addr = regs[bits (insn, 0, 3)];
916 offset = inst2 & 0xff;
918 addr = pv_add_constant (addr, offset);
920 addr = pv_add_constant (addr, -offset);
922 if (pv_area_store_would_trash (stack, addr))
925 pv_area_store (stack, addr, 4, regs[regno1]);
926 pv_area_store (stack, pv_add_constant (addr, 4),
930 regs[bits (insn, 0, 3)] = addr;
933 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2 & 0x0c00) == 0x0c00
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 int regno = bits (inst2, 12, 15);
938 pv_t addr = regs[bits (insn, 0, 3)];
940 offset = inst2 & 0xff;
942 addr = pv_add_constant (addr, offset);
944 addr = pv_add_constant (addr, -offset);
946 if (pv_area_store_would_trash (stack, addr))
949 pv_area_store (stack, addr, 4, regs[regno]);
952 regs[bits (insn, 0, 3)] = addr;
955 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 int regno = bits (inst2, 12, 15);
961 offset = inst2 & 0xfff;
962 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
964 if (pv_area_store_would_trash (stack, addr))
967 pv_area_store (stack, addr, 4, regs[regno]);
970 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Ignore stores of argument registers to the stack. */
981 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
983 && (inst2 & 0x8000) == 0x0000
984 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
989 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore dual loads from the stack. */
995 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2 & 0x0d00) == 0x0c00
997 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
998 /* Similarly ignore single loads from the stack. */
1001 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1003 /* Similarly ignore single loads from the stack. */
1006 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2 & 0x8000) == 0x0000)
1009 unsigned int imm = ((bits (insn, 10, 10) << 11)
1010 | (bits (inst2, 12, 14) << 8)
1011 | bits (inst2, 0, 7));
1013 regs[bits (inst2, 8, 11)]
1014 = pv_add_constant (regs[bits (insn, 0, 3)],
1015 thumb_expand_immediate (imm));
1018 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2 & 0x8000) == 0x0000)
1021 unsigned int imm = ((bits (insn, 10, 10) << 11)
1022 | (bits (inst2, 12, 14) << 8)
1023 | bits (inst2, 0, 7));
1025 regs[bits (inst2, 8, 11)]
1026 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1029 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2 & 0x8000) == 0x0000)
1032 unsigned int imm = ((bits (insn, 10, 10) << 11)
1033 | (bits (inst2, 12, 14) << 8)
1034 | bits (inst2, 0, 7));
1036 regs[bits (inst2, 8, 11)]
1037 = pv_add_constant (regs[bits (insn, 0, 3)],
1038 - (CORE_ADDR) thumb_expand_immediate (imm));
1041 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2 & 0x8000) == 0x0000)
1044 unsigned int imm = ((bits (insn, 10, 10) << 11)
1045 | (bits (inst2, 12, 14) << 8)
1046 | bits (inst2, 0, 7));
1048 regs[bits (inst2, 8, 11)]
1049 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1052 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1054 unsigned int imm = ((bits (insn, 10, 10) << 11)
1055 | (bits (inst2, 12, 14) << 8)
1056 | bits (inst2, 0, 7));
1058 regs[bits (inst2, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm));
1062 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1067 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1070 else if (insn == 0xea5f /* mov.w Rd,Rm */
1071 && (inst2 & 0xf0f0) == 0)
1073 int dst_reg = (inst2 & 0x0f00) >> 8;
1074 int src_reg = inst2 & 0xf;
1075 regs[dst_reg] = regs[src_reg];
1078 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1080 /* Constant pool loads. */
1081 unsigned int constant;
1084 offset = bits (inst2, 0, 11);
1086 loc = start + 4 + offset;
1088 loc = start + 4 - offset;
1090 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1091 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1094 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1096 /* Constant pool loads. */
1097 unsigned int constant;
1100 offset = bits (inst2, 0, 7) << 2;
1102 loc = start + 4 + offset;
1104 loc = start + 4 - offset;
1106 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1107 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1109 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1110 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1113 else if (thumb2_instruction_changes_pc (insn, inst2))
1115 /* Don't scan past anything that might change control flow. */
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc = start;
1127 else if (thumb_instruction_changes_pc (insn))
1129 /* Don't scan past anything that might change control flow. */
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc = start;
1143 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch, start));
1146 if (unrecognized_pc == 0)
1147 unrecognized_pc = start;
1151 do_cleanups (back_to);
1152 return unrecognized_pc;
1155 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache->framereg = ARM_FP_REGNUM;
1159 cache->framesize = -regs[ARM_FP_REGNUM].k;
1161 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache->framereg = THUMB_FP_REGNUM;
1165 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache->framereg = ARM_SP_REGNUM;
1171 cache->framesize = -regs[ARM_SP_REGNUM].k;
1174 for (i = 0; i < 16; i++)
1175 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1176 cache->saved_regs[i].addr = offset;
1178 do_cleanups (back_to);
1179 return unrecognized_pc;
1183 /* Try to analyze the instructions starting from PC, which load symbol
1184 __stack_chk_guard. Return the address of instruction after loading this
1185 symbol, set the dest register number to *BASEREG, and set the size of
1186 instructions for loading symbol in OFFSET. Return 0 if instructions are
1190 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1191 unsigned int *destreg, int *offset)
1193 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1195 unsigned int low, high, address;
1200 unsigned short insn1
1201 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1203 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1205 *destreg = bits (insn1, 8, 10);
1207 address = bits (insn1, 0, 7);
1209 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1211 unsigned short insn2
1212 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1214 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1217 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1219 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1221 /* movt Rd, #const */
1222 if ((insn1 & 0xfbc0) == 0xf2c0)
1224 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1225 *destreg = bits (insn2, 8, 11);
1227 address = (high << 16 | low);
1234 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1236 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, #immed */
1238 address = bits (insn, 0, 11);
1239 *destreg = bits (insn, 12, 15);
1242 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1244 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1247 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1249 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1251 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1252 *destreg = bits (insn, 12, 15);
1254 address = (high << 16 | low);
1262 /* Try to skip a sequence of instructions used for stack protector. If PC
1263 points to the first instruction of this sequence, return the address of
1264 first instruction after this sequence, otherwise, return original PC.
1266 On arm, this sequence of instructions is composed of mainly three steps,
1267 Step 1: load symbol __stack_chk_guard,
1268 Step 2: load from address of __stack_chk_guard,
1269 Step 3: store it to somewhere else.
1271 Usually, instructions on step 2 and step 3 are the same on various ARM
1272 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1273 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1274 instructions in step 1 vary from different ARM architectures. On ARMv7,
1277 movw Rn, #:lower16:__stack_chk_guard
1278 movt Rn, #:upper16:__stack_chk_guard
1285 .word __stack_chk_guard
1287 Since ldr/str is a very popular instruction, we can't use them as
1288 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1289 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1290 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1293 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1295 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1296 unsigned int basereg;
1297 struct bound_minimal_symbol stack_chk_guard;
1299 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1302 /* Try to parse the instructions in Step 1. */
1303 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1308 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1309 /* If name of symbol doesn't start with '__stack_chk_guard', this
1310 instruction sequence is not for stack protector. If symbol is
1311 removed, we conservatively think this sequence is for stack protector. */
1312 if (stack_chk_guard.minsym
1313 && strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1314 "__stack_chk_guard",
1315 strlen ("__stack_chk_guard")) != 0)
1320 unsigned int destreg;
1322 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1324 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1325 if ((insn & 0xf800) != 0x6800)
1327 if (bits (insn, 3, 5) != basereg)
1329 destreg = bits (insn, 0, 2);
1331 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1332 byte_order_for_code);
1333 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1334 if ((insn & 0xf800) != 0x6000)
1336 if (destreg != bits (insn, 0, 2))
1341 unsigned int destreg;
1343 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1345 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1346 if ((insn & 0x0e500000) != 0x04100000)
1348 if (bits (insn, 16, 19) != basereg)
1350 destreg = bits (insn, 12, 15);
1351 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1352 insn = read_memory_unsigned_integer (pc + offset + 4,
1353 4, byte_order_for_code);
1354 if ((insn & 0x0e500000) != 0x04000000)
1356 if (bits (insn, 12, 15) != destreg)
1359 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1362 return pc + offset + 4;
1364 return pc + offset + 8;
1367 /* Advance the PC across any function entry prologue instructions to
1368 reach some "real" code.
1370 The APCS (ARM Procedure Call Standard) defines the following
1374 [stmfd sp!, {a1,a2,a3,a4}]
1375 stmfd sp!, {...,fp,ip,lr,pc}
1376 [stfe f7, [sp, #-12]!]
1377 [stfe f6, [sp, #-12]!]
1378 [stfe f5, [sp, #-12]!]
1379 [stfe f4, [sp, #-12]!]
1380 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1383 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1385 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1388 CORE_ADDR func_addr, limit_pc;
1390 /* See if we can determine the end of the prologue via the symbol table.
1391 If so, then return either PC, or the PC after the prologue, whichever
1393 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1395 CORE_ADDR post_prologue_pc
1396 = skip_prologue_using_sal (gdbarch, func_addr);
1397 struct symtab *s = find_pc_symtab (func_addr);
1399 if (post_prologue_pc)
1401 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1404 /* GCC always emits a line note before the prologue and another
1405 one after, even if the two are at the same address or on the
1406 same line. Take advantage of this so that we do not need to
1407 know every instruction that might appear in the prologue. We
1408 will have producer information for most binaries; if it is
1409 missing (e.g. for -gstabs), assuming the GNU tools. */
1410 if (post_prologue_pc
1412 || s->producer == NULL
1413 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1414 || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1415 return post_prologue_pc;
1417 if (post_prologue_pc != 0)
1419 CORE_ADDR analyzed_limit;
1421 /* For non-GCC compilers, make sure the entire line is an
1422 acceptable prologue; GDB will round this function's
1423 return value up to the end of the following line so we
1424 can not skip just part of a line (and we do not want to).
1426 RealView does not treat the prologue specially, but does
1427 associate prologue code with the opening brace; so this
1428 lets us skip the first line if we think it is the opening
1430 if (arm_pc_is_thumb (gdbarch, func_addr))
1431 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1432 post_prologue_pc, NULL);
1434 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1435 post_prologue_pc, NULL);
1437 if (analyzed_limit != post_prologue_pc)
1440 return post_prologue_pc;
1444 /* Can't determine prologue from the symbol table, need to examine
1447 /* Find an upper limit on the function prologue using the debug
1448 information. If the debug information could not be used to provide
1449 that bound, then use an arbitrary large number as the upper bound. */
1450 /* Like arm_scan_prologue, stop no later than pc + 64. */
1451 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1453 limit_pc = pc + 64; /* Magic. */
1456 /* Check if this is Thumb code. */
1457 if (arm_pc_is_thumb (gdbarch, pc))
1458 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1460 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1462 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1464 /* "mov ip, sp" is no longer a required part of the prologue. */
1465 if (inst == 0xe1a0c00d) /* mov ip, sp */
1468 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1471 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1474 /* Some prologues begin with "str lr, [sp, #-4]!". */
1475 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1478 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1481 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1484 /* Any insns after this point may float into the code, if it makes
1485 for better instruction scheduling, so we skip them only if we
1486 find them, but still consider the function to be frame-ful. */
1488 /* We may have either one sfmfd instruction here, or several stfe
1489 insns, depending on the version of floating point code we
1491 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1494 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1497 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1500 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1503 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1504 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1505 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1508 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1509 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1510 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1513 /* Un-recognized instruction; stop scanning. */
1517 return skip_pc; /* End of prologue. */
1521 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1522 This function decodes a Thumb function prologue to determine:
1523 1) the size of the stack frame
1524 2) which registers are saved on it
1525 3) the offsets of saved regs
1526 4) the offset from the stack pointer to the frame pointer
1528 A typical Thumb function prologue would create this stack frame
1529 (offsets relative to FP)
1530 old SP -> 24 stack parameters
1533 R7 -> 0 local variables (16 bytes)
1534 SP -> -12 additional stack space (12 bytes)
1535 The frame size would thus be 36 bytes, and the frame offset would be
1536 12 bytes. The frame register is R7.
1538 The comments for thumb_skip_prolog() describe the algorithm we use
1539 to detect the end of the prolog. */
1543 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1544 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1546 CORE_ADDR prologue_start;
1547 CORE_ADDR prologue_end;
1549 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1552 /* See comment in arm_scan_prologue for an explanation of
1554 if (prologue_end > prologue_start + 64)
1556 prologue_end = prologue_start + 64;
1560 /* We're in the boondocks: we have no idea where the start of the
1564 prologue_end = min (prologue_end, prev_pc);
1566 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1569 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1572 arm_instruction_changes_pc (uint32_t this_instr)
1574 if (bits (this_instr, 28, 31) == INST_NV)
1575 /* Unconditional instructions. */
1576 switch (bits (this_instr, 24, 27))
1580 /* Branch with Link and change to Thumb. */
1585 /* Coprocessor register transfer. */
1586 if (bits (this_instr, 12, 15) == 15)
1587 error (_("Invalid update to pc in instruction"));
1593 switch (bits (this_instr, 25, 27))
1596 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1598 /* Multiplies and extra load/stores. */
1599 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1600 /* Neither multiplies nor extension load/stores are allowed
1604 /* Otherwise, miscellaneous instructions. */
1606 /* BX <reg>, BXJ <reg>, BLX <reg> */
1607 if (bits (this_instr, 4, 27) == 0x12fff1
1608 || bits (this_instr, 4, 27) == 0x12fff2
1609 || bits (this_instr, 4, 27) == 0x12fff3)
1612 /* Other miscellaneous instructions are unpredictable if they
1616 /* Data processing instruction. Fall through. */
1619 if (bits (this_instr, 12, 15) == 15)
1626 /* Media instructions and architecturally undefined instructions. */
1627 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1631 if (bit (this_instr, 20) == 0)
1635 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1641 /* Load/store multiple. */
1642 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1648 /* Branch and branch with link. */
1653 /* Coprocessor transfers or SWIs can not affect PC. */
1657 internal_error (__FILE__, __LINE__, _("bad value in switch"));
1661 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1662 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1663 fill it in. Return the first address not recognized as a prologue
1666 We recognize all the instructions typically found in ARM prologues,
1667 plus harmless instructions which can be skipped (either for analysis
1668 purposes, or a more restrictive set that can be skipped when finding
1669 the end of the prologue). */
1672 arm_analyze_prologue (struct gdbarch *gdbarch,
1673 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1674 struct arm_prologue_cache *cache)
1676 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1677 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1679 CORE_ADDR offset, current_pc;
1680 pv_t regs[ARM_FPS_REGNUM];
1681 struct pv_area *stack;
1682 struct cleanup *back_to;
1683 int framereg, framesize;
1684 CORE_ADDR unrecognized_pc = 0;
1686 /* Search the prologue looking for instructions that set up the
1687 frame pointer, adjust the stack pointer, and save registers.
1689 Be careful, however, and if it doesn't look like a prologue,
1690 don't try to scan it. If, for instance, a frameless function
1691 begins with stmfd sp!, then we will tell ourselves there is
1692 a frame, which will confuse stack traceback, as well as "finish"
1693 and other operations that rely on a knowledge of the stack
1696 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1697 regs[regno] = pv_register (regno, 0);
1698 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1699 back_to = make_cleanup_free_pv_area (stack);
1701 for (current_pc = prologue_start;
1702 current_pc < prologue_end;
1706 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1708 if (insn == 0xe1a0c00d) /* mov ip, sp */
1710 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1713 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1714 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1716 unsigned imm = insn & 0xff; /* immediate value */
1717 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1718 int rd = bits (insn, 12, 15);
1719 imm = (imm >> rot) | (imm << (32 - rot));
1720 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1723 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1724 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1726 unsigned imm = insn & 0xff; /* immediate value */
1727 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1728 int rd = bits (insn, 12, 15);
1729 imm = (imm >> rot) | (imm << (32 - rot));
1730 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1733 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1736 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1738 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1739 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1740 regs[bits (insn, 12, 15)]);
1743 else if ((insn & 0xffff0000) == 0xe92d0000)
1744 /* stmfd sp!, {..., fp, ip, lr, pc}
1746 stmfd sp!, {a1, a2, a3, a4} */
1748 int mask = insn & 0xffff;
1750 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1753 /* Calculate offsets of saved registers. */
1754 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1755 if (mask & (1 << regno))
1758 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1759 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1762 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1763 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1764 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1766 /* No need to add this to saved_regs -- it's just an arg reg. */
1769 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1770 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1771 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1773 /* No need to add this to saved_regs -- it's just an arg reg. */
1776 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1778 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1780 /* No need to add this to saved_regs -- it's just arg regs. */
1783 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1785 unsigned imm = insn & 0xff; /* immediate value */
1786 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1787 imm = (imm >> rot) | (imm << (32 - rot));
1788 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1790 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1792 unsigned imm = insn & 0xff; /* immediate value */
1793 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1794 imm = (imm >> rot) | (imm << (32 - rot));
1795 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1797 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1799 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1801 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1804 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1805 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1806 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1808 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1810 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1812 int n_saved_fp_regs;
1813 unsigned int fp_start_reg, fp_bound_reg;
1815 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1818 if ((insn & 0x800) == 0x800) /* N0 is set */
1820 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1821 n_saved_fp_regs = 3;
1823 n_saved_fp_regs = 1;
1827 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1828 n_saved_fp_regs = 2;
1830 n_saved_fp_regs = 4;
1833 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1834 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1835 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1837 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1838 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1839 regs[fp_start_reg++]);
1842 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1844 /* Allow some special function calls when skipping the
1845 prologue; GCC generates these before storing arguments to
1847 CORE_ADDR dest = BranchDest (current_pc, insn);
1849 if (skip_prologue_function (gdbarch, dest, 0))
1854 else if ((insn & 0xf0000000) != 0xe0000000)
1855 break; /* Condition not true, exit early. */
1856 else if (arm_instruction_changes_pc (insn))
1857 /* Don't scan past anything that might change control flow. */
1859 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1860 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1861 /* Ignore block loads from the stack, potentially copying
1862 parameters from memory. */
1864 else if ((insn & 0xfc500000) == 0xe4100000
1865 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1866 /* Similarly ignore single loads from the stack. */
1868 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1869 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1870 register instead of the stack. */
1874 /* The optimizer might shove anything into the prologue,
1875 so we just skip what we don't recognize. */
1876 unrecognized_pc = current_pc;
1881 if (unrecognized_pc == 0)
1882 unrecognized_pc = current_pc;
1884 /* The frame size is just the distance from the frame register
1885 to the original stack pointer. */
1886 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1888 /* Frame pointer is fp. */
1889 framereg = ARM_FP_REGNUM;
1890 framesize = -regs[ARM_FP_REGNUM].k;
1894 /* Try the stack pointer... this is a bit desperate. */
1895 framereg = ARM_SP_REGNUM;
1896 framesize = -regs[ARM_SP_REGNUM].k;
1901 cache->framereg = framereg;
1902 cache->framesize = framesize;
1904 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1905 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1906 cache->saved_regs[regno].addr = offset;
1910 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1911 paddress (gdbarch, unrecognized_pc));
1913 do_cleanups (back_to);
1914 return unrecognized_pc;
1918 arm_scan_prologue (struct frame_info *this_frame,
1919 struct arm_prologue_cache *cache)
1921 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1922 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1924 CORE_ADDR prologue_start, prologue_end, current_pc;
1925 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1926 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1927 pv_t regs[ARM_FPS_REGNUM];
1928 struct pv_area *stack;
1929 struct cleanup *back_to;
1932 /* Assume there is no frame until proven otherwise. */
1933 cache->framereg = ARM_SP_REGNUM;
1934 cache->framesize = 0;
1936 /* Check for Thumb prologue. */
1937 if (arm_frame_is_thumb (this_frame))
1939 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1943 /* Find the function prologue. If we can't find the function in
1944 the symbol table, peek in the stack frame to find the PC. */
1945 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1948 /* One way to find the end of the prologue (which works well
1949 for unoptimized code) is to do the following:
1951 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1954 prologue_end = prev_pc;
1955 else if (sal.end < prologue_end)
1956 prologue_end = sal.end;
1958 This mechanism is very accurate so long as the optimizer
1959 doesn't move any instructions from the function body into the
1960 prologue. If this happens, sal.end will be the last
1961 instruction in the first hunk of prologue code just before
1962 the first instruction that the scheduler has moved from
1963 the body to the prologue.
1965 In order to make sure that we scan all of the prologue
1966 instructions, we use a slightly less accurate mechanism which
1967 may scan more than necessary. To help compensate for this
1968 lack of accuracy, the prologue scanning loop below contains
1969 several clauses which'll cause the loop to terminate early if
1970 an implausible prologue instruction is encountered.
1976 is a suitable endpoint since it accounts for the largest
1977 possible prologue plus up to five instructions inserted by
1980 if (prologue_end > prologue_start + 64)
1982 prologue_end = prologue_start + 64; /* See above. */
1987 /* We have no symbol information. Our only option is to assume this
1988 function has a standard stack frame and the normal frame register.
1989 Then, we can find the value of our frame pointer on entrance to
1990 the callee (or at the present moment if this is the innermost frame).
1991 The value stored there should be the address of the stmfd + 8. */
1992 CORE_ADDR frame_loc;
1993 LONGEST return_value;
1995 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1996 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
2000 prologue_start = gdbarch_addr_bits_remove
2001 (gdbarch, return_value) - 8;
2002 prologue_end = prologue_start + 64; /* See above. */
2006 if (prev_pc < prologue_end)
2007 prologue_end = prev_pc;
2009 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2012 static struct arm_prologue_cache *
2013 arm_make_prologue_cache (struct frame_info *this_frame)
2016 struct arm_prologue_cache *cache;
2017 CORE_ADDR unwound_fp;
2019 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2020 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2022 arm_scan_prologue (this_frame, cache);
2024 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2025 if (unwound_fp == 0)
2028 cache->prev_sp = unwound_fp + cache->framesize;
2030 /* Calculate actual addresses of saved registers using offsets
2031 determined by arm_scan_prologue. */
2032 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2033 if (trad_frame_addr_p (cache->saved_regs, reg))
2034 cache->saved_regs[reg].addr += cache->prev_sp;
2039 /* Our frame ID for a normal frame is the current function's starting PC
2040 and the caller's SP when we were called. */
2043 arm_prologue_this_id (struct frame_info *this_frame,
2045 struct frame_id *this_id)
2047 struct arm_prologue_cache *cache;
2051 if (*this_cache == NULL)
2052 *this_cache = arm_make_prologue_cache (this_frame);
2053 cache = *this_cache;
2055 /* This is meant to halt the backtrace at "_start". */
2056 pc = get_frame_pc (this_frame);
2057 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2060 /* If we've hit a wall, stop. */
2061 if (cache->prev_sp == 0)
2064 /* Use function start address as part of the frame ID. If we cannot
2065 identify the start address (due to missing symbol information),
2066 fall back to just using the current PC. */
2067 func = get_frame_func (this_frame);
2071 id = frame_id_build (cache->prev_sp, func);
2075 static struct value *
2076 arm_prologue_prev_register (struct frame_info *this_frame,
2080 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2081 struct arm_prologue_cache *cache;
2083 if (*this_cache == NULL)
2084 *this_cache = arm_make_prologue_cache (this_frame);
2085 cache = *this_cache;
2087 /* If we are asked to unwind the PC, then we need to return the LR
2088 instead. The prologue may save PC, but it will point into this
2089 frame's prologue, not the next frame's resume location. Also
2090 strip the saved T bit. A valid LR may have the low bit set, but
2091 a valid PC never does. */
2092 if (prev_regnum == ARM_PC_REGNUM)
2096 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2097 return frame_unwind_got_constant (this_frame, prev_regnum,
2098 arm_addr_bits_remove (gdbarch, lr));
2101 /* SP is generally not saved to the stack, but this frame is
2102 identified by the next frame's stack pointer at the time of the call.
2103 The value was already reconstructed into PREV_SP. */
2104 if (prev_regnum == ARM_SP_REGNUM)
2105 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2107 /* The CPSR may have been changed by the call instruction and by the
2108 called function. The only bit we can reconstruct is the T bit,
2109 by checking the low bit of LR as of the call. This is a reliable
2110 indicator of Thumb-ness except for some ARM v4T pre-interworking
2111 Thumb code, which could get away with a clear low bit as long as
2112 the called function did not use bx. Guess that all other
2113 bits are unchanged; the condition flags are presumably lost,
2114 but the processor status is likely valid. */
2115 if (prev_regnum == ARM_PS_REGNUM)
2118 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2120 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2121 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2122 if (IS_THUMB_ADDR (lr))
2126 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2129 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2133 struct frame_unwind arm_prologue_unwind = {
2135 default_frame_unwind_stop_reason,
2136 arm_prologue_this_id,
2137 arm_prologue_prev_register,
2139 default_frame_sniffer
2142 /* Maintain a list of ARM exception table entries per objfile, similar to the
2143 list of mapping symbols. We only cache entries for standard ARM-defined
2144 personality routines; the cache will contain only the frame unwinding
2145 instructions associated with the entry (not the descriptors). */
2147 static const struct objfile_data *arm_exidx_data_key;
2149 struct arm_exidx_entry
2154 typedef struct arm_exidx_entry arm_exidx_entry_s;
2155 DEF_VEC_O(arm_exidx_entry_s);
2157 struct arm_exidx_data
2159 VEC(arm_exidx_entry_s) **section_maps;
2163 arm_exidx_data_free (struct objfile *objfile, void *arg)
2165 struct arm_exidx_data *data = arg;
2168 for (i = 0; i < objfile->obfd->section_count; i++)
2169 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2173 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2174 const struct arm_exidx_entry *rhs)
2176 return lhs->addr < rhs->addr;
2179 static struct obj_section *
2180 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2182 struct obj_section *osect;
2184 ALL_OBJFILE_OSECTIONS (objfile, osect)
2185 if (bfd_get_section_flags (objfile->obfd,
2186 osect->the_bfd_section) & SEC_ALLOC)
2188 bfd_vma start, size;
2189 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2190 size = bfd_get_section_size (osect->the_bfd_section);
2192 if (start <= vma && vma < start + size)
2199 /* Parse contents of exception table and exception index sections
2200 of OBJFILE, and fill in the exception table entry cache.
2202 For each entry that refers to a standard ARM-defined personality
2203 routine, extract the frame unwinding instructions (from either
2204 the index or the table section). The unwinding instructions
2206 - extracting them from the rest of the table data
2207 - converting to host endianness
2208 - appending the implicit 0xb0 ("Finish") code
2210 The extracted and normalized instructions are stored for later
2211 retrieval by the arm_find_exidx_entry routine. */
2214 arm_exidx_new_objfile (struct objfile *objfile)
2216 struct cleanup *cleanups;
2217 struct arm_exidx_data *data;
2218 asection *exidx, *extab;
2219 bfd_vma exidx_vma = 0, extab_vma = 0;
2220 bfd_size_type exidx_size = 0, extab_size = 0;
2221 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2224 /* If we've already touched this file, do nothing. */
2225 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2227 cleanups = make_cleanup (null_cleanup, NULL);
2229 /* Read contents of exception table and index. */
2230 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2233 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2234 exidx_size = bfd_get_section_size (exidx);
2235 exidx_data = xmalloc (exidx_size);
2236 make_cleanup (xfree, exidx_data);
2238 if (!bfd_get_section_contents (objfile->obfd, exidx,
2239 exidx_data, 0, exidx_size))
2241 do_cleanups (cleanups);
2246 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2249 extab_vma = bfd_section_vma (objfile->obfd, extab);
2250 extab_size = bfd_get_section_size (extab);
2251 extab_data = xmalloc (extab_size);
2252 make_cleanup (xfree, extab_data);
2254 if (!bfd_get_section_contents (objfile->obfd, extab,
2255 extab_data, 0, extab_size))
2257 do_cleanups (cleanups);
2262 /* Allocate exception table data structure. */
2263 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2264 set_objfile_data (objfile, arm_exidx_data_key, data);
2265 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2266 objfile->obfd->section_count,
2267 VEC(arm_exidx_entry_s) *);
2269 /* Fill in exception table. */
2270 for (i = 0; i < exidx_size / 8; i++)
2272 struct arm_exidx_entry new_exidx_entry;
2273 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2274 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2275 bfd_vma addr = 0, word = 0;
2276 int n_bytes = 0, n_words = 0;
2277 struct obj_section *sec;
2278 gdb_byte *entry = NULL;
2280 /* Extract address of start of function. */
2281 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2282 idx += exidx_vma + i * 8;
2284 /* Find section containing function and compute section offset. */
2285 sec = arm_obj_section_from_vma (objfile, idx);
2288 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2290 /* Determine address of exception table entry. */
2293 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2295 else if ((val & 0xff000000) == 0x80000000)
2297 /* Exception table entry embedded in .ARM.exidx
2298 -- must be short form. */
2302 else if (!(val & 0x80000000))
2304 /* Exception table entry in .ARM.extab. */
2305 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2306 addr += exidx_vma + i * 8 + 4;
2308 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2310 word = bfd_h_get_32 (objfile->obfd,
2311 extab_data + addr - extab_vma);
2314 if ((word & 0xff000000) == 0x80000000)
2319 else if ((word & 0xff000000) == 0x81000000
2320 || (word & 0xff000000) == 0x82000000)
2324 n_words = ((word >> 16) & 0xff);
2326 else if (!(word & 0x80000000))
2329 struct obj_section *pers_sec;
2330 int gnu_personality = 0;
2332 /* Custom personality routine. */
2333 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2334 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2336 /* Check whether we've got one of the variants of the
2337 GNU personality routines. */
2338 pers_sec = arm_obj_section_from_vma (objfile, pers);
2341 static const char *personality[] =
2343 "__gcc_personality_v0",
2344 "__gxx_personality_v0",
2345 "__gcj_personality_v0",
2346 "__gnu_objc_personality_v0",
2350 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2353 for (k = 0; personality[k]; k++)
2354 if (lookup_minimal_symbol_by_pc_name
2355 (pc, personality[k], objfile))
2357 gnu_personality = 1;
2362 /* If so, the next word contains a word count in the high
2363 byte, followed by the same unwind instructions as the
2364 pre-defined forms. */
2366 && addr + 4 <= extab_vma + extab_size)
2368 word = bfd_h_get_32 (objfile->obfd,
2369 extab_data + addr - extab_vma);
2372 n_words = ((word >> 24) & 0xff);
2378 /* Sanity check address. */
2380 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2381 n_words = n_bytes = 0;
2383 /* The unwind instructions reside in WORD (only the N_BYTES least
2384 significant bytes are valid), followed by N_WORDS words in the
2385 extab section starting at ADDR. */
2386 if (n_bytes || n_words)
2388 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2389 n_bytes + n_words * 4 + 1);
2392 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2396 word = bfd_h_get_32 (objfile->obfd,
2397 extab_data + addr - extab_vma);
2400 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2401 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2402 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2403 *p++ = (gdb_byte) (word & 0xff);
2406 /* Implied "Finish" to terminate the list. */
2410 /* Push entry onto vector. They are guaranteed to always
2411 appear in order of increasing addresses. */
2412 new_exidx_entry.addr = idx;
2413 new_exidx_entry.entry = entry;
2414 VEC_safe_push (arm_exidx_entry_s,
2415 data->section_maps[sec->the_bfd_section->index],
2419 do_cleanups (cleanups);
2422 /* Search for the exception table entry covering MEMADDR. If one is found,
2423 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2424 set *START to the start of the region covered by this entry. */
2427 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2429 struct obj_section *sec;
2431 sec = find_pc_section (memaddr);
2434 struct arm_exidx_data *data;
2435 VEC(arm_exidx_entry_s) *map;
2436 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2439 data = objfile_data (sec->objfile, arm_exidx_data_key);
2442 map = data->section_maps[sec->the_bfd_section->index];
2443 if (!VEC_empty (arm_exidx_entry_s, map))
2445 struct arm_exidx_entry *map_sym;
2447 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2448 arm_compare_exidx_entries);
2450 /* VEC_lower_bound finds the earliest ordered insertion
2451 point. If the following symbol starts at this exact
2452 address, we use that; otherwise, the preceding
2453 exception table entry covers this address. */
2454 if (idx < VEC_length (arm_exidx_entry_s, map))
2456 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2457 if (map_sym->addr == map_key.addr)
2460 *start = map_sym->addr + obj_section_addr (sec);
2461 return map_sym->entry;
2467 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2469 *start = map_sym->addr + obj_section_addr (sec);
2470 return map_sym->entry;
2479 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2480 instruction list from the ARM exception table entry ENTRY, allocate and
2481 return a prologue cache structure describing how to unwind this frame.
2483 Return NULL if the unwinding instruction list contains a "spare",
2484 "reserved" or "refuse to unwind" instruction as defined in section
2485 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2486 for the ARM Architecture" document. */
2488 static struct arm_prologue_cache *
2489 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2494 struct arm_prologue_cache *cache;
2495 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2496 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2502 /* Whenever we reload SP, we actually have to retrieve its
2503 actual value in the current frame. */
2506 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2508 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2509 vsp = get_frame_register_unsigned (this_frame, reg);
2513 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2514 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2520 /* Decode next unwind instruction. */
2523 if ((insn & 0xc0) == 0)
2525 int offset = insn & 0x3f;
2526 vsp += (offset << 2) + 4;
2528 else if ((insn & 0xc0) == 0x40)
2530 int offset = insn & 0x3f;
2531 vsp -= (offset << 2) + 4;
2533 else if ((insn & 0xf0) == 0x80)
2535 int mask = ((insn & 0xf) << 8) | *entry++;
2538 /* The special case of an all-zero mask identifies
2539 "Refuse to unwind". We return NULL to fall back
2540 to the prologue analyzer. */
2544 /* Pop registers r4..r15 under mask. */
2545 for (i = 0; i < 12; i++)
2546 if (mask & (1 << i))
2548 cache->saved_regs[4 + i].addr = vsp;
2552 /* Special-case popping SP -- we need to reload vsp. */
2553 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2556 else if ((insn & 0xf0) == 0x90)
2558 int reg = insn & 0xf;
2560 /* Reserved cases. */
2561 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2564 /* Set SP from another register and mark VSP for reload. */
2565 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2568 else if ((insn & 0xf0) == 0xa0)
2570 int count = insn & 0x7;
2571 int pop_lr = (insn & 0x8) != 0;
2574 /* Pop r4..r[4+count]. */
2575 for (i = 0; i <= count; i++)
2577 cache->saved_regs[4 + i].addr = vsp;
2581 /* If indicated by flag, pop LR as well. */
2584 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2588 else if (insn == 0xb0)
2590 /* We could only have updated PC by popping into it; if so, it
2591 will show up as address. Otherwise, copy LR into PC. */
2592 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2593 cache->saved_regs[ARM_PC_REGNUM]
2594 = cache->saved_regs[ARM_LR_REGNUM];
2599 else if (insn == 0xb1)
2601 int mask = *entry++;
2604 /* All-zero mask and mask >= 16 is "spare". */
2605 if (mask == 0 || mask >= 16)
2608 /* Pop r0..r3 under mask. */
2609 for (i = 0; i < 4; i++)
2610 if (mask & (1 << i))
2612 cache->saved_regs[i].addr = vsp;
2616 else if (insn == 0xb2)
2618 ULONGEST offset = 0;
2623 offset |= (*entry & 0x7f) << shift;
2626 while (*entry++ & 0x80);
2628 vsp += 0x204 + (offset << 2);
2630 else if (insn == 0xb3)
2632 int start = *entry >> 4;
2633 int count = (*entry++) & 0xf;
2636 /* Only registers D0..D15 are valid here. */
2637 if (start + count >= 16)
2640 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2641 for (i = 0; i <= count; i++)
2643 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2647 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2650 else if ((insn & 0xf8) == 0xb8)
2652 int count = insn & 0x7;
2655 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2656 for (i = 0; i <= count; i++)
2658 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2662 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2665 else if (insn == 0xc6)
2667 int start = *entry >> 4;
2668 int count = (*entry++) & 0xf;
2671 /* Only registers WR0..WR15 are valid. */
2672 if (start + count >= 16)
2675 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2676 for (i = 0; i <= count; i++)
2678 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2682 else if (insn == 0xc7)
2684 int mask = *entry++;
2687 /* All-zero mask and mask >= 16 is "spare". */
2688 if (mask == 0 || mask >= 16)
2691 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2692 for (i = 0; i < 4; i++)
2693 if (mask & (1 << i))
2695 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2699 else if ((insn & 0xf8) == 0xc0)
2701 int count = insn & 0x7;
2704 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2705 for (i = 0; i <= count; i++)
2707 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2711 else if (insn == 0xc8)
2713 int start = *entry >> 4;
2714 int count = (*entry++) & 0xf;
2717 /* Only registers D0..D31 are valid. */
2718 if (start + count >= 16)
2721 /* Pop VFP double-precision registers
2722 D[16+start]..D[16+start+count]. */
2723 for (i = 0; i <= count; i++)
2725 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2729 else if (insn == 0xc9)
2731 int start = *entry >> 4;
2732 int count = (*entry++) & 0xf;
2735 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2736 for (i = 0; i <= count; i++)
2738 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2742 else if ((insn & 0xf8) == 0xd0)
2744 int count = insn & 0x7;
2747 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2748 for (i = 0; i <= count; i++)
2750 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2756 /* Everything else is "spare". */
2761 /* If we restore SP from a register, assume this was the frame register.
2762 Otherwise just fall back to SP as frame register. */
2763 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2764 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2766 cache->framereg = ARM_SP_REGNUM;
2768 /* Determine offset to previous frame. */
2770 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2772 /* We already got the previous SP. */
2773 cache->prev_sp = vsp;
2778 /* Unwinding via ARM exception table entries. Note that the sniffer
2779 already computes a filled-in prologue cache, which is then used
2780 with the same arm_prologue_this_id and arm_prologue_prev_register
2781 routines also used for prologue-parsing based unwinding. */
2784 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2785 struct frame_info *this_frame,
2786 void **this_prologue_cache)
2788 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2789 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2790 CORE_ADDR addr_in_block, exidx_region, func_start;
2791 struct arm_prologue_cache *cache;
2794 /* See if we have an ARM exception table entry covering this address. */
2795 addr_in_block = get_frame_address_in_block (this_frame);
2796 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2800 /* The ARM exception table does not describe unwind information
2801 for arbitrary PC values, but is guaranteed to be correct only
2802 at call sites. We have to decide here whether we want to use
2803 ARM exception table information for this frame, or fall back
2804 to using prologue parsing. (Note that if we have DWARF CFI,
2805 this sniffer isn't even called -- CFI is always preferred.)
2807 Before we make this decision, however, we check whether we
2808 actually have *symbol* information for the current frame.
2809 If not, prologue parsing would not work anyway, so we might
2810 as well use the exception table and hope for the best. */
2811 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2815 /* If the next frame is "normal", we are at a call site in this
2816 frame, so exception information is guaranteed to be valid. */
2817 if (get_next_frame (this_frame)
2818 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2821 /* We also assume exception information is valid if we're currently
2822 blocked in a system call. The system library is supposed to
2823 ensure this, so that e.g. pthread cancellation works. */
2824 if (arm_frame_is_thumb (this_frame))
2828 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2829 byte_order_for_code, &insn)
2830 && (insn & 0xff00) == 0xdf00 /* svc */)
2837 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2838 byte_order_for_code, &insn)
2839 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2843 /* Bail out if we don't know that exception information is valid. */
2847 /* The ARM exception index does not mark the *end* of the region
2848 covered by the entry, and some functions will not have any entry.
2849 To correctly recognize the end of the covered region, the linker
2850 should have inserted dummy records with a CANTUNWIND marker.
2852 Unfortunately, current versions of GNU ld do not reliably do
2853 this, and thus we may have found an incorrect entry above.
2854 As a (temporary) sanity check, we only use the entry if it
2855 lies *within* the bounds of the function. Note that this check
2856 might reject perfectly valid entries that just happen to cover
2857 multiple functions; therefore this check ought to be removed
2858 once the linker is fixed. */
2859 if (func_start > exidx_region)
2863 /* Decode the list of unwinding instructions into a prologue cache.
2864 Note that this may fail due to e.g. a "refuse to unwind" code. */
2865 cache = arm_exidx_fill_cache (this_frame, entry);
2869 *this_prologue_cache = cache;
2873 struct frame_unwind arm_exidx_unwind = {
2875 default_frame_unwind_stop_reason,
2876 arm_prologue_this_id,
2877 arm_prologue_prev_register,
2879 arm_exidx_unwind_sniffer
2882 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2883 trampoline, return the target PC. Otherwise return 0.
2885 void call0a (char c, short s, int i, long l) {}
2889 (*pointer_to_call0a) (c, s, i, l);
2892 Instead of calling a stub library function _call_via_xx (xx is
2893 the register name), GCC may inline the trampoline in the object
2894 file as below (register r2 has the address of call0a).
2897 .type main, %function
2906 The trampoline 'bx r2' doesn't belong to main. */
2909 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2911 /* The heuristics of recognizing such trampoline is that FRAME is
2912 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2913 if (arm_frame_is_thumb (frame))
2917 if (target_read_memory (pc, buf, 2) == 0)
2919 struct gdbarch *gdbarch = get_frame_arch (frame);
2920 enum bfd_endian byte_order_for_code
2921 = gdbarch_byte_order_for_code (gdbarch);
2923 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2925 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2928 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2930 /* Clear the LSB so that gdb core sets step-resume
2931 breakpoint at the right address. */
2932 return UNMAKE_THUMB_ADDR (dest);
2940 static struct arm_prologue_cache *
2941 arm_make_stub_cache (struct frame_info *this_frame)
2943 struct arm_prologue_cache *cache;
2945 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2946 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2948 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2953 /* Our frame ID for a stub frame is the current SP and LR. */
2956 arm_stub_this_id (struct frame_info *this_frame,
2958 struct frame_id *this_id)
2960 struct arm_prologue_cache *cache;
2962 if (*this_cache == NULL)
2963 *this_cache = arm_make_stub_cache (this_frame);
2964 cache = *this_cache;
2966 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2970 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2971 struct frame_info *this_frame,
2972 void **this_prologue_cache)
2974 CORE_ADDR addr_in_block;
2976 CORE_ADDR pc, start_addr;
2979 addr_in_block = get_frame_address_in_block (this_frame);
2980 pc = get_frame_pc (this_frame);
2981 if (in_plt_section (addr_in_block)
2982 /* We also use the stub winder if the target memory is unreadable
2983 to avoid having the prologue unwinder trying to read it. */
2984 || target_read_memory (pc, dummy, 4) != 0)
2987 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2988 && arm_skip_bx_reg (this_frame, pc) != 0)
2994 struct frame_unwind arm_stub_unwind = {
2996 default_frame_unwind_stop_reason,
2998 arm_prologue_prev_register,
3000 arm_stub_unwind_sniffer
3003 /* Put here the code to store, into CACHE->saved_regs, the addresses
3004 of the saved registers of frame described by THIS_FRAME. CACHE is
3007 static struct arm_prologue_cache *
3008 arm_m_exception_cache (struct frame_info *this_frame)
3010 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3011 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3012 struct arm_prologue_cache *cache;
3013 CORE_ADDR unwound_sp;
3016 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3017 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3019 unwound_sp = get_frame_register_unsigned (this_frame,
3022 /* The hardware saves eight 32-bit words, comprising xPSR,
3023 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3024 "B1.5.6 Exception entry behavior" in
3025 "ARMv7-M Architecture Reference Manual". */
3026 cache->saved_regs[0].addr = unwound_sp;
3027 cache->saved_regs[1].addr = unwound_sp + 4;
3028 cache->saved_regs[2].addr = unwound_sp + 8;
3029 cache->saved_regs[3].addr = unwound_sp + 12;
3030 cache->saved_regs[12].addr = unwound_sp + 16;
3031 cache->saved_regs[14].addr = unwound_sp + 20;
3032 cache->saved_regs[15].addr = unwound_sp + 24;
3033 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3035 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3036 aligner between the top of the 32-byte stack frame and the
3037 previous context's stack pointer. */
3038 cache->prev_sp = unwound_sp + 32;
3039 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3040 && (xpsr & (1 << 9)) != 0)
3041 cache->prev_sp += 4;
3046 /* Implementation of function hook 'this_id' in
3047 'struct frame_uwnind'. */
3050 arm_m_exception_this_id (struct frame_info *this_frame,
3052 struct frame_id *this_id)
3054 struct arm_prologue_cache *cache;
3056 if (*this_cache == NULL)
3057 *this_cache = arm_m_exception_cache (this_frame);
3058 cache = *this_cache;
3060 /* Our frame ID for a stub frame is the current SP and LR. */
3061 *this_id = frame_id_build (cache->prev_sp,
3062 get_frame_pc (this_frame));
3065 /* Implementation of function hook 'prev_register' in
3066 'struct frame_uwnind'. */
3068 static struct value *
3069 arm_m_exception_prev_register (struct frame_info *this_frame,
3073 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3074 struct arm_prologue_cache *cache;
3076 if (*this_cache == NULL)
3077 *this_cache = arm_m_exception_cache (this_frame);
3078 cache = *this_cache;
3080 /* The value was already reconstructed into PREV_SP. */
3081 if (prev_regnum == ARM_SP_REGNUM)
3082 return frame_unwind_got_constant (this_frame, prev_regnum,
3085 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3089 /* Implementation of function hook 'sniffer' in
3090 'struct frame_uwnind'. */
3093 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3094 struct frame_info *this_frame,
3095 void **this_prologue_cache)
3097 CORE_ADDR this_pc = get_frame_pc (this_frame);
3099 /* No need to check is_m; this sniffer is only registered for
3100 M-profile architectures. */
3102 /* Exception frames return to one of these magic PCs. Other values
3103 are not defined as of v7-M. See details in "B1.5.8 Exception
3104 return behavior" in "ARMv7-M Architecture Reference Manual". */
3105 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3106 || this_pc == 0xfffffffd)
3112 /* Frame unwinder for M-profile exceptions. */
3114 struct frame_unwind arm_m_exception_unwind =
3117 default_frame_unwind_stop_reason,
3118 arm_m_exception_this_id,
3119 arm_m_exception_prev_register,
3121 arm_m_exception_unwind_sniffer
3125 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3127 struct arm_prologue_cache *cache;
3129 if (*this_cache == NULL)
3130 *this_cache = arm_make_prologue_cache (this_frame);
3131 cache = *this_cache;
3133 return cache->prev_sp - cache->framesize;
3136 struct frame_base arm_normal_base = {
3137 &arm_prologue_unwind,
3138 arm_normal_frame_base,
3139 arm_normal_frame_base,
3140 arm_normal_frame_base
3143 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3144 dummy frame. The frame ID's base needs to match the TOS value
3145 saved by save_dummy_frame_tos() and returned from
3146 arm_push_dummy_call, and the PC needs to match the dummy frame's
3149 static struct frame_id
3150 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3152 return frame_id_build (get_frame_register_unsigned (this_frame,
3154 get_frame_pc (this_frame));
3157 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3158 be used to construct the previous frame's ID, after looking up the
3159 containing function). */
3162 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3165 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3166 return arm_addr_bits_remove (gdbarch, pc);
3170 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3172 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3175 static struct value *
3176 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3179 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3181 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3186 /* The PC is normally copied from the return column, which
3187 describes saves of LR. However, that version may have an
3188 extra bit set to indicate Thumb state. The bit is not
3190 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3191 return frame_unwind_got_constant (this_frame, regnum,
3192 arm_addr_bits_remove (gdbarch, lr));
3195 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3196 cpsr = get_frame_register_unsigned (this_frame, regnum);
3197 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3198 if (IS_THUMB_ADDR (lr))
3202 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3205 internal_error (__FILE__, __LINE__,
3206 _("Unexpected register %d"), regnum);
3211 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3212 struct dwarf2_frame_state_reg *reg,
3213 struct frame_info *this_frame)
3219 reg->how = DWARF2_FRAME_REG_FN;
3220 reg->loc.fn = arm_dwarf2_prev_register;
3223 reg->how = DWARF2_FRAME_REG_CFA;
3228 /* Return true if we are in the function's epilogue, i.e. after the
3229 instruction that destroyed the function's stack frame. */
3232 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3234 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3235 unsigned int insn, insn2;
3236 int found_return = 0, found_stack_adjust = 0;
3237 CORE_ADDR func_start, func_end;
3241 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3244 /* The epilogue is a sequence of instructions along the following lines:
3246 - add stack frame size to SP or FP
3247 - [if frame pointer used] restore SP from FP
3248 - restore registers from SP [may include PC]
3249 - a return-type instruction [if PC wasn't already restored]
3251 In a first pass, we scan forward from the current PC and verify the
3252 instructions we find as compatible with this sequence, ending in a
3255 However, this is not sufficient to distinguish indirect function calls
3256 within a function from indirect tail calls in the epilogue in some cases.
3257 Therefore, if we didn't already find any SP-changing instruction during
3258 forward scan, we add a backward scanning heuristic to ensure we actually
3259 are in the epilogue. */
3262 while (scan_pc < func_end && !found_return)
3264 if (target_read_memory (scan_pc, buf, 2))
3268 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3270 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3272 else if (insn == 0x46f7) /* mov pc, lr */
3274 else if (thumb_instruction_restores_sp (insn))
3276 if ((insn & 0xfe00) == 0xbd00) /* pop <registers, PC> */
3279 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3281 if (target_read_memory (scan_pc, buf, 2))
3285 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3287 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3289 if (insn2 & 0x8000) /* <registers> include PC. */
3292 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3293 && (insn2 & 0x0fff) == 0x0b04)
3295 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3298 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3299 && (insn2 & 0x0e00) == 0x0a00)
3311 /* Since any instruction in the epilogue sequence, with the possible
3312 exception of return itself, updates the stack pointer, we need to
3313 scan backwards for at most one instruction. Try either a 16-bit or
3314 a 32-bit instruction. This is just a heuristic, so we do not worry
3315 too much about false positives. */
3317 if (pc - 4 < func_start)
3319 if (target_read_memory (pc - 4, buf, 4))
3322 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3323 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3325 if (thumb_instruction_restores_sp (insn2))
3326 found_stack_adjust = 1;
3327 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3328 found_stack_adjust = 1;
3329 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3330 && (insn2 & 0x0fff) == 0x0b04)
3331 found_stack_adjust = 1;
3332 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3333 && (insn2 & 0x0e00) == 0x0a00)
3334 found_stack_adjust = 1;
3336 return found_stack_adjust;
3339 /* Return true if we are in the function's epilogue, i.e. after the
3340 instruction that destroyed the function's stack frame. */
3343 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3345 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3347 int found_return, found_stack_adjust;
3348 CORE_ADDR func_start, func_end;
3350 if (arm_pc_is_thumb (gdbarch, pc))
3351 return thumb_in_function_epilogue_p (gdbarch, pc);
3353 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3356 /* We are in the epilogue if the previous instruction was a stack
3357 adjustment and the next instruction is a possible return (bx, mov
3358 pc, or pop). We could have to scan backwards to find the stack
3359 adjustment, or forwards to find the return, but this is a decent
3360 approximation. First scan forwards. */
3363 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3364 if (bits (insn, 28, 31) != INST_NV)
3366 if ((insn & 0x0ffffff0) == 0x012fff10)
3369 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3372 else if ((insn & 0x0fff0000) == 0x08bd0000
3373 && (insn & 0x0000c000) != 0)
3374 /* POP (LDMIA), including PC or LR. */
3381 /* Scan backwards. This is just a heuristic, so do not worry about
3382 false positives from mode changes. */
3384 if (pc < func_start + 4)
3387 found_stack_adjust = 0;
3388 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3389 if (bits (insn, 28, 31) != INST_NV)
3391 if ((insn & 0x0df0f000) == 0x0080d000)
3392 /* ADD SP (register or immediate). */
3393 found_stack_adjust = 1;
3394 else if ((insn & 0x0df0f000) == 0x0040d000)
3395 /* SUB SP (register or immediate). */
3396 found_stack_adjust = 1;
3397 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3399 found_stack_adjust = 1;
3400 else if ((insn & 0x0fff0000) == 0x08bd0000)
3402 found_stack_adjust = 1;
3403 else if ((insn & 0x0fff0000) == 0x049d0000)
3404 /* POP of a single register. */
3405 found_stack_adjust = 1;
3408 if (found_stack_adjust)
3415 /* When arguments must be pushed onto the stack, they go on in reverse
3416 order. The code below implements a FILO (stack) to do this. */
3421 struct stack_item *prev;
3425 static struct stack_item *
3426 push_stack_item (struct stack_item *prev, const void *contents, int len)
3428 struct stack_item *si;
3429 si = xmalloc (sizeof (struct stack_item));
3430 si->data = xmalloc (len);
3433 memcpy (si->data, contents, len);
3437 static struct stack_item *
3438 pop_stack_item (struct stack_item *si)
3440 struct stack_item *dead = si;
3448 /* Return the alignment (in bytes) of the given type. */
3451 arm_type_align (struct type *t)
3457 t = check_typedef (t);
3458 switch (TYPE_CODE (t))
3461 /* Should never happen. */
3462 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3466 case TYPE_CODE_ENUM:
3470 case TYPE_CODE_RANGE:
3472 case TYPE_CODE_CHAR:
3473 case TYPE_CODE_BOOL:
3474 return TYPE_LENGTH (t);
3476 case TYPE_CODE_ARRAY:
3477 case TYPE_CODE_COMPLEX:
3478 /* TODO: What about vector types? */
3479 return arm_type_align (TYPE_TARGET_TYPE (t));
3481 case TYPE_CODE_STRUCT:
3482 case TYPE_CODE_UNION:
3484 for (n = 0; n < TYPE_NFIELDS (t); n++)
3486 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3494 /* Possible base types for a candidate for passing and returning in
3497 enum arm_vfp_cprc_base_type
3506 /* The length of one element of base type B. */
3509 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3513 case VFP_CPRC_SINGLE:
3515 case VFP_CPRC_DOUBLE:
3517 case VFP_CPRC_VEC64:
3519 case VFP_CPRC_VEC128:
3522 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3527 /* The character ('s', 'd' or 'q') for the type of VFP register used
3528 for passing base type B. */
3531 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3535 case VFP_CPRC_SINGLE:
3537 case VFP_CPRC_DOUBLE:
3539 case VFP_CPRC_VEC64:
3541 case VFP_CPRC_VEC128:
3544 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3549 /* Determine whether T may be part of a candidate for passing and
3550 returning in VFP registers, ignoring the limit on the total number
3551 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3552 classification of the first valid component found; if it is not
3553 VFP_CPRC_UNKNOWN, all components must have the same classification
3554 as *BASE_TYPE. If it is found that T contains a type not permitted
3555 for passing and returning in VFP registers, a type differently
3556 classified from *BASE_TYPE, or two types differently classified
3557 from each other, return -1, otherwise return the total number of
3558 base-type elements found (possibly 0 in an empty structure or
3559 array). Vector types are not currently supported, matching the
3560 generic AAPCS support. */
3563 arm_vfp_cprc_sub_candidate (struct type *t,
3564 enum arm_vfp_cprc_base_type *base_type)
3566 t = check_typedef (t);
3567 switch (TYPE_CODE (t))
3570 switch (TYPE_LENGTH (t))
3573 if (*base_type == VFP_CPRC_UNKNOWN)
3574 *base_type = VFP_CPRC_SINGLE;
3575 else if (*base_type != VFP_CPRC_SINGLE)
3580 if (*base_type == VFP_CPRC_UNKNOWN)
3581 *base_type = VFP_CPRC_DOUBLE;
3582 else if (*base_type != VFP_CPRC_DOUBLE)
3591 case TYPE_CODE_COMPLEX:
3592 /* Arguments of complex T where T is one of the types float or
3593 double get treated as if they are implemented as:
3602 switch (TYPE_LENGTH (t))
3605 if (*base_type == VFP_CPRC_UNKNOWN)
3606 *base_type = VFP_CPRC_SINGLE;
3607 else if (*base_type != VFP_CPRC_SINGLE)
3612 if (*base_type == VFP_CPRC_UNKNOWN)
3613 *base_type = VFP_CPRC_DOUBLE;
3614 else if (*base_type != VFP_CPRC_DOUBLE)
3623 case TYPE_CODE_ARRAY:
3627 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3630 if (TYPE_LENGTH (t) == 0)
3632 gdb_assert (count == 0);
3635 else if (count == 0)
3637 unitlen = arm_vfp_cprc_unit_length (*base_type);
3638 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3639 return TYPE_LENGTH (t) / unitlen;
3643 case TYPE_CODE_STRUCT:
3648 for (i = 0; i < TYPE_NFIELDS (t); i++)
3650 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3652 if (sub_count == -1)
3656 if (TYPE_LENGTH (t) == 0)
3658 gdb_assert (count == 0);
3661 else if (count == 0)
3663 unitlen = arm_vfp_cprc_unit_length (*base_type);
3664 if (TYPE_LENGTH (t) != unitlen * count)
3669 case TYPE_CODE_UNION:
3674 for (i = 0; i < TYPE_NFIELDS (t); i++)
3676 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3678 if (sub_count == -1)
3680 count = (count > sub_count ? count : sub_count);
3682 if (TYPE_LENGTH (t) == 0)
3684 gdb_assert (count == 0);
3687 else if (count == 0)
3689 unitlen = arm_vfp_cprc_unit_length (*base_type);
3690 if (TYPE_LENGTH (t) != unitlen * count)
3702 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3703 if passed to or returned from a non-variadic function with the VFP
3704 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3705 *BASE_TYPE to the base type for T and *COUNT to the number of
3706 elements of that base type before returning. */
3709 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3712 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3713 int c = arm_vfp_cprc_sub_candidate (t, &b);
3714 if (c <= 0 || c > 4)
3721 /* Return 1 if the VFP ABI should be used for passing arguments to and
3722 returning values from a function of type FUNC_TYPE, 0
3726 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3728 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3729 /* Variadic functions always use the base ABI. Assume that functions
3730 without debug info are not variadic. */
3731 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3733 /* The VFP ABI is only supported as a variant of AAPCS. */
3734 if (tdep->arm_abi != ARM_ABI_AAPCS)
3736 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3739 /* We currently only support passing parameters in integer registers, which
3740 conforms with GCC's default model, and VFP argument passing following
3741 the VFP variant of AAPCS. Several other variants exist and
3742 we should probably support some of them based on the selected ABI. */
3745 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3746 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3747 struct value **args, CORE_ADDR sp, int struct_return,
3748 CORE_ADDR struct_addr)
3750 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3754 struct stack_item *si = NULL;
3757 unsigned vfp_regs_free = (1 << 16) - 1;
3759 /* Determine the type of this function and whether the VFP ABI
3761 ftype = check_typedef (value_type (function));
3762 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3763 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3764 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3766 /* Set the return address. For the ARM, the return breakpoint is
3767 always at BP_ADDR. */
3768 if (arm_pc_is_thumb (gdbarch, bp_addr))
3770 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3772 /* Walk through the list of args and determine how large a temporary
3773 stack is required. Need to take care here as structs may be
3774 passed on the stack, and we have to push them. */
3777 argreg = ARM_A1_REGNUM;
3780 /* The struct_return pointer occupies the first parameter
3781 passing register. */
3785 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3786 gdbarch_register_name (gdbarch, argreg),
3787 paddress (gdbarch, struct_addr));
3788 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3792 for (argnum = 0; argnum < nargs; argnum++)
3795 struct type *arg_type;
3796 struct type *target_type;
3797 enum type_code typecode;
3798 const bfd_byte *val;
3800 enum arm_vfp_cprc_base_type vfp_base_type;
3802 int may_use_core_reg = 1;
3804 arg_type = check_typedef (value_type (args[argnum]));
3805 len = TYPE_LENGTH (arg_type);
3806 target_type = TYPE_TARGET_TYPE (arg_type);
3807 typecode = TYPE_CODE (arg_type);
3808 val = value_contents (args[argnum]);
3810 align = arm_type_align (arg_type);
3811 /* Round alignment up to a whole number of words. */
3812 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3813 /* Different ABIs have different maximum alignments. */
3814 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3816 /* The APCS ABI only requires word alignment. */
3817 align = INT_REGISTER_SIZE;
3821 /* The AAPCS requires at most doubleword alignment. */
3822 if (align > INT_REGISTER_SIZE * 2)
3823 align = INT_REGISTER_SIZE * 2;
3827 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3835 /* Because this is a CPRC it cannot go in a core register or
3836 cause a core register to be skipped for alignment.
3837 Either it goes in VFP registers and the rest of this loop
3838 iteration is skipped for this argument, or it goes on the
3839 stack (and the stack alignment code is correct for this
3841 may_use_core_reg = 0;
3843 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3844 shift = unit_length / 4;
3845 mask = (1 << (shift * vfp_base_count)) - 1;
3846 for (regno = 0; regno < 16; regno += shift)
3847 if (((vfp_regs_free >> regno) & mask) == mask)
3856 vfp_regs_free &= ~(mask << regno);
3857 reg_scaled = regno / shift;
3858 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3859 for (i = 0; i < vfp_base_count; i++)
3863 if (reg_char == 'q')
3864 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3865 val + i * unit_length);
3868 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3869 reg_char, reg_scaled + i);
3870 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3872 regcache_cooked_write (regcache, regnum,
3873 val + i * unit_length);
3880 /* This CPRC could not go in VFP registers, so all VFP
3881 registers are now marked as used. */
3886 /* Push stack padding for dowubleword alignment. */
3887 if (nstack & (align - 1))
3889 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3890 nstack += INT_REGISTER_SIZE;
3893 /* Doubleword aligned quantities must go in even register pairs. */
3894 if (may_use_core_reg
3895 && argreg <= ARM_LAST_ARG_REGNUM
3896 && align > INT_REGISTER_SIZE
3900 /* If the argument is a pointer to a function, and it is a
3901 Thumb function, create a LOCAL copy of the value and set
3902 the THUMB bit in it. */
3903 if (TYPE_CODE_PTR == typecode
3904 && target_type != NULL
3905 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3907 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3908 if (arm_pc_is_thumb (gdbarch, regval))
3910 bfd_byte *copy = alloca (len);
3911 store_unsigned_integer (copy, len, byte_order,
3912 MAKE_THUMB_ADDR (regval));
3917 /* Copy the argument to general registers or the stack in
3918 register-sized pieces. Large arguments are split between
3919 registers and stack. */
3922 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3924 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3926 /* The argument is being passed in a general purpose
3929 = extract_unsigned_integer (val, partial_len, byte_order);
3930 if (byte_order == BFD_ENDIAN_BIG)
3931 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3933 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3935 gdbarch_register_name
3937 phex (regval, INT_REGISTER_SIZE));
3938 regcache_cooked_write_unsigned (regcache, argreg, regval);
3943 /* Push the arguments onto the stack. */
3945 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3947 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3948 nstack += INT_REGISTER_SIZE;
3955 /* If we have an odd number of words to push, then decrement the stack
3956 by one word now, so first stack argument will be dword aligned. */
3963 write_memory (sp, si->data, si->len);
3964 si = pop_stack_item (si);
3967 /* Finally, update teh SP register. */
3968 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3974 /* Always align the frame to an 8-byte boundary. This is required on
3975 some platforms and harmless on the rest. */
3978 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3980 /* Align the stack to eight bytes. */
3981 return sp & ~ (CORE_ADDR) 7;
3985 print_fpu_flags (struct ui_file *file, int flags)
3987 if (flags & (1 << 0))
3988 fputs_filtered ("IVO ", file);
3989 if (flags & (1 << 1))
3990 fputs_filtered ("DVZ ", file);
3991 if (flags & (1 << 2))
3992 fputs_filtered ("OFL ", file);
3993 if (flags & (1 << 3))
3994 fputs_filtered ("UFL ", file);
3995 if (flags & (1 << 4))
3996 fputs_filtered ("INX ", file);
3997 fputc_filtered ('\n', file);
4000 /* Print interesting information about the floating point processor
4001 (if present) or emulator. */
4003 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4004 struct frame_info *frame, const char *args)
4006 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4009 type = (status >> 24) & 127;
4010 if (status & (1 << 31))
4011 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4013 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4014 /* i18n: [floating point unit] mask */
4015 fputs_filtered (_("mask: "), file);
4016 print_fpu_flags (file, status >> 16);
4017 /* i18n: [floating point unit] flags */
4018 fputs_filtered (_("flags: "), file);
4019 print_fpu_flags (file, status);
4022 /* Construct the ARM extended floating point type. */
4023 static struct type *
4024 arm_ext_type (struct gdbarch *gdbarch)
4026 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4028 if (!tdep->arm_ext_type)
4030 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4031 floatformats_arm_ext);
4033 return tdep->arm_ext_type;
4036 static struct type *
4037 arm_neon_double_type (struct gdbarch *gdbarch)
4039 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4041 if (tdep->neon_double_type == NULL)
4043 struct type *t, *elem;
4045 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4047 elem = builtin_type (gdbarch)->builtin_uint8;
4048 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4049 elem = builtin_type (gdbarch)->builtin_uint16;
4050 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4051 elem = builtin_type (gdbarch)->builtin_uint32;
4052 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4053 elem = builtin_type (gdbarch)->builtin_uint64;
4054 append_composite_type_field (t, "u64", elem);
4055 elem = builtin_type (gdbarch)->builtin_float;
4056 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4057 elem = builtin_type (gdbarch)->builtin_double;
4058 append_composite_type_field (t, "f64", elem);
4060 TYPE_VECTOR (t) = 1;
4061 TYPE_NAME (t) = "neon_d";
4062 tdep->neon_double_type = t;
4065 return tdep->neon_double_type;
4068 /* FIXME: The vector types are not correctly ordered on big-endian
4069 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4070 bits of d0 - regardless of what unit size is being held in d0. So
4071 the offset of the first uint8 in d0 is 7, but the offset of the
4072 first float is 4. This code works as-is for little-endian
4075 static struct type *
4076 arm_neon_quad_type (struct gdbarch *gdbarch)
4078 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4080 if (tdep->neon_quad_type == NULL)
4082 struct type *t, *elem;
4084 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4086 elem = builtin_type (gdbarch)->builtin_uint8;
4087 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4088 elem = builtin_type (gdbarch)->builtin_uint16;
4089 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4090 elem = builtin_type (gdbarch)->builtin_uint32;
4091 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4092 elem = builtin_type (gdbarch)->builtin_uint64;
4093 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4094 elem = builtin_type (gdbarch)->builtin_float;
4095 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4096 elem = builtin_type (gdbarch)->builtin_double;
4097 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4099 TYPE_VECTOR (t) = 1;
4100 TYPE_NAME (t) = "neon_q";
4101 tdep->neon_quad_type = t;
4104 return tdep->neon_quad_type;
4107 /* Return the GDB type object for the "standard" data type of data in
4110 static struct type *
4111 arm_register_type (struct gdbarch *gdbarch, int regnum)
4113 int num_regs = gdbarch_num_regs (gdbarch);
4115 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4116 && regnum >= num_regs && regnum < num_regs + 32)
4117 return builtin_type (gdbarch)->builtin_float;
4119 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4120 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4121 return arm_neon_quad_type (gdbarch);
4123 /* If the target description has register information, we are only
4124 in this function so that we can override the types of
4125 double-precision registers for NEON. */
4126 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4128 struct type *t = tdesc_register_type (gdbarch, regnum);
4130 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4131 && TYPE_CODE (t) == TYPE_CODE_FLT
4132 && gdbarch_tdep (gdbarch)->have_neon)
4133 return arm_neon_double_type (gdbarch);
4138 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4140 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4141 return builtin_type (gdbarch)->builtin_void;
4143 return arm_ext_type (gdbarch);
4145 else if (regnum == ARM_SP_REGNUM)
4146 return builtin_type (gdbarch)->builtin_data_ptr;
4147 else if (regnum == ARM_PC_REGNUM)
4148 return builtin_type (gdbarch)->builtin_func_ptr;
4149 else if (regnum >= ARRAY_SIZE (arm_register_names))
4150 /* These registers are only supported on targets which supply
4151 an XML description. */
4152 return builtin_type (gdbarch)->builtin_int0;
4154 return builtin_type (gdbarch)->builtin_uint32;
4157 /* Map a DWARF register REGNUM onto the appropriate GDB register
4161 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4163 /* Core integer regs. */
4164 if (reg >= 0 && reg <= 15)
4167 /* Legacy FPA encoding. These were once used in a way which
4168 overlapped with VFP register numbering, so their use is
4169 discouraged, but GDB doesn't support the ARM toolchain
4170 which used them for VFP. */
4171 if (reg >= 16 && reg <= 23)
4172 return ARM_F0_REGNUM + reg - 16;
4174 /* New assignments for the FPA registers. */
4175 if (reg >= 96 && reg <= 103)
4176 return ARM_F0_REGNUM + reg - 96;
4178 /* WMMX register assignments. */
4179 if (reg >= 104 && reg <= 111)
4180 return ARM_WCGR0_REGNUM + reg - 104;
4182 if (reg >= 112 && reg <= 127)
4183 return ARM_WR0_REGNUM + reg - 112;
4185 if (reg >= 192 && reg <= 199)
4186 return ARM_WC0_REGNUM + reg - 192;
4188 /* VFP v2 registers. A double precision value is actually
4189 in d1 rather than s2, but the ABI only defines numbering
4190 for the single precision registers. This will "just work"
4191 in GDB for little endian targets (we'll read eight bytes,
4192 starting in s0 and then progressing to s1), but will be
4193 reversed on big endian targets with VFP. This won't
4194 be a problem for the new Neon quad registers; you're supposed
4195 to use DW_OP_piece for those. */
4196 if (reg >= 64 && reg <= 95)
4200 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4201 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4205 /* VFP v3 / Neon registers. This range is also used for VFP v2
4206 registers, except that it now describes d0 instead of s0. */
4207 if (reg >= 256 && reg <= 287)
4211 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4212 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4219 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4221 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4224 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4226 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4227 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4229 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4230 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4232 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4233 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4235 if (reg < NUM_GREGS)
4236 return SIM_ARM_R0_REGNUM + reg;
4239 if (reg < NUM_FREGS)
4240 return SIM_ARM_FP0_REGNUM + reg;
4243 if (reg < NUM_SREGS)
4244 return SIM_ARM_FPS_REGNUM + reg;
4247 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4250 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4251 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4252 It is thought that this is is the floating-point register format on
4253 little-endian systems. */
4256 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4257 void *dbl, int endianess)
4261 if (endianess == BFD_ENDIAN_BIG)
4262 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4264 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4266 floatformat_from_doublest (fmt, &d, dbl);
4270 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4275 floatformat_to_doublest (fmt, ptr, &d);
4276 if (endianess == BFD_ENDIAN_BIG)
4277 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4279 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4284 condition_true (unsigned long cond, unsigned long status_reg)
4286 if (cond == INST_AL || cond == INST_NV)
4292 return ((status_reg & FLAG_Z) != 0);
4294 return ((status_reg & FLAG_Z) == 0);
4296 return ((status_reg & FLAG_C) != 0);
4298 return ((status_reg & FLAG_C) == 0);
4300 return ((status_reg & FLAG_N) != 0);
4302 return ((status_reg & FLAG_N) == 0);
4304 return ((status_reg & FLAG_V) != 0);
4306 return ((status_reg & FLAG_V) == 0);
4308 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4310 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4312 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4314 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4316 return (((status_reg & FLAG_Z) == 0)
4317 && (((status_reg & FLAG_N) == 0)
4318 == ((status_reg & FLAG_V) == 0)));
4320 return (((status_reg & FLAG_Z) != 0)
4321 || (((status_reg & FLAG_N) == 0)
4322 != ((status_reg & FLAG_V) == 0)));
4327 static unsigned long
4328 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4329 unsigned long pc_val, unsigned long status_reg)
4331 unsigned long res, shift;
4332 int rm = bits (inst, 0, 3);
4333 unsigned long shifttype = bits (inst, 5, 6);
4337 int rs = bits (inst, 8, 11);
4338 shift = (rs == 15 ? pc_val + 8
4339 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4342 shift = bits (inst, 7, 11);
4344 res = (rm == ARM_PC_REGNUM
4345 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4346 : get_frame_register_unsigned (frame, rm));
4351 res = shift >= 32 ? 0 : res << shift;
4355 res = shift >= 32 ? 0 : res >> shift;
4361 res = ((res & 0x80000000L)
4362 ? ~((~res) >> shift) : res >> shift);
4365 case 3: /* ROR/RRX */
4368 res = (res >> 1) | (carry ? 0x80000000L : 0);
4370 res = (res >> shift) | (res << (32 - shift));
4374 return res & 0xffffffff;
4377 /* Return number of 1-bits in VAL. */
4380 bitcount (unsigned long val)
4383 for (nbits = 0; val != 0; nbits++)
4384 val &= val - 1; /* Delete rightmost 1-bit in val. */
4388 /* Return the size in bytes of the complete Thumb instruction whose
4389 first halfword is INST1. */
4392 thumb_insn_size (unsigned short inst1)
4394 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4401 thumb_advance_itstate (unsigned int itstate)
4403 /* Preserve IT[7:5], the first three bits of the condition. Shift
4404 the upcoming condition flags left by one bit. */
4405 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4407 /* If we have finished the IT block, clear the state. */
4408 if ((itstate & 0x0f) == 0)
4414 /* Find the next PC after the current instruction executes. In some
4415 cases we can not statically determine the answer (see the IT state
4416 handling in this function); in that case, a breakpoint may be
4417 inserted in addition to the returned PC, which will be used to set
4418 another breakpoint by our caller. */
4421 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4423 struct gdbarch *gdbarch = get_frame_arch (frame);
4424 struct address_space *aspace = get_frame_address_space (frame);
4425 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4426 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4427 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4428 unsigned short inst1;
4429 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4430 unsigned long offset;
4431 ULONGEST status, itstate;
4433 nextpc = MAKE_THUMB_ADDR (nextpc);
4434 pc_val = MAKE_THUMB_ADDR (pc_val);
4436 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4438 /* Thumb-2 conditional execution support. There are eight bits in
4439 the CPSR which describe conditional execution state. Once
4440 reconstructed (they're in a funny order), the low five bits
4441 describe the low bit of the condition for each instruction and
4442 how many instructions remain. The high three bits describe the
4443 base condition. One of the low four bits will be set if an IT
4444 block is active. These bits read as zero on earlier
4446 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4447 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
4449 /* If-Then handling. On GNU/Linux, where this routine is used, we
4450 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4451 can disable execution of the undefined instruction. So we might
4452 miss the breakpoint if we set it on a skipped conditional
4453 instruction. Because conditional instructions can change the
4454 flags, affecting the execution of further instructions, we may
4455 need to set two breakpoints. */
4457 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4459 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4461 /* An IT instruction. Because this instruction does not
4462 modify the flags, we can accurately predict the next
4463 executed instruction. */
4464 itstate = inst1 & 0x00ff;
4465 pc += thumb_insn_size (inst1);
4467 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4469 inst1 = read_memory_unsigned_integer (pc, 2,
4470 byte_order_for_code);
4471 pc += thumb_insn_size (inst1);
4472 itstate = thumb_advance_itstate (itstate);
4475 return MAKE_THUMB_ADDR (pc);
4477 else if (itstate != 0)
4479 /* We are in a conditional block. Check the condition. */
4480 if (! condition_true (itstate >> 4, status))
4482 /* Advance to the next executed instruction. */
4483 pc += thumb_insn_size (inst1);
4484 itstate = thumb_advance_itstate (itstate);
4486 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4488 inst1 = read_memory_unsigned_integer (pc, 2,
4489 byte_order_for_code);
4490 pc += thumb_insn_size (inst1);
4491 itstate = thumb_advance_itstate (itstate);
4494 return MAKE_THUMB_ADDR (pc);
4496 else if ((itstate & 0x0f) == 0x08)
4498 /* This is the last instruction of the conditional
4499 block, and it is executed. We can handle it normally
4500 because the following instruction is not conditional,
4501 and we must handle it normally because it is
4502 permitted to branch. Fall through. */
4508 /* There are conditional instructions after this one.
4509 If this instruction modifies the flags, then we can
4510 not predict what the next executed instruction will
4511 be. Fortunately, this instruction is architecturally
4512 forbidden to branch; we know it will fall through.
4513 Start by skipping past it. */
4514 pc += thumb_insn_size (inst1);
4515 itstate = thumb_advance_itstate (itstate);
4517 /* Set a breakpoint on the following instruction. */
4518 gdb_assert ((itstate & 0x0f) != 0);
4519 arm_insert_single_step_breakpoint (gdbarch, aspace,
4520 MAKE_THUMB_ADDR (pc));
4521 cond_negated = (itstate >> 4) & 1;
4523 /* Skip all following instructions with the same
4524 condition. If there is a later instruction in the IT
4525 block with the opposite condition, set the other
4526 breakpoint there. If not, then set a breakpoint on
4527 the instruction after the IT block. */
4530 inst1 = read_memory_unsigned_integer (pc, 2,
4531 byte_order_for_code);
4532 pc += thumb_insn_size (inst1);
4533 itstate = thumb_advance_itstate (itstate);
4535 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4537 return MAKE_THUMB_ADDR (pc);
4541 else if (itstate & 0x0f)
4543 /* We are in a conditional block. Check the condition. */
4544 int cond = itstate >> 4;
4546 if (! condition_true (cond, status))
4547 /* Advance to the next instruction. All the 32-bit
4548 instructions share a common prefix. */
4549 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4551 /* Otherwise, handle the instruction normally. */
4554 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4558 /* Fetch the saved PC from the stack. It's stored above
4559 all of the other registers. */
4560 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4561 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4562 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4564 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4566 unsigned long cond = bits (inst1, 8, 11);
4567 if (cond == 0x0f) /* 0x0f = SWI */
4569 struct gdbarch_tdep *tdep;
4570 tdep = gdbarch_tdep (gdbarch);
4572 if (tdep->syscall_next_pc != NULL)
4573 nextpc = tdep->syscall_next_pc (frame);
4576 else if (cond != 0x0f && condition_true (cond, status))
4577 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4579 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4581 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4583 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4585 unsigned short inst2;
4586 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4588 /* Default to the next instruction. */
4590 nextpc = MAKE_THUMB_ADDR (nextpc);
4592 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4594 /* Branches and miscellaneous control instructions. */
4596 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4599 int j1, j2, imm1, imm2;
4601 imm1 = sbits (inst1, 0, 10);
4602 imm2 = bits (inst2, 0, 10);
4603 j1 = bit (inst2, 13);
4604 j2 = bit (inst2, 11);
4606 offset = ((imm1 << 12) + (imm2 << 1));
4607 offset ^= ((!j2) << 22) | ((!j1) << 23);
4609 nextpc = pc_val + offset;
4610 /* For BLX make sure to clear the low bits. */
4611 if (bit (inst2, 12) == 0)
4612 nextpc = nextpc & 0xfffffffc;
4614 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4616 /* SUBS PC, LR, #imm8. */
4617 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4618 nextpc -= inst2 & 0x00ff;
4620 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4622 /* Conditional branch. */
4623 if (condition_true (bits (inst1, 6, 9), status))
4625 int sign, j1, j2, imm1, imm2;
4627 sign = sbits (inst1, 10, 10);
4628 imm1 = bits (inst1, 0, 5);
4629 imm2 = bits (inst2, 0, 10);
4630 j1 = bit (inst2, 13);
4631 j2 = bit (inst2, 11);
4633 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4634 offset += (imm1 << 12) + (imm2 << 1);
4636 nextpc = pc_val + offset;
4640 else if ((inst1 & 0xfe50) == 0xe810)
4642 /* Load multiple or RFE. */
4643 int rn, offset, load_pc = 1;
4645 rn = bits (inst1, 0, 3);
4646 if (bit (inst1, 7) && !bit (inst1, 8))
4649 if (!bit (inst2, 15))
4651 offset = bitcount (inst2) * 4 - 4;
4653 else if (!bit (inst1, 7) && bit (inst1, 8))
4656 if (!bit (inst2, 15))
4660 else if (bit (inst1, 7) && bit (inst1, 8))
4665 else if (!bit (inst1, 7) && !bit (inst1, 8))
4675 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4676 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4679 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4681 /* MOV PC or MOVS PC. */
4682 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4683 nextpc = MAKE_THUMB_ADDR (nextpc);
4685 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4689 int rn, load_pc = 1;
4691 rn = bits (inst1, 0, 3);
4692 base = get_frame_register_unsigned (frame, rn);
4693 if (rn == ARM_PC_REGNUM)
4695 base = (base + 4) & ~(CORE_ADDR) 0x3;
4697 base += bits (inst2, 0, 11);
4699 base -= bits (inst2, 0, 11);
4701 else if (bit (inst1, 7))
4702 base += bits (inst2, 0, 11);
4703 else if (bit (inst2, 11))
4705 if (bit (inst2, 10))
4708 base += bits (inst2, 0, 7);
4710 base -= bits (inst2, 0, 7);
4713 else if ((inst2 & 0x0fc0) == 0x0000)
4715 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4716 base += get_frame_register_unsigned (frame, rm) << shift;
4723 nextpc = get_frame_memory_unsigned (frame, base, 4);
4725 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4728 CORE_ADDR tbl_reg, table, offset, length;
4730 tbl_reg = bits (inst1, 0, 3);
4731 if (tbl_reg == 0x0f)
4732 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4734 table = get_frame_register_unsigned (frame, tbl_reg);
4736 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4737 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4738 nextpc = pc_val + length;
4740 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4743 CORE_ADDR tbl_reg, table, offset, length;
4745 tbl_reg = bits (inst1, 0, 3);
4746 if (tbl_reg == 0x0f)
4747 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4749 table = get_frame_register_unsigned (frame, tbl_reg);
4751 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4752 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4753 nextpc = pc_val + length;
4756 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4758 if (bits (inst1, 3, 6) == 0x0f)
4759 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4761 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4763 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4765 if (bits (inst1, 3, 6) == 0x0f)
4768 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4770 nextpc = MAKE_THUMB_ADDR (nextpc);
4772 else if ((inst1 & 0xf500) == 0xb100)
4775 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4776 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4778 if (bit (inst1, 11) && reg != 0)
4779 nextpc = pc_val + imm;
4780 else if (!bit (inst1, 11) && reg == 0)
4781 nextpc = pc_val + imm;
4786 /* Get the raw next address. PC is the current program counter, in
4787 FRAME, which is assumed to be executing in ARM mode.
4789 The value returned has the execution state of the next instruction
4790 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4791 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4795 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4797 struct gdbarch *gdbarch = get_frame_arch (frame);
4798 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4799 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4800 unsigned long pc_val;
4801 unsigned long this_instr;
4802 unsigned long status;
4805 pc_val = (unsigned long) pc;
4806 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4808 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4809 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4811 if (bits (this_instr, 28, 31) == INST_NV)
4812 switch (bits (this_instr, 24, 27))
4817 /* Branch with Link and change to Thumb. */
4818 nextpc = BranchDest (pc, this_instr);
4819 nextpc |= bit (this_instr, 24) << 1;
4820 nextpc = MAKE_THUMB_ADDR (nextpc);
4826 /* Coprocessor register transfer. */
4827 if (bits (this_instr, 12, 15) == 15)
4828 error (_("Invalid update to pc in instruction"));
4831 else if (condition_true (bits (this_instr, 28, 31), status))
4833 switch (bits (this_instr, 24, 27))
4836 case 0x1: /* data processing */
4840 unsigned long operand1, operand2, result = 0;
4844 if (bits (this_instr, 12, 15) != 15)
4847 if (bits (this_instr, 22, 25) == 0
4848 && bits (this_instr, 4, 7) == 9) /* multiply */
4849 error (_("Invalid update to pc in instruction"));
4851 /* BX <reg>, BLX <reg> */
4852 if (bits (this_instr, 4, 27) == 0x12fff1
4853 || bits (this_instr, 4, 27) == 0x12fff3)
4855 rn = bits (this_instr, 0, 3);
4856 nextpc = ((rn == ARM_PC_REGNUM)
4858 : get_frame_register_unsigned (frame, rn));
4863 /* Multiply into PC. */
4864 c = (status & FLAG_C) ? 1 : 0;
4865 rn = bits (this_instr, 16, 19);
4866 operand1 = ((rn == ARM_PC_REGNUM)
4868 : get_frame_register_unsigned (frame, rn));
4870 if (bit (this_instr, 25))
4872 unsigned long immval = bits (this_instr, 0, 7);
4873 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4874 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4877 else /* operand 2 is a shifted register. */
4878 operand2 = shifted_reg_val (frame, this_instr, c,
4881 switch (bits (this_instr, 21, 24))
4884 result = operand1 & operand2;
4888 result = operand1 ^ operand2;
4892 result = operand1 - operand2;
4896 result = operand2 - operand1;
4900 result = operand1 + operand2;
4904 result = operand1 + operand2 + c;
4908 result = operand1 - operand2 + c;
4912 result = operand2 - operand1 + c;
4918 case 0xb: /* tst, teq, cmp, cmn */
4919 result = (unsigned long) nextpc;
4923 result = operand1 | operand2;
4927 /* Always step into a function. */
4932 result = operand1 & ~operand2;
4940 /* In 26-bit APCS the bottom two bits of the result are
4941 ignored, and we always end up in ARM state. */
4943 nextpc = arm_addr_bits_remove (gdbarch, result);
4951 case 0x5: /* data transfer */
4954 if (bit (this_instr, 20))
4957 if (bits (this_instr, 12, 15) == 15)
4963 if (bit (this_instr, 22))
4964 error (_("Invalid update to pc in instruction"));
4966 /* byte write to PC */
4967 rn = bits (this_instr, 16, 19);
4968 base = ((rn == ARM_PC_REGNUM)
4970 : get_frame_register_unsigned (frame, rn));
4972 if (bit (this_instr, 24))
4975 int c = (status & FLAG_C) ? 1 : 0;
4976 unsigned long offset =
4977 (bit (this_instr, 25)
4978 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4979 : bits (this_instr, 0, 11));
4981 if (bit (this_instr, 23))
4987 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4994 case 0x9: /* block transfer */
4995 if (bit (this_instr, 20))
4998 if (bit (this_instr, 15))
5002 unsigned long rn_val
5003 = get_frame_register_unsigned (frame,
5004 bits (this_instr, 16, 19));
5006 if (bit (this_instr, 23))
5009 unsigned long reglist = bits (this_instr, 0, 14);
5010 offset = bitcount (reglist) * 4;
5011 if (bit (this_instr, 24)) /* pre */
5014 else if (bit (this_instr, 24))
5018 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
5025 case 0xb: /* branch & link */
5026 case 0xa: /* branch */
5028 nextpc = BranchDest (pc, this_instr);
5034 case 0xe: /* coproc ops */
5038 struct gdbarch_tdep *tdep;
5039 tdep = gdbarch_tdep (gdbarch);
5041 if (tdep->syscall_next_pc != NULL)
5042 nextpc = tdep->syscall_next_pc (frame);
5048 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5056 /* Determine next PC after current instruction executes. Will call either
5057 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5058 loop is detected. */
5061 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5065 if (arm_frame_is_thumb (frame))
5066 nextpc = thumb_get_next_pc_raw (frame, pc);
5068 nextpc = arm_get_next_pc_raw (frame, pc);
5073 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5074 of the appropriate mode (as encoded in the PC value), even if this
5075 differs from what would be expected according to the symbol tables. */
5078 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5079 struct address_space *aspace,
5082 struct cleanup *old_chain
5083 = make_cleanup_restore_integer (&arm_override_mode);
5085 arm_override_mode = IS_THUMB_ADDR (pc);
5086 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5088 insert_single_step_breakpoint (gdbarch, aspace, pc);
5090 do_cleanups (old_chain);
5093 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5094 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5095 is found, attempt to step through it. A breakpoint is placed at the end of
5099 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5101 struct gdbarch *gdbarch = get_frame_arch (frame);
5102 struct address_space *aspace = get_frame_address_space (frame);
5103 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5104 CORE_ADDR pc = get_frame_pc (frame);
5105 CORE_ADDR breaks[2] = {-1, -1};
5107 unsigned short insn1, insn2;
5110 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5111 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5112 ULONGEST status, itstate;
5114 /* We currently do not support atomic sequences within an IT block. */
5115 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5116 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5120 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5121 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5123 if (thumb_insn_size (insn1) != 4)
5126 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5128 if (!((insn1 & 0xfff0) == 0xe850
5129 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5132 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5134 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5136 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5139 if (thumb_insn_size (insn1) != 4)
5141 /* Assume that there is at most one conditional branch in the
5142 atomic sequence. If a conditional branch is found, put a
5143 breakpoint in its destination address. */
5144 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5146 if (last_breakpoint > 0)
5147 return 0; /* More than one conditional branch found,
5148 fallback to the standard code. */
5150 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5154 /* We do not support atomic sequences that use any *other*
5155 instructions but conditional branches to change the PC.
5156 Fall back to standard code to avoid losing control of
5158 else if (thumb_instruction_changes_pc (insn1))
5163 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5166 /* Assume that there is at most one conditional branch in the
5167 atomic sequence. If a conditional branch is found, put a
5168 breakpoint in its destination address. */
5169 if ((insn1 & 0xf800) == 0xf000
5170 && (insn2 & 0xd000) == 0x8000
5171 && (insn1 & 0x0380) != 0x0380)
5173 int sign, j1, j2, imm1, imm2;
5174 unsigned int offset;
5176 sign = sbits (insn1, 10, 10);
5177 imm1 = bits (insn1, 0, 5);
5178 imm2 = bits (insn2, 0, 10);
5179 j1 = bit (insn2, 13);
5180 j2 = bit (insn2, 11);
5182 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5183 offset += (imm1 << 12) + (imm2 << 1);
5185 if (last_breakpoint > 0)
5186 return 0; /* More than one conditional branch found,
5187 fallback to the standard code. */
5189 breaks[1] = loc + offset;
5193 /* We do not support atomic sequences that use any *other*
5194 instructions but conditional branches to change the PC.
5195 Fall back to standard code to avoid losing control of
5197 else if (thumb2_instruction_changes_pc (insn1, insn2))
5200 /* If we find a strex{,b,h,d}, we're done. */
5201 if ((insn1 & 0xfff0) == 0xe840
5202 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5207 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5208 if (insn_count == atomic_sequence_length)
5211 /* Insert a breakpoint right after the end of the atomic sequence. */
5214 /* Check for duplicated breakpoints. Check also for a breakpoint
5215 placed (branch instruction's destination) anywhere in sequence. */
5217 && (breaks[1] == breaks[0]
5218 || (breaks[1] >= pc && breaks[1] < loc)))
5219 last_breakpoint = 0;
5221 /* Effectively inserts the breakpoints. */
5222 for (index = 0; index <= last_breakpoint; index++)
5223 arm_insert_single_step_breakpoint (gdbarch, aspace,
5224 MAKE_THUMB_ADDR (breaks[index]));
5230 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5232 struct gdbarch *gdbarch = get_frame_arch (frame);
5233 struct address_space *aspace = get_frame_address_space (frame);
5234 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5235 CORE_ADDR pc = get_frame_pc (frame);
5236 CORE_ADDR breaks[2] = {-1, -1};
5241 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5242 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5244 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5245 Note that we do not currently support conditionally executed atomic
5247 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5249 if ((insn & 0xff9000f0) != 0xe1900090)
5252 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5254 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5256 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5259 /* Assume that there is at most one conditional branch in the atomic
5260 sequence. If a conditional branch is found, put a breakpoint in
5261 its destination address. */
5262 if (bits (insn, 24, 27) == 0xa)
5264 if (last_breakpoint > 0)
5265 return 0; /* More than one conditional branch found, fallback
5266 to the standard single-step code. */
5268 breaks[1] = BranchDest (loc - 4, insn);
5272 /* We do not support atomic sequences that use any *other* instructions
5273 but conditional branches to change the PC. Fall back to standard
5274 code to avoid losing control of execution. */
5275 else if (arm_instruction_changes_pc (insn))
5278 /* If we find a strex{,b,h,d}, we're done. */
5279 if ((insn & 0xff9000f0) == 0xe1800090)
5283 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5284 if (insn_count == atomic_sequence_length)
5287 /* Insert a breakpoint right after the end of the atomic sequence. */
5290 /* Check for duplicated breakpoints. Check also for a breakpoint
5291 placed (branch instruction's destination) anywhere in sequence. */
5293 && (breaks[1] == breaks[0]
5294 || (breaks[1] >= pc && breaks[1] < loc)))
5295 last_breakpoint = 0;
5297 /* Effectively inserts the breakpoints. */
5298 for (index = 0; index <= last_breakpoint; index++)
5299 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5305 arm_deal_with_atomic_sequence (struct frame_info *frame)
5307 if (arm_frame_is_thumb (frame))
5308 return thumb_deal_with_atomic_sequence_raw (frame);
5310 return arm_deal_with_atomic_sequence_raw (frame);
5313 /* single_step() is called just before we want to resume the inferior,
5314 if we want to single-step it but there is no hardware or kernel
5315 single-step support. We find the target of the coming instruction
5316 and breakpoint it. */
5319 arm_software_single_step (struct frame_info *frame)
5321 struct gdbarch *gdbarch = get_frame_arch (frame);
5322 struct address_space *aspace = get_frame_address_space (frame);
5325 if (arm_deal_with_atomic_sequence (frame))
5328 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5329 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5334 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5335 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5336 NULL if an error occurs. BUF is freed. */
5339 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5340 int old_len, int new_len)
5343 int bytes_to_read = new_len - old_len;
5345 new_buf = xmalloc (new_len);
5346 memcpy (new_buf + bytes_to_read, buf, old_len);
5348 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5356 /* An IT block is at most the 2-byte IT instruction followed by
5357 four 4-byte instructions. The furthest back we must search to
5358 find an IT block that affects the current instruction is thus
5359 2 + 3 * 4 == 14 bytes. */
5360 #define MAX_IT_BLOCK_PREFIX 14
5362 /* Use a quick scan if there are more than this many bytes of
5364 #define IT_SCAN_THRESHOLD 32
5366 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5367 A breakpoint in an IT block may not be hit, depending on the
5370 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5374 CORE_ADDR boundary, func_start;
5376 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5377 int i, any, last_it, last_it_count;
5379 /* If we are using BKPT breakpoints, none of this is necessary. */
5380 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5383 /* ARM mode does not have this problem. */
5384 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5387 /* We are setting a breakpoint in Thumb code that could potentially
5388 contain an IT block. The first step is to find how much Thumb
5389 code there is; we do not need to read outside of known Thumb
5391 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5393 /* Thumb-2 code must have mapping symbols to have a chance. */
5396 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5398 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5399 && func_start > boundary)
5400 boundary = func_start;
5402 /* Search for a candidate IT instruction. We have to do some fancy
5403 footwork to distinguish a real IT instruction from the second
5404 half of a 32-bit instruction, but there is no need for that if
5405 there's no candidate. */
5406 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5408 /* No room for an IT instruction. */
5411 buf = xmalloc (buf_len);
5412 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5415 for (i = 0; i < buf_len; i += 2)
5417 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5418 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5430 /* OK, the code bytes before this instruction contain at least one
5431 halfword which resembles an IT instruction. We know that it's
5432 Thumb code, but there are still two possibilities. Either the
5433 halfword really is an IT instruction, or it is the second half of
5434 a 32-bit Thumb instruction. The only way we can tell is to
5435 scan forwards from a known instruction boundary. */
5436 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5440 /* There's a lot of code before this instruction. Start with an
5441 optimistic search; it's easy to recognize halfwords that can
5442 not be the start of a 32-bit instruction, and use that to
5443 lock on to the instruction boundaries. */
5444 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5447 buf_len = IT_SCAN_THRESHOLD;
5450 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5452 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5453 if (thumb_insn_size (inst1) == 2)
5460 /* At this point, if DEFINITE, BUF[I] is the first place we
5461 are sure that we know the instruction boundaries, and it is far
5462 enough from BPADDR that we could not miss an IT instruction
5463 affecting BPADDR. If ! DEFINITE, give up - start from a
5467 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5471 buf_len = bpaddr - boundary;
5477 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5480 buf_len = bpaddr - boundary;
5484 /* Scan forwards. Find the last IT instruction before BPADDR. */
5489 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5491 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5496 else if (inst1 & 0x0002)
5498 else if (inst1 & 0x0004)
5503 i += thumb_insn_size (inst1);
5509 /* There wasn't really an IT instruction after all. */
5512 if (last_it_count < 1)
5513 /* It was too far away. */
5516 /* This really is a trouble spot. Move the breakpoint to the IT
5518 return bpaddr - buf_len + last_it;
5521 /* ARM displaced stepping support.
5523 Generally ARM displaced stepping works as follows:
5525 1. When an instruction is to be single-stepped, it is first decoded by
5526 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5527 Depending on the type of instruction, it is then copied to a scratch
5528 location, possibly in a modified form. The copy_* set of functions
5529 performs such modification, as necessary. A breakpoint is placed after
5530 the modified instruction in the scratch space to return control to GDB.
5531 Note in particular that instructions which modify the PC will no longer
5532 do so after modification.
5534 2. The instruction is single-stepped, by setting the PC to the scratch
5535 location address, and resuming. Control returns to GDB when the
5538 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5539 function used for the current instruction. This function's job is to
5540 put the CPU/memory state back to what it would have been if the
5541 instruction had been executed unmodified in its original location. */
5543 /* NOP instruction (mov r0, r0). */
5544 #define ARM_NOP 0xe1a00000
5545 #define THUMB_NOP 0x4600
5547 /* Helper for register reads for displaced stepping. In particular, this
5548 returns the PC as it would be seen by the instruction at its original
5552 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5556 CORE_ADDR from = dsc->insn_addr;
5558 if (regno == ARM_PC_REGNUM)
5560 /* Compute pipeline offset:
5561 - When executing an ARM instruction, PC reads as the address of the
5562 current instruction plus 8.
5563 - When executing a Thumb instruction, PC reads as the address of the
5564 current instruction plus 4. */
5571 if (debug_displaced)
5572 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5573 (unsigned long) from);
5574 return (ULONGEST) from;
5578 regcache_cooked_read_unsigned (regs, regno, &ret);
5579 if (debug_displaced)
5580 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5581 regno, (unsigned long) ret);
5587 displaced_in_arm_mode (struct regcache *regs)
5590 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5592 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5594 return (ps & t_bit) == 0;
5597 /* Write to the PC as from a branch instruction. */
5600 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5604 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5605 architecture versions < 6. */
5606 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5607 val & ~(ULONGEST) 0x3);
5609 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5610 val & ~(ULONGEST) 0x1);
5613 /* Write to the PC as from a branch-exchange instruction. */
5616 bx_write_pc (struct regcache *regs, ULONGEST val)
5619 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5621 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5625 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5626 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5628 else if ((val & 2) == 0)
5630 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5631 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5635 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5636 mode, align dest to 4 bytes). */
5637 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5638 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5639 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5643 /* Write to the PC as if from a load instruction. */
5646 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5649 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5650 bx_write_pc (regs, val);
5652 branch_write_pc (regs, dsc, val);
5655 /* Write to the PC as if from an ALU instruction. */
5658 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5661 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5662 bx_write_pc (regs, val);
5664 branch_write_pc (regs, dsc, val);
5667 /* Helper for writing to registers for displaced stepping. Writing to the PC
5668 has a varying effects depending on the instruction which does the write:
5669 this is controlled by the WRITE_PC argument. */
5672 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5673 int regno, ULONGEST val, enum pc_write_style write_pc)
5675 if (regno == ARM_PC_REGNUM)
5677 if (debug_displaced)
5678 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5679 (unsigned long) val);
5682 case BRANCH_WRITE_PC:
5683 branch_write_pc (regs, dsc, val);
5687 bx_write_pc (regs, val);
5691 load_write_pc (regs, dsc, val);
5695 alu_write_pc (regs, dsc, val);
5698 case CANNOT_WRITE_PC:
5699 warning (_("Instruction wrote to PC in an unexpected way when "
5700 "single-stepping"));
5704 internal_error (__FILE__, __LINE__,
5705 _("Invalid argument to displaced_write_reg"));
5708 dsc->wrote_to_pc = 1;
5712 if (debug_displaced)
5713 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5714 regno, (unsigned long) val);
5715 regcache_cooked_write_unsigned (regs, regno, val);
5719 /* This function is used to concisely determine if an instruction INSN
5720 references PC. Register fields of interest in INSN should have the
5721 corresponding fields of BITMASK set to 0b1111. The function
5722 returns return 1 if any of these fields in INSN reference the PC
5723 (also 0b1111, r15), else it returns 0. */
5726 insn_references_pc (uint32_t insn, uint32_t bitmask)
5728 uint32_t lowbit = 1;
5730 while (bitmask != 0)
5734 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5740 mask = lowbit * 0xf;
5742 if ((insn & mask) == mask)
5751 /* The simplest copy function. Many instructions have the same effect no
5752 matter what address they are executed at: in those cases, use this. */
5755 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5756 const char *iname, struct displaced_step_closure *dsc)
5758 if (debug_displaced)
5759 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5760 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5763 dsc->modinsn[0] = insn;
5769 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5770 uint16_t insn2, const char *iname,
5771 struct displaced_step_closure *dsc)
5773 if (debug_displaced)
5774 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5775 "opcode/class '%s' unmodified\n", insn1, insn2,
5778 dsc->modinsn[0] = insn1;
5779 dsc->modinsn[1] = insn2;
5785 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5788 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5790 struct displaced_step_closure *dsc)
5792 if (debug_displaced)
5793 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5794 "opcode/class '%s' unmodified\n", insn,
5797 dsc->modinsn[0] = insn;
5802 /* Preload instructions with immediate offset. */
5805 cleanup_preload (struct gdbarch *gdbarch,
5806 struct regcache *regs, struct displaced_step_closure *dsc)
5808 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5809 if (!dsc->u.preload.immed)
5810 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5814 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5815 struct displaced_step_closure *dsc, unsigned int rn)
5818 /* Preload instructions:
5820 {pli/pld} [rn, #+/-imm]
5822 {pli/pld} [r0, #+/-imm]. */
5824 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5825 rn_val = displaced_read_reg (regs, dsc, rn);
5826 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5827 dsc->u.preload.immed = 1;
5829 dsc->cleanup = &cleanup_preload;
5833 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5834 struct displaced_step_closure *dsc)
5836 unsigned int rn = bits (insn, 16, 19);
5838 if (!insn_references_pc (insn, 0x000f0000ul))
5839 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5841 if (debug_displaced)
5842 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5843 (unsigned long) insn);
5845 dsc->modinsn[0] = insn & 0xfff0ffff;
5847 install_preload (gdbarch, regs, dsc, rn);
5853 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5854 struct regcache *regs, struct displaced_step_closure *dsc)
5856 unsigned int rn = bits (insn1, 0, 3);
5857 unsigned int u_bit = bit (insn1, 7);
5858 int imm12 = bits (insn2, 0, 11);
5861 if (rn != ARM_PC_REGNUM)
5862 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5864 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5865 PLD (literal) Encoding T1. */
5866 if (debug_displaced)
5867 fprintf_unfiltered (gdb_stdlog,
5868 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5869 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5875 /* Rewrite instruction {pli/pld} PC imm12 into:
5876 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5880 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5882 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5883 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5885 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5887 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5888 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5889 dsc->u.preload.immed = 0;
5891 /* {pli/pld} [r0, r1] */
5892 dsc->modinsn[0] = insn1 & 0xfff0;
5893 dsc->modinsn[1] = 0xf001;
5896 dsc->cleanup = &cleanup_preload;
5900 /* Preload instructions with register offset. */
5903 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5904 struct displaced_step_closure *dsc, unsigned int rn,
5907 ULONGEST rn_val, rm_val;
5909 /* Preload register-offset instructions:
5911 {pli/pld} [rn, rm {, shift}]
5913 {pli/pld} [r0, r1 {, shift}]. */
5915 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5916 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5917 rn_val = displaced_read_reg (regs, dsc, rn);
5918 rm_val = displaced_read_reg (regs, dsc, rm);
5919 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5920 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5921 dsc->u.preload.immed = 0;
5923 dsc->cleanup = &cleanup_preload;
5927 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5928 struct regcache *regs,
5929 struct displaced_step_closure *dsc)
5931 unsigned int rn = bits (insn, 16, 19);
5932 unsigned int rm = bits (insn, 0, 3);
5935 if (!insn_references_pc (insn, 0x000f000ful))
5936 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5938 if (debug_displaced)
5939 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5940 (unsigned long) insn);
5942 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5944 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5948 /* Copy/cleanup coprocessor load and store instructions. */
5951 cleanup_copro_load_store (struct gdbarch *gdbarch,
5952 struct regcache *regs,
5953 struct displaced_step_closure *dsc)
5955 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5957 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5959 if (dsc->u.ldst.writeback)
5960 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5964 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5965 struct displaced_step_closure *dsc,
5966 int writeback, unsigned int rn)
5970 /* Coprocessor load/store instructions:
5972 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5974 {stc/stc2} [r0, #+/-imm].
5976 ldc/ldc2 are handled identically. */
5978 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5979 rn_val = displaced_read_reg (regs, dsc, rn);
5980 /* PC should be 4-byte aligned. */
5981 rn_val = rn_val & 0xfffffffc;
5982 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5984 dsc->u.ldst.writeback = writeback;
5985 dsc->u.ldst.rn = rn;
5987 dsc->cleanup = &cleanup_copro_load_store;
5991 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5992 struct regcache *regs,
5993 struct displaced_step_closure *dsc)
5995 unsigned int rn = bits (insn, 16, 19);
5997 if (!insn_references_pc (insn, 0x000f0000ul))
5998 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
6000 if (debug_displaced)
6001 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6002 "load/store insn %.8lx\n", (unsigned long) insn);
6004 dsc->modinsn[0] = insn & 0xfff0ffff;
6006 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
6012 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
6013 uint16_t insn2, struct regcache *regs,
6014 struct displaced_step_closure *dsc)
6016 unsigned int rn = bits (insn1, 0, 3);
6018 if (rn != ARM_PC_REGNUM)
6019 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6020 "copro load/store", dsc);
6022 if (debug_displaced)
6023 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6024 "load/store insn %.4x%.4x\n", insn1, insn2);
6026 dsc->modinsn[0] = insn1 & 0xfff0;
6027 dsc->modinsn[1] = insn2;
6030 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6031 doesn't support writeback, so pass 0. */
6032 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6037 /* Clean up branch instructions (actually perform the branch, by setting
6041 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6042 struct displaced_step_closure *dsc)
6044 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6045 int branch_taken = condition_true (dsc->u.branch.cond, status);
6046 enum pc_write_style write_pc = dsc->u.branch.exchange
6047 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6052 if (dsc->u.branch.link)
6054 /* The value of LR should be the next insn of current one. In order
6055 not to confuse logic hanlding later insn `bx lr', if current insn mode
6056 is Thumb, the bit 0 of LR value should be set to 1. */
6057 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6060 next_insn_addr |= 0x1;
6062 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6066 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6069 /* Copy B/BL/BLX instructions with immediate destinations. */
6072 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6073 struct displaced_step_closure *dsc,
6074 unsigned int cond, int exchange, int link, long offset)
6076 /* Implement "BL<cond> <label>" as:
6078 Preparation: cond <- instruction condition
6079 Insn: mov r0, r0 (nop)
6080 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6082 B<cond> similar, but don't set r14 in cleanup. */
6084 dsc->u.branch.cond = cond;
6085 dsc->u.branch.link = link;
6086 dsc->u.branch.exchange = exchange;
6088 dsc->u.branch.dest = dsc->insn_addr;
6089 if (link && exchange)
6090 /* For BLX, offset is computed from the Align (PC, 4). */
6091 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6094 dsc->u.branch.dest += 4 + offset;
6096 dsc->u.branch.dest += 8 + offset;
6098 dsc->cleanup = &cleanup_branch;
6101 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6102 struct regcache *regs, struct displaced_step_closure *dsc)
6104 unsigned int cond = bits (insn, 28, 31);
6105 int exchange = (cond == 0xf);
6106 int link = exchange || bit (insn, 24);
6109 if (debug_displaced)
6110 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6111 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6112 (unsigned long) insn);
6114 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6115 then arrange the switch into Thumb mode. */
6116 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6118 offset = bits (insn, 0, 23) << 2;
6120 if (bit (offset, 25))
6121 offset = offset | ~0x3ffffff;
6123 dsc->modinsn[0] = ARM_NOP;
6125 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6130 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6131 uint16_t insn2, struct regcache *regs,
6132 struct displaced_step_closure *dsc)
6134 int link = bit (insn2, 14);
6135 int exchange = link && !bit (insn2, 12);
6138 int j1 = bit (insn2, 13);
6139 int j2 = bit (insn2, 11);
6140 int s = sbits (insn1, 10, 10);
6141 int i1 = !(j1 ^ bit (insn1, 10));
6142 int i2 = !(j2 ^ bit (insn1, 10));
6144 if (!link && !exchange) /* B */
6146 offset = (bits (insn2, 0, 10) << 1);
6147 if (bit (insn2, 12)) /* Encoding T4 */
6149 offset |= (bits (insn1, 0, 9) << 12)
6155 else /* Encoding T3 */
6157 offset |= (bits (insn1, 0, 5) << 12)
6161 cond = bits (insn1, 6, 9);
6166 offset = (bits (insn1, 0, 9) << 12);
6167 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6168 offset |= exchange ?
6169 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6172 if (debug_displaced)
6173 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6174 "%.4x %.4x with offset %.8lx\n",
6175 link ? (exchange) ? "blx" : "bl" : "b",
6176 insn1, insn2, offset);
6178 dsc->modinsn[0] = THUMB_NOP;
6180 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6184 /* Copy B Thumb instructions. */
6186 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6187 struct displaced_step_closure *dsc)
6189 unsigned int cond = 0;
6191 unsigned short bit_12_15 = bits (insn, 12, 15);
6192 CORE_ADDR from = dsc->insn_addr;
6194 if (bit_12_15 == 0xd)
6196 /* offset = SignExtend (imm8:0, 32) */
6197 offset = sbits ((insn << 1), 0, 8);
6198 cond = bits (insn, 8, 11);
6200 else if (bit_12_15 == 0xe) /* Encoding T2 */
6202 offset = sbits ((insn << 1), 0, 11);
6206 if (debug_displaced)
6207 fprintf_unfiltered (gdb_stdlog,
6208 "displaced: copying b immediate insn %.4x "
6209 "with offset %d\n", insn, offset);
6211 dsc->u.branch.cond = cond;
6212 dsc->u.branch.link = 0;
6213 dsc->u.branch.exchange = 0;
6214 dsc->u.branch.dest = from + 4 + offset;
6216 dsc->modinsn[0] = THUMB_NOP;
6218 dsc->cleanup = &cleanup_branch;
6223 /* Copy BX/BLX with register-specified destinations. */
6226 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6227 struct displaced_step_closure *dsc, int link,
6228 unsigned int cond, unsigned int rm)
6230 /* Implement {BX,BLX}<cond> <reg>" as:
6232 Preparation: cond <- instruction condition
6233 Insn: mov r0, r0 (nop)
6234 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6236 Don't set r14 in cleanup for BX. */
6238 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6240 dsc->u.branch.cond = cond;
6241 dsc->u.branch.link = link;
6243 dsc->u.branch.exchange = 1;
6245 dsc->cleanup = &cleanup_branch;
6249 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6250 struct regcache *regs, struct displaced_step_closure *dsc)
6252 unsigned int cond = bits (insn, 28, 31);
6255 int link = bit (insn, 5);
6256 unsigned int rm = bits (insn, 0, 3);
6258 if (debug_displaced)
6259 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6260 (unsigned long) insn);
6262 dsc->modinsn[0] = ARM_NOP;
6264 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6269 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6270 struct regcache *regs,
6271 struct displaced_step_closure *dsc)
6273 int link = bit (insn, 7);
6274 unsigned int rm = bits (insn, 3, 6);
6276 if (debug_displaced)
6277 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6278 (unsigned short) insn);
6280 dsc->modinsn[0] = THUMB_NOP;
6282 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6288 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6291 cleanup_alu_imm (struct gdbarch *gdbarch,
6292 struct regcache *regs, struct displaced_step_closure *dsc)
6294 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6295 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6296 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6297 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6301 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6302 struct displaced_step_closure *dsc)
6304 unsigned int rn = bits (insn, 16, 19);
6305 unsigned int rd = bits (insn, 12, 15);
6306 unsigned int op = bits (insn, 21, 24);
6307 int is_mov = (op == 0xd);
6308 ULONGEST rd_val, rn_val;
6310 if (!insn_references_pc (insn, 0x000ff000ul))
6311 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6313 if (debug_displaced)
6314 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6315 "%.8lx\n", is_mov ? "move" : "ALU",
6316 (unsigned long) insn);
6318 /* Instruction is of form:
6320 <op><cond> rd, [rn,] #imm
6324 Preparation: tmp1, tmp2 <- r0, r1;
6326 Insn: <op><cond> r0, r1, #imm
6327 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6330 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6331 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6332 rn_val = displaced_read_reg (regs, dsc, rn);
6333 rd_val = displaced_read_reg (regs, dsc, rd);
6334 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6335 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6339 dsc->modinsn[0] = insn & 0xfff00fff;
6341 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6343 dsc->cleanup = &cleanup_alu_imm;
6349 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6350 uint16_t insn2, struct regcache *regs,
6351 struct displaced_step_closure *dsc)
6353 unsigned int op = bits (insn1, 5, 8);
6354 unsigned int rn, rm, rd;
6355 ULONGEST rd_val, rn_val;
6357 rn = bits (insn1, 0, 3); /* Rn */
6358 rm = bits (insn2, 0, 3); /* Rm */
6359 rd = bits (insn2, 8, 11); /* Rd */
6361 /* This routine is only called for instruction MOV. */
6362 gdb_assert (op == 0x2 && rn == 0xf);
6364 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6365 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6367 if (debug_displaced)
6368 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6369 "ALU", insn1, insn2);
6371 /* Instruction is of form:
6373 <op><cond> rd, [rn,] #imm
6377 Preparation: tmp1, tmp2 <- r0, r1;
6379 Insn: <op><cond> r0, r1, #imm
6380 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6383 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6384 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6385 rn_val = displaced_read_reg (regs, dsc, rn);
6386 rd_val = displaced_read_reg (regs, dsc, rd);
6387 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6388 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6391 dsc->modinsn[0] = insn1;
6392 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6395 dsc->cleanup = &cleanup_alu_imm;
6400 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6403 cleanup_alu_reg (struct gdbarch *gdbarch,
6404 struct regcache *regs, struct displaced_step_closure *dsc)
6409 rd_val = displaced_read_reg (regs, dsc, 0);
6411 for (i = 0; i < 3; i++)
6412 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6414 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6418 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6419 struct displaced_step_closure *dsc,
6420 unsigned int rd, unsigned int rn, unsigned int rm)
6422 ULONGEST rd_val, rn_val, rm_val;
6424 /* Instruction is of form:
6426 <op><cond> rd, [rn,] rm [, <shift>]
6430 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6431 r0, r1, r2 <- rd, rn, rm
6432 Insn: <op><cond> r0, r1, r2 [, <shift>]
6433 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6436 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6437 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6438 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6439 rd_val = displaced_read_reg (regs, dsc, rd);
6440 rn_val = displaced_read_reg (regs, dsc, rn);
6441 rm_val = displaced_read_reg (regs, dsc, rm);
6442 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6443 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6444 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6447 dsc->cleanup = &cleanup_alu_reg;
6451 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6452 struct displaced_step_closure *dsc)
6454 unsigned int op = bits (insn, 21, 24);
6455 int is_mov = (op == 0xd);
6457 if (!insn_references_pc (insn, 0x000ff00ful))
6458 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6460 if (debug_displaced)
6461 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6462 is_mov ? "move" : "ALU", (unsigned long) insn);
6465 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6467 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6469 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6475 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6476 struct regcache *regs,
6477 struct displaced_step_closure *dsc)
6479 unsigned rn, rm, rd;
6481 rd = bits (insn, 3, 6);
6482 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6485 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6486 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6488 if (debug_displaced)
6489 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6490 "ALU", (unsigned short) insn);
6492 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6494 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6499 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6502 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6503 struct regcache *regs,
6504 struct displaced_step_closure *dsc)
6506 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6509 for (i = 0; i < 4; i++)
6510 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6512 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6516 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6517 struct displaced_step_closure *dsc,
6518 unsigned int rd, unsigned int rn, unsigned int rm,
6522 ULONGEST rd_val, rn_val, rm_val, rs_val;
6524 /* Instruction is of form:
6526 <op><cond> rd, [rn,] rm, <shift> rs
6530 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6531 r0, r1, r2, r3 <- rd, rn, rm, rs
6532 Insn: <op><cond> r0, r1, r2, <shift> r3
6534 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6538 for (i = 0; i < 4; i++)
6539 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6541 rd_val = displaced_read_reg (regs, dsc, rd);
6542 rn_val = displaced_read_reg (regs, dsc, rn);
6543 rm_val = displaced_read_reg (regs, dsc, rm);
6544 rs_val = displaced_read_reg (regs, dsc, rs);
6545 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6546 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6547 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6548 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6550 dsc->cleanup = &cleanup_alu_shifted_reg;
6554 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6555 struct regcache *regs,
6556 struct displaced_step_closure *dsc)
6558 unsigned int op = bits (insn, 21, 24);
6559 int is_mov = (op == 0xd);
6560 unsigned int rd, rn, rm, rs;
6562 if (!insn_references_pc (insn, 0x000fff0ful))
6563 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6565 if (debug_displaced)
6566 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6567 "%.8lx\n", is_mov ? "move" : "ALU",
6568 (unsigned long) insn);
6570 rn = bits (insn, 16, 19);
6571 rm = bits (insn, 0, 3);
6572 rs = bits (insn, 8, 11);
6573 rd = bits (insn, 12, 15);
6576 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6578 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6580 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6585 /* Clean up load instructions. */
6588 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6589 struct displaced_step_closure *dsc)
6591 ULONGEST rt_val, rt_val2 = 0, rn_val;
6593 rt_val = displaced_read_reg (regs, dsc, 0);
6594 if (dsc->u.ldst.xfersize == 8)
6595 rt_val2 = displaced_read_reg (regs, dsc, 1);
6596 rn_val = displaced_read_reg (regs, dsc, 2);
6598 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6599 if (dsc->u.ldst.xfersize > 4)
6600 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6601 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6602 if (!dsc->u.ldst.immed)
6603 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6605 /* Handle register writeback. */
6606 if (dsc->u.ldst.writeback)
6607 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6608 /* Put result in right place. */
6609 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6610 if (dsc->u.ldst.xfersize == 8)
6611 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6614 /* Clean up store instructions. */
6617 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6618 struct displaced_step_closure *dsc)
6620 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6622 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6623 if (dsc->u.ldst.xfersize > 4)
6624 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6625 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6626 if (!dsc->u.ldst.immed)
6627 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6628 if (!dsc->u.ldst.restore_r4)
6629 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6632 if (dsc->u.ldst.writeback)
6633 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6636 /* Copy "extra" load/store instructions. These are halfword/doubleword
6637 transfers, which have a different encoding to byte/word transfers. */
6640 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
6641 struct regcache *regs, struct displaced_step_closure *dsc)
6643 unsigned int op1 = bits (insn, 20, 24);
6644 unsigned int op2 = bits (insn, 5, 6);
6645 unsigned int rt = bits (insn, 12, 15);
6646 unsigned int rn = bits (insn, 16, 19);
6647 unsigned int rm = bits (insn, 0, 3);
6648 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6649 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6650 int immed = (op1 & 0x4) != 0;
6652 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6654 if (!insn_references_pc (insn, 0x000ff00ful))
6655 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6657 if (debug_displaced)
6658 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6659 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6660 (unsigned long) insn);
6662 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6665 internal_error (__FILE__, __LINE__,
6666 _("copy_extra_ld_st: instruction decode error"));
6668 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6669 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6670 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6672 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6674 rt_val = displaced_read_reg (regs, dsc, rt);
6675 if (bytesize[opcode] == 8)
6676 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6677 rn_val = displaced_read_reg (regs, dsc, rn);
6679 rm_val = displaced_read_reg (regs, dsc, rm);
6681 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6682 if (bytesize[opcode] == 8)
6683 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6684 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6686 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6689 dsc->u.ldst.xfersize = bytesize[opcode];
6690 dsc->u.ldst.rn = rn;
6691 dsc->u.ldst.immed = immed;
6692 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6693 dsc->u.ldst.restore_r4 = 0;
6696 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6698 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6699 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6701 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6703 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6704 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6706 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6711 /* Copy byte/half word/word loads and stores. */
6714 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6715 struct displaced_step_closure *dsc, int load,
6716 int immed, int writeback, int size, int usermode,
6717 int rt, int rm, int rn)
6719 ULONGEST rt_val, rn_val, rm_val = 0;
6721 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6722 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6724 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6726 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6728 rt_val = displaced_read_reg (regs, dsc, rt);
6729 rn_val = displaced_read_reg (regs, dsc, rn);
6731 rm_val = displaced_read_reg (regs, dsc, rm);
6733 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6734 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6736 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6738 dsc->u.ldst.xfersize = size;
6739 dsc->u.ldst.rn = rn;
6740 dsc->u.ldst.immed = immed;
6741 dsc->u.ldst.writeback = writeback;
6743 /* To write PC we can do:
6745 Before this sequence of instructions:
6746 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6747 r2 is the Rn value got from dispalced_read_reg.
6749 Insn1: push {pc} Write address of STR instruction + offset on stack
6750 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6751 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6752 = addr(Insn1) + offset - addr(Insn3) - 8
6754 Insn4: add r4, r4, #8 r4 = offset - 8
6755 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6757 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6759 Otherwise we don't know what value to write for PC, since the offset is
6760 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6761 of this can be found in Section "Saving from r15" in
6762 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6764 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6769 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6770 uint16_t insn2, struct regcache *regs,
6771 struct displaced_step_closure *dsc, int size)
6773 unsigned int u_bit = bit (insn1, 7);
6774 unsigned int rt = bits (insn2, 12, 15);
6775 int imm12 = bits (insn2, 0, 11);
6778 if (debug_displaced)
6779 fprintf_unfiltered (gdb_stdlog,
6780 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6781 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6787 /* Rewrite instruction LDR Rt imm12 into:
6789 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6793 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6796 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6797 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6798 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6800 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6802 pc_val = pc_val & 0xfffffffc;
6804 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6805 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6809 dsc->u.ldst.xfersize = size;
6810 dsc->u.ldst.immed = 0;
6811 dsc->u.ldst.writeback = 0;
6812 dsc->u.ldst.restore_r4 = 0;
6814 /* LDR R0, R2, R3 */
6815 dsc->modinsn[0] = 0xf852;
6816 dsc->modinsn[1] = 0x3;
6819 dsc->cleanup = &cleanup_load;
6825 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6826 uint16_t insn2, struct regcache *regs,
6827 struct displaced_step_closure *dsc,
6828 int writeback, int immed)
6830 unsigned int rt = bits (insn2, 12, 15);
6831 unsigned int rn = bits (insn1, 0, 3);
6832 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6833 /* In LDR (register), there is also a register Rm, which is not allowed to
6834 be PC, so we don't have to check it. */
6836 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6837 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6840 if (debug_displaced)
6841 fprintf_unfiltered (gdb_stdlog,
6842 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6843 rt, rn, insn1, insn2);
6845 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6848 dsc->u.ldst.restore_r4 = 0;
6851 /* ldr[b]<cond> rt, [rn, #imm], etc.
6853 ldr[b]<cond> r0, [r2, #imm]. */
6855 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6856 dsc->modinsn[1] = insn2 & 0x0fff;
6859 /* ldr[b]<cond> rt, [rn, rm], etc.
6861 ldr[b]<cond> r0, [r2, r3]. */
6863 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6864 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6874 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6875 struct regcache *regs,
6876 struct displaced_step_closure *dsc,
6877 int load, int size, int usermode)
6879 int immed = !bit (insn, 25);
6880 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6881 unsigned int rt = bits (insn, 12, 15);
6882 unsigned int rn = bits (insn, 16, 19);
6883 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6885 if (!insn_references_pc (insn, 0x000ff00ful))
6886 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6888 if (debug_displaced)
6889 fprintf_unfiltered (gdb_stdlog,
6890 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6891 load ? (size == 1 ? "ldrb" : "ldr")
6892 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6894 (unsigned long) insn);
6896 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6897 usermode, rt, rm, rn);
6899 if (load || rt != ARM_PC_REGNUM)
6901 dsc->u.ldst.restore_r4 = 0;
6904 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6906 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6907 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6909 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6911 {ldr,str}[b]<cond> r0, [r2, r3]. */
6912 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6916 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6917 dsc->u.ldst.restore_r4 = 1;
6918 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6919 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6920 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6921 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6922 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6926 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6928 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6933 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6938 /* Cleanup LDM instructions with fully-populated register list. This is an
6939 unfortunate corner case: it's impossible to implement correctly by modifying
6940 the instruction. The issue is as follows: we have an instruction,
6944 which we must rewrite to avoid loading PC. A possible solution would be to
6945 do the load in two halves, something like (with suitable cleanup
6949 ldm[id][ab] r8!, {r0-r7}
6951 ldm[id][ab] r8, {r7-r14}
6954 but at present there's no suitable place for <temp>, since the scratch space
6955 is overwritten before the cleanup routine is called. For now, we simply
6956 emulate the instruction. */
6959 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6960 struct displaced_step_closure *dsc)
6962 int inc = dsc->u.block.increment;
6963 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6964 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6965 uint32_t regmask = dsc->u.block.regmask;
6966 int regno = inc ? 0 : 15;
6967 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6968 int exception_return = dsc->u.block.load && dsc->u.block.user
6969 && (regmask & 0x8000) != 0;
6970 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6971 int do_transfer = condition_true (dsc->u.block.cond, status);
6972 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6977 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6978 sensible we can do here. Complain loudly. */
6979 if (exception_return)
6980 error (_("Cannot single-step exception return"));
6982 /* We don't handle any stores here for now. */
6983 gdb_assert (dsc->u.block.load != 0);
6985 if (debug_displaced)
6986 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6987 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6988 dsc->u.block.increment ? "inc" : "dec",
6989 dsc->u.block.before ? "before" : "after");
6996 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6999 while (regno >= 0 && (regmask & (1 << regno)) == 0)
7002 xfer_addr += bump_before;
7004 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
7005 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
7007 xfer_addr += bump_after;
7009 regmask &= ~(1 << regno);
7012 if (dsc->u.block.writeback)
7013 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
7017 /* Clean up an STM which included the PC in the register list. */
7020 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
7021 struct displaced_step_closure *dsc)
7023 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7024 int store_executed = condition_true (dsc->u.block.cond, status);
7025 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7026 CORE_ADDR stm_insn_addr;
7029 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7031 /* If condition code fails, there's nothing else to do. */
7032 if (!store_executed)
7035 if (dsc->u.block.increment)
7037 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7039 if (dsc->u.block.before)
7044 pc_stored_at = dsc->u.block.xfer_addr;
7046 if (dsc->u.block.before)
7050 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7051 stm_insn_addr = dsc->scratch_base;
7052 offset = pc_val - stm_insn_addr;
7054 if (debug_displaced)
7055 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7056 "STM instruction\n", offset);
7058 /* Rewrite the stored PC to the proper value for the non-displaced original
7060 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7061 dsc->insn_addr + offset);
7064 /* Clean up an LDM which includes the PC in the register list. We clumped all
7065 the registers in the transferred list into a contiguous range r0...rX (to
7066 avoid loading PC directly and losing control of the debugged program), so we
7067 must undo that here. */
7070 cleanup_block_load_pc (struct gdbarch *gdbarch,
7071 struct regcache *regs,
7072 struct displaced_step_closure *dsc)
7074 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7075 int load_executed = condition_true (dsc->u.block.cond, status);
7076 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7077 unsigned int regs_loaded = bitcount (mask);
7078 unsigned int num_to_shuffle = regs_loaded, clobbered;
7080 /* The method employed here will fail if the register list is fully populated
7081 (we need to avoid loading PC directly). */
7082 gdb_assert (num_to_shuffle < 16);
7087 clobbered = (1 << num_to_shuffle) - 1;
7089 while (num_to_shuffle > 0)
7091 if ((mask & (1 << write_reg)) != 0)
7093 unsigned int read_reg = num_to_shuffle - 1;
7095 if (read_reg != write_reg)
7097 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7098 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7099 if (debug_displaced)
7100 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7101 "loaded register r%d to r%d\n"), read_reg,
7104 else if (debug_displaced)
7105 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7106 "r%d already in the right place\n"),
7109 clobbered &= ~(1 << write_reg);
7117 /* Restore any registers we scribbled over. */
7118 for (write_reg = 0; clobbered != 0; write_reg++)
7120 if ((clobbered & (1 << write_reg)) != 0)
7122 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7124 if (debug_displaced)
7125 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7126 "clobbered register r%d\n"), write_reg);
7127 clobbered &= ~(1 << write_reg);
7131 /* Perform register writeback manually. */
7132 if (dsc->u.block.writeback)
7134 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7136 if (dsc->u.block.increment)
7137 new_rn_val += regs_loaded * 4;
7139 new_rn_val -= regs_loaded * 4;
7141 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7146 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7147 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7150 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7151 struct regcache *regs,
7152 struct displaced_step_closure *dsc)
7154 int load = bit (insn, 20);
7155 int user = bit (insn, 22);
7156 int increment = bit (insn, 23);
7157 int before = bit (insn, 24);
7158 int writeback = bit (insn, 21);
7159 int rn = bits (insn, 16, 19);
7161 /* Block transfers which don't mention PC can be run directly
7163 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7164 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7166 if (rn == ARM_PC_REGNUM)
7168 warning (_("displaced: Unpredictable LDM or STM with "
7169 "base register r15"));
7170 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7173 if (debug_displaced)
7174 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7175 "%.8lx\n", (unsigned long) insn);
7177 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7178 dsc->u.block.rn = rn;
7180 dsc->u.block.load = load;
7181 dsc->u.block.user = user;
7182 dsc->u.block.increment = increment;
7183 dsc->u.block.before = before;
7184 dsc->u.block.writeback = writeback;
7185 dsc->u.block.cond = bits (insn, 28, 31);
7187 dsc->u.block.regmask = insn & 0xffff;
7191 if ((insn & 0xffff) == 0xffff)
7193 /* LDM with a fully-populated register list. This case is
7194 particularly tricky. Implement for now by fully emulating the
7195 instruction (which might not behave perfectly in all cases, but
7196 these instructions should be rare enough for that not to matter
7198 dsc->modinsn[0] = ARM_NOP;
7200 dsc->cleanup = &cleanup_block_load_all;
7204 /* LDM of a list of registers which includes PC. Implement by
7205 rewriting the list of registers to be transferred into a
7206 contiguous chunk r0...rX before doing the transfer, then shuffling
7207 registers into the correct places in the cleanup routine. */
7208 unsigned int regmask = insn & 0xffff;
7209 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7210 unsigned int to = 0, from = 0, i, new_rn;
7212 for (i = 0; i < num_in_list; i++)
7213 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7215 /* Writeback makes things complicated. We need to avoid clobbering
7216 the base register with one of the registers in our modified
7217 register list, but just using a different register can't work in
7220 ldm r14!, {r0-r13,pc}
7222 which would need to be rewritten as:
7226 but that can't work, because there's no free register for N.
7228 Solve this by turning off the writeback bit, and emulating
7229 writeback manually in the cleanup routine. */
7234 new_regmask = (1 << num_in_list) - 1;
7236 if (debug_displaced)
7237 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7238 "{..., pc}: original reg list %.4x, modified "
7239 "list %.4x\n"), rn, writeback ? "!" : "",
7240 (int) insn & 0xffff, new_regmask);
7242 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7244 dsc->cleanup = &cleanup_block_load_pc;
7249 /* STM of a list of registers which includes PC. Run the instruction
7250 as-is, but out of line: this will store the wrong value for the PC,
7251 so we must manually fix up the memory in the cleanup routine.
7252 Doing things this way has the advantage that we can auto-detect
7253 the offset of the PC write (which is architecture-dependent) in
7254 the cleanup routine. */
7255 dsc->modinsn[0] = insn;
7257 dsc->cleanup = &cleanup_block_store_pc;
7264 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7265 struct regcache *regs,
7266 struct displaced_step_closure *dsc)
7268 int rn = bits (insn1, 0, 3);
7269 int load = bit (insn1, 4);
7270 int writeback = bit (insn1, 5);
7272 /* Block transfers which don't mention PC can be run directly
7274 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7275 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7277 if (rn == ARM_PC_REGNUM)
7279 warning (_("displaced: Unpredictable LDM or STM with "
7280 "base register r15"));
7281 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7282 "unpredictable ldm/stm", dsc);
7285 if (debug_displaced)
7286 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7287 "%.4x%.4x\n", insn1, insn2);
7289 /* Clear bit 13, since it should be always zero. */
7290 dsc->u.block.regmask = (insn2 & 0xdfff);
7291 dsc->u.block.rn = rn;
7293 dsc->u.block.load = load;
7294 dsc->u.block.user = 0;
7295 dsc->u.block.increment = bit (insn1, 7);
7296 dsc->u.block.before = bit (insn1, 8);
7297 dsc->u.block.writeback = writeback;
7298 dsc->u.block.cond = INST_AL;
7299 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7303 if (dsc->u.block.regmask == 0xffff)
7305 /* This branch is impossible to happen. */
7310 unsigned int regmask = dsc->u.block.regmask;
7311 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7312 unsigned int to = 0, from = 0, i, new_rn;
7314 for (i = 0; i < num_in_list; i++)
7315 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7320 new_regmask = (1 << num_in_list) - 1;
7322 if (debug_displaced)
7323 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7324 "{..., pc}: original reg list %.4x, modified "
7325 "list %.4x\n"), rn, writeback ? "!" : "",
7326 (int) dsc->u.block.regmask, new_regmask);
7328 dsc->modinsn[0] = insn1;
7329 dsc->modinsn[1] = (new_regmask & 0xffff);
7332 dsc->cleanup = &cleanup_block_load_pc;
7337 dsc->modinsn[0] = insn1;
7338 dsc->modinsn[1] = insn2;
7340 dsc->cleanup = &cleanup_block_store_pc;
7345 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7346 for Linux, where some SVC instructions must be treated specially. */
7349 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7350 struct displaced_step_closure *dsc)
7352 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7354 if (debug_displaced)
7355 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7356 "%.8lx\n", (unsigned long) resume_addr);
7358 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7362 /* Common copy routine for svc instruciton. */
7365 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7366 struct displaced_step_closure *dsc)
7368 /* Preparation: none.
7369 Insn: unmodified svc.
7370 Cleanup: pc <- insn_addr + insn_size. */
7372 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7374 dsc->wrote_to_pc = 1;
7376 /* Allow OS-specific code to override SVC handling. */
7377 if (dsc->u.svc.copy_svc_os)
7378 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7381 dsc->cleanup = &cleanup_svc;
7387 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7388 struct regcache *regs, struct displaced_step_closure *dsc)
7391 if (debug_displaced)
7392 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7393 (unsigned long) insn);
7395 dsc->modinsn[0] = insn;
7397 return install_svc (gdbarch, regs, dsc);
7401 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7402 struct regcache *regs, struct displaced_step_closure *dsc)
7405 if (debug_displaced)
7406 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7409 dsc->modinsn[0] = insn;
7411 return install_svc (gdbarch, regs, dsc);
7414 /* Copy undefined instructions. */
7417 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7418 struct displaced_step_closure *dsc)
7420 if (debug_displaced)
7421 fprintf_unfiltered (gdb_stdlog,
7422 "displaced: copying undefined insn %.8lx\n",
7423 (unsigned long) insn);
7425 dsc->modinsn[0] = insn;
7431 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7432 struct displaced_step_closure *dsc)
7435 if (debug_displaced)
7436 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7437 "%.4x %.4x\n", (unsigned short) insn1,
7438 (unsigned short) insn2);
7440 dsc->modinsn[0] = insn1;
7441 dsc->modinsn[1] = insn2;
7447 /* Copy unpredictable instructions. */
7450 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7451 struct displaced_step_closure *dsc)
7453 if (debug_displaced)
7454 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7455 "%.8lx\n", (unsigned long) insn);
7457 dsc->modinsn[0] = insn;
7462 /* The decode_* functions are instruction decoding helpers. They mostly follow
7463 the presentation in the ARM ARM. */
7466 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7467 struct regcache *regs,
7468 struct displaced_step_closure *dsc)
7470 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7471 unsigned int rn = bits (insn, 16, 19);
7473 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7474 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7475 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7476 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7477 else if ((op1 & 0x60) == 0x20)
7478 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7479 else if ((op1 & 0x71) == 0x40)
7480 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7482 else if ((op1 & 0x77) == 0x41)
7483 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7484 else if ((op1 & 0x77) == 0x45)
7485 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7486 else if ((op1 & 0x77) == 0x51)
7489 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7491 return arm_copy_unpred (gdbarch, insn, dsc);
7493 else if ((op1 & 0x77) == 0x55)
7494 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7495 else if (op1 == 0x57)
7498 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7499 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7500 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7501 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7502 default: return arm_copy_unpred (gdbarch, insn, dsc);
7504 else if ((op1 & 0x63) == 0x43)
7505 return arm_copy_unpred (gdbarch, insn, dsc);
7506 else if ((op2 & 0x1) == 0x0)
7507 switch (op1 & ~0x80)
7510 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7512 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7513 case 0x71: case 0x75:
7515 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7516 case 0x63: case 0x67: case 0x73: case 0x77:
7517 return arm_copy_unpred (gdbarch, insn, dsc);
7519 return arm_copy_undef (gdbarch, insn, dsc);
7522 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7526 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7527 struct regcache *regs,
7528 struct displaced_step_closure *dsc)
7530 if (bit (insn, 27) == 0)
7531 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7532 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7533 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7536 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7539 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7541 case 0x4: case 0x5: case 0x6: case 0x7:
7542 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7545 switch ((insn & 0xe00000) >> 21)
7547 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7549 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7552 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7555 return arm_copy_undef (gdbarch, insn, dsc);
7560 int rn_f = (bits (insn, 16, 19) == 0xf);
7561 switch ((insn & 0xe00000) >> 21)
7564 /* ldc/ldc2 imm (undefined for rn == pc). */
7565 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7566 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7569 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7571 case 0x4: case 0x5: case 0x6: case 0x7:
7572 /* ldc/ldc2 lit (undefined for rn != pc). */
7573 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7574 : arm_copy_undef (gdbarch, insn, dsc);
7577 return arm_copy_undef (gdbarch, insn, dsc);
7582 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7585 if (bits (insn, 16, 19) == 0xf)
7587 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7589 return arm_copy_undef (gdbarch, insn, dsc);
7593 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7595 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7599 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7601 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7604 return arm_copy_undef (gdbarch, insn, dsc);
7608 /* Decode miscellaneous instructions in dp/misc encoding space. */
7611 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7612 struct regcache *regs,
7613 struct displaced_step_closure *dsc)
7615 unsigned int op2 = bits (insn, 4, 6);
7616 unsigned int op = bits (insn, 21, 22);
7617 unsigned int op1 = bits (insn, 16, 19);
7622 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7625 if (op == 0x1) /* bx. */
7626 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7628 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7630 return arm_copy_undef (gdbarch, insn, dsc);
7634 /* Not really supported. */
7635 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7637 return arm_copy_undef (gdbarch, insn, dsc);
7641 return arm_copy_bx_blx_reg (gdbarch, insn,
7642 regs, dsc); /* blx register. */
7644 return arm_copy_undef (gdbarch, insn, dsc);
7647 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7651 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7653 /* Not really supported. */
7654 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7657 return arm_copy_undef (gdbarch, insn, dsc);
7662 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7663 struct regcache *regs,
7664 struct displaced_step_closure *dsc)
7667 switch (bits (insn, 20, 24))
7670 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7673 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7675 case 0x12: case 0x16:
7676 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7679 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7683 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7685 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7686 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7687 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7688 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7689 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7690 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7691 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7692 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7693 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7694 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7695 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7696 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7697 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7698 /* 2nd arg means "unpriveleged". */
7699 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7703 /* Should be unreachable. */
7708 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7709 struct regcache *regs,
7710 struct displaced_step_closure *dsc)
7712 int a = bit (insn, 25), b = bit (insn, 4);
7713 uint32_t op1 = bits (insn, 20, 24);
7714 int rn_f = bits (insn, 16, 19) == 0xf;
7716 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7717 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7718 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7719 else if ((!a && (op1 & 0x17) == 0x02)
7720 || (a && (op1 & 0x17) == 0x02 && !b))
7721 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7722 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7723 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7724 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7725 else if ((!a && (op1 & 0x17) == 0x03)
7726 || (a && (op1 & 0x17) == 0x03 && !b))
7727 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7728 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7729 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7730 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7731 else if ((!a && (op1 & 0x17) == 0x06)
7732 || (a && (op1 & 0x17) == 0x06 && !b))
7733 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7734 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7735 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7736 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7737 else if ((!a && (op1 & 0x17) == 0x07)
7738 || (a && (op1 & 0x17) == 0x07 && !b))
7739 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7741 /* Should be unreachable. */
7746 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7747 struct displaced_step_closure *dsc)
7749 switch (bits (insn, 20, 24))
7751 case 0x00: case 0x01: case 0x02: case 0x03:
7752 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7754 case 0x04: case 0x05: case 0x06: case 0x07:
7755 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7757 case 0x08: case 0x09: case 0x0a: case 0x0b:
7758 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7759 return arm_copy_unmodified (gdbarch, insn,
7760 "decode/pack/unpack/saturate/reverse", dsc);
7763 if (bits (insn, 5, 7) == 0) /* op2. */
7765 if (bits (insn, 12, 15) == 0xf)
7766 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7768 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7771 return arm_copy_undef (gdbarch, insn, dsc);
7773 case 0x1a: case 0x1b:
7774 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7775 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7777 return arm_copy_undef (gdbarch, insn, dsc);
7779 case 0x1c: case 0x1d:
7780 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7782 if (bits (insn, 0, 3) == 0xf)
7783 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7785 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7788 return arm_copy_undef (gdbarch, insn, dsc);
7790 case 0x1e: case 0x1f:
7791 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7792 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7794 return arm_copy_undef (gdbarch, insn, dsc);
7797 /* Should be unreachable. */
7802 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7803 struct regcache *regs,
7804 struct displaced_step_closure *dsc)
7807 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7809 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7813 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7814 struct regcache *regs,
7815 struct displaced_step_closure *dsc)
7817 unsigned int opcode = bits (insn, 20, 24);
7821 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7822 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7824 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7825 case 0x12: case 0x16:
7826 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7828 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7829 case 0x13: case 0x17:
7830 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7832 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7833 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7834 /* Note: no writeback for these instructions. Bit 25 will always be
7835 zero though (via caller), so the following works OK. */
7836 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7839 /* Should be unreachable. */
7843 /* Decode shifted register instructions. */
7846 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7847 uint16_t insn2, struct regcache *regs,
7848 struct displaced_step_closure *dsc)
7850 /* PC is only allowed to be used in instruction MOV. */
7852 unsigned int op = bits (insn1, 5, 8);
7853 unsigned int rn = bits (insn1, 0, 3);
7855 if (op == 0x2 && rn == 0xf) /* MOV */
7856 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7858 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7859 "dp (shift reg)", dsc);
7863 /* Decode extension register load/store. Exactly the same as
7864 arm_decode_ext_reg_ld_st. */
7867 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7868 uint16_t insn2, struct regcache *regs,
7869 struct displaced_step_closure *dsc)
7871 unsigned int opcode = bits (insn1, 4, 8);
7875 case 0x04: case 0x05:
7876 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7877 "vfp/neon vmov", dsc);
7879 case 0x08: case 0x0c: /* 01x00 */
7880 case 0x0a: case 0x0e: /* 01x10 */
7881 case 0x12: case 0x16: /* 10x10 */
7882 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7883 "vfp/neon vstm/vpush", dsc);
7885 case 0x09: case 0x0d: /* 01x01 */
7886 case 0x0b: case 0x0f: /* 01x11 */
7887 case 0x13: case 0x17: /* 10x11 */
7888 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7889 "vfp/neon vldm/vpop", dsc);
7891 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7892 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7894 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7895 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7898 /* Should be unreachable. */
7903 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7904 struct regcache *regs, struct displaced_step_closure *dsc)
7906 unsigned int op1 = bits (insn, 20, 25);
7907 int op = bit (insn, 4);
7908 unsigned int coproc = bits (insn, 8, 11);
7909 unsigned int rn = bits (insn, 16, 19);
7911 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7912 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7913 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7914 && (coproc & 0xe) != 0xa)
7916 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7917 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7918 && (coproc & 0xe) != 0xa)
7919 /* ldc/ldc2 imm/lit. */
7920 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7921 else if ((op1 & 0x3e) == 0x00)
7922 return arm_copy_undef (gdbarch, insn, dsc);
7923 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7924 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7925 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7926 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7927 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7928 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7929 else if ((op1 & 0x30) == 0x20 && !op)
7931 if ((coproc & 0xe) == 0xa)
7932 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7934 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7936 else if ((op1 & 0x30) == 0x20 && op)
7937 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7938 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7939 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7940 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7941 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7942 else if ((op1 & 0x30) == 0x30)
7943 return arm_copy_svc (gdbarch, insn, regs, dsc);
7945 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7949 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7950 uint16_t insn2, struct regcache *regs,
7951 struct displaced_step_closure *dsc)
7953 unsigned int coproc = bits (insn2, 8, 11);
7954 unsigned int op1 = bits (insn1, 4, 9);
7955 unsigned int bit_5_8 = bits (insn1, 5, 8);
7956 unsigned int bit_9 = bit (insn1, 9);
7957 unsigned int bit_4 = bit (insn1, 4);
7958 unsigned int rn = bits (insn1, 0, 3);
7963 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7964 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7966 else if (bit_5_8 == 0) /* UNDEFINED. */
7967 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7970 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
7971 if ((coproc & 0xe) == 0xa)
7972 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7974 else /* coproc is not 101x. */
7976 if (bit_4 == 0) /* STC/STC2. */
7977 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7979 else /* LDC/LDC2 {literal, immeidate}. */
7980 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7986 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7992 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7993 struct displaced_step_closure *dsc, int rd)
7999 Preparation: Rd <- PC
8005 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8006 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
8010 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
8011 struct displaced_step_closure *dsc,
8012 int rd, unsigned int imm)
8015 /* Encoding T2: ADDS Rd, #imm */
8016 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8018 install_pc_relative (gdbarch, regs, dsc, rd);
8024 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8025 struct regcache *regs,
8026 struct displaced_step_closure *dsc)
8028 unsigned int rd = bits (insn, 8, 10);
8029 unsigned int imm8 = bits (insn, 0, 7);
8031 if (debug_displaced)
8032 fprintf_unfiltered (gdb_stdlog,
8033 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8036 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8040 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8041 uint16_t insn2, struct regcache *regs,
8042 struct displaced_step_closure *dsc)
8044 unsigned int rd = bits (insn2, 8, 11);
8045 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8046 extract raw immediate encoding rather than computing immediate. When
8047 generating ADD or SUB instruction, we can simply perform OR operation to
8048 set immediate into ADD. */
8049 unsigned int imm_3_8 = insn2 & 0x70ff;
8050 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8052 if (debug_displaced)
8053 fprintf_unfiltered (gdb_stdlog,
8054 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8055 rd, imm_i, imm_3_8, insn1, insn2);
8057 if (bit (insn1, 7)) /* Encoding T2 */
8059 /* Encoding T3: SUB Rd, Rd, #imm */
8060 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8061 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8063 else /* Encoding T3 */
8065 /* Encoding T3: ADD Rd, Rd, #imm */
8066 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8067 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8071 install_pc_relative (gdbarch, regs, dsc, rd);
8077 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8078 struct regcache *regs,
8079 struct displaced_step_closure *dsc)
8081 unsigned int rt = bits (insn1, 8, 10);
8083 int imm8 = (bits (insn1, 0, 7) << 2);
8084 CORE_ADDR from = dsc->insn_addr;
8090 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8092 Insn: LDR R0, [R2, R3];
8093 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8095 if (debug_displaced)
8096 fprintf_unfiltered (gdb_stdlog,
8097 "displaced: copying thumb ldr r%d [pc #%d]\n"
8100 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8101 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8102 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8103 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8104 /* The assembler calculates the required value of the offset from the
8105 Align(PC,4) value of this instruction to the label. */
8106 pc = pc & 0xfffffffc;
8108 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8109 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8112 dsc->u.ldst.xfersize = 4;
8114 dsc->u.ldst.immed = 0;
8115 dsc->u.ldst.writeback = 0;
8116 dsc->u.ldst.restore_r4 = 0;
8118 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8120 dsc->cleanup = &cleanup_load;
8125 /* Copy Thumb cbnz/cbz insruction. */
8128 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8129 struct regcache *regs,
8130 struct displaced_step_closure *dsc)
8132 int non_zero = bit (insn1, 11);
8133 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8134 CORE_ADDR from = dsc->insn_addr;
8135 int rn = bits (insn1, 0, 2);
8136 int rn_val = displaced_read_reg (regs, dsc, rn);
8138 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8139 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8140 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8141 condition is false, let it be, cleanup_branch will do nothing. */
8142 if (dsc->u.branch.cond)
8144 dsc->u.branch.cond = INST_AL;
8145 dsc->u.branch.dest = from + 4 + imm5;
8148 dsc->u.branch.dest = from + 2;
8150 dsc->u.branch.link = 0;
8151 dsc->u.branch.exchange = 0;
8153 if (debug_displaced)
8154 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8155 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8156 rn, rn_val, insn1, dsc->u.branch.dest);
8158 dsc->modinsn[0] = THUMB_NOP;
8160 dsc->cleanup = &cleanup_branch;
8164 /* Copy Table Branch Byte/Halfword */
8166 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8167 uint16_t insn2, struct regcache *regs,
8168 struct displaced_step_closure *dsc)
8170 ULONGEST rn_val, rm_val;
8171 int is_tbh = bit (insn2, 4);
8172 CORE_ADDR halfwords = 0;
8173 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8175 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8176 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8182 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8183 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8189 target_read_memory (rn_val + rm_val, buf, 1);
8190 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8193 if (debug_displaced)
8194 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8195 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
8196 (unsigned int) rn_val, (unsigned int) rm_val,
8197 (unsigned int) halfwords);
8199 dsc->u.branch.cond = INST_AL;
8200 dsc->u.branch.link = 0;
8201 dsc->u.branch.exchange = 0;
8202 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8204 dsc->cleanup = &cleanup_branch;
8210 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8211 struct displaced_step_closure *dsc)
8214 int val = displaced_read_reg (regs, dsc, 7);
8215 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8218 val = displaced_read_reg (regs, dsc, 8);
8219 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8222 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8227 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8228 struct regcache *regs,
8229 struct displaced_step_closure *dsc)
8231 dsc->u.block.regmask = insn1 & 0x00ff;
8233 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8236 (1) register list is full, that is, r0-r7 are used.
8237 Prepare: tmp[0] <- r8
8239 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8240 MOV r8, r7; Move value of r7 to r8;
8241 POP {r7}; Store PC value into r7.
8243 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8245 (2) register list is not full, supposing there are N registers in
8246 register list (except PC, 0 <= N <= 7).
8247 Prepare: for each i, 0 - N, tmp[i] <- ri.
8249 POP {r0, r1, ...., rN};
8251 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8252 from tmp[] properly.
8254 if (debug_displaced)
8255 fprintf_unfiltered (gdb_stdlog,
8256 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8257 dsc->u.block.regmask, insn1);
8259 if (dsc->u.block.regmask == 0xff)
8261 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8263 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8264 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8265 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8268 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8272 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8273 unsigned int new_regmask, bit = 1;
8274 unsigned int to = 0, from = 0, i, new_rn;
8276 for (i = 0; i < num_in_list + 1; i++)
8277 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8279 new_regmask = (1 << (num_in_list + 1)) - 1;
8281 if (debug_displaced)
8282 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8283 "{..., pc}: original reg list %.4x,"
8284 " modified list %.4x\n"),
8285 (int) dsc->u.block.regmask, new_regmask);
8287 dsc->u.block.regmask |= 0x8000;
8288 dsc->u.block.writeback = 0;
8289 dsc->u.block.cond = INST_AL;
8291 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8293 dsc->cleanup = &cleanup_block_load_pc;
8300 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8301 struct regcache *regs,
8302 struct displaced_step_closure *dsc)
8304 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8305 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8308 /* 16-bit thumb instructions. */
8309 switch (op_bit_12_15)
8311 /* Shift (imme), add, subtract, move and compare. */
8312 case 0: case 1: case 2: case 3:
8313 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8314 "shift/add/sub/mov/cmp",
8318 switch (op_bit_10_11)
8320 case 0: /* Data-processing */
8321 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8325 case 1: /* Special data instructions and branch and exchange. */
8327 unsigned short op = bits (insn1, 7, 9);
8328 if (op == 6 || op == 7) /* BX or BLX */
8329 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8330 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8331 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8333 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8337 default: /* LDR (literal) */
8338 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8341 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8342 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8345 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8346 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8347 else /* Generate SP-relative address */
8348 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8350 case 11: /* Misc 16-bit instructions */
8352 switch (bits (insn1, 8, 11))
8354 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8355 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8357 case 12: case 13: /* POP */
8358 if (bit (insn1, 8)) /* PC is in register list. */
8359 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8361 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8363 case 15: /* If-Then, and hints */
8364 if (bits (insn1, 0, 3))
8365 /* If-Then makes up to four following instructions conditional.
8366 IT instruction itself is not conditional, so handle it as a
8367 common unmodified instruction. */
8368 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8371 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8374 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8379 if (op_bit_10_11 < 2) /* Store multiple registers */
8380 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8381 else /* Load multiple registers */
8382 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8384 case 13: /* Conditional branch and supervisor call */
8385 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8386 err = thumb_copy_b (gdbarch, insn1, dsc);
8388 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8390 case 14: /* Unconditional branch */
8391 err = thumb_copy_b (gdbarch, insn1, dsc);
8398 internal_error (__FILE__, __LINE__,
8399 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8403 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8404 uint16_t insn1, uint16_t insn2,
8405 struct regcache *regs,
8406 struct displaced_step_closure *dsc)
8408 int rt = bits (insn2, 12, 15);
8409 int rn = bits (insn1, 0, 3);
8410 int op1 = bits (insn1, 7, 8);
8413 switch (bits (insn1, 5, 6))
8415 case 0: /* Load byte and memory hints */
8416 if (rt == 0xf) /* PLD/PLI */
8419 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8420 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8422 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8427 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8428 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8431 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8432 "ldrb{reg, immediate}/ldrbt",
8437 case 1: /* Load halfword and memory hints. */
8438 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8439 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8440 "pld/unalloc memhint", dsc);
8444 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8447 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8451 case 2: /* Load word */
8453 int insn2_bit_8_11 = bits (insn2, 8, 11);
8456 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8457 else if (op1 == 0x1) /* Encoding T3 */
8458 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8460 else /* op1 == 0x0 */
8462 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8463 /* LDR (immediate) */
8464 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8465 dsc, bit (insn2, 8), 1);
8466 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8467 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8470 /* LDR (register) */
8471 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8477 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8484 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8485 uint16_t insn2, struct regcache *regs,
8486 struct displaced_step_closure *dsc)
8489 unsigned short op = bit (insn2, 15);
8490 unsigned int op1 = bits (insn1, 11, 12);
8496 switch (bits (insn1, 9, 10))
8501 /* Load/store {dual, execlusive}, table branch. */
8502 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8503 && bits (insn2, 5, 7) == 0)
8504 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8507 /* PC is not allowed to use in load/store {dual, exclusive}
8509 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8510 "load/store dual/ex", dsc);
8512 else /* load/store multiple */
8514 switch (bits (insn1, 7, 8))
8516 case 0: case 3: /* SRS, RFE */
8517 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8520 case 1: case 2: /* LDM/STM/PUSH/POP */
8521 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8528 /* Data-processing (shift register). */
8529 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8532 default: /* Coprocessor instructions. */
8533 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8538 case 2: /* op1 = 2 */
8539 if (op) /* Branch and misc control. */
8541 if (bit (insn2, 14) /* BLX/BL */
8542 || bit (insn2, 12) /* Unconditional branch */
8543 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8544 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8546 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8551 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8553 int op = bits (insn1, 4, 8);
8554 int rn = bits (insn1, 0, 3);
8555 if ((op == 0 || op == 0xa) && rn == 0xf)
8556 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8559 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8562 else /* Data processing (modified immeidate) */
8563 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8567 case 3: /* op1 = 3 */
8568 switch (bits (insn1, 9, 10))
8572 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8574 else /* NEON Load/Store and Store single data item */
8575 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8576 "neon elt/struct load/store",
8579 case 1: /* op1 = 3, bits (9, 10) == 1 */
8580 switch (bits (insn1, 7, 8))
8582 case 0: case 1: /* Data processing (register) */
8583 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8586 case 2: /* Multiply and absolute difference */
8587 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8588 "mul/mua/diff", dsc);
8590 case 3: /* Long multiply and divide */
8591 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8596 default: /* Coprocessor instructions */
8597 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8606 internal_error (__FILE__, __LINE__,
8607 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8612 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8613 CORE_ADDR to, struct regcache *regs,
8614 struct displaced_step_closure *dsc)
8616 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8618 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8620 if (debug_displaced)
8621 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8622 "at %.8lx\n", insn1, (unsigned long) from);
8625 dsc->insn_size = thumb_insn_size (insn1);
8626 if (thumb_insn_size (insn1) == 4)
8629 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8630 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8633 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8637 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8638 CORE_ADDR to, struct regcache *regs,
8639 struct displaced_step_closure *dsc)
8642 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8645 /* Most displaced instructions use a 1-instruction scratch space, so set this
8646 here and override below if/when necessary. */
8648 dsc->insn_addr = from;
8649 dsc->scratch_base = to;
8650 dsc->cleanup = NULL;
8651 dsc->wrote_to_pc = 0;
8653 if (!displaced_in_arm_mode (regs))
8654 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8658 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8659 if (debug_displaced)
8660 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8661 "at %.8lx\n", (unsigned long) insn,
8662 (unsigned long) from);
8664 if ((insn & 0xf0000000) == 0xf0000000)
8665 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8666 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8668 case 0x0: case 0x1: case 0x2: case 0x3:
8669 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8672 case 0x4: case 0x5: case 0x6:
8673 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8677 err = arm_decode_media (gdbarch, insn, dsc);
8680 case 0x8: case 0x9: case 0xa: case 0xb:
8681 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8684 case 0xc: case 0xd: case 0xe: case 0xf:
8685 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8690 internal_error (__FILE__, __LINE__,
8691 _("arm_process_displaced_insn: Instruction decode error"));
8694 /* Actually set up the scratch space for a displaced instruction. */
8697 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8698 CORE_ADDR to, struct displaced_step_closure *dsc)
8700 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8701 unsigned int i, len, offset;
8702 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8703 int size = dsc->is_thumb? 2 : 4;
8704 const gdb_byte *bkp_insn;
8707 /* Poke modified instruction(s). */
8708 for (i = 0; i < dsc->numinsns; i++)
8710 if (debug_displaced)
8712 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8714 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8717 fprintf_unfiltered (gdb_stdlog, "%.4x",
8718 (unsigned short)dsc->modinsn[i]);
8720 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8721 (unsigned long) to + offset);
8724 write_memory_unsigned_integer (to + offset, size,
8725 byte_order_for_code,
8730 /* Choose the correct breakpoint instruction. */
8733 bkp_insn = tdep->thumb_breakpoint;
8734 len = tdep->thumb_breakpoint_size;
8738 bkp_insn = tdep->arm_breakpoint;
8739 len = tdep->arm_breakpoint_size;
8742 /* Put breakpoint afterwards. */
8743 write_memory (to + offset, bkp_insn, len);
8745 if (debug_displaced)
8746 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8747 paddress (gdbarch, from), paddress (gdbarch, to));
8750 /* Entry point for copying an instruction into scratch space for displaced
8753 struct displaced_step_closure *
8754 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8755 CORE_ADDR from, CORE_ADDR to,
8756 struct regcache *regs)
8758 struct displaced_step_closure *dsc
8759 = xmalloc (sizeof (struct displaced_step_closure));
8760 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8761 arm_displaced_init_closure (gdbarch, from, to, dsc);
8766 /* Entry point for cleaning things up after a displaced instruction has been
8770 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8771 struct displaced_step_closure *dsc,
8772 CORE_ADDR from, CORE_ADDR to,
8773 struct regcache *regs)
8776 dsc->cleanup (gdbarch, regs, dsc);
8778 if (!dsc->wrote_to_pc)
8779 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8780 dsc->insn_addr + dsc->insn_size);
8784 #include "bfd-in2.h"
8785 #include "libcoff.h"
8788 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8790 struct gdbarch *gdbarch = info->application_data;
8792 if (arm_pc_is_thumb (gdbarch, memaddr))
8794 static asymbol *asym;
8795 static combined_entry_type ce;
8796 static struct coff_symbol_struct csym;
8797 static struct bfd fake_bfd;
8798 static bfd_target fake_target;
8800 if (csym.native == NULL)
8802 /* Create a fake symbol vector containing a Thumb symbol.
8803 This is solely so that the code in print_insn_little_arm()
8804 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8805 the presence of a Thumb symbol and switch to decoding
8806 Thumb instructions. */
8808 fake_target.flavour = bfd_target_coff_flavour;
8809 fake_bfd.xvec = &fake_target;
8810 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8812 csym.symbol.the_bfd = &fake_bfd;
8813 csym.symbol.name = "fake";
8814 asym = (asymbol *) & csym;
8817 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8818 info->symbols = &asym;
8821 info->symbols = NULL;
8823 if (info->endian == BFD_ENDIAN_BIG)
8824 return print_insn_big_arm (memaddr, info);
8826 return print_insn_little_arm (memaddr, info);
8829 /* The following define instruction sequences that will cause ARM
8830 cpu's to take an undefined instruction trap. These are used to
8831 signal a breakpoint to GDB.
8833 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8834 modes. A different instruction is required for each mode. The ARM
8835 cpu's can also be big or little endian. Thus four different
8836 instructions are needed to support all cases.
8838 Note: ARMv4 defines several new instructions that will take the
8839 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8840 not in fact add the new instructions. The new undefined
8841 instructions in ARMv4 are all instructions that had no defined
8842 behaviour in earlier chips. There is no guarantee that they will
8843 raise an exception, but may be treated as NOP's. In practice, it
8844 may only safe to rely on instructions matching:
8846 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8847 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8848 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8850 Even this may only true if the condition predicate is true. The
8851 following use a condition predicate of ALWAYS so it is always TRUE.
8853 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8854 and NetBSD all use a software interrupt rather than an undefined
8855 instruction to force a trap. This can be handled by by the
8856 abi-specific code during establishment of the gdbarch vector. */
8858 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8859 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8860 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8861 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8863 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8864 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8865 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8866 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8868 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8869 the program counter value to determine whether a 16-bit or 32-bit
8870 breakpoint should be used. It returns a pointer to a string of
8871 bytes that encode a breakpoint instruction, stores the length of
8872 the string to *lenptr, and adjusts the program counter (if
8873 necessary) to point to the actual memory location where the
8874 breakpoint should be inserted. */
8876 static const unsigned char *
8877 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8879 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8880 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8882 if (arm_pc_is_thumb (gdbarch, *pcptr))
8884 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8886 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8887 check whether we are replacing a 32-bit instruction. */
8888 if (tdep->thumb2_breakpoint != NULL)
8891 if (target_read_memory (*pcptr, buf, 2) == 0)
8893 unsigned short inst1;
8894 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8895 if (thumb_insn_size (inst1) == 4)
8897 *lenptr = tdep->thumb2_breakpoint_size;
8898 return tdep->thumb2_breakpoint;
8903 *lenptr = tdep->thumb_breakpoint_size;
8904 return tdep->thumb_breakpoint;
8908 *lenptr = tdep->arm_breakpoint_size;
8909 return tdep->arm_breakpoint;
8914 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8917 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8919 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8920 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8921 that this is not confused with a 32-bit ARM breakpoint. */
8925 /* Extract from an array REGBUF containing the (raw) register state a
8926 function return value of type TYPE, and copy that, in virtual
8927 format, into VALBUF. */
8930 arm_extract_return_value (struct type *type, struct regcache *regs,
8933 struct gdbarch *gdbarch = get_regcache_arch (regs);
8934 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8936 if (TYPE_CODE_FLT == TYPE_CODE (type))
8938 switch (gdbarch_tdep (gdbarch)->fp_model)
8942 /* The value is in register F0 in internal format. We need to
8943 extract the raw value and then convert it to the desired
8945 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8947 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8948 convert_from_extended (floatformat_from_type (type), tmpbuf,
8949 valbuf, gdbarch_byte_order (gdbarch));
8953 case ARM_FLOAT_SOFT_FPA:
8954 case ARM_FLOAT_SOFT_VFP:
8955 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8956 not using the VFP ABI code. */
8958 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8959 if (TYPE_LENGTH (type) > 4)
8960 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8961 valbuf + INT_REGISTER_SIZE);
8965 internal_error (__FILE__, __LINE__,
8966 _("arm_extract_return_value: "
8967 "Floating point model not supported"));
8971 else if (TYPE_CODE (type) == TYPE_CODE_INT
8972 || TYPE_CODE (type) == TYPE_CODE_CHAR
8973 || TYPE_CODE (type) == TYPE_CODE_BOOL
8974 || TYPE_CODE (type) == TYPE_CODE_PTR
8975 || TYPE_CODE (type) == TYPE_CODE_REF
8976 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8978 /* If the type is a plain integer, then the access is
8979 straight-forward. Otherwise we have to play around a bit
8981 int len = TYPE_LENGTH (type);
8982 int regno = ARM_A1_REGNUM;
8987 /* By using store_unsigned_integer we avoid having to do
8988 anything special for small big-endian values. */
8989 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8990 store_unsigned_integer (valbuf,
8991 (len > INT_REGISTER_SIZE
8992 ? INT_REGISTER_SIZE : len),
8994 len -= INT_REGISTER_SIZE;
8995 valbuf += INT_REGISTER_SIZE;
9000 /* For a structure or union the behaviour is as if the value had
9001 been stored to word-aligned memory and then loaded into
9002 registers with 32-bit load instruction(s). */
9003 int len = TYPE_LENGTH (type);
9004 int regno = ARM_A1_REGNUM;
9005 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9009 regcache_cooked_read (regs, regno++, tmpbuf);
9010 memcpy (valbuf, tmpbuf,
9011 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9012 len -= INT_REGISTER_SIZE;
9013 valbuf += INT_REGISTER_SIZE;
9019 /* Will a function return an aggregate type in memory or in a
9020 register? Return 0 if an aggregate type can be returned in a
9021 register, 1 if it must be returned in memory. */
9024 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9027 enum type_code code;
9029 CHECK_TYPEDEF (type);
9031 /* In the ARM ABI, "integer" like aggregate types are returned in
9032 registers. For an aggregate type to be integer like, its size
9033 must be less than or equal to INT_REGISTER_SIZE and the
9034 offset of each addressable subfield must be zero. Note that bit
9035 fields are not addressable, and all addressable subfields of
9036 unions always start at offset zero.
9038 This function is based on the behaviour of GCC 2.95.1.
9039 See: gcc/arm.c: arm_return_in_memory() for details.
9041 Note: All versions of GCC before GCC 2.95.2 do not set up the
9042 parameters correctly for a function returning the following
9043 structure: struct { float f;}; This should be returned in memory,
9044 not a register. Richard Earnshaw sent me a patch, but I do not
9045 know of any way to detect if a function like the above has been
9046 compiled with the correct calling convention. */
9048 /* All aggregate types that won't fit in a register must be returned
9050 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9055 /* The AAPCS says all aggregates not larger than a word are returned
9057 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9060 /* The only aggregate types that can be returned in a register are
9061 structs and unions. Arrays must be returned in memory. */
9062 code = TYPE_CODE (type);
9063 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9068 /* Assume all other aggregate types can be returned in a register.
9069 Run a check for structures, unions and arrays. */
9072 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9075 /* Need to check if this struct/union is "integer" like. For
9076 this to be true, its size must be less than or equal to
9077 INT_REGISTER_SIZE and the offset of each addressable
9078 subfield must be zero. Note that bit fields are not
9079 addressable, and unions always start at offset zero. If any
9080 of the subfields is a floating point type, the struct/union
9081 cannot be an integer type. */
9083 /* For each field in the object, check:
9084 1) Is it FP? --> yes, nRc = 1;
9085 2) Is it addressable (bitpos != 0) and
9086 not packed (bitsize == 0)?
9090 for (i = 0; i < TYPE_NFIELDS (type); i++)
9092 enum type_code field_type_code;
9093 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9096 /* Is it a floating point type field? */
9097 if (field_type_code == TYPE_CODE_FLT)
9103 /* If bitpos != 0, then we have to care about it. */
9104 if (TYPE_FIELD_BITPOS (type, i) != 0)
9106 /* Bitfields are not addressable. If the field bitsize is
9107 zero, then the field is not packed. Hence it cannot be
9108 a bitfield or any other packed type. */
9109 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9121 /* Write into appropriate registers a function return value of type
9122 TYPE, given in virtual format. */
9125 arm_store_return_value (struct type *type, struct regcache *regs,
9126 const gdb_byte *valbuf)
9128 struct gdbarch *gdbarch = get_regcache_arch (regs);
9129 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9131 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9133 gdb_byte buf[MAX_REGISTER_SIZE];
9135 switch (gdbarch_tdep (gdbarch)->fp_model)
9139 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9140 gdbarch_byte_order (gdbarch));
9141 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9144 case ARM_FLOAT_SOFT_FPA:
9145 case ARM_FLOAT_SOFT_VFP:
9146 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9147 not using the VFP ABI code. */
9149 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9150 if (TYPE_LENGTH (type) > 4)
9151 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9152 valbuf + INT_REGISTER_SIZE);
9156 internal_error (__FILE__, __LINE__,
9157 _("arm_store_return_value: Floating "
9158 "point model not supported"));
9162 else if (TYPE_CODE (type) == TYPE_CODE_INT
9163 || TYPE_CODE (type) == TYPE_CODE_CHAR
9164 || TYPE_CODE (type) == TYPE_CODE_BOOL
9165 || TYPE_CODE (type) == TYPE_CODE_PTR
9166 || TYPE_CODE (type) == TYPE_CODE_REF
9167 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9169 if (TYPE_LENGTH (type) <= 4)
9171 /* Values of one word or less are zero/sign-extended and
9173 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9174 LONGEST val = unpack_long (type, valbuf);
9176 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9177 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9181 /* Integral values greater than one word are stored in consecutive
9182 registers starting with r0. This will always be a multiple of
9183 the regiser size. */
9184 int len = TYPE_LENGTH (type);
9185 int regno = ARM_A1_REGNUM;
9189 regcache_cooked_write (regs, regno++, valbuf);
9190 len -= INT_REGISTER_SIZE;
9191 valbuf += INT_REGISTER_SIZE;
9197 /* For a structure or union the behaviour is as if the value had
9198 been stored to word-aligned memory and then loaded into
9199 registers with 32-bit load instruction(s). */
9200 int len = TYPE_LENGTH (type);
9201 int regno = ARM_A1_REGNUM;
9202 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9206 memcpy (tmpbuf, valbuf,
9207 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9208 regcache_cooked_write (regs, regno++, tmpbuf);
9209 len -= INT_REGISTER_SIZE;
9210 valbuf += INT_REGISTER_SIZE;
9216 /* Handle function return values. */
9218 static enum return_value_convention
9219 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9220 struct type *valtype, struct regcache *regcache,
9221 gdb_byte *readbuf, const gdb_byte *writebuf)
9223 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9224 struct type *func_type = function ? value_type (function) : NULL;
9225 enum arm_vfp_cprc_base_type vfp_base_type;
9228 if (arm_vfp_abi_for_function (gdbarch, func_type)
9229 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9231 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9232 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9234 for (i = 0; i < vfp_base_count; i++)
9236 if (reg_char == 'q')
9239 arm_neon_quad_write (gdbarch, regcache, i,
9240 writebuf + i * unit_length);
9243 arm_neon_quad_read (gdbarch, regcache, i,
9244 readbuf + i * unit_length);
9251 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9252 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9255 regcache_cooked_write (regcache, regnum,
9256 writebuf + i * unit_length);
9258 regcache_cooked_read (regcache, regnum,
9259 readbuf + i * unit_length);
9262 return RETURN_VALUE_REGISTER_CONVENTION;
9265 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9266 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9267 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9269 if (tdep->struct_return == pcc_struct_return
9270 || arm_return_in_memory (gdbarch, valtype))
9271 return RETURN_VALUE_STRUCT_CONVENTION;
9274 /* AAPCS returns complex types longer than a register in memory. */
9275 if (tdep->arm_abi != ARM_ABI_APCS
9276 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9277 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9278 return RETURN_VALUE_STRUCT_CONVENTION;
9281 arm_store_return_value (valtype, regcache, writebuf);
9284 arm_extract_return_value (valtype, regcache, readbuf);
9286 return RETURN_VALUE_REGISTER_CONVENTION;
9291 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9293 struct gdbarch *gdbarch = get_frame_arch (frame);
9294 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9295 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9297 gdb_byte buf[INT_REGISTER_SIZE];
9299 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9301 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9305 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9309 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9310 return the target PC. Otherwise return 0. */
9313 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9317 CORE_ADDR start_addr;
9319 /* Find the starting address and name of the function containing the PC. */
9320 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9322 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9324 start_addr = arm_skip_bx_reg (frame, pc);
9325 if (start_addr != 0)
9331 /* If PC is in a Thumb call or return stub, return the address of the
9332 target PC, which is in a register. The thunk functions are called
9333 _call_via_xx, where x is the register name. The possible names
9334 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9335 functions, named __ARM_call_via_r[0-7]. */
9336 if (strncmp (name, "_call_via_", 10) == 0
9337 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9339 /* Use the name suffix to determine which register contains the
9341 static char *table[15] =
9342 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9343 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9346 int offset = strlen (name) - 2;
9348 for (regno = 0; regno <= 14; regno++)
9349 if (strcmp (&name[offset], table[regno]) == 0)
9350 return get_frame_register_unsigned (frame, regno);
9353 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9354 non-interworking calls to foo. We could decode the stubs
9355 to find the target but it's easier to use the symbol table. */
9356 namelen = strlen (name);
9357 if (name[0] == '_' && name[1] == '_'
9358 && ((namelen > 2 + strlen ("_from_thumb")
9359 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9360 strlen ("_from_thumb")) == 0)
9361 || (namelen > 2 + strlen ("_from_arm")
9362 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9363 strlen ("_from_arm")) == 0)))
9366 int target_len = namelen - 2;
9367 struct bound_minimal_symbol minsym;
9368 struct objfile *objfile;
9369 struct obj_section *sec;
9371 if (name[namelen - 1] == 'b')
9372 target_len -= strlen ("_from_thumb");
9374 target_len -= strlen ("_from_arm");
9376 target_name = alloca (target_len + 1);
9377 memcpy (target_name, name + 2, target_len);
9378 target_name[target_len] = '\0';
9380 sec = find_pc_section (pc);
9381 objfile = (sec == NULL) ? NULL : sec->objfile;
9382 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9383 if (minsym.minsym != NULL)
9384 return BMSYMBOL_VALUE_ADDRESS (minsym);
9389 return 0; /* not a stub */
9393 set_arm_command (char *args, int from_tty)
9395 printf_unfiltered (_("\
9396 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9397 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9401 show_arm_command (char *args, int from_tty)
9403 cmd_show_list (showarmcmdlist, from_tty, "");
9407 arm_update_current_architecture (void)
9409 struct gdbarch_info info;
9411 /* If the current architecture is not ARM, we have nothing to do. */
9412 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9415 /* Update the architecture. */
9416 gdbarch_info_init (&info);
9418 if (!gdbarch_update_p (info))
9419 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9423 set_fp_model_sfunc (char *args, int from_tty,
9424 struct cmd_list_element *c)
9426 enum arm_float_model fp_model;
9428 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9429 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9431 arm_fp_model = fp_model;
9435 if (fp_model == ARM_FLOAT_LAST)
9436 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9439 arm_update_current_architecture ();
9443 show_fp_model (struct ui_file *file, int from_tty,
9444 struct cmd_list_element *c, const char *value)
9446 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9448 if (arm_fp_model == ARM_FLOAT_AUTO
9449 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9450 fprintf_filtered (file, _("\
9451 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9452 fp_model_strings[tdep->fp_model]);
9454 fprintf_filtered (file, _("\
9455 The current ARM floating point model is \"%s\".\n"),
9456 fp_model_strings[arm_fp_model]);
9460 arm_set_abi (char *args, int from_tty,
9461 struct cmd_list_element *c)
9463 enum arm_abi_kind arm_abi;
9465 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9466 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9468 arm_abi_global = arm_abi;
9472 if (arm_abi == ARM_ABI_LAST)
9473 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9476 arm_update_current_architecture ();
9480 arm_show_abi (struct ui_file *file, int from_tty,
9481 struct cmd_list_element *c, const char *value)
9483 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9485 if (arm_abi_global == ARM_ABI_AUTO
9486 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9487 fprintf_filtered (file, _("\
9488 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9489 arm_abi_strings[tdep->arm_abi]);
9491 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9496 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9497 struct cmd_list_element *c, const char *value)
9499 fprintf_filtered (file,
9500 _("The current execution mode assumed "
9501 "(when symbols are unavailable) is \"%s\".\n"),
9502 arm_fallback_mode_string);
9506 arm_show_force_mode (struct ui_file *file, int from_tty,
9507 struct cmd_list_element *c, const char *value)
9509 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9511 fprintf_filtered (file,
9512 _("The current execution mode assumed "
9513 "(even when symbols are available) is \"%s\".\n"),
9514 arm_force_mode_string);
9517 /* If the user changes the register disassembly style used for info
9518 register and other commands, we have to also switch the style used
9519 in opcodes for disassembly output. This function is run in the "set
9520 arm disassembly" command, and does that. */
9523 set_disassembly_style_sfunc (char *args, int from_tty,
9524 struct cmd_list_element *c)
9526 set_disassembly_style ();
9529 /* Return the ARM register name corresponding to register I. */
9531 arm_register_name (struct gdbarch *gdbarch, int i)
9533 const int num_regs = gdbarch_num_regs (gdbarch);
9535 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9536 && i >= num_regs && i < num_regs + 32)
9538 static const char *const vfp_pseudo_names[] = {
9539 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9540 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9541 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9542 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9545 return vfp_pseudo_names[i - num_regs];
9548 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9549 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9551 static const char *const neon_pseudo_names[] = {
9552 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9553 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9556 return neon_pseudo_names[i - num_regs - 32];
9559 if (i >= ARRAY_SIZE (arm_register_names))
9560 /* These registers are only supported on targets which supply
9561 an XML description. */
9564 return arm_register_names[i];
9568 set_disassembly_style (void)
9572 /* Find the style that the user wants. */
9573 for (current = 0; current < num_disassembly_options; current++)
9574 if (disassembly_style == valid_disassembly_styles[current])
9576 gdb_assert (current < num_disassembly_options);
9578 /* Synchronize the disassembler. */
9579 set_arm_regname_option (current);
9582 /* Test whether the coff symbol specific value corresponds to a Thumb
9586 coff_sym_is_thumb (int val)
9588 return (val == C_THUMBEXT
9589 || val == C_THUMBSTAT
9590 || val == C_THUMBEXTFUNC
9591 || val == C_THUMBSTATFUNC
9592 || val == C_THUMBLABEL);
9595 /* arm_coff_make_msymbol_special()
9596 arm_elf_make_msymbol_special()
9598 These functions test whether the COFF or ELF symbol corresponds to
9599 an address in thumb code, and set a "special" bit in a minimal
9600 symbol to indicate that it does. */
9603 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9605 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9606 == ST_BRANCH_TO_THUMB)
9607 MSYMBOL_SET_SPECIAL (msym);
9611 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9613 if (coff_sym_is_thumb (val))
9614 MSYMBOL_SET_SPECIAL (msym);
9618 arm_objfile_data_free (struct objfile *objfile, void *arg)
9620 struct arm_per_objfile *data = arg;
9623 for (i = 0; i < objfile->obfd->section_count; i++)
9624 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9628 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9631 const char *name = bfd_asymbol_name (sym);
9632 struct arm_per_objfile *data;
9633 VEC(arm_mapping_symbol_s) **map_p;
9634 struct arm_mapping_symbol new_map_sym;
9636 gdb_assert (name[0] == '$');
9637 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9640 data = objfile_data (objfile, arm_objfile_data_key);
9643 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9644 struct arm_per_objfile);
9645 set_objfile_data (objfile, arm_objfile_data_key, data);
9646 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9647 objfile->obfd->section_count,
9648 VEC(arm_mapping_symbol_s) *);
9650 map_p = &data->section_maps[bfd_get_section (sym)->index];
9652 new_map_sym.value = sym->value;
9653 new_map_sym.type = name[1];
9655 /* Assume that most mapping symbols appear in order of increasing
9656 value. If they were randomly distributed, it would be faster to
9657 always push here and then sort at first use. */
9658 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9660 struct arm_mapping_symbol *prev_map_sym;
9662 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9663 if (prev_map_sym->value >= sym->value)
9666 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9667 arm_compare_mapping_symbols);
9668 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9673 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
9677 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9679 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9680 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9682 /* If necessary, set the T bit. */
9685 ULONGEST val, t_bit;
9686 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9687 t_bit = arm_psr_thumb_bit (gdbarch);
9688 if (arm_pc_is_thumb (gdbarch, pc))
9689 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9692 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9697 /* Read the contents of a NEON quad register, by reading from two
9698 double registers. This is used to implement the quad pseudo
9699 registers, and for argument passing in case the quad registers are
9700 missing; vectors are passed in quad registers when using the VFP
9701 ABI, even if a NEON unit is not present. REGNUM is the index of
9702 the quad register, in [0, 15]. */
9704 static enum register_status
9705 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9706 int regnum, gdb_byte *buf)
9709 gdb_byte reg_buf[8];
9710 int offset, double_regnum;
9711 enum register_status status;
9713 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9714 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9717 /* d0 is always the least significant half of q0. */
9718 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9723 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9724 if (status != REG_VALID)
9726 memcpy (buf + offset, reg_buf, 8);
9728 offset = 8 - offset;
9729 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9730 if (status != REG_VALID)
9732 memcpy (buf + offset, reg_buf, 8);
9737 static enum register_status
9738 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9739 int regnum, gdb_byte *buf)
9741 const int num_regs = gdbarch_num_regs (gdbarch);
9743 gdb_byte reg_buf[8];
9744 int offset, double_regnum;
9746 gdb_assert (regnum >= num_regs);
9749 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9750 /* Quad-precision register. */
9751 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9754 enum register_status status;
9756 /* Single-precision register. */
9757 gdb_assert (regnum < 32);
9759 /* s0 is always the least significant half of d0. */
9760 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9761 offset = (regnum & 1) ? 0 : 4;
9763 offset = (regnum & 1) ? 4 : 0;
9765 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9766 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9769 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9770 if (status == REG_VALID)
9771 memcpy (buf, reg_buf + offset, 4);
9776 /* Store the contents of BUF to a NEON quad register, by writing to
9777 two double registers. This is used to implement the quad pseudo
9778 registers, and for argument passing in case the quad registers are
9779 missing; vectors are passed in quad registers when using the VFP
9780 ABI, even if a NEON unit is not present. REGNUM is the index
9781 of the quad register, in [0, 15]. */
9784 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9785 int regnum, const gdb_byte *buf)
9788 int offset, double_regnum;
9790 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9791 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9794 /* d0 is always the least significant half of q0. */
9795 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9800 regcache_raw_write (regcache, double_regnum, buf + offset);
9801 offset = 8 - offset;
9802 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9806 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9807 int regnum, const gdb_byte *buf)
9809 const int num_regs = gdbarch_num_regs (gdbarch);
9811 gdb_byte reg_buf[8];
9812 int offset, double_regnum;
9814 gdb_assert (regnum >= num_regs);
9817 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9818 /* Quad-precision register. */
9819 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9822 /* Single-precision register. */
9823 gdb_assert (regnum < 32);
9825 /* s0 is always the least significant half of d0. */
9826 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9827 offset = (regnum & 1) ? 0 : 4;
9829 offset = (regnum & 1) ? 4 : 0;
9831 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9832 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9835 regcache_raw_read (regcache, double_regnum, reg_buf);
9836 memcpy (reg_buf + offset, buf, 4);
9837 regcache_raw_write (regcache, double_regnum, reg_buf);
9841 static struct value *
9842 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9844 const int *reg_p = baton;
9845 return value_of_register (*reg_p, frame);
9848 static enum gdb_osabi
9849 arm_elf_osabi_sniffer (bfd *abfd)
9851 unsigned int elfosabi;
9852 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9854 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9856 if (elfosabi == ELFOSABI_ARM)
9857 /* GNU tools use this value. Check note sections in this case,
9859 bfd_map_over_sections (abfd,
9860 generic_elf_osabi_sniff_abi_tag_sections,
9863 /* Anything else will be handled by the generic ELF sniffer. */
9868 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9869 struct reggroup *group)
9871 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9872 this, FPS register belongs to save_regroup, restore_reggroup, and
9873 all_reggroup, of course. */
9874 if (regnum == ARM_FPS_REGNUM)
9875 return (group == float_reggroup
9876 || group == save_reggroup
9877 || group == restore_reggroup
9878 || group == all_reggroup);
9880 return default_register_reggroup_p (gdbarch, regnum, group);
9884 /* For backward-compatibility we allow two 'g' packet lengths with
9885 the remote protocol depending on whether FPA registers are
9886 supplied. M-profile targets do not have FPA registers, but some
9887 stubs already exist in the wild which use a 'g' packet which
9888 supplies them albeit with dummy values. The packet format which
9889 includes FPA registers should be considered deprecated for
9890 M-profile targets. */
9893 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9895 if (gdbarch_tdep (gdbarch)->is_m)
9897 /* If we know from the executable this is an M-profile target,
9898 cater for remote targets whose register set layout is the
9899 same as the FPA layout. */
9900 register_remote_g_packet_guess (gdbarch,
9901 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9902 (16 * INT_REGISTER_SIZE)
9903 + (8 * FP_REGISTER_SIZE)
9904 + (2 * INT_REGISTER_SIZE),
9905 tdesc_arm_with_m_fpa_layout);
9907 /* The regular M-profile layout. */
9908 register_remote_g_packet_guess (gdbarch,
9909 /* r0-r12,sp,lr,pc; xpsr */
9910 (16 * INT_REGISTER_SIZE)
9911 + INT_REGISTER_SIZE,
9914 /* M-profile plus M4F VFP. */
9915 register_remote_g_packet_guess (gdbarch,
9916 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9917 (16 * INT_REGISTER_SIZE)
9918 + (16 * VFP_REGISTER_SIZE)
9919 + (2 * INT_REGISTER_SIZE),
9920 tdesc_arm_with_m_vfp_d16);
9923 /* Otherwise we don't have a useful guess. */
9927 /* Initialize the current architecture based on INFO. If possible,
9928 re-use an architecture from ARCHES, which is a list of
9929 architectures already created during this debugging session.
9931 Called e.g. at program startup, when reading a core file, and when
9932 reading a binary file. */
9934 static struct gdbarch *
9935 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9937 struct gdbarch_tdep *tdep;
9938 struct gdbarch *gdbarch;
9939 struct gdbarch_list *best_arch;
9940 enum arm_abi_kind arm_abi = arm_abi_global;
9941 enum arm_float_model fp_model = arm_fp_model;
9942 struct tdesc_arch_data *tdesc_data = NULL;
9944 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9946 int have_fpa_registers = 1;
9947 const struct target_desc *tdesc = info.target_desc;
9949 /* If we have an object to base this architecture on, try to determine
9952 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9954 int ei_osabi, e_flags;
9956 switch (bfd_get_flavour (info.abfd))
9958 case bfd_target_aout_flavour:
9959 /* Assume it's an old APCS-style ABI. */
9960 arm_abi = ARM_ABI_APCS;
9963 case bfd_target_coff_flavour:
9964 /* Assume it's an old APCS-style ABI. */
9966 arm_abi = ARM_ABI_APCS;
9969 case bfd_target_elf_flavour:
9970 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9971 e_flags = elf_elfheader (info.abfd)->e_flags;
9973 if (ei_osabi == ELFOSABI_ARM)
9975 /* GNU tools used to use this value, but do not for EABI
9976 objects. There's nowhere to tag an EABI version
9977 anyway, so assume APCS. */
9978 arm_abi = ARM_ABI_APCS;
9980 else if (ei_osabi == ELFOSABI_NONE)
9982 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9983 int attr_arch, attr_profile;
9987 case EF_ARM_EABI_UNKNOWN:
9988 /* Assume GNU tools. */
9989 arm_abi = ARM_ABI_APCS;
9992 case EF_ARM_EABI_VER4:
9993 case EF_ARM_EABI_VER5:
9994 arm_abi = ARM_ABI_AAPCS;
9995 /* EABI binaries default to VFP float ordering.
9996 They may also contain build attributes that can
9997 be used to identify if the VFP argument-passing
9999 if (fp_model == ARM_FLOAT_AUTO)
10002 switch (bfd_elf_get_obj_attr_int (info.abfd,
10007 /* "The user intended FP parameter/result
10008 passing to conform to AAPCS, base
10010 fp_model = ARM_FLOAT_SOFT_VFP;
10013 /* "The user intended FP parameter/result
10014 passing to conform to AAPCS, VFP
10016 fp_model = ARM_FLOAT_VFP;
10019 /* "The user intended FP parameter/result
10020 passing to conform to tool chain-specific
10021 conventions" - we don't know any such
10022 conventions, so leave it as "auto". */
10025 /* Attribute value not mentioned in the
10026 October 2008 ABI, so leave it as
10031 fp_model = ARM_FLOAT_SOFT_VFP;
10037 /* Leave it as "auto". */
10038 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10043 /* Detect M-profile programs. This only works if the
10044 executable file includes build attributes; GCC does
10045 copy them to the executable, but e.g. RealView does
10047 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10049 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10051 Tag_CPU_arch_profile);
10052 /* GCC specifies the profile for v6-M; RealView only
10053 specifies the profile for architectures starting with
10054 V7 (as opposed to architectures with a tag
10055 numerically greater than TAG_CPU_ARCH_V7). */
10056 if (!tdesc_has_registers (tdesc)
10057 && (attr_arch == TAG_CPU_ARCH_V6_M
10058 || attr_arch == TAG_CPU_ARCH_V6S_M
10059 || attr_profile == 'M'))
10064 if (fp_model == ARM_FLOAT_AUTO)
10066 int e_flags = elf_elfheader (info.abfd)->e_flags;
10068 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10071 /* Leave it as "auto". Strictly speaking this case
10072 means FPA, but almost nobody uses that now, and
10073 many toolchains fail to set the appropriate bits
10074 for the floating-point model they use. */
10076 case EF_ARM_SOFT_FLOAT:
10077 fp_model = ARM_FLOAT_SOFT_FPA;
10079 case EF_ARM_VFP_FLOAT:
10080 fp_model = ARM_FLOAT_VFP;
10082 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10083 fp_model = ARM_FLOAT_SOFT_VFP;
10088 if (e_flags & EF_ARM_BE8)
10089 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10094 /* Leave it as "auto". */
10099 /* Check any target description for validity. */
10100 if (tdesc_has_registers (tdesc))
10102 /* For most registers we require GDB's default names; but also allow
10103 the numeric names for sp / lr / pc, as a convenience. */
10104 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10105 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10106 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10108 const struct tdesc_feature *feature;
10111 feature = tdesc_find_feature (tdesc,
10112 "org.gnu.gdb.arm.core");
10113 if (feature == NULL)
10115 feature = tdesc_find_feature (tdesc,
10116 "org.gnu.gdb.arm.m-profile");
10117 if (feature == NULL)
10123 tdesc_data = tdesc_data_alloc ();
10126 for (i = 0; i < ARM_SP_REGNUM; i++)
10127 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10128 arm_register_names[i]);
10129 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10132 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10135 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10139 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10140 ARM_PS_REGNUM, "xpsr");
10142 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10143 ARM_PS_REGNUM, "cpsr");
10147 tdesc_data_cleanup (tdesc_data);
10151 feature = tdesc_find_feature (tdesc,
10152 "org.gnu.gdb.arm.fpa");
10153 if (feature != NULL)
10156 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10157 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10158 arm_register_names[i]);
10161 tdesc_data_cleanup (tdesc_data);
10166 have_fpa_registers = 0;
10168 feature = tdesc_find_feature (tdesc,
10169 "org.gnu.gdb.xscale.iwmmxt");
10170 if (feature != NULL)
10172 static const char *const iwmmxt_names[] = {
10173 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10174 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10175 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10176 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10180 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10182 &= tdesc_numbered_register (feature, tdesc_data, i,
10183 iwmmxt_names[i - ARM_WR0_REGNUM]);
10185 /* Check for the control registers, but do not fail if they
10187 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10188 tdesc_numbered_register (feature, tdesc_data, i,
10189 iwmmxt_names[i - ARM_WR0_REGNUM]);
10191 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10193 &= tdesc_numbered_register (feature, tdesc_data, i,
10194 iwmmxt_names[i - ARM_WR0_REGNUM]);
10198 tdesc_data_cleanup (tdesc_data);
10203 /* If we have a VFP unit, check whether the single precision registers
10204 are present. If not, then we will synthesize them as pseudo
10206 feature = tdesc_find_feature (tdesc,
10207 "org.gnu.gdb.arm.vfp");
10208 if (feature != NULL)
10210 static const char *const vfp_double_names[] = {
10211 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10212 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10213 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10214 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10217 /* Require the double precision registers. There must be either
10220 for (i = 0; i < 32; i++)
10222 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10224 vfp_double_names[i]);
10228 if (!valid_p && i == 16)
10231 /* Also require FPSCR. */
10232 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10233 ARM_FPSCR_REGNUM, "fpscr");
10236 tdesc_data_cleanup (tdesc_data);
10240 if (tdesc_unnumbered_register (feature, "s0") == 0)
10241 have_vfp_pseudos = 1;
10243 have_vfp_registers = 1;
10245 /* If we have VFP, also check for NEON. The architecture allows
10246 NEON without VFP (integer vector operations only), but GDB
10247 does not support that. */
10248 feature = tdesc_find_feature (tdesc,
10249 "org.gnu.gdb.arm.neon");
10250 if (feature != NULL)
10252 /* NEON requires 32 double-precision registers. */
10255 tdesc_data_cleanup (tdesc_data);
10259 /* If there are quad registers defined by the stub, use
10260 their type; otherwise (normally) provide them with
10261 the default type. */
10262 if (tdesc_unnumbered_register (feature, "q0") == 0)
10263 have_neon_pseudos = 1;
10270 /* If there is already a candidate, use it. */
10271 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10273 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10275 if (arm_abi != ARM_ABI_AUTO
10276 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10279 if (fp_model != ARM_FLOAT_AUTO
10280 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10283 /* There are various other properties in tdep that we do not
10284 need to check here: those derived from a target description,
10285 since gdbarches with a different target description are
10286 automatically disqualified. */
10288 /* Do check is_m, though, since it might come from the binary. */
10289 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10292 /* Found a match. */
10296 if (best_arch != NULL)
10298 if (tdesc_data != NULL)
10299 tdesc_data_cleanup (tdesc_data);
10300 return best_arch->gdbarch;
10303 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10304 gdbarch = gdbarch_alloc (&info, tdep);
10306 /* Record additional information about the architecture we are defining.
10307 These are gdbarch discriminators, like the OSABI. */
10308 tdep->arm_abi = arm_abi;
10309 tdep->fp_model = fp_model;
10311 tdep->have_fpa_registers = have_fpa_registers;
10312 tdep->have_vfp_registers = have_vfp_registers;
10313 tdep->have_vfp_pseudos = have_vfp_pseudos;
10314 tdep->have_neon_pseudos = have_neon_pseudos;
10315 tdep->have_neon = have_neon;
10317 arm_register_g_packet_guesses (gdbarch);
10320 switch (info.byte_order_for_code)
10322 case BFD_ENDIAN_BIG:
10323 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10324 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10325 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10326 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10330 case BFD_ENDIAN_LITTLE:
10331 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10332 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10333 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10334 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10339 internal_error (__FILE__, __LINE__,
10340 _("arm_gdbarch_init: bad byte order for float format"));
10343 /* On ARM targets char defaults to unsigned. */
10344 set_gdbarch_char_signed (gdbarch, 0);
10346 /* Note: for displaced stepping, this includes the breakpoint, and one word
10347 of additional scratch space. This setting isn't used for anything beside
10348 displaced stepping at present. */
10349 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10351 /* This should be low enough for everything. */
10352 tdep->lowest_pc = 0x20;
10353 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10355 /* The default, for both APCS and AAPCS, is to return small
10356 structures in registers. */
10357 tdep->struct_return = reg_struct_return;
10359 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10360 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10362 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10364 /* Frame handling. */
10365 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10366 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10367 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10369 frame_base_set_default (gdbarch, &arm_normal_base);
10371 /* Address manipulation. */
10372 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10374 /* Advance PC across function entry code. */
10375 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10377 /* Detect whether PC is in function epilogue. */
10378 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10380 /* Skip trampolines. */
10381 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10383 /* The stack grows downward. */
10384 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10386 /* Breakpoint manipulation. */
10387 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10388 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10389 arm_remote_breakpoint_from_pc);
10391 /* Information about registers, etc. */
10392 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10393 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10394 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10395 set_gdbarch_register_type (gdbarch, arm_register_type);
10396 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10398 /* This "info float" is FPA-specific. Use the generic version if we
10399 do not have FPA. */
10400 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10401 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10403 /* Internal <-> external register number maps. */
10404 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10405 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10407 set_gdbarch_register_name (gdbarch, arm_register_name);
10409 /* Returning results. */
10410 set_gdbarch_return_value (gdbarch, arm_return_value);
10413 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10415 /* Minsymbol frobbing. */
10416 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10417 set_gdbarch_coff_make_msymbol_special (gdbarch,
10418 arm_coff_make_msymbol_special);
10419 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10421 /* Thumb-2 IT block support. */
10422 set_gdbarch_adjust_breakpoint_address (gdbarch,
10423 arm_adjust_breakpoint_address);
10425 /* Virtual tables. */
10426 set_gdbarch_vbit_in_delta (gdbarch, 1);
10428 /* Hook in the ABI-specific overrides, if they have been registered. */
10429 gdbarch_init_osabi (info, gdbarch);
10431 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10433 /* Add some default predicates. */
10435 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10436 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10437 dwarf2_append_unwinders (gdbarch);
10438 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10439 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10441 /* Now we have tuned the configuration, set a few final things,
10442 based on what the OS ABI has told us. */
10444 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10445 binaries are always marked. */
10446 if (tdep->arm_abi == ARM_ABI_AUTO)
10447 tdep->arm_abi = ARM_ABI_APCS;
10449 /* Watchpoints are not steppable. */
10450 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10452 /* We used to default to FPA for generic ARM, but almost nobody
10453 uses that now, and we now provide a way for the user to force
10454 the model. So default to the most useful variant. */
10455 if (tdep->fp_model == ARM_FLOAT_AUTO)
10456 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10458 if (tdep->jb_pc >= 0)
10459 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10461 /* Floating point sizes and format. */
10462 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10463 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10465 set_gdbarch_double_format
10466 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10467 set_gdbarch_long_double_format
10468 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10472 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10473 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10476 if (have_vfp_pseudos)
10478 /* NOTE: These are the only pseudo registers used by
10479 the ARM target at the moment. If more are added, a
10480 little more care in numbering will be needed. */
10482 int num_pseudos = 32;
10483 if (have_neon_pseudos)
10485 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10486 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10487 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10492 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10494 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10496 /* Override tdesc_register_type to adjust the types of VFP
10497 registers for NEON. */
10498 set_gdbarch_register_type (gdbarch, arm_register_type);
10501 /* Add standard register aliases. We add aliases even for those
10502 nanes which are used by the current architecture - it's simpler,
10503 and does no harm, since nothing ever lists user registers. */
10504 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10505 user_reg_add (gdbarch, arm_register_aliases[i].name,
10506 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10512 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10514 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10519 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10520 (unsigned long) tdep->lowest_pc);
10523 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10526 _initialize_arm_tdep (void)
10528 struct ui_file *stb;
10530 struct cmd_list_element *new_set, *new_show;
10531 const char *setname;
10532 const char *setdesc;
10533 const char *const *regnames;
10535 static char *helptext;
10536 char regdesc[1024], *rdptr = regdesc;
10537 size_t rest = sizeof (regdesc);
10539 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10541 arm_objfile_data_key
10542 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10544 /* Add ourselves to objfile event chain. */
10545 observer_attach_new_objfile (arm_exidx_new_objfile);
10547 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10549 /* Register an ELF OS ABI sniffer for ARM binaries. */
10550 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10551 bfd_target_elf_flavour,
10552 arm_elf_osabi_sniffer);
10554 /* Initialize the standard target descriptions. */
10555 initialize_tdesc_arm_with_m ();
10556 initialize_tdesc_arm_with_m_fpa_layout ();
10557 initialize_tdesc_arm_with_m_vfp_d16 ();
10558 initialize_tdesc_arm_with_iwmmxt ();
10559 initialize_tdesc_arm_with_vfpv2 ();
10560 initialize_tdesc_arm_with_vfpv3 ();
10561 initialize_tdesc_arm_with_neon ();
10563 /* Get the number of possible sets of register names defined in opcodes. */
10564 num_disassembly_options = get_arm_regname_num_options ();
10566 /* Add root prefix command for all "set arm"/"show arm" commands. */
10567 add_prefix_cmd ("arm", no_class, set_arm_command,
10568 _("Various ARM-specific commands."),
10569 &setarmcmdlist, "set arm ", 0, &setlist);
10571 add_prefix_cmd ("arm", no_class, show_arm_command,
10572 _("Various ARM-specific commands."),
10573 &showarmcmdlist, "show arm ", 0, &showlist);
10575 /* Sync the opcode insn printer with our register viewer. */
10576 parse_arm_disassembler_option ("reg-names-std");
10578 /* Initialize the array that will be passed to
10579 add_setshow_enum_cmd(). */
10580 valid_disassembly_styles
10581 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10582 for (i = 0; i < num_disassembly_options; i++)
10584 numregs = get_arm_regnames (i, &setname, &setdesc, ®names);
10585 valid_disassembly_styles[i] = setname;
10586 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10589 /* When we find the default names, tell the disassembler to use
10591 if (!strcmp (setname, "std"))
10593 disassembly_style = setname;
10594 set_arm_regname_option (i);
10597 /* Mark the end of valid options. */
10598 valid_disassembly_styles[num_disassembly_options] = NULL;
10600 /* Create the help text. */
10601 stb = mem_fileopen ();
10602 fprintf_unfiltered (stb, "%s%s%s",
10603 _("The valid values are:\n"),
10605 _("The default is \"std\"."));
10606 helptext = ui_file_xstrdup (stb, NULL);
10607 ui_file_delete (stb);
10609 add_setshow_enum_cmd("disassembler", no_class,
10610 valid_disassembly_styles, &disassembly_style,
10611 _("Set the disassembly style."),
10612 _("Show the disassembly style."),
10614 set_disassembly_style_sfunc,
10615 NULL, /* FIXME: i18n: The disassembly style is
10617 &setarmcmdlist, &showarmcmdlist);
10619 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10620 _("Set usage of ARM 32-bit mode."),
10621 _("Show usage of ARM 32-bit mode."),
10622 _("When off, a 26-bit PC will be used."),
10624 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10626 &setarmcmdlist, &showarmcmdlist);
10628 /* Add a command to allow the user to force the FPU model. */
10629 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
10630 _("Set the floating point type."),
10631 _("Show the floating point type."),
10632 _("auto - Determine the FP typefrom the OS-ABI.\n\
10633 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10634 fpa - FPA co-processor (GCC compiled).\n\
10635 softvfp - Software FP with pure-endian doubles.\n\
10636 vfp - VFP co-processor."),
10637 set_fp_model_sfunc, show_fp_model,
10638 &setarmcmdlist, &showarmcmdlist);
10640 /* Add a command to allow the user to force the ABI. */
10641 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10643 _("Show the ABI."),
10644 NULL, arm_set_abi, arm_show_abi,
10645 &setarmcmdlist, &showarmcmdlist);
10647 /* Add two commands to allow the user to force the assumed
10649 add_setshow_enum_cmd ("fallback-mode", class_support,
10650 arm_mode_strings, &arm_fallback_mode_string,
10651 _("Set the mode assumed when symbols are unavailable."),
10652 _("Show the mode assumed when symbols are unavailable."),
10653 NULL, NULL, arm_show_fallback_mode,
10654 &setarmcmdlist, &showarmcmdlist);
10655 add_setshow_enum_cmd ("force-mode", class_support,
10656 arm_mode_strings, &arm_force_mode_string,
10657 _("Set the mode assumed even when symbols are available."),
10658 _("Show the mode assumed even when symbols are available."),
10659 NULL, NULL, arm_show_force_mode,
10660 &setarmcmdlist, &showarmcmdlist);
10662 /* Debugging flag. */
10663 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10664 _("Set ARM debugging."),
10665 _("Show ARM debugging."),
10666 _("When on, arm-specific debugging is enabled."),
10668 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10669 &setdebuglist, &showdebuglist);
10672 /* ARM-reversible process record data structures. */
10674 #define ARM_INSN_SIZE_BYTES 4
10675 #define THUMB_INSN_SIZE_BYTES 2
10676 #define THUMB2_INSN_SIZE_BYTES 4
10679 #define INSN_S_L_BIT_NUM 20
10681 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10684 unsigned int reg_len = LENGTH; \
10687 REGS = XNEWVEC (uint32_t, reg_len); \
10688 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10693 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10696 unsigned int mem_len = LENGTH; \
10699 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10700 memcpy(&MEMS->len, &RECORD_BUF[0], \
10701 sizeof(struct arm_mem_r) * LENGTH); \
10706 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
10707 #define INSN_RECORDED(ARM_RECORD) \
10708 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10710 /* ARM memory record structure. */
10713 uint32_t len; /* Record length. */
10714 uint32_t addr; /* Memory address. */
10717 /* ARM instruction record contains opcode of current insn
10718 and execution state (before entry to decode_insn()),
10719 contains list of to-be-modified registers and
10720 memory blocks (on return from decode_insn()). */
10722 typedef struct insn_decode_record_t
10724 struct gdbarch *gdbarch;
10725 struct regcache *regcache;
10726 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10727 uint32_t arm_insn; /* Should accommodate thumb. */
10728 uint32_t cond; /* Condition code. */
10729 uint32_t opcode; /* Insn opcode. */
10730 uint32_t decode; /* Insn decode bits. */
10731 uint32_t mem_rec_count; /* No of mem records. */
10732 uint32_t reg_rec_count; /* No of reg records. */
10733 uint32_t *arm_regs; /* Registers to be saved for this record. */
10734 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10735 } insn_decode_record;
10738 /* Checks ARM SBZ and SBO mandatory fields. */
10741 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10743 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10762 enum arm_record_result
10764 ARM_RECORD_SUCCESS = 0,
10765 ARM_RECORD_FAILURE = 1
10772 } arm_record_strx_t;
10783 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10784 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10787 struct regcache *reg_cache = arm_insn_r->regcache;
10788 ULONGEST u_regval[2]= {0};
10790 uint32_t reg_src1 = 0, reg_src2 = 0;
10791 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10792 uint32_t opcode1 = 0;
10794 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10795 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10796 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10799 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10801 /* 1) Handle misc store, immediate offset. */
10802 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10803 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10804 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10805 regcache_raw_read_unsigned (reg_cache, reg_src1,
10807 if (ARM_PC_REGNUM == reg_src1)
10809 /* If R15 was used as Rn, hence current PC+8. */
10810 u_regval[0] = u_regval[0] + 8;
10812 offset_8 = (immed_high << 4) | immed_low;
10813 /* Calculate target store address. */
10814 if (14 == arm_insn_r->opcode)
10816 tgt_mem_addr = u_regval[0] + offset_8;
10820 tgt_mem_addr = u_regval[0] - offset_8;
10822 if (ARM_RECORD_STRH == str_type)
10824 record_buf_mem[0] = 2;
10825 record_buf_mem[1] = tgt_mem_addr;
10826 arm_insn_r->mem_rec_count = 1;
10828 else if (ARM_RECORD_STRD == str_type)
10830 record_buf_mem[0] = 4;
10831 record_buf_mem[1] = tgt_mem_addr;
10832 record_buf_mem[2] = 4;
10833 record_buf_mem[3] = tgt_mem_addr + 4;
10834 arm_insn_r->mem_rec_count = 2;
10837 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10839 /* 2) Store, register offset. */
10841 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10843 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10844 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10845 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10846 if (15 == reg_src2)
10848 /* If R15 was used as Rn, hence current PC+8. */
10849 u_regval[0] = u_regval[0] + 8;
10851 /* Calculate target store address, Rn +/- Rm, register offset. */
10852 if (12 == arm_insn_r->opcode)
10854 tgt_mem_addr = u_regval[0] + u_regval[1];
10858 tgt_mem_addr = u_regval[1] - u_regval[0];
10860 if (ARM_RECORD_STRH == str_type)
10862 record_buf_mem[0] = 2;
10863 record_buf_mem[1] = tgt_mem_addr;
10864 arm_insn_r->mem_rec_count = 1;
10866 else if (ARM_RECORD_STRD == str_type)
10868 record_buf_mem[0] = 4;
10869 record_buf_mem[1] = tgt_mem_addr;
10870 record_buf_mem[2] = 4;
10871 record_buf_mem[3] = tgt_mem_addr + 4;
10872 arm_insn_r->mem_rec_count = 2;
10875 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10876 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10878 /* 3) Store, immediate pre-indexed. */
10879 /* 5) Store, immediate post-indexed. */
10880 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10881 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10882 offset_8 = (immed_high << 4) | immed_low;
10883 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10884 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10885 /* Calculate target store address, Rn +/- Rm, register offset. */
10886 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10888 tgt_mem_addr = u_regval[0] + offset_8;
10892 tgt_mem_addr = u_regval[0] - offset_8;
10894 if (ARM_RECORD_STRH == str_type)
10896 record_buf_mem[0] = 2;
10897 record_buf_mem[1] = tgt_mem_addr;
10898 arm_insn_r->mem_rec_count = 1;
10900 else if (ARM_RECORD_STRD == str_type)
10902 record_buf_mem[0] = 4;
10903 record_buf_mem[1] = tgt_mem_addr;
10904 record_buf_mem[2] = 4;
10905 record_buf_mem[3] = tgt_mem_addr + 4;
10906 arm_insn_r->mem_rec_count = 2;
10908 /* Record Rn also as it changes. */
10909 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10910 arm_insn_r->reg_rec_count = 1;
10912 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10913 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10915 /* 4) Store, register pre-indexed. */
10916 /* 6) Store, register post -indexed. */
10917 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10918 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10919 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10920 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10921 /* Calculate target store address, Rn +/- Rm, register offset. */
10922 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10924 tgt_mem_addr = u_regval[0] + u_regval[1];
10928 tgt_mem_addr = u_regval[1] - u_regval[0];
10930 if (ARM_RECORD_STRH == str_type)
10932 record_buf_mem[0] = 2;
10933 record_buf_mem[1] = tgt_mem_addr;
10934 arm_insn_r->mem_rec_count = 1;
10936 else if (ARM_RECORD_STRD == str_type)
10938 record_buf_mem[0] = 4;
10939 record_buf_mem[1] = tgt_mem_addr;
10940 record_buf_mem[2] = 4;
10941 record_buf_mem[3] = tgt_mem_addr + 4;
10942 arm_insn_r->mem_rec_count = 2;
10944 /* Record Rn also as it changes. */
10945 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10946 arm_insn_r->reg_rec_count = 1;
10951 /* Handling ARM extension space insns. */
10954 arm_record_extension_space (insn_decode_record *arm_insn_r)
10956 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10957 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10958 uint32_t record_buf[8], record_buf_mem[8];
10959 uint32_t reg_src1 = 0;
10960 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10961 struct regcache *reg_cache = arm_insn_r->regcache;
10962 ULONGEST u_regval = 0;
10964 gdb_assert (!INSN_RECORDED(arm_insn_r));
10965 /* Handle unconditional insn extension space. */
10967 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10968 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10969 if (arm_insn_r->cond)
10971 /* PLD has no affect on architectural state, it just affects
10973 if (5 == ((opcode1 & 0xE0) >> 5))
10976 record_buf[0] = ARM_PS_REGNUM;
10977 record_buf[1] = ARM_LR_REGNUM;
10978 arm_insn_r->reg_rec_count = 2;
10980 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10984 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10985 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10988 /* Undefined instruction on ARM V5; need to handle if later
10989 versions define it. */
10992 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10993 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10994 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10996 /* Handle arithmetic insn extension space. */
10997 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10998 && !INSN_RECORDED(arm_insn_r))
11000 /* Handle MLA(S) and MUL(S). */
11001 if (0 <= insn_op1 && 3 >= insn_op1)
11003 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11004 record_buf[1] = ARM_PS_REGNUM;
11005 arm_insn_r->reg_rec_count = 2;
11007 else if (4 <= insn_op1 && 15 >= insn_op1)
11009 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11010 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11011 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11012 record_buf[2] = ARM_PS_REGNUM;
11013 arm_insn_r->reg_rec_count = 3;
11017 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11018 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11019 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11021 /* Handle control insn extension space. */
11023 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11024 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11026 if (!bit (arm_insn_r->arm_insn,25))
11028 if (!bits (arm_insn_r->arm_insn, 4, 7))
11030 if ((0 == insn_op1) || (2 == insn_op1))
11033 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11034 arm_insn_r->reg_rec_count = 1;
11036 else if (1 == insn_op1)
11038 /* CSPR is going to be changed. */
11039 record_buf[0] = ARM_PS_REGNUM;
11040 arm_insn_r->reg_rec_count = 1;
11042 else if (3 == insn_op1)
11044 /* SPSR is going to be changed. */
11045 /* We need to get SPSR value, which is yet to be done. */
11046 printf_unfiltered (_("Process record does not support "
11047 "instruction 0x%0x at address %s.\n"),
11048 arm_insn_r->arm_insn,
11049 paddress (arm_insn_r->gdbarch,
11050 arm_insn_r->this_addr));
11054 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11059 record_buf[0] = ARM_PS_REGNUM;
11060 arm_insn_r->reg_rec_count = 1;
11062 else if (3 == insn_op1)
11065 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11066 arm_insn_r->reg_rec_count = 1;
11069 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11072 record_buf[0] = ARM_PS_REGNUM;
11073 record_buf[1] = ARM_LR_REGNUM;
11074 arm_insn_r->reg_rec_count = 2;
11076 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11078 /* QADD, QSUB, QDADD, QDSUB */
11079 record_buf[0] = ARM_PS_REGNUM;
11080 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11081 arm_insn_r->reg_rec_count = 2;
11083 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11086 record_buf[0] = ARM_PS_REGNUM;
11087 record_buf[1] = ARM_LR_REGNUM;
11088 arm_insn_r->reg_rec_count = 2;
11090 /* Save SPSR also;how? */
11091 printf_unfiltered (_("Process record does not support "
11092 "instruction 0x%0x at address %s.\n"),
11093 arm_insn_r->arm_insn,
11094 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11097 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11098 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11099 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11100 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11103 if (0 == insn_op1 || 1 == insn_op1)
11105 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11106 /* We dont do optimization for SMULW<y> where we
11108 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11109 record_buf[1] = ARM_PS_REGNUM;
11110 arm_insn_r->reg_rec_count = 2;
11112 else if (2 == insn_op1)
11115 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11116 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11117 arm_insn_r->reg_rec_count = 2;
11119 else if (3 == insn_op1)
11122 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11123 arm_insn_r->reg_rec_count = 1;
11129 /* MSR : immediate form. */
11132 /* CSPR is going to be changed. */
11133 record_buf[0] = ARM_PS_REGNUM;
11134 arm_insn_r->reg_rec_count = 1;
11136 else if (3 == insn_op1)
11138 /* SPSR is going to be changed. */
11139 /* we need to get SPSR value, which is yet to be done */
11140 printf_unfiltered (_("Process record does not support "
11141 "instruction 0x%0x at address %s.\n"),
11142 arm_insn_r->arm_insn,
11143 paddress (arm_insn_r->gdbarch,
11144 arm_insn_r->this_addr));
11150 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11151 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11152 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11154 /* Handle load/store insn extension space. */
11156 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11157 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11158 && !INSN_RECORDED(arm_insn_r))
11163 /* These insn, changes register and memory as well. */
11164 /* SWP or SWPB insn. */
11165 /* Get memory address given by Rn. */
11166 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11167 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11168 /* SWP insn ?, swaps word. */
11169 if (8 == arm_insn_r->opcode)
11171 record_buf_mem[0] = 4;
11175 /* SWPB insn, swaps only byte. */
11176 record_buf_mem[0] = 1;
11178 record_buf_mem[1] = u_regval;
11179 arm_insn_r->mem_rec_count = 1;
11180 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11181 arm_insn_r->reg_rec_count = 1;
11183 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11186 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11189 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11192 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11193 record_buf[1] = record_buf[0] + 1;
11194 arm_insn_r->reg_rec_count = 2;
11196 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11199 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11202 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11204 /* LDRH, LDRSB, LDRSH. */
11205 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11206 arm_insn_r->reg_rec_count = 1;
11211 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11212 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11213 && !INSN_RECORDED(arm_insn_r))
11216 /* Handle coprocessor insn extension space. */
11219 /* To be done for ARMv5 and later; as of now we return -1. */
11221 printf_unfiltered (_("Process record does not support instruction x%0x "
11222 "at address %s.\n"),arm_insn_r->arm_insn,
11223 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11226 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11227 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11232 /* Handling opcode 000 insns. */
11235 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11237 struct regcache *reg_cache = arm_insn_r->regcache;
11238 uint32_t record_buf[8], record_buf_mem[8];
11239 ULONGEST u_regval[2] = {0};
11241 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11242 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11243 uint32_t opcode1 = 0;
11245 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11246 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11247 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11249 /* Data processing insn /multiply insn. */
11250 if (9 == arm_insn_r->decode
11251 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11252 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11254 /* Handle multiply instructions. */
11255 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11256 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11258 /* Handle MLA and MUL. */
11259 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11260 record_buf[1] = ARM_PS_REGNUM;
11261 arm_insn_r->reg_rec_count = 2;
11263 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11265 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11266 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11267 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11268 record_buf[2] = ARM_PS_REGNUM;
11269 arm_insn_r->reg_rec_count = 3;
11272 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11273 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11275 /* Handle misc load insns, as 20th bit (L = 1). */
11276 /* LDR insn has a capability to do branching, if
11277 MOV LR, PC is precceded by LDR insn having Rn as R15
11278 in that case, it emulates branch and link insn, and hence we
11279 need to save CSPR and PC as well. I am not sure this is right
11280 place; as opcode = 010 LDR insn make this happen, if R15 was
11282 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11283 if (15 != reg_dest)
11285 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11286 arm_insn_r->reg_rec_count = 1;
11290 record_buf[0] = reg_dest;
11291 record_buf[1] = ARM_PS_REGNUM;
11292 arm_insn_r->reg_rec_count = 2;
11295 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11296 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11297 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11298 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11300 /* Handle MSR insn. */
11301 if (9 == arm_insn_r->opcode)
11303 /* CSPR is going to be changed. */
11304 record_buf[0] = ARM_PS_REGNUM;
11305 arm_insn_r->reg_rec_count = 1;
11309 /* SPSR is going to be changed. */
11310 /* How to read SPSR value? */
11311 printf_unfiltered (_("Process record does not support instruction "
11312 "0x%0x at address %s.\n"),
11313 arm_insn_r->arm_insn,
11314 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11318 else if (9 == arm_insn_r->decode
11319 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11320 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11322 /* Handling SWP, SWPB. */
11323 /* These insn, changes register and memory as well. */
11324 /* SWP or SWPB insn. */
11326 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11327 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11328 /* SWP insn ?, swaps word. */
11329 if (8 == arm_insn_r->opcode)
11331 record_buf_mem[0] = 4;
11335 /* SWPB insn, swaps only byte. */
11336 record_buf_mem[0] = 1;
11338 record_buf_mem[1] = u_regval[0];
11339 arm_insn_r->mem_rec_count = 1;
11340 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11341 arm_insn_r->reg_rec_count = 1;
11343 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11344 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11346 /* Handle BLX, branch and link/exchange. */
11347 if (9 == arm_insn_r->opcode)
11349 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11350 and R14 stores the return address. */
11351 record_buf[0] = ARM_PS_REGNUM;
11352 record_buf[1] = ARM_LR_REGNUM;
11353 arm_insn_r->reg_rec_count = 2;
11356 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11358 /* Handle enhanced software breakpoint insn, BKPT. */
11359 /* CPSR is changed to be executed in ARM state, disabling normal
11360 interrupts, entering abort mode. */
11361 /* According to high vector configuration PC is set. */
11362 /* user hit breakpoint and type reverse, in
11363 that case, we need to go back with previous CPSR and
11364 Program Counter. */
11365 record_buf[0] = ARM_PS_REGNUM;
11366 record_buf[1] = ARM_LR_REGNUM;
11367 arm_insn_r->reg_rec_count = 2;
11369 /* Save SPSR also; how? */
11370 printf_unfiltered (_("Process record does not support instruction "
11371 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11372 paddress (arm_insn_r->gdbarch,
11373 arm_insn_r->this_addr));
11376 else if (11 == arm_insn_r->decode
11377 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11379 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11381 /* Handle str(x) insn */
11382 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11385 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11386 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11388 /* Handle BX, branch and link/exchange. */
11389 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11390 record_buf[0] = ARM_PS_REGNUM;
11391 arm_insn_r->reg_rec_count = 1;
11393 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11394 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11395 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11397 /* Count leading zeros: CLZ. */
11398 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11399 arm_insn_r->reg_rec_count = 1;
11401 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11402 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11403 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11404 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11407 /* Handle MRS insn. */
11408 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11409 arm_insn_r->reg_rec_count = 1;
11411 else if (arm_insn_r->opcode <= 15)
11413 /* Normal data processing insns. */
11414 /* Out of 11 shifter operands mode, all the insn modifies destination
11415 register, which is specified by 13-16 decode. */
11416 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11417 record_buf[1] = ARM_PS_REGNUM;
11418 arm_insn_r->reg_rec_count = 2;
11425 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11426 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11430 /* Handling opcode 001 insns. */
11433 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11435 uint32_t record_buf[8], record_buf_mem[8];
11437 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11438 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11440 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11441 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11442 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11445 /* Handle MSR insn. */
11446 if (9 == arm_insn_r->opcode)
11448 /* CSPR is going to be changed. */
11449 record_buf[0] = ARM_PS_REGNUM;
11450 arm_insn_r->reg_rec_count = 1;
11454 /* SPSR is going to be changed. */
11457 else if (arm_insn_r->opcode <= 15)
11459 /* Normal data processing insns. */
11460 /* Out of 11 shifter operands mode, all the insn modifies destination
11461 register, which is specified by 13-16 decode. */
11462 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11463 record_buf[1] = ARM_PS_REGNUM;
11464 arm_insn_r->reg_rec_count = 2;
11471 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11472 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11476 /* Handling opcode 010 insns. */
11479 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11481 struct regcache *reg_cache = arm_insn_r->regcache;
11483 uint32_t reg_src1 = 0 , reg_dest = 0;
11484 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11485 uint32_t record_buf[8], record_buf_mem[8];
11487 ULONGEST u_regval = 0;
11489 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11490 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11492 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11494 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11495 /* LDR insn has a capability to do branching, if
11496 MOV LR, PC is precedded by LDR insn having Rn as R15
11497 in that case, it emulates branch and link insn, and hence we
11498 need to save CSPR and PC as well. */
11499 if (ARM_PC_REGNUM != reg_dest)
11501 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11502 arm_insn_r->reg_rec_count = 1;
11506 record_buf[0] = reg_dest;
11507 record_buf[1] = ARM_PS_REGNUM;
11508 arm_insn_r->reg_rec_count = 2;
11513 /* Store, immediate offset, immediate pre-indexed,
11514 immediate post-indexed. */
11515 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11516 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11517 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11519 if (bit (arm_insn_r->arm_insn, 23))
11521 tgt_mem_addr = u_regval + offset_12;
11525 tgt_mem_addr = u_regval - offset_12;
11528 switch (arm_insn_r->opcode)
11542 record_buf_mem[0] = 4;
11557 record_buf_mem[0] = 1;
11561 gdb_assert_not_reached ("no decoding pattern found");
11564 record_buf_mem[1] = tgt_mem_addr;
11565 arm_insn_r->mem_rec_count = 1;
11567 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11568 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11569 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11570 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11571 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11572 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11575 /* We are handling pre-indexed mode; post-indexed mode;
11576 where Rn is going to be changed. */
11577 record_buf[0] = reg_src1;
11578 arm_insn_r->reg_rec_count = 1;
11582 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11583 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11587 /* Handling opcode 011 insns. */
11590 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11592 struct regcache *reg_cache = arm_insn_r->regcache;
11594 uint32_t shift_imm = 0;
11595 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11596 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11597 uint32_t record_buf[8], record_buf_mem[8];
11600 ULONGEST u_regval[2];
11602 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11603 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11605 /* Handle enhanced store insns and LDRD DSP insn,
11606 order begins according to addressing modes for store insns
11610 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11612 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11613 /* LDR insn has a capability to do branching, if
11614 MOV LR, PC is precedded by LDR insn having Rn as R15
11615 in that case, it emulates branch and link insn, and hence we
11616 need to save CSPR and PC as well. */
11617 if (15 != reg_dest)
11619 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11620 arm_insn_r->reg_rec_count = 1;
11624 record_buf[0] = reg_dest;
11625 record_buf[1] = ARM_PS_REGNUM;
11626 arm_insn_r->reg_rec_count = 2;
11631 if (! bits (arm_insn_r->arm_insn, 4, 11))
11633 /* Store insn, register offset and register pre-indexed,
11634 register post-indexed. */
11636 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11638 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11639 regcache_raw_read_unsigned (reg_cache, reg_src1
11641 regcache_raw_read_unsigned (reg_cache, reg_src2
11643 if (15 == reg_src2)
11645 /* If R15 was used as Rn, hence current PC+8. */
11646 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11647 u_regval[0] = u_regval[0] + 8;
11649 /* Calculate target store address, Rn +/- Rm, register offset. */
11651 if (bit (arm_insn_r->arm_insn, 23))
11653 tgt_mem_addr = u_regval[0] + u_regval[1];
11657 tgt_mem_addr = u_regval[1] - u_regval[0];
11660 switch (arm_insn_r->opcode)
11674 record_buf_mem[0] = 4;
11689 record_buf_mem[0] = 1;
11693 gdb_assert_not_reached ("no decoding pattern found");
11696 record_buf_mem[1] = tgt_mem_addr;
11697 arm_insn_r->mem_rec_count = 1;
11699 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11700 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11701 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11702 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11703 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11704 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11707 /* Rn is going to be changed in pre-indexed mode and
11708 post-indexed mode as well. */
11709 record_buf[0] = reg_src2;
11710 arm_insn_r->reg_rec_count = 1;
11715 /* Store insn, scaled register offset; scaled pre-indexed. */
11716 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11718 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11720 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11721 /* Get shift_imm. */
11722 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11723 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11724 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11725 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11726 /* Offset_12 used as shift. */
11730 /* Offset_12 used as index. */
11731 offset_12 = u_regval[0] << shift_imm;
11735 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11741 if (bit (u_regval[0], 31))
11743 offset_12 = 0xFFFFFFFF;
11752 /* This is arithmetic shift. */
11753 offset_12 = s_word >> shift_imm;
11760 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11762 /* Get C flag value and shift it by 31. */
11763 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11764 | (u_regval[0]) >> 1);
11768 offset_12 = (u_regval[0] >> shift_imm) \
11770 (sizeof(uint32_t) - shift_imm));
11775 gdb_assert_not_reached ("no decoding pattern found");
11779 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11781 if (bit (arm_insn_r->arm_insn, 23))
11783 tgt_mem_addr = u_regval[1] + offset_12;
11787 tgt_mem_addr = u_regval[1] - offset_12;
11790 switch (arm_insn_r->opcode)
11804 record_buf_mem[0] = 4;
11819 record_buf_mem[0] = 1;
11823 gdb_assert_not_reached ("no decoding pattern found");
11826 record_buf_mem[1] = tgt_mem_addr;
11827 arm_insn_r->mem_rec_count = 1;
11829 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11830 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11831 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11832 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11833 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11834 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11837 /* Rn is going to be changed in register scaled pre-indexed
11838 mode,and scaled post indexed mode. */
11839 record_buf[0] = reg_src2;
11840 arm_insn_r->reg_rec_count = 1;
11845 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11846 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11850 /* Handling opcode 100 insns. */
11853 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11855 struct regcache *reg_cache = arm_insn_r->regcache;
11857 uint32_t register_list[16] = {0}, register_count = 0, register_bits = 0;
11858 uint32_t reg_src1 = 0, addr_mode = 0, no_of_regs = 0;
11859 uint32_t start_address = 0, index = 0;
11860 uint32_t record_buf[24], record_buf_mem[48];
11862 ULONGEST u_regval[2] = {0};
11864 /* This mode is exclusively for load and store multiple. */
11865 /* Handle incremenrt after/before and decrment after.before mode;
11866 Rn is changing depending on W bit, but as of now we store Rn too
11867 without optimization. */
11869 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11871 /* LDM (1,2,3) where LDM (3) changes CPSR too. */
11873 if (bit (arm_insn_r->arm_insn, 20) && !bit (arm_insn_r->arm_insn, 22))
11875 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11880 register_bits = bits (arm_insn_r->arm_insn, 0, 14);
11884 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11885 while (register_bits)
11887 if (register_bits & 0x00000001)
11888 record_buf[index++] = register_count;
11889 register_bits = register_bits >> 1;
11893 /* Extra space for Base Register and CPSR; wihtout optimization. */
11894 record_buf[index++] = reg_src1;
11895 record_buf[index++] = ARM_PS_REGNUM;
11896 arm_insn_r->reg_rec_count = index;
11900 /* It handles both STM(1) and STM(2). */
11901 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11903 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11905 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11906 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11907 while (register_bits)
11909 if (register_bits & 0x00000001)
11911 register_bits = register_bits >> 1;
11916 /* Decrement after. */
11918 start_address = (u_regval[0]) - (register_count * 4) + 4;
11919 arm_insn_r->mem_rec_count = register_count;
11920 while (register_count)
11922 record_buf_mem[(register_count * 2) - 1] = start_address;
11923 record_buf_mem[(register_count * 2) - 2] = 4;
11924 start_address = start_address + 4;
11929 /* Increment after. */
11931 start_address = u_regval[0];
11932 arm_insn_r->mem_rec_count = register_count;
11933 while (register_count)
11935 record_buf_mem[(register_count * 2) - 1] = start_address;
11936 record_buf_mem[(register_count * 2) - 2] = 4;
11937 start_address = start_address + 4;
11942 /* Decrement before. */
11945 start_address = (u_regval[0]) - (register_count * 4);
11946 arm_insn_r->mem_rec_count = register_count;
11947 while (register_count)
11949 record_buf_mem[(register_count * 2) - 1] = start_address;
11950 record_buf_mem[(register_count * 2) - 2] = 4;
11951 start_address = start_address + 4;
11956 /* Increment before. */
11958 start_address = u_regval[0] + 4;
11959 arm_insn_r->mem_rec_count = register_count;
11960 while (register_count)
11962 record_buf_mem[(register_count * 2) - 1] = start_address;
11963 record_buf_mem[(register_count * 2) - 2] = 4;
11964 start_address = start_address + 4;
11970 gdb_assert_not_reached ("no decoding pattern found");
11974 /* Base register also changes; based on condition and W bit. */
11975 /* We save it anyway without optimization. */
11976 record_buf[0] = reg_src1;
11977 arm_insn_r->reg_rec_count = 1;
11980 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11981 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11985 /* Handling opcode 101 insns. */
11988 arm_record_b_bl (insn_decode_record *arm_insn_r)
11990 uint32_t record_buf[8];
11992 /* Handle B, BL, BLX(1) insns. */
11993 /* B simply branches so we do nothing here. */
11994 /* Note: BLX(1) doesnt fall here but instead it falls into
11995 extension space. */
11996 if (bit (arm_insn_r->arm_insn, 24))
11998 record_buf[0] = ARM_LR_REGNUM;
11999 arm_insn_r->reg_rec_count = 1;
12002 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12007 /* Handling opcode 110 insns. */
12010 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
12012 printf_unfiltered (_("Process record does not support instruction "
12013 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12014 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12019 /* Record handler for extension register load/store instructions. */
12022 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12024 uint32_t opcode, single_reg;
12025 uint8_t op_vldm_vstm;
12026 uint32_t record_buf[8], record_buf_mem[128];
12027 ULONGEST u_regval = 0;
12029 struct regcache *reg_cache = arm_insn_r->regcache;
12030 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12032 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12033 single_reg = bit (arm_insn_r->arm_insn, 8);
12034 op_vldm_vstm = opcode & 0x1b;
12036 /* Handle VMOV instructions. */
12037 if ((opcode & 0x1e) == 0x04)
12039 if (bit (arm_insn_r->arm_insn, 4))
12041 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12042 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12043 arm_insn_r->reg_rec_count = 2;
12047 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12048 | bit (arm_insn_r->arm_insn, 5);
12052 record_buf[0] = num_regs + reg_m;
12053 record_buf[1] = num_regs + reg_m + 1;
12054 arm_insn_r->reg_rec_count = 2;
12058 record_buf[0] = reg_m + ARM_D0_REGNUM;
12059 arm_insn_r->reg_rec_count = 1;
12063 /* Handle VSTM and VPUSH instructions. */
12064 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12065 || op_vldm_vstm == 0x12)
12067 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12068 uint32_t memory_index = 0;
12070 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12071 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12072 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12073 imm_off32 = imm_off8 << 24;
12074 memory_count = imm_off8;
12076 if (bit (arm_insn_r->arm_insn, 23))
12077 start_address = u_regval;
12079 start_address = u_regval - imm_off32;
12081 if (bit (arm_insn_r->arm_insn, 21))
12083 record_buf[0] = reg_rn;
12084 arm_insn_r->reg_rec_count = 1;
12087 while (memory_count > 0)
12091 record_buf_mem[memory_index] = start_address;
12092 record_buf_mem[memory_index + 1] = 4;
12093 start_address = start_address + 4;
12094 memory_index = memory_index + 2;
12098 record_buf_mem[memory_index] = start_address;
12099 record_buf_mem[memory_index + 1] = 4;
12100 record_buf_mem[memory_index + 2] = start_address + 4;
12101 record_buf_mem[memory_index + 3] = 4;
12102 start_address = start_address + 8;
12103 memory_index = memory_index + 4;
12107 arm_insn_r->mem_rec_count = (memory_index >> 1);
12109 /* Handle VLDM instructions. */
12110 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12111 || op_vldm_vstm == 0x13)
12113 uint32_t reg_count, reg_vd;
12114 uint32_t reg_index = 0;
12116 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12117 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12120 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12122 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12124 if (bit (arm_insn_r->arm_insn, 21))
12125 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12127 while (reg_count > 0)
12130 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12132 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12136 arm_insn_r->reg_rec_count = reg_index;
12138 /* VSTR Vector store register. */
12139 else if ((opcode & 0x13) == 0x10)
12141 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12142 uint32_t memory_index = 0;
12144 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12145 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12146 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12147 imm_off32 = imm_off8 << 24;
12148 memory_count = imm_off8;
12150 if (bit (arm_insn_r->arm_insn, 23))
12151 start_address = u_regval + imm_off32;
12153 start_address = u_regval - imm_off32;
12157 record_buf_mem[memory_index] = start_address;
12158 record_buf_mem[memory_index + 1] = 4;
12159 arm_insn_r->mem_rec_count = 1;
12163 record_buf_mem[memory_index] = start_address;
12164 record_buf_mem[memory_index + 1] = 4;
12165 record_buf_mem[memory_index + 2] = start_address + 4;
12166 record_buf_mem[memory_index + 3] = 4;
12167 arm_insn_r->mem_rec_count = 2;
12170 /* VLDR Vector load register. */
12171 else if ((opcode & 0x13) == 0x11)
12173 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12177 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12178 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12182 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12183 record_buf[0] = num_regs + reg_vd;
12185 arm_insn_r->reg_rec_count = 1;
12188 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12189 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12193 /* Record handler for arm/thumb mode VFP data processing instructions. */
12196 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12198 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12199 uint32_t record_buf[4];
12200 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12201 enum insn_types curr_insn_type = INSN_INV;
12203 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12204 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12205 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12206 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12207 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12208 bit_d = bit (arm_insn_r->arm_insn, 22);
12209 opc1 = opc1 & 0x04;
12211 /* Handle VMLA, VMLS. */
12214 if (bit (arm_insn_r->arm_insn, 10))
12216 if (bit (arm_insn_r->arm_insn, 6))
12217 curr_insn_type = INSN_T0;
12219 curr_insn_type = INSN_T1;
12224 curr_insn_type = INSN_T1;
12226 curr_insn_type = INSN_T2;
12229 /* Handle VNMLA, VNMLS, VNMUL. */
12230 else if (opc1 == 0x01)
12233 curr_insn_type = INSN_T1;
12235 curr_insn_type = INSN_T2;
12238 else if (opc1 == 0x02 && !(opc3 & 0x01))
12240 if (bit (arm_insn_r->arm_insn, 10))
12242 if (bit (arm_insn_r->arm_insn, 6))
12243 curr_insn_type = INSN_T0;
12245 curr_insn_type = INSN_T1;
12250 curr_insn_type = INSN_T1;
12252 curr_insn_type = INSN_T2;
12255 /* Handle VADD, VSUB. */
12256 else if (opc1 == 0x03)
12258 if (!bit (arm_insn_r->arm_insn, 9))
12260 if (bit (arm_insn_r->arm_insn, 6))
12261 curr_insn_type = INSN_T0;
12263 curr_insn_type = INSN_T1;
12268 curr_insn_type = INSN_T1;
12270 curr_insn_type = INSN_T2;
12274 else if (opc1 == 0x0b)
12277 curr_insn_type = INSN_T1;
12279 curr_insn_type = INSN_T2;
12281 /* Handle all other vfp data processing instructions. */
12282 else if (opc1 == 0x0b)
12285 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12287 if (bit (arm_insn_r->arm_insn, 4))
12289 if (bit (arm_insn_r->arm_insn, 6))
12290 curr_insn_type = INSN_T0;
12292 curr_insn_type = INSN_T1;
12297 curr_insn_type = INSN_T1;
12299 curr_insn_type = INSN_T2;
12302 /* Handle VNEG and VABS. */
12303 else if ((opc2 == 0x01 && opc3 == 0x01)
12304 || (opc2 == 0x00 && opc3 == 0x03))
12306 if (!bit (arm_insn_r->arm_insn, 11))
12308 if (bit (arm_insn_r->arm_insn, 6))
12309 curr_insn_type = INSN_T0;
12311 curr_insn_type = INSN_T1;
12316 curr_insn_type = INSN_T1;
12318 curr_insn_type = INSN_T2;
12321 /* Handle VSQRT. */
12322 else if (opc2 == 0x01 && opc3 == 0x03)
12325 curr_insn_type = INSN_T1;
12327 curr_insn_type = INSN_T2;
12330 else if (opc2 == 0x07 && opc3 == 0x03)
12333 curr_insn_type = INSN_T1;
12335 curr_insn_type = INSN_T2;
12337 else if (opc3 & 0x01)
12340 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12342 if (!bit (arm_insn_r->arm_insn, 18))
12343 curr_insn_type = INSN_T2;
12347 curr_insn_type = INSN_T1;
12349 curr_insn_type = INSN_T2;
12353 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12356 curr_insn_type = INSN_T1;
12358 curr_insn_type = INSN_T2;
12360 /* Handle VCVTB, VCVTT. */
12361 else if ((opc2 & 0x0e) == 0x02)
12362 curr_insn_type = INSN_T2;
12363 /* Handle VCMP, VCMPE. */
12364 else if ((opc2 & 0x0e) == 0x04)
12365 curr_insn_type = INSN_T3;
12369 switch (curr_insn_type)
12372 reg_vd = reg_vd | (bit_d << 4);
12373 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12374 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12375 arm_insn_r->reg_rec_count = 2;
12379 reg_vd = reg_vd | (bit_d << 4);
12380 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12381 arm_insn_r->reg_rec_count = 1;
12385 reg_vd = (reg_vd << 1) | bit_d;
12386 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12387 arm_insn_r->reg_rec_count = 1;
12391 record_buf[0] = ARM_FPSCR_REGNUM;
12392 arm_insn_r->reg_rec_count = 1;
12396 gdb_assert_not_reached ("no decoding pattern found");
12400 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12404 /* Handling opcode 110 insns. */
12407 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12409 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12411 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12412 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12413 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12415 if ((coproc & 0x0e) == 0x0a)
12417 /* Handle extension register ld/st instructions. */
12419 return arm_record_exreg_ld_st_insn (arm_insn_r);
12421 /* 64-bit transfers between arm core and extension registers. */
12422 if ((op1 & 0x3e) == 0x04)
12423 return arm_record_exreg_ld_st_insn (arm_insn_r);
12427 /* Handle coprocessor ld/st instructions. */
12432 return arm_record_unsupported_insn (arm_insn_r);
12435 return arm_record_unsupported_insn (arm_insn_r);
12438 /* Move to coprocessor from two arm core registers. */
12440 return arm_record_unsupported_insn (arm_insn_r);
12442 /* Move to two arm core registers from coprocessor. */
12447 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12448 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12449 arm_insn_r->reg_rec_count = 2;
12451 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12455 return arm_record_unsupported_insn (arm_insn_r);
12458 /* Handling opcode 111 insns. */
12461 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12463 uint32_t op, op1_sbit, op1_ebit, coproc;
12464 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12465 struct regcache *reg_cache = arm_insn_r->regcache;
12466 ULONGEST u_regval = 0;
12468 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12469 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12470 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12471 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12472 op = bit (arm_insn_r->arm_insn, 4);
12474 /* Handle arm SWI/SVC system call instructions. */
12477 if (tdep->arm_syscall_record != NULL)
12479 ULONGEST svc_operand, svc_number;
12481 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12483 if (svc_operand) /* OABI. */
12484 svc_number = svc_operand - 0x900000;
12486 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12488 return tdep->arm_syscall_record (reg_cache, svc_number);
12492 printf_unfiltered (_("no syscall record support\n"));
12497 if ((coproc & 0x0e) == 0x0a)
12499 /* VFP data-processing instructions. */
12500 if (!op1_sbit && !op)
12501 return arm_record_vfp_data_proc_insn (arm_insn_r);
12503 /* Advanced SIMD, VFP instructions. */
12504 if (!op1_sbit && op)
12505 return arm_record_unsupported_insn (arm_insn_r);
12509 /* Coprocessor data operations. */
12510 if (!op1_sbit && !op)
12511 return arm_record_unsupported_insn (arm_insn_r);
12513 /* Move to Coprocessor from ARM core register. */
12514 if (!op1_sbit && !op1_ebit && op)
12515 return arm_record_unsupported_insn (arm_insn_r);
12517 /* Move to arm core register from coprocessor. */
12518 if (!op1_sbit && op1_ebit && op)
12520 uint32_t record_buf[1];
12522 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12523 if (record_buf[0] == 15)
12524 record_buf[0] = ARM_PS_REGNUM;
12526 arm_insn_r->reg_rec_count = 1;
12527 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12533 return arm_record_unsupported_insn (arm_insn_r);
12536 /* Handling opcode 000 insns. */
12539 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12541 uint32_t record_buf[8];
12542 uint32_t reg_src1 = 0;
12544 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12546 record_buf[0] = ARM_PS_REGNUM;
12547 record_buf[1] = reg_src1;
12548 thumb_insn_r->reg_rec_count = 2;
12550 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12556 /* Handling opcode 001 insns. */
12559 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12561 uint32_t record_buf[8];
12562 uint32_t reg_src1 = 0;
12564 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12566 record_buf[0] = ARM_PS_REGNUM;
12567 record_buf[1] = reg_src1;
12568 thumb_insn_r->reg_rec_count = 2;
12570 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12575 /* Handling opcode 010 insns. */
12578 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12580 struct regcache *reg_cache = thumb_insn_r->regcache;
12581 uint32_t record_buf[8], record_buf_mem[8];
12583 uint32_t reg_src1 = 0, reg_src2 = 0;
12584 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12586 ULONGEST u_regval[2] = {0};
12588 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12590 if (bit (thumb_insn_r->arm_insn, 12))
12592 /* Handle load/store register offset. */
12593 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12594 if (opcode2 >= 12 && opcode2 <= 15)
12596 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12597 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12598 record_buf[0] = reg_src1;
12599 thumb_insn_r->reg_rec_count = 1;
12601 else if (opcode2 >= 8 && opcode2 <= 10)
12603 /* STR(2), STRB(2), STRH(2) . */
12604 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12605 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12606 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12607 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12609 record_buf_mem[0] = 4; /* STR (2). */
12610 else if (10 == opcode2)
12611 record_buf_mem[0] = 1; /* STRB (2). */
12612 else if (9 == opcode2)
12613 record_buf_mem[0] = 2; /* STRH (2). */
12614 record_buf_mem[1] = u_regval[0] + u_regval[1];
12615 thumb_insn_r->mem_rec_count = 1;
12618 else if (bit (thumb_insn_r->arm_insn, 11))
12620 /* Handle load from literal pool. */
12622 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12623 record_buf[0] = reg_src1;
12624 thumb_insn_r->reg_rec_count = 1;
12628 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12629 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12630 if ((3 == opcode2) && (!opcode3))
12632 /* Branch with exchange. */
12633 record_buf[0] = ARM_PS_REGNUM;
12634 thumb_insn_r->reg_rec_count = 1;
12638 /* Format 8; special data processing insns. */
12639 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12640 record_buf[0] = ARM_PS_REGNUM;
12641 record_buf[1] = reg_src1;
12642 thumb_insn_r->reg_rec_count = 2;
12647 /* Format 5; data processing insns. */
12648 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12649 if (bit (thumb_insn_r->arm_insn, 7))
12651 reg_src1 = reg_src1 + 8;
12653 record_buf[0] = ARM_PS_REGNUM;
12654 record_buf[1] = reg_src1;
12655 thumb_insn_r->reg_rec_count = 2;
12658 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12659 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12665 /* Handling opcode 001 insns. */
12668 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12670 struct regcache *reg_cache = thumb_insn_r->regcache;
12671 uint32_t record_buf[8], record_buf_mem[8];
12673 uint32_t reg_src1 = 0;
12674 uint32_t opcode = 0, immed_5 = 0;
12676 ULONGEST u_regval = 0;
12678 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12683 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12684 record_buf[0] = reg_src1;
12685 thumb_insn_r->reg_rec_count = 1;
12690 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12691 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12692 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12693 record_buf_mem[0] = 4;
12694 record_buf_mem[1] = u_regval + (immed_5 * 4);
12695 thumb_insn_r->mem_rec_count = 1;
12698 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12699 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12705 /* Handling opcode 100 insns. */
12708 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12710 struct regcache *reg_cache = thumb_insn_r->regcache;
12711 uint32_t record_buf[8], record_buf_mem[8];
12713 uint32_t reg_src1 = 0;
12714 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12716 ULONGEST u_regval = 0;
12718 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12723 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12724 record_buf[0] = reg_src1;
12725 thumb_insn_r->reg_rec_count = 1;
12727 else if (1 == opcode)
12730 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12731 record_buf[0] = reg_src1;
12732 thumb_insn_r->reg_rec_count = 1;
12734 else if (2 == opcode)
12737 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12738 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12739 record_buf_mem[0] = 4;
12740 record_buf_mem[1] = u_regval + (immed_8 * 4);
12741 thumb_insn_r->mem_rec_count = 1;
12743 else if (0 == opcode)
12746 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12747 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12748 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12749 record_buf_mem[0] = 2;
12750 record_buf_mem[1] = u_regval + (immed_5 * 2);
12751 thumb_insn_r->mem_rec_count = 1;
12754 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12755 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12761 /* Handling opcode 101 insns. */
12764 thumb_record_misc (insn_decode_record *thumb_insn_r)
12766 struct regcache *reg_cache = thumb_insn_r->regcache;
12768 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12769 uint32_t register_bits = 0, register_count = 0;
12770 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12771 uint32_t record_buf[24], record_buf_mem[48];
12774 ULONGEST u_regval = 0;
12776 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12777 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12778 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12783 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12784 while (register_bits)
12786 if (register_bits & 0x00000001)
12787 record_buf[index++] = register_count;
12788 register_bits = register_bits >> 1;
12791 record_buf[index++] = ARM_PS_REGNUM;
12792 record_buf[index++] = ARM_SP_REGNUM;
12793 thumb_insn_r->reg_rec_count = index;
12795 else if (10 == opcode2)
12798 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12799 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12800 while (register_bits)
12802 if (register_bits & 0x00000001)
12804 register_bits = register_bits >> 1;
12806 start_address = u_regval - \
12807 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12808 thumb_insn_r->mem_rec_count = register_count;
12809 while (register_count)
12811 record_buf_mem[(register_count * 2) - 1] = start_address;
12812 record_buf_mem[(register_count * 2) - 2] = 4;
12813 start_address = start_address + 4;
12816 record_buf[0] = ARM_SP_REGNUM;
12817 thumb_insn_r->reg_rec_count = 1;
12819 else if (0x1E == opcode1)
12822 /* Handle enhanced software breakpoint insn, BKPT. */
12823 /* CPSR is changed to be executed in ARM state, disabling normal
12824 interrupts, entering abort mode. */
12825 /* According to high vector configuration PC is set. */
12826 /* User hits breakpoint and type reverse, in that case, we need to go back with
12827 previous CPSR and Program Counter. */
12828 record_buf[0] = ARM_PS_REGNUM;
12829 record_buf[1] = ARM_LR_REGNUM;
12830 thumb_insn_r->reg_rec_count = 2;
12831 /* We need to save SPSR value, which is not yet done. */
12832 printf_unfiltered (_("Process record does not support instruction "
12833 "0x%0x at address %s.\n"),
12834 thumb_insn_r->arm_insn,
12835 paddress (thumb_insn_r->gdbarch,
12836 thumb_insn_r->this_addr));
12839 else if ((0 == opcode) || (1 == opcode))
12841 /* ADD(5), ADD(6). */
12842 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12843 record_buf[0] = reg_src1;
12844 thumb_insn_r->reg_rec_count = 1;
12846 else if (2 == opcode)
12848 /* ADD(7), SUB(4). */
12849 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12850 record_buf[0] = ARM_SP_REGNUM;
12851 thumb_insn_r->reg_rec_count = 1;
12854 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12855 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12861 /* Handling opcode 110 insns. */
12864 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12866 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12867 struct regcache *reg_cache = thumb_insn_r->regcache;
12869 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12870 uint32_t reg_src1 = 0;
12871 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12872 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12873 uint32_t record_buf[24], record_buf_mem[48];
12875 ULONGEST u_regval = 0;
12877 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12878 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12884 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12886 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12887 while (register_bits)
12889 if (register_bits & 0x00000001)
12890 record_buf[index++] = register_count;
12891 register_bits = register_bits >> 1;
12894 record_buf[index++] = reg_src1;
12895 thumb_insn_r->reg_rec_count = index;
12897 else if (0 == opcode2)
12899 /* It handles both STMIA. */
12900 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12902 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12903 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12904 while (register_bits)
12906 if (register_bits & 0x00000001)
12908 register_bits = register_bits >> 1;
12910 start_address = u_regval;
12911 thumb_insn_r->mem_rec_count = register_count;
12912 while (register_count)
12914 record_buf_mem[(register_count * 2) - 1] = start_address;
12915 record_buf_mem[(register_count * 2) - 2] = 4;
12916 start_address = start_address + 4;
12920 else if (0x1F == opcode1)
12922 /* Handle arm syscall insn. */
12923 if (tdep->arm_syscall_record != NULL)
12925 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12926 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12930 printf_unfiltered (_("no syscall record support\n"));
12935 /* B (1), conditional branch is automatically taken care in process_record,
12936 as PC is saved there. */
12938 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12939 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12945 /* Handling opcode 111 insns. */
12948 thumb_record_branch (insn_decode_record *thumb_insn_r)
12950 uint32_t record_buf[8];
12951 uint32_t bits_h = 0;
12953 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12955 if (2 == bits_h || 3 == bits_h)
12958 record_buf[0] = ARM_LR_REGNUM;
12959 thumb_insn_r->reg_rec_count = 1;
12961 else if (1 == bits_h)
12964 record_buf[0] = ARM_PS_REGNUM;
12965 record_buf[1] = ARM_LR_REGNUM;
12966 thumb_insn_r->reg_rec_count = 2;
12969 /* B(2) is automatically taken care in process_record, as PC is
12972 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12977 /* Handler for thumb2 load/store multiple instructions. */
12980 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12982 struct regcache *reg_cache = thumb2_insn_r->regcache;
12984 uint32_t reg_rn, op;
12985 uint32_t register_bits = 0, register_count = 0;
12986 uint32_t index = 0, start_address = 0;
12987 uint32_t record_buf[24], record_buf_mem[48];
12989 ULONGEST u_regval = 0;
12991 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12992 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12994 if (0 == op || 3 == op)
12996 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12998 /* Handle RFE instruction. */
12999 record_buf[0] = ARM_PS_REGNUM;
13000 thumb2_insn_r->reg_rec_count = 1;
13004 /* Handle SRS instruction after reading banked SP. */
13005 return arm_record_unsupported_insn (thumb2_insn_r);
13008 else if (1 == op || 2 == op)
13010 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13012 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13013 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13014 while (register_bits)
13016 if (register_bits & 0x00000001)
13017 record_buf[index++] = register_count;
13020 register_bits = register_bits >> 1;
13022 record_buf[index++] = reg_rn;
13023 record_buf[index++] = ARM_PS_REGNUM;
13024 thumb2_insn_r->reg_rec_count = index;
13028 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13029 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13030 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13031 while (register_bits)
13033 if (register_bits & 0x00000001)
13036 register_bits = register_bits >> 1;
13041 /* Start address calculation for LDMDB/LDMEA. */
13042 start_address = u_regval;
13046 /* Start address calculation for LDMDB/LDMEA. */
13047 start_address = u_regval - register_count * 4;
13050 thumb2_insn_r->mem_rec_count = register_count;
13051 while (register_count)
13053 record_buf_mem[register_count * 2 - 1] = start_address;
13054 record_buf_mem[register_count * 2 - 2] = 4;
13055 start_address = start_address + 4;
13058 record_buf[0] = reg_rn;
13059 record_buf[1] = ARM_PS_REGNUM;
13060 thumb2_insn_r->reg_rec_count = 2;
13064 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13066 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13068 return ARM_RECORD_SUCCESS;
13071 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13075 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13077 struct regcache *reg_cache = thumb2_insn_r->regcache;
13079 uint32_t reg_rd, reg_rn, offset_imm;
13080 uint32_t reg_dest1, reg_dest2;
13081 uint32_t address, offset_addr;
13082 uint32_t record_buf[8], record_buf_mem[8];
13083 uint32_t op1, op2, op3;
13086 ULONGEST u_regval[2];
13088 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13089 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13090 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13092 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13094 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13096 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13097 record_buf[0] = reg_dest1;
13098 record_buf[1] = ARM_PS_REGNUM;
13099 thumb2_insn_r->reg_rec_count = 2;
13102 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13104 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13105 record_buf[2] = reg_dest2;
13106 thumb2_insn_r->reg_rec_count = 3;
13111 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13112 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13114 if (0 == op1 && 0 == op2)
13116 /* Handle STREX. */
13117 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13118 address = u_regval[0] + (offset_imm * 4);
13119 record_buf_mem[0] = 4;
13120 record_buf_mem[1] = address;
13121 thumb2_insn_r->mem_rec_count = 1;
13122 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13123 record_buf[0] = reg_rd;
13124 thumb2_insn_r->reg_rec_count = 1;
13126 else if (1 == op1 && 0 == op2)
13128 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13129 record_buf[0] = reg_rd;
13130 thumb2_insn_r->reg_rec_count = 1;
13131 address = u_regval[0];
13132 record_buf_mem[1] = address;
13136 /* Handle STREXB. */
13137 record_buf_mem[0] = 1;
13138 thumb2_insn_r->mem_rec_count = 1;
13142 /* Handle STREXH. */
13143 record_buf_mem[0] = 2 ;
13144 thumb2_insn_r->mem_rec_count = 1;
13148 /* Handle STREXD. */
13149 address = u_regval[0];
13150 record_buf_mem[0] = 4;
13151 record_buf_mem[2] = 4;
13152 record_buf_mem[3] = address + 4;
13153 thumb2_insn_r->mem_rec_count = 2;
13158 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13160 if (bit (thumb2_insn_r->arm_insn, 24))
13162 if (bit (thumb2_insn_r->arm_insn, 23))
13163 offset_addr = u_regval[0] + (offset_imm * 4);
13165 offset_addr = u_regval[0] - (offset_imm * 4);
13167 address = offset_addr;
13170 address = u_regval[0];
13172 record_buf_mem[0] = 4;
13173 record_buf_mem[1] = address;
13174 record_buf_mem[2] = 4;
13175 record_buf_mem[3] = address + 4;
13176 thumb2_insn_r->mem_rec_count = 2;
13177 record_buf[0] = reg_rn;
13178 thumb2_insn_r->reg_rec_count = 1;
13182 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13184 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13186 return ARM_RECORD_SUCCESS;
13189 /* Handler for thumb2 data processing (shift register and modified immediate)
13193 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13195 uint32_t reg_rd, op;
13196 uint32_t record_buf[8];
13198 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13199 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13201 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13203 record_buf[0] = ARM_PS_REGNUM;
13204 thumb2_insn_r->reg_rec_count = 1;
13208 record_buf[0] = reg_rd;
13209 record_buf[1] = ARM_PS_REGNUM;
13210 thumb2_insn_r->reg_rec_count = 2;
13213 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13215 return ARM_RECORD_SUCCESS;
13218 /* Generic handler for thumb2 instructions which effect destination and PS
13222 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13225 uint32_t record_buf[8];
13227 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13229 record_buf[0] = reg_rd;
13230 record_buf[1] = ARM_PS_REGNUM;
13231 thumb2_insn_r->reg_rec_count = 2;
13233 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13235 return ARM_RECORD_SUCCESS;
13238 /* Handler for thumb2 branch and miscellaneous control instructions. */
13241 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13243 uint32_t op, op1, op2;
13244 uint32_t record_buf[8];
13246 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13247 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13248 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13250 /* Handle MSR insn. */
13251 if (!(op1 & 0x2) && 0x38 == op)
13255 /* CPSR is going to be changed. */
13256 record_buf[0] = ARM_PS_REGNUM;
13257 thumb2_insn_r->reg_rec_count = 1;
13261 arm_record_unsupported_insn(thumb2_insn_r);
13265 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13268 record_buf[0] = ARM_PS_REGNUM;
13269 record_buf[1] = ARM_LR_REGNUM;
13270 thumb2_insn_r->reg_rec_count = 2;
13273 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13275 return ARM_RECORD_SUCCESS;
13278 /* Handler for thumb2 store single data item instructions. */
13281 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13283 struct regcache *reg_cache = thumb2_insn_r->regcache;
13285 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13286 uint32_t address, offset_addr;
13287 uint32_t record_buf[8], record_buf_mem[8];
13290 ULONGEST u_regval[2];
13292 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13293 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13294 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13295 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13297 if (bit (thumb2_insn_r->arm_insn, 23))
13300 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13301 offset_addr = u_regval[0] + offset_imm;
13302 address = offset_addr;
13307 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13309 /* Handle STRB (register). */
13310 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13311 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13312 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13313 offset_addr = u_regval[1] << shift_imm;
13314 address = u_regval[0] + offset_addr;
13318 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13319 if (bit (thumb2_insn_r->arm_insn, 10))
13321 if (bit (thumb2_insn_r->arm_insn, 9))
13322 offset_addr = u_regval[0] + offset_imm;
13324 offset_addr = u_regval[0] - offset_imm;
13326 address = offset_addr;
13329 address = u_regval[0];
13335 /* Store byte instructions. */
13338 record_buf_mem[0] = 1;
13340 /* Store half word instructions. */
13343 record_buf_mem[0] = 2;
13345 /* Store word instructions. */
13348 record_buf_mem[0] = 4;
13352 gdb_assert_not_reached ("no decoding pattern found");
13356 record_buf_mem[1] = address;
13357 thumb2_insn_r->mem_rec_count = 1;
13358 record_buf[0] = reg_rn;
13359 thumb2_insn_r->reg_rec_count = 1;
13361 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13363 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13365 return ARM_RECORD_SUCCESS;
13368 /* Handler for thumb2 load memory hints instructions. */
13371 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13373 uint32_t record_buf[8];
13374 uint32_t reg_rt, reg_rn;
13376 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13377 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13379 if (ARM_PC_REGNUM != reg_rt)
13381 record_buf[0] = reg_rt;
13382 record_buf[1] = reg_rn;
13383 record_buf[2] = ARM_PS_REGNUM;
13384 thumb2_insn_r->reg_rec_count = 3;
13386 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13388 return ARM_RECORD_SUCCESS;
13391 return ARM_RECORD_FAILURE;
13394 /* Handler for thumb2 load word instructions. */
13397 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13399 uint32_t opcode1 = 0, opcode2 = 0;
13400 uint32_t record_buf[8];
13402 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13403 record_buf[1] = ARM_PS_REGNUM;
13404 thumb2_insn_r->reg_rec_count = 2;
13406 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13408 return ARM_RECORD_SUCCESS;
13411 /* Handler for thumb2 long multiply, long multiply accumulate, and
13412 divide instructions. */
13415 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13417 uint32_t opcode1 = 0, opcode2 = 0;
13418 uint32_t record_buf[8];
13419 uint32_t reg_src1 = 0;
13421 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13422 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13424 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13426 /* Handle SMULL, UMULL, SMULAL. */
13427 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13428 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13429 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13430 record_buf[2] = ARM_PS_REGNUM;
13431 thumb2_insn_r->reg_rec_count = 3;
13433 else if (1 == opcode1 || 3 == opcode2)
13435 /* Handle SDIV and UDIV. */
13436 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13437 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13438 record_buf[2] = ARM_PS_REGNUM;
13439 thumb2_insn_r->reg_rec_count = 3;
13442 return ARM_RECORD_FAILURE;
13444 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13446 return ARM_RECORD_SUCCESS;
13449 /* Record handler for thumb32 coprocessor instructions. */
13452 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13454 if (bit (thumb2_insn_r->arm_insn, 25))
13455 return arm_record_coproc_data_proc (thumb2_insn_r);
13457 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13460 /* Record handler for advance SIMD structure load/store instructions. */
13463 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13465 struct regcache *reg_cache = thumb2_insn_r->regcache;
13466 uint32_t l_bit, a_bit, b_bits;
13467 uint32_t record_buf[128], record_buf_mem[128];
13468 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13469 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13472 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13473 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13474 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13475 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13476 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13477 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13478 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13479 f_esize = 8 * f_ebytes;
13480 f_elem = 8 / f_ebytes;
13484 ULONGEST u_regval = 0;
13485 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13486 address = u_regval;
13491 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13493 if (b_bits == 0x07)
13495 else if (b_bits == 0x0a)
13497 else if (b_bits == 0x06)
13499 else if (b_bits == 0x02)
13504 for (index_r = 0; index_r < bf_regs; index_r++)
13506 for (index_e = 0; index_e < f_elem; index_e++)
13508 record_buf_mem[index_m++] = f_ebytes;
13509 record_buf_mem[index_m++] = address;
13510 address = address + f_ebytes;
13511 thumb2_insn_r->mem_rec_count += 1;
13516 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13518 if (b_bits == 0x09 || b_bits == 0x08)
13520 else if (b_bits == 0x03)
13525 for (index_r = 0; index_r < bf_regs; index_r++)
13526 for (index_e = 0; index_e < f_elem; index_e++)
13528 for (loop_t = 0; loop_t < 2; loop_t++)
13530 record_buf_mem[index_m++] = f_ebytes;
13531 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13532 thumb2_insn_r->mem_rec_count += 1;
13534 address = address + (2 * f_ebytes);
13538 else if ((b_bits & 0x0e) == 0x04)
13540 for (index_e = 0; index_e < f_elem; index_e++)
13542 for (loop_t = 0; loop_t < 3; loop_t++)
13544 record_buf_mem[index_m++] = f_ebytes;
13545 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13546 thumb2_insn_r->mem_rec_count += 1;
13548 address = address + (3 * f_ebytes);
13552 else if (!(b_bits & 0x0e))
13554 for (index_e = 0; index_e < f_elem; index_e++)
13556 for (loop_t = 0; loop_t < 4; loop_t++)
13558 record_buf_mem[index_m++] = f_ebytes;
13559 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13560 thumb2_insn_r->mem_rec_count += 1;
13562 address = address + (4 * f_ebytes);
13568 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13570 if (bft_size == 0x00)
13572 else if (bft_size == 0x01)
13574 else if (bft_size == 0x02)
13580 if (!(b_bits & 0x0b) || b_bits == 0x08)
13581 thumb2_insn_r->mem_rec_count = 1;
13583 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13584 thumb2_insn_r->mem_rec_count = 2;
13586 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13587 thumb2_insn_r->mem_rec_count = 3;
13589 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13590 thumb2_insn_r->mem_rec_count = 4;
13592 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13594 record_buf_mem[index_m] = f_ebytes;
13595 record_buf_mem[index_m] = address + (index_m * f_ebytes);
13604 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13605 thumb2_insn_r->reg_rec_count = 1;
13607 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13608 thumb2_insn_r->reg_rec_count = 2;
13610 else if ((b_bits & 0x0e) == 0x04)
13611 thumb2_insn_r->reg_rec_count = 3;
13613 else if (!(b_bits & 0x0e))
13614 thumb2_insn_r->reg_rec_count = 4;
13619 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13620 thumb2_insn_r->reg_rec_count = 1;
13622 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13623 thumb2_insn_r->reg_rec_count = 2;
13625 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13626 thumb2_insn_r->reg_rec_count = 3;
13628 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13629 thumb2_insn_r->reg_rec_count = 4;
13631 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13632 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13636 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13638 record_buf[index_r] = reg_rn;
13639 thumb2_insn_r->reg_rec_count += 1;
13642 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13644 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13649 /* Decodes thumb2 instruction type and invokes its record handler. */
13651 static unsigned int
13652 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13654 uint32_t op, op1, op2;
13656 op = bit (thumb2_insn_r->arm_insn, 15);
13657 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13658 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13662 if (!(op2 & 0x64 ))
13664 /* Load/store multiple instruction. */
13665 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13667 else if (!((op2 & 0x64) ^ 0x04))
13669 /* Load/store (dual/exclusive) and table branch instruction. */
13670 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13672 else if (!((op2 & 0x20) ^ 0x20))
13674 /* Data-processing (shifted register). */
13675 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13677 else if (op2 & 0x40)
13679 /* Co-processor instructions. */
13680 return thumb2_record_coproc_insn (thumb2_insn_r);
13683 else if (op1 == 0x02)
13687 /* Branches and miscellaneous control instructions. */
13688 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13690 else if (op2 & 0x20)
13692 /* Data-processing (plain binary immediate) instruction. */
13693 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13697 /* Data-processing (modified immediate). */
13698 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13701 else if (op1 == 0x03)
13703 if (!(op2 & 0x71 ))
13705 /* Store single data item. */
13706 return thumb2_record_str_single_data (thumb2_insn_r);
13708 else if (!((op2 & 0x71) ^ 0x10))
13710 /* Advanced SIMD or structure load/store instructions. */
13711 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13713 else if (!((op2 & 0x67) ^ 0x01))
13715 /* Load byte, memory hints instruction. */
13716 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13718 else if (!((op2 & 0x67) ^ 0x03))
13720 /* Load halfword, memory hints instruction. */
13721 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13723 else if (!((op2 & 0x67) ^ 0x05))
13725 /* Load word instruction. */
13726 return thumb2_record_ld_word (thumb2_insn_r);
13728 else if (!((op2 & 0x70) ^ 0x20))
13730 /* Data-processing (register) instruction. */
13731 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13733 else if (!((op2 & 0x78) ^ 0x30))
13735 /* Multiply, multiply accumulate, abs diff instruction. */
13736 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13738 else if (!((op2 & 0x78) ^ 0x38))
13740 /* Long multiply, long multiply accumulate, and divide. */
13741 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13743 else if (op2 & 0x40)
13745 /* Co-processor instructions. */
13746 return thumb2_record_coproc_insn (thumb2_insn_r);
13753 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13754 and positive val on fauilure. */
13757 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13759 gdb_byte buf[insn_size];
13761 memset (&buf[0], 0, insn_size);
13763 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13765 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13767 gdbarch_byte_order (insn_record->gdbarch));
13771 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13773 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13777 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13778 uint32_t insn_size)
13781 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13782 static const sti_arm_hdl_fp_t const arm_handle_insn[8] =
13784 arm_record_data_proc_misc_ld_str, /* 000. */
13785 arm_record_data_proc_imm, /* 001. */
13786 arm_record_ld_st_imm_offset, /* 010. */
13787 arm_record_ld_st_reg_offset, /* 011. */
13788 arm_record_ld_st_multiple, /* 100. */
13789 arm_record_b_bl, /* 101. */
13790 arm_record_asimd_vfp_coproc, /* 110. */
13791 arm_record_coproc_data_proc /* 111. */
13794 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13795 static const sti_arm_hdl_fp_t const thumb_handle_insn[8] =
13797 thumb_record_shift_add_sub, /* 000. */
13798 thumb_record_add_sub_cmp_mov, /* 001. */
13799 thumb_record_ld_st_reg_offset, /* 010. */
13800 thumb_record_ld_st_imm_offset, /* 011. */
13801 thumb_record_ld_st_stack, /* 100. */
13802 thumb_record_misc, /* 101. */
13803 thumb_record_ldm_stm_swi, /* 110. */
13804 thumb_record_branch /* 111. */
13807 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13808 uint32_t insn_id = 0;
13810 if (extract_arm_insn (arm_record, insn_size))
13814 printf_unfiltered (_("Process record: error reading memory at "
13815 "addr %s len = %d.\n"),
13816 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13820 else if (ARM_RECORD == record_type)
13822 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13823 insn_id = bits (arm_record->arm_insn, 25, 27);
13824 ret = arm_record_extension_space (arm_record);
13825 /* If this insn has fallen into extension space
13826 then we need not decode it anymore. */
13827 if (ret != -1 && !INSN_RECORDED(arm_record))
13829 ret = arm_handle_insn[insn_id] (arm_record);
13832 else if (THUMB_RECORD == record_type)
13834 /* As thumb does not have condition codes, we set negative. */
13835 arm_record->cond = -1;
13836 insn_id = bits (arm_record->arm_insn, 13, 15);
13837 ret = thumb_handle_insn[insn_id] (arm_record);
13839 else if (THUMB2_RECORD == record_type)
13841 /* As thumb does not have condition codes, we set negative. */
13842 arm_record->cond = -1;
13844 /* Swap first half of 32bit thumb instruction with second half. */
13845 arm_record->arm_insn
13846 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13848 insn_id = thumb2_record_decode_insn_handler (arm_record);
13850 if (insn_id != ARM_RECORD_SUCCESS)
13852 arm_record_unsupported_insn (arm_record);
13858 /* Throw assertion. */
13859 gdb_assert_not_reached ("not a valid instruction, could not decode");
13866 /* Cleans up local record registers and memory allocations. */
13869 deallocate_reg_mem (insn_decode_record *record)
13871 xfree (record->arm_regs);
13872 xfree (record->arm_mems);
13876 /* Parse the current instruction and record the values of the registers and
13877 memory that will be changed in current instruction to record_arch_list".
13878 Return -1 if something is wrong. */
13881 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13882 CORE_ADDR insn_addr)
13885 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13886 uint32_t no_of_rec = 0;
13887 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13888 ULONGEST t_bit = 0, insn_id = 0;
13890 ULONGEST u_regval = 0;
13892 insn_decode_record arm_record;
13894 memset (&arm_record, 0, sizeof (insn_decode_record));
13895 arm_record.regcache = regcache;
13896 arm_record.this_addr = insn_addr;
13897 arm_record.gdbarch = gdbarch;
13900 if (record_debug > 1)
13902 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13904 paddress (gdbarch, arm_record.this_addr));
13907 if (extract_arm_insn (&arm_record, 2))
13911 printf_unfiltered (_("Process record: error reading memory at "
13912 "addr %s len = %d.\n"),
13913 paddress (arm_record.gdbarch,
13914 arm_record.this_addr), 2);
13919 /* Check the insn, whether it is thumb or arm one. */
13921 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13922 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13925 if (!(u_regval & t_bit))
13927 /* We are decoding arm insn. */
13928 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13932 insn_id = bits (arm_record.arm_insn, 11, 15);
13933 /* is it thumb2 insn? */
13934 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13936 ret = decode_insn (&arm_record, THUMB2_RECORD,
13937 THUMB2_INSN_SIZE_BYTES);
13941 /* We are decoding thumb insn. */
13942 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13948 /* Record registers. */
13949 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13950 if (arm_record.arm_regs)
13952 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13954 if (record_full_arch_list_add_reg
13955 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13959 /* Record memories. */
13960 if (arm_record.arm_mems)
13962 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13964 if (record_full_arch_list_add_mem
13965 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13966 arm_record.arm_mems[no_of_rec].len))
13971 if (record_full_arch_list_add_end ())
13976 deallocate_reg_mem (&arm_record);