1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2017 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as a Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
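/* Note: the "special" bit is what arm_pc_is_thumb below consults when it
   falls back to the minimal symbol table to decide whether an address is
   Thumb code.  */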
91 /* Per-objfile data used for mapping symbols. */
92 static const struct objfile_data *arm_objfile_data_key;
94 struct arm_mapping_symbol
99 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100 DEF_VEC_O(arm_mapping_symbol_s);
102 struct arm_per_objfile
104 VEC(arm_mapping_symbol_s) **section_maps;
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element *setarmcmdlist = NULL;
109 static struct cmd_list_element *showarmcmdlist = NULL;
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings[] =
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125 static const char *current_fp_model = "auto";
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings[] =
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138 static const char *arm_abi_string = "auto";
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings[] =
149 static const char *arm_fallback_mode_string = "auto";
150 static const char *arm_force_mode_string = "auto";
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
160 } arm_register_aliases[] = {
161 /* Basic register numbers. */
178 /* Synonyms (argument and variable registers). */
191 /* Other platform-specific names for r9. */
197 /* Names used by GCC (not listed in the ARM EABI). */
199 /* A special name from the older ATPCS. */
203 static const char *const arm_register_names[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options;
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
221 /* This is used to keep the bfd arch_info in sync with the disassembly style.  */
223 static void set_disassembly_style_sfunc(char *, int,
224 struct cmd_list_element *);
225 static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
229 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
230 struct regcache *regcache,
231 int regnum, gdb_byte *buf);
232 static void arm_neon_quad_write (struct gdbarch *gdbarch,
233 struct regcache *regcache,
234 int regnum, const gdb_byte *buf);
237 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
240 /* get_next_pcs operations. */
241 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
242 arm_get_next_pcs_read_memory_unsigned_integer,
243 arm_get_next_pcs_syscall_next_pc,
244 arm_get_next_pcs_addr_bits_remove,
245 arm_get_next_pcs_is_thumb,
249 struct arm_prologue_cache
251 /* The stack pointer at the time this frame was created; i.e. the
252 caller's stack pointer when this function was called. It is used
253 to identify this frame. */
256 /* The frame base for this frame is just prev_sp - frame size.
257 FRAMESIZE is the distance from the frame pointer to the
258 initial stack pointer. */
262 /* The register used to hold the frame pointer for this frame. */
265 /* Saved register offsets. */
266 struct trad_frame_saved_reg *saved_regs;
269 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
270 CORE_ADDR prologue_start,
271 CORE_ADDR prologue_end,
272 struct arm_prologue_cache *cache);
274 /* Architecture version for displaced stepping.  This affects the behaviour of
275 certain instructions, and really should not be hard-wired. */
277 #define DISPLACED_STEPPING_ARCH_VERSION 5
279 /* Set to true if the 32-bit mode is in use. */
283 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
286 arm_psr_thumb_bit (struct gdbarch *gdbarch)
288 if (gdbarch_tdep (gdbarch)->is_m)
294 /* Determine if the processor is currently executing in Thumb mode. */
297 arm_is_thumb (struct regcache *regcache)
300 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
302 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
304 return (cpsr & t_bit) != 0;
307 /* Determine if FRAME is executing in Thumb mode. */
310 arm_frame_is_thumb (struct frame_info *frame)
313 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
315 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
316 directly (from a signal frame or dummy frame) or by interpreting
317 the saved LR (from a prologue or DWARF frame). So consult it and
318 trust the unwinders. */
319 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
321 return (cpsr & t_bit) != 0;
324 /* Callback for VEC_lower_bound. */
327 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
328 const struct arm_mapping_symbol *rhs)
330 return lhs->value < rhs->value;
333 /* Search for the mapping symbol covering MEMADDR. If one is found,
334 return its type. Otherwise, return 0. If START is non-NULL,
335 set *START to the location of the mapping symbol. */
338 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
340 struct obj_section *sec;
342 /* If there are mapping symbols, consult them. */
343 sec = find_pc_section (memaddr);
346 struct arm_per_objfile *data;
347 VEC(arm_mapping_symbol_s) *map;
348 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
352 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
353 arm_objfile_data_key);
356 map = data->section_maps[sec->the_bfd_section->index];
357 if (!VEC_empty (arm_mapping_symbol_s, map))
359 struct arm_mapping_symbol *map_sym;
361 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
362 arm_compare_mapping_symbols);
364 /* VEC_lower_bound finds the earliest ordered insertion
365 point. If the following symbol starts at this exact
366 address, we use that; otherwise, the preceding
367 mapping symbol covers this address. */
368 if (idx < VEC_length (arm_mapping_symbol_s, map))
370 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
371 if (map_sym->value == map_key.value)
374 *start = map_sym->value + obj_section_addr (sec);
375 return map_sym->type;
381 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
383 *start = map_sym->value + obj_section_addr (sec);
384 return map_sym->type;
393 /* Determine if the program counter specified in MEMADDR is in a Thumb
394 function. This function should be called for addresses unrelated to
395 any executing frame; otherwise, prefer arm_frame_is_thumb. */
398 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
400 struct bound_minimal_symbol sym;
402 struct displaced_step_closure* dsc
403 = get_displaced_step_closure_by_addr(memaddr);
405 /* If we are checking the mode of a displaced instruction in the copy area,
406 the mode should be determined by the instruction at the original address. */
410 fprintf_unfiltered (gdb_stdlog,
411 "displaced: check mode of %.8lx instead of %.8lx\n",
412 (unsigned long) dsc->insn_addr,
413 (unsigned long) memaddr);
414 memaddr = dsc->insn_addr;
417 /* If bit 0 of the address is set, assume this is a Thumb address. */
418 if (IS_THUMB_ADDR (memaddr))
421 /* If the user wants to override the symbol table, let them. */
422 if (strcmp (arm_force_mode_string, "arm") == 0)
424 if (strcmp (arm_force_mode_string, "thumb") == 0)
427 /* ARM v6-M and v7-M are always in Thumb mode. */
428 if (gdbarch_tdep (gdbarch)->is_m)
431 /* If there are mapping symbols, consult them. */
432 type = arm_find_mapping_symbol (memaddr, NULL);
436 /* Thumb functions have a "special" bit set in minimal symbols. */
437 sym = lookup_minimal_symbol_by_pc (memaddr);
439 return (MSYMBOL_IS_SPECIAL (sym.minsym));
441 /* If the user wants to override the fallback mode, let them. */
442 if (strcmp (arm_fallback_mode_string, "arm") == 0)
444 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
447 /* If we couldn't find any symbol, but we're talking to a running
448 target, then trust the current value of $cpsr. This lets
449 "display/i $pc" always show the correct mode (though if there is
450 a symbol table we will not reach here, so it still may not be
451 displayed in the mode it will be executed). */
452 if (target_has_registers)
453 return arm_frame_is_thumb (get_current_frame ());
455 /* Otherwise we're out of luck; we assume ARM. */
459 /* Determine if the address specified equals any of these magic return
460 values, called EXC_RETURN, defined by the ARM v6-M and v7-M architectures.
463 From ARMv6-M Reference Manual B1.5.8
464 Table B1-5 Exception return behavior
466 EXC_RETURN Return To Return Stack
467 0xFFFFFFF1 Handler mode Main
468 0xFFFFFFF9 Thread mode Main
469 0xFFFFFFFD Thread mode Process
471 From ARMv7-M Reference Manual B1.5.8
472 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
474 EXC_RETURN Return To Return Stack
475 0xFFFFFFF1 Handler mode Main
476 0xFFFFFFF9 Thread mode Main
477 0xFFFFFFFD Thread mode Process
479 Table B1-9 EXC_RETURN definition of exception return behavior, with FP
482 EXC_RETURN Return To Return Stack Frame Type
483 0xFFFFFFE1 Handler mode Main Extended
484 0xFFFFFFE9 Thread mode Main Extended
485 0xFFFFFFED Thread mode Process Extended
486 0xFFFFFFF1 Handler mode Main Basic
487 0xFFFFFFF9 Thread mode Main Basic
488 0xFFFFFFFD Thread mode Process Basic
490 For more details see "B1.5.8 Exception return behavior"
491 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
494 arm_m_addr_is_magic (CORE_ADDR addr)
498 /* Values from the tables in B1.5.8, the EXC_RETURN definitions of
499 the exception return behavior. */
506 /* Address is magic. */
510 /* Address is not magic. */
515 /* Remove useless bits from addresses in a running program. */
517 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
519 /* On M-profile devices, do not strip the low bit from EXC_RETURN
520 (the magic exception return address). */
521 if (gdbarch_tdep (gdbarch)->is_m
522 && arm_m_addr_is_magic (val))
526 return UNMAKE_THUMB_ADDR (val);
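/* The 0x03fffffc mask corresponds to the legacy 26-bit address space,
   where R15 also holds the PSR: bits 0-1 are the mode bits and bits
   26-31 the condition flags, so only the word-aligned PC field is kept.  */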
528 return (val & 0x03fffffc);
531 /* Return 1 if PC is the start of a compiler helper function which
532 can be safely ignored during prologue skipping. IS_THUMB is true
533 if the function is known to be a Thumb function due to the way it is being called.  */
536 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
538 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
539 struct bound_minimal_symbol msym;
541 msym = lookup_minimal_symbol_by_pc (pc);
542 if (msym.minsym != NULL
543 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
544 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
546 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
548 /* The GNU linker's Thumb call stub to foo is named __foo_from_thumb.  */
550 if (strstr (name, "_from_thumb") != NULL)
553 /* On soft-float targets, __truncdfsf2 is called to convert promoted
554 arguments to their argument types in non-prototyped functions.  */
556 if (startswith (name, "__truncdfsf2"))
558 if (startswith (name, "__aeabi_d2f"))
561 /* Internal functions related to thread-local storage. */
562 if (startswith (name, "__tls_get_addr"))
564 if (startswith (name, "__aeabi_read_tp"))
569 /* If we run against a stripped glibc, we may be unable to identify
570 special functions by name. Check for one important case,
571 __aeabi_read_tp, by comparing the *code* against the default
572 implementation (this is hand-written ARM assembler in glibc). */
575 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
576 == 0xe3e00a0f /* mov r0, #0xffff0fff */
577 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
578 == 0xe240f01f) /* sub pc, r0, #31 */
585 /* Extract the immediate from a movw/movt instruction of encoding T.  INSN1 is
586 the first 16 bits of the instruction, and INSN2 is the second 16 bits.  */
588 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
589 ((bits ((insn1), 0, 3) << 12) \
590 | (bits ((insn1), 10, 10) << 11) \
591 | (bits ((insn2), 12, 14) << 8) \
592 | bits ((insn2), 0, 7))
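/* Worked example of the T encoding: "movw r0, #0x1234" assembles to
   insn1 = 0xf241, insn2 = 0x2034, and the fields recombine as
   (0x1 << 12) | (0 << 11) | (0x2 << 8) | 0x34 == 0x1234.  */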
594 /* Extract the immediate from a movw/movt instruction of encoding A.  INSN is
595 the 32-bit instruction. */
596 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
597 ((bits ((insn), 16, 19) << 12) \
598 | bits ((insn), 0, 11))
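/* Likewise for the A encoding: "movw r0, #0x1234" assembles to
   0xe3010234, and (0x1 << 12) | 0x234 == 0x1234.  */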
600 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
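/* Illustrative values, following the ThumbExpandImmediate rules: 0x3ab
   (a "replicate byte" form) expands to 0xabababab, while 0x4ab (a
   rotated form) expands to 0x000000ab rotated right by 9, i.e.
   0x55800000.  */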
603 thumb_expand_immediate (unsigned int imm)
605 unsigned int count = imm >> 7;
613 return (imm & 0xff) | ((imm & 0xff) << 16);
615 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
617 return (imm & 0xff) | ((imm & 0xff) << 8)
618 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
621 return (0x80 | (imm & 0x7f)) << (32 - count);
624 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
625 epilogue, 0 otherwise. */
628 thumb_instruction_restores_sp (unsigned short insn)
630 return (insn == 0x46bd /* mov sp, r7 */
631 || (insn & 0xff80) == 0xb000 /* add sp, imm */
632 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
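/* For instance, 0x46bd ("mov sp, r7"), 0xb008 ("add sp, #32") and
   0xbc01 ("pop {r0}") all match the patterns above.  */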
635 /* Analyze a Thumb prologue, looking for a recognizable stack frame
636 and frame pointer. Scan until we encounter a store that could
637 clobber the stack frame unexpectedly, or an unknown instruction.
638 Return the last address which is definitely safe to skip for an
639 initial breakpoint. */
642 thumb_analyze_prologue (struct gdbarch *gdbarch,
643 CORE_ADDR start, CORE_ADDR limit,
644 struct arm_prologue_cache *cache)
646 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
647 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
651 CORE_ADDR unrecognized_pc = 0;
653 for (i = 0; i < 16; i++)
654 regs[i] = pv_register (i, 0);
655 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
657 while (start < limit)
661 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
663 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
668 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
671 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
672 whether to save LR (R14). */
673 mask = (insn & 0xff) | ((insn & 0x100) << 6);
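/* Shifting bit 8 left by six places moves it to bit 14, so the LR bit
   lines up with ARM_LR_REGNUM in the register mask used below.  */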
675 /* Calculate offsets of saved R0-R7 and LR. */
676 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
677 if (mask & (1 << regno))
679 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
681 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
684 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
686 offset = (insn & 0x7f) << 2; /* get scaled offset */
687 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
690 else if (thumb_instruction_restores_sp (insn))
692 /* Don't scan past the epilogue. */
695 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
696 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
698 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
699 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
700 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
702 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
703 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
704 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
706 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
707 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
708 && pv_is_constant (regs[bits (insn, 3, 5)]))
709 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
710 regs[bits (insn, 6, 8)]);
711 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
712 && pv_is_constant (regs[bits (insn, 3, 6)]))
714 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
715 int rm = bits (insn, 3, 6);
716 regs[rd] = pv_add (regs[rd], regs[rm]);
718 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
720 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
721 int src_reg = (insn & 0x78) >> 3;
722 regs[dst_reg] = regs[src_reg];
724 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
726 /* Handle stores to the stack. Normally pushes are used,
727 but with GCC -mtpcs-frame, there may be other stores
728 in the prologue to create the frame. */
729 int regno = (insn >> 8) & 0x7;
732 offset = (insn & 0xff) << 2;
733 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
735 if (stack.store_would_trash (addr))
738 stack.store (addr, 4, regs[regno]);
740 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
742 int rd = bits (insn, 0, 2);
743 int rn = bits (insn, 3, 5);
746 offset = bits (insn, 6, 10) << 2;
747 addr = pv_add_constant (regs[rn], offset);
749 if (stack.store_would_trash (addr))
752 stack.store (addr, 4, regs[rd]);
754 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
755 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
756 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
757 /* Ignore stores of argument registers to the stack. */
759 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
760 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
761 /* Ignore block loads from the stack, potentially copying
762 parameters from memory. */
764 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
765 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
766 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
767 /* Similarly ignore single loads from the stack. */
769 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
770 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
771 /* Skip register copies, i.e. saves to another register
772 instead of the stack. */
774 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
775 /* Recognize constant loads; even with small stacks these are necessary
777 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
778 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
780 /* Constant pool loads, for the same reason. */
781 unsigned int constant;
784 loc = start + 4 + bits (insn, 0, 7) * 4;
785 constant = read_memory_unsigned_integer (loc, 4, byte_order);
786 regs[bits (insn, 8, 10)] = pv_constant (constant);
788 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
790 unsigned short inst2;
792 inst2 = read_code_unsigned_integer (start + 2, 2,
793 byte_order_for_code);
795 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
797 /* BL, BLX. Allow some special function calls when
798 skipping the prologue; GCC generates these before
799 storing arguments to the stack. */
801 int j1, j2, imm1, imm2;
803 imm1 = sbits (insn, 0, 10);
804 imm2 = bits (inst2, 0, 10);
805 j1 = bit (inst2, 13);
806 j2 = bit (inst2, 11);
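/* Per the BL/BLX (T1/T2) encodings, I1 = NOT(J1 EOR S) and
   I2 = NOT(J2 EOR S); the XOR below folds those two bits into the
   offset already sign-extended from imm1 (S:imm10) and imm2 (imm11).  */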
808 offset = ((imm1 << 12) + (imm2 << 1));
809 offset ^= ((!j2) << 22) | ((!j1) << 23);
811 nextpc = start + 4 + offset;
812 /* For BLX make sure to clear the low bits. */
813 if (bit (inst2, 12) == 0)
814 nextpc = nextpc & 0xfffffffc;
816 if (!skip_prologue_function (gdbarch, nextpc,
817 bit (inst2, 12) != 0))
821 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!}, { registers } */
823 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
825 pv_t addr = regs[bits (insn, 0, 3)];
828 if (stack.store_would_trash (addr))
831 /* Calculate offsets of saved registers. */
832 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
833 if (inst2 & (1 << regno))
835 addr = pv_add_constant (addr, -4);
836 stack.store (addr, 4, regs[regno]);
840 regs[bits (insn, 0, 3)] = addr;
843 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2, [Rn, #+/-imm]{!} */
845 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
847 int regno1 = bits (inst2, 12, 15);
848 int regno2 = bits (inst2, 8, 11);
849 pv_t addr = regs[bits (insn, 0, 3)];
851 offset = inst2 & 0xff;
853 addr = pv_add_constant (addr, offset);
855 addr = pv_add_constant (addr, -offset);
857 if (stack.store_would_trash (addr))
860 stack.store (addr, 4, regs[regno1]);
861 stack.store (pv_add_constant (addr, 4),
865 regs[bits (insn, 0, 3)] = addr;
868 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
869 && (inst2 & 0x0c00) == 0x0c00
870 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
872 int regno = bits (inst2, 12, 15);
873 pv_t addr = regs[bits (insn, 0, 3)];
875 offset = inst2 & 0xff;
877 addr = pv_add_constant (addr, offset);
879 addr = pv_add_constant (addr, -offset);
881 if (stack.store_would_trash (addr))
884 stack.store (addr, 4, regs[regno]);
887 regs[bits (insn, 0, 3)] = addr;
890 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
891 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
893 int regno = bits (inst2, 12, 15);
896 offset = inst2 & 0xfff;
897 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
899 if (stack.store_would_trash (addr))
902 stack.store (addr, 4, regs[regno]);
905 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
906 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
907 /* Ignore stores of argument registers to the stack. */
910 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
911 && (inst2 & 0x0d00) == 0x0c00
912 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
913 /* Ignore stores of argument registers to the stack. */
916 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!], { registers } */
918 && (inst2 & 0x8000) == 0x0000
919 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
920 /* Ignore block loads from the stack, potentially copying
921 parameters from memory. */
924 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2, [Rn, #+/-imm] */
926 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
927 /* Similarly ignore dual loads from the stack. */
930 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
931 && (inst2 & 0x0d00) == 0x0c00
932 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
933 /* Similarly ignore single loads from the stack. */
936 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
937 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
938 /* Similarly ignore single loads from the stack. */
941 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
942 && (inst2 & 0x8000) == 0x0000)
944 unsigned int imm = ((bits (insn, 10, 10) << 11)
945 | (bits (inst2, 12, 14) << 8)
946 | bits (inst2, 0, 7));
948 regs[bits (inst2, 8, 11)]
949 = pv_add_constant (regs[bits (insn, 0, 3)],
950 thumb_expand_immediate (imm));
953 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
954 && (inst2 & 0x8000) == 0x0000)
956 unsigned int imm = ((bits (insn, 10, 10) << 11)
957 | (bits (inst2, 12, 14) << 8)
958 | bits (inst2, 0, 7));
960 regs[bits (inst2, 8, 11)]
961 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
964 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
965 && (inst2 & 0x8000) == 0x0000)
967 unsigned int imm = ((bits (insn, 10, 10) << 11)
968 | (bits (inst2, 12, 14) << 8)
969 | bits (inst2, 0, 7));
971 regs[bits (inst2, 8, 11)]
972 = pv_add_constant (regs[bits (insn, 0, 3)],
973 - (CORE_ADDR) thumb_expand_immediate (imm));
976 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
977 && (inst2 & 0x8000) == 0x0000)
979 unsigned int imm = ((bits (insn, 10, 10) << 11)
980 | (bits (inst2, 12, 14) << 8)
981 | bits (inst2, 0, 7));
983 regs[bits (inst2, 8, 11)]
984 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
987 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
989 unsigned int imm = ((bits (insn, 10, 10) << 11)
990 | (bits (inst2, 12, 14) << 8)
991 | bits (inst2, 0, 7));
993 regs[bits (inst2, 8, 11)]
994 = pv_constant (thumb_expand_immediate (imm));
997 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1000 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1002 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1005 else if (insn == 0xea5f /* mov.w Rd,Rm */
1006 && (inst2 & 0xf0f0) == 0)
1008 int dst_reg = (inst2 & 0x0f00) >> 8;
1009 int src_reg = inst2 & 0xf;
1010 regs[dst_reg] = regs[src_reg];
1013 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1015 /* Constant pool loads. */
1016 unsigned int constant;
1019 offset = bits (inst2, 0, 11);
1021 loc = start + 4 + offset;
1023 loc = start + 4 - offset;
1025 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1026 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1029 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1031 /* Constant pool loads. */
1032 unsigned int constant;
1035 offset = bits (inst2, 0, 7) << 2;
1037 loc = start + 4 + offset;
1039 loc = start + 4 - offset;
1041 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1042 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1044 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1045 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1048 else if (thumb2_instruction_changes_pc (insn, inst2))
1050 /* Don't scan past anything that might change control flow. */
1055 /* The optimizer might shove anything into the prologue,
1056 so we just skip what we don't recognize. */
1057 unrecognized_pc = start;
1062 else if (thumb_instruction_changes_pc (insn))
1064 /* Don't scan past anything that might change control flow. */
1069 /* The optimizer might shove anything into the prologue,
1070 so we just skip what we don't recognize. */
1071 unrecognized_pc = start;
1078 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1079 paddress (gdbarch, start));
1081 if (unrecognized_pc == 0)
1082 unrecognized_pc = start;
1085 return unrecognized_pc;
1087 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1089 /* Frame pointer is fp. Frame size is constant. */
1090 cache->framereg = ARM_FP_REGNUM;
1091 cache->framesize = -regs[ARM_FP_REGNUM].k;
1093 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1095 /* Frame pointer is r7. Frame size is constant. */
1096 cache->framereg = THUMB_FP_REGNUM;
1097 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1101 /* Try the stack pointer... this is a bit desperate. */
1102 cache->framereg = ARM_SP_REGNUM;
1103 cache->framesize = -regs[ARM_SP_REGNUM].k;
1106 for (i = 0; i < 16; i++)
1107 if (stack.find_reg (gdbarch, i, &offset))
1108 cache->saved_regs[i].addr = offset;
1110 return unrecognized_pc;
1114 /* Try to analyze the instructions starting from PC, which load the symbol
1115 __stack_chk_guard.  Return the address of the instruction after loading this
1116 symbol, set the destination register number to *DESTREG, and set the size of
1117 the loading instructions in *OFFSET.  Return 0 if the instructions are not recognized.  */
1121 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1122 unsigned int *destreg, int *offset)
1124 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1125 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1126 unsigned int low, high, address;
1131 unsigned short insn1
1132 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1134 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1136 *destreg = bits (insn1, 8, 10);
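/* A Thumb LDR (literal) addresses its pool entry relative to the
   instruction address rounded down to a word boundary plus 4, i.e.
   Align(PC, 4); the ARM-mode case further down uses pc + 8 instead.  */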
1138 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1139 address = read_memory_unsigned_integer (address, 4,
1140 byte_order_for_code);
1142 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1144 unsigned short insn2
1145 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1147 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1150 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1152 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1154 /* movt Rd, #const */
1155 if ((insn1 & 0xfbc0) == 0xf2c0)
1157 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1158 *destreg = bits (insn2, 8, 11);
1160 address = (high << 16 | low);
1167 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1169 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1171 address = bits (insn, 0, 11) + pc + 8;
1172 address = read_memory_unsigned_integer (address, 4,
1173 byte_order_for_code);
1175 *destreg = bits (insn, 12, 15);
1178 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1180 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1183 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1185 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1187 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1188 *destreg = bits (insn, 12, 15);
1190 address = (high << 16 | low);
1198 /* Try to skip a sequence of instructions used for stack protector. If PC
1199 points to the first instruction of this sequence, return the address of
1200 first instruction after this sequence; otherwise, return the original PC.
1202 On ARM, this sequence of instructions is composed of three main steps:
1203 Step 1: load symbol __stack_chk_guard,
1204 Step 2: load from address of __stack_chk_guard,
1205 Step 3: store it to somewhere else.
1207 The instructions in steps 2 and 3 are usually the same across ARM
1208 architectures: step 2 is a single instruction, 'ldr Rx, [Rn, #0]', and
1209 step 3 is also a single instruction, 'str Rx, [r7, #immd]'.  However,
1210 the instructions in step 1 vary between ARM architectures.  On ARMv7,
1213 movw Rn, #:lower16:__stack_chk_guard
1214 movt Rn, #:upper16:__stack_chk_guard
1221 .word __stack_chk_guard
1223 Since ldr/str are very common instructions, we can't use them alone as
1224 the 'fingerprint' or 'signature' of the stack protector sequence.  Instead
1225 we use the sequence {movw/movt, ldr}/ldr/str plus the symbol
1226 __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack protector code sequence.  */
1229 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1231 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1232 unsigned int basereg;
1233 struct bound_minimal_symbol stack_chk_guard;
1235 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1238 /* Try to parse the instructions in Step 1. */
1239 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1244 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1245 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1246 Otherwise, this sequence cannot be for the stack protector.  */
1247 if (stack_chk_guard.minsym == NULL
1248 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1253 unsigned int destreg;
1255 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1257 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1258 if ((insn & 0xf800) != 0x6800)
1260 if (bits (insn, 3, 5) != basereg)
1262 destreg = bits (insn, 0, 2);
1264 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1265 byte_order_for_code);
1266 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1267 if ((insn & 0xf800) != 0x6000)
1269 if (destreg != bits (insn, 0, 2))
1274 unsigned int destreg;
1276 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1278 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1279 if ((insn & 0x0e500000) != 0x04100000)
1281 if (bits (insn, 16, 19) != basereg)
1283 destreg = bits (insn, 12, 15);
1284 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1285 insn = read_code_unsigned_integer (pc + offset + 4,
1286 4, byte_order_for_code);
1287 if ((insn & 0x0e500000) != 0x04000000)
1289 if (bits (insn, 12, 15) != destreg)
1292 /* The total size of the two ldr/str instructions is 4 on Thumb-2,
1293 and 8 on ARM.  */
1294 if (is_thumb)
1295 return pc + offset + 4;
1296 else
1297 return pc + offset + 8;
1300 /* Advance the PC across any function entry prologue instructions to
1301 reach some "real" code.
1303 The APCS (ARM Procedure Call Standard) defines the following
1307 [stmfd sp!, {a1,a2,a3,a4}]
1308 stmfd sp!, {...,fp,ip,lr,pc}
1309 [stfe f7, [sp, #-12]!]
1310 [stfe f6, [sp, #-12]!]
1311 [stfe f5, [sp, #-12]!]
1312 [stfe f4, [sp, #-12]!]
1313 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1316 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1318 CORE_ADDR func_addr, limit_pc;
1320 /* See if we can determine the end of the prologue via the symbol table.
1321 If so, then return either PC, or the PC after the prologue, whichever is greater.  */
1323 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1325 CORE_ADDR post_prologue_pc
1326 = skip_prologue_using_sal (gdbarch, func_addr);
1327 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1329 if (post_prologue_pc)
1331 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1334 /* GCC always emits a line note before the prologue and another
1335 one after, even if the two are at the same address or on the
1336 same line. Take advantage of this so that we do not need to
1337 know every instruction that might appear in the prologue. We
1338 will have producer information for most binaries; if it is
1339 missing (e.g. for -gstabs), assume the GNU tools.
1340 if (post_prologue_pc
1342 || COMPUNIT_PRODUCER (cust) == NULL
1343 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1344 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1345 return post_prologue_pc;
1347 if (post_prologue_pc != 0)
1349 CORE_ADDR analyzed_limit;
1351 /* For non-GCC compilers, make sure the entire line is an
1352 acceptable prologue; GDB will round this function's
1353 return value up to the end of the following line so we
1354 can not skip just part of a line (and we do not want to).
1356 RealView does not treat the prologue specially, but does
1357 associate prologue code with the opening brace; so this
1358 lets us skip the first line if we think it is the opening brace.  */
1360 if (arm_pc_is_thumb (gdbarch, func_addr))
1361 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1362 post_prologue_pc, NULL);
1364 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1365 post_prologue_pc, NULL);
1367 if (analyzed_limit != post_prologue_pc)
1370 return post_prologue_pc;
1374 /* Can't determine prologue from the symbol table, need to examine instructions.  */
1377 /* Find an upper limit on the function prologue using the debug
1378 information. If the debug information could not be used to provide
1379 that bound, then use an arbitrary large number as the upper bound. */
1380 /* Like arm_scan_prologue, stop no later than pc + 64. */
1381 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1383 limit_pc = pc + 64; /* Magic. */
1386 /* Check if this is Thumb code. */
1387 if (arm_pc_is_thumb (gdbarch, pc))
1388 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1390 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1394 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1395 This function decodes a Thumb function prologue to determine:
1396 1) the size of the stack frame
1397 2) which registers are saved on it
1398 3) the offsets of saved regs
1399 4) the offset from the stack pointer to the frame pointer
1401 A typical Thumb function prologue would create this stack frame
1402 (offsets relative to FP)
1403 old SP -> 24 stack parameters
1404 20 LR
1405 16 R7
1406 R7 -> 0 local variables (16 bytes)
1407 SP -> -12 additional stack space (12 bytes)
1408 The frame size would thus be 36 bytes, and the frame offset would be
1409 12 bytes. The frame register is R7.
1411 The comments for thumb_analyze_prologue() describe the algorithm we use
1412 to detect the end of the prologue.  */
1416 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1417 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1419 CORE_ADDR prologue_start;
1420 CORE_ADDR prologue_end;
1422 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1425 /* See comment in arm_scan_prologue for an explanation of this heuristic.  */
1427 if (prologue_end > prologue_start + 64)
1429 prologue_end = prologue_start + 64;
1433 /* We're in the boondocks: we have no idea where the start of the function is.  */
1437 prologue_end = std::min (prologue_end, prev_pc);
1439 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1442 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0 otherwise.  */
1446 arm_instruction_restores_sp (unsigned int insn)
1448 if (bits (insn, 28, 31) != INST_NV)
1450 if ((insn & 0x0df0f000) == 0x0080d000
1451 /* ADD SP (register or immediate). */
1452 || (insn & 0x0df0f000) == 0x0040d000
1453 /* SUB SP (register or immediate). */
1454 || (insn & 0x0ffffff0) == 0x01a0d000
1456 || (insn & 0x0fff0000) == 0x08bd0000
1458 || (insn & 0x0fff0000) == 0x049d0000)
1459 /* POP of a single register. */
1466 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1467 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1468 fill it in.  Return the first address not recognized as a prologue instruction.
1471 We recognize all the instructions typically found in ARM prologues,
1472 plus harmless instructions which can be skipped (either for analysis
1473 purposes, or a more restrictive set that can be skipped when finding
1474 the end of the prologue). */
1477 arm_analyze_prologue (struct gdbarch *gdbarch,
1478 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1479 struct arm_prologue_cache *cache)
1481 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1483 CORE_ADDR offset, current_pc;
1484 pv_t regs[ARM_FPS_REGNUM];
1485 CORE_ADDR unrecognized_pc = 0;
1487 /* Search the prologue looking for instructions that set up the
1488 frame pointer, adjust the stack pointer, and save registers.
1490 Be careful, however, and if it doesn't look like a prologue,
1491 don't try to scan it. If, for instance, a frameless function
1492 begins with stmfd sp!, then we will tell ourselves there is
1493 a frame, which will confuse stack traceback, as well as "finish"
1494 and other operations that rely on a knowledge of the stack traceback.  */
1497 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1498 regs[regno] = pv_register (regno, 0);
1499 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1501 for (current_pc = prologue_start;
1502 current_pc < prologue_end;
1506 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1508 if (insn == 0xe1a0c00d) /* mov ip, sp */
1510 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1513 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1514 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1516 unsigned imm = insn & 0xff; /* immediate value */
1517 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1518 int rd = bits (insn, 12, 15);
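/* An ARM "modified immediate" is an 8-bit value rotated right by twice
   the 4-bit rotate field; the doubling is already folded into the
   ">> 7" above.  */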
1519 imm = (imm >> rot) | (imm << (32 - rot));
1520 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1523 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1524 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1526 unsigned imm = insn & 0xff; /* immediate value */
1527 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1528 int rd = bits (insn, 12, 15);
1529 imm = (imm >> rot) | (imm << (32 - rot));
1530 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1533 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd, [sp, #-4]! */
1536 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1538 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1539 stack.store (regs[ARM_SP_REGNUM], 4,
1540 regs[bits (insn, 12, 15)]);
1543 else if ((insn & 0xffff0000) == 0xe92d0000)
1544 /* stmfd sp!, {..., fp, ip, lr, pc}
1546 stmfd sp!, {a1, a2, a3, a4} */
1548 int mask = insn & 0xffff;
1550 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1553 /* Calculate offsets of saved registers. */
1554 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1555 if (mask & (1 << regno))
1558 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1559 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1562 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1563 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1564 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1566 /* No need to add this to saved_regs -- it's just an arg reg. */
1569 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1570 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1571 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1573 /* No need to add this to saved_regs -- it's just an arg reg. */
1576 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn, { registers } */
1578 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1580 /* No need to add this to saved_regs -- it's just arg regs. */
1583 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1585 unsigned imm = insn & 0xff; /* immediate value */
1586 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1587 imm = (imm >> rot) | (imm << (32 - rot));
1588 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1590 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1592 unsigned imm = insn & 0xff; /* immediate value */
1593 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1594 imm = (imm >> rot) | (imm << (32 - rot));
1595 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1597 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?, [sp, -#12]! */
1599 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1601 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1604 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1605 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1606 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1608 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4, [sp!] */
1610 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1612 int n_saved_fp_regs;
1613 unsigned int fp_start_reg, fp_bound_reg;
1615 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1618 if ((insn & 0x800) == 0x800) /* N0 is set */
1620 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1621 n_saved_fp_regs = 3;
1623 n_saved_fp_regs = 1;
1627 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1628 n_saved_fp_regs = 2;
1630 n_saved_fp_regs = 4;
1633 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1634 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1635 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1637 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1638 stack.store (regs[ARM_SP_REGNUM], 12,
1639 regs[fp_start_reg++]);
1642 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1644 /* Allow some special function calls when skipping the
1645 prologue; GCC generates these before storing arguments to the stack.  */
1647 CORE_ADDR dest = BranchDest (current_pc, insn);
1649 if (skip_prologue_function (gdbarch, dest, 0))
1654 else if ((insn & 0xf0000000) != 0xe0000000)
1655 break; /* Condition not true, exit early. */
1656 else if (arm_instruction_changes_pc (insn))
1657 /* Don't scan past anything that might change control flow. */
1659 else if (arm_instruction_restores_sp (insn))
1661 /* Don't scan past the epilogue. */
1664 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1665 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1666 /* Ignore block loads from the stack, potentially copying
1667 parameters from memory. */
1669 else if ((insn & 0xfc500000) == 0xe4100000
1670 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1671 /* Similarly ignore single loads from the stack. */
1673 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1674 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1675 register instead of the stack. */
1679 /* The optimizer might shove anything into the prologue.  If we are
1680 building up the cache (cache != NULL) from scanning the prologue,
1681 we just skip what we don't recognize and scan further to
1682 make the cache as complete as possible.  However, if we are skipping
1683 the prologue, we stop immediately on an unrecognized instruction.  */
1685 unrecognized_pc = current_pc;
1693 if (unrecognized_pc == 0)
1694 unrecognized_pc = current_pc;
1698 int framereg, framesize;
1700 /* The frame size is just the distance from the frame register
1701 to the original stack pointer. */
1702 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1704 /* Frame pointer is fp. */
1705 framereg = ARM_FP_REGNUM;
1706 framesize = -regs[ARM_FP_REGNUM].k;
1710 /* Try the stack pointer... this is a bit desperate. */
1711 framereg = ARM_SP_REGNUM;
1712 framesize = -regs[ARM_SP_REGNUM].k;
1715 cache->framereg = framereg;
1716 cache->framesize = framesize;
1718 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1719 if (stack.find_reg (gdbarch, regno, &offset))
1720 cache->saved_regs[regno].addr = offset;
1724 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1725 paddress (gdbarch, unrecognized_pc));
1727 return unrecognized_pc;
1731 arm_scan_prologue (struct frame_info *this_frame,
1732 struct arm_prologue_cache *cache)
1734 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1735 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1736 CORE_ADDR prologue_start, prologue_end;
1737 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1738 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1740 /* Assume there is no frame until proven otherwise. */
1741 cache->framereg = ARM_SP_REGNUM;
1742 cache->framesize = 0;
1744 /* Check for Thumb prologue. */
1745 if (arm_frame_is_thumb (this_frame))
1747 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1751 /* Find the function prologue. If we can't find the function in
1752 the symbol table, peek in the stack frame to find the PC. */
1753 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1756 /* One way to find the end of the prologue (which works well
1757 for unoptimized code) is to do the following:
1759 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1762 prologue_end = prev_pc;
1763 else if (sal.end < prologue_end)
1764 prologue_end = sal.end;
1766 This mechanism is very accurate so long as the optimizer
1767 doesn't move any instructions from the function body into the
1768 prologue. If this happens, sal.end will be the last
1769 instruction in the first hunk of prologue code just before
1770 the first instruction that the scheduler has moved from
1771 the body to the prologue.
1773 In order to make sure that we scan all of the prologue
1774 instructions, we use a slightly less accurate mechanism which
1775 may scan more than necessary. To help compensate for this
1776 lack of accuracy, the prologue scanning loop below contains
1777 several clauses which will cause the loop to terminate early if
1778 an implausible prologue instruction is encountered.
1784 The expression prologue_start + 64 is a suitable endpoint since it accounts for the largest
1785 possible prologue plus up to five instructions inserted by the scheduler.  */
1788 if (prologue_end > prologue_start + 64)
1790 prologue_end = prologue_start + 64; /* See above. */
1795 /* We have no symbol information. Our only option is to assume this
1796 function has a standard stack frame and the normal frame register.
1797 Then, we can find the value of our frame pointer on entrance to
1798 the callee (or at the present moment if this is the innermost frame).
1799 The value stored there should be the address of the stmfd + 8. */
1800 CORE_ADDR frame_loc;
1801 ULONGEST return_value;
1803 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1804 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1809 prologue_start = gdbarch_addr_bits_remove
1810 (gdbarch, return_value) - 8;
1811 prologue_end = prologue_start + 64; /* See above. */
1815 if (prev_pc < prologue_end)
1816 prologue_end = prev_pc;
1818 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1821 static struct arm_prologue_cache *
1822 arm_make_prologue_cache (struct frame_info *this_frame)
1825 struct arm_prologue_cache *cache;
1826 CORE_ADDR unwound_fp;
1828 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1829 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1831 arm_scan_prologue (this_frame, cache);
1833 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1834 if (unwound_fp == 0)
1837 cache->prev_sp = unwound_fp + cache->framesize;
1839 /* Calculate actual addresses of saved registers using offsets
1840 determined by arm_scan_prologue. */
1841 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1842 if (trad_frame_addr_p (cache->saved_regs, reg))
1843 cache->saved_regs[reg].addr += cache->prev_sp;
1848 /* Implementation of the stop_reason hook for arm_prologue frames. */
1850 static enum unwind_stop_reason
1851 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1854 struct arm_prologue_cache *cache;
1857 if (*this_cache == NULL)
1858 *this_cache = arm_make_prologue_cache (this_frame);
1859 cache = (struct arm_prologue_cache *) *this_cache;
1861 /* This is meant to halt the backtrace at "_start". */
1862 pc = get_frame_pc (this_frame);
1863 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1864 return UNWIND_OUTERMOST;
1866 /* If we've hit a wall, stop. */
1867 if (cache->prev_sp == 0)
1868 return UNWIND_OUTERMOST;
1870 return UNWIND_NO_REASON;
1873 /* Our frame ID for a normal frame is the current function's starting PC
1874 and the caller's SP when we were called. */
1877 arm_prologue_this_id (struct frame_info *this_frame,
1879 struct frame_id *this_id)
1881 struct arm_prologue_cache *cache;
1885 if (*this_cache == NULL)
1886 *this_cache = arm_make_prologue_cache (this_frame);
1887 cache = (struct arm_prologue_cache *) *this_cache;
1889 /* Use function start address as part of the frame ID. If we cannot
1890 identify the start address (due to missing symbol information),
1891 fall back to just using the current PC. */
1892 pc = get_frame_pc (this_frame);
1893 func = get_frame_func (this_frame);
1897 id = frame_id_build (cache->prev_sp, func);
1901 static struct value *
1902 arm_prologue_prev_register (struct frame_info *this_frame,
1906 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1907 struct arm_prologue_cache *cache;
1909 if (*this_cache == NULL)
1910 *this_cache = arm_make_prologue_cache (this_frame);
1911 cache = (struct arm_prologue_cache *) *this_cache;
1913 /* If we are asked to unwind the PC, then we need to return the LR
1914 instead. The prologue may save PC, but it will point into this
1915 frame's prologue, not the next frame's resume location. Also
1916 strip the saved T bit. A valid LR may have the low bit set, but
1917 a valid PC never does. */
1918 if (prev_regnum == ARM_PC_REGNUM)
1922 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1923 return frame_unwind_got_constant (this_frame, prev_regnum,
1924 arm_addr_bits_remove (gdbarch, lr));
1927 /* SP is generally not saved to the stack, but this frame is
1928 identified by the next frame's stack pointer at the time of the call.
1929 The value was already reconstructed into PREV_SP. */
1930 if (prev_regnum == ARM_SP_REGNUM)
1931 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1933 /* The CPSR may have been changed by the call instruction and by the
1934 called function. The only bit we can reconstruct is the T bit,
1935 by checking the low bit of LR as of the call. This is a reliable
1936 indicator of Thumb-ness except for some ARM v4T pre-interworking
1937 Thumb code, which could get away with a clear low bit as long as
1938 the called function did not use bx. Guess that all other
1939 bits are unchanged; the condition flags are presumably lost,
1940 but the processor status is likely valid. */
1941 if (prev_regnum == ARM_PS_REGNUM)
1944 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1946 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1947 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1948 if (IS_THUMB_ADDR (lr))
1952 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1955 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1959 struct frame_unwind arm_prologue_unwind = {
1961 arm_prologue_unwind_stop_reason,
1962 arm_prologue_this_id,
1963 arm_prologue_prev_register,
1965 default_frame_sniffer
1968 /* Maintain a list of ARM exception table entries per objfile, similar to the
1969 list of mapping symbols. We only cache entries for standard ARM-defined
1970 personality routines; the cache will contain only the frame unwinding
1971 instructions associated with the entry (not the descriptors). */
1973 static const struct objfile_data *arm_exidx_data_key;
1975 struct arm_exidx_entry
1980 typedef struct arm_exidx_entry arm_exidx_entry_s;
1981 DEF_VEC_O(arm_exidx_entry_s);
1983 struct arm_exidx_data
1985 VEC(arm_exidx_entry_s) **section_maps;
1989 arm_exidx_data_free (struct objfile *objfile, void *arg)
1991 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1994 for (i = 0; i < objfile->obfd->section_count; i++)
1995 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1999 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2000 const struct arm_exidx_entry *rhs)
2002 return lhs->addr < rhs->addr;
2005 static struct obj_section *
2006 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2008 struct obj_section *osect;
2010 ALL_OBJFILE_OSECTIONS (objfile, osect)
2011 if (bfd_get_section_flags (objfile->obfd,
2012 osect->the_bfd_section) & SEC_ALLOC)
2014 bfd_vma start, size;
2015 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2016 size = bfd_get_section_size (osect->the_bfd_section);
2018 if (start <= vma && vma < start + size)
2025 /* Parse contents of exception table and exception index sections
2026 of OBJFILE, and fill in the exception table entry cache.
2028 For each entry that refers to a standard ARM-defined personality
2029 routine, extract the frame unwinding instructions (from either
2030 the index or the table section).  The unwinding instructions are normalized by:
2032 - extracting them from the rest of the table data
2033 - converting to host endianness
2034 - appending the implicit 0xb0 ("Finish") code
2036 The extracted and normalized instructions are stored for later
2037 retrieval by the arm_find_exidx_entry routine. */
2040 arm_exidx_new_objfile (struct objfile *objfile)
2042 struct cleanup *cleanups;
2043 struct arm_exidx_data *data;
2044 asection *exidx, *extab;
2045 bfd_vma exidx_vma = 0, extab_vma = 0;
2046 bfd_size_type exidx_size = 0, extab_size = 0;
2047 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2050 /* If we've already touched this file, do nothing. */
2051 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2053 cleanups = make_cleanup (null_cleanup, NULL);
2055 /* Read contents of exception table and index. */
2056 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2059 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2060 exidx_size = bfd_get_section_size (exidx);
2061 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2062 make_cleanup (xfree, exidx_data);
2064 if (!bfd_get_section_contents (objfile->obfd, exidx,
2065 exidx_data, 0, exidx_size))
2067 do_cleanups (cleanups);
2072 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2075 extab_vma = bfd_section_vma (objfile->obfd, extab);
2076 extab_size = bfd_get_section_size (extab);
2077 extab_data = (gdb_byte *) xmalloc (extab_size);
2078 make_cleanup (xfree, extab_data);
2080 if (!bfd_get_section_contents (objfile->obfd, extab,
2081 extab_data, 0, extab_size))
2083 do_cleanups (cleanups);
2088 /* Allocate exception table data structure. */
2089 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2090 set_objfile_data (objfile, arm_exidx_data_key, data);
2091 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2092 objfile->obfd->section_count,
2093 VEC(arm_exidx_entry_s) *);
2095 /* Fill in exception table. */
2096 for (i = 0; i < exidx_size / 8; i++)
2098 struct arm_exidx_entry new_exidx_entry;
2099 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2100 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2101 bfd_vma addr = 0, word = 0;
2102 int n_bytes = 0, n_words = 0;
2103 struct obj_section *sec;
2104 gdb_byte *entry = NULL;
2106 /* Extract address of start of function. */
2107 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2108 idx += exidx_vma + i * 8;
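/* Annotation, not part of the original source: the first word of each
   index entry is a "prel31" value, i.e. a sign-extended 31-bit offset
   relative to the word's own address.  The two statements above perform
   the sign extension and then add the word's address.  A minimal sketch
   of the same idiom, with a worked example:

     int32_t prel31_to_offset (uint32_t w)
     {
       return (int32_t) (((w & 0x7fffffff) ^ 0x40000000) - 0x40000000);
     }

   For w = 0x7ffffffc this yields -4, so if the index word lives at
   0x10000, the function it describes starts at 0xfffc.  */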
2110 /* Find section containing function and compute section offset. */
2111 sec = arm_obj_section_from_vma (objfile, idx);
2114 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2116 /* Determine address of exception table entry. */
2119 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2121 else if ((val & 0xff000000) == 0x80000000)
2123 /* Exception table entry embedded in .ARM.exidx
2124 -- must be short form. */
2128 else if (!(val & 0x80000000))
2130 /* Exception table entry in .ARM.extab. */
2131 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2132 addr += exidx_vma + i * 8 + 4;
2134 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2136 word = bfd_h_get_32 (objfile->obfd,
2137 extab_data + addr - extab_vma);
2140 if ((word & 0xff000000) == 0x80000000)
2145 else if ((word & 0xff000000) == 0x81000000
2146 || (word & 0xff000000) == 0x82000000)
2150 n_words = ((word >> 16) & 0xff);
2152 else if (!(word & 0x80000000))
2155 struct obj_section *pers_sec;
2156 int gnu_personality = 0;
2158 /* Custom personality routine. */
2159 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2160 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2162 /* Check whether we've got one of the variants of the
2163 GNU personality routines. */
2164 pers_sec = arm_obj_section_from_vma (objfile, pers);
2167 static const char *personality[] =
2169 "__gcc_personality_v0",
2170 "__gxx_personality_v0",
2171 "__gcj_personality_v0",
2172 "__gnu_objc_personality_v0",
2176 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2179 for (k = 0; personality[k]; k++)
2180 if (lookup_minimal_symbol_by_pc_name
2181 (pc, personality[k], objfile))
2183 gnu_personality = 1;
2188 /* If so, the next word contains a word count in the high
2189 byte, followed by the same unwind instructions as the
2190 pre-defined forms. */
2192 && addr + 4 <= extab_vma + extab_size)
2194 word = bfd_h_get_32 (objfile->obfd,
2195 extab_data + addr - extab_vma);
2198 n_words = ((word >> 24) & 0xff);
2204 /* Sanity check address. */
2206 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2207 n_words = n_bytes = 0;
2209 /* The unwind instructions reside in WORD (only the N_BYTES least
2210 significant bytes are valid), followed by N_WORDS words in the
2211 extab section starting at ADDR. */
2212 if (n_bytes || n_words)
2215 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2216 n_bytes + n_words * 4 + 1);
2219 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2223 word = bfd_h_get_32 (objfile->obfd,
2224 extab_data + addr - extab_vma);
2227 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2228 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2229 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2230 *p++ = (gdb_byte) (word & 0xff);
2233 /* Implied "Finish" to terminate the list. */
2237 /* Push the entry onto the vector. Entries are guaranteed to always
2238 appear in order of increasing addresses. */
2239 new_exidx_entry.addr = idx;
2240 new_exidx_entry.entry = entry;
2241 VEC_safe_push (arm_exidx_entry_s,
2242 data->section_maps[sec->the_bfd_section->index],
2246 do_cleanups (cleanups);
2249 /* Search for the exception table entry covering MEMADDR. If one is found,
2250 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2251 set *START to the start of the region covered by this entry. */
2254 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2256 struct obj_section *sec;
2258 sec = find_pc_section (memaddr);
2261 struct arm_exidx_data *data;
2262 VEC(arm_exidx_entry_s) *map;
2263 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2266 data = ((struct arm_exidx_data *)
2267 objfile_data (sec->objfile, arm_exidx_data_key));
2270 map = data->section_maps[sec->the_bfd_section->index];
2271 if (!VEC_empty (arm_exidx_entry_s, map))
2273 struct arm_exidx_entry *map_sym;
2275 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2276 arm_compare_exidx_entries);
2278 /* VEC_lower_bound finds the earliest ordered insertion
2279 point. If the following symbol starts at this exact
2280 address, we use that; otherwise, the preceding
2281 exception table entry covers this address. */
2282 if (idx < VEC_length (arm_exidx_entry_s, map))
2284 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2285 if (map_sym->addr == map_key.addr)
2288 *start = map_sym->addr + obj_section_addr (sec);
2289 return map_sym->entry;
2295 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2297 *start = map_sym->addr + obj_section_addr (sec);
2298 return map_sym->entry;
2307 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2308 instruction list from the ARM exception table entry ENTRY, allocate and
2309 return a prologue cache structure describing how to unwind this frame.
2311 Return NULL if the unwinding instruction list contains a "spare",
2312 "reserved" or "refuse to unwind" instruction as defined in section
2313 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2314 for the ARM Architecture" document. */
2316 static struct arm_prologue_cache *
2317 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2322 struct arm_prologue_cache *cache;
2323 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2324 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2330 /* Whenever we reload SP, we have to retrieve its actual
2331 value in the current frame. */
2334 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2336 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2337 vsp = get_frame_register_unsigned (this_frame, reg);
2341 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2342 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2348 /* Decode next unwind instruction. */
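/* Annotation, not part of the original source: the bytes decoded below
   follow the encoding in "9.3 Frame unwinding instructions" of the ARM
   EHABI.  A short worked example matching the cases below: the sequence

     0xa8 0xb0    ->  pop {r4, lr}; finish

   first hits the (insn & 0xf0) == 0xa0 case with count 0 and the LR bit
   set, recording r4 and LR at the current VSP, and then the 0xb0 case,
   which copies LR into PC if PC was not popped explicitly.  */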
2351 if ((insn & 0xc0) == 0)
2353 int offset = insn & 0x3f;
2354 vsp += (offset << 2) + 4;
2356 else if ((insn & 0xc0) == 0x40)
2358 int offset = insn & 0x3f;
2359 vsp -= (offset << 2) + 4;
2361 else if ((insn & 0xf0) == 0x80)
2363 int mask = ((insn & 0xf) << 8) | *entry++;
2366 /* The special case of an all-zero mask identifies
2367 "Refuse to unwind". We return NULL to fall back
2368 to the prologue analyzer. */
2372 /* Pop registers r4..r15 under mask. */
2373 for (i = 0; i < 12; i++)
2374 if (mask & (1 << i))
2376 cache->saved_regs[4 + i].addr = vsp;
2380 /* Special-case popping SP -- we need to reload vsp. */
2381 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2384 else if ((insn & 0xf0) == 0x90)
2386 int reg = insn & 0xf;
2388 /* Reserved cases. */
2389 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2392 /* Set SP from another register and mark VSP for reload. */
2393 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2396 else if ((insn & 0xf0) == 0xa0)
2398 int count = insn & 0x7;
2399 int pop_lr = (insn & 0x8) != 0;
2402 /* Pop r4..r[4+count]. */
2403 for (i = 0; i <= count; i++)
2405 cache->saved_regs[4 + i].addr = vsp;
2409 /* If indicated by flag, pop LR as well. */
2412 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2416 else if (insn == 0xb0)
2418 /* We could only have updated PC by popping into it; if so, it
2419 will show up as an address. Otherwise, copy LR into PC. */
2420 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2421 cache->saved_regs[ARM_PC_REGNUM]
2422 = cache->saved_regs[ARM_LR_REGNUM];
2427 else if (insn == 0xb1)
2429 int mask = *entry++;
2432 /* All-zero mask and mask >= 16 is "spare". */
2433 if (mask == 0 || mask >= 16)
2436 /* Pop r0..r3 under mask. */
2437 for (i = 0; i < 4; i++)
2438 if (mask & (1 << i))
2440 cache->saved_regs[i].addr = vsp;
2444 else if (insn == 0xb2)
2446 ULONGEST offset = 0;
2451 offset |= (*entry & 0x7f) << shift;
2454 while (*entry++ & 0x80);
2456 vsp += 0x204 + (offset << 2);
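/* Annotation, not part of the original source: the operand of 0xb2 is a
   ULEB128 value read by the loop above.  For example, a single operand
   byte 0x00 gives vsp += 0x204 (516 bytes), while the two bytes
   0x81 0x01 decode to 129, giving vsp += 0x204 + (129 << 2) = 0x408.  */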
2458 else if (insn == 0xb3)
2460 int start = *entry >> 4;
2461 int count = (*entry++) & 0xf;
2464 /* Only registers D0..D15 are valid here. */
2465 if (start + count >= 16)
2468 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2469 for (i = 0; i <= count; i++)
2471 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2475 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2478 else if ((insn & 0xf8) == 0xb8)
2480 int count = insn & 0x7;
2483 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2484 for (i = 0; i <= count; i++)
2486 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2490 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2493 else if (insn == 0xc6)
2495 int start = *entry >> 4;
2496 int count = (*entry++) & 0xf;
2499 /* Only registers WR0..WR15 are valid. */
2500 if (start + count >= 16)
2503 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2504 for (i = 0; i <= count; i++)
2506 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2510 else if (insn == 0xc7)
2512 int mask = *entry++;
2515 /* All-zero mask and mask >= 16 is "spare". */
2516 if (mask == 0 || mask >= 16)
2519 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2520 for (i = 0; i < 4; i++)
2521 if (mask & (1 << i))
2523 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2527 else if ((insn & 0xf8) == 0xc0)
2529 int count = insn & 0x7;
2532 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2533 for (i = 0; i <= count; i++)
2535 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2539 else if (insn == 0xc8)
2541 int start = *entry >> 4;
2542 int count = (*entry++) & 0xf;
2545 /* Only registers D0..D31 are valid. */
2546 if (start + count >= 16)
2549 /* Pop VFP double-precision registers
2550 D[16+start]..D[16+start+count]. */
2551 for (i = 0; i <= count; i++)
2553 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2557 else if (insn == 0xc9)
2559 int start = *entry >> 4;
2560 int count = (*entry++) & 0xf;
2563 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2564 for (i = 0; i <= count; i++)
2566 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2570 else if ((insn & 0xf8) == 0xd0)
2572 int count = insn & 0x7;
2575 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2576 for (i = 0; i <= count; i++)
2578 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2584 /* Everything else is "spare". */
2589 /* If we restore SP from a register, assume this was the frame register.
2590 Otherwise just fall back to SP as frame register. */
2591 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2592 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2594 cache->framereg = ARM_SP_REGNUM;
2596 /* Determine offset to previous frame. */
2598 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2600 /* We already got the previous SP. */
2601 cache->prev_sp = vsp;
2606 /* Unwinding via ARM exception table entries. Note that the sniffer
2607 already computes a filled-in prologue cache, which is then used
2608 with the same arm_prologue_this_id and arm_prologue_prev_register
2609 routines also used for prologue-parsing based unwinding. */
2612 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2613 struct frame_info *this_frame,
2614 void **this_prologue_cache)
2616 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2617 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2618 CORE_ADDR addr_in_block, exidx_region, func_start;
2619 struct arm_prologue_cache *cache;
2622 /* See if we have an ARM exception table entry covering this address. */
2623 addr_in_block = get_frame_address_in_block (this_frame);
2624 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2628 /* The ARM exception table does not describe unwind information
2629 for arbitrary PC values, but is guaranteed to be correct only
2630 at call sites. We have to decide here whether we want to use
2631 ARM exception table information for this frame, or fall back
2632 to using prologue parsing. (Note that if we have DWARF CFI,
2633 this sniffer isn't even called -- CFI is always preferred.)
2635 Before we make this decision, however, we check whether we
2636 actually have *symbol* information for the current frame.
2637 If not, prologue parsing would not work anyway, so we might
2638 as well use the exception table and hope for the best. */
2639 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2643 /* If the next frame is "normal", we are at a call site in this
2644 frame, so exception information is guaranteed to be valid. */
2645 if (get_next_frame (this_frame)
2646 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2649 /* We also assume exception information is valid if we're currently
2650 blocked in a system call. The system library is supposed to
2651 ensure this, so that e.g. pthread cancellation works. */
2652 if (arm_frame_is_thumb (this_frame))
2656 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2657 2, byte_order_for_code, &insn)
2658 && (insn & 0xff00) == 0xdf00 /* svc */)
2665 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2666 4, byte_order_for_code, &insn)
2667 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2671 /* Bail out if we don't know that exception information is valid. */
2675 /* The ARM exception index does not mark the *end* of the region
2676 covered by the entry, and some functions will not have any entry.
2677 To correctly recognize the end of the covered region, the linker
2678 should have inserted dummy records with a CANTUNWIND marker.
2680 Unfortunately, current versions of GNU ld do not reliably do
2681 this, and thus we may have found an incorrect entry above.
2682 As a (temporary) sanity check, we only use the entry if it
2683 lies *within* the bounds of the function. Note that this check
2684 might reject perfectly valid entries that just happen to cover
2685 multiple functions; therefore this check ought to be removed
2686 once the linker is fixed. */
2687 if (func_start > exidx_region)
2691 /* Decode the list of unwinding instructions into a prologue cache.
2692 Note that this may fail due to e.g. a "refuse to unwind" code. */
2693 cache = arm_exidx_fill_cache (this_frame, entry);
2697 *this_prologue_cache = cache;
2701 struct frame_unwind arm_exidx_unwind = {
2703 default_frame_unwind_stop_reason,
2704 arm_prologue_this_id,
2705 arm_prologue_prev_register,
2707 arm_exidx_unwind_sniffer
2710 static struct arm_prologue_cache *
2711 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2713 struct arm_prologue_cache *cache;
2716 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2717 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2719 /* Still rely on the offset calculated from the prologue. */
2720 arm_scan_prologue (this_frame, cache);
2722 /* Since we are in epilogue, the SP has been restored. */
2723 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2725 /* Calculate actual addresses of saved registers using offsets
2726 determined by arm_scan_prologue. */
2727 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2728 if (trad_frame_addr_p (cache->saved_regs, reg))
2729 cache->saved_regs[reg].addr += cache->prev_sp;
2734 /* Implementation of function hook 'this_id' in
2735 'struct frame_unwind' for epilogue unwinder. */
2738 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2740 struct frame_id *this_id)
2742 struct arm_prologue_cache *cache;
2745 if (*this_cache == NULL)
2746 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2747 cache = (struct arm_prologue_cache *) *this_cache;
2749 /* Use function start address as part of the frame ID. If we cannot
2750 identify the start address (due to missing symbol information),
2751 fall back to just using the current PC. */
2752 pc = get_frame_pc (this_frame);
2753 func = get_frame_func (this_frame);
2757 (*this_id) = frame_id_build (cache->prev_sp, pc);
2760 /* Implementation of function hook 'prev_register' in
2761 'struct frame_unwind' for epilogue unwinder. */
2763 static struct value *
2764 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2765 void **this_cache, int regnum)
2767 if (*this_cache == NULL)
2768 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2770 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2773 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2775 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2778 /* Implementation of function hook 'sniffer' in
2779 'struct frame_unwind' for epilogue unwinder. */
2782 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2783 struct frame_info *this_frame,
2784 void **this_prologue_cache)
2786 if (frame_relative_level (this_frame) == 0)
2788 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2789 CORE_ADDR pc = get_frame_pc (this_frame);
2791 if (arm_frame_is_thumb (this_frame))
2792 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2794 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2800 /* Frame unwinder from epilogue. */
2802 static const struct frame_unwind arm_epilogue_frame_unwind =
2805 default_frame_unwind_stop_reason,
2806 arm_epilogue_frame_this_id,
2807 arm_epilogue_frame_prev_register,
2809 arm_epilogue_frame_sniffer,
2812 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2813 trampoline, return the target PC. Otherwise return 0.
2815 void call0a (char c, short s, int i, long l) {}
2819 (*pointer_to_call0a) (c, s, i, l);
2822 Instead of calling a stub library function _call_via_xx (xx is
2823 the register name), GCC may inline the trampoline in the object
2824 file as below (register r2 has the address of call0a).
2827 .type main, %function
2836 The trampoline 'bx r2' doesn't belong to main. */
2839 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2841 /* The heuristic for recognizing such a trampoline is that FRAME is
2842 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2843 if (arm_frame_is_thumb (frame))
2847 if (target_read_memory (pc, buf, 2) == 0)
2849 struct gdbarch *gdbarch = get_frame_arch (frame);
2850 enum bfd_endian byte_order_for_code
2851 = gdbarch_byte_order_for_code (gdbarch);
2853 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2855 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2858 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2860 /* Clear the LSB so that gdb core sets step-resume
2861 breakpoint at the right address. */
2862 return UNMAKE_THUMB_ADDR (dest);
2870 static struct arm_prologue_cache *
2871 arm_make_stub_cache (struct frame_info *this_frame)
2873 struct arm_prologue_cache *cache;
2875 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2876 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2878 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2883 /* Our frame ID for a stub frame is the current SP and LR. */
2886 arm_stub_this_id (struct frame_info *this_frame,
2888 struct frame_id *this_id)
2890 struct arm_prologue_cache *cache;
2892 if (*this_cache == NULL)
2893 *this_cache = arm_make_stub_cache (this_frame);
2894 cache = (struct arm_prologue_cache *) *this_cache;
2896 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2900 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2901 struct frame_info *this_frame,
2902 void **this_prologue_cache)
2904 CORE_ADDR addr_in_block;
2906 CORE_ADDR pc, start_addr;
2909 addr_in_block = get_frame_address_in_block (this_frame);
2910 pc = get_frame_pc (this_frame);
2911 if (in_plt_section (addr_in_block)
2912 /* We also use the stub unwinder if the target memory is unreadable,
2913 to avoid having the prologue unwinder try to read it. */
2914 || target_read_memory (pc, dummy, 4) != 0)
2917 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2918 && arm_skip_bx_reg (this_frame, pc) != 0)
2924 struct frame_unwind arm_stub_unwind = {
2926 default_frame_unwind_stop_reason,
2928 arm_prologue_prev_register,
2930 arm_stub_unwind_sniffer
2933 /* Put here the code to store, into CACHE->saved_regs, the addresses
2934 of the saved registers of the frame described by THIS_FRAME. CACHE is returned. */
2937 static struct arm_prologue_cache *
2938 arm_m_exception_cache (struct frame_info *this_frame)
2940 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2941 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2942 struct arm_prologue_cache *cache;
2943 CORE_ADDR unwound_sp;
2946 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2947 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2949 unwound_sp = get_frame_register_unsigned (this_frame,
2952 /* The hardware saves eight 32-bit words, comprising xPSR,
2953 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2954 "B1.5.6 Exception entry behavior" in
2955 "ARMv7-M Architecture Reference Manual". */
2956 cache->saved_regs[0].addr = unwound_sp;
2957 cache->saved_regs[1].addr = unwound_sp + 4;
2958 cache->saved_regs[2].addr = unwound_sp + 8;
2959 cache->saved_regs[3].addr = unwound_sp + 12;
2960 cache->saved_regs[12].addr = unwound_sp + 16;
2961 cache->saved_regs[14].addr = unwound_sp + 20;
2962 cache->saved_regs[15].addr = unwound_sp + 24;
2963 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2965 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2966 aligner between the top of the 32-byte stack frame and the
2967 previous context's stack pointer. */
2968 cache->prev_sp = unwound_sp + 32;
2969 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2970 && (xpsr & (1 << 9)) != 0)
2971 cache->prev_sp += 4;
2976 /* Implementation of function hook 'this_id' in
2977 'struct frame_unwind'. */
2980 arm_m_exception_this_id (struct frame_info *this_frame,
2982 struct frame_id *this_id)
2984 struct arm_prologue_cache *cache;
2986 if (*this_cache == NULL)
2987 *this_cache = arm_m_exception_cache (this_frame);
2988 cache = (struct arm_prologue_cache *) *this_cache;
2990 /* Our frame ID for an M-profile exception frame is the current SP and PC. */
2991 *this_id = frame_id_build (cache->prev_sp,
2992 get_frame_pc (this_frame));
2995 /* Implementation of function hook 'prev_register' in
2996 'struct frame_unwind'. */
2998 static struct value *
2999 arm_m_exception_prev_register (struct frame_info *this_frame,
3003 struct arm_prologue_cache *cache;
3005 if (*this_cache == NULL)
3006 *this_cache = arm_m_exception_cache (this_frame);
3007 cache = (struct arm_prologue_cache *) *this_cache;
3009 /* The value was already reconstructed into PREV_SP. */
3010 if (prev_regnum == ARM_SP_REGNUM)
3011 return frame_unwind_got_constant (this_frame, prev_regnum,
3014 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3018 /* Implementation of function hook 'sniffer' in
3019 'struct frame_unwind'. */
3022 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3023 struct frame_info *this_frame,
3024 void **this_prologue_cache)
3026 CORE_ADDR this_pc = get_frame_pc (this_frame);
3028 /* No need to check is_m; this sniffer is only registered for
3029 M-profile architectures. */
3031 /* Check if exception frame returns to a magic PC value. */
3032 return arm_m_addr_is_magic (this_pc);
3035 /* Frame unwinder for M-profile exceptions. */
3037 struct frame_unwind arm_m_exception_unwind =
3040 default_frame_unwind_stop_reason,
3041 arm_m_exception_this_id,
3042 arm_m_exception_prev_register,
3044 arm_m_exception_unwind_sniffer
3048 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3050 struct arm_prologue_cache *cache;
3052 if (*this_cache == NULL)
3053 *this_cache = arm_make_prologue_cache (this_frame);
3054 cache = (struct arm_prologue_cache *) *this_cache;
3056 return cache->prev_sp - cache->framesize;
3059 struct frame_base arm_normal_base = {
3060 &arm_prologue_unwind,
3061 arm_normal_frame_base,
3062 arm_normal_frame_base,
3063 arm_normal_frame_base
3066 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3067 dummy frame. The frame ID's base needs to match the TOS value
3068 saved by save_dummy_frame_tos() and returned from
3069 arm_push_dummy_call, and the PC needs to match the dummy frame's breakpoint. */
3072 static struct frame_id
3073 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3075 return frame_id_build (get_frame_register_unsigned (this_frame,
3077 get_frame_pc (this_frame));
3080 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3081 be used to construct the previous frame's ID, after looking up the
3082 containing function). */
3085 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3088 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3089 return arm_addr_bits_remove (gdbarch, pc);
3093 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3095 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3098 static struct value *
3099 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3102 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3104 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3109 /* The PC is normally copied from the return column, which
3110 describes saves of LR. However, that version may have an
3111 extra bit set to indicate Thumb state. The bit is not part of the PC. */
3113 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3114 return frame_unwind_got_constant (this_frame, regnum,
3115 arm_addr_bits_remove (gdbarch, lr));
3118 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3119 cpsr = get_frame_register_unsigned (this_frame, regnum);
3120 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3121 if (IS_THUMB_ADDR (lr))
3125 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3128 internal_error (__FILE__, __LINE__,
3129 _("Unexpected register %d"), regnum);
3134 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3135 struct dwarf2_frame_state_reg *reg,
3136 struct frame_info *this_frame)
3142 reg->how = DWARF2_FRAME_REG_FN;
3143 reg->loc.fn = arm_dwarf2_prev_register;
3146 reg->how = DWARF2_FRAME_REG_CFA;
3151 /* Implement the stack_frame_destroyed_p gdbarch method. */
3154 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3156 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3157 unsigned int insn, insn2;
3158 int found_return = 0, found_stack_adjust = 0;
3159 CORE_ADDR func_start, func_end;
3163 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3166 /* The epilogue is a sequence of instructions along the following lines:
3168 - add stack frame size to SP or FP
3169 - [if frame pointer used] restore SP from FP
3170 - restore registers from SP [may include PC]
3171 - a return-type instruction [if PC wasn't already restored]
3173 In a first pass, we scan forward from the current PC and verify the
3174 instructions we find as compatible with this sequence, ending in a return instruction.
3177 However, this is not sufficient to distinguish indirect function calls
3178 within a function from indirect tail calls in the epilogue in some cases.
3179 Therefore, if we didn't already find any SP-changing instruction during
3180 forward scan, we add a backward scanning heuristic to ensure we actually
3181 are in the epilogue. */
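/* Annotation, not part of the original source: a typical Thumb epilogue
   matched by the forward scan below might look like

     add  sp, #16          -- restores the stack frame
     pop  {r4, r5, pc}     -- pop including PC, counts as a return

   while the backward scan at the end is a heuristic check that the
   instruction just before PC already adjusted SP, confirming that PC
   really lies inside an epilogue.  */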
3184 while (scan_pc < func_end && !found_return)
3186 if (target_read_memory (scan_pc, buf, 2))
3190 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3192 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3194 else if (insn == 0x46f7) /* mov pc, lr */
3196 else if (thumb_instruction_restores_sp (insn))
3198 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3201 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3203 if (target_read_memory (scan_pc, buf, 2))
3207 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3209 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3211 if (insn2 & 0x8000) /* <registers> include PC. */
3214 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3215 && (insn2 & 0x0fff) == 0x0b04)
3217 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3220 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3221 && (insn2 & 0x0e00) == 0x0a00)
3233 /* Since any instruction in the epilogue sequence, with the possible
3234 exception of return itself, updates the stack pointer, we need to
3235 scan backwards for at most one instruction. Try either a 16-bit or
3236 a 32-bit instruction. This is just a heuristic, so we do not worry
3237 too much about false positives. */
3239 if (pc - 4 < func_start)
3241 if (target_read_memory (pc - 4, buf, 4))
3244 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3245 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3247 if (thumb_instruction_restores_sp (insn2))
3248 found_stack_adjust = 1;
3249 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3250 found_stack_adjust = 1;
3251 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3252 && (insn2 & 0x0fff) == 0x0b04)
3253 found_stack_adjust = 1;
3254 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3255 && (insn2 & 0x0e00) == 0x0a00)
3256 found_stack_adjust = 1;
3258 return found_stack_adjust;
3262 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3264 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3267 CORE_ADDR func_start, func_end;
3269 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3272 /* We are in the epilogue if the previous instruction was a stack
3273 adjustment and the next instruction is a possible return (bx, mov
3274 pc, or pop). We could have to scan backwards to find the stack
3275 adjustment, or forwards to find the return, but this is a decent
3276 approximation. First scan forwards. */
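/* Annotation, not part of the original source: a typical ARM epilogue
   recognized below is

     ldmfd  sp!, {r4, r11, pc}      -- encoding 0xe8bd8810

   which matches the LDMIA test ((insn & 0x0fff0000) == 0x08bd0000 with
   PC or LR in the register list), while "bx lr" (0xe12fff1e) matches
   the 0x012fff10 pattern.  */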
3279 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3280 if (bits (insn, 28, 31) != INST_NV)
3282 if ((insn & 0x0ffffff0) == 0x012fff10)
3285 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3288 else if ((insn & 0x0fff0000) == 0x08bd0000
3289 && (insn & 0x0000c000) != 0)
3290 /* POP (LDMIA), including PC or LR. */
3297 /* Scan backwards. This is just a heuristic, so do not worry about
3298 false positives from mode changes. */
3300 if (pc < func_start + 4)
3303 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3304 if (arm_instruction_restores_sp (insn))
3310 /* Implement the stack_frame_destroyed_p gdbarch method. */
3313 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3315 if (arm_pc_is_thumb (gdbarch, pc))
3316 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3318 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3321 /* When arguments must be pushed onto the stack, they go on in reverse
3322 order. The code below implements a FILO (stack) to do this. */
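/* Annotation, not part of the original source: a sketch of how this list
   is used by arm_push_dummy_call below, with hypothetical buffers:

     struct stack_item *si = NULL;
     si = push_stack_item (si, buf_a, INT_REGISTER_SIZE);  -- first arg word
     si = push_stack_item (si, buf_b, INT_REGISTER_SIZE);  -- second arg word
     while (si)
       si = pop_stack_item (si);   -- pops buf_b first, then buf_a

   The final write-out loop pops the items in reverse order while laying
   them out below SP, so the first-pushed (leftmost) argument word ends
   up at the lowest stack address.  */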
3327 struct stack_item *prev;
3331 static struct stack_item *
3332 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3334 struct stack_item *si;
3335 si = XNEW (struct stack_item);
3336 si->data = (gdb_byte *) xmalloc (len);
3339 memcpy (si->data, contents, len);
3343 static struct stack_item *
3344 pop_stack_item (struct stack_item *si)
3346 struct stack_item *dead = si;
3354 /* Return the alignment (in bytes) of the given type. */
3357 arm_type_align (struct type *t)
3363 t = check_typedef (t);
3364 switch (TYPE_CODE (t))
3367 /* Should never happen. */
3368 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3372 case TYPE_CODE_ENUM:
3376 case TYPE_CODE_RANGE:
3378 case TYPE_CODE_RVALUE_REF:
3379 case TYPE_CODE_CHAR:
3380 case TYPE_CODE_BOOL:
3381 return TYPE_LENGTH (t);
3383 case TYPE_CODE_ARRAY:
3384 if (TYPE_VECTOR (t))
3386 /* Use the natural alignment for vector types (the same as for
3387 scalar types), but cap the alignment at 64 bits. */
3388 if (TYPE_LENGTH (t) > 8)
3391 return TYPE_LENGTH (t);
3394 return arm_type_align (TYPE_TARGET_TYPE (t));
3395 case TYPE_CODE_COMPLEX:
3396 return arm_type_align (TYPE_TARGET_TYPE (t));
3398 case TYPE_CODE_STRUCT:
3399 case TYPE_CODE_UNION:
3401 for (n = 0; n < TYPE_NFIELDS (t); n++)
3403 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3411 /* Possible base types for a candidate for passing and returning in
3414 enum arm_vfp_cprc_base_type
3423 /* The length of one element of base type B. */
3426 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3430 case VFP_CPRC_SINGLE:
3432 case VFP_CPRC_DOUBLE:
3434 case VFP_CPRC_VEC64:
3436 case VFP_CPRC_VEC128:
3439 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3444 /* The character ('s', 'd' or 'q') for the type of VFP register used
3445 for passing base type B. */
3448 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3452 case VFP_CPRC_SINGLE:
3454 case VFP_CPRC_DOUBLE:
3456 case VFP_CPRC_VEC64:
3458 case VFP_CPRC_VEC128:
3461 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3466 /* Determine whether T may be part of a candidate for passing and
3467 returning in VFP registers, ignoring the limit on the total number
3468 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3469 classification of the first valid component found; if it is not
3470 VFP_CPRC_UNKNOWN, all components must have the same classification
3471 as *BASE_TYPE. If it is found that T contains a type not permitted
3472 for passing and returning in VFP registers, a type differently
3473 classified from *BASE_TYPE, or two types differently classified
3474 from each other, return -1, otherwise return the total number of
3475 base-type elements found (possibly 0 in an empty structure or
3476 array). Vector types are not currently supported, matching the
3477 generic AAPCS support. */
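/* Worked examples (annotation, not part of the original source), applying
   the classification rules described above:

     struct { float x, y; }            -> base type single,  count 2
     struct { double d[2]; }           -> base type double,  count 2
     double _Complex                   -> base type double,  count 2
     struct { float f; double d; }     -> mixed base types,  returns -1
*/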
3480 arm_vfp_cprc_sub_candidate (struct type *t,
3481 enum arm_vfp_cprc_base_type *base_type)
3483 t = check_typedef (t);
3484 switch (TYPE_CODE (t))
3487 switch (TYPE_LENGTH (t))
3490 if (*base_type == VFP_CPRC_UNKNOWN)
3491 *base_type = VFP_CPRC_SINGLE;
3492 else if (*base_type != VFP_CPRC_SINGLE)
3497 if (*base_type == VFP_CPRC_UNKNOWN)
3498 *base_type = VFP_CPRC_DOUBLE;
3499 else if (*base_type != VFP_CPRC_DOUBLE)
3508 case TYPE_CODE_COMPLEX:
3509 /* Arguments of complex T where T is one of the types float or
3510 double get treated as if they are implemented as:
3519 switch (TYPE_LENGTH (t))
3522 if (*base_type == VFP_CPRC_UNKNOWN)
3523 *base_type = VFP_CPRC_SINGLE;
3524 else if (*base_type != VFP_CPRC_SINGLE)
3529 if (*base_type == VFP_CPRC_UNKNOWN)
3530 *base_type = VFP_CPRC_DOUBLE;
3531 else if (*base_type != VFP_CPRC_DOUBLE)
3540 case TYPE_CODE_ARRAY:
3542 if (TYPE_VECTOR (t))
3544 /* A 64-bit or 128-bit containerized vector type is a VFP CPRC. */
3546 switch (TYPE_LENGTH (t))
3549 if (*base_type == VFP_CPRC_UNKNOWN)
3550 *base_type = VFP_CPRC_VEC64;
3553 if (*base_type == VFP_CPRC_UNKNOWN)
3554 *base_type = VFP_CPRC_VEC128;
3565 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3569 if (TYPE_LENGTH (t) == 0)
3571 gdb_assert (count == 0);
3574 else if (count == 0)
3576 unitlen = arm_vfp_cprc_unit_length (*base_type);
3577 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3578 return TYPE_LENGTH (t) / unitlen;
3583 case TYPE_CODE_STRUCT:
3588 for (i = 0; i < TYPE_NFIELDS (t); i++)
3592 if (!field_is_static (&TYPE_FIELD (t, i)))
3593 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3595 if (sub_count == -1)
3599 if (TYPE_LENGTH (t) == 0)
3601 gdb_assert (count == 0);
3604 else if (count == 0)
3606 unitlen = arm_vfp_cprc_unit_length (*base_type);
3607 if (TYPE_LENGTH (t) != unitlen * count)
3612 case TYPE_CODE_UNION:
3617 for (i = 0; i < TYPE_NFIELDS (t); i++)
3619 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3621 if (sub_count == -1)
3623 count = (count > sub_count ? count : sub_count);
3625 if (TYPE_LENGTH (t) == 0)
3627 gdb_assert (count == 0);
3630 else if (count == 0)
3632 unitlen = arm_vfp_cprc_unit_length (*base_type);
3633 if (TYPE_LENGTH (t) != unitlen * count)
3645 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3646 if passed to or returned from a non-variadic function with the VFP
3647 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3648 *BASE_TYPE to the base type for T and *COUNT to the number of
3649 elements of that base type before returning. */
3652 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3655 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3656 int c = arm_vfp_cprc_sub_candidate (t, &b);
3657 if (c <= 0 || c > 4)
3664 /* Return 1 if the VFP ABI should be used for passing arguments to and
3665 returning values from a function of type FUNC_TYPE, 0
3669 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3671 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3672 /* Variadic functions always use the base ABI. Assume that functions
3673 without debug info are not variadic. */
3674 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3676 /* The VFP ABI is only supported as a variant of AAPCS. */
3677 if (tdep->arm_abi != ARM_ABI_AAPCS)
3679 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3682 /* We currently only support passing parameters in integer registers, which
3683 conforms with GCC's default model, and VFP argument passing following
3684 the VFP variant of AAPCS. Several other variants exist and
3685 we should probably support some of them based on the selected ABI. */
3688 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3689 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3690 struct value **args, CORE_ADDR sp, int struct_return,
3691 CORE_ADDR struct_addr)
3693 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3697 struct stack_item *si = NULL;
3700 unsigned vfp_regs_free = (1 << 16) - 1;
3702 /* Determine the type of this function and whether the VFP ABI
3704 ftype = check_typedef (value_type (function));
3705 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3706 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3707 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3709 /* Set the return address. For the ARM, the return breakpoint is
3710 always at BP_ADDR. */
3711 if (arm_pc_is_thumb (gdbarch, bp_addr))
3713 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3715 /* Walk through the list of args and determine how large a temporary
3716 stack is required. Need to take care here as structs may be
3717 passed on the stack, and we have to push them. */
3720 argreg = ARM_A1_REGNUM;
3723 /* The struct_return pointer occupies the first parameter
3724 passing register. */
3728 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3729 gdbarch_register_name (gdbarch, argreg),
3730 paddress (gdbarch, struct_addr));
3731 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3735 for (argnum = 0; argnum < nargs; argnum++)
3738 struct type *arg_type;
3739 struct type *target_type;
3740 enum type_code typecode;
3741 const bfd_byte *val;
3743 enum arm_vfp_cprc_base_type vfp_base_type;
3745 int may_use_core_reg = 1;
3747 arg_type = check_typedef (value_type (args[argnum]));
3748 len = TYPE_LENGTH (arg_type);
3749 target_type = TYPE_TARGET_TYPE (arg_type);
3750 typecode = TYPE_CODE (arg_type);
3751 val = value_contents (args[argnum]);
3753 align = arm_type_align (arg_type);
3754 /* Round alignment up to a whole number of words. */
3755 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3756 /* Different ABIs have different maximum alignments. */
3757 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3759 /* The APCS ABI only requires word alignment. */
3760 align = INT_REGISTER_SIZE;
3764 /* The AAPCS requires at most doubleword alignment. */
3765 if (align > INT_REGISTER_SIZE * 2)
3766 align = INT_REGISTER_SIZE * 2;
3770 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3778 /* Because this is a CPRC it cannot go in a core register or
3779 cause a core register to be skipped for alignment.
3780 Either it goes in VFP registers and the rest of this loop
3781 iteration is skipped for this argument, or it goes on the
3782 stack (and the stack alignment code is correct for this case too). */
3784 may_use_core_reg = 0;
3786 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3787 shift = unit_length / 4;
3788 mask = (1 << (shift * vfp_base_count)) - 1;
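/* Annotation, not part of the original source: worked example of the
   search below.  For a CPRC of two doubles, unit_length is 8, so shift
   is 2 and mask is 0xf -- the argument needs four consecutive
   single-precision slots (s0-s3, i.e. the d0/d1 pair).  If those bits
   are still set in VFP_REGS_FREE, regno stays 0, the bits are claimed,
   and the values are written to d0 and d1; otherwise the argument goes
   to the stack and all VFP argument registers are marked as used.  */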
3789 for (regno = 0; regno < 16; regno += shift)
3790 if (((vfp_regs_free >> regno) & mask) == mask)
3799 vfp_regs_free &= ~(mask << regno);
3800 reg_scaled = regno / shift;
3801 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3802 for (i = 0; i < vfp_base_count; i++)
3806 if (reg_char == 'q')
3807 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3808 val + i * unit_length);
3811 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3812 reg_char, reg_scaled + i);
3813 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3815 regcache_cooked_write (regcache, regnum,
3816 val + i * unit_length);
3823 /* This CPRC could not go in VFP registers, so all VFP
3824 registers are now marked as used. */
3829 /* Push stack padding for doubleword alignment. */
3830 if (nstack & (align - 1))
3832 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3833 nstack += INT_REGISTER_SIZE;
3836 /* Doubleword aligned quantities must go in even register pairs. */
3837 if (may_use_core_reg
3838 && argreg <= ARM_LAST_ARG_REGNUM
3839 && align > INT_REGISTER_SIZE
3843 /* If the argument is a pointer to a function, and it is a
3844 Thumb function, create a LOCAL copy of the value and set
3845 the THUMB bit in it. */
3846 if (TYPE_CODE_PTR == typecode
3847 && target_type != NULL
3848 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3850 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3851 if (arm_pc_is_thumb (gdbarch, regval))
3853 bfd_byte *copy = (bfd_byte *) alloca (len);
3854 store_unsigned_integer (copy, len, byte_order,
3855 MAKE_THUMB_ADDR (regval));
3860 /* Copy the argument to general registers or the stack in
3861 register-sized pieces. Large arguments are split between
3862 registers and stack. */
3865 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3867 = extract_unsigned_integer (val, partial_len, byte_order);
3869 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3871 /* The argument is being passed in a general purpose
3873 if (byte_order == BFD_ENDIAN_BIG)
3874 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3876 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3878 gdbarch_register_name
3880 phex (regval, INT_REGISTER_SIZE));
3881 regcache_cooked_write_unsigned (regcache, argreg, regval);
3886 gdb_byte buf[INT_REGISTER_SIZE];
3888 memset (buf, 0, sizeof (buf));
3889 store_unsigned_integer (buf, partial_len, byte_order, regval);
3891 /* Push the arguments onto the stack. */
3893 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3895 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3896 nstack += INT_REGISTER_SIZE;
3903 /* If we have an odd number of words to push, then decrement the stack
3904 by one word now, so the first stack argument will be doubleword aligned. */
3911 write_memory (sp, si->data, si->len);
3912 si = pop_stack_item (si);
3915 /* Finally, update the SP register. */
3916 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3922 /* Always align the frame to an 8-byte boundary. This is required on
3923 some platforms and harmless on the rest. */
3926 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3928 /* Align the stack to eight bytes. */
3929 return sp & ~ (CORE_ADDR) 7;
3933 print_fpu_flags (struct ui_file *file, int flags)
3935 if (flags & (1 << 0))
3936 fputs_filtered ("IVO ", file);
3937 if (flags & (1 << 1))
3938 fputs_filtered ("DVZ ", file);
3939 if (flags & (1 << 2))
3940 fputs_filtered ("OFL ", file);
3941 if (flags & (1 << 3))
3942 fputs_filtered ("UFL ", file);
3943 if (flags & (1 << 4))
3944 fputs_filtered ("INX ", file);
3945 fputc_filtered ('\n', file);
3948 /* Print interesting information about the floating point processor
3949 (if present) or emulator. */
3951 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3952 struct frame_info *frame, const char *args)
3954 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3957 type = (status >> 24) & 127;
3958 if (status & (1 << 31))
3959 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3961 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3962 /* i18n: [floating point unit] mask */
3963 fputs_filtered (_("mask: "), file);
3964 print_fpu_flags (file, status >> 16);
3965 /* i18n: [floating point unit] flags */
3966 fputs_filtered (_("flags: "), file);
3967 print_fpu_flags (file, status);
3970 /* Construct the ARM extended floating point type. */
3971 static struct type *
3972 arm_ext_type (struct gdbarch *gdbarch)
3974 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3976 if (!tdep->arm_ext_type)
3978 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3979 floatformats_arm_ext);
3981 return tdep->arm_ext_type;
3984 static struct type *
3985 arm_neon_double_type (struct gdbarch *gdbarch)
3987 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3989 if (tdep->neon_double_type == NULL)
3991 struct type *t, *elem;
3993 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3995 elem = builtin_type (gdbarch)->builtin_uint8;
3996 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3997 elem = builtin_type (gdbarch)->builtin_uint16;
3998 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3999 elem = builtin_type (gdbarch)->builtin_uint32;
4000 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4001 elem = builtin_type (gdbarch)->builtin_uint64;
4002 append_composite_type_field (t, "u64", elem);
4003 elem = builtin_type (gdbarch)->builtin_float;
4004 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4005 elem = builtin_type (gdbarch)->builtin_double;
4006 append_composite_type_field (t, "f64", elem);
4008 TYPE_VECTOR (t) = 1;
4009 TYPE_NAME (t) = "neon_d";
4010 tdep->neon_double_type = t;
4013 return tdep->neon_double_type;
4016 /* FIXME: The vector types are not correctly ordered on big-endian
4017 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4018 bits of d0 - regardless of what unit size is being held in d0. So
4019 the offset of the first uint8 in d0 is 7, but the offset of the
4020 first float is 4. This code works as-is for little-endian targets. */
4023 static struct type *
4024 arm_neon_quad_type (struct gdbarch *gdbarch)
4026 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4028 if (tdep->neon_quad_type == NULL)
4030 struct type *t, *elem;
4032 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4034 elem = builtin_type (gdbarch)->builtin_uint8;
4035 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4036 elem = builtin_type (gdbarch)->builtin_uint16;
4037 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4038 elem = builtin_type (gdbarch)->builtin_uint32;
4039 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4040 elem = builtin_type (gdbarch)->builtin_uint64;
4041 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4042 elem = builtin_type (gdbarch)->builtin_float;
4043 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4044 elem = builtin_type (gdbarch)->builtin_double;
4045 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4047 TYPE_VECTOR (t) = 1;
4048 TYPE_NAME (t) = "neon_q";
4049 tdep->neon_quad_type = t;
4052 return tdep->neon_quad_type;
4055 /* Return the GDB type object for the "standard" data type of data in register N. */
4058 static struct type *
4059 arm_register_type (struct gdbarch *gdbarch, int regnum)
4061 int num_regs = gdbarch_num_regs (gdbarch);
4063 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4064 && regnum >= num_regs && regnum < num_regs + 32)
4065 return builtin_type (gdbarch)->builtin_float;
4067 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4068 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4069 return arm_neon_quad_type (gdbarch);
4071 /* If the target description has register information, we are only
4072 in this function so that we can override the types of
4073 double-precision registers for NEON. */
4074 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4076 struct type *t = tdesc_register_type (gdbarch, regnum);
4078 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4079 && TYPE_CODE (t) == TYPE_CODE_FLT
4080 && gdbarch_tdep (gdbarch)->have_neon)
4081 return arm_neon_double_type (gdbarch);
4086 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4088 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4089 return builtin_type (gdbarch)->builtin_void;
4091 return arm_ext_type (gdbarch);
4093 else if (regnum == ARM_SP_REGNUM)
4094 return builtin_type (gdbarch)->builtin_data_ptr;
4095 else if (regnum == ARM_PC_REGNUM)
4096 return builtin_type (gdbarch)->builtin_func_ptr;
4097 else if (regnum >= ARRAY_SIZE (arm_register_names))
4098 /* These registers are only supported on targets which supply
4099 an XML description. */
4100 return builtin_type (gdbarch)->builtin_int0;
4102 return builtin_type (gdbarch)->builtin_uint32;
4105 /* Map a DWARF register REGNUM onto the appropriate GDB register number. */
4109 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4111 /* Core integer regs. */
4112 if (reg >= 0 && reg <= 15)
4115 /* Legacy FPA encoding. These were once used in a way which
4116 overlapped with VFP register numbering, so their use is
4117 discouraged, but GDB doesn't support the ARM toolchain
4118 which used them for VFP. */
4119 if (reg >= 16 && reg <= 23)
4120 return ARM_F0_REGNUM + reg - 16;
4122 /* New assignments for the FPA registers. */
4123 if (reg >= 96 && reg <= 103)
4124 return ARM_F0_REGNUM + reg - 96;
4126 /* WMMX register assignments. */
4127 if (reg >= 104 && reg <= 111)
4128 return ARM_WCGR0_REGNUM + reg - 104;
4130 if (reg >= 112 && reg <= 127)
4131 return ARM_WR0_REGNUM + reg - 112;
4133 if (reg >= 192 && reg <= 199)
4134 return ARM_WC0_REGNUM + reg - 192;
4136 /* VFP v2 registers. A double precision value is actually
4137 in d1 rather than s2, but the ABI only defines numbering
4138 for the single precision registers. This will "just work"
4139 in GDB for little endian targets (we'll read eight bytes,
4140 starting in s0 and then progressing to s1), but will be
4141 reversed on big endian targets with VFP. This won't
4142 be a problem for the new Neon quad registers; you're supposed
4143 to use DW_OP_piece for those. */
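/* Annotation, not part of the original source: summary of the DWARF
   register number ranges handled by this function.

       0-15     core registers r0-r15
       16-23    legacy FPA f0-f7 (discouraged encoding)
       64-95    VFP s0-s31
       96-103   FPA f0-f7 (current encoding)
       104-111  iWMMXt wCGR0-wCGR7
       112-127  iWMMXt wR0-wR15
       192-199  iWMMXt wC0-wC7
       256-287  VFP/NEON d0-d31
*/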
4144 if (reg >= 64 && reg <= 95)
4148 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4149 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4153 /* VFP v3 / Neon registers. This range is also used for VFP v2
4154 registers, except that it now describes d0 instead of s0. */
4155 if (reg >= 256 && reg <= 287)
4159 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4160 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4167 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4169 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4172 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4174 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4175 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4177 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4178 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4180 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4181 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4183 if (reg < NUM_GREGS)
4184 return SIM_ARM_R0_REGNUM + reg;
4187 if (reg < NUM_FREGS)
4188 return SIM_ARM_FP0_REGNUM + reg;
4191 if (reg < NUM_SREGS)
4192 return SIM_ARM_FPS_REGNUM + reg;
4195 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4198 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4199 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4200 NULL if an error occurs. BUF is freed. */
4203 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4204 int old_len, int new_len)
4207 int bytes_to_read = new_len - old_len;
4209 new_buf = (gdb_byte *) xmalloc (new_len);
4210 memcpy (new_buf + bytes_to_read, buf, old_len);
4212 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4220 /* An IT block is at most the 2-byte IT instruction followed by
4221 four 4-byte instructions. The furthest back we must search to
4222 find an IT block that affects the current instruction is thus
4223 2 + 3 * 4 == 14 bytes. */
4224 #define MAX_IT_BLOCK_PREFIX 14
4226 /* Use a quick scan if there are more than this many bytes of instructions. */
4228 #define IT_SCAN_THRESHOLD 32
4230 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4231 A breakpoint in an IT block may not be hit, depending on the condition flags. */
4234 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4238 CORE_ADDR boundary, func_start;
4240 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4241 int i, any, last_it, last_it_count;
4243 /* If we are using BKPT breakpoints, none of this is necessary. */
4244 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4247 /* ARM mode does not have this problem. */
4248 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4251 /* We are setting a breakpoint in Thumb code that could potentially
4252 contain an IT block. The first step is to find how much Thumb
4253 code there is; we do not need to read outside of known Thumb code. */
4255 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4257 /* Thumb-2 code must have mapping symbols to have a chance. */
4260 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4262 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4263 && func_start > boundary)
4264 boundary = func_start;
4266 /* Search for a candidate IT instruction. We have to do some fancy
4267 footwork to distinguish a real IT instruction from the second
4268 half of a 32-bit instruction, but there is no need for that if
4269 there's no candidate. */
4270 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4272 /* No room for an IT instruction. */
4275 buf = (gdb_byte *) xmalloc (buf_len);
4276 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4279 for (i = 0; i < buf_len; i += 2)
4281 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4282 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4295 /* OK, the code bytes before this instruction contain at least one
4296 halfword which resembles an IT instruction. We know that it's
4297 Thumb code, but there are still two possibilities. Either the
4298 halfword really is an IT instruction, or it is the second half of
4299 a 32-bit Thumb instruction. The only way we can tell is to
4300 scan forwards from a known instruction boundary. */
4301 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4305 /* There's a lot of code before this instruction. Start with an
4306 optimistic search; it's easy to recognize halfwords that can
4307 not be the start of a 32-bit instruction, and use that to
4308 lock on to the instruction boundaries. */
4309 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4312 buf_len = IT_SCAN_THRESHOLD;
4315 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4317 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4318 if (thumb_insn_size (inst1) == 2)
4325 /* At this point, if DEFINITE, BUF[I] is the first place we
4326 are sure that we know the instruction boundaries, and it is far
4327 enough from BPADDR that we could not miss an IT instruction
4328 affecting BPADDR. If ! DEFINITE, give up - start from a
4332 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4336 buf_len = bpaddr - boundary;
4342 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4345 buf_len = bpaddr - boundary;
4349 /* Scan forwards. Find the last IT instruction before BPADDR. */
4354 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4356 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4361 else if (inst1 & 0x0002)
4363 else if (inst1 & 0x0004)
4368 i += thumb_insn_size (inst1);
4374 /* There wasn't really an IT instruction after all. */
4377 if (last_it_count < 1)
4378 /* It was too far away. */
4381 /* This really is a trouble spot. Move the breakpoint to the IT
4383 return bpaddr - buf_len + last_it;
4386 /* ARM displaced stepping support.
4388 Generally ARM displaced stepping works as follows:
4390 1. When an instruction is to be single-stepped, it is first decoded by
4391 arm_process_displaced_insn. Depending on the type of instruction, it is
4392 then copied to a scratch location, possibly in a modified form. The
4393 copy_* set of functions performs such modification, as necessary. A
4394 breakpoint is placed after the modified instruction in the scratch space
4395 to return control to GDB. Note in particular that instructions which
4396 modify the PC will no longer do so after modification.
4398 2. The instruction is single-stepped, by setting the PC to the scratch
4399 location address, and resuming. Control returns to GDB when the
4402 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4403 function used for the current instruction. This function's job is to
4404 put the CPU/memory state back to what it would have been if the
4405 instruction had been executed unmodified in its original location. */
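/* For example, single-stepping an ARM `ldr pc, [r0]' works roughly as
   follows (an illustrative sketch of the flow described above): the copy_*
   stage rewrites the instruction so that it loads into r0 instead of the PC,
   the rewritten copy runs in the scratch space followed by a breakpoint, and
   the cleanup stage then writes the value loaded into r0 back into the real
   PC and restores the registers that were used as scratch.  */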
4407 /* NOP instruction (mov r0, r0). */
4408 #define ARM_NOP 0xe1a00000
4409 #define THUMB_NOP 0x4600
4411 /* Helper for register reads for displaced stepping. In particular, this
4412 returns the PC as it would be seen by the instruction at its original
4416 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4420 CORE_ADDR from = dsc->insn_addr;
4422 if (regno == ARM_PC_REGNUM)
4424 /* Compute pipeline offset:
4425 - When executing an ARM instruction, PC reads as the address of the
4426 current instruction plus 8.
4427 - When executing a Thumb instruction, PC reads as the address of the
4428 current instruction plus 4. */
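/* For example, an ARM instruction originally at 0x8000 must see a PC value
   of 0x8008 here, while a Thumb instruction at the same address must see
   0x8004, regardless of where the copied instruction actually executes.  */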
4435 if (debug_displaced)
4436 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4437 (unsigned long) from);
4438 return (ULONGEST) from;
4442 regcache_cooked_read_unsigned (regs, regno, &ret);
4443 if (debug_displaced)
4444 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4445 regno, (unsigned long) ret);
4451 displaced_in_arm_mode (struct regcache *regs)
4454 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4456 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4458 return (ps & t_bit) == 0;
4461 /* Write to the PC as from a branch instruction. */
4464 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4468 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4469 architecture versions < 6. */
4470 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4471 val & ~(ULONGEST) 0x3);
4473 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4474 val & ~(ULONGEST) 0x1);
4477 /* Write to the PC as from a branch-exchange instruction. */
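/* For example, writing 0x8001 here switches to Thumb state and branches to
   0x8000, while writing 0x8000 stays in (or switches to) ARM state and
   branches to 0x8000.  */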
4480 bx_write_pc (struct regcache *regs, ULONGEST val)
4483 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4485 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4489 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4490 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4492 else if ((val & 2) == 0)
4494 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4495 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4499 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4500 mode, align dest to 4 bytes). */
4501 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4502 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4503 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4507 /* Write to the PC as if from a load instruction. */
4510 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4513 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4514 bx_write_pc (regs, val);
4516 branch_write_pc (regs, dsc, val);
4519 /* Write to the PC as if from an ALU instruction. */
4522 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4525 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4526 bx_write_pc (regs, val);
4528 branch_write_pc (regs, dsc, val);
4531 /* Helper for writing to registers for displaced stepping. Writing to the PC
4532 has varying effects depending on the instruction which does the write:
4533 this is controlled by the WRITE_PC argument. */
4536 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4537 int regno, ULONGEST val, enum pc_write_style write_pc)
4539 if (regno == ARM_PC_REGNUM)
4541 if (debug_displaced)
4542 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4543 (unsigned long) val);
4546 case BRANCH_WRITE_PC:
4547 branch_write_pc (regs, dsc, val);
4551 bx_write_pc (regs, val);
4555 load_write_pc (regs, dsc, val);
4559 alu_write_pc (regs, dsc, val);
4562 case CANNOT_WRITE_PC:
4563 warning (_("Instruction wrote to PC in an unexpected way when "
4564 "single-stepping"));
4568 internal_error (__FILE__, __LINE__,
4569 _("Invalid argument to displaced_write_reg"));
4572 dsc->wrote_to_pc = 1;
4576 if (debug_displaced)
4577 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4578 regno, (unsigned long) val);
4579 regcache_cooked_write_unsigned (regs, regno, val);
4583 /* This function is used to concisely determine if an instruction INSN
4584 references PC. Register fields of interest in INSN should have the
4585 corresponding fields of BITMASK set to 0b1111. The function
4586 returns 1 if any of these fields in INSN reference the PC
4587 (also 0b1111, r15), else it returns 0. */
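/* For example, insn_references_pc (0xe59f1000, 0x000f0000) returns 1,
   because 0xe59f1000 is "ldr r1, [pc]" and its Rn field (bits 16-19) is
   0xf; for 0xe5921000 ("ldr r1, [r2]") the same call returns 0.  */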
4590 insn_references_pc (uint32_t insn, uint32_t bitmask)
4592 uint32_t lowbit = 1;
4594 while (bitmask != 0)
4598 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4604 mask = lowbit * 0xf;
4606 if ((insn & mask) == mask)
4615 /* The simplest copy function. Many instructions have the same effect no
4616 matter what address they are executed at: in those cases, use this. */
4619 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4620 const char *iname, struct displaced_step_closure *dsc)
4622 if (debug_displaced)
4623 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4624 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4627 dsc->modinsn[0] = insn;
4633 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4634 uint16_t insn2, const char *iname,
4635 struct displaced_step_closure *dsc)
4637 if (debug_displaced)
4638 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4639 "opcode/class '%s' unmodified\n", insn1, insn2,
4642 dsc->modinsn[0] = insn1;
4643 dsc->modinsn[1] = insn2;
4649 /* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without any
4652 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4654 struct displaced_step_closure *dsc)
4656 if (debug_displaced)
4657 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4658 "opcode/class '%s' unmodified\n", insn,
4661 dsc->modinsn[0] = insn;
4666 /* Preload instructions with immediate offset. */
4669 cleanup_preload (struct gdbarch *gdbarch,
4670 struct regcache *regs, struct displaced_step_closure *dsc)
4672 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4673 if (!dsc->u.preload.immed)
4674 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4678 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4679 struct displaced_step_closure *dsc, unsigned int rn)
4682 /* Preload instructions:
4684 {pli/pld} [rn, #+/-imm]
4686 {pli/pld} [r0, #+/-imm]. */
4688 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4689 rn_val = displaced_read_reg (regs, dsc, rn);
4690 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4691 dsc->u.preload.immed = 1;
4693 dsc->cleanup = &cleanup_preload;
4697 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4698 struct displaced_step_closure *dsc)
4700 unsigned int rn = bits (insn, 16, 19);
4702 if (!insn_references_pc (insn, 0x000f0000ul))
4703 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4705 if (debug_displaced)
4706 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4707 (unsigned long) insn);
4709 dsc->modinsn[0] = insn & 0xfff0ffff;
4711 install_preload (gdbarch, regs, dsc, rn);
4717 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4718 struct regcache *regs, struct displaced_step_closure *dsc)
4720 unsigned int rn = bits (insn1, 0, 3);
4721 unsigned int u_bit = bit (insn1, 7);
4722 int imm12 = bits (insn2, 0, 11);
4725 if (rn != ARM_PC_REGNUM)
4726 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4728 /* The PC is only allowed as the base register in PLI (immediate, literal)
4729 Encoding T3 and PLD (literal) Encoding T1. */
4730 if (debug_displaced)
4731 fprintf_unfiltered (gdb_stdlog,
4732 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4733 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4739 /* Rewrite instruction {pli/pld} PC imm12 into:
4740 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4744 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4746 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4747 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4749 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4751 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4752 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4753 dsc->u.preload.immed = 0;
4755 /* {pli/pld} [r0, r1] */
4756 dsc->modinsn[0] = insn1 & 0xfff0;
4757 dsc->modinsn[1] = 0xf001;
4760 dsc->cleanup = &cleanup_preload;
4764 /* Preload instructions with register offset. */
4767 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4768 struct displaced_step_closure *dsc, unsigned int rn,
4771 ULONGEST rn_val, rm_val;
4773 /* Preload register-offset instructions:
4775 {pli/pld} [rn, rm {, shift}]
4777 {pli/pld} [r0, r1 {, shift}]. */
4779 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4780 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4781 rn_val = displaced_read_reg (regs, dsc, rn);
4782 rm_val = displaced_read_reg (regs, dsc, rm);
4783 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4784 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4785 dsc->u.preload.immed = 0;
4787 dsc->cleanup = &cleanup_preload;
4791 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4792 struct regcache *regs,
4793 struct displaced_step_closure *dsc)
4795 unsigned int rn = bits (insn, 16, 19);
4796 unsigned int rm = bits (insn, 0, 3);
4799 if (!insn_references_pc (insn, 0x000f000ful))
4800 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4802 if (debug_displaced)
4803 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4804 (unsigned long) insn);
4806 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4808 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4812 /* Copy/cleanup coprocessor load and store instructions. */
4815 cleanup_copro_load_store (struct gdbarch *gdbarch,
4816 struct regcache *regs,
4817 struct displaced_step_closure *dsc)
4819 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4821 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4823 if (dsc->u.ldst.writeback)
4824 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4828 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4829 struct displaced_step_closure *dsc,
4830 int writeback, unsigned int rn)
4834 /* Coprocessor load/store instructions:
4836 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4838 {stc/stc2} [r0, #+/-imm].
4840 ldc/ldc2 are handled identically. */
4842 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4843 rn_val = displaced_read_reg (regs, dsc, rn);
4844 /* PC should be 4-byte aligned. */
4845 rn_val = rn_val & 0xfffffffc;
4846 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4848 dsc->u.ldst.writeback = writeback;
4849 dsc->u.ldst.rn = rn;
4851 dsc->cleanup = &cleanup_copro_load_store;
4855 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4856 struct regcache *regs,
4857 struct displaced_step_closure *dsc)
4859 unsigned int rn = bits (insn, 16, 19);
4861 if (!insn_references_pc (insn, 0x000f0000ul))
4862 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4864 if (debug_displaced)
4865 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4866 "load/store insn %.8lx\n", (unsigned long) insn);
4868 dsc->modinsn[0] = insn & 0xfff0ffff;
4870 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4876 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4877 uint16_t insn2, struct regcache *regs,
4878 struct displaced_step_closure *dsc)
4880 unsigned int rn = bits (insn1, 0, 3);
4882 if (rn != ARM_PC_REGNUM)
4883 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4884 "copro load/store", dsc);
4886 if (debug_displaced)
4887 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4888 "load/store insn %.4x%.4x\n", insn1, insn2);
4890 dsc->modinsn[0] = insn1 & 0xfff0;
4891 dsc->modinsn[1] = insn2;
4894 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4895 do not support writeback, so pass 0. */
4896 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4901 /* Clean up branch instructions (actually perform the branch, by setting
4905 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4906 struct displaced_step_closure *dsc)
4908 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4909 int branch_taken = condition_true (dsc->u.branch.cond, status);
4910 enum pc_write_style write_pc = dsc->u.branch.exchange
4911 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4916 if (dsc->u.branch.link)
4918 /* LR should hold the address of the instruction following the current
4919 one. To avoid confusing the logic that later handles a `bx lr', set
4920 bit 0 of the LR value when the current instruction is Thumb. */
4921 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4924 next_insn_addr |= 0x1;
4926 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4930 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4933 /* Copy B/BL/BLX instructions with immediate destinations. */
4936 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4937 struct displaced_step_closure *dsc,
4938 unsigned int cond, int exchange, int link, long offset)
4940 /* Implement "BL<cond> <label>" as:
4942 Preparation: cond <- instruction condition
4943 Insn: mov r0, r0 (nop)
4944 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4946 B<cond> similar, but don't set r14 in cleanup. */
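/* For example, a conditional "bleq <label>" is copied to the scratch space
   as a plain nop; cleanup_branch later tests the Z flag from CPSR and, only
   if the condition held, writes LR = insn_addr + 4 and then the branch
   destination into the PC.  */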
4948 dsc->u.branch.cond = cond;
4949 dsc->u.branch.link = link;
4950 dsc->u.branch.exchange = exchange;
4952 dsc->u.branch.dest = dsc->insn_addr;
4953 if (link && exchange)
4954 /* For BLX, the offset is computed from Align (PC, 4). */
4955 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4958 dsc->u.branch.dest += 4 + offset;
4960 dsc->u.branch.dest += 8 + offset;
4962 dsc->cleanup = &cleanup_branch;
4965 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4966 struct regcache *regs, struct displaced_step_closure *dsc)
4968 unsigned int cond = bits (insn, 28, 31);
4969 int exchange = (cond == 0xf);
4970 int link = exchange || bit (insn, 24);
4973 if (debug_displaced)
4974 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4975 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4976 (unsigned long) insn);
4978 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4979 then arrange the switch into Thumb mode. */
4980 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4982 offset = bits (insn, 0, 23) << 2;
4984 if (bit (offset, 25))
4985 offset = offset | ~0x3ffffff;
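/* For example, the ARM instruction 0xea000001 ("b <label>") gives
   offset = 1 << 2 = 4, so the cleanup will write insn_addr + 8 + 4 ==
   insn_addr + 12 to the PC.  */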
4987 dsc->modinsn[0] = ARM_NOP;
4989 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4994 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4995 uint16_t insn2, struct regcache *regs,
4996 struct displaced_step_closure *dsc)
4998 int link = bit (insn2, 14);
4999 int exchange = link && !bit (insn2, 12);
5002 int j1 = bit (insn2, 13);
5003 int j2 = bit (insn2, 11);
5004 int s = sbits (insn1, 10, 10);
5005 int i1 = !(j1 ^ bit (insn1, 10));
5006 int i2 = !(j2 ^ bit (insn1, 10));
5008 if (!link && !exchange) /* B */
5010 offset = (bits (insn2, 0, 10) << 1);
5011 if (bit (insn2, 12)) /* Encoding T4 */
5013 offset |= (bits (insn1, 0, 9) << 12)
5019 else /* Encoding T3 */
5021 offset |= (bits (insn1, 0, 5) << 12)
5025 cond = bits (insn1, 6, 9);
5030 offset = (bits (insn1, 0, 9) << 12);
5031 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5032 offset |= exchange ?
5033 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5036 if (debug_displaced)
5037 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5038 "%.4x %.4x with offset %.8lx\n",
5039 link ? (exchange) ? "blx" : "bl" : "b",
5040 insn1, insn2, offset);
5042 dsc->modinsn[0] = THUMB_NOP;
5044 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5048 /* Copy B Thumb instructions. */
5050 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5051 struct displaced_step_closure *dsc)
5053 unsigned int cond = 0;
5055 unsigned short bit_12_15 = bits (insn, 12, 15);
5056 CORE_ADDR from = dsc->insn_addr;
5058 if (bit_12_15 == 0xd)
5060 /* offset = SignExtend (imm8:0, 32) */
5061 offset = sbits ((insn << 1), 0, 8);
5062 cond = bits (insn, 8, 11);
5064 else if (bit_12_15 == 0xe) /* Encoding T2 */
5066 offset = sbits ((insn << 1), 0, 11);
5070 if (debug_displaced)
5071 fprintf_unfiltered (gdb_stdlog,
5072 "displaced: copying b immediate insn %.4x "
5073 "with offset %d\n", insn, offset);
5075 dsc->u.branch.cond = cond;
5076 dsc->u.branch.link = 0;
5077 dsc->u.branch.exchange = 0;
5078 dsc->u.branch.dest = from + 4 + offset;
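/* For example, the classic branch-to-self 0xe7fe (encoding T2, imm11 =
   0x3fe) yields offset = -4, so dest = from + 4 - 4 = from, i.e. the
   instruction branches to itself.  */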
5080 dsc->modinsn[0] = THUMB_NOP;
5082 dsc->cleanup = &cleanup_branch;
5087 /* Copy BX/BLX with register-specified destinations. */
5090 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5091 struct displaced_step_closure *dsc, int link,
5092 unsigned int cond, unsigned int rm)
5094 /* Implement "{BX,BLX}<cond> <reg>" as:
5096 Preparation: cond <- instruction condition
5097 Insn: mov r0, r0 (nop)
5098 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5100 Don't set r14 in cleanup for BX. */
5102 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5104 dsc->u.branch.cond = cond;
5105 dsc->u.branch.link = link;
5107 dsc->u.branch.exchange = 1;
5109 dsc->cleanup = &cleanup_branch;
5113 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5114 struct regcache *regs, struct displaced_step_closure *dsc)
5116 unsigned int cond = bits (insn, 28, 31);
5119 int link = bit (insn, 5);
5120 unsigned int rm = bits (insn, 0, 3);
5122 if (debug_displaced)
5123 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5124 (unsigned long) insn);
5126 dsc->modinsn[0] = ARM_NOP;
5128 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5133 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5134 struct regcache *regs,
5135 struct displaced_step_closure *dsc)
5137 int link = bit (insn, 7);
5138 unsigned int rm = bits (insn, 3, 6);
5140 if (debug_displaced)
5141 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5142 (unsigned short) insn);
5144 dsc->modinsn[0] = THUMB_NOP;
5146 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5152 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5155 cleanup_alu_imm (struct gdbarch *gdbarch,
5156 struct regcache *regs, struct displaced_step_closure *dsc)
5158 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5159 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5160 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5161 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5165 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5166 struct displaced_step_closure *dsc)
5168 unsigned int rn = bits (insn, 16, 19);
5169 unsigned int rd = bits (insn, 12, 15);
5170 unsigned int op = bits (insn, 21, 24);
5171 int is_mov = (op == 0xd);
5172 ULONGEST rd_val, rn_val;
5174 if (!insn_references_pc (insn, 0x000ff000ul))
5175 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5177 if (debug_displaced)
5178 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5179 "%.8lx\n", is_mov ? "move" : "ALU",
5180 (unsigned long) insn);
5182 /* Instruction is of form:
5184 <op><cond> rd, [rn,] #imm
5188 Preparation: tmp1, tmp2 <- r0, r1;
5190 Insn: <op><cond> r0, r1, #imm
5191 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5194 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5195 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5196 rn_val = displaced_read_reg (regs, dsc, rn);
5197 rd_val = displaced_read_reg (regs, dsc, rd);
5198 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5199 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5203 dsc->modinsn[0] = insn & 0xfff00fff;
5205 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5207 dsc->cleanup = &cleanup_alu_imm;
5213 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5214 uint16_t insn2, struct regcache *regs,
5215 struct displaced_step_closure *dsc)
5217 unsigned int op = bits (insn1, 5, 8);
5218 unsigned int rn, rm, rd;
5219 ULONGEST rd_val, rn_val;
5221 rn = bits (insn1, 0, 3); /* Rn */
5222 rm = bits (insn2, 0, 3); /* Rm */
5223 rd = bits (insn2, 8, 11); /* Rd */
5225 /* This routine is only called for the MOV instruction. */
5226 gdb_assert (op == 0x2 && rn == 0xf);
5228 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5229 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5231 if (debug_displaced)
5232 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5233 "ALU", insn1, insn2);
5235 /* Instruction is of form:
5237 <op><cond> rd, [rn,] #imm
5241 Preparation: tmp1, tmp2 <- r0, r1;
5243 Insn: <op><cond> r0, r1, #imm
5244 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5247 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5248 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5249 rn_val = displaced_read_reg (regs, dsc, rn);
5250 rd_val = displaced_read_reg (regs, dsc, rd);
5251 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5252 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5255 dsc->modinsn[0] = insn1;
5256 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5259 dsc->cleanup = &cleanup_alu_imm;
5264 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5267 cleanup_alu_reg (struct gdbarch *gdbarch,
5268 struct regcache *regs, struct displaced_step_closure *dsc)
5273 rd_val = displaced_read_reg (regs, dsc, 0);
5275 for (i = 0; i < 3; i++)
5276 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5278 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5282 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5283 struct displaced_step_closure *dsc,
5284 unsigned int rd, unsigned int rn, unsigned int rm)
5286 ULONGEST rd_val, rn_val, rm_val;
5288 /* Instruction is of form:
5290 <op><cond> rd, [rn,] rm [, <shift>]
5294 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5295 r0, r1, r2 <- rd, rn, rm
5296 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5297 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5300 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5301 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5302 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5303 rd_val = displaced_read_reg (regs, dsc, rd);
5304 rn_val = displaced_read_reg (regs, dsc, rn);
5305 rm_val = displaced_read_reg (regs, dsc, rm);
5306 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5307 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5308 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5311 dsc->cleanup = &cleanup_alu_reg;
5315 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5316 struct displaced_step_closure *dsc)
5318 unsigned int op = bits (insn, 21, 24);
5319 int is_mov = (op == 0xd);
5321 if (!insn_references_pc (insn, 0x000ff00ful))
5322 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5324 if (debug_displaced)
5325 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5326 is_mov ? "move" : "ALU", (unsigned long) insn);
5329 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5331 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5333 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5339 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5340 struct regcache *regs,
5341 struct displaced_step_closure *dsc)
5345 rm = bits (insn, 3, 6);
5346 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5348 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5349 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5351 if (debug_displaced)
5352 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5353 (unsigned short) insn);
5355 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5357 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5362 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5365 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5366 struct regcache *regs,
5367 struct displaced_step_closure *dsc)
5369 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5372 for (i = 0; i < 4; i++)
5373 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5375 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5379 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5380 struct displaced_step_closure *dsc,
5381 unsigned int rd, unsigned int rn, unsigned int rm,
5385 ULONGEST rd_val, rn_val, rm_val, rs_val;
5387 /* Instruction is of form:
5389 <op><cond> rd, [rn,] rm, <shift> rs
5393 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5394 r0, r1, r2, r3 <- rd, rn, rm, rs
5395 Insn: <op><cond> r0, r1, r2, <shift> r3
5397 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5401 for (i = 0; i < 4; i++)
5402 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5404 rd_val = displaced_read_reg (regs, dsc, rd);
5405 rn_val = displaced_read_reg (regs, dsc, rn);
5406 rm_val = displaced_read_reg (regs, dsc, rm);
5407 rs_val = displaced_read_reg (regs, dsc, rs);
5408 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5409 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5410 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5411 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5413 dsc->cleanup = &cleanup_alu_shifted_reg;
5417 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5418 struct regcache *regs,
5419 struct displaced_step_closure *dsc)
5421 unsigned int op = bits (insn, 21, 24);
5422 int is_mov = (op == 0xd);
5423 unsigned int rd, rn, rm, rs;
5425 if (!insn_references_pc (insn, 0x000fff0ful))
5426 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5428 if (debug_displaced)
5429 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5430 "%.8lx\n", is_mov ? "move" : "ALU",
5431 (unsigned long) insn);
5433 rn = bits (insn, 16, 19);
5434 rm = bits (insn, 0, 3);
5435 rs = bits (insn, 8, 11);
5436 rd = bits (insn, 12, 15);
5439 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5441 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5443 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5448 /* Clean up load instructions. */
5451 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5452 struct displaced_step_closure *dsc)
5454 ULONGEST rt_val, rt_val2 = 0, rn_val;
5456 rt_val = displaced_read_reg (regs, dsc, 0);
5457 if (dsc->u.ldst.xfersize == 8)
5458 rt_val2 = displaced_read_reg (regs, dsc, 1);
5459 rn_val = displaced_read_reg (regs, dsc, 2);
5461 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5462 if (dsc->u.ldst.xfersize > 4)
5463 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5464 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5465 if (!dsc->u.ldst.immed)
5466 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5468 /* Handle register writeback. */
5469 if (dsc->u.ldst.writeback)
5470 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5471 /* Put result in right place. */
5472 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5473 if (dsc->u.ldst.xfersize == 8)
5474 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5477 /* Clean up store instructions. */
5480 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5481 struct displaced_step_closure *dsc)
5483 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5485 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5486 if (dsc->u.ldst.xfersize > 4)
5487 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5488 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5489 if (!dsc->u.ldst.immed)
5490 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5491 if (!dsc->u.ldst.restore_r4)
5492 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5495 if (dsc->u.ldst.writeback)
5496 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5499 /* Copy "extra" load/store instructions. These are halfword/doubleword
5500 transfers, which have a different encoding to byte/word transfers. */
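/* Typical examples (for illustration) are "ldrh r0, [r1, #2]",
   "ldrsb r2, [r3, r4]" and "ldrd r6, r7, [r8]", as opposed to the plain
   "ldr"/"ldrb"/"str"/"strb" forms handled elsewhere.  */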
5503 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5504 struct regcache *regs, struct displaced_step_closure *dsc)
5506 unsigned int op1 = bits (insn, 20, 24);
5507 unsigned int op2 = bits (insn, 5, 6);
5508 unsigned int rt = bits (insn, 12, 15);
5509 unsigned int rn = bits (insn, 16, 19);
5510 unsigned int rm = bits (insn, 0, 3);
5511 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5512 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5513 int immed = (op1 & 0x4) != 0;
5515 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5517 if (!insn_references_pc (insn, 0x000ff00ful))
5518 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5520 if (debug_displaced)
5521 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5522 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5523 (unsigned long) insn);
5525 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5528 internal_error (__FILE__, __LINE__,
5529 _("copy_extra_ld_st: instruction decode error"));
5531 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5532 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5533 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5535 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5537 rt_val = displaced_read_reg (regs, dsc, rt);
5538 if (bytesize[opcode] == 8)
5539 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5540 rn_val = displaced_read_reg (regs, dsc, rn);
5542 rm_val = displaced_read_reg (regs, dsc, rm);
5544 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5545 if (bytesize[opcode] == 8)
5546 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5547 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5549 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5552 dsc->u.ldst.xfersize = bytesize[opcode];
5553 dsc->u.ldst.rn = rn;
5554 dsc->u.ldst.immed = immed;
5555 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5556 dsc->u.ldst.restore_r4 = 0;
5559 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5561 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5562 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5564 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5566 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5567 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5569 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5574 /* Copy byte/halfword/word loads and stores. */
5577 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5578 struct displaced_step_closure *dsc, int load,
5579 int immed, int writeback, int size, int usermode,
5580 int rt, int rm, int rn)
5582 ULONGEST rt_val, rn_val, rm_val = 0;
5584 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5585 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5587 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5589 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5591 rt_val = displaced_read_reg (regs, dsc, rt);
5592 rn_val = displaced_read_reg (regs, dsc, rn);
5594 rm_val = displaced_read_reg (regs, dsc, rm);
5596 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5597 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5599 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5601 dsc->u.ldst.xfersize = size;
5602 dsc->u.ldst.rn = rn;
5603 dsc->u.ldst.immed = immed;
5604 dsc->u.ldst.writeback = writeback;
5606 /* To write PC we can do:
5608 Before this sequence of instructions:
5609 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5610 r2 is the Rn value obtained from displaced_read_reg.
5612 Insn1: push {pc} Write address of STR instruction + offset on stack
5613 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5614 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5615 = addr(Insn1) + offset - addr(Insn3) - 8
5617 Insn4: add r4, r4, #8 r4 = offset - 8
5618 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5620 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5622 Otherwise we don't know what value to write for PC, since the offset is
5623 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5624 of this can be found in Section "Saving from r15" in
5625 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
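/* Working through the sequence above (illustrative): if the scratch copy of
   Insn1 executes at address S and this core stores PC + offset, the value
   pushed is S + offset; after Insn2 and Insn3, r4 = (S + offset) - (S + 16)
   = offset - 16; Insn4 makes that offset - 8; and Insn5 leaves
   r0 = (from + 8) + (offset - 8) = from + offset, which is exactly what the
   original store of the PC would have written.  */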
5627 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5632 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5633 uint16_t insn2, struct regcache *regs,
5634 struct displaced_step_closure *dsc, int size)
5636 unsigned int u_bit = bit (insn1, 7);
5637 unsigned int rt = bits (insn2, 12, 15);
5638 int imm12 = bits (insn2, 0, 11);
5641 if (debug_displaced)
5642 fprintf_unfiltered (gdb_stdlog,
5643 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5644 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5650 /* Rewrite instruction LDR Rt imm12 into:
5652 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5656 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5659 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5660 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5661 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5663 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5665 pc_val = pc_val & 0xfffffffc;
5667 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5668 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5672 dsc->u.ldst.xfersize = size;
5673 dsc->u.ldst.immed = 0;
5674 dsc->u.ldst.writeback = 0;
5675 dsc->u.ldst.restore_r4 = 0;
5677 /* LDR R0, R2, R3 */
5678 dsc->modinsn[0] = 0xf852;
5679 dsc->modinsn[1] = 0x3;
5682 dsc->cleanup = &cleanup_load;
5688 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5689 uint16_t insn2, struct regcache *regs,
5690 struct displaced_step_closure *dsc,
5691 int writeback, int immed)
5693 unsigned int rt = bits (insn2, 12, 15);
5694 unsigned int rn = bits (insn1, 0, 3);
5695 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5696 /* In LDR (register), there is also a register Rm, which is not allowed to
5697 be PC, so we don't have to check it. */
5699 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5700 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5703 if (debug_displaced)
5704 fprintf_unfiltered (gdb_stdlog,
5705 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5706 rt, rn, insn1, insn2);
5708 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5711 dsc->u.ldst.restore_r4 = 0;
5714 /* ldr[b]<cond> rt, [rn, #imm], etc.
5716 ldr[b]<cond> r0, [r2, #imm]. */
5718 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5719 dsc->modinsn[1] = insn2 & 0x0fff;
5722 /* ldr[b]<cond> rt, [rn, rm], etc.
5724 ldr[b]<cond> r0, [r2, r3]. */
5726 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5727 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5737 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5738 struct regcache *regs,
5739 struct displaced_step_closure *dsc,
5740 int load, int size, int usermode)
5742 int immed = !bit (insn, 25);
5743 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5744 unsigned int rt = bits (insn, 12, 15);
5745 unsigned int rn = bits (insn, 16, 19);
5746 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5748 if (!insn_references_pc (insn, 0x000ff00ful))
5749 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5751 if (debug_displaced)
5752 fprintf_unfiltered (gdb_stdlog,
5753 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5754 load ? (size == 1 ? "ldrb" : "ldr")
5755 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5757 (unsigned long) insn);
5759 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5760 usermode, rt, rm, rn);
5762 if (load || rt != ARM_PC_REGNUM)
5764 dsc->u.ldst.restore_r4 = 0;
5767 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5769 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5770 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5772 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5774 {ldr,str}[b]<cond> r0, [r2, r3]. */
5775 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5779 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5780 dsc->u.ldst.restore_r4 = 1;
5781 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5782 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5783 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5784 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5785 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5789 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5791 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5796 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5801 /* Cleanup LDM instructions with fully-populated register list. This is an
5802 unfortunate corner case: it's impossible to implement correctly by modifying
5803 the instruction. The issue is as follows: we have an instruction,
5807 which we must rewrite to avoid loading PC. A possible solution would be to
5808 do the load in two halves, something like (with suitable cleanup
5812 ldm[id][ab] r8!, {r0-r7}
5814 ldm[id][ab] r8, {r7-r14}
5817 but at present there's no suitable place for <temp>, since the scratch space
5818 is overwritten before the cleanup routine is called. For now, we simply
5819 emulate the instruction. */
5822 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5823 struct displaced_step_closure *dsc)
5825 int inc = dsc->u.block.increment;
5826 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5827 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5828 uint32_t regmask = dsc->u.block.regmask;
5829 int regno = inc ? 0 : 15;
5830 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5831 int exception_return = dsc->u.block.load && dsc->u.block.user
5832 && (regmask & 0x8000) != 0;
5833 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5834 int do_transfer = condition_true (dsc->u.block.cond, status);
5835 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5840 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5841 sensible we can do here. Complain loudly. */
5842 if (exception_return)
5843 error (_("Cannot single-step exception return"));
5845 /* We don't handle any stores here for now. */
5846 gdb_assert (dsc->u.block.load != 0);
5848 if (debug_displaced)
5849 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5850 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5851 dsc->u.block.increment ? "inc" : "dec",
5852 dsc->u.block.before ? "before" : "after");
5859 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5862 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5865 xfer_addr += bump_before;
5867 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5868 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5870 xfer_addr += bump_after;
5872 regmask &= ~(1 << regno);
5875 if (dsc->u.block.writeback)
5876 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5880 /* Clean up an STM which included the PC in the register list. */
5883 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5884 struct displaced_step_closure *dsc)
5886 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5887 int store_executed = condition_true (dsc->u.block.cond, status);
5888 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5889 CORE_ADDR stm_insn_addr;
5892 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5894 /* If condition code fails, there's nothing else to do. */
5895 if (!store_executed)
5898 if (dsc->u.block.increment)
5900 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5902 if (dsc->u.block.before)
5907 pc_stored_at = dsc->u.block.xfer_addr;
5909 if (dsc->u.block.before)
5913 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5914 stm_insn_addr = dsc->scratch_base;
5915 offset = pc_val - stm_insn_addr;
5917 if (debug_displaced)
5918 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5919 "STM instruction\n", offset);
5921 /* Rewrite the stored PC to the proper value for the non-displaced original
5923 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5924 dsc->insn_addr + offset);
5927 /* Clean up an LDM which includes the PC in the register list. We clumped all
5928 the registers in the transferred list into a contiguous range r0...rX (to
5929 avoid loading PC directly and losing control of the debugged program), so we
5930 must undo that here. */
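/* For example, "ldm r11, {r4, r9, pc}" is executed out of line as
   "ldm r11, {r0, r1, r2}"; this cleanup then copies r2 into the PC, r1 into
   r9 and r0 into r4, and finally restores r0-r2 from DSC->tmp[].  */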
5933 cleanup_block_load_pc (struct gdbarch *gdbarch,
5934 struct regcache *regs,
5935 struct displaced_step_closure *dsc)
5937 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5938 int load_executed = condition_true (dsc->u.block.cond, status);
5939 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5940 unsigned int regs_loaded = bitcount (mask);
5941 unsigned int num_to_shuffle = regs_loaded, clobbered;
5943 /* The method employed here will fail if the register list is fully populated
5944 (we need to avoid loading PC directly). */
5945 gdb_assert (num_to_shuffle < 16);
5950 clobbered = (1 << num_to_shuffle) - 1;
5952 while (num_to_shuffle > 0)
5954 if ((mask & (1 << write_reg)) != 0)
5956 unsigned int read_reg = num_to_shuffle - 1;
5958 if (read_reg != write_reg)
5960 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5961 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5962 if (debug_displaced)
5963 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5964 "loaded register r%d to r%d\n"), read_reg,
5967 else if (debug_displaced)
5968 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5969 "r%d already in the right place\n"),
5972 clobbered &= ~(1 << write_reg);
5980 /* Restore any registers we scribbled over. */
5981 for (write_reg = 0; clobbered != 0; write_reg++)
5983 if ((clobbered & (1 << write_reg)) != 0)
5985 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5987 if (debug_displaced)
5988 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5989 "clobbered register r%d\n"), write_reg);
5990 clobbered &= ~(1 << write_reg);
5994 /* Perform register writeback manually. */
5995 if (dsc->u.block.writeback)
5997 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5999 if (dsc->u.block.increment)
6000 new_rn_val += regs_loaded * 4;
6002 new_rn_val -= regs_loaded * 4;
6004 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6009 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6010 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6013 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6014 struct regcache *regs,
6015 struct displaced_step_closure *dsc)
6017 int load = bit (insn, 20);
6018 int user = bit (insn, 22);
6019 int increment = bit (insn, 23);
6020 int before = bit (insn, 24);
6021 int writeback = bit (insn, 21);
6022 int rn = bits (insn, 16, 19);
6024 /* Block transfers which don't mention PC can be run directly
6026 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6027 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6029 if (rn == ARM_PC_REGNUM)
6031 warning (_("displaced: Unpredictable LDM or STM with "
6032 "base register r15"));
6033 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6036 if (debug_displaced)
6037 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6038 "%.8lx\n", (unsigned long) insn);
6040 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6041 dsc->u.block.rn = rn;
6043 dsc->u.block.load = load;
6044 dsc->u.block.user = user;
6045 dsc->u.block.increment = increment;
6046 dsc->u.block.before = before;
6047 dsc->u.block.writeback = writeback;
6048 dsc->u.block.cond = bits (insn, 28, 31);
6050 dsc->u.block.regmask = insn & 0xffff;
6054 if ((insn & 0xffff) == 0xffff)
6056 /* LDM with a fully-populated register list. This case is
6057 particularly tricky. Implement for now by fully emulating the
6058 instruction (which might not behave perfectly in all cases, but
6059 these instructions should be rare enough for that not to matter
6061 dsc->modinsn[0] = ARM_NOP;
6063 dsc->cleanup = &cleanup_block_load_all;
6067 /* LDM of a list of registers which includes PC. Implement by
6068 rewriting the list of registers to be transferred into a
6069 contiguous chunk r0...rX before doing the transfer, then shuffling
6070 registers into the correct places in the cleanup routine. */
6071 unsigned int regmask = insn & 0xffff;
6072 unsigned int num_in_list = bitcount (regmask), new_regmask;
6075 for (i = 0; i < num_in_list; i++)
6076 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6078 /* Writeback makes things complicated. We need to avoid clobbering
6079 the base register with one of the registers in our modified
6080 register list, but just using a different register can't work in
6083 ldm r14!, {r0-r13,pc}
6085 which would need to be rewritten as:
6089 but that can't work, because there's no free register for N.
6091 Solve this by turning off the writeback bit, and emulating
6092 writeback manually in the cleanup routine. */
6097 new_regmask = (1 << num_in_list) - 1;
6099 if (debug_displaced)
6100 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6101 "{..., pc}: original reg list %.4x, modified "
6102 "list %.4x\n"), rn, writeback ? "!" : "",
6103 (int) insn & 0xffff, new_regmask);
6105 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6107 dsc->cleanup = &cleanup_block_load_pc;
6112 /* STM of a list of registers which includes PC. Run the instruction
6113 as-is, but out of line: this will store the wrong value for the PC,
6114 so we must manually fix up the memory in the cleanup routine.
6115 Doing things this way has the advantage that we can auto-detect
6116 the offset of the PC write (which is architecture-dependent) in
6117 the cleanup routine. */
6118 dsc->modinsn[0] = insn;
6120 dsc->cleanup = &cleanup_block_store_pc;
6127 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6128 struct regcache *regs,
6129 struct displaced_step_closure *dsc)
6131 int rn = bits (insn1, 0, 3);
6132 int load = bit (insn1, 4);
6133 int writeback = bit (insn1, 5);
6135 /* Block transfers which don't mention PC can be run directly
6137 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6138 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6140 if (rn == ARM_PC_REGNUM)
6142 warning (_("displaced: Unpredictable LDM or STM with "
6143 "base register r15"));
6144 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6145 "unpredictable ldm/stm", dsc);
6148 if (debug_displaced)
6149 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6150 "%.4x%.4x\n", insn1, insn2);
6152 /* Clear bit 13, since it should always be zero. */
6153 dsc->u.block.regmask = (insn2 & 0xdfff);
6154 dsc->u.block.rn = rn;
6156 dsc->u.block.load = load;
6157 dsc->u.block.user = 0;
6158 dsc->u.block.increment = bit (insn1, 7);
6159 dsc->u.block.before = bit (insn1, 8);
6160 dsc->u.block.writeback = writeback;
6161 dsc->u.block.cond = INST_AL;
6162 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6166 if (dsc->u.block.regmask == 0xffff)
6168 /* This case cannot happen: bit 13 was cleared from the register mask above. */
6173 unsigned int regmask = dsc->u.block.regmask;
6174 unsigned int num_in_list = bitcount (regmask), new_regmask;
6177 for (i = 0; i < num_in_list; i++)
6178 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6183 new_regmask = (1 << num_in_list) - 1;
6185 if (debug_displaced)
6186 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6187 "{..., pc}: original reg list %.4x, modified "
6188 "list %.4x\n"), rn, writeback ? "!" : "",
6189 (int) dsc->u.block.regmask, new_regmask);
6191 dsc->modinsn[0] = insn1;
6192 dsc->modinsn[1] = (new_regmask & 0xffff);
6195 dsc->cleanup = &cleanup_block_load_pc;
6200 dsc->modinsn[0] = insn1;
6201 dsc->modinsn[1] = insn2;
6203 dsc->cleanup = &cleanup_block_store_pc;
6208 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6209 This is used to avoid a dependency on BFD's bfd_endian enum. */
6212 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6215 return read_memory_unsigned_integer (memaddr, len,
6216 (enum bfd_endian) byte_order);
6219 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6222 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6225 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6228 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6231 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6236 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6239 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6241 return arm_is_thumb (self->regcache);
6244 /* single_step() is called just before we want to resume the inferior,
6245 if we want to single-step it but there is no hardware or kernel
6246 single-step support. We find the targets of the coming instructions
6247 and place breakpoints on them. */
6249 std::vector<CORE_ADDR>
6250 arm_software_single_step (struct regcache *regcache)
6252 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6253 struct arm_get_next_pcs next_pcs_ctx;
6255 arm_get_next_pcs_ctor (&next_pcs_ctx,
6256 &arm_get_next_pcs_ops,
6257 gdbarch_byte_order (gdbarch),
6258 gdbarch_byte_order_for_code (gdbarch),
6262 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6264 for (CORE_ADDR &pc_ref : next_pcs)
6265 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6270 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6271 for Linux, where some SVC instructions must be treated specially. */
6274 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6275 struct displaced_step_closure *dsc)
6277 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6279 if (debug_displaced)
6280 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6281 "%.8lx\n", (unsigned long) resume_addr);
6283 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6287 /* Common copy routine for the svc instruction. */
6290 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6291 struct displaced_step_closure *dsc)
6293 /* Preparation: none.
6294 Insn: unmodified svc.
6295 Cleanup: pc <- insn_addr + insn_size. */
6297 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6299 dsc->wrote_to_pc = 1;
6301 /* Allow OS-specific code to override SVC handling. */
6302 if (dsc->u.svc.copy_svc_os)
6303 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6306 dsc->cleanup = &cleanup_svc;
6312 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6313 struct regcache *regs, struct displaced_step_closure *dsc)
6316 if (debug_displaced)
6317 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6318 (unsigned long) insn);
6320 dsc->modinsn[0] = insn;
6322 return install_svc (gdbarch, regs, dsc);
6326 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6327 struct regcache *regs, struct displaced_step_closure *dsc)
6330 if (debug_displaced)
6331 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6334 dsc->modinsn[0] = insn;
6336 return install_svc (gdbarch, regs, dsc);
6339 /* Copy undefined instructions. */
6342 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6343 struct displaced_step_closure *dsc)
6345 if (debug_displaced)
6346 fprintf_unfiltered (gdb_stdlog,
6347 "displaced: copying undefined insn %.8lx\n",
6348 (unsigned long) insn);
6350 dsc->modinsn[0] = insn;
6356 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6357 struct displaced_step_closure *dsc)
6360 if (debug_displaced)
6361 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6362 "%.4x %.4x\n", (unsigned short) insn1,
6363 (unsigned short) insn2);
6365 dsc->modinsn[0] = insn1;
6366 dsc->modinsn[1] = insn2;
6372 /* Copy unpredictable instructions. */
6375 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6376 struct displaced_step_closure *dsc)
6378 if (debug_displaced)
6379 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6380 "%.8lx\n", (unsigned long) insn);
6382 dsc->modinsn[0] = insn;
6387 /* The decode_* functions are instruction decoding helpers. They mostly follow
6388 the presentation in the ARM ARM. */
6391 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6392 struct regcache *regs,
6393 struct displaced_step_closure *dsc)
6395 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6396 unsigned int rn = bits (insn, 16, 19);
6398 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6399 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6400 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6401 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6402 else if ((op1 & 0x60) == 0x20)
6403 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6404 else if ((op1 & 0x71) == 0x40)
6405 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6407 else if ((op1 & 0x77) == 0x41)
6408 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6409 else if ((op1 & 0x77) == 0x45)
6410 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6411 else if ((op1 & 0x77) == 0x51)
6414 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6416 return arm_copy_unpred (gdbarch, insn, dsc);
6418 else if ((op1 & 0x77) == 0x55)
6419 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6420 else if (op1 == 0x57)
6423 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6424 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6425 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6426 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6427 default: return arm_copy_unpred (gdbarch, insn, dsc);
6429 else if ((op1 & 0x63) == 0x43)
6430 return arm_copy_unpred (gdbarch, insn, dsc);
6431 else if ((op2 & 0x1) == 0x0)
6432 switch (op1 & ~0x80)
6435 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6437 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6438 case 0x71: case 0x75:
6440 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6441 case 0x63: case 0x67: case 0x73: case 0x77:
6442 return arm_copy_unpred (gdbarch, insn, dsc);
6444 return arm_copy_undef (gdbarch, insn, dsc);
6447 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6451 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6452 struct regcache *regs,
6453 struct displaced_step_closure *dsc)
6455 if (bit (insn, 27) == 0)
6456 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6457 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
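/* Illustrative example (not from the original source): BLX <label>
   (0xfa000000) has bit 27 set, bits 26:24 == 010 and bit 20 clear, so
   the key computed below is 0x4 and the insn falls into the 0x4..0x7
   cases, i.e. arm_copy_b_bl_blx.  */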
6458 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6461 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6464 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6466 case 0x4: case 0x5: case 0x6: case 0x7:
6467 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6470 switch ((insn & 0xe00000) >> 21)
6472 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6474 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6477 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6480 return arm_copy_undef (gdbarch, insn, dsc);
6485 int rn_f = (bits (insn, 16, 19) == 0xf);
6486 switch ((insn & 0xe00000) >> 21)
6489 /* ldc/ldc2 imm (undefined for rn == pc). */
6490 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6491 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6494 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6496 case 0x4: case 0x5: case 0x6: case 0x7:
6497 /* ldc/ldc2 lit (undefined for rn != pc). */
6498 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6499 : arm_copy_undef (gdbarch, insn, dsc);
6502 return arm_copy_undef (gdbarch, insn, dsc);
6507 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6510 if (bits (insn, 16, 19) == 0xf)
6512 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6514 return arm_copy_undef (gdbarch, insn, dsc);
6518 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6520 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6524 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6526 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6529 return arm_copy_undef (gdbarch, insn, dsc);
6533 /* Decode miscellaneous instructions in dp/misc encoding space. */
6536 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6537 struct regcache *regs,
6538 struct displaced_step_closure *dsc)
6540 unsigned int op2 = bits (insn, 4, 6);
6541 unsigned int op = bits (insn, 21, 22);
6546 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6549 if (op == 0x1) /* bx. */
6550 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6552 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6554 return arm_copy_undef (gdbarch, insn, dsc);
6558 /* Not really supported. */
6559 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6561 return arm_copy_undef (gdbarch, insn, dsc);
6565 return arm_copy_bx_blx_reg (gdbarch, insn,
6566 regs, dsc); /* blx register. */
6568 return arm_copy_undef (gdbarch, insn, dsc);
6571 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6575 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6577 /* Not really supported. */
6578 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6581 return arm_copy_undef (gdbarch, insn, dsc);
6586 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6587 struct regcache *regs,
6588 struct displaced_step_closure *dsc)
6591 switch (bits (insn, 20, 24))
6594 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6597 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6599 case 0x12: case 0x16:
6600 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6603 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6607 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6609 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6610 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6611 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6612 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6613 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6614 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6615 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6616 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6617 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6618 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6619 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6620 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6621 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6622 /* 2nd arg means "unprivileged". */
6623 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6627 /* Should be unreachable. */
6632 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6633 struct regcache *regs,
6634 struct displaced_step_closure *dsc)
6636 int a = bit (insn, 25), b = bit (insn, 4);
6637 uint32_t op1 = bits (insn, 20, 24);
6639 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6640 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6641 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6642 else if ((!a && (op1 & 0x17) == 0x02)
6643 || (a && (op1 & 0x17) == 0x02 && !b))
6644 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6645 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6646 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6647 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6648 else if ((!a && (op1 & 0x17) == 0x03)
6649 || (a && (op1 & 0x17) == 0x03 && !b))
6650 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6651 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6652 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6653 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6654 else if ((!a && (op1 & 0x17) == 0x06)
6655 || (a && (op1 & 0x17) == 0x06 && !b))
6656 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6657 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6658 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6659 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6660 else if ((!a && (op1 & 0x17) == 0x07)
6661 || (a && (op1 & 0x17) == 0x07 && !b))
6662 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6664 /* Should be unreachable. */
6669 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6670 struct displaced_step_closure *dsc)
6672 switch (bits (insn, 20, 24))
6674 case 0x00: case 0x01: case 0x02: case 0x03:
6675 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6677 case 0x04: case 0x05: case 0x06: case 0x07:
6678 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6680 case 0x08: case 0x09: case 0x0a: case 0x0b:
6681 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6682 return arm_copy_unmodified (gdbarch, insn,
6683 "decode/pack/unpack/saturate/reverse", dsc);
6686 if (bits (insn, 5, 7) == 0) /* op2. */
6688 if (bits (insn, 12, 15) == 0xf)
6689 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6691 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6694 return arm_copy_undef (gdbarch, insn, dsc);
6696 case 0x1a: case 0x1b:
6697 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6698 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6700 return arm_copy_undef (gdbarch, insn, dsc);
6702 case 0x1c: case 0x1d:
6703 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6705 if (bits (insn, 0, 3) == 0xf)
6706 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6708 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6711 return arm_copy_undef (gdbarch, insn, dsc);
6713 case 0x1e: case 0x1f:
6714 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6715 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6717 return arm_copy_undef (gdbarch, insn, dsc);
6720 /* Should be unreachable. */
6725 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6726 struct regcache *regs,
6727 struct displaced_step_closure *dsc)
6730 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6732 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6736 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6737 struct regcache *regs,
6738 struct displaced_step_closure *dsc)
6740 unsigned int opcode = bits (insn, 20, 24);
6744 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6745 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6747 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6748 case 0x12: case 0x16:
6749 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6751 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6752 case 0x13: case 0x17:
6753 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6755 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6756 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6757 /* Note: no writeback for these instructions. Bit 25 will always be
6758 zero though (via caller), so the following works OK. */
6759 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6762 /* Should be unreachable. */
6766 /* Decode shifted register instructions. */
6769 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6770 uint16_t insn2, struct regcache *regs,
6771 struct displaced_step_closure *dsc)
6773 /* PC is only allowed to be used in the MOV instruction.  */
6775 unsigned int op = bits (insn1, 5, 8);
6776 unsigned int rn = bits (insn1, 0, 3);
6778 if (op == 0x2 && rn == 0xf) /* MOV */
6779 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6781 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6782 "dp (shift reg)", dsc);
6786 /* Decode extension register load/store. Exactly the same as
6787 arm_decode_ext_reg_ld_st. */
6790 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6791 uint16_t insn2, struct regcache *regs,
6792 struct displaced_step_closure *dsc)
6794 unsigned int opcode = bits (insn1, 4, 8);
6798 case 0x04: case 0x05:
6799 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6800 "vfp/neon vmov", dsc);
6802 case 0x08: case 0x0c: /* 01x00 */
6803 case 0x0a: case 0x0e: /* 01x10 */
6804 case 0x12: case 0x16: /* 10x10 */
6805 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6806 "vfp/neon vstm/vpush", dsc);
6808 case 0x09: case 0x0d: /* 01x01 */
6809 case 0x0b: case 0x0f: /* 01x11 */
6810 case 0x13: case 0x17: /* 10x11 */
6811 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6812 "vfp/neon vldm/vpop", dsc);
6814 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6815 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6817 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6818 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6821 /* Should be unreachable. */
6826 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6827 struct regcache *regs, struct displaced_step_closure *dsc)
6829 unsigned int op1 = bits (insn, 20, 25);
6830 int op = bit (insn, 4);
6831 unsigned int coproc = bits (insn, 8, 11);
6833 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6834 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6835 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6836 && (coproc & 0xe) != 0xa)
6838 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6839 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6840 && (coproc & 0xe) != 0xa)
6841 /* ldc/ldc2 imm/lit. */
6842 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6843 else if ((op1 & 0x3e) == 0x00)
6844 return arm_copy_undef (gdbarch, insn, dsc);
6845 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6846 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6847 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6848 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6849 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6850 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6851 else if ((op1 & 0x30) == 0x20 && !op)
6853 if ((coproc & 0xe) == 0xa)
6854 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6856 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6858 else if ((op1 & 0x30) == 0x20 && op)
6859 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6860 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6861 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6862 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6863 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6864 else if ((op1 & 0x30) == 0x30)
6865 return arm_copy_svc (gdbarch, insn, regs, dsc);
6867 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6871 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6872 uint16_t insn2, struct regcache *regs,
6873 struct displaced_step_closure *dsc)
6875 unsigned int coproc = bits (insn2, 8, 11);
6876 unsigned int bit_5_8 = bits (insn1, 5, 8);
6877 unsigned int bit_9 = bit (insn1, 9);
6878 unsigned int bit_4 = bit (insn1, 4);
6883 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6884 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6886 else if (bit_5_8 == 0) /* UNDEFINED. */
6887 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6890 /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
6891 if ((coproc & 0xe) == 0xa)
6892 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6894 else /* coproc is not 101x. */
6896 if (bit_4 == 0) /* STC/STC2. */
6897 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6899 else /* LDC/LDC2 {literal, immediate}.  */
6900 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6906 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6912 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6913 struct displaced_step_closure *dsc, int rd)
6919 Preparation: Rd <- PC
6925 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6926 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6930 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6931 struct displaced_step_closure *dsc,
6932 int rd, unsigned int imm)
6935 /* Encoding T2: ADDS Rd, #imm */
6936 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6938 install_pc_relative (gdbarch, regs, dsc, rd);
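/* Illustrative note (not from the original source): a Thumb "adr" such
   as "adr r2, <label>" is rewritten above as an add-immediate with r2
   as the destination, while install_pc_relative preloads r2 with the
   PC value of the original location; executing the copy in the scratch
   space therefore yields an address relative to the original
   instruction rather than to the scratch pad.  */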
6944 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6945 struct regcache *regs,
6946 struct displaced_step_closure *dsc)
6948 unsigned int rd = bits (insn, 8, 10);
6949 unsigned int imm8 = bits (insn, 0, 7);
6951 if (debug_displaced)
6952 fprintf_unfiltered (gdb_stdlog,
6953 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6956 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6960 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6961 uint16_t insn2, struct regcache *regs,
6962 struct displaced_step_closure *dsc)
6964 unsigned int rd = bits (insn2, 8, 11);
6965 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6966 extract the raw immediate fields rather than computing the immediate
6967 value.  When generating the ADD or SUB instruction we can then OR the
6968 fields straight into the encoding.  */
6969 unsigned int imm_3_8 = insn2 & 0x70ff;
6970 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6972 if (debug_displaced)
6973 fprintf_unfiltered (gdb_stdlog,
6974 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6975 rd, imm_i, imm_3_8, insn1, insn2);
6977 if (bit (insn1, 7)) /* ADR encoding T2 (label before PC).  */
6979 /* Rewrite as SUB (immediate) encoding T3: SUB Rd, Rd, #imm.  */
6980 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6981 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6983 else /* ADR encoding T3 (label after PC).  */
6985 /* Rewrite as ADD (immediate) encoding T3: ADD Rd, Rd, #imm.  */
6986 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6987 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6991 install_pc_relative (gdbarch, regs, dsc, rd);
6997 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6998 struct regcache *regs,
6999 struct displaced_step_closure *dsc)
7001 unsigned int rt = bits (insn1, 8, 10);
7003 int imm8 = (bits (insn1, 0, 7) << 2);
7009 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7011 Insn: LDR R0, [R2, R3];
7012 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7014 if (debug_displaced)
7015 fprintf_unfiltered (gdb_stdlog,
7016 "displaced: copying thumb ldr r%d [pc #%d]\n"
7019 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7020 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7021 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7022 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7023 /* The assembler calculates the required value of the offset from the
7024 Align(PC,4) value of this instruction to the label. */
7025 pc = pc & 0xfffffffc;
7027 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7028 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7031 dsc->u.ldst.xfersize = 4;
7033 dsc->u.ldst.immed = 0;
7034 dsc->u.ldst.writeback = 0;
7035 dsc->u.ldst.restore_r4 = 0;
7037 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7039 dsc->cleanup = &cleanup_load;
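/* Worked example (illustrative, not from the original source): for
   "ldr r3, [pc, #8]" at address 0x8000, the preparation above sets
   r2 = Align (0x8000 + 4, 4) = 0x8004 and r3 = 8, so the substituted
   "ldr r0, [r2, r3]" loads from 0x800c -- the same address the
   original instruction would have used -- and cleanup_load then moves
   the result into the original destination register and restores
   r0, r2 and r3.  */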
7044 /* Copy Thumb cbnz/cbz instruction.  */
7047 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7048 struct regcache *regs,
7049 struct displaced_step_closure *dsc)
7051 int non_zero = bit (insn1, 11);
7052 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7053 CORE_ADDR from = dsc->insn_addr;
7054 int rn = bits (insn1, 0, 2);
7055 int rn_val = displaced_read_reg (regs, dsc, rn);
7057 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7058 /* CBNZ and CBZ do not affect the condition flags.  If the condition is
7059 true, set it to INST_AL so that cleanup_branch knows the branch is taken;
7060 otherwise leave it as is and cleanup_branch will do nothing.  */
7061 if (dsc->u.branch.cond)
7063 dsc->u.branch.cond = INST_AL;
7064 dsc->u.branch.dest = from + 4 + imm5;
7067 dsc->u.branch.dest = from + 2;
7069 dsc->u.branch.link = 0;
7070 dsc->u.branch.exchange = 0;
7072 if (debug_displaced)
7073 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7074 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7075 rn, rn_val, insn1, dsc->u.branch.dest);
7077 dsc->modinsn[0] = THUMB_NOP;
7079 dsc->cleanup = &cleanup_branch;
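/* Illustrative example (not from the original source): for
   "cbz r0, <label>" with r0 == 0 the branch is taken, so the condition
   is recorded as INST_AL and the destination is insn_addr + 4 plus the
   offset rebuilt above; the copied insn itself becomes a Thumb NOP and
   cleanup_branch writes the real destination into the PC.  */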
7083 /* Copy Table Branch Byte/Halfword.  */
7085 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7086 uint16_t insn2, struct regcache *regs,
7087 struct displaced_step_closure *dsc)
7089 ULONGEST rn_val, rm_val;
7090 int is_tbh = bit (insn2, 4);
7091 CORE_ADDR halfwords = 0;
7092 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7094 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7095 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7101 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7102 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7108 target_read_memory (rn_val + rm_val, buf, 1);
7109 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7112 if (debug_displaced)
7113 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7114 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7115 (unsigned int) rn_val, (unsigned int) rm_val,
7116 (unsigned int) halfwords);
7118 dsc->u.branch.cond = INST_AL;
7119 dsc->u.branch.link = 0;
7120 dsc->u.branch.exchange = 0;
7121 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7123 dsc->cleanup = &cleanup_branch;
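/* Worked example (illustrative, not from the original source): for
   "tbh [r1, r2, lsl #1]" with r1 == 0x1000, r2 == 2 and the halfword
   0x10 stored at 0x1004, the code above computes
   dest = insn_addr + 4 + 2 * 0x10, and cleanup_branch later installs
   that address as the new PC.  */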
7129 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7130 struct displaced_step_closure *dsc)
7133 int val = displaced_read_reg (regs, dsc, 7);
7134 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7137 val = displaced_read_reg (regs, dsc, 8);
7138 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7141 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7146 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7147 struct regcache *regs,
7148 struct displaced_step_closure *dsc)
7150 dsc->u.block.regmask = insn1 & 0x00ff;
7152 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7155 (1) register list is full, that is, r0-r7 are used.
7156 Prepare: tmp[0] <- r8
7158 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7159 MOV r8, r7; Move value of r7 to r8;
7160 POP {r7}; Store PC value into r7.
7162 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7164 (2) register list is not full; suppose there are N registers in the
7165 register list (excluding PC, 0 <= N <= 7).
7166 Prepare: for each i in 0 .. N, tmp[i] <- ri.
7168 POP {r0, r1, ...., rN};
7170 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7171 from tmp[] properly.
7173 if (debug_displaced)
7174 fprintf_unfiltered (gdb_stdlog,
7175 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7176 dsc->u.block.regmask, insn1);
7178 if (dsc->u.block.regmask == 0xff)
7180 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7182 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7183 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7184 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7187 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7191 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7193 unsigned int new_regmask;
7195 for (i = 0; i < num_in_list + 1; i++)
7196 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7198 new_regmask = (1 << (num_in_list + 1)) - 1;
7200 if (debug_displaced)
7201 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7202 "{..., pc}: original reg list %.4x,"
7203 " modified list %.4x\n"),
7204 (int) dsc->u.block.regmask, new_regmask);
7206 dsc->u.block.regmask |= 0x8000;
7207 dsc->u.block.writeback = 0;
7208 dsc->u.block.cond = INST_AL;
7210 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7212 dsc->cleanup = &cleanup_block_load_pc;
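/* Worked example (illustrative, not from the original source): for
   "pop {r0, r1, pc}" the register list is not full, so
   num_in_list == 2, new_regmask == 0x7 and the copied insn becomes
   "pop {r0, r1, r2}"; the stacked return address lands in r2, and
   cleanup_block_load_pc uses the original register list recorded above
   (with bit 15 set) to write that value to the PC and put the other
   popped words back in r0 and r1.  */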
7219 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7220 struct regcache *regs,
7221 struct displaced_step_closure *dsc)
7223 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7224 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7227 /* 16-bit thumb instructions. */
7228 switch (op_bit_12_15)
7230 /* Shift (immediate), add, subtract, move and compare.  */
7231 case 0: case 1: case 2: case 3:
7232 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7233 "shift/add/sub/mov/cmp",
7237 switch (op_bit_10_11)
7239 case 0: /* Data-processing */
7240 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7244 case 1: /* Special data instructions and branch and exchange. */
7246 unsigned short op = bits (insn1, 7, 9);
7247 if (op == 6 || op == 7) /* BX or BLX */
7248 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7249 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7250 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7252 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7256 default: /* LDR (literal) */
7257 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7260 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7261 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7264 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7265 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7266 else /* Generate SP-relative address */
7267 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7269 case 11: /* Misc 16-bit instructions */
7271 switch (bits (insn1, 8, 11))
7273 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7274 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7276 case 12: case 13: /* POP */
7277 if (bit (insn1, 8)) /* PC is in register list. */
7278 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7280 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7282 case 15: /* If-Then, and hints */
7283 if (bits (insn1, 0, 3))
7284 /* If-Then makes up to four following instructions conditional.
7285 The IT instruction itself is not conditional, so handle it as an
7286 ordinary unmodified instruction.  */
7287 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7290 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7293 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7298 if (op_bit_10_11 < 2) /* Store multiple registers */
7299 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7300 else /* Load multiple registers */
7301 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7303 case 13: /* Conditional branch and supervisor call */
7304 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7305 err = thumb_copy_b (gdbarch, insn1, dsc);
7307 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7309 case 14: /* Unconditional branch */
7310 err = thumb_copy_b (gdbarch, insn1, dsc);
7317 internal_error (__FILE__, __LINE__,
7318 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7322 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7323 uint16_t insn1, uint16_t insn2,
7324 struct regcache *regs,
7325 struct displaced_step_closure *dsc)
7327 int rt = bits (insn2, 12, 15);
7328 int rn = bits (insn1, 0, 3);
7329 int op1 = bits (insn1, 7, 8);
7331 switch (bits (insn1, 5, 6))
7333 case 0: /* Load byte and memory hints */
7334 if (rt == 0xf) /* PLD/PLI */
7337 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7338 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7340 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7345 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7346 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7349 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7350 "ldrb{reg, immediate}/ldrbt",
7355 case 1: /* Load halfword and memory hints. */
7356 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7357 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7358 "pld/unalloc memhint", dsc);
7362 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7365 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7369 case 2: /* Load word */
7371 int insn2_bit_8_11 = bits (insn2, 8, 11);
7374 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7375 else if (op1 == 0x1) /* Encoding T3 */
7376 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7378 else /* op1 == 0x0 */
7380 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7381 /* LDR (immediate) */
7382 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7383 dsc, bit (insn2, 8), 1);
7384 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7385 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7388 /* LDR (register) */
7389 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7395 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7402 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7403 uint16_t insn2, struct regcache *regs,
7404 struct displaced_step_closure *dsc)
7407 unsigned short op = bit (insn2, 15);
7408 unsigned int op1 = bits (insn1, 11, 12);
7414 switch (bits (insn1, 9, 10))
7419 /* Load/store {dual, exclusive}, table branch.  */
7420 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7421 && bits (insn2, 5, 7) == 0)
7422 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7425 /* PC is not allowed to be used in load/store {dual, exclusive} instructions.  */
7427 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7428 "load/store dual/ex", dsc);
7430 else /* load/store multiple */
7432 switch (bits (insn1, 7, 8))
7434 case 0: case 3: /* SRS, RFE */
7435 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7438 case 1: case 2: /* LDM/STM/PUSH/POP */
7439 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7446 /* Data-processing (shift register). */
7447 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7450 default: /* Coprocessor instructions. */
7451 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7456 case 2: /* op1 = 2 */
7457 if (op) /* Branch and misc control. */
7459 if (bit (insn2, 14) /* BLX/BL */
7460 || bit (insn2, 12) /* Unconditional branch */
7461 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7462 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7464 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7469 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7471 int op = bits (insn1, 4, 8);
7472 int rn = bits (insn1, 0, 3);
7473 if ((op == 0 || op == 0xa) && rn == 0xf)
7474 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7477 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7480 else /* Data processing (modified immediate) */
7481 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7485 case 3: /* op1 = 3 */
7486 switch (bits (insn1, 9, 10))
7490 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7492 else /* NEON Load/Store and Store single data item */
7493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7494 "neon elt/struct load/store",
7497 case 1: /* op1 = 3, bits (9, 10) == 1 */
7498 switch (bits (insn1, 7, 8))
7500 case 0: case 1: /* Data processing (register) */
7501 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7504 case 2: /* Multiply and absolute difference */
7505 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7506 "mul/mua/diff", dsc);
7508 case 3: /* Long multiply and divide */
7509 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7514 default: /* Coprocessor instructions */
7515 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7524 internal_error (__FILE__, __LINE__,
7525 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7530 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7531 struct regcache *regs,
7532 struct displaced_step_closure *dsc)
7534 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7536 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7538 if (debug_displaced)
7539 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7540 "at %.8lx\n", insn1, (unsigned long) from);
7543 dsc->insn_size = thumb_insn_size (insn1);
7544 if (thumb_insn_size (insn1) == 4)
7547 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7548 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7551 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
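/* Informational note (not from the original source): the first
   halfword read above determines the instruction length -- the
   Thumb-2 32-bit prefixes are the halfwords whose top five bits are
   0b11101, 0b11110 or 0b11111 (e.g. 0xe92d, the start of a "push.w"),
   for which thumb_insn_size returns 4 and the second halfword is
   fetched; everything else is a 16-bit insn.  */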
7555 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7556 CORE_ADDR to, struct regcache *regs,
7557 struct displaced_step_closure *dsc)
7560 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7563 /* Most displaced instructions use a 1-instruction scratch space, so set this
7564 here and override below if/when necessary. */
7566 dsc->insn_addr = from;
7567 dsc->scratch_base = to;
7568 dsc->cleanup = NULL;
7569 dsc->wrote_to_pc = 0;
7571 if (!displaced_in_arm_mode (regs))
7572 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7576 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7577 if (debug_displaced)
7578 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7579 "at %.8lx\n", (unsigned long) insn,
7580 (unsigned long) from);
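/* Illustrative example (not from the original source): a conditional
   "ldr r0, [r1]" (0xe5910000) has cond == 0xe, bit 4 clear and
   bits 27:25 == 010, so the dispatch key computed below is 0x4 and the
   insn is handled by arm_decode_ld_st_word_ubyte.  */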
7582 if ((insn & 0xf0000000) == 0xf0000000)
7583 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7584 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7586 case 0x0: case 0x1: case 0x2: case 0x3:
7587 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7590 case 0x4: case 0x5: case 0x6:
7591 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7595 err = arm_decode_media (gdbarch, insn, dsc);
7598 case 0x8: case 0x9: case 0xa: case 0xb:
7599 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7602 case 0xc: case 0xd: case 0xe: case 0xf:
7603 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7608 internal_error (__FILE__, __LINE__,
7609 _("arm_process_displaced_insn: Instruction decode error"));
7612 /* Actually set up the scratch space for a displaced instruction. */
7615 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7616 CORE_ADDR to, struct displaced_step_closure *dsc)
7618 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7619 unsigned int i, len, offset;
7620 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7621 int size = dsc->is_thumb? 2 : 4;
7622 const gdb_byte *bkp_insn;
7625 /* Poke modified instruction(s). */
7626 for (i = 0; i < dsc->numinsns; i++)
7628 if (debug_displaced)
7630 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7632 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7635 fprintf_unfiltered (gdb_stdlog, "%.4x",
7636 (unsigned short)dsc->modinsn[i]);
7638 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7639 (unsigned long) to + offset);
7642 write_memory_unsigned_integer (to + offset, size,
7643 byte_order_for_code,
7648 /* Choose the correct breakpoint instruction. */
7651 bkp_insn = tdep->thumb_breakpoint;
7652 len = tdep->thumb_breakpoint_size;
7656 bkp_insn = tdep->arm_breakpoint;
7657 len = tdep->arm_breakpoint_size;
7660 /* Put breakpoint afterwards. */
7661 write_memory (to + offset, bkp_insn, len);
7663 if (debug_displaced)
7664 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7665 paddress (gdbarch, from), paddress (gdbarch, to));
7668 /* Entry point for cleaning things up after a displaced instruction has been single-stepped.  */
7672 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7673 struct displaced_step_closure *dsc,
7674 CORE_ADDR from, CORE_ADDR to,
7675 struct regcache *regs)
7678 dsc->cleanup (gdbarch, regs, dsc);
7680 if (!dsc->wrote_to_pc)
7681 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7682 dsc->insn_addr + dsc->insn_size);
7686 #include "bfd-in2.h"
7687 #include "libcoff.h"
7690 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7692 gdb_disassembler *di
7693 = static_cast<gdb_disassembler *>(info->application_data);
7694 struct gdbarch *gdbarch = di->arch ();
7696 if (arm_pc_is_thumb (gdbarch, memaddr))
7698 static asymbol *asym;
7699 static combined_entry_type ce;
7700 static struct coff_symbol_struct csym;
7701 static struct bfd fake_bfd;
7702 static bfd_target fake_target;
7704 if (csym.native == NULL)
7706 /* Create a fake symbol vector containing a Thumb symbol.
7707 This is solely so that the code in print_insn_little_arm()
7708 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7709 the presence of a Thumb symbol and switch to decoding
7710 Thumb instructions. */
7712 fake_target.flavour = bfd_target_coff_flavour;
7713 fake_bfd.xvec = &fake_target;
7714 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7716 csym.symbol.the_bfd = &fake_bfd;
7717 csym.symbol.name = "fake";
7718 asym = (asymbol *) & csym;
7721 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7722 info->symbols = &asym;
7725 info->symbols = NULL;
7727 /* GDB can get bfd_mach from exec_bfd, so info->mach is accurate; mark
7728 the USER_SPECIFIED_MACHINE_TYPE bit so that opcodes/arm-dis.c:print_insn
7729 does not reset info->mach, which would otherwise trigger the assert on
7730 the mismatch between info->mach and bfd_get_mach (exec_bfd) in
7731 default_print_insn.  */
7732 if (exec_bfd != NULL)
7733 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7735 return default_print_insn (memaddr, info);
7738 /* The following define instruction sequences that will cause ARM
7739 CPUs to take an undefined instruction trap. These are used to
7740 signal a breakpoint to GDB.
7742 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7743 modes. A different instruction is required for each mode. The ARM
7744 CPUs can also be big or little endian. Thus four different
7745 instructions are needed to support all cases.
7747 Note: ARMv4 defines several new instructions that will take the
7748 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7749 not in fact add the new instructions. The new undefined
7750 instructions in ARMv4 are all instructions that had no defined
7751 behaviour in earlier chips.  There is no guarantee that they will
7752 raise an exception; they may instead be treated as NOPs.  In practice,
7753 it may only be safe to rely on instructions matching:
7755 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7756 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7757 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7759 Even this may only be true if the condition predicate is true.  The
7760 following use a condition predicate of ALWAYS so it is always TRUE.
7762 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7763 and NetBSD all use a software interrupt rather than an undefined
7764 instruction to force a trap.  This can be handled by the
7765 abi-specific code during establishment of the gdbarch vector. */
7767 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7768 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7769 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7770 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
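/* Informational cross-check (not from the original source): the ARM
   byte sequences above encode the word 0xe7ffdefe, which has a
   condition of ALWAYS, bits 27:25 == 011 and bit 4 set, so it falls in
   the pattern described in the comment above; the Thumb sequence
   encodes 0xbebe, i.e. BKPT #0xbe.  */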
7772 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7773 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7774 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7775 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7777 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7780 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7782 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7783 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7785 if (arm_pc_is_thumb (gdbarch, *pcptr))
7787 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7789 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7790 check whether we are replacing a 32-bit instruction. */
7791 if (tdep->thumb2_breakpoint != NULL)
7795 if (target_read_memory (*pcptr, buf, 2) == 0)
7797 unsigned short inst1;
7799 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7800 if (thumb_insn_size (inst1) == 4)
7801 return ARM_BP_KIND_THUMB2;
7805 return ARM_BP_KIND_THUMB;
7808 return ARM_BP_KIND_ARM;
7812 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7814 static const gdb_byte *
7815 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7817 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7821 case ARM_BP_KIND_ARM:
7822 *size = tdep->arm_breakpoint_size;
7823 return tdep->arm_breakpoint;
7824 case ARM_BP_KIND_THUMB:
7825 *size = tdep->thumb_breakpoint_size;
7826 return tdep->thumb_breakpoint;
7827 case ARM_BP_KIND_THUMB2:
7828 *size = tdep->thumb2_breakpoint_size;
7829 return tdep->thumb2_breakpoint;
7831 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7835 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7838 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7839 struct regcache *regcache,
7844 /* Check that the memory pointed to by PC is readable.  */
7845 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7847 struct arm_get_next_pcs next_pcs_ctx;
7849 arm_get_next_pcs_ctor (&next_pcs_ctx,
7850 &arm_get_next_pcs_ops,
7851 gdbarch_byte_order (gdbarch),
7852 gdbarch_byte_order_for_code (gdbarch),
7856 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7858 /* If *PCPTR matches one of the next PCs computed by the software
7859 single-step logic, take the Thumb mode from that predicted
7860 destination address.  */
7861 for (CORE_ADDR pc : next_pcs)
7863 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7865 if (IS_THUMB_ADDR (pc))
7867 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7868 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7871 return ARM_BP_KIND_ARM;
7876 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7879 /* Extract from an array REGBUF containing the (raw) register state a
7880 function return value of type TYPE, and copy that, in virtual
7881 format, into VALBUF. */
7884 arm_extract_return_value (struct type *type, struct regcache *regs,
7887 struct gdbarch *gdbarch = get_regcache_arch (regs);
7888 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7890 if (TYPE_CODE_FLT == TYPE_CODE (type))
7892 switch (gdbarch_tdep (gdbarch)->fp_model)
7896 /* The value is in register F0 in internal format. We need to
7897 extract the raw value and then convert it to the desired internal type.  */
7899 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7901 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7902 convert_typed_floating (tmpbuf, arm_ext_type (gdbarch),
7907 case ARM_FLOAT_SOFT_FPA:
7908 case ARM_FLOAT_SOFT_VFP:
7909 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7910 not using the VFP ABI code. */
7912 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7913 if (TYPE_LENGTH (type) > 4)
7914 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7915 valbuf + INT_REGISTER_SIZE);
7919 internal_error (__FILE__, __LINE__,
7920 _("arm_extract_return_value: "
7921 "Floating point model not supported"));
7925 else if (TYPE_CODE (type) == TYPE_CODE_INT
7926 || TYPE_CODE (type) == TYPE_CODE_CHAR
7927 || TYPE_CODE (type) == TYPE_CODE_BOOL
7928 || TYPE_CODE (type) == TYPE_CODE_PTR
7929 || TYPE_IS_REFERENCE (type)
7930 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7932 /* If the type is a plain integer, then the access is
7933 straightforward.  Otherwise we have to play around a bit more.  */
7935 int len = TYPE_LENGTH (type);
7936 int regno = ARM_A1_REGNUM;
7941 /* By using store_unsigned_integer we avoid having to do
7942 anything special for small big-endian values. */
7943 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7944 store_unsigned_integer (valbuf,
7945 (len > INT_REGISTER_SIZE
7946 ? INT_REGISTER_SIZE : len),
7948 len -= INT_REGISTER_SIZE;
7949 valbuf += INT_REGISTER_SIZE;
7954 /* For a structure or union the behaviour is as if the value had
7955 been stored to word-aligned memory and then loaded into
7956 registers with 32-bit load instruction(s). */
7957 int len = TYPE_LENGTH (type);
7958 int regno = ARM_A1_REGNUM;
7959 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7963 regcache_cooked_read (regs, regno++, tmpbuf);
7964 memcpy (valbuf, tmpbuf,
7965 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7966 len -= INT_REGISTER_SIZE;
7967 valbuf += INT_REGISTER_SIZE;
7973 /* Will a function return an aggregate type in memory or in a
7974 register? Return 0 if an aggregate type can be returned in a
7975 register, 1 if it must be returned in memory. */
7978 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7980 enum type_code code;
7982 type = check_typedef (type);
7984 /* Simple, non-aggregate types (ie not including vectors and
7985 complex) are always returned in a register (or registers). */
7986 code = TYPE_CODE (type);
7987 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7988 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7991 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7993 /* Vector values should be returned using ARM registers if they
7994 are not over 16 bytes. */
7995 return (TYPE_LENGTH (type) > 16);
7998 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8000 /* The AAPCS says all aggregates not larger than a word are returned in a register.  */
8002 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8011 /* All aggregate types that won't fit in a register must be returned in memory.  */
8013 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8016 /* In the ARM ABI, "integer" like aggregate types are returned in
8017 registers. For an aggregate type to be integer like, its size
8018 must be less than or equal to INT_REGISTER_SIZE and the
8019 offset of each addressable subfield must be zero. Note that bit
8020 fields are not addressable, and all addressable subfields of
8021 unions always start at offset zero.
8023 This function is based on the behaviour of GCC 2.95.1.
8024 See: gcc/arm.c: arm_return_in_memory() for details.
8026 Note: All versions of GCC before GCC 2.95.2 do not set up the
8027 parameters correctly for a function returning the following
8028 structure: struct { float f;}; This should be returned in memory,
8029 not a register. Richard Earnshaw sent me a patch, but I do not
8030 know of any way to detect if a function like the above has been
8031 compiled with the correct calling convention. */
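/* For example (illustrative, not from the original source): under
   these rules "struct { int i; }" is integer like (a single
   addressable field at offset zero and no floating point members) and
   is returned in a register, whereas "struct { float f; }" contains a
   floating point field and so must be returned in memory.  */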
8033 /* Assume all other aggregate types can be returned in a register.
8034 Run a check for structures, unions and arrays. */
8037 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8040 /* Need to check if this struct/union is "integer" like. For
8041 this to be true, its size must be less than or equal to
8042 INT_REGISTER_SIZE and the offset of each addressable
8043 subfield must be zero. Note that bit fields are not
8044 addressable, and unions always start at offset zero. If any
8045 of the subfields is a floating point type, the struct/union
8046 cannot be an integer type. */
8048 /* For each field in the object, check:
8049 1) Is it FP? --> yes, nRc = 1;
8050 2) Is it addressable (bitpos != 0) and
8051 not packed (bitsize == 0)?  If so, nRc = 1.  */
8055 for (i = 0; i < TYPE_NFIELDS (type); i++)
8057 enum type_code field_type_code;
8060 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8063 /* Is it a floating point type field? */
8064 if (field_type_code == TYPE_CODE_FLT)
8070 /* If bitpos != 0, then we have to care about it. */
8071 if (TYPE_FIELD_BITPOS (type, i) != 0)
8073 /* Bitfields are not addressable. If the field bitsize is
8074 zero, then the field is not packed. Hence it cannot be
8075 a bitfield or any other packed type. */
8076 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8089 /* Write into appropriate registers a function return value of type
8090 TYPE, given in virtual format. */
8093 arm_store_return_value (struct type *type, struct regcache *regs,
8094 const gdb_byte *valbuf)
8096 struct gdbarch *gdbarch = get_regcache_arch (regs);
8097 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8099 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8101 gdb_byte buf[FP_REGISTER_SIZE];
8103 switch (gdbarch_tdep (gdbarch)->fp_model)
8107 convert_typed_floating (valbuf, type, buf, arm_ext_type (gdbarch));
8108 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8111 case ARM_FLOAT_SOFT_FPA:
8112 case ARM_FLOAT_SOFT_VFP:
8113 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8114 not using the VFP ABI code. */
8116 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8117 if (TYPE_LENGTH (type) > 4)
8118 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8119 valbuf + INT_REGISTER_SIZE);
8123 internal_error (__FILE__, __LINE__,
8124 _("arm_store_return_value: Floating "
8125 "point model not supported"));
8129 else if (TYPE_CODE (type) == TYPE_CODE_INT
8130 || TYPE_CODE (type) == TYPE_CODE_CHAR
8131 || TYPE_CODE (type) == TYPE_CODE_BOOL
8132 || TYPE_CODE (type) == TYPE_CODE_PTR
8133 || TYPE_IS_REFERENCE (type)
8134 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8136 if (TYPE_LENGTH (type) <= 4)
8138 /* Values of one word or less are zero/sign-extended and returned in register r0.  */
8140 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8141 LONGEST val = unpack_long (type, valbuf);
8143 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8144 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8148 /* Integral values greater than one word are stored in consecutive
8149 registers starting with r0. This will always be a multiple of
8150 the register size.  */
8151 int len = TYPE_LENGTH (type);
8152 int regno = ARM_A1_REGNUM;
8156 regcache_cooked_write (regs, regno++, valbuf);
8157 len -= INT_REGISTER_SIZE;
8158 valbuf += INT_REGISTER_SIZE;
8164 /* For a structure or union the behaviour is as if the value had
8165 been stored to word-aligned memory and then loaded into
8166 registers with 32-bit load instruction(s). */
8167 int len = TYPE_LENGTH (type);
8168 int regno = ARM_A1_REGNUM;
8169 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8173 memcpy (tmpbuf, valbuf,
8174 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8175 regcache_cooked_write (regs, regno++, tmpbuf);
8176 len -= INT_REGISTER_SIZE;
8177 valbuf += INT_REGISTER_SIZE;
8183 /* Handle function return values. */
8185 static enum return_value_convention
8186 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8187 struct type *valtype, struct regcache *regcache,
8188 gdb_byte *readbuf, const gdb_byte *writebuf)
8190 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8191 struct type *func_type = function ? value_type (function) : NULL;
8192 enum arm_vfp_cprc_base_type vfp_base_type;
8195 if (arm_vfp_abi_for_function (gdbarch, func_type)
8196 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8198 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8199 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8201 for (i = 0; i < vfp_base_count; i++)
8203 if (reg_char == 'q')
8206 arm_neon_quad_write (gdbarch, regcache, i,
8207 writebuf + i * unit_length);
8210 arm_neon_quad_read (gdbarch, regcache, i,
8211 readbuf + i * unit_length);
8218 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8219 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8222 regcache_cooked_write (regcache, regnum,
8223 writebuf + i * unit_length);
8225 regcache_cooked_read (regcache, regnum,
8226 readbuf + i * unit_length);
8229 return RETURN_VALUE_REGISTER_CONVENTION;
8232 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8233 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8234 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8236 if (tdep->struct_return == pcc_struct_return
8237 || arm_return_in_memory (gdbarch, valtype))
8238 return RETURN_VALUE_STRUCT_CONVENTION;
8240 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8242 if (arm_return_in_memory (gdbarch, valtype))
8243 return RETURN_VALUE_STRUCT_CONVENTION;
8247 arm_store_return_value (valtype, regcache, writebuf);
8250 arm_extract_return_value (valtype, regcache, readbuf);
8252 return RETURN_VALUE_REGISTER_CONVENTION;
8257 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8259 struct gdbarch *gdbarch = get_frame_arch (frame);
8260 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8261 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8263 gdb_byte buf[INT_REGISTER_SIZE];
8265 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8267 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8271 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8275 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8276 return the target PC. Otherwise return 0. */
8279 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8283 CORE_ADDR start_addr;
8285 /* Find the starting address and name of the function containing the PC. */
8286 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8288 /* Trampoline 'bx reg' doesn't belong to any functions.  Do the check here.  */
8290 start_addr = arm_skip_bx_reg (frame, pc);
8291 if (start_addr != 0)
8297 /* If PC is in a Thumb call or return stub, return the address of the
8298 target PC, which is in a register. The thunk functions are called
8299 _call_via_xx, where x is the register name. The possible names
8300 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8301 functions, named __ARM_call_via_r[0-7]. */
8302 if (startswith (name, "_call_via_")
8303 || startswith (name, "__ARM_call_via_"))
8305 /* Use the name suffix to determine which register contains the target PC.  */
8307 static const char *table[15] =
8308 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8309 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8312 int offset = strlen (name) - 2;
8314 for (regno = 0; regno <= 14; regno++)
8315 if (strcmp (&name[offset], table[regno]) == 0)
8316 return get_frame_register_unsigned (frame, regno);
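/* Illustrative example (not from the original source): a stub named
   "_call_via_r3" has the suffix "r3", which matches table[3] above, so
   the returned target PC is simply the value of r3 in the selected
   frame.  */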
8319 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8320 non-interworking calls to foo. We could decode the stubs
8321 to find the target but it's easier to use the symbol table. */
8322 namelen = strlen (name);
8323 if (name[0] == '_' && name[1] == '_'
8324 && ((namelen > 2 + strlen ("_from_thumb")
8325 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8326 || (namelen > 2 + strlen ("_from_arm")
8327 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8330 int target_len = namelen - 2;
8331 struct bound_minimal_symbol minsym;
8332 struct objfile *objfile;
8333 struct obj_section *sec;
8335 if (name[namelen - 1] == 'b')
8336 target_len -= strlen ("_from_thumb");
8338 target_len -= strlen ("_from_arm");
8340 target_name = (char *) alloca (target_len + 1);
8341 memcpy (target_name, name + 2, target_len);
8342 target_name[target_len] = '\0';
8344 sec = find_pc_section (pc);
8345 objfile = (sec == NULL) ? NULL : sec->objfile;
8346 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8347 if (minsym.minsym != NULL)
8348 return BMSYMBOL_VALUE_ADDRESS (minsym);
8353 return 0; /* not a stub */
8357 set_arm_command (const char *args, int from_tty)
8359 printf_unfiltered (_("\
8360 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8361 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8365 show_arm_command (const char *args, int from_tty)
8367 cmd_show_list (showarmcmdlist, from_tty, "");
8371 arm_update_current_architecture (void)
8373 struct gdbarch_info info;
8375 /* If the current architecture is not ARM, we have nothing to do. */
8376 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8379 /* Update the architecture. */
8380 gdbarch_info_init (&info);
8382 if (!gdbarch_update_p (info))
8383 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8387 set_fp_model_sfunc (char *args, int from_tty,
8388 struct cmd_list_element *c)
8392 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8393 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8395 arm_fp_model = (enum arm_float_model) fp_model;
8399 if (fp_model == ARM_FLOAT_LAST)
8400 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8403 arm_update_current_architecture ();
8407 show_fp_model (struct ui_file *file, int from_tty,
8408 struct cmd_list_element *c, const char *value)
8410 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8412 if (arm_fp_model == ARM_FLOAT_AUTO
8413 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8414 fprintf_filtered (file, _("\
8415 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8416 fp_model_strings[tdep->fp_model]);
8418 fprintf_filtered (file, _("\
8419 The current ARM floating point model is \"%s\".\n"),
8420 fp_model_strings[arm_fp_model]);
8424 arm_set_abi (char *args, int from_tty,
8425 struct cmd_list_element *c)
8429 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8430 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8432 arm_abi_global = (enum arm_abi_kind) arm_abi;
8436 if (arm_abi == ARM_ABI_LAST)
8437 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8440 arm_update_current_architecture ();
8444 arm_show_abi (struct ui_file *file, int from_tty,
8445 struct cmd_list_element *c, const char *value)
8447 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8449 if (arm_abi_global == ARM_ABI_AUTO
8450 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8451 fprintf_filtered (file, _("\
8452 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8453 arm_abi_strings[tdep->arm_abi]);
8455 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8460 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8461 struct cmd_list_element *c, const char *value)
8463 fprintf_filtered (file,
8464 _("The current execution mode assumed "
8465 "(when symbols are unavailable) is \"%s\".\n"),
8466 arm_fallback_mode_string);
8470 arm_show_force_mode (struct ui_file *file, int from_tty,
8471 struct cmd_list_element *c, const char *value)
8473 fprintf_filtered (file,
8474 _("The current execution mode assumed "
8475 "(even when symbols are available) is \"%s\".\n"),
8476 arm_force_mode_string);
8479 /* If the user changes the register disassembly style used for info
8480 register and other commands, we have to also switch the style used
8481 in opcodes for disassembly output. This function is run in the "set
8482 arm disassembly" command, and does that. */
8485 set_disassembly_style_sfunc (char *args, int from_tty,
8486 struct cmd_list_element *c)
8488 /* Convert the short style name into the long style name (eg, reg-names-*)
8489 before calling the generic set_disassembler_options() function. */
8490 std::string long_name = std::string ("reg-names-") + disassembly_style;
8491 set_disassembler_options (&long_name[0]);
8495 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8496 struct cmd_list_element *c, const char *value)
8498 struct gdbarch *gdbarch = get_current_arch ();
8499 char *options = get_disassembler_options (gdbarch);
8500 const char *style = "";
8504 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8505 if (CONST_STRNEQ (opt, "reg-names-"))
8507 style = &opt[strlen ("reg-names-")];
8508 len = strcspn (style, ",");
8511 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8514 /* Return the ARM register name corresponding to register I. */
8516 arm_register_name (struct gdbarch *gdbarch, int i)
8518 const int num_regs = gdbarch_num_regs (gdbarch);
8520 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8521 && i >= num_regs && i < num_regs + 32)
8523 static const char *const vfp_pseudo_names[] = {
8524 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8525 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8526 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8527 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8530 return vfp_pseudo_names[i - num_regs];
8533 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8534 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8536 static const char *const neon_pseudo_names[] = {
8537 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8538 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8541 return neon_pseudo_names[i - num_regs - 32];
8544 if (i >= ARRAY_SIZE (arm_register_names))
8545 /* These registers are only supported on targets which supply
8546 an XML description. */
8549 return arm_register_names[i];
8552 /* Test whether the coff symbol specific value corresponds to a Thumb
8556 coff_sym_is_thumb (int val)
8558 return (val == C_THUMBEXT
8559 || val == C_THUMBSTAT
8560 || val == C_THUMBEXTFUNC
8561 || val == C_THUMBSTATFUNC
8562 || val == C_THUMBLABEL);
8565 /* arm_coff_make_msymbol_special()
8566 arm_elf_make_msymbol_special()
8568 These functions test whether the COFF or ELF symbol corresponds to
8569 an address in thumb code, and set a "special" bit in a minimal
8570 symbol to indicate that it does. */
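/* For example, a function assembled under the ".thumb_func" directive
   gets ST_BRANCH_TO_THUMB in its ELF symbol, so its minimal symbol is
   marked special here; arm_pc_is_thumb can then consult that mark when
   no mapping symbols are available.  (Descriptive note.)  */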
8573 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8575 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8577 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8578 == ST_BRANCH_TO_THUMB)
8579 MSYMBOL_SET_SPECIAL (msym);
8583 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8585 if (coff_sym_is_thumb (val))
8586 MSYMBOL_SET_SPECIAL (msym);
8590 arm_objfile_data_free (struct objfile *objfile, void *arg)
8592 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8595 for (i = 0; i < objfile->obfd->section_count; i++)
8596 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8600 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8603 const char *name = bfd_asymbol_name (sym);
8604 struct arm_per_objfile *data;
8605 VEC(arm_mapping_symbol_s) **map_p;
8606 struct arm_mapping_symbol new_map_sym;
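/* Mapping symbols follow the ARM ELF conventions: "$a" marks the start
   of a run of ARM code, "$t" Thumb code and "$d" data.  We keep one
   sorted vector per BFD section so that later lookups can classify an
   arbitrary address.  */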
8608 gdb_assert (name[0] == '$');
8609 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8612 data = (struct arm_per_objfile *) objfile_data (objfile,
8613 arm_objfile_data_key);
8616 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8617 struct arm_per_objfile);
8618 set_objfile_data (objfile, arm_objfile_data_key, data);
8619 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8620 objfile->obfd->section_count,
8621 VEC(arm_mapping_symbol_s) *);
8623 map_p = &data->section_maps[bfd_get_section (sym)->index];
8625 new_map_sym.value = sym->value;
8626 new_map_sym.type = name[1];
8628 /* Assume that most mapping symbols appear in order of increasing
8629 value. If they were randomly distributed, it would be faster to
8630 always push here and then sort at first use. */
8631 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8633 struct arm_mapping_symbol *prev_map_sym;
8635 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8636 if (prev_map_sym->value >= sym->value)
8639 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8640 arm_compare_mapping_symbols);
8641 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8646 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8650 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8652 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8653 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8655 /* If necessary, set the T bit. */
8658 ULONGEST val, t_bit;
8659 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8660 t_bit = arm_psr_thumb_bit (gdbarch);
8661 if (arm_pc_is_thumb (gdbarch, pc))
8662 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8665 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8670 /* Read the contents of a NEON quad register, by reading from two
8671 double registers. This is used to implement the quad pseudo
8672 registers, and for argument passing in case the quad registers are
8673 missing; vectors are passed in quad registers when using the VFP
8674 ABI, even if a NEON unit is not present. REGNUM is the index of
8675 the quad register, in [0, 15]. */
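/* Illustrative example: reading q1 reads the raw registers d2 and d3;
   on a little-endian target d2 fills bytes 0-7 of BUF and d3 bytes
   8-15, while a big-endian target swaps the two halves.  */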
8677 static enum register_status
8678 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8679 int regnum, gdb_byte *buf)
8682 gdb_byte reg_buf[8];
8683 int offset, double_regnum;
8684 enum register_status status;
8686 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8687 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8690 /* d0 is always the least significant half of q0. */
8691 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8696 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8697 if (status != REG_VALID)
8699 memcpy (buf + offset, reg_buf, 8);
8701 offset = 8 - offset;
8702 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8703 if (status != REG_VALID)
8705 memcpy (buf + offset, reg_buf, 8);
8710 static enum register_status
8711 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8712 int regnum, gdb_byte *buf)
8714 const int num_regs = gdbarch_num_regs (gdbarch);
8716 gdb_byte reg_buf[8];
8717 int offset, double_regnum;
8719 gdb_assert (regnum >= num_regs);
8722 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8723 /* Quad-precision register. */
8724 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8727 enum register_status status;
8729 /* Single-precision register. */
8730 gdb_assert (regnum < 32);
8732 /* s0 is always the least significant half of d0. */
8733 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8734 offset = (regnum & 1) ? 0 : 4;
8736 offset = (regnum & 1) ? 4 : 0;
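/* For example, on a little-endian target s0 occupies bytes 0-3 of d0
   and s1 bytes 4-7; the lookup below finds the containing d register
   by name and then extracts the requested half.  */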
8738 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8739 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8742 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8743 if (status == REG_VALID)
8744 memcpy (buf, reg_buf + offset, 4);
8749 /* Store the contents of BUF to a NEON quad register, by writing to
8750 two double registers. This is used to implement the quad pseudo
8751 registers, and for argument passing in case the quad registers are
8752 missing; vectors are passed in quad registers when using the VFP
8753 ABI, even if a NEON unit is not present. REGNUM is the index
8754 of the quad register, in [0, 15]. */
8757 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8758 int regnum, const gdb_byte *buf)
8761 int offset, double_regnum;
8763 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8764 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8767 /* d0 is always the least significant half of q0. */
8768 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8773 regcache_raw_write (regcache, double_regnum, buf + offset);
8774 offset = 8 - offset;
8775 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8779 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8780 int regnum, const gdb_byte *buf)
8782 const int num_regs = gdbarch_num_regs (gdbarch);
8784 gdb_byte reg_buf[8];
8785 int offset, double_regnum;
8787 gdb_assert (regnum >= num_regs);
8790 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8791 /* Quad-precision register. */
8792 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8795 /* Single-precision register. */
8796 gdb_assert (regnum < 32);
8798 /* s0 is always the least significant half of d0. */
8799 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8800 offset = (regnum & 1) ? 0 : 4;
8802 offset = (regnum & 1) ? 4 : 0;
8804 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8805 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8808 regcache_raw_read (regcache, double_regnum, reg_buf);
8809 memcpy (reg_buf + offset, buf, 4);
8810 regcache_raw_write (regcache, double_regnum, reg_buf);
8814 static struct value *
8815 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8817 const int *reg_p = (const int *) baton;
8818 return value_of_register (*reg_p, frame);
8821 static enum gdb_osabi
8822 arm_elf_osabi_sniffer (bfd *abfd)
8824 unsigned int elfosabi;
8825 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8827 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8829 if (elfosabi == ELFOSABI_ARM)
8830 /* GNU tools use this value. Check note sections in this case,
8832 bfd_map_over_sections (abfd,
8833 generic_elf_osabi_sniff_abi_tag_sections,
8836 /* Anything else will be handled by the generic ELF sniffer. */
8841 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8842 struct reggroup *group)
8844 /* FPS register's type is INT, but belongs to float_reggroup. Besides
8845 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8846 all_reggroup, of course. */
8847 if (regnum == ARM_FPS_REGNUM)
8848 return (group == float_reggroup
8849 || group == save_reggroup
8850 || group == restore_reggroup
8851 || group == all_reggroup);
8853 return default_register_reggroup_p (gdbarch, regnum, group);
8857 /* For backward-compatibility we allow two 'g' packet lengths with
8858 the remote protocol depending on whether FPA registers are
8859 supplied. M-profile targets do not have FPA registers, but some
8860 stubs already exist in the wild which use a 'g' packet which
8861 supplies them albeit with dummy values. The packet format which
8862 includes FPA registers should be considered deprecated for
8863 M-profile targets. */
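/* Assuming the usual sizes (INT_REGISTER_SIZE == 4, FP_REGISTER_SIZE
   == 12, VFP_REGISTER_SIZE == 8), the three guesses registered below
   correspond to 'g' packets carrying 168, 68 and 200 bytes of register
   data respectively.  */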
8866 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8868 if (gdbarch_tdep (gdbarch)->is_m)
8870 /* If we know from the executable this is an M-profile target,
8871 cater for remote targets whose register set layout is the
8872 same as the FPA layout. */
8873 register_remote_g_packet_guess (gdbarch,
8874 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8875 (16 * INT_REGISTER_SIZE)
8876 + (8 * FP_REGISTER_SIZE)
8877 + (2 * INT_REGISTER_SIZE),
8878 tdesc_arm_with_m_fpa_layout);
8880 /* The regular M-profile layout. */
8881 register_remote_g_packet_guess (gdbarch,
8882 /* r0-r12,sp,lr,pc; xpsr */
8883 (16 * INT_REGISTER_SIZE)
8884 + INT_REGISTER_SIZE,
8887 /* M-profile plus M4F VFP. */
8888 register_remote_g_packet_guess (gdbarch,
8889 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8890 (16 * INT_REGISTER_SIZE)
8891 + (16 * VFP_REGISTER_SIZE)
8892 + (2 * INT_REGISTER_SIZE),
8893 tdesc_arm_with_m_vfp_d16);
8896 /* Otherwise we don't have a useful guess. */
8899 /* Implement the code_of_frame_writable gdbarch method. */
8902 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8904 if (gdbarch_tdep (gdbarch)->is_m
8905 && get_frame_type (frame) == SIGTRAMP_FRAME)
8907 /* M-profile exception frames return to some magic PCs, which
8908 aren't writable at all. */
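/* Such magic PCs are the EXC_RETURN values (e.g. 0xfffffff1,
   0xfffffff9 or 0xfffffffd) that M-profile exception returns branch
   to; they are not backed by writable memory.  */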
8916 /* Initialize the current architecture based on INFO. If possible,
8917 re-use an architecture from ARCHES, which is a list of
8918 architectures already created during this debugging session.
8920 Called e.g. at program startup, when reading a core file, and when
8921 reading a binary file. */
8923 static struct gdbarch *
8924 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8926 struct gdbarch_tdep *tdep;
8927 struct gdbarch *gdbarch;
8928 struct gdbarch_list *best_arch;
8929 enum arm_abi_kind arm_abi = arm_abi_global;
8930 enum arm_float_model fp_model = arm_fp_model;
8931 struct tdesc_arch_data *tdesc_data = NULL;
8933 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8934 int have_wmmx_registers = 0;
8936 int have_fpa_registers = 1;
8937 const struct target_desc *tdesc = info.target_desc;
8939 /* If we have an object to base this architecture on, try to determine
8942 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8944 int ei_osabi, e_flags;
8946 switch (bfd_get_flavour (info.abfd))
8948 case bfd_target_coff_flavour:
8949 /* Assume it's an old APCS-style ABI. */
8951 arm_abi = ARM_ABI_APCS;
8954 case bfd_target_elf_flavour:
8955 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8956 e_flags = elf_elfheader (info.abfd)->e_flags;
8958 if (ei_osabi == ELFOSABI_ARM)
8960 /* GNU tools used to use this value, but do not for EABI
8961 objects. There's nowhere to tag an EABI version
8962 anyway, so assume APCS. */
8963 arm_abi = ARM_ABI_APCS;
8965 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8967 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8968 int attr_arch, attr_profile;
8972 case EF_ARM_EABI_UNKNOWN:
8973 /* Assume GNU tools. */
8974 arm_abi = ARM_ABI_APCS;
8977 case EF_ARM_EABI_VER4:
8978 case EF_ARM_EABI_VER5:
8979 arm_abi = ARM_ABI_AAPCS;
8980 /* EABI binaries default to VFP float ordering.
8981 They may also contain build attributes that can
8982 be used to identify if the VFP argument-passing
8984 if (fp_model == ARM_FLOAT_AUTO)
8987 switch (bfd_elf_get_obj_attr_int (info.abfd,
8991 case AEABI_VFP_args_base:
8992 /* "The user intended FP parameter/result
8993 passing to conform to AAPCS, base
8995 fp_model = ARM_FLOAT_SOFT_VFP;
8997 case AEABI_VFP_args_vfp:
8998 /* "The user intended FP parameter/result
8999 passing to conform to AAPCS, VFP
9001 fp_model = ARM_FLOAT_VFP;
9003 case AEABI_VFP_args_toolchain:
9004 /* "The user intended FP parameter/result
9005 passing to conform to tool chain-specific
9006 conventions" - we don't know any such
9007 conventions, so leave it as "auto". */
9009 case AEABI_VFP_args_compatible:
9010 /* "Code is compatible with both the base
9011 and VFP variants; the user did not permit
9012 non-variadic functions to pass FP
9013 parameters/results" - leave it as
9017 /* Attribute value not mentioned in the
9018 November 2012 ABI, so leave it as
9023 fp_model = ARM_FLOAT_SOFT_VFP;
9029 /* Leave it as "auto". */
9030 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9035 /* Detect M-profile programs. This only works if the
9036 executable file includes build attributes; GCC does
9037 copy them to the executable, but e.g. RealView does
9039 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9041 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9043 Tag_CPU_arch_profile);
9044 /* GCC specifies the profile for v6-M; RealView only
9045 specifies the profile for architectures starting with
9046 V7 (as opposed to architectures with a tag
9047 numerically greater than TAG_CPU_ARCH_V7). */
9048 if (!tdesc_has_registers (tdesc)
9049 && (attr_arch == TAG_CPU_ARCH_V6_M
9050 || attr_arch == TAG_CPU_ARCH_V6S_M
9051 || attr_profile == 'M'))
9056 if (fp_model == ARM_FLOAT_AUTO)
9058 int e_flags = elf_elfheader (info.abfd)->e_flags;
9060 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9063 /* Leave it as "auto". Strictly speaking this case
9064 means FPA, but almost nobody uses that now, and
9065 many toolchains fail to set the appropriate bits
9066 for the floating-point model they use. */
9068 case EF_ARM_SOFT_FLOAT:
9069 fp_model = ARM_FLOAT_SOFT_FPA;
9071 case EF_ARM_VFP_FLOAT:
9072 fp_model = ARM_FLOAT_VFP;
9074 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9075 fp_model = ARM_FLOAT_SOFT_VFP;
9080 if (e_flags & EF_ARM_BE8)
9081 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9086 /* Leave it as "auto". */
9091 /* Check any target description for validity. */
9092 if (tdesc_has_registers (tdesc))
9094 /* For most registers we require GDB's default names; but also allow
9095 the numeric names for sp / lr / pc, as a convenience. */
9096 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9097 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9098 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9100 const struct tdesc_feature *feature;
9103 feature = tdesc_find_feature (tdesc,
9104 "org.gnu.gdb.arm.core");
9105 if (feature == NULL)
9107 feature = tdesc_find_feature (tdesc,
9108 "org.gnu.gdb.arm.m-profile");
9109 if (feature == NULL)
9115 tdesc_data = tdesc_data_alloc ();
9118 for (i = 0; i < ARM_SP_REGNUM; i++)
9119 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9120 arm_register_names[i]);
9121 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9124 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9127 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9131 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9132 ARM_PS_REGNUM, "xpsr");
9134 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9135 ARM_PS_REGNUM, "cpsr");
9139 tdesc_data_cleanup (tdesc_data);
9143 feature = tdesc_find_feature (tdesc,
9144 "org.gnu.gdb.arm.fpa");
9145 if (feature != NULL)
9148 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9149 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9150 arm_register_names[i]);
9153 tdesc_data_cleanup (tdesc_data);
9158 have_fpa_registers = 0;
9160 feature = tdesc_find_feature (tdesc,
9161 "org.gnu.gdb.xscale.iwmmxt");
9162 if (feature != NULL)
9164 static const char *const iwmmxt_names[] = {
9165 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9166 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9167 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9168 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9172 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9174 &= tdesc_numbered_register (feature, tdesc_data, i,
9175 iwmmxt_names[i - ARM_WR0_REGNUM]);
9177 /* Check for the control registers, but do not fail if they
9179 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9180 tdesc_numbered_register (feature, tdesc_data, i,
9181 iwmmxt_names[i - ARM_WR0_REGNUM]);
9183 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9185 &= tdesc_numbered_register (feature, tdesc_data, i,
9186 iwmmxt_names[i - ARM_WR0_REGNUM]);
9190 tdesc_data_cleanup (tdesc_data);
9194 have_wmmx_registers = 1;
9197 /* If we have a VFP unit, check whether the single precision registers
9198 are present. If not, then we will synthesize them as pseudo
9200 feature = tdesc_find_feature (tdesc,
9201 "org.gnu.gdb.arm.vfp");
9202 if (feature != NULL)
9204 static const char *const vfp_double_names[] = {
9205 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9206 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9207 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9208 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9211 /* Require the double precision registers. There must be either
9214 for (i = 0; i < 32; i++)
9216 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9218 vfp_double_names[i]);
9222 if (!valid_p && i == 16)
9225 /* Also require FPSCR. */
9226 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9227 ARM_FPSCR_REGNUM, "fpscr");
9230 tdesc_data_cleanup (tdesc_data);
9234 if (tdesc_unnumbered_register (feature, "s0") == 0)
9235 have_vfp_pseudos = 1;
9237 vfp_register_count = i;
9239 /* If we have VFP, also check for NEON. The architecture allows
9240 NEON without VFP (integer vector operations only), but GDB
9241 does not support that. */
9242 feature = tdesc_find_feature (tdesc,
9243 "org.gnu.gdb.arm.neon");
9244 if (feature != NULL)
9246 /* NEON requires 32 double-precision registers. */
9249 tdesc_data_cleanup (tdesc_data);
9253 /* If there are quad registers defined by the stub, use
9254 their type; otherwise (normally) provide them with
9255 the default type. */
9256 if (tdesc_unnumbered_register (feature, "q0") == 0)
9257 have_neon_pseudos = 1;
9264 /* If there is already a candidate, use it. */
9265 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9267 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9269 if (arm_abi != ARM_ABI_AUTO
9270 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9273 if (fp_model != ARM_FLOAT_AUTO
9274 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9277 /* There are various other properties in tdep that we do not
9278 need to check here: those derived from a target description,
9279 since gdbarches with a different target description are
9280 automatically disqualified. */
9282 /* Do check is_m, though, since it might come from the binary. */
9283 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9286 /* Found a match. */
9290 if (best_arch != NULL)
9292 if (tdesc_data != NULL)
9293 tdesc_data_cleanup (tdesc_data);
9294 return best_arch->gdbarch;
9297 tdep = XCNEW (struct gdbarch_tdep);
9298 gdbarch = gdbarch_alloc (&info, tdep);
9300 /* Record additional information about the architecture we are defining.
9301 These are gdbarch discriminators, like the OSABI. */
9302 tdep->arm_abi = arm_abi;
9303 tdep->fp_model = fp_model;
9305 tdep->have_fpa_registers = have_fpa_registers;
9306 tdep->have_wmmx_registers = have_wmmx_registers;
9307 gdb_assert (vfp_register_count == 0
9308 || vfp_register_count == 16
9309 || vfp_register_count == 32);
9310 tdep->vfp_register_count = vfp_register_count;
9311 tdep->have_vfp_pseudos = have_vfp_pseudos;
9312 tdep->have_neon_pseudos = have_neon_pseudos;
9313 tdep->have_neon = have_neon;
9315 arm_register_g_packet_guesses (gdbarch);
9318 switch (info.byte_order_for_code)
9320 case BFD_ENDIAN_BIG:
9321 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9322 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9323 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9324 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9328 case BFD_ENDIAN_LITTLE:
9329 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9330 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9331 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9332 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9337 internal_error (__FILE__, __LINE__,
9338 _("arm_gdbarch_init: bad byte order for float format"));
9341 /* On ARM targets char defaults to unsigned. */
9342 set_gdbarch_char_signed (gdbarch, 0);
9344 /* wchar_t is unsigned under the AAPCS. */
9345 if (tdep->arm_abi == ARM_ABI_AAPCS)
9346 set_gdbarch_wchar_signed (gdbarch, 0);
9348 set_gdbarch_wchar_signed (gdbarch, 1);
9350 /* Note: for displaced stepping, this includes the breakpoint, and one word
9351 of additional scratch space. This setting isn't used for anything beside
9352 displaced stepping at present. */
9353 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9355 /* This should be low enough for everything. */
9356 tdep->lowest_pc = 0x20;
9357 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9359 /* The default, for both APCS and AAPCS, is to return small
9360 structures in registers. */
9361 tdep->struct_return = reg_struct_return;
9363 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9364 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9367 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9369 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9371 /* Frame handling. */
9372 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9373 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9374 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9376 frame_base_set_default (gdbarch, &arm_normal_base);
9378 /* Address manipulation. */
9379 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9381 /* Advance PC across function entry code. */
9382 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9384 /* Detect whether PC is at a point where the stack has been destroyed. */
9385 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9387 /* Skip trampolines. */
9388 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9390 /* The stack grows downward. */
9391 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9393 /* Breakpoint manipulation. */
9394 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9395 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9396 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9397 arm_breakpoint_kind_from_current_state);
9399 /* Information about registers, etc. */
9400 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9401 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9402 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9403 set_gdbarch_register_type (gdbarch, arm_register_type);
9404 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9406 /* This "info float" is FPA-specific. Use the generic version if we
9408 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9409 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9411 /* Internal <-> external register number maps. */
9412 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9413 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9415 set_gdbarch_register_name (gdbarch, arm_register_name);
9417 /* Returning results. */
9418 set_gdbarch_return_value (gdbarch, arm_return_value);
9421 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9423 /* Minsymbol frobbing. */
9424 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9425 set_gdbarch_coff_make_msymbol_special (gdbarch,
9426 arm_coff_make_msymbol_special);
9427 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9429 /* Thumb-2 IT block support. */
9430 set_gdbarch_adjust_breakpoint_address (gdbarch,
9431 arm_adjust_breakpoint_address);
9433 /* Virtual tables. */
9434 set_gdbarch_vbit_in_delta (gdbarch, 1);
9436 /* Hook in the ABI-specific overrides, if they have been registered. */
9437 gdbarch_init_osabi (info, gdbarch);
9439 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9441 /* Add some default predicates. */
9443 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9444 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9445 dwarf2_append_unwinders (gdbarch);
9446 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9447 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9448 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9450 /* Now we have tuned the configuration, set a few final things,
9451 based on what the OS ABI has told us. */
9453 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9454 binaries are always marked. */
9455 if (tdep->arm_abi == ARM_ABI_AUTO)
9456 tdep->arm_abi = ARM_ABI_APCS;
9458 /* Watchpoints are not steppable. */
9459 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9461 /* We used to default to FPA for generic ARM, but almost nobody
9462 uses that now, and we now provide a way for the user to force
9463 the model. So default to the most useful variant. */
9464 if (tdep->fp_model == ARM_FLOAT_AUTO)
9465 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9467 if (tdep->jb_pc >= 0)
9468 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9470 /* Floating point sizes and format. */
9471 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9472 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9474 set_gdbarch_double_format
9475 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9476 set_gdbarch_long_double_format
9477 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9481 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9482 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9485 if (have_vfp_pseudos)
9487 /* NOTE: These are the only pseudo registers used by
9488 the ARM target at the moment. If more are added, a
9489 little more care in numbering will be needed. */
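/* With this layout the 32 single-precision pseudos occupy register
   numbers num_regs .. num_regs + 31, and, when NEON pseudos are
   present, q0-q15 follow at num_regs + 32 .. num_regs + 47, matching
   arm_register_name above.  */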
9491 int num_pseudos = 32;
9492 if (have_neon_pseudos)
9494 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9495 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9496 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9501 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9503 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9505 /* Override tdesc_register_type to adjust the types of VFP
9506 registers for NEON. */
9507 set_gdbarch_register_type (gdbarch, arm_register_type);
9510 /* Add standard register aliases. We add aliases even for those
9511 names which are used by the current architecture - it's simpler,
9512 and does no harm, since nothing ever lists user registers. */
9513 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9514 user_reg_add (gdbarch, arm_register_aliases[i].name,
9515 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9517 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9518 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9524 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9526 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9531 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9532 (unsigned long) tdep->lowest_pc);
9538 static void arm_record_test (void);
9543 _initialize_arm_tdep (void)
9546 const char *setname;
9547 const char *setdesc;
9549 char regdesc[1024], *rdptr = regdesc;
9550 size_t rest = sizeof (regdesc);
9552 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9554 arm_objfile_data_key
9555 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9557 /* Add ourselves to objfile event chain. */
9558 observer_attach_new_objfile (arm_exidx_new_objfile);
9560 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9562 /* Register an ELF OS ABI sniffer for ARM binaries. */
9563 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9564 bfd_target_elf_flavour,
9565 arm_elf_osabi_sniffer);
9567 /* Initialize the standard target descriptions. */
9568 initialize_tdesc_arm_with_m ();
9569 initialize_tdesc_arm_with_m_fpa_layout ();
9570 initialize_tdesc_arm_with_m_vfp_d16 ();
9571 initialize_tdesc_arm_with_iwmmxt ();
9572 initialize_tdesc_arm_with_vfpv2 ();
9573 initialize_tdesc_arm_with_vfpv3 ();
9574 initialize_tdesc_arm_with_neon ();
9576 /* Add root prefix command for all "set arm"/"show arm" commands. */
9577 add_prefix_cmd ("arm", no_class, set_arm_command,
9578 _("Various ARM-specific commands."),
9579 &setarmcmdlist, "set arm ", 0, &setlist);
9581 add_prefix_cmd ("arm", no_class, show_arm_command,
9582 _("Various ARM-specific commands."),
9583 &showarmcmdlist, "show arm ", 0, &showlist);
9586 arm_disassembler_options = xstrdup ("reg-names-std");
9587 const disasm_options_t *disasm_options = disassembler_options_arm ();
9588 int num_disassembly_styles = 0;
9589 for (i = 0; disasm_options->name[i] != NULL; i++)
9590 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9591 num_disassembly_styles++;
9593 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9594 valid_disassembly_styles = XNEWVEC (const char *,
9595 num_disassembly_styles + 1);
9596 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9597 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9599 size_t offset = strlen ("reg-names-");
9600 const char *style = disasm_options->name[i];
9601 valid_disassembly_styles[j++] = &style[offset];
9602 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9603 disasm_options->description[i]);
9607 /* Mark the end of valid options. */
9608 valid_disassembly_styles[num_disassembly_styles] = NULL;
9610 /* Create the help text. */
9611 std::string helptext = string_printf ("%s%s%s",
9612 _("The valid values are:\n"),
9614 _("The default is \"std\"."));
9616 add_setshow_enum_cmd("disassembler", no_class,
9617 valid_disassembly_styles, &disassembly_style,
9618 _("Set the disassembly style."),
9619 _("Show the disassembly style."),
9621 set_disassembly_style_sfunc,
9622 show_disassembly_style_sfunc,
9623 &setarmcmdlist, &showarmcmdlist);
9625 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9626 _("Set usage of ARM 32-bit mode."),
9627 _("Show usage of ARM 32-bit mode."),
9628 _("When off, a 26-bit PC will be used."),
9630 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9632 &setarmcmdlist, &showarmcmdlist);
9634 /* Add a command to allow the user to force the FPU model. */
9635 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9636 _("Set the floating point type."),
9637 _("Show the floating point type."),
9638 _("auto - Determine the FP typefrom the OS-ABI.\n\
9639 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9640 fpa - FPA co-processor (GCC compiled).\n\
9641 softvfp - Software FP with pure-endian doubles.\n\
9642 vfp - VFP co-processor."),
9643 set_fp_model_sfunc, show_fp_model,
9644 &setarmcmdlist, &showarmcmdlist);
9646 /* Add a command to allow the user to force the ABI. */
9647 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9650 NULL, arm_set_abi, arm_show_abi,
9651 &setarmcmdlist, &showarmcmdlist);
9653 /* Add two commands to allow the user to force the assumed
9655 add_setshow_enum_cmd ("fallback-mode", class_support,
9656 arm_mode_strings, &arm_fallback_mode_string,
9657 _("Set the mode assumed when symbols are unavailable."),
9658 _("Show the mode assumed when symbols are unavailable."),
9659 NULL, NULL, arm_show_fallback_mode,
9660 &setarmcmdlist, &showarmcmdlist);
9661 add_setshow_enum_cmd ("force-mode", class_support,
9662 arm_mode_strings, &arm_force_mode_string,
9663 _("Set the mode assumed even when symbols are available."),
9664 _("Show the mode assumed even when symbols are available."),
9665 NULL, NULL, arm_show_force_mode,
9666 &setarmcmdlist, &showarmcmdlist);
9668 /* Debugging flag. */
9669 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9670 _("Set ARM debugging."),
9671 _("Show ARM debugging."),
9672 _("When on, arm-specific debugging is enabled."),
9674 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9675 &setdebuglist, &showdebuglist);
9678 selftests::register_test ("arm-record", selftests::arm_record_test);
9683 /* ARM-reversible process record data structures. */
9685 #define ARM_INSN_SIZE_BYTES 4
9686 #define THUMB_INSN_SIZE_BYTES 2
9687 #define THUMB2_INSN_SIZE_BYTES 4
9690 /* Position of the bit within a 32-bit ARM instruction
9691 that defines whether the instruction is a load or store. */
9692 #define INSN_S_L_BIT_NUM 20
9694 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9697 unsigned int reg_len = LENGTH; \
9700 REGS = XNEWVEC (uint32_t, reg_len); \
9701 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9706 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9709 unsigned int mem_len = LENGTH; \
9712 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9713 memcpy(&MEMS->len, &RECORD_BUF[0], \
9714 sizeof(struct arm_mem_r) * LENGTH); \
9719 /* Checks whether insn is already recorded or yet to be decoded. (boolean expression). */
9720 #define INSN_RECORDED(ARM_RECORD) \
9721 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9723 /* ARM memory record structure. */
9726 uint32_t len; /* Record length. */
9727 uint32_t addr; /* Memory address. */
9730 /* An ARM instruction record contains the opcode of the current insn
9731 and the execution state (before entry to decode_insn()), plus the
9732 list of to-be-modified registers and
9733 memory blocks (on return from decode_insn()). */
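/* In practice each arm_record_* routine below fills a scratch
   record_buf[] with the numbers of registers whose pre-insn values
   must be saved, and record_buf_mem[] with (length, address) pairs for
   memory about to be overwritten; REG_ALLOC and MEM_ALLOC then copy
   those buffers into arm_regs / arm_mems.  */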
9735 typedef struct insn_decode_record_t
9737 struct gdbarch *gdbarch;
9738 struct regcache *regcache;
9739 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9740 uint32_t arm_insn; /* Should accommodate thumb. */
9741 uint32_t cond; /* Condition code. */
9742 uint32_t opcode; /* Insn opcode. */
9743 uint32_t decode; /* Insn decode bits. */
9744 uint32_t mem_rec_count; /* No of mem records. */
9745 uint32_t reg_rec_count; /* No of reg records. */
9746 uint32_t *arm_regs; /* Registers to be saved for this record. */
9747 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9748 } insn_decode_record;
9751 /* Checks ARM SBZ and SBO mandatory fields. */
9754 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9756 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
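/* Illustrative reading of the call sites below: sbo_sbz (insn, 9, 12, 1)
   checks the 12-bit should-be-one field starting at bit 8, while a
   final argument of 0 checks a should-be-zero field instead.  */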
9775 enum arm_record_result
9777 ARM_RECORD_SUCCESS = 0,
9778 ARM_RECORD_FAILURE = 1
9785 } arm_record_strx_t;
9796 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9797 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9800 struct regcache *reg_cache = arm_insn_r->regcache;
9801 ULONGEST u_regval[2]= {0};
9803 uint32_t reg_src1 = 0, reg_src2 = 0;
9804 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9806 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9807 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9809 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9811 /* 1) Handle misc store, immediate offset. */
9812 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9813 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9814 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9815 regcache_raw_read_unsigned (reg_cache, reg_src1,
9817 if (ARM_PC_REGNUM == reg_src1)
9819 /* If R15 was used as Rn, it reads as the current PC + 8. */
9820 u_regval[0] = u_regval[0] + 8;
9822 offset_8 = (immed_high << 4) | immed_low;
9823 /* Calculate target store address. */
9824 if (14 == arm_insn_r->opcode)
9826 tgt_mem_addr = u_regval[0] + offset_8;
9830 tgt_mem_addr = u_regval[0] - offset_8;
9832 if (ARM_RECORD_STRH == str_type)
9834 record_buf_mem[0] = 2;
9835 record_buf_mem[1] = tgt_mem_addr;
9836 arm_insn_r->mem_rec_count = 1;
9838 else if (ARM_RECORD_STRD == str_type)
9840 record_buf_mem[0] = 4;
9841 record_buf_mem[1] = tgt_mem_addr;
9842 record_buf_mem[2] = 4;
9843 record_buf_mem[3] = tgt_mem_addr + 4;
9844 arm_insn_r->mem_rec_count = 2;
9847 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9849 /* 2) Store, register offset. */
9851 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9853 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9854 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9855 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9858 /* If R15 was used as Rn, it reads as the current PC + 8. */
9859 u_regval[0] = u_regval[0] + 8;
9861 /* Calculate target store address, Rn +/- Rm, register offset. */
9862 if (12 == arm_insn_r->opcode)
9864 tgt_mem_addr = u_regval[0] + u_regval[1];
9868 tgt_mem_addr = u_regval[1] - u_regval[0];
9870 if (ARM_RECORD_STRH == str_type)
9872 record_buf_mem[0] = 2;
9873 record_buf_mem[1] = tgt_mem_addr;
9874 arm_insn_r->mem_rec_count = 1;
9876 else if (ARM_RECORD_STRD == str_type)
9878 record_buf_mem[0] = 4;
9879 record_buf_mem[1] = tgt_mem_addr;
9880 record_buf_mem[2] = 4;
9881 record_buf_mem[3] = tgt_mem_addr + 4;
9882 arm_insn_r->mem_rec_count = 2;
9885 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9886 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9888 /* 3) Store, immediate pre-indexed. */
9889 /* 5) Store, immediate post-indexed. */
9890 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9891 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9892 offset_8 = (immed_high << 4) | immed_low;
9893 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9894 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9895 /* Calculate target store address, Rn +/- immediate offset. */
9896 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9898 tgt_mem_addr = u_regval[0] + offset_8;
9902 tgt_mem_addr = u_regval[0] - offset_8;
9904 if (ARM_RECORD_STRH == str_type)
9906 record_buf_mem[0] = 2;
9907 record_buf_mem[1] = tgt_mem_addr;
9908 arm_insn_r->mem_rec_count = 1;
9910 else if (ARM_RECORD_STRD == str_type)
9912 record_buf_mem[0] = 4;
9913 record_buf_mem[1] = tgt_mem_addr;
9914 record_buf_mem[2] = 4;
9915 record_buf_mem[3] = tgt_mem_addr + 4;
9916 arm_insn_r->mem_rec_count = 2;
9918 /* Record Rn also as it changes. */
9919 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9920 arm_insn_r->reg_rec_count = 1;
9922 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9923 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9925 /* 4) Store, register pre-indexed. */
9926 /* 6) Store, register post-indexed. */
9927 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9928 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9929 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9930 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9931 /* Calculate target store address, Rn +/- Rm, register offset. */
9932 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9934 tgt_mem_addr = u_regval[0] + u_regval[1];
9938 tgt_mem_addr = u_regval[1] - u_regval[0];
9940 if (ARM_RECORD_STRH == str_type)
9942 record_buf_mem[0] = 2;
9943 record_buf_mem[1] = tgt_mem_addr;
9944 arm_insn_r->mem_rec_count = 1;
9946 else if (ARM_RECORD_STRD == str_type)
9948 record_buf_mem[0] = 4;
9949 record_buf_mem[1] = tgt_mem_addr;
9950 record_buf_mem[2] = 4;
9951 record_buf_mem[3] = tgt_mem_addr + 4;
9952 arm_insn_r->mem_rec_count = 2;
9954 /* Record Rn also as it changes. */
9955 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9956 arm_insn_r->reg_rec_count = 1;
9961 /* Handling ARM extension space insns. */
9964 arm_record_extension_space (insn_decode_record *arm_insn_r)
9966 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9967 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9968 uint32_t record_buf[8], record_buf_mem[8];
9969 uint32_t reg_src1 = 0;
9970 struct regcache *reg_cache = arm_insn_r->regcache;
9971 ULONGEST u_regval = 0;
9973 gdb_assert (!INSN_RECORDED(arm_insn_r));
9974 /* Handle unconditional insn extension space. */
9976 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9977 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9978 if (arm_insn_r->cond)
9980 /* PLD has no effect on architectural state; it just affects
9982 if (5 == ((opcode1 & 0xE0) >> 5))
9985 record_buf[0] = ARM_PS_REGNUM;
9986 record_buf[1] = ARM_LR_REGNUM;
9987 arm_insn_r->reg_rec_count = 2;
9989 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9993 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9994 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9997 /* Undefined instruction on ARM V5; need to handle if later
9998 versions define it. */
10001 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10002 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10003 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10005 /* Handle arithmetic insn extension space. */
10006 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10007 && !INSN_RECORDED(arm_insn_r))
10009 /* Handle MLA(S) and MUL(S). */
10010 if (0 <= insn_op1 && 3 >= insn_op1)
10012 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10013 record_buf[1] = ARM_PS_REGNUM;
10014 arm_insn_r->reg_rec_count = 2;
10016 else if (4 <= insn_op1 && 15 >= insn_op1)
10018 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10019 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10020 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10021 record_buf[2] = ARM_PS_REGNUM;
10022 arm_insn_r->reg_rec_count = 3;
10026 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10027 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10028 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10030 /* Handle control insn extension space. */
10032 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10033 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10035 if (!bit (arm_insn_r->arm_insn,25))
10037 if (!bits (arm_insn_r->arm_insn, 4, 7))
10039 if ((0 == insn_op1) || (2 == insn_op1))
10042 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10043 arm_insn_r->reg_rec_count = 1;
10045 else if (1 == insn_op1)
10047 /* CPSR is going to be changed. */
10048 record_buf[0] = ARM_PS_REGNUM;
10049 arm_insn_r->reg_rec_count = 1;
10051 else if (3 == insn_op1)
10053 /* SPSR is going to be changed. */
10054 /* We need to get SPSR value, which is yet to be done. */
10058 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10063 record_buf[0] = ARM_PS_REGNUM;
10064 arm_insn_r->reg_rec_count = 1;
10066 else if (3 == insn_op1)
10069 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10070 arm_insn_r->reg_rec_count = 1;
10073 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10076 record_buf[0] = ARM_PS_REGNUM;
10077 record_buf[1] = ARM_LR_REGNUM;
10078 arm_insn_r->reg_rec_count = 2;
10080 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10082 /* QADD, QSUB, QDADD, QDSUB */
10083 record_buf[0] = ARM_PS_REGNUM;
10084 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10085 arm_insn_r->reg_rec_count = 2;
10087 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10090 record_buf[0] = ARM_PS_REGNUM;
10091 record_buf[1] = ARM_LR_REGNUM;
10092 arm_insn_r->reg_rec_count = 2;
10094 /* Save SPSR also; how? */
10097 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10098 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10099 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10100 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10103 if (0 == insn_op1 || 1 == insn_op1)
10105 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10106 /* We don't do optimization for SMULW<y> where we
10108 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10109 record_buf[1] = ARM_PS_REGNUM;
10110 arm_insn_r->reg_rec_count = 2;
10112 else if (2 == insn_op1)
10115 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10116 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10117 arm_insn_r->reg_rec_count = 2;
10119 else if (3 == insn_op1)
10122 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10123 arm_insn_r->reg_rec_count = 1;
10129 /* MSR: immediate form. */
10132 /* CPSR is going to be changed. */
10133 record_buf[0] = ARM_PS_REGNUM;
10134 arm_insn_r->reg_rec_count = 1;
10136 else if (3 == insn_op1)
10138 /* SPSR is going to be changed. */
10139 /* We need to get the SPSR value, which is yet to be done. */
10145 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10146 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10147 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10149 /* Handle load/store insn extension space. */
10151 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10152 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10153 && !INSN_RECORDED(arm_insn_r))
10158 /* These insns change both registers and memory. */
10159 /* SWP or SWPB insn. */
10160 /* Get memory address given by Rn. */
10161 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10162 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10163 /* SWP insn?  It swaps a word. */
10164 if (8 == arm_insn_r->opcode)
10166 record_buf_mem[0] = 4;
10170 /* SWPB insn, swaps only a byte. */
10171 record_buf_mem[0] = 1;
10173 record_buf_mem[1] = u_regval;
10174 arm_insn_r->mem_rec_count = 1;
10175 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10176 arm_insn_r->reg_rec_count = 1;
10178 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10181 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10184 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10187 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10188 record_buf[1] = record_buf[0] + 1;
10189 arm_insn_r->reg_rec_count = 2;
10191 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10194 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10197 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10199 /* LDRH, LDRSB, LDRSH. */
10200 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10201 arm_insn_r->reg_rec_count = 1;
10206 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10207 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10208 && !INSN_RECORDED(arm_insn_r))
10211 /* Handle coprocessor insn extension space. */
10214 /* To be done for ARMv5 and later; as of now we return -1. */
10218 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10219 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10224 /* Handling opcode 000 insns. */
10227 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10229 struct regcache *reg_cache = arm_insn_r->regcache;
10230 uint32_t record_buf[8], record_buf_mem[8];
10231 ULONGEST u_regval[2] = {0};
10233 uint32_t reg_src1 = 0, reg_dest = 0;
10234 uint32_t opcode1 = 0;
10236 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10237 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10238 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10240 /* Data processing insn /multiply insn. */
10241 if (9 == arm_insn_r->decode
10242 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10243 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10245 /* Handle multiply instructions. */
10246 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10247 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10249 /* Handle MLA and MUL. */
10250 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10251 record_buf[1] = ARM_PS_REGNUM;
10252 arm_insn_r->reg_rec_count = 2;
10254 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10256 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10257 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10258 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10259 record_buf[2] = ARM_PS_REGNUM;
10260 arm_insn_r->reg_rec_count = 3;
10263 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10264 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10266 /* Handle misc load insns, as 20th bit (L = 1). */
10267 /* The LDR insn has the capability to do branching: if MOV LR, PC is
10268 preceded by an LDR insn having Rn as R15, then it emulates a
10269 branch and link insn, and hence we
10270 need to save CPSR and PC as well. I am not sure this is the right
10271 place; as opcode = 010, the LDR insn makes this happen if R15 was used as Rn. */
10273 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10274 if (15 != reg_dest)
10276 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10277 arm_insn_r->reg_rec_count = 1;
10281 record_buf[0] = reg_dest;
10282 record_buf[1] = ARM_PS_REGNUM;
10283 arm_insn_r->reg_rec_count = 2;
10286 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10287 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10288 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10289 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10291 /* Handle MSR insn. */
10292 if (9 == arm_insn_r->opcode)
10294 /* CPSR is going to be changed. */
10295 record_buf[0] = ARM_PS_REGNUM;
10296 arm_insn_r->reg_rec_count = 1;
10300 /* SPSR is going to be changed. */
10301 /* How to read SPSR value? */
10305 else if (9 == arm_insn_r->decode
10306 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10307 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10309 /* Handling SWP, SWPB. */
10310 /* These insns change both registers and memory. */
10311 /* SWP or SWPB insn. */
10313 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10314 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10315 /* SWP insn?  It swaps a word. */
10316 if (8 == arm_insn_r->opcode)
10318 record_buf_mem[0] = 4;
10322 /* SWPB insn, swaps only a byte. */
10323 record_buf_mem[0] = 1;
10325 record_buf_mem[1] = u_regval[0];
10326 arm_insn_r->mem_rec_count = 1;
10327 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10328 arm_insn_r->reg_rec_count = 1;
10330 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10331 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10333 /* Handle BLX, branch and link/exchange. */
10334 if (9 == arm_insn_r->opcode)
10336 /* The branch state (ARM or Thumb) is chosen by copying bit[0] of Rm
10337 into the T bit of CPSR, and R14 stores the return address. */
10338 record_buf[0] = ARM_PS_REGNUM;
10339 record_buf[1] = ARM_LR_REGNUM;
10340 arm_insn_r->reg_rec_count = 2;
10343 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10345 /* Handle enhanced software breakpoint insn, BKPT. */
10346 /* CPSR is changed so that execution continues in ARM state, with normal
10347 interrupts disabled, entering abort mode. */
10348 /* The PC is set according to the high vector configuration. */
10349 /* If the user hits the breakpoint and types reverse,
10350 we need to go back with the previous CPSR and
10351 Program Counter. */
10352 record_buf[0] = ARM_PS_REGNUM;
10353 record_buf[1] = ARM_LR_REGNUM;
10354 arm_insn_r->reg_rec_count = 2;
10356 /* Save SPSR also; how? */
10359 else if (11 == arm_insn_r->decode
10360 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10362 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10364 /* Handle str(x) insn. */
10365 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10368 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10369 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10371 /* Handle BX, branch and exchange. */
10372 /* The branch state (ARM or Thumb) is chosen by copying bit[0] of Rm into the T bit of CPSR. */
10373 record_buf[0] = ARM_PS_REGNUM;
10374 arm_insn_r->reg_rec_count = 1;
10376 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10377 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10378 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10380 /* Count leading zeros: CLZ. */
10381 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10382 arm_insn_r->reg_rec_count = 1;
10384 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10385 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10386 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10387 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10390 /* Handle MRS insn. */
10391 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10392 arm_insn_r->reg_rec_count = 1;
10394 else if (arm_insn_r->opcode <= 15)
10396 /* Normal data processing insns. */
10397 /* In each of the 11 shifter operand modes, the insn modifies the
10398 destination register, which is specified by bits 12-15. */
10399 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10400 record_buf[1] = ARM_PS_REGNUM;
10401 arm_insn_r->reg_rec_count = 2;
10408 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10409 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10413 /* Handling opcode 001 insns. */
10416 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10418 uint32_t record_buf[8], record_buf_mem[8];
10420 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10421 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10423 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10424 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10425 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10428 /* Handle MSR insn. */
10429 if (9 == arm_insn_r->opcode)
10431 /* CPSR is going to be changed. */
10432 record_buf[0] = ARM_PS_REGNUM;
10433 arm_insn_r->reg_rec_count = 1;
10437 /* SPSR is going to be changed. */
10440 else if (arm_insn_r->opcode <= 15)
10442 /* Normal data processing insns. */
10443 /* In each of the 11 shifter operand modes, the insn modifies the
10444 destination register, which is specified by bits 12-15. */
10445 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10446 record_buf[1] = ARM_PS_REGNUM;
10447 arm_insn_r->reg_rec_count = 2;
10454 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10455 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10460 arm_record_media (insn_decode_record *arm_insn_r)
10462 uint32_t record_buf[8];
10464 switch (bits (arm_insn_r->arm_insn, 22, 24))
10467 /* Parallel addition and subtraction, signed */
10469 /* Parallel addition and subtraction, unsigned */
10472 /* Packing, unpacking, saturation and reversal */
10474 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10476 record_buf[arm_insn_r->reg_rec_count++] = rd;
10482 /* Signed multiplies */
10484 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10485 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10487 record_buf[arm_insn_r->reg_rec_count++] = rd;
10489 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10490 else if (op1 == 0x4)
10491 record_buf[arm_insn_r->reg_rec_count++]
10492 = bits (arm_insn_r->arm_insn, 12, 15);
10498 if (bit (arm_insn_r->arm_insn, 21)
10499 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10502 record_buf[arm_insn_r->reg_rec_count++]
10503 = bits (arm_insn_r->arm_insn, 12, 15);
10505 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10506 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10508 /* USAD8 and USADA8 */
10509 record_buf[arm_insn_r->reg_rec_count++]
10510 = bits (arm_insn_r->arm_insn, 16, 19);
10517 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10518 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10520 /* Permanently UNDEFINED */
10525 /* BFC, BFI and UBFX */
10526 record_buf[arm_insn_r->reg_rec_count++]
10527 = bits (arm_insn_r->arm_insn, 12, 15);
10536 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10541 /* Handle ARM mode instructions with opcode 010. */
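/* Opcode 010 is the load/store (immediate offset) group.  For loads only
   the destination register needs recording (plus the CPSR when R15 is
   the destination); for stores the affected memory is computed from the
   base register and the 12-bit immediate, honoring the U, P and W bits. */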
10544 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10546 struct regcache *reg_cache = arm_insn_r->regcache;
10548 uint32_t reg_base, reg_dest;
10549 uint32_t offset_12, tgt_mem_addr;
10550 uint32_t record_buf[8], record_buf_mem[8];
10551 unsigned char wback;
10554 /* Calculate wback. */
10555 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10556 || (bit (arm_insn_r->arm_insn, 21) == 1);
10558 arm_insn_r->reg_rec_count = 0;
10559 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10561 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10563 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10566 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10567 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10569 /* The LDR instruction is capable of branching. If a MOV LR, PC
10570 precedes an LDR instruction that loads into R15, the pair
10571 emulates a branch and link instruction, and hence we need to save
10572 CPSR and PC as well. */
10573 if (ARM_PC_REGNUM == reg_dest)
10574 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10576 /* If wback is true, also save the base register, which is going to be
10579 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10583 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10585 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10586 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10588 /* Handle bit U. */
10589 if (bit (arm_insn_r->arm_insn, 23))
10591 /* U == 1: Add the offset. */
10592 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10596 /* U == 0: subtract the offset. */
10597 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10600 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10602 if (bit (arm_insn_r->arm_insn, 22))
10604 /* STRB and STRBT: 1 byte. */
10605 record_buf_mem[0] = 1;
10609 /* STR and STRT: 4 bytes. */
10610 record_buf_mem[0] = 4;
10613 /* Handle bit P. */
10614 if (bit (arm_insn_r->arm_insn, 24))
10615 record_buf_mem[1] = tgt_mem_addr;
10617 record_buf_mem[1] = (uint32_t) u_regval;
10619 arm_insn_r->mem_rec_count = 1;
10621 /* If wback is true, also save the base register, which is going to be
10624 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10627 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10628 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10632 /* Handling opcode 011 insns. */
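/* Opcode 011 is the load/store (register offset) group; when bit 4 is
   set the encoding belongs to the media instructions instead, which is
   why the handler below forwards to arm_record_media first. */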
10635 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10637 struct regcache *reg_cache = arm_insn_r->regcache;
10639 uint32_t shift_imm = 0;
10640 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10641 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10642 uint32_t record_buf[8], record_buf_mem[8];
10645 ULONGEST u_regval[2];
10647 if (bit (arm_insn_r->arm_insn, 4))
10648 return arm_record_media (arm_insn_r);
10650 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10651 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10653 /* Handle enhanced store insns and LDRD DSP insn,
10654 order begins according to addressing modes for store insns
10658 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10660 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10661 /* The LDR insn is capable of branching: if MOV LR, PC
10662 precedes an LDR insn that loads into R15,
10663 then the pair emulates a branch and link insn, and hence we
10664 need to save CPSR and PC as well. */
10665 if (15 != reg_dest)
10667 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10668 arm_insn_r->reg_rec_count = 1;
10672 record_buf[0] = reg_dest;
10673 record_buf[1] = ARM_PS_REGNUM;
10674 arm_insn_r->reg_rec_count = 2;
10679 if (! bits (arm_insn_r->arm_insn, 4, 11))
10681 /* Store insn, register offset and register pre-indexed,
10682 register post-indexed. */
10684 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10686 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10687 regcache_raw_read_unsigned (reg_cache, reg_src1
10689 regcache_raw_read_unsigned (reg_cache, reg_src2
10691 if (15 == reg_src2)
10693 /* If R15 was used as Rn, the value read is the current PC + 8. */
10694 /* Pre-indexed mode doesn't reach here; it would be an illegal insn. */
10695 u_regval[0] = u_regval[0] + 8;
10697 /* Calculate target store address, Rn +/- Rm, register offset. */
10699 if (bit (arm_insn_r->arm_insn, 23))
10701 tgt_mem_addr = u_regval[0] + u_regval[1];
10705 tgt_mem_addr = u_regval[1] - u_regval[0];
10708 switch (arm_insn_r->opcode)
10722 record_buf_mem[0] = 4;
10737 record_buf_mem[0] = 1;
10741 gdb_assert_not_reached ("no decoding pattern found");
10744 record_buf_mem[1] = tgt_mem_addr;
10745 arm_insn_r->mem_rec_count = 1;
10747 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10748 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10749 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10750 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10751 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10752 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10755 /* Rn is going to be changed in pre-indexed mode and
10756 post-indexed mode as well. */
10757 record_buf[0] = reg_src2;
10758 arm_insn_r->reg_rec_count = 1;
10763 /* Store insn, scaled register offset; scaled pre-indexed. */
10764 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10766 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10768 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10769 /* Get shift_imm. */
10770 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10771 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10772 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10773 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10774 /* Offset_12 used as shift. */
10778 /* Offset_12 used as index. */
10779 offset_12 = u_regval[0] << shift_imm;
10783 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10789 if (bit (u_regval[0], 31))
10791 offset_12 = 0xFFFFFFFF;
10800 /* This is arithmetic shift. */
10801 offset_12 = s_word >> shift_imm;
10808 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10810 /* Get C flag value and shift it by 31. */
10811 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10812 | (u_regval[0]) >> 1);
10816 offset_12 = (u_regval[0] >> shift_imm) \
10818 (sizeof(uint32_t) - shift_imm));
10823 gdb_assert_not_reached ("no decoding pattern found");
10827 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10829 if (bit (arm_insn_r->arm_insn, 23))
10831 tgt_mem_addr = u_regval[1] + offset_12;
10835 tgt_mem_addr = u_regval[1] - offset_12;
10838 switch (arm_insn_r->opcode)
10852 record_buf_mem[0] = 4;
10867 record_buf_mem[0] = 1;
10871 gdb_assert_not_reached ("no decoding pattern found");
10874 record_buf_mem[1] = tgt_mem_addr;
10875 arm_insn_r->mem_rec_count = 1;
10877 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10878 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10879 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10880 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10881 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10882 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10885 /* Rn is going to be changed in register scaled pre-indexed
10886 mode, and scaled post-indexed mode. */
10887 record_buf[0] = reg_src2;
10888 arm_insn_r->reg_rec_count = 1;
10893 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10894 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10898 /* Handle ARM mode instructions with opcode 100. */
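/* Opcode 100 is the load/store multiple (LDM/STM) group.  For a load,
   the registers named in the 16-bit register list are recorded; for a
   store, the block of memory that will be overwritten is derived from
   the base register and the addressing mode in bits 23-24. */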
10901 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10903 struct regcache *reg_cache = arm_insn_r->regcache;
10904 uint32_t register_count = 0, register_bits;
10905 uint32_t reg_base, addr_mode;
10906 uint32_t record_buf[24], record_buf_mem[48];
10910 /* Fetch the list of registers. */
10911 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10912 arm_insn_r->reg_rec_count = 0;
10914 /* Fetch the base register that contains the address we are loading data
10916 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10918 /* Calculate wback. */
10919 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10921 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10923 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10925 /* Find out which registers are going to be loaded from memory. */
10926 while (register_bits)
10928 if (register_bits & 0x00000001)
10929 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10930 register_bits = register_bits >> 1;
10935 /* If wback is true, also save the base register, which is going to be
10938 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10940 /* Save the CPSR register. */
10941 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10945 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10947 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10949 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10951 /* Find out how many registers are going to be stored to memory. */
10952 while (register_bits)
10954 if (register_bits & 0x00000001)
10956 register_bits = register_bits >> 1;
10961 /* STMDA (STMED): Decrement after. */
10963 record_buf_mem[1] = (uint32_t) u_regval
10964 - register_count * INT_REGISTER_SIZE + 4;
10966 /* STM (STMIA, STMEA): Increment after. */
10968 record_buf_mem[1] = (uint32_t) u_regval;
10970 /* STMDB (STMFD): Decrement before. */
10972 record_buf_mem[1] = (uint32_t) u_regval
10973 - register_count * INT_REGISTER_SIZE;
10975 /* STMIB (STMFA): Increment before. */
10977 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10980 gdb_assert_not_reached ("no decoding pattern found");
10984 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10985 arm_insn_r->mem_rec_count = 1;
10987 /* If wback is true, also save the base register, which is going to be
10990 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10993 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10994 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10998 /* Handling opcode 101 insns. */
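/* Opcode 101 is the branch group (B, BL).  Only BL clobbers a register
   (LR); the PC itself is saved generically by the process record
   framework, so a plain B needs no entries at all. */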
11001 arm_record_b_bl (insn_decode_record *arm_insn_r)
11003 uint32_t record_buf[8];
11005 /* Handle B, BL, BLX(1) insns. */
11006 /* B simply branches so we do nothing here. */
11007 /* Note: BLX(1) doesn't fall here but instead falls into the
11008 extension space. */
11009 if (bit (arm_insn_r->arm_insn, 24))
11011 record_buf[0] = ARM_LR_REGNUM;
11012 arm_insn_r->reg_rec_count = 1;
11015 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11021 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11023 printf_unfiltered (_("Process record does not support instruction "
11024 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11025 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11030 /* Record handler for vector data transfer instructions. */
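/* These are the transfers between the ARM core registers and the
   VFP/NEON register bank: VMOV (core register to/from scalar), VMRS,
   VMSR and VDUP.  Depending on the direction, either a core register,
   the FPSCR, or one or two D registers are recorded. */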
11033 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11035 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11036 uint32_t record_buf[4];
11038 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11039 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11040 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11041 bit_l = bit (arm_insn_r->arm_insn, 20);
11042 bit_c = bit (arm_insn_r->arm_insn, 8);
11044 /* Handle VMOV instruction. */
11045 if (bit_l && bit_c)
11047 record_buf[0] = reg_t;
11048 arm_insn_r->reg_rec_count = 1;
11050 else if (bit_l && !bit_c)
11052 /* Handle VMOV instruction. */
11053 if (bits_a == 0x00)
11055 record_buf[0] = reg_t;
11056 arm_insn_r->reg_rec_count = 1;
11058 /* Handle VMRS instruction. */
11059 else if (bits_a == 0x07)
11062 reg_t = ARM_PS_REGNUM;
11064 record_buf[0] = reg_t;
11065 arm_insn_r->reg_rec_count = 1;
11068 else if (!bit_l && !bit_c)
11070 /* Handle VMOV instruction. */
11071 if (bits_a == 0x00)
11073 record_buf[0] = ARM_D0_REGNUM + reg_v;
11075 arm_insn_r->reg_rec_count = 1;
11077 /* Handle VMSR instruction. */
11078 else if (bits_a == 0x07)
11080 record_buf[0] = ARM_FPSCR_REGNUM;
11081 arm_insn_r->reg_rec_count = 1;
11084 else if (!bit_l && bit_c)
11086 /* Handle VMOV instruction. */
11087 if (!(bits_a & 0x04))
11089 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11091 arm_insn_r->reg_rec_count = 1;
11093 /* Handle VDUP instruction. */
11096 if (bit (arm_insn_r->arm_insn, 21))
11098 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11099 record_buf[0] = reg_v + ARM_D0_REGNUM;
11100 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11101 arm_insn_r->reg_rec_count = 2;
11105 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11106 record_buf[0] = reg_v + ARM_D0_REGNUM;
11107 arm_insn_r->reg_rec_count = 1;
11112 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11116 /* Record handler for extension register load/store instructions. */
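/* This covers VMOV (two core registers to/from extension registers),
   VSTM/VPUSH, VLDM, VSTR and VLDR.  Stores are recorded as memory
   ranges (4 bytes per S register, two 4-byte words per D register);
   loads are recorded as the D registers they overwrite, with S
   registers mapped onto the D register that contains them. */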
11119 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11121 uint32_t opcode, single_reg;
11122 uint8_t op_vldm_vstm;
11123 uint32_t record_buf[8], record_buf_mem[128];
11124 ULONGEST u_regval = 0;
11126 struct regcache *reg_cache = arm_insn_r->regcache;
11128 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11129 single_reg = !bit (arm_insn_r->arm_insn, 8);
11130 op_vldm_vstm = opcode & 0x1b;
11132 /* Handle VMOV instructions. */
11133 if ((opcode & 0x1e) == 0x04)
11135 if (bit (arm_insn_r->arm_insn, 20)) /* Bit 20 set: transfer to the ARM core registers. */
11137 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11138 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11139 arm_insn_r->reg_rec_count = 2;
11143 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11144 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11148 /* The first S register number m is REG_M:M (M is bit 5),
11149 the corresponding D register number is REG_M:M / 2, which
11151 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11152 /* The second S register number is REG_M:M + 1, the
11153 corresponding D register number is (REG_M:M + 1) / 2.
11154 IOW, if bit M is 1, the first and second S registers
11155 are mapped to different D registers, otherwise, they are
11156 in the same D register. */
11159 record_buf[arm_insn_r->reg_rec_count++]
11160 = ARM_D0_REGNUM + reg_m + 1;
11165 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11166 arm_insn_r->reg_rec_count = 1;
11170 /* Handle VSTM and VPUSH instructions. */
11171 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11172 || op_vldm_vstm == 0x12)
11174 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11175 uint32_t memory_index = 0;
11177 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11178 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11179 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11180 imm_off32 = imm_off8 << 2;
11181 memory_count = imm_off8;
11183 if (bit (arm_insn_r->arm_insn, 23))
11184 start_address = u_regval;
11186 start_address = u_regval - imm_off32;
11188 if (bit (arm_insn_r->arm_insn, 21))
11190 record_buf[0] = reg_rn;
11191 arm_insn_r->reg_rec_count = 1;
11194 while (memory_count > 0)
11198 record_buf_mem[memory_index] = 4;
11199 record_buf_mem[memory_index + 1] = start_address;
11200 start_address = start_address + 4;
11201 memory_index = memory_index + 2;
11205 record_buf_mem[memory_index] = 4;
11206 record_buf_mem[memory_index + 1] = start_address;
11207 record_buf_mem[memory_index + 2] = 4;
11208 record_buf_mem[memory_index + 3] = start_address + 4;
11209 start_address = start_address + 8;
11210 memory_index = memory_index + 4;
11214 arm_insn_r->mem_rec_count = (memory_index >> 1);
11216 /* Handle VLDM instructions. */
11217 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11218 || op_vldm_vstm == 0x13)
11220 uint32_t reg_count, reg_vd;
11221 uint32_t reg_index = 0;
11222 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11224 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11225 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11227 /* REG_VD is the first D register number. If the instruction
11228 loads memory to S registers (SINGLE_REG is TRUE), the register
11229 number is (REG_VD << 1 | bit D), so the corresponding D
11230 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11232 reg_vd = reg_vd | (bit_d << 4);
11234 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11235 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11237 /* If the instruction loads memory to D register, REG_COUNT should
11238 be divided by 2, according to the ARM Architecture Reference
11239 Manual. If the instruction loads memory to S register, divide by
11240 2 as well because two S registers are mapped to D register. */
11241 reg_count = reg_count / 2;
11242 if (single_reg && bit_d)
11244 /* Increase the register count if S register list starts from
11245 an odd number (bit d is one). */
11249 while (reg_count > 0)
11251 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11254 arm_insn_r->reg_rec_count = reg_index;
11256 /* VSTR Vector store register. */
11257 else if ((opcode & 0x13) == 0x10)
11259 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11260 uint32_t memory_index = 0;
11262 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11263 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11264 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11265 imm_off32 = imm_off8 << 2;
11267 if (bit (arm_insn_r->arm_insn, 23))
11268 start_address = u_regval + imm_off32;
11270 start_address = u_regval - imm_off32;
11274 record_buf_mem[memory_index] = 4;
11275 record_buf_mem[memory_index + 1] = start_address;
11276 arm_insn_r->mem_rec_count = 1;
11280 record_buf_mem[memory_index] = 4;
11281 record_buf_mem[memory_index + 1] = start_address;
11282 record_buf_mem[memory_index + 2] = 4;
11283 record_buf_mem[memory_index + 3] = start_address + 4;
11284 arm_insn_r->mem_rec_count = 2;
11287 /* VLDR Vector load register. */
11288 else if ((opcode & 0x13) == 0x11)
11290 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11294 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11295 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11299 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11300 /* Record register D rather than pseudo register S. */
11301 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11303 arm_insn_r->reg_rec_count = 1;
11306 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11307 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11311 /* Record handler for arm/thumb mode VFP data processing instructions. */
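/* The decoding below does not record anything directly; it only
   classifies the insn into one of four record shapes (INSN_T0..INSN_T3),
   and the switch at the end turns that into the destination registers
   (or the FPSCR, for the compare insns) that have to be saved. */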
11314 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11316 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11317 uint32_t record_buf[4];
11318 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11319 enum insn_types curr_insn_type = INSN_INV;
11321 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11322 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11323 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11324 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11325 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11326 bit_d = bit (arm_insn_r->arm_insn, 22);
11327 opc1 = opc1 & ~0x04;
11329 /* Handle VMLA, VMLS. */
11332 if (bit (arm_insn_r->arm_insn, 10))
11334 if (bit (arm_insn_r->arm_insn, 6))
11335 curr_insn_type = INSN_T0;
11337 curr_insn_type = INSN_T1;
11342 curr_insn_type = INSN_T1;
11344 curr_insn_type = INSN_T2;
11347 /* Handle VNMLA, VNMLS, VNMUL. */
11348 else if (opc1 == 0x01)
11351 curr_insn_type = INSN_T1;
11353 curr_insn_type = INSN_T2;
11356 else if (opc1 == 0x02 && !(opc3 & 0x01))
11358 if (bit (arm_insn_r->arm_insn, 10))
11360 if (bit (arm_insn_r->arm_insn, 6))
11361 curr_insn_type = INSN_T0;
11363 curr_insn_type = INSN_T1;
11368 curr_insn_type = INSN_T1;
11370 curr_insn_type = INSN_T2;
11373 /* Handle VADD, VSUB. */
11374 else if (opc1 == 0x03)
11376 if (!bit (arm_insn_r->arm_insn, 9))
11378 if (bit (arm_insn_r->arm_insn, 6))
11379 curr_insn_type = INSN_T0;
11381 curr_insn_type = INSN_T1;
11386 curr_insn_type = INSN_T1;
11388 curr_insn_type = INSN_T2;
11392 else if (opc1 == 0x08)
11395 curr_insn_type = INSN_T1;
11397 curr_insn_type = INSN_T2;
11399 /* Handle all other vfp data processing instructions. */
11400 else if (opc1 == 0x0b)
11403 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11405 if (bit (arm_insn_r->arm_insn, 4))
11407 if (bit (arm_insn_r->arm_insn, 6))
11408 curr_insn_type = INSN_T0;
11410 curr_insn_type = INSN_T1;
11415 curr_insn_type = INSN_T1;
11417 curr_insn_type = INSN_T2;
11420 /* Handle VNEG and VABS. */
11421 else if ((opc2 == 0x01 && opc3 == 0x01)
11422 || (opc2 == 0x00 && opc3 == 0x03))
11424 if (!bit (arm_insn_r->arm_insn, 11))
11426 if (bit (arm_insn_r->arm_insn, 6))
11427 curr_insn_type = INSN_T0;
11429 curr_insn_type = INSN_T1;
11434 curr_insn_type = INSN_T1;
11436 curr_insn_type = INSN_T2;
11439 /* Handle VSQRT. */
11440 else if (opc2 == 0x01 && opc3 == 0x03)
11443 curr_insn_type = INSN_T1;
11445 curr_insn_type = INSN_T2;
11448 else if (opc2 == 0x07 && opc3 == 0x03)
11451 curr_insn_type = INSN_T1;
11453 curr_insn_type = INSN_T2;
11455 else if (opc3 & 0x01)
11458 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11460 if (!bit (arm_insn_r->arm_insn, 18))
11461 curr_insn_type = INSN_T2;
11465 curr_insn_type = INSN_T1;
11467 curr_insn_type = INSN_T2;
11471 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11474 curr_insn_type = INSN_T1;
11476 curr_insn_type = INSN_T2;
11478 /* Handle VCVTB, VCVTT. */
11479 else if ((opc2 & 0x0e) == 0x02)
11480 curr_insn_type = INSN_T2;
11481 /* Handle VCMP, VCMPE. */
11482 else if ((opc2 & 0x0e) == 0x04)
11483 curr_insn_type = INSN_T3;
11487 switch (curr_insn_type)
11490 reg_vd = reg_vd | (bit_d << 4);
11491 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11492 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11493 arm_insn_r->reg_rec_count = 2;
11497 reg_vd = reg_vd | (bit_d << 4);
11498 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11499 arm_insn_r->reg_rec_count = 1;
11503 reg_vd = (reg_vd << 1) | bit_d;
11504 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11505 arm_insn_r->reg_rec_count = 1;
11509 record_buf[0] = ARM_FPSCR_REGNUM;
11510 arm_insn_r->reg_rec_count = 1;
11514 gdb_assert_not_reached ("no decoding pattern found");
11518 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11522 /* Handling opcode 110 insns. */
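/* Opcode 110 is coprocessor load/store and 64-bit register transfers.
   Coprocessors 10 and 11 are the VFP/NEON extension registers and are
   routed to the handler above; for other coprocessors only the move of
   two core registers from a coprocessor records anything, and the rest
   is reported as unsupported. */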
11525 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11527 uint32_t op1, op1_ebit, coproc;
11529 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11530 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11531 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11533 if ((coproc & 0x0e) == 0x0a)
11535 /* Handle extension register ld/st instructions. */
11537 return arm_record_exreg_ld_st_insn (arm_insn_r);
11539 /* 64-bit transfers between arm core and extension registers. */
11540 if ((op1 & 0x3e) == 0x04)
11541 return arm_record_exreg_ld_st_insn (arm_insn_r);
11545 /* Handle coprocessor ld/st instructions. */
11550 return arm_record_unsupported_insn (arm_insn_r);
11553 return arm_record_unsupported_insn (arm_insn_r);
11556 /* Move to coprocessor from two arm core registers. */
11558 return arm_record_unsupported_insn (arm_insn_r);
11560 /* Move to two arm core registers from coprocessor. */
11565 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11566 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11567 arm_insn_r->reg_rec_count = 2;
11569 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11573 return arm_record_unsupported_insn (arm_insn_r);
11576 /* Handling opcode 111 insns. */
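/* Opcode 111 covers SWI/SVC and the coprocessor data-processing and
   register-transfer space.  System calls are delegated to the per-OS
   arm_syscall_record hook; VFP data processing and core<->VFP transfers
   go to the handlers above; a move from a coprocessor to a core register
   records that register (or the CPSR when R15 is named); anything else
   is unsupported. */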
11579 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11581 uint32_t op, op1_sbit, op1_ebit, coproc;
11582 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11583 struct regcache *reg_cache = arm_insn_r->regcache;
11585 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11586 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11587 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11588 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11589 op = bit (arm_insn_r->arm_insn, 4);
11591 /* Handle arm SWI/SVC system call instructions. */
11594 if (tdep->arm_syscall_record != NULL)
11596 ULONGEST svc_operand, svc_number;
11598 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11600 if (svc_operand) /* OABI. */
11601 svc_number = svc_operand - 0x900000;
11603 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11605 return tdep->arm_syscall_record (reg_cache, svc_number);
11609 printf_unfiltered (_("no syscall record support\n"));
11614 if ((coproc & 0x0e) == 0x0a)
11616 /* VFP data-processing instructions. */
11617 if (!op1_sbit && !op)
11618 return arm_record_vfp_data_proc_insn (arm_insn_r);
11620 /* Advanced SIMD, VFP instructions. */
11621 if (!op1_sbit && op)
11622 return arm_record_vdata_transfer_insn (arm_insn_r);
11626 /* Coprocessor data operations. */
11627 if (!op1_sbit && !op)
11628 return arm_record_unsupported_insn (arm_insn_r);
11630 /* Move to Coprocessor from ARM core register. */
11631 if (!op1_sbit && !op1_ebit && op)
11632 return arm_record_unsupported_insn (arm_insn_r);
11634 /* Move to arm core register from coprocessor. */
11635 if (!op1_sbit && op1_ebit && op)
11637 uint32_t record_buf[1];
11639 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11640 if (record_buf[0] == 15)
11641 record_buf[0] = ARM_PS_REGNUM;
11643 arm_insn_r->reg_rec_count = 1;
11644 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11650 return arm_record_unsupported_insn (arm_insn_r);
11653 /* Handling opcode 000 insns. */
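/* Thumb opcode 000 is the shift-by-immediate and add/subtract group; the
   only state it can change is the low destination register and the
   flags, so those two entries are always recorded. */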
11656 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11658 uint32_t record_buf[8];
11659 uint32_t reg_src1 = 0;
11661 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11663 record_buf[0] = ARM_PS_REGNUM;
11664 record_buf[1] = reg_src1;
11665 thumb_insn_r->reg_rec_count = 2;
11667 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11673 /* Handling opcode 001 insns. */
11676 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11678 uint32_t record_buf[8];
11679 uint32_t reg_src1 = 0;
11681 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11683 record_buf[0] = ARM_PS_REGNUM;
11684 record_buf[1] = reg_src1;
11685 thumb_insn_r->reg_rec_count = 2;
11687 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11692 /* Handling opcode 010 insns. */
11695 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11697 struct regcache *reg_cache = thumb_insn_r->regcache;
11698 uint32_t record_buf[8], record_buf_mem[8];
11700 uint32_t reg_src1 = 0, reg_src2 = 0;
11701 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11703 ULONGEST u_regval[2] = {0};
11705 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11707 if (bit (thumb_insn_r->arm_insn, 12))
11709 /* Handle load/store register offset. */
11710 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11712 if (opB >= 4 && opB <= 7)
11714 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11715 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11716 record_buf[0] = reg_src1;
11717 thumb_insn_r->reg_rec_count = 1;
11719 else if (opB >= 0 && opB <= 2)
11721 /* STR(2), STRB(2), STRH(2). */
11722 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11723 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11724 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11725 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11727 record_buf_mem[0] = 4; /* STR (2). */
11729 record_buf_mem[0] = 1; /* STRB (2). */
11731 record_buf_mem[0] = 2; /* STRH (2). */
11732 record_buf_mem[1] = u_regval[0] + u_regval[1];
11733 thumb_insn_r->mem_rec_count = 1;
11736 else if (bit (thumb_insn_r->arm_insn, 11))
11738 /* Handle load from literal pool. */
11740 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11741 record_buf[0] = reg_src1;
11742 thumb_insn_r->reg_rec_count = 1;
11746 /* Special data instructions and branch and exchange */
11747 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11748 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11749 if ((3 == opcode2) && (!opcode3))
11751 /* Branch with exchange. */
11752 record_buf[0] = ARM_PS_REGNUM;
11753 thumb_insn_r->reg_rec_count = 1;
11757 /* Format 8; special data processing insns. */
11758 record_buf[0] = ARM_PS_REGNUM;
11759 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11760 | bits (thumb_insn_r->arm_insn, 0, 2));
11761 thumb_insn_r->reg_rec_count = 2;
11766 /* Format 5; data processing insns. */
11767 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11768 if (bit (thumb_insn_r->arm_insn, 7))
11770 reg_src1 = reg_src1 + 8;
11772 record_buf[0] = ARM_PS_REGNUM;
11773 record_buf[1] = reg_src1;
11774 thumb_insn_r->reg_rec_count = 2;
11777 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11778 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11784 /* Handling opcode 011 insns. */
11787 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11789 struct regcache *reg_cache = thumb_insn_r->regcache;
11790 uint32_t record_buf[8], record_buf_mem[8];
11792 uint32_t reg_src1 = 0;
11793 uint32_t opcode = 0, immed_5 = 0;
11795 ULONGEST u_regval = 0;
11797 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11802 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11803 record_buf[0] = reg_src1;
11804 thumb_insn_r->reg_rec_count = 1;
11809 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11810 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11811 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11812 record_buf_mem[0] = 4;
11813 record_buf_mem[1] = u_regval + (immed_5 * 4);
11814 thumb_insn_r->mem_rec_count = 1;
11817 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11818 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11824 /* Handling opcode 100 insns. */
11827 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11829 struct regcache *reg_cache = thumb_insn_r->regcache;
11830 uint32_t record_buf[8], record_buf_mem[8];
11832 uint32_t reg_src1 = 0;
11833 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11835 ULONGEST u_regval = 0;
11837 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11842 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11843 record_buf[0] = reg_src1;
11844 thumb_insn_r->reg_rec_count = 1;
11846 else if (1 == opcode)
11849 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11850 record_buf[0] = reg_src1;
11851 thumb_insn_r->reg_rec_count = 1;
11853 else if (2 == opcode)
11856 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11857 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11858 record_buf_mem[0] = 4;
11859 record_buf_mem[1] = u_regval + (immed_8 * 4);
11860 thumb_insn_r->mem_rec_count = 1;
11862 else if (0 == opcode)
11865 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11866 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11867 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11868 record_buf_mem[0] = 2;
11869 record_buf_mem[1] = u_regval + (immed_5 * 2);
11870 thumb_insn_r->mem_rec_count = 1;
11873 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11874 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11880 /* Handling opcode 101 insns. */
11883 thumb_record_misc (insn_decode_record *thumb_insn_r)
11885 struct regcache *reg_cache = thumb_insn_r->regcache;
11887 uint32_t opcode = 0;
11888 uint32_t register_bits = 0, register_count = 0;
11889 uint32_t index = 0, start_address = 0;
11890 uint32_t record_buf[24], record_buf_mem[48];
11893 ULONGEST u_regval = 0;
11895 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11897 if (opcode == 0 || opcode == 1)
11899 /* ADR and ADD (SP plus immediate) */
11901 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11902 record_buf[0] = reg_src1;
11903 thumb_insn_r->reg_rec_count = 1;
11907 /* Miscellaneous 16-bit instructions */
11908 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11913 /* SETEND and CPS */
11916 /* ADD/SUB (SP plus immediate) */
11917 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11918 record_buf[0] = ARM_SP_REGNUM;
11919 thumb_insn_r->reg_rec_count = 1;
11921 case 1: /* fall through */
11922 case 3: /* fall through */
11923 case 9: /* fall through */
11928 /* SXTH, SXTB, UXTH, UXTB */
11929 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11930 thumb_insn_r->reg_rec_count = 1;
11932 case 4: /* fall through */
11935 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11936 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11937 while (register_bits)
11939 if (register_bits & 0x00000001)
11941 register_bits = register_bits >> 1;
11943 start_address = u_regval - \
11944 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11945 thumb_insn_r->mem_rec_count = register_count;
11946 while (register_count)
11948 record_buf_mem[(register_count * 2) - 1] = start_address;
11949 record_buf_mem[(register_count * 2) - 2] = 4;
11950 start_address = start_address + 4;
11953 record_buf[0] = ARM_SP_REGNUM;
11954 thumb_insn_r->reg_rec_count = 1;
11957 /* REV, REV16, REVSH */
11958 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11959 thumb_insn_r->reg_rec_count = 1;
11961 case 12: /* fall through */
11964 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11965 while (register_bits)
11967 if (register_bits & 0x00000001)
11968 record_buf[index++] = register_count;
11969 register_bits = register_bits >> 1;
11972 record_buf[index++] = ARM_PS_REGNUM;
11973 record_buf[index++] = ARM_SP_REGNUM;
11974 thumb_insn_r->reg_rec_count = index;
11978 /* Handle enhanced software breakpoint insn, BKPT. */
11979 /* The CPSR is changed so that execution continues in ARM state, with
11980 normal interrupts disabled, entering abort mode. */
11981 /* The PC is set according to the high vector configuration. */
11982 /* If the user hits the breakpoint and then reverses, we need to go
11983 back with the previous CPSR and program counter. */
11984 record_buf[0] = ARM_PS_REGNUM;
11985 record_buf[1] = ARM_LR_REGNUM;
11986 thumb_insn_r->reg_rec_count = 2;
11987 /* We need to save SPSR value, which is not yet done. */
11988 printf_unfiltered (_("Process record does not support instruction "
11989 "0x%0x at address %s.\n"),
11990 thumb_insn_r->arm_insn,
11991 paddress (thumb_insn_r->gdbarch,
11992 thumb_insn_r->this_addr));
11996 /* If-Then, and hints */
12003 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12004 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12010 /* Handling opcode 110 insns. */
12013 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12015 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12016 struct regcache *reg_cache = thumb_insn_r->regcache;
12018 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12019 uint32_t reg_src1 = 0;
12020 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12021 uint32_t index = 0, start_address = 0;
12022 uint32_t record_buf[24], record_buf_mem[48];
12024 ULONGEST u_regval = 0;
12026 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12027 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12033 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12035 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12036 while (register_bits)
12038 if (register_bits & 0x00000001)
12039 record_buf[index++] = register_count;
12040 register_bits = register_bits >> 1;
12043 record_buf[index++] = reg_src1;
12044 thumb_insn_r->reg_rec_count = index;
12046 else if (0 == opcode2)
12048 /* Handle STMIA. */
12049 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12051 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12052 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12053 while (register_bits)
12055 if (register_bits & 0x00000001)
12057 register_bits = register_bits >> 1;
12059 start_address = u_regval;
12060 thumb_insn_r->mem_rec_count = register_count;
12061 while (register_count)
12063 record_buf_mem[(register_count * 2) - 1] = start_address;
12064 record_buf_mem[(register_count * 2) - 2] = 4;
12065 start_address = start_address + 4;
12069 else if (0x1F == opcode1)
12071 /* Handle arm syscall insn. */
12072 if (tdep->arm_syscall_record != NULL)
12074 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12075 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12079 printf_unfiltered (_("no syscall record support\n"));
12084 /* B (1), the conditional branch, is automatically taken care of in
12085 process_record, as the PC is saved there. */
12087 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12088 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12094 /* Handling opcode 111 insns. */
12097 thumb_record_branch (insn_decode_record *thumb_insn_r)
12099 uint32_t record_buf[8];
12100 uint32_t bits_h = 0;
12102 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12104 if (2 == bits_h || 3 == bits_h)
12107 record_buf[0] = ARM_LR_REGNUM;
12108 thumb_insn_r->reg_rec_count = 1;
12110 else if (1 == bits_h)
12113 record_buf[0] = ARM_PS_REGNUM;
12114 record_buf[1] = ARM_LR_REGNUM;
12115 thumb_insn_r->reg_rec_count = 2;
12118 /* B(2) is automatically taken care of in process_record, as the PC is
12121 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12126 /* Handler for thumb2 load/store multiple instructions. */
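/* Covers the Thumb-2 encodings of LDM/STM as well as RFE and SRS.  The
   memory a store will overwrite is computed from the base register and
   whether the addressing mode increments or decrements, mirroring the
   ARM mode handler above. */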
12129 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12131 struct regcache *reg_cache = thumb2_insn_r->regcache;
12133 uint32_t reg_rn, op;
12134 uint32_t register_bits = 0, register_count = 0;
12135 uint32_t index = 0, start_address = 0;
12136 uint32_t record_buf[24], record_buf_mem[48];
12138 ULONGEST u_regval = 0;
12140 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12141 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12143 if (0 == op || 3 == op)
12145 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12147 /* Handle RFE instruction. */
12148 record_buf[0] = ARM_PS_REGNUM;
12149 thumb2_insn_r->reg_rec_count = 1;
12153 /* Handle SRS instruction after reading banked SP. */
12154 return arm_record_unsupported_insn (thumb2_insn_r);
12157 else if (1 == op || 2 == op)
12159 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12161 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12162 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12163 while (register_bits)
12165 if (register_bits & 0x00000001)
12166 record_buf[index++] = register_count;
12169 register_bits = register_bits >> 1;
12171 record_buf[index++] = reg_rn;
12172 record_buf[index++] = ARM_PS_REGNUM;
12173 thumb2_insn_r->reg_rec_count = index;
12177 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12178 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12179 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12180 while (register_bits)
12182 if (register_bits & 0x00000001)
12185 register_bits = register_bits >> 1;
12190 /* Start address calculation for STM/STMIA/STMEA. */
12191 start_address = u_regval;
12195 /* Start address calculation for STMDB/STMFD. */
12196 start_address = u_regval - register_count * 4;
12199 thumb2_insn_r->mem_rec_count = register_count;
12200 while (register_count)
12202 record_buf_mem[register_count * 2 - 1] = start_address;
12203 record_buf_mem[register_count * 2 - 2] = 4;
12204 start_address = start_address + 4;
12207 record_buf[0] = reg_rn;
12208 record_buf[1] = ARM_PS_REGNUM;
12209 thumb2_insn_r->reg_rec_count = 2;
12213 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12215 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12217 return ARM_RECORD_SUCCESS;
12220 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12224 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12226 struct regcache *reg_cache = thumb2_insn_r->regcache;
12228 uint32_t reg_rd, reg_rn, offset_imm;
12229 uint32_t reg_dest1, reg_dest2;
12230 uint32_t address, offset_addr;
12231 uint32_t record_buf[8], record_buf_mem[8];
12232 uint32_t op1, op2, op3;
12234 ULONGEST u_regval[2];
12236 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12237 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12238 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12240 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12242 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12244 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12245 record_buf[0] = reg_dest1;
12246 record_buf[1] = ARM_PS_REGNUM;
12247 thumb2_insn_r->reg_rec_count = 2;
12250 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12252 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12253 record_buf[2] = reg_dest2;
12254 thumb2_insn_r->reg_rec_count = 3;
12259 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12260 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12262 if (0 == op1 && 0 == op2)
12264 /* Handle STREX. */
12265 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12266 address = u_regval[0] + (offset_imm * 4);
12267 record_buf_mem[0] = 4;
12268 record_buf_mem[1] = address;
12269 thumb2_insn_r->mem_rec_count = 1;
12270 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12271 record_buf[0] = reg_rd;
12272 thumb2_insn_r->reg_rec_count = 1;
12274 else if (1 == op1 && 0 == op2)
12276 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12277 record_buf[0] = reg_rd;
12278 thumb2_insn_r->reg_rec_count = 1;
12279 address = u_regval[0];
12280 record_buf_mem[1] = address;
12284 /* Handle STREXB. */
12285 record_buf_mem[0] = 1;
12286 thumb2_insn_r->mem_rec_count = 1;
12290 /* Handle STREXH. */
12291 record_buf_mem[0] = 2;
12292 thumb2_insn_r->mem_rec_count = 1;
12296 /* Handle STREXD. */
12297 address = u_regval[0];
12298 record_buf_mem[0] = 4;
12299 record_buf_mem[2] = 4;
12300 record_buf_mem[3] = address + 4;
12301 thumb2_insn_r->mem_rec_count = 2;
12306 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12308 if (bit (thumb2_insn_r->arm_insn, 24))
12310 if (bit (thumb2_insn_r->arm_insn, 23))
12311 offset_addr = u_regval[0] + (offset_imm * 4);
12313 offset_addr = u_regval[0] - (offset_imm * 4);
12315 address = offset_addr;
12318 address = u_regval[0];
12320 record_buf_mem[0] = 4;
12321 record_buf_mem[1] = address;
12322 record_buf_mem[2] = 4;
12323 record_buf_mem[3] = address + 4;
12324 thumb2_insn_r->mem_rec_count = 2;
12325 record_buf[0] = reg_rn;
12326 thumb2_insn_r->reg_rec_count = 1;
12330 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12332 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12334 return ARM_RECORD_SUCCESS;
12337 /* Handler for thumb2 data processing (shift register and modified immediate)
12341 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12343 uint32_t reg_rd, op;
12344 uint32_t record_buf[8];
12346 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12347 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12349 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12351 record_buf[0] = ARM_PS_REGNUM;
12352 thumb2_insn_r->reg_rec_count = 1;
12356 record_buf[0] = reg_rd;
12357 record_buf[1] = ARM_PS_REGNUM;
12358 thumb2_insn_r->reg_rec_count = 2;
12361 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12363 return ARM_RECORD_SUCCESS;
12366 /* Generic handler for thumb2 instructions which effect destination and PS
12370 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12373 uint32_t record_buf[8];
12375 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12377 record_buf[0] = reg_rd;
12378 record_buf[1] = ARM_PS_REGNUM;
12379 thumb2_insn_r->reg_rec_count = 2;
12381 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12383 return ARM_RECORD_SUCCESS;
12386 /* Handler for thumb2 branch and miscellaneous control instructions. */
12389 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12391 uint32_t op, op1, op2;
12392 uint32_t record_buf[8];
12394 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12395 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12396 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12398 /* Handle MSR insn. */
12399 if (!(op1 & 0x2) && 0x38 == op)
12403 /* CPSR is going to be changed. */
12404 record_buf[0] = ARM_PS_REGNUM;
12405 thumb2_insn_r->reg_rec_count = 1;
12409 arm_record_unsupported_insn (thumb2_insn_r);
12413 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12416 record_buf[0] = ARM_PS_REGNUM;
12417 record_buf[1] = ARM_LR_REGNUM;
12418 thumb2_insn_r->reg_rec_count = 2;
12421 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12423 return ARM_RECORD_SUCCESS;
12426 /* Handler for thumb2 store single data item instructions. */
12429 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12431 struct regcache *reg_cache = thumb2_insn_r->regcache;
12433 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12434 uint32_t address, offset_addr;
12435 uint32_t record_buf[8], record_buf_mem[8];
12438 ULONGEST u_regval[2];
12440 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12441 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12442 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12443 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12445 if (bit (thumb2_insn_r->arm_insn, 23))
12448 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12449 offset_addr = u_regval[0] + offset_imm;
12450 address = offset_addr;
12455 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12457 /* Handle STRB (register). */
12458 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12459 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12460 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12461 offset_addr = u_regval[1] << shift_imm;
12462 address = u_regval[0] + offset_addr;
12466 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12467 if (bit (thumb2_insn_r->arm_insn, 10))
12469 if (bit (thumb2_insn_r->arm_insn, 9))
12470 offset_addr = u_regval[0] + offset_imm;
12472 offset_addr = u_regval[0] - offset_imm;
12474 address = offset_addr;
12477 address = u_regval[0];
12483 /* Store byte instructions. */
12486 record_buf_mem[0] = 1;
12488 /* Store half word instructions. */
12491 record_buf_mem[0] = 2;
12493 /* Store word instructions. */
12496 record_buf_mem[0] = 4;
12500 gdb_assert_not_reached ("no decoding pattern found");
12504 record_buf_mem[1] = address;
12505 thumb2_insn_r->mem_rec_count = 1;
12506 record_buf[0] = reg_rn;
12507 thumb2_insn_r->reg_rec_count = 1;
12509 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12511 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12513 return ARM_RECORD_SUCCESS;
12516 /* Handler for thumb2 load memory hints instructions. */
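/* Only loads whose destination is not the PC are handled here; for those
   the destination, the base register and the CPSR are recorded.  A PC
   destination is reported as ARM_RECORD_FAILURE, so the caller treats
   the insn as unsupported. */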
12519 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12521 uint32_t record_buf[8];
12522 uint32_t reg_rt, reg_rn;
12524 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12525 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12527 if (ARM_PC_REGNUM != reg_rt)
12529 record_buf[0] = reg_rt;
12530 record_buf[1] = reg_rn;
12531 record_buf[2] = ARM_PS_REGNUM;
12532 thumb2_insn_r->reg_rec_count = 3;
12534 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12536 return ARM_RECORD_SUCCESS;
12539 return ARM_RECORD_FAILURE;
12542 /* Handler for thumb2 load word instructions. */
12545 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12547 uint32_t record_buf[8];
12549 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12550 record_buf[1] = ARM_PS_REGNUM;
12551 thumb2_insn_r->reg_rec_count = 2;
12553 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12555 return ARM_RECORD_SUCCESS;
12558 /* Handler for thumb2 long multiply, long multiply accumulate, and
12559 divide instructions. */
12562 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12564 uint32_t opcode1 = 0, opcode2 = 0;
12565 uint32_t record_buf[8];
12567 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12568 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12570 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12572 /* Handle SMULL, UMULL, SMLAL. */
12573 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12574 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12575 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12576 record_buf[2] = ARM_PS_REGNUM;
12577 thumb2_insn_r->reg_rec_count = 3;
12579 else if (1 == opcode1 || 3 == opcode1)
12581 /* Handle SDIV and UDIV. */
12582 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12583 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12584 record_buf[2] = ARM_PS_REGNUM;
12585 thumb2_insn_r->reg_rec_count = 3;
12588 return ARM_RECORD_FAILURE;
12590 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12592 return ARM_RECORD_SUCCESS;
12595 /* Record handler for thumb32 coprocessor instructions. */
12598 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12600 if (bit (thumb2_insn_r->arm_insn, 25))
12601 return arm_record_coproc_data_proc (thumb2_insn_r);
12603 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12606 /* Record handler for Advanced SIMD structure load/store instructions. */
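/* These are the VLDn/VSTn element and structure insns.  The element size
   is decoded from the insn, and the handler records what the hardware
   would touch: the affected addresses for stores, the D registers for
   loads, plus the base register when writeback is in effect. */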
12609 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12611 struct regcache *reg_cache = thumb2_insn_r->regcache;
12612 uint32_t l_bit, a_bit, b_bits;
12613 uint32_t record_buf[128], record_buf_mem[128];
12614 uint32_t reg_rn, reg_vd, address, f_elem;
12615 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12618 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12619 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12620 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12621 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12622 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12623 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12624 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12625 f_elem = 8 / f_ebytes;
12629 ULONGEST u_regval = 0;
12630 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12631 address = u_regval;
12636 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12638 if (b_bits == 0x07)
12640 else if (b_bits == 0x0a)
12642 else if (b_bits == 0x06)
12644 else if (b_bits == 0x02)
12649 for (index_r = 0; index_r < bf_regs; index_r++)
12651 for (index_e = 0; index_e < f_elem; index_e++)
12653 record_buf_mem[index_m++] = f_ebytes;
12654 record_buf_mem[index_m++] = address;
12655 address = address + f_ebytes;
12656 thumb2_insn_r->mem_rec_count += 1;
12661 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12663 if (b_bits == 0x09 || b_bits == 0x08)
12665 else if (b_bits == 0x03)
12670 for (index_r = 0; index_r < bf_regs; index_r++)
12671 for (index_e = 0; index_e < f_elem; index_e++)
12673 for (loop_t = 0; loop_t < 2; loop_t++)
12675 record_buf_mem[index_m++] = f_ebytes;
12676 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12677 thumb2_insn_r->mem_rec_count += 1;
12679 address = address + (2 * f_ebytes);
12683 else if ((b_bits & 0x0e) == 0x04)
12685 for (index_e = 0; index_e < f_elem; index_e++)
12687 for (loop_t = 0; loop_t < 3; loop_t++)
12689 record_buf_mem[index_m++] = f_ebytes;
12690 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12691 thumb2_insn_r->mem_rec_count += 1;
12693 address = address + (3 * f_ebytes);
12697 else if (!(b_bits & 0x0e))
12699 for (index_e = 0; index_e < f_elem; index_e++)
12701 for (loop_t = 0; loop_t < 4; loop_t++)
12703 record_buf_mem[index_m++] = f_ebytes;
12704 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12705 thumb2_insn_r->mem_rec_count += 1;
12707 address = address + (4 * f_ebytes);
12713 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12715 if (bft_size == 0x00)
12717 else if (bft_size == 0x01)
12719 else if (bft_size == 0x02)
12725 if (!(b_bits & 0x0b) || b_bits == 0x08)
12726 thumb2_insn_r->mem_rec_count = 1;
12728 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12729 thumb2_insn_r->mem_rec_count = 2;
12731 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12732 thumb2_insn_r->mem_rec_count = 3;
12734 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12735 thumb2_insn_r->mem_rec_count = 4;
12737 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12739 record_buf_mem[index_m] = f_ebytes;
12740 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12749 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12750 thumb2_insn_r->reg_rec_count = 1;
12752 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12753 thumb2_insn_r->reg_rec_count = 2;
12755 else if ((b_bits & 0x0e) == 0x04)
12756 thumb2_insn_r->reg_rec_count = 3;
12758 else if (!(b_bits & 0x0e))
12759 thumb2_insn_r->reg_rec_count = 4;
12764 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12765 thumb2_insn_r->reg_rec_count = 1;
12767 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12768 thumb2_insn_r->reg_rec_count = 2;
12770 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12771 thumb2_insn_r->reg_rec_count = 3;
12773 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12774 thumb2_insn_r->reg_rec_count = 4;
12776 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12777 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12781 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12783 record_buf[index_r] = reg_rn;
12784 thumb2_insn_r->reg_rec_count += 1;
12787 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12789 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12794 /* Decodes thumb2 instruction type and invokes its record handler. */
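/* Dispatch is on op1 (bits 27-28) and op2 (bits 20-26) of the combined
   32-bit instruction, following the grouping of the Thumb-2 encoding
   tables; each group is forwarded to one of the thumb2_record_* helpers
   above. */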
12796 static unsigned int
12797 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12799 uint32_t op, op1, op2;
12801 op = bit (thumb2_insn_r->arm_insn, 15);
12802 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12803 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12807 if (!(op2 & 0x64))
12809 /* Load/store multiple instruction. */
12810 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12812 else if ((op2 & 0x64) == 0x4)
12814 /* Load/store (dual/exclusive) and table branch instruction. */
12815 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12817 else if ((op2 & 0x60) == 0x20)
12819 /* Data-processing (shifted register). */
12820 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12822 else if (op2 & 0x40)
12824 /* Co-processor instructions. */
12825 return thumb2_record_coproc_insn (thumb2_insn_r);
12828 else if (op1 == 0x02)
12832 /* Branches and miscellaneous control instructions. */
12833 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12835 else if (op2 & 0x20)
12837 /* Data-processing (plain binary immediate) instruction. */
12838 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12842 /* Data-processing (modified immediate). */
12843 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12846 else if (op1 == 0x03)
12848 if (!(op2 & 0x71))
12850 /* Store single data item. */
12851 return thumb2_record_str_single_data (thumb2_insn_r);
12853 else if (!((op2 & 0x71) ^ 0x10))
12855 /* Advanced SIMD or structure load/store instructions. */
12856 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12858 else if (!((op2 & 0x67) ^ 0x01))
12860 /* Load byte, memory hints instruction. */
12861 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12863 else if (!((op2 & 0x67) ^ 0x03))
12865 /* Load halfword, memory hints instruction. */
12866 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12868 else if (!((op2 & 0x67) ^ 0x05))
12870 /* Load word instruction. */
12871 return thumb2_record_ld_word (thumb2_insn_r);
12873 else if (!((op2 & 0x70) ^ 0x20))
12875 /* Data-processing (register) instruction. */
12876 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12878 else if (!((op2 & 0x78) ^ 0x30))
12880 /* Multiply, multiply accumulate, abs diff instruction. */
12881 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12883 else if (!((op2 & 0x78) ^ 0x38))
12885 /* Long multiply, long multiply accumulate, and divide. */
12886 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12888 else if (op2 & 0x40)
12890 /* Co-processor instructions. */
12891 return thumb2_record_coproc_insn (thumb2_insn_r);
12899 /* Abstract memory reader. */
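/* Reading target memory is put behind this small interface so that the
   instruction decoder below does not depend on a live target; any byte
   source (the real target, or for instance a canned buffer) can
   implement it. */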
12901 class abstract_memory_reader
12904 /* Read LEN bytes of target memory at address MEMADDR, placing the
12905 results in GDB's memory at BUF. Return true on success. */
12907 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
12910 /* Instruction reader from real target. */
12912 class instruction_reader : public abstract_memory_reader
12915 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
12917 if (target_read_memory (memaddr, buf, len))
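
/* A minimal sketch (not used by GDB itself; the class name is illustrative
   only) of another possible reader: one that decodes instructions out of a
   caller-supplied byte buffer instead of the live target.  The selftests
   below use the same idea with an array of Thumb halfwords.  */

class buffer_instruction_reader : public abstract_memory_reader
{
public:
  buffer_instruction_reader (CORE_ADDR base, const gdb_byte *bytes,
                             size_t size)
    : m_base (base), m_bytes (bytes), m_size (size)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    /* Fail reads that fall outside the buffer, mirroring a target
       memory error.  */
    if (memaddr < m_base || memaddr + len > m_base + m_size)
      return false;
    for (size_t i = 0; i < len; i++)
      buf[i] = m_bytes[memaddr - m_base + i];
    return true;
  }

private:
  CORE_ADDR m_base;
  const gdb_byte *m_bytes;
  size_t m_size;
};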

/* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on
   success and a positive value on failure.  */

static int
extract_arm_insn (abstract_memory_reader& reader,
                  insn_decode_record *insn_record, uint32_t insn_size)
{
  gdb_byte buf[insn_size];

  memset (&buf[0], 0, insn_size);

  if (!reader.read (insn_record->this_addr, buf, insn_size))
    return 1;
  insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
                           insn_size,
                           gdbarch_byte_order_for_code (insn_record->gdbarch));
  return 0;
}
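
/* Note (added for clarity): the opcode bytes are interpreted in the code
   byte order (gdbarch_byte_order_for_code) rather than the data byte order,
   since on ARM the two can differ -- e.g. BE8 targets keep instructions
   little-endian while data is big-endian.  */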

typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);

/* Decode an arm/thumb insn depending on its condition codes and opcodes,
   and dispatch it to the matching record handler.  */

static int
decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
             record_type_t record_type, uint32_t insn_size)
{
  /* Counting bits from 0: bits 25, 26 and 27 decode the type of an arm
     instruction.  */
  static const sti_arm_hdl_fp_t arm_handle_insn[8] =
  {
    arm_record_data_proc_misc_ld_str,   /* 000.  */
    arm_record_data_proc_imm,           /* 001.  */
    arm_record_ld_st_imm_offset,        /* 010.  */
    arm_record_ld_st_reg_offset,        /* 011.  */
    arm_record_ld_st_multiple,          /* 100.  */
    arm_record_b_bl,                    /* 101.  */
    arm_record_asimd_vfp_coproc,        /* 110.  */
    arm_record_coproc_data_proc         /* 111.  */
  };

  /* Counting bits from 0: bits 13, 14 and 15 decode the type of a thumb
     instruction.  */
  static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
  {
    thumb_record_shift_add_sub,         /* 000.  */
    thumb_record_add_sub_cmp_mov,       /* 001.  */
    thumb_record_ld_st_reg_offset,      /* 010.  */
    thumb_record_ld_st_imm_offset,      /* 011.  */
    thumb_record_ld_st_stack,           /* 100.  */
    thumb_record_misc,                  /* 101.  */
    thumb_record_ldm_stm_swi,           /* 110.  */
    thumb_record_branch                 /* 111.  */
  };

  uint32_t ret = 0;    /* Return value: 0 on success, non-zero on failure.  */
  uint32_t insn_id = 0;
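
  /* For instance, the ARM instruction 0xe5921000 (ldr r1, [r2]) has bits
     25-27 equal to 0b010, so the ARM path below dispatches it to
     arm_record_ld_st_imm_offset.  */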
  if (extract_arm_insn (reader, arm_record, insn_size))
    {
      if (record_debug)
        {
          printf_unfiltered (_("Process record: error reading memory at "
                               "addr %s len = %d.\n"),
                             paddress (arm_record->gdbarch,
                                       arm_record->this_addr), insn_size);
        }
      return -1;
    }
  else if (ARM_RECORD == record_type)
    {
      arm_record->cond = bits (arm_record->arm_insn, 28, 31);
      insn_id = bits (arm_record->arm_insn, 25, 27);

      if (arm_record->cond == 0xf)
        ret = arm_record_extension_space (arm_record);
      else
        {
          /* If this insn has fallen into extension space
             then we need not decode it anymore.  */
          ret = arm_handle_insn[insn_id] (arm_record);
        }
      if (ret != ARM_RECORD_SUCCESS)
        {
          arm_record_unsupported_insn (arm_record);
          ret = -1;
        }
    }
  else if (THUMB_RECORD == record_type)
    {
      /* As thumb does not have condition codes, we set negative.  */
      arm_record->cond = -1;
      insn_id = bits (arm_record->arm_insn, 13, 15);
      ret = thumb_handle_insn[insn_id] (arm_record);
      if (ret != ARM_RECORD_SUCCESS)
        {
          arm_record_unsupported_insn (arm_record);
          ret = -1;
        }
    }
  else if (THUMB2_RECORD == record_type)
    {
      /* As thumb does not have condition codes, we set negative.  */
      arm_record->cond = -1;

      /* Swap the first half of the 32-bit thumb instruction with the
         second half.  */
      arm_record->arm_insn
        = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);

      ret = thumb2_record_decode_insn_handler (arm_record);

      if (ret != ARM_RECORD_SUCCESS)
        {
          arm_record_unsupported_insn (arm_record);
          ret = -1;
        }
    }
  else
    {
      /* Throw assertion.  */
      gdb_assert_not_reached ("not a valid instruction, could not decode");
    }

  return ret;
}

#if GDB_SELF_TEST
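
/* The unit tests below run as part of "maintenance selftest".  They drive
   decode_insn through a fake memory reader and check which registers each
   decoded instruction is recorded as modifying.  */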

namespace selftests {

/* Provide both 16-bit and 32-bit thumb instructions.  */

class instruction_reader_thumb : public abstract_memory_reader
{
public:
  template<size_t SIZE>
  instruction_reader_thumb (enum bfd_endian endian,
                            const uint16_t (&insns)[SIZE])
    : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    SELF_CHECK (len == 4 || len == 2);
    SELF_CHECK (memaddr % 2 == 0);
    SELF_CHECK ((memaddr / 2) < m_insns_size);

    store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
    if (len == 4)
      {
        store_unsigned_integer (&buf[2], 2, m_endian,
                                m_insns[memaddr / 2 + 1]);
      }
    return true;
  }

private:
  enum bfd_endian m_endian;
  const uint16_t *m_insns;
  size_t m_insns_size;
};
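
/* Decode a few representative 16-bit Thumb and 32-bit Thumb-2 instructions
   and check the recorded register sets.  The addresses handed to
   instruction_reader_thumb are simply halfword indexes into the test
   arrays, starting at 0.  */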
static void
arm_record_test (void)
{
  struct gdbarch_info info;
  gdbarch_info_init (&info);
  info.bfd_arch_info = bfd_scan_arch ("arm");

  struct gdbarch *gdbarch = gdbarch_find_by_info (info);

  SELF_CHECK (gdbarch != NULL);

  /* 16-bit Thumb instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* db b2	uxtb	r3, r3 */
      0xb2db,
      /* cd 58	ldr	r5, [r1, r3] */
      0x58cd,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                           THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 3);

    arm_record.this_addr += 2;
    ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                       THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 5);
  }

  /* 32-bit Thumb-2 instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* 1d ee 70 7f	mrc	15, 0, r7, cr13, cr0, {3} */
      0xee1d, 0x7f70,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
                           THUMB2_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 7);
  }
}

} // namespace selftests
#endif /* GDB_SELF_TEST */

/* Cleans up local record registers and memory allocations.  */

static void
deallocate_reg_mem (insn_decode_record *record)
{
  xfree (record->arm_regs);
  xfree (record->arm_mems);
}

/* Parse the current instruction, and record the values of the registers and
   memory that will be changed by it into record_arch_list.  Return -1 if
   something goes wrong.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
                    CORE_ADDR insn_addr)
{
  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* Return value: 0 on success, -1 on record failure.  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;

  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
                          "addr = %s\n",
                          paddress (gdbarch, arm_record.this_addr));
    }

  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
        {
          printf_unfiltered (_("Process record: error reading memory at "
                               "addr %s len = %d.\n"),
                             paddress (arm_record.gdbarch,
                                       arm_record.this_addr), 2);
        }
      return -1;
    }

  /* Check whether the insn is a Thumb or an ARM one.  */
  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);

  if (!(u_regval & t_bit))
    {
      /* We are decoding an arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* Is it a thumb2 insn?  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
        {
          ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
                             THUMB2_INSN_SIZE_BYTES);
        }
      else
        {
          /* We are decoding a thumb insn.  */
          ret = decode_insn (reader, &arm_record, THUMB_RECORD,
                             THUMB_INSN_SIZE_BYTES);
        }
    }

  if (0 == ret)
    {
      /* Record registers.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_reg
                  (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
                ret = -1;
            }
        }
      /* Record memories.  */
      if (arm_record.arm_mems)
        {
          for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
            {
              if (record_full_arch_list_add_mem
                  ((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
                   arm_record.arm_mems[no_of_rec].len))
                ret = -1;
            }
        }
      if (record_full_arch_list_add_end ())
        ret = -1;
    }

  deallocate_reg_mem (&arm_record);