/* Common target dependent code for GDB on ARM systems.

   Copyright (C) 1988-2019 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
#include <ctype.h>		/* XXX for isupper ().  */

#include "dis-asm.h"		/* For register styles.  */
#include "reggroups.h"
#include "target-float.h"
#include "arch-utils.h"
#include "frame-unwind.h"
#include "frame-base.h"
#include "trad-frame.h"
#include "dwarf2-frame.h"
#include "prologue-value.h"
#include "target-descriptions.h"
#include "user-regs.h"
#include "observable.h"

#include "arch/arm-get-next-pcs.h"
#include "gdb/sim-arm.h"

#include "coff/internal.h"

#include "record-full.h"

#include "gdbsupport/selftest.h"

static bool arm_debug;
/* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym) \
  MSYMBOL_TARGET_FLAG_1 (msym)
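
/* Mapping symbols, as defined by the "ELF for the ARM Architecture" ABI
   supplement: within a code section, a "$a" symbol marks the start of a
   run of ARM instructions, "$t" the start of Thumb instructions, and
   "$d" the start of literal data.  arm_pc_is_thumb consults them (via
   arm_find_mapping_symbol below) to decide which instruction set an
   address belongs to.  */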
struct arm_mapping_symbol
{
  CORE_ADDR value;
  char type;

  bool operator< (const arm_mapping_symbol &other) const
  { return this->value < other.value; }
};

typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
struct arm_per_objfile
{
  explicit arm_per_objfile (size_t num_sections)
    : section_maps (new arm_mapping_symbol_vec[num_sections]),
      section_maps_sorted (new bool[num_sections] ())
  {}

  DISABLE_COPY_AND_ASSIGN (arm_per_objfile);

  /* Information about mapping symbols ($a, $d, $t) in the objfile.

     The format is an array of vectors of arm_mapping_symbols; there is one
     vector for each section of the objfile (the array is indexed by BFD
     section index).

     For each section, the vector of arm_mapping_symbol is sorted by
     symbol value (address).  */
  std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;

  /* For each corresponding element of section_maps above, is this vector
     sorted.  */
  std::unique_ptr<bool[]> section_maps_sorted;
};
/* Per-objfile data used for mapping symbols.  */
static objfile_key<arm_per_objfile> arm_objfile_data_key;

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;
/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =

static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";
/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  /* Synonyms (argument and variable registers).  */
  /* Other platform-specific names for r9.  */
  /* Names used by GCC (not listed in the ARM EABI).  */
  /* A special name from the older ATPCS.  */
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Holds the current set of options to be passed to the disassembler.  */
static char *arm_disassembler_options;

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* All possible arm target descriptors.  */
static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc (const char *, int,
					 struct cmd_list_element *);
static void show_disassembly_style_sfunc (struct ui_file *, int,
					  struct cmd_list_element *,
					  const char *);

static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						readable_regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

static CORE_ADDR
  arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
/* get_next_pcs operations.  */
static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
  arm_get_next_pcs_read_memory_unsigned_integer,
  arm_get_next_pcs_syscall_next_pc,
  arm_get_next_pcs_addr_bits_remove,
  arm_get_next_pcs_is_thumb,
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */
  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This affects the behaviour
   of certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION	5

/* See arm-tdep.h.  */

bool arm_apcs_32 = true;
/* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode.  */

arm_psr_thumb_bit (struct gdbarch *gdbarch)
  if (gdbarch_tdep (gdbarch)->is_m)

/* Determine if the processor is currently executing in Thumb mode.  */

arm_is_thumb (struct regcache *regcache)
  ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());

  cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;

/* Determine if FRAME is executing in Thumb mode.  */

arm_frame_is_thumb (struct frame_info *frame)
  ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));

  /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
     directly (from a signal frame or dummy frame) or by interpreting
     the saved LR (from a prologue or DWARF frame).  So consult it and
     trust the unwinders.  */
  cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);

  return (cpsr & t_bit) != 0;
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
      arm_per_objfile *data = arm_objfile_data_key.get (sec->objfile);
	  unsigned int section_idx = sec->the_bfd_section->index;
	  arm_mapping_symbol_vec &map
	    = data->section_maps[section_idx];

	  /* Sort the vector on first use.  */
	  if (!data->section_maps_sorted[section_idx])
	      std::sort (map.begin (), map.end ());
	      data->section_maps_sorted[section_idx] = true;

	  struct arm_mapping_symbol map_key
	    = { memaddr - obj_section_addr (sec), 0 };
	  arm_mapping_symbol_vec::const_iterator it
	    = std::lower_bound (map.begin (), map.end (), map_key);

	  /* std::lower_bound finds the earliest ordered insertion
	     point.  If the symbol at this position starts at this exact
	     address, we use that; otherwise, the preceding
	     mapping symbol covers this address.  */
	  if (it->value == map_key.value)
	      *start = it->value + obj_section_addr (sec);

	  if (it > map.begin ())
	      arm_mapping_symbol_vec::const_iterator prev_it
	      *start = prev_it->value + obj_section_addr (sec);
	      return prev_it->type;
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.  */

arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
  struct bound_minimal_symbol sym;

  arm_displaced_step_closure *dsc
    = ((arm_displaced_step_closure *)
       get_displaced_step_closure_by_addr (memaddr));

  /* If we are checking the mode of a displaced instruction in the copy
     area, the mode should be determined by the instruction at the
     original address.  */
      fprintf_unfiltered (gdb_stdlog,
			  "displaced: check mode of %.8lx instead of %.8lx\n",
			  (unsigned long) dsc->insn_addr,
			  (unsigned long) memaddr);
      memaddr = dsc->insn_addr;

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))

  /* If the user wants to override the symbol table, let them.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
  if (strcmp (arm_force_mode_string, "thumb") == 0)

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
/* Determine if the address specified equals any of these magic return
   values, called EXC_RETURN, defined by the ARM v6-M and v7-M
   architectures.

   From ARMv6-M Reference Manual B1.5.8
   Table B1-5 Exception return behavior

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   From ARMv7-M Reference Manual B1.5.8
   Table B1-8 EXC_RETURN definition of exception return behavior, no FP

   EXC_RETURN    Return To        Return Stack
   0xFFFFFFF1    Handler mode     Main
   0xFFFFFFF9    Thread mode      Main
   0xFFFFFFFD    Thread mode      Process

   Table B1-9 EXC_RETURN definition of exception return behavior, with
   FP

   EXC_RETURN    Return To        Return Stack    Frame Type
   0xFFFFFFE1    Handler mode     Main            Extended
   0xFFFFFFE9    Thread mode      Main            Extended
   0xFFFFFFED    Thread mode      Process         Extended
   0xFFFFFFF1    Handler mode     Main            Basic
   0xFFFFFFF9    Thread mode      Main            Basic
   0xFFFFFFFD    Thread mode      Process         Basic

   For more details see "B1.5.8 Exception return behavior"
   in both ARMv6-M and ARMv7-M Architecture Reference Manuals.  */
arm_m_addr_is_magic (CORE_ADDR addr)
  /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
     the exception return behavior.  */
      /* Address is magic.  */
      /* Address is not magic.  */

/* Remove useless bits from addresses in a running program.  */

arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
  /* On M-profile devices, do not strip the low bit from EXC_RETURN
     (the magic exception return address).  */
  if (gdbarch_tdep (gdbarch)->is_m
      && arm_m_addr_is_magic (val))
    return val;

  if (arm_apcs_32)
    return UNMAKE_THUMB_ADDR (val);
  else
    return (val & 0x03fffffc);
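  /* The 0x03fffffc mask above keeps a 26-bit, word-aligned address: in
     the old 26-bit APCS configuration, R15's top bits hold the PSR flags
     and its bottom two bits the processor mode, so both ends must be
     stripped.  */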
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   was called.  */

skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
      if (strstr (name, "_from_thumb") != NULL)

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
      if (startswith (name, "__aeabi_d2f"))

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
      if (startswith (name, "__aeabi_read_tp"))

  /* If we run against a stripped glibc, we may be unable to identify
     special functions by name.  Check for one important case,
     __aeabi_read_tp, by comparing the *code* against the default
     implementation (this is hand-written ARM assembler in glibc).  */

      && read_code_unsigned_integer (pc, 4, byte_order_for_code)
	 == 0xe3e00a0f /* mov r0, #0xffff0fff */
      && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
	 == 0xe240f01f) /* sub pc, r0, #31 */
/* Extract the immediate from a movw/movt instruction of encoding T.  INSN1
   is the first 16 bits of the instruction, and INSN2 is the second 16 bits
   of the instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)               \
   | (bits ((insn1), 10, 10) << 11)           \
   | (bits ((insn2), 12, 14) << 8)            \
   | bits ((insn2), 0, 7))
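
/* In encoding T the 16-bit value is split across the two halfwords as
   imm4:i:imm3:imm8, with imm4 in INSN1[3:0], i in INSN1[10], imm3 in
   INSN2[14:12] and imm8 in INSN2[7:0]; the macro above simply reassembles
   those fields.  */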
/* Extract the immediate from a movw/movt instruction of encoding A.  INSN
   is the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
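
/* In encoding A the fields are imm4:imm12, with imm4 in INSN[19:16] and
   imm12 in INSN[11:0].  */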
/* Decode an immediate value; implements the ThumbExpandImmediate
   pseudo-op.  */

thumb_expand_immediate (unsigned int imm)
  unsigned int count = imm >> 7;

	return (imm & 0xff) | ((imm & 0xff) << 16);
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
	return (imm & 0xff) | ((imm & 0xff) << 8)
	       | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);

  return (0x80 | (imm & 0x7f)) << (32 - count);
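  /* For example, IMM = 0x4ab has a rotation count of 9 (its top five
     bits), so the final return above applies:
     (0x80 | 0x2b) << (32 - 9) == 0x55800000, i.e. the byte 0xab rotated
     right by 9 bits.  */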
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in the
   epilogue, 0 otherwise.  */

thumb_instruction_restores_sp (unsigned short insn)
  return (insn == 0x46bd		/* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	/* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00);  /* pop <registers> */
/* Analyze a Thumb prologue, looking for a recognizable stack frame
   and frame pointer.  Scan until we encounter a store that could
   clobber the stack frame unexpectedly, or an unknown instruction.
   Return the last address which is definitely safe to skip for an
   initial breakpoint.  */

thumb_analyze_prologue (struct gdbarch *gdbarch,
			CORE_ADDR start, CORE_ADDR limit,
			struct arm_prologue_cache *cache)
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR unrecognized_pc = 0;

  for (i = 0; i < 16; i++)
    regs[i] = pv_register (i, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));

  while (start < limit)
      insn = read_code_unsigned_integer (start, 2, byte_order_for_code);

      if ((insn & 0xfe00) == 0xb400)		/* push { rlist } */
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  /* Bits 0-7 contain a mask for registers R0-R7.  Bit 8 says
	     whether to save LR (R14).  */
	  mask = (insn & 0xff) | ((insn & 0x100) << 6);
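	  /* The shift above maps bit 8 of the instruction onto bit 14 of
	     MASK, i.e. onto ARM_LR_REGNUM, so the loop below handles LR
	     just like the low registers.  */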
	  /* Calculate offsets of saved R0-R7 and LR.  */
	  for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
		regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);

      else if ((insn & 0xff80) == 0xb080)	/* sub sp, #imm */
	  offset = (insn & 0x7f) << 2;		/* get scaled offset */
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],

      else if (thumb_instruction_restores_sp (insn))
	  /* Don't scan past the epilogue.  */

      else if ((insn & 0xf800) == 0xa800)	/* add Rd, sp, #imm */
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
      else if ((insn & 0xfe00) == 0x1c00	/* add Rd, Rn, #imm */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
      else if ((insn & 0xf800) == 0x3000	/* add Rd, #imm */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
      else if ((insn & 0xfe00) == 0x1800	/* add Rd, Rn, Rm */
	       && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
	       && pv_is_constant (regs[bits (insn, 3, 5)]))
	regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
					  regs[bits (insn, 6, 8)]);
      else if ((insn & 0xff00) == 0x4400	/* add Rd, Rm */
	       && pv_is_constant (regs[bits (insn, 3, 6)]))
	  int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
	  int rm = bits (insn, 3, 6);
	  regs[rd] = pv_add (regs[rd], regs[rm]);
      else if ((insn & 0xff00) == 0x4600)	/* mov hi, lo or mov lo, hi */
	  int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
	  int src_reg = (insn & 0x78) >> 3;
	  regs[dst_reg] = regs[src_reg];
      else if ((insn & 0xf800) == 0x9000)	/* str rd, [sp, #off] */
	  /* Handle stores to the stack.  Normally pushes are used,
	     but with GCC -mtpcs-frame, there may be other stores
	     in the prologue to create the frame.  */
	  int regno = (insn >> 8) & 0x7;

	  offset = (insn & 0xff) << 2;
	  addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);

	  if (stack.store_would_trash (addr))

	  stack.store (addr, 4, regs[regno]);
      else if ((insn & 0xf800) == 0x6000)	/* str rd, [rn, #off] */
	  int rd = bits (insn, 0, 2);
	  int rn = bits (insn, 3, 5);

	  offset = bits (insn, 6, 10) << 2;
	  addr = pv_add_constant (regs[rn], offset);

	  if (stack.store_would_trash (addr))

	  stack.store (addr, 4, regs[rd]);
      else if (((insn & 0xf800) == 0x7000	/* strb Rd, [Rn, #off] */
		|| (insn & 0xf800) == 0x8000)	/* strh Rd, [Rn, #off] */
	       && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
	/* Ignore stores of argument registers to the stack.  */
      else if ((insn & 0xf800) == 0xc800	/* ldmia Rn!, { registers } */
	       && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
      else if ((insn & 0xf800) == 0x9800	/* ldr Rd, [Rn, #immed] */
	       || ((insn & 0xf800) == 0x6800	/* ldr Rd, [sp, #immed] */
		   && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
	/* Similarly ignore single loads from the stack.  */
      else if ((insn & 0xffc0) == 0x0000	/* lsls Rd, Rm, #0 */
	       || (insn & 0xffc0) == 0x1c00)	/* add Rd, Rn, #0 */
	/* Skip register copies, i.e. saves to another register
	   instead of the stack.  */
      else if ((insn & 0xf800) == 0x2000)	/* movs Rd, #imm */
	/* Recognize constant loads; even with small stacks these are
	   necessary.  */
	regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
      else if ((insn & 0xf800) == 0x4800)	/* ldr Rd, [pc, #imm] */
	  /* Constant pool loads, for the same reason.  */
	  unsigned int constant;

	  loc = start + 4 + bits (insn, 0, 7) * 4;
	  constant = read_memory_unsigned_integer (loc, 4, byte_order);
	  regs[bits (insn, 8, 10)] = pv_constant (constant);
      else if (thumb_insn_size (insn) == 4)	/* 32-bit Thumb-2 instructions.  */
	  unsigned short inst2;

	  inst2 = read_code_unsigned_integer (start + 2, 2,
					      byte_order_for_code);

	  if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
	      /* BL, BLX.  Allow some special function calls when
		 skipping the prologue; GCC generates these before
		 storing arguments to the stack.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (insn, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);
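	      /* Per the Thumb-2 BL/BLX encoding, I1 = NOT(J1 XOR S) and
		 I2 = NOT(J2 XOR S).  IMM1 was sign-extended from S, so
		 bits 23 and 22 of OFFSET already hold S; XOR-ing them
		 with !J1 and !J2 turns them into I1 and I2.  */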
	      nextpc = start + 4 + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;

	      if (!skip_prologue_function (gdbarch, nextpc,
					   bit (inst2, 12) != 0))

	  else if ((insn & 0xffd0) == 0xe900	/* stmdb Rn{!},
						   { registers } */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      pv_t addr = regs[bits (insn, 0, 3)];

	      if (stack.store_would_trash (addr))

	      /* Calculate offsets of saved registers.  */
	      for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
		if (inst2 & (1 << regno))
		    addr = pv_add_constant (addr, -4);
		    stack.store (addr, 4, regs[regno]);

	      regs[bits (insn, 0, 3)] = addr;

	  else if ((insn & 0xff50) == 0xe940	/* strd Rt, Rt2,
						   [Rn, #+/-imm]{!} */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno1 = bits (inst2, 12, 15);
	      int regno2 = bits (inst2, 8, 11);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))

	      stack.store (addr, 4, regs[regno1]);
	      stack.store (pv_add_constant (addr, 4),

	      regs[bits (insn, 0, 3)] = addr;

	  else if ((insn & 0xfff0) == 0xf8c0	/* str Rt,[Rn,+/-#imm]{!} */
		   && (inst2 & 0x0c00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);
	      pv_t addr = regs[bits (insn, 0, 3)];

	      offset = inst2 & 0xff;
		addr = pv_add_constant (addr, offset);
		addr = pv_add_constant (addr, -offset);

	      if (stack.store_would_trash (addr))

	      stack.store (addr, 4, regs[regno]);

	      regs[bits (insn, 0, 3)] = addr;

	  else if ((insn & 0xfff0) == 0xf8c0	/* str.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	      int regno = bits (inst2, 12, 15);

	      offset = inst2 & 0xfff;
	      addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);

	      if (stack.store_would_trash (addr))

	      stack.store (addr, 4, regs[regno]);

	  else if ((insn & 0xffd0) == 0xf880	/* str{bh}.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */

	  else if ((insn & 0xffd0) == 0xf800	/* str{bh} Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore stores of argument registers to the stack.  */

	  else if ((insn & 0xffd0) == 0xe890	/* ldmia Rn[!],
						   { registers } */
		   && (inst2 & 0x8000) == 0x0000
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Ignore block loads from the stack, potentially copying
	       parameters from memory.  */

	  else if ((insn & 0xffb0) == 0xe950	/* ldrd Rt, Rt2,
						   [Rn, #+/-imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore dual loads from the stack.  */
	  else if ((insn & 0xfff0) == 0xf850	/* ldr Rt,[Rn,#+/-imm] */
		   && (inst2 & 0x0d00) == 0x0c00
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */

	  else if ((insn & 0xfff0) == 0xf8d0	/* ldr.w Rt,[Rn,#imm] */
		   && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
	    /* Similarly ignore single loads from the stack.  */

	  else if ((insn & 0xfbf0) == 0xf100	/* add.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   thumb_expand_immediate (imm));

	  else if ((insn & 0xfbf0) == 0xf200	/* addw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], imm);

	  else if ((insn & 0xfbf0) == 0xf1a0	/* sub.w Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)],
				   - (CORE_ADDR) thumb_expand_immediate (imm));

	  else if ((insn & 0xfbf0) == 0xf2a0	/* subw Rd, Rn, #imm */
		   && (inst2 & 0x8000) == 0x0000)
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);

	  else if ((insn & 0xfbff) == 0xf04f)	/* mov.w Rd, #const */
	      unsigned int imm = ((bits (insn, 10, 10) << 11)
				  | (bits (inst2, 12, 14) << 8)
				  | bits (inst2, 0, 7));

	      regs[bits (inst2, 8, 11)]
		= pv_constant (thumb_expand_immediate (imm));

	  else if ((insn & 0xfbf0) == 0xf240)	/* movw Rd, #const */
		= EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);

	      regs[bits (inst2, 8, 11)] = pv_constant (imm);
	  else if (insn == 0xea5f		/* mov.w Rd,Rm */
		   && (inst2 & 0xf0f0) == 0)
	      int dst_reg = (inst2 & 0x0f00) >> 8;
	      int src_reg = inst2 & 0xf;
	      regs[dst_reg] = regs[src_reg];

	  else if ((insn & 0xff7f) == 0xf85f)	/* ldr.w Rt,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (inst2, 0, 11);
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	  else if ((insn & 0xff7f) == 0xe95f)	/* ldrd Rt,Rt2,<label> */
	      /* Constant pool loads.  */
	      unsigned int constant;

	      offset = bits (inst2, 0, 7) << 2;
		loc = start + 4 + offset;
		loc = start + 4 - offset;

	      constant = read_memory_unsigned_integer (loc, 4, byte_order);
	      regs[bits (inst2, 12, 15)] = pv_constant (constant);

	      constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
	      regs[bits (inst2, 8, 11)] = pv_constant (constant);

	  else if (thumb2_instruction_changes_pc (insn, inst2))
	      /* Don't scan past anything that might change control flow.  */

	      /* The optimizer might shove anything into the prologue,
		 so we just skip what we don't recognize.  */
	      unrecognized_pc = start;

      else if (thumb_instruction_changes_pc (insn))
	  /* Don't scan past anything that might change control flow.  */

	  /* The optimizer might shove anything into the prologue,
	     so we just skip what we don't recognize.  */
	  unrecognized_pc = start;

    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, start));

  if (unrecognized_pc == 0)
    unrecognized_pc = start;

    return unrecognized_pc;

  if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is fp.  Frame size is constant.  */
      cache->framereg = ARM_FP_REGNUM;
      cache->framesize = -regs[ARM_FP_REGNUM].k;
  else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
      /* Frame pointer is r7.  Frame size is constant.  */
      cache->framereg = THUMB_FP_REGNUM;
      cache->framesize = -regs[THUMB_FP_REGNUM].k;

      /* Try the stack pointer... this is a bit desperate.  */
      cache->framereg = ARM_SP_REGNUM;
      cache->framesize = -regs[ARM_SP_REGNUM].k;

  for (i = 0; i < 16; i++)
    if (stack.find_reg (gdbarch, i, &offset))
      cache->saved_regs[i].addr = offset;

  return unrecognized_pc;
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of the instruction after loading
   this symbol, set the destination register number in *DESTREG, and set
   the size of the loading instructions in *OFFSET.  Return 0 if the
   instructions are not recognized.  */

arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
				  unsigned int *destreg, int *offset)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

      unsigned short insn1
	= read_code_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800)	/* ldr Rd, #immed */
	  *destreg = bits (insn1, 8, 10);
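	  /* A Thumb PC-relative load addresses Align (PC, 4), where the
	     PC value is the instruction's address plus 4; the 8-bit
	     immediate is a word offset.  */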
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
      else if ((insn1 & 0xfbf0) == 0xf240)	/* movw Rd, #const */
	  unsigned short insn2
	    = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	    = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
	    = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);

	  address = (high << 16 | low);

	= read_code_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000)	/* ldr Rd, [PC, #immed] */
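	  /* In ARM state the PC reads as the instruction's address
	     plus 8.  */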
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);

      else if ((insn & 0x0ff00000) == 0x03000000)	/* movw Rd, #const */
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	    = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);

	  address = (high << 16 | low);
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   the first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed mainly of three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from the address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, the instructions in step 2 and step 3 are the same on various
   ARM architectures.  In step 2, it is one instruction 'ldr Rx, [Rn, #0]',
   and in step 3, it is also one instruction 'str Rx, [r7, #immd]'.
   However, the instructions in step 1 vary across different ARM
   architectures.  On ARMv7, they are:

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as the
   'fingerprint' or 'signature' of the stack protector sequence.  Here we
   choose the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */
arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for the stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		      "__stack_chk_guard"))

      unsigned int destreg;
	= read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
      if (bits (insn, 3, 5) != basereg)
      destreg = bits (insn, 0, 2);

      insn = read_code_unsigned_integer (pc + offset + 2, 2,
					 byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
      if (destreg != bits (insn, 0, 2))

      unsigned int destreg;
	= read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
      if (bits (insn, 16, 19) != basereg)
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_code_unsigned_integer (pc + offset + 4,
					 4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
      if (bits (insn, 12, 15) != destreg)

  /* The total size of the two ldr/str instructions is 4 on Thumb-2,
     and 8 on ARM.  */
    return pc + offset + 4;

  return pc + offset + 8;
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
   stmfd	sp!, {...,fp,ip,lr,pc}
   [stfe	f7, [sp, #-12]!]
   [stfe	f6, [sp, #-12]!]
   [stfe	f5, [sp, #-12]!]
   [stfe	f4, [sp, #-12]!]
   sub		fp, ip, #nn	@@ nn == 20 or 4 depending on second insn.  */

arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
	      || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
	return post_prologue_pc;
      if (post_prologue_pc != 0)
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  if (analyzed_limit != post_prologue_pc)

	  return post_prologue_pc;
  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
    return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
/* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
   This function decodes a Thumb function prologue to determine:
     1) the size of the stack frame
     2) which registers are saved on it
     3) the offsets of saved regs
     4) the offset from the stack pointer to the frame pointer

   A typical Thumb function prologue would create this stack frame
   (offsets relative to FP)
     old SP ->	24  stack parameters
     R7 ->	 0  local variables (16 bytes)
     SP ->	-12  additional stack space (12 bytes)
   The frame size would thus be 36 bytes, and the frame offset would be
   12 bytes.  The frame register is R7.

   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prolog.  */

thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
		     CORE_ADDR block_addr, struct arm_prologue_cache *cache)
  CORE_ADDR prologue_start;
  CORE_ADDR prologue_end;

  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
      /* See comment in arm_scan_prologue for an explanation of
	 this heuristic.  */
      if (prologue_end > prologue_start + 64)
	  prologue_end = prologue_start + 64;

  /* We're in the boondocks: we have no idea where the start of the
     function is.  */

  prologue_end = std::min (prologue_end, prev_pc);

  thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
   otherwise.  */

arm_instruction_restores_sp (unsigned int insn)
  if (bits (insn, 28, 31) != INST_NV)
      if ((insn & 0x0df0f000) == 0x0080d000
	  /* ADD SP (register or immediate).  */
	  || (insn & 0x0df0f000) == 0x0040d000
	  /* SUB SP (register or immediate).  */
	  || (insn & 0x0ffffff0) == 0x01a0d000
	  || (insn & 0x0fff0000) == 0x08bd0000
	  || (insn & 0x0fff0000) == 0x049d0000)
	/* POP of a single register.  */

/* Analyze an ARM mode prologue starting at PROLOGUE_START and
   continuing no further than PROLOGUE_END.  If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.

   We recognize all the instructions typically found in ARM prologues,
   plus harmless instructions which can be skipped (either for analysis
   purposes, or a more restrictive set that can be skipped when finding
   the end of the prologue).  */
static CORE_ADDR
arm_analyze_prologue (struct gdbarch *gdbarch,
		      CORE_ADDR prologue_start, CORE_ADDR prologue_end,
		      struct arm_prologue_cache *cache)
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR offset, current_pc;
  pv_t regs[ARM_FPS_REGNUM];
  CORE_ADDR unrecognized_pc = 0;

  /* Search the prologue looking for instructions that set up the
     frame pointer, adjust the stack pointer, and save registers.

     Be careful, however, and if it doesn't look like a prologue,
     don't try to scan it.  If, for instance, a frameless function
     begins with stmfd sp!, then we will tell ourselves there is
     a frame, which will confuse stack traceback, as well as "finish"
     and other operations that rely on a knowledge of the stack
     traceback.  */

  for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
    regs[regno] = pv_register (regno, 0);
  pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));

  for (current_pc = prologue_start;
       current_pc < prologue_end;
	= read_code_unsigned_integer (current_pc, 4, byte_order_for_code);

      if (insn == 0xe1a0c00d)			/* mov ip, sp */
	  regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];

      else if ((insn & 0xfff00000) == 0xe2800000	/* add Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
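	  /* This is the standard ARM "modified immediate" decoding: an
	     8-bit value rotated right by twice the 4-bit rotate field.
	     For example, an 8-bit value of 1 with a rotate field of 15
	     (ROT == 30) decodes to 4.  */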
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);

      else if ((insn & 0xfff00000) == 0xe2400000	/* sub Rd, Rn, #n */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  int rd = bits (insn, 12, 15);
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);

      else if ((insn & 0xffff0fff) == 0xe52d0004)	/* str Rd,
							   [sp, #-4]! */
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
	  stack.store (regs[ARM_SP_REGNUM], 4,
		       regs[bits (insn, 12, 15)]);

      else if ((insn & 0xffff0000) == 0xe92d0000)
	/* stmfd sp!, {..., fp, ip, lr, pc}
	   or
	   stmfd sp!, {a1, a2, a3, a4} */
	  int mask = insn & 0xffff;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  /* Calculate offsets of saved registers.  */
	  for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
	    if (mask & (1 << regno))
		= pv_add_constant (regs[ARM_SP_REGNUM], -4);
		stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
      else if ((insn & 0xffff0000) == 0xe54b0000	/* strb rx,[r11,#-n] */
	       || (insn & 0xffff00f0) == 0xe14b00b0	/* strh rx,[r11,#-n] */
	       || (insn & 0xffffc000) == 0xe50b0000)	/* str rx,[r11,#-n] */
	  /* No need to add this to saved_regs -- it's just an arg reg.  */

      else if ((insn & 0xffff0000) == 0xe5cd0000	/* strb rx,[sp,#n] */
	       || (insn & 0xffff00f0) == 0xe1cd00b0	/* strh rx,[sp,#n] */
	       || (insn & 0xffffc000) == 0xe58d0000)	/* str rx,[sp,#n] */
	  /* No need to add this to saved_regs -- it's just an arg reg.  */

      else if ((insn & 0xfff00000) == 0xe8800000	/* stm Rn,
							   { registers } */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	  /* No need to add this to saved_regs -- it's just arg regs.  */

      else if ((insn & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #n */
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);

      else if ((insn & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #n */
	  unsigned imm = insn & 0xff;			/* immediate value */
	  unsigned rot = (insn & 0xf00) >> 7;		/* rotate amount */
	  imm = (imm >> rot) | (imm << (32 - rot));
	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);

      else if ((insn & 0xffff7fff) == 0xed6d0103	/* stfe f?,
							   [sp, -#12]! */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	  regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
	  stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);

      else if ((insn & 0xffbf0fff) == 0xec2d0200	/* sfmfd f0, 4,
							   [sp!] */
	       && gdbarch_tdep (gdbarch)->have_fpa_registers)
	  int n_saved_fp_regs;
	  unsigned int fp_start_reg, fp_bound_reg;

	  if (stack.store_would_trash (regs[ARM_SP_REGNUM]))

	  if ((insn & 0x800) == 0x800)		/* N0 is set */
	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 3;
		n_saved_fp_regs = 1;

	      if ((insn & 0x40000) == 0x40000)	/* N1 is set */
		n_saved_fp_regs = 2;
		n_saved_fp_regs = 4;

	  fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
	  fp_bound_reg = fp_start_reg + n_saved_fp_regs;
	  for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
	      regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
	      stack.store (regs[ARM_SP_REGNUM], 12,
			   regs[fp_start_reg++]);
      else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL)  /* bl */
	  /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
	  CORE_ADDR dest = BranchDest (current_pc, insn);

	  if (skip_prologue_function (gdbarch, dest, 0))

      else if ((insn & 0xf0000000) != 0xe0000000)
	break;			/* Condition not true, exit early.  */
      else if (arm_instruction_changes_pc (insn))
	/* Don't scan past anything that might change control flow.  */
      else if (arm_instruction_restores_sp (insn))
	  /* Don't scan past the epilogue.  */

      else if ((insn & 0xfe500000) == 0xe8100000	/* ldm */
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Ignore block loads from the stack, potentially copying
	   parameters from memory.  */
      else if ((insn & 0xfc500000) == 0xe4100000
	       && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
	/* Similarly ignore single loads from the stack.  */
      else if ((insn & 0xffff0ff0) == 0xe1a00000)
	/* MOV Rd, Rm.  Skip register copies, i.e. saves to another
	   register instead of the stack.  */

	  /* The optimizer might shove anything into the prologue.  If we
	     build up the cache (cache != NULL) from scanning the prologue,
	     we just skip what we don't recognize and scan further, to make
	     the cache as complete as possible.  However, if we skip the
	     prologue, we'll stop immediately on an unrecognized
	     instruction.  */
	  unrecognized_pc = current_pc;
  if (unrecognized_pc == 0)
    unrecognized_pc = current_pc;

      int framereg, framesize;

      /* The frame size is just the distance from the frame register
	 to the original stack pointer.  */
      if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
	  /* Frame pointer is fp.  */
	  framereg = ARM_FP_REGNUM;
	  framesize = -regs[ARM_FP_REGNUM].k;

	  /* Try the stack pointer... this is a bit desperate.  */
	  framereg = ARM_SP_REGNUM;
	  framesize = -regs[ARM_SP_REGNUM].k;

      cache->framereg = framereg;
      cache->framesize = framesize;

      for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
	if (stack.find_reg (gdbarch, regno, &offset))
	  cache->saved_regs[regno].addr = offset;

    fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
			paddress (gdbarch, unrecognized_pc));

  return unrecognized_pc;
arm_scan_prologue (struct frame_info *this_frame,
		   struct arm_prologue_cache *cache)
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR prologue_start, prologue_end;
  CORE_ADDR prev_pc = get_frame_pc (this_frame);
  CORE_ADDR block_addr = get_frame_address_in_block (this_frame);

  /* Assume there is no frame until proven otherwise.  */
  cache->framereg = ARM_SP_REGNUM;
  cache->framesize = 0;

  /* Check for Thumb prologue.  */
  if (arm_frame_is_thumb (this_frame))
      thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);

  /* Find the function prologue.  If we can't find the function in
     the symbol table, peek in the stack frame to find the PC.  */
  if (find_pc_partial_function (block_addr, NULL, &prologue_start,
				&prologue_end))
      /* One way to find the end of the prologue (which works well
	 for unoptimized code) is to do the following:

	    struct symtab_and_line sal = find_pc_line (prologue_start, 0);

	    if (sal.line == 0)
	      prologue_end = prev_pc;
	    else if (sal.end < prologue_end)
	      prologue_end = sal.end;

	 This mechanism is very accurate so long as the optimizer
	 doesn't move any instructions from the function body into the
	 prologue.  If this happens, sal.end will be the last
	 instruction in the first hunk of prologue code just before
	 the first instruction that the scheduler has moved from
	 the body to the prologue.

	 In order to make sure that we scan all of the prologue
	 instructions, we use a slightly less accurate mechanism which
	 may scan more than necessary.  To help compensate for this
	 lack of accuracy, the prologue scanning loop below contains
	 several clauses which'll cause the loop to terminate early if
	 an implausible prologue instruction is encountered.

	 The limit of prologue_start + 64 used below
	 is a suitable endpoint since it accounts for the largest
	 possible prologue plus up to five instructions inserted by
	 the scheduler.  */

      if (prologue_end > prologue_start + 64)
	  prologue_end = prologue_start + 64;	/* See above.  */
      /* We have no symbol information.  Our only option is to assume this
	 function has a standard stack frame and the normal frame register.
	 Then, we can find the value of our frame pointer on entrance to
	 the callee (or at the present moment if this is the innermost frame).
	 The value stored there should be the address of the stmfd + 8.  */
      CORE_ADDR frame_loc;
      ULONGEST return_value;

      /* AAPCS does not use a frame register, so we can abort here.  */
      if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)

      frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
      if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,

	  prologue_start = gdbarch_addr_bits_remove
			     (gdbarch, return_value) - 8;
	  prologue_end = prologue_start + 64;	/* See above.  */

  if (prev_pc < prologue_end)
    prologue_end = prev_pc;

  arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
static struct arm_prologue_cache *
arm_make_prologue_cache (struct frame_info *this_frame)
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_fp;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  arm_scan_prologue (this_frame, cache);

  unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
  if (unwound_fp == 0)

  cache->prev_sp = unwound_fp + cache->framesize;

  /* Calculate actual addresses of saved registers using offsets
     determined by arm_scan_prologue.  */
  for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
    if (trad_frame_addr_p (cache->saved_regs, reg))
      cache->saved_regs[reg].addr += cache->prev_sp;

/* Implementation of the stop_reason hook for arm_prologue frames.  */

static enum unwind_stop_reason
arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* This is meant to halt the backtrace at "_start".  */
  pc = get_frame_pc (this_frame);
  if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
    return UNWIND_OUTERMOST;

  /* If we've hit a wall, stop.  */
  if (cache->prev_sp == 0)
    return UNWIND_OUTERMOST;

  return UNWIND_NO_REASON;
/* Our frame ID for a normal frame is the current function's starting PC
   and the caller's SP when we were called.  */

arm_prologue_this_id (struct frame_info *this_frame,
		      struct frame_id *this_id)
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* Use function start address as part of the frame ID.  If we cannot
     identify the start address (due to missing symbol information),
     fall back to just using the current PC.  */
  pc = get_frame_pc (this_frame);
  func = get_frame_func (this_frame);

  id = frame_id_build (cache->prev_sp, func);

static struct value *
arm_prologue_prev_register (struct frame_info *this_frame,
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* If we are asked to unwind the PC, then we need to return the LR
     instead.  The prologue may save PC, but it will point into this
     frame's prologue, not the next frame's resume location.  Also
     strip the saved T bit.  A valid LR may have the low bit set, but
     a valid PC never does.  */
  if (prev_regnum == ARM_PC_REGNUM)
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, prev_regnum,
					arm_addr_bits_remove (gdbarch, lr));

  /* SP is generally not saved to the stack, but this frame is
     identified by the next frame's stack pointer at the time of the call.
     The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);

  /* The CPSR may have been changed by the call instruction and by the
     called function.  The only bit we can reconstruct is the T bit,
     by checking the low bit of LR as of the call.  This is a reliable
     indicator of Thumb-ness except for some ARM v4T pre-interworking
     Thumb code, which could get away with a clear low bit as long as
     the called function did not use bx.  Guess that all other
     bits are unchanged; the condition flags are presumably lost,
     but the processor status is likely valid.  */
  if (prev_regnum == ARM_PS_REGNUM)
      ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

      cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))

      return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);

  return trad_frame_get_prev_register (this_frame, cache->saved_regs,

struct frame_unwind arm_prologue_unwind = {
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  default_frame_sniffer
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

struct arm_exidx_entry
{
  bfd_vma addr;
  gdb_byte *entry;

  bool operator< (const arm_exidx_entry &other) const
  {
    return addr < other.addr;
  }
};

struct arm_exidx_data
{
  std::vector<std::vector<arm_exidx_entry>> section_maps;
};

static const struct objfile_key<arm_exidx_data> arm_exidx_data_key;
static struct obj_section *
arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
  struct obj_section *osect;

  ALL_OBJFILE_OSECTIONS (objfile, osect)
    if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
	bfd_vma start, size;
	start = bfd_section_vma (osect->the_bfd_section);
	size = bfd_section_size (osect->the_bfd_section);

	if (start <= vma && vma < start + size)
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */
arm_exidx_new_objfile (struct objfile *objfile)
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || arm_exidx_data_key.get (objfile) != NULL)

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  gdb::byte_vector exidx_data;
      exidx_vma = bfd_section_vma (exidx);
      exidx_data.resize (bfd_section_size (exidx));

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data.data (), 0,
				     exidx_data.size ()))

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  gdb::byte_vector extab_data;
      extab_vma = bfd_section_vma (extab);
      extab_data.resize (bfd_section_size (extab));

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data.data (), 0,
				     extab_data.size ()))

  /* Allocate exception table data structure.  */
  data = arm_exidx_data_key.emplace (objfile);
  data->section_maps.resize (objfile->obfd->section_count);

  /* Fill in exception table.  */
  for (i = 0; i < exidx_data.size () / 8; i++)
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd,
				  exidx_data.data () + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  */
2093 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2094 idx += exidx_vma + i * 8;
2096 /* Find section containing function and compute section offset. */
2097 sec = arm_obj_section_from_vma (objfile, idx);
2100 idx -= bfd_section_vma (sec->the_bfd_section);
2102 /* Determine address of exception table entry. */
2105 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2107 else if ((val & 0xff000000) == 0x80000000)
2109 /* Exception table entry embedded in .ARM.exidx
2110 -- must be short form. */
2114 else if (!(val & 0x80000000))
2116 /* Exception table entry in .ARM.extab. */
2117 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2118 addr += exidx_vma + i * 8 + 4;
2120 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2122 word = bfd_h_get_32 (objfile->obfd,
2123 extab_data.data () + addr - extab_vma);
2126 if ((word & 0xff000000) == 0x80000000)
2131 else if ((word & 0xff000000) == 0x81000000
2132 || (word & 0xff000000) == 0x82000000)
2136 n_words = ((word >> 16) & 0xff);
2138 else if (!(word & 0x80000000))
2141 struct obj_section *pers_sec;
2142 int gnu_personality = 0;
2144 /* Custom personality routine. */
2145 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2146 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2148 /* Check whether we've got one of the variants of the
2149 GNU personality routines. */
2150 pers_sec = arm_obj_section_from_vma (objfile, pers);
2153 static const char *personality[] =
2155 "__gcc_personality_v0",
2156 "__gxx_personality_v0",
2157 "__gcj_personality_v0",
2158 "__gnu_objc_personality_v0",
2162 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2165 for (k = 0; personality[k]; k++)
2166 if (lookup_minimal_symbol_by_pc_name
2167 (pc, personality[k], objfile))
2169 gnu_personality = 1;
2174 /* If so, the next word contains a word count in the high
2175 byte, followed by the same unwind instructions as the
2176 pre-defined forms. */
2178 && addr + 4 <= extab_vma + extab_data.size ())
2180 word = bfd_h_get_32 (objfile->obfd,
2182 + addr - extab_vma));
2185 n_words = ((word >> 24) & 0xff);
2191 /* Sanity check address. */
2193 if (addr < extab_vma
2194 || addr + 4 * n_words > extab_vma + extab_data.size ())
2195 n_words = n_bytes = 0;
2197 /* The unwind instructions reside in WORD (only the N_BYTES least
2198 significant bytes are valid), followed by N_WORDS words in the
2199 extab section starting at ADDR. */
2200 if (n_bytes || n_words)
2203 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2204 n_bytes + n_words * 4 + 1);
2207 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2211 word = bfd_h_get_32 (objfile->obfd,
2212 extab_data.data () + addr - extab_vma);
2215 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2216 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2217 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2218 *p++ = (gdb_byte) (word & 0xff);
2221 /* Implied "Finish" to terminate the list. */
2225 /* Push the entry onto the vector. Entries are guaranteed to always
2226 appear in order of increasing addresses. */
2227 new_exidx_entry.addr = idx;
2228 new_exidx_entry.entry = entry;
2229 data->section_maps[sec->the_bfd_section->index].push_back
2234 /* Search for the exception table entry covering MEMADDR. If one is found,
2235 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2236 set *START to the start of the region covered by this entry. */
2239 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2241 struct obj_section *sec;
2243 sec = find_pc_section (memaddr);
2246 struct arm_exidx_data *data;
2247 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2249 data = arm_exidx_data_key.get (sec->objfile);
2252 std::vector<arm_exidx_entry> &map
2253 = data->section_maps[sec->the_bfd_section->index];
2256 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2258 /* std::lower_bound finds the earliest ordered insertion
2259 point. If the following entry starts at this exact
2260 address, we use it; otherwise, the preceding
2261 exception table entry covers this address. */
2262 if (idx < map.end ())
2264 if (idx->addr == map_key.addr)
2267 *start = idx->addr + obj_section_addr (sec);
2272 if (idx > map.begin ())
2276 *start = idx->addr + obj_section_addr (sec);
2286 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2287 instruction list from the ARM exception table entry ENTRY, allocate and
2288 return a prologue cache structure describing how to unwind this frame.
2290 Return NULL if the unwinding instruction list contains a "spare",
2291 "reserved" or "refuse to unwind" instruction as defined in section
2292 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2293 for the ARM Architecture" document. */
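/* Worked example (editorial addition, not in the original source): for a
   cached instruction list of { 0xa8, 0xb0 }, the decode loop below does
   roughly the following, starting with VSP equal to this frame's SP:

     0xa8  (1010 0xxx, count 0, "pop LR" flag set):
             saved_regs[r4].addr = vsp;  vsp += 4;
             saved_regs[lr].addr = vsp;  vsp += 4;
     0xb0  ("finish"): PC was not popped explicitly, so it is copied
             from the LR entry and decoding stops.

   PREV_SP then becomes the final value of VSP.  */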
2295 static struct arm_prologue_cache *
2296 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2301 struct arm_prologue_cache *cache;
2302 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2303 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2309 /* Whenever we reload SP, we have to retrieve its actual
2310 value in the current frame. */
2313 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2315 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2316 vsp = get_frame_register_unsigned (this_frame, reg);
2320 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2321 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2327 /* Decode next unwind instruction. */
2330 if ((insn & 0xc0) == 0)
2332 int offset = insn & 0x3f;
2333 vsp += (offset << 2) + 4;
2335 else if ((insn & 0xc0) == 0x40)
2337 int offset = insn & 0x3f;
2338 vsp -= (offset << 2) + 4;
2340 else if ((insn & 0xf0) == 0x80)
2342 int mask = ((insn & 0xf) << 8) | *entry++;
2345 /* The special case of an all-zero mask identifies
2346 "Refuse to unwind". We return NULL to fall back
2347 to the prologue analyzer. */
2351 /* Pop registers r4..r15 under mask. */
2352 for (i = 0; i < 12; i++)
2353 if (mask & (1 << i))
2355 cache->saved_regs[4 + i].addr = vsp;
2359 /* Special-case popping SP -- we need to reload vsp. */
2360 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2363 else if ((insn & 0xf0) == 0x90)
2365 int reg = insn & 0xf;
2367 /* Reserved cases. */
2368 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2371 /* Set SP from another register and mark VSP for reload. */
2372 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2375 else if ((insn & 0xf0) == 0xa0)
2377 int count = insn & 0x7;
2378 int pop_lr = (insn & 0x8) != 0;
2381 /* Pop r4..r[4+count]. */
2382 for (i = 0; i <= count; i++)
2384 cache->saved_regs[4 + i].addr = vsp;
2388 /* If indicated by flag, pop LR as well. */
2391 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2395 else if (insn == 0xb0)
2397 /* We could only have updated PC by popping into it; if so, it
2398 will show up as an address. Otherwise, copy LR into PC. */
2399 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2400 cache->saved_regs[ARM_PC_REGNUM]
2401 = cache->saved_regs[ARM_LR_REGNUM];
2406 else if (insn == 0xb1)
2408 int mask = *entry++;
2411 /* An all-zero mask or a mask >= 16 is "spare". */
2412 if (mask == 0 || mask >= 16)
2415 /* Pop r0..r3 under mask. */
2416 for (i = 0; i < 4; i++)
2417 if (mask & (1 << i))
2419 cache->saved_regs[i].addr = vsp;
2423 else if (insn == 0xb2)
2425 ULONGEST offset = 0;
2430 offset |= (*entry & 0x7f) << shift;
2433 while (*entry++ & 0x80);
2435 vsp += 0x204 + (offset << 2);
2437 else if (insn == 0xb3)
2439 int start = *entry >> 4;
2440 int count = (*entry++) & 0xf;
2443 /* Only registers D0..D15 are valid here. */
2444 if (start + count >= 16)
2447 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2448 for (i = 0; i <= count; i++)
2450 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2454 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2457 else if ((insn & 0xf8) == 0xb8)
2459 int count = insn & 0x7;
2462 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2463 for (i = 0; i <= count; i++)
2465 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2469 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2472 else if (insn == 0xc6)
2474 int start = *entry >> 4;
2475 int count = (*entry++) & 0xf;
2478 /* Only registers WR0..WR15 are valid. */
2479 if (start + count >= 16)
2482 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2483 for (i = 0; i <= count; i++)
2485 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2489 else if (insn == 0xc7)
2491 int mask = *entry++;
2494 /* An all-zero mask or a mask >= 16 is "spare". */
2495 if (mask == 0 || mask >= 16)
2498 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2499 for (i = 0; i < 4; i++)
2500 if (mask & (1 << i))
2502 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2506 else if ((insn & 0xf8) == 0xc0)
2508 int count = insn & 0x7;
2511 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2512 for (i = 0; i <= count; i++)
2514 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2518 else if (insn == 0xc8)
2520 int start = *entry >> 4;
2521 int count = (*entry++) & 0xf;
2524 /* Only registers D0..D31 are valid. */
2525 if (start + count >= 16)
2528 /* Pop VFP double-precision registers
2529 D[16+start]..D[16+start+count]. */
2530 for (i = 0; i <= count; i++)
2532 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2536 else if (insn == 0xc9)
2538 int start = *entry >> 4;
2539 int count = (*entry++) & 0xf;
2542 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2543 for (i = 0; i <= count; i++)
2545 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2549 else if ((insn & 0xf8) == 0xd0)
2551 int count = insn & 0x7;
2554 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2555 for (i = 0; i <= count; i++)
2557 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2563 /* Everything else is "spare". */
2568 /* If we restore SP from a register, assume this was the frame register.
2569 Otherwise just fall back to SP as the frame register. */
2570 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2571 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2573 cache->framereg = ARM_SP_REGNUM;
2575 /* Determine offset to previous frame. */
2577 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2579 /* We already got the previous SP. */
2580 cache->prev_sp = vsp;
2585 /* Unwinding via ARM exception table entries. Note that the sniffer
2586 already computes a filled-in prologue cache, which is then used
2587 with the same arm_prologue_this_id and arm_prologue_prev_register
2588 routines also used for prologue-parsing based unwinding. */
2591 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2592 struct frame_info *this_frame,
2593 void **this_prologue_cache)
2595 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2596 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2597 CORE_ADDR addr_in_block, exidx_region, func_start;
2598 struct arm_prologue_cache *cache;
2601 /* See if we have an ARM exception table entry covering this address. */
2602 addr_in_block = get_frame_address_in_block (this_frame);
2603 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2607 /* The ARM exception table does not describe unwind information
2608 for arbitrary PC values, but is guaranteed to be correct only
2609 at call sites. We have to decide here whether we want to use
2610 ARM exception table information for this frame, or fall back
2611 to using prologue parsing. (Note that if we have DWARF CFI,
2612 this sniffer isn't even called -- CFI is always preferred.)
2614 Before we make this decision, however, we check whether we
2615 actually have *symbol* information for the current frame.
2616 If not, prologue parsing would not work anyway, so we might
2617 as well use the exception table and hope for the best. */
2618 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2622 /* If the next frame is "normal", we are at a call site in this
2623 frame, so exception information is guaranteed to be valid. */
2624 if (get_next_frame (this_frame)
2625 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2628 /* We also assume exception information is valid if we're currently
2629 blocked in a system call. The system library is supposed to
2630 ensure this, so that e.g. pthread cancellation works. */
2631 if (arm_frame_is_thumb (this_frame))
2635 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2636 2, byte_order_for_code, &insn)
2637 && (insn & 0xff00) == 0xdf00 /* svc */)
2644 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2645 4, byte_order_for_code, &insn)
2646 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2650 /* Bail out if we don't know that exception information is valid. */
2654 /* The ARM exception index does not mark the *end* of the region
2655 covered by the entry, and some functions will not have any entry.
2656 To correctly recognize the end of the covered region, the linker
2657 should have inserted dummy records with a CANTUNWIND marker.
2659 Unfortunately, current versions of GNU ld do not reliably do
2660 this, and thus we may have found an incorrect entry above.
2661 As a (temporary) sanity check, we only use the entry if it
2662 lies *within* the bounds of the function. Note that this check
2663 might reject perfectly valid entries that just happen to cover
2664 multiple functions; therefore this check ought to be removed
2665 once the linker is fixed. */
2666 if (func_start > exidx_region)
2670 /* Decode the list of unwinding instructions into a prologue cache.
2671 Note that this may fail due to e.g. a "refuse to unwind" code. */
2672 cache = arm_exidx_fill_cache (this_frame, entry);
2676 *this_prologue_cache = cache;
2680 struct frame_unwind arm_exidx_unwind = {
2682 default_frame_unwind_stop_reason,
2683 arm_prologue_this_id,
2684 arm_prologue_prev_register,
2686 arm_exidx_unwind_sniffer
2689 static struct arm_prologue_cache *
2690 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2692 struct arm_prologue_cache *cache;
2695 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2696 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2698 /* Still rely on the offset calculated from prologue. */
2699 arm_scan_prologue (this_frame, cache);
2701 /* Since we are in epilogue, the SP has been restored. */
2702 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2704 /* Calculate actual addresses of saved registers using offsets
2705 determined by arm_scan_prologue. */
2706 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2707 if (trad_frame_addr_p (cache->saved_regs, reg))
2708 cache->saved_regs[reg].addr += cache->prev_sp;
2713 /* Implementation of function hook 'this_id' in
2714 'struct frame_unwind' for the epilogue unwinder. */
2717 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2719 struct frame_id *this_id)
2721 struct arm_prologue_cache *cache;
2724 if (*this_cache == NULL)
2725 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2726 cache = (struct arm_prologue_cache *) *this_cache;
2728 /* Use function start address as part of the frame ID. If we cannot
2729 identify the start address (due to missing symbol information),
2730 fall back to just using the current PC. */
2731 pc = get_frame_pc (this_frame);
2732 func = get_frame_func (this_frame);
2736 (*this_id) = frame_id_build (cache->prev_sp, pc);
2739 /* Implementation of function hook 'prev_register' in
2740 'struct frame_unwind' for the epilogue unwinder. */
2742 static struct value *
2743 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2744 void **this_cache, int regnum)
2746 if (*this_cache == NULL)
2747 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2749 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2752 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2754 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2757 /* Implementation of function hook 'sniffer' in
2758 'struct frame_unwind' for the epilogue unwinder. */
2761 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2762 struct frame_info *this_frame,
2763 void **this_prologue_cache)
2765 if (frame_relative_level (this_frame) == 0)
2767 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2768 CORE_ADDR pc = get_frame_pc (this_frame);
2770 if (arm_frame_is_thumb (this_frame))
2771 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2773 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2779 /* Frame unwinder from epilogue. */
2781 static const struct frame_unwind arm_epilogue_frame_unwind =
2784 default_frame_unwind_stop_reason,
2785 arm_epilogue_frame_this_id,
2786 arm_epilogue_frame_prev_register,
2788 arm_epilogue_frame_sniffer,
2791 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2792 trampoline, return the target PC. Otherwise return 0.
2794 void call0a (char c, short s, int i, long l) {}
2798 (*pointer_to_call0a) (c, s, i, l);
2801 Instead of calling a stub library function _call_via_xx (xx is
2802 the register name), GCC may inline the trampoline in the object
2803 file as below (register r2 has the address of call0a).
2806 .type main, %function
2815 The trampoline 'bx r2' doesn't belong to main. */
2818 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2820 /* The heuristic for recognizing such a trampoline is that FRAME is
2821 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2822 if (arm_frame_is_thumb (frame))
2826 if (target_read_memory (pc, buf, 2) == 0)
2828 struct gdbarch *gdbarch = get_frame_arch (frame);
2829 enum bfd_endian byte_order_for_code
2830 = gdbarch_byte_order_for_code (gdbarch);
2832 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2834 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2837 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2839 /* Clear the LSB so that gdb core sets step-resume
2840 breakpoint at the right address. */
2841 return UNMAKE_THUMB_ADDR (dest);
2849 static struct arm_prologue_cache *
2850 arm_make_stub_cache (struct frame_info *this_frame)
2852 struct arm_prologue_cache *cache;
2854 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2855 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2857 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2862 /* Our frame ID for a stub frame is the current SP and LR. */
2865 arm_stub_this_id (struct frame_info *this_frame,
2867 struct frame_id *this_id)
2869 struct arm_prologue_cache *cache;
2871 if (*this_cache == NULL)
2872 *this_cache = arm_make_stub_cache (this_frame);
2873 cache = (struct arm_prologue_cache *) *this_cache;
2875 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2879 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2880 struct frame_info *this_frame,
2881 void **this_prologue_cache)
2883 CORE_ADDR addr_in_block;
2885 CORE_ADDR pc, start_addr;
2888 addr_in_block = get_frame_address_in_block (this_frame);
2889 pc = get_frame_pc (this_frame);
2890 if (in_plt_section (addr_in_block)
2891 /* We also use the stub unwinder if the target memory is unreadable,
2892 to avoid having the prologue unwinder try to read it. */
2893 || target_read_memory (pc, dummy, 4) != 0)
2896 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2897 && arm_skip_bx_reg (this_frame, pc) != 0)
2903 struct frame_unwind arm_stub_unwind = {
2905 default_frame_unwind_stop_reason,
2907 arm_prologue_prev_register,
2909 arm_stub_unwind_sniffer
2912 /* Put here the code to store, into CACHE->saved_regs, the addresses
2913 of the saved registers of frame described by THIS_FRAME. CACHE is
2916 static struct arm_prologue_cache *
2917 arm_m_exception_cache (struct frame_info *this_frame)
2919 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2920 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2921 struct arm_prologue_cache *cache;
2922 CORE_ADDR unwound_sp;
2925 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2926 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2928 unwound_sp = get_frame_register_unsigned (this_frame,
2931 /* The hardware saves eight 32-bit words, comprising xPSR,
2932 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2933 "B1.5.6 Exception entry behavior" in
2934 "ARMv7-M Architecture Reference Manual". */
2935 cache->saved_regs[0].addr = unwound_sp;
2936 cache->saved_regs[1].addr = unwound_sp + 4;
2937 cache->saved_regs[2].addr = unwound_sp + 8;
2938 cache->saved_regs[3].addr = unwound_sp + 12;
2939 cache->saved_regs[12].addr = unwound_sp + 16;
2940 cache->saved_regs[14].addr = unwound_sp + 20;
2941 cache->saved_regs[15].addr = unwound_sp + 24;
2942 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2944 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2945 aligner between the top of the 32-byte stack frame and the
2946 previous context's stack pointer. */
2947 cache->prev_sp = unwound_sp + 32;
2948 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2949 && (xpsr & (1 << 9)) != 0)
2950 cache->prev_sp += 4;
2955 /* Implementation of function hook 'this_id' in
2956 'struct frame_unwind'. */
2959 arm_m_exception_this_id (struct frame_info *this_frame,
2961 struct frame_id *this_id)
2963 struct arm_prologue_cache *cache;
2965 if (*this_cache == NULL)
2966 *this_cache = arm_m_exception_cache (this_frame);
2967 cache = (struct arm_prologue_cache *) *this_cache;
2969 /* Our frame ID for an exception frame is the previous SP and the current PC. */
2970 *this_id = frame_id_build (cache->prev_sp,
2971 get_frame_pc (this_frame));
2974 /* Implementation of function hook 'prev_register' in
2975 'struct frame_unwind'. */
2977 static struct value *
2978 arm_m_exception_prev_register (struct frame_info *this_frame,
2982 struct arm_prologue_cache *cache;
2984 if (*this_cache == NULL)
2985 *this_cache = arm_m_exception_cache (this_frame);
2986 cache = (struct arm_prologue_cache *) *this_cache;
2988 /* The value was already reconstructed into PREV_SP. */
2989 if (prev_regnum == ARM_SP_REGNUM)
2990 return frame_unwind_got_constant (this_frame, prev_regnum,
2993 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2997 /* Implementation of function hook 'sniffer' in
2998 'struct frame_unwind'. */
3001 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3002 struct frame_info *this_frame,
3003 void **this_prologue_cache)
3005 CORE_ADDR this_pc = get_frame_pc (this_frame);
3007 /* No need to check is_m; this sniffer is only registered for
3008 M-profile architectures. */
3010 /* Check if exception frame returns to a magic PC value. */
3011 return arm_m_addr_is_magic (this_pc);
3014 /* Frame unwinder for M-profile exceptions. */
3016 struct frame_unwind arm_m_exception_unwind =
3019 default_frame_unwind_stop_reason,
3020 arm_m_exception_this_id,
3021 arm_m_exception_prev_register,
3023 arm_m_exception_unwind_sniffer
3027 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3029 struct arm_prologue_cache *cache;
3031 if (*this_cache == NULL)
3032 *this_cache = arm_make_prologue_cache (this_frame);
3033 cache = (struct arm_prologue_cache *) *this_cache;
3035 return cache->prev_sp - cache->framesize;
3038 struct frame_base arm_normal_base = {
3039 &arm_prologue_unwind,
3040 arm_normal_frame_base,
3041 arm_normal_frame_base,
3042 arm_normal_frame_base
3045 static struct value *
3046 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3049 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3051 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3056 /* The PC is normally copied from the return column, which
3057 describes saves of LR. However, that version may have an
3058 extra bit set to indicate Thumb state. The bit is not
3060 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3061 return frame_unwind_got_constant (this_frame, regnum,
3062 arm_addr_bits_remove (gdbarch, lr));
3065 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3066 cpsr = get_frame_register_unsigned (this_frame, regnum);
3067 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3068 if (IS_THUMB_ADDR (lr))
3072 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3075 internal_error (__FILE__, __LINE__,
3076 _("Unexpected register %d"), regnum);
3081 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3082 struct dwarf2_frame_state_reg *reg,
3083 struct frame_info *this_frame)
3089 reg->how = DWARF2_FRAME_REG_FN;
3090 reg->loc.fn = arm_dwarf2_prev_register;
3093 reg->how = DWARF2_FRAME_REG_CFA;
3098 /* Implement the stack_frame_destroyed_p gdbarch method. */
3101 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3103 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3104 unsigned int insn, insn2;
3105 int found_return = 0, found_stack_adjust = 0;
3106 CORE_ADDR func_start, func_end;
3110 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3113 /* The epilogue is a sequence of instructions along the following lines:
3115 - add stack frame size to SP or FP
3116 - [if frame pointer used] restore SP from FP
3117 - restore registers from SP [may include PC]
3118 - a return-type instruction [if PC wasn't already restored]
3120 In a first pass, we scan forward from the current PC and verify the
3121 instructions we find as compatible with this sequence, ending in a
3124 However, this is not sufficient to distinguish indirect function calls
3125 within a function from indirect tail calls in the epilogue in some cases.
3126 Therefore, if we didn't already find any SP-changing instruction during
3127 forward scan, we add a backward scanning heuristic to ensure we actually
3128 are in the epilogue. */
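/* Example (editorial addition, not in the original source): a typical
   Thumb epilogue that the forward scan accepts looks like

       add  sp, #16          ; restores SP (thumb_instruction_restores_sp)
       pop  {r4, r5, pc}     ; 0xbdxx, sets found_return

   If the forward scan sees only a return-type instruction such as
   "bx r3" with no SP adjustment, the backward scan further below is
   what decides whether we really are in an epilogue.  */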
3131 while (scan_pc < func_end && !found_return)
3133 if (target_read_memory (scan_pc, buf, 2))
3137 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3139 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3141 else if (insn == 0x46f7) /* mov pc, lr */
3143 else if (thumb_instruction_restores_sp (insn))
3145 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3148 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3150 if (target_read_memory (scan_pc, buf, 2))
3154 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3156 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3158 if (insn2 & 0x8000) /* <registers> include PC. */
3161 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3162 && (insn2 & 0x0fff) == 0x0b04)
3164 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3167 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3168 && (insn2 & 0x0e00) == 0x0a00)
3180 /* Since any instruction in the epilogue sequence, with the possible
3181 exception of return itself, updates the stack pointer, we need to
3182 scan backwards for at most one instruction. Try either a 16-bit or
3183 a 32-bit instruction. This is just a heuristic, so we do not worry
3184 too much about false positives. */
3186 if (pc - 4 < func_start)
3188 if (target_read_memory (pc - 4, buf, 4))
3191 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3192 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3194 if (thumb_instruction_restores_sp (insn2))
3195 found_stack_adjust = 1;
3196 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3197 found_stack_adjust = 1;
3198 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3199 && (insn2 & 0x0fff) == 0x0b04)
3200 found_stack_adjust = 1;
3201 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3202 && (insn2 & 0x0e00) == 0x0a00)
3203 found_stack_adjust = 1;
3205 return found_stack_adjust;
3209 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3211 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3214 CORE_ADDR func_start, func_end;
3216 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3219 /* We are in the epilogue if the previous instruction was a stack
3220 adjustment and the next instruction is a possible return (bx, mov
3221 pc, or pop). We could have to scan backwards to find the stack
3222 adjustment, or forwards to find the return, but this is a decent
3223 approximation. First scan forwards. */
3226 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3227 if (bits (insn, 28, 31) != INST_NV)
3229 if ((insn & 0x0ffffff0) == 0x012fff10)
3232 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3235 else if ((insn & 0x0fff0000) == 0x08bd0000
3236 && (insn & 0x0000c000) != 0)
3237 /* POP (LDMIA), including PC or LR. */
3244 /* Scan backwards. This is just a heuristic, so do not worry about
3245 false positives from mode changes. */
3247 if (pc < func_start + 4)
3250 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3251 if (arm_instruction_restores_sp (insn))
3257 /* Implement the stack_frame_destroyed_p gdbarch method. */
3260 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3262 if (arm_pc_is_thumb (gdbarch, pc))
3263 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3265 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3268 /* When arguments must be pushed onto the stack, they go on in reverse
3269 order. The code below implements a FILO (stack) to do this. */
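/* Illustrative note (editorial addition): in arm_push_dummy_call below,
   arguments are pushed onto this list in left-to-right order while the
   required stack size is computed; the list is then drained
   last-pushed-first while SP is decremented, so the first argument ends
   up at the lowest address, i.e. at the final SP.  */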
3274 struct stack_item *prev;
3278 static struct stack_item *
3279 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3281 struct stack_item *si;
3282 si = XNEW (struct stack_item);
3283 si->data = (gdb_byte *) xmalloc (len);
3286 memcpy (si->data, contents, len);
3290 static struct stack_item *
3291 pop_stack_item (struct stack_item *si)
3293 struct stack_item *dead = si;
3300 /* Implement the gdbarch type alignment method, overrides the generic
3301 alignment algorithm for anything that is arm specific. */
3304 arm_type_align (gdbarch *gdbarch, struct type *t)
3306 t = check_typedef (t);
3307 if (TYPE_CODE (t) == TYPE_CODE_ARRAY && TYPE_VECTOR (t))
3309 /* Use the natural alignment for vector types (the same as for the
3310 element scalar type), but the maximum alignment is 64 bits. */
3311 if (TYPE_LENGTH (t) > 8)
3314 return TYPE_LENGTH (t);
3317 /* Allow the common code to calculate the alignment. */
3321 /* Possible base types for a candidate for passing and returning in
3324 enum arm_vfp_cprc_base_type
3333 /* The length of one element of base type B. */
3336 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3340 case VFP_CPRC_SINGLE:
3342 case VFP_CPRC_DOUBLE:
3344 case VFP_CPRC_VEC64:
3346 case VFP_CPRC_VEC128:
3349 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3354 /* The character ('s', 'd' or 'q') for the type of VFP register used
3355 for passing base type B. */
3358 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3362 case VFP_CPRC_SINGLE:
3364 case VFP_CPRC_DOUBLE:
3366 case VFP_CPRC_VEC64:
3368 case VFP_CPRC_VEC128:
3371 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3376 /* Determine whether T may be part of a candidate for passing and
3377 returning in VFP registers, ignoring the limit on the total number
3378 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3379 classification of the first valid component found; if it is not
3380 VFP_CPRC_UNKNOWN, all components must have the same classification
3381 as *BASE_TYPE. If it is found that T contains a type not permitted
3382 for passing and returning in VFP registers, a type differently
3383 classified from *BASE_TYPE, or two types differently classified
3384 from each other, return -1, otherwise return the total number of
3385 base-type elements found (possibly 0 in an empty structure or
3386 array). Vector types are not currently supported, matching the
3387 generic AAPCS support. */
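/* Worked examples (editorial addition, not in the original source),
   following the recursion below:

     struct { double re, im; }      -> base type VFP_CPRC_DOUBLE, count 2
     float[3]                       -> base type VFP_CPRC_SINGLE, count 3
     struct { float f; double d; }  -> -1 (members classify differently)  */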
3390 arm_vfp_cprc_sub_candidate (struct type *t,
3391 enum arm_vfp_cprc_base_type *base_type)
3393 t = check_typedef (t);
3394 switch (TYPE_CODE (t))
3397 switch (TYPE_LENGTH (t))
3400 if (*base_type == VFP_CPRC_UNKNOWN)
3401 *base_type = VFP_CPRC_SINGLE;
3402 else if (*base_type != VFP_CPRC_SINGLE)
3407 if (*base_type == VFP_CPRC_UNKNOWN)
3408 *base_type = VFP_CPRC_DOUBLE;
3409 else if (*base_type != VFP_CPRC_DOUBLE)
3418 case TYPE_CODE_COMPLEX:
3419 /* Arguments of complex T where T is one of the types float or
3420 double get treated as if they are implemented as:
3429 switch (TYPE_LENGTH (t))
3432 if (*base_type == VFP_CPRC_UNKNOWN)
3433 *base_type = VFP_CPRC_SINGLE;
3434 else if (*base_type != VFP_CPRC_SINGLE)
3439 if (*base_type == VFP_CPRC_UNKNOWN)
3440 *base_type = VFP_CPRC_DOUBLE;
3441 else if (*base_type != VFP_CPRC_DOUBLE)
3450 case TYPE_CODE_ARRAY:
3452 if (TYPE_VECTOR (t))
3454 /* A 64-bit or 128-bit containerized vector type is a VFP CPRC. */
3456 switch (TYPE_LENGTH (t))
3459 if (*base_type == VFP_CPRC_UNKNOWN)
3460 *base_type = VFP_CPRC_VEC64;
3463 if (*base_type == VFP_CPRC_UNKNOWN)
3464 *base_type = VFP_CPRC_VEC128;
3475 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3479 if (TYPE_LENGTH (t) == 0)
3481 gdb_assert (count == 0);
3484 else if (count == 0)
3486 unitlen = arm_vfp_cprc_unit_length (*base_type);
3487 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3488 return TYPE_LENGTH (t) / unitlen;
3493 case TYPE_CODE_STRUCT:
3498 for (i = 0; i < TYPE_NFIELDS (t); i++)
3502 if (!field_is_static (&TYPE_FIELD (t, i)))
3503 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3505 if (sub_count == -1)
3509 if (TYPE_LENGTH (t) == 0)
3511 gdb_assert (count == 0);
3514 else if (count == 0)
3516 unitlen = arm_vfp_cprc_unit_length (*base_type);
3517 if (TYPE_LENGTH (t) != unitlen * count)
3522 case TYPE_CODE_UNION:
3527 for (i = 0; i < TYPE_NFIELDS (t); i++)
3529 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3531 if (sub_count == -1)
3533 count = (count > sub_count ? count : sub_count);
3535 if (TYPE_LENGTH (t) == 0)
3537 gdb_assert (count == 0);
3540 else if (count == 0)
3542 unitlen = arm_vfp_cprc_unit_length (*base_type);
3543 if (TYPE_LENGTH (t) != unitlen * count)
3555 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3556 if passed to or returned from a non-variadic function with the VFP
3557 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3558 *BASE_TYPE to the base type for T and *COUNT to the number of
3559 elements of that base type before returning. */
3562 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3565 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3566 int c = arm_vfp_cprc_sub_candidate (t, &b);
3567 if (c <= 0 || c > 4)
3574 /* Return 1 if the VFP ABI should be used for passing arguments to and
3575 returning values from a function of type FUNC_TYPE, 0 otherwise. */
3579 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3581 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3582 /* Variadic functions always use the base ABI. Assume that functions
3583 without debug info are not variadic. */
3584 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3586 /* The VFP ABI is only supported as a variant of AAPCS. */
3587 if (tdep->arm_abi != ARM_ABI_AAPCS)
3589 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3592 /* We currently only support passing parameters in integer registers, which
3593 conforms with GCC's default model, and VFP argument passing following
3594 the VFP variant of AAPCS. Several other variants exist and
3595 we should probably support some of them based on the selected ABI. */
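/* Illustrative example (editorial addition, standard AAPCS behaviour
   rather than anything specific to this file): for

       void f (double d, int i);

   the VFP variant passes D in d0 and I in r0, while the base ABI passes
   D in the core register pair r0/r1 and I in r2.  The code below models
   the VFP case with the VFP_REGS_FREE bitmask and the base case with
   ARGREG/NSTACK.  */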
3598 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3599 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3600 struct value **args, CORE_ADDR sp,
3601 function_call_return_method return_method,
3602 CORE_ADDR struct_addr)
3604 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3608 struct stack_item *si = NULL;
3611 unsigned vfp_regs_free = (1 << 16) - 1;
3613 /* Determine the type of this function and whether the VFP ABI
3615 ftype = check_typedef (value_type (function));
3616 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3617 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3618 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3620 /* Set the return address. For the ARM, the return breakpoint is
3621 always at BP_ADDR. */
3622 if (arm_pc_is_thumb (gdbarch, bp_addr))
3624 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3626 /* Walk through the list of args and determine how large a temporary
3627 stack is required. Need to take care here as structs may be
3628 passed on the stack, and we have to push them. */
3631 argreg = ARM_A1_REGNUM;
3634 /* The struct_return pointer occupies the first parameter
3635 passing register. */
3636 if (return_method == return_method_struct)
3639 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3640 gdbarch_register_name (gdbarch, argreg),
3641 paddress (gdbarch, struct_addr));
3642 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3646 for (argnum = 0; argnum < nargs; argnum++)
3649 struct type *arg_type;
3650 struct type *target_type;
3651 enum type_code typecode;
3652 const bfd_byte *val;
3654 enum arm_vfp_cprc_base_type vfp_base_type;
3656 int may_use_core_reg = 1;
3658 arg_type = check_typedef (value_type (args[argnum]));
3659 len = TYPE_LENGTH (arg_type);
3660 target_type = TYPE_TARGET_TYPE (arg_type);
3661 typecode = TYPE_CODE (arg_type);
3662 val = value_contents (args[argnum]);
3664 align = type_align (arg_type);
3665 /* Round alignment up to a whole number of words. */
3666 align = (align + ARM_INT_REGISTER_SIZE - 1)
3667 & ~(ARM_INT_REGISTER_SIZE - 1);
3668 /* Different ABIs have different maximum alignments. */
3669 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3671 /* The APCS ABI only requires word alignment. */
3672 align = ARM_INT_REGISTER_SIZE;
3676 /* The AAPCS requires at most doubleword alignment. */
3677 if (align > ARM_INT_REGISTER_SIZE * 2)
3678 align = ARM_INT_REGISTER_SIZE * 2;
3682 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3690 /* Because this is a CPRC it cannot go in a core register or
3691 cause a core register to be skipped for alignment.
3692 Either it goes in VFP registers and the rest of this loop
3693 iteration is skipped for this argument, or it goes on the
3694 stack (and the stack alignment code is correct for this
3696 may_use_core_reg = 0;
3698 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3699 shift = unit_length / 4;
3700 mask = (1 << (shift * vfp_base_count)) - 1;
3701 for (regno = 0; regno < 16; regno += shift)
3702 if (((vfp_regs_free >> regno) & mask) == mask)
3711 vfp_regs_free &= ~(mask << regno);
3712 reg_scaled = regno / shift;
3713 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3714 for (i = 0; i < vfp_base_count; i++)
3718 if (reg_char == 'q')
3719 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3720 val + i * unit_length);
3723 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3724 reg_char, reg_scaled + i);
3725 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3727 regcache->cooked_write (regnum, val + i * unit_length);
3734 /* This CPRC could not go in VFP registers, so all VFP
3735 registers are now marked as used. */
3740 /* Push stack padding for doubleword alignment. */
3741 if (nstack & (align - 1))
3743 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3744 nstack += ARM_INT_REGISTER_SIZE;
3747 /* Doubleword aligned quantities must go in even register pairs. */
3748 if (may_use_core_reg
3749 && argreg <= ARM_LAST_ARG_REGNUM
3750 && align > ARM_INT_REGISTER_SIZE
3754 /* If the argument is a pointer to a function, and it is a
3755 Thumb function, create a LOCAL copy of the value and set
3756 the THUMB bit in it. */
3757 if (TYPE_CODE_PTR == typecode
3758 && target_type != NULL
3759 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3761 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3762 if (arm_pc_is_thumb (gdbarch, regval))
3764 bfd_byte *copy = (bfd_byte *) alloca (len);
3765 store_unsigned_integer (copy, len, byte_order,
3766 MAKE_THUMB_ADDR (regval));
3771 /* Copy the argument to general registers or the stack in
3772 register-sized pieces. Large arguments are split between
3773 registers and stack. */
3776 int partial_len = len < ARM_INT_REGISTER_SIZE
3777 ? len : ARM_INT_REGISTER_SIZE;
3779 = extract_unsigned_integer (val, partial_len, byte_order);
3781 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3783 /* The argument is being passed in a general purpose
3785 if (byte_order == BFD_ENDIAN_BIG)
3786 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3788 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3790 gdbarch_register_name
3792 phex (regval, ARM_INT_REGISTER_SIZE));
3793 regcache_cooked_write_unsigned (regcache, argreg, regval);
3798 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3800 memset (buf, 0, sizeof (buf));
3801 store_unsigned_integer (buf, partial_len, byte_order, regval);
3803 /* Push the arguments onto the stack. */
3805 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3807 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3808 nstack += ARM_INT_REGISTER_SIZE;
3815 /* If we have an odd number of words to push, then decrement the stack
3816 by one word now, so the first stack argument will be dword aligned. */
3823 write_memory (sp, si->data, si->len);
3824 si = pop_stack_item (si);
3827 /* Finally, update the SP register. */
3828 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3834 /* Always align the frame to an 8-byte boundary. This is required on
3835 some platforms and harmless on the rest. */
3838 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3840 /* Align the stack to eight bytes. */
3841 return sp & ~ (CORE_ADDR) 7;
3845 print_fpu_flags (struct ui_file *file, int flags)
3847 if (flags & (1 << 0))
3848 fputs_filtered ("IVO ", file);
3849 if (flags & (1 << 1))
3850 fputs_filtered ("DVZ ", file);
3851 if (flags & (1 << 2))
3852 fputs_filtered ("OFL ", file);
3853 if (flags & (1 << 3))
3854 fputs_filtered ("UFL ", file);
3855 if (flags & (1 << 4))
3856 fputs_filtered ("INX ", file);
3857 fputc_filtered ('\n', file);
3860 /* Print interesting information about the floating point processor
3861 (if present) or emulator. */
3863 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3864 struct frame_info *frame, const char *args)
3866 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3869 type = (status >> 24) & 127;
3870 if (status & (1 << 31))
3871 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3873 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3874 /* i18n: [floating point unit] mask */
3875 fputs_filtered (_("mask: "), file);
3876 print_fpu_flags (file, status >> 16);
3877 /* i18n: [floating point unit] flags */
3878 fputs_filtered (_("flags: "), file);
3879 print_fpu_flags (file, status);
3882 /* Construct the ARM extended floating point type. */
3883 static struct type *
3884 arm_ext_type (struct gdbarch *gdbarch)
3886 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3888 if (!tdep->arm_ext_type)
3890 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3891 floatformats_arm_ext);
3893 return tdep->arm_ext_type;
3896 static struct type *
3897 arm_neon_double_type (struct gdbarch *gdbarch)
3899 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3901 if (tdep->neon_double_type == NULL)
3903 struct type *t, *elem;
3905 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3907 elem = builtin_type (gdbarch)->builtin_uint8;
3908 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3909 elem = builtin_type (gdbarch)->builtin_uint16;
3910 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3911 elem = builtin_type (gdbarch)->builtin_uint32;
3912 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3913 elem = builtin_type (gdbarch)->builtin_uint64;
3914 append_composite_type_field (t, "u64", elem);
3915 elem = builtin_type (gdbarch)->builtin_float;
3916 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3917 elem = builtin_type (gdbarch)->builtin_double;
3918 append_composite_type_field (t, "f64", elem);
3920 TYPE_VECTOR (t) = 1;
3921 TYPE_NAME (t) = "neon_d";
3922 tdep->neon_double_type = t;
3925 return tdep->neon_double_type;
3928 /* FIXME: The vector types are not correctly ordered on big-endian
3929 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3930 bits of d0 - regardless of what unit size is being held in d0. So
3931 the offset of the first uint8 in d0 is 7, but the offset of the
3932 first float is 4. This code works as-is for little-endian
3935 static struct type *
3936 arm_neon_quad_type (struct gdbarch *gdbarch)
3938 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3940 if (tdep->neon_quad_type == NULL)
3942 struct type *t, *elem;
3944 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3946 elem = builtin_type (gdbarch)->builtin_uint8;
3947 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3948 elem = builtin_type (gdbarch)->builtin_uint16;
3949 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3950 elem = builtin_type (gdbarch)->builtin_uint32;
3951 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3952 elem = builtin_type (gdbarch)->builtin_uint64;
3953 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3954 elem = builtin_type (gdbarch)->builtin_float;
3955 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3956 elem = builtin_type (gdbarch)->builtin_double;
3957 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3959 TYPE_VECTOR (t) = 1;
3960 TYPE_NAME (t) = "neon_q";
3961 tdep->neon_quad_type = t;
3964 return tdep->neon_quad_type;
3967 /* Return the GDB type object for the "standard" data type of data in register REGNUM. */
3970 static struct type *
3971 arm_register_type (struct gdbarch *gdbarch, int regnum)
3973 int num_regs = gdbarch_num_regs (gdbarch);
3975 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3976 && regnum >= num_regs && regnum < num_regs + 32)
3977 return builtin_type (gdbarch)->builtin_float;
3979 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3980 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3981 return arm_neon_quad_type (gdbarch);
3983 /* If the target description has register information, we are only
3984 in this function so that we can override the types of
3985 double-precision registers for NEON. */
3986 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3988 struct type *t = tdesc_register_type (gdbarch, regnum);
3990 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3991 && TYPE_CODE (t) == TYPE_CODE_FLT
3992 && gdbarch_tdep (gdbarch)->have_neon)
3993 return arm_neon_double_type (gdbarch);
3998 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4000 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4001 return builtin_type (gdbarch)->builtin_void;
4003 return arm_ext_type (gdbarch);
4005 else if (regnum == ARM_SP_REGNUM)
4006 return builtin_type (gdbarch)->builtin_data_ptr;
4007 else if (regnum == ARM_PC_REGNUM)
4008 return builtin_type (gdbarch)->builtin_func_ptr;
4009 else if (regnum >= ARRAY_SIZE (arm_register_names))
4010 /* These registers are only supported on targets which supply
4011 an XML description. */
4012 return builtin_type (gdbarch)->builtin_int0;
4014 return builtin_type (gdbarch)->builtin_uint32;
4017 /* Map a DWARF register REGNUM onto the appropriate GDB register number. */
4021 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4023 /* Core integer regs. */
4024 if (reg >= 0 && reg <= 15)
4027 /* Legacy FPA encoding. These were once used in a way which
4028 overlapped with VFP register numbering, so their use is
4029 discouraged, but GDB doesn't support the ARM toolchain
4030 which used them for VFP. */
4031 if (reg >= 16 && reg <= 23)
4032 return ARM_F0_REGNUM + reg - 16;
4034 /* New assignments for the FPA registers. */
4035 if (reg >= 96 && reg <= 103)
4036 return ARM_F0_REGNUM + reg - 96;
4038 /* WMMX register assignments. */
4039 if (reg >= 104 && reg <= 111)
4040 return ARM_WCGR0_REGNUM + reg - 104;
4042 if (reg >= 112 && reg <= 127)
4043 return ARM_WR0_REGNUM + reg - 112;
4045 if (reg >= 192 && reg <= 199)
4046 return ARM_WC0_REGNUM + reg - 192;
4048 /* VFP v2 registers. A double precision value is actually
4049 in d1 rather than s2, but the ABI only defines numbering
4050 for the single precision registers. This will "just work"
4051 in GDB for little endian targets (we'll read eight bytes,
4052 starting in s0 and then progressing to s1), but will be
4053 reversed on big endian targets with VFP. This won't
4054 be a problem for the new Neon quad registers; you're supposed
4055 to use DW_OP_piece for those. */
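/* Example (editorial addition): DWARF register 66 is mapped below to the
   user register named "s2", i.e. "s%d" with 66 - 64 == 2.  */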
4056 if (reg >= 64 && reg <= 95)
4060 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4061 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4065 /* VFP v3 / Neon registers. This range is also used for VFP v2
4066 registers, except that it now describes d0 instead of s0. */
4067 if (reg >= 256 && reg <= 287)
4071 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4072 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4079 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4081 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4084 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4086 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4087 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4089 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4090 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4092 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4093 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4095 if (reg < NUM_GREGS)
4096 return SIM_ARM_R0_REGNUM + reg;
4099 if (reg < NUM_FREGS)
4100 return SIM_ARM_FP0_REGNUM + reg;
4103 if (reg < NUM_SREGS)
4104 return SIM_ARM_FPS_REGNUM + reg;
4107 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4110 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4111 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4112 NULL if an error occurs. BUF is freed. */
4115 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4116 int old_len, int new_len)
4119 int bytes_to_read = new_len - old_len;
4121 new_buf = (gdb_byte *) xmalloc (new_len);
4122 memcpy (new_buf + bytes_to_read, buf, old_len);
4124 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4132 /* An IT block is at most the 2-byte IT instruction followed by
4133 four 4-byte instructions. The furthest back we must search to
4134 find an IT block that affects the current instruction is thus
4135 2 + 3 * 4 == 14 bytes. */
4136 #define MAX_IT_BLOCK_PREFIX 14
4138 /* Use a quick scan if there are more than this many bytes of instructions. */
4140 #define IT_SCAN_THRESHOLD 32
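/* Example (editorial addition, not in the original source): given

       it    eq
       addeq r0, r0, r1     <- requested breakpoint address

   the requested address lies inside an IT block, so the code below
   relocates the breakpoint onto the IT instruction itself, which is
   always executed.  */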
4142 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4143 A breakpoint in an IT block may not be hit, depending on the condition flags. */
4146 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4150 CORE_ADDR boundary, func_start;
4152 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4153 int i, any, last_it, last_it_count;
4155 /* If we are using BKPT breakpoints, none of this is necessary. */
4156 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4159 /* ARM mode does not have this problem. */
4160 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4163 /* We are setting a breakpoint in Thumb code that could potentially
4164 contain an IT block. The first step is to find how much Thumb
4165 code there is; we do not need to read outside of known Thumb
4167 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4169 /* Thumb-2 code must have mapping symbols to have a chance. */
4172 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4174 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4175 && func_start > boundary)
4176 boundary = func_start;
4178 /* Search for a candidate IT instruction. We have to do some fancy
4179 footwork to distinguish a real IT instruction from the second
4180 half of a 32-bit instruction, but there is no need for that if
4181 there's no candidate. */
4182 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4184 /* No room for an IT instruction. */
4187 buf = (gdb_byte *) xmalloc (buf_len);
4188 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4191 for (i = 0; i < buf_len; i += 2)
4193 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4194 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4207 /* OK, the code bytes before this instruction contain at least one
4208 halfword which resembles an IT instruction. We know that it's
4209 Thumb code, but there are still two possibilities. Either the
4210 halfword really is an IT instruction, or it is the second half of
4211 a 32-bit Thumb instruction. The only way we can tell is to
4212 scan forwards from a known instruction boundary. */
4213 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4217 /* There's a lot of code before this instruction. Start with an
4218 optimistic search; it's easy to recognize halfwords that can
4219 not be the start of a 32-bit instruction, and use that to
4220 lock on to the instruction boundaries. */
4221 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4224 buf_len = IT_SCAN_THRESHOLD;
4227 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4229 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4230 if (thumb_insn_size (inst1) == 2)
4237 /* At this point, if DEFINITE, BUF[I] is the first place we
4238 are sure that we know the instruction boundaries, and it is far
4239 enough from BPADDR that we could not miss an IT instruction
4240 affecting BPADDR. If ! DEFINITE, give up - start from a
4244 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4248 buf_len = bpaddr - boundary;
4254 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4257 buf_len = bpaddr - boundary;
4261 /* Scan forwards. Find the last IT instruction before BPADDR. */
4266 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4268 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4273 else if (inst1 & 0x0002)
4275 else if (inst1 & 0x0004)
4280 i += thumb_insn_size (inst1);
4286 /* There wasn't really an IT instruction after all. */
4289 if (last_it_count < 1)
4290 /* It was too far away. */
4293 /* This really is a trouble spot. Move the breakpoint to the IT instruction. */
4295 return bpaddr - buf_len + last_it;
4298 /* ARM displaced stepping support.
4300 Generally ARM displaced stepping works as follows:
4302 1. When an instruction is to be single-stepped, it is first decoded by
4303 arm_process_displaced_insn. Depending on the type of instruction, it is
4304 then copied to a scratch location, possibly in a modified form. The
4305 copy_* set of functions performs such modification, as necessary. A
4306 breakpoint is placed after the modified instruction in the scratch space
4307 to return control to GDB. Note in particular that instructions which
4308 modify the PC will no longer do so after modification.
4310 2. The instruction is single-stepped, by setting the PC to the scratch
4311 location address, and resuming. Control returns to GDB when the breakpoint is hit.
4314 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4315 function used for the current instruction. This function's job is to
4316 put the CPU/memory state back to what it would have been if the
4317 instruction had been executed unmodified in its original location. */
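/* Illustrative example (editorial addition, simplified): an instruction
   such as "ldr r0, [pc, #8]" cannot simply be executed out of the
   scratch area, because PC would read the wrong value there.  The
   corresponding copy_* routine therefore rewrites it to use a temporary
   register preloaded with the PC value of the original location, and
   the matching cleanup_* routine restores that register afterwards.  */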
4319 /* NOP instruction (mov r0, r0). */
4320 #define ARM_NOP 0xe1a00000
4321 #define THUMB_NOP 0x4600
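/* Illustrative sketch (not part of the original code): when an instruction
   is executed out of line, any PC-relative behaviour has to be emulated
   using its original address.  The helper below mirrors the pipeline-offset
   rule applied by displaced_read_reg: in ARM state the PC reads as the
   instruction's address plus 8, in Thumb state plus 4.  The function name
   and the IS_THUMB flag are illustrative only.  */
static uint32_t
example_observed_pc (uint32_t insn_addr, int is_thumb)
{
  return insn_addr + (is_thumb ? 4 : 8);
}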
4323 /* Helper for register reads for displaced stepping. In particular, this
4324 returns the PC as it would be seen by the instruction at its original location. */
4328 displaced_read_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4332 CORE_ADDR from = dsc->insn_addr;
4334 if (regno == ARM_PC_REGNUM)
4336 /* Compute pipeline offset:
4337 - When executing an ARM instruction, PC reads as the address of the
4338 current instruction plus 8.
4339 - When executing a Thumb instruction, PC reads as the address of the
4340 current instruction plus 4. */
4347 if (debug_displaced)
4348 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4349 (unsigned long) from);
4350 return (ULONGEST) from;
4354 regcache_cooked_read_unsigned (regs, regno, &ret);
4355 if (debug_displaced)
4356 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4357 regno, (unsigned long) ret);
4363 displaced_in_arm_mode (struct regcache *regs)
4366 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4368 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4370 return (ps & t_bit) == 0;
4373 /* Write to the PC as from a branch instruction. */
4376 branch_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4380 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4381 architecture versions < 6. */
4382 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4383 val & ~(ULONGEST) 0x3);
4385 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4386 val & ~(ULONGEST) 0x1);
4389 /* Write to the PC as from a branch-exchange instruction. */
4392 bx_write_pc (struct regcache *regs, ULONGEST val)
4395 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4397 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4401 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4402 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4404 else if ((val & 2) == 0)
4406 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4407 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4411 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4412 mode, align dest to 4 bytes). */
4413 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4414 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4415 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
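/* Illustrative sketch (not part of the original code) of the interworking
   rule bx_write_pc applies above: bit 0 of the target selects Thumb state,
   a clear bit 1 selects ARM state, and any other combination is
   unpredictable.  The names used here are hypothetical.  */
enum example_bx_state { EXAMPLE_ARM, EXAMPLE_THUMB, EXAMPLE_UNPREDICTABLE };
static enum example_bx_state
example_bx_target_state (uint32_t val)
{
  if (val & 1)
    return EXAMPLE_THUMB;		/* PC <- val & ~1, T bit set.  */
  else if ((val & 2) == 0)
    return EXAMPLE_ARM;			/* PC <- val, T bit cleared.  */
  else
    return EXAMPLE_UNPREDICTABLE;	/* Bit 1 set but bit 0 clear.  */
}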
4419 /* Write to the PC as if from a load instruction. */
4422 load_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4425 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4426 bx_write_pc (regs, val);
4428 branch_write_pc (regs, dsc, val);
4431 /* Write to the PC as if from an ALU instruction. */
4434 alu_write_pc (struct regcache *regs, arm_displaced_step_closure *dsc,
4437 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4438 bx_write_pc (regs, val);
4440 branch_write_pc (regs, dsc, val);
4443 /* Helper for writing to registers for displaced stepping. Writing to the PC
4444 has varying effects depending on the instruction which does the write:
4445 this is controlled by the WRITE_PC argument. */
4448 displaced_write_reg (struct regcache *regs, arm_displaced_step_closure *dsc,
4449 int regno, ULONGEST val, enum pc_write_style write_pc)
4451 if (regno == ARM_PC_REGNUM)
4453 if (debug_displaced)
4454 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4455 (unsigned long) val);
4458 case BRANCH_WRITE_PC:
4459 branch_write_pc (regs, dsc, val);
4463 bx_write_pc (regs, val);
4467 load_write_pc (regs, dsc, val);
4471 alu_write_pc (regs, dsc, val);
4474 case CANNOT_WRITE_PC:
4475 warning (_("Instruction wrote to PC in an unexpected way when "
4476 "single-stepping"));
4480 internal_error (__FILE__, __LINE__,
4481 _("Invalid argument to displaced_write_reg"));
4484 dsc->wrote_to_pc = 1;
4488 if (debug_displaced)
4489 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4490 regno, (unsigned long) val);
4491 regcache_cooked_write_unsigned (regs, regno, val);
4495 /* This function is used to concisely determine if an instruction INSN
4496 references PC. Register fields of interest in INSN should have the
4497 corresponding fields of BITMASK set to 0b1111. The function
4498 returns 1 if any of these fields in INSN reference the PC
4499 (also 0b1111, r15), else it returns 0. */
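/* For instance, ARM data-processing instructions keep Rn in bits 16-19 and
   Rd in bits 12-15, so arm_copy_alu_imm below passes the bitmask
   0x000ff000.  A minimal sketch of the test made for a single field; the
   function name is hypothetical and 0xe1a0f000 ("mov pc, r0") is just an
   example encoding whose Rd field is the PC.  */
static int
example_rd_field_is_pc (uint32_t insn)
{
  return (insn & 0x0000f000u) == 0x0000f000u;	/* Rd == r15?  */
}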
4502 insn_references_pc (uint32_t insn, uint32_t bitmask)
4504 uint32_t lowbit = 1;
4506 while (bitmask != 0)
4510 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4516 mask = lowbit * 0xf;
4518 if ((insn & mask) == mask)
4527 /* The simplest copy function. Many instructions have the same effect no
4528 matter what address they are executed at: in those cases, use this. */
4531 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4532 const char *iname, arm_displaced_step_closure *dsc)
4534 if (debug_displaced)
4535 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4536 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4539 dsc->modinsn[0] = insn;
4545 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4546 uint16_t insn2, const char *iname,
4547 arm_displaced_step_closure *dsc)
4549 if (debug_displaced)
4550 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4551 "opcode/class '%s' unmodified\n", insn1, insn2,
4554 dsc->modinsn[0] = insn1;
4555 dsc->modinsn[1] = insn2;
4561 /* Copy a 16-bit instruction (Thumb, or 16-bit Thumb-2) without any modification. */
4564 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4566 arm_displaced_step_closure *dsc)
4568 if (debug_displaced)
4569 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4570 "opcode/class '%s' unmodified\n", insn,
4573 dsc->modinsn[0] = insn;
4578 /* Preload instructions with immediate offset. */
4581 cleanup_preload (struct gdbarch *gdbarch,
4582 struct regcache *regs, arm_displaced_step_closure *dsc)
4584 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4585 if (!dsc->u.preload.immed)
4586 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4590 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4591 arm_displaced_step_closure *dsc, unsigned int rn)
4594 /* Preload instructions:
4596 {pli/pld} [rn, #+/-imm]
4598 {pli/pld} [r0, #+/-imm]. */
4600 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4601 rn_val = displaced_read_reg (regs, dsc, rn);
4602 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4603 dsc->u.preload.immed = 1;
4605 dsc->cleanup = &cleanup_preload;
4609 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4610 arm_displaced_step_closure *dsc)
4612 unsigned int rn = bits (insn, 16, 19);
4614 if (!insn_references_pc (insn, 0x000f0000ul))
4615 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4617 if (debug_displaced)
4618 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4619 (unsigned long) insn);
4621 dsc->modinsn[0] = insn & 0xfff0ffff;
4623 install_preload (gdbarch, regs, dsc, rn);
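/* Illustrative sketch (not part of the original code): the masks used by
   the copy_* routines splice a scratch register number into a 4-bit
   register field of the encoding.  Clearing bits 16-19 as arm_copy_preload
   does above with "insn & 0xfff0ffff" is the special case of substituting
   r0 for Rn, i.e. example_set_reg_field (insn, 16, 0).  The helper name is
   hypothetical.  */
static uint32_t
example_set_reg_field (uint32_t insn, int lsb, unsigned int regno)
{
  return (insn & ~((uint32_t) 0xf << lsb)) | ((uint32_t) (regno & 0xf) << lsb);
}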
4629 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4630 struct regcache *regs, arm_displaced_step_closure *dsc)
4632 unsigned int rn = bits (insn1, 0, 3);
4633 unsigned int u_bit = bit (insn1, 7);
4634 int imm12 = bits (insn2, 0, 11);
4637 if (rn != ARM_PC_REGNUM)
4638 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4640 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3 and
4641 PLD (literal) Encoding T1. */
4642 if (debug_displaced)
4643 fprintf_unfiltered (gdb_stdlog,
4644 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4645 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4651 /* Rewrite instruction {pli/pld} PC imm12 into:
4652 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4656 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4658 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4659 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4661 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4663 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4664 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4665 dsc->u.preload.immed = 0;
4667 /* {pli/pld} [r0, r1] */
4668 dsc->modinsn[0] = insn1 & 0xfff0;
4669 dsc->modinsn[1] = 0xf001;
4672 dsc->cleanup = &cleanup_preload;
4676 /* Preload instructions with register offset. */
4679 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4680 arm_displaced_step_closure *dsc, unsigned int rn,
4683 ULONGEST rn_val, rm_val;
4685 /* Preload register-offset instructions:
4687 {pli/pld} [rn, rm {, shift}]
4689 {pli/pld} [r0, r1 {, shift}]. */
4691 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4692 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4693 rn_val = displaced_read_reg (regs, dsc, rn);
4694 rm_val = displaced_read_reg (regs, dsc, rm);
4695 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4696 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4697 dsc->u.preload.immed = 0;
4699 dsc->cleanup = &cleanup_preload;
4703 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4704 struct regcache *regs,
4705 arm_displaced_step_closure *dsc)
4707 unsigned int rn = bits (insn, 16, 19);
4708 unsigned int rm = bits (insn, 0, 3);
4711 if (!insn_references_pc (insn, 0x000f000ful))
4712 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4714 if (debug_displaced)
4715 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4716 (unsigned long) insn);
4718 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4720 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4724 /* Copy/cleanup coprocessor load and store instructions. */
4727 cleanup_copro_load_store (struct gdbarch *gdbarch,
4728 struct regcache *regs,
4729 arm_displaced_step_closure *dsc)
4731 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4733 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4735 if (dsc->u.ldst.writeback)
4736 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4740 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4741 arm_displaced_step_closure *dsc,
4742 int writeback, unsigned int rn)
4746 /* Coprocessor load/store instructions:
4748 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4750 {stc/stc2} [r0, #+/-imm].
4752 ldc/ldc2 are handled identically. */
4754 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4755 rn_val = displaced_read_reg (regs, dsc, rn);
4756 /* PC should be 4-byte aligned. */
4757 rn_val = rn_val & 0xfffffffc;
4758 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4760 dsc->u.ldst.writeback = writeback;
4761 dsc->u.ldst.rn = rn;
4763 dsc->cleanup = &cleanup_copro_load_store;
4767 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4768 struct regcache *regs,
4769 arm_displaced_step_closure *dsc)
4771 unsigned int rn = bits (insn, 16, 19);
4773 if (!insn_references_pc (insn, 0x000f0000ul))
4774 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4776 if (debug_displaced)
4777 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4778 "load/store insn %.8lx\n", (unsigned long) insn);
4780 dsc->modinsn[0] = insn & 0xfff0ffff;
4782 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4788 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4789 uint16_t insn2, struct regcache *regs,
4790 arm_displaced_step_closure *dsc)
4792 unsigned int rn = bits (insn1, 0, 3);
4794 if (rn != ARM_PC_REGNUM)
4795 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4796 "copro load/store", dsc);
4798 if (debug_displaced)
4799 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4800 "load/store insn %.4x%.4x\n", insn1, insn2);
4802 dsc->modinsn[0] = insn1 & 0xfff0;
4803 dsc->modinsn[1] = insn2;
4806 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4807 don't support writeback, so pass 0. */
4808 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4813 /* Clean up branch instructions (actually perform the branch, by setting
4817 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4818 arm_displaced_step_closure *dsc)
4820 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4821 int branch_taken = condition_true (dsc->u.branch.cond, status);
4822 enum pc_write_style write_pc = dsc->u.branch.exchange
4823 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4828 if (dsc->u.branch.link)
4830 /* LR should hold the address of the instruction following the current
4831 one.  So as not to confuse later handling of a `bx lr' insn, if the
4832 current insn is Thumb, bit 0 of the LR value should be set to 1. */
4833 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4836 next_insn_addr |= 0x1;
4838 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4842 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
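/* Illustrative sketch (not part of the original code) of the return address
   cleanup_branch computes above for BL/BLX: the address of the next
   instruction at the original location, with bit 0 set when that code is
   Thumb so that a later "bx lr" returns to the correct state.  Names are
   hypothetical.  */
static uint32_t
example_return_address (uint32_t insn_addr, uint32_t insn_size, int is_thumb)
{
  uint32_t lr = insn_addr + insn_size;
  if (is_thumb)
    lr |= 1;
  return lr;
}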
4845 /* Copy B/BL/BLX instructions with immediate destinations. */
4848 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4849 arm_displaced_step_closure *dsc,
4850 unsigned int cond, int exchange, int link, long offset)
4852 /* Implement "BL<cond> <label>" as:
4854 Preparation: cond <- instruction condition
4855 Insn: mov r0, r0 (nop)
4856 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4858 B<cond> similar, but don't set r14 in cleanup. */
4860 dsc->u.branch.cond = cond;
4861 dsc->u.branch.link = link;
4862 dsc->u.branch.exchange = exchange;
4864 dsc->u.branch.dest = dsc->insn_addr;
4865 if (link && exchange)
4866 /* For BLX, the offset is computed from Align (PC, 4). */
4867 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4870 dsc->u.branch.dest += 4 + offset;
4872 dsc->u.branch.dest += 8 + offset;
4874 dsc->cleanup = &cleanup_branch;
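/* Illustrative sketch (not part of the original code) of the destination
   install_b_bl_blx records above for the cleanup phase: the original
   address, aligned down to a word boundary for BLX, plus the pipeline
   offset (8 in ARM state, 4 in Thumb state), plus the decoded offset.
   Names are hypothetical.  */
static uint32_t
example_branch_dest (uint32_t insn_addr, int is_thumb, int is_blx,
		     int32_t offset)
{
  uint32_t base = insn_addr;
  if (is_blx)
    base &= ~(uint32_t) 3;		/* Align (PC, 4).  */
  return base + (is_thumb ? 4 : 8) + offset;
}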
4877 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4878 struct regcache *regs, arm_displaced_step_closure *dsc)
4880 unsigned int cond = bits (insn, 28, 31);
4881 int exchange = (cond == 0xf);
4882 int link = exchange || bit (insn, 24);
4885 if (debug_displaced)
4886 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4887 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4888 (unsigned long) insn);
4890 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4891 then arrange the switch into Thumb mode. */
4892 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4894 offset = bits (insn, 0, 23) << 2;
4896 if (bit (offset, 25))
4897 offset = offset | ~0x3ffffff;
4899 dsc->modinsn[0] = ARM_NOP;
4901 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
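/* Illustrative sketch (not part of the original code): the branch offset
   decoded above is a 26-bit signed quantity (imm24 shifted left by two,
   plus the half-word bit for BLX), so it is sign-extended from bit 25,
   which is what "offset | ~0x3ffffff" does.  A generic version with a
   hypothetical name:  */
static int32_t
example_sign_extend (uint32_t value, int sign_bit)
{
  uint32_t mask = (uint32_t) 1 << sign_bit;
  return (int32_t) ((value ^ mask) - mask);
}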
4906 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4907 uint16_t insn2, struct regcache *regs,
4908 arm_displaced_step_closure *dsc)
4910 int link = bit (insn2, 14);
4911 int exchange = link && !bit (insn2, 12);
4914 int j1 = bit (insn2, 13);
4915 int j2 = bit (insn2, 11);
4916 int s = sbits (insn1, 10, 10);
4917 int i1 = !(j1 ^ bit (insn1, 10));
4918 int i2 = !(j2 ^ bit (insn1, 10));
4920 if (!link && !exchange) /* B */
4922 offset = (bits (insn2, 0, 10) << 1);
4923 if (bit (insn2, 12)) /* Encoding T4 */
4925 offset |= (bits (insn1, 0, 9) << 12)
4931 else /* Encoding T3 */
4933 offset |= (bits (insn1, 0, 5) << 12)
4937 cond = bits (insn1, 6, 9);
4942 offset = (bits (insn1, 0, 9) << 12);
4943 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4944 offset |= exchange ?
4945 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4948 if (debug_displaced)
4949 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4950 "%.4x %.4x with offset %.8lx\n",
4951 link ? (exchange) ? "blx" : "bl" : "b",
4952 insn1, insn2, offset);
4954 dsc->modinsn[0] = THUMB_NOP;
4956 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
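/* Illustrative sketch (not part of the original code) of the Thumb-2 BL
   (encoding T1) immediate reconstruction used above: I1 = NOT (J1 EOR S),
   I2 = NOT (J2 EOR S), and the offset is S:I1:I2:imm10:imm11:'0',
   sign-extended.  (For BLX, encoding T2, the low bits are imm10H:imm10L:'00'
   instead, as handled above.)  The function name is hypothetical.  */
static int32_t
example_thumb2_bl_offset (uint16_t insn1, uint16_t insn2)
{
  uint32_t s = (insn1 >> 10) & 1;
  uint32_t j1 = (insn2 >> 13) & 1;
  uint32_t j2 = (insn2 >> 11) & 1;
  uint32_t i1 = !(j1 ^ s);
  uint32_t i2 = !(j2 ^ s);
  uint32_t imm = (s << 24) | (i1 << 23) | (i2 << 22)
    | ((uint32_t) (insn1 & 0x3ff) << 12)
    | ((uint32_t) (insn2 & 0x7ff) << 1);
  /* Sign-extend from bit 24.  */
  return (int32_t) ((imm ^ ((uint32_t) 1 << 24)) - ((uint32_t) 1 << 24));
}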
4960 /* Copy B Thumb instructions. */
4962 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
4963 arm_displaced_step_closure *dsc)
4965 unsigned int cond = 0;
4967 unsigned short bit_12_15 = bits (insn, 12, 15);
4968 CORE_ADDR from = dsc->insn_addr;
4970 if (bit_12_15 == 0xd)
4972 /* offset = SignExtend (imm8:0, 32) */
4973 offset = sbits ((insn << 1), 0, 8);
4974 cond = bits (insn, 8, 11);
4976 else if (bit_12_15 == 0xe) /* Encoding T2 */
4978 offset = sbits ((insn << 1), 0, 11);
4982 if (debug_displaced)
4983 fprintf_unfiltered (gdb_stdlog,
4984 "displaced: copying b immediate insn %.4x "
4985 "with offset %d\n", insn, offset);
4987 dsc->u.branch.cond = cond;
4988 dsc->u.branch.link = 0;
4989 dsc->u.branch.exchange = 0;
4990 dsc->u.branch.dest = from + 4 + offset;
4992 dsc->modinsn[0] = THUMB_NOP;
4994 dsc->cleanup = &cleanup_branch;
4999 /* Copy BX/BLX with register-specified destinations. */
5002 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5003 arm_displaced_step_closure *dsc, int link,
5004 unsigned int cond, unsigned int rm)
5006 /* Implement "{BX,BLX}<cond> <reg>" as:
5008 Preparation: cond <- instruction condition
5009 Insn: mov r0, r0 (nop)
5010 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5012 Don't set r14 in cleanup for BX. */
5014 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5016 dsc->u.branch.cond = cond;
5017 dsc->u.branch.link = link;
5019 dsc->u.branch.exchange = 1;
5021 dsc->cleanup = &cleanup_branch;
5025 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5026 struct regcache *regs, arm_displaced_step_closure *dsc)
5028 unsigned int cond = bits (insn, 28, 31);
5031 int link = bit (insn, 5);
5032 unsigned int rm = bits (insn, 0, 3);
5034 if (debug_displaced)
5035 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5036 (unsigned long) insn);
5038 dsc->modinsn[0] = ARM_NOP;
5040 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5045 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5046 struct regcache *regs,
5047 arm_displaced_step_closure *dsc)
5049 int link = bit (insn, 7);
5050 unsigned int rm = bits (insn, 3, 6);
5052 if (debug_displaced)
5053 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5054 (unsigned short) insn);
5056 dsc->modinsn[0] = THUMB_NOP;
5058 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5064 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5067 cleanup_alu_imm (struct gdbarch *gdbarch,
5068 struct regcache *regs, arm_displaced_step_closure *dsc)
5070 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5071 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5072 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5073 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5077 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5078 arm_displaced_step_closure *dsc)
5080 unsigned int rn = bits (insn, 16, 19);
5081 unsigned int rd = bits (insn, 12, 15);
5082 unsigned int op = bits (insn, 21, 24);
5083 int is_mov = (op == 0xd);
5084 ULONGEST rd_val, rn_val;
5086 if (!insn_references_pc (insn, 0x000ff000ul))
5087 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5089 if (debug_displaced)
5090 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5091 "%.8lx\n", is_mov ? "move" : "ALU",
5092 (unsigned long) insn);
5094 /* Instruction is of form:
5096 <op><cond> rd, [rn,] #imm
5100 Preparation: tmp1, tmp2 <- r0, r1;
5102 Insn: <op><cond> r0, r1, #imm
5103 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5106 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5107 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5108 rn_val = displaced_read_reg (regs, dsc, rn);
5109 rd_val = displaced_read_reg (regs, dsc, rd);
5110 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5111 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5115 dsc->modinsn[0] = insn & 0xfff00fff;
5117 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5119 dsc->cleanup = &cleanup_alu_imm;
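/* Illustrative sketch (not part of the original code): for a non-MOV
   data-processing instruction, arm_copy_alu_imm above rewrites Rd to r0
   and Rn to r1, so e.g. "add pc, pc, #4" (0xe28ff004) is executed out of
   line as "add r0, r1, #4" (0xe2810004); the cleanup then moves r0 into
   the real Rd.  The helper name is hypothetical.  */
static uint32_t
example_alu_imm_rewrite (uint32_t insn)
{
  return (insn & 0xfff00fff) | 0x10000;	/* Rd <- r0, Rn <- r1.  */
}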
5125 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5126 uint16_t insn2, struct regcache *regs,
5127 arm_displaced_step_closure *dsc)
5129 unsigned int op = bits (insn1, 5, 8);
5130 unsigned int rn, rm, rd;
5131 ULONGEST rd_val, rn_val;
5133 rn = bits (insn1, 0, 3); /* Rn */
5134 rm = bits (insn2, 0, 3); /* Rm */
5135 rd = bits (insn2, 8, 11); /* Rd */
5137 /* This routine is only called for the MOV instruction. */
5138 gdb_assert (op == 0x2 && rn == 0xf);
5140 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5141 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5143 if (debug_displaced)
5144 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5145 "ALU", insn1, insn2);
5147 /* Instruction is of form:
5149 <op><cond> rd, [rn,] #imm
5153 Preparation: tmp1, tmp2 <- r0, r1;
5155 Insn: <op><cond> r0, r1, #imm
5156 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5159 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5160 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5161 rn_val = displaced_read_reg (regs, dsc, rn);
5162 rd_val = displaced_read_reg (regs, dsc, rd);
5163 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5164 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5167 dsc->modinsn[0] = insn1;
5168 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5171 dsc->cleanup = &cleanup_alu_imm;
5176 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5179 cleanup_alu_reg (struct gdbarch *gdbarch,
5180 struct regcache *regs, arm_displaced_step_closure *dsc)
5185 rd_val = displaced_read_reg (regs, dsc, 0);
5187 for (i = 0; i < 3; i++)
5188 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5190 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5194 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5195 arm_displaced_step_closure *dsc,
5196 unsigned int rd, unsigned int rn, unsigned int rm)
5198 ULONGEST rd_val, rn_val, rm_val;
5200 /* Instruction is of form:
5202 <op><cond> rd, [rn,] rm [, <shift>]
5206 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5207 r0, r1, r2 <- rd, rn, rm
5208 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5209 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5212 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5213 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5214 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5215 rd_val = displaced_read_reg (regs, dsc, rd);
5216 rn_val = displaced_read_reg (regs, dsc, rn);
5217 rm_val = displaced_read_reg (regs, dsc, rm);
5218 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5219 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5220 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5223 dsc->cleanup = &cleanup_alu_reg;
5227 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5228 arm_displaced_step_closure *dsc)
5230 unsigned int op = bits (insn, 21, 24);
5231 int is_mov = (op == 0xd);
5233 if (!insn_references_pc (insn, 0x000ff00ful))
5234 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5236 if (debug_displaced)
5237 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5238 is_mov ? "move" : "ALU", (unsigned long) insn);
5241 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5243 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5245 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5251 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5252 struct regcache *regs,
5253 arm_displaced_step_closure *dsc)
5257 rm = bits (insn, 3, 6);
5258 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5260 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5261 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5263 if (debug_displaced)
5264 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5265 (unsigned short) insn);
5267 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5269 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5274 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5277 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5278 struct regcache *regs,
5279 arm_displaced_step_closure *dsc)
5281 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5284 for (i = 0; i < 4; i++)
5285 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5287 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5291 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5292 arm_displaced_step_closure *dsc,
5293 unsigned int rd, unsigned int rn, unsigned int rm,
5297 ULONGEST rd_val, rn_val, rm_val, rs_val;
5299 /* Instruction is of form:
5301 <op><cond> rd, [rn,] rm, <shift> rs
5305 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5306 r0, r1, r2, r3 <- rd, rn, rm, rs
5307 Insn: <op><cond> r0, r1, r2, <shift> r3
5309 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5313 for (i = 0; i < 4; i++)
5314 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5316 rd_val = displaced_read_reg (regs, dsc, rd);
5317 rn_val = displaced_read_reg (regs, dsc, rn);
5318 rm_val = displaced_read_reg (regs, dsc, rm);
5319 rs_val = displaced_read_reg (regs, dsc, rs);
5320 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5321 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5322 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5323 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5325 dsc->cleanup = &cleanup_alu_shifted_reg;
5329 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5330 struct regcache *regs,
5331 arm_displaced_step_closure *dsc)
5333 unsigned int op = bits (insn, 21, 24);
5334 int is_mov = (op == 0xd);
5335 unsigned int rd, rn, rm, rs;
5337 if (!insn_references_pc (insn, 0x000fff0ful))
5338 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5340 if (debug_displaced)
5341 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5342 "%.8lx\n", is_mov ? "move" : "ALU",
5343 (unsigned long) insn);
5345 rn = bits (insn, 16, 19);
5346 rm = bits (insn, 0, 3);
5347 rs = bits (insn, 8, 11);
5348 rd = bits (insn, 12, 15);
5351 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5353 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5355 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5360 /* Clean up load instructions. */
5363 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5364 arm_displaced_step_closure *dsc)
5366 ULONGEST rt_val, rt_val2 = 0, rn_val;
5368 rt_val = displaced_read_reg (regs, dsc, 0);
5369 if (dsc->u.ldst.xfersize == 8)
5370 rt_val2 = displaced_read_reg (regs, dsc, 1);
5371 rn_val = displaced_read_reg (regs, dsc, 2);
5373 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5374 if (dsc->u.ldst.xfersize > 4)
5375 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5376 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5377 if (!dsc->u.ldst.immed)
5378 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5380 /* Handle register writeback. */
5381 if (dsc->u.ldst.writeback)
5382 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5383 /* Put result in right place. */
5384 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5385 if (dsc->u.ldst.xfersize == 8)
5386 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5389 /* Clean up store instructions. */
5392 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5393 arm_displaced_step_closure *dsc)
5395 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5397 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5398 if (dsc->u.ldst.xfersize > 4)
5399 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5400 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5401 if (!dsc->u.ldst.immed)
5402 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5403 if (!dsc->u.ldst.restore_r4)
5404 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5407 if (dsc->u.ldst.writeback)
5408 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5411 /* Copy "extra" load/store instructions. These are halfword/doubleword
5412 transfers, which have a different encoding to byte/word transfers. */
5415 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5416 struct regcache *regs, arm_displaced_step_closure *dsc)
5418 unsigned int op1 = bits (insn, 20, 24);
5419 unsigned int op2 = bits (insn, 5, 6);
5420 unsigned int rt = bits (insn, 12, 15);
5421 unsigned int rn = bits (insn, 16, 19);
5422 unsigned int rm = bits (insn, 0, 3);
5423 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5424 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5425 int immed = (op1 & 0x4) != 0;
5427 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5429 if (!insn_references_pc (insn, 0x000ff00ful))
5430 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5432 if (debug_displaced)
5433 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5434 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5435 (unsigned long) insn);
5437 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5440 internal_error (__FILE__, __LINE__,
5441 _("copy_extra_ld_st: instruction decode error"));
5443 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5444 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5445 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5447 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5449 rt_val = displaced_read_reg (regs, dsc, rt);
5450 if (bytesize[opcode] == 8)
5451 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5452 rn_val = displaced_read_reg (regs, dsc, rn);
5454 rm_val = displaced_read_reg (regs, dsc, rm);
5456 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5457 if (bytesize[opcode] == 8)
5458 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5459 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5461 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5464 dsc->u.ldst.xfersize = bytesize[opcode];
5465 dsc->u.ldst.rn = rn;
5466 dsc->u.ldst.immed = immed;
5467 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5468 dsc->u.ldst.restore_r4 = 0;
5471 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5473 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5474 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5476 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5478 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5479 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5481 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5486 /* Copy byte/half word/word loads and stores. */
5489 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5490 arm_displaced_step_closure *dsc, int load,
5491 int immed, int writeback, int size, int usermode,
5492 int rt, int rm, int rn)
5494 ULONGEST rt_val, rn_val, rm_val = 0;
5496 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5497 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5499 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5501 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5503 rt_val = displaced_read_reg (regs, dsc, rt);
5504 rn_val = displaced_read_reg (regs, dsc, rn);
5506 rm_val = displaced_read_reg (regs, dsc, rm);
5508 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5509 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5511 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5513 dsc->u.ldst.xfersize = size;
5514 dsc->u.ldst.rn = rn;
5515 dsc->u.ldst.immed = immed;
5516 dsc->u.ldst.writeback = writeback;
5518 /* To write PC we can do:
5520 Before this sequence of instructions:
5521 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5522 r2 is the Rn value obtained from displaced_read_reg.
5524 Insn1: push {pc} Write address of STR instruction + offset on stack
5525 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5526 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5527 = addr(Insn1) + offset - addr(Insn3) - 8
5529 Insn4: add r4, r4, #8 r4 = offset - 8
5530 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5532 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5534 Otherwise we don't know what value to write for PC, since the offset is
5535 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5536 of this can be found in Section "Saving from r15" in
5537 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5539 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
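/* Illustrative check (not part of the original code) of the fix-up
   sequence described above: with SCRATCH the address of Insn1 and
   STORE_OFFSET the architecture-dependent amount the core adds when
   storing the PC, the sequence leaves r0 holding exactly the value an
   in-place store at FROM would have written.  All names are
   hypothetical.  */
static uint32_t
example_stored_pc (uint32_t from, uint32_t scratch, uint32_t store_offset)
{
  uint32_t r0 = from + 8;		/* Value from displaced_read_reg.  */
  uint32_t r4 = scratch + store_offset;	/* push {pc}; pop {r4}.  */
  r4 -= (scratch + 8) + 8;		/* sub r4, r4, pc (PC at Insn3).  */
  r4 += 8;				/* add r4, r4, #8.  */
  return r0 + r4;			/* add r0, r0, r4 == from + store_offset.  */
}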
5544 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5545 uint16_t insn2, struct regcache *regs,
5546 arm_displaced_step_closure *dsc, int size)
5548 unsigned int u_bit = bit (insn1, 7);
5549 unsigned int rt = bits (insn2, 12, 15);
5550 int imm12 = bits (insn2, 0, 11);
5553 if (debug_displaced)
5554 fprintf_unfiltered (gdb_stdlog,
5555 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5556 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5562 /* Rewrite instruction LDR Rt imm12 into:
5564 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5568 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5571 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5572 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5573 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5575 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5577 pc_val = pc_val & 0xfffffffc;
5579 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5580 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5584 dsc->u.ldst.xfersize = size;
5585 dsc->u.ldst.immed = 0;
5586 dsc->u.ldst.writeback = 0;
5587 dsc->u.ldst.restore_r4 = 0;
5589 /* LDR R0, R2, R3 */
5590 dsc->modinsn[0] = 0xf852;
5591 dsc->modinsn[1] = 0x3;
5594 dsc->cleanup = &cleanup_load;
5600 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5601 uint16_t insn2, struct regcache *regs,
5602 arm_displaced_step_closure *dsc,
5603 int writeback, int immed)
5605 unsigned int rt = bits (insn2, 12, 15);
5606 unsigned int rn = bits (insn1, 0, 3);
5607 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5608 /* In LDR (register), there is also a register Rm, which is not allowed to
5609 be PC, so we don't have to check it. */
5611 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5612 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5615 if (debug_displaced)
5616 fprintf_unfiltered (gdb_stdlog,
5617 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5618 rt, rn, insn1, insn2);
5620 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5623 dsc->u.ldst.restore_r4 = 0;
5626 /* ldr[b]<cond> rt, [rn, #imm], etc.
5628 ldr[b]<cond> r0, [r2, #imm]. */
5630 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5631 dsc->modinsn[1] = insn2 & 0x0fff;
5634 /* ldr[b]<cond> rt, [rn, rm], etc.
5636 ldr[b]<cond> r0, [r2, r3]. */
5638 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5639 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5649 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5650 struct regcache *regs,
5651 arm_displaced_step_closure *dsc,
5652 int load, int size, int usermode)
5654 int immed = !bit (insn, 25);
5655 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5656 unsigned int rt = bits (insn, 12, 15);
5657 unsigned int rn = bits (insn, 16, 19);
5658 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5660 if (!insn_references_pc (insn, 0x000ff00ful))
5661 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5663 if (debug_displaced)
5664 fprintf_unfiltered (gdb_stdlog,
5665 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5666 load ? (size == 1 ? "ldrb" : "ldr")
5667 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5669 (unsigned long) insn);
5671 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5672 usermode, rt, rm, rn);
5674 if (load || rt != ARM_PC_REGNUM)
5676 dsc->u.ldst.restore_r4 = 0;
5679 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5681 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5682 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5684 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5686 {ldr,str}[b]<cond> r0, [r2, r3]. */
5687 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5691 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5692 dsc->u.ldst.restore_r4 = 1;
5693 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5694 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5695 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5696 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5697 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5701 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5703 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5708 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5713 /* Clean up LDM instructions with a fully-populated register list.  This is an
5714 unfortunate corner case: it's impossible to implement correctly by modifying
5715 the instruction. The issue is as follows: we have an instruction,
5719 which we must rewrite to avoid loading PC. A possible solution would be to
5720 do the load in two halves, something like (with suitable cleanup
5724 ldm[id][ab] r8!, {r0-r7}
5726 ldm[id][ab] r8, {r7-r14}
5729 but at present there's no suitable place for <temp>, since the scratch space
5730 is overwritten before the cleanup routine is called. For now, we simply
5731 emulate the instruction. */
5734 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5735 arm_displaced_step_closure *dsc)
5737 int inc = dsc->u.block.increment;
5738 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5739 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5740 uint32_t regmask = dsc->u.block.regmask;
5741 int regno = inc ? 0 : 15;
5742 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5743 int exception_return = dsc->u.block.load && dsc->u.block.user
5744 && (regmask & 0x8000) != 0;
5745 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5746 int do_transfer = condition_true (dsc->u.block.cond, status);
5747 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5752 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5753 sensible we can do here. Complain loudly. */
5754 if (exception_return)
5755 error (_("Cannot single-step exception return"));
5757 /* We don't handle any stores here for now. */
5758 gdb_assert (dsc->u.block.load != 0);
5760 if (debug_displaced)
5761 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5762 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5763 dsc->u.block.increment ? "inc" : "dec",
5764 dsc->u.block.before ? "before" : "after");
5771 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5774 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5777 xfer_addr += bump_before;
5779 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5780 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5782 xfer_addr += bump_after;
5784 regmask &= ~(1 << regno);
5787 if (dsc->u.block.writeback)
5788 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5792 /* Clean up an STM which included the PC in the register list. */
5795 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5796 arm_displaced_step_closure *dsc)
5798 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5799 int store_executed = condition_true (dsc->u.block.cond, status);
5800 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5801 CORE_ADDR stm_insn_addr;
5804 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5806 /* If condition code fails, there's nothing else to do. */
5807 if (!store_executed)
5810 if (dsc->u.block.increment)
5812 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5814 if (dsc->u.block.before)
5819 pc_stored_at = dsc->u.block.xfer_addr;
5821 if (dsc->u.block.before)
5825 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5826 stm_insn_addr = dsc->scratch_base;
5827 offset = pc_val - stm_insn_addr;
5829 if (debug_displaced)
5830 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5831 "STM instruction\n", offset);
5833 /* Rewrite the stored PC to the proper value for the non-displaced original
5835 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5836 dsc->insn_addr + offset);
5839 /* Clean up an LDM which includes the PC in the register list. We clumped all
5840 the registers in the transferred list into a contiguous range r0...rX (to
5841 avoid loading PC directly and losing control of the debugged program), so we
5842 must undo that here. */
5845 cleanup_block_load_pc (struct gdbarch *gdbarch,
5846 struct regcache *regs,
5847 arm_displaced_step_closure *dsc)
5849 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5850 int load_executed = condition_true (dsc->u.block.cond, status);
5851 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5852 unsigned int regs_loaded = bitcount (mask);
5853 unsigned int num_to_shuffle = regs_loaded, clobbered;
5855 /* The method employed here will fail if the register list is fully populated
5856 (we need to avoid loading PC directly). */
5857 gdb_assert (num_to_shuffle < 16);
5862 clobbered = (1 << num_to_shuffle) - 1;
5864 while (num_to_shuffle > 0)
5866 if ((mask & (1 << write_reg)) != 0)
5868 unsigned int read_reg = num_to_shuffle - 1;
5870 if (read_reg != write_reg)
5872 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5873 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5874 if (debug_displaced)
5875 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5876 "loaded register r%d to r%d\n"), read_reg,
5879 else if (debug_displaced)
5880 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5881 "r%d already in the right place\n"),
5884 clobbered &= ~(1 << write_reg);
5892 /* Restore any registers we scribbled over. */
5893 for (write_reg = 0; clobbered != 0; write_reg++)
5895 if ((clobbered & (1 << write_reg)) != 0)
5897 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5899 if (debug_displaced)
5900 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5901 "clobbered register r%d\n"), write_reg);
5902 clobbered &= ~(1 << write_reg);
5906 /* Perform register writeback manually. */
5907 if (dsc->u.block.writeback)
5909 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5911 if (dsc->u.block.increment)
5912 new_rn_val += regs_loaded * 4;
5914 new_rn_val -= regs_loaded * 4;
5916 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
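/* Illustrative sketch (not part of the original code): an LDM list such as
   {r1, r5, pc} (mask 0x8022, three registers) is rewritten by
   arm_copy_block_xfer below into the contiguous list {r0, r1, r2} (mask
   0x0007); cleanup_block_load_pc above then moves r2 to the PC, r1 to r5
   and r0 to r1.  The helper name is hypothetical.  */
static uint32_t
example_contiguous_regmask (uint32_t regmask)
{
  unsigned int count = 0;
  uint32_t m;
  for (m = regmask & 0xffff; m != 0; m &= m - 1)
    count++;				/* Population count.  */
  return ((uint32_t) 1 << count) - 1;
}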
5921 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5922 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5925 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5926 struct regcache *regs,
5927 arm_displaced_step_closure *dsc)
5929 int load = bit (insn, 20);
5930 int user = bit (insn, 22);
5931 int increment = bit (insn, 23);
5932 int before = bit (insn, 24);
5933 int writeback = bit (insn, 21);
5934 int rn = bits (insn, 16, 19);
5936 /* Block transfers which don't mention PC can be run directly out-of-line. */
5938 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5939 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5941 if (rn == ARM_PC_REGNUM)
5943 warning (_("displaced: Unpredictable LDM or STM with "
5944 "base register r15"));
5945 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5948 if (debug_displaced)
5949 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5950 "%.8lx\n", (unsigned long) insn);
5952 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5953 dsc->u.block.rn = rn;
5955 dsc->u.block.load = load;
5956 dsc->u.block.user = user;
5957 dsc->u.block.increment = increment;
5958 dsc->u.block.before = before;
5959 dsc->u.block.writeback = writeback;
5960 dsc->u.block.cond = bits (insn, 28, 31);
5962 dsc->u.block.regmask = insn & 0xffff;
5966 if ((insn & 0xffff) == 0xffff)
5968 /* LDM with a fully-populated register list. This case is
5969 particularly tricky. Implement for now by fully emulating the
5970 instruction (which might not behave perfectly in all cases, but
5971 these instructions should be rare enough for that not to matter
5973 dsc->modinsn[0] = ARM_NOP;
5975 dsc->cleanup = &cleanup_block_load_all;
5979 /* LDM of a list of registers which includes PC. Implement by
5980 rewriting the list of registers to be transferred into a
5981 contiguous chunk r0...rX before doing the transfer, then shuffling
5982 registers into the correct places in the cleanup routine. */
5983 unsigned int regmask = insn & 0xffff;
5984 unsigned int num_in_list = bitcount (regmask), new_regmask;
5987 for (i = 0; i < num_in_list; i++)
5988 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5990 /* Writeback makes things complicated. We need to avoid clobbering
5991 the base register with one of the registers in our modified
5992 register list, but just using a different register can't work in
5995 ldm r14!, {r0-r13,pc}
5997 which would need to be rewritten as:
6001 but that can't work, because there's no free register for N.
6003 Solve this by turning off the writeback bit, and emulating
6004 writeback manually in the cleanup routine. */
6009 new_regmask = (1 << num_in_list) - 1;
6011 if (debug_displaced)
6012 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6013 "{..., pc}: original reg list %.4x, modified "
6014 "list %.4x\n"), rn, writeback ? "!" : "",
6015 (int) insn & 0xffff, new_regmask);
6017 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6019 dsc->cleanup = &cleanup_block_load_pc;
6024 /* STM of a list of registers which includes PC. Run the instruction
6025 as-is, but out of line: this will store the wrong value for the PC,
6026 so we must manually fix up the memory in the cleanup routine.
6027 Doing things this way has the advantage that we can auto-detect
6028 the offset of the PC write (which is architecture-dependent) in
6029 the cleanup routine. */
6030 dsc->modinsn[0] = insn;
6032 dsc->cleanup = &cleanup_block_store_pc;
6039 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6040 struct regcache *regs,
6041 arm_displaced_step_closure *dsc)
6043 int rn = bits (insn1, 0, 3);
6044 int load = bit (insn1, 4);
6045 int writeback = bit (insn1, 5);
6047 /* Block transfers which don't mention PC can be run directly out-of-line. */
6049 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6050 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6052 if (rn == ARM_PC_REGNUM)
6054 warning (_("displaced: Unpredictable LDM or STM with "
6055 "base register r15"));
6056 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6057 "unpredictable ldm/stm", dsc);
6060 if (debug_displaced)
6061 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6062 "%.4x%.4x\n", insn1, insn2);
6064 /* Clear bit 13, since it should always be zero. */
6065 dsc->u.block.regmask = (insn2 & 0xdfff);
6066 dsc->u.block.rn = rn;
6068 dsc->u.block.load = load;
6069 dsc->u.block.user = 0;
6070 dsc->u.block.increment = bit (insn1, 7);
6071 dsc->u.block.before = bit (insn1, 8);
6072 dsc->u.block.writeback = writeback;
6073 dsc->u.block.cond = INST_AL;
6074 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6078 if (dsc->u.block.regmask == 0xffff)
6080 /* This case cannot happen: bit 13 was cleared above, so the mask is never 0xffff. */
6085 unsigned int regmask = dsc->u.block.regmask;
6086 unsigned int num_in_list = bitcount (regmask), new_regmask;
6089 for (i = 0; i < num_in_list; i++)
6090 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6095 new_regmask = (1 << num_in_list) - 1;
6097 if (debug_displaced)
6098 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6099 "{..., pc}: original reg list %.4x, modified "
6100 "list %.4x\n"), rn, writeback ? "!" : "",
6101 (int) dsc->u.block.regmask, new_regmask);
6103 dsc->modinsn[0] = insn1;
6104 dsc->modinsn[1] = (new_regmask & 0xffff);
6107 dsc->cleanup = &cleanup_block_load_pc;
6112 dsc->modinsn[0] = insn1;
6113 dsc->modinsn[1] = insn2;
6115 dsc->cleanup = &cleanup_block_store_pc;
6120 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6121 This is used to avoid a dependency on BFD's bfd_endian enum. */
6124 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6127 return read_memory_unsigned_integer (memaddr, len,
6128 (enum bfd_endian) byte_order);
6131 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6134 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6137 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6140 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6143 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6148 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6151 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6153 return arm_is_thumb (self->regcache);
6156 /* single_step() is called just before we want to resume the inferior,
6157 if we want to single-step it but there is no hardware or kernel
6158 single-step support.  We find the targets of the coming instructions
6159 and set breakpoints on them. */
6161 std::vector<CORE_ADDR>
6162 arm_software_single_step (struct regcache *regcache)
6164 struct gdbarch *gdbarch = regcache->arch ();
6165 struct arm_get_next_pcs next_pcs_ctx;
6167 arm_get_next_pcs_ctor (&next_pcs_ctx,
6168 &arm_get_next_pcs_ops,
6169 gdbarch_byte_order (gdbarch),
6170 gdbarch_byte_order_for_code (gdbarch),
6174 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6176 for (CORE_ADDR &pc_ref : next_pcs)
6177 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6182 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6183 for Linux, where some SVC instructions must be treated specially. */
6186 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6187 arm_displaced_step_closure *dsc)
6189 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6191 if (debug_displaced)
6192 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6193 "%.8lx\n", (unsigned long) resume_addr);
6195 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6199 /* Common copy routine for svc instruction. */
6202 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6203 arm_displaced_step_closure *dsc)
6205 /* Preparation: none.
6206 Insn: unmodified svc.
6207 Cleanup: pc <- insn_addr + insn_size. */
6209 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next insn. */
6211 dsc->wrote_to_pc = 1;
6213 /* Allow OS-specific code to override SVC handling. */
6214 if (dsc->u.svc.copy_svc_os)
6215 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6218 dsc->cleanup = &cleanup_svc;
6224 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6225 struct regcache *regs, arm_displaced_step_closure *dsc)
6228 if (debug_displaced)
6229 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6230 (unsigned long) insn);
6232 dsc->modinsn[0] = insn;
6234 return install_svc (gdbarch, regs, dsc);
6238 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6239 struct regcache *regs, arm_displaced_step_closure *dsc)
6242 if (debug_displaced)
6243 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6246 dsc->modinsn[0] = insn;
6248 return install_svc (gdbarch, regs, dsc);
6251 /* Copy undefined instructions. */
6254 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6255 arm_displaced_step_closure *dsc)
6257 if (debug_displaced)
6258 fprintf_unfiltered (gdb_stdlog,
6259 "displaced: copying undefined insn %.8lx\n",
6260 (unsigned long) insn);
6262 dsc->modinsn[0] = insn;
6268 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6269 arm_displaced_step_closure *dsc)
6272 if (debug_displaced)
6273 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6274 "%.4x %.4x\n", (unsigned short) insn1,
6275 (unsigned short) insn2);
6277 dsc->modinsn[0] = insn1;
6278 dsc->modinsn[1] = insn2;
6284 /* Copy unpredictable instructions. */
6287 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6288 arm_displaced_step_closure *dsc)
6290 if (debug_displaced)
6291 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6292 "%.8lx\n", (unsigned long) insn);
6294 dsc->modinsn[0] = insn;
6299 /* The decode_* functions are instruction decoding helpers. They mostly follow
6300 the presentation in the ARM ARM. */
6303 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6304 struct regcache *regs,
6305 arm_displaced_step_closure *dsc)
6307 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6308 unsigned int rn = bits (insn, 16, 19);
6310 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6311 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6312 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6313 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6314 else if ((op1 & 0x60) == 0x20)
6315 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6316 else if ((op1 & 0x71) == 0x40)
6317 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6319 else if ((op1 & 0x77) == 0x41)
6320 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6321 else if ((op1 & 0x77) == 0x45)
6322 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6323 else if ((op1 & 0x77) == 0x51)
6326 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6328 return arm_copy_unpred (gdbarch, insn, dsc);
6330 else if ((op1 & 0x77) == 0x55)
6331 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6332 else if (op1 == 0x57)
6335 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6336 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6337 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6338 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6339 default: return arm_copy_unpred (gdbarch, insn, dsc);
6341 else if ((op1 & 0x63) == 0x43)
6342 return arm_copy_unpred (gdbarch, insn, dsc);
6343 else if ((op2 & 0x1) == 0x0)
6344 switch (op1 & ~0x80)
6347 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6349 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6350 case 0x71: case 0x75:
6352 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6353 case 0x63: case 0x67: case 0x73: case 0x77:
6354 return arm_copy_unpred (gdbarch, insn, dsc);
6356 return arm_copy_undef (gdbarch, insn, dsc);
6359 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6363 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6364 struct regcache *regs,
6365 arm_displaced_step_closure *dsc)
6367 if (bit (insn, 27) == 0)
6368 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6369 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6370 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6373 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6376 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6378 case 0x4: case 0x5: case 0x6: case 0x7:
6379 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6382 switch ((insn & 0xe00000) >> 21)
6384 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6386 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6389 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6392 return arm_copy_undef (gdbarch, insn, dsc);
6397 int rn_f = (bits (insn, 16, 19) == 0xf);
6398 switch ((insn & 0xe00000) >> 21)
6401 /* ldc/ldc2 imm (undefined for rn == pc). */
6402 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6403 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6406 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6408 case 0x4: case 0x5: case 0x6: case 0x7:
6409 /* ldc/ldc2 lit (undefined for rn != pc). */
6410 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6411 : arm_copy_undef (gdbarch, insn, dsc);
6414 return arm_copy_undef (gdbarch, insn, dsc);
6419 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6422 if (bits (insn, 16, 19) == 0xf)
6424 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6426 return arm_copy_undef (gdbarch, insn, dsc);
6430 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6432 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6436 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6438 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6441 return arm_copy_undef (gdbarch, insn, dsc);
6445 /* Decode miscellaneous instructions in dp/misc encoding space. */
6448 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6449 struct regcache *regs,
6450 arm_displaced_step_closure *dsc)
6452 unsigned int op2 = bits (insn, 4, 6);
6453 unsigned int op = bits (insn, 21, 22);
6458 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6461 if (op == 0x1) /* bx. */
6462 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6464 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6466 return arm_copy_undef (gdbarch, insn, dsc);
6470 /* Not really supported. */
6471 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6473 return arm_copy_undef (gdbarch, insn, dsc);
6477 return arm_copy_bx_blx_reg (gdbarch, insn,
6478 regs, dsc); /* blx register. */
6480 return arm_copy_undef (gdbarch, insn, dsc);
6483 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6487 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6489 /* Not really supported. */
6490 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6494 return arm_copy_undef (gdbarch, insn, dsc);
6499 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6500 struct regcache *regs,
6501 arm_displaced_step_closure *dsc)
6504 switch (bits (insn, 20, 24))
6507 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6510 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6512 case 0x12: case 0x16:
6513 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6516 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6520 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6522 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6523 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6524 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6525 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6526 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6527 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6528 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6529 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6530 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6531 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6532 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6533 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6534 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6535 /* 2nd arg means "unprivileged". */
6536 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6540 /* Should be unreachable. */
6545 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6546 struct regcache *regs,
6547 arm_displaced_step_closure *dsc)
6549 int a = bit (insn, 25), b = bit (insn, 4);
6550 uint32_t op1 = bits (insn, 20, 24);
6552 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6553 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6554 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6555 else if ((!a && (op1 & 0x17) == 0x02)
6556 || (a && (op1 & 0x17) == 0x02 && !b))
6557 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6558 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6559 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6560 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6561 else if ((!a && (op1 & 0x17) == 0x03)
6562 || (a && (op1 & 0x17) == 0x03 && !b))
6563 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6564 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6565 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6566 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6567 else if ((!a && (op1 & 0x17) == 0x06)
6568 || (a && (op1 & 0x17) == 0x06 && !b))
6569 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6570 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6571 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6572 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6573 else if ((!a && (op1 & 0x17) == 0x07)
6574 || (a && (op1 & 0x17) == 0x07 && !b))
6575 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6577 /* Should be unreachable. */
6582 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6583 arm_displaced_step_closure *dsc)
6585 switch (bits (insn, 20, 24))
6587 case 0x00: case 0x01: case 0x02: case 0x03:
6588 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6590 case 0x04: case 0x05: case 0x06: case 0x07:
6591 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6593 case 0x08: case 0x09: case 0x0a: case 0x0b:
6594 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6595 return arm_copy_unmodified (gdbarch, insn,
6596 "decode/pack/unpack/saturate/reverse", dsc);
6599 if (bits (insn, 5, 7) == 0) /* op2. */
6601 if (bits (insn, 12, 15) == 0xf)
6602 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6604 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6607 return arm_copy_undef (gdbarch, insn, dsc);
6609 case 0x1a: case 0x1b:
6610 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6611 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6613 return arm_copy_undef (gdbarch, insn, dsc);
6615 case 0x1c: case 0x1d:
6616 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6618 if (bits (insn, 0, 3) == 0xf)
6619 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6621 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6624 return arm_copy_undef (gdbarch, insn, dsc);
6626 case 0x1e: case 0x1f:
6627 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6628 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6630 return arm_copy_undef (gdbarch, insn, dsc);
6633 /* Should be unreachable. */
6638 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6639 struct regcache *regs,
6640 arm_displaced_step_closure *dsc)
6643 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6645 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6649 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6650 struct regcache *regs,
6651 arm_displaced_step_closure *dsc)
6653 unsigned int opcode = bits (insn, 20, 24);
6657 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6658 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6660 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6661 case 0x12: case 0x16:
6662 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6664 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6665 case 0x13: case 0x17:
6666 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6668 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6669 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6670 /* Note: no writeback for these instructions. Bit 25 will always be
6671 zero though (via caller), so the following works OK. */
6672 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6675 /* Should be unreachable. */
6679 /* Decode shifted register instructions. */
6682 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6683 uint16_t insn2, struct regcache *regs,
6684 arm_displaced_step_closure *dsc)
6686 /* PC is only allowed to be used in the MOV instruction. */
6688 unsigned int op = bits (insn1, 5, 8);
6689 unsigned int rn = bits (insn1, 0, 3);
6691 if (op == 0x2 && rn == 0xf) /* MOV */
6692 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6694 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6695 "dp (shift reg)", dsc);
6699 /* Decode extension register load/store. Exactly the same as
6700 arm_decode_ext_reg_ld_st. */
6703 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6704 uint16_t insn2, struct regcache *regs,
6705 arm_displaced_step_closure *dsc)
6707 unsigned int opcode = bits (insn1, 4, 8);
6711 case 0x04: case 0x05:
6712 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6713 "vfp/neon vmov", dsc);
6715 case 0x08: case 0x0c: /* 01x00 */
6716 case 0x0a: case 0x0e: /* 01x10 */
6717 case 0x12: case 0x16: /* 10x10 */
6718 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6719 "vfp/neon vstm/vpush", dsc);
6721 case 0x09: case 0x0d: /* 01x01 */
6722 case 0x0b: case 0x0f: /* 01x11 */
6723 case 0x13: case 0x17: /* 10x11 */
6724 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6725 "vfp/neon vldm/vpop", dsc);
6727 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6728 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6730 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6731 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6734 /* Should be unreachable. */
6739 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6740 struct regcache *regs, arm_displaced_step_closure *dsc)
6742 unsigned int op1 = bits (insn, 20, 25);
6743 int op = bit (insn, 4);
6744 unsigned int coproc = bits (insn, 8, 11);
6746 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6747 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6748 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6749 && (coproc & 0xe) != 0xa)
6751 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6752 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6753 && (coproc & 0xe) != 0xa)
6754 /* ldc/ldc2 imm/lit. */
6755 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6756 else if ((op1 & 0x3e) == 0x00)
6757 return arm_copy_undef (gdbarch, insn, dsc);
6758 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6759 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6760 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6761 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6762 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6763 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6764 else if ((op1 & 0x30) == 0x20 && !op)
6766 if ((coproc & 0xe) == 0xa)
6767 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6769 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6771 else if ((op1 & 0x30) == 0x20 && op)
6772 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6773 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6774 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6775 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6776 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6777 else if ((op1 & 0x30) == 0x30)
6778 return arm_copy_svc (gdbarch, insn, regs, dsc);
6780 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6784 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6785 uint16_t insn2, struct regcache *regs,
6786 arm_displaced_step_closure *dsc)
6788 unsigned int coproc = bits (insn2, 8, 11);
6789 unsigned int bit_5_8 = bits (insn1, 5, 8);
6790 unsigned int bit_9 = bit (insn1, 9);
6791 unsigned int bit_4 = bit (insn1, 4);
6796 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6797 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6799 else if (bit_5_8 == 0) /* UNDEFINED. */
6800 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6803 /* coproc is 101x: SIMD/VFP, ext registers load/store. */
6804 if ((coproc & 0xe) == 0xa)
6805 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6807 else /* coproc is not 101x. */
6809 if (bit_4 == 0) /* STC/STC2. */
6810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6812 else /* LDC/LDC2 {literal, immediate}. */
6813 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6819 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6825 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6826 arm_displaced_step_closure *dsc, int rd)
6832 Preparation: Rd <- PC
6838 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6839 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6843 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6844 arm_displaced_step_closure *dsc,
6845 int rd, unsigned int imm)
6848 /* Encoding T2: ADDS Rd, #imm */
6849 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6851 install_pc_relative (gdbarch, regs, dsc, rd);
6857 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6858 struct regcache *regs,
6859 arm_displaced_step_closure *dsc)
6861 unsigned int rd = bits (insn, 8, 10);
6862 unsigned int imm8 = bits (insn, 0, 7);
6864 if (debug_displaced)
6865 fprintf_unfiltered (gdb_stdlog,
6866 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6869 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6873 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6874 uint16_t insn2, struct regcache *regs,
6875 arm_displaced_step_closure *dsc)
6877 unsigned int rd = bits (insn2, 8, 11);
6878 /* The immediate has the same encoding in ADR, ADD and SUB, so simply
6879 extract the raw immediate encoding rather than computing the immediate
6880 value. When generating the ADD or SUB instruction, the raw immediate can
6881 then be OR'd directly into the instruction template. */
6882 unsigned int imm_3_8 = insn2 & 0x70ff;
6883 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
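/* Illustrative (hypothetical) encoding: insn1 = 0xf20f, insn2 = 0x0c10 is
   ADR r12, <Align(PC,4)+16> (encoding T3).  The extraction above yields
   rd = 12, imm_i = 0 and imm_3_8 = 0x0010; OR-ing those raw fields into
   the ADD template below gives modinsn 0xf10c 0x0c10.  */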
6885 if (debug_displaced)
6886 fprintf_unfiltered (gdb_stdlog,
6887 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6888 rd, imm_i, imm_3_8, insn1, insn2);
6890 if (bit (insn1, 7)) /* ADR encoding T2 (subtract form). */
6892 /* Generate SUB Rd, Rd, #imm (encoding T3). */
6893 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6894 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6896 else /* ADR encoding T3 (add form). */
6898 /* Generate ADD Rd, Rd, #imm (encoding T3). */
6899 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6900 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6904 install_pc_relative (gdbarch, regs, dsc, rd);
6910 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
6911 struct regcache *regs,
6912 arm_displaced_step_closure *dsc)
6914 unsigned int rt = bits (insn1, 8, 10);
6916 int imm8 = (bits (insn1, 0, 7) << 2);
6922 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6924 Insn: LDR R0, [R2, R3];
6925 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
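/* For example (hypothetical encoding): insn1 = 0x4903 is LDR r1, [pc, #12],
   so rt = 1 and imm8 = 12.  With the original instruction at 0x8000, R2 is
   loaded with Align(0x8004, 4) = 0x8004 and R3 with 12, so the substituted
   LDR r0, [r2, r3] reads from 0x8010; the cleanup then moves r0 into r1.  */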
6927 if (debug_displaced)
6928 fprintf_unfiltered (gdb_stdlog,
6929 "displaced: copying thumb ldr r%d [pc #%d]\n"
6932 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6933 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6934 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6935 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6936 /* The assembler calculates the required value of the offset from the
6937 Align(PC,4) value of this instruction to the label. */
6938 pc = pc & 0xfffffffc;
6940 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6941 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6944 dsc->u.ldst.xfersize = 4;
6946 dsc->u.ldst.immed = 0;
6947 dsc->u.ldst.writeback = 0;
6948 dsc->u.ldst.restore_r4 = 0;
6950 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
6952 dsc->cleanup = &cleanup_load;
6957 /* Copy Thumb cbnz/cbz instruction. */
6960 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6961 struct regcache *regs,
6962 arm_displaced_step_closure *dsc)
6964 int non_zero = bit (insn1, 11);
6965 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6966 CORE_ADDR from = dsc->insn_addr;
6967 int rn = bits (insn1, 0, 2);
6968 int rn_val = displaced_read_reg (regs, dsc, rn);
6970 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6971 /* CBNZ and CBZ do not affect the condition flags. If the condition is
6972 true, set the cond field to INST_AL so cleanup_branch knows the branch is
6973 taken; otherwise leave it alone and cleanup_branch will do nothing. */
6974 if (dsc->u.branch.cond)
6976 dsc->u.branch.cond = INST_AL;
6977 dsc->u.branch.dest = from + 4 + imm5;
6980 dsc->u.branch.dest = from + 2;
6982 dsc->u.branch.link = 0;
6983 dsc->u.branch.exchange = 0;
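/* Worked example (hypothetical encoding): insn1 = 0xb913 is CBNZ r3 with a
   branch offset of 4.  If r3 is non-zero the branch is taken, so cond is set
   to INST_AL and dest = from + 4 + 4; if r3 is zero, dest = from + 2, i.e.
   the instruction following the CBNZ.  */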
6985 if (debug_displaced)
6986 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
6987 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
6988 rn, rn_val, insn1, dsc->u.branch.dest);
6990 dsc->modinsn[0] = THUMB_NOP;
6992 dsc->cleanup = &cleanup_branch;
6996 /* Copy Table Branch Byte/Halfword. */
6998 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
6999 uint16_t insn2, struct regcache *regs,
7000 arm_displaced_step_closure *dsc)
7002 ULONGEST rn_val, rm_val;
7003 int is_tbh = bit (insn2, 4);
7004 CORE_ADDR halfwords = 0;
7005 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7007 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7008 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7014 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7015 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7021 target_read_memory (rn_val + rm_val, buf, 1);
7022 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7025 if (debug_displaced)
7026 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7027 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7028 (unsigned int) rn_val, (unsigned int) rm_val,
7029 (unsigned int) halfwords);
7031 dsc->u.branch.cond = INST_AL;
7032 dsc->u.branch.link = 0;
7033 dsc->u.branch.exchange = 0;
7034 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
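/* For instance (hypothetical values): a TBB [rn, rm] with rn_val = 0x9000
   and rm_val = 2 reads the byte at 0x9002; if that table entry is 5, the
   branch destination is insn_addr + 4 + 2 * 5.  */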
7036 dsc->cleanup = &cleanup_branch;
7042 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7043 arm_displaced_step_closure *dsc)
7046 int val = displaced_read_reg (regs, dsc, 7);
7047 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7050 val = displaced_read_reg (regs, dsc, 8);
7051 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7054 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7059 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7060 struct regcache *regs,
7061 arm_displaced_step_closure *dsc)
7063 dsc->u.block.regmask = insn1 & 0x00ff;
7065 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7068 (1) register list is full, that is, r0-r7 are used.
7069 Prepare: tmp[0] <- r8
7071 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7072 MOV r8, r7; Move value of r7 to r8;
7073 POP {r7}; Store PC value into r7.
7075 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7077 (2) register list is not full, that is, there are N registers in the
7078 register list (excluding PC, 0 <= N <= 7).
7079 Prepare: for each i in 0 .. N, tmp[i] <- ri.
7081 POP {r0, r1, ...., rN};
7083 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7084 from tmp[] properly.
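As an illustration (hypothetical encoding): POP {r1, r3, pc} is insn1 =
0xbd0a, so the original regmask is 0x0a and N = 2.  The rewritten insn is
POP {r0, r1, r2} (0xbc07); the cleanup then sets r1 and r3 from the values
loaded into r0 and r1, writes the PC from the value loaded into r2, and
restores r0 - r2 from tmp[].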
7086 if (debug_displaced)
7087 fprintf_unfiltered (gdb_stdlog,
7088 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7089 dsc->u.block.regmask, insn1);
7091 if (dsc->u.block.regmask == 0xff)
7093 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7095 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7096 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7097 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7100 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7104 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7106 unsigned int new_regmask;
7108 for (i = 0; i < num_in_list + 1; i++)
7109 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7111 new_regmask = (1 << (num_in_list + 1)) - 1;
7113 if (debug_displaced)
7114 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7115 "{..., pc}: original reg list %.4x,"
7116 " modified list %.4x\n"),
7117 (int) dsc->u.block.regmask, new_regmask);
7119 dsc->u.block.regmask |= 0x8000;
7120 dsc->u.block.writeback = 0;
7121 dsc->u.block.cond = INST_AL;
7123 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7125 dsc->cleanup = &cleanup_block_load_pc;
7132 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7133 struct regcache *regs,
7134 arm_displaced_step_closure *dsc)
7136 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7137 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7140 /* 16-bit thumb instructions. */
7141 switch (op_bit_12_15)
7143 /* Shift (immediate), add, subtract, move and compare. */
7144 case 0: case 1: case 2: case 3:
7145 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7146 "shift/add/sub/mov/cmp",
7150 switch (op_bit_10_11)
7152 case 0: /* Data-processing */
7153 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7157 case 1: /* Special data instructions and branch and exchange. */
7159 unsigned short op = bits (insn1, 7, 9);
7160 if (op == 6 || op == 7) /* BX or BLX */
7161 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7162 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7163 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7165 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7169 default: /* LDR (literal) */
7170 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7173 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7174 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7177 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7178 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7179 else /* Generate SP-relative address */
7180 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7182 case 11: /* Misc 16-bit instructions */
7184 switch (bits (insn1, 8, 11))
7186 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7187 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7189 case 12: case 13: /* POP */
7190 if (bit (insn1, 8)) /* PC is in register list. */
7191 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7193 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7195 case 15: /* If-Then, and hints */
7196 if (bits (insn1, 0, 3))
7197 /* If-Then makes up to four following instructions conditional.
7198 The IT instruction itself is not conditional, so handle it as
7199 an ordinary unmodified instruction. */
7200 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7203 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7206 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7211 if (op_bit_10_11 < 2) /* Store multiple registers */
7212 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7213 else /* Load multiple registers */
7214 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7216 case 13: /* Conditional branch and supervisor call */
7217 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7218 err = thumb_copy_b (gdbarch, insn1, dsc);
7220 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7222 case 14: /* Unconditional branch */
7223 err = thumb_copy_b (gdbarch, insn1, dsc);
7230 internal_error (__FILE__, __LINE__,
7231 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7235 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7236 uint16_t insn1, uint16_t insn2,
7237 struct regcache *regs,
7238 arm_displaced_step_closure *dsc)
7240 int rt = bits (insn2, 12, 15);
7241 int rn = bits (insn1, 0, 3);
7242 int op1 = bits (insn1, 7, 8);
7244 switch (bits (insn1, 5, 6))
7246 case 0: /* Load byte and memory hints */
7247 if (rt == 0xf) /* PLD/PLI */
7250 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7251 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7253 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7258 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7259 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7262 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7263 "ldrb{reg, immediate}/ldrbt",
7268 case 1: /* Load halfword and memory hints. */
7269 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7270 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7271 "pld/unalloc memhint", dsc);
7275 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7278 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7282 case 2: /* Load word */
7284 int insn2_bit_8_11 = bits (insn2, 8, 11);
7287 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7288 else if (op1 == 0x1) /* Encoding T3 */
7289 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7291 else /* op1 == 0x0 */
7293 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7294 /* LDR (immediate) */
7295 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7296 dsc, bit (insn2, 8), 1);
7297 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7298 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7301 /* LDR (register) */
7302 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7308 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7315 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7316 uint16_t insn2, struct regcache *regs,
7317 arm_displaced_step_closure *dsc)
7320 unsigned short op = bit (insn2, 15);
7321 unsigned int op1 = bits (insn1, 11, 12);
7327 switch (bits (insn1, 9, 10))
7332 /* Load/store {dual, exclusive}, table branch. */
7333 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7334 && bits (insn2, 5, 7) == 0)
7335 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7338 /* PC is not allowed to be used in load/store {dual, exclusive}
7340 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7341 "load/store dual/ex", dsc);
7343 else /* load/store multiple */
7345 switch (bits (insn1, 7, 8))
7347 case 0: case 3: /* SRS, RFE */
7348 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7351 case 1: case 2: /* LDM/STM/PUSH/POP */
7352 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7359 /* Data-processing (shift register). */
7360 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7363 default: /* Coprocessor instructions. */
7364 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7369 case 2: /* op1 = 2 */
7370 if (op) /* Branch and misc control. */
7372 if (bit (insn2, 14) /* BLX/BL */
7373 || bit (insn2, 12) /* Unconditional branch */
7374 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7375 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7377 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7382 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7384 int dp_op = bits (insn1, 4, 8);
7385 int rn = bits (insn1, 0, 3);
7386 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7387 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7390 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7393 else /* Data processing (modified immediate) */
7394 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7398 case 3: /* op1 = 3 */
7399 switch (bits (insn1, 9, 10))
7403 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7405 else /* NEON Load/Store and Store single data item */
7406 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7407 "neon elt/struct load/store",
7410 case 1: /* op1 = 3, bits (9, 10) == 1 */
7411 switch (bits (insn1, 7, 8))
7413 case 0: case 1: /* Data processing (register) */
7414 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7417 case 2: /* Multiply and absolute difference */
7418 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7419 "mul/mua/diff", dsc);
7421 case 3: /* Long multiply and divide */
7422 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7427 default: /* Coprocessor instructions */
7428 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7437 internal_error (__FILE__, __LINE__,
7438 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7443 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7444 struct regcache *regs,
7445 arm_displaced_step_closure *dsc)
7447 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7449 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7451 if (debug_displaced)
7452 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7453 "at %.8lx\n", insn1, (unsigned long) from);
7456 dsc->insn_size = thumb_insn_size (insn1);
7457 if (thumb_insn_size (insn1) == 4)
7460 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7461 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7464 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7468 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7469 CORE_ADDR to, struct regcache *regs,
7470 arm_displaced_step_closure *dsc)
7473 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7476 /* Most displaced instructions use a 1-instruction scratch space, so set this
7477 here and override below if/when necessary. */
7479 dsc->insn_addr = from;
7480 dsc->scratch_base = to;
7481 dsc->cleanup = NULL;
7482 dsc->wrote_to_pc = 0;
7484 if (!displaced_in_arm_mode (regs))
7485 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7489 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7490 if (debug_displaced)
7491 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7492 "at %.8lx\n", (unsigned long) insn,
7493 (unsigned long) from);
7495 if ((insn & 0xf0000000) == 0xf0000000)
7496 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7497 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7499 case 0x0: case 0x1: case 0x2: case 0x3:
7500 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7503 case 0x4: case 0x5: case 0x6:
7504 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7508 err = arm_decode_media (gdbarch, insn, dsc);
7511 case 0x8: case 0x9: case 0xa: case 0xb:
7512 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7515 case 0xc: case 0xd: case 0xe: case 0xf:
7516 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7521 internal_error (__FILE__, __LINE__,
7522 _("arm_process_displaced_insn: Instruction decode error"));
7525 /* Actually set up the scratch space for a displaced instruction. */
7528 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7529 CORE_ADDR to, arm_displaced_step_closure *dsc)
7531 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7532 unsigned int i, len, offset;
7533 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7534 int size = dsc->is_thumb? 2 : 4;
7535 const gdb_byte *bkp_insn;
7538 /* Poke modified instruction(s). */
7539 for (i = 0; i < dsc->numinsns; i++)
7541 if (debug_displaced)
7543 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7545 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7548 fprintf_unfiltered (gdb_stdlog, "%.4x",
7549 (unsigned short)dsc->modinsn[i]);
7551 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7552 (unsigned long) to + offset);
7555 write_memory_unsigned_integer (to + offset, size,
7556 byte_order_for_code,
7561 /* Choose the correct breakpoint instruction. */
7564 bkp_insn = tdep->thumb_breakpoint;
7565 len = tdep->thumb_breakpoint_size;
7569 bkp_insn = tdep->arm_breakpoint;
7570 len = tdep->arm_breakpoint_size;
7573 /* Put breakpoint afterwards. */
7574 write_memory (to + offset, bkp_insn, len);
7576 if (debug_displaced)
7577 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7578 paddress (gdbarch, from), paddress (gdbarch, to));
7581 /* Entry point for cleaning things up after a displaced instruction has been
7585 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7586 struct displaced_step_closure *dsc_,
7587 CORE_ADDR from, CORE_ADDR to,
7588 struct regcache *regs)
7590 arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;
7593 dsc->cleanup (gdbarch, regs, dsc);
7595 if (!dsc->wrote_to_pc)
7596 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7597 dsc->insn_addr + dsc->insn_size);
7601 #include "bfd-in2.h"
7602 #include "libcoff.h"
7605 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7607 gdb_disassembler *di
7608 = static_cast<gdb_disassembler *>(info->application_data);
7609 struct gdbarch *gdbarch = di->arch ();
7611 if (arm_pc_is_thumb (gdbarch, memaddr))
7613 static asymbol *asym;
7614 static combined_entry_type ce;
7615 static struct coff_symbol_struct csym;
7616 static struct bfd fake_bfd;
7617 static bfd_target fake_target;
7619 if (csym.native == NULL)
7621 /* Create a fake symbol vector containing a Thumb symbol.
7622 This is solely so that the code in print_insn_little_arm()
7623 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7624 the presence of a Thumb symbol and switch to decoding
7625 Thumb instructions. */
7627 fake_target.flavour = bfd_target_coff_flavour;
7628 fake_bfd.xvec = &fake_target;
7629 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7631 csym.symbol.the_bfd = &fake_bfd;
7632 csym.symbol.name = "fake";
7633 asym = (asymbol *) & csym;
7636 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7637 info->symbols = &asym;
7640 info->symbols = NULL;
7642 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
7643 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise
7644 opcodes/arm-dis.c:print_insn would reset info->mach, which would trigger
7645 the assert on the mismatch between info->mach and bfd_get_mach (exec_bfd)
7646 in default_print_insn. */
7647 if (exec_bfd != NULL)
7648 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7650 return default_print_insn (memaddr, info);
7653 /* The following define instruction sequences that will cause ARM
7654 cpu's to take an undefined instruction trap. These are used to
7655 signal a breakpoint to GDB.
7657 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7658 modes. A different instruction is required for each mode. The ARM
7659 cpu's can also be big or little endian. Thus four different
7660 instructions are needed to support all cases.
7662 Note: ARMv4 defines several new instructions that will take the
7663 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7664 not in fact add the new instructions. The new undefined
7665 instructions in ARMv4 are all instructions that had no defined
7666 behaviour in earlier chips. There is no guarantee that they will
7667 raise an exception; they may instead be treated as NOPs. In practice,
7668 it may only be safe to rely on instructions matching:
7670 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7671 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7672 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7674 Even this may only be true if the condition predicate is true. The
7675 following use a condition predicate of ALWAYS so it is always TRUE.
7677 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7678 and NetBSD all use a software interrupt rather than an undefined
7679 instruction to force a trap. This can be handled by the
7680 ABI-specific code during establishment of the gdbarch vector. */
7682 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7683 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7684 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7685 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
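/* As a cross-check (little-endian example): the ARM_LE_BREAKPOINT bytes
   {0xFE,0xDE,0xFF,0xE7} form the word 0xe7ffdefe, which matches the pattern
   above: cond = 1110 (AL), bits 27-25 = 011 and bit 4 = 1.  The Thumb
   sequence 0xbe,0xbe is simply a BKPT instruction, and reads the same in
   either byte order.  */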
7687 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7688 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7689 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7690 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7692 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7695 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7697 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7698 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7700 if (arm_pc_is_thumb (gdbarch, *pcptr))
7702 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7704 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7705 check whether we are replacing a 32-bit instruction. */
7706 if (tdep->thumb2_breakpoint != NULL)
7710 if (target_read_memory (*pcptr, buf, 2) == 0)
7712 unsigned short inst1;
7714 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7715 if (thumb_insn_size (inst1) == 4)
7716 return ARM_BP_KIND_THUMB2;
7720 return ARM_BP_KIND_THUMB;
7723 return ARM_BP_KIND_ARM;
7727 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7729 static const gdb_byte *
7730 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7732 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7736 case ARM_BP_KIND_ARM:
7737 *size = tdep->arm_breakpoint_size;
7738 return tdep->arm_breakpoint;
7739 case ARM_BP_KIND_THUMB:
7740 *size = tdep->thumb_breakpoint_size;
7741 return tdep->thumb_breakpoint;
7742 case ARM_BP_KIND_THUMB2:
7743 *size = tdep->thumb2_breakpoint_size;
7744 return tdep->thumb2_breakpoint;
7746 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7750 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7753 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7754 struct regcache *regcache,
7759 /* Check the memory pointed by PC is readable. */
7760 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7762 struct arm_get_next_pcs next_pcs_ctx;
7764 arm_get_next_pcs_ctor (&next_pcs_ctx,
7765 &arm_get_next_pcs_ops,
7766 gdbarch_byte_order (gdbarch),
7767 gdbarch_byte_order_for_code (gdbarch),
7771 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7773 /* If *PCPTR is one of the possible next instructions of the current
7774 pc, use the software single-step computation and determine the
7775 Thumb mode from the destination address. */
7776 for (CORE_ADDR pc : next_pcs)
7778 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7780 if (IS_THUMB_ADDR (pc))
7782 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7783 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7786 return ARM_BP_KIND_ARM;
7791 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7794 /* Extract from an array REGBUF containing the (raw) register state a
7795 function return value of type TYPE, and copy that, in virtual
7796 format, into VALBUF. */
7799 arm_extract_return_value (struct type *type, struct regcache *regs,
7802 struct gdbarch *gdbarch = regs->arch ();
7803 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7805 if (TYPE_CODE_FLT == TYPE_CODE (type))
7807 switch (gdbarch_tdep (gdbarch)->fp_model)
7811 /* The value is in register F0 in internal format. We need to
7812 extract the raw value and then convert it to the desired
7814 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7816 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7817 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7822 case ARM_FLOAT_SOFT_FPA:
7823 case ARM_FLOAT_SOFT_VFP:
7824 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7825 not using the VFP ABI code. */
7827 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7828 if (TYPE_LENGTH (type) > 4)
7829 regs->cooked_read (ARM_A1_REGNUM + 1,
7830 valbuf + ARM_INT_REGISTER_SIZE);
7834 internal_error (__FILE__, __LINE__,
7835 _("arm_extract_return_value: "
7836 "Floating point model not supported"));
7840 else if (TYPE_CODE (type) == TYPE_CODE_INT
7841 || TYPE_CODE (type) == TYPE_CODE_CHAR
7842 || TYPE_CODE (type) == TYPE_CODE_BOOL
7843 || TYPE_CODE (type) == TYPE_CODE_PTR
7844 || TYPE_IS_REFERENCE (type)
7845 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7847 /* If the type is a plain integer, then the access is
7848 straight-forward. Otherwise we have to play around a bit
7850 int len = TYPE_LENGTH (type);
7851 int regno = ARM_A1_REGNUM;
7856 /* By using store_unsigned_integer we avoid having to do
7857 anything special for small big-endian values. */
7858 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7859 store_unsigned_integer (valbuf,
7860 (len > ARM_INT_REGISTER_SIZE
7861 ? ARM_INT_REGISTER_SIZE : len),
7863 len -= ARM_INT_REGISTER_SIZE;
7864 valbuf += ARM_INT_REGISTER_SIZE;
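/* For example, an 8-byte integer result takes two iterations of this
   loop: r0 fills the first four bytes of VALBUF and r1 the next four.  */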
7869 /* For a structure or union the behaviour is as if the value had
7870 been stored to word-aligned memory and then loaded into
7871 registers with 32-bit load instruction(s). */
7872 int len = TYPE_LENGTH (type);
7873 int regno = ARM_A1_REGNUM;
7874 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7878 regs->cooked_read (regno++, tmpbuf);
7879 memcpy (valbuf, tmpbuf,
7880 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7881 len -= ARM_INT_REGISTER_SIZE;
7882 valbuf += ARM_INT_REGISTER_SIZE;
7888 /* Will a function return an aggregate type in memory or in a
7889 register? Return 0 if an aggregate type can be returned in a
7890 register, 1 if it must be returned in memory. */
7893 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7895 enum type_code code;
7897 type = check_typedef (type);
7899 /* Simple, non-aggregate types (i.e. not including vectors and
7900 complex) are always returned in a register (or registers). */
7901 code = TYPE_CODE (type);
7902 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7903 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7906 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7908 /* Vector values should be returned using ARM registers if they
7909 are not over 16 bytes. */
7910 return (TYPE_LENGTH (type) > 16);
7913 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7915 /* The AAPCS says all aggregates not larger than a word are returned
7917 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
7926 /* All aggregate types that won't fit in a register must be returned
7928 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
7931 /* In the ARM ABI, "integer" like aggregate types are returned in
7932 registers. For an aggregate type to be integer like, its size
7933 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7934 offset of each addressable subfield must be zero. Note that bit
7935 fields are not addressable, and all addressable subfields of
7936 unions always start at offset zero.
7938 This function is based on the behaviour of GCC 2.95.1.
7939 See: gcc/arm.c: arm_return_in_memory() for details.
7941 Note: All versions of GCC before GCC 2.95.2 do not set up the
7942 parameters correctly for a function returning the following
7943 structure: struct { float f;}; This should be returned in memory,
7944 not a register. Richard Earnshaw sent me a patch, but I do not
7945 know of any way to detect if a function like the above has been
7946 compiled with the correct calling convention. */
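/* Concrete cases under the rules above (illustrative): "struct { int i; }"
   has a single non-FP field at offset zero, so it is integer like and is
   returned in a register; "struct { float f; }" contains a floating point
   field, and "struct { char a; char b; }" has an addressable field at a
   non-zero offset, so both are returned in memory.  */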
7948 /* Assume all other aggregate types can be returned in a register.
7949 Run a check for structures, unions and arrays. */
7952 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7955 /* Need to check if this struct/union is "integer" like. For
7956 this to be true, its size must be less than or equal to
7957 ARM_INT_REGISTER_SIZE and the offset of each addressable
7958 subfield must be zero. Note that bit fields are not
7959 addressable, and unions always start at offset zero. If any
7960 of the subfields is a floating point type, the struct/union
7961 cannot be an integer type. */
7963 /* For each field in the object, check:
7964 1) Is it FP? --> yes, nRc = 1;
7965 2) Is it addressable (bitpos != 0) and
7966 not packed (bitsize == 0)?
7970 for (i = 0; i < TYPE_NFIELDS (type); i++)
7972 enum type_code field_type_code;
7975 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7978 /* Is it a floating point type field? */
7979 if (field_type_code == TYPE_CODE_FLT)
7985 /* If bitpos != 0, then we have to care about it. */
7986 if (TYPE_FIELD_BITPOS (type, i) != 0)
7988 /* Bitfields are not addressable. If the field bitsize is
7989 zero, then the field is not packed. Hence it cannot be
7990 a bitfield or any other packed type. */
7991 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8004 /* Write into appropriate registers a function return value of type
8005 TYPE, given in virtual format. */
8008 arm_store_return_value (struct type *type, struct regcache *regs,
8009 const gdb_byte *valbuf)
8011 struct gdbarch *gdbarch = regs->arch ();
8012 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8014 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8016 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8018 switch (gdbarch_tdep (gdbarch)->fp_model)
8022 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8023 regs->cooked_write (ARM_F0_REGNUM, buf);
8026 case ARM_FLOAT_SOFT_FPA:
8027 case ARM_FLOAT_SOFT_VFP:
8028 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8029 not using the VFP ABI code. */
8031 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8032 if (TYPE_LENGTH (type) > 4)
8033 regs->cooked_write (ARM_A1_REGNUM + 1,
8034 valbuf + ARM_INT_REGISTER_SIZE);
8038 internal_error (__FILE__, __LINE__,
8039 _("arm_store_return_value: Floating "
8040 "point model not supported"));
8044 else if (TYPE_CODE (type) == TYPE_CODE_INT
8045 || TYPE_CODE (type) == TYPE_CODE_CHAR
8046 || TYPE_CODE (type) == TYPE_CODE_BOOL
8047 || TYPE_CODE (type) == TYPE_CODE_PTR
8048 || TYPE_IS_REFERENCE (type)
8049 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8051 if (TYPE_LENGTH (type) <= 4)
8053 /* Values of one word or less are zero/sign-extended and
8055 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8056 LONGEST val = unpack_long (type, valbuf);
8058 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8059 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8063 /* Integral values greater than one word are stored in consecutive
8064 registers starting with r0. This will always be a multiple of
8065 the register size. */
8066 int len = TYPE_LENGTH (type);
8067 int regno = ARM_A1_REGNUM;
8071 regs->cooked_write (regno++, valbuf);
8072 len -= ARM_INT_REGISTER_SIZE;
8073 valbuf += ARM_INT_REGISTER_SIZE;
8079 /* For a structure or union the behaviour is as if the value had
8080 been stored to word-aligned memory and then loaded into
8081 registers with 32-bit load instruction(s). */
8082 int len = TYPE_LENGTH (type);
8083 int regno = ARM_A1_REGNUM;
8084 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8088 memcpy (tmpbuf, valbuf,
8089 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8090 regs->cooked_write (regno++, tmpbuf);
8091 len -= ARM_INT_REGISTER_SIZE;
8092 valbuf += ARM_INT_REGISTER_SIZE;
8098 /* Handle function return values. */
8100 static enum return_value_convention
8101 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8102 struct type *valtype, struct regcache *regcache,
8103 gdb_byte *readbuf, const gdb_byte *writebuf)
8105 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8106 struct type *func_type = function ? value_type (function) : NULL;
8107 enum arm_vfp_cprc_base_type vfp_base_type;
8110 if (arm_vfp_abi_for_function (gdbarch, func_type)
8111 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8113 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8114 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8116 for (i = 0; i < vfp_base_count; i++)
8118 if (reg_char == 'q')
8121 arm_neon_quad_write (gdbarch, regcache, i,
8122 writebuf + i * unit_length);
8125 arm_neon_quad_read (gdbarch, regcache, i,
8126 readbuf + i * unit_length);
8133 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8134 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8137 regcache->cooked_write (regnum, writebuf + i * unit_length);
8139 regcache->cooked_read (regnum, readbuf + i * unit_length);
8142 return RETURN_VALUE_REGISTER_CONVENTION;
8145 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8146 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8147 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8149 if (tdep->struct_return == pcc_struct_return
8150 || arm_return_in_memory (gdbarch, valtype))
8151 return RETURN_VALUE_STRUCT_CONVENTION;
8153 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8155 if (arm_return_in_memory (gdbarch, valtype))
8156 return RETURN_VALUE_STRUCT_CONVENTION;
8160 arm_store_return_value (valtype, regcache, writebuf);
8163 arm_extract_return_value (valtype, regcache, readbuf);
8165 return RETURN_VALUE_REGISTER_CONVENTION;
8170 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8172 struct gdbarch *gdbarch = get_frame_arch (frame);
8173 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8174 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8176 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8178 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8180 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8181 ARM_INT_REGISTER_SIZE))
8184 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8187 /* A call to cmse secure entry function "foo" at "a" is modified by
8194 b) bl yyyy <__acle_se_foo>
8196 section .gnu.sgstubs:
8198 yyyy: sg // secure gateway
8199 b.w xxxx <__acle_se_foo> // original_branch_dest
8204 When control is at "b", the pc contains "yyyy" (the sg address), which is
8205 a trampoline and does not exist in the source code. This function returns the
8206 target pc "xxxx". For more details please refer to section 5.4
8207 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8208 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8209 document on www.developer.arm.com. */
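/* For instance (hypothetical addresses): if the pc is in the sg stub for
   "foo" inside ".gnu.sgstubs", the code below looks up "__acle_se_foo"; if
   that symbol is found at 0x10001234 the function returns 0x10001234,
   otherwise it returns 0.  */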
8212 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8214 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8215 char *target_name = (char *) alloca (target_len);
8216 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8218 struct bound_minimal_symbol minsym
8219 = lookup_minimal_symbol (target_name, NULL, objfile);
8221 if (minsym.minsym != nullptr)
8222 return BMSYMBOL_VALUE_ADDRESS (minsym);
8227 /* Return true when SEC points to ".gnu.sgstubs" section. */
8230 arm_is_sgstubs_section (struct obj_section *sec)
8232 return (sec != nullptr
8233 && sec->the_bfd_section != nullptr
8234 && sec->the_bfd_section->name != nullptr
8235 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8238 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8239 return the target PC. Otherwise return 0. */
8242 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8246 CORE_ADDR start_addr;
8248 /* Find the starting address and name of the function containing the PC. */
8249 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8251 /* Trampoline 'bx reg' doesn't belong to any function. Do the
8253 start_addr = arm_skip_bx_reg (frame, pc);
8254 if (start_addr != 0)
8260 /* If PC is in a Thumb call or return stub, return the address of the
8261 target PC, which is in a register. The thunk functions are called
8262 _call_via_xx, where x is the register name. The possible names
8263 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8264 functions, named __ARM_call_via_r[0-7]. */
8265 if (startswith (name, "_call_via_")
8266 || startswith (name, "__ARM_call_via_"))
8268 /* Use the name suffix to determine which register contains the
8270 static const char *table[15] =
8271 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8272 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8275 int offset = strlen (name) - 2;
8277 for (regno = 0; regno <= 14; regno++)
8278 if (strcmp (&name[offset], table[regno]) == 0)
8279 return get_frame_register_unsigned (frame, regno);
8282 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8283 non-interworking calls to foo. We could decode the stubs
8284 to find the target but it's easier to use the symbol table. */
8285 namelen = strlen (name);
8286 if (name[0] == '_' && name[1] == '_'
8287 && ((namelen > 2 + strlen ("_from_thumb")
8288 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8289 || (namelen > 2 + strlen ("_from_arm")
8290 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8293 int target_len = namelen - 2;
8294 struct bound_minimal_symbol minsym;
8295 struct objfile *objfile;
8296 struct obj_section *sec;
8298 if (name[namelen - 1] == 'b')
8299 target_len -= strlen ("_from_thumb");
8301 target_len -= strlen ("_from_arm");
8303 target_name = (char *) alloca (target_len + 1);
8304 memcpy (target_name, name + 2, target_len);
8305 target_name[target_len] = '\0';
8307 sec = find_pc_section (pc);
8308 objfile = (sec == NULL) ? NULL : sec->objfile;
8309 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8310 if (minsym.minsym != NULL)
8311 return BMSYMBOL_VALUE_ADDRESS (minsym);
8316 struct obj_section *section = find_pc_section (pc);
8318 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8319 if (arm_is_sgstubs_section (section))
8320 return arm_skip_cmse_entry (pc, name, section->objfile);
8322 return 0; /* not a stub */
8326 set_arm_command (const char *args, int from_tty)
8328 printf_unfiltered (_("\
8329 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8330 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8334 show_arm_command (const char *args, int from_tty)
8336 cmd_show_list (showarmcmdlist, from_tty, "");
8340 arm_update_current_architecture (void)
8342 struct gdbarch_info info;
8344 /* If the current architecture is not ARM, we have nothing to do. */
8345 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8348 /* Update the architecture. */
8349 gdbarch_info_init (&info);
8351 if (!gdbarch_update_p (info))
8352 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8356 set_fp_model_sfunc (const char *args, int from_tty,
8357 struct cmd_list_element *c)
8361 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8362 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8364 arm_fp_model = (enum arm_float_model) fp_model;
8368 if (fp_model == ARM_FLOAT_LAST)
8369 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8372 arm_update_current_architecture ();
8376 show_fp_model (struct ui_file *file, int from_tty,
8377 struct cmd_list_element *c, const char *value)
8379 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8381 if (arm_fp_model == ARM_FLOAT_AUTO
8382 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8383 fprintf_filtered (file, _("\
8384 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8385 fp_model_strings[tdep->fp_model]);
8387 fprintf_filtered (file, _("\
8388 The current ARM floating point model is \"%s\".\n"),
8389 fp_model_strings[arm_fp_model]);
8393 arm_set_abi (const char *args, int from_tty,
8394 struct cmd_list_element *c)
8398 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8399 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8401 arm_abi_global = (enum arm_abi_kind) arm_abi;
8405 if (arm_abi == ARM_ABI_LAST)
8406 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8409 arm_update_current_architecture ();
8413 arm_show_abi (struct ui_file *file, int from_tty,
8414 struct cmd_list_element *c, const char *value)
8416 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8418 if (arm_abi_global == ARM_ABI_AUTO
8419 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8420 fprintf_filtered (file, _("\
8421 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8422 arm_abi_strings[tdep->arm_abi]);
8424 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8429 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8430 struct cmd_list_element *c, const char *value)
8432 fprintf_filtered (file,
8433 _("The current execution mode assumed "
8434 "(when symbols are unavailable) is \"%s\".\n"),
8435 arm_fallback_mode_string);
8439 arm_show_force_mode (struct ui_file *file, int from_tty,
8440 struct cmd_list_element *c, const char *value)
8442 fprintf_filtered (file,
8443 _("The current execution mode assumed "
8444 "(even when symbols are available) is \"%s\".\n"),
8445 arm_force_mode_string);
8448 /* If the user changes the register disassembly style used for info
8449 register and other commands, we have to also switch the style used
8450 in opcodes for disassembly output. This function is run in the "set
8451 arm disassembly" command, and does that. */
8454 set_disassembly_style_sfunc (const char *args, int from_tty,
8455 struct cmd_list_element *c)
8457 /* Convert the short style name into the long style name (eg, reg-names-*)
8458 before calling the generic set_disassembler_options() function. */
8459 std::string long_name = std::string ("reg-names-") + disassembly_style;
8460 set_disassembler_options (&long_name[0]);
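/* E.g. a style of "std" is turned into the disassembler option
   "reg-names-std" before being handed to set_disassembler_options.  */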
8464 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8465 struct cmd_list_element *c, const char *value)
8467 struct gdbarch *gdbarch = get_current_arch ();
8468 char *options = get_disassembler_options (gdbarch);
8469 const char *style = "";
8473 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8474 if (CONST_STRNEQ (opt, "reg-names-"))
8476 style = &opt[strlen ("reg-names-")];
8477 len = strcspn (style, ",");
8480 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8483 /* Return the ARM register name corresponding to register I. */
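/* For example, when VFP pseudo registers are in use, the first pseudo
   register (number NUM_REGS) is named "s0", and with NEON pseudos the
   pseudo register NUM_REGS + 32 is named "q0". */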
8485 arm_register_name (struct gdbarch *gdbarch, int i)
8487 const int num_regs = gdbarch_num_regs (gdbarch);
8489 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8490 && i >= num_regs && i < num_regs + 32)
8492 static const char *const vfp_pseudo_names[] = {
8493 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8494 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8495 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8496 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8499 return vfp_pseudo_names[i - num_regs];
8502 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8503 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8505 static const char *const neon_pseudo_names[] = {
8506 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8507 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8510 return neon_pseudo_names[i - num_regs - 32];
8513 if (i >= ARRAY_SIZE (arm_register_names))
8514 /* These registers are only supported on targets which supply
8515 an XML description. */
8518 return arm_register_names[i];
8521 /* Test whether the coff symbol specific value corresponds to a Thumb
8525 coff_sym_is_thumb (int val)
8527 return (val == C_THUMBEXT
8528 || val == C_THUMBSTAT
8529 || val == C_THUMBEXTFUNC
8530 || val == C_THUMBSTATFUNC
8531 || val == C_THUMBLABEL);
8534 /* arm_coff_make_msymbol_special()
8535 arm_elf_make_msymbol_special()
8537 These functions test whether the COFF or ELF symbol corresponds to
8538 an address in thumb code, and set a "special" bit in a minimal
8539 symbol to indicate that it does. */
8542 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8544 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8546 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8547 == ST_BRANCH_TO_THUMB)
8548 MSYMBOL_SET_SPECIAL (msym);
8552 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8554 if (coff_sym_is_thumb (val))
8555 MSYMBOL_SET_SPECIAL (msym);
8559 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8562 const char *name = bfd_asymbol_name (sym);
8563 struct arm_per_objfile *data;
8564 struct arm_mapping_symbol new_map_sym;
8566 gdb_assert (name[0] == '$');
8567 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8570 data = arm_objfile_data_key.get (objfile);
8572 data = arm_objfile_data_key.emplace (objfile,
8573 objfile->obfd->section_count);
8574 arm_mapping_symbol_vec &map
8575 = data->section_maps[bfd_asymbol_section (sym)->index];
8577 new_map_sym.value = sym->value;
8578 new_map_sym.type = name[1];
8580 /* Insert at the end; the vector will be sorted on first use. */
8581 map.push_back (new_map_sym);
8585 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8587 struct gdbarch *gdbarch = regcache->arch ();
8588 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8590 /* If necessary, set the T bit. */
8593 ULONGEST val, t_bit;
8594 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8595 t_bit = arm_psr_thumb_bit (gdbarch);
8596 if (arm_pc_is_thumb (gdbarch, pc))
8597 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8600 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8605 /* Read the contents of a NEON quad register, by reading from two
8606 double registers. This is used to implement the quad pseudo
8607 registers, and for argument passing in case the quad registers are
8608 missing; vectors are passed in quad registers when using the VFP
8609 ABI, even if a NEON unit is not present. REGNUM is the index of
8610 the quad register, in [0, 15]. */
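/* For example, on a little-endian target q1 is read from d2 (the least
   significant half, placed at buf[0..7]) and d3 (placed at buf[8..15]);
   on a big-endian target the two halves are swapped. */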
8612 static enum register_status
8613 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8614 int regnum, gdb_byte *buf)
8617 gdb_byte reg_buf[8];
8618 int offset, double_regnum;
8619 enum register_status status;
8621 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8622 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8625 /* d0 is always the least significant half of q0. */
8626 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8631 status = regcache->raw_read (double_regnum, reg_buf);
8632 if (status != REG_VALID)
8634 memcpy (buf + offset, reg_buf, 8);
8636 offset = 8 - offset;
8637 status = regcache->raw_read (double_regnum + 1, reg_buf);
8638 if (status != REG_VALID)
8640 memcpy (buf + offset, reg_buf, 8);
8645 static enum register_status
8646 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8647 int regnum, gdb_byte *buf)
8649 const int num_regs = gdbarch_num_regs (gdbarch);
8651 gdb_byte reg_buf[8];
8652 int offset, double_regnum;
8654 gdb_assert (regnum >= num_regs);
8657 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8658 /* Quad-precision register. */
8659 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8662 enum register_status status;
8664 /* Single-precision register. */
8665 gdb_assert (regnum < 32);
8667 /* s0 is always the least significant half of d0. */
8668 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8669 offset = (regnum & 1) ? 0 : 4;
8671 offset = (regnum & 1) ? 4 : 0;
8673 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8674 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8677 status = regcache->raw_read (double_regnum, reg_buf);
8678 if (status == REG_VALID)
8679 memcpy (buf, reg_buf + offset, 4);
8684 /* Store the contents of BUF to a NEON quad register, by writing to
8685 two double registers. This is used to implement the quad pseudo
8686 registers, and for argument passing in case the quad registers are
8687 missing; vectors are passed in quad registers when using the VFP
8688 ABI, even if a NEON unit is not present. REGNUM is the index
8689 of the quad register, in [0, 15]. */
8692 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8693 int regnum, const gdb_byte *buf)
8696 int offset, double_regnum;
8698 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8699 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8702 /* d0 is always the least significant half of q0. */
8703 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8708 regcache->raw_write (double_regnum, buf + offset);
8709 offset = 8 - offset;
8710 regcache->raw_write (double_regnum + 1, buf + offset);
8714 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8715 int regnum, const gdb_byte *buf)
8717 const int num_regs = gdbarch_num_regs (gdbarch);
8719 gdb_byte reg_buf[8];
8720 int offset, double_regnum;
8722 gdb_assert (regnum >= num_regs);
8725 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8726 /* Quad-precision register. */
8727 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8730 /* Single-precision register. */
8731 gdb_assert (regnum < 32);
8733 /* s0 is always the least significant half of d0. */
8734 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8735 offset = (regnum & 1) ? 0 : 4;
8737 offset = (regnum & 1) ? 4 : 0;
8739 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8740 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8743 regcache->raw_read (double_regnum, reg_buf);
8744 memcpy (reg_buf + offset, buf, 4);
8745 regcache->raw_write (double_regnum, reg_buf);
8749 static struct value *
8750 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8752 const int *reg_p = (const int *) baton;
8753 return value_of_register (*reg_p, frame);
8756 static enum gdb_osabi
8757 arm_elf_osabi_sniffer (bfd *abfd)
8759 unsigned int elfosabi;
8760 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8762 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8764 if (elfosabi == ELFOSABI_ARM)
8765 /* GNU tools use this value. Check note sections in this case,
8767 bfd_map_over_sections (abfd,
8768 generic_elf_osabi_sniff_abi_tag_sections,
8771 /* Anything else will be handled by the generic ELF sniffer. */
8776 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8777 struct reggroup *group)
8779 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8780 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8781 all_reggroup, of course. */
8782 if (regnum == ARM_FPS_REGNUM)
8783 return (group == float_reggroup
8784 || group == save_reggroup
8785 || group == restore_reggroup
8786 || group == all_reggroup);
8788 return default_register_reggroup_p (gdbarch, regnum, group);
8791 /* For backward-compatibility we allow two 'g' packet lengths with
8792 the remote protocol depending on whether FPA registers are
8793 supplied. M-profile targets do not have FPA registers, but some
8794 stubs already exist in the wild which use a 'g' packet which
8795 supplies them albeit with dummy values. The packet format which
8796 includes FPA registers should be considered deprecated for
8797 M-profile targets. */
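/* Three guesses are registered below: the deprecated core + FPA layout,
   the plain core layout, and the core + VFP (D16) layout used by
   e.g. M4F stubs. */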
8800 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8802 if (gdbarch_tdep (gdbarch)->is_m)
8804 const target_desc *tdesc;
8806 /* If we know from the executable this is an M-profile target,
8807 cater for remote targets whose register set layout is the
8808 same as the FPA layout. */
8809 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8810 register_remote_g_packet_guess (gdbarch,
8811 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8814 /* The regular M-profile layout. */
8815 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8816 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8819 /* M-profile plus M4F VFP. */
8820 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8821 register_remote_g_packet_guess (gdbarch,
8822 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8826 /* Otherwise we don't have a useful guess. */
8829 /* Implement the code_of_frame_writable gdbarch method. */
8832 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8834 if (gdbarch_tdep (gdbarch)->is_m
8835 && get_frame_type (frame) == SIGTRAMP_FRAME)
8837 /* M-profile exception frames return to some magic PCs, which
8838 aren't writable at all. */
8845 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8846 to be postfixed by a version (e.g. armv7hl). */
8849 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8851 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8852 return "arm(v[^- ]*)?";
8853 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8856 /* Initialize the current architecture based on INFO. If possible,
8857 re-use an architecture from ARCHES, which is a list of
8858 architectures already created during this debugging session.
8860 Called e.g. at program startup, when reading a core file, and when
8861 reading a binary file. */
8863 static struct gdbarch *
8864 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8866 struct gdbarch_tdep *tdep;
8867 struct gdbarch *gdbarch;
8868 struct gdbarch_list *best_arch;
8869 enum arm_abi_kind arm_abi = arm_abi_global;
8870 enum arm_float_model fp_model = arm_fp_model;
8871 struct tdesc_arch_data *tdesc_data = NULL;
8873 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8874 int have_wmmx_registers = 0;
8876 int have_fpa_registers = 1;
8877 const struct target_desc *tdesc = info.target_desc;
8879 /* If we have an object to base this architecture on, try to determine
8882 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8884 int ei_osabi, e_flags;
8886 switch (bfd_get_flavour (info.abfd))
8888 case bfd_target_coff_flavour:
8889 /* Assume it's an old APCS-style ABI. */
8891 arm_abi = ARM_ABI_APCS;
8894 case bfd_target_elf_flavour:
8895 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8896 e_flags = elf_elfheader (info.abfd)->e_flags;
8898 if (ei_osabi == ELFOSABI_ARM)
8900 /* GNU tools used to use this value, but do not for EABI
8901 objects. There's nowhere to tag an EABI version
8902 anyway, so assume APCS. */
8903 arm_abi = ARM_ABI_APCS;
8905 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8907 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8911 case EF_ARM_EABI_UNKNOWN:
8912 /* Assume GNU tools. */
8913 arm_abi = ARM_ABI_APCS;
8916 case EF_ARM_EABI_VER4:
8917 case EF_ARM_EABI_VER5:
8918 arm_abi = ARM_ABI_AAPCS;
8919 /* EABI binaries default to VFP float ordering.
8920 They may also contain build attributes that can
8921 be used to identify if the VFP argument-passing
8923 if (fp_model == ARM_FLOAT_AUTO)
8926 switch (bfd_elf_get_obj_attr_int (info.abfd,
8930 case AEABI_VFP_args_base:
8931 /* "The user intended FP parameter/result
8932 passing to conform to AAPCS, base
8934 fp_model = ARM_FLOAT_SOFT_VFP;
8936 case AEABI_VFP_args_vfp:
8937 /* "The user intended FP parameter/result
8938 passing to conform to AAPCS, VFP
8940 fp_model = ARM_FLOAT_VFP;
8942 case AEABI_VFP_args_toolchain:
8943 /* "The user intended FP parameter/result
8944 passing to conform to tool chain-specific
8945 conventions" - we don't know any such
8946 conventions, so leave it as "auto". */
8948 case AEABI_VFP_args_compatible:
8949 /* "Code is compatible with both the base
8950 and VFP variants; the user did not permit
8951 non-variadic functions to pass FP
8952 parameters/results" - leave it as
8956 /* Attribute value not mentioned in the
8957 November 2012 ABI, so leave it as
8962 fp_model = ARM_FLOAT_SOFT_VFP;
8968 /* Leave it as "auto". */
8969 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8974 /* Detect M-profile programs. This only works if the
8975 executable file includes build attributes; GCC does
8976 copy them to the executable, but e.g. RealView does
8979 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8982 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8983 Tag_CPU_arch_profile);
8985 /* GCC specifies the profile for v6-M; RealView only
8986 specifies the profile for architectures starting with
8987 V7 (as opposed to architectures with a tag
8988 numerically greater than TAG_CPU_ARCH_V7). */
8989 if (!tdesc_has_registers (tdesc)
8990 && (attr_arch == TAG_CPU_ARCH_V6_M
8991 || attr_arch == TAG_CPU_ARCH_V6S_M
8992 || attr_profile == 'M'))
8997 if (fp_model == ARM_FLOAT_AUTO)
8999 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9002 /* Leave it as "auto". Strictly speaking this case
9003 means FPA, but almost nobody uses that now, and
9004 many toolchains fail to set the appropriate bits
9005 for the floating-point model they use. */
9007 case EF_ARM_SOFT_FLOAT:
9008 fp_model = ARM_FLOAT_SOFT_FPA;
9010 case EF_ARM_VFP_FLOAT:
9011 fp_model = ARM_FLOAT_VFP;
9013 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9014 fp_model = ARM_FLOAT_SOFT_VFP;
9019 if (e_flags & EF_ARM_BE8)
9020 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9025 /* Leave it as "auto". */
9030 /* Check any target description for validity. */
9031 if (tdesc_has_registers (tdesc))
9033 /* For most registers we require GDB's default names; but also allow
9034 the numeric names for sp / lr / pc, as a convenience. */
9035 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9036 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9037 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9039 const struct tdesc_feature *feature;
9042 feature = tdesc_find_feature (tdesc,
9043 "org.gnu.gdb.arm.core");
9044 if (feature == NULL)
9046 feature = tdesc_find_feature (tdesc,
9047 "org.gnu.gdb.arm.m-profile");
9048 if (feature == NULL)
9054 tdesc_data = tdesc_data_alloc ();
9057 for (i = 0; i < ARM_SP_REGNUM; i++)
9058 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9059 arm_register_names[i]);
9060 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9063 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9066 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9070 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9071 ARM_PS_REGNUM, "xpsr");
9073 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9074 ARM_PS_REGNUM, "cpsr");
9078 tdesc_data_cleanup (tdesc_data);
9082 feature = tdesc_find_feature (tdesc,
9083 "org.gnu.gdb.arm.fpa");
9084 if (feature != NULL)
9087 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9088 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9089 arm_register_names[i]);
9092 tdesc_data_cleanup (tdesc_data);
9097 have_fpa_registers = 0;
9099 feature = tdesc_find_feature (tdesc,
9100 "org.gnu.gdb.xscale.iwmmxt");
9101 if (feature != NULL)
9103 static const char *const iwmmxt_names[] = {
9104 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9105 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9106 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9107 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9111 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9113 &= tdesc_numbered_register (feature, tdesc_data, i,
9114 iwmmxt_names[i - ARM_WR0_REGNUM]);
9116 /* Check for the control registers, but do not fail if they
9118 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9119 tdesc_numbered_register (feature, tdesc_data, i,
9120 iwmmxt_names[i - ARM_WR0_REGNUM]);
9122 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9124 &= tdesc_numbered_register (feature, tdesc_data, i,
9125 iwmmxt_names[i - ARM_WR0_REGNUM]);
9129 tdesc_data_cleanup (tdesc_data);
9133 have_wmmx_registers = 1;
9136 /* If we have a VFP unit, check whether the single precision registers
9137 are present. If not, then we will synthesize them as pseudo
9139 feature = tdesc_find_feature (tdesc,
9140 "org.gnu.gdb.arm.vfp");
9141 if (feature != NULL)
9143 static const char *const vfp_double_names[] = {
9144 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9145 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9146 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9147 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9150 /* Require the double precision registers. There must be either
9153 for (i = 0; i < 32; i++)
9155 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9157 vfp_double_names[i]);
9161 if (!valid_p && i == 16)
9164 /* Also require FPSCR. */
9165 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9166 ARM_FPSCR_REGNUM, "fpscr");
9169 tdesc_data_cleanup (tdesc_data);
9173 if (tdesc_unnumbered_register (feature, "s0") == 0)
9174 have_vfp_pseudos = 1;
9176 vfp_register_count = i;
9178 /* If we have VFP, also check for NEON. The architecture allows
9179 NEON without VFP (integer vector operations only), but GDB
9180 does not support that. */
9181 feature = tdesc_find_feature (tdesc,
9182 "org.gnu.gdb.arm.neon");
9183 if (feature != NULL)
9185 /* NEON requires 32 double-precision registers. */
9188 tdesc_data_cleanup (tdesc_data);
9192 /* If there are quad registers defined by the stub, use
9193 their type; otherwise (normally) provide them with
9194 the default type. */
9195 if (tdesc_unnumbered_register (feature, "q0") == 0)
9196 have_neon_pseudos = 1;
9203 /* If there is already a candidate, use it. */
9204 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9206 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9208 if (arm_abi != ARM_ABI_AUTO
9209 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9212 if (fp_model != ARM_FLOAT_AUTO
9213 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9216 /* There are various other properties in tdep that we do not
9217 need to check here: those derived from a target description,
9218 since gdbarches with a different target description are
9219 automatically disqualified. */
9221 /* Do check is_m, though, since it might come from the binary. */
9222 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9225 /* Found a match. */
9229 if (best_arch != NULL)
9231 if (tdesc_data != NULL)
9232 tdesc_data_cleanup (tdesc_data);
9233 return best_arch->gdbarch;
9236 tdep = XCNEW (struct gdbarch_tdep);
9237 gdbarch = gdbarch_alloc (&info, tdep);
9239 /* Record additional information about the architecture we are defining.
9240 These are gdbarch discriminators, like the OSABI. */
9241 tdep->arm_abi = arm_abi;
9242 tdep->fp_model = fp_model;
9244 tdep->have_fpa_registers = have_fpa_registers;
9245 tdep->have_wmmx_registers = have_wmmx_registers;
9246 gdb_assert (vfp_register_count == 0
9247 || vfp_register_count == 16
9248 || vfp_register_count == 32);
9249 tdep->vfp_register_count = vfp_register_count;
9250 tdep->have_vfp_pseudos = have_vfp_pseudos;
9251 tdep->have_neon_pseudos = have_neon_pseudos;
9252 tdep->have_neon = have_neon;
9254 arm_register_g_packet_guesses (gdbarch);
9257 switch (info.byte_order_for_code)
9259 case BFD_ENDIAN_BIG:
9260 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9261 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9262 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9263 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9267 case BFD_ENDIAN_LITTLE:
9268 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9269 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9270 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9271 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9276 internal_error (__FILE__, __LINE__,
9277 _("arm_gdbarch_init: bad byte order for float format"));
9280 /* On ARM targets char defaults to unsigned. */
9281 set_gdbarch_char_signed (gdbarch, 0);
9283 /* wchar_t is unsigned under the AAPCS. */
9284 if (tdep->arm_abi == ARM_ABI_AAPCS)
9285 set_gdbarch_wchar_signed (gdbarch, 0);
9287 set_gdbarch_wchar_signed (gdbarch, 1);
9289 /* Compute type alignment. */
9290 set_gdbarch_type_align (gdbarch, arm_type_align);
9292 /* Note: for displaced stepping, this includes the breakpoint, and one word
9293 of additional scratch space. This setting isn't used for anything beside
9294 displaced stepping at present. */
9295 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9297 /* This should be low enough for everything. */
9298 tdep->lowest_pc = 0x20;
9299 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9301 /* The default, for both APCS and AAPCS, is to return small
9302 structures in registers. */
9303 tdep->struct_return = reg_struct_return;
9305 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9306 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9309 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9311 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9313 frame_base_set_default (gdbarch, &arm_normal_base);
9315 /* Address manipulation. */
9316 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9318 /* Advance PC across function entry code. */
9319 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9321 /* Detect whether PC is at a point where the stack has been destroyed. */
9322 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9324 /* Skip trampolines. */
9325 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9327 /* The stack grows downward. */
9328 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9330 /* Breakpoint manipulation. */
9331 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9332 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9333 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9334 arm_breakpoint_kind_from_current_state);
9336 /* Information about registers, etc. */
9337 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9338 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9339 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9340 set_gdbarch_register_type (gdbarch, arm_register_type);
9341 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9343 /* This "info float" is FPA-specific. Use the generic version if we
9345 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9346 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9348 /* Internal <-> external register number maps. */
9349 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9350 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9352 set_gdbarch_register_name (gdbarch, arm_register_name);
9354 /* Returning results. */
9355 set_gdbarch_return_value (gdbarch, arm_return_value);
9358 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9360 /* Minsymbol frobbing. */
9361 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9362 set_gdbarch_coff_make_msymbol_special (gdbarch,
9363 arm_coff_make_msymbol_special);
9364 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9366 /* Thumb-2 IT block support. */
9367 set_gdbarch_adjust_breakpoint_address (gdbarch,
9368 arm_adjust_breakpoint_address);
9370 /* Virtual tables. */
9371 set_gdbarch_vbit_in_delta (gdbarch, 1);
9373 /* Hook in the ABI-specific overrides, if they have been registered. */
9374 gdbarch_init_osabi (info, gdbarch);
9376 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9378 /* Add some default predicates. */
9380 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9381 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9382 dwarf2_append_unwinders (gdbarch);
9383 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9384 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9385 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9387 /* Now we have tuned the configuration, set a few final things,
9388 based on what the OS ABI has told us. */
9390 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9391 binaries are always marked. */
9392 if (tdep->arm_abi == ARM_ABI_AUTO)
9393 tdep->arm_abi = ARM_ABI_APCS;
9395 /* Watchpoints are not steppable. */
9396 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9398 /* We used to default to FPA for generic ARM, but almost nobody
9399 uses that now, and we now provide a way for the user to force
9400 the model. So default to the most useful variant. */
9401 if (tdep->fp_model == ARM_FLOAT_AUTO)
9402 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9404 if (tdep->jb_pc >= 0)
9405 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9407 /* Floating point sizes and format. */
9408 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9409 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9411 set_gdbarch_double_format
9412 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9413 set_gdbarch_long_double_format
9414 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9418 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9419 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9422 if (have_vfp_pseudos)
9424 /* NOTE: These are the only pseudo registers used by
9425 the ARM target at the moment. If more are added, a
9426 little more care in numbering will be needed. */
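/* s0-s31 occupy pseudo numbers 0-31; when NEON pseudos are present,
   q0-q15 follow at 32-47, for 48 pseudo registers in total. */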
9428 int num_pseudos = 32;
9429 if (have_neon_pseudos)
9431 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9432 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9433 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9438 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9440 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9442 /* Override tdesc_register_type to adjust the types of VFP
9443 registers for NEON. */
9444 set_gdbarch_register_type (gdbarch, arm_register_type);
9447 /* Add standard register aliases. We add aliases even for those
9448 names which are used by the current architecture - it's simpler,
9449 and does no harm, since nothing ever lists user registers. */
9450 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9451 user_reg_add (gdbarch, arm_register_aliases[i].name,
9452 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9454 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9455 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9457 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9463 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9465 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9470 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9471 (unsigned long) tdep->lowest_pc);
9477 static void arm_record_test (void);
9482 _initialize_arm_tdep (void)
9486 char regdesc[1024], *rdptr = regdesc;
9487 size_t rest = sizeof (regdesc);
9489 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9491 /* Add ourselves to objfile event chain. */
9492 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9494 /* Register an ELF OS ABI sniffer for ARM binaries. */
9495 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9496 bfd_target_elf_flavour,
9497 arm_elf_osabi_sniffer);
9499 /* Add root prefix command for all "set arm"/"show arm" commands. */
9500 add_prefix_cmd ("arm", no_class, set_arm_command,
9501 _("Various ARM-specific commands."),
9502 &setarmcmdlist, "set arm ", 0, &setlist);
9504 add_prefix_cmd ("arm", no_class, show_arm_command,
9505 _("Various ARM-specific commands."),
9506 &showarmcmdlist, "show arm ", 0, &showlist);
9509 arm_disassembler_options = xstrdup ("reg-names-std");
9510 const disasm_options_t *disasm_options
9511 = &disassembler_options_arm ()->options;
9512 int num_disassembly_styles = 0;
9513 for (i = 0; disasm_options->name[i] != NULL; i++)
9514 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9515 num_disassembly_styles++;
9517 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9518 valid_disassembly_styles = XNEWVEC (const char *,
9519 num_disassembly_styles + 1);
9520 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9521 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9523 size_t offset = strlen ("reg-names-");
9524 const char *style = disasm_options->name[i];
9525 valid_disassembly_styles[j++] = &style[offset];
9526 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9527 disasm_options->description[i]);
9531 /* Mark the end of valid options. */
9532 valid_disassembly_styles[num_disassembly_styles] = NULL;
9534 /* Create the help text. */
9535 std::string helptext = string_printf ("%s%s%s",
9536 _("The valid values are:\n"),
9538 _("The default is \"std\"."));
9540 add_setshow_enum_cmd("disassembler", no_class,
9541 valid_disassembly_styles, &disassembly_style,
9542 _("Set the disassembly style."),
9543 _("Show the disassembly style."),
9545 set_disassembly_style_sfunc,
9546 show_disassembly_style_sfunc,
9547 &setarmcmdlist, &showarmcmdlist);
9549 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9550 _("Set usage of ARM 32-bit mode."),
9551 _("Show usage of ARM 32-bit mode."),
9552 _("When off, a 26-bit PC will be used."),
9554 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9556 &setarmcmdlist, &showarmcmdlist);
9558 /* Add a command to allow the user to force the FPU model. */
9559 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9560 _("Set the floating point type."),
9561 _("Show the floating point type."),
9562 _("auto - Determine the FP typefrom the OS-ABI.\n\
9563 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9564 fpa - FPA co-processor (GCC compiled).\n\
9565 softvfp - Software FP with pure-endian doubles.\n\
9566 vfp - VFP co-processor."),
9567 set_fp_model_sfunc, show_fp_model,
9568 &setarmcmdlist, &showarmcmdlist);
9570 /* Add a command to allow the user to force the ABI. */
9571 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9574 NULL, arm_set_abi, arm_show_abi,
9575 &setarmcmdlist, &showarmcmdlist);
9577 /* Add two commands to allow the user to force the assumed
9579 add_setshow_enum_cmd ("fallback-mode", class_support,
9580 arm_mode_strings, &arm_fallback_mode_string,
9581 _("Set the mode assumed when symbols are unavailable."),
9582 _("Show the mode assumed when symbols are unavailable."),
9583 NULL, NULL, arm_show_fallback_mode,
9584 &setarmcmdlist, &showarmcmdlist);
9585 add_setshow_enum_cmd ("force-mode", class_support,
9586 arm_mode_strings, &arm_force_mode_string,
9587 _("Set the mode assumed even when symbols are available."),
9588 _("Show the mode assumed even when symbols are available."),
9589 NULL, NULL, arm_show_force_mode,
9590 &setarmcmdlist, &showarmcmdlist);
9592 /* Debugging flag. */
9593 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9594 _("Set ARM debugging."),
9595 _("Show ARM debugging."),
9596 _("When on, arm-specific debugging is enabled."),
9598 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9599 &setdebuglist, &showdebuglist);
9602 selftests::register_test ("arm-record", selftests::arm_record_test);
9607 /* ARM-reversible process record data structures. */
9609 #define ARM_INSN_SIZE_BYTES 4
9610 #define THUMB_INSN_SIZE_BYTES 2
9611 #define THUMB2_INSN_SIZE_BYTES 4
9614 /* Position of the bit within a 32-bit ARM instruction
9615 that defines whether the instruction is a load or store. */
9616 #define INSN_S_L_BIT_NUM 20
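/* For example, in a word load/store instruction, bit 20 set means LDR
   (load) and bit 20 clear means STR (store). */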
9618 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9621 unsigned int reg_len = LENGTH; \
9624 REGS = XNEWVEC (uint32_t, reg_len); \
9625 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9630 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9633 unsigned int mem_len = LENGTH; \
9636 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9637 memcpy(&MEMS->len, &RECORD_BUF[0], \
9638 sizeof(struct arm_mem_r) * LENGTH); \
9643 /* Checks whether the insn is already recorded (nonzero) or yet to be decoded (zero); a boolean expression. */
9644 #define INSN_RECORDED(ARM_RECORD) \
9645 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9647 /* ARM memory record structure. */
9650 uint32_t len; /* Record length. */
9651 uint32_t addr; /* Memory address. */
9654 /* An ARM instruction record contains the opcode and execution state of
9655 the current insn (filled in before entry to decode_insn()), and the
9656 list of to-be-modified registers and memory
9657 blocks (filled in on return from decode_insn()). */
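/* For example, after decoding "strh r0, [r1, #4]" the record holds one
   memory entry (two bytes at the address in r1 plus 4) and no register
   entries, since that form has no writeback. */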
9659 typedef struct insn_decode_record_t
9661 struct gdbarch *gdbarch;
9662 struct regcache *regcache;
9663 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9664 uint32_t arm_insn; /* Should accommodate thumb. */
9665 uint32_t cond; /* Condition code. */
9666 uint32_t opcode; /* Insn opcode. */
9667 uint32_t decode; /* Insn decode bits. */
9668 uint32_t mem_rec_count; /* No of mem records. */
9669 uint32_t reg_rec_count; /* No of reg records. */
9670 uint32_t *arm_regs; /* Registers to be saved for this record. */
9671 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9672 } insn_decode_record;
9675 /* Checks ARM SBZ and SBO mandatory fields. */
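/* SBO stands for "should be one" and SBZ for "should be zero"; the SBO
   argument selects whether the LEN bits starting at BIT_NUM are checked
   against all-ones or all-zeros. */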
9678 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9680 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9699 enum arm_record_result
9701 ARM_RECORD_SUCCESS = 0,
9702 ARM_RECORD_FAILURE = 1
9709 } arm_record_strx_t;
9720 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9721 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9724 struct regcache *reg_cache = arm_insn_r->regcache;
9725 ULONGEST u_regval[2]= {0};
9727 uint32_t reg_src1 = 0, reg_src2 = 0;
9728 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9730 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9731 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9733 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9735 /* 1) Handle misc store, immediate offset. */
9736 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9737 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9738 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9739 regcache_raw_read_unsigned (reg_cache, reg_src1,
9741 if (ARM_PC_REGNUM == reg_src1)
9743 /* If R15 was used as Rn, the value read is the current PC+8. */
9744 u_regval[0] = u_regval[0] + 8;
9746 offset_8 = (immed_high << 4) | immed_low;
9747 /* Calculate target store address. */
9748 if (14 == arm_insn_r->opcode)
9750 tgt_mem_addr = u_regval[0] + offset_8;
9754 tgt_mem_addr = u_regval[0] - offset_8;
9756 if (ARM_RECORD_STRH == str_type)
9758 record_buf_mem[0] = 2;
9759 record_buf_mem[1] = tgt_mem_addr;
9760 arm_insn_r->mem_rec_count = 1;
9762 else if (ARM_RECORD_STRD == str_type)
9764 record_buf_mem[0] = 4;
9765 record_buf_mem[1] = tgt_mem_addr;
9766 record_buf_mem[2] = 4;
9767 record_buf_mem[3] = tgt_mem_addr + 4;
9768 arm_insn_r->mem_rec_count = 2;
9771 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9773 /* 2) Store, register offset. */
9775 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9777 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9778 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9779 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9782 /* If R15 was used as Rn, the value read is the current PC+8. */
9783 u_regval[0] = u_regval[0] + 8;
9785 /* Calculate target store address, Rn +/- Rm, register offset. */
9786 if (12 == arm_insn_r->opcode)
9788 tgt_mem_addr = u_regval[0] + u_regval[1];
9792 tgt_mem_addr = u_regval[1] - u_regval[0];
9794 if (ARM_RECORD_STRH == str_type)
9796 record_buf_mem[0] = 2;
9797 record_buf_mem[1] = tgt_mem_addr;
9798 arm_insn_r->mem_rec_count = 1;
9800 else if (ARM_RECORD_STRD == str_type)
9802 record_buf_mem[0] = 4;
9803 record_buf_mem[1] = tgt_mem_addr;
9804 record_buf_mem[2] = 4;
9805 record_buf_mem[3] = tgt_mem_addr + 4;
9806 arm_insn_r->mem_rec_count = 2;
9809 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9810 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9812 /* 3) Store, immediate pre-indexed. */
9813 /* 5) Store, immediate post-indexed. */
9814 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9815 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9816 offset_8 = (immed_high << 4) | immed_low;
9817 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9818 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9819 /* Calculate target store address, Rn +/- imm8, immediate offset. */
9820 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9822 tgt_mem_addr = u_regval[0] + offset_8;
9826 tgt_mem_addr = u_regval[0] - offset_8;
9828 if (ARM_RECORD_STRH == str_type)
9830 record_buf_mem[0] = 2;
9831 record_buf_mem[1] = tgt_mem_addr;
9832 arm_insn_r->mem_rec_count = 1;
9834 else if (ARM_RECORD_STRD == str_type)
9836 record_buf_mem[0] = 4;
9837 record_buf_mem[1] = tgt_mem_addr;
9838 record_buf_mem[2] = 4;
9839 record_buf_mem[3] = tgt_mem_addr + 4;
9840 arm_insn_r->mem_rec_count = 2;
9842 /* Record Rn also as it changes. */
9843 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9844 arm_insn_r->reg_rec_count = 1;
9846 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9847 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9849 /* 4) Store, register pre-indexed. */
9850 /* 6) Store, register post-indexed. */
9851 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9852 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9853 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9854 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9855 /* Calculate target store address, Rn +/- Rm, register offset. */
9856 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9858 tgt_mem_addr = u_regval[0] + u_regval[1];
9862 tgt_mem_addr = u_regval[1] - u_regval[0];
9864 if (ARM_RECORD_STRH == str_type)
9866 record_buf_mem[0] = 2;
9867 record_buf_mem[1] = tgt_mem_addr;
9868 arm_insn_r->mem_rec_count = 1;
9870 else if (ARM_RECORD_STRD == str_type)
9872 record_buf_mem[0] = 4;
9873 record_buf_mem[1] = tgt_mem_addr;
9874 record_buf_mem[2] = 4;
9875 record_buf_mem[3] = tgt_mem_addr + 4;
9876 arm_insn_r->mem_rec_count = 2;
9878 /* Record Rn also as it changes. */
9879 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9880 arm_insn_r->reg_rec_count = 1;
9885 /* Handling ARM extension space insns. */
9888 arm_record_extension_space (insn_decode_record *arm_insn_r)
9890 int ret = 0; /* Return value: -1:record failure ; 0:success */
9891 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9892 uint32_t record_buf[8], record_buf_mem[8];
9893 uint32_t reg_src1 = 0;
9894 struct regcache *reg_cache = arm_insn_r->regcache;
9895 ULONGEST u_regval = 0;
9897 gdb_assert (!INSN_RECORDED(arm_insn_r));
9898 /* Handle unconditional insn extension space. */
9900 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9901 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9902 if (arm_insn_r->cond)
9904 /* PLD has no effect on architectural state, it just affects
9906 if (5 == ((opcode1 & 0xE0) >> 5))
9909 record_buf[0] = ARM_PS_REGNUM;
9910 record_buf[1] = ARM_LR_REGNUM;
9911 arm_insn_r->reg_rec_count = 2;
9913 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9917 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9918 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9921 /* Undefined instruction on ARM V5; need to handle if later
9922 versions define it. */
9925 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9926 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9927 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9929 /* Handle arithmetic insn extension space. */
9930 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9931 && !INSN_RECORDED(arm_insn_r))
9933 /* Handle MLA(S) and MUL(S). */
9934 if (in_inclusive_range (insn_op1, 0U, 3U))
9936 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9937 record_buf[1] = ARM_PS_REGNUM;
9938 arm_insn_r->reg_rec_count = 2;
9940 else if (in_inclusive_range (insn_op1, 4U, 15U))
9942 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9943 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9944 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9945 record_buf[2] = ARM_PS_REGNUM;
9946 arm_insn_r->reg_rec_count = 3;
9950 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9951 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9952 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9954 /* Handle control insn extension space. */
9956 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9957 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9959 if (!bit (arm_insn_r->arm_insn,25))
9961 if (!bits (arm_insn_r->arm_insn, 4, 7))
9963 if ((0 == insn_op1) || (2 == insn_op1))
9966 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9967 arm_insn_r->reg_rec_count = 1;
9969 else if (1 == insn_op1)
9971 /* CPSR is going to be changed. */
9972 record_buf[0] = ARM_PS_REGNUM;
9973 arm_insn_r->reg_rec_count = 1;
9975 else if (3 == insn_op1)
9977 /* SPSR is going to be changed. */
9978 /* We need to get SPSR value, which is yet to be done. */
9982 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9987 record_buf[0] = ARM_PS_REGNUM;
9988 arm_insn_r->reg_rec_count = 1;
9990 else if (3 == insn_op1)
9993 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9994 arm_insn_r->reg_rec_count = 1;
9997 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10000 record_buf[0] = ARM_PS_REGNUM;
10001 record_buf[1] = ARM_LR_REGNUM;
10002 arm_insn_r->reg_rec_count = 2;
10004 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10006 /* QADD, QSUB, QDADD, QDSUB */
10007 record_buf[0] = ARM_PS_REGNUM;
10008 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10009 arm_insn_r->reg_rec_count = 2;
10011 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10014 record_buf[0] = ARM_PS_REGNUM;
10015 record_buf[1] = ARM_LR_REGNUM;
10016 arm_insn_r->reg_rec_count = 2;
10018 /* Save SPSR also; how? */
10021 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10022 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10023 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10024 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10027 if (0 == insn_op1 || 1 == insn_op1)
10029 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10030 /* We don't do optimization for SMULW<y> where we
10032 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10033 record_buf[1] = ARM_PS_REGNUM;
10034 arm_insn_r->reg_rec_count = 2;
10036 else if (2 == insn_op1)
10039 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10040 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10041 arm_insn_r->reg_rec_count = 2;
10043 else if (3 == insn_op1)
10046 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10047 arm_insn_r->reg_rec_count = 1;
10053 /* MSR : immediate form. */
10056 /* CPSR is going to be changed. */
10057 record_buf[0] = ARM_PS_REGNUM;
10058 arm_insn_r->reg_rec_count = 1;
10060 else if (3 == insn_op1)
10062 /* SPSR is going to be changed. */
10063 /* We need to get the SPSR value, which is yet to be done. */
10069 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10070 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10071 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10073 /* Handle load/store insn extension space. */
10075 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10076 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10077 && !INSN_RECORDED(arm_insn_r))
10082 /* These insns change both registers and memory. */
10083 /* SWP or SWPB insn. */
10084 /* Get memory address given by Rn. */
10085 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10086 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10087 /* If it is a SWP insn, it swaps a word. */
10088 if (8 == arm_insn_r->opcode)
10090 record_buf_mem[0] = 4;
10094 /* SWPB insn swaps only a byte. */
10095 record_buf_mem[0] = 1;
10097 record_buf_mem[1] = u_regval;
10098 arm_insn_r->mem_rec_count = 1;
10099 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10100 arm_insn_r->reg_rec_count = 1;
10102 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10105 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10108 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10111 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10112 record_buf[1] = record_buf[0] + 1;
10113 arm_insn_r->reg_rec_count = 2;
10115 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10118 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10121 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10123 /* LDRH, LDRSB, LDRSH. */
10124 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10125 arm_insn_r->reg_rec_count = 1;
10130 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10131 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10132 && !INSN_RECORDED(arm_insn_r))
10135 /* Handle coprocessor insn extension space. */
10138 /* To be done for ARMv5 and later; as of now we return -1. */
10142 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10143 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10148 /* Handling opcode 000 insns. */
10151 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10153 struct regcache *reg_cache = arm_insn_r->regcache;
10154 uint32_t record_buf[8], record_buf_mem[8];
10155 ULONGEST u_regval[2] = {0};
10157 uint32_t reg_src1 = 0;
10158 uint32_t opcode1 = 0;
10160 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10161 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10162 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10164 if (!((opcode1 & 0x19) == 0x10))
10166 /* Data-processing (register) and Data-processing (register-shifted
10168 /* In all of the 11 shifter operand modes, the insn modifies the destination
10169 register, which is specified by bits 12-15. */
10170 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10171 record_buf[1] = ARM_PS_REGNUM;
10172 arm_insn_r->reg_rec_count = 2;
10174 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10176 /* Miscellaneous instructions */
10178 if (3 == arm_insn_r->decode && 0x12 == opcode1
10179 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10181 /* Handle BLX, branch and link/exchange. */
10182 if (9 == arm_insn_r->opcode)
10184 /* The target state is chosen by setting the T bit of CPSR from bit[0] of Rm,
10185 and R14 stores the return address. */
10186 record_buf[0] = ARM_PS_REGNUM;
10187 record_buf[1] = ARM_LR_REGNUM;
10188 arm_insn_r->reg_rec_count = 2;
10191 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10193 /* Handle enhanced software breakpoint insn, BKPT. */
10194 /* CPSR is changed so that execution continues in ARM state, with normal
10195 interrupts disabled, entering abort mode. */
10196 /* The PC is set according to the high vector configuration. */
10197 /* If the user hits a breakpoint and types reverse, we need
10198 to go back with the previous CPSR and
10199 Program Counter. */
10200 record_buf[0] = ARM_PS_REGNUM;
10201 record_buf[1] = ARM_LR_REGNUM;
10202 arm_insn_r->reg_rec_count = 2;
10204 /* Save SPSR also; how? */
10207 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10208 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10210 /* Handle BX, branch and link/exchange. */
10211 /* The target state is chosen by setting the T bit of CPSR from bit[0] of Rm. */
10212 record_buf[0] = ARM_PS_REGNUM;
10213 arm_insn_r->reg_rec_count = 1;
10215 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10216 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10217 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10219 /* Count leading zeros: CLZ. */
10220 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10221 arm_insn_r->reg_rec_count = 1;
10223 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10224 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10225 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10226 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10228 /* Handle MRS insn. */
10229 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10230 arm_insn_r->reg_rec_count = 1;
10233 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10235 /* Multiply and multiply-accumulate */
10237 /* Handle multiply instructions. */
10238 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10239 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10241 /* Handle MLA and MUL. */
10242 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10243 record_buf[1] = ARM_PS_REGNUM;
10244 arm_insn_r->reg_rec_count = 2;
10246 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10248 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10249 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10250 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10251 record_buf[2] = ARM_PS_REGNUM;
10252 arm_insn_r->reg_rec_count = 3;
10255 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10257 /* Synchronization primitives */
10259 /* Handling SWP, SWPB. */
10260 /* These insns change both registers and memory. */
10261 /* SWP or SWPB insn. */
10263 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10264 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10265 /* If it is a SWP insn, it swaps a word. */
10266 if (8 == arm_insn_r->opcode)
10268 record_buf_mem[0] = 4;
10272 /* SWPB insn swaps only a byte. */
10273 record_buf_mem[0] = 1;
10275 record_buf_mem[1] = u_regval[0];
10276 arm_insn_r->mem_rec_count = 1;
10277 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10278 arm_insn_r->reg_rec_count = 1;
10280 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10281 || 15 == arm_insn_r->decode)
10283 if ((opcode1 & 0x12) == 2)
10285 /* Extra load/store (unprivileged) */
10290 /* Extra load/store */
10291 switch (bits (arm_insn_r->arm_insn, 5, 6))
10294 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10296 /* STRH (register), STRH (immediate) */
10297 arm_record_strx (arm_insn_r, &record_buf[0],
10298 &record_buf_mem[0], ARM_RECORD_STRH);
10300 else if ((opcode1 & 0x05) == 0x1)
10302 /* LDRH (register) */
10303 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10304 arm_insn_r->reg_rec_count = 1;
10306 if (bit (arm_insn_r->arm_insn, 21))
10308 /* Write back to Rn. */
10309 record_buf[arm_insn_r->reg_rec_count++]
10310 = bits (arm_insn_r->arm_insn, 16, 19);
10313 else if ((opcode1 & 0x05) == 0x5)
10315 /* LDRH (immediate), LDRH (literal) */
10316 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10318 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10319 arm_insn_r->reg_rec_count = 1;
10323 /* LDRH (immediate) */
10324 if (bit (arm_insn_r->arm_insn, 21))
10326 /* Write back to Rn. */
10327 record_buf[arm_insn_r->reg_rec_count++] = rn;
10335 if ((opcode1 & 0x05) == 0x0)
10337 /* LDRD (register) */
10338 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10339 record_buf[1] = record_buf[0] + 1;
10340 arm_insn_r->reg_rec_count = 2;
10342 if (bit (arm_insn_r->arm_insn, 21))
10344 /* Write back to Rn. */
10345 record_buf[arm_insn_r->reg_rec_count++]
10346 = bits (arm_insn_r->arm_insn, 16, 19);
10349 else if ((opcode1 & 0x05) == 0x1)
10351 /* LDRSB (register) */
10352 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10353 arm_insn_r->reg_rec_count = 1;
10355 if (bit (arm_insn_r->arm_insn, 21))
10357 /* Write back to Rn. */
10358 record_buf[arm_insn_r->reg_rec_count++]
10359 = bits (arm_insn_r->arm_insn, 16, 19);
10362 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10364 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10366 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10368 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10369 arm_insn_r->reg_rec_count = 1;
10373 /* LDRD (immediate), LDRSB (immediate) */
10374 if (bit (arm_insn_r->arm_insn, 21))
10376 /* Write back to Rn. */
10377 record_buf[arm_insn_r->reg_rec_count++] = rn;
10385 if ((opcode1 & 0x05) == 0x0)
10387 /* STRD (register) */
10388 arm_record_strx (arm_insn_r, &record_buf[0],
10389 &record_buf_mem[0], ARM_RECORD_STRD);
10391 else if ((opcode1 & 0x05) == 0x1)
10393 /* LDRSH (register) */
10394 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10395 arm_insn_r->reg_rec_count = 1;
10397 if (bit (arm_insn_r->arm_insn, 21))
10399 /* Write back to Rn. */
10400 record_buf[arm_insn_r->reg_rec_count++]
10401 = bits (arm_insn_r->arm_insn, 16, 19);
10404 else if ((opcode1 & 0x05) == 0x4)
10406 /* STRD (immediate) */
10407 arm_record_strx (arm_insn_r, &record_buf[0],
10408 &record_buf_mem[0], ARM_RECORD_STRD);
10410 else if ((opcode1 & 0x05) == 0x5)
10412 /* LDRSH (immediate), LDRSH (literal) */
10413 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10414 arm_insn_r->reg_rec_count = 1;
10416 if (bit (arm_insn_r->arm_insn, 21))
10418 /* Write back to Rn. */
10419 record_buf[arm_insn_r->reg_rec_count++]
10420 = bits (arm_insn_r->arm_insn, 16, 19);
10436 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10437 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10441 /* Handling opcode 001 insns. */
10444 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10446 uint32_t record_buf[8], record_buf_mem[8];
10448 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10449 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10451 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10452 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10453 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10456 /* Handle MSR insn. */
10457 if (9 == arm_insn_r->opcode)
10459 /* CPSR is going to be changed. */
10460 record_buf[0] = ARM_PS_REGNUM;
10461 arm_insn_r->reg_rec_count = 1;
10465 /* SPSR is going to be changed. */
10468 else if (arm_insn_r->opcode <= 15)
10470 /* Normal data processing insns. */
10471 /* In all of the 11 shifter operand modes, the insn modifies the destination
10472 register, which is specified by bits 12-15. */
10473 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10474 record_buf[1] = ARM_PS_REGNUM;
10475 arm_insn_r->reg_rec_count = 2;
10482 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10483 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10488 arm_record_media (insn_decode_record *arm_insn_r)
10490 uint32_t record_buf[8];
10492 switch (bits (arm_insn_r->arm_insn, 22, 24))
10495 /* Parallel addition and subtraction, signed */
10497 /* Parallel addition and subtraction, unsigned */
10500 /* Packing, unpacking, saturation and reversal */
10502 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10504 record_buf[arm_insn_r->reg_rec_count++] = rd;
10510 /* Signed multiplies */
10512 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10513 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10515 record_buf[arm_insn_r->reg_rec_count++] = rd;
10517 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10518 else if (op1 == 0x4)
10519 record_buf[arm_insn_r->reg_rec_count++]
10520 = bits (arm_insn_r->arm_insn, 12, 15);
10526 if (bit (arm_insn_r->arm_insn, 21)
10527 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10530 record_buf[arm_insn_r->reg_rec_count++]
10531 = bits (arm_insn_r->arm_insn, 12, 15);
10533 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10534 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10536 /* USAD8 and USADA8 */
10537 record_buf[arm_insn_r->reg_rec_count++]
10538 = bits (arm_insn_r->arm_insn, 16, 19);
10545 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10546 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10548 /* Permanently UNDEFINED */
10553 /* BFC, BFI and UBFX */
10554 record_buf[arm_insn_r->reg_rec_count++]
10555 = bits (arm_insn_r->arm_insn, 12, 15);
10564 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10569 /* Handle ARM mode instructions with opcode 010. */
10572 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10574 struct regcache *reg_cache = arm_insn_r->regcache;
10576 uint32_t reg_base, reg_dest;
10577 uint32_t offset_12, tgt_mem_addr;
10578 uint32_t record_buf[8], record_buf_mem[8];
10579 unsigned char wback;
10582 /* Calculate wback. */
10583 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10584 || (bit (arm_insn_r->arm_insn, 21) == 1);
10586 arm_insn_r->reg_rec_count = 0;
10587 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10589 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10591 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10594 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10595 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10597 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10598 precedes an LDR instruction that has R15 as the destination register, it
10599 emulates a branch and link instruction, and hence we need to save
10600 CPSR and PC as well. */
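/* Illustrative example (not from the original sources): the pair

       mov  lr, pc
       ldr  pc, [r4]

   behaves like a branch with link, which is why CPSR and PC are also
   recorded when the destination register is R15. */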
10601 if (ARM_PC_REGNUM == reg_dest)
10602 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10604 /* If wback is true, also save the base register, which is going to be
10607 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10611 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10613 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10614 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10616 /* Handle bit U. */
10617 if (bit (arm_insn_r->arm_insn, 23))
10619 /* U == 1: Add the offset. */
10620 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10624 /* U == 0: subtract the offset. */
10625 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10628 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10630 if (bit (arm_insn_r->arm_insn, 22))
10632 /* STRB and STRBT: 1 byte. */
10633 record_buf_mem[0] = 1;
10637 /* STR and STRT: 4 bytes. */
10638 record_buf_mem[0] = 4;
10641 /* Handle bit P. */
10642 if (bit (arm_insn_r->arm_insn, 24))
10643 record_buf_mem[1] = tgt_mem_addr;
10645 record_buf_mem[1] = (uint32_t) u_regval;
10647 arm_insn_r->mem_rec_count = 1;
10649 /* If wback is true, also save the base register, which is going to be
10652 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10655 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10656 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10660 /* Handling opcode 011 insns. */
10663 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10665 struct regcache *reg_cache = arm_insn_r->regcache;
10667 uint32_t shift_imm = 0;
10668 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10669 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10670 uint32_t record_buf[8], record_buf_mem[8];
10673 ULONGEST u_regval[2];
10675 if (bit (arm_insn_r->arm_insn, 4))
10676 return arm_record_media (arm_insn_r);
10678 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10679 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10681 /* Handle enhanced store insns and LDRD DSP insn,
10682 order begins according to addressing modes for store insns
10686 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10688 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10689 /* The LDR instruction is capable of doing branching: if
10690 MOV LR, PC precedes an LDR insn that loads R15,
10691 the pair emulates a branch and link insn, and hence we
10692 need to save CPSR and PC as well. */
10693 if (15 != reg_dest)
10695 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10696 arm_insn_r->reg_rec_count = 1;
10700 record_buf[0] = reg_dest;
10701 record_buf[1] = ARM_PS_REGNUM;
10702 arm_insn_r->reg_rec_count = 2;
10707 if (! bits (arm_insn_r->arm_insn, 4, 11))
10709 /* Store insn, register offset and register pre-indexed,
10710 register post-indexed. */
10712 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10714 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10715 regcache_raw_read_unsigned (reg_cache, reg_src1
10717 regcache_raw_read_unsigned (reg_cache, reg_src2
10719 if (15 == reg_src2)
10721 /* If R15 was used as Rn, the value is the current PC+8. */
10722 /* Pre-indexed mode doesn't reach here; illegal insn. */
10723 u_regval[0] = u_regval[0] + 8;
10725 /* Calculate target store address, Rn +/- Rm, register offset. */
10727 if (bit (arm_insn_r->arm_insn, 23))
10729 tgt_mem_addr = u_regval[0] + u_regval[1];
10733 tgt_mem_addr = u_regval[1] - u_regval[0];
10736 switch (arm_insn_r->opcode)
10750 record_buf_mem[0] = 4;
10765 record_buf_mem[0] = 1;
10769 gdb_assert_not_reached ("no decoding pattern found");
10772 record_buf_mem[1] = tgt_mem_addr;
10773 arm_insn_r->mem_rec_count = 1;
10775 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10776 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10777 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10778 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10779 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10780 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10783 /* Rn is going to be changed in pre-indexed mode and
10784 post-indexed mode as well. */
10785 record_buf[0] = reg_src2;
10786 arm_insn_r->reg_rec_count = 1;
10791 /* Store insn, scaled register offset; scaled pre-indexed. */
10792 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10794 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10796 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10797 /* Get shift_imm. */
10798 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10799 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10800 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10801 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10802 /* Offset_12 used as shift. */
10806 /* Offset_12 used as index. */
10807 offset_12 = u_regval[0] << shift_imm;
10811 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10817 if (bit (u_regval[0], 31))
10819 offset_12 = 0xFFFFFFFF;
10828 /* This is an arithmetic shift. */
10829 offset_12 = s_word >> shift_imm;
10836 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10838 /* Get C flag value and shift it by 31. */
10839 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10840 | (u_regval[0]) >> 1);
10844 offset_12 = (u_regval[0] >> shift_imm) \
10846 (sizeof(uint32_t) - shift_imm));
10851 gdb_assert_not_reached ("no decoding pattern found");
10855 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10857 if (bit (arm_insn_r->arm_insn, 23))
10859 tgt_mem_addr = u_regval[1] + offset_12;
10863 tgt_mem_addr = u_regval[1] - offset_12;
10866 switch (arm_insn_r->opcode)
10880 record_buf_mem[0] = 4;
10895 record_buf_mem[0] = 1;
10899 gdb_assert_not_reached ("no decoding pattern found");
10902 record_buf_mem[1] = tgt_mem_addr;
10903 arm_insn_r->mem_rec_count = 1;
10905 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10906 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10907 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10908 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10909 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10910 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10913 /* Rn is going to be changed in register scaled pre-indexed
10914 mode, and scaled post-indexed mode. */
10915 record_buf[0] = reg_src2;
10916 arm_insn_r->reg_rec_count = 1;
10921 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10922 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10926 /* Handle ARM mode instructions with opcode 100. */
10929 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10931 struct regcache *reg_cache = arm_insn_r->regcache;
10932 uint32_t register_count = 0, register_bits;
10933 uint32_t reg_base, addr_mode;
10934 uint32_t record_buf[24], record_buf_mem[48];
10938 /* Fetch the list of registers. */
10939 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10940 arm_insn_r->reg_rec_count = 0;
10942 /* Fetch the base register that contains the address we are loading data
10944 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10946 /* Calculate wback. */
10947 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10949 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10951 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10953 /* Find out which registers are going to be loaded from memory. */
10954 while (register_bits)
10956 if (register_bits & 0x00000001)
10957 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10958 register_bits = register_bits >> 1;
10963 /* If wback is true, also save the base register, which is going to be
10966 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10968 /* Save the CPSR register. */
10969 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10973 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10975 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10977 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10979 /* Find out how many registers are going to be stored to memory. */
10980 while (register_bits)
10982 if (register_bits & 0x00000001)
10984 register_bits = register_bits >> 1;
10989 /* STMDA (STMED): Decrement after. */
10991 record_buf_mem[1] = (uint32_t) u_regval
10992 - register_count * ARM_INT_REGISTER_SIZE + 4;
10994 /* STM (STMIA, STMEA): Increment after. */
10996 record_buf_mem[1] = (uint32_t) u_regval;
10998 /* STMDB (STMFD): Decrement before. */
11000 record_buf_mem[1] = (uint32_t) u_regval
11001 - register_count * ARM_INT_REGISTER_SIZE;
11003 /* STMIB (STMFA): Increment before. */
11005 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11008 gdb_assert_not_reached ("no decoding pattern found");
11012 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11013 arm_insn_r->mem_rec_count = 1;
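/* Illustrative example (not from the original sources): STMDB r13!, {r0-r3}
   with r13 == 0x1000 records one 16-byte region starting at 0xff0, whereas
   STMIA with the same register list records 16 bytes starting at 0x1000. */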
11015 /* If wback is true, also save the base register, which is going to be
11018 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11021 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11022 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11026 /* Handling opcode 101 insns. */
11029 arm_record_b_bl (insn_decode_record *arm_insn_r)
11031 uint32_t record_buf[8];
11033 /* Handle B, BL, BLX(1) insns. */
11034 /* B simply branches so we do nothing here. */
11035 /* Note: BLX(1) doesn't fall here but instead falls into
11036 extension space. */
11037 if (bit (arm_insn_r->arm_insn, 24))
11039 record_buf[0] = ARM_LR_REGNUM;
11040 arm_insn_r->reg_rec_count = 1;
11043 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11049 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11051 printf_unfiltered (_("Process record does not support instruction "
11052 "0x%0x at address %s.\n"), arm_insn_r->arm_insn,
11053 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11058 /* Record handler for vector data transfer instructions. */
11061 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11063 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11064 uint32_t record_buf[4];
11066 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11067 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11068 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11069 bit_l = bit (arm_insn_r->arm_insn, 20);
11070 bit_c = bit (arm_insn_r->arm_insn, 8);
11072 /* Handle VMOV instruction. */
11073 if (bit_l && bit_c)
11075 record_buf[0] = reg_t;
11076 arm_insn_r->reg_rec_count = 1;
11078 else if (bit_l && !bit_c)
11080 /* Handle VMOV instruction. */
11081 if (bits_a == 0x00)
11083 record_buf[0] = reg_t;
11084 arm_insn_r->reg_rec_count = 1;
11086 /* Handle VMRS instruction. */
11087 else if (bits_a == 0x07)
11090 reg_t = ARM_PS_REGNUM;
11092 record_buf[0] = reg_t;
11093 arm_insn_r->reg_rec_count = 1;
11096 else if (!bit_l && !bit_c)
11098 /* Handle VMOV instruction. */
11099 if (bits_a == 0x00)
11101 record_buf[0] = ARM_D0_REGNUM + reg_v;
11103 arm_insn_r->reg_rec_count = 1;
11105 /* Handle VMSR instruction. */
11106 else if (bits_a == 0x07)
11108 record_buf[0] = ARM_FPSCR_REGNUM;
11109 arm_insn_r->reg_rec_count = 1;
11112 else if (!bit_l && bit_c)
11114 /* Handle VMOV instruction. */
11115 if (!(bits_a & 0x04))
11117 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11119 arm_insn_r->reg_rec_count = 1;
11121 /* Handle VDUP instruction. */
11124 if (bit (arm_insn_r->arm_insn, 21))
11126 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11127 record_buf[0] = reg_v + ARM_D0_REGNUM;
11128 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11129 arm_insn_r->reg_rec_count = 2;
11133 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11134 record_buf[0] = reg_v + ARM_D0_REGNUM;
11135 arm_insn_r->reg_rec_count = 1;
11140 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11144 /* Record handler for extension register load/store instructions. */
11147 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11149 uint32_t opcode, single_reg;
11150 uint8_t op_vldm_vstm;
11151 uint32_t record_buf[8], record_buf_mem[128];
11152 ULONGEST u_regval = 0;
11154 struct regcache *reg_cache = arm_insn_r->regcache;
11156 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11157 single_reg = !bit (arm_insn_r->arm_insn, 8);
11158 op_vldm_vstm = opcode & 0x1b;
11160 /* Handle VMOV instructions. */
11161 if ((opcode & 0x1e) == 0x04)
11163 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11166 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11167 arm_insn_r->reg_rec_count = 2;
11171 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11172 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11176 /* The first S register number m is REG_M:M (M is bit 5),
11177 the corresponding D register number is REG_M:M / 2, which
11179 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11180 /* The second S register number is REG_M:M + 1, the
11181 corresponding D register number is (REG_M:M + 1) / 2.
11182 IOW, if bit M is 1, the first and second S registers
11183 are mapped to different D registers, otherwise, they are
11184 in the same D register. */
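/* Worked example (illustrative, not from the original sources): with
   REG_M == 3 and M == 1 the source registers are S7 and S8, which live in
   D3 and D4, so two D registers are recorded below; with M == 0 they are
   S6 and S7, both inside D3, and only one D register is recorded. */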
11187 record_buf[arm_insn_r->reg_rec_count++]
11188 = ARM_D0_REGNUM + reg_m + 1;
11193 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11194 arm_insn_r->reg_rec_count = 1;
11198 /* Handle VSTM and VPUSH instructions. */
11199 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11200 || op_vldm_vstm == 0x12)
11202 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11203 uint32_t memory_index = 0;
11205 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11206 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11207 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11208 imm_off32 = imm_off8 << 2;
11209 memory_count = imm_off8;
11211 if (bit (arm_insn_r->arm_insn, 23))
11212 start_address = u_regval;
11214 start_address = u_regval - imm_off32;
11216 if (bit (arm_insn_r->arm_insn, 21))
11218 record_buf[0] = reg_rn;
11219 arm_insn_r->reg_rec_count = 1;
11222 while (memory_count > 0)
11226 record_buf_mem[memory_index] = 4;
11227 record_buf_mem[memory_index + 1] = start_address;
11228 start_address = start_address + 4;
11229 memory_index = memory_index + 2;
11233 record_buf_mem[memory_index] = 4;
11234 record_buf_mem[memory_index + 1] = start_address;
11235 record_buf_mem[memory_index + 2] = 4;
11236 record_buf_mem[memory_index + 3] = start_address + 4;
11237 start_address = start_address + 8;
11238 memory_index = memory_index + 4;
11242 arm_insn_r->mem_rec_count = (memory_index >> 1);
11244 /* Handle VLDM instructions. */
11245 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11246 || op_vldm_vstm == 0x13)
11248 uint32_t reg_count, reg_vd;
11249 uint32_t reg_index = 0;
11250 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11252 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11253 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11255 /* REG_VD is the first D register number. If the instruction
11256 loads memory to S registers (SINGLE_REG is TRUE), the register
11257 number is (REG_VD << 1 | bit D), so the corresponding D
11258 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11260 reg_vd = reg_vd | (bit_d << 4);
11262 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11263 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11265 /* If the instruction loads memory into D registers, REG_COUNT should
11266 be divided by 2, according to the ARM Architecture Reference
11267 Manual. If the instruction loads memory into S registers, divide by
11268 2 as well, because two S registers map onto each D register. */
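/* Illustrative example (not from the original sources): VLDMIA r2, {s5-s8}
   has an 8-bit register count of 4 and bit D set (the first register, S5,
   is odd), so reg_count becomes 2 + 1 == 3 and D2, D3 and D4 are recorded
   by the loop below. */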
11269 reg_count = reg_count / 2;
11270 if (single_reg && bit_d)
11272 /* Increase the register count if S register list starts from
11273 an odd number (bit d is one). */
11277 while (reg_count > 0)
11279 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11282 arm_insn_r->reg_rec_count = reg_index;
11284 /* VSTR Vector store register. */
11285 else if ((opcode & 0x13) == 0x10)
11287 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11288 uint32_t memory_index = 0;
11290 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11291 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11292 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11293 imm_off32 = imm_off8 << 2;
11295 if (bit (arm_insn_r->arm_insn, 23))
11296 start_address = u_regval + imm_off32;
11298 start_address = u_regval - imm_off32;
11302 record_buf_mem[memory_index] = 4;
11303 record_buf_mem[memory_index + 1] = start_address;
11304 arm_insn_r->mem_rec_count = 1;
11308 record_buf_mem[memory_index] = 4;
11309 record_buf_mem[memory_index + 1] = start_address;
11310 record_buf_mem[memory_index + 2] = 4;
11311 record_buf_mem[memory_index + 3] = start_address + 4;
11312 arm_insn_r->mem_rec_count = 2;
11315 /* VLDR Vector load register. */
11316 else if ((opcode & 0x13) == 0x11)
11318 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11322 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11323 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11327 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11328 /* Record register D rather than pseudo register S. */
11329 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11331 arm_insn_r->reg_rec_count = 1;
11334 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11335 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11339 /* Record handler for arm/thumb mode VFP data processing instructions. */
11342 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11344 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11345 uint32_t record_buf[4];
11346 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11347 enum insn_types curr_insn_type = INSN_INV;
11349 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11350 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11351 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11352 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11353 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11354 bit_d = bit (arm_insn_r->arm_insn, 22);
11355 /* Mask off the "D" bit. */
11356 opc1 = opc1 & ~0x04;
11358 /* Handle VMLA, VMLS. */
11361 if (bit (arm_insn_r->arm_insn, 10))
11363 if (bit (arm_insn_r->arm_insn, 6))
11364 curr_insn_type = INSN_T0;
11366 curr_insn_type = INSN_T1;
11371 curr_insn_type = INSN_T1;
11373 curr_insn_type = INSN_T2;
11376 /* Handle VNMLA, VNMLS, VNMUL. */
11377 else if (opc1 == 0x01)
11380 curr_insn_type = INSN_T1;
11382 curr_insn_type = INSN_T2;
11385 else if (opc1 == 0x02 && !(opc3 & 0x01))
11387 if (bit (arm_insn_r->arm_insn, 10))
11389 if (bit (arm_insn_r->arm_insn, 6))
11390 curr_insn_type = INSN_T0;
11392 curr_insn_type = INSN_T1;
11397 curr_insn_type = INSN_T1;
11399 curr_insn_type = INSN_T2;
11402 /* Handle VADD, VSUB. */
11403 else if (opc1 == 0x03)
11405 if (!bit (arm_insn_r->arm_insn, 9))
11407 if (bit (arm_insn_r->arm_insn, 6))
11408 curr_insn_type = INSN_T0;
11410 curr_insn_type = INSN_T1;
11415 curr_insn_type = INSN_T1;
11417 curr_insn_type = INSN_T2;
11421 else if (opc1 == 0x08)
11424 curr_insn_type = INSN_T1;
11426 curr_insn_type = INSN_T2;
11428 /* Handle all other vfp data processing instructions. */
11429 else if (opc1 == 0x0b)
11432 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11434 if (bit (arm_insn_r->arm_insn, 4))
11436 if (bit (arm_insn_r->arm_insn, 6))
11437 curr_insn_type = INSN_T0;
11439 curr_insn_type = INSN_T1;
11444 curr_insn_type = INSN_T1;
11446 curr_insn_type = INSN_T2;
11449 /* Handle VNEG and VABS. */
11450 else if ((opc2 == 0x01 && opc3 == 0x01)
11451 || (opc2 == 0x00 && opc3 == 0x03))
11453 if (!bit (arm_insn_r->arm_insn, 11))
11455 if (bit (arm_insn_r->arm_insn, 6))
11456 curr_insn_type = INSN_T0;
11458 curr_insn_type = INSN_T1;
11463 curr_insn_type = INSN_T1;
11465 curr_insn_type = INSN_T2;
11468 /* Handle VSQRT. */
11469 else if (opc2 == 0x01 && opc3 == 0x03)
11472 curr_insn_type = INSN_T1;
11474 curr_insn_type = INSN_T2;
11477 else if (opc2 == 0x07 && opc3 == 0x03)
11480 curr_insn_type = INSN_T1;
11482 curr_insn_type = INSN_T2;
11484 else if (opc3 & 0x01)
11487 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11489 if (!bit (arm_insn_r->arm_insn, 18))
11490 curr_insn_type = INSN_T2;
11494 curr_insn_type = INSN_T1;
11496 curr_insn_type = INSN_T2;
11500 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11503 curr_insn_type = INSN_T1;
11505 curr_insn_type = INSN_T2;
11507 /* Handle VCVTB, VCVTT. */
11508 else if ((opc2 & 0x0e) == 0x02)
11509 curr_insn_type = INSN_T2;
11510 /* Handle VCMP, VCMPE. */
11511 else if ((opc2 & 0x0e) == 0x04)
11512 curr_insn_type = INSN_T3;
11516 switch (curr_insn_type)
11519 reg_vd = reg_vd | (bit_d << 4);
11520 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11521 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11522 arm_insn_r->reg_rec_count = 2;
11526 reg_vd = reg_vd | (bit_d << 4);
11527 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11528 arm_insn_r->reg_rec_count = 1;
11532 reg_vd = (reg_vd << 1) | bit_d;
11533 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11534 arm_insn_r->reg_rec_count = 1;
11538 record_buf[0] = ARM_FPSCR_REGNUM;
11539 arm_insn_r->reg_rec_count = 1;
11543 gdb_assert_not_reached ("no decoding pattern found");
11547 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11551 /* Handling opcode 110 insns. */
11554 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11556 uint32_t op1, op1_ebit, coproc;
11558 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11559 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11560 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11562 if ((coproc & 0x0e) == 0x0a)
11564 /* Handle extension register ld/st instructions. */
11566 return arm_record_exreg_ld_st_insn (arm_insn_r);
11568 /* 64-bit transfers between arm core and extension registers. */
11569 if ((op1 & 0x3e) == 0x04)
11570 return arm_record_exreg_ld_st_insn (arm_insn_r);
11574 /* Handle coprocessor ld/st instructions. */
11579 return arm_record_unsupported_insn (arm_insn_r);
11582 return arm_record_unsupported_insn (arm_insn_r);
11585 /* Move to coprocessor from two arm core registers. */
11587 return arm_record_unsupported_insn (arm_insn_r);
11589 /* Move to two arm core registers from coprocessor. */
11594 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11595 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11596 arm_insn_r->reg_rec_count = 2;
11598 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11602 return arm_record_unsupported_insn (arm_insn_r);
11605 /* Handling opcode 111 insns. */
11608 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11610 uint32_t op, op1_ebit, coproc, bits_24_25;
11611 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11612 struct regcache *reg_cache = arm_insn_r->regcache;
11614 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11615 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11616 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11617 op = bit (arm_insn_r->arm_insn, 4);
11618 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11620 /* Handle arm SWI/SVC system call instructions. */
11621 if (bits_24_25 == 0x3)
11623 if (tdep->arm_syscall_record != NULL)
11625 ULONGEST svc_operand, svc_number;
11627 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11629 if (svc_operand) /* OABI. */
11630 svc_number = svc_operand - 0x900000;
11632 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
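/* Illustrative example (not from the original sources): an OABI
   "swi 0x900004" gives svc_operand == 0x900004 and hence syscall number 4,
   while an EABI "svc 0" takes the syscall number from r7. */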
11634 return tdep->arm_syscall_record (reg_cache, svc_number);
11638 printf_unfiltered (_("no syscall record support\n"));
11642 else if (bits_24_25 == 0x02)
11646 if ((coproc & 0x0e) == 0x0a)
11648 /* 8, 16, and 32-bit transfer */
11649 return arm_record_vdata_transfer_insn (arm_insn_r);
11656 uint32_t record_buf[1];
11658 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11659 if (record_buf[0] == 15)
11660 record_buf[0] = ARM_PS_REGNUM;
11662 arm_insn_r->reg_rec_count = 1;
11663 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11676 if ((coproc & 0x0e) == 0x0a)
11678 /* VFP data-processing instructions. */
11679 return arm_record_vfp_data_proc_insn (arm_insn_r);
11690 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11694 if ((coproc & 0x0e) != 0x0a)
11700 else if (op1 == 4 || op1 == 5)
11702 if ((coproc & 0x0e) == 0x0a)
11704 /* 64-bit transfers between ARM core and extension */
11713 else if (op1 == 0 || op1 == 1)
11720 if ((coproc & 0x0e) == 0x0a)
11722 /* Extension register load/store */
11726 /* STC, STC2, LDC, LDC2 */
11735 /* Handling opcode 000 insns. */
11738 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11740 uint32_t record_buf[8];
11741 uint32_t reg_src1 = 0;
11743 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11745 record_buf[0] = ARM_PS_REGNUM;
11746 record_buf[1] = reg_src1;
11747 thumb_insn_r->reg_rec_count = 2;
11749 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11755 /* Handling opcode 001 insns. */
11758 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11760 uint32_t record_buf[8];
11761 uint32_t reg_src1 = 0;
11763 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11765 record_buf[0] = ARM_PS_REGNUM;
11766 record_buf[1] = reg_src1;
11767 thumb_insn_r->reg_rec_count = 2;
11769 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11774 /* Handling opcode 010 insns. */
11777 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11779 struct regcache *reg_cache = thumb_insn_r->regcache;
11780 uint32_t record_buf[8], record_buf_mem[8];
11782 uint32_t reg_src1 = 0, reg_src2 = 0;
11783 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11785 ULONGEST u_regval[2] = {0};
11787 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11789 if (bit (thumb_insn_r->arm_insn, 12))
11791 /* Handle load/store register offset. */
11792 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11794 if (in_inclusive_range (opB, 4U, 7U))
11796 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11797 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11798 record_buf[0] = reg_src1;
11799 thumb_insn_r->reg_rec_count = 1;
11801 else if (in_inclusive_range (opB, 0U, 2U))
11803 /* STR(2), STRB(2), STRH(2). */
11804 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11805 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11806 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11807 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11809 record_buf_mem[0] = 4; /* STR (2). */
11811 record_buf_mem[0] = 1; /* STRB (2). */
11813 record_buf_mem[0] = 2; /* STRH (2). */
11814 record_buf_mem[1] = u_regval[0] + u_regval[1];
11815 thumb_insn_r->mem_rec_count = 1;
11818 else if (bit (thumb_insn_r->arm_insn, 11))
11820 /* Handle load from literal pool. */
11822 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11823 record_buf[0] = reg_src1;
11824 thumb_insn_r->reg_rec_count = 1;
11828 /* Special data instructions and branch and exchange */
11829 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11830 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11831 if ((3 == opcode2) && (!opcode3))
11833 /* Branch with exchange. */
11834 record_buf[0] = ARM_PS_REGNUM;
11835 thumb_insn_r->reg_rec_count = 1;
11839 /* Format 8; special data processing insns. */
11840 record_buf[0] = ARM_PS_REGNUM;
11841 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11842 | bits (thumb_insn_r->arm_insn, 0, 2));
11843 thumb_insn_r->reg_rec_count = 2;
11848 /* Format 5; data processing insns. */
11849 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11850 if (bit (thumb_insn_r->arm_insn, 7))
11852 reg_src1 = reg_src1 + 8;
11854 record_buf[0] = ARM_PS_REGNUM;
11855 record_buf[1] = reg_src1;
11856 thumb_insn_r->reg_rec_count = 2;
11859 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11860 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11866 /* Handling opcode 001 insns. */
11869 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11871 struct regcache *reg_cache = thumb_insn_r->regcache;
11872 uint32_t record_buf[8], record_buf_mem[8];
11874 uint32_t reg_src1 = 0;
11875 uint32_t opcode = 0, immed_5 = 0;
11877 ULONGEST u_regval = 0;
11879 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11884 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11885 record_buf[0] = reg_src1;
11886 thumb_insn_r->reg_rec_count = 1;
11891 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11892 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11893 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11894 record_buf_mem[0] = 4;
11895 record_buf_mem[1] = u_regval + (immed_5 * 4);
11896 thumb_insn_r->mem_rec_count = 1;
11899 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11900 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11906 /* Handling opcode 100 insns. */
11909 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11911 struct regcache *reg_cache = thumb_insn_r->regcache;
11912 uint32_t record_buf[8], record_buf_mem[8];
11914 uint32_t reg_src1 = 0;
11915 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11917 ULONGEST u_regval = 0;
11919 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11924 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11925 record_buf[0] = reg_src1;
11926 thumb_insn_r->reg_rec_count = 1;
11928 else if (1 == opcode)
11931 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11932 record_buf[0] = reg_src1;
11933 thumb_insn_r->reg_rec_count = 1;
11935 else if (2 == opcode)
11938 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11939 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11940 record_buf_mem[0] = 4;
11941 record_buf_mem[1] = u_regval + (immed_8 * 4);
11942 thumb_insn_r->mem_rec_count = 1;
11944 else if (0 == opcode)
11947 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11948 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11949 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11950 record_buf_mem[0] = 2;
11951 record_buf_mem[1] = u_regval + (immed_5 * 2);
11952 thumb_insn_r->mem_rec_count = 1;
11955 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11956 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11962 /* Handling opcode 101 insns. */
11965 thumb_record_misc (insn_decode_record *thumb_insn_r)
11967 struct regcache *reg_cache = thumb_insn_r->regcache;
11969 uint32_t opcode = 0;
11970 uint32_t register_bits = 0, register_count = 0;
11971 uint32_t index = 0, start_address = 0;
11972 uint32_t record_buf[24], record_buf_mem[48];
11975 ULONGEST u_regval = 0;
11977 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11979 if (opcode == 0 || opcode == 1)
11981 /* ADR and ADD (SP plus immediate) */
11983 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11984 record_buf[0] = reg_src1;
11985 thumb_insn_r->reg_rec_count = 1;
11989 /* Miscellaneous 16-bit instructions */
11990 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11995 /* SETEND and CPS */
11998 /* ADD/SUB (SP plus immediate) */
11999 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12000 record_buf[0] = ARM_SP_REGNUM;
12001 thumb_insn_r->reg_rec_count = 1;
12003 case 1: /* fall through */
12004 case 3: /* fall through */
12005 case 9: /* fall through */
12010 /* SXTH, SXTB, UXTH, UXTB */
12011 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12012 thumb_insn_r->reg_rec_count = 1;
12014 case 4: /* fall through */
12017 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12018 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12019 while (register_bits)
12021 if (register_bits & 0x00000001)
12023 register_bits = register_bits >> 1;
12025 start_address = u_regval - \
12026 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12027 thumb_insn_r->mem_rec_count = register_count;
12028 while (register_count)
12030 record_buf_mem[(register_count * 2) - 1] = start_address;
12031 record_buf_mem[(register_count * 2) - 2] = 4;
12032 start_address = start_address + 4;
12035 record_buf[0] = ARM_SP_REGNUM;
12036 thumb_insn_r->reg_rec_count = 1;
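/* Illustrative example (not from the original sources): PUSH {r0-r3} with
   SP == 0x1000 records four 4-byte slots starting at 0xff0 plus the
   updated SP. */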
12039 /* REV, REV16, REVSH */
12040 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12041 thumb_insn_r->reg_rec_count = 1;
12043 case 12: /* fall through */
12046 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12047 while (register_bits)
12049 if (register_bits & 0x00000001)
12050 record_buf[index++] = register_count;
12051 register_bits = register_bits >> 1;
12054 record_buf[index++] = ARM_PS_REGNUM;
12055 record_buf[index++] = ARM_SP_REGNUM;
12056 thumb_insn_r->reg_rec_count = index;
12060 /* Handle enhanced software breakpoint insn, BKPT. */
12061 /* CPSR is changed so that execution continues in ARM state with normal
12062 interrupts disabled, entering abort mode. */
12063 /* The PC is set according to the high vector configuration. */
12064 /* If the user hits the breakpoint and then reverse-steps, we need to go
12065 back with the previous CPSR and Program Counter. */
12066 record_buf[0] = ARM_PS_REGNUM;
12067 record_buf[1] = ARM_LR_REGNUM;
12068 thumb_insn_r->reg_rec_count = 2;
12069 /* We need to save SPSR value, which is not yet done. */
12070 printf_unfiltered (_("Process record does not support instruction "
12071 "0x%0x at address %s.\n"),
12072 thumb_insn_r->arm_insn,
12073 paddress (thumb_insn_r->gdbarch,
12074 thumb_insn_r->this_addr));
12078 /* If-Then, and hints */
12085 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12086 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12092 /* Handling opcode 110 insns. */
12095 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12097 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12098 struct regcache *reg_cache = thumb_insn_r->regcache;
12100 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12101 uint32_t reg_src1 = 0;
12102 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12103 uint32_t index = 0, start_address = 0;
12104 uint32_t record_buf[24], record_buf_mem[48];
12106 ULONGEST u_regval = 0;
12108 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12109 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12115 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12117 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12118 while (register_bits)
12120 if (register_bits & 0x00000001)
12121 record_buf[index++] = register_count;
12122 register_bits = register_bits >> 1;
12125 record_buf[index++] = reg_src1;
12126 thumb_insn_r->reg_rec_count = index;
12128 else if (0 == opcode2)
12130 /* Handle STMIA. */
12131 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12133 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12134 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12135 while (register_bits)
12137 if (register_bits & 0x00000001)
12139 register_bits = register_bits >> 1;
12141 start_address = u_regval;
12142 thumb_insn_r->mem_rec_count = register_count;
12143 while (register_count)
12145 record_buf_mem[(register_count * 2) - 1] = start_address;
12146 record_buf_mem[(register_count * 2) - 2] = 4;
12147 start_address = start_address + 4;
12151 else if (0x1F == opcode1)
12153 /* Handle arm syscall insn. */
12154 if (tdep->arm_syscall_record != NULL)
12156 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12157 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12161 printf_unfiltered (_("no syscall record support\n"));
12166 /* B (1), the conditional branch, is automatically taken care of in
12167 process_record, as the PC is saved there. */
12169 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12170 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12176 /* Handling opcode 111 insns. */
12179 thumb_record_branch (insn_decode_record *thumb_insn_r)
12181 uint32_t record_buf[8];
12182 uint32_t bits_h = 0;
12184 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12186 if (2 == bits_h || 3 == bits_h)
12189 record_buf[0] = ARM_LR_REGNUM;
12190 thumb_insn_r->reg_rec_count = 1;
12192 else if (1 == bits_h)
12195 record_buf[0] = ARM_PS_REGNUM;
12196 record_buf[1] = ARM_LR_REGNUM;
12197 thumb_insn_r->reg_rec_count = 2;
12200 /* B(2) is automatically taken care of in process_record, as PC is
12203 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12208 /* Handler for thumb2 load/store multiple instructions. */
12211 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12213 struct regcache *reg_cache = thumb2_insn_r->regcache;
12215 uint32_t reg_rn, op;
12216 uint32_t register_bits = 0, register_count = 0;
12217 uint32_t index = 0, start_address = 0;
12218 uint32_t record_buf[24], record_buf_mem[48];
12220 ULONGEST u_regval = 0;
12222 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12223 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12225 if (0 == op || 3 == op)
12227 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12229 /* Handle RFE instruction. */
12230 record_buf[0] = ARM_PS_REGNUM;
12231 thumb2_insn_r->reg_rec_count = 1;
12235 /* Handle SRS instruction after reading banked SP. */
12236 return arm_record_unsupported_insn (thumb2_insn_r);
12239 else if (1 == op || 2 == op)
12241 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12243 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12244 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12245 while (register_bits)
12247 if (register_bits & 0x00000001)
12248 record_buf[index++] = register_count;
12251 register_bits = register_bits >> 1;
12253 record_buf[index++] = reg_rn;
12254 record_buf[index++] = ARM_PS_REGNUM;
12255 thumb2_insn_r->reg_rec_count = index;
12259 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12260 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12261 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12262 while (register_bits)
12264 if (register_bits & 0x00000001)
12267 register_bits = register_bits >> 1;
12272 /* Start address calculation for STM/STMIA/STMEA. */
12273 start_address = u_regval;
12277 /* Start address calculation for STMDB/STMFD. */
12278 start_address = u_regval - register_count * 4;
12281 thumb2_insn_r->mem_rec_count = register_count;
12282 while (register_count)
12284 record_buf_mem[register_count * 2 - 1] = start_address;
12285 record_buf_mem[register_count * 2 - 2] = 4;
12286 start_address = start_address + 4;
12289 record_buf[0] = reg_rn;
12290 record_buf[1] = ARM_PS_REGNUM;
12291 thumb2_insn_r->reg_rec_count = 2;
12295 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12297 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12299 return ARM_RECORD_SUCCESS;
12302 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12306 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12308 struct regcache *reg_cache = thumb2_insn_r->regcache;
12310 uint32_t reg_rd, reg_rn, offset_imm;
12311 uint32_t reg_dest1, reg_dest2;
12312 uint32_t address, offset_addr;
12313 uint32_t record_buf[8], record_buf_mem[8];
12314 uint32_t op1, op2, op3;
12316 ULONGEST u_regval[2];
12318 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12319 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12320 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12322 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12324 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12326 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12327 record_buf[0] = reg_dest1;
12328 record_buf[1] = ARM_PS_REGNUM;
12329 thumb2_insn_r->reg_rec_count = 2;
12332 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12334 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12335 record_buf[2] = reg_dest2;
12336 thumb2_insn_r->reg_rec_count = 3;
12341 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12342 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12344 if (0 == op1 && 0 == op2)
12346 /* Handle STREX. */
12347 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12348 address = u_regval[0] + (offset_imm * 4);
12349 record_buf_mem[0] = 4;
12350 record_buf_mem[1] = address;
12351 thumb2_insn_r->mem_rec_count = 1;
12352 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12353 record_buf[0] = reg_rd;
12354 thumb2_insn_r->reg_rec_count = 1;
12356 else if (1 == op1 && 0 == op2)
12358 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12359 record_buf[0] = reg_rd;
12360 thumb2_insn_r->reg_rec_count = 1;
12361 address = u_regval[0];
12362 record_buf_mem[1] = address;
12366 /* Handle STREXB. */
12367 record_buf_mem[0] = 1;
12368 thumb2_insn_r->mem_rec_count = 1;
12372 /* Handle STREXH. */
12373 record_buf_mem[0] = 2;
12374 thumb2_insn_r->mem_rec_count = 1;
12378 /* Handle STREXD. */
12379 address = u_regval[0];
12380 record_buf_mem[0] = 4;
12381 record_buf_mem[2] = 4;
12382 record_buf_mem[3] = address + 4;
12383 thumb2_insn_r->mem_rec_count = 2;
12388 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12390 if (bit (thumb2_insn_r->arm_insn, 24))
12392 if (bit (thumb2_insn_r->arm_insn, 23))
12393 offset_addr = u_regval[0] + (offset_imm * 4);
12395 offset_addr = u_regval[0] - (offset_imm * 4);
12397 address = offset_addr;
12400 address = u_regval[0];
12402 record_buf_mem[0] = 4;
12403 record_buf_mem[1] = address;
12404 record_buf_mem[2] = 4;
12405 record_buf_mem[3] = address + 4;
12406 thumb2_insn_r->mem_rec_count = 2;
12407 record_buf[0] = reg_rn;
12408 thumb2_insn_r->reg_rec_count = 1;
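/* Illustrative example (not from the original sources): STRD r2, r3,
   [r5, #8] with r5 == 0x2000 records two 4-byte slots at 0x2008 and
   0x200c, plus the base register r5. */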
12412 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12414 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12416 return ARM_RECORD_SUCCESS;
12419 /* Handler for thumb2 data processing (shifted register and modified immediate)
12423 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12425 uint32_t reg_rd, op;
12426 uint32_t record_buf[8];
12428 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12429 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12431 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12433 record_buf[0] = ARM_PS_REGNUM;
12434 thumb2_insn_r->reg_rec_count = 1;
12438 record_buf[0] = reg_rd;
12439 record_buf[1] = ARM_PS_REGNUM;
12440 thumb2_insn_r->reg_rec_count = 2;
12443 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12445 return ARM_RECORD_SUCCESS;
12448 /* Generic handler for thumb2 instructions which affect destination and PS
12452 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12455 uint32_t record_buf[8];
12457 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12459 record_buf[0] = reg_rd;
12460 record_buf[1] = ARM_PS_REGNUM;
12461 thumb2_insn_r->reg_rec_count = 2;
12463 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12465 return ARM_RECORD_SUCCESS;
12468 /* Handler for thumb2 branch and miscellaneous control instructions. */
12471 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12473 uint32_t op, op1, op2;
12474 uint32_t record_buf[8];
12476 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12477 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12478 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12480 /* Handle MSR insn. */
12481 if (!(op1 & 0x2) && 0x38 == op)
12485 /* CPSR is going to be changed. */
12486 record_buf[0] = ARM_PS_REGNUM;
12487 thumb2_insn_r->reg_rec_count = 1;
12491 arm_record_unsupported_insn (thumb2_insn_r);
12495 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12498 record_buf[0] = ARM_PS_REGNUM;
12499 record_buf[1] = ARM_LR_REGNUM;
12500 thumb2_insn_r->reg_rec_count = 2;
12503 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12505 return ARM_RECORD_SUCCESS;
12508 /* Handler for thumb2 store single data item instructions. */
12511 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12513 struct regcache *reg_cache = thumb2_insn_r->regcache;
12515 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12516 uint32_t address, offset_addr;
12517 uint32_t record_buf[8], record_buf_mem[8];
12520 ULONGEST u_regval[2];
12522 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12523 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12524 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12525 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12527 if (bit (thumb2_insn_r->arm_insn, 23))
12530 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12531 offset_addr = u_regval[0] + offset_imm;
12532 address = offset_addr;
12537 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12539 /* Handle STRB (register). */
12540 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12541 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12542 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12543 offset_addr = u_regval[1] << shift_imm;
12544 address = u_regval[0] + offset_addr;
12548 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12549 if (bit (thumb2_insn_r->arm_insn, 10))
12551 if (bit (thumb2_insn_r->arm_insn, 9))
12552 offset_addr = u_regval[0] + offset_imm;
12554 offset_addr = u_regval[0] - offset_imm;
12556 address = offset_addr;
12559 address = u_regval[0];
12565 /* Store byte instructions. */
12568 record_buf_mem[0] = 1;
12570 /* Store half word instructions. */
12573 record_buf_mem[0] = 2;
12575 /* Store word instructions. */
12578 record_buf_mem[0] = 4;
12582 gdb_assert_not_reached ("no decoding pattern found");
12586 record_buf_mem[1] = address;
12587 thumb2_insn_r->mem_rec_count = 1;
12588 record_buf[0] = reg_rn;
12589 thumb2_insn_r->reg_rec_count = 1;
12591 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12593 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12595 return ARM_RECORD_SUCCESS;
12598 /* Handler for thumb2 load memory hints instructions. */
12601 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12603 uint32_t record_buf[8];
12604 uint32_t reg_rt, reg_rn;
12606 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12607 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12609 if (ARM_PC_REGNUM != reg_rt)
12611 record_buf[0] = reg_rt;
12612 record_buf[1] = reg_rn;
12613 record_buf[2] = ARM_PS_REGNUM;
12614 thumb2_insn_r->reg_rec_count = 3;
12616 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12618 return ARM_RECORD_SUCCESS;
12621 return ARM_RECORD_FAILURE;
12624 /* Handler for thumb2 load word instructions. */
12627 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12629 uint32_t record_buf[8];
12631 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12632 record_buf[1] = ARM_PS_REGNUM;
12633 thumb2_insn_r->reg_rec_count = 2;
12635 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12637 return ARM_RECORD_SUCCESS;
12640 /* Handler for thumb2 long multiply, long multiply accumulate, and
12641 divide instructions. */
12644 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12646 uint32_t opcode1 = 0, opcode2 = 0;
12647 uint32_t record_buf[8];
12649 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12650 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12652 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12654 /* Handle SMULL, UMULL, SMLAL and UMLAL. */
12656 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12657 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12658 record_buf[2] = ARM_PS_REGNUM;
12659 thumb2_insn_r->reg_rec_count = 3;
12661 else if (1 == opcode1 || 3 == opcode1)
12663 /* Handle SDIV and UDIV. */
12664 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12665 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12666 record_buf[2] = ARM_PS_REGNUM;
12667 thumb2_insn_r->reg_rec_count = 3;
12670 return ARM_RECORD_FAILURE;
12672 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12674 return ARM_RECORD_SUCCESS;
12677 /* Record handler for thumb32 coprocessor instructions. */
12680 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12682 if (bit (thumb2_insn_r->arm_insn, 25))
12683 return arm_record_coproc_data_proc (thumb2_insn_r);
12685 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12688 /* Record handler for advanced SIMD structure load/store instructions. */
12691 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12693 struct regcache *reg_cache = thumb2_insn_r->regcache;
12694 uint32_t l_bit, a_bit, b_bits;
12695 uint32_t record_buf[128], record_buf_mem[128];
12696 uint32_t reg_rn, reg_vd, address, f_elem;
12697 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12700 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12701 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12702 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12703 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12704 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12705 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12706 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12707 f_elem = 8 / f_ebytes;
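/* Illustrative example (not from the original sources): a size field of
   0b10 gives f_ebytes == 4, so each 8-byte D register holds
   f_elem == 2 elements. */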
12711 ULONGEST u_regval = 0;
12712 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12713 address = u_regval;
12718 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12720 if (b_bits == 0x07)
12722 else if (b_bits == 0x0a)
12724 else if (b_bits == 0x06)
12726 else if (b_bits == 0x02)
12731 for (index_r = 0; index_r < bf_regs; index_r++)
12733 for (index_e = 0; index_e < f_elem; index_e++)
12735 record_buf_mem[index_m++] = f_ebytes;
12736 record_buf_mem[index_m++] = address;
12737 address = address + f_ebytes;
12738 thumb2_insn_r->mem_rec_count += 1;
12743 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12745 if (b_bits == 0x09 || b_bits == 0x08)
12747 else if (b_bits == 0x03)
12752 for (index_r = 0; index_r < bf_regs; index_r++)
12753 for (index_e = 0; index_e < f_elem; index_e++)
12755 for (loop_t = 0; loop_t < 2; loop_t++)
12757 record_buf_mem[index_m++] = f_ebytes;
12758 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12759 thumb2_insn_r->mem_rec_count += 1;
12761 address = address + (2 * f_ebytes);
12765 else if ((b_bits & 0x0e) == 0x04)
12767 for (index_e = 0; index_e < f_elem; index_e++)
12769 for (loop_t = 0; loop_t < 3; loop_t++)
12771 record_buf_mem[index_m++] = f_ebytes;
12772 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12773 thumb2_insn_r->mem_rec_count += 1;
12775 address = address + (3 * f_ebytes);
12779 else if (!(b_bits & 0x0e))
12781 for (index_e = 0; index_e < f_elem; index_e++)
12783 for (loop_t = 0; loop_t < 4; loop_t++)
12785 record_buf_mem[index_m++] = f_ebytes;
12786 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12787 thumb2_insn_r->mem_rec_count += 1;
12789 address = address + (4 * f_ebytes);
12795 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12797 if (bft_size == 0x00)
12799 else if (bft_size == 0x01)
12801 else if (bft_size == 0x02)
12807 if (!(b_bits & 0x0b) || b_bits == 0x08)
12808 thumb2_insn_r->mem_rec_count = 1;
12810 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12811 thumb2_insn_r->mem_rec_count = 2;
12813 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12814 thumb2_insn_r->mem_rec_count = 3;
12816 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12817 thumb2_insn_r->mem_rec_count = 4;
12819 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12821 record_buf_mem[index_m * 2] = f_ebytes;
12822 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12831 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12832 thumb2_insn_r->reg_rec_count = 1;
12834 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12835 thumb2_insn_r->reg_rec_count = 2;
12837 else if ((b_bits & 0x0e) == 0x04)
12838 thumb2_insn_r->reg_rec_count = 3;
12840 else if (!(b_bits & 0x0e))
12841 thumb2_insn_r->reg_rec_count = 4;
12846 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12847 thumb2_insn_r->reg_rec_count = 1;
12849 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12850 thumb2_insn_r->reg_rec_count = 2;
12852 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12853 thumb2_insn_r->reg_rec_count = 3;
12855 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12856 thumb2_insn_r->reg_rec_count = 4;
12858 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12859 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12863 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12865 record_buf[index_r] = reg_rn;
12866 thumb2_insn_r->reg_rec_count += 1;
12869 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12871 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12876 /* Decodes thumb2 instruction type and invokes its record handler. */
12878 static unsigned int
12879 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12881 uint32_t op, op1, op2;
12883 op = bit (thumb2_insn_r->arm_insn, 15);
12884 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12885 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
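/* Illustrative example (not from the original sources): the 32-bit Thumb2
   instruction 0xf8d12004 (ldr.w r2, [r1, #4]) has op1 == 0x3 and
   (op2 & 0x67) == 0x05, so it is dispatched to thumb2_record_ld_word. */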
12889 if (!(op2 & 0x64))
12891 /* Load/store multiple instruction. */
12892 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12894 else if ((op2 & 0x64) == 0x4)
12896 /* Load/store (dual/exclusive) and table branch instruction. */
12897 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12899 else if ((op2 & 0x60) == 0x20)
12901 /* Data-processing (shifted register). */
12902 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12904 else if (op2 & 0x40)
12906 /* Co-processor instructions. */
12907 return thumb2_record_coproc_insn (thumb2_insn_r);
12910 else if (op1 == 0x02)
12914 /* Branches and miscellaneous control instructions. */
12915 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12917 else if (op2 & 0x20)
12919 /* Data-processing (plain binary immediate) instruction. */
12920 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12924 /* Data-processing (modified immediate). */
12925 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12928 else if (op1 == 0x03)
12930 if (!(op2 & 0x71))
12932 /* Store single data item. */
12933 return thumb2_record_str_single_data (thumb2_insn_r);
12935 else if (!((op2 & 0x71) ^ 0x10))
12937 /* Advanced SIMD or structure load/store instructions. */
12938 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12940 else if (!((op2 & 0x67) ^ 0x01))
12942 /* Load byte, memory hints instruction. */
12943 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12945 else if (!((op2 & 0x67) ^ 0x03))
12947 /* Load halfword, memory hints instruction. */
12948 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12950 else if (!((op2 & 0x67) ^ 0x05))
12952 /* Load word instruction. */
12953 return thumb2_record_ld_word (thumb2_insn_r);
12955 else if (!((op2 & 0x70) ^ 0x20))
12957 /* Data-processing (register) instruction. */
12958 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12960 else if (!((op2 & 0x78) ^ 0x30))
12962 /* Multiply, multiply accumulate, abs diff instruction. */
12963 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12965 else if (!((op2 & 0x78) ^ 0x38))
12967 /* Long multiply, long multiply accumulate, and divide. */
12968 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12970 else if (op2 & 0x40)
12972 /* Co-processor instructions. */
12973 return thumb2_record_coproc_insn (thumb2_insn_r);
/* Abstract memory reader.  */

class abstract_memory_reader
{
public:
  /* Read LEN bytes of target memory at address MEMADDR, placing the
     results in GDB's memory at BUF.  Return true on success.  */

  virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
};
/* Instruction reader from real target.  */

class instruction_reader : public abstract_memory_reader
{
public:
  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    if (target_read_memory (memaddr, buf, len))
      return false;
    else
      return true;
  }
};
/* Extracts an arm/thumb/thumb2 insn depending on the size, and returns 0 on
   success and a positive value on failure.  */

static int
extract_arm_insn (abstract_memory_reader& reader,
		  insn_decode_record *insn_record, uint32_t insn_size)
{
  gdb_byte buf[insn_size];

  memset (&buf[0], 0, insn_size);
  if (!reader.read (insn_record->this_addr, buf, insn_size))
    return 1;
  insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
			   insn_size,
			   gdbarch_byte_order_for_code (insn_record->gdbarch));
  return 0;
}
typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);

/* Decode an arm/thumb insn depending on condition codes and opcodes, and
   dispatch it to the matching record handler.  */

static int
decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
	     record_type_t record_type, uint32_t insn_size)
{
  /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm
     instruction.  */
  static const sti_arm_hdl_fp_t arm_handle_insn[8] =
  {
    arm_record_data_proc_misc_ld_str,	/* 000.  */
    arm_record_data_proc_imm,		/* 001.  */
    arm_record_ld_st_imm_offset,	/* 010.  */
    arm_record_ld_st_reg_offset,	/* 011.  */
    arm_record_ld_st_multiple,		/* 100.  */
    arm_record_b_bl,			/* 101.  */
    arm_record_asimd_vfp_coproc,	/* 110.  */
    arm_record_coproc_data_proc		/* 111.  */
  };
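  /* For example, an ARM LDR (immediate) such as 0xe59f1004 (ldr r1, [pc, #4])
     has bits 27:25 equal to 0b010, so it is dispatched to
     arm_record_ld_st_imm_offset.  */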
  /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb
     instruction.  */
  static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
  {
    thumb_record_shift_add_sub,		/* 000.  */
    thumb_record_add_sub_cmp_mov,	/* 001.  */
    thumb_record_ld_st_reg_offset,	/* 010.  */
    thumb_record_ld_st_imm_offset,	/* 011.  */
    thumb_record_ld_st_stack,		/* 100.  */
    thumb_record_misc,			/* 101.  */
    thumb_record_ldm_stm_swi,		/* 110.  */
    thumb_record_branch			/* 111.  */
  };
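  /* For example, a 16-bit Thumb "push {r4, lr}" encodes as 0xb510; bits 15:13
     are 0b101, so it is handled by thumb_record_misc.  */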
  uint32_t ret = 0;    /* Return value: negative on failure, 0 on success.  */
  uint32_t insn_id = 0;

  if (extract_arm_insn (reader, arm_record, insn_size))
    {
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record->gdbarch,
				       arm_record->this_addr), insn_size);
	}
      return -1;
    }
  else if (ARM_RECORD == record_type)
    {
      arm_record->cond = bits (arm_record->arm_insn, 28, 31);
      insn_id = bits (arm_record->arm_insn, 25, 27);

      if (arm_record->cond == 0xf)
	ret = arm_record_extension_space (arm_record);
      else
	{
	  /* This insn did not fall into the extension space, so decode it
	     through its group handler.  */
	  ret = arm_handle_insn[insn_id] (arm_record);
	}
      if (ret != ARM_RECORD_SUCCESS)
	{
	  arm_record_unsupported_insn (arm_record);
	  ret = -1;
	}
    }
  else if (THUMB_RECORD == record_type)
    {
      /* As thumb does not have condition codes, we set it to negative.  */
      arm_record->cond = -1;
      insn_id = bits (arm_record->arm_insn, 13, 15);
      ret = thumb_handle_insn[insn_id] (arm_record);
      if (ret != ARM_RECORD_SUCCESS)
	{
	  arm_record_unsupported_insn (arm_record);
	  ret = -1;
	}
    }
  else if (THUMB2_RECORD == record_type)
    {
      /* As thumb does not have condition codes, we set it to negative.  */
      arm_record->cond = -1;

      /* Swap the first half of the 32-bit thumb instruction with the second
	 half.  */
      arm_record->arm_insn
	= (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);

      ret = thumb2_record_decode_insn_handler (arm_record);

      if (ret != ARM_RECORD_SUCCESS)
	{
	  arm_record_unsupported_insn (arm_record);
	  ret = -1;
	}
    }
  else
    {
      /* Not a valid record type; this should never be reached.  */
      gdb_assert_not_reached ("not a valid instruction, could not decode");
    }

  return ret;
}
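/* decode_insn is driven from two places: arm_process_record below reads
   instructions from the live target through instruction_reader, while the
   unit tests in the selftests namespace feed it canned opcodes through
   instruction_reader_thumb.  */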
#if GDB_SELF_TEST
namespace selftests {

/* Provide both 16-bit and 32-bit thumb instructions.  */

class instruction_reader_thumb : public abstract_memory_reader
{
public:
  template<size_t SIZE>
  instruction_reader_thumb (enum bfd_endian endian,
			    const uint16_t (&insns)[SIZE])
    : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    SELF_CHECK (len == 4 || len == 2);
    SELF_CHECK (memaddr % 2 == 0);
    SELF_CHECK ((memaddr / 2) < m_insns_size);

    store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
    if (len == 4)
      {
	store_unsigned_integer (&buf[2], 2, m_endian,
				m_insns[memaddr / 2 + 1]);
      }
    return true;
  }

private:
  enum bfd_endian m_endian;
  const uint16_t *m_insns;
  size_t m_insns_size;
};
static void
arm_record_test (void)
{
  struct gdbarch_info info;
  gdbarch_info_init (&info);
  info.bfd_arch_info = bfd_scan_arch ("arm");

  struct gdbarch *gdbarch = gdbarch_find_by_info (info);

  SELF_CHECK (gdbarch != NULL);

  /* 16-bit Thumb instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* db b2	uxtb	r3, r3 */
      0xb2db,
      /* cd 58	ldr	r5, [r1, r3] */
      0x58cd,
    };

    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			   THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 3);
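    /* uxtb r3, r3 only writes r3, so exactly one register record (r3) and no
       memory records are expected.  */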
    arm_record.this_addr += 2;
    ret = decode_insn (reader, &arm_record, THUMB_RECORD,
		       THUMB_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 5);
  }
  /* 32-bit Thumb-2 instructions.  */
  {
    insn_decode_record arm_record;

    memset (&arm_record, 0, sizeof (insn_decode_record));
    arm_record.gdbarch = gdbarch;

    static const uint16_t insns[] = {
      /* 1d ee 70 7f	mrc	15, 0, r7, cr13, cr0, {3} */
      0xee1d, 0x7f70,
    };
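    /* After decode_insn swaps the halfwords this becomes 0xee1d7f70: op1
       (bits 28:27) is 0x01 and op2 (bits 26:20) has bit 6 set, so
       thumb2_record_decode_insn_handler dispatches it to
       thumb2_record_coproc_insn, and the destination register r7 is
       recorded.  */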
    enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
    instruction_reader_thumb reader (endian, insns);
    int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			   THUMB2_INSN_SIZE_BYTES);

    SELF_CHECK (ret == 0);
    SELF_CHECK (arm_record.mem_rec_count == 0);
    SELF_CHECK (arm_record.reg_rec_count == 1);
    SELF_CHECK (arm_record.arm_regs[0] == 7);
  }
}

} // namespace selftests
#endif /* GDB_SELF_TEST */
/* Cleans up local record registers and memory allocations.  */

static void
deallocate_reg_mem (insn_decode_record *record)
{
  xfree (record->arm_regs);
  xfree (record->arm_mems);
}
/* Parse the current instruction and record the values of the registers and
   memory that will be changed by the current instruction to
   record_arch_list.  Return -1 if something is wrong.  */

int
arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
		    CORE_ADDR insn_addr)
{
  uint32_t no_of_rec = 0;
  uint32_t ret = 0;  /* Return value: -1 on record failure, 0 on success.  */
  ULONGEST t_bit = 0, insn_id = 0;

  ULONGEST u_regval = 0;

  insn_decode_record arm_record;

  memset (&arm_record, 0, sizeof (insn_decode_record));
  arm_record.regcache = regcache;
  arm_record.this_addr = insn_addr;
  arm_record.gdbarch = gdbarch;

  if (record_debug > 1)
    {
      fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
			  "addr = %s\n",
			  paddress (gdbarch, arm_record.this_addr));
    }

  instruction_reader reader;
  if (extract_arm_insn (reader, &arm_record, 2))
    {
      if (record_debug)
	{
	  printf_unfiltered (_("Process record: error reading memory at "
			       "addr %s len = %d.\n"),
			     paddress (arm_record.gdbarch,
				       arm_record.this_addr), 2);
	}
      return -1;
    }
  /* Check whether the insn is a thumb or an arm one.  */

  t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
  regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);

  if (!(u_regval & t_bit))
    {
      /* We are decoding an arm insn.  */
      ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
    }
  else
    {
      insn_id = bits (arm_record.arm_insn, 11, 15);
      /* Is it a thumb2 insn?  */
      if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
	{
	  ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
			     THUMB2_INSN_SIZE_BYTES);
	}
      else
	{
	  /* We are decoding a thumb insn.  */
	  ret = decode_insn (reader, &arm_record, THUMB_RECORD,
			     THUMB_INSN_SIZE_BYTES);
	}
    }
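  /* The values 0x1D, 0x1E and 0x1F tested above are the bits-15:11 prefixes
     0b11101, 0b11110 and 0b11111 that mark the first halfword of a 32-bit
     Thumb-2 instruction; any other prefix means a 16-bit Thumb
     instruction.  */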
  if (0 == ret)
    {
      /* Record registers.  */
      record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
      if (arm_record.arm_regs)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_reg
		  (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
		ret = -1;
	    }
	}
      /* Record memories.  */
      if (arm_record.arm_mems)
	{
	  for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
	    {
	      if (record_full_arch_list_add_mem
		  ((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
		   arm_record.arm_mems[no_of_rec].len))
		ret = -1;
	    }
	}
      if (record_full_arch_list_add_end ())
	ret = -1;
    }

  deallocate_reg_mem (&arm_record);

  return ret;
}
/* See arm-tdep.h.  */

const target_desc *
arm_read_description (arm_fp_type fp_type)
{
  struct target_desc *tdesc = tdesc_arm_list[fp_type];

  if (tdesc == nullptr)
    {
      tdesc = arm_create_target_description (fp_type);
      tdesc_arm_list[fp_type] = tdesc;
    }

  return tdesc;
}
/* See arm-tdep.h.  */

const target_desc *
arm_read_mprofile_description (arm_m_profile_type m_type)
{
  struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];

  if (tdesc == nullptr)
    {
      tdesc = arm_create_mprofile_target_description (m_type);
      tdesc_arm_mprofile_list[m_type] = tdesc;
    }

  return tdesc;
}