1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2021 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2/frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
51 #include "arch/arm-get-next-pcs.h"
53 #include "gdb/sim-arm.h"
56 #include "coff/internal.h"
60 #include "record-full.h"
66 #include "gdbsupport/selftest.h"
69 static bool arm_debug;
71 /* Print an "arm" debug statement. */
73 #define arm_debug_printf(fmt, ...) \
74 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
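/* For example, the prologue scanners later in this file report their
   stopping point with a call such as

     arm_debug_printf ("Prologue scan stopped at %s", paddress (gdbarch, start));

   which only produces output when ARM_DEBUG has been enabled by the user
   (via the corresponding "set debug arm" command).  */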
76 /* Macros for setting and testing a bit in a minimal symbol that marks
77 it as a Thumb function. The MSB of the minimal symbol's "info" field
78 is used for this purpose.
80 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
81 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
83 #define MSYMBOL_SET_SPECIAL(msym) \
84 MSYMBOL_TARGET_FLAG_1 (msym) = 1
86 #define MSYMBOL_IS_SPECIAL(msym) \
87 MSYMBOL_TARGET_FLAG_1 (msym)
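/* Illustrative use (a sketch only; the symbol-reader hook itself is not
   shown in this excerpt): when minimal symbols are created for an ELF
   file, a symbol known to name a Thumb function can be marked with

     MSYMBOL_SET_SPECIAL (msym);

   so that arm_pc_is_thumb, below, can later query
   MSYMBOL_IS_SPECIAL (msym) to recover the instruction set mode for
   that address.  */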
89 struct arm_mapping_symbol
94 bool operator< (const arm_mapping_symbol &other) const
95 { return this->value < other.value; }
98 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
102 explicit arm_per_bfd (size_t num_sections)
103 : section_maps (new arm_mapping_symbol_vec[num_sections]),
104 section_maps_sorted (new bool[num_sections] ())
107 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
109 /* Information about mapping symbols ($a, $d, $t) in the objfile.
111 The format is an array of vectors of arm_mapping_symbols: there is one
112 vector for each section of the objfile (the array is indexed by BFD section
115 For each section, the vector of arm_mapping_symbol is sorted by
116 symbol value (address). */
117 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
119 /* For each corresponding element of section_maps above, is this vector
121 std::unique_ptr<bool[]> section_maps_sorted;
124 /* Per-bfd data used for mapping symbols. */
125 static bfd_key<arm_per_bfd> arm_bfd_data_key;
127 /* The list of available "set arm ..." and "show arm ..." commands. */
128 static struct cmd_list_element *setarmcmdlist = NULL;
129 static struct cmd_list_element *showarmcmdlist = NULL;
131 /* The type of floating-point to use. Keep this in sync with enum
132 arm_float_model, and the help string in _initialize_arm_tdep. */
133 static const char *const fp_model_strings[] =
143 /* A variable that can be configured by the user. */
144 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
145 static const char *current_fp_model = "auto";
147 /* The ABI to use. Keep this in sync with arm_abi_kind. */
148 static const char *const arm_abi_strings[] =
156 /* A variable that can be configured by the user. */
157 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
158 static const char *arm_abi_string = "auto";
160 /* The execution mode to assume. */
161 static const char *const arm_mode_strings[] =
169 static const char *arm_fallback_mode_string = "auto";
170 static const char *arm_force_mode_string = "auto";
172 /* The standard register names, and all the valid aliases for them. Note
173 that `fp', `sp' and `pc' are not added in this alias list, because they
174 have been added as builtin user registers in
175 std-regs.c:_initialize_frame_reg. */
180 } arm_register_aliases[] = {
181 /* Basic register numbers. */
198 /* Synonyms (argument and variable registers). */
211 /* Other platform-specific names for r9. */
217 /* Names used by GCC (not listed in the ARM EABI). */
219 /* A special name from the older ATPCS. */
223 static const char *const arm_register_names[] =
224 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
225 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
226 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
227 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
228 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
229 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
230 "fps", "cpsr" }; /* 24 25 */
232 /* Holds the current set of options to be passed to the disassembler. */
233 static char *arm_disassembler_options;
235 /* Valid register name styles. */
236 static const char **valid_disassembly_styles;
238 /* Disassembly style to use. Default to "std" register names. */
239 static const char *disassembly_style;
241 /* All possible arm target descriptors. */
242 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
243 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
245 /* This is used to keep the bfd arch_info in sync with the disassembly
247 static void set_disassembly_style_sfunc (const char *, int,
248 struct cmd_list_element *);
249 static void show_disassembly_style_sfunc (struct ui_file *, int,
250 struct cmd_list_element *,
253 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
254 readable_regcache *regcache,
255 int regnum, gdb_byte *buf);
256 static void arm_neon_quad_write (struct gdbarch *gdbarch,
257 struct regcache *regcache,
258 int regnum, const gdb_byte *buf);
261 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
264 /* get_next_pcs operations. */
265 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
266 arm_get_next_pcs_read_memory_unsigned_integer,
267 arm_get_next_pcs_syscall_next_pc,
268 arm_get_next_pcs_addr_bits_remove,
269 arm_get_next_pcs_is_thumb,
273 struct arm_prologue_cache
275 /* The stack pointer at the time this frame was created; i.e. the
276 caller's stack pointer when this function was called. It is used
277 to identify this frame. */
280 /* The frame base for this frame is just prev_sp - frame size.
281 FRAMESIZE is the distance from the frame pointer to the
282 initial stack pointer. */
286 /* The register used to hold the frame pointer for this frame. */
289 /* Saved register offsets. */
290 trad_frame_saved_reg *saved_regs;
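/* Worked example (illustrative numbers): if the prologue scan finds that
   the frame register r7 ends up 32 bytes below the stack pointer at
   function entry, then framereg is THUMB_FP_REGNUM, framesize is 32, and
   the caller's stack pointer is recovered as prev_sp = r7 + 32, exactly
   as arm_make_prologue_cache computes it below.  */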
295 /* Abstract class to read ARM instructions from memory. */
297 class arm_instruction_reader
300 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
301 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
304 /* Read instructions from target memory. */
306 class target_arm_instruction_reader : public arm_instruction_reader
309 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
311 return read_code_unsigned_integer (memaddr, 4, byte_order);
317 static CORE_ADDR arm_analyze_prologue
318 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
319 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
321 /* Architecture version for displaced stepping. This affects the behaviour of
322 certain instructions, and really should not be hard-wired. */
324 #define DISPLACED_STEPPING_ARCH_VERSION 5
326 /* See arm-tdep.h. */
328 bool arm_apcs_32 = true;
330 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
333 arm_psr_thumb_bit (struct gdbarch *gdbarch)
335 if (gdbarch_tdep (gdbarch)->is_m)
341 /* Determine if the processor is currently executing in Thumb mode. */
344 arm_is_thumb (struct regcache *regcache)
347 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
349 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
351 return (cpsr & t_bit) != 0;
354 /* Determine if FRAME is executing in Thumb mode. */
357 arm_frame_is_thumb (struct frame_info *frame)
360 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
362 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
363 directly (from a signal frame or dummy frame) or by interpreting
364 the saved LR (from a prologue or DWARF frame). So consult it and
365 trust the unwinders. */
366 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
368 return (cpsr & t_bit) != 0;
371 /* Search for the mapping symbol covering MEMADDR. If one is found,
372 return its type. Otherwise, return 0. If START is non-NULL,
373 set *START to the location of the mapping symbol. */
376 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
378 struct obj_section *sec;
380 /* If there are mapping symbols, consult them. */
381 sec = find_pc_section (memaddr);
384 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
387 unsigned int section_idx = sec->the_bfd_section->index;
388 arm_mapping_symbol_vec &map
389 = data->section_maps[section_idx];
391 /* Sort the vector on first use. */
392 if (!data->section_maps_sorted[section_idx])
394 std::sort (map.begin (), map.end ());
395 data->section_maps_sorted[section_idx] = true;
398 struct arm_mapping_symbol map_key
399 = { memaddr - obj_section_addr (sec), 0 };
400 arm_mapping_symbol_vec::const_iterator it
401 = std::lower_bound (map.begin (), map.end (), map_key);
403 /* std::lower_bound finds the earliest ordered insertion
404 point. If the symbol at this position starts at this exact
405 address, we use that; otherwise, the preceding
406 mapping symbol covers this address. */
409 if (it->value == map_key.value)
412 *start = it->value + obj_section_addr (sec);
417 if (it > map.begin ())
419 arm_mapping_symbol_vec::const_iterator prev_it
423 *start = prev_it->value + obj_section_addr (sec);
424 return prev_it->type;
432 /* Determine if the program counter specified in MEMADDR is in a Thumb
433 function. This function should be called for addresses unrelated to
434 any executing frame; otherwise, prefer arm_frame_is_thumb. */
437 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
439 struct bound_minimal_symbol sym;
441 arm_displaced_step_copy_insn_closure *dsc = nullptr;
443 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
444 dsc = ((arm_displaced_step_copy_insn_closure * )
445 gdbarch_displaced_step_copy_insn_closure_by_addr
446 (gdbarch, current_inferior (), memaddr));
448 /* When checking the mode of a displaced instruction in the copy area, the mode
449 should be determined by the instruction at the original address. */
452 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
453 (unsigned long) dsc->insn_addr,
454 (unsigned long) memaddr);
455 memaddr = dsc->insn_addr;
458 /* If bit 0 of the address is set, assume this is a Thumb address. */
459 if (IS_THUMB_ADDR (memaddr))
462 /* If the user wants to override the symbol table, let them. */
463 if (strcmp (arm_force_mode_string, "arm") == 0)
465 if (strcmp (arm_force_mode_string, "thumb") == 0)
468 /* ARM v6-M and v7-M are always in Thumb mode. */
469 if (gdbarch_tdep (gdbarch)->is_m)
472 /* If there are mapping symbols, consult them. */
473 type = arm_find_mapping_symbol (memaddr, NULL);
477 /* Thumb functions have a "special" bit set in minimal symbols. */
478 sym = lookup_minimal_symbol_by_pc (memaddr);
480 return (MSYMBOL_IS_SPECIAL (sym.minsym));
482 /* If the user wants to override the fallback mode, let them. */
483 if (strcmp (arm_fallback_mode_string, "arm") == 0)
485 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
488 /* If we couldn't find any symbol, but we're talking to a running
489 target, then trust the current value of $cpsr. This lets
490 "display/i $pc" always show the correct mode (though if there is
491 a symbol table we will not reach here, so it still may not be
492 displayed in the mode in which it will be executed). */
493 if (target_has_registers ())
494 return arm_frame_is_thumb (get_current_frame ());
496 /* Otherwise we're out of luck; we assume ARM. */
500 /* Determine if the address specified equals any of these magic return
501 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
504 From ARMv6-M Reference Manual B1.5.8
505 Table B1-5 Exception return behavior
507 EXC_RETURN Return To Return Stack
508 0xFFFFFFF1 Handler mode Main
509 0xFFFFFFF9 Thread mode Main
510 0xFFFFFFFD Thread mode Process
512 From ARMv7-M Reference Manual B1.5.8
513 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
515 EXC_RETURN Return To Return Stack
516 0xFFFFFFF1 Handler mode Main
517 0xFFFFFFF9 Thread mode Main
518 0xFFFFFFFD Thread mode Process
520 Table B1-9 EXC_RETURN definition of exception return behavior, with
523 EXC_RETURN Return To Return Stack Frame Type
524 0xFFFFFFE1 Handler mode Main Extended
525 0xFFFFFFE9 Thread mode Main Extended
526 0xFFFFFFED Thread mode Process Extended
527 0xFFFFFFF1 Handler mode Main Basic
528 0xFFFFFFF9 Thread mode Main Basic
529 0xFFFFFFFD Thread mode Process Basic
531 For more details see "B1.5.8 Exception return behavior"
532 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
534 The ARMv8-M Architecture Technical Reference also adds the following,
535 for implementations without the Security Extension:
538 0xFFFFFFB0 Return to Handler mode.
539 0xFFFFFFB8 Return to Thread mode using the main stack.
540 0xFFFFFFBC Return to Thread mode using the process stack. */
543 arm_m_addr_is_magic (CORE_ADDR addr)
547 /* Values from ARMv8-M Architecture Technical Reference. */
551 /* Values from the tables in B1.5.8, the EXC_RETURN definitions of
552 the exception return behavior. */
559 /* Address is magic. */
563 /* Address is not magic. */
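/* For example, finding 0xFFFFFFFD in the link register when a v7-M
   exception handler returns means "return to Thread mode, using the
   Process stack" (see the tables above), so such a value must be
   treated as a magic marker rather than as an ordinary code address.  */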
568 /* Remove useless bits from addresses in a running program. */
570 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
572 /* On M-profile devices, do not strip the low bit from EXC_RETURN
573 (the magic exception return address). */
574 if (gdbarch_tdep (gdbarch)->is_m
575 && arm_m_addr_is_magic (val))
579 return UNMAKE_THUMB_ADDR (val);
581 return (val & 0x03fffffc);
584 /* Return 1 if PC is the start of a compiler helper function which
585 can be safely ignored during prologue skipping. IS_THUMB is true
586 if the function is known to be a Thumb function due to the way it
589 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
591 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
592 struct bound_minimal_symbol msym;
594 msym = lookup_minimal_symbol_by_pc (pc);
595 if (msym.minsym != NULL
596 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
597 && msym.minsym->linkage_name () != NULL)
599 const char *name = msym.minsym->linkage_name ();
601 /* The GNU linker's Thumb call stub to foo is named
603 if (strstr (name, "_from_thumb") != NULL)
606 /* On soft-float targets, __truncdfsf2 is called to convert promoted
607 arguments to their argument types in non-prototyped
609 if (startswith (name, "__truncdfsf2"))
611 if (startswith (name, "__aeabi_d2f"))
614 /* Internal functions related to thread-local storage. */
615 if (startswith (name, "__tls_get_addr"))
617 if (startswith (name, "__aeabi_read_tp"))
622 /* If we run against a stripped glibc, we may be unable to identify
623 special functions by name. Check for one important case,
624 __aeabi_read_tp, by comparing the *code* against the default
625 implementation (this is hand-written ARM assembler in glibc). */
628 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
629 == 0xe3e00a0f /* mov r0, #0xffff0fff */
630 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
631 == 0xe240f01f) /* sub pc, r0, #31 */
638 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
639 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
641 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
642 ((bits ((insn1), 0, 3) << 12) \
643 | (bits ((insn1), 10, 10) << 11) \
644 | (bits ((insn2), 12, 14) << 8) \
645 | bits ((insn2), 0, 7))
647 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
648 the 32-bit instruction. */
649 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
650 ((bits ((insn), 16, 19) << 12) \
651 | bits ((insn), 0, 11))
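/* Worked example (illustrative encodings): for "movw r3, #0x1234" the
   Thumb-2 (encoding T3) halfwords are insn1 = 0xf241 and insn2 = 0x2334,
   and EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2334) == 0x1234; the ARM
   (encoding A2) form of the same instruction is 0xe3013234, and
   EXTRACT_MOVW_MOVT_IMM_A (0xe3013234) == 0x1234.  */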
653 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
656 thumb_expand_immediate (unsigned int imm)
658 unsigned int count = imm >> 7;
666 return (imm & 0xff) | ((imm & 0xff) << 16);
668 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
670 return (imm & 0xff) | ((imm & 0xff) << 8)
671 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
674 return (0x80 | (imm & 0x7f)) << (32 - count);
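/* Worked examples for ThumbExpandImmediate (illustrative values):
   0x0ab -> 0x000000ab (plain 8-bit constant),
   0x1ab -> 0x00ab00ab (byte replicated into both halfwords),
   0x4d2 -> 0x69000000 (0x80 | 0x52 = 0xd2 rotated right by count = 9).  */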
677 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in the
678 epilogue, 0 otherwise. */
681 thumb_instruction_restores_sp (unsigned short insn)
683 return (insn == 0x46bd /* mov sp, r7 */
684 || (insn & 0xff80) == 0xb000 /* add sp, imm */
685 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
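/* For example, 0xbd10 (pop {r4, pc}) matches the pop pattern above and is
   treated as restoring SP, while 0xb082 (sub sp, #8) falls in the
   0xb080-0xb0ff range and therefore does not match "add sp, imm".  */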
688 /* Analyze a Thumb prologue, looking for a recognizable stack frame
689 and frame pointer. Scan until we encounter a store that could
690 clobber the stack frame unexpectedly, or an unknown instruction.
691 Return the last address which is definitely safe to skip for an
692 initial breakpoint. */
695 thumb_analyze_prologue (struct gdbarch *gdbarch,
696 CORE_ADDR start, CORE_ADDR limit,
697 struct arm_prologue_cache *cache)
699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
700 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
704 CORE_ADDR unrecognized_pc = 0;
706 for (i = 0; i < 16; i++)
707 regs[i] = pv_register (i, 0);
708 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
710 while (start < limit)
714 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
716 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
721 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
724 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
725 whether to save LR (R14). */
726 mask = (insn & 0xff) | ((insn & 0x100) << 6);
728 /* Calculate offsets of saved R0-R7 and LR. */
729 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
730 if (mask & (1 << regno))
732 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
734 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
737 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
739 offset = (insn & 0x7f) << 2; /* get scaled offset */
740 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
743 else if (thumb_instruction_restores_sp (insn))
745 /* Don't scan past the epilogue. */
748 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
749 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
751 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
752 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
753 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
755 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
756 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
757 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
759 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
760 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
761 && pv_is_constant (regs[bits (insn, 3, 5)]))
762 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
763 regs[bits (insn, 6, 8)]);
764 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
765 && pv_is_constant (regs[bits (insn, 3, 6)]))
767 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
768 int rm = bits (insn, 3, 6);
769 regs[rd] = pv_add (regs[rd], regs[rm]);
771 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
773 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
774 int src_reg = (insn & 0x78) >> 3;
775 regs[dst_reg] = regs[src_reg];
777 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
779 /* Handle stores to the stack. Normally pushes are used,
780 but with GCC -mtpcs-frame, there may be other stores
781 in the prologue to create the frame. */
782 int regno = (insn >> 8) & 0x7;
785 offset = (insn & 0xff) << 2;
786 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
788 if (stack.store_would_trash (addr))
791 stack.store (addr, 4, regs[regno]);
793 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
795 int rd = bits (insn, 0, 2);
796 int rn = bits (insn, 3, 5);
799 offset = bits (insn, 6, 10) << 2;
800 addr = pv_add_constant (regs[rn], offset);
802 if (stack.store_would_trash (addr))
805 stack.store (addr, 4, regs[rd]);
807 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
808 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
809 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
810 /* Ignore stores of argument registers to the stack. */
812 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
813 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
814 /* Ignore block loads from the stack, potentially copying
815 parameters from memory. */
817 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
818 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
819 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
820 /* Similarly ignore single loads from the stack. */
822 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
823 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
824 /* Skip register copies, i.e. saves to another register
825 instead of the stack. */
827 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
828 /* Recognize constant loads; even with small stacks these are necessary
830 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
831 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
833 /* Constant pool loads, for the same reason. */
834 unsigned int constant;
837 loc = start + 4 + bits (insn, 0, 7) * 4;
838 constant = read_memory_unsigned_integer (loc, 4, byte_order);
839 regs[bits (insn, 8, 10)] = pv_constant (constant);
841 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
843 unsigned short inst2;
845 inst2 = read_code_unsigned_integer (start + 2, 2,
846 byte_order_for_code);
848 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
850 /* BL, BLX. Allow some special function calls when
851 skipping the prologue; GCC generates these before
852 storing arguments to the stack. */
854 int j1, j2, imm1, imm2;
856 imm1 = sbits (insn, 0, 10);
857 imm2 = bits (inst2, 0, 10);
858 j1 = bit (inst2, 13);
859 j2 = bit (inst2, 11);
861 offset = ((imm1 << 12) + (imm2 << 1));
862 offset ^= ((!j2) << 22) | ((!j1) << 23);
864 nextpc = start + 4 + offset;
865 /* For BLX make sure to clear the low bits. */
866 if (bit (inst2, 12) == 0)
867 nextpc = nextpc & 0xfffffffc;
869 if (!skip_prologue_function (gdbarch, nextpc,
870 bit (inst2, 12) != 0))
874 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
876 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
878 pv_t addr = regs[bits (insn, 0, 3)];
881 if (stack.store_would_trash (addr))
884 /* Calculate offsets of saved registers. */
885 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
886 if (inst2 & (1 << regno))
888 addr = pv_add_constant (addr, -4);
889 stack.store (addr, 4, regs[regno]);
893 regs[bits (insn, 0, 3)] = addr;
896 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
898 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
900 int regno1 = bits (inst2, 12, 15);
901 int regno2 = bits (inst2, 8, 11);
902 pv_t addr = regs[bits (insn, 0, 3)];
904 offset = inst2 & 0xff;
906 addr = pv_add_constant (addr, offset);
908 addr = pv_add_constant (addr, -offset);
910 if (stack.store_would_trash (addr))
913 stack.store (addr, 4, regs[regno1]);
914 stack.store (pv_add_constant (addr, 4),
918 regs[bits (insn, 0, 3)] = addr;
921 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
922 && (inst2 & 0x0c00) == 0x0c00
923 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
925 int regno = bits (inst2, 12, 15);
926 pv_t addr = regs[bits (insn, 0, 3)];
928 offset = inst2 & 0xff;
930 addr = pv_add_constant (addr, offset);
932 addr = pv_add_constant (addr, -offset);
934 if (stack.store_would_trash (addr))
937 stack.store (addr, 4, regs[regno]);
940 regs[bits (insn, 0, 3)] = addr;
943 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
944 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
946 int regno = bits (inst2, 12, 15);
949 offset = inst2 & 0xfff;
950 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
952 if (stack.store_would_trash (addr))
955 stack.store (addr, 4, regs[regno]);
958 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
959 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
960 /* Ignore stores of argument registers to the stack. */
963 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
964 && (inst2 & 0x0d00) == 0x0c00
965 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
966 /* Ignore stores of argument registers to the stack. */
969 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
971 && (inst2 & 0x8000) == 0x0000
972 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
973 /* Ignore block loads from the stack, potentially copying
974 parameters from memory. */
977 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
979 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
980 /* Similarly ignore dual loads from the stack. */
983 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
984 && (inst2 & 0x0d00) == 0x0c00
985 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
986 /* Similarly ignore single loads from the stack. */
989 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
990 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
991 /* Similarly ignore single loads from the stack. */
994 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
995 && (inst2 & 0x8000) == 0x0000)
997 unsigned int imm = ((bits (insn, 10, 10) << 11)
998 | (bits (inst2, 12, 14) << 8)
999 | bits (inst2, 0, 7));
1001 regs[bits (inst2, 8, 11)]
1002 = pv_add_constant (regs[bits (insn, 0, 3)],
1003 thumb_expand_immediate (imm));
1006 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1007 && (inst2 & 0x8000) == 0x0000)
1009 unsigned int imm = ((bits (insn, 10, 10) << 11)
1010 | (bits (inst2, 12, 14) << 8)
1011 | bits (inst2, 0, 7));
1013 regs[bits (inst2, 8, 11)]
1014 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1017 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1018 && (inst2 & 0x8000) == 0x0000)
1020 unsigned int imm = ((bits (insn, 10, 10) << 11)
1021 | (bits (inst2, 12, 14) << 8)
1022 | bits (inst2, 0, 7));
1024 regs[bits (inst2, 8, 11)]
1025 = pv_add_constant (regs[bits (insn, 0, 3)],
1026 - (CORE_ADDR) thumb_expand_immediate (imm));
1029 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1030 && (inst2 & 0x8000) == 0x0000)
1032 unsigned int imm = ((bits (insn, 10, 10) << 11)
1033 | (bits (inst2, 12, 14) << 8)
1034 | bits (inst2, 0, 7));
1036 regs[bits (inst2, 8, 11)]
1037 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1040 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1042 unsigned int imm = ((bits (insn, 10, 10) << 11)
1043 | (bits (inst2, 12, 14) << 8)
1044 | bits (inst2, 0, 7));
1046 regs[bits (inst2, 8, 11)]
1047 = pv_constant (thumb_expand_immediate (imm));
1050 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1053 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1055 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1058 else if (insn == 0xea5f /* mov.w Rd,Rm */
1059 && (inst2 & 0xf0f0) == 0)
1061 int dst_reg = (inst2 & 0x0f00) >> 8;
1062 int src_reg = inst2 & 0xf;
1063 regs[dst_reg] = regs[src_reg];
1066 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1068 /* Constant pool loads. */
1069 unsigned int constant;
1072 offset = bits (inst2, 0, 11);
1074 loc = start + 4 + offset;
1076 loc = start + 4 - offset;
1078 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1079 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1082 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1084 /* Constant pool loads. */
1085 unsigned int constant;
1088 offset = bits (inst2, 0, 7) << 2;
1090 loc = start + 4 + offset;
1092 loc = start + 4 - offset;
1094 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1095 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1097 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1098 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1101 else if (thumb2_instruction_changes_pc (insn, inst2))
1103 /* Don't scan past anything that might change control flow. */
1108 /* The optimizer might shove anything into the prologue,
1109 so we just skip what we don't recognize. */
1110 unrecognized_pc = start;
1115 else if (thumb_instruction_changes_pc (insn))
1117 /* Don't scan past anything that might change control flow. */
1122 /* The optimizer might shove anything into the prologue,
1123 so we just skip what we don't recognize. */
1124 unrecognized_pc = start;
1130 arm_debug_printf ("Prologue scan stopped at %s",
1131 paddress (gdbarch, start));
1133 if (unrecognized_pc == 0)
1134 unrecognized_pc = start;
1137 return unrecognized_pc;
1139 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1141 /* Frame pointer is fp. Frame size is constant. */
1142 cache->framereg = ARM_FP_REGNUM;
1143 cache->framesize = -regs[ARM_FP_REGNUM].k;
1145 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1147 /* Frame pointer is r7. Frame size is constant. */
1148 cache->framereg = THUMB_FP_REGNUM;
1149 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1153 /* Try the stack pointer... this is a bit desperate. */
1154 cache->framereg = ARM_SP_REGNUM;
1155 cache->framesize = -regs[ARM_SP_REGNUM].k;
1158 for (i = 0; i < 16; i++)
1159 if (stack.find_reg (gdbarch, i, &offset))
1160 cache->saved_regs[i].set_addr (offset);
1162 return unrecognized_pc;
1166 /* Try to analyze the instructions starting from PC, which load symbol
1167 __stack_chk_guard. Return the address of the instruction after loading this
1168 symbol, set the destination register number in *DESTREG, and set the size of
1169 the instructions for loading the symbol in *OFFSET. Return 0 if the instructions are
1173 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1174 unsigned int *destreg, int *offset)
1176 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1177 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1178 unsigned int low, high, address;
1183 unsigned short insn1
1184 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1186 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1188 *destreg = bits (insn1, 8, 10);
1190 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1191 address = read_memory_unsigned_integer (address, 4,
1192 byte_order_for_code);
1194 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1196 unsigned short insn2
1197 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1199 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1202 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1204 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1206 /* movt Rd, #const */
1207 if ((insn1 & 0xfbc0) == 0xf2c0)
1209 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1210 *destreg = bits (insn2, 8, 11);
1212 address = (high << 16 | low);
1219 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1221 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1223 address = bits (insn, 0, 11) + pc + 8;
1224 address = read_memory_unsigned_integer (address, 4,
1225 byte_order_for_code);
1227 *destreg = bits (insn, 12, 15);
1230 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1232 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1235 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1237 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1239 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1240 *destreg = bits (insn, 12, 15);
1242 address = (high << 16 | low);
1250 /* Try to skip the sequence of instructions used for the stack protector. If PC
1251 points to the first instruction of this sequence, return the address of the
1252 first instruction after this sequence; otherwise, return the original PC.
1254 On ARM, this sequence of instructions consists mainly of three steps:
1255 Step 1: load symbol __stack_chk_guard,
1256 Step 2: load from address of __stack_chk_guard,
1257 Step 3: store it to somewhere else.
1259 Usually, the instructions in step 2 and step 3 are the same across ARM
1260 architectures. Step 2 is one instruction, 'ldr Rx, [Rn, #0]', and
1261 step 3 is also one instruction, 'str Rx, [r7, #immd]'. However, the
1262 instructions in step 1 vary between ARM architectures. On ARMv7,
1265 movw Rn, #:lower16:__stack_chk_guard
1266 movt Rn, #:upper16:__stack_chk_guard
1273 .word __stack_chk_guard
1275 Since ldr/str are very common instructions, we can't use them alone as the
1276 'fingerprint' or 'signature' of a stack protector sequence. Instead we use the
1277 sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1278 stripped, as the 'fingerprint' of a stack protector code sequence. */
1281 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1283 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1284 unsigned int basereg;
1285 struct bound_minimal_symbol stack_chk_guard;
1287 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1290 /* Try to parse the instructions in Step 1. */
1291 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1296 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1297 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1298 Otherwise, this sequence cannot be for the stack protector. */
1299 if (stack_chk_guard.minsym == NULL
1300 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1305 unsigned int destreg;
1307 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1309 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1310 if ((insn & 0xf800) != 0x6800)
1312 if (bits (insn, 3, 5) != basereg)
1314 destreg = bits (insn, 0, 2);
1316 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1317 byte_order_for_code);
1318 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1319 if ((insn & 0xf800) != 0x6000)
1321 if (destreg != bits (insn, 0, 2))
1326 unsigned int destreg;
1328 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1330 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1331 if ((insn & 0x0e500000) != 0x04100000)
1333 if (bits (insn, 16, 19) != basereg)
1335 destreg = bits (insn, 12, 15);
1336 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1337 insn = read_code_unsigned_integer (pc + offset + 4,
1338 4, byte_order_for_code);
1339 if ((insn & 0x0e500000) != 0x04000000)
1341 if (bits (insn, 12, 15) != destreg)
1344 /* The total size of the two ldr/str instructions is 4 on Thumb-2, while 8
1347 return pc + offset + 4;
1349 return pc + offset + 8;
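/* For illustration, a typical ARMv7 (Thumb-2) stack-protector prelude that
   the function above is intended to skip looks like this (register choices
   are arbitrary and the exact store offset depends on the frame layout):

     movw r3, #:lower16:__stack_chk_guard
     movt r3, #:upper16:__stack_chk_guard
     ldr  r3, [r3, #0]
     str  r3, [r7, #12]
*/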
1352 /* Advance the PC across any function entry prologue instructions to
1353 reach some "real" code.
1355 The APCS (ARM Procedure Call Standard) defines the following
1359 [stmfd sp!, {a1,a2,a3,a4}]
1360 stmfd sp!, {...,fp,ip,lr,pc}
1361 [stfe f7, [sp, #-12]!]
1362 [stfe f6, [sp, #-12]!]
1363 [stfe f5, [sp, #-12]!]
1364 [stfe f4, [sp, #-12]!]
1365 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1368 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1370 CORE_ADDR func_addr, limit_pc;
1372 /* See if we can determine the end of the prologue via the symbol table.
1373 If so, then return either PC, or the PC after the prologue, whichever
1375 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1377 CORE_ADDR post_prologue_pc
1378 = skip_prologue_using_sal (gdbarch, func_addr);
1379 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1381 if (post_prologue_pc)
1383 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1386 /* GCC always emits a line note before the prologue and another
1387 one after, even if the two are at the same address or on the
1388 same line. Take advantage of this so that we do not need to
1389 know every instruction that might appear in the prologue. We
1390 will have producer information for most binaries; if it is
1391 missing (e.g. for -gstabs), assume the GNU tools. */
1392 if (post_prologue_pc
1394 || COMPUNIT_PRODUCER (cust) == NULL
1395 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1396 || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
1397 return post_prologue_pc;
1399 if (post_prologue_pc != 0)
1401 CORE_ADDR analyzed_limit;
1403 /* For non-GCC compilers, make sure the entire line is an
1404 acceptable prologue; GDB will round this function's
1405 return value up to the end of the following line so we
1406 can not skip just part of a line (and we do not want to).
1408 RealView does not treat the prologue specially, but does
1409 associate prologue code with the opening brace; so this
1410 lets us skip the first line if we think it is the opening
1412 if (arm_pc_is_thumb (gdbarch, func_addr))
1413 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1414 post_prologue_pc, NULL);
1417 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1418 NULL, target_arm_instruction_reader ());
1420 if (analyzed_limit != post_prologue_pc)
1423 return post_prologue_pc;
1427 /* Can't determine prologue from the symbol table, need to examine
1430 /* Find an upper limit on the function prologue using the debug
1431 information. If the debug information could not be used to provide
1432 that bound, then use an arbitrarily large number as the upper bound. */
1433 /* Like arm_scan_prologue, stop no later than pc + 64. */
1434 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1436 limit_pc = pc + 64; /* Magic. */
1439 /* Check if this is Thumb code. */
1440 if (arm_pc_is_thumb (gdbarch, pc))
1441 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1443 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1444 target_arm_instruction_reader ());
1448 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1449 This function decodes a Thumb function prologue to determine:
1450 1) the size of the stack frame
1451 2) which registers are saved on it
1452 3) the offsets of saved regs
1453 4) the offset from the stack pointer to the frame pointer
1455 A typical Thumb function prologue would create this stack frame
1456 (offsets relative to FP)
1457 old SP -> 24 stack parameters
1460 R7 -> 0 local variables (16 bytes)
1461 SP -> -12 additional stack space (12 bytes)
1462 The frame size would thus be 36 bytes, and the frame offset would be
1463 12 bytes. The frame register is R7.
1465 The comments for thumb_skip_prolog() describe the algorithm we use
1466 to detect the end of the prologue. */
1470 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1471 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1473 CORE_ADDR prologue_start;
1474 CORE_ADDR prologue_end;
1476 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1479 /* See comment in arm_scan_prologue for an explanation of
1481 if (prologue_end > prologue_start + 64)
1483 prologue_end = prologue_start + 64;
1487 /* We're in the boondocks: we have no idea where the start of the
1491 prologue_end = std::min (prologue_end, prev_pc);
1493 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1496 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
1500 arm_instruction_restores_sp (unsigned int insn)
1502 if (bits (insn, 28, 31) != INST_NV)
1504 if ((insn & 0x0df0f000) == 0x0080d000
1505 /* ADD SP (register or immediate). */
1506 || (insn & 0x0df0f000) == 0x0040d000
1507 /* SUB SP (register or immediate). */
1508 || (insn & 0x0ffffff0) == 0x01a0d000
1510 || (insn & 0x0fff0000) == 0x08bd0000
1512 || (insn & 0x0fff0000) == 0x049d0000)
1513 /* POP of a single register. */
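/* For example, 0xe8bd8ff0 (ldmfd sp!, {r4-r11, pc}) matches the
   0x08bd0000 pattern above and is recognized as an epilogue
   instruction that restores SP.  */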
1520 /* Implement immediate value decoding, as described in section A5.2.4
1521 (Modified immediate constants in ARM instructions) of the ARM Architecture
1522 Reference Manual (ARMv7-A and ARMv7-R edition). */
1525 arm_expand_immediate (uint32_t imm)
1527 /* Immediate values are 12 bits long. */
1528 gdb_assert ((imm & 0xfffff000) == 0);
1530 uint32_t unrotated_value = imm & 0xff;
1531 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1533 if (rotate_amount == 0)
1534 return unrotated_value;
1536 return ((unrotated_value >> rotate_amount)
1537 | (unrotated_value << (32 - rotate_amount)));
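/* A minimal self-test sketch for the two immediate decoders above.  This
   is illustrative only: it follows the gdbsupport/selftest.h conventions
   already included by this file, but it is not registered with the
   self-test framework here.  */

#if GDB_SELF_TEST
namespace selftests {

static void
arm_expand_immediate_test ()
{
  /* A5.2.4 modified immediate: 0xff rotated right by 2 * 2 = 4.  */
  SELF_CHECK (arm_expand_immediate (0x2ff) == 0xf000000fu);
  /* No rotation: the low 8 bits are used unchanged.  */
  SELF_CHECK (arm_expand_immediate (0x0ab) == 0xabu);
  /* ThumbExpandImmediate: byte replicated into both halfwords.  */
  SELF_CHECK (thumb_expand_immediate (0x1ab) == 0x00ab00abu);
  /* ThumbExpandImmediate: 0x80 | 0x52 rotated right by 9.  */
  SELF_CHECK (thumb_expand_immediate (0x4d2) == 0x69000000u);
}

} /* namespace selftests */
#endif /* GDB_SELF_TEST */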
1540 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1541 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1542 fill it in. Return the first address not recognized as a prologue
1545 We recognize all the instructions typically found in ARM prologues,
1546 plus harmless instructions which can be skipped (either for analysis
1547 purposes, or a more restrictive set that can be skipped when finding
1548 the end of the prologue). */
1551 arm_analyze_prologue (struct gdbarch *gdbarch,
1552 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1553 struct arm_prologue_cache *cache,
1554 const arm_instruction_reader &insn_reader)
1556 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1558 CORE_ADDR offset, current_pc;
1559 pv_t regs[ARM_FPS_REGNUM];
1560 CORE_ADDR unrecognized_pc = 0;
1562 /* Search the prologue looking for instructions that set up the
1563 frame pointer, adjust the stack pointer, and save registers.
1565 Be careful, however, and if it doesn't look like a prologue,
1566 don't try to scan it. If, for instance, a frameless function
1567 begins with stmfd sp!, then we will tell ourselves there is
1568 a frame, which will confuse stack traceback, as well as "finish"
1569 and other operations that rely on a knowledge of the stack
1572 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1573 regs[regno] = pv_register (regno, 0);
1574 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1576 for (current_pc = prologue_start;
1577 current_pc < prologue_end;
1580 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1582 if (insn == 0xe1a0c00d) /* mov ip, sp */
1584 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1587 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1588 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1590 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1591 int rd = bits (insn, 12, 15);
1592 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1595 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1596 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1598 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1599 int rd = bits (insn, 12, 15);
1600 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1603 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1606 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1608 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1609 stack.store (regs[ARM_SP_REGNUM], 4,
1610 regs[bits (insn, 12, 15)]);
1613 else if ((insn & 0xffff0000) == 0xe92d0000)
1614 /* stmfd sp!, {..., fp, ip, lr, pc}
1616 stmfd sp!, {a1, a2, a3, a4} */
1618 int mask = insn & 0xffff;
1620 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1623 /* Calculate offsets of saved registers. */
1624 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1625 if (mask & (1 << regno))
1628 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1629 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1632 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1633 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1634 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1636 /* No need to add this to saved_regs -- it's just an arg reg. */
1639 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1640 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1641 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1643 /* No need to add this to saved_regs -- it's just an arg reg. */
1646 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1648 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1650 /* No need to add this to saved_regs -- it's just arg regs. */
1653 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1655 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1656 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1658 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1660 uint32_t imm = arm_expand_immediate(insn & 0xfff);
1661 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1663 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1665 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1667 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1670 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1671 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1672 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1674 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1676 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1678 int n_saved_fp_regs;
1679 unsigned int fp_start_reg, fp_bound_reg;
1681 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1684 if ((insn & 0x800) == 0x800) /* N0 is set */
1686 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1687 n_saved_fp_regs = 3;
1689 n_saved_fp_regs = 1;
1693 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1694 n_saved_fp_regs = 2;
1696 n_saved_fp_regs = 4;
1699 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1700 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1701 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1703 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1704 stack.store (regs[ARM_SP_REGNUM], 12,
1705 regs[fp_start_reg++]);
1708 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1710 /* Allow some special function calls when skipping the
1711 prologue; GCC generates these before storing arguments to
1713 CORE_ADDR dest = BranchDest (current_pc, insn);
1715 if (skip_prologue_function (gdbarch, dest, 0))
1720 else if ((insn & 0xf0000000) != 0xe0000000)
1721 break; /* Condition not true, exit early. */
1722 else if (arm_instruction_changes_pc (insn))
1723 /* Don't scan past anything that might change control flow. */
1725 else if (arm_instruction_restores_sp (insn))
1727 /* Don't scan past the epilogue. */
1730 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1731 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1732 /* Ignore block loads from the stack, potentially copying
1733 parameters from memory. */
1735 else if ((insn & 0xfc500000) == 0xe4100000
1736 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1737 /* Similarly ignore single loads from the stack. */
1739 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1740 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1741 register instead of the stack. */
1745 /* The optimizer might shove anything into the prologue. If
1746 we are building up the cache (cache != NULL) from scanning the prologue,
1747 we just skip what we don't recognize and scan further to
1748 make the cache as complete as possible. However, if we are skipping
1749 the prologue, we'll stop immediately on an unrecognized
1751 unrecognized_pc = current_pc;
1759 if (unrecognized_pc == 0)
1760 unrecognized_pc = current_pc;
1764 int framereg, framesize;
1766 /* The frame size is just the distance from the frame register
1767 to the original stack pointer. */
1768 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1770 /* Frame pointer is fp. */
1771 framereg = ARM_FP_REGNUM;
1772 framesize = -regs[ARM_FP_REGNUM].k;
1776 /* Try the stack pointer... this is a bit desperate. */
1777 framereg = ARM_SP_REGNUM;
1778 framesize = -regs[ARM_SP_REGNUM].k;
1781 cache->framereg = framereg;
1782 cache->framesize = framesize;
1784 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1785 if (stack.find_reg (gdbarch, regno, &offset))
1786 cache->saved_regs[regno].set_addr (offset);
1789 arm_debug_printf ("Prologue scan stopped at %s",
1790 paddress (gdbarch, unrecognized_pc));
1792 return unrecognized_pc;
1796 arm_scan_prologue (struct frame_info *this_frame,
1797 struct arm_prologue_cache *cache)
1799 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1800 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1801 CORE_ADDR prologue_start, prologue_end;
1802 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1803 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1805 /* Assume there is no frame until proven otherwise. */
1806 cache->framereg = ARM_SP_REGNUM;
1807 cache->framesize = 0;
1809 /* Check for Thumb prologue. */
1810 if (arm_frame_is_thumb (this_frame))
1812 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1816 /* Find the function prologue. If we can't find the function in
1817 the symbol table, peek in the stack frame to find the PC. */
1818 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1821 /* One way to find the end of the prologue (which works well
1822 for unoptimized code) is to do the following:
1824 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1827 prologue_end = prev_pc;
1828 else if (sal.end < prologue_end)
1829 prologue_end = sal.end;
1831 This mechanism is very accurate so long as the optimizer
1832 doesn't move any instructions from the function body into the
1833 prologue. If this happens, sal.end will be the last
1834 instruction in the first hunk of prologue code just before
1835 the first instruction that the scheduler has moved from
1836 the body to the prologue.
1838 In order to make sure that we scan all of the prologue
1839 instructions, we use a slightly less accurate mechanism which
1840 may scan more than necessary. To help compensate for this
1841 lack of accuracy, the prologue scanning loop below contains
1842 several clauses which'll cause the loop to terminate early if
1843 an implausible prologue instruction is encountered.
1849 is a suitable endpoint since it accounts for the largest
1850 possible prologue plus up to five instructions inserted by
1853 if (prologue_end > prologue_start + 64)
1855 prologue_end = prologue_start + 64; /* See above. */
1860 /* We have no symbol information. Our only option is to assume this
1861 function has a standard stack frame and the normal frame register.
1862 Then, we can find the value of our frame pointer on entrance to
1863 the callee (or at the present moment if this is the innermost frame).
1864 The value stored there should be the address of the stmfd + 8. */
1865 CORE_ADDR frame_loc;
1866 ULONGEST return_value;
1868 /* AAPCS does not use a frame register, so we can abort here. */
1869 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1872 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1873 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1878 prologue_start = gdbarch_addr_bits_remove
1879 (gdbarch, return_value) - 8;
1880 prologue_end = prologue_start + 64; /* See above. */
1884 if (prev_pc < prologue_end)
1885 prologue_end = prev_pc;
1887 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
1888 target_arm_instruction_reader ());
1891 static struct arm_prologue_cache *
1892 arm_make_prologue_cache (struct frame_info *this_frame)
1895 struct arm_prologue_cache *cache;
1896 CORE_ADDR unwound_fp;
1898 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1899 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1901 arm_scan_prologue (this_frame, cache);
1903 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1904 if (unwound_fp == 0)
1907 cache->prev_sp = unwound_fp + cache->framesize;
1909 /* Calculate actual addresses of saved registers using offsets
1910 determined by arm_scan_prologue. */
1911 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1912 if (cache->saved_regs[reg].is_addr ())
1913 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
1919 /* Implementation of the stop_reason hook for arm_prologue frames. */
1921 static enum unwind_stop_reason
1922 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1925 struct arm_prologue_cache *cache;
1928 if (*this_cache == NULL)
1929 *this_cache = arm_make_prologue_cache (this_frame);
1930 cache = (struct arm_prologue_cache *) *this_cache;
1932 /* This is meant to halt the backtrace at "_start". */
1933 pc = get_frame_pc (this_frame);
1934 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1935 return UNWIND_OUTERMOST;
1937 /* If we've hit a wall, stop. */
1938 if (cache->prev_sp == 0)
1939 return UNWIND_OUTERMOST;
1941 return UNWIND_NO_REASON;
1944 /* Our frame ID for a normal frame is the current function's starting PC
1945 and the caller's SP when we were called. */
1948 arm_prologue_this_id (struct frame_info *this_frame,
1950 struct frame_id *this_id)
1952 struct arm_prologue_cache *cache;
1956 if (*this_cache == NULL)
1957 *this_cache = arm_make_prologue_cache (this_frame);
1958 cache = (struct arm_prologue_cache *) *this_cache;
1960 /* Use function start address as part of the frame ID. If we cannot
1961 identify the start address (due to missing symbol information),
1962 fall back to just using the current PC. */
1963 pc = get_frame_pc (this_frame);
1964 func = get_frame_func (this_frame);
1968 id = frame_id_build (cache->prev_sp, func);
1972 static struct value *
1973 arm_prologue_prev_register (struct frame_info *this_frame,
1977 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1978 struct arm_prologue_cache *cache;
1980 if (*this_cache == NULL)
1981 *this_cache = arm_make_prologue_cache (this_frame);
1982 cache = (struct arm_prologue_cache *) *this_cache;
1984 /* If we are asked to unwind the PC, then we need to return the LR
1985 instead. The prologue may save PC, but it will point into this
1986 frame's prologue, not the next frame's resume location. Also
1987 strip the saved T bit. A valid LR may have the low bit set, but
1988 a valid PC never does. */
1989 if (prev_regnum == ARM_PC_REGNUM)
1993 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1994 return frame_unwind_got_constant (this_frame, prev_regnum,
1995 arm_addr_bits_remove (gdbarch, lr));
1998 /* SP is generally not saved to the stack, but this frame is
1999 identified by the next frame's stack pointer at the time of the call.
2000 The value was already reconstructed into PREV_SP. */
2001 if (prev_regnum == ARM_SP_REGNUM)
2002 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2004 /* The CPSR may have been changed by the call instruction and by the
2005 called function. The only bit we can reconstruct is the T bit,
2006 by checking the low bit of LR as of the call. This is a reliable
2007 indicator of Thumb-ness except for some ARM v4T pre-interworking
2008 Thumb code, which could get away with a clear low bit as long as
2009 the called function did not use bx. Guess that all other
2010 bits are unchanged; the condition flags are presumably lost,
2011 but the processor status is likely valid. */
2012 if (prev_regnum == ARM_PS_REGNUM)
2015 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2017 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2018 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2019 if (IS_THUMB_ADDR (lr))
2023 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2026 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2030 static frame_unwind arm_prologue_unwind = {
2032 arm_prologue_unwind_stop_reason,
2033 arm_prologue_this_id,
2034 arm_prologue_prev_register,
2036 default_frame_sniffer
2039 /* Maintain a list of ARM exception table entries per objfile, similar to the
2040 list of mapping symbols. We only cache entries for standard ARM-defined
2041 personality routines; the cache will contain only the frame unwinding
2042 instructions associated with the entry (not the descriptors). */
2044 struct arm_exidx_entry
2049 bool operator< (const arm_exidx_entry &other) const
2051 return addr < other.addr;
2055 struct arm_exidx_data
2057 std::vector<std::vector<arm_exidx_entry>> section_maps;
2060 /* Per-BFD key to store exception handling information. */
2061 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2063 static struct obj_section *
2064 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2066 struct obj_section *osect;
2068 ALL_OBJFILE_OSECTIONS (objfile, osect)
2069 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2071 bfd_vma start, size;
2072 start = bfd_section_vma (osect->the_bfd_section);
2073 size = bfd_section_size (osect->the_bfd_section);
2075 if (start <= vma && vma < start + size)
2082 /* Parse contents of exception table and exception index sections
2083 of OBJFILE, and fill in the exception table entry cache.
2085 For each entry that refers to a standard ARM-defined personality
2086 routine, extract the frame unwinding instructions (from either
2087 the index or the table section). The unwinding instructions
2089 - extracting them from the rest of the table data
2090 - converting to host endianness
2091 - appending the implicit 0xb0 ("Finish") code
2093 The extracted and normalized instructions are stored for later
2094 retrieval by the arm_find_exidx_entry routine. */
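/* Illustrative sketch, not part of GDB: the exception index and table
   sections encode addresses as 31-bit place-relative ("prel31") offsets.
   The expression ((x & 0x7fffffff) ^ 0x40000000) - 0x40000000, used
   repeatedly below, sign-extends the low 31 bits of X; adding the address
   of the word holding the offset then yields the absolute address.  The
   helper below is a hypothetical standalone form of that computation,
   written only to make the arithmetic explicit.  */

static inline bfd_vma
arm_exidx_prel31_example (bfd_vma word, bfd_vma word_addr)
{
  /* Sign-extend bits [30:0] of WORD to a signed offset...  */
  bfd_vma offset = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;

  /* ...and make it absolute, relative to the address of the word that
     holds the offset.  */
  return word_addr + offset;
}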
2097 arm_exidx_new_objfile (struct objfile *objfile)
2099 struct arm_exidx_data *data;
2100 asection *exidx, *extab;
2101 bfd_vma exidx_vma = 0, extab_vma = 0;
2104 /* If we've already touched this file, do nothing. */
2105 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2108 /* Read contents of exception table and index. */
2109 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2110 gdb::byte_vector exidx_data;
2113 exidx_vma = bfd_section_vma (exidx);
2114 exidx_data.resize (bfd_section_size (exidx));
2116 if (!bfd_get_section_contents (objfile->obfd, exidx,
2117 exidx_data.data (), 0,
2118 exidx_data.size ()))
2122 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2123 gdb::byte_vector extab_data;
2126 extab_vma = bfd_section_vma (extab);
2127 extab_data.resize (bfd_section_size (extab));
2129 if (!bfd_get_section_contents (objfile->obfd, extab,
2130 extab_data.data (), 0,
2131 extab_data.size ()))
2135 /* Allocate exception table data structure. */
2136 data = arm_exidx_data_key.emplace (objfile->obfd);
2137 data->section_maps.resize (objfile->obfd->section_count);
2139 /* Fill in exception table. */
2140 for (i = 0; i < exidx_data.size () / 8; i++)
2142 struct arm_exidx_entry new_exidx_entry;
2143 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2144 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2145 exidx_data.data () + i * 8 + 4);
2146 bfd_vma addr = 0, word = 0;
2147 int n_bytes = 0, n_words = 0;
2148 struct obj_section *sec;
2149 gdb_byte *entry = NULL;
2151 /* Extract address of start of function. */
2152 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2153 idx += exidx_vma + i * 8;
2155 /* Find section containing function and compute section offset. */
2156 sec = arm_obj_section_from_vma (objfile, idx);
2159 idx -= bfd_section_vma (sec->the_bfd_section);
2161 /* Determine address of exception table entry. */
2164 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2166 else if ((val & 0xff000000) == 0x80000000)
2168 /* Exception table entry embedded in .ARM.exidx
2169 -- must be short form. */
2173 else if (!(val & 0x80000000))
2175 /* Exception table entry in .ARM.extab. */
2176 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2177 addr += exidx_vma + i * 8 + 4;
2179 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2181 word = bfd_h_get_32 (objfile->obfd,
2182 extab_data.data () + addr - extab_vma);
2185 if ((word & 0xff000000) == 0x80000000)
2190 else if ((word & 0xff000000) == 0x81000000
2191 || (word & 0xff000000) == 0x82000000)
2195 n_words = ((word >> 16) & 0xff);
2197 else if (!(word & 0x80000000))
2200 struct obj_section *pers_sec;
2201 int gnu_personality = 0;
2203 /* Custom personality routine. */
2204 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2205 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2207 /* Check whether we've got one of the variants of the
2208 GNU personality routines. */
2209 pers_sec = arm_obj_section_from_vma (objfile, pers);
2212 static const char *personality[] =
2214 "__gcc_personality_v0",
2215 "__gxx_personality_v0",
2216 "__gcj_personality_v0",
2217 "__gnu_objc_personality_v0",
2221 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2224 for (k = 0; personality[k]; k++)
2225 if (lookup_minimal_symbol_by_pc_name
2226 (pc, personality[k], objfile))
2228 gnu_personality = 1;
2233 /* If so, the next word contains a word count in the high
2234 byte, followed by the same unwind instructions as the
2235 pre-defined forms. */
2237 && addr + 4 <= extab_vma + extab_data.size ())
2239 word = bfd_h_get_32 (objfile->obfd,
2241 + addr - extab_vma));
2244 n_words = ((word >> 24) & 0xff);
2250 /* Sanity check address. */
2252 if (addr < extab_vma
2253 || addr + 4 * n_words > extab_vma + extab_data.size ())
2254 n_words = n_bytes = 0;
2256 /* The unwind instructions reside in WORD (only the N_BYTES least
2257 significant bytes are valid), followed by N_WORDS words in the
2258 extab section starting at ADDR. */
2259 if (n_bytes || n_words)
2262 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2263 n_bytes + n_words * 4 + 1);
2266 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2270 word = bfd_h_get_32 (objfile->obfd,
2271 extab_data.data () + addr - extab_vma);
2274 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2275 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2276 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2277 *p++ = (gdb_byte) (word & 0xff);
2280 /* Implied "Finish" to terminate the list. */
2284 /* Push the entry onto the vector. Entries are guaranteed to always
2285 appear in order of increasing addresses. */
2286 new_exidx_entry.addr = idx;
2287 new_exidx_entry.entry = entry;
2288 data->section_maps[sec->the_bfd_section->index].push_back
2293 /* Search for the exception table entry covering MEMADDR. If one is found,
2294 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2295 set *START to the start of the region covered by this entry. */
2298 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2300 struct obj_section *sec;
2302 sec = find_pc_section (memaddr);
2305 struct arm_exidx_data *data;
2306 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2308 data = arm_exidx_data_key.get (sec->objfile->obfd);
2311 std::vector<arm_exidx_entry> &map
2312 = data->section_maps[sec->the_bfd_section->index];
2315 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2317 /* std::lower_bound finds the earliest ordered insertion
2318 point. If the following symbol starts at this exact
2319 address, we use that; otherwise, the preceding
2320 exception table entry covers this address. */
2321 if (idx < map.end ())
2323 if (idx->addr == map_key.addr)
2326 *start = idx->addr + obj_section_addr (sec);
2331 if (idx > map.begin ())
2335 *start = idx->addr + obj_section_addr (sec);
2345 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2346 instruction list from the ARM exception table entry ENTRY, allocate and
2347 return a prologue cache structure describing how to unwind this frame.
2349 Return NULL if the unwinding instruction list contains a "spare",
2350 "reserved" or "refuse to unwind" instruction as defined in section
2351 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2352 for the ARM Architecture" document. */
2354 static struct arm_prologue_cache *
2355 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2360 struct arm_prologue_cache *cache;
2361 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2362 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2368 /* Whenever we reload SP, we have to retrieve its actual
2369 value in the current frame. */
2372 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2374 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2375 vsp = get_frame_register_unsigned (this_frame, reg);
2379 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2380 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2386 /* Decode next unwind instruction. */
2389 if ((insn & 0xc0) == 0)
2391 int offset = insn & 0x3f;
2392 vsp += (offset << 2) + 4;
2394 else if ((insn & 0xc0) == 0x40)
2396 int offset = insn & 0x3f;
2397 vsp -= (offset << 2) + 4;
2399 else if ((insn & 0xf0) == 0x80)
2401 int mask = ((insn & 0xf) << 8) | *entry++;
2404 /* The special case of an all-zero mask identifies
2405 "Refuse to unwind". We return NULL to fall back
2406 to the prologue analyzer. */
2410 /* Pop registers r4..r15 under mask. */
2411 for (i = 0; i < 12; i++)
2412 if (mask & (1 << i))
2414 cache->saved_regs[4 + i].set_addr (vsp);
2418 /* Special-case popping SP -- we need to reload vsp. */
2419 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2422 else if ((insn & 0xf0) == 0x90)
2424 int reg = insn & 0xf;
2426 /* Reserved cases. */
2427 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2430 /* Set SP from another register and mark VSP for reload. */
2431 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2434 else if ((insn & 0xf0) == 0xa0)
2436 int count = insn & 0x7;
2437 int pop_lr = (insn & 0x8) != 0;
2440 /* Pop r4..r[4+count]. */
2441 for (i = 0; i <= count; i++)
2443 cache->saved_regs[4 + i].set_addr (vsp);
2447 /* If indicated by flag, pop LR as well. */
2450 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2454 else if (insn == 0xb0)
2456 /* We could only have updated PC by popping into it; if so, it
2457 will show up as an address. Otherwise, copy LR into PC. */
2458 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2459 cache->saved_regs[ARM_PC_REGNUM]
2460 = cache->saved_regs[ARM_LR_REGNUM];
2465 else if (insn == 0xb1)
2467 int mask = *entry++;
2470 /* An all-zero mask or a mask >= 16 is "spare". */
2471 if (mask == 0 || mask >= 16)
2474 /* Pop r0..r3 under mask. */
2475 for (i = 0; i < 4; i++)
2476 if (mask & (1 << i))
2478 cache->saved_regs[i].set_addr (vsp);
2482 else if (insn == 0xb2)
2484 ULONGEST offset = 0;
2489 offset |= (*entry & 0x7f) << shift;
2492 while (*entry++ & 0x80);
2494 vsp += 0x204 + (offset << 2);
2496 else if (insn == 0xb3)
2498 int start = *entry >> 4;
2499 int count = (*entry++) & 0xf;
2502 /* Only registers D0..D15 are valid here. */
2503 if (start + count >= 16)
2506 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2507 for (i = 0; i <= count; i++)
2509 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2513 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2516 else if ((insn & 0xf8) == 0xb8)
2518 int count = insn & 0x7;
2521 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2522 for (i = 0; i <= count; i++)
2524 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2528 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2531 else if (insn == 0xc6)
2533 int start = *entry >> 4;
2534 int count = (*entry++) & 0xf;
2537 /* Only registers WR0..WR15 are valid. */
2538 if (start + count >= 16)
2541 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2542 for (i = 0; i <= count; i++)
2544 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2548 else if (insn == 0xc7)
2550 int mask = *entry++;
2553 /* An all-zero mask or a mask >= 16 is "spare". */
2554 if (mask == 0 || mask >= 16)
2557 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2558 for (i = 0; i < 4; i++)
2559 if (mask & (1 << i))
2561 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
2565 else if ((insn & 0xf8) == 0xc0)
2567 int count = insn & 0x7;
2570 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2571 for (i = 0; i <= count; i++)
2573 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
2577 else if (insn == 0xc8)
2579 int start = *entry >> 4;
2580 int count = (*entry++) & 0xf;
2583 /* Only registers D0..D31 are valid. */
2584 if (start + count >= 16)
2587 /* Pop VFP double-precision registers
2588 D[16+start]..D[16+start+count]. */
2589 for (i = 0; i <= count; i++)
2591 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
2595 else if (insn == 0xc9)
2597 int start = *entry >> 4;
2598 int count = (*entry++) & 0xf;
2601 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2602 for (i = 0; i <= count; i++)
2604 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2608 else if ((insn & 0xf8) == 0xd0)
2610 int count = insn & 0x7;
2613 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2614 for (i = 0; i <= count; i++)
2616 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2622 /* Everything else is "spare". */
2627 /* If we restore SP from a register, assume this was the frame register.
2628 Otherwise just fall back to SP as frame register. */
2629 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2630 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2632 cache->framereg = ARM_SP_REGNUM;
2634 /* Determine offset to previous frame. */
2636 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2638 /* We already got the previous SP. */
2639 cache->prev_sp = vsp;
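/* Illustrative sketch, not used by GDB: the 0xb2 opcode handled above is
   followed by a ULEB128-encoded operand, and the resulting adjustment is
   vsp += 0x204 + (operand << 2).  The hypothetical helper below shows the
   operand decoding on its own, assuming *ENTRY_PTR points at the first
   operand byte; it advances *ENTRY_PTR past the operand.  */

static inline ULONGEST
arm_exidx_uleb128_example (const gdb_byte **entry_ptr)
{
  const gdb_byte *entry = *entry_ptr;
  ULONGEST value = 0;
  unsigned int shift = 0;

  /* Each byte contributes its low seven bits, least significant group
     first; a set high bit marks a continuation byte.  */
  do
    {
      value |= (ULONGEST) (*entry & 0x7f) << shift;
      shift += 7;
    }
  while (*entry++ & 0x80);

  *entry_ptr = entry;
  return value;
}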
2644 /* Unwinding via ARM exception table entries. Note that the sniffer
2645 already computes a filled-in prologue cache, which is then used
2646 with the same arm_prologue_this_id and arm_prologue_prev_register
2647 routines also used for prologue-parsing based unwinding. */
2650 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2651 struct frame_info *this_frame,
2652 void **this_prologue_cache)
2654 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2655 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2656 CORE_ADDR addr_in_block, exidx_region, func_start;
2657 struct arm_prologue_cache *cache;
2660 /* See if we have an ARM exception table entry covering this address. */
2661 addr_in_block = get_frame_address_in_block (this_frame);
2662 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2666 /* The ARM exception table does not describe unwind information
2667 for arbitrary PC values, but is guaranteed to be correct only
2668 at call sites. We have to decide here whether we want to use
2669 ARM exception table information for this frame, or fall back
2670 to using prologue parsing. (Note that if we have DWARF CFI,
2671 this sniffer isn't even called -- CFI is always preferred.)
2673 Before we make this decision, however, we check whether we
2674 actually have *symbol* information for the current frame.
2675 If not, prologue parsing would not work anyway, so we might
2676 as well use the exception table and hope for the best. */
2677 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2681 /* If the next frame is "normal", we are at a call site in this
2682 frame, so exception information is guaranteed to be valid. */
2683 if (get_next_frame (this_frame)
2684 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2687 /* We also assume exception information is valid if we're currently
2688 blocked in a system call. The system library is supposed to
2689 ensure this, so that e.g. pthread cancellation works. */
2690 if (arm_frame_is_thumb (this_frame))
2694 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2695 2, byte_order_for_code, &insn)
2696 && (insn & 0xff00) == 0xdf00 /* svc */)
2703 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2704 4, byte_order_for_code, &insn)
2705 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2709 /* Bail out if we don't know that exception information is valid. */
2713 /* The ARM exception index does not mark the *end* of the region
2714 covered by the entry, and some functions will not have any entry.
2715 To correctly recognize the end of the covered region, the linker
2716 should have inserted dummy records with a CANTUNWIND marker.
2718 Unfortunately, current versions of GNU ld do not reliably do
2719 this, and thus we may have found an incorrect entry above.
2720 As a (temporary) sanity check, we only use the entry if it
2721 lies *within* the bounds of the function. Note that this check
2722 might reject perfectly valid entries that just happen to cover
2723 multiple functions; therefore this check ought to be removed
2724 once the linker is fixed. */
2725 if (func_start > exidx_region)
2729 /* Decode the list of unwinding instructions into a prologue cache.
2730 Note that this may fail due to e.g. a "refuse to unwind" code. */
2731 cache = arm_exidx_fill_cache (this_frame, entry);
2735 *this_prologue_cache = cache;
2739 struct frame_unwind arm_exidx_unwind = {
2741 default_frame_unwind_stop_reason,
2742 arm_prologue_this_id,
2743 arm_prologue_prev_register,
2745 arm_exidx_unwind_sniffer
2748 static struct arm_prologue_cache *
2749 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2751 struct arm_prologue_cache *cache;
2754 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2755 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2757 /* Still rely on the offset calculated from prologue. */
2758 arm_scan_prologue (this_frame, cache);
2760 /* Since we are in epilogue, the SP has been restored. */
2761 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2763 /* Calculate actual addresses of saved registers using offsets
2764 determined by arm_scan_prologue. */
2765 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2766 if (cache->saved_regs[reg].is_addr ())
2767 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
2773 /* Implementation of function hook 'this_id' in
2774 'struct frame_unwind' for epilogue unwinder. */
2777 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2779 struct frame_id *this_id)
2781 struct arm_prologue_cache *cache;
2784 if (*this_cache == NULL)
2785 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2786 cache = (struct arm_prologue_cache *) *this_cache;
2788 /* Use function start address as part of the frame ID. If we cannot
2789 identify the start address (due to missing symbol information),
2790 fall back to just using the current PC. */
2791 pc = get_frame_pc (this_frame);
2792 func = get_frame_func (this_frame);
2796 (*this_id) = frame_id_build (cache->prev_sp, pc);
2799 /* Implementation of function hook 'prev_register' in
2800 'struct frame_unwind' for epilogue unwinder. */
2802 static struct value *
2803 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2804 void **this_cache, int regnum)
2806 if (*this_cache == NULL)
2807 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2809 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2812 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2814 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2817 /* Implementation of function hook 'sniffer' in
2818 'struct frame_unwind' for epilogue unwinder. */
2821 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2822 struct frame_info *this_frame,
2823 void **this_prologue_cache)
2825 if (frame_relative_level (this_frame) == 0)
2827 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2828 CORE_ADDR pc = get_frame_pc (this_frame);
2830 if (arm_frame_is_thumb (this_frame))
2831 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2833 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2839 /* Frame unwinder from epilogue. */
2841 static const struct frame_unwind arm_epilogue_frame_unwind =
2844 default_frame_unwind_stop_reason,
2845 arm_epilogue_frame_this_id,
2846 arm_epilogue_frame_prev_register,
2848 arm_epilogue_frame_sniffer,
2851 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2852 trampoline, return the target PC. Otherwise return 0.
2854 void call0a (char c, short s, int i, long l) {}
2858 (*pointer_to_call0a) (c, s, i, l);
2861 Instead of calling a stub library function _call_via_xx (xx is
2862 the register name), GCC may inline the trampoline in the object
2863 file as below (register r2 has the address of call0a).
2866 .type main, %function
2875 The trampoline 'bx r2' doesn't belong to main. */
2878 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2880 /* The heuristic for recognizing such a trampoline is that FRAME is
2881 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2882 if (arm_frame_is_thumb (frame))
2886 if (target_read_memory (pc, buf, 2) == 0)
2888 struct gdbarch *gdbarch = get_frame_arch (frame);
2889 enum bfd_endian byte_order_for_code
2890 = gdbarch_byte_order_for_code (gdbarch);
2892 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2894 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2897 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2899 /* Clear the LSB so that gdb core sets step-resume
2900 breakpoint at the right address. */
2901 return UNMAKE_THUMB_ADDR (dest);
2909 static struct arm_prologue_cache *
2910 arm_make_stub_cache (struct frame_info *this_frame)
2912 struct arm_prologue_cache *cache;
2914 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2915 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2917 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2922 /* Our frame ID for a stub frame is the current SP and PC. */
2925 arm_stub_this_id (struct frame_info *this_frame,
2927 struct frame_id *this_id)
2929 struct arm_prologue_cache *cache;
2931 if (*this_cache == NULL)
2932 *this_cache = arm_make_stub_cache (this_frame);
2933 cache = (struct arm_prologue_cache *) *this_cache;
2935 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2939 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2940 struct frame_info *this_frame,
2941 void **this_prologue_cache)
2943 CORE_ADDR addr_in_block;
2945 CORE_ADDR pc, start_addr;
2948 addr_in_block = get_frame_address_in_block (this_frame);
2949 pc = get_frame_pc (this_frame);
2950 if (in_plt_section (addr_in_block)
2951 /* We also use the stub unwinder if the target memory is unreadable,
2952 to avoid having the prologue unwinder try to read it. */
2953 || target_read_memory (pc, dummy, 4) != 0)
2956 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2957 && arm_skip_bx_reg (this_frame, pc) != 0)
2963 struct frame_unwind arm_stub_unwind = {
2965 default_frame_unwind_stop_reason,
2967 arm_prologue_prev_register,
2969 arm_stub_unwind_sniffer
2972 /* Put here the code to store, into CACHE->saved_regs, the addresses
2973 of the saved registers of frame described by THIS_FRAME. CACHE is
2976 static struct arm_prologue_cache *
2977 arm_m_exception_cache (struct frame_info *this_frame)
2979 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2980 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2981 struct arm_prologue_cache *cache;
2984 CORE_ADDR unwound_sp;
2986 uint32_t exc_return;
2987 uint32_t process_stack_used;
2988 uint32_t extended_frame_used;
2989 uint32_t secure_stack_used;
2991 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2992 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2994 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
2995 describes which bits in LR define which stack was used prior
2996 to the exception and if FPU is used (causing extended stack frame). */
2998 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
2999 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
3001 /* Check EXC_RETURN indicator bits. */
3002 exc_return = (((lr >> 28) & 0xf) == 0xf);
3004 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3005 process_stack_used = ((lr & (1 << 2)) != 0);
3006 if (exc_return && process_stack_used)
3008 /* Thread (process) stack used.
3009 Potentially this could be another register defined by the target, but PSP
3010 can be considered a standard name for the "Process Stack Pointer".
3011 To be fully aware of system registers like MSP and PSP, these could
3012 be added to a separate XML arm-m-system-profile that is valid for
3013 ARMv6-M and ARMv7-M architectures. Also, to be able to debug e.g. a
3014 corefile off-line, these registers must be defined by GDB
3015 and included in the corefile regsets. */
3017 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
3018 if (psp_regnum == -1)
3020 /* Thread (process) stack could not be fetched,
3021 give warning and exit. */
3023 warning (_("no PSP thread stack unwinding supported."));
3025 /* Terminate any further stack unwinding by referring to self. */
3026 cache->prev_sp = sp;
3031 /* Thread (process) stack used, use PSP as SP. */
3032 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
3037 /* Main stack used, use MSP as SP. */
3041 /* The hardware saves eight 32-bit words, comprising xPSR,
3042 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3043 "B1.5.6 Exception entry behavior" in
3044 "ARMv7-M Architecture Reference Manual". */
3045 cache->saved_regs[0].set_addr (unwound_sp);
3046 cache->saved_regs[1].set_addr (unwound_sp + 4);
3047 cache->saved_regs[2].set_addr (unwound_sp + 8);
3048 cache->saved_regs[3].set_addr (unwound_sp + 12);
3049 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + 16);
3050 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 20);
3051 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 24);
3052 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 28);
3054 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3056 extended_frame_used = ((lr & (1 << 4)) == 0);
3057 if (exc_return && extended_frame_used)
3060 int fpu_regs_stack_offset;
3062 /* This code does not take lazy stacking into account; see "Lazy
3063 context save of FP state" in B1.5.7 and ARM AN298, supported
3064 by the Cortex-M4F architecture.
3065 To fully handle this, the FPCCR register (Floating-point Context
3066 Control Register) would need to be read out and its ASPEN and LSPEN
3067 bits checked to set up the correct lazily stacked FP registers.
3068 This register is located at address 0xE000EF34. */
3070 /* Extended stack frame type used. */
3071 fpu_regs_stack_offset = unwound_sp + 0x20;
3072 for (i = 0; i < 16; i++)
3074 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (fpu_regs_stack_offset);
3075 fpu_regs_stack_offset += 4;
3077 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp + 0x60);
3079 /* Offset 0x64 is reserved. */
3080 cache->prev_sp = unwound_sp + 0x68;
3084 /* Standard stack frame type used. */
3085 cache->prev_sp = unwound_sp + 0x20;
3088 /* Check EXC_RETURN bit S if Secure or Non-secure stack used. */
3089 secure_stack_used = ((lr & (1 << 6)) != 0);
3090 if (exc_return && secure_stack_used)
3092 /* ARMv8-M Exception and interrupt handling is not considered here.
3093 In the ARMv8-M architecture, EXC_RETURN bit S also controls whether
3094 the Secure or Non-secure stack was used. To separate Secure and
3095 Non-secure stacks, processors that are based on the ARMv8-M
3096 architecture support 4 stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3097 In addition, a stack limit feature is provided using stack limit
3098 registers (accessible using MSR and MRS instructions) in Privileged
3102 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3103 aligner between the top of the 32-byte stack frame and the
3104 previous context's stack pointer. */
3105 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3106 && (xpsr & (1 << 9)) != 0)
3107 cache->prev_sp += 4;
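/* Illustrative sketch, not part of GDB: the EXC_RETURN tests made above,
   gathered in one place.  An EXC_RETURN value has bits [31:28] all set;
   bit 2 (SPSEL) selects the process stack, bit 4 (FTYPE) is clear when an
   extended (FPU) frame was stacked, and bit 6 (S) marks use of the Secure
   stack on ARMv8-M.  The struct and function names are hypothetical and
   exist only for this example.  */

struct arm_m_exc_return_bits_example
{
  int is_exc_return;
  int uses_process_stack;	/* SPSEL */
  int has_fpu_frame;		/* !FTYPE */
  int uses_secure_stack;	/* S */
};

static inline struct arm_m_exc_return_bits_example
arm_m_decode_exc_return_example (uint32_t lr)
{
  struct arm_m_exc_return_bits_example bits;

  bits.is_exc_return = ((lr >> 28) & 0xf) == 0xf;
  bits.uses_process_stack = (lr & (1 << 2)) != 0;
  bits.has_fpu_frame = (lr & (1 << 4)) == 0;
  bits.uses_secure_stack = (lr & (1 << 6)) != 0;
  return bits;
}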
3112 /* Implementation of function hook 'this_id' in
3113 'struct frame_unwind'. */
3116 arm_m_exception_this_id (struct frame_info *this_frame,
3118 struct frame_id *this_id)
3120 struct arm_prologue_cache *cache;
3122 if (*this_cache == NULL)
3123 *this_cache = arm_m_exception_cache (this_frame);
3124 cache = (struct arm_prologue_cache *) *this_cache;
3126 /* Our frame ID for an exception frame is the current SP and PC. */
3127 *this_id = frame_id_build (cache->prev_sp,
3128 get_frame_pc (this_frame));
3131 /* Implementation of function hook 'prev_register' in
3132 'struct frame_unwind'. */
3134 static struct value *
3135 arm_m_exception_prev_register (struct frame_info *this_frame,
3139 struct arm_prologue_cache *cache;
3141 if (*this_cache == NULL)
3142 *this_cache = arm_m_exception_cache (this_frame);
3143 cache = (struct arm_prologue_cache *) *this_cache;
3145 /* The value was already reconstructed into PREV_SP. */
3146 if (prev_regnum == ARM_SP_REGNUM)
3147 return frame_unwind_got_constant (this_frame, prev_regnum,
3150 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3154 /* Implementation of function hook 'sniffer' in
3155 'struct frame_unwind'. */
3158 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3159 struct frame_info *this_frame,
3160 void **this_prologue_cache)
3162 CORE_ADDR this_pc = get_frame_pc (this_frame);
3164 /* No need to check is_m; this sniffer is only registered for
3165 M-profile architectures. */
3167 /* Check if exception frame returns to a magic PC value. */
3168 return arm_m_addr_is_magic (this_pc);
3171 /* Frame unwinder for M-profile exceptions. */
3173 struct frame_unwind arm_m_exception_unwind =
3176 default_frame_unwind_stop_reason,
3177 arm_m_exception_this_id,
3178 arm_m_exception_prev_register,
3180 arm_m_exception_unwind_sniffer
3184 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3186 struct arm_prologue_cache *cache;
3188 if (*this_cache == NULL)
3189 *this_cache = arm_make_prologue_cache (this_frame);
3190 cache = (struct arm_prologue_cache *) *this_cache;
3192 return cache->prev_sp - cache->framesize;
3195 struct frame_base arm_normal_base = {
3196 &arm_prologue_unwind,
3197 arm_normal_frame_base,
3198 arm_normal_frame_base,
3199 arm_normal_frame_base
3202 static struct value *
3203 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3206 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3208 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3213 /* The PC is normally copied from the return column, which
3214 describes saves of LR. However, that version may have an
3215 extra bit set to indicate Thumb state. The bit is not
3217 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3218 return frame_unwind_got_constant (this_frame, regnum,
3219 arm_addr_bits_remove (gdbarch, lr));
3222 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3223 cpsr = get_frame_register_unsigned (this_frame, regnum);
3224 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3225 if (IS_THUMB_ADDR (lr))
3229 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3232 internal_error (__FILE__, __LINE__,
3233 _("Unexpected register %d"), regnum);
3238 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3239 struct dwarf2_frame_state_reg *reg,
3240 struct frame_info *this_frame)
3246 reg->how = DWARF2_FRAME_REG_FN;
3247 reg->loc.fn = arm_dwarf2_prev_register;
3250 reg->how = DWARF2_FRAME_REG_CFA;
3255 /* Implement the stack_frame_destroyed_p gdbarch method. */
3258 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3260 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3261 unsigned int insn, insn2;
3262 int found_return = 0, found_stack_adjust = 0;
3263 CORE_ADDR func_start, func_end;
3267 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3270 /* The epilogue is a sequence of instructions along the following lines:
3272 - add stack frame size to SP or FP
3273 - [if frame pointer used] restore SP from FP
3274 - restore registers from SP [may include PC]
3275 - a return-type instruction [if PC wasn't already restored]
3277 In a first pass, we scan forward from the current PC and verify the
3278 instructions we find as compatible with this sequence, ending in a
3281 However, this is not sufficient to distinguish indirect function calls
3282 within a function from indirect tail calls in the epilogue in some cases.
3283 Therefore, if we didn't already find any SP-changing instruction during
3284 forward scan, we add a backward scanning heuristic to ensure we actually
3285 are in the epilogue. */
3288 while (scan_pc < func_end && !found_return)
3290 if (target_read_memory (scan_pc, buf, 2))
3294 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3296 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3298 else if (insn == 0x46f7) /* mov pc, lr */
3300 else if (thumb_instruction_restores_sp (insn))
3302 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3305 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3307 if (target_read_memory (scan_pc, buf, 2))
3311 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3313 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3315 if (insn2 & 0x8000) /* <registers> include PC. */
3318 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3319 && (insn2 & 0x0fff) == 0x0b04)
3321 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3324 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3325 && (insn2 & 0x0e00) == 0x0a00)
3337 /* Since any instruction in the epilogue sequence, with the possible
3338 exception of return itself, updates the stack pointer, we need to
3339 scan backwards for at most one instruction. Try either a 16-bit or
3340 a 32-bit instruction. This is just a heuristic, so we do not worry
3341 too much about false positives. */
3343 if (pc - 4 < func_start)
3345 if (target_read_memory (pc - 4, buf, 4))
3348 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3349 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3351 if (thumb_instruction_restores_sp (insn2))
3352 found_stack_adjust = 1;
3353 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3354 found_stack_adjust = 1;
3355 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3356 && (insn2 & 0x0fff) == 0x0b04)
3357 found_stack_adjust = 1;
3358 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3359 && (insn2 & 0x0e00) == 0x0a00)
3360 found_stack_adjust = 1;
3362 return found_stack_adjust;
3366 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3368 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3371 CORE_ADDR func_start, func_end;
3373 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3376 /* We are in the epilogue if the previous instruction was a stack
3377 adjustment and the next instruction is a possible return (bx, mov
3378 pc, or pop). We could have to scan backwards to find the stack
3379 adjustment, or forwards to find the return, but this is a decent
3380 approximation. First scan forwards. */
3383 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3384 if (bits (insn, 28, 31) != INST_NV)
3386 if ((insn & 0x0ffffff0) == 0x012fff10)
3389 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3392 else if ((insn & 0x0fff0000) == 0x08bd0000
3393 && (insn & 0x0000c000) != 0)
3394 /* POP (LDMIA), including PC or LR. */
3401 /* Scan backwards. This is just a heuristic, so do not worry about
3402 false positives from mode changes. */
3404 if (pc < func_start + 4)
3407 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3408 if (arm_instruction_restores_sp (insn))
3414 /* Implement the stack_frame_destroyed_p gdbarch method. */
3417 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3419 if (arm_pc_is_thumb (gdbarch, pc))
3420 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3422 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3425 /* When arguments must be pushed onto the stack, they go on in reverse
3426 order. The code below implements a FILO (stack) to do this. */
3431 struct stack_item *prev;
3435 static struct stack_item *
3436 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3438 struct stack_item *si;
3439 si = XNEW (struct stack_item);
3440 si->data = (gdb_byte *) xmalloc (len);
3443 memcpy (si->data, contents, len);
3447 static struct stack_item *
3448 pop_stack_item (struct stack_item *si)
3450 struct stack_item *dead = si;
3457 /* Implement the gdbarch type alignment method; it overrides the generic
3458 alignment algorithm for anything that is ARM specific. */
3461 arm_type_align (gdbarch *gdbarch, struct type *t)
3463 t = check_typedef (t);
3464 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3466 /* Use the natural alignment for vector types (the same as for the
3467 scalar type), but the maximum alignment is 64-bit. */
3468 if (TYPE_LENGTH (t) > 8)
3471 return TYPE_LENGTH (t);
3474 /* Allow the common code to calculate the alignment. */
3478 /* Possible base types for a candidate for passing and returning in
3481 enum arm_vfp_cprc_base_type
3490 /* The length of one element of base type B. */
3493 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3497 case VFP_CPRC_SINGLE:
3499 case VFP_CPRC_DOUBLE:
3501 case VFP_CPRC_VEC64:
3503 case VFP_CPRC_VEC128:
3506 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3511 /* The character ('s', 'd' or 'q') for the type of VFP register used
3512 for passing base type B. */
3515 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3519 case VFP_CPRC_SINGLE:
3521 case VFP_CPRC_DOUBLE:
3523 case VFP_CPRC_VEC64:
3525 case VFP_CPRC_VEC128:
3528 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3533 /* Determine whether T may be part of a candidate for passing and
3534 returning in VFP registers, ignoring the limit on the total number
3535 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3536 classification of the first valid component found; if it is not
3537 VFP_CPRC_UNKNOWN, all components must have the same classification
3538 as *BASE_TYPE. If it is found that T contains a type not permitted
3539 for passing and returning in VFP registers, a type differently
3540 classified from *BASE_TYPE, or two types differently classified
3541 from each other, return -1, otherwise return the total number of
3542 base-type elements found (possibly 0 in an empty structure or
3543 array). Vector types are not currently supported, matching the
3544 generic AAPCS support. */
3547 arm_vfp_cprc_sub_candidate (struct type *t,
3548 enum arm_vfp_cprc_base_type *base_type)
3550 t = check_typedef (t);
3554 switch (TYPE_LENGTH (t))
3557 if (*base_type == VFP_CPRC_UNKNOWN)
3558 *base_type = VFP_CPRC_SINGLE;
3559 else if (*base_type != VFP_CPRC_SINGLE)
3564 if (*base_type == VFP_CPRC_UNKNOWN)
3565 *base_type = VFP_CPRC_DOUBLE;
3566 else if (*base_type != VFP_CPRC_DOUBLE)
3575 case TYPE_CODE_COMPLEX:
3576 /* Arguments of complex T where T is one of the types float or
3577 double get treated as if they are implemented as:
3586 switch (TYPE_LENGTH (t))
3589 if (*base_type == VFP_CPRC_UNKNOWN)
3590 *base_type = VFP_CPRC_SINGLE;
3591 else if (*base_type != VFP_CPRC_SINGLE)
3596 if (*base_type == VFP_CPRC_UNKNOWN)
3597 *base_type = VFP_CPRC_DOUBLE;
3598 else if (*base_type != VFP_CPRC_DOUBLE)
3607 case TYPE_CODE_ARRAY:
3609 if (t->is_vector ())
3611 /* 64-bit or 128-bit containerized vector types are VFP
3613 switch (TYPE_LENGTH (t))
3616 if (*base_type == VFP_CPRC_UNKNOWN)
3617 *base_type = VFP_CPRC_VEC64;
3620 if (*base_type == VFP_CPRC_UNKNOWN)
3621 *base_type = VFP_CPRC_VEC128;
3632 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3636 if (TYPE_LENGTH (t) == 0)
3638 gdb_assert (count == 0);
3641 else if (count == 0)
3643 unitlen = arm_vfp_cprc_unit_length (*base_type);
3644 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3645 return TYPE_LENGTH (t) / unitlen;
3650 case TYPE_CODE_STRUCT:
3655 for (i = 0; i < t->num_fields (); i++)
3659 if (!field_is_static (&t->field (i)))
3660 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3662 if (sub_count == -1)
3666 if (TYPE_LENGTH (t) == 0)
3668 gdb_assert (count == 0);
3671 else if (count == 0)
3673 unitlen = arm_vfp_cprc_unit_length (*base_type);
3674 if (TYPE_LENGTH (t) != unitlen * count)
3679 case TYPE_CODE_UNION:
3684 for (i = 0; i < t->num_fields (); i++)
3686 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3688 if (sub_count == -1)
3690 count = (count > sub_count ? count : sub_count);
3692 if (TYPE_LENGTH (t) == 0)
3694 gdb_assert (count == 0);
3697 else if (count == 0)
3699 unitlen = arm_vfp_cprc_unit_length (*base_type);
3700 if (TYPE_LENGTH (t) != unitlen * count)
3712 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3713 if passed to or returned from a non-variadic function with the VFP
3714 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3715 *BASE_TYPE to the base type for T and *COUNT to the number of
3716 elements of that base type before returning. */
3719 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3722 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3723 int c = arm_vfp_cprc_sub_candidate (t, &b);
3724 if (c <= 0 || c > 4)
3731 /* Return 1 if the VFP ABI should be used for passing arguments to and
3732 returning values from a function of type FUNC_TYPE, 0
3736 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3738 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3739 /* Variadic functions always use the base ABI. Assume that functions
3740 without debug info are not variadic. */
3741 if (func_type && check_typedef (func_type)->has_varargs ())
3743 /* The VFP ABI is only supported as a variant of AAPCS. */
3744 if (tdep->arm_abi != ARM_ABI_AAPCS)
3746 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3749 /* We currently only support passing parameters in integer registers (which
3750 conforms with GCC's default model) and VFP argument passing following
3751 the VFP variant of AAPCS. Several other variants exist, and
3752 we should probably support some of them based on the selected ABI. */
3755 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3756 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3757 struct value **args, CORE_ADDR sp,
3758 function_call_return_method return_method,
3759 CORE_ADDR struct_addr)
3761 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3765 struct stack_item *si = NULL;
3768 unsigned vfp_regs_free = (1 << 16) - 1;
3770 /* Determine the type of this function and whether the VFP ABI
3772 ftype = check_typedef (value_type (function));
3773 if (ftype->code () == TYPE_CODE_PTR)
3774 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3775 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3777 /* Set the return address. For the ARM, the return breakpoint is
3778 always at BP_ADDR. */
3779 if (arm_pc_is_thumb (gdbarch, bp_addr))
3781 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3783 /* Walk through the list of args and determine how large a temporary
3784 stack is required. Need to take care here as structs may be
3785 passed on the stack, and we have to push them. */
3788 argreg = ARM_A1_REGNUM;
3791 /* The struct_return pointer occupies the first parameter
3792 passing register. */
3793 if (return_method == return_method_struct)
3795 arm_debug_printf ("struct return in %s = %s",
3796 gdbarch_register_name (gdbarch, argreg),
3797 paddress (gdbarch, struct_addr));
3799 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3803 for (argnum = 0; argnum < nargs; argnum++)
3806 struct type *arg_type;
3807 struct type *target_type;
3808 enum type_code typecode;
3809 const bfd_byte *val;
3811 enum arm_vfp_cprc_base_type vfp_base_type;
3813 int may_use_core_reg = 1;
3815 arg_type = check_typedef (value_type (args[argnum]));
3816 len = TYPE_LENGTH (arg_type);
3817 target_type = TYPE_TARGET_TYPE (arg_type);
3818 typecode = arg_type->code ();
3819 val = value_contents (args[argnum]);
3821 align = type_align (arg_type);
3822 /* Round alignment up to a whole number of words. */
3823 align = (align + ARM_INT_REGISTER_SIZE - 1)
3824 & ~(ARM_INT_REGISTER_SIZE - 1);
3825 /* Different ABIs have different maximum alignments. */
3826 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3828 /* The APCS ABI only requires word alignment. */
3829 align = ARM_INT_REGISTER_SIZE;
3833 /* The AAPCS requires at most doubleword alignment. */
3834 if (align > ARM_INT_REGISTER_SIZE * 2)
3835 align = ARM_INT_REGISTER_SIZE * 2;
3839 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3847 /* Because this is a CPRC it cannot go in a core register or
3848 cause a core register to be skipped for alignment.
3849 Either it goes in VFP registers and the rest of this loop
3850 iteration is skipped for this argument, or it goes on the
3851 stack (and the stack alignment code is correct for this
3853 may_use_core_reg = 0;
3855 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3856 shift = unit_length / 4;
3857 mask = (1 << (shift * vfp_base_count)) - 1;
3858 for (regno = 0; regno < 16; regno += shift)
3859 if (((vfp_regs_free >> regno) & mask) == mask)
3868 vfp_regs_free &= ~(mask << regno);
3869 reg_scaled = regno / shift;
3870 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3871 for (i = 0; i < vfp_base_count; i++)
3875 if (reg_char == 'q')
3876 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3877 val + i * unit_length);
3880 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3881 reg_char, reg_scaled + i);
3882 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3884 regcache->cooked_write (regnum, val + i * unit_length);
3891 /* This CPRC could not go in VFP registers, so all VFP
3892 registers are now marked as used. */
3897 /* Push stack padding for doubleword alignment. */
3898 if (nstack & (align - 1))
3900 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3901 nstack += ARM_INT_REGISTER_SIZE;
3904 /* Doubleword aligned quantities must go in even register pairs. */
3905 if (may_use_core_reg
3906 && argreg <= ARM_LAST_ARG_REGNUM
3907 && align > ARM_INT_REGISTER_SIZE
3911 /* If the argument is a pointer to a function, and it is a
3912 Thumb function, create a LOCAL copy of the value and set
3913 the THUMB bit in it. */
3914 if (TYPE_CODE_PTR == typecode
3915 && target_type != NULL
3916 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3918 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3919 if (arm_pc_is_thumb (gdbarch, regval))
3921 bfd_byte *copy = (bfd_byte *) alloca (len);
3922 store_unsigned_integer (copy, len, byte_order,
3923 MAKE_THUMB_ADDR (regval));
3928 /* Copy the argument to general registers or the stack in
3929 register-sized pieces. Large arguments are split between
3930 registers and stack. */
3933 int partial_len = len < ARM_INT_REGISTER_SIZE
3934 ? len : ARM_INT_REGISTER_SIZE;
3936 = extract_unsigned_integer (val, partial_len, byte_order);
3938 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3940 /* The argument is being passed in a general purpose
3942 if (byte_order == BFD_ENDIAN_BIG)
3943 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3945 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
3946 gdbarch_register_name (gdbarch, argreg),
3947 phex (regval, ARM_INT_REGISTER_SIZE));
3949 regcache_cooked_write_unsigned (regcache, argreg, regval);
3954 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3956 memset (buf, 0, sizeof (buf));
3957 store_unsigned_integer (buf, partial_len, byte_order, regval);
3959 /* Push the arguments onto the stack. */
3960 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
3961 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3962 nstack += ARM_INT_REGISTER_SIZE;
3969 /* If we have an odd number of words to push, then decrement the stack
3970 by one word now, so first stack argument will be dword aligned. */
3977 write_memory (sp, si->data, si->len);
3978 si = pop_stack_item (si);
3981 /* Finally, update the SP register. */
3982 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3988 /* Always align the frame to an 8-byte boundary. This is required on
3989 some platforms and harmless on the rest. */
3992 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3994 /* Align the stack to eight bytes. */
3995 return sp & ~ (CORE_ADDR) 7;
3999 print_fpu_flags (struct ui_file *file, int flags)
4001 if (flags & (1 << 0))
4002 fputs_filtered ("IVO ", file);
4003 if (flags & (1 << 1))
4004 fputs_filtered ("DVZ ", file);
4005 if (flags & (1 << 2))
4006 fputs_filtered ("OFL ", file);
4007 if (flags & (1 << 3))
4008 fputs_filtered ("UFL ", file);
4009 if (flags & (1 << 4))
4010 fputs_filtered ("INX ", file);
4011 fputc_filtered ('\n', file);
4014 /* Print interesting information about the floating point processor
4015 (if present) or emulator. */
4017 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4018 struct frame_info *frame, const char *args)
4020 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4023 type = (status >> 24) & 127;
4024 if (status & (1 << 31))
4025 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4027 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4028 /* i18n: [floating point unit] mask */
4029 fputs_filtered (_("mask: "), file);
4030 print_fpu_flags (file, status >> 16);
4031 /* i18n: [floating point unit] flags */
4032 fputs_filtered (_("flags: "), file);
4033 print_fpu_flags (file, status);
4036 /* Construct the ARM extended floating point type. */
4037 static struct type *
4038 arm_ext_type (struct gdbarch *gdbarch)
4040 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4042 if (!tdep->arm_ext_type)
4044 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4045 floatformats_arm_ext);
4047 return tdep->arm_ext_type;
4050 static struct type *
4051 arm_neon_double_type (struct gdbarch *gdbarch)
4053 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4055 if (tdep->neon_double_type == NULL)
4057 struct type *t, *elem;
4059 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4061 elem = builtin_type (gdbarch)->builtin_uint8;
4062 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4063 elem = builtin_type (gdbarch)->builtin_uint16;
4064 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4065 elem = builtin_type (gdbarch)->builtin_uint32;
4066 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4067 elem = builtin_type (gdbarch)->builtin_uint64;
4068 append_composite_type_field (t, "u64", elem);
4069 elem = builtin_type (gdbarch)->builtin_float;
4070 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4071 elem = builtin_type (gdbarch)->builtin_double;
4072 append_composite_type_field (t, "f64", elem);
4074 t->set_is_vector (true);
4075 t->set_name ("neon_d");
4076 tdep->neon_double_type = t;
4079 return tdep->neon_double_type;
4082 /* FIXME: The vector types are not correctly ordered on big-endian
4083 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4084 bits of d0 - regardless of what unit size is being held in d0. So
4085 the offset of the first uint8 in d0 is 7, but the offset of the
4086 first float is 4. This code works as-is for little-endian
4089 static struct type *
4090 arm_neon_quad_type (struct gdbarch *gdbarch)
4092 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4094 if (tdep->neon_quad_type == NULL)
4096 struct type *t, *elem;
4098 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4100 elem = builtin_type (gdbarch)->builtin_uint8;
4101 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4102 elem = builtin_type (gdbarch)->builtin_uint16;
4103 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4104 elem = builtin_type (gdbarch)->builtin_uint32;
4105 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4106 elem = builtin_type (gdbarch)->builtin_uint64;
4107 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4108 elem = builtin_type (gdbarch)->builtin_float;
4109 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4110 elem = builtin_type (gdbarch)->builtin_double;
4111 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4113 t->set_is_vector (true);
4114 t->set_name ("neon_q");
4115 tdep->neon_quad_type = t;
4118 return tdep->neon_quad_type;
4121 /* Return the GDB type object for the "standard" data type of data in
4124 static struct type *
4125 arm_register_type (struct gdbarch *gdbarch, int regnum)
4127 int num_regs = gdbarch_num_regs (gdbarch);
4129 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4130 && regnum >= num_regs && regnum < num_regs + 32)
4131 return builtin_type (gdbarch)->builtin_float;
4133 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4134 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4135 return arm_neon_quad_type (gdbarch);
4137 /* If the target description has register information, we are only
4138 in this function so that we can override the types of
4139 double-precision registers for NEON. */
4140 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4142 struct type *t = tdesc_register_type (gdbarch, regnum);
4144 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4145 && t->code () == TYPE_CODE_FLT
4146 && gdbarch_tdep (gdbarch)->have_neon)
4147 return arm_neon_double_type (gdbarch);
4152 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4154 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4155 return builtin_type (gdbarch)->builtin_void;
4157 return arm_ext_type (gdbarch);
4159 else if (regnum == ARM_SP_REGNUM)
4160 return builtin_type (gdbarch)->builtin_data_ptr;
4161 else if (regnum == ARM_PC_REGNUM)
4162 return builtin_type (gdbarch)->builtin_func_ptr;
4163 else if (regnum >= ARRAY_SIZE (arm_register_names))
4164 /* These registers are only supported on targets which supply
4165 an XML description. */
4166 return builtin_type (gdbarch)->builtin_int0;
4168 return builtin_type (gdbarch)->builtin_uint32;
4171 /* Map a DWARF register REGNUM onto the appropriate GDB register
4175 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4177 /* Core integer regs. */
4178 if (reg >= 0 && reg <= 15)
4181 /* Legacy FPA encoding. These were once used in a way which
4182 overlapped with VFP register numbering, so their use is
4183 discouraged, but GDB doesn't support the ARM toolchain
4184 which used them for VFP. */
4185 if (reg >= 16 && reg <= 23)
4186 return ARM_F0_REGNUM + reg - 16;
4188 /* New assignments for the FPA registers. */
4189 if (reg >= 96 && reg <= 103)
4190 return ARM_F0_REGNUM + reg - 96;
4192 /* WMMX register assignments. */
4193 if (reg >= 104 && reg <= 111)
4194 return ARM_WCGR0_REGNUM + reg - 104;
4196 if (reg >= 112 && reg <= 127)
4197 return ARM_WR0_REGNUM + reg - 112;
4199 if (reg >= 192 && reg <= 199)
4200 return ARM_WC0_REGNUM + reg - 192;
4202 /* VFP v2 registers. A double precision value is actually
4203 in d1 rather than s2, but the ABI only defines numbering
4204 for the single precision registers. This will "just work"
4205 in GDB for little endian targets (we'll read eight bytes,
4206 starting in s0 and then progressing to s1), but will be
4207 reversed on big endian targets with VFP. This won't
4208 be a problem for the new Neon quad registers; you're supposed
4209 to use DW_OP_piece for those. */
4210 if (reg >= 64 && reg <= 95)
4214 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4215 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4219 /* VFP v3 / Neon registers. This range is also used for VFP v2
4220 registers, except that it now describes d0 instead of s0. */
4221 if (reg >= 256 && reg <= 287)
4225 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4226 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4233 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4235 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4238 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4240 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4241 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4243 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4244 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4246 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4247 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4249 if (reg < NUM_GREGS)
4250 return SIM_ARM_R0_REGNUM + reg;
4253 if (reg < NUM_FREGS)
4254 return SIM_ARM_FP0_REGNUM + reg;
4257 if (reg < NUM_SREGS)
4258 return SIM_ARM_FPS_REGNUM + reg;
4261 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4264 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4265 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4266 NULL if an error occurs. BUF is freed. */
4269 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4270 int old_len, int new_len)
4273 int bytes_to_read = new_len - old_len;
4275 new_buf = (gdb_byte *) xmalloc (new_len);
4276 memcpy (new_buf + bytes_to_read, buf, old_len);
4278 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4286 /* An IT block is at most the 2-byte IT instruction followed by
4287 four 4-byte instructions. The furthest back we must search to
4288 find an IT block that affects the current instruction is thus
4289 2 + 3 * 4 == 14 bytes. */
4290 #define MAX_IT_BLOCK_PREFIX 14
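/* Editorial note: an illustrative, self-contained check (not part of GDB)
   of the bound above.  The instruction at the breakpoint address can be at
   most the fourth and last member of an IT block, so no more than three
   (possibly 32-bit) instructions plus the 16-bit IT instruction itself can
   precede it.  */

static int
sketch_it_prefix_bound_is_14 (void)
{
  const int it_insn_size = 2;	/* The IT instruction is 16 bits.  */
  const int max_preceding = 3;	/* Up to three insns between IT and us.  */
  const int max_insn_size = 4;	/* Each may be a 32-bit Thumb-2 insn.  */

  return it_insn_size + max_preceding * max_insn_size == MAX_IT_BLOCK_PREFIX;
}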
4292 /* Use a quick scan if there are more than this many bytes of instructions. */
4294 #define IT_SCAN_THRESHOLD 32
4296 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4297 A breakpoint in an IT block may not be hit, depending on the condition. */
4300 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4304 CORE_ADDR boundary, func_start;
4306 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4307 int i, any, last_it, last_it_count;
4309 /* If we are using BKPT breakpoints, none of this is necessary. */
4310 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4313 /* ARM mode does not have this problem. */
4314 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4317 /* We are setting a breakpoint in Thumb code that could potentially
4318 contain an IT block. The first step is to find how much Thumb
4319 code there is; we do not need to read outside of known Thumb code. */
4321 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4323 /* Thumb-2 code must have mapping symbols to have a chance. */
4326 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4328 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4329 && func_start > boundary)
4330 boundary = func_start;
4332 /* Search for a candidate IT instruction. We have to do some fancy
4333 footwork to distinguish a real IT instruction from the second
4334 half of a 32-bit instruction, but there is no need for that if
4335 there's no candidate. */
4336 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4338 /* No room for an IT instruction. */
4341 buf = (gdb_byte *) xmalloc (buf_len);
4342 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4345 for (i = 0; i < buf_len; i += 2)
4347 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4348 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4361 /* OK, the code bytes before this instruction contain at least one
4362 halfword which resembles an IT instruction. We know that it's
4363 Thumb code, but there are still two possibilities. Either the
4364 halfword really is an IT instruction, or it is the second half of
4365 a 32-bit Thumb instruction. The only way we can tell is to
4366 scan forwards from a known instruction boundary. */
4367 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4371 /* There's a lot of code before this instruction. Start with an
4372 optimistic search; it's easy to recognize halfwords that can
4373 not be the start of a 32-bit instruction, and use that to
4374 lock on to the instruction boundaries. */
4375 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4378 buf_len = IT_SCAN_THRESHOLD;
4381 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4383 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4384 if (thumb_insn_size (inst1) == 2)
4391 /* At this point, if DEFINITE, BUF[I] is the first place we
4392 are sure that we know the instruction boundaries, and it is far
4393 enough from BPADDR that we could not miss an IT instruction
4394 affecting BPADDR. If ! DEFINITE, give up - start from a known boundary. */
4398 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4402 buf_len = bpaddr - boundary;
4408 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4411 buf_len = bpaddr - boundary;
4415 /* Scan forwards. Find the last IT instruction before BPADDR. */
4420 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4422 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4427 else if (inst1 & 0x0002)
4429 else if (inst1 & 0x0004)
4434 i += thumb_insn_size (inst1);
4440 /* There wasn't really an IT instruction after all. */
4443 if (last_it_count < 1)
4444 /* It was too far away. */
4447 /* This really is a trouble spot. Move the breakpoint to the IT instruction. */
4449 return bpaddr - buf_len + last_it;
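/* Editorial note: an illustrative, self-contained sketch (not part of GDB)
   of the IT-instruction test used twice in the function above.  A 16-bit
   Thumb IT instruction has the form 0xbfXY with a non-zero low nibble (the
   mask); a zero mask would instead be a hint such as NOP.  The helper name
   is hypothetical.  */

static int
sketch_is_thumb_it_insn (unsigned short inst)
{
  return (inst & 0xff00) == 0xbf00 && (inst & 0x000f) != 0;
}

/* For example, 0xbf18 (IT NE) matches, while 0xbf00 (NOP) does not.  */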
4452 /* ARM displaced stepping support.
4454 Generally ARM displaced stepping works as follows:
4456 1. When an instruction is to be single-stepped, it is first decoded by
4457 arm_process_displaced_insn. Depending on the type of instruction, it is
4458 then copied to a scratch location, possibly in a modified form. The
4459 copy_* set of functions performs such modification, as necessary. A
4460 breakpoint is placed after the modified instruction in the scratch space
4461 to return control to GDB. Note in particular that instructions which
4462 modify the PC will no longer do so after modification.
4464 2. The instruction is single-stepped, by setting the PC to the scratch
4465 location address, and resuming. Control returns to GDB when the breakpoint is hit.
4468 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4469 function used for the current instruction. This function's job is to
4470 put the CPU/memory state back to what it would have been if the
4471 instruction had been executed unmodified in its original location. */
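/* Editorial note: an illustrative sketch (not part of GDB) of the three
   steps described above, using hypothetical names.  The essential point is
   that the copy executes at a different address from the original, so any
   PC-dependent behaviour is either removed from the copy in step 1 or
   re-created by the cleanup in step 3.  */

struct sketch_step
{
  unsigned long original_addr;	/* Where the instruction really lives.  */
  unsigned long scratch_addr;	/* Where the modified copy is placed.  */
  unsigned long modified_insn;	/* The possibly rewritten instruction.  */
  void (*cleanup) (struct sketch_step *);  /* Matching cleanup_* routine.  */
};

static void
sketch_displaced_step (struct sketch_step *step, unsigned long insn)
{
  /* Step 1: decode and copy; PC-relative or PC-writing behaviour is
     rewritten here, and a breakpoint follows the copy in scratch space.  */
  step->modified_insn = insn;

  /* Step 2: set the PC to step->scratch_addr and resume; GDB regains
     control when the breakpoint after the copy is hit.  */

  /* Step 3: make registers and memory look as if the original instruction
     had executed unmodified at step->original_addr.  */
  if (step->cleanup != NULL)
    step->cleanup (step);
}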
4473 /* NOP instruction (mov r0, r0). */
4474 #define ARM_NOP 0xe1a00000
4475 #define THUMB_NOP 0x4600
4477 /* Helper for register reads for displaced stepping. In particular, this
4478 returns the PC as it would be seen by the instruction at its original location. */
4482 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4486 CORE_ADDR from = dsc->insn_addr;
4488 if (regno == ARM_PC_REGNUM)
4490 /* Compute pipeline offset:
4491 - When executing an ARM instruction, PC reads as the address of the
4492 current instruction plus 8.
4493 - When executing a Thumb instruction, PC reads as the address of the
4494 current instruction plus 4. */
4501 displaced_debug_printf ("read pc value %.8lx",
4502 (unsigned long) from);
4503 return (ULONGEST) from;
4507 regcache_cooked_read_unsigned (regs, regno, &ret);
4509 displaced_debug_printf ("read r%d value %.8lx",
4510 regno, (unsigned long) ret);
4517 displaced_in_arm_mode (struct regcache *regs)
4520 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4522 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4524 return (ps & t_bit) == 0;
4527 /* Write to the PC as from a branch instruction. */
4530 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4534 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4535 architecture versions < 6. */
4536 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4537 val & ~(ULONGEST) 0x3);
4539 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4540 val & ~(ULONGEST) 0x1);
4543 /* Write to the PC as from a branch-exchange instruction. */
4546 bx_write_pc (struct regcache *regs, ULONGEST val)
4549 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4551 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4555 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4556 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4558 else if ((val & 2) == 0)
4560 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4561 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4565 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4566 mode, align dest to 4 bytes). */
4567 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4568 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4569 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
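/* Editorial note: an illustrative, self-contained sketch (not part of GDB)
   of the interworking rule implemented by bx_write_pc above.  Bit 0 of the
   target selects Thumb state; otherwise bit 1 must also be clear for a
   valid ARM target.  The names are hypothetical.  */

static void
sketch_bx_target (unsigned int val, unsigned int *pc, int *thumb)
{
  if (val & 1)
    {
      /* Bit 0 set: Thumb state; the bit is dropped from the address.  */
      *thumb = 1;
      *pc = val & ~1u;
    }
  else if ((val & 2) == 0)
    {
      /* Bits 0 and 1 clear: a word-aligned ARM target.  */
      *thumb = 0;
      *pc = val;
    }
  else
    {
      /* Bit 1 set without bit 0: architecturally unpredictable; the code
	 above falls back to ARM state with a word-aligned address.  */
      *thumb = 0;
      *pc = val & ~3u;
    }
}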
4573 /* Write to the PC as if from a load instruction. */
4576 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4579 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4580 bx_write_pc (regs, val);
4582 branch_write_pc (regs, dsc, val);
4585 /* Write to the PC as if from an ALU instruction. */
4588 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4591 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4592 bx_write_pc (regs, val);
4594 branch_write_pc (regs, dsc, val);
4597 /* Helper for writing to registers for displaced stepping. Writing to the PC
4598 has varying effects depending on the instruction which does the write:
4599 this is controlled by the WRITE_PC argument. */
4602 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4603 int regno, ULONGEST val, enum pc_write_style write_pc)
4605 if (regno == ARM_PC_REGNUM)
4607 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
4611 case BRANCH_WRITE_PC:
4612 branch_write_pc (regs, dsc, val);
4616 bx_write_pc (regs, val);
4620 load_write_pc (regs, dsc, val);
4624 alu_write_pc (regs, dsc, val);
4627 case CANNOT_WRITE_PC:
4628 warning (_("Instruction wrote to PC in an unexpected way when "
4629 "single-stepping"));
4633 internal_error (__FILE__, __LINE__,
4634 _("Invalid argument to displaced_write_reg"));
4637 dsc->wrote_to_pc = 1;
4641 displaced_debug_printf ("writing r%d value %.8lx",
4642 regno, (unsigned long) val);
4643 regcache_cooked_write_unsigned (regs, regno, val);
4647 /* This function is used to concisely determine if an instruction INSN
4648 references PC. Register fields of interest in INSN should have the
4649 corresponding fields of BITMASK set to 0b1111. The function
4650 returns 1 if any of these fields in INSN reference the PC
4651 (also 0b1111, r15), else it returns 0. */
4654 insn_references_pc (uint32_t insn, uint32_t bitmask)
4656 uint32_t lowbit = 1;
4658 while (bitmask != 0)
4662 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4668 mask = lowbit * 0xf;
4670 if ((insn & mask) == mask)
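/* Editorial note: an illustrative usage sketch (not part of GDB) for
   insn_references_pc above.  Each register field of interest is marked by
   an 0xf nibble at the field's position in BITMASK; the hypothetical helper
   below mirrors the check made by arm_copy_preload further down, asking
   whether the Rn field in bits 16-19 of INSN is r15 (the PC).  */

static int
sketch_preload_rn_is_pc (uint32_t insn)
{
  /* 0x000f0000 covers exactly the Rn field in bits 16-19.  */
  return insn_references_pc (insn, 0x000f0000ul);
}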
4679 /* The simplest copy function. Many instructions have the same effect no
4680 matter what address they are executed at: in those cases, use this. */
4683 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
4684 arm_displaced_step_copy_insn_closure *dsc)
4686 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
4687 (unsigned long) insn, iname);
4689 dsc->modinsn[0] = insn;
4695 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4696 uint16_t insn2, const char *iname,
4697 arm_displaced_step_copy_insn_closure *dsc)
4699 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
4700 "unmodified", insn1, insn2, iname);
4702 dsc->modinsn[0] = insn1;
4703 dsc->modinsn[1] = insn2;
4709 /* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without any modification. */
4712 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4714 arm_displaced_step_copy_insn_closure *dsc)
4716 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
4719 dsc->modinsn[0] = insn;
4724 /* Preload instructions with immediate offset. */
4727 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
4728 arm_displaced_step_copy_insn_closure *dsc)
4730 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4731 if (!dsc->u.preload.immed)
4732 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4736 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4737 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
4740 /* Preload instructions:
4742 {pli/pld} [rn, #+/-imm]
4744 {pli/pld} [r0, #+/-imm]. */
4746 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4747 rn_val = displaced_read_reg (regs, dsc, rn);
4748 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4749 dsc->u.preload.immed = 1;
4751 dsc->cleanup = &cleanup_preload;
4755 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4756 arm_displaced_step_copy_insn_closure *dsc)
4758 unsigned int rn = bits (insn, 16, 19);
4760 if (!insn_references_pc (insn, 0x000f0000ul))
4761 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4763 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
4765 dsc->modinsn[0] = insn & 0xfff0ffff;
4767 install_preload (gdbarch, regs, dsc, rn);
4773 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4774 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
4776 unsigned int rn = bits (insn1, 0, 3);
4777 unsigned int u_bit = bit (insn1, 7);
4778 int imm12 = bits (insn2, 0, 11);
4781 if (rn != ARM_PC_REGNUM)
4782 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4784 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4785 PLD (literal) Encoding T1. */
4786 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
4787 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4793 /* Rewrite instruction {pli/pld} PC imm12 into:
4794 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4798 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4800 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4801 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4803 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4805 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4806 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4807 dsc->u.preload.immed = 0;
4809 /* {pli/pld} [r0, r1] */
4810 dsc->modinsn[0] = insn1 & 0xfff0;
4811 dsc->modinsn[1] = 0xf001;
4814 dsc->cleanup = &cleanup_preload;
4818 /* Preload instructions with register offset. */
4821 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4822 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
4825 ULONGEST rn_val, rm_val;
4827 /* Preload register-offset instructions:
4829 {pli/pld} [rn, rm {, shift}]
4831 {pli/pld} [r0, r1 {, shift}]. */
4833 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4834 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4835 rn_val = displaced_read_reg (regs, dsc, rn);
4836 rm_val = displaced_read_reg (regs, dsc, rm);
4837 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4838 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4839 dsc->u.preload.immed = 0;
4841 dsc->cleanup = &cleanup_preload;
4845 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4846 struct regcache *regs,
4847 arm_displaced_step_copy_insn_closure *dsc)
4849 unsigned int rn = bits (insn, 16, 19);
4850 unsigned int rm = bits (insn, 0, 3);
4853 if (!insn_references_pc (insn, 0x000f000ful))
4854 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4856 displaced_debug_printf ("copying preload insn %.8lx",
4857 (unsigned long) insn);
4859 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4861 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4865 /* Copy/cleanup coprocessor load and store instructions. */
4868 cleanup_copro_load_store (struct gdbarch *gdbarch,
4869 struct regcache *regs,
4870 arm_displaced_step_copy_insn_closure *dsc)
4872 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4874 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4876 if (dsc->u.ldst.writeback)
4877 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4881 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4882 arm_displaced_step_copy_insn_closure *dsc,
4883 int writeback, unsigned int rn)
4887 /* Coprocessor load/store instructions:
4889 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4891 {stc/stc2} [r0, #+/-imm].
4893 ldc/ldc2 are handled identically. */
4895 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4896 rn_val = displaced_read_reg (regs, dsc, rn);
4897 /* PC should be 4-byte aligned. */
4898 rn_val = rn_val & 0xfffffffc;
4899 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4901 dsc->u.ldst.writeback = writeback;
4902 dsc->u.ldst.rn = rn;
4904 dsc->cleanup = &cleanup_copro_load_store;
4908 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4909 struct regcache *regs,
4910 arm_displaced_step_copy_insn_closure *dsc)
4912 unsigned int rn = bits (insn, 16, 19);
4914 if (!insn_references_pc (insn, 0x000f0000ul))
4915 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4917 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
4918 (unsigned long) insn);
4920 dsc->modinsn[0] = insn & 0xfff0ffff;
4922 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4928 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4929 uint16_t insn2, struct regcache *regs,
4930 arm_displaced_step_copy_insn_closure *dsc)
4932 unsigned int rn = bits (insn1, 0, 3);
4934 if (rn != ARM_PC_REGNUM)
4935 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4936 "copro load/store", dsc);
4938 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
4941 dsc->modinsn[0] = insn1 & 0xfff0;
4942 dsc->modinsn[1] = insn2;
4945 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4946 do not support writeback, so pass 0. */
4947 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4952 /* Clean up branch instructions (actually perform the branch, by setting the PC). */
4956 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4957 arm_displaced_step_copy_insn_closure *dsc)
4959 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4960 int branch_taken = condition_true (dsc->u.branch.cond, status);
4961 enum pc_write_style write_pc = dsc->u.branch.exchange
4962 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4967 if (dsc->u.branch.link)
4969 /* The value of LR should be the address of the insn following the
4970 current one. In order not to confuse later logic handling the insn
4971 `bx lr', if the current insn is Thumb, bit 0 of the LR value should be set to 1. */
4972 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4975 next_insn_addr |= 0x1;
4977 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4981 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4984 /* Copy B/BL/BLX instructions with immediate destinations. */
4987 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4988 arm_displaced_step_copy_insn_closure *dsc,
4989 unsigned int cond, int exchange, int link, long offset)
4991 /* Implement "BL<cond> <label>" as:
4993 Preparation: cond <- instruction condition
4994 Insn: mov r0, r0 (nop)
4995 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4997 B<cond> similar, but don't set r14 in cleanup. */
4999 dsc->u.branch.cond = cond;
5000 dsc->u.branch.link = link;
5001 dsc->u.branch.exchange = exchange;
5003 dsc->u.branch.dest = dsc->insn_addr;
5004 if (link && exchange)
5005 /* For BLX, offset is computed from the Align (PC, 4). */
5006 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5009 dsc->u.branch.dest += 4 + offset;
5011 dsc->u.branch.dest += 8 + offset;
5013 dsc->cleanup = &cleanup_branch;
5016 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5017 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5019 unsigned int cond = bits (insn, 28, 31);
5020 int exchange = (cond == 0xf);
5021 int link = exchange || bit (insn, 24);
5024 displaced_debug_printf ("copying %s immediate insn %.8lx",
5025 (exchange) ? "blx" : (link) ? "bl" : "b",
5026 (unsigned long) insn);
5028 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5029 then arrange the switch into Thumb mode. */
5030 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5032 offset = bits (insn, 0, 23) << 2;
5034 if (bit (offset, 25))
5035 offset = offset | ~0x3ffffff;
5037 dsc->modinsn[0] = ARM_NOP;
5039 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
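/* Editorial note: an illustrative, self-contained sketch (not part of GDB)
   of the sign extension performed above.  The 24-bit immediate shifted left
   by two gives a 26-bit byte offset; if its top bit (bit 25) is set, the
   remaining high bits are filled with ones to make the offset negative.
   The helper name is hypothetical.  */

static long
sketch_sign_extend_26 (unsigned long offset26)
{
  if (offset26 & (1ul << 25))
    return (long) (offset26 | ~0x3fffffful);

  return (long) offset26;
}

/* For example, 0x3fffff8 (bit 25 set) sign-extends to -8.  */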
5044 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5045 uint16_t insn2, struct regcache *regs,
5046 arm_displaced_step_copy_insn_closure *dsc)
5048 int link = bit (insn2, 14);
5049 int exchange = link && !bit (insn2, 12);
5052 int j1 = bit (insn2, 13);
5053 int j2 = bit (insn2, 11);
5054 int s = sbits (insn1, 10, 10);
5055 int i1 = !(j1 ^ bit (insn1, 10));
5056 int i2 = !(j2 ^ bit (insn1, 10));
5058 if (!link && !exchange) /* B */
5060 offset = (bits (insn2, 0, 10) << 1);
5061 if (bit (insn2, 12)) /* Encoding T4 */
5063 offset |= (bits (insn1, 0, 9) << 12)
5069 else /* Encoding T3 */
5071 offset |= (bits (insn1, 0, 5) << 12)
5075 cond = bits (insn1, 6, 9);
5080 offset = (bits (insn1, 0, 9) << 12);
5081 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5082 offset |= exchange ?
5083 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5086 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5087 link ? (exchange) ? "blx" : "bl" : "b",
5088 insn1, insn2, offset);
5090 dsc->modinsn[0] = THUMB_NOP;
5092 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5096 /* Copy B Thumb instructions. */
5098 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5099 arm_displaced_step_copy_insn_closure *dsc)
5101 unsigned int cond = 0;
5103 unsigned short bit_12_15 = bits (insn, 12, 15);
5104 CORE_ADDR from = dsc->insn_addr;
5106 if (bit_12_15 == 0xd)
5108 /* offset = SignExtend (imm8:0, 32) */
5109 offset = sbits ((insn << 1), 0, 8);
5110 cond = bits (insn, 8, 11);
5112 else if (bit_12_15 == 0xe) /* Encoding T2 */
5114 offset = sbits ((insn << 1), 0, 11);
5118 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5121 dsc->u.branch.cond = cond;
5122 dsc->u.branch.link = 0;
5123 dsc->u.branch.exchange = 0;
5124 dsc->u.branch.dest = from + 4 + offset;
5126 dsc->modinsn[0] = THUMB_NOP;
5128 dsc->cleanup = &cleanup_branch;
5133 /* Copy BX/BLX with register-specified destinations. */
5136 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5137 arm_displaced_step_copy_insn_closure *dsc, int link,
5138 unsigned int cond, unsigned int rm)
5140 /* Implement "{BX,BLX}<cond> <reg>" as:
5142 Preparation: cond <- instruction condition
5143 Insn: mov r0, r0 (nop)
5144 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5146 Don't set r14 in cleanup for BX. */
5148 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5150 dsc->u.branch.cond = cond;
5151 dsc->u.branch.link = link;
5153 dsc->u.branch.exchange = 1;
5155 dsc->cleanup = &cleanup_branch;
5159 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5160 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5162 unsigned int cond = bits (insn, 28, 31);
5165 int link = bit (insn, 5);
5166 unsigned int rm = bits (insn, 0, 3);
5168 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5170 dsc->modinsn[0] = ARM_NOP;
5172 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5177 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5178 struct regcache *regs,
5179 arm_displaced_step_copy_insn_closure *dsc)
5181 int link = bit (insn, 7);
5182 unsigned int rm = bits (insn, 3, 6);
5184 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5186 dsc->modinsn[0] = THUMB_NOP;
5188 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5194 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5197 cleanup_alu_imm (struct gdbarch *gdbarch,
5198 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5200 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5201 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5202 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5203 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5207 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5208 arm_displaced_step_copy_insn_closure *dsc)
5210 unsigned int rn = bits (insn, 16, 19);
5211 unsigned int rd = bits (insn, 12, 15);
5212 unsigned int op = bits (insn, 21, 24);
5213 int is_mov = (op == 0xd);
5214 ULONGEST rd_val, rn_val;
5216 if (!insn_references_pc (insn, 0x000ff000ul))
5217 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5219 displaced_debug_printf ("copying immediate %s insn %.8lx",
5220 is_mov ? "move" : "ALU",
5221 (unsigned long) insn);
5223 /* Instruction is of form:
5225 <op><cond> rd, [rn,] #imm
5229 Preparation: tmp1, tmp2 <- r0, r1;
5231 Insn: <op><cond> r0, r1, #imm
5232 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5235 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5236 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5237 rn_val = displaced_read_reg (regs, dsc, rn);
5238 rd_val = displaced_read_reg (regs, dsc, rd);
5239 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5240 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5244 dsc->modinsn[0] = insn & 0xfff00fff;
5246 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5248 dsc->cleanup = &cleanup_alu_imm;
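/* Editorial note: an illustrative, self-contained sketch (not part of GDB)
   of the rewrite applied above in the non-MOV case: the Rd field (bits
   12-15) is cleared to r0 and the Rn field (bits 16-19) is replaced by r1,
   leaving the condition, opcode and immediate untouched.  */

static uint32_t
sketch_rewrite_alu_imm (uint32_t insn)
{
  return (insn & 0xfff00fff) | 0x10000;
}

/* For example, "add r2, pc, #4" (0xe28f2004) becomes "add r0, r1, #4"
   (0xe2810004); r1 has been preloaded with the PC value, and the cleanup
   copies the result from r0 back into r2.  */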
5254 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5255 uint16_t insn2, struct regcache *regs,
5256 arm_displaced_step_copy_insn_closure *dsc)
5258 unsigned int op = bits (insn1, 5, 8);
5259 unsigned int rn, rm, rd;
5260 ULONGEST rd_val, rn_val;
5262 rn = bits (insn1, 0, 3); /* Rn */
5263 rm = bits (insn2, 0, 3); /* Rm */
5264 rd = bits (insn2, 8, 11); /* Rd */
5266 /* This routine is only called for the MOV instruction. */
5267 gdb_assert (op == 0x2 && rn == 0xf);
5269 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5270 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5272 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5274 /* Instruction is of form:
5276 <op><cond> rd, [rn,] #imm
5280 Preparation: tmp1, tmp2 <- r0, r1;
5282 Insn: <op><cond> r0, r1, #imm
5283 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5286 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5287 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5288 rn_val = displaced_read_reg (regs, dsc, rn);
5289 rd_val = displaced_read_reg (regs, dsc, rd);
5290 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5291 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5294 dsc->modinsn[0] = insn1;
5295 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5298 dsc->cleanup = &cleanup_alu_imm;
5303 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5306 cleanup_alu_reg (struct gdbarch *gdbarch,
5307 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5312 rd_val = displaced_read_reg (regs, dsc, 0);
5314 for (i = 0; i < 3; i++)
5315 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5317 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5321 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5322 arm_displaced_step_copy_insn_closure *dsc,
5323 unsigned int rd, unsigned int rn, unsigned int rm)
5325 ULONGEST rd_val, rn_val, rm_val;
5327 /* Instruction is of form:
5329 <op><cond> rd, [rn,] rm [, <shift>]
5333 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5334 r0, r1, r2 <- rd, rn, rm
5335 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5336 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5339 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5340 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5341 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5342 rd_val = displaced_read_reg (regs, dsc, rd);
5343 rn_val = displaced_read_reg (regs, dsc, rn);
5344 rm_val = displaced_read_reg (regs, dsc, rm);
5345 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5346 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5347 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5350 dsc->cleanup = &cleanup_alu_reg;
5354 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5355 arm_displaced_step_copy_insn_closure *dsc)
5357 unsigned int op = bits (insn, 21, 24);
5358 int is_mov = (op == 0xd);
5360 if (!insn_references_pc (insn, 0x000ff00ful))
5361 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5363 displaced_debug_printf ("copying reg %s insn %.8lx",
5364 is_mov ? "move" : "ALU", (unsigned long) insn);
5367 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5369 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5371 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5377 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5378 struct regcache *regs,
5379 arm_displaced_step_copy_insn_closure *dsc)
5383 rm = bits (insn, 3, 6);
5384 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5386 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5387 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5389 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
5391 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5393 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5398 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5401 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5402 struct regcache *regs,
5403 arm_displaced_step_copy_insn_closure *dsc)
5405 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5408 for (i = 0; i < 4; i++)
5409 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5411 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5415 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5416 arm_displaced_step_copy_insn_closure *dsc,
5417 unsigned int rd, unsigned int rn, unsigned int rm,
5421 ULONGEST rd_val, rn_val, rm_val, rs_val;
5423 /* Instruction is of form:
5425 <op><cond> rd, [rn,] rm, <shift> rs
5429 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5430 r0, r1, r2, r3 <- rd, rn, rm, rs
5431 Insn: <op><cond> r0, r1, r2, <shift> r3
5433 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5437 for (i = 0; i < 4; i++)
5438 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5440 rd_val = displaced_read_reg (regs, dsc, rd);
5441 rn_val = displaced_read_reg (regs, dsc, rn);
5442 rm_val = displaced_read_reg (regs, dsc, rm);
5443 rs_val = displaced_read_reg (regs, dsc, rs);
5444 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5445 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5446 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5447 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5449 dsc->cleanup = &cleanup_alu_shifted_reg;
5453 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5454 struct regcache *regs,
5455 arm_displaced_step_copy_insn_closure *dsc)
5457 unsigned int op = bits (insn, 21, 24);
5458 int is_mov = (op == 0xd);
5459 unsigned int rd, rn, rm, rs;
5461 if (!insn_references_pc (insn, 0x000fff0ful))
5462 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5464 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
5465 is_mov ? "move" : "ALU",
5466 (unsigned long) insn);
5468 rn = bits (insn, 16, 19);
5469 rm = bits (insn, 0, 3);
5470 rs = bits (insn, 8, 11);
5471 rd = bits (insn, 12, 15);
5474 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5476 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5478 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5483 /* Clean up load instructions. */
5486 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5487 arm_displaced_step_copy_insn_closure *dsc)
5489 ULONGEST rt_val, rt_val2 = 0, rn_val;
5491 rt_val = displaced_read_reg (regs, dsc, 0);
5492 if (dsc->u.ldst.xfersize == 8)
5493 rt_val2 = displaced_read_reg (regs, dsc, 1);
5494 rn_val = displaced_read_reg (regs, dsc, 2);
5496 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5497 if (dsc->u.ldst.xfersize > 4)
5498 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5499 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5500 if (!dsc->u.ldst.immed)
5501 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5503 /* Handle register writeback. */
5504 if (dsc->u.ldst.writeback)
5505 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5506 /* Put result in right place. */
5507 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5508 if (dsc->u.ldst.xfersize == 8)
5509 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5512 /* Clean up store instructions. */
5515 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5516 arm_displaced_step_copy_insn_closure *dsc)
5518 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5520 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5521 if (dsc->u.ldst.xfersize > 4)
5522 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5523 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5524 if (!dsc->u.ldst.immed)
5525 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5526 if (!dsc->u.ldst.restore_r4)
5527 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5530 if (dsc->u.ldst.writeback)
5531 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5534 /* Copy "extra" load/store instructions. These are halfword/doubleword
5535 transfers, which have a different encoding to byte/word transfers. */
5538 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5539 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5541 unsigned int op1 = bits (insn, 20, 24);
5542 unsigned int op2 = bits (insn, 5, 6);
5543 unsigned int rt = bits (insn, 12, 15);
5544 unsigned int rn = bits (insn, 16, 19);
5545 unsigned int rm = bits (insn, 0, 3);
5546 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5547 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5548 int immed = (op1 & 0x4) != 0;
5550 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5552 if (!insn_references_pc (insn, 0x000ff00ful))
5553 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5555 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
5556 unprivileged ? "unprivileged " : "",
5557 (unsigned long) insn);
5559 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5562 internal_error (__FILE__, __LINE__,
5563 _("copy_extra_ld_st: instruction decode error"));
5565 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5566 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5567 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5569 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5571 rt_val = displaced_read_reg (regs, dsc, rt);
5572 if (bytesize[opcode] == 8)
5573 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5574 rn_val = displaced_read_reg (regs, dsc, rn);
5576 rm_val = displaced_read_reg (regs, dsc, rm);
5578 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5579 if (bytesize[opcode] == 8)
5580 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5581 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5583 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5586 dsc->u.ldst.xfersize = bytesize[opcode];
5587 dsc->u.ldst.rn = rn;
5588 dsc->u.ldst.immed = immed;
5589 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5590 dsc->u.ldst.restore_r4 = 0;
5593 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5595 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5596 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5598 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5600 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5601 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5603 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5608 /* Copy byte/half word/word loads and stores. */
5611 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5612 arm_displaced_step_copy_insn_closure *dsc, int load,
5613 int immed, int writeback, int size, int usermode,
5614 int rt, int rm, int rn)
5616 ULONGEST rt_val, rn_val, rm_val = 0;
5618 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5619 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5621 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5623 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5625 rt_val = displaced_read_reg (regs, dsc, rt);
5626 rn_val = displaced_read_reg (regs, dsc, rn);
5628 rm_val = displaced_read_reg (regs, dsc, rm);
5630 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5631 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5633 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5635 dsc->u.ldst.xfersize = size;
5636 dsc->u.ldst.rn = rn;
5637 dsc->u.ldst.immed = immed;
5638 dsc->u.ldst.writeback = writeback;
5640 /* To write PC we can do:
5642 Before this sequence of instructions:
5643 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5644 r2 is the Rn value obtained from displaced_read_reg.
5646 Insn1: push {pc} Write address of STR instruction + offset on stack
5647 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5648 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5649 = addr(Insn1) + offset - addr(Insn3) - 8
5651 Insn4: add r4, r4, #8 r4 = offset - 8
5652 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5654 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5656 Otherwise we don't know what value to write for PC, since the offset is
5657 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5658 of this can be found in Section "Saving from r15" in
5659 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5661 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
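/* Editorial note: an illustrative check (not part of GDB) of the arithmetic
   in the comment above.  FROM is the original instruction address,
   SCRATCH_ADDR the address of the push {pc} in the scratch area, and OFFSET
   the architecture-dependent amount (8 or 12) by which a stored PC leads
   the storing instruction.  All names here are hypothetical.  */

static unsigned int
sketch_stored_pc_value (unsigned int from, unsigned int scratch_addr,
			unsigned int offset)
{
  unsigned int r0 = from + 8;			/* displaced_read_reg value.  */
  unsigned int r4 = scratch_addr + offset;	/* push {pc}; pop {r4}.  */

  /* sub r4, r4, pc: the sub is the third insn, at scratch_addr + 8, and
     reads the PC as that address plus 8.  */
  r4 -= scratch_addr + 8 + 8;			/* r4 = offset - 16.  */
  r4 += 8;					/* add r4, r4, #8.  */

  return r0 + r4;				/* add r0, r0, r4 == from + offset.  */
}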
5666 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5667 uint16_t insn2, struct regcache *regs,
5668 arm_displaced_step_copy_insn_closure *dsc, int size)
5670 unsigned int u_bit = bit (insn1, 7);
5671 unsigned int rt = bits (insn2, 12, 15);
5672 int imm12 = bits (insn2, 0, 11);
5675 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
5676 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5682 /* Rewrite instruction LDR Rt imm12 into:
5684 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5688 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5691 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5692 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5693 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5695 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5697 pc_val = pc_val & 0xfffffffc;
5699 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5700 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5704 dsc->u.ldst.xfersize = size;
5705 dsc->u.ldst.immed = 0;
5706 dsc->u.ldst.writeback = 0;
5707 dsc->u.ldst.restore_r4 = 0;
5709 /* LDR R0, R2, R3 */
5710 dsc->modinsn[0] = 0xf852;
5711 dsc->modinsn[1] = 0x3;
5714 dsc->cleanup = &cleanup_load;
5720 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5721 uint16_t insn2, struct regcache *regs,
5722 arm_displaced_step_copy_insn_closure *dsc,
5723 int writeback, int immed)
5725 unsigned int rt = bits (insn2, 12, 15);
5726 unsigned int rn = bits (insn1, 0, 3);
5727 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5728 /* In LDR (register), there is also a register Rm, which is not allowed to
5729 be PC, so we don't have to check it. */
5731 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5732 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5735 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
5736 rt, rn, insn1, insn2);
5738 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5741 dsc->u.ldst.restore_r4 = 0;
5744 /* ldr[b]<cond> rt, [rn, #imm], etc.
5746 ldr[b]<cond> r0, [r2, #imm]. */
5748 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5749 dsc->modinsn[1] = insn2 & 0x0fff;
5752 /* ldr[b]<cond> rt, [rn, rm], etc.
5754 ldr[b]<cond> r0, [r2, r3]. */
5756 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5757 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5767 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5768 struct regcache *regs,
5769 arm_displaced_step_copy_insn_closure *dsc,
5770 int load, int size, int usermode)
5772 int immed = !bit (insn, 25);
5773 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5774 unsigned int rt = bits (insn, 12, 15);
5775 unsigned int rn = bits (insn, 16, 19);
5776 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5778 if (!insn_references_pc (insn, 0x000ff00ful))
5779 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5781 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
5782 load ? (size == 1 ? "ldrb" : "ldr")
5783 : (size == 1 ? "strb" : "str"),
5784 usermode ? "t" : "",
5786 (unsigned long) insn);
5788 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5789 usermode, rt, rm, rn);
5791 if (load || rt != ARM_PC_REGNUM)
5793 dsc->u.ldst.restore_r4 = 0;
5796 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5798 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5799 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5801 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5803 {ldr,str}[b]<cond> r0, [r2, r3]. */
5804 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5808 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5809 dsc->u.ldst.restore_r4 = 1;
5810 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5811 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5812 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5813 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5814 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5818 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5820 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5825 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5830 /* Cleanup LDM instructions with fully-populated register list. This is an
5831 unfortunate corner case: it's impossible to implement correctly by modifying
5832 the instruction. The issue is as follows: we have an instruction,
ldm rN, {r0-r15}
5836 which we must rewrite to avoid loading PC. A possible solution would be to
5837 do the load in two halves, something like (with suitable cleanup
5841 ldm[id][ab] r8!, {r0-r7}
5843 ldm[id][ab] r8, {r7-r14}
5846 but at present there's no suitable place for <temp>, since the scratch space
5847 is overwritten before the cleanup routine is called. For now, we simply
5848 emulate the instruction. */
5851 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5852 arm_displaced_step_copy_insn_closure *dsc)
5854 int inc = dsc->u.block.increment;
5855 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5856 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5857 uint32_t regmask = dsc->u.block.regmask;
5858 int regno = inc ? 0 : 15;
5859 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5860 int exception_return = dsc->u.block.load && dsc->u.block.user
5861 && (regmask & 0x8000) != 0;
5862 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5863 int do_transfer = condition_true (dsc->u.block.cond, status);
5864 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5869 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5870 sensible we can do here. Complain loudly. */
5871 if (exception_return)
5872 error (_("Cannot single-step exception return"));
5874 /* We don't handle any stores here for now. */
5875 gdb_assert (dsc->u.block.load != 0);
5877 displaced_debug_printf ("emulating block transfer: %s %s %s",
5878 dsc->u.block.load ? "ldm" : "stm",
5879 dsc->u.block.increment ? "inc" : "dec",
5880 dsc->u.block.before ? "before" : "after");
5887 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5890 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5893 xfer_addr += bump_before;
5895 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5896 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5898 xfer_addr += bump_after;
5900 regmask &= ~(1 << regno);
5903 if (dsc->u.block.writeback)
5904 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5908 /* Clean up an STM which included the PC in the register list. */
5911 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5912 arm_displaced_step_copy_insn_closure *dsc)
5914 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5915 int store_executed = condition_true (dsc->u.block.cond, status);
5916 CORE_ADDR pc_stored_at, transferred_regs
5917 = count_one_bits (dsc->u.block.regmask);
5918 CORE_ADDR stm_insn_addr;
5921 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5923 /* If condition code fails, there's nothing else to do. */
5924 if (!store_executed)
5927 if (dsc->u.block.increment)
5929 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5931 if (dsc->u.block.before)
5936 pc_stored_at = dsc->u.block.xfer_addr;
5938 if (dsc->u.block.before)
5942 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5943 stm_insn_addr = dsc->scratch_base;
5944 offset = pc_val - stm_insn_addr;
5946 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
5949 /* Rewrite the stored PC to the proper value for the non-displaced original
5951 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5952 dsc->insn_addr + offset);
5955 /* Clean up an LDM which includes the PC in the register list. We clumped all
5956 the registers in the transferred list into a contiguous range r0...rX (to
5957 avoid loading PC directly and losing control of the debugged program), so we
5958 must undo that here. */
5961 cleanup_block_load_pc (struct gdbarch *gdbarch,
5962 struct regcache *regs,
5963 arm_displaced_step_copy_insn_closure *dsc)
5965 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5966 int load_executed = condition_true (dsc->u.block.cond, status);
5967 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5968 unsigned int regs_loaded = count_one_bits (mask);
5969 unsigned int num_to_shuffle = regs_loaded, clobbered;
5971 /* The method employed here will fail if the register list is fully populated
5972 (we need to avoid loading PC directly). */
5973 gdb_assert (num_to_shuffle < 16);
5978 clobbered = (1 << num_to_shuffle) - 1;
5980 while (num_to_shuffle > 0)
5982 if ((mask & (1 << write_reg)) != 0)
5984 unsigned int read_reg = num_to_shuffle - 1;
5986 if (read_reg != write_reg)
5988 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5989 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5990 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
5991 read_reg, write_reg);
5994 displaced_debug_printf ("LDM: register r%d already in the right "
5995 "place", write_reg);
5997 clobbered &= ~(1 << write_reg);
6005 /* Restore any registers we scribbled over. */
6006 for (write_reg = 0; clobbered != 0; write_reg++)
6008 if ((clobbered & (1 << write_reg)) != 0)
6010 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6012 displaced_debug_printf ("LDM: restored clobbered register r%d",
6014 clobbered &= ~(1 << write_reg);
6018 /* Perform register writeback manually. */
6019 if (dsc->u.block.writeback)
6021 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6023 if (dsc->u.block.increment)
6024 new_rn_val += regs_loaded * 4;
6026 new_rn_val -= regs_loaded * 4;
6028 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6033 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6034 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6037 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6038 struct regcache *regs,
6039 arm_displaced_step_copy_insn_closure *dsc)
6041 int load = bit (insn, 20);
6042 int user = bit (insn, 22);
6043 int increment = bit (insn, 23);
6044 int before = bit (insn, 24);
6045 int writeback = bit (insn, 21);
6046 int rn = bits (insn, 16, 19);
6048 /* Block transfers which don't mention PC can be run directly out-of-line. */
6050 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6051 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6053 if (rn == ARM_PC_REGNUM)
6055 warning (_("displaced: Unpredictable LDM or STM with "
6056 "base register r15"));
6057 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6060 displaced_debug_printf ("copying block transfer insn %.8lx",
6061 (unsigned long) insn);
6063 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6064 dsc->u.block.rn = rn;
6066 dsc->u.block.load = load;
6067 dsc->u.block.user = user;
6068 dsc->u.block.increment = increment;
6069 dsc->u.block.before = before;
6070 dsc->u.block.writeback = writeback;
6071 dsc->u.block.cond = bits (insn, 28, 31);
6073 dsc->u.block.regmask = insn & 0xffff;
6077 if ((insn & 0xffff) == 0xffff)
6079 /* LDM with a fully-populated register list. This case is
6080 particularly tricky. Implement for now by fully emulating the
6081 instruction (which might not behave perfectly in all cases, but
6082 these instructions should be rare enough for that not to matter). */
6084 dsc->modinsn[0] = ARM_NOP;
6086 dsc->cleanup = &cleanup_block_load_all;
6090 /* LDM of a list of registers which includes PC. Implement by
6091 rewriting the list of registers to be transferred into a
6092 contiguous chunk r0...rX before doing the transfer, then shuffling
6093 registers into the correct places in the cleanup routine. */
6094 unsigned int regmask = insn & 0xffff;
6095 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6098 for (i = 0; i < num_in_list; i++)
6099 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6101 /* Writeback makes things complicated. We need to avoid clobbering
6102 the base register with one of the registers in our modified
6103 register list, but just using a different register can't work in all cases, e.g.:
6106 ldm r14!, {r0-r13,pc}
6108 which would need to be rewritten as:
ldm rN!, {r0-r14}
6112 but that can't work, because there's no free register for N.
6114 Solve this by turning off the writeback bit, and emulating
6115 writeback manually in the cleanup routine. */
6120 new_regmask = (1 << num_in_list) - 1;
6122 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6123 "%.4x, modified list %.4x",
6124 rn, writeback ? "!" : "",
6125 (int) insn & 0xffff, new_regmask);
6127 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6129 dsc->cleanup = &cleanup_block_load_pc;
6134 /* STM of a list of registers which includes PC. Run the instruction
6135 as-is, but out of line: this will store the wrong value for the PC,
6136 so we must manually fix up the memory in the cleanup routine.
6137 Doing things this way has the advantage that we can auto-detect
6138 the offset of the PC write (which is architecture-dependent) in
6139 the cleanup routine. */
6140 dsc->modinsn[0] = insn;
6142 dsc->cleanup = &cleanup_block_store_pc;
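/* Editorial note: an illustrative, self-contained sketch (not part of GDB)
   of the register-list rewrite used above for an LDM whose list includes
   the PC.  The N listed registers are loaded into r0..r(N-1) instead, and
   cleanup_block_load_pc later shuffles the values into their real
   destinations.  The helper name is hypothetical.  */

static unsigned int
sketch_contiguous_regmask (unsigned int regmask)
{
  unsigned int count = 0;

  for (unsigned int i = 0; i < 16; i++)
    if (regmask & (1u << i))
      count++;

  return (1u << count) - 1;
}

/* For example, "ldm r6, {r1, r3, pc}" has register mask 0x800a (three
   registers), which is rewritten to 0x0007, i.e. "ldm r6, {r0, r1, r2}";
   the cleanup then moves r2 into the PC, r1 into r3, and r0 into r1.  */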
6149 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6150 struct regcache *regs,
6151 arm_displaced_step_copy_insn_closure *dsc)
6153 int rn = bits (insn1, 0, 3);
6154 int load = bit (insn1, 4);
6155 int writeback = bit (insn1, 5);
6157 /* Block transfers which don't mention PC can be run directly out-of-line. */
6159 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6160 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6162 if (rn == ARM_PC_REGNUM)
6164 warning (_("displaced: Unpredictable LDM or STM with "
6165 "base register r15"));
6166 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6167 "unpredictable ldm/stm", dsc);
6170 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6173 /* Clear bit 13, since it should always be zero. */
6174 dsc->u.block.regmask = (insn2 & 0xdfff);
6175 dsc->u.block.rn = rn;
6177 dsc->u.block.load = load;
6178 dsc->u.block.user = 0;
6179 dsc->u.block.increment = bit (insn1, 7);
6180 dsc->u.block.before = bit (insn1, 8);
6181 dsc->u.block.writeback = writeback;
6182 dsc->u.block.cond = INST_AL;
6183 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6187 if (dsc->u.block.regmask == 0xffff)
6189 /* This case cannot happen. */
6194 unsigned int regmask = dsc->u.block.regmask;
6195 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6198 for (i = 0; i < num_in_list; i++)
6199 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6204 new_regmask = (1 << num_in_list) - 1;
6206 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6207 "%.4x, modified list %.4x",
6208 rn, writeback ? "!" : "",
6209 (int) dsc->u.block.regmask, new_regmask);
6211 dsc->modinsn[0] = insn1;
6212 dsc->modinsn[1] = (new_regmask & 0xffff);
6215 dsc->cleanup = &cleanup_block_load_pc;
6220 dsc->modinsn[0] = insn1;
6221 dsc->modinsn[1] = insn2;
6223 dsc->cleanup = &cleanup_block_store_pc;
6228 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6229 This is used to avoid a dependency on BFD's bfd_endian enum. */
6232 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6235 return read_memory_unsigned_integer (memaddr, len,
6236 (enum bfd_endian) byte_order);
6239 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6242 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6245 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6248 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6251 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6256 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6259 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6261 return arm_is_thumb (self->regcache);
6264 /* single_step() is called just before we want to resume the inferior,
6265 if we want to single-step it but there is no hardware or kernel
6266 single-step support. We find the targets of the upcoming instructions
6267 and set breakpoints on them. */
6269 std::vector<CORE_ADDR>
6270 arm_software_single_step (struct regcache *regcache)
6272 struct gdbarch *gdbarch = regcache->arch ();
6273 struct arm_get_next_pcs next_pcs_ctx;
6275 arm_get_next_pcs_ctor (&next_pcs_ctx,
6276 &arm_get_next_pcs_ops,
6277 gdbarch_byte_order (gdbarch),
6278 gdbarch_byte_order_for_code (gdbarch),
6282 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6284 for (CORE_ADDR &pc_ref : next_pcs)
6285 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6290 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6291 for Linux, where some SVC instructions must be treated specially. */
6294 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6295 arm_displaced_step_copy_insn_closure *dsc)
6297 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6299 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6300 (unsigned long) resume_addr);
6302 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6306 /* Common copy routine for svc instruction. */
6309 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6310 arm_displaced_step_copy_insn_closure *dsc)
6312 /* Preparation: none.
6313 Insn: unmodified svc.
6314 Cleanup: pc <- insn_addr + insn_size. */
6316 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next instruction. */
6318 dsc->wrote_to_pc = 1;
6320 /* Allow OS-specific code to override SVC handling. */
6321 if (dsc->u.svc.copy_svc_os)
6322 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6325 dsc->cleanup = &cleanup_svc;
6331 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6332 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6335 displaced_debug_printf ("copying svc insn %.8lx",
6336 (unsigned long) insn);
6338 dsc->modinsn[0] = insn;
6340 return install_svc (gdbarch, regs, dsc);
6344 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6345 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6348 displaced_debug_printf ("copying svc insn %.4x", insn);
6350 dsc->modinsn[0] = insn;
6352 return install_svc (gdbarch, regs, dsc);
6355 /* Copy undefined instructions. */
6358 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6359 arm_displaced_step_copy_insn_closure *dsc)
6361 displaced_debug_printf ("copying undefined insn %.8lx",
6362 (unsigned long) insn);
6364 dsc->modinsn[0] = insn;
6370 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6371 arm_displaced_step_copy_insn_closure *dsc)
6374 displaced_debug_printf ("copying undefined insn %.4x %.4x",
6375 (unsigned short) insn1, (unsigned short) insn2);
6377 dsc->modinsn[0] = insn1;
6378 dsc->modinsn[1] = insn2;
6384 /* Copy unpredictable instructions. */
6387 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6388 arm_displaced_step_copy_insn_closure *dsc)
6390 displaced_debug_printf ("copying unpredictable insn %.8lx",
6391 (unsigned long) insn);
6393 dsc->modinsn[0] = insn;
6398 /* The decode_* functions are instruction decoding helpers. They mostly follow
6399 the presentation in the ARM ARM. */
6402 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6403 struct regcache *regs,
6404 arm_displaced_step_copy_insn_closure *dsc)
6406 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6407 unsigned int rn = bits (insn, 16, 19);
6409 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6410 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6411 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6412 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6413 else if ((op1 & 0x60) == 0x20)
6414 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6415 else if ((op1 & 0x71) == 0x40)
6416 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6418 else if ((op1 & 0x77) == 0x41)
6419 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6420 else if ((op1 & 0x77) == 0x45)
6421 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6422 else if ((op1 & 0x77) == 0x51)
6425 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6427 return arm_copy_unpred (gdbarch, insn, dsc);
6429 else if ((op1 & 0x77) == 0x55)
6430 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6431 else if (op1 == 0x57)
6434 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6435 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6436 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6437 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6438 default: return arm_copy_unpred (gdbarch, insn, dsc);
6440 else if ((op1 & 0x63) == 0x43)
6441 return arm_copy_unpred (gdbarch, insn, dsc);
6442 else if ((op2 & 0x1) == 0x0)
6443 switch (op1 & ~0x80)
6446 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6448 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6449 case 0x71: case 0x75:
6451 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6452 case 0x63: case 0x67: case 0x73: case 0x77:
6453 return arm_copy_unpred (gdbarch, insn, dsc);
6455 return arm_copy_undef (gdbarch, insn, dsc);
6458 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6462 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6463 struct regcache *regs,
6464 arm_displaced_step_copy_insn_closure *dsc)
6466 if (bit (insn, 27) == 0)
6467 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6468 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6469 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6472 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6475 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6477 case 0x4: case 0x5: case 0x6: case 0x7:
6478 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6481 switch ((insn & 0xe00000) >> 21)
6483 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6485 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6488 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6491 return arm_copy_undef (gdbarch, insn, dsc);
6496 int rn_f = (bits (insn, 16, 19) == 0xf);
6497 switch ((insn & 0xe00000) >> 21)
6500 /* ldc/ldc2 imm (undefined for rn == pc). */
6501 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6502 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6505 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6507 case 0x4: case 0x5: case 0x6: case 0x7:
6508 /* ldc/ldc2 lit (undefined for rn != pc). */
6509 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6510 : arm_copy_undef (gdbarch, insn, dsc);
6513 return arm_copy_undef (gdbarch, insn, dsc);
6518 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6521 if (bits (insn, 16, 19) == 0xf)
6523 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6525 return arm_copy_undef (gdbarch, insn, dsc);
6529 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6531 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6535 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6537 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6540 return arm_copy_undef (gdbarch, insn, dsc);
6544 /* Decode miscellaneous instructions in dp/misc encoding space. */
6547 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6548 struct regcache *regs,
6549 arm_displaced_step_copy_insn_closure *dsc)
6551 unsigned int op2 = bits (insn, 4, 6);
6552 unsigned int op = bits (insn, 21, 22);
6557 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6560 if (op == 0x1) /* bx. */
6561 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6563 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6565 return arm_copy_undef (gdbarch, insn, dsc);
6569 /* Not really supported. */
6570 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6572 return arm_copy_undef (gdbarch, insn, dsc);
6576 return arm_copy_bx_blx_reg (gdbarch, insn,
6577 regs, dsc); /* blx register. */
6579 return arm_copy_undef (gdbarch, insn, dsc);
6582 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6586 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6588 /* Not really supported. */
6589 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6593 return arm_copy_undef (gdbarch, insn, dsc);
6598 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6599 struct regcache *regs,
6600 arm_displaced_step_copy_insn_closure *dsc)
6603 switch (bits (insn, 20, 24))
6606 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6609 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6611 case 0x12: case 0x16:
6612 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6615 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6619 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6621 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6622 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6623 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6624 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6625 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6626 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6627 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6628 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6629 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6630 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6631 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6632 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6633 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6634 /* 2nd arg means "unprivileged". */
6635 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6639 /* Should be unreachable. */
6644 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6645 struct regcache *regs,
6646 arm_displaced_step_copy_insn_closure *dsc)
6648 int a = bit (insn, 25), b = bit (insn, 4);
6649 uint32_t op1 = bits (insn, 20, 24);
6651 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6652 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6653 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6654 else if ((!a && (op1 & 0x17) == 0x02)
6655 || (a && (op1 & 0x17) == 0x02 && !b))
6656 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6657 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6658 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6659 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6660 else if ((!a && (op1 & 0x17) == 0x03)
6661 || (a && (op1 & 0x17) == 0x03 && !b))
6662 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6663 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6664 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6665 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6666 else if ((!a && (op1 & 0x17) == 0x06)
6667 || (a && (op1 & 0x17) == 0x06 && !b))
6668 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6669 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6670 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6671 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6672 else if ((!a && (op1 & 0x17) == 0x07)
6673 || (a && (op1 & 0x17) == 0x07 && !b))
6674 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6676 /* Should be unreachable. */
6681 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6682 arm_displaced_step_copy_insn_closure *dsc)
6684 switch (bits (insn, 20, 24))
6686 case 0x00: case 0x01: case 0x02: case 0x03:
6687 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6689 case 0x04: case 0x05: case 0x06: case 0x07:
6690 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6692 case 0x08: case 0x09: case 0x0a: case 0x0b:
6693 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6694 return arm_copy_unmodified (gdbarch, insn,
6695 "decode/pack/unpack/saturate/reverse", dsc);
6698 if (bits (insn, 5, 7) == 0) /* op2. */
6700 if (bits (insn, 12, 15) == 0xf)
6701 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6703 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6706 return arm_copy_undef (gdbarch, insn, dsc);
6708 case 0x1a: case 0x1b:
6709 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6710 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6712 return arm_copy_undef (gdbarch, insn, dsc);
6714 case 0x1c: case 0x1d:
6715 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6717 if (bits (insn, 0, 3) == 0xf)
6718 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6720 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6723 return arm_copy_undef (gdbarch, insn, dsc);
6725 case 0x1e: case 0x1f:
6726 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6727 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6729 return arm_copy_undef (gdbarch, insn, dsc);
6732 /* Should be unreachable. */
6737 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6738 struct regcache *regs,
6739 arm_displaced_step_copy_insn_closure *dsc)
6742 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6744 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6748 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6749 struct regcache *regs,
6750 arm_displaced_step_copy_insn_closure *dsc)
6752 unsigned int opcode = bits (insn, 20, 24);
6756 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6757 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6759 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6760 case 0x12: case 0x16:
6761 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6763 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6764 case 0x13: case 0x17:
6765 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6767 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6768 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6769 /* Note: no writeback for these instructions. Bit 25 will always be
6770 zero though (via caller), so the following works OK. */
6771 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6774 /* Should be unreachable. */
6778 /* Decode shifted register instructions. */
6781 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6782 uint16_t insn2, struct regcache *regs,
6783 arm_displaced_step_copy_insn_closure *dsc)
6785 /* PC is only allowed to be used in the MOV instruction. */
6787 unsigned int op = bits (insn1, 5, 8);
6788 unsigned int rn = bits (insn1, 0, 3);
6790 if (op == 0x2 && rn == 0xf) /* MOV */
6791 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6793 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6794 "dp (shift reg)", dsc);
6798 /* Decode extension register load/store. Exactly the same as
6799 arm_decode_ext_reg_ld_st. */
6802 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6803 uint16_t insn2, struct regcache *regs,
6804 arm_displaced_step_copy_insn_closure *dsc)
6806 unsigned int opcode = bits (insn1, 4, 8);
6810 case 0x04: case 0x05:
6811 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6812 "vfp/neon vmov", dsc);
6814 case 0x08: case 0x0c: /* 01x00 */
6815 case 0x0a: case 0x0e: /* 01x10 */
6816 case 0x12: case 0x16: /* 10x10 */
6817 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6818 "vfp/neon vstm/vpush", dsc);
6820 case 0x09: case 0x0d: /* 01x01 */
6821 case 0x0b: case 0x0f: /* 01x11 */
6822 case 0x13: case 0x17: /* 10x11 */
6823 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6824 "vfp/neon vldm/vpop", dsc);
6826 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6827 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6829 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6830 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6833 /* Should be unreachable. */
6838 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6839 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6841 unsigned int op1 = bits (insn, 20, 25);
6842 int op = bit (insn, 4);
6843 unsigned int coproc = bits (insn, 8, 11);
6845 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6846 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6847 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6848 && (coproc & 0xe) != 0xa)
6850 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6851 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6852 && (coproc & 0xe) != 0xa)
6853 /* ldc/ldc2 imm/lit. */
6854 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6855 else if ((op1 & 0x3e) == 0x00)
6856 return arm_copy_undef (gdbarch, insn, dsc);
6857 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6858 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6859 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6860 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6861 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6862 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6863 else if ((op1 & 0x30) == 0x20 && !op)
6865 if ((coproc & 0xe) == 0xa)
6866 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6868 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6870 else if ((op1 & 0x30) == 0x20 && op)
6871 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6872 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6873 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6874 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6875 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6876 else if ((op1 & 0x30) == 0x30)
6877 return arm_copy_svc (gdbarch, insn, regs, dsc);
6879 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6883 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6884 uint16_t insn2, struct regcache *regs,
6885 arm_displaced_step_copy_insn_closure *dsc)
6887 unsigned int coproc = bits (insn2, 8, 11);
6888 unsigned int bit_5_8 = bits (insn1, 5, 8);
6889 unsigned int bit_9 = bit (insn1, 9);
6890 unsigned int bit_4 = bit (insn1, 4);
6895 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6896 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6898 else if (bit_5_8 == 0) /* UNDEFINED. */
6899 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6902 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6903 if ((coproc & 0xe) == 0xa)
6904 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6906 else /* coproc is not 101x. */
6908 if (bit_4 == 0) /* STC/STC2. */
6909 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6911 else /* LDC/LDC2 {literal, immediate}. */
6912 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6918 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6924 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6925 arm_displaced_step_copy_insn_closure *dsc, int rd)
6931 /* Preparation: Rd <- PC. */
6937 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6938 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6942 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6943 arm_displaced_step_copy_insn_closure *dsc,
6944 int rd, unsigned int imm)
6947 /* Encoding T2: ADDS Rd, #imm */
6948 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6950 install_pc_relative (gdbarch, regs, dsc, rd);
6956 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6957 struct regcache *regs,
6958 arm_displaced_step_copy_insn_closure *dsc)
6960 unsigned int rd = bits (insn, 8, 10);
6961 unsigned int imm8 = bits (insn, 0, 7);
6963 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
6966 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6970 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6971 uint16_t insn2, struct regcache *regs,
6972 arm_displaced_step_copy_insn_closure *dsc)
6974 unsigned int rd = bits (insn2, 8, 11);
6975 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6976 extract the raw immediate encoding rather than computing its value. When
6977 generating the ADD or SUB instruction, we can simply OR the immediate
6978 into the encoding. */
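/* Concretely (field layout per the Thumb-2 encodings): the i bit sits in
   insn1 bit 10, while imm3 and imm8 sit in insn2 bits 14:12 and 7:0; the
   ADD/SUB (immediate) encodings generated below keep those fields in the
   same bit positions, which is why they can be OR'd in directly.  */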
6979 unsigned int imm_3_8 = insn2 & 0x70ff;
6980 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6982 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
6983 rd, imm_i, imm_3_8, insn1, insn2);
6985 if (bit (insn1, 7)) /* Encoding T2 */
6987 /* Encoding T3: SUB Rd, Rd, #imm */
6988 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6989 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6991 else /* Encoding T3 */
6993 /* Encoding T3: ADD Rd, Rd, #imm */
6994 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6995 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6999 install_pc_relative (gdbarch, regs, dsc, rd);
7005 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7006 struct regcache *regs,
7007 arm_displaced_step_copy_insn_closure *dsc)
7009 unsigned int rt = bits (insn1, 8, 10);
7011 int imm8 = (bits (insn1, 0, 7) << 2);
7017 /* Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7019 Insn: LDR R0, [R2, R3];
7020 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
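/* Illustrative values: for "ldr r3, [pc, #8]" at 0x4000 in Thumb state,
   the PC reads as 0x4004, Align(PC,4) is 0x4004 and imm8 is 8, so the
   scratch "ldr r0, [r2, r3]" loads from 0x400c and cleanup_load moves
   the result into r3.  */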
7022 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7024 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7025 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7026 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7027 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7028 /* The assembler calculates the required value of the offset from the
7029 Align(PC,4) value of this instruction to the label. */
7030 pc = pc & 0xfffffffc;
7032 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7033 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7036 dsc->u.ldst.xfersize = 4;
7038 dsc->u.ldst.immed = 0;
7039 dsc->u.ldst.writeback = 0;
7040 dsc->u.ldst.restore_r4 = 0;
7042 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7044 dsc->cleanup = &cleanup_load;
7049 /* Copy Thumb cbnz/cbz instruction. */
7052 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7053 struct regcache *regs,
7054 arm_displaced_step_copy_insn_closure *dsc)
7056 int non_zero = bit (insn1, 11);
7057 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7058 CORE_ADDR from = dsc->insn_addr;
7059 int rn = bits (insn1, 0, 2);
7060 int rn_val = displaced_read_reg (regs, dsc, rn);
7062 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7063 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7064 true, set it to INST_AL so that cleanup_branch knows the branch is taken;
7065 otherwise leave it as-is and cleanup_branch will do nothing. */
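/* Example with illustrative values: for a cbnz r2 whose offset field
   decodes to imm5 == 8, the branch is taken when r2 != 0 and cleanup
   resumes at from + 4 + 8; when r2 == 0 execution falls through to
   from + 2.  */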
7066 if (dsc->u.branch.cond)
7068 dsc->u.branch.cond = INST_AL;
7069 dsc->u.branch.dest = from + 4 + imm5;
7072 dsc->u.branch.dest = from + 2;
7074 dsc->u.branch.link = 0;
7075 dsc->u.branch.exchange = 0;
7077 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7078 non_zero ? "cbnz" : "cbz",
7079 rn, rn_val, insn1, dsc->u.branch.dest);
7081 dsc->modinsn[0] = THUMB_NOP;
7083 dsc->cleanup = &cleanup_branch;
7087 /* Copy Table Branch Byte/Halfword */
7089 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7090 uint16_t insn2, struct regcache *regs,
7091 arm_displaced_step_copy_insn_closure *dsc)
7093 ULONGEST rn_val, rm_val;
7094 int is_tbh = bit (insn2, 4);
7095 CORE_ADDR halfwords = 0;
7096 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7098 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7099 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7105 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7106 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7112 target_read_memory (rn_val + rm_val, buf, 1);
7113 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7116 displaced_debug_printf ("%s base 0x%x index 0x%x entry 0x%x",
7117 is_tbh ? "tbh" : "tbb",
7118 (unsigned int) rn_val, (unsigned int) rm_val,
7119 (unsigned int) halfwords);
7121 dsc->u.branch.cond = INST_AL;
7122 dsc->u.branch.link = 0;
7123 dsc->u.branch.exchange = 0;
7124 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
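/* With illustrative values: for "tbb [r0, r1]" with r0 == 0x2000,
   r1 == 3 and the table byte at 0x2003 equal to 0x10, the destination
   computed above is insn_addr + 4 + 2 * 0x10.  */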
7126 dsc->cleanup = &cleanup_branch;
7132 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7133 arm_displaced_step_copy_insn_closure *dsc)
7136 int val = displaced_read_reg (regs, dsc, 7);
7137 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7140 val = displaced_read_reg (regs, dsc, 8);
7141 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7144 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7149 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7150 struct regcache *regs,
7151 arm_displaced_step_copy_insn_closure *dsc)
7153 dsc->u.block.regmask = insn1 & 0x00ff;
7155 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC} to one of the following sequences:
7158 (1) register list is full, that is, r0-r7 are used.
7159 Prepare: tmp[0] <- r8
7161 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7162 MOV r8, r7; Move value of r7 to r8;
7163 POP {r7}; Store PC value into r7.
7165 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7167 (2) register list is not full, supposing there are N registers in
7168 register list (except PC, 0 <= N <= 7).
7169 Prepare: for each i, 0 - N, tmp[i] <- ri.
7171 POP {r0, r1, ...., rN};
7173 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7174 from tmp[] properly. */
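/* Worked example of case (2) with an illustrative register list: for
   "pop {r0, r2, pc}" (regmask 0x05, N == 2) the scratch copy becomes
   "pop {r0, r1, r2}" (new_regmask 0x07), and cleanup_block_load_pc then
   distributes the three loaded values to r0, r2 and the PC.  */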
7176 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7177 dsc->u.block.regmask, insn1);
7179 if (dsc->u.block.regmask == 0xff)
7181 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7183 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7184 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7185 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7188 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7192 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7194 unsigned int new_regmask;
7196 for (i = 0; i < num_in_list + 1; i++)
7197 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7199 new_regmask = (1 << (num_in_list + 1)) - 1;
7201 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7202 "modified list %.4x",
7203 (int) dsc->u.block.regmask, new_regmask);
7205 dsc->u.block.regmask |= 0x8000;
7206 dsc->u.block.writeback = 0;
7207 dsc->u.block.cond = INST_AL;
7209 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7211 dsc->cleanup = &cleanup_block_load_pc;
7218 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7219 struct regcache *regs,
7220 arm_displaced_step_copy_insn_closure *dsc)
7222 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7223 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7226 /* 16-bit thumb instructions. */
7227 switch (op_bit_12_15)
7229 /* Shift (immediate), add, subtract, move and compare. */
7230 case 0: case 1: case 2: case 3:
7231 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7232 "shift/add/sub/mov/cmp",
7236 switch (op_bit_10_11)
7238 case 0: /* Data-processing */
7239 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7243 case 1: /* Special data instructions and branch and exchange. */
7245 unsigned short op = bits (insn1, 7, 9);
7246 if (op == 6 || op == 7) /* BX or BLX */
7247 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7248 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7249 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7251 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7255 default: /* LDR (literal) */
7256 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7259 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7260 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7263 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7264 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7265 else /* Generate SP-relative address */
7266 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7268 case 11: /* Misc 16-bit instructions */
7270 switch (bits (insn1, 8, 11))
7272 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7273 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7275 case 12: case 13: /* POP */
7276 if (bit (insn1, 8)) /* PC is in register list. */
7277 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7279 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7281 case 15: /* If-Then, and hints */
7282 if (bits (insn1, 0, 3))
7283 /* If-Then makes up to four following instructions conditional. The
7284 IT instruction itself is not conditional, so handle it as a
7285 common unmodified instruction. */
7286 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7289 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7292 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7297 if (op_bit_10_11 < 2) /* Store multiple registers */
7298 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7299 else /* Load multiple registers */
7300 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7302 case 13: /* Conditional branch and supervisor call */
7303 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7304 err = thumb_copy_b (gdbarch, insn1, dsc);
7306 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7308 case 14: /* Unconditional branch */
7309 err = thumb_copy_b (gdbarch, insn1, dsc);
7316 internal_error (__FILE__, __LINE__,
7317 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7321 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7322 uint16_t insn1, uint16_t insn2,
7323 struct regcache *regs,
7324 arm_displaced_step_copy_insn_closure *dsc)
7326 int rt = bits (insn2, 12, 15);
7327 int rn = bits (insn1, 0, 3);
7328 int op1 = bits (insn1, 7, 8);
7330 switch (bits (insn1, 5, 6))
7332 case 0: /* Load byte and memory hints */
7333 if (rt == 0xf) /* PLD/PLI */
7336 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7337 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7339 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7344 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7345 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7348 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7349 "ldrb{reg, immediate}/ldrbt",
7354 case 1: /* Load halfword and memory hints. */
7355 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7356 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7357 "pld/unalloc memhint", dsc);
7361 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7364 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7368 case 2: /* Load word */
7370 int insn2_bit_8_11 = bits (insn2, 8, 11);
7373 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7374 else if (op1 == 0x1) /* Encoding T3 */
7375 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7377 else /* op1 == 0x0 */
7379 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7380 /* LDR (immediate) */
7381 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7382 dsc, bit (insn2, 8), 1);
7383 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7384 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7387 /* LDR (register) */
7388 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7394 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7401 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7402 uint16_t insn2, struct regcache *regs,
7403 arm_displaced_step_copy_insn_closure *dsc)
7406 unsigned short op = bit (insn2, 15);
7407 unsigned int op1 = bits (insn1, 11, 12);
7413 switch (bits (insn1, 9, 10))
7418 /* Load/store {dual, exclusive}, table branch. */
7419 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7420 && bits (insn2, 5, 7) == 0)
7421 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7424 /* PC is not allowed to be used in load/store {dual, exclusive} instructions. */
7426 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7427 "load/store dual/ex", dsc);
7429 else /* load/store multiple */
7431 switch (bits (insn1, 7, 8))
7433 case 0: case 3: /* SRS, RFE */
7434 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7437 case 1: case 2: /* LDM/STM/PUSH/POP */
7438 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7445 /* Data-processing (shift register). */
7446 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7449 default: /* Coprocessor instructions. */
7450 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7455 case 2: /* op1 = 2 */
7456 if (op) /* Branch and misc control. */
7458 if (bit (insn2, 14) /* BLX/BL */
7459 || bit (insn2, 12) /* Unconditional branch */
7460 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7461 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7463 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7468 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7470 int dp_op = bits (insn1, 4, 8);
7471 int rn = bits (insn1, 0, 3);
7472 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7473 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7476 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7479 else /* Data processing (modified immediate) */
7480 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7484 case 3: /* op1 = 3 */
7485 switch (bits (insn1, 9, 10))
7489 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7491 else /* NEON Load/Store and Store single data item */
7492 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7493 "neon elt/struct load/store",
7496 case 1: /* op1 = 3, bits (9, 10) == 1 */
7497 switch (bits (insn1, 7, 8))
7499 case 0: case 1: /* Data processing (register) */
7500 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7503 case 2: /* Multiply and absolute difference */
7504 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7505 "mul/mua/diff", dsc);
7507 case 3: /* Long multiply and divide */
7508 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7513 default: /* Coprocessor instructions */
7514 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7523 internal_error (__FILE__, __LINE__,
7524 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7529 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7530 struct regcache *regs,
7531 arm_displaced_step_copy_insn_closure *dsc)
7533 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7535 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7537 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
7538 insn1, (unsigned long) from);
7541 dsc->insn_size = thumb_insn_size (insn1);
7542 if (thumb_insn_size (insn1) == 4)
7545 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7546 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7549 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
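/* For reference (standard Thumb-2 encoding rule): a first halfword whose
   bits 15:11 are 0b11101, 0b11110 or 0b11111 (e.g. 0xf000) starts a
   32-bit instruction, so thumb_insn_size returns 4 and the second
   halfword is fetched above; anything else, e.g. 0xb580 ("push {r7, lr}"),
   is a complete 16-bit instruction.  */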
7553 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7554 CORE_ADDR to, struct regcache *regs,
7555 arm_displaced_step_copy_insn_closure *dsc)
7558 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7561 /* Most displaced instructions use a 1-instruction scratch space, so set this
7562 here and override below if/when necessary. */
7564 dsc->insn_addr = from;
7565 dsc->scratch_base = to;
7566 dsc->cleanup = NULL;
7567 dsc->wrote_to_pc = 0;
7569 if (!displaced_in_arm_mode (regs))
7570 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7574 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7575 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
7576 (unsigned long) insn, (unsigned long) from);
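/* Dispatch example (illustrative encoding): "ldr r0, [r1]" is 0xe5910000;
   its condition field is not 0xf, bit 4 is 0 and bits 27:25 are 0b010, so
   the switch value below is 0x4 and arm_decode_ld_st_word_ubyte copies
   it.  */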
7578 if ((insn & 0xf0000000) == 0xf0000000)
7579 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7580 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7582 case 0x0: case 0x1: case 0x2: case 0x3:
7583 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7586 case 0x4: case 0x5: case 0x6:
7587 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7591 err = arm_decode_media (gdbarch, insn, dsc);
7594 case 0x8: case 0x9: case 0xa: case 0xb:
7595 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7598 case 0xc: case 0xd: case 0xe: case 0xf:
7599 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7604 internal_error (__FILE__, __LINE__,
7605 _("arm_process_displaced_insn: Instruction decode error"));
7608 /* Actually set up the scratch space for a displaced instruction. */
7611 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7613 arm_displaced_step_copy_insn_closure *dsc)
7615 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7616 unsigned int i, len, offset;
7617 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7618 int size = dsc->is_thumb? 2 : 4;
7619 const gdb_byte *bkp_insn;
7622 /* Poke modified instruction(s). */
7623 for (i = 0; i < dsc->numinsns; i++)
7626 displaced_debug_printf ("writing insn %.8lx at %.8lx",
7627 dsc->modinsn[i], (unsigned long) to + offset);
7629 displaced_debug_printf ("writing insn %.4x at %.8lx",
7630 (unsigned short) dsc->modinsn[i],
7631 (unsigned long) to + offset);
7633 write_memory_unsigned_integer (to + offset, size,
7634 byte_order_for_code,
7639 /* Choose the correct breakpoint instruction. */
7642 bkp_insn = tdep->thumb_breakpoint;
7643 len = tdep->thumb_breakpoint_size;
7647 bkp_insn = tdep->arm_breakpoint;
7648 len = tdep->arm_breakpoint_size;
7651 /* Put breakpoint afterwards. */
7652 write_memory (to + offset, bkp_insn, len);
7654 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
7655 paddress (gdbarch, to));
7658 /* Entry point for cleaning things up after a displaced instruction has been single-stepped. */
7662 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7663 struct displaced_step_copy_insn_closure *dsc_,
7664 CORE_ADDR from, CORE_ADDR to,
7665 struct regcache *regs)
7667 arm_displaced_step_copy_insn_closure *dsc
7668 = (arm_displaced_step_copy_insn_closure *) dsc_;
7671 dsc->cleanup (gdbarch, regs, dsc);
7673 if (!dsc->wrote_to_pc)
7674 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7675 dsc->insn_addr + dsc->insn_size);
7679 #include "bfd-in2.h"
7680 #include "libcoff.h"
7683 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7685 gdb_disassembler *di
7686 = static_cast<gdb_disassembler *>(info->application_data);
7687 struct gdbarch *gdbarch = di->arch ();
7689 if (arm_pc_is_thumb (gdbarch, memaddr))
7691 static asymbol *asym;
7692 static combined_entry_type ce;
7693 static struct coff_symbol_struct csym;
7694 static struct bfd fake_bfd;
7695 static bfd_target fake_target;
7697 if (csym.native == NULL)
7699 /* Create a fake symbol vector containing a Thumb symbol.
7700 This is solely so that the code in print_insn_little_arm()
7701 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7702 the presence of a Thumb symbol and switch to decoding
7703 Thumb instructions. */
7705 fake_target.flavour = bfd_target_coff_flavour;
7706 fake_bfd.xvec = &fake_target;
7707 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7709 csym.symbol.the_bfd = &fake_bfd;
7710 csym.symbol.name = "fake";
7711 asym = (asymbol *) & csym;
7714 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7715 info->symbols = &asym;
7718 info->symbols = NULL;
7720 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
7721 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7722 opcodes/arm-dis.c:print_insn resets info->mach, which would trigger
7723 the assert on the mismatch between info->mach and
7724 bfd_get_mach (current_program_space->exec_bfd ()) in
7725 default_print_insn. */
7726 if (current_program_space->exec_bfd () != NULL)
7727 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7729 return default_print_insn (memaddr, info);
7732 /* The following define instruction sequences that will cause ARM
7733 CPUs to take an undefined instruction trap. These are used to
7734 signal a breakpoint to GDB.
7736 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7737 modes. A different instruction is required for each mode. The ARM
7738 CPUs can also be big or little endian. Thus four different
7739 instructions are needed to support all cases.
7741 Note: ARMv4 defines several new instructions that will take the
7742 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7743 not in fact add the new instructions. The new undefined
7744 instructions in ARMv4 are all instructions that had no defined
7745 behaviour in earlier chips. There is no guarantee that they will
7746 raise an exception; they may instead be treated as NOPs. In practice, it
7747 may only be safe to rely on instructions matching:
7749 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7750 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7751 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7753 Even this may only be true if the condition predicate is true. The
7754 following use a condition predicate of ALWAYS so it is always TRUE.
7756 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7757 and NetBSD all use a software interrupt rather than an undefined
7758 instruction to force a trap. This can be handled by the
7759 abi-specific code during establishment of the gdbarch vector. */
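/* As a sanity check of the little-endian ARM breakpoint defined below:
   the bytes {0xFE,0xDE,0xFF,0xE7} form the word 0xe7ffdefe, i.e.
   cond == 0b1110 (always), bits 27:25 == 0b011 and bit 4 == 1, so it
   lies within the encoding range described above.  */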
7761 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7762 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7763 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7764 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7766 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7767 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7768 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7769 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7771 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7774 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7776 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7777 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7779 if (arm_pc_is_thumb (gdbarch, *pcptr))
7781 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7783 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7784 check whether we are replacing a 32-bit instruction. */
7785 if (tdep->thumb2_breakpoint != NULL)
7789 if (target_read_memory (*pcptr, buf, 2) == 0)
7791 unsigned short inst1;
7793 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7794 if (thumb_insn_size (inst1) == 4)
7795 return ARM_BP_KIND_THUMB2;
7799 return ARM_BP_KIND_THUMB;
7802 return ARM_BP_KIND_ARM;
7806 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7808 static const gdb_byte *
7809 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7811 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7815 case ARM_BP_KIND_ARM:
7816 *size = tdep->arm_breakpoint_size;
7817 return tdep->arm_breakpoint;
7818 case ARM_BP_KIND_THUMB:
7819 *size = tdep->thumb_breakpoint_size;
7820 return tdep->thumb_breakpoint;
7821 case ARM_BP_KIND_THUMB2:
7822 *size = tdep->thumb2_breakpoint_size;
7823 return tdep->thumb2_breakpoint;
7825 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7829 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7832 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7833 struct regcache *regcache,
7838 /* Check that the memory pointed to by PC is readable. */
7839 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7841 struct arm_get_next_pcs next_pcs_ctx;
7843 arm_get_next_pcs_ctor (&next_pcs_ctx,
7844 &arm_get_next_pcs_ops,
7845 gdbarch_byte_order (gdbarch),
7846 gdbarch_byte_order_for_code (gdbarch),
7850 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7852 /* If *PCPTR is the next instruction of the current pc, do the
7853 software single-step computation, and get the thumb mode from
7854 the destination address. */
7855 for (CORE_ADDR pc : next_pcs)
7857 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7859 if (IS_THUMB_ADDR (pc))
7861 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7862 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7865 return ARM_BP_KIND_ARM;
7870 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7873 /* Extract from an array REGBUF containing the (raw) register state a
7874 function return value of type TYPE, and copy that, in virtual
7875 format, into VALBUF. */
7878 arm_extract_return_value (struct type *type, struct regcache *regs,
7881 struct gdbarch *gdbarch = regs->arch ();
7882 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7884 if (TYPE_CODE_FLT == type->code ())
7886 switch (gdbarch_tdep (gdbarch)->fp_model)
7890 /* The value is in register F0 in internal format. We need to
7891 extract the raw value and then convert it to the desired internal type. */
7893 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7895 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7896 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7901 case ARM_FLOAT_SOFT_FPA:
7902 case ARM_FLOAT_SOFT_VFP:
7903 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7904 not using the VFP ABI code. */
7906 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7907 if (TYPE_LENGTH (type) > 4)
7908 regs->cooked_read (ARM_A1_REGNUM + 1,
7909 valbuf + ARM_INT_REGISTER_SIZE);
7913 internal_error (__FILE__, __LINE__,
7914 _("arm_extract_return_value: "
7915 "Floating point model not supported"));
7919 else if (type->code () == TYPE_CODE_INT
7920 || type->code () == TYPE_CODE_CHAR
7921 || type->code () == TYPE_CODE_BOOL
7922 || type->code () == TYPE_CODE_PTR
7923 || TYPE_IS_REFERENCE (type)
7924 || type->code () == TYPE_CODE_ENUM)
7926 /* If the type is a plain integer, then the access is
7927 straight-forward. Otherwise we have to play around a bit more. */
7929 int len = TYPE_LENGTH (type);
7930 int regno = ARM_A1_REGNUM;
7935 /* By using store_unsigned_integer we avoid having to do
7936 anything special for small big-endian values. */
7937 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7938 store_unsigned_integer (valbuf,
7939 (len > ARM_INT_REGISTER_SIZE
7940 ? ARM_INT_REGISTER_SIZE : len),
7942 len -= ARM_INT_REGISTER_SIZE;
7943 valbuf += ARM_INT_REGISTER_SIZE;
7948 /* For a structure or union the behaviour is as if the value had
7949 been stored to word-aligned memory and then loaded into
7950 registers with 32-bit load instruction(s). */
7951 int len = TYPE_LENGTH (type);
7952 int regno = ARM_A1_REGNUM;
7953 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7957 regs->cooked_read (regno++, tmpbuf);
7958 memcpy (valbuf, tmpbuf,
7959 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7960 len -= ARM_INT_REGISTER_SIZE;
7961 valbuf += ARM_INT_REGISTER_SIZE;
7967 /* Will a function return an aggregate type in memory or in a
7968 register? Return 0 if an aggregate type can be returned in a
7969 register, 1 if it must be returned in memory. */
7972 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7974 enum type_code code;
7976 type = check_typedef (type);
7978 /* Simple, non-aggregate types (ie not including vectors and
7979 complex) are always returned in a register (or registers). */
7980 code = type->code ();
7981 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7982 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7985 if (TYPE_CODE_ARRAY == code && type->is_vector ())
7987 /* Vector values should be returned using ARM registers if they
7988 are not over 16 bytes. */
7989 return (TYPE_LENGTH (type) > 16);
7992 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7994 /* The AAPCS says all aggregates not larger than a word are returned in a register. */
7996 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
8005 /* All aggregate types that won't fit in a register must be returned in memory. */
8007 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
8010 /* In the ARM ABI, "integer" like aggregate types are returned in
8011 registers. For an aggregate type to be integer like, its size
8012 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8013 offset of each addressable subfield must be zero. Note that bit
8014 fields are not addressable, and all addressable subfields of
8015 unions always start at offset zero.
8017 This function is based on the behaviour of GCC 2.95.1.
8018 See: gcc/arm.c: arm_return_in_memory() for details.
8020 Note: All versions of GCC before GCC 2.95.2 do not set up the
8021 parameters correctly for a function returning the following
8022 structure: struct { float f;}; This should be returned in memory,
8023 not a register. Richard Earnshaw sent me a patch, but I do not
8024 know of any way to detect if a function like the above has been
8025 compiled with the correct calling convention. */
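/* Two illustrative cases under the APCS rules above: "struct { int i; }"
   has its only subfield at offset zero, so it comes back in r0, while
   "struct { char a; char b; }" has an addressable subfield at a non-zero
   offset and is therefore returned in memory.  */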
8027 /* Assume all other aggregate types can be returned in a register.
8028 Run a check for structures, unions and arrays. */
8031 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8034 /* Need to check if this struct/union is "integer" like. For
8035 this to be true, its size must be less than or equal to
8036 ARM_INT_REGISTER_SIZE and the offset of each addressable
8037 subfield must be zero. Note that bit fields are not
8038 addressable, and unions always start at offset zero. If any
8039 of the subfields is a floating point type, the struct/union
8040 cannot be an integer type. */
8042 /* For each field in the object, check:
8043 1) Is it FP? --> yes, nRc = 1;
8044 2) Is it addressable (bitpos != 0) and
8045 not packed (bitsize == 0)? --> yes, nRc = 1. */
8049 for (i = 0; i < type->num_fields (); i++)
8051 enum type_code field_type_code;
8054 = check_typedef (type->field (i).type ())->code ();
8056 /* Is it a floating point type field? */
8057 if (field_type_code == TYPE_CODE_FLT)
8063 /* If bitpos != 0, then we have to care about it. */
8064 if (TYPE_FIELD_BITPOS (type, i) != 0)
8066 /* Bitfields are not addressable. If the field bitsize is
8067 zero, then the field is not packed. Hence it cannot be
8068 a bitfield or any other packed type. */
8069 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8082 /* Write into appropriate registers a function return value of type
8083 TYPE, given in virtual format. */
8086 arm_store_return_value (struct type *type, struct regcache *regs,
8087 const gdb_byte *valbuf)
8089 struct gdbarch *gdbarch = regs->arch ();
8090 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8092 if (type->code () == TYPE_CODE_FLT)
8094 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8096 switch (gdbarch_tdep (gdbarch)->fp_model)
8100 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8101 regs->cooked_write (ARM_F0_REGNUM, buf);
8104 case ARM_FLOAT_SOFT_FPA:
8105 case ARM_FLOAT_SOFT_VFP:
8106 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8107 not using the VFP ABI code. */
8109 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8110 if (TYPE_LENGTH (type) > 4)
8111 regs->cooked_write (ARM_A1_REGNUM + 1,
8112 valbuf + ARM_INT_REGISTER_SIZE);
8116 internal_error (__FILE__, __LINE__,
8117 _("arm_store_return_value: Floating "
8118 "point model not supported"));
8122 else if (type->code () == TYPE_CODE_INT
8123 || type->code () == TYPE_CODE_CHAR
8124 || type->code () == TYPE_CODE_BOOL
8125 || type->code () == TYPE_CODE_PTR
8126 || TYPE_IS_REFERENCE (type)
8127 || type->code () == TYPE_CODE_ENUM)
8129 if (TYPE_LENGTH (type) <= 4)
8131 /* Values of one word or less are zero/sign-extended and returned in r0. */
8133 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8134 LONGEST val = unpack_long (type, valbuf);
8136 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8137 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8141 /* Integral values greater than one word are stored in consecutive
8142 registers starting with r0. This will always be a multiple of
8143 the register size. */
8144 int len = TYPE_LENGTH (type);
8145 int regno = ARM_A1_REGNUM;
8149 regs->cooked_write (regno++, valbuf);
8150 len -= ARM_INT_REGISTER_SIZE;
8151 valbuf += ARM_INT_REGISTER_SIZE;
8157 /* For a structure or union the behaviour is as if the value had
8158 been stored to word-aligned memory and then loaded into
8159 registers with 32-bit load instruction(s). */
8160 int len = TYPE_LENGTH (type);
8161 int regno = ARM_A1_REGNUM;
8162 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8166 memcpy (tmpbuf, valbuf,
8167 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8168 regs->cooked_write (regno++, tmpbuf);
8169 len -= ARM_INT_REGISTER_SIZE;
8170 valbuf += ARM_INT_REGISTER_SIZE;
8176 /* Handle function return values. */
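/* Illustrative case for the VFP (hard-float) path below: a homogeneous
   floating-point aggregate such as "struct { float x, y; }" satisfies
   arm_vfp_call_candidate with reg_char 's' and vfp_base_count == 2, so
   its value travels in s0 and s1 rather than in the core registers.  */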
8178 static enum return_value_convention
8179 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8180 struct type *valtype, struct regcache *regcache,
8181 gdb_byte *readbuf, const gdb_byte *writebuf)
8183 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8184 struct type *func_type = function ? value_type (function) : NULL;
8185 enum arm_vfp_cprc_base_type vfp_base_type;
8188 if (arm_vfp_abi_for_function (gdbarch, func_type)
8189 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8191 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8192 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8194 for (i = 0; i < vfp_base_count; i++)
8196 if (reg_char == 'q')
8199 arm_neon_quad_write (gdbarch, regcache, i,
8200 writebuf + i * unit_length);
8203 arm_neon_quad_read (gdbarch, regcache, i,
8204 readbuf + i * unit_length);
8211 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8212 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8215 regcache->cooked_write (regnum, writebuf + i * unit_length);
8217 regcache->cooked_read (regnum, readbuf + i * unit_length);
8220 return RETURN_VALUE_REGISTER_CONVENTION;
8223 if (valtype->code () == TYPE_CODE_STRUCT
8224 || valtype->code () == TYPE_CODE_UNION
8225 || valtype->code () == TYPE_CODE_ARRAY)
8227 if (tdep->struct_return == pcc_struct_return
8228 || arm_return_in_memory (gdbarch, valtype))
8229 return RETURN_VALUE_STRUCT_CONVENTION;
8231 else if (valtype->code () == TYPE_CODE_COMPLEX)
8233 if (arm_return_in_memory (gdbarch, valtype))
8234 return RETURN_VALUE_STRUCT_CONVENTION;
8238 arm_store_return_value (valtype, regcache, writebuf);
8241 arm_extract_return_value (valtype, regcache, readbuf);
8243 return RETURN_VALUE_REGISTER_CONVENTION;
8248 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8250 struct gdbarch *gdbarch = get_frame_arch (frame);
8251 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8252 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8254 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8256 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8258 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8259 ARM_INT_REGISTER_SIZE))
8262 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8265 /* A call to cmse secure entry function "foo" at "a" is modified by
GNU ld as "b":
a) bl xxxx <foo>
8272 b) bl yyyy <__acle_se_foo>
8274 section .gnu.sgstubs:
8276 yyyy: sg // secure gateway
8277 b.w xxxx <__acle_se_foo> // original_branch_dest
8282 When control is at "b", the pc contains "yyyy" (the sg address), which is a
8283 trampoline and does not exist in source code. This function returns the
8284 target pc "xxxx". For more details please refer to section 5.4
8285 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8286 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8287 document on www.developer.arm.com. */
8290 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8292 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8293 char *target_name = (char *) alloca (target_len);
8294 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8296 struct bound_minimal_symbol minsym
8297 = lookup_minimal_symbol (target_name, NULL, objfile);
8299 if (minsym.minsym != nullptr)
8300 return BMSYMBOL_VALUE_ADDRESS (minsym);
8305 /* Return true when SEC points to ".gnu.sgstubs" section. */
8308 arm_is_sgstubs_section (struct obj_section *sec)
8310 return (sec != nullptr
8311 && sec->the_bfd_section != nullptr
8312 && sec->the_bfd_section->name != nullptr
8313 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8316 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8317 return the target PC. Otherwise return 0. */
8320 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8324 CORE_ADDR start_addr;
8326 /* Find the starting address and name of the function containing the PC. */
8327 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8329 /* Trampoline 'bx reg' doesn't belong to any functions. Do the check here. */
8331 start_addr = arm_skip_bx_reg (frame, pc);
8332 if (start_addr != 0)
8338 /* If PC is in a Thumb call or return stub, return the address of the
8339 target PC, which is in a register. The thunk functions are called
8340 _call_via_xx, where x is the register name. The possible names
8341 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8342 functions, named __ARM_call_via_r[0-7]. */
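/* For instance (hypothetical stub name): for "_call_via_ip" the two-letter
   suffix "ip" selects index 12 in the table below, and the real target is
   read from the ip register of the frame.  */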
8343 if (startswith (name, "_call_via_")
8344 || startswith (name, "__ARM_call_via_"))
8346 /* Use the name suffix to determine which register contains the target PC. */
8348 static const char *table[15] =
8349 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8350 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8353 int offset = strlen (name) - 2;
8355 for (regno = 0; regno <= 14; regno++)
8356 if (strcmp (&name[offset], table[regno]) == 0)
8357 return get_frame_register_unsigned (frame, regno);
8360 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8361 non-interworking calls to foo. We could decode the stubs
8362 to find the target but it's easier to use the symbol table. */
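/* For instance (hypothetical stub name): for "__memcpy_from_thumb" the
   leading "__" and trailing "_from_thumb" are stripped, leaving
   target_name == "memcpy", whose minimal symbol gives the real
   destination.  */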
8363 namelen = strlen (name);
8364 if (name[0] == '_' && name[1] == '_'
8365 && ((namelen > 2 + strlen ("_from_thumb")
8366 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8367 || (namelen > 2 + strlen ("_from_arm")
8368 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8371 int target_len = namelen - 2;
8372 struct bound_minimal_symbol minsym;
8373 struct objfile *objfile;
8374 struct obj_section *sec;
8376 if (name[namelen - 1] == 'b')
8377 target_len -= strlen ("_from_thumb");
8379 target_len -= strlen ("_from_arm");
8381 target_name = (char *) alloca (target_len + 1);
8382 memcpy (target_name, name + 2, target_len);
8383 target_name[target_len] = '\0';
8385 sec = find_pc_section (pc);
8386 objfile = (sec == NULL) ? NULL : sec->objfile;
8387 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8388 if (minsym.minsym != NULL)
8389 return BMSYMBOL_VALUE_ADDRESS (minsym);
8394 struct obj_section *section = find_pc_section (pc);
8396 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8397 if (arm_is_sgstubs_section (section))
8398 return arm_skip_cmse_entry (pc, name, section->objfile);
8400 return 0; /* not a stub */
8404 arm_update_current_architecture (void)
8406 struct gdbarch_info info;
8408 /* If the current architecture is not ARM, we have nothing to do. */
8409 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8412 /* Update the architecture. */
8413 gdbarch_info_init (&info);
8415 if (!gdbarch_update_p (info))
8416 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8420 set_fp_model_sfunc (const char *args, int from_tty,
8421 struct cmd_list_element *c)
8425 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8426 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8428 arm_fp_model = (enum arm_float_model) fp_model;
8432 if (fp_model == ARM_FLOAT_LAST)
8433 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8436 arm_update_current_architecture ();
8440 show_fp_model (struct ui_file *file, int from_tty,
8441 struct cmd_list_element *c, const char *value)
8443 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8445 if (arm_fp_model == ARM_FLOAT_AUTO
8446 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8447 fprintf_filtered (file, _("\
8448 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8449 fp_model_strings[tdep->fp_model]);
8451 fprintf_filtered (file, _("\
8452 The current ARM floating point model is \"%s\".\n"),
8453 fp_model_strings[arm_fp_model]);
8457 arm_set_abi (const char *args, int from_tty,
8458 struct cmd_list_element *c)
8462 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8463 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8465 arm_abi_global = (enum arm_abi_kind) arm_abi;
8469 if (arm_abi == ARM_ABI_LAST)
8470 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8473 arm_update_current_architecture ();
8477 arm_show_abi (struct ui_file *file, int from_tty,
8478 struct cmd_list_element *c, const char *value)
8480 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8482 if (arm_abi_global == ARM_ABI_AUTO
8483 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8484 fprintf_filtered (file, _("\
8485 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8486 arm_abi_strings[tdep->arm_abi]);
8488 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8493 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8494 struct cmd_list_element *c, const char *value)
8496 fprintf_filtered (file,
8497 _("The current execution mode assumed "
8498 "(when symbols are unavailable) is \"%s\".\n"),
8499 arm_fallback_mode_string);
8503 arm_show_force_mode (struct ui_file *file, int from_tty,
8504 struct cmd_list_element *c, const char *value)
8506 fprintf_filtered (file,
8507 _("The current execution mode assumed "
8508 "(even when symbols are available) is \"%s\".\n"),
8509 arm_force_mode_string);
8512 /* If the user changes the register disassembly style used by "info
8513 registers" and other commands, we also have to switch the style used
8514 in opcodes for disassembly output. This function is run by the "set
8515 arm disassembler" command, and does that. */
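/* For example (a sketch of the expected behaviour, assuming the opcodes
library accepts the "reg-names-special-atpcs" option):

(gdb) set arm disassembler special-atpcs

passes "reg-names-special-atpcs" to set_disassembler_options (), so later
disassembly uses the special ATPCS register names. */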
8518 set_disassembly_style_sfunc (const char *args, int from_tty,
8519 struct cmd_list_element *c)
8521 /* Convert the short style name into the long style name (e.g., reg-names-*)
8522 before calling the generic set_disassembler_options() function. */
8523 std::string long_name = std::string ("reg-names-") + disassembly_style;
8524 set_disassembler_options (&long_name[0]);
8528 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8529 struct cmd_list_element *c, const char *value)
8531 struct gdbarch *gdbarch = get_current_arch ();
8532 char *options = get_disassembler_options (gdbarch);
8533 const char *style = "";
8537 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8538 if (startswith (opt, "reg-names-"))
8540 style = &opt[strlen ("reg-names-")];
8541 len = strcspn (style, ",");
8544 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
8547 /* Return the ARM register name corresponding to register I. */
8549 arm_register_name (struct gdbarch *gdbarch, int i)
8551 const int num_regs = gdbarch_num_regs (gdbarch);
8553 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8554 && i >= num_regs && i < num_regs + 32)
8556 static const char *const vfp_pseudo_names[] = {
8557 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8558 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8559 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8560 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8563 return vfp_pseudo_names[i - num_regs];
8566 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8567 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8569 static const char *const neon_pseudo_names[] = {
8570 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8571 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8574 return neon_pseudo_names[i - num_regs - 32];
8577 if (i >= ARRAY_SIZE (arm_register_names))
8578 /* These registers are only supported on targets which supply
8579 an XML description. */
8582 return arm_register_names[i];
8585 /* Test whether the COFF symbol-specific value corresponds to a Thumb function. */
8589 coff_sym_is_thumb (int val)
8591 return (val == C_THUMBEXT
8592 || val == C_THUMBSTAT
8593 || val == C_THUMBEXTFUNC
8594 || val == C_THUMBSTATFUNC
8595 || val == C_THUMBLABEL);
8598 /* arm_coff_make_msymbol_special()
8599 arm_elf_make_msymbol_special()
8601 These functions test whether the COFF or ELF symbol corresponds to
8602 an address in thumb code, and set a "special" bit in a minimal
8603 symbol to indicate that it does. */
8606 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8608 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8610 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8611 == ST_BRANCH_TO_THUMB)
8612 MSYMBOL_SET_SPECIAL (msym);
8616 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8618 if (coff_sym_is_thumb (val))
8619 MSYMBOL_SET_SPECIAL (msym);
8623 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8626 const char *name = bfd_asymbol_name (sym);
8627 struct arm_per_bfd *data;
8628 struct arm_mapping_symbol new_map_sym;
8630 gdb_assert (name[0] == '$');
8631 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8634 data = arm_bfd_data_key.get (objfile->obfd);
8636 data = arm_bfd_data_key.emplace (objfile->obfd,
8637 objfile->obfd->section_count);
8638 arm_mapping_symbol_vec &map
8639 = data->section_maps[bfd_asymbol_section (sym)->index];
8641 new_map_sym.value = sym->value;
8642 new_map_sym.type = name[1];
8644 /* Insert at the end; the vector will be sorted on first use. */
8645 map.push_back (new_map_sym);
8649 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8651 struct gdbarch *gdbarch = regcache->arch ();
8652 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8654 /* If necessary, set the T bit. */
8657 ULONGEST val, t_bit;
8658 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8659 t_bit = arm_psr_thumb_bit (gdbarch);
8660 if (arm_pc_is_thumb (gdbarch, pc))
8661 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8664 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8669 /* Read the contents of a NEON quad register, by reading from two
8670 double registers. This is used to implement the quad pseudo
8671 registers, and for argument passing in case the quad registers are
8672 missing; vectors are passed in quad registers when using the VFP
8673 ABI, even if a NEON unit is not present. REGNUM is the index of
8674 the quad register, in [0, 15]. */
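/* For example (little-endian target assumed): reading pseudo register q1
reads raw registers d2 and d3; d2 fills bytes 0-7 of BUF and d3 fills bytes
8-15. On a big-endian target the two doubles land in the opposite halves. */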
8676 static enum register_status
8677 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8678 int regnum, gdb_byte *buf)
8681 gdb_byte reg_buf[8];
8682 int offset, double_regnum;
8683 enum register_status status;
8685 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8686 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8689 /* d0 is always the least significant half of q0. */
8690 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8695 status = regcache->raw_read (double_regnum, reg_buf);
8696 if (status != REG_VALID)
8698 memcpy (buf + offset, reg_buf, 8);
8700 offset = 8 - offset;
8701 status = regcache->raw_read (double_regnum + 1, reg_buf);
8702 if (status != REG_VALID)
8704 memcpy (buf + offset, reg_buf, 8);
8709 static enum register_status
8710 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8711 int regnum, gdb_byte *buf)
8713 const int num_regs = gdbarch_num_regs (gdbarch);
8715 gdb_byte reg_buf[8];
8716 int offset, double_regnum;
8718 gdb_assert (regnum >= num_regs);
8721 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8722 /* Quad-precision register. */
8723 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8726 enum register_status status;
8728 /* Single-precision register. */
8729 gdb_assert (regnum < 32);
8731 /* s0 is always the least significant half of d0. */
8732 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8733 offset = (regnum & 1) ? 0 : 4;
8735 offset = (regnum & 1) ? 4 : 0;
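/* Worked example (little-endian assumed): pseudo register s5 arrives here as
regnum 5, so double_regnum below resolves to d2 and offset is 4, i.e. s5 is
the high half of d2. */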
8737 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8738 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8741 status = regcache->raw_read (double_regnum, reg_buf);
8742 if (status == REG_VALID)
8743 memcpy (buf, reg_buf + offset, 4);
8748 /* Store the contents of BUF to a NEON quad register, by writing to
8749 two double registers. This is used to implement the quad pseudo
8750 registers, and for argument passing in case the quad registers are
8751 missing; vectors are passed in quad registers when using the VFP
8752 ABI, even if a NEON unit is not present. REGNUM is the index
8753 of the quad register, in [0, 15]. */
8756 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8757 int regnum, const gdb_byte *buf)
8760 int offset, double_regnum;
8762 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8763 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8766 /* d0 is always the least significant half of q0. */
8767 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8772 regcache->raw_write (double_regnum, buf + offset);
8773 offset = 8 - offset;
8774 regcache->raw_write (double_regnum + 1, buf + offset);
8778 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8779 int regnum, const gdb_byte *buf)
8781 const int num_regs = gdbarch_num_regs (gdbarch);
8783 gdb_byte reg_buf[8];
8784 int offset, double_regnum;
8786 gdb_assert (regnum >= num_regs);
8789 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8790 /* Quad-precision register. */
8791 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8794 /* Single-precision register. */
8795 gdb_assert (regnum < 32);
8797 /* s0 is always the least significant half of d0. */
8798 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8799 offset = (regnum & 1) ? 0 : 4;
8801 offset = (regnum & 1) ? 4 : 0;
8803 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8804 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8807 regcache->raw_read (double_regnum, reg_buf);
8808 memcpy (reg_buf + offset, buf, 4);
8809 regcache->raw_write (double_regnum, reg_buf);
8813 static struct value *
8814 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8816 const int *reg_p = (const int *) baton;
8817 return value_of_register (*reg_p, frame);
8820 static enum gdb_osabi
8821 arm_elf_osabi_sniffer (bfd *abfd)
8823 unsigned int elfosabi;
8824 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8826 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8828 if (elfosabi == ELFOSABI_ARM)
8829 /* GNU tools use this value. Check note sections in this case, too. */
8832 for (asection *sect : gdb_bfd_sections (abfd))
8833 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
8836 /* Anything else will be handled by the generic ELF sniffer. */
8841 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8842 struct reggroup *group)
8844 /* The FPS register's type is INT, but it belongs to float_reggroup.
8845 Besides this, the FPS register belongs to save_reggroup, restore_reggroup,
8846 and all_reggroup, of course. */
8847 if (regnum == ARM_FPS_REGNUM)
8848 return (group == float_reggroup
8849 || group == save_reggroup
8850 || group == restore_reggroup
8851 || group == all_reggroup);
8853 return default_register_reggroup_p (gdbarch, regnum, group);
8856 /* For backward-compatibility we allow two 'g' packet lengths with
8857 the remote protocol depending on whether FPA registers are
8858 supplied. M-profile targets do not have FPA registers, but some
8859 stubs already exist in the wild which use a 'g' packet which
8860 supplies them albeit with dummy values. The packet format which
8861 includes FPA registers should be considered deprecated for
8862 M-profile targets. */
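/* For example (illustration only): a remote stub whose 'g' reply is
ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE bytes long is matched against the
deprecated with-FPA layout registered below, while a stub replying with just
ARM_CORE_REGS_SIZE bytes gets the plain M-profile description. */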
8865 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8867 if (gdbarch_tdep (gdbarch)->is_m)
8869 const target_desc *tdesc;
8871 /* If we know from the executable this is an M-profile target,
8872 cater for remote targets whose register set layout is the
8873 same as the FPA layout. */
8874 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8875 register_remote_g_packet_guess (gdbarch,
8876 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8879 /* The regular M-profile layout. */
8880 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8881 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8884 /* M-profile plus M4F VFP. */
8885 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8886 register_remote_g_packet_guess (gdbarch,
8887 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8891 /* Otherwise we don't have a useful guess. */
8894 /* Implement the code_of_frame_writable gdbarch method. */
8897 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8899 if (gdbarch_tdep (gdbarch)->is_m
8900 && get_frame_type (frame) == SIGTRAMP_FRAME)
8902 /* M-profile exception frames return to some magic PCs, which
8903 aren't writable at all. */
8910 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is "arm", then
8911 allow it to be suffixed by a version (e.g. armv7hl). */
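/* For instance, the regexp returned below matches "arm" on its own, or "arm"
followed by a version string beginning with "v" and containing no "-" or
space, such as "armv7hl". */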
8914 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8916 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8917 return "arm(v[^- ]*)?";
8918 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
8921 /* Initialize the current architecture based on INFO. If possible,
8922 re-use an architecture from ARCHES, which is a list of
8923 architectures already created during this debugging session.
8925 Called e.g. at program startup, when reading a core file, and when
8926 reading a binary file. */
8928 static struct gdbarch *
8929 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8931 struct gdbarch_tdep *tdep;
8932 struct gdbarch *gdbarch;
8933 struct gdbarch_list *best_arch;
8934 enum arm_abi_kind arm_abi = arm_abi_global;
8935 enum arm_float_model fp_model = arm_fp_model;
8936 tdesc_arch_data_up tdesc_data;
8939 int vfp_register_count = 0;
8940 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8941 bool have_wmmx_registers = false;
8942 bool have_neon = false;
8943 bool have_fpa_registers = true;
8944 const struct target_desc *tdesc = info.target_desc;
8946 /* If we have an object to base this architecture on, try to determine its ABI. */
8949 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8951 int ei_osabi, e_flags;
8953 switch (bfd_get_flavour (info.abfd))
8955 case bfd_target_coff_flavour:
8956 /* Assume it's an old APCS-style ABI. */
8958 arm_abi = ARM_ABI_APCS;
8961 case bfd_target_elf_flavour:
8962 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8963 e_flags = elf_elfheader (info.abfd)->e_flags;
8965 if (ei_osabi == ELFOSABI_ARM)
8967 /* GNU tools used to use this value, but do not for EABI
8968 objects. There's nowhere to tag an EABI version
8969 anyway, so assume APCS. */
8970 arm_abi = ARM_ABI_APCS;
8972 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8974 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8978 case EF_ARM_EABI_UNKNOWN:
8979 /* Assume GNU tools. */
8980 arm_abi = ARM_ABI_APCS;
8983 case EF_ARM_EABI_VER4:
8984 case EF_ARM_EABI_VER5:
8985 arm_abi = ARM_ABI_AAPCS;
8986 /* EABI binaries default to VFP float ordering.
8987 They may also contain build attributes that can
8988 be used to identify if the VFP argument-passing
8990 if (fp_model == ARM_FLOAT_AUTO)
8993 switch (bfd_elf_get_obj_attr_int (info.abfd,
8997 case AEABI_VFP_args_base:
8998 /* "The user intended FP parameter/result
8999 passing to conform to AAPCS, base
9001 fp_model = ARM_FLOAT_SOFT_VFP;
9003 case AEABI_VFP_args_vfp:
9004 /* "The user intended FP parameter/result
9005 passing to conform to AAPCS, VFP
9007 fp_model = ARM_FLOAT_VFP;
9009 case AEABI_VFP_args_toolchain:
9010 /* "The user intended FP parameter/result
9011 passing to conform to tool chain-specific
9012 conventions" - we don't know any such
9013 conventions, so leave it as "auto". */
9015 case AEABI_VFP_args_compatible:
9016 /* "Code is compatible with both the base
9017 and VFP variants; the user did not permit
9018 non-variadic functions to pass FP
9019 parameters/results" - leave it as
9023 /* Attribute value not mentioned in the
9024 November 2012 ABI, so leave it as
9029 fp_model = ARM_FLOAT_SOFT_VFP;
9035 /* Leave it as "auto". */
9036 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9041 /* Detect M-profile programs. This only works if the
9042 executable file includes build attributes; GCC does
9043 copy them to the executable, but e.g. RealView does not. */
9046 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9049 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9050 Tag_CPU_arch_profile);
9052 /* GCC specifies the profile for v6-M; RealView only
9053 specifies the profile for architectures starting with
9054 V7 (as opposed to architectures with a tag
9055 numerically greater than TAG_CPU_ARCH_V7). */
9056 if (!tdesc_has_registers (tdesc)
9057 && (attr_arch == TAG_CPU_ARCH_V6_M
9058 || attr_arch == TAG_CPU_ARCH_V6S_M
9059 || attr_profile == 'M'))
9064 if (fp_model == ARM_FLOAT_AUTO)
9066 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9069 /* Leave it as "auto". Strictly speaking this case
9070 means FPA, but almost nobody uses that now, and
9071 many toolchains fail to set the appropriate bits
9072 for the floating-point model they use. */
9074 case EF_ARM_SOFT_FLOAT:
9075 fp_model = ARM_FLOAT_SOFT_FPA;
9077 case EF_ARM_VFP_FLOAT:
9078 fp_model = ARM_FLOAT_VFP;
9080 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9081 fp_model = ARM_FLOAT_SOFT_VFP;
9086 if (e_flags & EF_ARM_BE8)
9087 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9092 /* Leave it as "auto". */
9097 /* Check any target description for validity. */
9098 if (tdesc_has_registers (tdesc))
9100 /* For most registers we require GDB's default names; but also allow
9101 the numeric names for sp / lr / pc, as a convenience. */
9102 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9103 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9104 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9106 const struct tdesc_feature *feature;
9109 feature = tdesc_find_feature (tdesc,
9110 "org.gnu.gdb.arm.core");
9111 if (feature == NULL)
9113 feature = tdesc_find_feature (tdesc,
9114 "org.gnu.gdb.arm.m-profile");
9115 if (feature == NULL)
9121 tdesc_data = tdesc_data_alloc ();
9124 for (i = 0; i < ARM_SP_REGNUM; i++)
9125 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9126 arm_register_names[i]);
9127 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9130 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9133 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9137 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9138 ARM_PS_REGNUM, "xpsr");
9140 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9141 ARM_PS_REGNUM, "cpsr");
9146 feature = tdesc_find_feature (tdesc,
9147 "org.gnu.gdb.arm.fpa");
9148 if (feature != NULL)
9151 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9152 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9153 arm_register_names[i]);
9158 have_fpa_registers = false;
9160 feature = tdesc_find_feature (tdesc,
9161 "org.gnu.gdb.xscale.iwmmxt");
9162 if (feature != NULL)
9164 static const char *const iwmmxt_names[] = {
9165 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9166 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9167 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9168 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9172 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9174 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9175 iwmmxt_names[i - ARM_WR0_REGNUM]);
9177 /* Check for the control registers, but do not fail if they are missing. */
9179 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9180 tdesc_numbered_register (feature, tdesc_data.get (), i,
9181 iwmmxt_names[i - ARM_WR0_REGNUM]);
9183 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9185 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9186 iwmmxt_names[i - ARM_WR0_REGNUM]);
9191 have_wmmx_registers = true;
9194 /* If we have a VFP unit, check whether the single precision registers
9195 are present. If not, then we will synthesize them as pseudo registers. */
9197 feature = tdesc_find_feature (tdesc,
9198 "org.gnu.gdb.arm.vfp");
9199 if (feature != NULL)
9201 static const char *const vfp_double_names[] = {
9202 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9203 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9204 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9205 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9208 /* Require the double precision registers. There must be either 16 or 32. */
9211 for (i = 0; i < 32; i++)
9213 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9215 vfp_double_names[i]);
9219 if (!valid_p && i == 16)
9222 /* Also require FPSCR. */
9223 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9224 ARM_FPSCR_REGNUM, "fpscr");
9228 if (tdesc_unnumbered_register (feature, "s0") == 0)
9229 have_vfp_pseudos = true;
9231 vfp_register_count = i;
9233 /* If we have VFP, also check for NEON. The architecture allows
9234 NEON without VFP (integer vector operations only), but GDB
9235 does not support that. */
9236 feature = tdesc_find_feature (tdesc,
9237 "org.gnu.gdb.arm.neon");
9238 if (feature != NULL)
9240 /* NEON requires 32 double-precision registers. */
9244 /* If there are quad registers defined by the stub, use
9245 their type; otherwise (normally) provide them with
9246 the default type. */
9247 if (tdesc_unnumbered_register (feature, "q0") == 0)
9248 have_neon_pseudos = true;
9255 /* If there is already a candidate, use it. */
9256 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9258 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9260 if (arm_abi != ARM_ABI_AUTO
9261 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9264 if (fp_model != ARM_FLOAT_AUTO
9265 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9268 /* There are various other properties in tdep that we do not
9269 need to check here: those derived from a target description,
9270 since gdbarches with a different target description are
9271 automatically disqualified. */
9273 /* Do check is_m, though, since it might come from the binary. */
9274 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9277 /* Found a match. */
9281 if (best_arch != NULL)
9282 return best_arch->gdbarch;
9284 tdep = XCNEW (struct gdbarch_tdep);
9285 gdbarch = gdbarch_alloc (&info, tdep);
9287 /* Record additional information about the architecture we are defining.
9288 These are gdbarch discriminators, like the OSABI. */
9289 tdep->arm_abi = arm_abi;
9290 tdep->fp_model = fp_model;
9292 tdep->have_fpa_registers = have_fpa_registers;
9293 tdep->have_wmmx_registers = have_wmmx_registers;
9294 gdb_assert (vfp_register_count == 0
9295 || vfp_register_count == 16
9296 || vfp_register_count == 32);
9297 tdep->vfp_register_count = vfp_register_count;
9298 tdep->have_vfp_pseudos = have_vfp_pseudos;
9299 tdep->have_neon_pseudos = have_neon_pseudos;
9300 tdep->have_neon = have_neon;
9302 arm_register_g_packet_guesses (gdbarch);
9305 switch (info.byte_order_for_code)
9307 case BFD_ENDIAN_BIG:
9308 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9309 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9310 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9311 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9315 case BFD_ENDIAN_LITTLE:
9316 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9317 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9318 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9319 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9324 internal_error (__FILE__, __LINE__,
9325 _("arm_gdbarch_init: bad byte order for float format"));
9328 /* On ARM targets char defaults to unsigned. */
9329 set_gdbarch_char_signed (gdbarch, 0);
9331 /* wchar_t is unsigned under the AAPCS. */
9332 if (tdep->arm_abi == ARM_ABI_AAPCS)
9333 set_gdbarch_wchar_signed (gdbarch, 0);
9335 set_gdbarch_wchar_signed (gdbarch, 1);
9337 /* Compute type alignment. */
9338 set_gdbarch_type_align (gdbarch, arm_type_align);
9340 /* Note: for displaced stepping, this includes the breakpoint, and one word
9341 of additional scratch space. This setting isn't used for anything beside
9342 displaced stepping at present. */
9343 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9345 /* This should be low enough for everything. */
9346 tdep->lowest_pc = 0x20;
9347 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9349 /* The default, for both APCS and AAPCS, is to return small
9350 structures in registers. */
9351 tdep->struct_return = reg_struct_return;
9353 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9354 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9357 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9359 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9361 frame_base_set_default (gdbarch, &arm_normal_base);
9363 /* Address manipulation. */
9364 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9366 /* Advance PC across function entry code. */
9367 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9369 /* Detect whether PC is at a point where the stack has been destroyed. */
9370 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9372 /* Skip trampolines. */
9373 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9375 /* The stack grows downward. */
9376 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9378 /* Breakpoint manipulation. */
9379 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9380 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9381 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9382 arm_breakpoint_kind_from_current_state);
9384 /* Information about registers, etc. */
9385 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9386 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9387 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9388 set_gdbarch_register_type (gdbarch, arm_register_type);
9389 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9391 /* This "info float" is FPA-specific. Use the generic version if we
9393 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9394 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9396 /* Internal <-> external register number maps. */
9397 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9398 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9400 set_gdbarch_register_name (gdbarch, arm_register_name);
9402 /* Returning results. */
9403 set_gdbarch_return_value (gdbarch, arm_return_value);
9406 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9408 /* Minsymbol frobbing. */
9409 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9410 set_gdbarch_coff_make_msymbol_special (gdbarch,
9411 arm_coff_make_msymbol_special);
9412 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9414 /* Thumb-2 IT block support. */
9415 set_gdbarch_adjust_breakpoint_address (gdbarch,
9416 arm_adjust_breakpoint_address);
9418 /* Virtual tables. */
9419 set_gdbarch_vbit_in_delta (gdbarch, 1);
9421 /* Hook in the ABI-specific overrides, if they have been registered. */
9422 gdbarch_init_osabi (info, gdbarch);
9424 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9426 /* Add some default predicates. */
9428 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9429 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9430 dwarf2_append_unwinders (gdbarch);
9431 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9432 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9433 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9435 /* Now we have tuned the configuration, set a few final things,
9436 based on what the OS ABI has told us. */
9438 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9439 binaries are always marked. */
9440 if (tdep->arm_abi == ARM_ABI_AUTO)
9441 tdep->arm_abi = ARM_ABI_APCS;
9443 /* Watchpoints are not steppable. */
9444 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9446 /* We used to default to FPA for generic ARM, but almost nobody
9447 uses that now, and we now provide a way for the user to force
9448 the model. So default to the most useful variant. */
9449 if (tdep->fp_model == ARM_FLOAT_AUTO)
9450 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9452 if (tdep->jb_pc >= 0)
9453 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9455 /* Floating point sizes and format. */
9456 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9457 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9459 set_gdbarch_double_format
9460 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9461 set_gdbarch_long_double_format
9462 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9466 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9467 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9470 if (have_vfp_pseudos)
9472 /* NOTE: These are the only pseudo registers used by
9473 the ARM target at the moment. If more are added, a
9474 little more care in numbering will be needed. */
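/* Sketch of the resulting numbering (assuming the register counts used in
this file): s0-s31 occupy pseudo numbers num_regs to num_regs + 31 and, when
NEON pseudos are present, q0-q15 occupy num_regs + 32 to num_regs + 47,
matching arm_register_name and arm_pseudo_read above. */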
9476 int num_pseudos = 32;
9477 if (have_neon_pseudos)
9479 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9480 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9481 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9484 if (tdesc_data != nullptr)
9486 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9488 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9490 /* Override tdesc_register_type to adjust the types of VFP
9491 registers for NEON. */
9492 set_gdbarch_register_type (gdbarch, arm_register_type);
9495 /* Add standard register aliases. We add aliases even for those
9496 names which are used by the current architecture - it's simpler,
9497 and does no harm, since nothing ever lists user registers. */
9498 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9499 user_reg_add (gdbarch, arm_register_aliases[i].name,
9500 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9502 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9503 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9505 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9511 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9513 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9518 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9519 (int) tdep->fp_model);
9520 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9521 (int) tdep->have_fpa_registers);
9522 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9523 (int) tdep->have_wmmx_registers);
9524 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9525 (int) tdep->vfp_register_count);
9526 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9527 (int) tdep->have_vfp_pseudos);
9528 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9529 (int) tdep->have_neon_pseudos);
9530 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9531 (int) tdep->have_neon);
9532 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9533 (unsigned long) tdep->lowest_pc);
9539 static void arm_record_test (void);
9540 static void arm_analyze_prologue_test ();
9544 void _initialize_arm_tdep ();
9546 _initialize_arm_tdep ()
9550 char regdesc[1024], *rdptr = regdesc;
9551 size_t rest = sizeof (regdesc);
9553 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9555 /* Add ourselves to objfile event chain. */
9556 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9558 /* Register an ELF OS ABI sniffer for ARM binaries. */
9559 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9560 bfd_target_elf_flavour,
9561 arm_elf_osabi_sniffer);
9563 /* Add root prefix command for all "set arm"/"show arm" commands. */
9564 add_basic_prefix_cmd ("arm", no_class,
9565 _("Various ARM-specific commands."),
9566 &setarmcmdlist, "set arm ", 0, &setlist);
9568 add_show_prefix_cmd ("arm", no_class,
9569 _("Various ARM-specific commands."),
9570 &showarmcmdlist, "show arm ", 0, &showlist);
9573 arm_disassembler_options = xstrdup ("reg-names-std");
9574 const disasm_options_t *disasm_options
9575 = &disassembler_options_arm ()->options;
9576 int num_disassembly_styles = 0;
9577 for (i = 0; disasm_options->name[i] != NULL; i++)
9578 if (startswith (disasm_options->name[i], "reg-names-"))
9579 num_disassembly_styles++;
9581 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9582 valid_disassembly_styles = XNEWVEC (const char *,
9583 num_disassembly_styles + 1);
9584 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9585 if (startswith (disasm_options->name[i], "reg-names-"))
9587 size_t offset = strlen ("reg-names-");
9588 const char *style = disasm_options->name[i];
9589 valid_disassembly_styles[j++] = &style[offset];
9590 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9591 disasm_options->description[i]);
9595 /* Mark the end of valid options. */
9596 valid_disassembly_styles[num_disassembly_styles] = NULL;
9598 /* Create the help text. */
9599 std::string helptext = string_printf ("%s%s%s",
9600 _("The valid values are:\n"),
9602 _("The default is \"std\"."));
9604 add_setshow_enum_cmd("disassembler", no_class,
9605 valid_disassembly_styles, &disassembly_style,
9606 _("Set the disassembly style."),
9607 _("Show the disassembly style."),
9609 set_disassembly_style_sfunc,
9610 show_disassembly_style_sfunc,
9611 &setarmcmdlist, &showarmcmdlist);
9613 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9614 _("Set usage of ARM 32-bit mode."),
9615 _("Show usage of ARM 32-bit mode."),
9616 _("When off, a 26-bit PC will be used."),
9618 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9620 &setarmcmdlist, &showarmcmdlist);
9622 /* Add a command to allow the user to force the FPU model. */
9623 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9624 _("Set the floating point type."),
9625 _("Show the floating point type."),
9626 _("auto - Determine the FP typefrom the OS-ABI.\n\
9627 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9628 fpa - FPA co-processor (GCC compiled).\n\
9629 softvfp - Software FP with pure-endian doubles.\n\
9630 vfp - VFP co-processor."),
9631 set_fp_model_sfunc, show_fp_model,
9632 &setarmcmdlist, &showarmcmdlist);
9634 /* Add a command to allow the user to force the ABI. */
9635 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9638 NULL, arm_set_abi, arm_show_abi,
9639 &setarmcmdlist, &showarmcmdlist);
9641 /* Add two commands to allow the user to force the assumed execution mode. */
9643 add_setshow_enum_cmd ("fallback-mode", class_support,
9644 arm_mode_strings, &arm_fallback_mode_string,
9645 _("Set the mode assumed when symbols are unavailable."),
9646 _("Show the mode assumed when symbols are unavailable."),
9647 NULL, NULL, arm_show_fallback_mode,
9648 &setarmcmdlist, &showarmcmdlist);
9649 add_setshow_enum_cmd ("force-mode", class_support,
9650 arm_mode_strings, &arm_force_mode_string,
9651 _("Set the mode assumed even when symbols are available."),
9652 _("Show the mode assumed even when symbols are available."),
9653 NULL, NULL, arm_show_force_mode,
9654 &setarmcmdlist, &showarmcmdlist);
9656 /* Debugging flag. */
9657 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9658 _("Set ARM debugging."),
9659 _("Show ARM debugging."),
9660 _("When on, arm-specific debugging is enabled."),
9662 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9663 &setdebuglist, &showdebuglist);
9666 selftests::register_test ("arm-record", selftests::arm_record_test);
9667 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
9672 /* ARM-reversible process record data structures. */
9674 #define ARM_INSN_SIZE_BYTES 4
9675 #define THUMB_INSN_SIZE_BYTES 2
9676 #define THUMB2_INSN_SIZE_BYTES 4
9679 /* Position of the bit within a 32-bit ARM instruction
9680 that defines whether the instruction is a load or store. */
9681 #define INSN_S_L_BIT_NUM 20
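/* For example, in the ARM single data transfer encodings used by LDR/STR,
bit 20 (the L bit) is 1 for a load and 0 for a store, so
bit (insn, INSN_S_L_BIT_NUM) distinguishes loads from stores. */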
9683 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9686 unsigned int reg_len = LENGTH; \
9689 REGS = XNEWVEC (uint32_t, reg_len); \
9690 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9695 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9698 unsigned int mem_len = LENGTH; \
9701 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9702 memcpy(&MEMS->len, &RECORD_BUF[0], \
9703 sizeof(struct arm_mem_r) * LENGTH); \
9708 /* Checks whether the insn is already recorded or yet to be decoded (a boolean expression). */
9709 #define INSN_RECORDED(ARM_RECORD) \
9710 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9712 /* ARM memory record structure. */
9715 uint32_t len; /* Record length. */
9716 uint32_t addr; /* Memory address. */
9719 /* An ARM instruction record contains the opcode and execution state
9720 of the current insn (before entry to decode_insn()), and the list of
9721 to-be-modified registers and memory blocks (on return from
9722 decode_insn()). */
9724 typedef struct insn_decode_record_t
9726 struct gdbarch *gdbarch;
9727 struct regcache *regcache;
9728 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9729 uint32_t arm_insn; /* Should accommodate thumb. */
9730 uint32_t cond; /* Condition code. */
9731 uint32_t opcode; /* Insn opcode. */
9732 uint32_t decode; /* Insn decode bits. */
9733 uint32_t mem_rec_count; /* Number of mem records. */
9734 uint32_t reg_rec_count; /* Number of reg records. */
9735 uint32_t *arm_regs; /* Registers to be saved for this record. */
9736 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9737 } insn_decode_record;
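/* Usage sketch (assumed flow, based on the macros above): a decoder routine
fills record_buf[] / record_buf_mem[], sets reg_rec_count and mem_rec_count,
and then invokes REG_ALLOC and MEM_ALLOC to copy those scratch buffers into
the record's arm_regs and arm_mems. */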
9740 /* Checks ARM SBZ and SBO mandatory fields. */
9743 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9745 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
9764 enum arm_record_result
9766 ARM_RECORD_SUCCESS = 0,
9767 ARM_RECORD_FAILURE = 1
9774 } arm_record_strx_t;
9785 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9786 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9789 struct regcache *reg_cache = arm_insn_r->regcache;
9790 ULONGEST u_regval[2] = {0};
9792 uint32_t reg_src1 = 0, reg_src2 = 0;
9793 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9795 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9796 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9798 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9800 /* 1) Handle misc store, immediate offset. */
9801 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9802 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9803 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9804 regcache_raw_read_unsigned (reg_cache, reg_src1,
9806 if (ARM_PC_REGNUM == reg_src1)
9808 /* If R15 was used as Rn, the value read is the current PC+8. */
9809 u_regval[0] = u_regval[0] + 8;
9811 offset_8 = (immed_high << 4) | immed_low;
9812 /* Calculate target store address. */
9813 if (14 == arm_insn_r->opcode)
9815 tgt_mem_addr = u_regval[0] + offset_8;
9819 tgt_mem_addr = u_regval[0] - offset_8;
9821 if (ARM_RECORD_STRH == str_type)
9823 record_buf_mem[0] = 2;
9824 record_buf_mem[1] = tgt_mem_addr;
9825 arm_insn_r->mem_rec_count = 1;
9827 else if (ARM_RECORD_STRD == str_type)
9829 record_buf_mem[0] = 4;
9830 record_buf_mem[1] = tgt_mem_addr;
9831 record_buf_mem[2] = 4;
9832 record_buf_mem[3] = tgt_mem_addr + 4;
9833 arm_insn_r->mem_rec_count = 2;
9836 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9838 /* 2) Store, register offset. */
9840 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9842 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9843 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9844 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9847 /* If R15 was used as Rn, the value read is the current PC+8. */
9848 u_regval[0] = u_regval[0] + 8;
9850 /* Calculate target store address, Rn +/- Rm, register offset. */
9851 if (12 == arm_insn_r->opcode)
9853 tgt_mem_addr = u_regval[0] + u_regval[1];
9857 tgt_mem_addr = u_regval[1] - u_regval[0];
9859 if (ARM_RECORD_STRH == str_type)
9861 record_buf_mem[0] = 2;
9862 record_buf_mem[1] = tgt_mem_addr;
9863 arm_insn_r->mem_rec_count = 1;
9865 else if (ARM_RECORD_STRD == str_type)
9867 record_buf_mem[0] = 4;
9868 record_buf_mem[1] = tgt_mem_addr;
9869 record_buf_mem[2] = 4;
9870 record_buf_mem[3] = tgt_mem_addr + 4;
9871 arm_insn_r->mem_rec_count = 2;
9874 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9875 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9877 /* 3) Store, immediate pre-indexed. */
9878 /* 5) Store, immediate post-indexed. */
9879 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9880 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9881 offset_8 = (immed_high << 4) | immed_low;
9882 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9883 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9884 /* Calculate target store address, Rn +/- immediate offset. */
9885 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9887 tgt_mem_addr = u_regval[0] + offset_8;
9891 tgt_mem_addr = u_regval[0] - offset_8;
9893 if (ARM_RECORD_STRH == str_type)
9895 record_buf_mem[0] = 2;
9896 record_buf_mem[1] = tgt_mem_addr;
9897 arm_insn_r->mem_rec_count = 1;
9899 else if (ARM_RECORD_STRD == str_type)
9901 record_buf_mem[0] = 4;
9902 record_buf_mem[1] = tgt_mem_addr;
9903 record_buf_mem[2] = 4;
9904 record_buf_mem[3] = tgt_mem_addr + 4;
9905 arm_insn_r->mem_rec_count = 2;
9907 /* Record Rn also as it changes. */
9908 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9909 arm_insn_r->reg_rec_count = 1;
9911 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9912 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9914 /* 4) Store, register pre-indexed. */
9915 /* 6) Store, register post-indexed. */
9916 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9917 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9918 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9919 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9920 /* Calculate target store address, Rn +/- Rm, register offset. */
9921 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9923 tgt_mem_addr = u_regval[0] + u_regval[1];
9927 tgt_mem_addr = u_regval[1] - u_regval[0];
9929 if (ARM_RECORD_STRH == str_type)
9931 record_buf_mem[0] = 2;
9932 record_buf_mem[1] = tgt_mem_addr;
9933 arm_insn_r->mem_rec_count = 1;
9935 else if (ARM_RECORD_STRD == str_type)
9937 record_buf_mem[0] = 4;
9938 record_buf_mem[1] = tgt_mem_addr;
9939 record_buf_mem[2] = 4;
9940 record_buf_mem[3] = tgt_mem_addr + 4;
9941 arm_insn_r->mem_rec_count = 2;
9943 /* Record Rn also as it changes. */
9944 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9945 arm_insn_r->reg_rec_count = 1;
9950 /* Handling ARM extension space insns. */
9953 arm_record_extension_space (insn_decode_record *arm_insn_r)
9955 int ret = 0; /* Return value: -1: record failure; 0: success. */
9956 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9957 uint32_t record_buf[8], record_buf_mem[8];
9958 uint32_t reg_src1 = 0;
9959 struct regcache *reg_cache = arm_insn_r->regcache;
9960 ULONGEST u_regval = 0;
9962 gdb_assert (!INSN_RECORDED(arm_insn_r));
9963 /* Handle unconditional insn extension space. */
9965 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9966 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9967 if (arm_insn_r->cond)
9969 /* PLD has no effect on the architectural state; it just affects the caches. */
9971 if (5 == ((opcode1 & 0xE0) >> 5))
9974 record_buf[0] = ARM_PS_REGNUM;
9975 record_buf[1] = ARM_LR_REGNUM;
9976 arm_insn_r->reg_rec_count = 2;
9978 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9982 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9983 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9986 /* Undefined instruction on ARM V5; need to handle if later
9987 versions define it. */
9990 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9991 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9992 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9994 /* Handle arithmetic insn extension space. */
9995 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9996 && !INSN_RECORDED(arm_insn_r))
9998 /* Handle MLA(S) and MUL(S). */
9999 if (in_inclusive_range (insn_op1, 0U, 3U))
10001 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10002 record_buf[1] = ARM_PS_REGNUM;
10003 arm_insn_r->reg_rec_count = 2;
10005 else if (in_inclusive_range (insn_op1, 4U, 15U))
10007 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10008 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10009 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10010 record_buf[2] = ARM_PS_REGNUM;
10011 arm_insn_r->reg_rec_count = 3;
10015 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10016 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10017 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10019 /* Handle control insn extension space. */
10021 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10022 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10024 if (!bit (arm_insn_r->arm_insn,25))
10026 if (!bits (arm_insn_r->arm_insn, 4, 7))
10028 if ((0 == insn_op1) || (2 == insn_op1))
10031 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10032 arm_insn_r->reg_rec_count = 1;
10034 else if (1 == insn_op1)
10036 /* CPSR is going to be changed. */
10037 record_buf[0] = ARM_PS_REGNUM;
10038 arm_insn_r->reg_rec_count = 1;
10040 else if (3 == insn_op1)
10042 /* SPSR is going to be changed. */
10043 /* We need to get SPSR value, which is yet to be done. */
10047 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10052 record_buf[0] = ARM_PS_REGNUM;
10053 arm_insn_r->reg_rec_count = 1;
10055 else if (3 == insn_op1)
10058 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10059 arm_insn_r->reg_rec_count = 1;
10062 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10065 record_buf[0] = ARM_PS_REGNUM;
10066 record_buf[1] = ARM_LR_REGNUM;
10067 arm_insn_r->reg_rec_count = 2;
10069 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10071 /* QADD, QSUB, QDADD, QDSUB */
10072 record_buf[0] = ARM_PS_REGNUM;
10073 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10074 arm_insn_r->reg_rec_count = 2;
10076 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10079 record_buf[0] = ARM_PS_REGNUM;
10080 record_buf[1] = ARM_LR_REGNUM;
10081 arm_insn_r->reg_rec_count = 2;
10083 /* Save SPSR also; how? */
10086 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
10087 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10088 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10089 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10092 if (0 == insn_op1 || 1 == insn_op1)
10094 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10095 /* We don't do optimization for SMULW<y> where we
10097 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10098 record_buf[1] = ARM_PS_REGNUM;
10099 arm_insn_r->reg_rec_count = 2;
10101 else if (2 == insn_op1)
10104 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10105 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10106 arm_insn_r->reg_rec_count = 2;
10108 else if (3 == insn_op1)
10111 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10112 arm_insn_r->reg_rec_count = 1;
10118 /* MSR : immediate form. */
10121 /* CPSR is going to be changed. */
10122 record_buf[0] = ARM_PS_REGNUM;
10123 arm_insn_r->reg_rec_count = 1;
10125 else if (3 == insn_op1)
10127 /* SPSR is going to be changed. */
10128 /* We need to get the SPSR value, which is yet to be done. */
10134 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10135 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10136 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10138 /* Handle load/store insn extension space. */
10140 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10141 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10142 && !INSN_RECORDED(arm_insn_r))
10147 /* These insns change registers and memory as well. */
10148 /* SWP or SWPB insn. */
10149 /* Get memory address given by Rn. */
10150 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10151 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10152 /* SWP insn? It swaps a word. */
10153 if (8 == arm_insn_r->opcode)
10155 record_buf_mem[0] = 4;
10159 /* SWPB insn, swaps only a byte. */
10160 record_buf_mem[0] = 1;
10162 record_buf_mem[1] = u_regval;
10163 arm_insn_r->mem_rec_count = 1;
10164 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10165 arm_insn_r->reg_rec_count = 1;
10167 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10170 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10173 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10176 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10177 record_buf[1] = record_buf[0] + 1;
10178 arm_insn_r->reg_rec_count = 2;
10180 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10183 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10186 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10188 /* LDRH, LDRSB, LDRSH. */
10189 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10190 arm_insn_r->reg_rec_count = 1;
10195 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10196 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10197 && !INSN_RECORDED(arm_insn_r))
10200 /* Handle coprocessor insn extension space. */
10203 /* To be done for ARMv5 and later; as of now we return -1. */
10207 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10208 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10213 /* Handling opcode 000 insns. */
10216 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10218 struct regcache *reg_cache = arm_insn_r->regcache;
10219 uint32_t record_buf[8], record_buf_mem[8];
10220 ULONGEST u_regval[2] = {0};
10222 uint32_t reg_src1 = 0;
10223 uint32_t opcode1 = 0;
10225 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10226 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10227 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10229 if (!((opcode1 & 0x19) == 0x10))
10231 /* Data-processing (register) and Data-processing (register-shifted register) */
10233 /* In all 11 shifter-operand modes, the insn modifies the destination
10234 register, which is specified by bits 12-15. */
10235 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10236 record_buf[1] = ARM_PS_REGNUM;
10237 arm_insn_r->reg_rec_count = 2;
10239 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10241 /* Miscellaneous instructions */
10243 if (3 == arm_insn_r->decode && 0x12 == opcode1
10244 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10246 /* Handle BLX, branch and link/exchange. */
10247 if (9 == arm_insn_r->opcode)
10249 /* The branch state is chosen by setting the T bit of the CPSR from
10250 bit[0] of Rm, and R14 stores the return address. */
10251 record_buf[0] = ARM_PS_REGNUM;
10252 record_buf[1] = ARM_LR_REGNUM;
10253 arm_insn_r->reg_rec_count = 2;
10256 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10258 /* Handle enhanced software breakpoint insn, BKPT. */
10259 /* The CPSR is changed so that execution continues in ARM state, with
10260 normal interrupts disabled, entering Abort mode. */
10261 /* The PC is set according to the high vector configuration. */
10262 /* If the user hits the breakpoint and then reverses execution, we
10263 need to go back with the previous CPSR and
10264 Program Counter. */
10265 record_buf[0] = ARM_PS_REGNUM;
10266 record_buf[1] = ARM_LR_REGNUM;
10267 arm_insn_r->reg_rec_count = 2;
10269 /* Save SPSR also; how? */
10272 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10273 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10275 /* Handle BX, branch and exchange. */
10276 /* The branch state is chosen by setting the T bit of the CPSR from bit[0] of Rm. */
10277 record_buf[0] = ARM_PS_REGNUM;
10278 arm_insn_r->reg_rec_count = 1;
10280 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10281 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10282 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10284 /* Count leading zeros: CLZ. */
10285 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10286 arm_insn_r->reg_rec_count = 1;
10288 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10289 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10290 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10291 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10293 /* Handle MRS insn. */
10294 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10295 arm_insn_r->reg_rec_count = 1;
10298 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10300 /* Multiply and multiply-accumulate */
10302 /* Handle multiply instructions. */
10303 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10304 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10306 /* Handle MLA and MUL. */
10307 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10308 record_buf[1] = ARM_PS_REGNUM;
10309 arm_insn_r->reg_rec_count = 2;
10311 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10313 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10314 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10315 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10316 record_buf[2] = ARM_PS_REGNUM;
10317 arm_insn_r->reg_rec_count = 3;
10320 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10322 /* Synchronization primitives */
10324 /* Handling SWP, SWPB. */
10325 /* These insns change registers and memory as well. */
10326 /* SWP or SWPB insn. */
10328 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10329 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10330 /* SWP insn? It swaps a word. */
10331 if (8 == arm_insn_r->opcode)
10333 record_buf_mem[0] = 4;
10337 /* SWPB insn, swaps only a byte. */
10338 record_buf_mem[0] = 1;
10340 record_buf_mem[1] = u_regval[0];
10341 arm_insn_r->mem_rec_count = 1;
10342 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10343 arm_insn_r->reg_rec_count = 1;
10345 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10346 || 15 == arm_insn_r->decode)
10348 if ((opcode1 & 0x12) == 2)
10350 /* Extra load/store (unprivileged) */
10355 /* Extra load/store */
10356 switch (bits (arm_insn_r->arm_insn, 5, 6))
10359 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10361 /* STRH (register), STRH (immediate) */
10362 arm_record_strx (arm_insn_r, &record_buf[0],
10363 &record_buf_mem[0], ARM_RECORD_STRH);
10365 else if ((opcode1 & 0x05) == 0x1)
10367 /* LDRH (register) */
10368 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10369 arm_insn_r->reg_rec_count = 1;
10371 if (bit (arm_insn_r->arm_insn, 21))
10373 /* Write back to Rn. */
10374 record_buf[arm_insn_r->reg_rec_count++]
10375 = bits (arm_insn_r->arm_insn, 16, 19);
10378 else if ((opcode1 & 0x05) == 0x5)
10380 /* LDRH (immediate), LDRH (literal) */
10381 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10383 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10384 arm_insn_r->reg_rec_count = 1;
10388 /* LDRH (immediate) */
10389 if (bit (arm_insn_r->arm_insn, 21))
10391 /* Write back to Rn. */
10392 record_buf[arm_insn_r->reg_rec_count++] = rn;
10400 if ((opcode1 & 0x05) == 0x0)
10402 /* LDRD (register) */
10403 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10404 record_buf[1] = record_buf[0] + 1;
10405 arm_insn_r->reg_rec_count = 2;
10407 if (bit (arm_insn_r->arm_insn, 21))
10409 /* Write back to Rn. */
10410 record_buf[arm_insn_r->reg_rec_count++]
10411 = bits (arm_insn_r->arm_insn, 16, 19);
10414 else if ((opcode1 & 0x05) == 0x1)
10416 /* LDRSB (register) */
10417 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10418 arm_insn_r->reg_rec_count = 1;
10420 if (bit (arm_insn_r->arm_insn, 21))
10422 /* Write back to Rn. */
10423 record_buf[arm_insn_r->reg_rec_count++]
10424 = bits (arm_insn_r->arm_insn, 16, 19);
10427 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10429 /* LDRD (immediate), LDRD (literal), LDRSB (immediate), LDRSB (literal). */
10431 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10433 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10434 arm_insn_r->reg_rec_count = 1;
10438 /* LDRD (immediate), LDRSB (immediate). */
10439 if (bit (arm_insn_r->arm_insn, 21))
10441 /* Write back to Rn. */
10442 record_buf[arm_insn_r->reg_rec_count++] = rn;
10450 if ((opcode1 & 0x05) == 0x0)
10452 /* STRD (register) */
10453 arm_record_strx (arm_insn_r, &record_buf[0],
10454 &record_buf_mem[0], ARM_RECORD_STRD);
10456 else if ((opcode1 & 0x05) == 0x1)
10458 /* LDRSH (register) */
10459 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10460 arm_insn_r->reg_rec_count = 1;
10462 if (bit (arm_insn_r->arm_insn, 21))
10464 /* Write back to Rn. */
10465 record_buf[arm_insn_r->reg_rec_count++]
10466 = bits (arm_insn_r->arm_insn, 16, 19);
10469 else if ((opcode1 & 0x05) == 0x4)
10471 /* STRD (immediate) */
10472 arm_record_strx (arm_insn_r, &record_buf[0],
10473 &record_buf_mem[0], ARM_RECORD_STRD);
10475 else if ((opcode1 & 0x05) == 0x5)
10477 /* LDRSH (immediate), LDRSH (literal) */
10478 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10479 arm_insn_r->reg_rec_count = 1;
10481 if (bit (arm_insn_r->arm_insn, 21))
10483 /* Write back to Rn. */
10484 record_buf[arm_insn_r->reg_rec_count++]
10485 = bits (arm_insn_r->arm_insn, 16, 19);
10501 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10502 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10506 /* Handling opcode 001 insns. */
10509 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10511 uint32_t record_buf[8], record_buf_mem[8];
10513 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10514 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10516 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10517 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10518 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10521 /* Handle MSR insn. */
10522 if (9 == arm_insn_r->opcode)
10524 /* CPSR is going to be changed. */
10525 record_buf[0] = ARM_PS_REGNUM;
10526 arm_insn_r->reg_rec_count = 1;
10530 /* SPSR is going to be changed. */
10533 else if (arm_insn_r->opcode <= 15)
10535 /* Normal data processing insns. */
10536 /* In all 11 shifter-operand modes, the insn modifies the destination
10537 register, which is specified by bits 12-15. */
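/* CPSR is recorded as well, since the S bit may cause the flags to be
updated. */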
10538 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10539 record_buf[1] = ARM_PS_REGNUM;
10540 arm_insn_r->reg_rec_count = 2;
10547 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10548 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10553 arm_record_media (insn_decode_record *arm_insn_r)
10555 uint32_t record_buf[8];
10557 switch (bits (arm_insn_r->arm_insn, 22, 24))
10560 /* Parallel addition and subtraction, signed */
10562 /* Parallel addition and subtraction, unsigned */
10565 /* Packing, unpacking, saturation and reversal */
10567 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10569 record_buf[arm_insn_r->reg_rec_count++] = rd;
10575 /* Signed multiplies */
10577 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10578 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10580 record_buf[arm_insn_r->reg_rec_count++] = rd;
10582 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10583 else if (op1 == 0x4)
10584 record_buf[arm_insn_r->reg_rec_count++]
10585 = bits (arm_insn_r->arm_insn, 12, 15);
10591 if (bit (arm_insn_r->arm_insn, 21)
10592 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10595 record_buf[arm_insn_r->reg_rec_count++]
10596 = bits (arm_insn_r->arm_insn, 12, 15);
10598 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10599 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10601 /* USAD8 and USADA8 */
10602 record_buf[arm_insn_r->reg_rec_count++]
10603 = bits (arm_insn_r->arm_insn, 16, 19);
10610 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10611 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10613 /* Permanently UNDEFINED */
10618 /* BFC, BFI and UBFX */
10619 record_buf[arm_insn_r->reg_rec_count++]
10620 = bits (arm_insn_r->arm_insn, 12, 15);
10629 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10634 /* Handle ARM mode instructions with opcode 010. */
10637 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10639 struct regcache *reg_cache = arm_insn_r->regcache;
10641 uint32_t reg_base , reg_dest;
10642 uint32_t offset_12, tgt_mem_addr;
10643 uint32_t record_buf[8], record_buf_mem[8];
10644 unsigned char wback;
10647 /* Calculate wback. */
10648 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10649 || (bit (arm_insn_r->arm_insn, 21) == 1);
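/* The base register is written back when P == 0 (post-indexed) or
W == 1 (pre-indexed with writeback); bit 24 is P and bit 21 is W. */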
10651 arm_insn_r->reg_rec_count = 0;
10652 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10654 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10656 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT and LDRT. */
10659 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10660 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10662 /* The LDR instruction is capable of branching: if it loads R15 (the PC),
10663 possibly preceded by MOV LR, PC, it emulates a branch and link
10664 instruction, and hence we need to save CPSR and PC as well. */
10666 if (ARM_PC_REGNUM == reg_dest)
10667 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10669 /* If wback is true, also save the base register, which is going to be written back. */
if (wback)
10672 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10676 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10678 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10679 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10681 /* Handle bit U. */
10682 if (bit (arm_insn_r->arm_insn, 23))
10684 /* U == 1: Add the offset. */
10685 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10689 /* U == 0: subtract the offset. */
10690 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10693 /* Bit 22 tells us whether the store instruction writes 1 byte or 4 bytes. */
10695 if (bit (arm_insn_r->arm_insn, 22))
10697 /* STRB and STRBT: 1 byte. */
10698 record_buf_mem[0] = 1;
10702 /* STR and STRT: 4 bytes. */
10703 record_buf_mem[0] = 4;
10706 /* Handle bit P. */
10707 if (bit (arm_insn_r->arm_insn, 24))
10708 record_buf_mem[1] = tgt_mem_addr;
10710 record_buf_mem[1] = (uint32_t) u_regval;
10712 arm_insn_r->mem_rec_count = 1;
10714 /* If wback is true, also save the base register, which is going to be written back. */
if (wback)
10717 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
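/* For example, "str r1, [r2, #-8]!" records the 4 bytes at (r2 - 8) and,
because of the writeback, the old value of r2. */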
10720 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10721 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10725 /* Handling opcode 011 insns. */
10728 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10730 struct regcache *reg_cache = arm_insn_r->regcache;
10732 uint32_t shift_imm = 0;
10733 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10734 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10735 uint32_t record_buf[8], record_buf_mem[8];
10738 ULONGEST u_regval[2];
10740 if (bit (arm_insn_r->arm_insn, 4))
10741 return arm_record_media (arm_insn_r);
10743 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10744 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10746 /* Handle enhanced store insns and the LDRD DSP insn; the ordering below
10747 follows the addressing modes of the store insns. */
10751 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10753 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10754 /* The LDR insn can branch: if it loads R15 (the PC), possibly preceded
10755 by MOV LR, PC, it emulates a branch and link insn, and hence we
10756 need to save CPSR and PC as well. */
10758 if (15 != reg_dest)
10760 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10761 arm_insn_r->reg_rec_count = 1;
10765 record_buf[0] = reg_dest;
10766 record_buf[1] = ARM_PS_REGNUM;
10767 arm_insn_r->reg_rec_count = 2;
10772 if (! bits (arm_insn_r->arm_insn, 4, 11))
10774 /* Store insn, register offset and register pre-indexed,
10775 register post-indexed. */
10777 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10779 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10780 regcache_raw_read_unsigned (reg_cache, reg_src1
10782 regcache_raw_read_unsigned (reg_cache, reg_src2
10784 if (15 == reg_src2)
10786 /* If R15 is used as Rn, its value is the current PC + 8. */
10787 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10788 u_regval[0] = u_regval[0] + 8;
10790 /* Calculate target store address, Rn +/- Rm, register offset. */
10792 if (bit (arm_insn_r->arm_insn, 23))
10794 tgt_mem_addr = u_regval[0] + u_regval[1];
10798 tgt_mem_addr = u_regval[1] - u_regval[0];
10801 switch (arm_insn_r->opcode)
10815 record_buf_mem[0] = 4;
10830 record_buf_mem[0] = 1;
10834 gdb_assert_not_reached ("no decoding pattern found");
10837 record_buf_mem[1] = tgt_mem_addr;
10838 arm_insn_r->mem_rec_count = 1;
10840 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10841 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10842 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10843 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10844 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10845 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10848 /* Rn is going to be changed in pre-indexed mode and
10849 post-indexed mode as well. */
10850 record_buf[0] = reg_src2;
10851 arm_insn_r->reg_rec_count = 1;
10856 /* Store insn, scaled register offset; scaled pre-indexed. */
10857 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10859 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10861 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10862 /* Get shift_imm. */
10863 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
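/* Bits 5-6 select the shift type (LSL, LSR, ASR, ROR/RRX) and bits 7-11 the
immediate shift amount applied to Rm. */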
10864 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10865 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10866 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10867 /* Offset_12 used as shift. */
10871 /* Offset_12 used as index. */
10872 offset_12 = u_regval[0] << shift_imm;
10876 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10882 if (bit (u_regval[0], 31))
10884 offset_12 = 0xFFFFFFFF;
10893 /* This is an arithmetic shift. */
10894 offset_12 = s_word >> shift_imm;
10901 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10903 /* Get C flag value and shift it by 31. */
10904 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10905 | (u_regval[0]) >> 1);
10909 /* ROR: rotate right by shift_imm within 32 bits. */
offset_12 = (u_regval[0] >> shift_imm)
10911 | (u_regval[0] << (32 - shift_imm));
10916 gdb_assert_not_reached ("no decoding pattern found");
10920 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10922 if (bit (arm_insn_r->arm_insn, 23))
10924 tgt_mem_addr = u_regval[1] + offset_12;
10928 tgt_mem_addr = u_regval[1] - offset_12;
10931 switch (arm_insn_r->opcode)
10945 record_buf_mem[0] = 4;
10960 record_buf_mem[0] = 1;
10964 gdb_assert_not_reached ("no decoding pattern found");
10967 record_buf_mem[1] = tgt_mem_addr;
10968 arm_insn_r->mem_rec_count = 1;
10970 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10971 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10972 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10973 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10974 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10975 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10978 /* Rn is going to be changed in register scaled pre-indexed
10979 mode, and in scaled post-indexed mode. */
10980 record_buf[0] = reg_src2;
10981 arm_insn_r->reg_rec_count = 1;
10986 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10987 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10991 /* Handle ARM mode instructions with opcode 100. */
10994 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10996 struct regcache *reg_cache = arm_insn_r->regcache;
10997 uint32_t register_count = 0, register_bits;
10998 uint32_t reg_base, addr_mode;
10999 uint32_t record_buf[24], record_buf_mem[48];
11003 /* Fetch the list of registers. */
11004 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11005 arm_insn_r->reg_rec_count = 0;
11007 /* Fetch the base register that contains the address we are loading data from or storing data to. */
11009 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11011 /* Calculate wback. */
11012 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11014 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11016 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11018 /* Find out which registers are going to be loaded from memory. */
11019 while (register_bits)
11021 if (register_bits & 0x00000001)
11022 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11023 register_bits = register_bits >> 1;
11028 /* If wback is true, also save the base register, which is going to be written back. */
if (wback)
11031 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11033 /* Save the CPSR register. */
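/* CPSR is recorded conservatively here: an LDM that includes the PC in its
register list (the exception-return form) may also restore CPSR. */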
11034 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11038 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11040 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11042 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11044 /* Find out how many registers are going to be stored to memory. */
11045 while (register_bits)
11047 if (register_bits & 0x00000001)
11049 register_bits = register_bits >> 1;
11054 /* STMDA (STMED): Decrement after. */
11056 record_buf_mem[1] = (uint32_t) u_regval
11057 - register_count * ARM_INT_REGISTER_SIZE + 4;
11059 /* STM (STMIA, STMEA): Increment after. */
11061 record_buf_mem[1] = (uint32_t) u_regval;
11063 /* STMDB (STMFD): Decrement before. */
11065 record_buf_mem[1] = (uint32_t) u_regval
11066 - register_count * ARM_INT_REGISTER_SIZE;
11068 /* STMIB (STMFA): Increment before. */
11070 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11073 gdb_assert_not_reached ("no decoding pattern found");
11077 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11078 arm_insn_r->mem_rec_count = 1;
11080 /* If wback is true, also save the base register, which is going to be written back. */
if (wback)
11083 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
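/* For example, "stmdb sp!, {r4-r7, lr}" stores five registers, so 20 bytes
starting at (sp - 20) are recorded, plus SP itself because of the
writeback. */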
11086 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11087 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11091 /* Handling opcode 101 insns. */
11094 arm_record_b_bl (insn_decode_record *arm_insn_r)
11096 uint32_t record_buf[8];
11098 /* Handle B, BL, BLX(1) insns. */
11099 /* B simply branches so we do nothing here. */
11100 /* Note: BLX(1) doesn't fall here but instead falls into the
11101 extension space. */
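/* Bit 24 is the L (link) bit: only BL overwrites LR, while a plain B changes
just the PC, which process_record already saves for every instruction. */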
11102 if (bit (arm_insn_r->arm_insn, 24))
11104 record_buf[0] = ARM_LR_REGNUM;
11105 arm_insn_r->reg_rec_count = 1;
11108 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11114 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11116 printf_unfiltered (_("Process record does not support instruction "
11117 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11118 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11123 /* Record handler for vector data transfer instructions. */
11126 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11128 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11129 uint32_t record_buf[4];
11131 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11132 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11133 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11134 bit_l = bit (arm_insn_r->arm_insn, 20);
11135 bit_c = bit (arm_insn_r->arm_insn, 8);
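/* Bits 21-23 (A), bit 20 (L, transfer to an ARM core register) and bit 8 (C)
distinguish the VMOV, VMRS, VMSR and VDUP forms handled below. */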
11137 /* Handle VMOV instruction. */
11138 if (bit_l && bit_c)
11140 record_buf[0] = reg_t;
11141 arm_insn_r->reg_rec_count = 1;
11143 else if (bit_l && !bit_c)
11145 /* Handle VMOV instruction. */
11146 if (bits_a == 0x00)
11148 record_buf[0] = reg_t;
11149 arm_insn_r->reg_rec_count = 1;
11151 /* Handle VMRS instruction. */
11152 else if (bits_a == 0x07)
11155 reg_t = ARM_PS_REGNUM;
11157 record_buf[0] = reg_t;
11158 arm_insn_r->reg_rec_count = 1;
11161 else if (!bit_l && !bit_c)
11163 /* Handle VMOV instruction. */
11164 if (bits_a == 0x00)
11166 record_buf[0] = ARM_D0_REGNUM + reg_v;
11168 arm_insn_r->reg_rec_count = 1;
11170 /* Handle VMSR instruction. */
11171 else if (bits_a == 0x07)
11173 record_buf[0] = ARM_FPSCR_REGNUM;
11174 arm_insn_r->reg_rec_count = 1;
11177 else if (!bit_l && bit_c)
11179 /* Handle VMOV instruction. */
11180 if (!(bits_a & 0x04))
11182 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11184 arm_insn_r->reg_rec_count = 1;
11186 /* Handle VDUP instruction. */
11189 if (bit (arm_insn_r->arm_insn, 21))
11191 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11192 record_buf[0] = reg_v + ARM_D0_REGNUM;
11193 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11194 arm_insn_r->reg_rec_count = 2;
11198 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11199 record_buf[0] = reg_v + ARM_D0_REGNUM;
11200 arm_insn_r->reg_rec_count = 1;
11205 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11209 /* Record handler for extension register load/store instructions. */
11212 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11214 uint32_t opcode, single_reg;
11215 uint8_t op_vldm_vstm;
11216 uint32_t record_buf[8], record_buf_mem[128];
11217 ULONGEST u_regval = 0;
11219 struct regcache *reg_cache = arm_insn_r->regcache;
11221 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11222 single_reg = !bit (arm_insn_r->arm_insn, 8);
11223 op_vldm_vstm = opcode & 0x1b;
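/* Bit 8 distinguishes single-precision (S register) from double-precision
(D register) transfers; S register accesses are recorded below in terms of
the D registers that contain them. */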
11225 /* Handle VMOV instructions. */
11226 if ((opcode & 0x1e) == 0x04)
11228 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11230 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11231 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11232 arm_insn_r->reg_rec_count = 2;
11236 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11237 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11241 /* The first S register number m is REG_M:M (M is bit 5),
11242 the corresponding D register number is REG_M:M / 2, which
11244 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11245 /* The second S register number is REG_M:M + 1, the
11246 corresponding D register number is (REG_M:M + 1) / 2.
11247 IOW, if bit M is 1, the first and second S registers
11248 are mapped to different D registers, otherwise, they are
11249 in the same D register. */
11252 record_buf[arm_insn_r->reg_rec_count++]
11253 = ARM_D0_REGNUM + reg_m + 1;
11258 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11259 arm_insn_r->reg_rec_count = 1;
11263 /* Handle VSTM and VPUSH instructions. */
11264 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11265 || op_vldm_vstm == 0x12)
11267 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11268 uint32_t memory_index = 0;
11270 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11271 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11272 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11273 imm_off32 = imm_off8 << 2;
11274 memory_count = imm_off8;
11276 if (bit (arm_insn_r->arm_insn, 23))
11277 start_address = u_regval;
11279 start_address = u_regval - imm_off32;
11281 if (bit (arm_insn_r->arm_insn, 21))
11283 record_buf[0] = reg_rn;
11284 arm_insn_r->reg_rec_count = 1;
11287 while (memory_count > 0)
11291 record_buf_mem[memory_index] = 4;
11292 record_buf_mem[memory_index + 1] = start_address;
11293 start_address = start_address + 4;
11294 memory_index = memory_index + 2;
11298 record_buf_mem[memory_index] = 4;
11299 record_buf_mem[memory_index + 1] = start_address;
11300 record_buf_mem[memory_index + 2] = 4;
11301 record_buf_mem[memory_index + 3] = start_address + 4;
11302 start_address = start_address + 8;
11303 memory_index = memory_index + 4;
11307 arm_insn_r->mem_rec_count = (memory_index >> 1);
11309 /* Handle VLDM instructions. */
11310 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11311 || op_vldm_vstm == 0x13)
11313 uint32_t reg_count, reg_vd;
11314 uint32_t reg_index = 0;
11315 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11317 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11318 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11320 /* REG_VD is the first D register number. If the instruction
11321 loads memory to S registers (SINGLE_REG is TRUE), the register
11322 number is (REG_VD << 1 | bit D), so the corresponding D
11323 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
if (!single_reg)
11325 reg_vd = reg_vd | (bit_d << 4);
11327 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11328 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11330 /* If the instruction loads memory into D registers, REG_COUNT should
11331 be divided by 2, according to the ARM Architecture Reference
11332 Manual. If the instruction loads memory into S registers, divide by
11333 2 as well, because two S registers map to one D register. */
11334 reg_count = reg_count / 2;
11335 if (single_reg && bit_d)
11337 /* Increase the register count if S register list starts from
11338 an odd number (bit d is one). */
11342 while (reg_count > 0)
11344 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11347 arm_insn_r->reg_rec_count = reg_index;
11349 /* VSTR Vector store register. */
11350 else if ((opcode & 0x13) == 0x10)
11352 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11353 uint32_t memory_index = 0;
11355 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11356 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11357 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11358 imm_off32 = imm_off8 << 2;
11360 if (bit (arm_insn_r->arm_insn, 23))
11361 start_address = u_regval + imm_off32;
11363 start_address = u_regval - imm_off32;
11367 record_buf_mem[memory_index] = 4;
11368 record_buf_mem[memory_index + 1] = start_address;
11369 arm_insn_r->mem_rec_count = 1;
11373 record_buf_mem[memory_index] = 4;
11374 record_buf_mem[memory_index + 1] = start_address;
11375 record_buf_mem[memory_index + 2] = 4;
11376 record_buf_mem[memory_index + 3] = start_address + 4;
11377 arm_insn_r->mem_rec_count = 2;
11380 /* VLDR Vector load register. */
11381 else if ((opcode & 0x13) == 0x11)
11383 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11387 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11388 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11392 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11393 /* Record register D rather than pseudo register S. */
11394 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11396 arm_insn_r->reg_rec_count = 1;
11399 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11400 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11404 /* Record handler for arm/thumb mode VFP data processing instructions. */
11407 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11409 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11410 uint32_t record_buf[4];
11411 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11412 enum insn_types curr_insn_type = INSN_INV;
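/* INSN_T0 records a pair of D registers, INSN_T1 a single D register,
INSN_T2 a single-precision destination and INSN_T3 only FPSCR (used for
the compare insns VCMP/VCMPE). */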
11414 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11415 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11416 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11417 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11418 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11419 bit_d = bit (arm_insn_r->arm_insn, 22);
11420 /* Mask off the "D" bit. */
11421 opc1 = opc1 & ~0x04;
11423 /* Handle VMLA, VMLS. */
11426 if (bit (arm_insn_r->arm_insn, 10))
11428 if (bit (arm_insn_r->arm_insn, 6))
11429 curr_insn_type = INSN_T0;
11431 curr_insn_type = INSN_T1;
11436 curr_insn_type = INSN_T1;
11438 curr_insn_type = INSN_T2;
11441 /* Handle VNMLA, VNMLS, VNMUL. */
11442 else if (opc1 == 0x01)
11445 curr_insn_type = INSN_T1;
11447 curr_insn_type = INSN_T2;
11450 else if (opc1 == 0x02 && !(opc3 & 0x01))
11452 if (bit (arm_insn_r->arm_insn, 10))
11454 if (bit (arm_insn_r->arm_insn, 6))
11455 curr_insn_type = INSN_T0;
11457 curr_insn_type = INSN_T1;
11462 curr_insn_type = INSN_T1;
11464 curr_insn_type = INSN_T2;
11467 /* Handle VADD, VSUB. */
11468 else if (opc1 == 0x03)
11470 if (!bit (arm_insn_r->arm_insn, 9))
11472 if (bit (arm_insn_r->arm_insn, 6))
11473 curr_insn_type = INSN_T0;
11475 curr_insn_type = INSN_T1;
11480 curr_insn_type = INSN_T1;
11482 curr_insn_type = INSN_T2;
11486 else if (opc1 == 0x08)
11489 curr_insn_type = INSN_T1;
11491 curr_insn_type = INSN_T2;
11493 /* Handle all other vfp data processing instructions. */
11494 else if (opc1 == 0x0b)
11497 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11499 if (bit (arm_insn_r->arm_insn, 4))
11501 if (bit (arm_insn_r->arm_insn, 6))
11502 curr_insn_type = INSN_T0;
11504 curr_insn_type = INSN_T1;
11509 curr_insn_type = INSN_T1;
11511 curr_insn_type = INSN_T2;
11514 /* Handle VNEG and VABS. */
11515 else if ((opc2 == 0x01 && opc3 == 0x01)
11516 || (opc2 == 0x00 && opc3 == 0x03))
11518 if (!bit (arm_insn_r->arm_insn, 11))
11520 if (bit (arm_insn_r->arm_insn, 6))
11521 curr_insn_type = INSN_T0;
11523 curr_insn_type = INSN_T1;
11528 curr_insn_type = INSN_T1;
11530 curr_insn_type = INSN_T2;
11533 /* Handle VSQRT. */
11534 else if (opc2 == 0x01 && opc3 == 0x03)
11537 curr_insn_type = INSN_T1;
11539 curr_insn_type = INSN_T2;
11542 else if (opc2 == 0x07 && opc3 == 0x03)
11545 curr_insn_type = INSN_T1;
11547 curr_insn_type = INSN_T2;
11549 else if (opc3 & 0x01)
11552 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11554 if (!bit (arm_insn_r->arm_insn, 18))
11555 curr_insn_type = INSN_T2;
11559 curr_insn_type = INSN_T1;
11561 curr_insn_type = INSN_T2;
11565 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11568 curr_insn_type = INSN_T1;
11570 curr_insn_type = INSN_T2;
11572 /* Handle VCVTB, VCVTT. */
11573 else if ((opc2 & 0x0e) == 0x02)
11574 curr_insn_type = INSN_T2;
11575 /* Handle VCMP, VCMPE. */
11576 else if ((opc2 & 0x0e) == 0x04)
11577 curr_insn_type = INSN_T3;
11581 switch (curr_insn_type)
11584 reg_vd = reg_vd | (bit_d << 4);
11585 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11586 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11587 arm_insn_r->reg_rec_count = 2;
11591 reg_vd = reg_vd | (bit_d << 4);
11592 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11593 arm_insn_r->reg_rec_count = 1;
11597 reg_vd = (reg_vd << 1) | bit_d;
11598 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11599 arm_insn_r->reg_rec_count = 1;
11603 record_buf[0] = ARM_FPSCR_REGNUM;
11604 arm_insn_r->reg_rec_count = 1;
11608 gdb_assert_not_reached ("no decoding pattern found");
11612 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11616 /* Handling opcode 110 insns. */
11619 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11621 uint32_t op1, op1_ebit, coproc;
11623 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11624 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11625 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11627 if ((coproc & 0x0e) == 0x0a)
11629 /* Handle extension register ld/st instructions. */
11631 return arm_record_exreg_ld_st_insn (arm_insn_r);
11633 /* 64-bit transfers between arm core and extension registers. */
11634 if ((op1 & 0x3e) == 0x04)
11635 return arm_record_exreg_ld_st_insn (arm_insn_r);
11639 /* Handle coprocessor ld/st instructions. */
11644 return arm_record_unsupported_insn (arm_insn_r);
11647 return arm_record_unsupported_insn (arm_insn_r);
11650 /* Move to coprocessor from two arm core registers. */
11652 return arm_record_unsupported_insn (arm_insn_r);
11654 /* Move to two arm core registers from coprocessor. */
11659 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11660 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11661 arm_insn_r->reg_rec_count = 2;
11663 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11667 return arm_record_unsupported_insn (arm_insn_r);
11670 /* Handling opcode 111 insns. */
11673 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11675 uint32_t op, op1_ebit, coproc, bits_24_25;
11676 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11677 struct regcache *reg_cache = arm_insn_r->regcache;
11679 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11680 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11681 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11682 op = bit (arm_insn_r->arm_insn, 4);
11683 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11685 /* Handle arm SWI/SVC system call instructions. */
11686 if (bits_24_25 == 0x3)
11688 if (tdep->arm_syscall_record != NULL)
11690 ULONGEST svc_operand, svc_number;
11692 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
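/* With the old ABI (OABI) the syscall number is encoded in the SVC
immediate, biased by 0x900000; with EABI the immediate is zero and the
number is passed in r7. */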
11694 if (svc_operand) /* OABI. */
11695 svc_number = svc_operand - 0x900000;
11697 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11699 return tdep->arm_syscall_record (reg_cache, svc_number);
11703 printf_unfiltered (_("no syscall record support\n"));
11707 else if (bits_24_25 == 0x02)
11711 if ((coproc & 0x0e) == 0x0a)
11713 /* 8, 16, and 32-bit transfer */
11714 return arm_record_vdata_transfer_insn (arm_insn_r);
11721 uint32_t record_buf[1];
11723 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11724 if (record_buf[0] == 15)
11725 record_buf[0] = ARM_PS_REGNUM;
11727 arm_insn_r->reg_rec_count = 1;
11728 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11741 if ((coproc & 0x0e) == 0x0a)
11743 /* VFP data-processing instructions. */
11744 return arm_record_vfp_data_proc_insn (arm_insn_r);
11755 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11759 if ((coproc & 0x0e) != 0x0a)
11765 else if (op1 == 4 || op1 == 5)
11767 if ((coproc & 0x0e) == 0x0a)
11769 /* 64-bit transfers between ARM core and extension registers. */
11778 else if (op1 == 0 || op1 == 1)
11785 if ((coproc & 0x0e) == 0x0a)
11787 /* Extension register load/store */
11791 /* STC, STC2, LDC, LDC2 */
11800 /* Handling opcode 000 insns. */
11803 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11805 uint32_t record_buf[8];
11806 uint32_t reg_src1 = 0;
11808 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11810 record_buf[0] = ARM_PS_REGNUM;
11811 record_buf[1] = reg_src1;
11812 thumb_insn_r->reg_rec_count = 2;
11814 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11820 /* Handling opcode 001 insns. */
11823 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11825 uint32_t record_buf[8];
11826 uint32_t reg_src1 = 0;
11828 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11830 record_buf[0] = ARM_PS_REGNUM;
11831 record_buf[1] = reg_src1;
11832 thumb_insn_r->reg_rec_count = 2;
11834 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11839 /* Handling opcode 010 insns. */
11842 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11844 struct regcache *reg_cache = thumb_insn_r->regcache;
11845 uint32_t record_buf[8], record_buf_mem[8];
11847 uint32_t reg_src1 = 0, reg_src2 = 0;
11848 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11850 ULONGEST u_regval[2] = {0};
11852 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11854 if (bit (thumb_insn_r->arm_insn, 12))
11856 /* Handle load/store register offset. */
11857 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11859 if (in_inclusive_range (opB, 4U, 7U))
11861 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11862 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
11863 record_buf[0] = reg_src1;
11864 thumb_insn_r->reg_rec_count = 1;
11866 else if (in_inclusive_range (opB, 0U, 2U))
11868 /* STR(2), STRB(2), STRH(2) . */
11869 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11870 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11871 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11872 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11874 record_buf_mem[0] = 4; /* STR (2). */
11876 record_buf_mem[0] = 1; /* STRB (2). */
11878 record_buf_mem[0] = 2; /* STRH (2). */
11879 record_buf_mem[1] = u_regval[0] + u_regval[1];
11880 thumb_insn_r->mem_rec_count = 1;
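/* For example, "strh r1, [r2, r3]" records 2 bytes at the address r2 + r3. */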
11883 else if (bit (thumb_insn_r->arm_insn, 11))
11885 /* Handle load from literal pool. */
11887 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11888 record_buf[0] = reg_src1;
11889 thumb_insn_r->reg_rec_count = 1;
11893 /* Special data instructions and branch and exchange */
11894 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11895 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11896 if ((3 == opcode2) && (!opcode3))
11898 /* Branch with exchange. */
11899 record_buf[0] = ARM_PS_REGNUM;
11900 thumb_insn_r->reg_rec_count = 1;
11904 /* Format 8; special data processing insns. */
11905 record_buf[0] = ARM_PS_REGNUM;
11906 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11907 | bits (thumb_insn_r->arm_insn, 0, 2));
11908 thumb_insn_r->reg_rec_count = 2;
11913 /* Format 5; data processing insns. */
11914 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11915 if (bit (thumb_insn_r->arm_insn, 7))
11917 reg_src1 = reg_src1 + 8;
11919 record_buf[0] = ARM_PS_REGNUM;
11920 record_buf[1] = reg_src1;
11921 thumb_insn_r->reg_rec_count = 2;
11924 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11925 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11931 /* Handling opcode 001 insns. */
11934 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11936 struct regcache *reg_cache = thumb_insn_r->regcache;
11937 uint32_t record_buf[8], record_buf_mem[8];
11939 uint32_t reg_src1 = 0;
11940 uint32_t opcode = 0, immed_5 = 0;
11942 ULONGEST u_regval = 0;
11944 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11949 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11950 record_buf[0] = reg_src1;
11951 thumb_insn_r->reg_rec_count = 1;
11956 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11957 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11958 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11959 record_buf_mem[0] = 4;
11960 record_buf_mem[1] = u_regval + (immed_5 * 4);
11961 thumb_insn_r->mem_rec_count = 1;
11964 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11965 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11971 /* Handling opcode 100 insns. */
11974 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11976 struct regcache *reg_cache = thumb_insn_r->regcache;
11977 uint32_t record_buf[8], record_buf_mem[8];
11979 uint32_t reg_src1 = 0;
11980 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11982 ULONGEST u_regval = 0;
11984 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11989 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11990 record_buf[0] = reg_src1;
11991 thumb_insn_r->reg_rec_count = 1;
11993 else if (1 == opcode)
11996 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11997 record_buf[0] = reg_src1;
11998 thumb_insn_r->reg_rec_count = 1;
12000 else if (2 == opcode)
12003 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12004 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12005 record_buf_mem[0] = 4;
12006 record_buf_mem[1] = u_regval + (immed_8 * 4);
12007 thumb_insn_r->mem_rec_count = 1;
12009 else if (0 == opcode)
12012 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12013 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12014 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12015 record_buf_mem[0] = 2;
12016 record_buf_mem[1] = u_regval + (immed_5 * 2);
12017 thumb_insn_r->mem_rec_count = 1;
12020 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12021 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12027 /* Handling opcode 101 insns. */
12030 thumb_record_misc (insn_decode_record *thumb_insn_r)
12032 struct regcache *reg_cache = thumb_insn_r->regcache;
12034 uint32_t opcode = 0;
12035 uint32_t register_bits = 0, register_count = 0;
12036 uint32_t index = 0, start_address = 0;
12037 uint32_t record_buf[24], record_buf_mem[48];
12040 ULONGEST u_regval = 0;
12042 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12044 if (opcode == 0 || opcode == 1)
12046 /* ADR and ADD (SP plus immediate) */
12048 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12049 record_buf[0] = reg_src1;
12050 thumb_insn_r->reg_rec_count = 1;
12054 /* Miscellaneous 16-bit instructions */
12055 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12060 /* SETEND and CPS */
12063 /* ADD/SUB (SP plus immediate) */
12064 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12065 record_buf[0] = ARM_SP_REGNUM;
12066 thumb_insn_r->reg_rec_count = 1;
12068 case 1: /* fall through */
12069 case 3: /* fall through */
12070 case 9: /* fall through */
12075 /* SXTH, SXTB, UXTH, UXTB */
12076 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12077 thumb_insn_r->reg_rec_count = 1;
12079 case 4: /* fall through */
12082 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12083 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12084 while (register_bits)
12086 if (register_bits & 0x00000001)
12088 register_bits = register_bits >> 1;
12090 start_address = u_regval - \
12091 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12092 thumb_insn_r->mem_rec_count = register_count;
12093 while (register_count)
12095 record_buf_mem[(register_count * 2) - 1] = start_address;
12096 record_buf_mem[(register_count * 2) - 2] = 4;
12097 start_address = start_address + 4;
12100 record_buf[0] = ARM_SP_REGNUM;
12101 thumb_insn_r->reg_rec_count = 1;
12104 /* REV, REV16, REVSH */
12105 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12106 thumb_insn_r->reg_rec_count = 1;
12108 case 12: /* fall through */
12111 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12112 while (register_bits)
12114 if (register_bits & 0x00000001)
12115 record_buf[index++] = register_count;
12116 register_bits = register_bits >> 1;
12119 record_buf[index++] = ARM_PS_REGNUM;
12120 record_buf[index++] = ARM_SP_REGNUM;
12121 thumb_insn_r->reg_rec_count = index;
12125 /* Handle enhanced software breakpoint insn, BKPT. */
12126 /* CPSR is changed so that execution continues in ARM state, with normal
12127 interrupts disabled, entering abort mode. */
12128 /* The PC is set according to the high-vector configuration. */
12129 /* If the user hits the breakpoint and then runs in reverse, we need to go
12130 back with the previous CPSR and program counter. */
12131 record_buf[0] = ARM_PS_REGNUM;
12132 record_buf[1] = ARM_LR_REGNUM;
12133 thumb_insn_r->reg_rec_count = 2;
12134 /* We need to save SPSR value, which is not yet done. */
12135 printf_unfiltered (_("Process record does not support instruction "
12136 "0x%0x at address %s.\n"),
12137 thumb_insn_r->arm_insn,
12138 paddress (thumb_insn_r->gdbarch,
12139 thumb_insn_r->this_addr));
12143 /* If-Then, and hints */
12150 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12151 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12157 /* Handling opcode 110 insns. */
12160 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12162 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12163 struct regcache *reg_cache = thumb_insn_r->regcache;
12165 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12166 uint32_t reg_src1 = 0;
12167 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12168 uint32_t index = 0, start_address = 0;
12169 uint32_t record_buf[24], record_buf_mem[48];
12171 ULONGEST u_regval = 0;
12173 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12174 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12180 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12182 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12183 while (register_bits)
12185 if (register_bits & 0x00000001)
12186 record_buf[index++] = register_count;
12187 register_bits = register_bits >> 1;
12190 record_buf[index++] = reg_src1;
12191 thumb_insn_r->reg_rec_count = index;
12193 else if (0 == opcode2)
12195 /* Handle STMIA. */
12196 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12198 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12199 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12200 while (register_bits)
12202 if (register_bits & 0x00000001)
12204 register_bits = register_bits >> 1;
12206 start_address = u_regval;
12207 thumb_insn_r->mem_rec_count = register_count;
12208 while (register_count)
12210 record_buf_mem[(register_count * 2) - 1] = start_address;
12211 record_buf_mem[(register_count * 2) - 2] = 4;
12212 start_address = start_address + 4;
12216 else if (0x1F == opcode1)
12218 /* Handle arm syscall insn. */
12219 if (tdep->arm_syscall_record != NULL)
12221 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12222 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12226 printf_unfiltered (_("no syscall record support\n"));
12231 /* B (1): the conditional branch is automatically taken care of in process_record,
12232 as PC is saved there. */
12234 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12235 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12241 /* Handling opcode 111 insns. */
12244 thumb_record_branch (insn_decode_record *thumb_insn_r)
12246 uint32_t record_buf[8];
12247 uint32_t bits_h = 0;
12249 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12251 if (2 == bits_h || 3 == bits_h)
12254 record_buf[0] = ARM_LR_REGNUM;
12255 thumb_insn_r->reg_rec_count = 1;
12257 else if (1 == bits_h)
12260 record_buf[0] = ARM_PS_REGNUM;
12261 record_buf[1] = ARM_LR_REGNUM;
12262 thumb_insn_r->reg_rec_count = 2;
12265 /* B(2) is automatically taken care of in process_record, as PC is saved there. */
12268 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12273 /* Handler for thumb2 load/store multiple instructions. */
12276 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12278 struct regcache *reg_cache = thumb2_insn_r->regcache;
12280 uint32_t reg_rn, op;
12281 uint32_t register_bits = 0, register_count = 0;
12282 uint32_t index = 0, start_address = 0;
12283 uint32_t record_buf[24], record_buf_mem[48];
12285 ULONGEST u_regval = 0;
12287 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12288 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12290 if (0 == op || 3 == op)
12292 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12294 /* Handle RFE instruction. */
12295 record_buf[0] = ARM_PS_REGNUM;
12296 thumb2_insn_r->reg_rec_count = 1;
12300 /* Handle SRS instruction after reading banked SP. */
12301 return arm_record_unsupported_insn (thumb2_insn_r);
12304 else if (1 == op || 2 == op)
12306 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12308 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12309 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12310 while (register_bits)
12312 if (register_bits & 0x00000001)
12313 record_buf[index++] = register_count;
12316 register_bits = register_bits >> 1;
12318 record_buf[index++] = reg_rn;
12319 record_buf[index++] = ARM_PS_REGNUM;
12320 thumb2_insn_r->reg_rec_count = index;
12324 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12325 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12326 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12327 while (register_bits)
12329 if (register_bits & 0x00000001)
12332 register_bits = register_bits >> 1;
12337 /* Start address calculation for STMIA/STMEA (increment after). */
12338 start_address = u_regval;
12342 /* Start address calculation for STMDB/STMFD (decrement before). */
12343 start_address = u_regval - register_count * 4;
12346 thumb2_insn_r->mem_rec_count = register_count;
12347 while (register_count)
12349 record_buf_mem[register_count * 2 - 1] = start_address;
12350 record_buf_mem[register_count * 2 - 2] = 4;
12351 start_address = start_address + 4;
12354 record_buf[0] = reg_rn;
12355 record_buf[1] = ARM_PS_REGNUM;
12356 thumb2_insn_r->reg_rec_count = 2;
12360 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12362 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12364 return ARM_RECORD_SUCCESS;
12367 /* Handler for thumb2 load/store (dual/exclusive) and table branch instructions. */
12371 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12373 struct regcache *reg_cache = thumb2_insn_r->regcache;
12375 uint32_t reg_rd, reg_rn, offset_imm;
12376 uint32_t reg_dest1, reg_dest2;
12377 uint32_t address, offset_addr;
12378 uint32_t record_buf[8], record_buf_mem[8];
12379 uint32_t op1, op2, op3;
12381 ULONGEST u_regval[2];
12383 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12384 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12385 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12387 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12389 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12391 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12392 record_buf[0] = reg_dest1;
12393 record_buf[1] = ARM_PS_REGNUM;
12394 thumb2_insn_r->reg_rec_count = 2;
12397 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12399 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12400 record_buf[2] = reg_dest2;
12401 thumb2_insn_r->reg_rec_count = 3;
12406 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12407 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12409 if (0 == op1 && 0 == op2)
12411 /* Handle STREX. */
12412 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12413 address = u_regval[0] + (offset_imm * 4);
12414 record_buf_mem[0] = 4;
12415 record_buf_mem[1] = address;
12416 thumb2_insn_r->mem_rec_count = 1;
12417 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12418 record_buf[0] = reg_rd;
12419 thumb2_insn_r->reg_rec_count = 1;
12421 else if (1 == op1 && 0 == op2)
12423 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12424 record_buf[0] = reg_rd;
12425 thumb2_insn_r->reg_rec_count = 1;
12426 address = u_regval[0];
12427 record_buf_mem[1] = address;
12431 /* Handle STREXB. */
12432 record_buf_mem[0] = 1;
12433 thumb2_insn_r->mem_rec_count = 1;
12437 /* Handle STREXH. */
12438 record_buf_mem[0] = 2;
12439 thumb2_insn_r->mem_rec_count = 1;
12443 /* Handle STREXD. */
12444 address = u_regval[0];
12445 record_buf_mem[0] = 4;
12446 record_buf_mem[2] = 4;
12447 record_buf_mem[3] = address + 4;
12448 thumb2_insn_r->mem_rec_count = 2;
12453 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12455 if (bit (thumb2_insn_r->arm_insn, 24))
12457 if (bit (thumb2_insn_r->arm_insn, 23))
12458 offset_addr = u_regval[0] + (offset_imm * 4);
12460 offset_addr = u_regval[0] - (offset_imm * 4);
12462 address = offset_addr;
12465 address = u_regval[0];
12467 record_buf_mem[0] = 4;
12468 record_buf_mem[1] = address;
12469 record_buf_mem[2] = 4;
12470 record_buf_mem[3] = address + 4;
12471 thumb2_insn_r->mem_rec_count = 2;
12472 record_buf[0] = reg_rn;
12473 thumb2_insn_r->reg_rec_count = 1;
12477 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12479 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12481 return ARM_RECORD_SUCCESS;
12484 /* Handler for thumb2 data processing (shift register and modified immediate)
12488 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12490 uint32_t reg_rd, op;
12491 uint32_t record_buf[8];
12493 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12494 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12496 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12498 record_buf[0] = ARM_PS_REGNUM;
12499 thumb2_insn_r->reg_rec_count = 1;
12503 record_buf[0] = reg_rd;
12504 record_buf[1] = ARM_PS_REGNUM;
12505 thumb2_insn_r->reg_rec_count = 2;
12508 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12510 return ARM_RECORD_SUCCESS;
12513 /* Generic handler for thumb2 instructions which affect the destination and PS registers. */
12517 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12520 uint32_t record_buf[8];
12522 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12524 record_buf[0] = reg_rd;
12525 record_buf[1] = ARM_PS_REGNUM;
12526 thumb2_insn_r->reg_rec_count = 2;
12528 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12530 return ARM_RECORD_SUCCESS;
12533 /* Handler for thumb2 branch and miscellaneous control instructions. */
12536 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12538 uint32_t op, op1, op2;
12539 uint32_t record_buf[8];
12541 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12542 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12543 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12545 /* Handle MSR insn. */
12546 if (!(op1 & 0x2) && 0x38 == op)
12550 /* CPSR is going to be changed. */
12551 record_buf[0] = ARM_PS_REGNUM;
12552 thumb2_insn_r->reg_rec_count = 1;
12556 arm_record_unsupported_insn (thumb2_insn_r);
12560 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12563 record_buf[0] = ARM_PS_REGNUM;
12564 record_buf[1] = ARM_LR_REGNUM;
12565 thumb2_insn_r->reg_rec_count = 2;
12568 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12570 return ARM_RECORD_SUCCESS;
12573 /* Handler for thumb2 store single data item instructions. */
12576 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12578 struct regcache *reg_cache = thumb2_insn_r->regcache;
12580 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12581 uint32_t address, offset_addr;
12582 uint32_t record_buf[8], record_buf_mem[8];
12585 ULONGEST u_regval[2];
12587 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12588 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12589 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12590 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12592 if (bit (thumb2_insn_r->arm_insn, 23))
12595 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12596 offset_addr = u_regval[0] + offset_imm;
12597 address = offset_addr;
12602 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12604 /* Handle STRB (register). */
12605 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12606 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12607 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12608 offset_addr = u_regval[1] << shift_imm;
12609 address = u_regval[0] + offset_addr;
12613 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12614 if (bit (thumb2_insn_r->arm_insn, 10))
12616 if (bit (thumb2_insn_r->arm_insn, 9))
12617 offset_addr = u_regval[0] + offset_imm;
12619 offset_addr = u_regval[0] - offset_imm;
12621 address = offset_addr;
12624 address = u_regval[0];
12630 /* Store byte instructions. */
12633 record_buf_mem[0] = 1;
12635 /* Store half word instructions. */
12638 record_buf_mem[0] = 2;
12640 /* Store word instructions. */
12643 record_buf_mem[0] = 4;
12647 gdb_assert_not_reached ("no decoding pattern found");
12651 record_buf_mem[1] = address;
12652 thumb2_insn_r->mem_rec_count = 1;
12653 record_buf[0] = reg_rn;
12654 thumb2_insn_r->reg_rec_count = 1;
12656 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12658 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12660 return ARM_RECORD_SUCCESS;
12663 /* Handler for thumb2 load memory hints instructions. */
12666 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12668 uint32_t record_buf[8];
12669 uint32_t reg_rt, reg_rn;
12671 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12672 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12674 if (ARM_PC_REGNUM != reg_rt)
12676 record_buf[0] = reg_rt;
12677 record_buf[1] = reg_rn;
12678 record_buf[2] = ARM_PS_REGNUM;
12679 thumb2_insn_r->reg_rec_count = 3;
12681 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12683 return ARM_RECORD_SUCCESS;
12686 return ARM_RECORD_FAILURE;
12689 /* Handler for thumb2 load word instructions. */
12692 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12694 uint32_t record_buf[8];
12696 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12697 record_buf[1] = ARM_PS_REGNUM;
12698 thumb2_insn_r->reg_rec_count = 2;
12700 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12702 return ARM_RECORD_SUCCESS;
12705 /* Handler for thumb2 long multiply, long multiply accumulate, and
12706 divide instructions. */
12709 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12711 uint32_t opcode1 = 0, opcode2 = 0;
12712 uint32_t record_buf[8];
12714 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12715 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12717 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12719 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12721 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12722 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12723 record_buf[2] = ARM_PS_REGNUM;
12724 thumb2_insn_r->reg_rec_count = 3;
12726 else if (1 == opcode1 || 3 == opcode2)
12728 /* Handle SDIV and UDIV. */
12729 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12730 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12731 record_buf[2] = ARM_PS_REGNUM;
12732 thumb2_insn_r->reg_rec_count = 3;
12735 return ARM_RECORD_FAILURE;
12737 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12739 return ARM_RECORD_SUCCESS;
12742 /* Record handler for thumb32 coprocessor instructions. */
12745 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12747 if (bit (thumb2_insn_r->arm_insn, 25))
12748 return arm_record_coproc_data_proc (thumb2_insn_r);
12750 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12753 /* Record handler for advanced SIMD structure load/store instructions. */
12756 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12758 struct regcache *reg_cache = thumb2_insn_r->regcache;
12759 uint32_t l_bit, a_bit, b_bits;
12760 uint32_t record_buf[128], record_buf_mem[128];
12761 uint32_t reg_rn, reg_vd, address, f_elem;
12762 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12765 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12766 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12767 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12768 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12769 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12770 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12771 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12772 f_elem = 8 / f_ebytes;
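/* f_ebytes is the element size in bytes (from the size field in bits 6-7)
and f_elem the number of such elements in one 64-bit D register. */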
12776 ULONGEST u_regval = 0;
12777 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12778 address = u_regval;
12783 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12785 if (b_bits == 0x07)
12787 else if (b_bits == 0x0a)
12789 else if (b_bits == 0x06)
12791 else if (b_bits == 0x02)
12796 for (index_r = 0; index_r < bf_regs; index_r++)
12798 for (index_e = 0; index_e < f_elem; index_e++)
12800 record_buf_mem[index_m++] = f_ebytes;
12801 record_buf_mem[index_m++] = address;
12802 address = address + f_ebytes;
12803 thumb2_insn_r->mem_rec_count += 1;
12808 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12810 if (b_bits == 0x09 || b_bits == 0x08)
12812 else if (b_bits == 0x03)
12817 for (index_r = 0; index_r < bf_regs; index_r++)
12818 for (index_e = 0; index_e < f_elem; index_e++)
12820 for (loop_t = 0; loop_t < 2; loop_t++)
12822 record_buf_mem[index_m++] = f_ebytes;
12823 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12824 thumb2_insn_r->mem_rec_count += 1;
12826 address = address + (2 * f_ebytes);
12830 else if ((b_bits & 0x0e) == 0x04)
12832 for (index_e = 0; index_e < f_elem; index_e++)
12834 for (loop_t = 0; loop_t < 3; loop_t++)
12836 record_buf_mem[index_m++] = f_ebytes;
12837 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12838 thumb2_insn_r->mem_rec_count += 1;
12840 address = address + (3 * f_ebytes);
12844 else if (!(b_bits & 0x0e))
12846 for (index_e = 0; index_e < f_elem; index_e++)
12848 for (loop_t = 0; loop_t < 4; loop_t++)
12850 record_buf_mem[index_m++] = f_ebytes;
12851 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12852 thumb2_insn_r->mem_rec_count += 1;
12854 address = address + (4 * f_ebytes);
12860 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12862 if (bft_size == 0x00)
12864 else if (bft_size == 0x01)
12866 else if (bft_size == 0x02)
12872 if (!(b_bits & 0x0b) || b_bits == 0x08)
12873 thumb2_insn_r->mem_rec_count = 1;
12875 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12876 thumb2_insn_r->mem_rec_count = 2;
12878 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12879 thumb2_insn_r->mem_rec_count = 3;
12881 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12882 thumb2_insn_r->mem_rec_count = 4;
12884 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12886 /* Each memory record is a (length, address) pair. */
record_buf_mem[index_m * 2] = f_ebytes;
12887 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12896 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12897 thumb2_insn_r->reg_rec_count = 1;
12899 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12900 thumb2_insn_r->reg_rec_count = 2;
12902 else if ((b_bits & 0x0e) == 0x04)
12903 thumb2_insn_r->reg_rec_count = 3;
12905 else if (!(b_bits & 0x0e))
12906 thumb2_insn_r->reg_rec_count = 4;
12911 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12912 thumb2_insn_r->reg_rec_count = 1;
12914 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12915 thumb2_insn_r->reg_rec_count = 2;
12917 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12918 thumb2_insn_r->reg_rec_count = 3;
12920 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12921 thumb2_insn_r->reg_rec_count = 4;
12923 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12924 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12928 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12930 record_buf[index_r] = reg_rn;
12931 thumb2_insn_r->reg_rec_count += 1;
12934 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12936 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
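/* Illustrative sketch, not part of GDB: how the element geometry used by the
   handler above follows from the "size" field in bits 6-7 of the encoding.
   record_buf_mem is then filled with (length, address) pairs, one pair per
   recorded memory access.  The helper name is hypothetical.  */

static inline void
example_asimd_element_geometry (uint32_t insn, uint32_t *ebytes, uint32_t *elem)
{
  *ebytes = 1 << bits (insn, 6, 7);	/* Element size: 1, 2, 4 or 8 bytes.  */
  *elem = 8 / *ebytes;			/* Elements per 64-bit D register.  */
}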
12941 /* Decodes thumb2 instruction type and invokes its record handler. */
12943 static unsigned int
12944 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12946 uint32_t op, op1, op2;
12948 op = bit (thumb2_insn_r->arm_insn, 15);
12949 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12950 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12954 if (!(op2 & 0x64))
12956 /* Load/store multiple instruction. */
12957 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12959 else if ((op2 & 0x64) == 0x4)
12961 /* Load/store (dual/exclusive) and table branch instruction. */
12962 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12964 else if ((op2 & 0x60) == 0x20)
12966 /* Data-processing (shifted register). */
12967 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12969 else if (op2 & 0x40)
12971 /* Co-processor instructions. */
12972 return thumb2_record_coproc_insn (thumb2_insn_r);
12975 else if (op1 == 0x02)
12979 /* Branches and miscellaneous control instructions. */
12980 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12982 else if (op2 & 0x20)
12984 /* Data-processing (plain binary immediate) instruction. */
12985 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12989 /* Data-processing (modified immediate). */
12990 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12993 else if (op1 == 0x03)
12995 if (!(op2 & 0x71))
12997 /* Store single data item. */
12998 return thumb2_record_str_single_data (thumb2_insn_r);
13000 else if (!((op2 & 0x71) ^ 0x10))
13002 /* Advanced SIMD or structure load/store instructions. */
13003 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13005 else if (!((op2 & 0x67) ^ 0x01))
13007 /* Load byte, memory hints instruction. */
13008 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13010 else if (!((op2 & 0x67) ^ 0x03))
13012 /* Load halfword, memory hints instruction. */
13013 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13015 else if (!((op2 & 0x67) ^ 0x05))
13017 /* Load word instruction. */
13018 return thumb2_record_ld_word (thumb2_insn_r);
13020 else if (!((op2 & 0x70) ^ 0x20))
13022 /* Data-processing (register) instruction. */
13023 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13025 else if (!((op2 & 0x78) ^ 0x30))
13027 /* Multiply, multiply accumulate, abs diff instruction. */
13028 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13030 else if (!((op2 & 0x78) ^ 0x38))
13032 /* Long multiply, long multiply accumulate, and divide. */
13033 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13035 else if (op2 & 0x40)
13037 /* Co-processor instructions. */
13038 return thumb2_record_coproc_insn (thumb2_insn_r);
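/* Worked example (illustrative, based on the selftest encoding further down):
   for the Thumb-2 word 0xee1d7f70 ("mrc 15, 0, r7, cr13, cr0, {3}", already
   halfword-swapped by decode_insn), op = bit 15 = 0, op1 = bits 27-28 = 0x1
   and op2 = bits 20-26 = 0x61.  Since op2 & 0x40 is set, the instruction is
   dispatched to thumb2_record_coproc_insn.  */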
13046 /* Abstract memory reader. */
13048 class abstract_memory_reader
13051 /* Read LEN bytes of target memory at address MEMADDR, placing the
13052 results in GDB's memory at BUF. Return true on success. */
13054 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13057 /* Instruction reader from real target. */
13059 class instruction_reader : public abstract_memory_reader
13062 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13064 if (target_read_memory (memaddr, buf, len))
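/* Illustrative sketch, not part of GDB: a minimal buffer-backed reader
   implementing the abstract_memory_reader interface above, similar in spirit
   to the selftest readers further down.  The class name is hypothetical.  */

class example_buffer_reader : public abstract_memory_reader
{
public:
  example_buffer_reader (const gdb_byte *bytes, size_t size)
    : m_bytes (bytes), m_size (size)
  {}

  bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
  {
    if (memaddr + len > m_size)
      return false;

    memcpy (buf, m_bytes + memaddr, len);
    return true;
  }

private:
  const gdb_byte *m_bytes;
  size_t m_size;
};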
13073 /* Extract an arm/thumb/thumb2 instruction of INSN_SIZE bytes, and return 0 on
13074 success or a positive value on failure.  */
13077 extract_arm_insn (abstract_memory_reader& reader,
13078 insn_decode_record *insn_record, uint32_t insn_size)
13080 gdb_byte buf[insn_size];
13082 memset (&buf[0], 0, insn_size);
13084 if (!reader.read (insn_record->this_addr, buf, insn_size))
13086 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13088 gdbarch_byte_order_for_code (insn_record->gdbarch));
13092 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13094 /* Decode an arm/thumb insn depending on condition codes and opcodes, and dispatch it to the matching record handler.  */
13098 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13099 record_type_t record_type, uint32_t insn_size)
13102 /* Bits 25, 26, 27 (counting from 0) decode the type of arm instruction.  */
13104 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13106 arm_record_data_proc_misc_ld_str, /* 000. */
13107 arm_record_data_proc_imm, /* 001. */
13108 arm_record_ld_st_imm_offset, /* 010. */
13109 arm_record_ld_st_reg_offset, /* 011. */
13110 arm_record_ld_st_multiple, /* 100. */
13111 arm_record_b_bl, /* 101. */
13112 arm_record_asimd_vfp_coproc, /* 110. */
13113 arm_record_coproc_data_proc /* 111. */
13116 /* Bits 13, 14, 15 (counting from 0) decode the type of thumb instruction.  */
13118 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13120 thumb_record_shift_add_sub, /* 000. */
13121 thumb_record_add_sub_cmp_mov, /* 001. */
13122 thumb_record_ld_st_reg_offset, /* 010. */
13123 thumb_record_ld_st_imm_offset, /* 011. */
13124 thumb_record_ld_st_stack, /* 100. */
13125 thumb_record_misc, /* 101. */
13126 thumb_record_ldm_stm_swi, /* 110. */
13127 thumb_record_branch /* 111. */
13130 uint32_t ret = 0; /* Return value: 0 on success, nonzero on failure.  */
13131 uint32_t insn_id = 0;
13133 if (extract_arm_insn (reader, arm_record, insn_size))
13137 printf_unfiltered (_("Process record: error reading memory at "
13138 "addr %s len = %d.\n"),
13139 paddress (arm_record->gdbarch,
13140 arm_record->this_addr), insn_size);
13144 else if (ARM_RECORD == record_type)
13146 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13147 insn_id = bits (arm_record->arm_insn, 25, 27);
13149 if (arm_record->cond == 0xf)
13150 ret = arm_record_extension_space (arm_record);
13153 /* If this insn has fallen into extension space
13154 then we need not decode it anymore. */
13155 ret = arm_handle_insn[insn_id] (arm_record);
13157 if (ret != ARM_RECORD_SUCCESS)
13159 arm_record_unsupported_insn (arm_record);
13163 else if (THUMB_RECORD == record_type)
13165 /* Thumb instructions do not have condition codes, so set cond to -1.  */
13166 arm_record->cond = -1;
13167 insn_id = bits (arm_record->arm_insn, 13, 15);
13168 ret = thumb_handle_insn[insn_id] (arm_record);
13169 if (ret != ARM_RECORD_SUCCESS)
13171 arm_record_unsupported_insn (arm_record);
13175 else if (THUMB2_RECORD == record_type)
13177 /* Thumb instructions do not have condition codes, so set cond to -1.  */
13178 arm_record->cond = -1;
13180 /* Swap the first half of the 32-bit thumb instruction with the second half.  */
13181 arm_record->arm_insn
13182 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
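/* For example, the little-endian bytes 1d ee 70 7f are read as 0x7f70ee1d
   (second halfword in the high bits); the swap above turns this into
   0xee1d7f70, the order that the bit/bits field accessors expect.  */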
13184 ret = thumb2_record_decode_insn_handler (arm_record);
13186 if (ret != ARM_RECORD_SUCCESS)
13188 arm_record_unsupported_insn (arm_record);
13194 /* Unreachable: RECORD_TYPE was not one of the handled values.  */
13195 gdb_assert_not_reached ("not a valid instruction, could not decode");
13202 namespace selftests {
13204 /* Provide both 16-bit and 32-bit thumb instructions. */
13206 class instruction_reader_thumb : public abstract_memory_reader
13209 template<size_t SIZE>
13210 instruction_reader_thumb (enum bfd_endian endian,
13211 const uint16_t (&insns)[SIZE])
13212 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13215 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13217 SELF_CHECK (len == 4 || len == 2);
13218 SELF_CHECK (memaddr % 2 == 0);
13219 SELF_CHECK ((memaddr / 2) < m_insns_size);
13221 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13224 store_unsigned_integer (&buf[2], 2, m_endian,
13225 m_insns[memaddr / 2 + 1]);
13231 enum bfd_endian m_endian;
13232 const uint16_t *m_insns;
13233 size_t m_insns_size;
13237 arm_record_test (void)
13239 struct gdbarch_info info;
13240 gdbarch_info_init (&info);
13241 info.bfd_arch_info = bfd_scan_arch ("arm");
13243 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13245 SELF_CHECK (gdbarch != NULL);
13247 /* 16-bit Thumb instructions. */
13249 insn_decode_record arm_record;
13251 memset (&arm_record, 0, sizeof (insn_decode_record));
13252 arm_record.gdbarch = gdbarch;
13254 static const uint16_t insns[] = {
13255 0xb2db, /* db b2  uxtb  r3, r3 */
13257 0x58cd, /* cd 58  ldr  r5, [r1, r3] */
13261 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13262 instruction_reader_thumb reader (endian, insns);
13263 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13264 THUMB_INSN_SIZE_BYTES);
13266 SELF_CHECK (ret == 0);
13267 SELF_CHECK (arm_record.mem_rec_count == 0);
13268 SELF_CHECK (arm_record.reg_rec_count == 1);
13269 SELF_CHECK (arm_record.arm_regs[0] == 3);
13271 arm_record.this_addr += 2;
13272 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13273 THUMB_INSN_SIZE_BYTES);
13275 SELF_CHECK (ret == 0);
13276 SELF_CHECK (arm_record.mem_rec_count == 0);
13277 SELF_CHECK (arm_record.reg_rec_count == 1);
13278 SELF_CHECK (arm_record.arm_regs[0] == 5);
13281 /* 32-bit Thumb-2 instructions. */
13283 insn_decode_record arm_record;
13285 memset (&arm_record, 0, sizeof (insn_decode_record));
13286 arm_record.gdbarch = gdbarch;
13288 static const uint16_t insns[] = {
13289 0xee1d, 0x7f70, /* 1d ee 70 7f  mrc 15, 0, r7, cr13, cr0, {3} */
13293 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13294 instruction_reader_thumb reader (endian, insns);
13295 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13296 THUMB2_INSN_SIZE_BYTES);
13298 SELF_CHECK (ret == 0);
13299 SELF_CHECK (arm_record.mem_rec_count == 0);
13300 SELF_CHECK (arm_record.reg_rec_count == 1);
13301 SELF_CHECK (arm_record.arm_regs[0] == 7);
13305 /* Instruction reader from manually cooked instruction sequences. */
13307 class test_arm_instruction_reader : public arm_instruction_reader
13310 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
13314 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
13316 SELF_CHECK (memaddr % 4 == 0);
13317 SELF_CHECK (memaddr / 4 < m_insns.size ());
13319 return m_insns[memaddr / 4];
13323 const gdb::array_view<const uint32_t> m_insns;
13327 arm_analyze_prologue_test ()
13329 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
13331 struct gdbarch_info info;
13332 gdbarch_info_init (&info);
13333 info.byte_order = endianness;
13334 info.byte_order_for_code = endianness;
13335 info.bfd_arch_info = bfd_scan_arch ("arm");
13337 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13339 SELF_CHECK (gdbarch != NULL);
13341 /* The "sub" instruction contains an immediate value rotate count of 0,
13342 which resulted in a 32-bit shift of a 32-bit value, caught by UBSan.  */
13344 const uint32_t insns[] = {
13345 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
13346 0xe1a05000, /* mov r5, r0 */
13347 0xe5903020, /* ldr r3, [r0, #32] */
13348 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
13351 test_arm_instruction_reader mem_reader (insns);
13352 arm_prologue_cache cache;
13353 cache.saved_regs = trad_frame_alloc_saved_regs (gdbarch);
13355 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
13359 } // namespace selftests
13360 #endif /* GDB_SELF_TEST */
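/* Sketch (assumption; the registration itself is not shown in this fragment):
   the selftests above are normally registered from this file's _initialize
   routine, along the lines of:

     #if GDB_SELF_TEST
       selftests::register_test ("arm-record", selftests::arm_record_test);
       selftests::register_test ("arm-analyze-prologue",
				 selftests::arm_analyze_prologue_test);
     #endif

   The test names used here are illustrative.  */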
13362 /* Cleans up local record registers and memory allocations. */
13365 deallocate_reg_mem (insn_decode_record *record)
13367 xfree (record->arm_regs);
13368 xfree (record->arm_mems);
13372 /* Parse the current instruction and record the values of the registers and
13373 memory that will be changed by the current instruction to "record_arch_list".
13374 Return -1 if something is wrong. */
13377 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13378 CORE_ADDR insn_addr)
13381 uint32_t no_of_rec = 0;
13382 uint32_t ret = 0; /* Return value: 0 on success, -1 on record failure.  */
13383 ULONGEST t_bit = 0, insn_id = 0;
13385 ULONGEST u_regval = 0;
13387 insn_decode_record arm_record;
13389 memset (&arm_record, 0, sizeof (insn_decode_record));
13390 arm_record.regcache = regcache;
13391 arm_record.this_addr = insn_addr;
13392 arm_record.gdbarch = gdbarch;
13395 if (record_debug > 1)
13397 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13399 paddress (gdbarch, arm_record.this_addr));
13402 instruction_reader reader;
13403 if (extract_arm_insn (reader, &arm_record, 2))
13407 printf_unfiltered (_("Process record: error reading memory at "
13408 "addr %s len = %d.\n"),
13409 paddress (arm_record.gdbarch,
13410 arm_record.this_addr), 2);
13415 /* Check whether the insn is a thumb or an arm one.  */
13417 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13418 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13421 if (!(u_regval & t_bit))
13423 /* We are decoding arm insn. */
13424 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13428 insn_id = bits (arm_record.arm_insn, 11, 15);
13429 /* Is it a thumb2 insn?  */
13430 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13432 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13433 THUMB2_INSN_SIZE_BYTES);
13437 /* We are decoding thumb insn. */
13438 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13439 THUMB_INSN_SIZE_BYTES);
13445 /* Record registers. */
13446 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13447 if (arm_record.arm_regs)
13449 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13451 if (record_full_arch_list_add_reg
13452 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13456 /* Record memories. */
13457 if (arm_record.arm_mems)
13459 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13461 if (record_full_arch_list_add_mem
13462 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13463 arm_record.arm_mems[no_of_rec].len))
13468 if (record_full_arch_list_add_end ())
13473 deallocate_reg_mem (&arm_record);
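/* Illustrative sketch, not part of GDB: the Thumb vs Thumb-2 test used by
   arm_process_record above.  A halfword whose bits 11-15 are 0x1d, 0x1e or
   0x1f is the first half of a 32-bit Thumb-2 instruction; anything else is a
   16-bit Thumb instruction.  The helper name is hypothetical.  */

static inline bool
example_is_thumb2_first_halfword (uint16_t halfword)
{
  uint32_t insn_id = (halfword >> 11) & 0x1f;	/* Bits 11-15.  */

  return insn_id == 0x1d || insn_id == 0x1e || insn_id == 0x1f;
}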
13478 /* See arm-tdep.h. */
13480 const target_desc *
13481 arm_read_description (arm_fp_type fp_type)
13483 struct target_desc *tdesc = tdesc_arm_list[fp_type];
13485 if (tdesc == nullptr)
13487 tdesc = arm_create_target_description (fp_type);
13488 tdesc_arm_list[fp_type] = tdesc;
13494 /* See arm-tdep.h. */
13496 const target_desc *
13497 arm_read_mprofile_description (arm_m_profile_type m_type)
13499 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13501 if (tdesc == nullptr)
13503 tdesc = arm_create_mprofile_target_description (m_type);
13504 tdesc_arm_mprofile_list[m_type] = tdesc;