1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2/frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
51 #include "arch/arm-get-next-pcs.h"
53 #include "gdb/sim-arm.h"
56 #include "coff/internal.h"
60 #include "record-full.h"
66 #include "gdbsupport/selftest.h"
69 static bool arm_debug;
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72 it as a Thumb function. The MSB of the minimal symbol's "info" field
73 is used for this purpose.
75 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
76 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
78 #define MSYMBOL_SET_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym) = 1
81 #define MSYMBOL_IS_SPECIAL(msym) \
82 MSYMBOL_TARGET_FLAG_1 (msym)
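/* Illustrative usage sketch (the call sites are outside this excerpt): the
   minimal-symbol readers call MSYMBOL_SET_SPECIAL (msym) when they create a
   symbol for a Thumb function, and arm_pc_is_thumb below queries it with
   MSYMBOL_IS_SPECIAL (msym) to decide the execution mode of an address.  */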
84 struct arm_mapping_symbol
89 bool operator< (const arm_mapping_symbol &other) const
90 { return this->value < other.value; }
93 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
97 explicit arm_per_bfd (size_t num_sections)
98 : section_maps (new arm_mapping_symbol_vec[num_sections]),
99 section_maps_sorted (new bool[num_sections] ())
102 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
104 /* Information about mapping symbols ($a, $d, $t) in the objfile.
106 The format is an array of vectors of arm_mapping_symbols; there is one
107 vector for each section of the objfile (the array is indexed by BFD section
110 For each section, the vector of arm_mapping_symbol is sorted by
111 symbol value (address). */
112 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
114 /* For each corresponding element of section_maps above, is this vector
116 std::unique_ptr<bool[]> section_maps_sorted;
119 /* Per-bfd data used for mapping symbols. */
120 static bfd_key<arm_per_bfd> arm_bfd_data_key;
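/* Illustrative background (summarizing the ARM ELF convention assumed here):
   the toolchain emits mapping symbols named "$a" (start of ARM code), "$t"
   (start of Thumb code) and "$d" (start of literal data) wherever the kind of
   contents changes within a section.  For example, a section laid out as

     0x8000  $a  ARM code
     0x8040  $d  literal pool
     0x8048  $t  Thumb code

   contributes three arm_mapping_symbol entries to that section's vector,
   which arm_find_mapping_symbol below sorts by value and searches with
   std::lower_bound.  */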
122 /* The list of available "set arm ..." and "show arm ..." commands. */
123 static struct cmd_list_element *setarmcmdlist = NULL;
124 static struct cmd_list_element *showarmcmdlist = NULL;
126 /* The type of floating-point to use. Keep this in sync with enum
127 arm_float_model, and the help string in _initialize_arm_tdep. */
128 static const char *const fp_model_strings[] =
138 /* A variable that can be configured by the user. */
139 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
140 static const char *current_fp_model = "auto";
142 /* The ABI to use. Keep this in sync with arm_abi_kind. */
143 static const char *const arm_abi_strings[] =
151 /* A variable that can be configured by the user. */
152 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
153 static const char *arm_abi_string = "auto";
155 /* The execution mode to assume. */
156 static const char *const arm_mode_strings[] =
164 static const char *arm_fallback_mode_string = "auto";
165 static const char *arm_force_mode_string = "auto";
167 /* The standard register names, and all the valid aliases for them. Note
168 that `fp', `sp' and `pc' are not added in this alias list, because they
169 have been added as builtin user registers in
170 std-regs.c:_initialize_frame_reg. */
175 } arm_register_aliases[] = {
176 /* Basic register numbers. */
193 /* Synonyms (argument and variable registers). */
206 /* Other platform-specific names for r9. */
212 /* Names used by GCC (not listed in the ARM EABI). */
214 /* A special name from the older ATPCS. */
218 static const char *const arm_register_names[] =
219 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
220 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
221 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
222 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
223 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
224 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
225 "fps", "cpsr" }; /* 24 25 */
227 /* Holds the current set of options to be passed to the disassembler. */
228 static char *arm_disassembler_options;
230 /* Valid register name styles. */
231 static const char **valid_disassembly_styles;
233 /* Disassembly style to use. Default to "std" register names. */
234 static const char *disassembly_style;
236 /* All possible arm target descriptors. */
237 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID];
238 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
240 /* This is used to keep the bfd arch_info in sync with the disassembly
242 static void set_disassembly_style_sfunc (const char *, int,
243 struct cmd_list_element *);
244 static void show_disassembly_style_sfunc (struct ui_file *, int,
245 struct cmd_list_element *,
248 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
249 readable_regcache *regcache,
250 int regnum, gdb_byte *buf);
251 static void arm_neon_quad_write (struct gdbarch *gdbarch,
252 struct regcache *regcache,
253 int regnum, const gdb_byte *buf);
256 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
259 /* get_next_pcs operations. */
260 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
261 arm_get_next_pcs_read_memory_unsigned_integer,
262 arm_get_next_pcs_syscall_next_pc,
263 arm_get_next_pcs_addr_bits_remove,
264 arm_get_next_pcs_is_thumb,
268 struct arm_prologue_cache
270 /* The stack pointer at the time this frame was created; i.e. the
271 caller's stack pointer when this function was called. It is used
272 to identify this frame. */
275 /* The frame base for this frame is just prev_sp - frame size.
276 FRAMESIZE is the distance from the frame pointer to the
277 initial stack pointer. */
281 /* The register used to hold the frame pointer for this frame. */
284 /* Saved register offsets. */
285 struct trad_frame_saved_reg *saved_regs;
290 /* Abstract class to read ARM instructions from memory. */
292 class arm_instruction_reader
295 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
296 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
299 /* Read instructions from target memory. */
301 class target_arm_instruction_reader : public arm_instruction_reader
304 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
306 return read_code_unsigned_integer (memaddr, 4, byte_order);
312 static CORE_ADDR arm_analyze_prologue
313 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
314 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
316 /* Architecture version for displaced stepping. This affects the behaviour of
317 certain instructions, and really should not be hard-wired. */
319 #define DISPLACED_STEPPING_ARCH_VERSION 5
321 /* See arm-tdep.h. */
323 bool arm_apcs_32 = true;
325 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
328 arm_psr_thumb_bit (struct gdbarch *gdbarch)
330 if (gdbarch_tdep (gdbarch)->is_m)
336 /* Determine if the processor is currently executing in Thumb mode. */
339 arm_is_thumb (struct regcache *regcache)
342 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
344 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
346 return (cpsr & t_bit) != 0;
349 /* Determine if FRAME is executing in Thumb mode. */
352 arm_frame_is_thumb (struct frame_info *frame)
355 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
357 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
358 directly (from a signal frame or dummy frame) or by interpreting
359 the saved LR (from a prologue or DWARF frame). So consult it and
360 trust the unwinders. */
361 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
363 return (cpsr & t_bit) != 0;
366 /* Search for the mapping symbol covering MEMADDR. If one is found,
367 return its type. Otherwise, return 0. If START is non-NULL,
368 set *START to the location of the mapping symbol. */
371 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
373 struct obj_section *sec;
375 /* If there are mapping symbols, consult them. */
376 sec = find_pc_section (memaddr);
379 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd);
382 unsigned int section_idx = sec->the_bfd_section->index;
383 arm_mapping_symbol_vec &map
384 = data->section_maps[section_idx];
386 /* Sort the vector on first use. */
387 if (!data->section_maps_sorted[section_idx])
389 std::sort (map.begin (), map.end ());
390 data->section_maps_sorted[section_idx] = true;
393 struct arm_mapping_symbol map_key
394 = { memaddr - obj_section_addr (sec), 0 };
395 arm_mapping_symbol_vec::const_iterator it
396 = std::lower_bound (map.begin (), map.end (), map_key);
398 /* std::lower_bound finds the earliest ordered insertion
399 point. If the symbol at this position starts at this exact
400 address, we use that; otherwise, the preceding
401 mapping symbol covers this address. */
404 if (it->value == map_key.value)
407 *start = it->value + obj_section_addr (sec);
412 if (it > map.begin ())
414 arm_mapping_symbol_vec::const_iterator prev_it
418 *start = prev_it->value + obj_section_addr (sec);
419 return prev_it->type;
427 /* Determine if the program counter specified in MEMADDR is in a Thumb
428 function. This function should be called for addresses unrelated to
429 any executing frame; otherwise, prefer arm_frame_is_thumb. */
432 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
434 struct bound_minimal_symbol sym;
436 arm_displaced_step_copy_insn_closure *dsc
437 = ((arm_displaced_step_copy_insn_closure * )
438 get_displaced_step_copy_insn_closure_by_addr (memaddr));
440 /* If we are checking the mode of a displaced instruction in the copy area,
441 the mode should be determined from the instruction at the original address. */
444 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
445 (unsigned long) dsc->insn_addr,
446 (unsigned long) memaddr);
447 memaddr = dsc->insn_addr;
450 /* If bit 0 of the address is set, assume this is a Thumb address. */
451 if (IS_THUMB_ADDR (memaddr))
454 /* If the user wants to override the symbol table, let them. */
455 if (strcmp (arm_force_mode_string, "arm") == 0)
457 if (strcmp (arm_force_mode_string, "thumb") == 0)
460 /* ARM v6-M and v7-M are always in Thumb mode. */
461 if (gdbarch_tdep (gdbarch)->is_m)
464 /* If there are mapping symbols, consult them. */
465 type = arm_find_mapping_symbol (memaddr, NULL);
469 /* Thumb functions have a "special" bit set in minimal symbols. */
470 sym = lookup_minimal_symbol_by_pc (memaddr);
472 return (MSYMBOL_IS_SPECIAL (sym.minsym));
474 /* If the user wants to override the fallback mode, let them. */
475 if (strcmp (arm_fallback_mode_string, "arm") == 0)
477 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
480 /* If we couldn't find any symbol, but we're talking to a running
481 target, then trust the current value of $cpsr. This lets
482 "display/i $pc" always show the correct mode (though if there is
483 a symbol table we will not reach here, so it still may not be
484 displayed in the mode in which it will be executed). */
485 if (target_has_registers ())
486 return arm_frame_is_thumb (get_current_frame ());
488 /* Otherwise we're out of luck; we assume ARM. */
492 /* Determine if the address specified equals any of these magic return
493 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
496 From ARMv6-M Reference Manual B1.5.8
497 Table B1-5 Exception return behavior
499 EXC_RETURN Return To Return Stack
500 0xFFFFFFF1 Handler mode Main
501 0xFFFFFFF9 Thread mode Main
502 0xFFFFFFFD Thread mode Process
504 From ARMv7-M Reference Manual B1.5.8
505 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
507 EXC_RETURN Return To Return Stack
508 0xFFFFFFF1 Handler mode Main
509 0xFFFFFFF9 Thread mode Main
510 0xFFFFFFFD Thread mode Process
512 Table B1-9 EXC_RETURN definition of exception return behavior, with
515 EXC_RETURN Return To Return Stack Frame Type
516 0xFFFFFFE1 Handler mode Main Extended
517 0xFFFFFFE9 Thread mode Main Extended
518 0xFFFFFFED Thread mode Process Extended
519 0xFFFFFFF1 Handler mode Main Basic
520 0xFFFFFFF9 Thread mode Main Basic
521 0xFFFFFFFD Thread mode Process Basic
523 For more details see "B1.5.8 Exception return behavior"
524 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
526 The ARMv8-M Architecture Technical Reference also adds the following
527 values for implementations without the Security Extension:
530 0xFFFFFFB0 Return to Handler mode.
531 0xFFFFFFB8 Return to Thread mode using the main stack.
532 0xFFFFFFBC Return to Thread mode using the process stack. */
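/* Reading the tables above, for example (illustrative): on ARMv7-M with the
   FP extension, an LR value of 0xFFFFFFED at exception return means "return
   to Thread mode, using the Process stack, with an Extended (FP) stack
   frame", while 0xFFFFFFF1 means "return to Handler mode, using the Main
   stack, with a Basic frame".  */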
535 arm_m_addr_is_magic (CORE_ADDR addr)
539 /* Values from ARMv8-M Architecture Technical Reference. */
543 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
544 the exception return behavior. */
551 /* Address is magic. */
555 /* Address is not magic. */
560 /* Remove useless bits from addresses in a running program. */
562 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
564 /* On M-profile devices, do not strip the low bit from EXC_RETURN
565 (the magic exception return address). */
566 if (gdbarch_tdep (gdbarch)->is_m
567 && arm_m_addr_is_magic (val))
571 return UNMAKE_THUMB_ADDR (val);
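/* Illustrative note (an assumption based on the legacy APCS-26 layout): in
   26-bit mode the PSR flags live in bits 26-31 and the mode bits in bits 0-1
   of R15, so the mask below keeps only the word-aligned 26-bit address,
   e.g. 0xf40081a7 & 0x03fffffc == 0x000081a4.  */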
573 return (val & 0x03fffffc);
576 /* Return 1 if PC is the start of a compiler helper function which
577 can be safely ignored during prologue skipping. IS_THUMB is true
578 if the function is known to be a Thumb function due to the way it
581 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
583 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
584 struct bound_minimal_symbol msym;
586 msym = lookup_minimal_symbol_by_pc (pc);
587 if (msym.minsym != NULL
588 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
589 && msym.minsym->linkage_name () != NULL)
591 const char *name = msym.minsym->linkage_name ();
593 /* The GNU linker's Thumb call stub to foo is named
595 if (strstr (name, "_from_thumb") != NULL)
598 /* On soft-float targets, __truncdfsf2 is called to convert promoted
599 arguments to their argument types in non-prototyped
601 if (startswith (name, "__truncdfsf2"))
603 if (startswith (name, "__aeabi_d2f"))
606 /* Internal functions related to thread-local storage. */
607 if (startswith (name, "__tls_get_addr"))
609 if (startswith (name, "__aeabi_read_tp"))
614 /* If we run against a stripped glibc, we may be unable to identify
615 special functions by name. Check for one important case,
616 __aeabi_read_tp, by comparing the *code* against the default
617 implementation (this is hand-written ARM assembler in glibc). */
620 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
621 == 0xe3e00a0f /* mov r0, #0xffff0fff */
622 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
623 == 0xe240f01f) /* sub pc, r0, #31 */
630 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
631 the first 16 bits of the instruction, and INSN2 is the second 16 bits of
633 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
634 ((bits ((insn1), 0, 3) << 12) \
635 | (bits ((insn1), 10, 10) << 11) \
636 | (bits ((insn2), 12, 14) << 8) \
637 | bits ((insn2), 0, 7))
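/* Worked example (illustrative): for the Thumb-2 instruction
   "movw r0, #0x1234" the halfwords are INSN1 == 0xf241 and INSN2 == 0x2034,
   giving imm4 == 0x1, i == 0, imm3 == 0x2 and imm8 == 0x34, so the macro
   yields (0x1 << 12) | (0 << 11) | (0x2 << 8) | 0x34 == 0x1234.  */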
639 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
640 the 32-bit instruction. */
641 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
642 ((bits ((insn), 16, 19) << 12) \
643 | bits ((insn), 0, 11))
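/* Worked example (illustrative): the ARM instruction "movw r0, #0x1234" is
   encoded as 0xe3010234, giving imm4 == 0x1 (bits 16-19) and imm12 == 0x234
   (bits 0-11), so the macro yields (0x1 << 12) | 0x234 == 0x1234.  */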
645 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
648 thumb_expand_immediate (unsigned int imm)
650 unsigned int count = imm >> 7;
658 return (imm & 0xff) | ((imm & 0xff) << 16);
660 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
662 return (imm & 0xff) | ((imm & 0xff) << 8)
663 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
666 return (0x80 | (imm & 0x7f)) << (32 - count);
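/* Worked examples (illustrative) of the mapping implemented above:
     imm == 0x0ab  ->  0x000000ab   (plain 8-bit value)
     imm == 0x1ab  ->  0x00ab00ab   (byte replicated into two halfwords)
     imm == 0x4ff  ->  (0x80 | 0x7f) << (32 - 9) == 0x7f800000
                                    (0xff rotated right by 9)  */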
669 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in the
670 epilogue, 0 otherwise. */
673 thumb_instruction_restores_sp (unsigned short insn)
675 return (insn == 0x46bd /* mov sp, r7 */
676 || (insn & 0xff80) == 0xb000 /* add sp, imm */
677 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
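/* Illustrative matches for the tests above: 0xb008 ("add sp, #32") satisfies
   the second test and 0xbd10 ("pop {r4, pc}") the third.  */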
680 /* Analyze a Thumb prologue, looking for a recognizable stack frame
681 and frame pointer. Scan until we encounter a store that could
682 clobber the stack frame unexpectedly, or an unknown instruction.
683 Return the last address which is definitely safe to skip for an
684 initial breakpoint. */
687 thumb_analyze_prologue (struct gdbarch *gdbarch,
688 CORE_ADDR start, CORE_ADDR limit,
689 struct arm_prologue_cache *cache)
691 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
692 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
696 CORE_ADDR unrecognized_pc = 0;
698 for (i = 0; i < 16; i++)
699 regs[i] = pv_register (i, 0);
700 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
702 while (start < limit)
706 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
708 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
713 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
716 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
717 whether to save LR (R14). */
718 mask = (insn & 0xff) | ((insn & 0x100) << 6);
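/* Illustrative example: "push {r4-r7, lr}" is encoded as 0xb5f0, so
   MASK == 0xf0 | (0x100 << 6) == 0x40f0, i.e. bits 4-7 (r4-r7) plus
   bit 14 (LR).  */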
720 /* Calculate offsets of saved R0-R7 and LR. */
721 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
722 if (mask & (1 << regno))
724 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
726 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
729 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
731 offset = (insn & 0x7f) << 2; /* get scaled offset */
732 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
735 else if (thumb_instruction_restores_sp (insn))
737 /* Don't scan past the epilogue. */
740 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
741 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
743 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
744 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
745 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
747 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
748 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
749 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
751 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
752 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
753 && pv_is_constant (regs[bits (insn, 3, 5)]))
754 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
755 regs[bits (insn, 6, 8)]);
756 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
757 && pv_is_constant (regs[bits (insn, 3, 6)]))
759 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
760 int rm = bits (insn, 3, 6);
761 regs[rd] = pv_add (regs[rd], regs[rm]);
763 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
765 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
766 int src_reg = (insn & 0x78) >> 3;
767 regs[dst_reg] = regs[src_reg];
769 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
771 /* Handle stores to the stack. Normally pushes are used,
772 but with GCC -mtpcs-frame, there may be other stores
773 in the prologue to create the frame. */
774 int regno = (insn >> 8) & 0x7;
777 offset = (insn & 0xff) << 2;
778 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
780 if (stack.store_would_trash (addr))
783 stack.store (addr, 4, regs[regno]);
785 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
787 int rd = bits (insn, 0, 2);
788 int rn = bits (insn, 3, 5);
791 offset = bits (insn, 6, 10) << 2;
792 addr = pv_add_constant (regs[rn], offset);
794 if (stack.store_would_trash (addr))
797 stack.store (addr, 4, regs[rd]);
799 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
800 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
801 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
802 /* Ignore stores of argument registers to the stack. */
804 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
805 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
806 /* Ignore block loads from the stack, potentially copying
807 parameters from memory. */
809 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
810 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
811 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
812 /* Similarly ignore single loads from the stack. */
814 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
815 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
816 /* Skip register copies, i.e. saves to another register
817 instead of the stack. */
819 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
820 /* Recognize constant loads; even with small stacks these are necessary
822 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
823 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
825 /* Constant pool loads, for the same reason. */
826 unsigned int constant;
829 loc = start + 4 + bits (insn, 0, 7) * 4;
830 constant = read_memory_unsigned_integer (loc, 4, byte_order);
831 regs[bits (insn, 8, 10)] = pv_constant (constant);
833 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
835 unsigned short inst2;
837 inst2 = read_code_unsigned_integer (start + 2, 2,
838 byte_order_for_code);
840 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
842 /* BL, BLX. Allow some special function calls when
843 skipping the prologue; GCC generates these before
844 storing arguments to the stack. */
846 int j1, j2, imm1, imm2;
848 imm1 = sbits (insn, 0, 10);
849 imm2 = bits (inst2, 0, 10);
850 j1 = bit (inst2, 13);
851 j2 = bit (inst2, 11);
853 offset = ((imm1 << 12) + (imm2 << 1));
854 offset ^= ((!j2) << 22) | ((!j1) << 23);
856 nextpc = start + 4 + offset;
857 /* For BLX make sure to clear the low bits. */
858 if (bit (inst2, 12) == 0)
859 nextpc = nextpc & 0xfffffffc;
861 if (!skip_prologue_function (gdbarch, nextpc,
862 bit (inst2, 12) != 0))
866 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
868 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
870 pv_t addr = regs[bits (insn, 0, 3)];
873 if (stack.store_would_trash (addr))
876 /* Calculate offsets of saved registers. */
877 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
878 if (inst2 & (1 << regno))
880 addr = pv_add_constant (addr, -4);
881 stack.store (addr, 4, regs[regno]);
885 regs[bits (insn, 0, 3)] = addr;
888 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
890 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
892 int regno1 = bits (inst2, 12, 15);
893 int regno2 = bits (inst2, 8, 11);
894 pv_t addr = regs[bits (insn, 0, 3)];
896 offset = inst2 & 0xff;
898 addr = pv_add_constant (addr, offset);
900 addr = pv_add_constant (addr, -offset);
902 if (stack.store_would_trash (addr))
905 stack.store (addr, 4, regs[regno1]);
906 stack.store (pv_add_constant (addr, 4),
910 regs[bits (insn, 0, 3)] = addr;
913 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
914 && (inst2 & 0x0c00) == 0x0c00
915 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
917 int regno = bits (inst2, 12, 15);
918 pv_t addr = regs[bits (insn, 0, 3)];
920 offset = inst2 & 0xff;
922 addr = pv_add_constant (addr, offset);
924 addr = pv_add_constant (addr, -offset);
926 if (stack.store_would_trash (addr))
929 stack.store (addr, 4, regs[regno]);
932 regs[bits (insn, 0, 3)] = addr;
935 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
936 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
938 int regno = bits (inst2, 12, 15);
941 offset = inst2 & 0xfff;
942 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
944 if (stack.store_would_trash (addr))
947 stack.store (addr, 4, regs[regno]);
950 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
951 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
952 /* Ignore stores of argument registers to the stack. */
955 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
956 && (inst2 & 0x0d00) == 0x0c00
957 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 /* Ignore stores of argument registers to the stack. */
961 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
963 && (inst2 & 0x8000) == 0x0000
964 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
965 /* Ignore block loads from the stack, potentially copying
966 parameters from memory. */
969 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Similarly ignore dual loads from the stack. */
975 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Similarly ignore single loads from the stack. */
981 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
982 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
983 /* Similarly ignore single loads from the stack. */
986 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
987 && (inst2 & 0x8000) == 0x0000)
989 unsigned int imm = ((bits (insn, 10, 10) << 11)
990 | (bits (inst2, 12, 14) << 8)
991 | bits (inst2, 0, 7));
993 regs[bits (inst2, 8, 11)]
994 = pv_add_constant (regs[bits (insn, 0, 3)],
995 thumb_expand_immediate (imm));
998 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
999 && (inst2 & 0x8000) == 0x0000)
1001 unsigned int imm = ((bits (insn, 10, 10) << 11)
1002 | (bits (inst2, 12, 14) << 8)
1003 | bits (inst2, 0, 7));
1005 regs[bits (inst2, 8, 11)]
1006 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1009 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1010 && (inst2 & 0x8000) == 0x0000)
1012 unsigned int imm = ((bits (insn, 10, 10) << 11)
1013 | (bits (inst2, 12, 14) << 8)
1014 | bits (inst2, 0, 7));
1016 regs[bits (inst2, 8, 11)]
1017 = pv_add_constant (regs[bits (insn, 0, 3)],
1018 - (CORE_ADDR) thumb_expand_immediate (imm));
1021 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1022 && (inst2 & 0x8000) == 0x0000)
1024 unsigned int imm = ((bits (insn, 10, 10) << 11)
1025 | (bits (inst2, 12, 14) << 8)
1026 | bits (inst2, 0, 7));
1028 regs[bits (inst2, 8, 11)]
1029 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1032 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1034 unsigned int imm = ((bits (insn, 10, 10) << 11)
1035 | (bits (inst2, 12, 14) << 8)
1036 | bits (inst2, 0, 7));
1038 regs[bits (inst2, 8, 11)]
1039 = pv_constant (thumb_expand_immediate (imm));
1042 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1045 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1047 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1050 else if (insn == 0xea5f /* mov.w Rd,Rm */
1051 && (inst2 & 0xf0f0) == 0)
1053 int dst_reg = (inst2 & 0x0f00) >> 8;
1054 int src_reg = inst2 & 0xf;
1055 regs[dst_reg] = regs[src_reg];
1058 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1060 /* Constant pool loads. */
1061 unsigned int constant;
1064 offset = bits (inst2, 0, 11);
1066 loc = start + 4 + offset;
1068 loc = start + 4 - offset;
1070 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1071 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1074 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1076 /* Constant pool loads. */
1077 unsigned int constant;
1080 offset = bits (inst2, 0, 7) << 2;
1082 loc = start + 4 + offset;
1084 loc = start + 4 - offset;
1086 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1087 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1089 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1090 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1093 else if (thumb2_instruction_changes_pc (insn, inst2))
1095 /* Don't scan past anything that might change control flow. */
1100 /* The optimizer might shove anything into the prologue,
1101 so we just skip what we don't recognize. */
1102 unrecognized_pc = start;
1107 else if (thumb_instruction_changes_pc (insn))
1109 /* Don't scan past anything that might change control flow. */
1114 /* The optimizer might shove anything into the prologue,
1115 so we just skip what we don't recognize. */
1116 unrecognized_pc = start;
1123 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1124 paddress (gdbarch, start));
1126 if (unrecognized_pc == 0)
1127 unrecognized_pc = start;
1130 return unrecognized_pc;
1132 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1134 /* Frame pointer is fp. Frame size is constant. */
1135 cache->framereg = ARM_FP_REGNUM;
1136 cache->framesize = -regs[ARM_FP_REGNUM].k;
1138 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1140 /* Frame pointer is r7. Frame size is constant. */
1141 cache->framereg = THUMB_FP_REGNUM;
1142 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1146 /* Try the stack pointer... this is a bit desperate. */
1147 cache->framereg = ARM_SP_REGNUM;
1148 cache->framesize = -regs[ARM_SP_REGNUM].k;
1151 for (i = 0; i < 16; i++)
1152 if (stack.find_reg (gdbarch, i, &offset))
1153 cache->saved_regs[i].addr = offset;
1155 return unrecognized_pc;
1159 /* Try to analyze the instructions starting from PC, which load the symbol
1160 __stack_chk_guard. Return the address of the instruction after loading this
1161 symbol, set the destination register number in *DESTREG, and set the size of
1162 the loading instructions in *OFFSET. Return 0 if the instructions are
1166 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1167 unsigned int *destreg, int *offset)
1169 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1170 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1171 unsigned int low, high, address;
1176 unsigned short insn1
1177 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1179 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1181 *destreg = bits (insn1, 8, 10);
1183 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1184 address = read_memory_unsigned_integer (address, 4,
1185 byte_order_for_code);
1187 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1189 unsigned short insn2
1190 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1192 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1195 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1197 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1199 /* movt Rd, #const */
1200 if ((insn1 & 0xfbc0) == 0xf2c0)
1202 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1203 *destreg = bits (insn2, 8, 11);
1205 address = (high << 16 | low);
1212 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1214 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1216 address = bits (insn, 0, 11) + pc + 8;
1217 address = read_memory_unsigned_integer (address, 4,
1218 byte_order_for_code);
1220 *destreg = bits (insn, 12, 15);
1223 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1225 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1228 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1230 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1232 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1233 *destreg = bits (insn, 12, 15);
1235 address = (high << 16 | low);
1243 /* Try to skip the sequence of instructions used for the stack protector. If PC
1244 points to the first instruction of this sequence, return the address of the
1245 first instruction after the sequence; otherwise, return the original PC.
1247 On ARM, this sequence consists of three main steps:
1248 Step 1: load the symbol __stack_chk_guard,
1249 Step 2: load from the address of __stack_chk_guard,
1250 Step 3: store it somewhere else.
1252 Usually, the instructions in steps 2 and 3 are the same across ARM
1253 architectures: step 2 is a single instruction, 'ldr Rx, [Rn, #0]', and
1254 step 3 is also a single instruction, 'str Rx, [r7, #immd]'. However, the
1255 instructions in step 1 vary between ARM architectures. On ARMv7, they are:
1258 movw Rn, #:lower16:__stack_chk_guard
1259 movt Rn, #:upper16:__stack_chk_guard
1266 .word __stack_chk_guard
1268 Since ldr/str are very common instructions, we can't use them as the
1269 'fingerprint' or 'signature' of the stack protector sequence. Instead we use
1270 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if
1271 not stripped, as the 'fingerprint' of a stack protector code sequence. */
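/* Illustrative example (assuming a Thumb-2 build with -fstack-protector and
   r7 as the frame pointer): a protected prologue might contain

       movw  r3, #:lower16:__stack_chk_guard
       movt  r3, #:upper16:__stack_chk_guard
       ldr   r3, [r3, #0]
       str   r3, [r7, #12]

   in which case arm_skip_stack_protector returns the address of the
   instruction following the str.  */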
1274 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1276 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1277 unsigned int basereg;
1278 struct bound_minimal_symbol stack_chk_guard;
1280 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1283 /* Try to parse the instructions in Step 1. */
1284 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1289 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1290 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1291 Otherwise, this sequence cannot be for the stack protector. */
1292 if (stack_chk_guard.minsym == NULL
1293 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1298 unsigned int destreg;
1300 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1302 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1303 if ((insn & 0xf800) != 0x6800)
1305 if (bits (insn, 3, 5) != basereg)
1307 destreg = bits (insn, 0, 2);
1309 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1310 byte_order_for_code);
1311 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1312 if ((insn & 0xf800) != 0x6000)
1314 if (destreg != bits (insn, 0, 2))
1319 unsigned int destreg;
1321 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1323 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1324 if ((insn & 0x0e500000) != 0x04100000)
1326 if (bits (insn, 16, 19) != basereg)
1328 destreg = bits (insn, 12, 15);
1329 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1330 insn = read_code_unsigned_integer (pc + offset + 4,
1331 4, byte_order_for_code);
1332 if ((insn & 0x0e500000) != 0x04000000)
1334 if (bits (insn, 12, 15) != destreg)
1337 /* The total size of the two ldr/str instructions is 4 on Thumb-2, while 8
1340 return pc + offset + 4;
1342 return pc + offset + 8;
1345 /* Advance the PC across any function entry prologue instructions to
1346 reach some "real" code.
1348 The APCS (ARM Procedure Call Standard) defines the following
1352 [stmfd sp!, {a1,a2,a3,a4}]
1353 stmfd sp!, {...,fp,ip,lr,pc}
1354 [stfe f7, [sp, #-12]!]
1355 [stfe f6, [sp, #-12]!]
1356 [stfe f5, [sp, #-12]!]
1357 [stfe f4, [sp, #-12]!]
1358 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1361 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1363 CORE_ADDR func_addr, limit_pc;
1365 /* See if we can determine the end of the prologue via the symbol table.
1366 If so, then return either PC, or the PC after the prologue, whichever
1368 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1370 CORE_ADDR post_prologue_pc
1371 = skip_prologue_using_sal (gdbarch, func_addr);
1372 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1374 if (post_prologue_pc)
1376 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1379 /* GCC always emits a line note before the prologue and another
1380 one after, even if the two are at the same address or on the
1381 same line. Take advantage of this so that we do not need to
1382 know every instruction that might appear in the prologue. We
1383 will have producer information for most binaries; if it is
1384 missing (e.g. for -gstabs), assume the GNU tools. */
1385 if (post_prologue_pc
1387 || COMPUNIT_PRODUCER (cust) == NULL
1388 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1389 || producer_is_llvm (COMPUNIT_PRODUCER (cust))))
1390 return post_prologue_pc;
1392 if (post_prologue_pc != 0)
1394 CORE_ADDR analyzed_limit;
1396 /* For non-GCC compilers, make sure the entire line is an
1397 acceptable prologue; GDB will round this function's
1398 return value up to the end of the following line so we
1399 can not skip just part of a line (and we do not want to).
1401 RealView does not treat the prologue specially, but does
1402 associate prologue code with the opening brace; so this
1403 lets us skip the first line if we think it is the opening
1405 if (arm_pc_is_thumb (gdbarch, func_addr))
1406 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1407 post_prologue_pc, NULL);
1410 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1411 NULL, target_arm_instruction_reader ());
1413 if (analyzed_limit != post_prologue_pc)
1416 return post_prologue_pc;
1420 /* Can't determine prologue from the symbol table, need to examine
1423 /* Find an upper limit on the function prologue using the debug
1424 information. If the debug information could not be used to provide
1425 that bound, then use an arbitrarily large number as the upper bound. */
1426 /* Like arm_scan_prologue, stop no later than pc + 64. */
1427 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1429 limit_pc = pc + 64; /* Magic. */
1432 /* Check if this is Thumb code. */
1433 if (arm_pc_is_thumb (gdbarch, pc))
1434 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1436 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1437 target_arm_instruction_reader ());
1441 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1442 This function decodes a Thumb function prologue to determine:
1443 1) the size of the stack frame
1444 2) which registers are saved on it
1445 3) the offsets of saved regs
1446 4) the offset from the stack pointer to the frame pointer
1448 A typical Thumb function prologue would create this stack frame
1449 (offsets relative to FP)
1450 old SP -> 24 stack parameters
1453 R7 -> 0 local variables (16 bytes)
1454 SP -> -12 additional stack space (12 bytes)
1455 The frame size would thus be 36 bytes, and the frame offset would be
1456 12 bytes. The frame register is R7.
1458 The comments for thumb_skip_prolog() describe the algorithm we use
1459 to detect the end of the prologue. */
1463 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1464 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1466 CORE_ADDR prologue_start;
1467 CORE_ADDR prologue_end;
1469 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1472 /* See comment in arm_scan_prologue for an explanation of
1474 if (prologue_end > prologue_start + 64)
1476 prologue_end = prologue_start + 64;
1480 /* We're in the boondocks: we have no idea where the start of the
1484 prologue_end = std::min (prologue_end, prev_pc);
1486 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1489 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0
1493 arm_instruction_restores_sp (unsigned int insn)
1495 if (bits (insn, 28, 31) != INST_NV)
1497 if ((insn & 0x0df0f000) == 0x0080d000
1498 /* ADD SP (register or immediate). */
1499 || (insn & 0x0df0f000) == 0x0040d000
1500 /* SUB SP (register or immediate). */
1501 || (insn & 0x0ffffff0) == 0x01a0d000
1503 || (insn & 0x0fff0000) == 0x08bd0000
1505 || (insn & 0x0fff0000) == 0x049d0000)
1506 /* POP of a single register. */
1513 /* Implement immediate value decoding, as described in section A5.2.4
1514 (Modified immediate constants in ARM instructions) of the ARM Architecture
1515 Reference Manual (ARMv7-A and ARMv7-R edition). */
1518 arm_expand_immediate (uint32_t imm)
1520 /* Immediate values are 12 bits long. */
1521 gdb_assert ((imm & 0xfffff000) == 0);
1523 uint32_t unrotated_value = imm & 0xff;
1524 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1526 if (rotate_amount == 0)
1527 return unrotated_value;
1529 return ((unrotated_value >> rotate_amount)
1530 | (unrotated_value << (32 - rotate_amount)));
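/* Worked example (illustrative): the modified immediate 0x4ff encodes the
   byte 0xff rotated right by 8 bits (rotate_amount == (0x400 >> 7) == 8),
   so the result is (0xff >> 8) | (0xff << 24) == 0xff000000.  */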
1533 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1534 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1535 fill it in. Return the first address not recognized as a prologue
1538 We recognize all the instructions typically found in ARM prologues,
1539 plus harmless instructions which can be skipped (either for analysis
1540 purposes, or a more restrictive set that can be skipped when finding
1541 the end of the prologue). */
1544 arm_analyze_prologue (struct gdbarch *gdbarch,
1545 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1546 struct arm_prologue_cache *cache,
1547 const arm_instruction_reader &insn_reader)
1549 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1551 CORE_ADDR offset, current_pc;
1552 pv_t regs[ARM_FPS_REGNUM];
1553 CORE_ADDR unrecognized_pc = 0;
1555 /* Search the prologue looking for instructions that set up the
1556 frame pointer, adjust the stack pointer, and save registers.
1558 Be careful, however, and if it doesn't look like a prologue,
1559 don't try to scan it. If, for instance, a frameless function
1560 begins with stmfd sp!, then we will tell ourselves there is
1561 a frame, which will confuse stack traceback, as well as "finish"
1562 and other operations that rely on a knowledge of the stack
1565 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1566 regs[regno] = pv_register (regno, 0);
1567 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1569 for (current_pc = prologue_start;
1570 current_pc < prologue_end;
1573 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1575 if (insn == 0xe1a0c00d) /* mov ip, sp */
1577 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1580 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1581 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1583 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1584 int rd = bits (insn, 12, 15);
1585 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1588 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1589 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1591 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1592 int rd = bits (insn, 12, 15);
1593 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1596 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1599 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1601 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1602 stack.store (regs[ARM_SP_REGNUM], 4,
1603 regs[bits (insn, 12, 15)]);
1606 else if ((insn & 0xffff0000) == 0xe92d0000)
1607 /* stmfd sp!, {..., fp, ip, lr, pc}
1609 stmfd sp!, {a1, a2, a3, a4} */
1611 int mask = insn & 0xffff;
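/* Illustrative example: "stmfd sp!, {fp, ip, lr, pc}" is encoded as
   0xe92dd800, so MASK == 0xd800, i.e. bits 11, 12, 14 and 15.  */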
1613 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1616 /* Calculate offsets of saved registers. */
1617 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1618 if (mask & (1 << regno))
1621 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1622 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1625 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1626 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1627 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1629 /* No need to add this to saved_regs -- it's just an arg reg. */
1632 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1633 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1634 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1636 /* No need to add this to saved_regs -- it's just an arg reg. */
1639 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1641 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1643 /* No need to add this to saved_regs -- it's just arg regs. */
1646 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1648 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1649 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1651 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1653 uint32_t imm = arm_expand_immediate (insn & 0xfff);
1654 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1656 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1658 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1660 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1663 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1664 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1665 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
1667 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1669 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1671 int n_saved_fp_regs;
1672 unsigned int fp_start_reg, fp_bound_reg;
1674 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1677 if ((insn & 0x800) == 0x800) /* N0 is set */
1679 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1680 n_saved_fp_regs = 3;
1682 n_saved_fp_regs = 1;
1686 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1687 n_saved_fp_regs = 2;
1689 n_saved_fp_regs = 4;
1692 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1693 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1694 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1696 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1697 stack.store (regs[ARM_SP_REGNUM], 12,
1698 regs[fp_start_reg++]);
1701 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1703 /* Allow some special function calls when skipping the
1704 prologue; GCC generates these before storing arguments to
1706 CORE_ADDR dest = BranchDest (current_pc, insn);
1708 if (skip_prologue_function (gdbarch, dest, 0))
1713 else if ((insn & 0xf0000000) != 0xe0000000)
1714 break; /* Condition not true, exit early. */
1715 else if (arm_instruction_changes_pc (insn))
1716 /* Don't scan past anything that might change control flow. */
1718 else if (arm_instruction_restores_sp (insn))
1720 /* Don't scan past the epilogue. */
1723 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1724 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1725 /* Ignore block loads from the stack, potentially copying
1726 parameters from memory. */
1728 else if ((insn & 0xfc500000) == 0xe4100000
1729 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1730 /* Similarly ignore single loads from the stack. */
1732 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1733 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1734 register instead of the stack. */
1738 /* The optimizer might shove anything into the prologue, if
1739 we build up cache (cache != NULL) from scanning prologue,
1740 we just skip what we don't recognize and scan further to
1741 make cache as complete as possible. However, if we skip
1742 prologue, we'll stop immediately on unrecognized
1744 unrecognized_pc = current_pc;
1752 if (unrecognized_pc == 0)
1753 unrecognized_pc = current_pc;
1757 int framereg, framesize;
1759 /* The frame size is just the distance from the frame register
1760 to the original stack pointer. */
1761 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1763 /* Frame pointer is fp. */
1764 framereg = ARM_FP_REGNUM;
1765 framesize = -regs[ARM_FP_REGNUM].k;
1769 /* Try the stack pointer... this is a bit desperate. */
1770 framereg = ARM_SP_REGNUM;
1771 framesize = -regs[ARM_SP_REGNUM].k;
1774 cache->framereg = framereg;
1775 cache->framesize = framesize;
1777 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1778 if (stack.find_reg (gdbarch, regno, &offset))
1779 cache->saved_regs[regno].addr = offset;
1783 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1784 paddress (gdbarch, unrecognized_pc));
1786 return unrecognized_pc;
1790 arm_scan_prologue (struct frame_info *this_frame,
1791 struct arm_prologue_cache *cache)
1793 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1794 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1795 CORE_ADDR prologue_start, prologue_end;
1796 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1797 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1799 /* Assume there is no frame until proven otherwise. */
1800 cache->framereg = ARM_SP_REGNUM;
1801 cache->framesize = 0;
1803 /* Check for Thumb prologue. */
1804 if (arm_frame_is_thumb (this_frame))
1806 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1810 /* Find the function prologue. If we can't find the function in
1811 the symbol table, peek in the stack frame to find the PC. */
1812 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1815 /* One way to find the end of the prologue (which works well
1816 for unoptimized code) is to do the following:
1818 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1821 prologue_end = prev_pc;
1822 else if (sal.end < prologue_end)
1823 prologue_end = sal.end;
1825 This mechanism is very accurate so long as the optimizer
1826 doesn't move any instructions from the function body into the
1827 prologue. If this happens, sal.end will be the last
1828 instruction in the first hunk of prologue code just before
1829 the first instruction that the scheduler has moved from
1830 the body to the prologue.
1832 In order to make sure that we scan all of the prologue
1833 instructions, we use a slightly less accurate mechanism which
1834 may scan more than necessary. To help compensate for this
1835 lack of accuracy, the prologue scanning loop below contains
1836 several clauses which'll cause the loop to terminate early if
1837 an implausible prologue instruction is encountered.
1843 is a suitable endpoint since it accounts for the largest
1844 possible prologue plus up to five instructions inserted by
1847 if (prologue_end > prologue_start + 64)
1849 prologue_end = prologue_start + 64; /* See above. */
1854 /* We have no symbol information. Our only option is to assume this
1855 function has a standard stack frame and the normal frame register.
1856 Then, we can find the value of our frame pointer on entrance to
1857 the callee (or at the present moment if this is the innermost frame).
1858 The value stored there should be the address of the stmfd + 8. */
1859 CORE_ADDR frame_loc;
1860 ULONGEST return_value;
1862 /* AAPCS does not use a frame register, so we can abort here. */
1863 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_AAPCS)
1866 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1867 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1872 prologue_start = gdbarch_addr_bits_remove
1873 (gdbarch, return_value) - 8;
1874 prologue_end = prologue_start + 64; /* See above. */
1878 if (prev_pc < prologue_end)
1879 prologue_end = prev_pc;
1881 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
1882 target_arm_instruction_reader ());
1885 static struct arm_prologue_cache *
1886 arm_make_prologue_cache (struct frame_info *this_frame)
1889 struct arm_prologue_cache *cache;
1890 CORE_ADDR unwound_fp;
1892 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1893 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1895 arm_scan_prologue (this_frame, cache);
1897 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1898 if (unwound_fp == 0)
1901 cache->prev_sp = unwound_fp + cache->framesize;
1903 /* Calculate actual addresses of saved registers using offsets
1904 determined by arm_scan_prologue. */
1905 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1906 if (trad_frame_addr_p (cache->saved_regs, reg))
1907 cache->saved_regs[reg].addr += cache->prev_sp;
1912 /* Implementation of the stop_reason hook for arm_prologue frames. */
1914 static enum unwind_stop_reason
1915 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1918 struct arm_prologue_cache *cache;
1921 if (*this_cache == NULL)
1922 *this_cache = arm_make_prologue_cache (this_frame);
1923 cache = (struct arm_prologue_cache *) *this_cache;
1925 /* This is meant to halt the backtrace at "_start". */
1926 pc = get_frame_pc (this_frame);
1927 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1928 return UNWIND_OUTERMOST;
1930 /* If we've hit a wall, stop. */
1931 if (cache->prev_sp == 0)
1932 return UNWIND_OUTERMOST;
1934 return UNWIND_NO_REASON;
1937 /* Our frame ID for a normal frame is the current function's starting PC
1938 and the caller's SP when we were called. */
1941 arm_prologue_this_id (struct frame_info *this_frame,
1943 struct frame_id *this_id)
1945 struct arm_prologue_cache *cache;
1949 if (*this_cache == NULL)
1950 *this_cache = arm_make_prologue_cache (this_frame);
1951 cache = (struct arm_prologue_cache *) *this_cache;
1953 /* Use function start address as part of the frame ID. If we cannot
1954 identify the start address (due to missing symbol information),
1955 fall back to just using the current PC. */
1956 pc = get_frame_pc (this_frame);
1957 func = get_frame_func (this_frame);
1961 id = frame_id_build (cache->prev_sp, func);
1965 static struct value *
1966 arm_prologue_prev_register (struct frame_info *this_frame,
1970 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1971 struct arm_prologue_cache *cache;
1973 if (*this_cache == NULL)
1974 *this_cache = arm_make_prologue_cache (this_frame);
1975 cache = (struct arm_prologue_cache *) *this_cache;
1977 /* If we are asked to unwind the PC, then we need to return the LR
1978 instead. The prologue may save PC, but it will point into this
1979 frame's prologue, not the next frame's resume location. Also
1980 strip the saved T bit. A valid LR may have the low bit set, but
1981 a valid PC never does. */
1982 if (prev_regnum == ARM_PC_REGNUM)
1986 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1987 return frame_unwind_got_constant (this_frame, prev_regnum,
1988 arm_addr_bits_remove (gdbarch, lr));
1991 /* SP is generally not saved to the stack, but this frame is
1992 identified by the next frame's stack pointer at the time of the call.
1993 The value was already reconstructed into PREV_SP. */
1994 if (prev_regnum == ARM_SP_REGNUM)
1995 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1997 /* The CPSR may have been changed by the call instruction and by the
1998 called function. The only bit we can reconstruct is the T bit,
1999 by checking the low bit of LR as of the call. This is a reliable
2000 indicator of Thumb-ness except for some ARM v4T pre-interworking
2001 Thumb code, which could get away with a clear low bit as long as
2002 the called function did not use bx. Guess that all other
2003 bits are unchanged; the condition flags are presumably lost,
2004 but the processor status is likely valid. */
2005 if (prev_regnum == ARM_PS_REGNUM)
2008 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2010 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2011 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2012 if (IS_THUMB_ADDR (lr))
2016 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2019 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2023 struct frame_unwind arm_prologue_unwind = {
2025 arm_prologue_unwind_stop_reason,
2026 arm_prologue_this_id,
2027 arm_prologue_prev_register,
2029 default_frame_sniffer
2032 /* Maintain a list of ARM exception table entries per objfile, similar to the
2033 list of mapping symbols. We only cache entries for standard ARM-defined
2034 personality routines; the cache will contain only the frame unwinding
2035 instructions associated with the entry (not the descriptors). */
2037 struct arm_exidx_entry
2042 bool operator< (const arm_exidx_entry &other) const
2044 return addr < other.addr;
2048 struct arm_exidx_data
2050 std::vector<std::vector<arm_exidx_entry>> section_maps;
2053 /* Per-BFD key to store exception handling information. */
2054 static const struct bfd_key<arm_exidx_data> arm_exidx_data_key;
2056 static struct obj_section *
2057 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2059 struct obj_section *osect;
2061 ALL_OBJFILE_OSECTIONS (objfile, osect)
2062 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2064 bfd_vma start, size;
2065 start = bfd_section_vma (osect->the_bfd_section);
2066 size = bfd_section_size (osect->the_bfd_section);
2068 if (start <= vma && vma < start + size)
2075 /* Parse contents of exception table and exception index sections
2076 of OBJFILE, and fill in the exception table entry cache.
2078 For each entry that refers to a standard ARM-defined personality
2079 routine, extract the frame unwinding instructions (from either
2080 the index or the table section). The unwinding instructions are normalized by:
2082 - extracting them from the rest of the table data
2083 - converting to host endianness
2084 - appending the implicit 0xb0 ("Finish") code
2086 The extracted and normalized instructions are stored for later
2087 retrieval by the arm_find_exidx_entry routine. */
2090 arm_exidx_new_objfile (struct objfile *objfile)
2092 struct arm_exidx_data *data;
2093 asection *exidx, *extab;
2094 bfd_vma exidx_vma = 0, extab_vma = 0;
2097 /* If we've already touched this file, do nothing. */
2098 if (!objfile || arm_exidx_data_key.get (objfile->obfd) != NULL)
2101 /* Read contents of exception table and index. */
2102 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2103 gdb::byte_vector exidx_data;
2106 exidx_vma = bfd_section_vma (exidx);
2107 exidx_data.resize (bfd_section_size (exidx));
2109 if (!bfd_get_section_contents (objfile->obfd, exidx,
2110 exidx_data.data (), 0,
2111 exidx_data.size ()))
2115 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2116 gdb::byte_vector extab_data;
2119 extab_vma = bfd_section_vma (extab);
2120 extab_data.resize (bfd_section_size (extab));
2122 if (!bfd_get_section_contents (objfile->obfd, extab,
2123 extab_data.data (), 0,
2124 extab_data.size ()))
2128 /* Allocate exception table data structure. */
2129 data = arm_exidx_data_key.emplace (objfile->obfd);
2130 data->section_maps.resize (objfile->obfd->section_count);
2132 /* Fill in exception table. */
2133 for (i = 0; i < exidx_data.size () / 8; i++)
2135 struct arm_exidx_entry new_exidx_entry;
2136 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2137 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2138 exidx_data.data () + i * 8 + 4);
2139 bfd_vma addr = 0, word = 0;
2140 int n_bytes = 0, n_words = 0;
2141 struct obj_section *sec;
2142 gdb_byte *entry = NULL;
2144 /* Extract address of start of function. */
2145 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2146 idx += exidx_vma + i * 8;
2148 /* Find section containing function and compute section offset. */
2149 sec = arm_obj_section_from_vma (objfile, idx);
2152 idx -= bfd_section_vma (sec->the_bfd_section);
2154 /* Determine address of exception table entry. */
2157 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2159 else if ((val & 0xff000000) == 0x80000000)
2161 /* Exception table entry embedded in .ARM.exidx
2162 -- must be short form. */
2166 else if (!(val & 0x80000000))
2168 /* Exception table entry in .ARM.extab. */
2169 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2170 addr += exidx_vma + i * 8 + 4;
2172 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2174 word = bfd_h_get_32 (objfile->obfd,
2175 extab_data.data () + addr - extab_vma);
2178 if ((word & 0xff000000) == 0x80000000)
2183 else if ((word & 0xff000000) == 0x81000000
2184 || (word & 0xff000000) == 0x82000000)
2188 n_words = ((word >> 16) & 0xff);
2190 else if (!(word & 0x80000000))
2193 struct obj_section *pers_sec;
2194 int gnu_personality = 0;
2196 /* Custom personality routine. */
2197 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2198 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2200 /* Check whether we've got one of the variants of the
2201 GNU personality routines. */
2202 pers_sec = arm_obj_section_from_vma (objfile, pers);
2205 static const char *personality[] =
2207 "__gcc_personality_v0",
2208 "__gxx_personality_v0",
2209 "__gcj_personality_v0",
2210 "__gnu_objc_personality_v0",
2214 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2217 for (k = 0; personality[k]; k++)
2218 if (lookup_minimal_symbol_by_pc_name
2219 (pc, personality[k], objfile))
2221 gnu_personality = 1;
2226 /* If so, the next word contains a word count in the high
2227 byte, followed by the same unwind instructions as the
2228 pre-defined forms. */
2230 && addr + 4 <= extab_vma + extab_data.size ())
2232 word = bfd_h_get_32 (objfile->obfd,
2234 + addr - extab_vma));
2237 n_words = ((word >> 24) & 0xff);
2243 /* Sanity check address. */
2245 if (addr < extab_vma
2246 || addr + 4 * n_words > extab_vma + extab_data.size ())
2247 n_words = n_bytes = 0;
2249 /* The unwind instructions reside in WORD (only the N_BYTES least
2250 significant bytes are valid), followed by N_WORDS words in the
2251 extab section starting at ADDR. */
2252 if (n_bytes || n_words)
2255 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2256 n_bytes + n_words * 4 + 1);
2259 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2263 word = bfd_h_get_32 (objfile->obfd,
2264 extab_data.data () + addr - extab_vma);
2267 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2268 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2269 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2270 *p++ = (gdb_byte) (word & 0xff);
2273 /* Implied "Finish" to terminate the list. */
2277 /* Push the entry onto the vector. Entries are guaranteed to always
2278 appear in order of increasing addresses. */
2279 new_exidx_entry.addr = idx;
2280 new_exidx_entry.entry = entry;
2281 data->section_maps[sec->the_bfd_section->index].push_back
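/* Illustrative sketch, not built: the sign extension used above when decoding
   the prel31 fields of .ARM.exidx entries.  Each field holds a 31-bit signed
   offset relative to the word's own address; the xor/subtract pair below
   propagates bit 30 into bit 31, mirroring the expressions used above.  The
   helper name and the addresses are invented for this example.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
example_prel31_sign_extend (uint32_t word)
{
  return ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
}

int
main (void)
{
  /* A small positive offset stays as-is.  */
  assert (example_prel31_sign_extend (0x00000010) == 0x00000010);
  /* Bit 30 set means a negative offset: 0x7ffffff0 is -16.  */
  assert (example_prel31_sign_extend (0x7ffffff0) == (uint32_t) -16);
  /* Resolving an entry: the offset is added to the field's own address.  */
  uint32_t entry_addr = 0x00010000;
  assert (entry_addr + example_prel31_sign_extend (0x7ffffff0) == 0x0000fff0);
  return 0;
}
#endif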
2286 /* Search for the exception table entry covering MEMADDR. If one is found,
2287 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2288 set *START to the start of the region covered by this entry. */
2291 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2293 struct obj_section *sec;
2295 sec = find_pc_section (memaddr);
2298 struct arm_exidx_data *data;
2299 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2301 data = arm_exidx_data_key.get (sec->objfile->obfd);
2304 std::vector<arm_exidx_entry> &map
2305 = data->section_maps[sec->the_bfd_section->index];
2308 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2310 /* std::lower_bound finds the earliest ordered insertion
2311 point. If the following symbol starts at this exact
2312 address, we use that; otherwise, the preceding
2313 exception table entry covers this address. */
2314 if (idx < map.end ())
2316 if (idx->addr == map_key.addr)
2319 *start = idx->addr + obj_section_addr (sec);
2324 if (idx > map.begin ())
2328 *start = idx->addr + obj_section_addr (sec);
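/* Illustrative sketch, not built: the lookup pattern used above.  Entries are
   sorted by start address; std::lower_bound returns the first entry not less
   than the search key, so an exact hit is used directly, and otherwise we step
   back to the entry that starts before the address.  Addresses are invented.  */
#if 0
#include <algorithm>
#include <cassert>
#include <vector>

int
main ()
{
  std::vector<unsigned> starts = { 0x1000, 0x2000, 0x3000 };

  /* Exact hit: 0x2000 is covered by the entry starting at 0x2000.  */
  auto it = std::lower_bound (starts.begin (), starts.end (), 0x2000u);
  assert (it != starts.end () && *it == 0x2000u);

  /* In-between address: 0x2100 is covered by the preceding entry.  */
  it = std::lower_bound (starts.begin (), starts.end (), 0x2100u);
  assert (it != starts.begin ());
  assert (*(it - 1) == 0x2000u);
  return 0;
}
#endif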
2338 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2339 instruction list from the ARM exception table entry ENTRY, allocate and
2340 return a prologue cache structure describing how to unwind this frame.
2342 Return NULL if the unwinding instruction list contains a "spare",
2343 "reserved" or "refuse to unwind" instruction as defined in section
2344 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2345 for the ARM Architecture" document. */
2347 static struct arm_prologue_cache *
2348 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2353 struct arm_prologue_cache *cache;
2354 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2355 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2361 /* Whenever we reload SP, we have to retrieve its actual
2362 value in the current frame. */
2365 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2367 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2368 vsp = get_frame_register_unsigned (this_frame, reg);
2372 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2373 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2379 /* Decode next unwind instruction. */
2382 if ((insn & 0xc0) == 0)
2384 int offset = insn & 0x3f;
2385 vsp += (offset << 2) + 4;
2387 else if ((insn & 0xc0) == 0x40)
2389 int offset = insn & 0x3f;
2390 vsp -= (offset << 2) + 4;
2392 else if ((insn & 0xf0) == 0x80)
2394 int mask = ((insn & 0xf) << 8) | *entry++;
2397 /* The special case of an all-zero mask identifies
2398 "Refuse to unwind". We return NULL to fall back
2399 to the prologue analyzer. */
2403 /* Pop registers r4..r15 under mask. */
2404 for (i = 0; i < 12; i++)
2405 if (mask & (1 << i))
2407 cache->saved_regs[4 + i].addr = vsp;
2411 /* Special-case popping SP -- we need to reload vsp. */
2412 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2415 else if ((insn & 0xf0) == 0x90)
2417 int reg = insn & 0xf;
2419 /* Reserved cases. */
2420 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2423 /* Set SP from another register and mark VSP for reload. */
2424 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2427 else if ((insn & 0xf0) == 0xa0)
2429 int count = insn & 0x7;
2430 int pop_lr = (insn & 0x8) != 0;
2433 /* Pop r4..r[4+count]. */
2434 for (i = 0; i <= count; i++)
2436 cache->saved_regs[4 + i].addr = vsp;
2440 /* If indicated by flag, pop LR as well. */
2443 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2447 else if (insn == 0xb0)
2449 /* We could only have updated PC by popping into it; if so, it
2450 will show up as an address. Otherwise, copy LR into PC. */
2451 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2452 cache->saved_regs[ARM_PC_REGNUM]
2453 = cache->saved_regs[ARM_LR_REGNUM];
2458 else if (insn == 0xb1)
2460 int mask = *entry++;
2463 /* An all-zero mask or a mask >= 16 is "spare". */
2464 if (mask == 0 || mask >= 16)
2467 /* Pop r0..r3 under mask. */
2468 for (i = 0; i < 4; i++)
2469 if (mask & (1 << i))
2471 cache->saved_regs[i].addr = vsp;
2475 else if (insn == 0xb2)
2477 ULONGEST offset = 0;
2482 offset |= (*entry & 0x7f) << shift;
2485 while (*entry++ & 0x80);
2487 vsp += 0x204 + (offset << 2);
2489 else if (insn == 0xb3)
2491 int start = *entry >> 4;
2492 int count = (*entry++) & 0xf;
2495 /* Only registers D0..D15 are valid here. */
2496 if (start + count >= 16)
2499 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2500 for (i = 0; i <= count; i++)
2502 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2506 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2509 else if ((insn & 0xf8) == 0xb8)
2511 int count = insn & 0x7;
2514 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2515 for (i = 0; i <= count; i++)
2517 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2521 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2524 else if (insn == 0xc6)
2526 int start = *entry >> 4;
2527 int count = (*entry++) & 0xf;
2530 /* Only registers WR0..WR15 are valid. */
2531 if (start + count >= 16)
2534 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2535 for (i = 0; i <= count; i++)
2537 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2541 else if (insn == 0xc7)
2543 int mask = *entry++;
2546 /* An all-zero mask or a mask >= 16 is "spare". */
2547 if (mask == 0 || mask >= 16)
2550 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2551 for (i = 0; i < 4; i++)
2552 if (mask & (1 << i))
2554 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2558 else if ((insn & 0xf8) == 0xc0)
2560 int count = insn & 0x7;
2563 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2564 for (i = 0; i <= count; i++)
2566 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2570 else if (insn == 0xc8)
2572 int start = *entry >> 4;
2573 int count = (*entry++) & 0xf;
2576 /* Only registers D0..D31 are valid. */
2577 if (start + count >= 16)
2580 /* Pop VFP double-precision registers
2581 D[16+start]..D[16+start+count]. */
2582 for (i = 0; i <= count; i++)
2584 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2588 else if (insn == 0xc9)
2590 int start = *entry >> 4;
2591 int count = (*entry++) & 0xf;
2594 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2595 for (i = 0; i <= count; i++)
2597 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2601 else if ((insn & 0xf8) == 0xd0)
2603 int count = insn & 0x7;
2606 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2607 for (i = 0; i <= count; i++)
2609 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2615 /* Everything else is "spare". */
2620 /* If we restore SP from a register, assume this was the frame register.
2621 Otherwise just fall back to SP as frame register. */
2622 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2623 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2625 cache->framereg = ARM_SP_REGNUM;
2627 /* Determine offset to previous frame. */
2629 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2631 /* We already got the previous SP. */
2632 cache->prev_sp = vsp;
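/* Illustrative sketch, not built: a hand-decoded example of the EHABI
   personality-routine byte codes handled above.  Only the two opcode classes
   needed for the example are decoded here; the full set is handled by
   arm_exidx_fill_cache.  The register numbering (bit/offset i names r4+i)
   follows the code above; the byte sequence and addresses are invented.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  /* "pop {r4-r7}; finish": 0xa3 pops r4..r4+3, 0xb0 terminates.  */
  const uint8_t insns[] = { 0xa3, 0xb0 };
  uint32_t vsp = 0x1000;		/* Virtual stack pointer at entry.  */
  uint32_t r4_addr = 0, lr_is_pc = 0;

  for (int i = 0; i < 2; i++)
    {
      uint8_t insn = insns[i];

      if ((insn & 0xf0) == 0xa0)	/* pop r4..r[4+count] (+LR if bit 3).  */
	{
	  int count = insn & 0x7;

	  r4_addr = vsp;		/* r4 is reloaded from the old stack.  */
	  vsp += 4 * (count + 1);
	}
      else if (insn == 0xb0)		/* "Finish": PC defaults to LR.  */
	lr_is_pc = 1;
    }

  assert (r4_addr == 0x1000);
  assert (vsp == 0x1010);		/* Four registers were popped.  */
  assert (lr_is_pc);
  return 0;
}
#endif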
2637 /* Unwinding via ARM exception table entries. Note that the sniffer
2638 already computes a filled-in prologue cache, which is then used
2639 with the same arm_prologue_this_id and arm_prologue_prev_register
2640 routines also used for prologue-parsing based unwinding. */
2643 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2644 struct frame_info *this_frame,
2645 void **this_prologue_cache)
2647 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2648 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2649 CORE_ADDR addr_in_block, exidx_region, func_start;
2650 struct arm_prologue_cache *cache;
2653 /* See if we have an ARM exception table entry covering this address. */
2654 addr_in_block = get_frame_address_in_block (this_frame);
2655 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2659 /* The ARM exception table does not describe unwind information
2660 for arbitrary PC values, but is guaranteed to be correct only
2661 at call sites. We have to decide here whether we want to use
2662 ARM exception table information for this frame, or fall back
2663 to using prologue parsing. (Note that if we have DWARF CFI,
2664 this sniffer isn't even called -- CFI is always preferred.)
2666 Before we make this decision, however, we check whether we
2667 actually have *symbol* information for the current frame.
2668 If not, prologue parsing would not work anyway, so we might
2669 as well use the exception table and hope for the best. */
2670 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2674 /* If the next frame is "normal", we are at a call site in this
2675 frame, so exception information is guaranteed to be valid. */
2676 if (get_next_frame (this_frame)
2677 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2680 /* We also assume exception information is valid if we're currently
2681 blocked in a system call. The system library is supposed to
2682 ensure this, so that e.g. pthread cancellation works. */
2683 if (arm_frame_is_thumb (this_frame))
2687 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2688 2, byte_order_for_code, &insn)
2689 && (insn & 0xff00) == 0xdf00 /* svc */)
2696 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2697 4, byte_order_for_code, &insn)
2698 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2702 /* Bail out if we don't know that exception information is valid. */
2706 /* The ARM exception index does not mark the *end* of the region
2707 covered by the entry, and some functions will not have any entry.
2708 To correctly recognize the end of the covered region, the linker
2709 should have inserted dummy records with a CANTUNWIND marker.
2711 Unfortunately, current versions of GNU ld do not reliably do
2712 this, and thus we may have found an incorrect entry above.
2713 As a (temporary) sanity check, we only use the entry if it
2714 lies *within* the bounds of the function. Note that this check
2715 might reject perfectly valid entries that just happen to cover
2716 multiple functions; therefore this check ought to be removed
2717 once the linker is fixed. */
2718 if (func_start > exidx_region)
2722 /* Decode the list of unwinding instructions into a prologue cache.
2723 Note that this may fail due to e.g. a "refuse to unwind" code. */
2724 cache = arm_exidx_fill_cache (this_frame, entry);
2728 *this_prologue_cache = cache;
2732 struct frame_unwind arm_exidx_unwind = {
2734 default_frame_unwind_stop_reason,
2735 arm_prologue_this_id,
2736 arm_prologue_prev_register,
2738 arm_exidx_unwind_sniffer
2741 static struct arm_prologue_cache *
2742 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2744 struct arm_prologue_cache *cache;
2747 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2748 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2750 /* Still rely on the offsets calculated from the prologue. */
2751 arm_scan_prologue (this_frame, cache);
2753 /* Since we are in epilogue, the SP has been restored. */
2754 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2756 /* Calculate actual addresses of saved registers using offsets
2757 determined by arm_scan_prologue. */
2758 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2759 if (trad_frame_addr_p (cache->saved_regs, reg))
2760 cache->saved_regs[reg].addr += cache->prev_sp;
2765 /* Implementation of function hook 'this_id' in
2766 'struct frame_unwind' for epilogue unwinder. */
2769 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2771 struct frame_id *this_id)
2773 struct arm_prologue_cache *cache;
2776 if (*this_cache == NULL)
2777 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2778 cache = (struct arm_prologue_cache *) *this_cache;
2780 /* Use function start address as part of the frame ID. If we cannot
2781 identify the start address (due to missing symbol information),
2782 fall back to just using the current PC. */
2783 pc = get_frame_pc (this_frame);
2784 func = get_frame_func (this_frame);
2788 (*this_id) = frame_id_build (cache->prev_sp, pc);
2791 /* Implementation of function hook 'prev_register' in
2792 'struct frame_unwind' for epilogue unwinder. */
2794 static struct value *
2795 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2796 void **this_cache, int regnum)
2798 if (*this_cache == NULL)
2799 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2801 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2804 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2806 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2809 /* Implementation of function hook 'sniffer' in
2810 'struct frame_unwind' for epilogue unwinder. */
2813 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2814 struct frame_info *this_frame,
2815 void **this_prologue_cache)
2817 if (frame_relative_level (this_frame) == 0)
2819 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2820 CORE_ADDR pc = get_frame_pc (this_frame);
2822 if (arm_frame_is_thumb (this_frame))
2823 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2825 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2831 /* Frame unwinder used when the PC is in a function epilogue. */
2833 static const struct frame_unwind arm_epilogue_frame_unwind =
2836 default_frame_unwind_stop_reason,
2837 arm_epilogue_frame_this_id,
2838 arm_epilogue_frame_prev_register,
2840 arm_epilogue_frame_sniffer,
2843 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2844 trampoline, return the target PC. Otherwise return 0.
2846 void call0a (char c, short s, int i, long l) {}
2850 (*pointer_to_call0a) (c, s, i, l);
2853 Instead of calling a stub library function _call_via_xx (xx is
2854 the register name), GCC may inline the trampoline in the object
2855 file as below (register r2 has the address of call0a).
2858 .type main, %function
2867 The trampoline 'bx r2' doesn't belong to main. */
2870 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2872 /* The heuristic for recognizing such a trampoline is that FRAME is
2873 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2874 if (arm_frame_is_thumb (frame))
2878 if (target_read_memory (pc, buf, 2) == 0)
2880 struct gdbarch *gdbarch = get_frame_arch (frame);
2881 enum bfd_endian byte_order_for_code
2882 = gdbarch_byte_order_for_code (gdbarch);
2884 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2886 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2889 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2891 /* Clear the LSB so that the GDB core sets the step-resume
2892 breakpoint at the right address. */
2893 return UNMAKE_THUMB_ADDR (dest);
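/* Illustrative sketch, not built: the Thumb "bx <Rm>" recognition used above.
   Bits [6:3] of the halfword hold Rm, and bit 0 of the target address only
   encodes the Thumb state, so it is cleared before use.  The sample encoding
   (0x4710 == "bx r2") and register value are chosen for the example.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint16_t insn = 0x4710;		/* bx r2  */

  assert ((insn & 0xff80) == 0x4700);	/* Matches the bx <Rm> pattern.  */
  assert (((insn >> 3) & 0xf) == 2);	/* Rm is r2.  */

  uint32_t r2 = 0x00008001;		/* Thumb destination held in r2.  */
  assert ((r2 & ~1u) == 0x00008000);	/* PC to resume at.  */
  return 0;
}
#endif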
2901 static struct arm_prologue_cache *
2902 arm_make_stub_cache (struct frame_info *this_frame)
2904 struct arm_prologue_cache *cache;
2906 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2907 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2909 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2914 /* Our frame ID for a stub frame is the current SP and PC. */
2917 arm_stub_this_id (struct frame_info *this_frame,
2919 struct frame_id *this_id)
2921 struct arm_prologue_cache *cache;
2923 if (*this_cache == NULL)
2924 *this_cache = arm_make_stub_cache (this_frame);
2925 cache = (struct arm_prologue_cache *) *this_cache;
2927 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2931 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2932 struct frame_info *this_frame,
2933 void **this_prologue_cache)
2935 CORE_ADDR addr_in_block;
2937 CORE_ADDR pc, start_addr;
2940 addr_in_block = get_frame_address_in_block (this_frame);
2941 pc = get_frame_pc (this_frame);
2942 if (in_plt_section (addr_in_block)
2943 /* We also use the stub unwinder if the target memory is unreadable,
2944 to avoid having the prologue unwinder try to read it. */
2945 || target_read_memory (pc, dummy, 4) != 0)
2948 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2949 && arm_skip_bx_reg (this_frame, pc) != 0)
2955 struct frame_unwind arm_stub_unwind = {
2957 default_frame_unwind_stop_reason,
2959 arm_prologue_prev_register,
2961 arm_stub_unwind_sniffer
2964 /* Put here the code to store, into CACHE->saved_regs, the addresses
2965 of the saved registers of the frame described by THIS_FRAME. CACHE is
2968 static struct arm_prologue_cache *
2969 arm_m_exception_cache (struct frame_info *this_frame)
2971 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2972 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2973 struct arm_prologue_cache *cache;
2976 CORE_ADDR unwound_sp;
2978 uint32_t exc_return;
2979 uint32_t process_stack_used;
2980 uint32_t extended_frame_used;
2981 uint32_t secure_stack_used;
2983 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2984 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2986 /* The ARMv7-M Architecture Reference Manual, "B1.5.6 Exception entry
2987 behavior", describes which bits in LR define which stack was used prior
2988 to the exception and whether the FPU was in use (causing an extended stack frame). */
2990 lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
2991 sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2993 /* Check EXC_RETURN indicator bits. */
2994 exc_return = (((lr >> 28) & 0xf) == 0xf);
2996 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
2997 process_stack_used = ((lr & (1 << 2)) != 0);
2998 if (exc_return && process_stack_used)
3000 /* Thread (process) stack used.
3001 Potentially this could be another register defined by the target, but PSP
3002 can be considered a standard name for the "Process Stack Pointer".
3003 To be fully aware of system registers like MSP and PSP, these could
3004 be added to a separate XML arm-m-system-profile that is valid for
3005 the ARMv6-M and ARMv7-M architectures. Also, to be able to debug e.g. a
3006 corefile off-line, these registers must be defined by GDB,
3007 and also be included in the corefile regsets. */
3009 int psp_regnum = user_reg_map_name_to_regnum (gdbarch, "psp", -1);
3010 if (psp_regnum == -1)
3012 /* The thread (process) stack could not be fetched;
3013 give a warning and stop unwinding. */
3015 warning (_("no PSP thread stack unwinding supported."));
3017 /* Terminate any further stack unwinding by referring to this frame itself. */
3018 cache->prev_sp = sp;
3023 /* Thread (process) stack used, use PSP as SP. */
3024 unwound_sp = get_frame_register_unsigned (this_frame, psp_regnum);
3029 /* Main stack used, use MSP as SP. */
3033 /* The hardware saves eight 32-bit words, comprising xPSR,
3034 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3035 "B1.5.6 Exception entry behavior" in
3036 "ARMv7-M Architecture Reference Manual". */
3037 cache->saved_regs[0].addr = unwound_sp;
3038 cache->saved_regs[1].addr = unwound_sp + 4;
3039 cache->saved_regs[2].addr = unwound_sp + 8;
3040 cache->saved_regs[3].addr = unwound_sp + 12;
3041 cache->saved_regs[ARM_IP_REGNUM].addr = unwound_sp + 16;
3042 cache->saved_regs[ARM_LR_REGNUM].addr = unwound_sp + 20;
3043 cache->saved_regs[ARM_PC_REGNUM].addr = unwound_sp + 24;
3044 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3046 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3048 extended_frame_used = ((lr & (1 << 4)) == 0);
3049 if (exc_return && extended_frame_used)
3052 int fpu_regs_stack_offset;
3054 /* This code does not take lazy stacking into account; see "Lazy
3055 context save of FP state" in B1.5.7, and also ARM AN298, as supported
3056 by the Cortex-M4F architecture.
3057 To handle this fully, the FPCCR register (Floating-point Context
3058 Control Register) would need to be read out and its ASPEN and LSPEN
3059 bits checked in order to set up the lazily stacked FP registers correctly.
3060 This register is located at address 0xE000EF34. */
3062 /* Extended stack frame type used. */
3063 fpu_regs_stack_offset = unwound_sp + 0x20;
3064 for (i = 0; i < 16; i++)
3066 cache->saved_regs[ARM_D0_REGNUM + i].addr = fpu_regs_stack_offset;
3067 fpu_regs_stack_offset += 4;
3069 cache->saved_regs[ARM_FPSCR_REGNUM].addr = unwound_sp + 0x60;
3071 /* Offset 0x64 is reserved. */
3072 cache->prev_sp = unwound_sp + 0x68;
3076 /* Standard stack frame type used. */
3077 cache->prev_sp = unwound_sp + 0x20;
3080 /* Check EXC_RETURN bit S if Secure or Non-secure stack used. */
3081 secure_stack_used = ((lr & (1 << 6)) != 0);
3082 if (exc_return && secure_stack_used)
3084 /* ARMv8-M exception and interrupt handling is not considered here.
3085 In the ARMv8-M architecture, EXC_RETURN bit S additionally controls whether
3086 the Secure or the Non-secure stack was used. To separate Secure and
3087 Non-secure stacks, processors that are based on the ARMv8-M
3088 architecture support four stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3089 In addition, a stack limit feature is provided using stack limit
3090 registers (accessible using MSR and MRS instructions) in Privileged
3094 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3095 aligner between the top of the 32-byte stack frame and the
3096 previous context's stack pointer. */
3097 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3098 && (xpsr & (1 << 9)) != 0)
3099 cache->prev_sp += 4;
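/* Illustrative sketch, not built: the EXC_RETURN tests used above, applied to
   two common values.  0xFFFFFFFD is a return to Thread mode using the process
   stack with a standard (non-FP) frame; 0xFFFFFFED is the same but with the
   extended (FP) frame.  The bit positions mirror the checks in
   arm_m_exception_cache; the LR values are just examples.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t lr = 0xFFFFFFFD;

  assert (((lr >> 28) & 0xf) == 0xf);	/* EXC_RETURN magic prefix.  */
  assert ((lr & (1u << 2)) != 0);	/* SPSEL: process (PSP) stack.  */
  assert ((lr & (1u << 4)) != 0);	/* FTYPE=1: standard 0x20-byte frame.  */

  lr = 0xFFFFFFED;
  assert ((lr & (1u << 4)) == 0);	/* FTYPE=0: extended 0x68-byte frame.  */
  return 0;
}
#endif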
3104 /* Implementation of function hook 'this_id' in
3105 'struct frame_unwind'. */
3108 arm_m_exception_this_id (struct frame_info *this_frame,
3110 struct frame_id *this_id)
3112 struct arm_prologue_cache *cache;
3114 if (*this_cache == NULL)
3115 *this_cache = arm_m_exception_cache (this_frame);
3116 cache = (struct arm_prologue_cache *) *this_cache;
3118 /* Our frame ID for an exception frame is the current SP and PC. */
3119 *this_id = frame_id_build (cache->prev_sp,
3120 get_frame_pc (this_frame));
3123 /* Implementation of function hook 'prev_register' in
3124 'struct frame_unwind'. */
3126 static struct value *
3127 arm_m_exception_prev_register (struct frame_info *this_frame,
3131 struct arm_prologue_cache *cache;
3133 if (*this_cache == NULL)
3134 *this_cache = arm_m_exception_cache (this_frame);
3135 cache = (struct arm_prologue_cache *) *this_cache;
3137 /* The value was already reconstructed into PREV_SP. */
3138 if (prev_regnum == ARM_SP_REGNUM)
3139 return frame_unwind_got_constant (this_frame, prev_regnum,
3142 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3146 /* Implementation of function hook 'sniffer' in
3147 'struct frame_unwind'. */
3150 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3151 struct frame_info *this_frame,
3152 void **this_prologue_cache)
3154 CORE_ADDR this_pc = get_frame_pc (this_frame);
3156 /* No need to check is_m; this sniffer is only registered for
3157 M-profile architectures. */
3159 /* Check if exception frame returns to a magic PC value. */
3160 return arm_m_addr_is_magic (this_pc);
3163 /* Frame unwinder for M-profile exceptions. */
3165 struct frame_unwind arm_m_exception_unwind =
3168 default_frame_unwind_stop_reason,
3169 arm_m_exception_this_id,
3170 arm_m_exception_prev_register,
3172 arm_m_exception_unwind_sniffer
3176 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3178 struct arm_prologue_cache *cache;
3180 if (*this_cache == NULL)
3181 *this_cache = arm_make_prologue_cache (this_frame);
3182 cache = (struct arm_prologue_cache *) *this_cache;
3184 return cache->prev_sp - cache->framesize;
3187 struct frame_base arm_normal_base = {
3188 &arm_prologue_unwind,
3189 arm_normal_frame_base,
3190 arm_normal_frame_base,
3191 arm_normal_frame_base
3194 static struct value *
3195 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3198 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3200 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3205 /* The PC is normally copied from the return column, which
3206 describes saves of LR. However, that version may have an
3207 extra bit set to indicate Thumb state. The bit is not
3209 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3210 return frame_unwind_got_constant (this_frame, regnum,
3211 arm_addr_bits_remove (gdbarch, lr));
3214 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3215 cpsr = get_frame_register_unsigned (this_frame, regnum);
3216 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3217 if (IS_THUMB_ADDR (lr))
3221 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3224 internal_error (__FILE__, __LINE__,
3225 _("Unexpected register %d"), regnum);
3230 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3231 struct dwarf2_frame_state_reg *reg,
3232 struct frame_info *this_frame)
3238 reg->how = DWARF2_FRAME_REG_FN;
3239 reg->loc.fn = arm_dwarf2_prev_register;
3242 reg->how = DWARF2_FRAME_REG_CFA;
3247 /* Implement the stack_frame_destroyed_p gdbarch method. */
3250 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3252 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3253 unsigned int insn, insn2;
3254 int found_return = 0, found_stack_adjust = 0;
3255 CORE_ADDR func_start, func_end;
3259 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3262 /* The epilogue is a sequence of instructions along the following lines:
3264 - add stack frame size to SP or FP
3265 - [if frame pointer used] restore SP from FP
3266 - restore registers from SP [may include PC]
3267 - a return-type instruction [if PC wasn't already restored]
3269 In a first pass, we scan forward from the current PC and verify the
3270 instructions we find as compatible with this sequence, ending in a
3273 However, this is not sufficient to distinguish indirect function calls
3274 within a function from indirect tail calls in the epilogue in some cases.
3275 Therefore, if we didn't already find any SP-changing instruction during
3276 forward scan, we add a backward scanning heuristic to ensure we actually
3277 are in the epilogue. */
3280 while (scan_pc < func_end && !found_return)
3282 if (target_read_memory (scan_pc, buf, 2))
3286 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3288 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3290 else if (insn == 0x46f7) /* mov pc, lr */
3292 else if (thumb_instruction_restores_sp (insn))
3294 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3297 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3299 if (target_read_memory (scan_pc, buf, 2))
3303 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3305 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3307 if (insn2 & 0x8000) /* <registers> include PC. */
3310 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3311 && (insn2 & 0x0fff) == 0x0b04)
3313 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3316 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3317 && (insn2 & 0x0e00) == 0x0a00)
3329 /* Since any instruction in the epilogue sequence, with the possible
3330 exception of return itself, updates the stack pointer, we need to
3331 scan backwards for at most one instruction. Try either a 16-bit or
3332 a 32-bit instruction. This is just a heuristic, so we do not worry
3333 too much about false positives. */
3335 if (pc - 4 < func_start)
3337 if (target_read_memory (pc - 4, buf, 4))
3340 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3341 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3343 if (thumb_instruction_restores_sp (insn2))
3344 found_stack_adjust = 1;
3345 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3346 found_stack_adjust = 1;
3347 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3348 && (insn2 & 0x0fff) == 0x0b04)
3349 found_stack_adjust = 1;
3350 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3351 && (insn2 & 0x0e00) == 0x0a00)
3352 found_stack_adjust = 1;
3354 return found_stack_adjust;
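/* Illustrative sketch, not built: the 16-bit Thumb patterns the forward scan
   above treats as returns.  The encodings follow the masks used in
   thumb_stack_frame_destroyed_p; the example_* helper and the sample halfwords
   are invented for this example.  */
#if 0
#include <assert.h>
#include <stdint.h>

static int
example_is_thumb_return (uint16_t insn)
{
  return ((insn & 0xff00) == 0xbd00	/* pop {..., pc}  */
	  || (insn & 0xff80) == 0x4700	/* bx <Rm>  */
	  || insn == 0x46f7);		/* mov pc, lr  */
}

int
main (void)
{
  assert (example_is_thumb_return (0xbd10));	/* pop {r4, pc}  */
  assert (example_is_thumb_return (0x4770));	/* bx lr  */
  assert (!example_is_thumb_return (0xb510));	/* push {r4, lr}  */
  return 0;
}
#endif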
3358 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3360 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3363 CORE_ADDR func_start, func_end;
3365 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3368 /* We are in the epilogue if the previous instruction was a stack
3369 adjustment and the next instruction is a possible return (bx, mov
3370 pc, or pop). We could have to scan backwards to find the stack
3371 adjustment, or forwards to find the return, but this is a decent
3372 approximation. First scan forwards. */
3375 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3376 if (bits (insn, 28, 31) != INST_NV)
3378 if ((insn & 0x0ffffff0) == 0x012fff10)
3381 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3384 else if ((insn & 0x0fff0000) == 0x08bd0000
3385 && (insn & 0x0000c000) != 0)
3386 /* POP (LDMIA), including PC or LR. */
3393 /* Scan backwards. This is just a heuristic, so do not worry about
3394 false positives from mode changes. */
3396 if (pc < func_start + 4)
3399 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3400 if (arm_instruction_restores_sp (insn))
3406 /* Implement the stack_frame_destroyed_p gdbarch method. */
3409 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3411 if (arm_pc_is_thumb (gdbarch, pc))
3412 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3414 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3417 /* When arguments must be pushed onto the stack, they go on in reverse
3418 order. The code below implements a FILO (stack) to do this. */
3423 struct stack_item *prev;
3427 static struct stack_item *
3428 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3430 struct stack_item *si;
3431 si = XNEW (struct stack_item);
3432 si->data = (gdb_byte *) xmalloc (len);
3435 memcpy (si->data, contents, len);
3439 static struct stack_item *
3440 pop_stack_item (struct stack_item *si)
3442 struct stack_item *dead = si;
3449 /* Implement the gdbarch type alignment method, overriding the generic
3450 alignment algorithm for anything that is ARM-specific. */
3453 arm_type_align (gdbarch *gdbarch, struct type *t)
3455 t = check_typedef (t);
3456 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
3458 /* Use the natural alignment for vector types (the same as for the
3459 scalar element type), but cap the alignment at 64 bits. */
3460 if (TYPE_LENGTH (t) > 8)
3463 return TYPE_LENGTH (t);
3466 /* Allow the common code to calculate the alignment. */
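/* Illustrative sketch, not built: the vector alignment rule applied above --
   a vector type is aligned to its own size, but never to more than eight
   bytes.  The helper below just restates that rule on plain sizes; it is not
   the gdbarch hook itself, and its name is invented.  */
#if 0
#include <assert.h>
#include <stddef.h>

static size_t
example_vector_align (size_t length)
{
  return length > 8 ? 8 : length;
}

int
main (void)
{
  assert (example_vector_align (8) == 8);	/* 64-bit vector.  */
  assert (example_vector_align (16) == 8);	/* 128-bit vector is capped.  */
  return 0;
}
#endif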
3470 /* Possible base types for a candidate for passing and returning in
3473 enum arm_vfp_cprc_base_type
3482 /* The length of one element of base type B. */
3485 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3489 case VFP_CPRC_SINGLE:
3491 case VFP_CPRC_DOUBLE:
3493 case VFP_CPRC_VEC64:
3495 case VFP_CPRC_VEC128:
3498 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3503 /* The character ('s', 'd' or 'q') for the type of VFP register used
3504 for passing base type B. */
3507 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3511 case VFP_CPRC_SINGLE:
3513 case VFP_CPRC_DOUBLE:
3515 case VFP_CPRC_VEC64:
3517 case VFP_CPRC_VEC128:
3520 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3525 /* Determine whether T may be part of a candidate for passing and
3526 returning in VFP registers, ignoring the limit on the total number
3527 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3528 classification of the first valid component found; if it is not
3529 VFP_CPRC_UNKNOWN, all components must have the same classification
3530 as *BASE_TYPE. If it is found that T contains a type not permitted
3531 for passing and returning in VFP registers, a type differently
3532 classified from *BASE_TYPE, or two types differently classified
3533 from each other, return -1, otherwise return the total number of
3534 base-type elements found (possibly 0 in an empty structure or
3535 array). Vector types are not currently supported, matching the
3536 generic AAPCS support. */
3539 arm_vfp_cprc_sub_candidate (struct type *t,
3540 enum arm_vfp_cprc_base_type *base_type)
3542 t = check_typedef (t);
3546 switch (TYPE_LENGTH (t))
3549 if (*base_type == VFP_CPRC_UNKNOWN)
3550 *base_type = VFP_CPRC_SINGLE;
3551 else if (*base_type != VFP_CPRC_SINGLE)
3556 if (*base_type == VFP_CPRC_UNKNOWN)
3557 *base_type = VFP_CPRC_DOUBLE;
3558 else if (*base_type != VFP_CPRC_DOUBLE)
3567 case TYPE_CODE_COMPLEX:
3568 /* Arguments of complex T where T is one of the types float or
3569 double get treated as if they are implemented as:
3578 switch (TYPE_LENGTH (t))
3581 if (*base_type == VFP_CPRC_UNKNOWN)
3582 *base_type = VFP_CPRC_SINGLE;
3583 else if (*base_type != VFP_CPRC_SINGLE)
3588 if (*base_type == VFP_CPRC_UNKNOWN)
3589 *base_type = VFP_CPRC_DOUBLE;
3590 else if (*base_type != VFP_CPRC_DOUBLE)
3599 case TYPE_CODE_ARRAY:
3601 if (t->is_vector ())
3603 /* 64-bit and 128-bit containerized vector types are VFP
3605 switch (TYPE_LENGTH (t))
3608 if (*base_type == VFP_CPRC_UNKNOWN)
3609 *base_type = VFP_CPRC_VEC64;
3612 if (*base_type == VFP_CPRC_UNKNOWN)
3613 *base_type = VFP_CPRC_VEC128;
3624 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3628 if (TYPE_LENGTH (t) == 0)
3630 gdb_assert (count == 0);
3633 else if (count == 0)
3635 unitlen = arm_vfp_cprc_unit_length (*base_type);
3636 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3637 return TYPE_LENGTH (t) / unitlen;
3642 case TYPE_CODE_STRUCT:
3647 for (i = 0; i < t->num_fields (); i++)
3651 if (!field_is_static (&t->field (i)))
3652 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3654 if (sub_count == -1)
3658 if (TYPE_LENGTH (t) == 0)
3660 gdb_assert (count == 0);
3663 else if (count == 0)
3665 unitlen = arm_vfp_cprc_unit_length (*base_type);
3666 if (TYPE_LENGTH (t) != unitlen * count)
3671 case TYPE_CODE_UNION:
3676 for (i = 0; i < t->num_fields (); i++)
3678 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
3680 if (sub_count == -1)
3682 count = (count > sub_count ? count : sub_count);
3684 if (TYPE_LENGTH (t) == 0)
3686 gdb_assert (count == 0);
3689 else if (count == 0)
3691 unitlen = arm_vfp_cprc_unit_length (*base_type);
3692 if (TYPE_LENGTH (t) != unitlen * count)
3704 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3705 if passed to or returned from a non-variadic function with the VFP
3706 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3707 *BASE_TYPE to the base type for T and *COUNT to the number of
3708 elements of that base type before returning. */
3711 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3714 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3715 int c = arm_vfp_cprc_sub_candidate (t, &b);
3716 if (c <= 0 || c > 4)
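/* Illustrative sketch, not built: the classification rule implemented by
   arm_vfp_cprc_sub_candidate and arm_vfp_call_candidate -- an aggregate is a
   VFP CPRC when it flattens to 1..4 fields of a single base type with no
   padding.  The struct layouts below are invented examples of how that rule
   plays out; the size checks assume the usual AAPCS layout.  */
#if 0
#include <assert.h>

struct ok_pair  { double re, im; };	  /* base DOUBLE, count 2: CPRC.  */
struct ok_quad  { float x, y, z, w; };	  /* base SINGLE, count 4: CPRC.  */
struct too_many { double d[5]; };	  /* count 5 > 4: not a CPRC.  */
struct mixed    { float f; double d; };	  /* two base types: not a CPRC.  */

int
main (void)
{
  /* A CPRC is exactly count * unit-length bytes, i.e. no padding.  */
  assert (sizeof (struct ok_pair) == 2 * sizeof (double));
  assert (sizeof (struct ok_quad) == 4 * sizeof (float));
  return 0;
}
#endif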
3723 /* Return 1 if the VFP ABI should be used for passing arguments to and
3724 returning values from a function of type FUNC_TYPE, 0
3728 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3730 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3731 /* Variadic functions always use the base ABI. Assume that functions
3732 without debug info are not variadic. */
3733 if (func_type && check_typedef (func_type)->has_varargs ())
3735 /* The VFP ABI is only supported as a variant of AAPCS. */
3736 if (tdep->arm_abi != ARM_ABI_AAPCS)
3738 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3741 /* We currently only support passing parameters in integer registers, which
3742 conforms with GCC's default model, and VFP argument passing following
3743 the VFP variant of AAPCS. Several other variants exist and
3744 we should probably support some of them based on the selected ABI. */
3747 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3748 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3749 struct value **args, CORE_ADDR sp,
3750 function_call_return_method return_method,
3751 CORE_ADDR struct_addr)
3753 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3757 struct stack_item *si = NULL;
3760 unsigned vfp_regs_free = (1 << 16) - 1;
3762 /* Determine the type of this function and whether the VFP ABI
3764 ftype = check_typedef (value_type (function));
3765 if (ftype->code () == TYPE_CODE_PTR)
3766 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3767 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3769 /* Set the return address. For the ARM, the return breakpoint is
3770 always at BP_ADDR. */
3771 if (arm_pc_is_thumb (gdbarch, bp_addr))
3773 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3775 /* Walk through the list of args and determine how large a temporary
3776 stack is required. Need to take care here as structs may be
3777 passed on the stack, and we have to push them. */
3780 argreg = ARM_A1_REGNUM;
3783 /* The struct_return pointer occupies the first parameter
3784 passing register. */
3785 if (return_method == return_method_struct)
3788 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3789 gdbarch_register_name (gdbarch, argreg),
3790 paddress (gdbarch, struct_addr));
3791 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3795 for (argnum = 0; argnum < nargs; argnum++)
3798 struct type *arg_type;
3799 struct type *target_type;
3800 enum type_code typecode;
3801 const bfd_byte *val;
3803 enum arm_vfp_cprc_base_type vfp_base_type;
3805 int may_use_core_reg = 1;
3807 arg_type = check_typedef (value_type (args[argnum]));
3808 len = TYPE_LENGTH (arg_type);
3809 target_type = TYPE_TARGET_TYPE (arg_type);
3810 typecode = arg_type->code ();
3811 val = value_contents (args[argnum]);
3813 align = type_align (arg_type);
3814 /* Round alignment up to a whole number of words. */
3815 align = (align + ARM_INT_REGISTER_SIZE - 1)
3816 & ~(ARM_INT_REGISTER_SIZE - 1);
3817 /* Different ABIs have different maximum alignments. */
3818 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3820 /* The APCS ABI only requires word alignment. */
3821 align = ARM_INT_REGISTER_SIZE;
3825 /* The AAPCS requires at most doubleword alignment. */
3826 if (align > ARM_INT_REGISTER_SIZE * 2)
3827 align = ARM_INT_REGISTER_SIZE * 2;
3831 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3839 /* Because this is a CPRC it cannot go in a core register or
3840 cause a core register to be skipped for alignment.
3841 Either it goes in VFP registers and the rest of this loop
3842 iteration is skipped for this argument, or it goes on the
3843 stack (and the stack alignment code is correct for this
3845 may_use_core_reg = 0;
3847 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3848 shift = unit_length / 4;
3849 mask = (1 << (shift * vfp_base_count)) - 1;
3850 for (regno = 0; regno < 16; regno += shift)
3851 if (((vfp_regs_free >> regno) & mask) == mask)
3860 vfp_regs_free &= ~(mask << regno);
3861 reg_scaled = regno / shift;
3862 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3863 for (i = 0; i < vfp_base_count; i++)
3867 if (reg_char == 'q')
3868 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3869 val + i * unit_length);
3872 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3873 reg_char, reg_scaled + i);
3874 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3876 regcache->cooked_write (regnum, val + i * unit_length);
3883 /* This CPRC could not go in VFP registers, so all VFP
3884 registers are now marked as used. */
3889 /* Push stack padding for doubleword alignment. */
3890 if (nstack & (align - 1))
3892 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
3893 nstack += ARM_INT_REGISTER_SIZE;
3896 /* Doubleword aligned quantities must go in even register pairs. */
3897 if (may_use_core_reg
3898 && argreg <= ARM_LAST_ARG_REGNUM
3899 && align > ARM_INT_REGISTER_SIZE
3903 /* If the argument is a pointer to a function, and it is a
3904 Thumb function, create a LOCAL copy of the value and set
3905 the THUMB bit in it. */
3906 if (TYPE_CODE_PTR == typecode
3907 && target_type != NULL
3908 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
3910 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3911 if (arm_pc_is_thumb (gdbarch, regval))
3913 bfd_byte *copy = (bfd_byte *) alloca (len);
3914 store_unsigned_integer (copy, len, byte_order,
3915 MAKE_THUMB_ADDR (regval));
3920 /* Copy the argument to general registers or the stack in
3921 register-sized pieces. Large arguments are split between
3922 registers and stack. */
3925 int partial_len = len < ARM_INT_REGISTER_SIZE
3926 ? len : ARM_INT_REGISTER_SIZE;
3928 = extract_unsigned_integer (val, partial_len, byte_order);
3930 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3932 /* The argument is being passed in a general purpose
3934 if (byte_order == BFD_ENDIAN_BIG)
3935 regval <<= (ARM_INT_REGISTER_SIZE - partial_len) * 8;
3937 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3939 gdbarch_register_name
3941 phex (regval, ARM_INT_REGISTER_SIZE));
3942 regcache_cooked_write_unsigned (regcache, argreg, regval);
3947 gdb_byte buf[ARM_INT_REGISTER_SIZE];
3949 memset (buf, 0, sizeof (buf));
3950 store_unsigned_integer (buf, partial_len, byte_order, regval);
3952 /* Push the arguments onto the stack. */
3954 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3956 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
3957 nstack += ARM_INT_REGISTER_SIZE;
3964 /* If we have an odd number of words to push, then decrement the stack
3965 by one word now, so that the first stack argument will be doubleword aligned. */
3972 write_memory (sp, si->data, si->len);
3973 si = pop_stack_item (si);
3976 /* Finally, update the SP register. */
3977 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
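/* Illustrative sketch, not built: the free-register bookkeeping used above
   for VFP (CPRC) arguments.  Each bit of the mask stands for one
   single-precision slot; a double takes two slots, so a homogeneous aggregate
   of two doubles needs four consecutive slots starting on a double boundary.
   The trace below is a made-up allocation sequence.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned vfp_regs_free = (1u << 16) - 1;	/* s0..s15 all free.  */

  /* Two doubles: unit length 8 -> shift 2, mask covers 4 slots.  */
  int shift = 8 / 4;
  unsigned mask = (1u << (shift * 2)) - 1;	/* 0xf  */

  /* First fit is slot 0, i.e. registers d0/d1.  */
  int regno;
  for (regno = 0; regno < 16; regno += shift)
    if (((vfp_regs_free >> regno) & mask) == mask)
      break;
  assert (regno == 0);
  vfp_regs_free &= ~(mask << regno);		/* Mark d0/d1 used.  */

  /* The next such argument lands in d2/d3.  */
  for (regno = 0; regno < 16; regno += shift)
    if (((vfp_regs_free >> regno) & mask) == mask)
      break;
  assert (regno == 4);
  return 0;
}
#endif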
3983 /* Always align the frame to an 8-byte boundary. This is required on
3984 some platforms and harmless on the rest. */
3987 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3989 /* Align the stack to eight bytes. */
3990 return sp & ~ (CORE_ADDR) 7;
3994 print_fpu_flags (struct ui_file *file, int flags)
3996 if (flags & (1 << 0))
3997 fputs_filtered ("IVO ", file);
3998 if (flags & (1 << 1))
3999 fputs_filtered ("DVZ ", file);
4000 if (flags & (1 << 2))
4001 fputs_filtered ("OFL ", file);
4002 if (flags & (1 << 3))
4003 fputs_filtered ("UFL ", file);
4004 if (flags & (1 << 4))
4005 fputs_filtered ("INX ", file);
4006 fputc_filtered ('\n', file);
4009 /* Print interesting information about the floating point processor
4010 (if present) or emulator. */
4012 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4013 struct frame_info *frame, const char *args)
4015 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4018 type = (status >> 24) & 127;
4019 if (status & (1 << 31))
4020 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4022 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4023 /* i18n: [floating point unit] mask */
4024 fputs_filtered (_("mask: "), file);
4025 print_fpu_flags (file, status >> 16);
4026 /* i18n: [floating point unit] flags */
4027 fputs_filtered (_("flags: "), file);
4028 print_fpu_flags (file, status);
4031 /* Construct the ARM extended floating point type. */
4032 static struct type *
4033 arm_ext_type (struct gdbarch *gdbarch)
4035 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4037 if (!tdep->arm_ext_type)
4039 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4040 floatformats_arm_ext);
4042 return tdep->arm_ext_type;
4045 static struct type *
4046 arm_neon_double_type (struct gdbarch *gdbarch)
4048 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4050 if (tdep->neon_double_type == NULL)
4052 struct type *t, *elem;
4054 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4056 elem = builtin_type (gdbarch)->builtin_uint8;
4057 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4058 elem = builtin_type (gdbarch)->builtin_uint16;
4059 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4060 elem = builtin_type (gdbarch)->builtin_uint32;
4061 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4062 elem = builtin_type (gdbarch)->builtin_uint64;
4063 append_composite_type_field (t, "u64", elem);
4064 elem = builtin_type (gdbarch)->builtin_float;
4065 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4066 elem = builtin_type (gdbarch)->builtin_double;
4067 append_composite_type_field (t, "f64", elem);
4069 t->set_is_vector (true);
4070 t->set_name ("neon_d");
4071 tdep->neon_double_type = t;
4074 return tdep->neon_double_type;
4077 /* FIXME: The vector types are not correctly ordered on big-endian
4078 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4079 bits of d0 - regardless of what unit size is being held in d0. So
4080 the offset of the first uint8 in d0 is 7, but the offset of the
4081 first float is 4. This code works as-is for little-endian
4084 static struct type *
4085 arm_neon_quad_type (struct gdbarch *gdbarch)
4087 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4089 if (tdep->neon_quad_type == NULL)
4091 struct type *t, *elem;
4093 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4095 elem = builtin_type (gdbarch)->builtin_uint8;
4096 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4097 elem = builtin_type (gdbarch)->builtin_uint16;
4098 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4099 elem = builtin_type (gdbarch)->builtin_uint32;
4100 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4101 elem = builtin_type (gdbarch)->builtin_uint64;
4102 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4103 elem = builtin_type (gdbarch)->builtin_float;
4104 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4105 elem = builtin_type (gdbarch)->builtin_double;
4106 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4108 t->set_is_vector (true);
4109 t->set_name ("neon_q");
4110 tdep->neon_quad_type = t;
4113 return tdep->neon_quad_type;
4116 /* Return the GDB type object for the "standard" data type of data in
4119 static struct type *
4120 arm_register_type (struct gdbarch *gdbarch, int regnum)
4122 int num_regs = gdbarch_num_regs (gdbarch);
4124 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4125 && regnum >= num_regs && regnum < num_regs + 32)
4126 return builtin_type (gdbarch)->builtin_float;
4128 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4129 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4130 return arm_neon_quad_type (gdbarch);
4132 /* If the target description has register information, we are only
4133 in this function so that we can override the types of
4134 double-precision registers for NEON. */
4135 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4137 struct type *t = tdesc_register_type (gdbarch, regnum);
4139 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4140 && t->code () == TYPE_CODE_FLT
4141 && gdbarch_tdep (gdbarch)->have_neon)
4142 return arm_neon_double_type (gdbarch);
4147 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4149 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4150 return builtin_type (gdbarch)->builtin_void;
4152 return arm_ext_type (gdbarch);
4154 else if (regnum == ARM_SP_REGNUM)
4155 return builtin_type (gdbarch)->builtin_data_ptr;
4156 else if (regnum == ARM_PC_REGNUM)
4157 return builtin_type (gdbarch)->builtin_func_ptr;
4158 else if (regnum >= ARRAY_SIZE (arm_register_names))
4159 /* These registers are only supported on targets which supply
4160 an XML description. */
4161 return builtin_type (gdbarch)->builtin_int0;
4163 return builtin_type (gdbarch)->builtin_uint32;
4166 /* Map a DWARF register REGNUM onto the appropriate GDB register
4170 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4172 /* Core integer regs. */
4173 if (reg >= 0 && reg <= 15)
4176 /* Legacy FPA encoding. These were once used in a way which
4177 overlapped with VFP register numbering, so their use is
4178 discouraged, but GDB doesn't support the ARM toolchain
4179 which used them for VFP. */
4180 if (reg >= 16 && reg <= 23)
4181 return ARM_F0_REGNUM + reg - 16;
4183 /* New assignments for the FPA registers. */
4184 if (reg >= 96 && reg <= 103)
4185 return ARM_F0_REGNUM + reg - 96;
4187 /* WMMX register assignments. */
4188 if (reg >= 104 && reg <= 111)
4189 return ARM_WCGR0_REGNUM + reg - 104;
4191 if (reg >= 112 && reg <= 127)
4192 return ARM_WR0_REGNUM + reg - 112;
4194 if (reg >= 192 && reg <= 199)
4195 return ARM_WC0_REGNUM + reg - 192;
4197 /* VFP v2 registers. A double precision value is actually
4198 in d1 rather than s2, but the ABI only defines numbering
4199 for the single precision registers. This will "just work"
4200 in GDB for little endian targets (we'll read eight bytes,
4201 starting in s0 and then progressing to s1), but will be
4202 reversed on big endian targets with VFP. This won't
4203 be a problem for the new Neon quad registers; you're supposed
4204 to use DW_OP_piece for those. */
4205 if (reg >= 64 && reg <= 95)
4209 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4210 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4214 /* VFP v3 / Neon registers. This range is also used for VFP v2
4215 registers, except that it now describes d0 instead of s0. */
4216 if (reg >= 256 && reg <= 287)
4220 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4221 return user_reg_map_name_to_regnum (gdbarch, name_buf,
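/* Illustrative sketch, not built: the shape of the DWARF number ranges handled
   above, mapped to register names rather than GDB register numbers.  Only a
   few ranges are shown; the real hook also covers the FPA and iWMMXt ranges.
   The example_* helper is invented for this example.  */
#if 0
#include <assert.h>
#include <stdio.h>
#include <string.h>

static const char *
example_dwarf_reg_name (int reg, char *buf, int len)
{
  if (reg >= 0 && reg <= 15)
    snprintf (buf, len, "r%d", reg);		/* Core registers.  */
  else if (reg >= 64 && reg <= 95)
    snprintf (buf, len, "s%d", reg - 64);	/* VFPv2 single registers.  */
  else if (reg >= 256 && reg <= 287)
    snprintf (buf, len, "d%d", reg - 256);	/* VFPv3/Neon double registers.  */
  else
    return NULL;
  return buf;
}

int
main (void)
{
  char buf[8];
  assert (strcmp (example_dwarf_reg_name (13, buf, sizeof buf), "r13") == 0);
  assert (strcmp (example_dwarf_reg_name (65, buf, sizeof buf), "s1") == 0);
  assert (strcmp (example_dwarf_reg_name (257, buf, sizeof buf), "d1") == 0);
  return 0;
}
#endif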
4228 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4230 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4233 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4235 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4236 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4238 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4239 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4241 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4242 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4244 if (reg < NUM_GREGS)
4245 return SIM_ARM_R0_REGNUM + reg;
4248 if (reg < NUM_FREGS)
4249 return SIM_ARM_FP0_REGNUM + reg;
4252 if (reg < NUM_SREGS)
4253 return SIM_ARM_FPS_REGNUM + reg;
4256 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4259 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4260 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4261 NULL if an error occurs. BUF is freed. */
4264 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4265 int old_len, int new_len)
4268 int bytes_to_read = new_len - old_len;
4270 new_buf = (gdb_byte *) xmalloc (new_len);
4271 memcpy (new_buf + bytes_to_read, buf, old_len);
4273 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4281 /* An IT block is at most the 2-byte IT instruction followed by
4282 four 4-byte instructions. The furthest back we must search to
4283 find an IT block that affects the current instruction is thus
4284 2 + 3 * 4 == 14 bytes. */
4285 #define MAX_IT_BLOCK_PREFIX 14
4287 /* Use a quick scan if there are more than this many bytes of
4289 #define IT_SCAN_THRESHOLD 32
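/* Illustrative sketch, not built: the halfword test used below to spot a
   candidate IT instruction.  0xbf18 is "it ne" and 0xbf00 is a plain NOP hint
   (mask zero), so only the former is a candidate.  The encodings are standard
   Thumb-2; the example_* helper is invented for this example.  */
#if 0
#include <assert.h>
#include <stdint.h>

static int
example_is_it_insn (uint16_t inst)
{
  return (inst & 0xff00) == 0xbf00 && (inst & 0x000f) != 0;
}

int
main (void)
{
  assert (example_is_it_insn (0xbf18));		/* it ne  */
  assert (!example_is_it_insn (0xbf00));	/* nop (hint), mask == 0  */
  assert (!example_is_it_insn (0x4770));	/* bx lr  */
  return 0;
}
#endif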
4291 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4292 A breakpoint in an IT block may not be hit, depending on the condition flags. */
4295 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4299 CORE_ADDR boundary, func_start;
4301 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4302 int i, any, last_it, last_it_count;
4304 /* If we are using BKPT breakpoints, none of this is necessary. */
4305 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4308 /* ARM mode does not have this problem. */
4309 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4312 /* We are setting a breakpoint in Thumb code that could potentially
4313 contain an IT block. The first step is to find how much Thumb
4314 code there is; we do not need to read outside of known Thumb code. */
4316 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4318 /* Thumb-2 code must have mapping symbols to have a chance. */
4321 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4323 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4324 && func_start > boundary)
4325 boundary = func_start;
4327 /* Search for a candidate IT instruction. We have to do some fancy
4328 footwork to distinguish a real IT instruction from the second
4329 half of a 32-bit instruction, but there is no need for that if
4330 there's no candidate. */
4331 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4333 /* No room for an IT instruction. */
4336 buf = (gdb_byte *) xmalloc (buf_len);
4337 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4340 for (i = 0; i < buf_len; i += 2)
4342 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4343 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4356 /* OK, the code bytes before this instruction contain at least one
4357 halfword which resembles an IT instruction. We know that it's
4358 Thumb code, but there are still two possibilities. Either the
4359 halfword really is an IT instruction, or it is the second half of
4360 a 32-bit Thumb instruction. The only way we can tell is to
4361 scan forwards from a known instruction boundary. */
4362 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4366 /* There's a lot of code before this instruction. Start with an
4367 optimistic search; it's easy to recognize halfwords that can
4368 not be the start of a 32-bit instruction, and use that to
4369 lock on to the instruction boundaries. */
4370 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4373 buf_len = IT_SCAN_THRESHOLD;
4376 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4378 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4379 if (thumb_insn_size (inst1) == 2)
4386 /* At this point, if DEFINITE, BUF[I] is the first place we
4387 are sure that we know the instruction boundaries, and it is far
4388 enough from BPADDR that we could not miss an IT instruction
4389 affecting BPADDR. If ! DEFINITE, give up - start from a known boundary. */
4393 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4397 buf_len = bpaddr - boundary;
4403 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4406 buf_len = bpaddr - boundary;
4410 /* Scan forwards. Find the last IT instruction before BPADDR. */
4415 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4417 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4422 else if (inst1 & 0x0002)
4424 else if (inst1 & 0x0004)
4429 i += thumb_insn_size (inst1);
4435 /* There wasn't really an IT instruction after all. */
4438 if (last_it_count < 1)
4439 /* It was too far away. */
4442 /* This really is a trouble spot. Move the breakpoint to the IT instruction. */
4444 return bpaddr - buf_len + last_it;
4447 /* ARM displaced stepping support.
4449 Generally ARM displaced stepping works as follows:
4451 1. When an instruction is to be single-stepped, it is first decoded by
4452 arm_process_displaced_insn. Depending on the type of instruction, it is
4453 then copied to a scratch location, possibly in a modified form. The
4454 copy_* set of functions performs such modification, as necessary. A
4455 breakpoint is placed after the modified instruction in the scratch space
4456 to return control to GDB. Note in particular that instructions which
4457 modify the PC will no longer do so after modification.
4459 2. The instruction is single-stepped, by setting the PC to the scratch
4460 location address, and resuming. Control returns to GDB when the
4463 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4464 function used for the current instruction. This function's job is to
4465 put the CPU/memory state back to what it would have been if the
4466 instruction had been executed unmodified in its original location. */
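/* As a concrete sketch of the three steps above (the addresses here are
   hypothetical): to single-step an ARM "b <label>" located at 0x8000 with
   the scratch area at 0x20000, arm_copy_b_bl_blx records the condition and
   the branch target in the closure and emits a NOP into the scratch space
   (step 1); the NOP at 0x20000 executes and the breakpoint placed after it
   returns control to GDB, with no architectural state changed yet (step 2);
   cleanup_branch then evaluates the recorded condition against the flags
   and, if it holds, writes the recorded target into the PC, just as the
   original branch would have done (step 3).  */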
4468 /* NOP instruction (mov r0, r0). */
4469 #define ARM_NOP 0xe1a00000
4470 #define THUMB_NOP 0x4600
4472 /* Helper for register reads for displaced stepping. In particular, this
4473 returns the PC as it would be seen by the instruction at its original location. */
4477 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4481 CORE_ADDR from = dsc->insn_addr;
4483 if (regno == ARM_PC_REGNUM)
4485 /* Compute pipeline offset:
4486 - When executing an ARM instruction, PC reads as the address of the
4487 current instruction plus 8.
4488 - When executing a Thumb instruction, PC reads as the address of the
4489 current instruction plus 4. */
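      /* For example (hypothetical address), an ARM instruction at 0x1000
	 that reads the PC sees 0x1008, while a Thumb instruction at the
	 same address sees 0x1004; that is the value returned for PC reads
	 here.  */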
4496 displaced_debug_printf ("read pc value %.8lx",
4497 (unsigned long) from);
4498 return (ULONGEST) from;
4502 regcache_cooked_read_unsigned (regs, regno, &ret);
4504 displaced_debug_printf ("read r%d value %.8lx",
4505 regno, (unsigned long) ret);
4512 displaced_in_arm_mode (struct regcache *regs)
4515 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4517 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4519 return (ps & t_bit) == 0;
4522 /* Write to the PC as from a branch instruction. */
4525 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4529 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4530 architecture versions < 6. */
4531 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4532 val & ~(ULONGEST) 0x3);
4534 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4535 val & ~(ULONGEST) 0x1);
4538 /* Write to the PC as from a branch-exchange instruction. */
4541 bx_write_pc (struct regcache *regs, ULONGEST val)
4544 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
4546 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4550 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4551 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4553 else if ((val & 2) == 0)
4555 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4556 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4560 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4561 mode, align dest to 4 bytes). */
4562 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4563 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4564 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
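  /* For example (hypothetical values): VAL = 0x8001 selects Thumb state
     with PC = 0x8000; VAL = 0x8000 selects ARM state with PC = 0x8000;
     VAL = 0x8002 hits the unpredictable case above and is forced to ARM
     state with PC = 0x8000.  */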
4568 /* Write to the PC as if from a load instruction. */
4571 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4574 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4575 bx_write_pc (regs, val);
4577 branch_write_pc (regs, dsc, val);
4580 /* Write to the PC as if from an ALU instruction. */
4583 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4586 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4587 bx_write_pc (regs, val);
4589 branch_write_pc (regs, dsc, val);
4592 /* Helper for writing to registers for displaced stepping. Writing to the PC
4593 has varying effects depending on the instruction which does the write:
4594 this is controlled by the WRITE_PC argument. */
4597 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
4598 int regno, ULONGEST val, enum pc_write_style write_pc)
4600 if (regno == ARM_PC_REGNUM)
4602 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
4606 case BRANCH_WRITE_PC:
4607 branch_write_pc (regs, dsc, val);
4611 bx_write_pc (regs, val);
4615 load_write_pc (regs, dsc, val);
4619 alu_write_pc (regs, dsc, val);
4622 case CANNOT_WRITE_PC:
4623 warning (_("Instruction wrote to PC in an unexpected way when "
4624 "single-stepping"));
4628 internal_error (__FILE__, __LINE__,
4629 _("Invalid argument to displaced_write_reg"));
4632 dsc->wrote_to_pc = 1;
4636 displaced_debug_printf ("writing r%d value %.8lx",
4637 regno, (unsigned long) val);
4638 regcache_cooked_write_unsigned (regs, regno, val);
4642 /* This function is used to concisely determine if an instruction INSN
4643 references PC. Register fields of interest in INSN should have the
4644 corresponding fields of BITMASK set to 0b1111. The function
4645 returns 1 if any of these fields in INSN reference the PC
4646 (also 0b1111, r15), else it returns 0. */
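/* For example, in an ARM data-processing immediate instruction the Rn field
   occupies bits 16-19 and the Rd field bits 12-15, so arm_copy_alu_imm below
   calls insn_references_pc (insn, 0x000ff000ul); that check returns 1 for
   0xe24f0008 (sub r0, pc, #8) and 0 for 0xe2833001 (add r3, r3, #1).  */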
4649 insn_references_pc (uint32_t insn, uint32_t bitmask)
4651 uint32_t lowbit = 1;
4653 while (bitmask != 0)
4657 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4663 mask = lowbit * 0xf;
4665 if ((insn & mask) == mask)
4674 /* The simplest copy function. Many instructions have the same effect no
4675 matter what address they are executed at: in those cases, use this. */
4678 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
4679 arm_displaced_step_copy_insn_closure *dsc)
4681 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
4682 (unsigned long) insn, iname);
4684 dsc->modinsn[0] = insn;
4690 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4691 uint16_t insn2, const char *iname,
4692 arm_displaced_step_copy_insn_closure *dsc)
4694 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
4695 "unmodified", insn1, insn2, iname);
4697 dsc->modinsn[0] = insn1;
4698 dsc->modinsn[1] = insn2;
4704 /* Copy a 16-bit Thumb instruction (Thumb or 16-bit Thumb-2) without any modification. */
4707 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4709 arm_displaced_step_copy_insn_closure *dsc)
4711 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
4714 dsc->modinsn[0] = insn;
4719 /* Preload instructions with immediate offset. */
4722 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
4723 arm_displaced_step_copy_insn_closure *dsc)
4725 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4726 if (!dsc->u.preload.immed)
4727 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4731 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4732 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
4735 /* Preload instructions:
4737 {pli/pld} [rn, #+/-imm]
4739 {pli/pld} [r0, #+/-imm]. */
4741 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4742 rn_val = displaced_read_reg (regs, dsc, rn);
4743 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4744 dsc->u.preload.immed = 1;
4746 dsc->cleanup = &cleanup_preload;
4750 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4751 arm_displaced_step_copy_insn_closure *dsc)
4753 unsigned int rn = bits (insn, 16, 19);
4755 if (!insn_references_pc (insn, 0x000f0000ul))
4756 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4758 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
4760 dsc->modinsn[0] = insn & 0xfff0ffff;
4762 install_preload (gdbarch, regs, dsc, rn);
4768 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4769 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
4771 unsigned int rn = bits (insn1, 0, 3);
4772 unsigned int u_bit = bit (insn1, 7);
4773 int imm12 = bits (insn2, 0, 11);
4776 if (rn != ARM_PC_REGNUM)
4777 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4779 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3,
4780 and PLD (literal) Encoding T1. */
4781 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
4782 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4788 /* Rewrite instruction {pli/pld} PC imm12 into:
4789 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4793 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4795 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4796 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4798 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4800 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4801 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4802 dsc->u.preload.immed = 0;
4804 /* {pli/pld} [r0, r1] */
4805 dsc->modinsn[0] = insn1 & 0xfff0;
4806 dsc->modinsn[1] = 0xf001;
4809 dsc->cleanup = &cleanup_preload;
4813 /* Preload instructions with register offset. */
4816 install_preload_reg (struct gdbarch *gdbarch, struct regcache *regs,
4817 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
4820 ULONGEST rn_val, rm_val;
4822 /* Preload register-offset instructions:
4824 {pli/pld} [rn, rm {, shift}]
4826 {pli/pld} [r0, r1 {, shift}]. */
4828 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4829 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4830 rn_val = displaced_read_reg (regs, dsc, rn);
4831 rm_val = displaced_read_reg (regs, dsc, rm);
4832 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4833 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4834 dsc->u.preload.immed = 0;
4836 dsc->cleanup = &cleanup_preload;
4840 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4841 struct regcache *regs,
4842 arm_displaced_step_copy_insn_closure *dsc)
4844 unsigned int rn = bits (insn, 16, 19);
4845 unsigned int rm = bits (insn, 0, 3);
4848 if (!insn_references_pc (insn, 0x000f000ful))
4849 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4851 displaced_debug_printf ("copying preload insn %.8lx",
4852 (unsigned long) insn);
4854 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4856 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4860 /* Copy/cleanup coprocessor load and store instructions. */
4863 cleanup_copro_load_store (struct gdbarch *gdbarch,
4864 struct regcache *regs,
4865 arm_displaced_step_copy_insn_closure *dsc)
4867 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4869 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4871 if (dsc->u.ldst.writeback)
4872 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4876 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4877 arm_displaced_step_copy_insn_closure *dsc,
4878 int writeback, unsigned int rn)
4882 /* Coprocessor load/store instructions:
4884 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4886 {stc/stc2} [r0, #+/-imm].
4888 ldc/ldc2 are handled identically. */
4890 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4891 rn_val = displaced_read_reg (regs, dsc, rn);
4892 /* PC should be 4-byte aligned. */
4893 rn_val = rn_val & 0xfffffffc;
4894 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4896 dsc->u.ldst.writeback = writeback;
4897 dsc->u.ldst.rn = rn;
4899 dsc->cleanup = &cleanup_copro_load_store;
4903 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4904 struct regcache *regs,
4905 arm_displaced_step_copy_insn_closure *dsc)
4907 unsigned int rn = bits (insn, 16, 19);
4909 if (!insn_references_pc (insn, 0x000f0000ul))
4910 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4912 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
4913 (unsigned long) insn);
4915 dsc->modinsn[0] = insn & 0xfff0ffff;
4917 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4923 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4924 uint16_t insn2, struct regcache *regs,
4925 arm_displaced_step_copy_insn_closure *dsc)
4927 unsigned int rn = bits (insn1, 0, 3);
4929 if (rn != ARM_PC_REGNUM)
4930 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4931 "copro load/store", dsc);
4933 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
4936 dsc->modinsn[0] = insn1 & 0xfff0;
4937 dsc->modinsn[1] = insn2;
4940 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4941 doesn't support writeback, so pass 0. */
4942 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4947 /* Clean up branch instructions (actually perform the branch, by setting PC). */
4951 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4952 arm_displaced_step_copy_insn_closure *dsc)
4954 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4955 int branch_taken = condition_true (dsc->u.branch.cond, status);
4956 enum pc_write_style write_pc = dsc->u.branch.exchange
4957 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4962 if (dsc->u.branch.link)
4964 /* LR should hold the address of the instruction following the current
4965 one. To avoid confusing later handling of a `bx lr' instruction, set
4966 bit 0 of the LR value when the current instruction executes in Thumb mode. */
4967 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4970 next_insn_addr |= 0x1;
4972 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4976 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4979 /* Copy B/BL/BLX instructions with immediate destinations. */
4982 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4983 arm_displaced_step_copy_insn_closure *dsc,
4984 unsigned int cond, int exchange, int link, long offset)
4986 /* Implement "BL<cond> <label>" as:
4988 Preparation: cond <- instruction condition
4989 Insn: mov r0, r0 (nop)
4990 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4992 B<cond> similar, but don't set r14 in cleanup. */
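  /* For example (hypothetical address): a BL at 0x8000 whose 24-bit
     immediate is 0x10 yields OFFSET 0x40, so the recorded destination is
     0x8000 + 8 + 0x40 = 0x8048, and the cleanup writes 0x8004 (the address
     of the following instruction) to r14 before performing the branch.  */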
4994 dsc->u.branch.cond = cond;
4995 dsc->u.branch.link = link;
4996 dsc->u.branch.exchange = exchange;
4998 dsc->u.branch.dest = dsc->insn_addr;
4999 if (link && exchange)
5000 /* For BLX, the offset is computed from Align (PC, 4). */
5001 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5004 dsc->u.branch.dest += 4 + offset;
5006 dsc->u.branch.dest += 8 + offset;
5008 dsc->cleanup = &cleanup_branch;
5011 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5012 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5014 unsigned int cond = bits (insn, 28, 31);
5015 int exchange = (cond == 0xf);
5016 int link = exchange || bit (insn, 24);
5019 displaced_debug_printf ("copying %s immediate insn %.8lx",
5020 (exchange) ? "blx" : (link) ? "bl" : "b",
5021 (unsigned long) insn);
5023 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5024 then arrange the switch into Thumb mode. */
5025 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5027 offset = bits (insn, 0, 23) << 2;
5029 if (bit (offset, 25))
5030 offset = offset | ~0x3ffffff;
5032 dsc->modinsn[0] = ARM_NOP;
5034 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5039 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5040 uint16_t insn2, struct regcache *regs,
5041 arm_displaced_step_copy_insn_closure *dsc)
5043 int link = bit (insn2, 14);
5044 int exchange = link && !bit (insn2, 12);
5047 int j1 = bit (insn2, 13);
5048 int j2 = bit (insn2, 11);
5049 int s = sbits (insn1, 10, 10);
5050 int i1 = !(j1 ^ bit (insn1, 10));
5051 int i2 = !(j2 ^ bit (insn1, 10));
5053 if (!link && !exchange) /* B */
5055 offset = (bits (insn2, 0, 10) << 1);
5056 if (bit (insn2, 12)) /* Encoding T4 */
5058 offset |= (bits (insn1, 0, 9) << 12)
5064 else /* Encoding T3 */
5066 offset |= (bits (insn1, 0, 5) << 12)
5070 cond = bits (insn1, 6, 9);
5075 offset = (bits (insn1, 0, 9) << 12);
5076 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5077 offset |= exchange ?
5078 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5081 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5082 link ? (exchange) ? "blx" : "bl" : "b",
5083 insn1, insn2, offset);
5085 dsc->modinsn[0] = THUMB_NOP;
5087 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5091 /* Copy B Thumb instructions. */
5093 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5094 arm_displaced_step_copy_insn_closure *dsc)
5096 unsigned int cond = 0;
5098 unsigned short bit_12_15 = bits (insn, 12, 15);
5099 CORE_ADDR from = dsc->insn_addr;
5101 if (bit_12_15 == 0xd)
5103 /* offset = SignExtend (imm8:0, 32) */
5104 offset = sbits ((insn << 1), 0, 8);
5105 cond = bits (insn, 8, 11);
5107 else if (bit_12_15 == 0xe) /* Encoding T2 */
5109 offset = sbits ((insn << 1), 0, 11);
5113 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5116 dsc->u.branch.cond = cond;
5117 dsc->u.branch.link = 0;
5118 dsc->u.branch.exchange = 0;
5119 dsc->u.branch.dest = from + 4 + offset;
5121 dsc->modinsn[0] = THUMB_NOP;
5123 dsc->cleanup = &cleanup_branch;
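  /* For example, insn 0xd0fe (beq with imm8 = 0xfe) gives offset = -4, so
     the recorded destination is from + 4 - 4 = from, i.e. a conditional
     branch back to itself.  */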
5128 /* Copy BX/BLX with register-specified destinations. */
5131 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5132 arm_displaced_step_copy_insn_closure *dsc, int link,
5133 unsigned int cond, unsigned int rm)
5135 /* Implement "{BX,BLX}<cond> <reg>" as:
5137 Preparation: cond <- instruction condition
5138 Insn: mov r0, r0 (nop)
5139 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5141 Don't set r14 in cleanup for BX. */
5143 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5145 dsc->u.branch.cond = cond;
5146 dsc->u.branch.link = link;
5148 dsc->u.branch.exchange = 1;
5150 dsc->cleanup = &cleanup_branch;
5154 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5155 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5157 unsigned int cond = bits (insn, 28, 31);
5160 int link = bit (insn, 5);
5161 unsigned int rm = bits (insn, 0, 3);
5163 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
5165 dsc->modinsn[0] = ARM_NOP;
5167 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5172 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5173 struct regcache *regs,
5174 arm_displaced_step_copy_insn_closure *dsc)
5176 int link = bit (insn, 7);
5177 unsigned int rm = bits (insn, 3, 6);
5179 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
5181 dsc->modinsn[0] = THUMB_NOP;
5183 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5189 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5192 cleanup_alu_imm (struct gdbarch *gdbarch,
5193 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5195 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5196 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5197 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5198 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5202 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5203 arm_displaced_step_copy_insn_closure *dsc)
5205 unsigned int rn = bits (insn, 16, 19);
5206 unsigned int rd = bits (insn, 12, 15);
5207 unsigned int op = bits (insn, 21, 24);
5208 int is_mov = (op == 0xd);
5209 ULONGEST rd_val, rn_val;
5211 if (!insn_references_pc (insn, 0x000ff000ul))
5212 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5214 displaced_debug_printf ("copying immediate %s insn %.8lx",
5215 is_mov ? "move" : "ALU",
5216 (unsigned long) insn);
5218 /* Instruction is of form:
5220 <op><cond> rd, [rn,] #imm
5224 Preparation: tmp1, tmp2 <- r0, r1;
5226 Insn: <op><cond> r0, r1, #imm
5227 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5230 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5231 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5232 rn_val = displaced_read_reg (regs, dsc, rn);
5233 rd_val = displaced_read_reg (regs, dsc, rd);
5234 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5235 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5239 dsc->modinsn[0] = insn & 0xfff00fff;
5241 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5243 dsc->cleanup = &cleanup_alu_imm;
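  /* For example (hypothetical address): for "add r2, pc, #4" (0xe28f2004)
     at 0x9000, the preparation loads r0 with the old value of r2 and r1
     with 0x9008 (the PC as the original instruction would have read it),
     the modified copy "add r0, r1, #4" computes 0x900c into r0, and the
     cleanup moves that result into r2 before restoring r0 and r1.  */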
5249 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5250 uint16_t insn2, struct regcache *regs,
5251 arm_displaced_step_copy_insn_closure *dsc)
5253 unsigned int op = bits (insn1, 5, 8);
5254 unsigned int rn, rm, rd;
5255 ULONGEST rd_val, rn_val;
5257 rn = bits (insn1, 0, 3); /* Rn */
5258 rm = bits (insn2, 0, 3); /* Rm */
5259 rd = bits (insn2, 8, 11); /* Rd */
5261 /* This routine is only called for the MOV instruction. */
5262 gdb_assert (op == 0x2 && rn == 0xf);
5264 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5265 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5267 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
5269 /* Instruction is of form:
5271 <op><cond> rd, [rn,] #imm
5275 Preparation: tmp1, tmp2 <- r0, r1;
5277 Insn: <op><cond> r0, r1, #imm
5278 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5281 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5282 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5283 rn_val = displaced_read_reg (regs, dsc, rn);
5284 rd_val = displaced_read_reg (regs, dsc, rd);
5285 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5286 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5289 dsc->modinsn[0] = insn1;
5290 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5293 dsc->cleanup = &cleanup_alu_imm;
5298 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5301 cleanup_alu_reg (struct gdbarch *gdbarch,
5302 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5307 rd_val = displaced_read_reg (regs, dsc, 0);
5309 for (i = 0; i < 3; i++)
5310 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5312 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5316 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5317 arm_displaced_step_copy_insn_closure *dsc,
5318 unsigned int rd, unsigned int rn, unsigned int rm)
5320 ULONGEST rd_val, rn_val, rm_val;
5322 /* Instruction is of form:
5324 <op><cond> rd, [rn,] rm [, <shift>]
5328 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5329 r0, r1, r2 <- rd, rn, rm
5330 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5331 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5334 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5335 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5336 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5337 rd_val = displaced_read_reg (regs, dsc, rd);
5338 rn_val = displaced_read_reg (regs, dsc, rn);
5339 rm_val = displaced_read_reg (regs, dsc, rm);
5340 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5341 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5342 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5345 dsc->cleanup = &cleanup_alu_reg;
5349 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5350 arm_displaced_step_copy_insn_closure *dsc)
5352 unsigned int op = bits (insn, 21, 24);
5353 int is_mov = (op == 0xd);
5355 if (!insn_references_pc (insn, 0x000ff00ful))
5356 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5358 displaced_debug_printf ("copying reg %s insn %.8lx",
5359 is_mov ? "move" : "ALU", (unsigned long) insn);
5362 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5364 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5366 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5372 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5373 struct regcache *regs,
5374 arm_displaced_step_copy_insn_closure *dsc)
5378 rm = bits (insn, 3, 6);
5379 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5381 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5382 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5384 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
5386 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5388 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5393 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5396 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5397 struct regcache *regs,
5398 arm_displaced_step_copy_insn_closure *dsc)
5400 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5403 for (i = 0; i < 4; i++)
5404 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5406 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5410 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5411 arm_displaced_step_copy_insn_closure *dsc,
5412 unsigned int rd, unsigned int rn, unsigned int rm,
5416 ULONGEST rd_val, rn_val, rm_val, rs_val;
5418 /* Instruction is of form:
5420 <op><cond> rd, [rn,] rm, <shift> rs
5424 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5425 r0, r1, r2, r3 <- rd, rn, rm, rs
5426 Insn: <op><cond> r0, r1, r2, <shift> r3
5428 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5432 for (i = 0; i < 4; i++)
5433 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5435 rd_val = displaced_read_reg (regs, dsc, rd);
5436 rn_val = displaced_read_reg (regs, dsc, rn);
5437 rm_val = displaced_read_reg (regs, dsc, rm);
5438 rs_val = displaced_read_reg (regs, dsc, rs);
5439 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5440 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5441 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5442 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5444 dsc->cleanup = &cleanup_alu_shifted_reg;
5448 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5449 struct regcache *regs,
5450 arm_displaced_step_copy_insn_closure *dsc)
5452 unsigned int op = bits (insn, 21, 24);
5453 int is_mov = (op == 0xd);
5454 unsigned int rd, rn, rm, rs;
5456 if (!insn_references_pc (insn, 0x000fff0ful))
5457 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5459 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
5460 is_mov ? "move" : "ALU",
5461 (unsigned long) insn);
5463 rn = bits (insn, 16, 19);
5464 rm = bits (insn, 0, 3);
5465 rs = bits (insn, 8, 11);
5466 rd = bits (insn, 12, 15);
5469 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5471 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5473 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5478 /* Clean up load instructions. */
5481 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5482 arm_displaced_step_copy_insn_closure *dsc)
5484 ULONGEST rt_val, rt_val2 = 0, rn_val;
5486 rt_val = displaced_read_reg (regs, dsc, 0);
5487 if (dsc->u.ldst.xfersize == 8)
5488 rt_val2 = displaced_read_reg (regs, dsc, 1);
5489 rn_val = displaced_read_reg (regs, dsc, 2);
5491 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5492 if (dsc->u.ldst.xfersize > 4)
5493 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5494 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5495 if (!dsc->u.ldst.immed)
5496 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5498 /* Handle register writeback. */
5499 if (dsc->u.ldst.writeback)
5500 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5501 /* Put result in right place. */
5502 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5503 if (dsc->u.ldst.xfersize == 8)
5504 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5507 /* Clean up store instructions. */
5510 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5511 arm_displaced_step_copy_insn_closure *dsc)
5513 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5515 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5516 if (dsc->u.ldst.xfersize > 4)
5517 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5518 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5519 if (!dsc->u.ldst.immed)
5520 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5521 if (!dsc->u.ldst.restore_r4)
5522 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5525 if (dsc->u.ldst.writeback)
5526 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5529 /* Copy "extra" load/store instructions. These are halfword/doubleword
5530 transfers, which have a different encoding to byte/word transfers. */
5533 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5534 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5536 unsigned int op1 = bits (insn, 20, 24);
5537 unsigned int op2 = bits (insn, 5, 6);
5538 unsigned int rt = bits (insn, 12, 15);
5539 unsigned int rn = bits (insn, 16, 19);
5540 unsigned int rm = bits (insn, 0, 3);
5541 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5542 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5543 int immed = (op1 & 0x4) != 0;
5545 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5547 if (!insn_references_pc (insn, 0x000ff00ful))
5548 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5550 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
5551 unprivileged ? "unprivileged " : "",
5552 (unsigned long) insn);
5554 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5557 internal_error (__FILE__, __LINE__,
5558 _("copy_extra_ld_st: instruction decode error"));
5560 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5561 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5562 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5564 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5566 rt_val = displaced_read_reg (regs, dsc, rt);
5567 if (bytesize[opcode] == 8)
5568 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5569 rn_val = displaced_read_reg (regs, dsc, rn);
5571 rm_val = displaced_read_reg (regs, dsc, rm);
5573 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5574 if (bytesize[opcode] == 8)
5575 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5576 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5578 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5581 dsc->u.ldst.xfersize = bytesize[opcode];
5582 dsc->u.ldst.rn = rn;
5583 dsc->u.ldst.immed = immed;
5584 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5585 dsc->u.ldst.restore_r4 = 0;
5588 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5590 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5591 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5593 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5595 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5596 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5598 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5603 /* Copy byte/halfword/word loads and stores. */
5606 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5607 arm_displaced_step_copy_insn_closure *dsc, int load,
5608 int immed, int writeback, int size, int usermode,
5609 int rt, int rm, int rn)
5611 ULONGEST rt_val, rn_val, rm_val = 0;
5613 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5614 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5616 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5618 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5620 rt_val = displaced_read_reg (regs, dsc, rt);
5621 rn_val = displaced_read_reg (regs, dsc, rn);
5623 rm_val = displaced_read_reg (regs, dsc, rm);
5625 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5626 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5628 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5630 dsc->u.ldst.xfersize = size;
5631 dsc->u.ldst.rn = rn;
5632 dsc->u.ldst.immed = immed;
5633 dsc->u.ldst.writeback = writeback;
5635 /* To write PC we can do:
5637 Before this sequence of instructions:
5638 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5639 r2 is the Rn value got from displaced_read_reg.
5641 Insn1: push {pc} Write address of STR instruction + offset on stack
5642 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5643 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5644 = addr(Insn1) + offset - addr(Insn3) - 8
5646 Insn4: add r4, r4, #8 r4 = offset - 8
5647 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5649 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5651 Otherwise we don't know what value to write for PC, since the offset is
5652 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5653 of this can be found in Section "Saving from r15" in
5654 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
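     /* For example (hypothetical address), if the original str sits at
	FROM = 0x9000 on a core that stores PC+12, the pushed word is
	addr(Insn1) + 12, so after Insn5 r0 holds 0x900c, exactly the value
	the original instruction would have stored; on a core that stores
	PC+8 the same sequence yields 0x9008.  */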
5656 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5661 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5662 uint16_t insn2, struct regcache *regs,
5663 arm_displaced_step_copy_insn_closure *dsc, int size)
5665 unsigned int u_bit = bit (insn1, 7);
5666 unsigned int rt = bits (insn2, 12, 15);
5667 int imm12 = bits (insn2, 0, 11);
5670 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
5671 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5677 /* Rewrite instruction LDR Rt imm12 into:
5679 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5683 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5686 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5687 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5688 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5690 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5692 pc_val = pc_val & 0xfffffffc;
5694 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5695 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5699 dsc->u.ldst.xfersize = size;
5700 dsc->u.ldst.immed = 0;
5701 dsc->u.ldst.writeback = 0;
5702 dsc->u.ldst.restore_r4 = 0;
5704 /* LDR R0, R2, R3 */
5705 dsc->modinsn[0] = 0xf852;
5706 dsc->modinsn[1] = 0x3;
5709 dsc->cleanup = &cleanup_load;
5715 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5716 uint16_t insn2, struct regcache *regs,
5717 arm_displaced_step_copy_insn_closure *dsc,
5718 int writeback, int immed)
5720 unsigned int rt = bits (insn2, 12, 15);
5721 unsigned int rn = bits (insn1, 0, 3);
5722 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5723 /* In LDR (register), there is also a register Rm, which is not allowed to
5724 be PC, so we don't have to check it. */
5726 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5727 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5730 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
5731 rt, rn, insn1, insn2);
5733 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5736 dsc->u.ldst.restore_r4 = 0;
5739 /* ldr[b]<cond> rt, [rn, #imm], etc.
5741 ldr[b]<cond> r0, [r2, #imm]. */
5743 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5744 dsc->modinsn[1] = insn2 & 0x0fff;
5747 /* ldr[b]<cond> rt, [rn, rm], etc.
5749 ldr[b]<cond> r0, [r2, r3]. */
5751 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5752 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5762 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5763 struct regcache *regs,
5764 arm_displaced_step_copy_insn_closure *dsc,
5765 int load, int size, int usermode)
5767 int immed = !bit (insn, 25);
5768 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5769 unsigned int rt = bits (insn, 12, 15);
5770 unsigned int rn = bits (insn, 16, 19);
5771 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5773 if (!insn_references_pc (insn, 0x000ff00ful))
5774 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5776 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
5777 load ? (size == 1 ? "ldrb" : "ldr")
5778 : (size == 1 ? "strb" : "str"),
5779 usermode ? "t" : "",
5781 (unsigned long) insn);
5783 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5784 usermode, rt, rm, rn);
5786 if (load || rt != ARM_PC_REGNUM)
5788 dsc->u.ldst.restore_r4 = 0;
5791 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5793 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5794 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5796 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5798 {ldr,str}[b]<cond> r0, [r2, r3]. */
5799 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5803 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5804 dsc->u.ldst.restore_r4 = 1;
5805 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5806 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5807 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5808 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5809 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5813 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5815 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5820 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5825 /* Cleanup LDM instructions with fully-populated register list. This is an
5826 unfortunate corner case: it's impossible to implement correctly by modifying
5827 the instruction. The issue is as follows: we have an instruction,
5831 which we must rewrite to avoid loading PC. A possible solution would be to
5832 do the load in two halves, something like (with suitable cleanup
5836 ldm[id][ab] r8!, {r0-r7}
5838 ldm[id][ab] r8, {r7-r14}
5841 but at present there's no suitable place for <temp>, since the scratch space
5842 is overwritten before the cleanup routine is called. For now, we simply
5843 emulate the instruction. */
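/* For example (hypothetical operands), emulating "ldmia r8, {r0-r15}" with
   r8 = 0x1000 loads r0 from 0x1000, r1 from 0x1004, and so on up to the PC
   from 0x103c; the final PC write uses LOAD_WRITE_PC, matching what a real
   load into the PC would do.  */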
5846 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5847 arm_displaced_step_copy_insn_closure *dsc)
5849 int inc = dsc->u.block.increment;
5850 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5851 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5852 uint32_t regmask = dsc->u.block.regmask;
5853 int regno = inc ? 0 : 15;
5854 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5855 int exception_return = dsc->u.block.load && dsc->u.block.user
5856 && (regmask & 0x8000) != 0;
5857 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5858 int do_transfer = condition_true (dsc->u.block.cond, status);
5859 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5864 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5865 sensible we can do here. Complain loudly. */
5866 if (exception_return)
5867 error (_("Cannot single-step exception return"));
5869 /* We don't handle any stores here for now. */
5870 gdb_assert (dsc->u.block.load != 0);
5872 displaced_debug_printf ("emulating block transfer: %s %s %s",
5873 dsc->u.block.load ? "ldm" : "stm",
5874 dsc->u.block.increment ? "inc" : "dec",
5875 dsc->u.block.before ? "before" : "after");
5882 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5885 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5888 xfer_addr += bump_before;
5890 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5891 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5893 xfer_addr += bump_after;
5895 regmask &= ~(1 << regno);
5898 if (dsc->u.block.writeback)
5899 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5903 /* Clean up an STM which included the PC in the register list. */
5906 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5907 arm_displaced_step_copy_insn_closure *dsc)
5909 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5910 int store_executed = condition_true (dsc->u.block.cond, status);
5911 CORE_ADDR pc_stored_at, transferred_regs
5912 = count_one_bits (dsc->u.block.regmask);
5913 CORE_ADDR stm_insn_addr;
5916 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5918 /* If condition code fails, there's nothing else to do. */
5919 if (!store_executed)
5922 if (dsc->u.block.increment)
5924 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5926 if (dsc->u.block.before)
5931 pc_stored_at = dsc->u.block.xfer_addr;
5933 if (dsc->u.block.before)
5937 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5938 stm_insn_addr = dsc->scratch_base;
5939 offset = pc_val - stm_insn_addr;
5941 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
5944 /* Rewrite the stored PC to the proper value for the non-displaced original
5946 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5947 dsc->insn_addr + offset);
5950 /* Clean up an LDM which includes the PC in the register list. We clumped all
5951 the registers in the transferred list into a contiguous range r0...rX (to
5952 avoid loading PC directly and losing control of the debugged program), so we
5953 must undo that here. */
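/* For example (hypothetical operands), "ldm r3, {r1, r2, pc}" is rewritten
   by the ldm/stm copy routines below as "ldm r3, {r0, r1, r2}"; this cleanup
   then moves r2 into the PC, r1 into r2 and r0 into r1, and finally restores
   any of the low registers it clobbered from DSC->tmp[].  */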
5956 cleanup_block_load_pc (struct gdbarch *gdbarch,
5957 struct regcache *regs,
5958 arm_displaced_step_copy_insn_closure *dsc)
5960 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5961 int load_executed = condition_true (dsc->u.block.cond, status);
5962 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5963 unsigned int regs_loaded = count_one_bits (mask);
5964 unsigned int num_to_shuffle = regs_loaded, clobbered;
5966 /* The method employed here will fail if the register list is fully populated
5967 (we need to avoid loading PC directly). */
5968 gdb_assert (num_to_shuffle < 16);
5973 clobbered = (1 << num_to_shuffle) - 1;
5975 while (num_to_shuffle > 0)
5977 if ((mask & (1 << write_reg)) != 0)
5979 unsigned int read_reg = num_to_shuffle - 1;
5981 if (read_reg != write_reg)
5983 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5984 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5985 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
5986 read_reg, write_reg);
5989 displaced_debug_printf ("LDM: register r%d already in the right "
5990 "place", write_reg);
5992 clobbered &= ~(1 << write_reg);
6000 /* Restore any registers we scribbled over. */
6001 for (write_reg = 0; clobbered != 0; write_reg++)
6003 if ((clobbered & (1 << write_reg)) != 0)
6005 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6007 displaced_debug_printf ("LDM: restored clobbered register r%d",
6009 clobbered &= ~(1 << write_reg);
6013 /* Perform register writeback manually. */
6014 if (dsc->u.block.writeback)
6016 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6018 if (dsc->u.block.increment)
6019 new_rn_val += regs_loaded * 4;
6021 new_rn_val -= regs_loaded * 4;
6023 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6028 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6029 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6032 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6033 struct regcache *regs,
6034 arm_displaced_step_copy_insn_closure *dsc)
6036 int load = bit (insn, 20);
6037 int user = bit (insn, 22);
6038 int increment = bit (insn, 23);
6039 int before = bit (insn, 24);
6040 int writeback = bit (insn, 21);
6041 int rn = bits (insn, 16, 19);
6043 /* Block transfers which don't mention PC can be run directly
6045 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6046 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6048 if (rn == ARM_PC_REGNUM)
6050 warning (_("displaced: Unpredictable LDM or STM with "
6051 "base register r15"));
6052 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6055 displaced_debug_printf ("copying block transfer insn %.8lx",
6056 (unsigned long) insn);
6058 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6059 dsc->u.block.rn = rn;
6061 dsc->u.block.load = load;
6062 dsc->u.block.user = user;
6063 dsc->u.block.increment = increment;
6064 dsc->u.block.before = before;
6065 dsc->u.block.writeback = writeback;
6066 dsc->u.block.cond = bits (insn, 28, 31);
6068 dsc->u.block.regmask = insn & 0xffff;
6072 if ((insn & 0xffff) == 0xffff)
6074 /* LDM with a fully-populated register list. This case is
6075 particularly tricky. Implement for now by fully emulating the
6076 instruction (which might not behave perfectly in all cases, but
6077 these instructions should be rare enough for that not to matter much). */
6079 dsc->modinsn[0] = ARM_NOP;
6081 dsc->cleanup = &cleanup_block_load_all;
6085 /* LDM of a list of registers which includes PC. Implement by
6086 rewriting the list of registers to be transferred into a
6087 contiguous chunk r0...rX before doing the transfer, then shuffling
6088 registers into the correct places in the cleanup routine. */
6089 unsigned int regmask = insn & 0xffff;
6090 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6093 for (i = 0; i < num_in_list; i++)
6094 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6096 /* Writeback makes things complicated. We need to avoid clobbering
6097 the base register with one of the registers in our modified
6098 register list, but just using a different register can't work in
6101 ldm r14!, {r0-r13,pc}
6103 which would need to be rewritten as:
6107 but that can't work, because there's no free register for N.
6109 Solve this by turning off the writeback bit, and emulating
6110 writeback manually in the cleanup routine. */
6115 new_regmask = (1 << num_in_list) - 1;
6117 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6118 "%.4x, modified list %.4x",
6119 rn, writeback ? "!" : "",
6120 (int) insn & 0xffff, new_regmask);
6122 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6124 dsc->cleanup = &cleanup_block_load_pc;
6129 /* STM of a list of registers which includes PC. Run the instruction
6130 as-is, but out of line: this will store the wrong value for the PC,
6131 so we must manually fix up the memory in the cleanup routine.
6132 Doing things this way has the advantage that we can auto-detect
6133 the offset of the PC write (which is architecture-dependent) in
6134 the cleanup routine. */
6135 dsc->modinsn[0] = insn;
6137 dsc->cleanup = &cleanup_block_store_pc;
6144 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6145 struct regcache *regs,
6146 arm_displaced_step_copy_insn_closure *dsc)
6148 int rn = bits (insn1, 0, 3);
6149 int load = bit (insn1, 4);
6150 int writeback = bit (insn1, 5);
6152 /* Block transfers which don't mention PC can be run directly
6154 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6155 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6157 if (rn == ARM_PC_REGNUM)
6159 warning (_("displaced: Unpredictable LDM or STM with "
6160 "base register r15"));
6161 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6162 "unpredictable ldm/stm", dsc);
6165 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6168 /* Clear bit 13, since it should always be zero. */
6169 dsc->u.block.regmask = (insn2 & 0xdfff);
6170 dsc->u.block.rn = rn;
6172 dsc->u.block.load = load;
6173 dsc->u.block.user = 0;
6174 dsc->u.block.increment = bit (insn1, 7);
6175 dsc->u.block.before = bit (insn1, 8);
6176 dsc->u.block.writeback = writeback;
6177 dsc->u.block.cond = INST_AL;
6178 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6182 if (dsc->u.block.regmask == 0xffff)
6184 /* This case should be impossible to reach. */
6189 unsigned int regmask = dsc->u.block.regmask;
6190 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
6193 for (i = 0; i < num_in_list; i++)
6194 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6199 new_regmask = (1 << num_in_list) - 1;
6201 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6202 "%.4x, modified list %.4x",
6203 rn, writeback ? "!" : "",
6204 (int) dsc->u.block.regmask, new_regmask);
6206 dsc->modinsn[0] = insn1;
6207 dsc->modinsn[1] = (new_regmask & 0xffff);
6210 dsc->cleanup = &cleanup_block_load_pc;
6215 dsc->modinsn[0] = insn1;
6216 dsc->modinsn[1] = insn2;
6218 dsc->cleanup = &cleanup_block_store_pc;
6223 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6224 This is used to avoid a dependency on BFD's bfd_endian enum. */
6227 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6230 return read_memory_unsigned_integer (memaddr, len,
6231 (enum bfd_endian) byte_order);
6234 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6237 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6240 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
6243 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6246 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6251 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6254 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6256 return arm_is_thumb (self->regcache);
6259 /* single_step() is called just before we want to resume the inferior,
6260 if we want to single-step it but there is no hardware or kernel
6261 single-step support. We find the targets of the upcoming instructions
6262 and set breakpoints on them. */
6264 std::vector<CORE_ADDR>
6265 arm_software_single_step (struct regcache *regcache)
6267 struct gdbarch *gdbarch = regcache->arch ();
6268 struct arm_get_next_pcs next_pcs_ctx;
6270 arm_get_next_pcs_ctor (&next_pcs_ctx,
6271 &arm_get_next_pcs_ops,
6272 gdbarch_byte_order (gdbarch),
6273 gdbarch_byte_order_for_code (gdbarch),
6277 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6279 for (CORE_ADDR &pc_ref : next_pcs)
6280 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
6285 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6286 for Linux, where some SVC instructions must be treated specially. */
6289 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6290 arm_displaced_step_copy_insn_closure *dsc)
6292 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6294 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6295 (unsigned long) resume_addr);
6297 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6301 /* Common copy routine for svc instruction. */
6304 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6305 arm_displaced_step_copy_insn_closure *dsc)
6307 /* Preparation: none.
6308 Insn: unmodified svc.
6309 Cleanup: pc <- insn_addr + insn_size. */
6311 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6313 dsc->wrote_to_pc = 1;
6315 /* Allow OS-specific code to override SVC handling. */
6316 if (dsc->u.svc.copy_svc_os)
6317 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6320 dsc->cleanup = &cleanup_svc;
6326 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6327 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6330 displaced_debug_printf ("copying svc insn %.8lx",
6331 (unsigned long) insn);
6333 dsc->modinsn[0] = insn;
6335 return install_svc (gdbarch, regs, dsc);
6339 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6340 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6343 displaced_debug_printf ("copying svc insn %.4x", insn);
6345 dsc->modinsn[0] = insn;
6347 return install_svc (gdbarch, regs, dsc);
6350 /* Copy undefined instructions. */
6353 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6354 arm_displaced_step_copy_insn_closure *dsc)
6356 displaced_debug_printf ("copying undefined insn %.8lx",
6357 (unsigned long) insn);
6359 dsc->modinsn[0] = insn;
6365 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6366 arm_displaced_step_copy_insn_closure *dsc)
6369 displaced_debug_printf ("copying undefined insn %.4x %.4x",
6370 (unsigned short) insn1, (unsigned short) insn2);
6372 dsc->modinsn[0] = insn1;
6373 dsc->modinsn[1] = insn2;
6379 /* Copy unpredictable instructions. */
6382 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6383 arm_displaced_step_copy_insn_closure *dsc)
6385 displaced_debug_printf ("copying unpredictable insn %.8lx",
6386 (unsigned long) insn);
6388 dsc->modinsn[0] = insn;
6393 /* The decode_* functions are instruction decoding helpers. They mostly follow
6394 the presentation in the ARM ARM. */
6397 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6398 struct regcache *regs,
6399 arm_displaced_step_copy_insn_closure *dsc)
6401 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6402 unsigned int rn = bits (insn, 16, 19);
6404 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
6405 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6406 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
6407 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6408 else if ((op1 & 0x60) == 0x20)
6409 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6410 else if ((op1 & 0x71) == 0x40)
6411 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6413 else if ((op1 & 0x77) == 0x41)
6414 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6415 else if ((op1 & 0x77) == 0x45)
6416 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6417 else if ((op1 & 0x77) == 0x51)
6420 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6422 return arm_copy_unpred (gdbarch, insn, dsc);
6424 else if ((op1 & 0x77) == 0x55)
6425 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6426 else if (op1 == 0x57)
6429 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6430 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6431 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6432 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6433 default: return arm_copy_unpred (gdbarch, insn, dsc);
6435 else if ((op1 & 0x63) == 0x43)
6436 return arm_copy_unpred (gdbarch, insn, dsc);
6437 else if ((op2 & 0x1) == 0x0)
6438 switch (op1 & ~0x80)
6441 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6443 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6444 case 0x71: case 0x75:
6446 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6447 case 0x63: case 0x67: case 0x73: case 0x77:
6448 return arm_copy_unpred (gdbarch, insn, dsc);
6450 return arm_copy_undef (gdbarch, insn, dsc);
6453 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6457 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6458 struct regcache *regs,
6459 arm_displaced_step_copy_insn_closure *dsc)
6461 if (bit (insn, 27) == 0)
6462 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6463 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6464 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6467 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6470 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6472 case 0x4: case 0x5: case 0x6: case 0x7:
6473 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6476 switch ((insn & 0xe00000) >> 21)
6478 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6480 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6483 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6486 return arm_copy_undef (gdbarch, insn, dsc);
6491 int rn_f = (bits (insn, 16, 19) == 0xf);
6492 switch ((insn & 0xe00000) >> 21)
6495 /* ldc/ldc2 imm (undefined for rn == pc). */
6496 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6497 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6500 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6502 case 0x4: case 0x5: case 0x6: case 0x7:
6503 /* ldc/ldc2 lit (undefined for rn != pc). */
6504 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6505 : arm_copy_undef (gdbarch, insn, dsc);
6508 return arm_copy_undef (gdbarch, insn, dsc);
6513 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6516 if (bits (insn, 16, 19) == 0xf)
6518 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6520 return arm_copy_undef (gdbarch, insn, dsc);
6524 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6526 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6530 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6532 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6535 return arm_copy_undef (gdbarch, insn, dsc);
6539 /* Decode miscellaneous instructions in dp/misc encoding space. */
6542 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6543 struct regcache *regs,
6544 arm_displaced_step_copy_insn_closure *dsc)
6546 unsigned int op2 = bits (insn, 4, 6);
6547 unsigned int op = bits (insn, 21, 22);
6552 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6555 if (op == 0x1) /* bx. */
6556 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6558 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6560 return arm_copy_undef (gdbarch, insn, dsc);
6564 /* Not really supported. */
6565 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6567 return arm_copy_undef (gdbarch, insn, dsc);
6571 return arm_copy_bx_blx_reg (gdbarch, insn,
6572 regs, dsc); /* blx register. */
6574 return arm_copy_undef (gdbarch, insn, dsc);
6577 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6581 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6583 /* Not really supported. */
6584 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6588 return arm_copy_undef (gdbarch, insn, dsc);
6593 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6594 struct regcache *regs,
6595 arm_displaced_step_copy_insn_closure *dsc)
6598 switch (bits (insn, 20, 24))
6601 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6604 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6606 case 0x12: case 0x16:
6607 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6610 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6614 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6616 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6617 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6618 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6619 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6620 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6621 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6622 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6623 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6624 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6625 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6626 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6627 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6628 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6629 /* 2nd arg means "unprivileged". */
6630 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6634 /* Should be unreachable. */
6639 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6640 struct regcache *regs,
6641 arm_displaced_step_copy_insn_closure *dsc)
6643 int a = bit (insn, 25), b = bit (insn, 4);
6644 uint32_t op1 = bits (insn, 20, 24);
6646 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6647 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6648 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6649 else if ((!a && (op1 & 0x17) == 0x02)
6650 || (a && (op1 & 0x17) == 0x02 && !b))
6651 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6652 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6653 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6654 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6655 else if ((!a && (op1 & 0x17) == 0x03)
6656 || (a && (op1 & 0x17) == 0x03 && !b))
6657 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6658 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6659 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6660 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6661 else if ((!a && (op1 & 0x17) == 0x06)
6662 || (a && (op1 & 0x17) == 0x06 && !b))
6663 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6664 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6665 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6666 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6667 else if ((!a && (op1 & 0x17) == 0x07)
6668 || (a && (op1 & 0x17) == 0x07 && !b))
6669 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6671 /* Should be unreachable. */
6676 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6677 arm_displaced_step_copy_insn_closure *dsc)
6679 switch (bits (insn, 20, 24))
6681 case 0x00: case 0x01: case 0x02: case 0x03:
6682 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6684 case 0x04: case 0x05: case 0x06: case 0x07:
6685 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6687 case 0x08: case 0x09: case 0x0a: case 0x0b:
6688 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6689 return arm_copy_unmodified (gdbarch, insn,
6690 "decode/pack/unpack/saturate/reverse", dsc);
6693 if (bits (insn, 5, 7) == 0) /* op2. */
6695 if (bits (insn, 12, 15) == 0xf)
6696 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6698 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6701 return arm_copy_undef (gdbarch, insn, dsc);
6703 case 0x1a: case 0x1b:
6704 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6705 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6707 return arm_copy_undef (gdbarch, insn, dsc);
6709 case 0x1c: case 0x1d:
6710 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6712 if (bits (insn, 0, 3) == 0xf)
6713 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6715 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6718 return arm_copy_undef (gdbarch, insn, dsc);
6720 case 0x1e: case 0x1f:
6721 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6722 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6724 return arm_copy_undef (gdbarch, insn, dsc);
6727 /* Should be unreachable. */
6732 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6733 struct regcache *regs,
6734 arm_displaced_step_copy_insn_closure *dsc)
6737 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6739 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6743 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6744 struct regcache *regs,
6745 arm_displaced_step_copy_insn_closure *dsc)
6747 unsigned int opcode = bits (insn, 20, 24);
6751 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6752 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6754 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6755 case 0x12: case 0x16:
6756 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6758 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6759 case 0x13: case 0x17:
6760 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6762 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6763 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6764 /* Note: no writeback for these instructions. Bit 25 will always be
6765 zero though (via caller), so the following works OK. */
6766 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6769 /* Should be unreachable. */
6773 /* Decode shifted register instructions. */
6776 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6777 uint16_t insn2, struct regcache *regs,
6778 arm_displaced_step_copy_insn_closure *dsc)
6780 /* PC is only allowed to be used in the MOV instruction. */
6782 unsigned int op = bits (insn1, 5, 8);
6783 unsigned int rn = bits (insn1, 0, 3);
6785 if (op == 0x2 && rn == 0xf) /* MOV */
6786 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6788 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6789 "dp (shift reg)", dsc);
6793 /* Decode extension register load/store. Exactly the same as
6794 arm_decode_ext_reg_ld_st. */
6797 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6798 uint16_t insn2, struct regcache *regs,
6799 arm_displaced_step_copy_insn_closure *dsc)
6801 unsigned int opcode = bits (insn1, 4, 8);
6805 case 0x04: case 0x05:
6806 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6807 "vfp/neon vmov", dsc);
6809 case 0x08: case 0x0c: /* 01x00 */
6810 case 0x0a: case 0x0e: /* 01x10 */
6811 case 0x12: case 0x16: /* 10x10 */
6812 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6813 "vfp/neon vstm/vpush", dsc);
6815 case 0x09: case 0x0d: /* 01x01 */
6816 case 0x0b: case 0x0f: /* 01x11 */
6817 case 0x13: case 0x17: /* 10x11 */
6818 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6819 "vfp/neon vldm/vpop", dsc);
6821 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6822 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6824 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6825 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6828 /* Should be unreachable. */
6833 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6834 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6836 unsigned int op1 = bits (insn, 20, 25);
6837 int op = bit (insn, 4);
6838 unsigned int coproc = bits (insn, 8, 11);
6840 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6841 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6842 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6843 && (coproc & 0xe) != 0xa)
6845 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6846 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6847 && (coproc & 0xe) != 0xa)
6848 /* ldc/ldc2 imm/lit. */
6849 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6850 else if ((op1 & 0x3e) == 0x00)
6851 return arm_copy_undef (gdbarch, insn, dsc);
6852 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6853 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6854 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6855 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6856 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6857 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6858 else if ((op1 & 0x30) == 0x20 && !op)
6860 if ((coproc & 0xe) == 0xa)
6861 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6863 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6865 else if ((op1 & 0x30) == 0x20 && op)
6866 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6867 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6868 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6869 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6870 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6871 else if ((op1 & 0x30) == 0x30)
6872 return arm_copy_svc (gdbarch, insn, regs, dsc);
6874 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6878 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6879 uint16_t insn2, struct regcache *regs,
6880 arm_displaced_step_copy_insn_closure *dsc)
6882 unsigned int coproc = bits (insn2, 8, 11);
6883 unsigned int bit_5_8 = bits (insn1, 5, 8);
6884 unsigned int bit_9 = bit (insn1, 9);
6885 unsigned int bit_4 = bit (insn1, 4);
6890 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6891 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6893 else if (bit_5_8 == 0) /* UNDEFINED. */
6894 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6897 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6898 if ((coproc & 0xe) == 0xa)
6899 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6901 else /* coproc is not 101x. */
6903 if (bit_4 == 0) /* STC/STC2. */
6904 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6906 else /* LDC/LDC2 {literal, immediate}. */
6907 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6913 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6919 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6920 arm_displaced_step_copy_insn_closure *dsc, int rd)
6926 Preparation: Rd <- PC
6932 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6933 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6937 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6938 arm_displaced_step_copy_insn_closure *dsc,
6939 int rd, unsigned int imm)
6942 /* Encoding T2: ADDS Rd, #imm */
6943 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6945 install_pc_relative (gdbarch, regs, dsc, rd);
6951 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6952 struct regcache *regs,
6953 arm_displaced_step_copy_insn_closure *dsc)
6955 unsigned int rd = bits (insn, 8, 10);
6956 unsigned int imm8 = bits (insn, 0, 7);
6958 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
6961 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6965 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6966 uint16_t insn2, struct regcache *regs,
6967 arm_displaced_step_copy_insn_closure *dsc)
6969 unsigned int rd = bits (insn2, 8, 11);
6970 /* The immediate has the same encoding in ADR, ADD and SUB, so we simply
6971 extract the raw immediate encoding rather than computing the immediate
6972 value. When generating the ADD or SUB instruction we can simply OR the
6973 immediate field into the encoding. */
6974 unsigned int imm_3_8 = insn2 & 0x70ff;
6975 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6977 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
6978 rd, imm_i, imm_3_8, insn1, insn2);
6980 if (bit (insn1, 7)) /* ADR encoding T2 (subtract form). */
6982 /* Rewrite as SUB Rd, Rd, #imm (SUB immediate, encoding T3). */
6983 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6984 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6986 else /* ADR encoding T3 (add form). */
6988 /* Rewrite as ADD Rd, Rd, #imm (ADD immediate, encoding T3). */
6989 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6990 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6994 install_pc_relative (gdbarch, regs, dsc, rd);
7000 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7001 struct regcache *regs,
7002 arm_displaced_step_copy_insn_closure *dsc)
7004 unsigned int rt = bits (insn1, 8, 10);
7006 int imm8 = (bits (insn1, 0, 7) << 2);
7012 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7014 Insn: LDR R0, [R2, R3];
7015 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7017 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
7019 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7020 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7021 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7022 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7023 /* The assembler calculates the required value of the offset from the
7024 Align(PC,4) value of this instruction to the label. */
7025 pc = pc & 0xfffffffc;
7027 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7028 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7031 dsc->u.ldst.xfersize = 4;
7033 dsc->u.ldst.immed = 0;
7034 dsc->u.ldst.writeback = 0;
7035 dsc->u.ldst.restore_r4 = 0;
7037 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7039 dsc->cleanup = &cleanup_load;
7044 /* Copy Thumb cbnz/cbz instruction. */
7047 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7048 struct regcache *regs,
7049 arm_displaced_step_copy_insn_closure *dsc)
7051 int non_zero = bit (insn1, 11);
7052 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
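/* Despite its name, imm5 holds the full zero-extended byte offset
   i:imm5:'0' (0..126) encoded in the instruction. */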
7053 CORE_ADDR from = dsc->insn_addr;
7054 int rn = bits (insn1, 0, 2);
7055 int rn_val = displaced_read_reg (regs, dsc, rn);
7057 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7058 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7059 true, set it to INST_AL so that cleanup_branch knows the branch is
7060 taken; otherwise leave it alone and cleanup_branch will do nothing. */
7061 if (dsc->u.branch.cond)
7063 dsc->u.branch.cond = INST_AL;
7064 dsc->u.branch.dest = from + 4 + imm5;
7067 dsc->u.branch.dest = from + 2;
7069 dsc->u.branch.link = 0;
7070 dsc->u.branch.exchange = 0;
7072 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7073 non_zero ? "cbnz" : "cbz",
7074 rn, rn_val, insn1, dsc->u.branch.dest);
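/* The copied instruction is just a NOP; if the branch is taken,
   cleanup_branch applies it afterwards. */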
7076 dsc->modinsn[0] = THUMB_NOP;
7078 dsc->cleanup = &cleanup_branch;
7082 /* Copy Table Branch Byte/Halfword */
7084 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7085 uint16_t insn2, struct regcache *regs,
7086 arm_displaced_step_copy_insn_closure *dsc)
7088 ULONGEST rn_val, rm_val;
7089 int is_tbh = bit (insn2, 4);
7090 CORE_ADDR halfwords = 0;
7091 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7093 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7094 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7100 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7101 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7107 target_read_memory (rn_val + rm_val, buf, 1);
7108 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7111 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x",
7112 is_tbh ? "tbh" : "tbb",
7113 (unsigned int) rn_val, (unsigned int) rm_val,
7114 (unsigned int) halfwords);
7116 dsc->u.branch.cond = INST_AL;
7117 dsc->u.branch.link = 0;
7118 dsc->u.branch.exchange = 0;
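/* In Thumb state the PC of the TBB/TBH reads as its address plus 4;
   the branch target is that PC plus twice the table entry. */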
7119 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7121 dsc->cleanup = &cleanup_branch;
7127 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7128 arm_displaced_step_copy_insn_closure *dsc)
7131 int val = displaced_read_reg (regs, dsc, 7);
7132 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7135 val = displaced_read_reg (regs, dsc, 8);
7136 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7139 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7144 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7145 struct regcache *regs,
7146 arm_displaced_step_copy_insn_closure *dsc)
7148 dsc->u.block.regmask = insn1 & 0x00ff;
7150 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7153 (1) register list is full, that is, r0-r7 are used.
7154 Prepare: tmp[0] <- r8
7156 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7157 MOV r8, r7; Move value of r7 to r8;
7158 POP {r7}; Store PC value into r7.
7160 Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]
7162 (2) register list is not full, supposing there are N registers in
7163 register list (except PC, 0 <= N <= 7).
7164 Prepare: for each i in 0 .. N, tmp[i] <- ri.
7166 POP {r0, r1, ...., rN};
7168 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7169 from tmp[] properly.
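For instance, POP {r0, r2, pc} (0xbd05) has two registers besides the
PC, so the copied instruction becomes POP {r0, r1, r2} (0xbc07);
cleanup_block_load_pc then writes the third popped word to the PC and
moves/restores the other values according to the original list.  */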
7171 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7172 dsc->u.block.regmask, insn1);
7174 if (dsc->u.block.regmask == 0xff)
7176 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7178 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7179 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7180 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7183 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7187 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
7189 unsigned int new_regmask;
7191 for (i = 0; i < num_in_list + 1; i++)
7192 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7194 new_regmask = (1 << (num_in_list + 1)) - 1;
7196 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7197 "modified list %.4x",
7198 (int) dsc->u.block.regmask, new_regmask);
7200 dsc->u.block.regmask |= 0x8000;
7201 dsc->u.block.writeback = 0;
7202 dsc->u.block.cond = INST_AL;
7204 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7206 dsc->cleanup = &cleanup_block_load_pc;
7213 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7214 struct regcache *regs,
7215 arm_displaced_step_copy_insn_closure *dsc)
7217 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7218 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7221 /* 16-bit thumb instructions. */
7222 switch (op_bit_12_15)
7224 /* Shift (immediate), add, subtract, move and compare. */
7225 case 0: case 1: case 2: case 3:
7226 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7227 "shift/add/sub/mov/cmp",
7231 switch (op_bit_10_11)
7233 case 0: /* Data-processing */
7234 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7238 case 1: /* Special data instructions and branch and exchange. */
7240 unsigned short op = bits (insn1, 7, 9);
7241 if (op == 6 || op == 7) /* BX or BLX */
7242 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7243 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7244 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7246 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7250 default: /* LDR (literal) */
7251 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7254 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7255 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7258 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7259 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7260 else /* Generate SP-relative address */
7261 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7263 case 11: /* Misc 16-bit instructions */
7265 switch (bits (insn1, 8, 11))
7267 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7268 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7270 case 12: case 13: /* POP */
7271 if (bit (insn1, 8)) /* PC is in register list. */
7272 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7274 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7276 case 15: /* If-Then, and hints */
7277 if (bits (insn1, 0, 3))
7278 /* If-Then makes up to four following instructions conditional.
7279 The IT instruction itself is not conditional, so handle it as an
7280 ordinary unmodified instruction. */
7281 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7284 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7287 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7292 if (op_bit_10_11 < 2) /* Store multiple registers */
7293 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7294 else /* Load multiple registers */
7295 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7297 case 13: /* Conditional branch and supervisor call */
7298 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7299 err = thumb_copy_b (gdbarch, insn1, dsc);
7301 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7303 case 14: /* Unconditional branch */
7304 err = thumb_copy_b (gdbarch, insn1, dsc);
7311 internal_error (__FILE__, __LINE__,
7312 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7316 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7317 uint16_t insn1, uint16_t insn2,
7318 struct regcache *regs,
7319 arm_displaced_step_copy_insn_closure *dsc)
7321 int rt = bits (insn2, 12, 15);
7322 int rn = bits (insn1, 0, 3);
7323 int op1 = bits (insn1, 7, 8);
7325 switch (bits (insn1, 5, 6))
7327 case 0: /* Load byte and memory hints */
7328 if (rt == 0xf) /* PLD/PLI */
7331 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7332 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7334 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7339 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7340 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7343 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7344 "ldrb{reg, immediate}/ldrbt",
7349 case 1: /* Load halfword and memory hints. */
7350 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7351 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7352 "pld/unalloc memhint", dsc);
7356 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7359 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7363 case 2: /* Load word */
7365 int insn2_bit_8_11 = bits (insn2, 8, 11);
7368 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7369 else if (op1 == 0x1) /* Encoding T3 */
7370 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7372 else /* op1 == 0x0 */
7374 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7375 /* LDR (immediate) */
7376 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7377 dsc, bit (insn2, 8), 1);
7378 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7379 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7382 /* LDR (register) */
7383 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7389 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7396 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7397 uint16_t insn2, struct regcache *regs,
7398 arm_displaced_step_copy_insn_closure *dsc)
7401 unsigned short op = bit (insn2, 15);
7402 unsigned int op1 = bits (insn1, 11, 12);
7408 switch (bits (insn1, 9, 10))
7413 /* Load/store {dual, exclusive}, table branch. */
7414 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7415 && bits (insn2, 5, 7) == 0)
7416 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7419 /* PC is not allowed to be used in load/store {dual, exclusive} instructions. */
7421 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7422 "load/store dual/ex", dsc);
7424 else /* load/store multiple */
7426 switch (bits (insn1, 7, 8))
7428 case 0: case 3: /* SRS, RFE */
7429 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7432 case 1: case 2: /* LDM/STM/PUSH/POP */
7433 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7440 /* Data-processing (shift register). */
7441 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7444 default: /* Coprocessor instructions. */
7445 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7450 case 2: /* op1 = 2 */
7451 if (op) /* Branch and misc control. */
7453 if (bit (insn2, 14) /* BLX/BL */
7454 || bit (insn2, 12) /* Unconditional branch */
7455 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7456 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7458 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7463 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7465 int dp_op = bits (insn1, 4, 8);
7466 int rn = bits (insn1, 0, 3);
7467 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
7468 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7471 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7474 else /* Data processing (modified immediate) */
7475 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7479 case 3: /* op1 = 3 */
7480 switch (bits (insn1, 9, 10))
7484 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7486 else /* NEON Load/Store and Store single data item */
7487 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7488 "neon elt/struct load/store",
7491 case 1: /* op1 = 3, bits (9, 10) == 1 */
7492 switch (bits (insn1, 7, 8))
7494 case 0: case 1: /* Data processing (register) */
7495 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7498 case 2: /* Multiply and absolute difference */
7499 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7500 "mul/mua/diff", dsc);
7502 case 3: /* Long multiply and divide */
7503 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7508 default: /* Coprocessor instructions */
7509 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7518 internal_error (__FILE__, __LINE__,
7519 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7524 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7525 struct regcache *regs,
7526 arm_displaced_step_copy_insn_closure *dsc)
7528 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7530 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7532 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
7533 insn1, (unsigned long) from);
7536 dsc->insn_size = thumb_insn_size (insn1);
7537 if (thumb_insn_size (insn1) == 4)
7540 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7541 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7544 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7548 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7549 CORE_ADDR to, struct regcache *regs,
7550 arm_displaced_step_copy_insn_closure *dsc)
7553 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7556 /* Most displaced instructions use a 1-instruction scratch space, so set this
7557 here and override below if/when necessary. */
7559 dsc->insn_addr = from;
7560 dsc->scratch_base = to;
7561 dsc->cleanup = NULL;
7562 dsc->wrote_to_pc = 0;
7564 if (!displaced_in_arm_mode (regs))
7565 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7569 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7570 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
7571 (unsigned long) insn, (unsigned long) from);
7573 if ((insn & 0xf0000000) == 0xf0000000)
7574 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
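/* Otherwise dispatch on instruction bits 27:25 (bits 3:1 of the switch
   value) combined with bit 4 (bit 0 of the switch value). */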
7575 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7577 case 0x0: case 0x1: case 0x2: case 0x3:
7578 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7581 case 0x4: case 0x5: case 0x6:
7582 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7586 err = arm_decode_media (gdbarch, insn, dsc);
7589 case 0x8: case 0x9: case 0xa: case 0xb:
7590 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7593 case 0xc: case 0xd: case 0xe: case 0xf:
7594 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7599 internal_error (__FILE__, __LINE__,
7600 _("arm_process_displaced_insn: Instruction decode error"));
7603 /* Actually set up the scratch space for a displaced instruction. */
7606 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7608 arm_displaced_step_copy_insn_closure *dsc)
7610 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7611 unsigned int i, len, offset;
7612 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7613 int size = dsc->is_thumb? 2 : 4;
7614 const gdb_byte *bkp_insn;
7617 /* Poke modified instruction(s). */
7618 for (i = 0; i < dsc->numinsns; i++)
7621 displaced_debug_printf ("writing insn %.8lx at %.8lx",
7622 dsc->modinsn[i], (unsigned long) to + offset);
7624 displaced_debug_printf ("writing insn %.4x at %.8lx",
7625 (unsigned short) dsc->modinsn[i],
7626 (unsigned long) to + offset);
7628 write_memory_unsigned_integer (to + offset, size,
7629 byte_order_for_code,
7634 /* Choose the correct breakpoint instruction. */
7637 bkp_insn = tdep->thumb_breakpoint;
7638 len = tdep->thumb_breakpoint_size;
7642 bkp_insn = tdep->arm_breakpoint;
7643 len = tdep->arm_breakpoint_size;
7646 /* Put breakpoint afterwards. */
7647 write_memory (to + offset, bkp_insn, len);
7649 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
7650 paddress (gdbarch, to));
7653 /* Entry point for cleaning things up after a displaced instruction has been single-stepped. */
7657 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7658 struct displaced_step_copy_insn_closure *dsc_,
7659 CORE_ADDR from, CORE_ADDR to,
7660 struct regcache *regs)
7662 arm_displaced_step_copy_insn_closure *dsc
7663 = (arm_displaced_step_copy_insn_closure *) dsc_;
7666 dsc->cleanup (gdbarch, regs, dsc);
7668 if (!dsc->wrote_to_pc)
7669 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7670 dsc->insn_addr + dsc->insn_size);
7674 #include "bfd-in2.h"
7675 #include "libcoff.h"
7678 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7680 gdb_disassembler *di
7681 = static_cast<gdb_disassembler *>(info->application_data);
7682 struct gdbarch *gdbarch = di->arch ();
7684 if (arm_pc_is_thumb (gdbarch, memaddr))
7686 static asymbol *asym;
7687 static combined_entry_type ce;
7688 static struct coff_symbol_struct csym;
7689 static struct bfd fake_bfd;
7690 static bfd_target fake_target;
7692 if (csym.native == NULL)
7694 /* Create a fake symbol vector containing a Thumb symbol.
7695 This is solely so that the code in print_insn_little_arm()
7696 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7697 the presence of a Thumb symbol and switch to decoding
7698 Thumb instructions. */
7700 fake_target.flavour = bfd_target_coff_flavour;
7701 fake_bfd.xvec = &fake_target;
7702 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7704 csym.symbol.the_bfd = &fake_bfd;
7705 csym.symbol.name = "fake";
7706 asym = (asymbol *) & csym;
7709 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7710 info->symbols = &asym;
7713 info->symbols = NULL;
7715 /* GDB is able to get bfd_mach from the exec_bfd, so info->mach is
7716 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7717 opcodes/arm-dis.c:print_insn resets info->mach, and that will trigger
7718 the assert on the mismatch of info->mach and
7719 bfd_get_mach (current_program_space->exec_bfd ()) in
7720 default_print_insn. */
7721 if (current_program_space->exec_bfd () != NULL)
7722 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
7724 return default_print_insn (memaddr, info);
7727 /* The following define instruction sequences that will cause ARM
7728 CPUs to take an undefined instruction trap. These are used to
7729 signal a breakpoint to GDB.
7731 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7732 modes. A different instruction is required for each mode. The ARM
7733 CPUs can also be big or little endian. Thus four different
7734 instructions are needed to support all cases.
7736 Note: ARMv4 defines several new instructions that will take the
7737 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7738 not in fact add the new instructions. The new undefined
7739 instructions in ARMv4 are all instructions that had no defined
7740 behaviour in earlier chips. There is no guarantee that they will
7741 raise an exception; they may instead be treated as NOPs. In practice,
7742 it may only be safe to rely on instructions matching:
7744 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7745 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7746 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7748 Even this may only be true if the condition predicate is true. The
7749 following use a condition predicate of ALWAYS so it is always TRUE.
7751 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7752 and NetBSD all use a software interrupt rather than an undefined
7753 instruction to force a trap. This can be handled by the
7754 ABI-specific code during establishment of the gdbarch vector. */
7756 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7757 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7758 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7759 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
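/* Both bytes of the Thumb breakpoint pattern are identical, so the same
   sequence works for either byte order. */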
7761 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7762 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7763 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7764 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7766 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7769 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7771 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7772 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7774 if (arm_pc_is_thumb (gdbarch, *pcptr))
7776 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7778 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7779 check whether we are replacing a 32-bit instruction. */
7780 if (tdep->thumb2_breakpoint != NULL)
7784 if (target_read_memory (*pcptr, buf, 2) == 0)
7786 unsigned short inst1;
7788 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7789 if (thumb_insn_size (inst1) == 4)
7790 return ARM_BP_KIND_THUMB2;
7794 return ARM_BP_KIND_THUMB;
7797 return ARM_BP_KIND_ARM;
7801 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7803 static const gdb_byte *
7804 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7806 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7810 case ARM_BP_KIND_ARM:
7811 *size = tdep->arm_breakpoint_size;
7812 return tdep->arm_breakpoint;
7813 case ARM_BP_KIND_THUMB:
7814 *size = tdep->thumb_breakpoint_size;
7815 return tdep->thumb_breakpoint;
7816 case ARM_BP_KIND_THUMB2:
7817 *size = tdep->thumb2_breakpoint_size;
7818 return tdep->thumb2_breakpoint;
7820 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7824 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7827 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7828 struct regcache *regcache,
7833 /* Check that the memory pointed to by the PC is readable. */
7834 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7836 struct arm_get_next_pcs next_pcs_ctx;
7838 arm_get_next_pcs_ctor (&next_pcs_ctx,
7839 &arm_get_next_pcs_ops,
7840 gdbarch_byte_order (gdbarch),
7841 gdbarch_byte_order_for_code (gdbarch),
7845 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7847 /* If *PCPTR matches one of the next PCs computed by the software
7848 single-step code above, determine whether the destination is Thumb
7849 from that address. */
7850 for (CORE_ADDR pc : next_pcs)
7852 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7854 if (IS_THUMB_ADDR (pc))
7856 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7857 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7860 return ARM_BP_KIND_ARM;
7865 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7868 /* Extract from the register cache REGS, containing the (raw) register
7869 state, a function return value of type TYPE, and copy that, in
7870 virtual format, into VALBUF. */
7873 arm_extract_return_value (struct type *type, struct regcache *regs,
7876 struct gdbarch *gdbarch = regs->arch ();
7877 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7879 if (TYPE_CODE_FLT == type->code ())
7881 switch (gdbarch_tdep (gdbarch)->fp_model)
7885 /* The value is in register F0 in internal format. We need to
7886 extract the raw value and then convert it to the desired data type. */
7888 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
7890 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
7891 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
7896 case ARM_FLOAT_SOFT_FPA:
7897 case ARM_FLOAT_SOFT_VFP:
7898 /* ARM_FLOAT_VFP can arise if this is a variadic function, in which
7899 case the VFP ABI code is not used. */
7901 regs->cooked_read (ARM_A1_REGNUM, valbuf);
7902 if (TYPE_LENGTH (type) > 4)
7903 regs->cooked_read (ARM_A1_REGNUM + 1,
7904 valbuf + ARM_INT_REGISTER_SIZE);
7908 internal_error (__FILE__, __LINE__,
7909 _("arm_extract_return_value: "
7910 "Floating point model not supported"));
7914 else if (type->code () == TYPE_CODE_INT
7915 || type->code () == TYPE_CODE_CHAR
7916 || type->code () == TYPE_CODE_BOOL
7917 || type->code () == TYPE_CODE_PTR
7918 || TYPE_IS_REFERENCE (type)
7919 || type->code () == TYPE_CODE_ENUM)
7921 /* If the type is a plain integer, then the access is
7922 straightforward. Otherwise we have to play around a bit more. */
7924 int len = TYPE_LENGTH (type);
7925 int regno = ARM_A1_REGNUM;
7930 /* By using store_unsigned_integer we avoid having to do
7931 anything special for small big-endian values. */
7932 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7933 store_unsigned_integer (valbuf,
7934 (len > ARM_INT_REGISTER_SIZE
7935 ? ARM_INT_REGISTER_SIZE : len),
7937 len -= ARM_INT_REGISTER_SIZE;
7938 valbuf += ARM_INT_REGISTER_SIZE;
7943 /* For a structure or union the behaviour is as if the value had
7944 been stored to word-aligned memory and then loaded into
7945 registers with 32-bit load instruction(s). */
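/* For instance, a 6-byte structure comes back with its first four bytes
   taken from r0 and the remaining two bytes from r1. */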
7946 int len = TYPE_LENGTH (type);
7947 int regno = ARM_A1_REGNUM;
7948 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
7952 regs->cooked_read (regno++, tmpbuf);
7953 memcpy (valbuf, tmpbuf,
7954 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
7955 len -= ARM_INT_REGISTER_SIZE;
7956 valbuf += ARM_INT_REGISTER_SIZE;
7962 /* Will a function return an aggregate type in memory or in a
7963 register? Return 0 if an aggregate type can be returned in a
7964 register, 1 if it must be returned in memory. */
7967 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7969 enum type_code code;
7971 type = check_typedef (type);
7973 /* Simple, non-aggregate types (ie not including vectors and
7974 complex) are always returned in a register (or registers). */
7975 code = type->code ();
7976 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7977 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7980 if (TYPE_CODE_ARRAY == code && type->is_vector ())
7982 /* Vector values should be returned using ARM registers if they
7983 are not over 16 bytes. */
7984 return (TYPE_LENGTH (type) > 16);
7987 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7989 /* The AAPCS says all aggregates not larger than a word are returned in registers. */
7991 if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
8000 /* All aggregate types that won't fit in a register must be returned in memory. */
8002 if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
8005 /* In the ARM ABI, "integer" like aggregate types are returned in
8006 registers. For an aggregate type to be integer like, its size
8007 must be less than or equal to ARM_INT_REGISTER_SIZE and the
8008 offset of each addressable subfield must be zero. Note that bit
8009 fields are not addressable, and all addressable subfields of
8010 unions always start at offset zero.
8012 This function is based on the behaviour of GCC 2.95.1.
8013 See: gcc/arm.c: arm_return_in_memory() for details.
8015 Note: All versions of GCC before GCC 2.95.2 do not set up the
8016 parameters correctly for a function returning the following
8017 structure: struct { float f;}; This should be returned in memory,
8018 not a register. Richard Earnshaw sent me a patch, but I do not
8019 know of any way to detect if a function like the above has been
8020 compiled with the correct calling convention. */
8022 /* Assume all other aggregate types can be returned in a register.
8023 Run a check for structures, unions and arrays. */
8026 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8029 /* Need to check if this struct/union is "integer" like. For
8030 this to be true, its size must be less than or equal to
8031 ARM_INT_REGISTER_SIZE and the offset of each addressable
8032 subfield must be zero. Note that bit fields are not
8033 addressable, and unions always start at offset zero. If any
8034 of the subfields is a floating point type, the struct/union
8035 cannot be an integer type. */
8037 /* For each field in the object, check:
8038 1) Is it FP? --> yes, nRc = 1;
8039 2) Is it addressable (bitpos != 0) and
8040 not packed (bitsize == 0)? --> yes, nRc = 1. */
8044 for (i = 0; i < type->num_fields (); i++)
8046 enum type_code field_type_code;
8049 = check_typedef (type->field (i).type ())->code ();
8051 /* Is it a floating point type field? */
8052 if (field_type_code == TYPE_CODE_FLT)
8058 /* If bitpos != 0, then we have to care about it. */
8059 if (TYPE_FIELD_BITPOS (type, i) != 0)
8061 /* Bitfields are not addressable. If the field bitsize is
8062 zero, then the field is not packed. Hence it cannot be
8063 a bitfield or any other packed type. */
8064 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8077 /* Write into appropriate registers a function return value of type
8078 TYPE, given in virtual format. */
8081 arm_store_return_value (struct type *type, struct regcache *regs,
8082 const gdb_byte *valbuf)
8084 struct gdbarch *gdbarch = regs->arch ();
8085 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8087 if (type->code () == TYPE_CODE_FLT)
8089 gdb_byte buf[ARM_FP_REGISTER_SIZE];
8091 switch (gdbarch_tdep (gdbarch)->fp_model)
8095 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
8096 regs->cooked_write (ARM_F0_REGNUM, buf);
8099 case ARM_FLOAT_SOFT_FPA:
8100 case ARM_FLOAT_SOFT_VFP:
8101 /* ARM_FLOAT_VFP can arise if this is a variadic function, in which
8102 case the VFP ABI code is not used. */
8104 regs->cooked_write (ARM_A1_REGNUM, valbuf);
8105 if (TYPE_LENGTH (type) > 4)
8106 regs->cooked_write (ARM_A1_REGNUM + 1,
8107 valbuf + ARM_INT_REGISTER_SIZE);
8111 internal_error (__FILE__, __LINE__,
8112 _("arm_store_return_value: Floating "
8113 "point model not supported"));
8117 else if (type->code () == TYPE_CODE_INT
8118 || type->code () == TYPE_CODE_CHAR
8119 || type->code () == TYPE_CODE_BOOL
8120 || type->code () == TYPE_CODE_PTR
8121 || TYPE_IS_REFERENCE (type)
8122 || type->code () == TYPE_CODE_ENUM)
8124 if (TYPE_LENGTH (type) <= 4)
8126 /* Values of one word or less are zero/sign-extended and returned in a register. */
8128 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8129 LONGEST val = unpack_long (type, valbuf);
8131 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
8132 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
8136 /* Integral values greater than one word are stored in consecutive
8137 registers starting with r0. This will always be a multiple of
8138 the register size. */
8139 int len = TYPE_LENGTH (type);
8140 int regno = ARM_A1_REGNUM;
8144 regs->cooked_write (regno++, valbuf);
8145 len -= ARM_INT_REGISTER_SIZE;
8146 valbuf += ARM_INT_REGISTER_SIZE;
8152 /* For a structure or union the behaviour is as if the value had
8153 been stored to word-aligned memory and then loaded into
8154 registers with 32-bit load instruction(s). */
8155 int len = TYPE_LENGTH (type);
8156 int regno = ARM_A1_REGNUM;
8157 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8161 memcpy (tmpbuf, valbuf,
8162 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
8163 regs->cooked_write (regno++, tmpbuf);
8164 len -= ARM_INT_REGISTER_SIZE;
8165 valbuf += ARM_INT_REGISTER_SIZE;
8171 /* Handle function return values. */
8173 static enum return_value_convention
8174 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8175 struct type *valtype, struct regcache *regcache,
8176 gdb_byte *readbuf, const gdb_byte *writebuf)
8178 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8179 struct type *func_type = function ? value_type (function) : NULL;
8180 enum arm_vfp_cprc_base_type vfp_base_type;
8183 if (arm_vfp_abi_for_function (gdbarch, func_type)
8184 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8186 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8187 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
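/* For example, a homogeneous aggregate of two doubles is returned in
   d0 and d1, one base-type unit per register. */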
8189 for (i = 0; i < vfp_base_count; i++)
8191 if (reg_char == 'q')
8194 arm_neon_quad_write (gdbarch, regcache, i,
8195 writebuf + i * unit_length);
8198 arm_neon_quad_read (gdbarch, regcache, i,
8199 readbuf + i * unit_length);
8206 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8207 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8210 regcache->cooked_write (regnum, writebuf + i * unit_length);
8212 regcache->cooked_read (regnum, readbuf + i * unit_length);
8215 return RETURN_VALUE_REGISTER_CONVENTION;
8218 if (valtype->code () == TYPE_CODE_STRUCT
8219 || valtype->code () == TYPE_CODE_UNION
8220 || valtype->code () == TYPE_CODE_ARRAY)
8222 if (tdep->struct_return == pcc_struct_return
8223 || arm_return_in_memory (gdbarch, valtype))
8224 return RETURN_VALUE_STRUCT_CONVENTION;
8226 else if (valtype->code () == TYPE_CODE_COMPLEX)
8228 if (arm_return_in_memory (gdbarch, valtype))
8229 return RETURN_VALUE_STRUCT_CONVENTION;
8233 arm_store_return_value (valtype, regcache, writebuf);
8236 arm_extract_return_value (valtype, regcache, readbuf);
8238 return RETURN_VALUE_REGISTER_CONVENTION;
8243 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8245 struct gdbarch *gdbarch = get_frame_arch (frame);
8246 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8247 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8249 gdb_byte buf[ARM_INT_REGISTER_SIZE];
8251 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8253 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8254 ARM_INT_REGISTER_SIZE))
8257 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
8260 /* A call to cmse secure entry function "foo" at "a" is modified by
8267 b) bl yyyy <__acle_se_foo>
8269 section .gnu.sgstubs:
8271 yyyy: sg // secure gateway
8272 b.w xxxx <__acle_se_foo> // original_branch_dest
8277 When control reaches "b", the pc contains "yyyy" (the sg address), which is a
8278 trampoline and does not exist in source code. This function returns the
8279 target pc "xxxx". For more details please refer to section 5.4
8280 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8281 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8282 document on www.developer.arm.com. */
8285 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
8287 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
8288 char *target_name = (char *) alloca (target_len);
8289 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
8291 struct bound_minimal_symbol minsym
8292 = lookup_minimal_symbol (target_name, NULL, objfile);
8294 if (minsym.minsym != nullptr)
8295 return BMSYMBOL_VALUE_ADDRESS (minsym);
8300 /* Return true when SEC points to ".gnu.sgstubs" section. */
8303 arm_is_sgstubs_section (struct obj_section *sec)
8305 return (sec != nullptr
8306 && sec->the_bfd_section != nullptr
8307 && sec->the_bfd_section->name != nullptr
8308 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
8311 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8312 return the target PC. Otherwise return 0. */
8315 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8319 CORE_ADDR start_addr;
8321 /* Find the starting address and name of the function containing the PC. */
8322 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8324 /* Trampoline 'bx reg' doesn't belong to any functions. Do the check here. */
8326 start_addr = arm_skip_bx_reg (frame, pc);
8327 if (start_addr != 0)
8333 /* If PC is in a Thumb call or return stub, return the address of the
8334 target PC, which is in a register. The thunk functions are called
8335 _call_via_xx, where x is the register name. The possible names
8336 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8337 functions, named __ARM_call_via_r[0-7]. */
8338 if (startswith (name, "_call_via_")
8339 || startswith (name, "__ARM_call_via_"))
8341 /* Use the name suffix to determine which register contains the target PC. */
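/* For example, a stub named _call_via_r3 jumps through r3, so the value
   of r3 in this frame is the target PC. */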
8343 static const char *table[15] =
8344 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8345 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8348 int offset = strlen (name) - 2;
8350 for (regno = 0; regno <= 14; regno++)
8351 if (strcmp (&name[offset], table[regno]) == 0)
8352 return get_frame_register_unsigned (frame, regno);
8355 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8356 non-interworking calls to foo. We could decode the stubs
8357 to find the target but it's easier to use the symbol table. */
8358 namelen = strlen (name);
8359 if (name[0] == '_' && name[1] == '_'
8360 && ((namelen > 2 + strlen ("_from_thumb")
8361 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8362 || (namelen > 2 + strlen ("_from_arm")
8363 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8366 int target_len = namelen - 2;
8367 struct bound_minimal_symbol minsym;
8368 struct objfile *objfile;
8369 struct obj_section *sec;
8371 if (name[namelen - 1] == 'b')
8372 target_len -= strlen ("_from_thumb");
8374 target_len -= strlen ("_from_arm");
8376 target_name = (char *) alloca (target_len + 1);
8377 memcpy (target_name, name + 2, target_len);
8378 target_name[target_len] = '\0';
8380 sec = find_pc_section (pc);
8381 objfile = (sec == NULL) ? NULL : sec->objfile;
8382 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8383 if (minsym.minsym != NULL)
8384 return BMSYMBOL_VALUE_ADDRESS (minsym);
8389 struct obj_section *section = find_pc_section (pc);
8391 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8392 if (arm_is_sgstubs_section (section))
8393 return arm_skip_cmse_entry (pc, name, section->objfile);
8395 return 0; /* not a stub */
8399 arm_update_current_architecture (void)
8401 struct gdbarch_info info;
8403 /* If the current architecture is not ARM, we have nothing to do. */
8404 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8407 /* Update the architecture. */
8408 gdbarch_info_init (&info);
8410 if (!gdbarch_update_p (info))
8411 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8415 set_fp_model_sfunc (const char *args, int from_tty,
8416 struct cmd_list_element *c)
8420 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8421 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8423 arm_fp_model = (enum arm_float_model) fp_model;
8427 if (fp_model == ARM_FLOAT_LAST)
8428 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8431 arm_update_current_architecture ();
8435 show_fp_model (struct ui_file *file, int from_tty,
8436 struct cmd_list_element *c, const char *value)
8438 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8440 if (arm_fp_model == ARM_FLOAT_AUTO
8441 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8442 fprintf_filtered (file, _("\
8443 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8444 fp_model_strings[tdep->fp_model]);
8446 fprintf_filtered (file, _("\
8447 The current ARM floating point model is \"%s\".\n"),
8448 fp_model_strings[arm_fp_model]);
8452 arm_set_abi (const char *args, int from_tty,
8453 struct cmd_list_element *c)
8457 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8458 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8460 arm_abi_global = (enum arm_abi_kind) arm_abi;
8464 if (arm_abi == ARM_ABI_LAST)
8465 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8468 arm_update_current_architecture ();
8472 arm_show_abi (struct ui_file *file, int from_tty,
8473 struct cmd_list_element *c, const char *value)
8475 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8477 if (arm_abi_global == ARM_ABI_AUTO
8478 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8479 fprintf_filtered (file, _("\
8480 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8481 arm_abi_strings[tdep->arm_abi]);
8483 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8488 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8489 struct cmd_list_element *c, const char *value)
8491 fprintf_filtered (file,
8492 _("The current execution mode assumed "
8493 "(when symbols are unavailable) is \"%s\".\n"),
8494 arm_fallback_mode_string);
8498 arm_show_force_mode (struct ui_file *file, int from_tty,
8499 struct cmd_list_element *c, const char *value)
8501 fprintf_filtered (file,
8502 _("The current execution mode assumed "
8503 "(even when symbols are available) is \"%s\".\n"),
8504 arm_force_mode_string);
8507 /* If the user changes the register disassembly style used for info
8508 register and other commands, we have to also switch the style used
8509 in opcodes for disassembly output. This function is run in the "set
8510 arm disassembly" command, and does that. */
8513 set_disassembly_style_sfunc (const char *args, int from_tty,
8514 struct cmd_list_element *c)
8516 /* Convert the short style name into the long style name (e.g., reg-names-*)
8517 before calling the generic set_disassembler_options() function. */
8518 std::string long_name = std::string ("reg-names-") + disassembly_style;
8519 set_disassembler_options (&long_name[0]);
8523 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8524 struct cmd_list_element *c, const char *value)
8526 struct gdbarch *gdbarch = get_current_arch ();
8527 char *options = get_disassembler_options (gdbarch);
8528 const char *style = "";
8532 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8533 if (CONST_STRNEQ (opt, "reg-names-"))
8535 style = &opt[strlen ("reg-names-")];
8536 len = strcspn (style, ",");
8539 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
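/* Example (illustrative, assuming the usual ARM opcodes options): if the
   current disassembler options are "reg-names-apcs,force-thumb", the loop
   above locates the "reg-names-" option and the message printed is
   'The disassembly style is "apcs".'  */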
8542 /* Return the ARM register name corresponding to register I. */
8544 arm_register_name (struct gdbarch *gdbarch, int i)
8546 const int num_regs = gdbarch_num_regs (gdbarch);
8548 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8549 && i >= num_regs && i < num_regs + 32)
8551 static const char *const vfp_pseudo_names[] = {
8552 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8553 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8554 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8555 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8558 return vfp_pseudo_names[i - num_regs];
8561 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8562 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8564 static const char *const neon_pseudo_names[] = {
8565 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8566 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8569 return neon_pseudo_names[i - num_regs - 32];
8572 if (i >= ARRAY_SIZE (arm_register_names))
8573 /* These registers are only supported on targets which supply
8574 an XML description. */
8577 return arm_register_names[i];
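/* Pseudo-register numbering assumed above, for reference: the 32 VFP
   single-precision pseudos s0-s31 occupy [num_regs, num_regs + 32), and
   the 16 NEON quad pseudos q0-q15 occupy the following 16 numbers.  */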
8580 /* Test whether the COFF symbol specific value corresponds to a Thumb function. */
8584 coff_sym_is_thumb (int val)
8586 return (val == C_THUMBEXT
8587 || val == C_THUMBSTAT
8588 || val == C_THUMBEXTFUNC
8589 || val == C_THUMBSTATFUNC
8590 || val == C_THUMBLABEL);
8593 /* arm_coff_make_msymbol_special()
8594 arm_elf_make_msymbol_special()
8596 These functions test whether the COFF or ELF symbol corresponds to
8597 an address in thumb code, and set a "special" bit in a minimal
8598 symbol to indicate that it does. */
8601 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8603 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8605 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8606 == ST_BRANCH_TO_THUMB)
8607 MSYMBOL_SET_SPECIAL (msym);
8611 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8613 if (coff_sym_is_thumb (val))
8614 MSYMBOL_SET_SPECIAL (msym);
8618 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8621 const char *name = bfd_asymbol_name (sym);
8622 struct arm_per_bfd *data;
8623 struct arm_mapping_symbol new_map_sym;
8625 gdb_assert (name[0] == '$');
8626 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8629 data = arm_bfd_data_key.get (objfile->obfd);
8631 data = arm_bfd_data_key.emplace (objfile->obfd,
8632 objfile->obfd->section_count);
8633 arm_mapping_symbol_vec &map
8634 = data->section_maps[bfd_asymbol_section (sym)->index];
8636 new_map_sym.value = sym->value;
8637 new_map_sym.type = name[1];
8639 /* Insert at the end, the vector will be sorted on first use. */
8640 map.push_back (new_map_sym);
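/* For example (illustrative), a "$t" mapping symbol at offset 0x100 of a
   section records that Thumb code starts there, while "$a" marks ARM code
   and "$d" marks literal data.  The per-section vectors are only sorted
   lazily, the first time a lookup is performed.  */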
8644 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8646 struct gdbarch *gdbarch = regcache->arch ();
8647 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8649 /* If necessary, set the T bit. */
8652 ULONGEST val, t_bit;
8653 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8654 t_bit = arm_psr_thumb_bit (gdbarch);
8655 if (arm_pc_is_thumb (gdbarch, pc))
8656 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8659 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8664 /* Read the contents of a NEON quad register, by reading from two
8665 double registers. This is used to implement the quad pseudo
8666 registers, and for argument passing in case the quad registers are
8667 missing; vectors are passed in quad registers when using the VFP
8668 ABI, even if a NEON unit is not present. REGNUM is the index of
8669 the quad register, in [0, 15]. */
8671 static enum register_status
8672 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8673 int regnum, gdb_byte *buf)
8676 gdb_byte reg_buf[8];
8677 int offset, double_regnum;
8678 enum register_status status;
8680 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8681 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8684 /* d0 is always the least significant half of q0. */
8685 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8690 status = regcache->raw_read (double_regnum, reg_buf);
8691 if (status != REG_VALID)
8693 memcpy (buf + offset, reg_buf, 8);
8695 offset = 8 - offset;
8696 status = regcache->raw_read (double_regnum + 1, reg_buf);
8697 if (status != REG_VALID)
8699 memcpy (buf + offset, reg_buf, 8);
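/* Illustrative layout used by arm_neon_quad_read (and the corresponding
   write below): quad register qN is composed of d(2N) and d(2N+1), so
   q5 = {d10, d11}.  On a little-endian target d10 supplies bytes 0-7 of
   the 16-byte buffer and d11 bytes 8-15; on a big-endian target the two
   halves land in the opposite order, which is what the OFFSET computation
   above expresses.  */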
8704 static enum register_status
8705 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
8706 int regnum, gdb_byte *buf)
8708 const int num_regs = gdbarch_num_regs (gdbarch);
8710 gdb_byte reg_buf[8];
8711 int offset, double_regnum;
8713 gdb_assert (regnum >= num_regs);
8716 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8717 /* Quad-precision register. */
8718 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8721 enum register_status status;
8723 /* Single-precision register. */
8724 gdb_assert (regnum < 32);
8726 /* s0 is always the least significant half of d0. */
8727 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8728 offset = (regnum & 1) ? 0 : 4;
8730 offset = (regnum & 1) ? 4 : 0;
8732 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8733 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8736 status = regcache->raw_read (double_regnum, reg_buf);
8737 if (status == REG_VALID)
8738 memcpy (buf, reg_buf + offset, 4);
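/* Illustrative mapping for the single-precision pseudos handled above:
   sN lives in d(N / 2), so s5 is the most significant half of d2.  On a
   little-endian target an odd-numbered sN sits at byte offset 4 within
   its double register and an even-numbered one at offset 0; on a
   big-endian target the offsets are reversed.  */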
8743 /* Store the contents of BUF to a NEON quad register, by writing to
8744 two double registers. This is used to implement the quad pseudo
8745 registers, and for argument passing in case the quad registers are
8746 missing; vectors are passed in quad registers when using the VFP
8747 ABI, even if a NEON unit is not present. REGNUM is the index
8748 of the quad register, in [0, 15]. */
8751 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8752 int regnum, const gdb_byte *buf)
8755 int offset, double_regnum;
8757 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8758 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8761 /* d0 is always the least significant half of q0. */
8762 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8767 regcache->raw_write (double_regnum, buf + offset);
8768 offset = 8 - offset;
8769 regcache->raw_write (double_regnum + 1, buf + offset);
8773 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8774 int regnum, const gdb_byte *buf)
8776 const int num_regs = gdbarch_num_regs (gdbarch);
8778 gdb_byte reg_buf[8];
8779 int offset, double_regnum;
8781 gdb_assert (regnum >= num_regs);
8784 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8785 /* Quad-precision register. */
8786 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8789 /* Single-precision register. */
8790 gdb_assert (regnum < 32);
8792 /* s0 is always the least significant half of d0. */
8793 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8794 offset = (regnum & 1) ? 0 : 4;
8796 offset = (regnum & 1) ? 4 : 0;
8798 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8799 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8802 regcache->raw_read (double_regnum, reg_buf);
8803 memcpy (reg_buf + offset, buf, 4);
8804 regcache->raw_write (double_regnum, reg_buf);
8808 static struct value *
8809 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8811 const int *reg_p = (const int *) baton;
8812 return value_of_register (*reg_p, frame);
8815 static enum gdb_osabi
8816 arm_elf_osabi_sniffer (bfd *abfd)
8818 unsigned int elfosabi;
8819 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8821 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8823 if (elfosabi == ELFOSABI_ARM)
8824 /* GNU tools use this value. Check note sections in this case,
8827 for (asection *sect : gdb_bfd_sections (abfd))
8828 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
8831 /* Anything else will be handled by the generic ELF sniffer. */
8836 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8837 struct reggroup *group)
8839 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8840 that, the FPS register belongs to save_reggroup, restore_reggroup, and
8841 all_reggroup, of course. */
8842 if (regnum == ARM_FPS_REGNUM)
8843 return (group == float_reggroup
8844 || group == save_reggroup
8845 || group == restore_reggroup
8846 || group == all_reggroup);
8848 return default_register_reggroup_p (gdbarch, regnum, group);
8851 /* For backward-compatibility we allow two 'g' packet lengths with
8852 the remote protocol depending on whether FPA registers are
8853 supplied. M-profile targets do not have FPA registers, but some
8854 stubs already exist in the wild which use a 'g' packet which
8855 supplies them albeit with dummy values. The packet format which
8856 includes FPA registers should be considered deprecated for
8857 M-profile targets. */
8860 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8862 if (gdbarch_tdep (gdbarch)->is_m)
8864 const target_desc *tdesc;
8866 /* If we know from the executable this is an M-profile target,
8867 cater for remote targets whose register set layout is the
8868 same as the FPA layout. */
8869 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
8870 register_remote_g_packet_guess (gdbarch,
8871 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
8874 /* The regular M-profile layout. */
8875 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
8876 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
8879 /* M-profile plus M4F VFP. */
8880 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
8881 register_remote_g_packet_guess (gdbarch,
8882 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
8886 /* Otherwise we don't have a useful guess. */
8889 /* Implement the code_of_frame_writable gdbarch method. */
8892 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8894 if (gdbarch_tdep (gdbarch)->is_m
8895 && get_frame_type (frame) == SIGTRAMP_FRAME)
8897 /* M-profile exception frames return to some magic PCs, which
8898 aren't writable at all. */
8905 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8906 to be postfixed by a version (e.g. armv7hl). */
8909 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
8911 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
8912 return "arm(v[^- ]*)?";
8913 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
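/* For example (illustrative), the regexp above matches the architecture
   components "arm", "armv7" and "armv7hl"; the version suffix after
   "arm" is optional and may not contain '-' or ' '.  */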
8916 /* Initialize the current architecture based on INFO. If possible,
8917 re-use an architecture from ARCHES, which is a list of
8918 architectures already created during this debugging session.
8920 Called e.g. at program startup, when reading a core file, and when
8921 reading a binary file. */
8923 static struct gdbarch *
8924 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8926 struct gdbarch_tdep *tdep;
8927 struct gdbarch *gdbarch;
8928 struct gdbarch_list *best_arch;
8929 enum arm_abi_kind arm_abi = arm_abi_global;
8930 enum arm_float_model fp_model = arm_fp_model;
8931 tdesc_arch_data_up tdesc_data;
8934 int vfp_register_count = 0;
8935 bool have_vfp_pseudos = false, have_neon_pseudos = false;
8936 bool have_wmmx_registers = false;
8937 bool have_neon = false;
8938 bool have_fpa_registers = true;
8939 const struct target_desc *tdesc = info.target_desc;
8941 /* If we have an object to base this architecture on, try to determine
8944 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8946 int ei_osabi, e_flags;
8948 switch (bfd_get_flavour (info.abfd))
8950 case bfd_target_coff_flavour:
8951 /* Assume it's an old APCS-style ABI. */
8953 arm_abi = ARM_ABI_APCS;
8956 case bfd_target_elf_flavour:
8957 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8958 e_flags = elf_elfheader (info.abfd)->e_flags;
8960 if (ei_osabi == ELFOSABI_ARM)
8962 /* GNU tools used to use this value, but do not for EABI
8963 objects. There's nowhere to tag an EABI version
8964 anyway, so assume APCS. */
8965 arm_abi = ARM_ABI_APCS;
8967 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8969 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8973 case EF_ARM_EABI_UNKNOWN:
8974 /* Assume GNU tools. */
8975 arm_abi = ARM_ABI_APCS;
8978 case EF_ARM_EABI_VER4:
8979 case EF_ARM_EABI_VER5:
8980 arm_abi = ARM_ABI_AAPCS;
8981 /* EABI binaries default to VFP float ordering.
8982 They may also contain build attributes that can
8983 be used to identify if the VFP argument-passing
8985 if (fp_model == ARM_FLOAT_AUTO)
8988 switch (bfd_elf_get_obj_attr_int (info.abfd,
8992 case AEABI_VFP_args_base:
8993 /* "The user intended FP parameter/result
8994 passing to conform to AAPCS, base
8996 fp_model = ARM_FLOAT_SOFT_VFP;
8998 case AEABI_VFP_args_vfp:
8999 /* "The user intended FP parameter/result
9000 passing to conform to AAPCS, VFP
9002 fp_model = ARM_FLOAT_VFP;
9004 case AEABI_VFP_args_toolchain:
9005 /* "The user intended FP parameter/result
9006 passing to conform to tool chain-specific
9007 conventions" - we don't know any such
9008 conventions, so leave it as "auto". */
9010 case AEABI_VFP_args_compatible:
9011 /* "Code is compatible with both the base
9012 and VFP variants; the user did not permit
9013 non-variadic functions to pass FP
9014 parameters/results" - leave it as
9018 /* Attribute value not mentioned in the
9019 November 2012 ABI, so leave it as
9024 fp_model = ARM_FLOAT_SOFT_VFP;
9030 /* Leave it as "auto". */
9031 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9036 /* Detect M-profile programs. This only works if the
9037 executable file includes build attributes; GCC does
9038 copy them to the executable, but e.g. RealView does
9041 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9044 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9045 Tag_CPU_arch_profile);
9047 /* GCC specifies the profile for v6-M; RealView only
9048 specifies the profile for architectures starting with
9049 V7 (as opposed to architectures with a tag
9050 numerically greater than TAG_CPU_ARCH_V7). */
9051 if (!tdesc_has_registers (tdesc)
9052 && (attr_arch == TAG_CPU_ARCH_V6_M
9053 || attr_arch == TAG_CPU_ARCH_V6S_M
9054 || attr_profile == 'M'))
9059 if (fp_model == ARM_FLOAT_AUTO)
9061 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9064 /* Leave it as "auto". Strictly speaking this case
9065 means FPA, but almost nobody uses that now, and
9066 many toolchains fail to set the appropriate bits
9067 for the floating-point model they use. */
9069 case EF_ARM_SOFT_FLOAT:
9070 fp_model = ARM_FLOAT_SOFT_FPA;
9072 case EF_ARM_VFP_FLOAT:
9073 fp_model = ARM_FLOAT_VFP;
9075 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9076 fp_model = ARM_FLOAT_SOFT_VFP;
9081 if (e_flags & EF_ARM_BE8)
9082 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9087 /* Leave it as "auto". */
9092 /* Check any target description for validity. */
9093 if (tdesc_has_registers (tdesc))
9095 /* For most registers we require GDB's default names; but also allow
9096 the numeric names for sp / lr / pc, as a convenience. */
9097 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9098 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9099 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9101 const struct tdesc_feature *feature;
9104 feature = tdesc_find_feature (tdesc,
9105 "org.gnu.gdb.arm.core");
9106 if (feature == NULL)
9108 feature = tdesc_find_feature (tdesc,
9109 "org.gnu.gdb.arm.m-profile");
9110 if (feature == NULL)
9116 tdesc_data = tdesc_data_alloc ();
9119 for (i = 0; i < ARM_SP_REGNUM; i++)
9120 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9121 arm_register_names[i]);
9122 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9125 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9128 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
9132 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9133 ARM_PS_REGNUM, "xpsr");
9135 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9136 ARM_PS_REGNUM, "cpsr");
9141 feature = tdesc_find_feature (tdesc,
9142 "org.gnu.gdb.arm.fpa");
9143 if (feature != NULL)
9146 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9147 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9148 arm_register_names[i]);
9153 have_fpa_registers = false;
9155 feature = tdesc_find_feature (tdesc,
9156 "org.gnu.gdb.xscale.iwmmxt");
9157 if (feature != NULL)
9159 static const char *const iwmmxt_names[] = {
9160 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9161 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9162 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9163 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9167 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9169 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9170 iwmmxt_names[i - ARM_WR0_REGNUM]);
9172 /* Check for the control registers, but do not fail if they
9174 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9175 tdesc_numbered_register (feature, tdesc_data.get (), i,
9176 iwmmxt_names[i - ARM_WR0_REGNUM]);
9178 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9180 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
9181 iwmmxt_names[i - ARM_WR0_REGNUM]);
9186 have_wmmx_registers = true;
9189 /* If we have a VFP unit, check whether the single precision registers
9190 are present. If not, then we will synthesize them as pseudo
9192 feature = tdesc_find_feature (tdesc,
9193 "org.gnu.gdb.arm.vfp");
9194 if (feature != NULL)
9196 static const char *const vfp_double_names[] = {
9197 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9198 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9199 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9200 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9203 /* Require the double precision registers. There must be either
9206 for (i = 0; i < 32; i++)
9208 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9210 vfp_double_names[i]);
9214 if (!valid_p && i == 16)
9217 /* Also require FPSCR. */
9218 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
9219 ARM_FPSCR_REGNUM, "fpscr");
9223 if (tdesc_unnumbered_register (feature, "s0") == 0)
9224 have_vfp_pseudos = true;
9226 vfp_register_count = i;
9228 /* If we have VFP, also check for NEON. The architecture allows
9229 NEON without VFP (integer vector operations only), but GDB
9230 does not support that. */
9231 feature = tdesc_find_feature (tdesc,
9232 "org.gnu.gdb.arm.neon");
9233 if (feature != NULL)
9235 /* NEON requires 32 double-precision registers. */
9239 /* If there are quad registers defined by the stub, use
9240 their type; otherwise (normally) provide them with
9241 the default type. */
9242 if (tdesc_unnumbered_register (feature, "q0") == 0)
9243 have_neon_pseudos = true;
9250 /* If there is already a candidate, use it. */
9251 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9253 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9255 if (arm_abi != ARM_ABI_AUTO
9256 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9259 if (fp_model != ARM_FLOAT_AUTO
9260 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9263 /* There are various other properties in tdep that we do not
9264 need to check here: those derived from a target description,
9265 since gdbarches with a different target description are
9266 automatically disqualified. */
9268 /* Do check is_m, though, since it might come from the binary. */
9269 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9272 /* Found a match. */
9276 if (best_arch != NULL)
9277 return best_arch->gdbarch;
9279 tdep = XCNEW (struct gdbarch_tdep);
9280 gdbarch = gdbarch_alloc (&info, tdep);
9282 /* Record additional information about the architecture we are defining.
9283 These are gdbarch discriminators, like the OSABI. */
9284 tdep->arm_abi = arm_abi;
9285 tdep->fp_model = fp_model;
9287 tdep->have_fpa_registers = have_fpa_registers;
9288 tdep->have_wmmx_registers = have_wmmx_registers;
9289 gdb_assert (vfp_register_count == 0
9290 || vfp_register_count == 16
9291 || vfp_register_count == 32);
9292 tdep->vfp_register_count = vfp_register_count;
9293 tdep->have_vfp_pseudos = have_vfp_pseudos;
9294 tdep->have_neon_pseudos = have_neon_pseudos;
9295 tdep->have_neon = have_neon;
9297 arm_register_g_packet_guesses (gdbarch);
9300 switch (info.byte_order_for_code)
9302 case BFD_ENDIAN_BIG:
9303 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9304 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9305 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9306 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9310 case BFD_ENDIAN_LITTLE:
9311 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9312 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9313 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9314 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9319 internal_error (__FILE__, __LINE__,
9320 _("arm_gdbarch_init: bad byte order for float format"));
9323 /* On ARM targets char defaults to unsigned. */
9324 set_gdbarch_char_signed (gdbarch, 0);
9326 /* wchar_t is unsigned under the AAPCS. */
9327 if (tdep->arm_abi == ARM_ABI_AAPCS)
9328 set_gdbarch_wchar_signed (gdbarch, 0);
9330 set_gdbarch_wchar_signed (gdbarch, 1);
9332 /* Compute type alignment. */
9333 set_gdbarch_type_align (gdbarch, arm_type_align);
9335 /* Note: for displaced stepping, this includes the breakpoint, and one word
9336 of additional scratch space. This setting isn't used for anything beside
9337 displaced stepping at present. */
9338 set_gdbarch_max_insn_length (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
9340 /* This should be low enough for everything. */
9341 tdep->lowest_pc = 0x20;
9342 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9344 /* The default, for both APCS and AAPCS, is to return small
9345 structures in registers. */
9346 tdep->struct_return = reg_struct_return;
9348 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9349 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9352 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9354 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9356 frame_base_set_default (gdbarch, &arm_normal_base);
9358 /* Address manipulation. */
9359 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9361 /* Advance PC across function entry code. */
9362 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9364 /* Detect whether PC is at a point where the stack has been destroyed. */
9365 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9367 /* Skip trampolines. */
9368 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9370 /* The stack grows downward. */
9371 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9373 /* Breakpoint manipulation. */
9374 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9375 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9376 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9377 arm_breakpoint_kind_from_current_state);
9379 /* Information about registers, etc. */
9380 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9381 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9382 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9383 set_gdbarch_register_type (gdbarch, arm_register_type);
9384 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9386 /* This "info float" is FPA-specific. Use the generic version if we do not have FPA registers. */
9388 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9389 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9391 /* Internal <-> external register number maps. */
9392 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9393 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9395 set_gdbarch_register_name (gdbarch, arm_register_name);
9397 /* Returning results. */
9398 set_gdbarch_return_value (gdbarch, arm_return_value);
9401 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9403 /* Minsymbol frobbing. */
9404 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9405 set_gdbarch_coff_make_msymbol_special (gdbarch,
9406 arm_coff_make_msymbol_special);
9407 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9409 /* Thumb-2 IT block support. */
9410 set_gdbarch_adjust_breakpoint_address (gdbarch,
9411 arm_adjust_breakpoint_address);
9413 /* Virtual tables. */
9414 set_gdbarch_vbit_in_delta (gdbarch, 1);
9416 /* Hook in the ABI-specific overrides, if they have been registered. */
9417 gdbarch_init_osabi (info, gdbarch);
9419 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9421 /* Add some default predicates. */
9423 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9424 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9425 dwarf2_append_unwinders (gdbarch);
9426 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9427 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9428 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9430 /* Now we have tuned the configuration, set a few final things,
9431 based on what the OS ABI has told us. */
9433 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9434 binaries are always marked. */
9435 if (tdep->arm_abi == ARM_ABI_AUTO)
9436 tdep->arm_abi = ARM_ABI_APCS;
9438 /* Watchpoints are not steppable. */
9439 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9441 /* We used to default to FPA for generic ARM, but almost nobody
9442 uses that now, and we now provide a way for the user to force
9443 the model. So default to the most useful variant. */
9444 if (tdep->fp_model == ARM_FLOAT_AUTO)
9445 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9447 if (tdep->jb_pc >= 0)
9448 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9450 /* Floating point sizes and format. */
9451 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9452 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9454 set_gdbarch_double_format
9455 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9456 set_gdbarch_long_double_format
9457 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9461 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9462 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9465 if (have_vfp_pseudos)
9467 /* NOTE: These are the only pseudo registers used by
9468 the ARM target at the moment. If more are added, a
9469 little more care in numbering will be needed. */
9471 int num_pseudos = 32;
9472 if (have_neon_pseudos)
9474 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9475 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9476 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9479 if (tdesc_data != nullptr)
9481 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9483 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
9485 /* Override tdesc_register_type to adjust the types of VFP
9486 registers for NEON. */
9487 set_gdbarch_register_type (gdbarch, arm_register_type);
9490 /* Add standard register aliases. We add aliases even for those
9491 names which are used by the current architecture - it's simpler,
9492 and does no harm, since nothing ever lists user registers. */
9493 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9494 user_reg_add (gdbarch, arm_register_aliases[i].name,
9495 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9497 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9498 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9500 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
9506 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9508 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9513 fprintf_unfiltered (file, _("arm_dump_tdep: fp_model = %i\n"),
9514 (int) tdep->fp_model);
9515 fprintf_unfiltered (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9516 (int) tdep->have_fpa_registers);
9517 fprintf_unfiltered (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9518 (int) tdep->have_wmmx_registers);
9519 fprintf_unfiltered (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
9520 (int) tdep->vfp_register_count);
9521 fprintf_unfiltered (file, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9522 (int) tdep->have_vfp_pseudos);
9523 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9524 (int) tdep->have_neon_pseudos);
9525 fprintf_unfiltered (file, _("arm_dump_tdep: have_neon = %i\n"),
9526 (int) tdep->have_neon);
9527 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9528 (unsigned long) tdep->lowest_pc);
9534 static void arm_record_test (void);
9535 static void arm_analyze_prologue_test ();
9539 void _initialize_arm_tdep ();
9541 _initialize_arm_tdep ()
9545 char regdesc[1024], *rdptr = regdesc;
9546 size_t rest = sizeof (regdesc);
9548 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9550 /* Add ourselves to objfile event chain. */
9551 gdb::observers::new_objfile.attach (arm_exidx_new_objfile);
9553 /* Register an ELF OS ABI sniffer for ARM binaries. */
9554 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9555 bfd_target_elf_flavour,
9556 arm_elf_osabi_sniffer);
9558 /* Add root prefix command for all "set arm"/"show arm" commands. */
9559 add_basic_prefix_cmd ("arm", no_class,
9560 _("Various ARM-specific commands."),
9561 &setarmcmdlist, "set arm ", 0, &setlist);
9563 add_show_prefix_cmd ("arm", no_class,
9564 _("Various ARM-specific commands."),
9565 &showarmcmdlist, "show arm ", 0, &showlist);
9568 arm_disassembler_options = xstrdup ("reg-names-std");
9569 const disasm_options_t *disasm_options
9570 = &disassembler_options_arm ()->options;
9571 int num_disassembly_styles = 0;
9572 for (i = 0; disasm_options->name[i] != NULL; i++)
9573 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9574 num_disassembly_styles++;
9576 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9577 valid_disassembly_styles = XNEWVEC (const char *,
9578 num_disassembly_styles + 1);
9579 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9580 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9582 size_t offset = strlen ("reg-names-");
9583 const char *style = disasm_options->name[i];
9584 valid_disassembly_styles[j++] = &style[offset];
9585 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9586 disasm_options->description[i]);
9590 /* Mark the end of valid options. */
9591 valid_disassembly_styles[num_disassembly_styles] = NULL;
9593 /* Create the help text. */
9594 std::string helptext = string_printf ("%s%s%s",
9595 _("The valid values are:\n"),
9597 _("The default is \"std\"."));
9599 add_setshow_enum_cmd("disassembler", no_class,
9600 valid_disassembly_styles, &disassembly_style,
9601 _("Set the disassembly style."),
9602 _("Show the disassembly style."),
9604 set_disassembly_style_sfunc,
9605 show_disassembly_style_sfunc,
9606 &setarmcmdlist, &showarmcmdlist);
9608 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9609 _("Set usage of ARM 32-bit mode."),
9610 _("Show usage of ARM 32-bit mode."),
9611 _("When off, a 26-bit PC will be used."),
9613 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9615 &setarmcmdlist, &showarmcmdlist);
9617 /* Add a command to allow the user to force the FPU model. */
9618 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9619 _("Set the floating point type."),
9620 _("Show the floating point type."),
9621 _("auto - Determine the FP type from the OS-ABI.\n\
9622 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9623 fpa - FPA co-processor (GCC compiled).\n\
9624 softvfp - Software FP with pure-endian doubles.\n\
9625 vfp - VFP co-processor."),
9626 set_fp_model_sfunc, show_fp_model,
9627 &setarmcmdlist, &showarmcmdlist);
9629 /* Add a command to allow the user to force the ABI. */
9630 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9633 NULL, arm_set_abi, arm_show_abi,
9634 &setarmcmdlist, &showarmcmdlist);
9636 /* Add two commands to allow the user to force the assumed
9638 add_setshow_enum_cmd ("fallback-mode", class_support,
9639 arm_mode_strings, &arm_fallback_mode_string,
9640 _("Set the mode assumed when symbols are unavailable."),
9641 _("Show the mode assumed when symbols are unavailable."),
9642 NULL, NULL, arm_show_fallback_mode,
9643 &setarmcmdlist, &showarmcmdlist);
9644 add_setshow_enum_cmd ("force-mode", class_support,
9645 arm_mode_strings, &arm_force_mode_string,
9646 _("Set the mode assumed even when symbols are available."),
9647 _("Show the mode assumed even when symbols are available."),
9648 NULL, NULL, arm_show_force_mode,
9649 &setarmcmdlist, &showarmcmdlist);
9651 /* Debugging flag. */
9652 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9653 _("Set ARM debugging."),
9654 _("Show ARM debugging."),
9655 _("When on, arm-specific debugging is enabled."),
9657 NULL, /* FIXME: i18n: "ARM debugging is %s."  */
9658 &setdebuglist, &showdebuglist);
9661 selftests::register_test ("arm-record", selftests::arm_record_test);
9662 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
9667 /* ARM-reversible process record data structures. */
9669 #define ARM_INSN_SIZE_BYTES 4
9670 #define THUMB_INSN_SIZE_BYTES 2
9671 #define THUMB2_INSN_SIZE_BYTES 4
9674 /* Position of the bit within a 32-bit ARM instruction
9675 that defines whether the instruction is a load or store. */
9676 #define INSN_S_L_BIT_NUM 20
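/* Illustrative use: bit (insn, INSN_S_L_BIT_NUM) is the L bit of an ARM
   load/store encoding -- 1 for a load (LDR and friends), 0 for a store
   (STR and friends); see its uses in the record helpers below.  */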
9678 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9681 unsigned int reg_len = LENGTH; \
9684 REGS = XNEWVEC (uint32_t, reg_len); \
9685 memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * LENGTH); \
9690 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9693 unsigned int mem_len = LENGTH; \
9696 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9697 memcpy(&MEMS->len, &RECORD_BUF[0], \
9698 sizeof(struct arm_mem_r) * LENGTH); \
9703 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
9704 #define INSN_RECORDED(ARM_RECORD) \
9705 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
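/* Typical usage of the helpers above (illustrative sketch, mirroring the
   arm_record_* functions below): a record routine fills a local
   uint32_t record_buf[] with the register numbers it will modify and a
   record_buf_mem[] with (length, address) pairs, then hands them over with

     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

   before returning.  */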
9707 /* ARM memory record structure. */
9710 uint32_t len; /* Record length. */
9711 uint32_t addr; /* Memory address. */
9714 /* An ARM instruction record contains the opcode of the current insn
9715 and the execution state (before entry to decode_insn ()), and
9716 the list of to-be-modified registers and
9717 memory blocks (on return from decode_insn ()).
9719 typedef struct insn_decode_record_t
9721 struct gdbarch *gdbarch;
9722 struct regcache *regcache;
9723 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9724 uint32_t arm_insn; /* Should accommodate thumb. */
9725 uint32_t cond; /* Condition code. */
9726 uint32_t opcode; /* Insn opcode. */
9727 uint32_t decode; /* Insn decode bits. */
9728 uint32_t mem_rec_count; /* Number of memory records. */
9729 uint32_t reg_rec_count; /* Number of register records. */
9730 uint32_t *arm_regs; /* Registers to be saved for this record. */
9731 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9732 } insn_decode_record;
9735 /* Checks ARM SBZ and SBO mandatory fields. */
9738 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9740 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
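/* Illustrative reading of the extraction above: BIT_NUM is 1-based, so a
   call such as sbo_sbz (insn, 9, 12, 1) pulls out the 12 bits starting at
   bit 8 and, with SBO set, requires them all to be one, matching the
   "should-be-one" / "should-be-zero" fields of the ARM encodings.  */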
9759 enum arm_record_result
9761 ARM_RECORD_SUCCESS = 0,
9762 ARM_RECORD_FAILURE = 1
9769 } arm_record_strx_t;
9780 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9781 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9784 struct regcache *reg_cache = arm_insn_r->regcache;
9785 ULONGEST u_regval[2] = {0};
9787 uint32_t reg_src1 = 0, reg_src2 = 0;
9788 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9790 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9791 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9793 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9795 /* 1) Handle misc store, immediate offset. */
9796 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9797 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9798 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9799 regcache_raw_read_unsigned (reg_cache, reg_src1,
9801 if (ARM_PC_REGNUM == reg_src1)
9803 /* If R15 was used as Rn, the value is the current PC+8. */
9804 u_regval[0] = u_regval[0] + 8;
9806 offset_8 = (immed_high << 4) | immed_low;
9807 /* Calculate target store address. */
9808 if (14 == arm_insn_r->opcode)
9810 tgt_mem_addr = u_regval[0] + offset_8;
9814 tgt_mem_addr = u_regval[0] - offset_8;
9816 if (ARM_RECORD_STRH == str_type)
9818 record_buf_mem[0] = 2;
9819 record_buf_mem[1] = tgt_mem_addr;
9820 arm_insn_r->mem_rec_count = 1;
9822 else if (ARM_RECORD_STRD == str_type)
9824 record_buf_mem[0] = 4;
9825 record_buf_mem[1] = tgt_mem_addr;
9826 record_buf_mem[2] = 4;
9827 record_buf_mem[3] = tgt_mem_addr + 4;
9828 arm_insn_r->mem_rec_count = 2;
9831 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9833 /* 2) Store, register offset. */
9835 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9837 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9838 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9839 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9842 /* If R15 was used as Rn, the value is the current PC+8. */
9843 u_regval[0] = u_regval[0] + 8;
9845 /* Calculate target store address, Rn +/- Rm, register offset. */
9846 if (12 == arm_insn_r->opcode)
9848 tgt_mem_addr = u_regval[0] + u_regval[1];
9852 tgt_mem_addr = u_regval[1] - u_regval[0];
9854 if (ARM_RECORD_STRH == str_type)
9856 record_buf_mem[0] = 2;
9857 record_buf_mem[1] = tgt_mem_addr;
9858 arm_insn_r->mem_rec_count = 1;
9860 else if (ARM_RECORD_STRD == str_type)
9862 record_buf_mem[0] = 4;
9863 record_buf_mem[1] = tgt_mem_addr;
9864 record_buf_mem[2] = 4;
9865 record_buf_mem[3] = tgt_mem_addr + 4;
9866 arm_insn_r->mem_rec_count = 2;
9869 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9870 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9872 /* 3) Store, immediate pre-indexed. */
9873 /* 5) Store, immediate post-indexed. */
9874 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9875 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9876 offset_8 = (immed_high << 4) | immed_low;
9877 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9878 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9879 /* Calculate target store address, Rn +/- immediate offset. */
9880 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9882 tgt_mem_addr = u_regval[0] + offset_8;
9886 tgt_mem_addr = u_regval[0] - offset_8;
9888 if (ARM_RECORD_STRH == str_type)
9890 record_buf_mem[0] = 2;
9891 record_buf_mem[1] = tgt_mem_addr;
9892 arm_insn_r->mem_rec_count = 1;
9894 else if (ARM_RECORD_STRD == str_type)
9896 record_buf_mem[0] = 4;
9897 record_buf_mem[1] = tgt_mem_addr;
9898 record_buf_mem[2] = 4;
9899 record_buf_mem[3] = tgt_mem_addr + 4;
9900 arm_insn_r->mem_rec_count = 2;
9902 /* Record Rn also as it changes. */
9903 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9904 arm_insn_r->reg_rec_count = 1;
9906 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9907 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9909 /* 4) Store, register pre-indexed. */
9910 /* 6) Store, register post-indexed. */
9911 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9912 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9913 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9914 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9915 /* Calculate target store address, Rn +/- Rm, register offset. */
9916 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9918 tgt_mem_addr = u_regval[0] + u_regval[1];
9922 tgt_mem_addr = u_regval[1] - u_regval[0];
9924 if (ARM_RECORD_STRH == str_type)
9926 record_buf_mem[0] = 2;
9927 record_buf_mem[1] = tgt_mem_addr;
9928 arm_insn_r->mem_rec_count = 1;
9930 else if (ARM_RECORD_STRD == str_type)
9932 record_buf_mem[0] = 4;
9933 record_buf_mem[1] = tgt_mem_addr;
9934 record_buf_mem[2] = 4;
9935 record_buf_mem[3] = tgt_mem_addr + 4;
9936 arm_insn_r->mem_rec_count = 2;
9938 /* Record Rn also as it changes. */
9939 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9940 arm_insn_r->reg_rec_count = 1;
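/* Worked example for the helper above (illustrative): recording
   "strh r1, [r2, #6]" with r2 == 0x1000 takes the immediate-offset path,
   producing one memory record with record_buf_mem[0] == 2 (length in
   bytes) and record_buf_mem[1] == 0x1006 (target address); an STRD
   variant instead produces two 4-byte records covering the doubleword.  */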
9945 /* Handling ARM extension space insns. */
9948 arm_record_extension_space (insn_decode_record *arm_insn_r)
9950 int ret = 0; /* Return value: -1: record failure; 0: success. */
9951 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9952 uint32_t record_buf[8], record_buf_mem[8];
9953 uint32_t reg_src1 = 0;
9954 struct regcache *reg_cache = arm_insn_r->regcache;
9955 ULONGEST u_regval = 0;
9957 gdb_assert (!INSN_RECORDED(arm_insn_r));
9958 /* Handle unconditional insn extension space. */
9960 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9961 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9962 if (arm_insn_r->cond)
9964 /* PLD has no effect on architectural state; it just affects
9966 if (5 == ((opcode1 & 0xE0) >> 5))
9969 record_buf[0] = ARM_PS_REGNUM;
9970 record_buf[1] = ARM_LR_REGNUM;
9971 arm_insn_r->reg_rec_count = 2;
9973 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9977 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9978 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9981 /* Undefined instruction on ARM V5; need to handle if later
9982 versions define it. */
9985 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9986 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9987 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9989 /* Handle arithmetic insn extension space. */
9990 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9991 && !INSN_RECORDED(arm_insn_r))
9993 /* Handle MLA(S) and MUL(S). */
9994 if (in_inclusive_range (insn_op1, 0U, 3U))
9996 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9997 record_buf[1] = ARM_PS_REGNUM;
9998 arm_insn_r->reg_rec_count = 2;
10000 else if (in_inclusive_range (insn_op1, 4U, 15U))
10002 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10003 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10004 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10005 record_buf[2] = ARM_PS_REGNUM;
10006 arm_insn_r->reg_rec_count = 3;
10010 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10011 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10012 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10014 /* Handle control insn extension space. */
10016 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10017 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10019 if (!bit (arm_insn_r->arm_insn,25))
10021 if (!bits (arm_insn_r->arm_insn, 4, 7))
10023 if ((0 == insn_op1) || (2 == insn_op1))
10026 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10027 arm_insn_r->reg_rec_count = 1;
10029 else if (1 == insn_op1)
10031 /* CPSR is going to be changed. */
10032 record_buf[0] = ARM_PS_REGNUM;
10033 arm_insn_r->reg_rec_count = 1;
10035 else if (3 == insn_op1)
10037 /* SPSR is going to be changed. */
10038 /* We need to get SPSR value, which is yet to be done. */
10042 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10047 record_buf[0] = ARM_PS_REGNUM;
10048 arm_insn_r->reg_rec_count = 1;
10050 else if (3 == insn_op1)
10053 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10054 arm_insn_r->reg_rec_count = 1;
10057 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10060 record_buf[0] = ARM_PS_REGNUM;
10061 record_buf[1] = ARM_LR_REGNUM;
10062 arm_insn_r->reg_rec_count = 2;
10064 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10066 /* QADD, QSUB, QDADD, QDSUB */
10067 record_buf[0] = ARM_PS_REGNUM;
10068 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10069 arm_insn_r->reg_rec_count = 2;
10071 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10074 record_buf[0] = ARM_PS_REGNUM;
10075 record_buf[1] = ARM_LR_REGNUM;
10076 arm_insn_r->reg_rec_count = 2;
10078 /* Save SPSR also; how? */
10081 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10082 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10083 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10084 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10087 if (0 == insn_op1 || 1 == insn_op1)
10089 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10090 /* We don't do optimization for SMULW<y> where we
10092 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10093 record_buf[1] = ARM_PS_REGNUM;
10094 arm_insn_r->reg_rec_count = 2;
10096 else if (2 == insn_op1)
10099 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10100 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10101 arm_insn_r->reg_rec_count = 2;
10103 else if (3 == insn_op1)
10106 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10107 arm_insn_r->reg_rec_count = 1;
10113 /* MSR : immediate form. */
10116 /* CPSR is going to be changed. */
10117 record_buf[0] = ARM_PS_REGNUM;
10118 arm_insn_r->reg_rec_count = 1;
10120 else if (3 == insn_op1)
10122 /* SPSR is going to be changed. */
10123 /* We need to get the SPSR value, which is yet to be done. */
10129 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10130 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10131 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10133 /* Handle load/store insn extension space. */
10135 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10136 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10137 && !INSN_RECORDED(arm_insn_r))
10142 /* These insns change a register and memory as well. */
10143 /* SWP or SWPB insn. */
10144 /* Get memory address given by Rn. */
10145 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10146 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10147 /* SWP insn?  It swaps a word. */
10148 if (8 == arm_insn_r->opcode)
10150 record_buf_mem[0] = 4;
10154 /* SWPB insn swaps only a byte. */
10155 record_buf_mem[0] = 1;
10157 record_buf_mem[1] = u_regval;
10158 arm_insn_r->mem_rec_count = 1;
10159 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10160 arm_insn_r->reg_rec_count = 1;
10162 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10165 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10168 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10171 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10172 record_buf[1] = record_buf[0] + 1;
10173 arm_insn_r->reg_rec_count = 2;
10175 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10178 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10181 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10183 /* LDRH, LDRSB, LDRSH. */
10184 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10185 arm_insn_r->reg_rec_count = 1;
10190 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10191 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10192 && !INSN_RECORDED(arm_insn_r))
10195 /* Handle coprocessor insn extension space. */
10198 /* To be done for ARMv5 and later; as of now we return -1. */
10202 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10203 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10208 /* Handling opcode 000 insns. */
10211 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10213 struct regcache *reg_cache = arm_insn_r->regcache;
10214 uint32_t record_buf[8], record_buf_mem[8];
10215 ULONGEST u_regval[2] = {0};
10217 uint32_t reg_src1 = 0;
10218 uint32_t opcode1 = 0;
10220 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10221 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10222 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10224 if (!((opcode1 & 0x19) == 0x10))
10226 /* Data-processing (register) and Data-processing (register-shifted
10228 /* Out of the 11 shifter operand modes, every insn modifies the destination
10229 register, which is specified by the Rd field (bits 12-15). */
10230 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10231 record_buf[1] = ARM_PS_REGNUM;
10232 arm_insn_r->reg_rec_count = 2;
10234 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
10236 /* Miscellaneous instructions */
10238 if (3 == arm_insn_r->decode && 0x12 == opcode1
10239 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10241 /* Handle BLX, branch and link/exchange. */
10242 if (9 == arm_insn_r->opcode)
10244 /* The branch state (ARM or Thumb) is chosen by copying bit[0] of Rm into
10245 the T bit of CPSR, and R14 stores the return address. */
10246 record_buf[0] = ARM_PS_REGNUM;
10247 record_buf[1] = ARM_LR_REGNUM;
10248 arm_insn_r->reg_rec_count = 2;
10251 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10253 /* Handle enhanced software breakpoint insn, BKPT. */
10254 /* CPSR is changed to be executed in ARM state, disabling normal
10255 interrupts, entering abort mode. */
10256 /* According to high vector configuration PC is set. */
10257 /* If the user hit the breakpoint and then reverses execution,
10258 we need to go back with the previous CPSR and
10259 Program Counter. */
10260 record_buf[0] = ARM_PS_REGNUM;
10261 record_buf[1] = ARM_LR_REGNUM;
10262 arm_insn_r->reg_rec_count = 2;
10264 /* Save SPSR also; how? */
10267 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10268 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10270 /* Handle BX, branch and link/exchange. */
10271 /* The branch state (ARM or Thumb) is chosen by copying bit[0] of Rm into the T bit of CPSR. */
10272 record_buf[0] = ARM_PS_REGNUM;
10273 arm_insn_r->reg_rec_count = 1;
10275 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10276 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10277 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10279 /* Count leading zeros: CLZ. */
10280 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10281 arm_insn_r->reg_rec_count = 1;
10283 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10284 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10285 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10286 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
10288 /* Handle MRS insn. */
10289 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10290 arm_insn_r->reg_rec_count = 1;
10293 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
10295 /* Multiply and multiply-accumulate */
10297 /* Handle multiply instructions. */
10298 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10299 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10301 /* Handle MLA and MUL. */
10302 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10303 record_buf[1] = ARM_PS_REGNUM;
10304 arm_insn_r->reg_rec_count = 2;
10306 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10308 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10309 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10310 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10311 record_buf[2] = ARM_PS_REGNUM;
10312 arm_insn_r->reg_rec_count = 3;
10315 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
10317 /* Synchronization primitives */
10319 /* Handling SWP, SWPB. */
10320 /* These insns change a register and memory as well. */
10321 /* SWP or SWPB insn. */
10323 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10324 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10325 /* SWP insn?  It swaps a word. */
10326 if (8 == arm_insn_r->opcode)
10328 record_buf_mem[0] = 4;
10332 /* SWPB insn swaps only a byte. */
10333 record_buf_mem[0] = 1;
10335 record_buf_mem[1] = u_regval[0];
10336 arm_insn_r->mem_rec_count = 1;
10337 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10338 arm_insn_r->reg_rec_count = 1;
10340 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
10341 || 15 == arm_insn_r->decode)
10343 if ((opcode1 & 0x12) == 2)
10345 /* Extra load/store (unprivileged) */
10350 /* Extra load/store */
10351 switch (bits (arm_insn_r->arm_insn, 5, 6))
10354 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
10356 /* STRH (register), STRH (immediate) */
10357 arm_record_strx (arm_insn_r, &record_buf[0],
10358 &record_buf_mem[0], ARM_RECORD_STRH);
10360 else if ((opcode1 & 0x05) == 0x1)
10362 /* LDRH (register) */
10363 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10364 arm_insn_r->reg_rec_count = 1;
10366 if (bit (arm_insn_r->arm_insn, 21))
10368 /* Write back to Rn. */
10369 record_buf[arm_insn_r->reg_rec_count++]
10370 = bits (arm_insn_r->arm_insn, 16, 19);
10373 else if ((opcode1 & 0x05) == 0x5)
10375 /* LDRH (immediate), LDRH (literal) */
10376 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10378 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10379 arm_insn_r->reg_rec_count = 1;
10383 /* LDRH (immediate) */
10384 if (bit (arm_insn_r->arm_insn, 21))
10386 /* Write back to Rn. */
10387 record_buf[arm_insn_r->reg_rec_count++] = rn;
10395 if ((opcode1 & 0x05) == 0x0)
10397 /* LDRD (register) */
10398 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10399 record_buf[1] = record_buf[0] + 1;
10400 arm_insn_r->reg_rec_count = 2;
10402 if (bit (arm_insn_r->arm_insn, 21))
10404 /* Write back to Rn. */
10405 record_buf[arm_insn_r->reg_rec_count++]
10406 = bits (arm_insn_r->arm_insn, 16, 19);
10409 else if ((opcode1 & 0x05) == 0x1)
10411 /* LDRSB (register) */
10412 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10413 arm_insn_r->reg_rec_count = 1;
10415 if (bit (arm_insn_r->arm_insn, 21))
10417 /* Write back to Rn. */
10418 record_buf[arm_insn_r->reg_rec_count++]
10419 = bits (arm_insn_r->arm_insn, 16, 19);
10422 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
10424 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10426 int rn = bits (arm_insn_r->arm_insn, 16, 19);
10428 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10429 arm_insn_r->reg_rec_count = 1;
10433 /* LDRD (immediate), LDRSB (immediate) */
10434 if (bit (arm_insn_r->arm_insn, 21))
10436 /* Write back to Rn. */
10437 record_buf[arm_insn_r->reg_rec_count++] = rn;
10445 if ((opcode1 & 0x05) == 0x0)
10447 /* STRD (register) */
10448 arm_record_strx (arm_insn_r, &record_buf[0],
10449 &record_buf_mem[0], ARM_RECORD_STRD);
10451 else if ((opcode1 & 0x05) == 0x1)
10453 /* LDRSH (register) */
10454 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10455 arm_insn_r->reg_rec_count = 1;
10457 if (bit (arm_insn_r->arm_insn, 21))
10459 /* Write back to Rn. */
10460 record_buf[arm_insn_r->reg_rec_count++]
10461 = bits (arm_insn_r->arm_insn, 16, 19);
10464 else if ((opcode1 & 0x05) == 0x4)
10466 /* STRD (immediate) */
10467 arm_record_strx (arm_insn_r, &record_buf[0],
10468 &record_buf_mem[0], ARM_RECORD_STRD);
10470 else if ((opcode1 & 0x05) == 0x5)
10472 /* LDRSH (immediate), LDRSH (literal) */
10473 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10474 arm_insn_r->reg_rec_count = 1;
10476 if (bit (arm_insn_r->arm_insn, 21))
10478 /* Write back to Rn. */
10479 record_buf[arm_insn_r->reg_rec_count++]
10480 = bits (arm_insn_r->arm_insn, 16, 19);
10496 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10497 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
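/* Throughout these record handlers, record_buf collects the numbers of the
   registers an instruction is about to modify and record_buf_mem collects
   length/address pairs describing the memory it is about to write.  REG_ALLOC
   and MEM_ALLOC copy those lists into the insn_decode_record so that process
   record can restore the old values during reverse execution. */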
10501 /* Handling opcode 001 insns. */
10504 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10506 uint32_t record_buf[8], record_buf_mem[8];
10508 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10509 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10511 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10512 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10513 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10516 /* Handle MSR insn. */
10517 if (9 == arm_insn_r->opcode)
10519 /* CPSR is going to be changed. */
10520 record_buf[0] = ARM_PS_REGNUM;
10521 arm_insn_r->reg_rec_count = 1;
10525 /* SPSR is going to be changed. */
10528 else if (arm_insn_r->opcode <= 15)
10530 /* Normal data processing insns. */
10531 /* In all 11 shifter operand modes, these insns modify the destination
10532 register, which is encoded in bits 12-15. */
10533 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10534 record_buf[1] = ARM_PS_REGNUM;
10535 arm_insn_r->reg_rec_count = 2;
10542 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10543 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10548 arm_record_media (insn_decode_record *arm_insn_r)
10550 uint32_t record_buf[8];
10552 switch (bits (arm_insn_r->arm_insn, 22, 24))
10555 /* Parallel addition and subtraction, signed */
10557 /* Parallel addition and subtraction, unsigned */
10560 /* Packing, unpacking, saturation and reversal */
10562 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10564 record_buf[arm_insn_r->reg_rec_count++] = rd;
10570 /* Signed multiplies */
10572 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10573 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10575 record_buf[arm_insn_r->reg_rec_count++] = rd;
10577 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10578 else if (op1 == 0x4)
10579 record_buf[arm_insn_r->reg_rec_count++]
10580 = bits (arm_insn_r->arm_insn, 12, 15);
10586 if (bit (arm_insn_r->arm_insn, 21)
10587 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10590 record_buf[arm_insn_r->reg_rec_count++]
10591 = bits (arm_insn_r->arm_insn, 12, 15);
10593 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10594 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10596 /* USAD8 and USADA8 */
10597 record_buf[arm_insn_r->reg_rec_count++]
10598 = bits (arm_insn_r->arm_insn, 16, 19);
10605 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10606 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10608 /* Permanently UNDEFINED */
10613 /* BFC, BFI and UBFX */
10614 record_buf[arm_insn_r->reg_rec_count++]
10615 = bits (arm_insn_r->arm_insn, 12, 15);
10624 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
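/* Media instructions only ever write core registers (and, for some encodings,
   the flags), which is why this handler records registers but never any
   memory. */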
10629 /* Handle ARM mode instructions with opcode 010. */
10632 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10634 struct regcache *reg_cache = arm_insn_r->regcache;
10636 uint32_t reg_base, reg_dest;
10637 uint32_t offset_12, tgt_mem_addr;
10638 uint32_t record_buf[8], record_buf_mem[8];
10639 unsigned char wback;
10642 /* Calculate wback. */
10643 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10644 || (bit (arm_insn_r->arm_insn, 21) == 1);
10646 arm_insn_r->reg_rec_count = 0;
10647 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10649 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10651 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10652 and LDRT. */
10654 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10655 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10657 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10658 precedes an LDR instruction that loads R15 (the PC), the pair
10659 emulates a branch and link instruction, and hence we need to save
10660 CPSR and PC as well. */
10661 if (ARM_PC_REGNUM == reg_dest)
10662 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10664 /* If wback is true, also save the base register, which is going to be
10665 written. */
10666 if (wback)
10667 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10671 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10673 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10674 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10676 /* Handle bit U. */
10677 if (bit (arm_insn_r->arm_insn, 23))
10679 /* U == 1: Add the offset. */
10680 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10684 /* U == 0: subtract the offset. */
10685 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10688 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10689 bytes. */
10690 if (bit (arm_insn_r->arm_insn, 22))
10692 /* STRB and STRBT: 1 byte. */
10693 record_buf_mem[0] = 1;
10697 /* STR and STRT: 4 bytes. */
10698 record_buf_mem[0] = 4;
10701 /* Handle bit P. */
10702 if (bit (arm_insn_r->arm_insn, 24))
10703 record_buf_mem[1] = tgt_mem_addr;
10705 record_buf_mem[1] = (uint32_t) u_regval;
10707 arm_insn_r->mem_rec_count = 1;
10709 /* If wback is true, also save the base register, which is going to be
10710 written. */
10711 if (wback)
10712 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10715 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10716 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
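/* A load only changes registers: the destination, optionally the written-back
   base, and the CPSR when the PC is the destination.  A store changes memory,
   so its target address has to be computed here, before the instruction
   executes, from the current value of the base register. */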
10720 /* Handling opcode 011 insns. */
10723 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10725 struct regcache *reg_cache = arm_insn_r->regcache;
10727 uint32_t shift_imm = 0;
10728 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10729 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10730 uint32_t record_buf[8], record_buf_mem[8];
10733 ULONGEST u_regval[2];
10735 if (bit (arm_insn_r->arm_insn, 4))
10736 return arm_record_media (arm_insn_r);
10738 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10739 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10741 /* Handle enhanced store insns and the LDRD DSP insn; the order follows
10742 the addressing modes for store insns. */
10746 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10748 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10749 /* The LDR insn is capable of doing branching: if MOV LR, PC
10750 precedes an LDR insn that loads R15 (the PC), the pair
10751 emulates a branch and link insn, and hence we need to save
10752 CPSR and PC as well. */
10753 if (15 != reg_dest)
10755 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10756 arm_insn_r->reg_rec_count = 1;
10760 record_buf[0] = reg_dest;
10761 record_buf[1] = ARM_PS_REGNUM;
10762 arm_insn_r->reg_rec_count = 2;
10767 if (! bits (arm_insn_r->arm_insn, 4, 11))
10769 /* Store insn, register offset and register pre-indexed,
10770 register post-indexed. */
10772 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10774 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10775 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10777 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10779 if (15 == reg_src2)
10781 /* If R15 was used as Rn, its value is the current PC + 8. */
10782 /* Pre-indexed mode doesn't reach here; illegal insn. */
10783 u_regval[0] = u_regval[0] + 8;
10785 /* Calculate target store address, Rn +/- Rm, register offset. */
10787 if (bit (arm_insn_r->arm_insn, 23))
10789 tgt_mem_addr = u_regval[0] + u_regval[1];
10793 tgt_mem_addr = u_regval[1] - u_regval[0];
10796 switch (arm_insn_r->opcode)
10810 record_buf_mem[0] = 4;
10825 record_buf_mem[0] = 1;
10829 gdb_assert_not_reached ("no decoding pattern found");
10832 record_buf_mem[1] = tgt_mem_addr;
10833 arm_insn_r->mem_rec_count = 1;
10835 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10836 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10837 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10838 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10839 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10840 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10843 /* Rn is going to be changed in pre-indexed mode and
10844 post-indexed mode as well. */
10845 record_buf[0] = reg_src2;
10846 arm_insn_r->reg_rec_count = 1;
10851 /* Store insn, scaled register offset; scaled pre-indexed. */
10852 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10854 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10856 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10857 /* Get shift_imm. */
10858 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10859 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10860 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10861 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10862 /* Offset_12 used as shift. */
10866 /* Offset_12 used as index. */
10867 offset_12 = u_regval[0] << shift_imm;
10871 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10877 if (bit (u_regval[0], 31))
10879 offset_12 = 0xFFFFFFFF;
10888 /* This is arithmetic shift. */
10889 offset_12 = s_word >> shift_imm;
10896 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM, &u_regval[1]);
10898 /* Get C flag value and shift it by 31. */
10899 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10900 | (u_regval[0]) >> 1);
10904 offset_12 = (u_regval[0] >> shift_imm) \
10905 | (u_regval[0] <<
10906 (sizeof(uint32_t) - shift_imm));
10911 gdb_assert_not_reached ("no decoding pattern found");
10915 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10917 if (bit (arm_insn_r->arm_insn, 23))
10919 tgt_mem_addr = u_regval[1] + offset_12;
10923 tgt_mem_addr = u_regval[1] - offset_12;
10926 switch (arm_insn_r->opcode)
10940 record_buf_mem[0] = 4;
10955 record_buf_mem[0] = 1;
10959 gdb_assert_not_reached ("no decoding pattern found");
10962 record_buf_mem[1] = tgt_mem_addr;
10963 arm_insn_r->mem_rec_count = 1;
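/* For example, STR r0, [r1, r2, LSL #2] has an LSL shift with shift_imm == 2,
   so the recorded block is the 4 bytes at R1 + (R2 << 2). */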
10965 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10966 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10967 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10968 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10969 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10970 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10973 /* Rn is going to be changed in register scaled pre-indexed
10974 mode and scaled post-indexed mode as well. */
10975 record_buf[0] = reg_src2;
10976 arm_insn_r->reg_rec_count = 1;
10981 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10982 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10986 /* Handle ARM mode instructions with opcode 100. */
10989 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10991 struct regcache *reg_cache = arm_insn_r->regcache;
10992 uint32_t register_count = 0, register_bits;
10993 uint32_t reg_base, addr_mode;
10994 uint32_t record_buf[24], record_buf_mem[48];
10998 /* Fetch the list of registers. */
10999 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11000 arm_insn_r->reg_rec_count = 0;
11002 /* Fetch the base register that contains the address we are loading data
11003 from or storing data to. */
11004 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11006 /* Calculate wback. */
11007 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11009 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11011 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11013 /* Find out which registers are going to be loaded from memory. */
11014 while (register_bits)
11016 if (register_bits & 0x00000001)
11017 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11018 register_bits = register_bits >> 1;
11023 /* If wback is true, also save the base register, which is going to be
11024 written. */
11025 if (wback)
11026 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11028 /* Save the CPSR register. */
11029 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11033 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11035 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11037 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11039 /* Find out how many registers are going to be stored to memory. */
11040 while (register_bits)
11042 if (register_bits & 0x00000001)
11044 register_bits = register_bits >> 1;
11049 /* STMDA (STMED): Decrement after. */
11051 record_buf_mem[1] = (uint32_t) u_regval
11052 - register_count * ARM_INT_REGISTER_SIZE + 4;
11054 /* STM (STMIA, STMEA): Increment after. */
11056 record_buf_mem[1] = (uint32_t) u_regval;
11058 /* STMDB (STMFD): Decrement before. */
11060 record_buf_mem[1] = (uint32_t) u_regval
11061 - register_count * ARM_INT_REGISTER_SIZE;
11063 /* STMIB (STMFA): Increment before. */
11065 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
11068 gdb_assert_not_reached ("no decoding pattern found");
11072 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
11073 arm_insn_r->mem_rec_count = 1;
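/* For example, STMDB sp!, {r4-r7, lr} stores five registers, so a single
   20-byte block starting at SP - 20 is recorded here, and the written-back SP
   is recorded just below. */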
11075 /* If wback is true, also save the base register, which is going to be
11076 written. */
11077 if (wback)
11078 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11081 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11082 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11086 /* Handling opcode 101 insns. */
11089 arm_record_b_bl (insn_decode_record *arm_insn_r)
11091 uint32_t record_buf[8];
11093 /* Handle B, BL, BLX(1) insns. */
11094 /* B simply branches so we do nothing here. */
11095 /* Note: BLX(1) doesn't fall here; instead it falls into the
11096 extension space. */
11097 if (bit (arm_insn_r->arm_insn, 24))
11099 record_buf[0] = ARM_LR_REGNUM;
11100 arm_insn_r->reg_rec_count = 1;
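/* Bit 24 is the link bit: only BL sets it and writes LR.  A plain B changes
   nothing beyond the PC, which process record already saves for every
   instruction. */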
11103 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11109 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11111 printf_unfiltered (_("Process record does not support instruction "
11112 "0x%0x at address %s.\n"), arm_insn_r->arm_insn,
11113 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11118 /* Record handler for vector data transfer instructions. */
11121 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11123 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11124 uint32_t record_buf[4];
11126 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11127 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11128 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11129 bit_l = bit (arm_insn_r->arm_insn, 20);
11130 bit_c = bit (arm_insn_r->arm_insn, 8);
11132 /* Handle VMOV instruction. */
11133 if (bit_l && bit_c)
11135 record_buf[0] = reg_t;
11136 arm_insn_r->reg_rec_count = 1;
11138 else if (bit_l && !bit_c)
11140 /* Handle VMOV instruction. */
11141 if (bits_a == 0x00)
11143 record_buf[0] = reg_t;
11144 arm_insn_r->reg_rec_count = 1;
11146 /* Handle VMRS instruction. */
11147 else if (bits_a == 0x07)
11150 reg_t = ARM_PS_REGNUM;
11152 record_buf[0] = reg_t;
11153 arm_insn_r->reg_rec_count = 1;
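/* For the "VMRS APSR_nzcv, FPSCR" form (Rt == 15) the FPSCR condition flags
   are copied into the APSR, which is why the CPSR (ARM_PS_REGNUM) is recorded
   here instead of a core register. */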
11156 else if (!bit_l && !bit_c)
11158 /* Handle VMOV instruction. */
11159 if (bits_a == 0x00)
11161 record_buf[0] = ARM_D0_REGNUM + reg_v;
11163 arm_insn_r->reg_rec_count = 1;
11165 /* Handle VMSR instruction. */
11166 else if (bits_a == 0x07)
11168 record_buf[0] = ARM_FPSCR_REGNUM;
11169 arm_insn_r->reg_rec_count = 1;
11172 else if (!bit_l && bit_c)
11174 /* Handle VMOV instruction. */
11175 if (!(bits_a & 0x04))
11177 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11178 + ARM_D0_REGNUM;
11179 arm_insn_r->reg_rec_count = 1;
11181 /* Handle VDUP instruction. */
11184 if (bit (arm_insn_r->arm_insn, 21))
11186 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11187 record_buf[0] = reg_v + ARM_D0_REGNUM;
11188 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11189 arm_insn_r->reg_rec_count = 2;
11193 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11194 record_buf[0] = reg_v + ARM_D0_REGNUM;
11195 arm_insn_r->reg_rec_count = 1;
11200 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11204 /* Record handler for extension register load/store instructions. */
11207 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11209 uint32_t opcode, single_reg;
11210 uint8_t op_vldm_vstm;
11211 uint32_t record_buf[8], record_buf_mem[128];
11212 ULONGEST u_regval = 0;
11214 struct regcache *reg_cache = arm_insn_r->regcache;
11216 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11217 single_reg = !bit (arm_insn_r->arm_insn, 8);
11218 op_vldm_vstm = opcode & 0x1b;
11220 /* Handle VMOV instructions. */
11221 if ((opcode & 0x1e) == 0x04)
11223 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11225 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11226 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11227 arm_insn_r->reg_rec_count = 2;
11231 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11232 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11236 /* The first S register number m is REG_M:M (M is bit 5),
11237 the corresponding D register number is REG_M:M / 2, which
11238 is also REG_M. */
11239 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11240 /* The second S register number is REG_M:M + 1, the
11241 corresponding D register number is (REG_M:M + 1) / 2.
11242 IOW, if bit M is 1, the first and second S registers
11243 are mapped to different D registers, otherwise, they are
11244 in the same D register. */
11247 record_buf[arm_insn_r->reg_rec_count++]
11248 = ARM_D0_REGNUM + reg_m + 1;
11253 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11254 arm_insn_r->reg_rec_count = 1;
11258 /* Handle VSTM and VPUSH instructions. */
11259 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11260 || op_vldm_vstm == 0x12)
11262 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11263 uint32_t memory_index = 0;
11265 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11266 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11267 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11268 imm_off32 = imm_off8 << 2;
11269 memory_count = imm_off8;
11271 if (bit (arm_insn_r->arm_insn, 23))
11272 start_address = u_regval;
11274 start_address = u_regval - imm_off32;
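/* VPUSH is encoded as VSTMDB SP! with the U bit (bit 23) clear, so the block
   starts IMM32 bytes below SP; e.g. VPUSH {d8-d15} has imm8 == 16 and writes
   the 64 bytes from SP - 64 up to SP. */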
11276 if (bit (arm_insn_r->arm_insn, 21))
11278 record_buf[0] = reg_rn;
11279 arm_insn_r->reg_rec_count = 1;
11282 while (memory_count > 0)
11286 record_buf_mem[memory_index] = 4;
11287 record_buf_mem[memory_index + 1] = start_address;
11288 start_address = start_address + 4;
11289 memory_index = memory_index + 2;
11293 record_buf_mem[memory_index] = 4;
11294 record_buf_mem[memory_index + 1] = start_address;
11295 record_buf_mem[memory_index + 2] = 4;
11296 record_buf_mem[memory_index + 3] = start_address + 4;
11297 start_address = start_address + 8;
11298 memory_index = memory_index + 4;
11302 arm_insn_r->mem_rec_count = (memory_index >> 1);
11304 /* Handle VLDM instructions. */
11305 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11306 || op_vldm_vstm == 0x13)
11308 uint32_t reg_count, reg_vd;
11309 uint32_t reg_index = 0;
11310 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11312 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11313 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11315 /* REG_VD is the first D register number. If the instruction
11316 loads memory to S registers (SINGLE_REG is TRUE), the register
11317 number is (REG_VD << 1 | bit D), so the corresponding D
11318 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11320 reg_vd = reg_vd | (bit_d << 4);
11322 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11323 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11325 /* If the instruction loads memory to D register, REG_COUNT should
11326 be divided by 2, according to the ARM Architecture Reference
11327 Manual. If the instruction loads memory to S register, divide by
11328 2 as well, because two S registers are mapped to one D register. */
11329 reg_count = reg_count / 2;
11330 if (single_reg && bit_d)
11332 /* Increase the register count if S register list starts from
11333 an odd number (bit d is one). */
11337 while (reg_count > 0)
11339 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11342 arm_insn_r->reg_rec_count = reg_index;
11344 /* VSTR Vector store register. */
11345 else if ((opcode & 0x13) == 0x10)
11347 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11348 uint32_t memory_index = 0;
11350 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11351 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11352 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11353 imm_off32 = imm_off8 << 2;
11355 if (bit (arm_insn_r->arm_insn, 23))
11356 start_address = u_regval + imm_off32;
11358 start_address = u_regval - imm_off32;
11362 record_buf_mem[memory_index] = 4;
11363 record_buf_mem[memory_index + 1] = start_address;
11364 arm_insn_r->mem_rec_count = 1;
11368 record_buf_mem[memory_index] = 4;
11369 record_buf_mem[memory_index + 1] = start_address;
11370 record_buf_mem[memory_index + 2] = 4;
11371 record_buf_mem[memory_index + 3] = start_address + 4;
11372 arm_insn_r->mem_rec_count = 2;
11375 /* VLDR Vector load register. */
11376 else if ((opcode & 0x13) == 0x11)
11378 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11382 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11383 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11387 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11388 /* Record register D rather than pseudo register S. */
11389 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11391 arm_insn_r->reg_rec_count = 1;
11394 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11395 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11399 /* Record handler for arm/thumb mode VFP data processing instructions. */
11402 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11404 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11405 uint32_t record_buf[4];
11406 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11407 enum insn_types curr_insn_type = INSN_INV;
11409 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11410 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11411 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11412 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11413 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11414 bit_d = bit (arm_insn_r->arm_insn, 22);
11415 /* Mask off the "D" bit. */
11416 opc1 = opc1 & ~0x04;
11418 /* Handle VMLA, VMLS. */
11421 if (bit (arm_insn_r->arm_insn, 10))
11423 if (bit (arm_insn_r->arm_insn, 6))
11424 curr_insn_type = INSN_T0;
11426 curr_insn_type = INSN_T1;
11431 curr_insn_type = INSN_T1;
11433 curr_insn_type = INSN_T2;
11436 /* Handle VNMLA, VNMLS, VNMUL. */
11437 else if (opc1 == 0x01)
11440 curr_insn_type = INSN_T1;
11442 curr_insn_type = INSN_T2;
11445 else if (opc1 == 0x02 && !(opc3 & 0x01))
11447 if (bit (arm_insn_r->arm_insn, 10))
11449 if (bit (arm_insn_r->arm_insn, 6))
11450 curr_insn_type = INSN_T0;
11452 curr_insn_type = INSN_T1;
11457 curr_insn_type = INSN_T1;
11459 curr_insn_type = INSN_T2;
11462 /* Handle VADD, VSUB. */
11463 else if (opc1 == 0x03)
11465 if (!bit (arm_insn_r->arm_insn, 9))
11467 if (bit (arm_insn_r->arm_insn, 6))
11468 curr_insn_type = INSN_T0;
11470 curr_insn_type = INSN_T1;
11475 curr_insn_type = INSN_T1;
11477 curr_insn_type = INSN_T2;
11481 else if (opc1 == 0x08)
11484 curr_insn_type = INSN_T1;
11486 curr_insn_type = INSN_T2;
11488 /* Handle all other vfp data processing instructions. */
11489 else if (opc1 == 0x0b)
11492 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11494 if (bit (arm_insn_r->arm_insn, 4))
11496 if (bit (arm_insn_r->arm_insn, 6))
11497 curr_insn_type = INSN_T0;
11499 curr_insn_type = INSN_T1;
11504 curr_insn_type = INSN_T1;
11506 curr_insn_type = INSN_T2;
11509 /* Handle VNEG and VABS. */
11510 else if ((opc2 == 0x01 && opc3 == 0x01)
11511 || (opc2 == 0x00 && opc3 == 0x03))
11513 if (!bit (arm_insn_r->arm_insn, 11))
11515 if (bit (arm_insn_r->arm_insn, 6))
11516 curr_insn_type = INSN_T0;
11518 curr_insn_type = INSN_T1;
11523 curr_insn_type = INSN_T1;
11525 curr_insn_type = INSN_T2;
11528 /* Handle VSQRT. */
11529 else if (opc2 == 0x01 && opc3 == 0x03)
11532 curr_insn_type = INSN_T1;
11534 curr_insn_type = INSN_T2;
11537 else if (opc2 == 0x07 && opc3 == 0x03)
11540 curr_insn_type = INSN_T1;
11542 curr_insn_type = INSN_T2;
11544 else if (opc3 & 0x01)
11547 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11549 if (!bit (arm_insn_r->arm_insn, 18))
11550 curr_insn_type = INSN_T2;
11554 curr_insn_type = INSN_T1;
11556 curr_insn_type = INSN_T2;
11560 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11563 curr_insn_type = INSN_T1;
11565 curr_insn_type = INSN_T2;
11567 /* Handle VCVTB, VCVTT. */
11568 else if ((opc2 & 0x0e) == 0x02)
11569 curr_insn_type = INSN_T2;
11570 /* Handle VCMP, VCMPE. */
11571 else if ((opc2 & 0x0e) == 0x04)
11572 curr_insn_type = INSN_T3;
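/* The INSN_T* codes select how much destination state the switch below
   records: a pair of consecutive D registers, a single D register, a
   single-precision destination, or (for VCMP/VCMPE, which only update the
   flags) just the FPSCR. */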
11576 switch (curr_insn_type)
11579 reg_vd = reg_vd | (bit_d << 4);
11580 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11581 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11582 arm_insn_r->reg_rec_count = 2;
11586 reg_vd = reg_vd | (bit_d << 4);
11587 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11588 arm_insn_r->reg_rec_count = 1;
11592 reg_vd = (reg_vd << 1) | bit_d;
11593 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11594 arm_insn_r->reg_rec_count = 1;
11598 record_buf[0] = ARM_FPSCR_REGNUM;
11599 arm_insn_r->reg_rec_count = 1;
11603 gdb_assert_not_reached ("no decoding pattern found");
11607 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11611 /* Handling opcode 110 insns. */
11614 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11616 uint32_t op1, op1_ebit, coproc;
11618 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11619 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11620 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11622 if ((coproc & 0x0e) == 0x0a)
11624 /* Handle extension register ld/st instructions. */
11626 return arm_record_exreg_ld_st_insn (arm_insn_r);
11628 /* 64-bit transfers between arm core and extension registers. */
11629 if ((op1 & 0x3e) == 0x04)
11630 return arm_record_exreg_ld_st_insn (arm_insn_r);
11634 /* Handle coprocessor ld/st instructions. */
11639 return arm_record_unsupported_insn (arm_insn_r);
11642 return arm_record_unsupported_insn (arm_insn_r);
11645 /* Move to coprocessor from two arm core registers. */
11647 return arm_record_unsupported_insn (arm_insn_r);
11649 /* Move to two arm core registers from coprocessor. */
11654 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11655 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11656 arm_insn_r->reg_rec_count = 2;
11658 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11662 return arm_record_unsupported_insn (arm_insn_r);
11665 /* Handling opcode 111 insns. */
11668 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11670 uint32_t op, op1_ebit, coproc, bits_24_25;
11671 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11672 struct regcache *reg_cache = arm_insn_r->regcache;
11674 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11675 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11676 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11677 op = bit (arm_insn_r->arm_insn, 4);
11678 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
11680 /* Handle arm SWI/SVC system call instructions. */
11681 if (bits_24_25 == 0x3)
11683 if (tdep->arm_syscall_record != NULL)
11685 ULONGEST svc_operand, svc_number;
11687 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
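/* Under the old OABI the syscall number is encoded in the SVC immediate as
   0x900000 + number, hence the subtraction below; under EABI the immediate is
   zero and the syscall number is taken from r7. */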
11689 if (svc_operand) /* OABI. */
11690 svc_number = svc_operand - 0x900000;
11692 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11694 return tdep->arm_syscall_record (reg_cache, svc_number);
11698 printf_unfiltered (_("no syscall record support\n"));
11702 else if (bits_24_25 == 0x02)
11706 if ((coproc & 0x0e) == 0x0a)
11708 /* 8, 16, and 32-bit transfer */
11709 return arm_record_vdata_transfer_insn (arm_insn_r);
11716 uint32_t record_buf[1];
11718 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11719 if (record_buf[0] == 15)
11720 record_buf[0] = ARM_PS_REGNUM;
11722 arm_insn_r->reg_rec_count = 1;
11723 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11736 if ((coproc & 0x0e) == 0x0a)
11738 /* VFP data-processing instructions. */
11739 return arm_record_vfp_data_proc_insn (arm_insn_r);
11750 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
11754 if ((coproc & 0x0e) != 0x0a)
11760 else if (op1 == 4 || op1 == 5)
11762 if ((coproc & 0x0e) == 0x0a)
11764 /* 64-bit transfers between ARM core and extension registers. */
11773 else if (op1 == 0 || op1 == 1)
11780 if ((coproc & 0x0e) == 0x0a)
11782 /* Extension register load/store */
11786 /* STC, STC2, LDC, LDC2 */
11795 /* Handling opcode 000 insns. */
11798 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11800 uint32_t record_buf[8];
11801 uint32_t reg_src1 = 0;
11803 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11805 record_buf[0] = ARM_PS_REGNUM;
11806 record_buf[1] = reg_src1;
11807 thumb_insn_r->reg_rec_count = 2;
11809 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11815 /* Handling opcode 001 insns. */
11818 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11820 uint32_t record_buf[8];
11821 uint32_t reg_src1 = 0;
11823 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11825 record_buf[0] = ARM_PS_REGNUM;
11826 record_buf[1] = reg_src1;
11827 thumb_insn_r->reg_rec_count = 2;
11829 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11834 /* Handling opcode 010 insns. */
11837 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11839 struct regcache *reg_cache = thumb_insn_r->regcache;
11840 uint32_t record_buf[8], record_buf_mem[8];
11842 uint32_t reg_src1 = 0, reg_src2 = 0;
11843 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11845 ULONGEST u_regval[2] = {0};
11847 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11849 if (bit (thumb_insn_r->arm_insn, 12))
11851 /* Handle load/store register offset. */
11852 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11854 if (in_inclusive_range (opB, 4U, 7U))
11856 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11857 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11858 record_buf[0] = reg_src1;
11859 thumb_insn_r->reg_rec_count = 1;
11861 else if (in_inclusive_range (opB, 0U, 2U))
11863 /* STR(2), STRB(2), STRH(2). */
11864 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11865 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11866 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11867 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11869 record_buf_mem[0] = 4; /* STR (2). */
11871 record_buf_mem[0] = 1; /* STRB (2). */
11873 record_buf_mem[0] = 2; /* STRH (2). */
11874 record_buf_mem[1] = u_regval[0] + u_regval[1];
11875 thumb_insn_r->mem_rec_count = 1;
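/* For example, STR r0, [r1, r2] records 4 bytes at R1 + R2, the sum of the
   base and offset registers read just above. */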
11878 else if (bit (thumb_insn_r->arm_insn, 11))
11880 /* Handle load from literal pool. */
11882 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11883 record_buf[0] = reg_src1;
11884 thumb_insn_r->reg_rec_count = 1;
11888 /* Special data instructions and branch and exchange */
11889 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11890 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11891 if ((3 == opcode2) && (!opcode3))
11893 /* Branch with exchange. */
11894 record_buf[0] = ARM_PS_REGNUM;
11895 thumb_insn_r->reg_rec_count = 1;
11899 /* Format 8; special data processing insns. */
11900 record_buf[0] = ARM_PS_REGNUM;
11901 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11902 | bits (thumb_insn_r->arm_insn, 0, 2));
11903 thumb_insn_r->reg_rec_count = 2;
11908 /* Format 5; data processing insns. */
11909 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11910 if (bit (thumb_insn_r->arm_insn, 7))
11912 reg_src1 = reg_src1 + 8;
11914 record_buf[0] = ARM_PS_REGNUM;
11915 record_buf[1] = reg_src1;
11916 thumb_insn_r->reg_rec_count = 2;
11919 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11920 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11926 /* Handling opcode 001 insns. */
11929 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11931 struct regcache *reg_cache = thumb_insn_r->regcache;
11932 uint32_t record_buf[8], record_buf_mem[8];
11934 uint32_t reg_src1 = 0;
11935 uint32_t opcode = 0, immed_5 = 0;
11937 ULONGEST u_regval = 0;
11939 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11944 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11945 record_buf[0] = reg_src1;
11946 thumb_insn_r->reg_rec_count = 1;
11951 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11952 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11953 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11954 record_buf_mem[0] = 4;
11955 record_buf_mem[1] = u_regval + (immed_5 * 4);
11956 thumb_insn_r->mem_rec_count = 1;
11959 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11960 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11966 /* Handling opcode 100 insns. */
11969 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11971 struct regcache *reg_cache = thumb_insn_r->regcache;
11972 uint32_t record_buf[8], record_buf_mem[8];
11974 uint32_t reg_src1 = 0;
11975 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11977 ULONGEST u_regval = 0;
11979 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11984 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11985 record_buf[0] = reg_src1;
11986 thumb_insn_r->reg_rec_count = 1;
11988 else if (1 == opcode)
11991 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11992 record_buf[0] = reg_src1;
11993 thumb_insn_r->reg_rec_count = 1;
11995 else if (2 == opcode)
11998 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11999 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12000 record_buf_mem[0] = 4;
12001 record_buf_mem[1] = u_regval + (immed_8 * 4);
12002 thumb_insn_r->mem_rec_count = 1;
12004 else if (0 == opcode)
12007 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12008 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12009 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12010 record_buf_mem[0] = 2;
12011 record_buf_mem[1] = u_regval + (immed_5 * 2);
12012 thumb_insn_r->mem_rec_count = 1;
12015 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12016 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12022 /* Handling opcode 101 insns. */
12025 thumb_record_misc (insn_decode_record *thumb_insn_r)
12027 struct regcache *reg_cache = thumb_insn_r->regcache;
12029 uint32_t opcode = 0;
12030 uint32_t register_bits = 0, register_count = 0;
12031 uint32_t index = 0, start_address = 0;
12032 uint32_t record_buf[24], record_buf_mem[48];
12035 ULONGEST u_regval = 0;
12037 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12039 if (opcode == 0 || opcode == 1)
12041 /* ADR and ADD (SP plus immediate) */
12043 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12044 record_buf[0] = reg_src1;
12045 thumb_insn_r->reg_rec_count = 1;
12049 /* Miscellaneous 16-bit instructions */
12050 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
12055 /* SETEND and CPS */
12058 /* ADD/SUB (SP plus immediate) */
12059 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12060 record_buf[0] = ARM_SP_REGNUM;
12061 thumb_insn_r->reg_rec_count = 1;
12063 case 1: /* fall through */
12064 case 3: /* fall through */
12065 case 9: /* fall through */
12070 /* SXTH, SXTB, UXTH, UXTB */
12071 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12072 thumb_insn_r->reg_rec_count = 1;
12074 case 4: /* fall through */
12077 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12078 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12079 while (register_bits)
12081 if (register_bits & 0x00000001)
12083 register_bits = register_bits >> 1;
12085 start_address = u_regval - \
12086 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12087 thumb_insn_r->mem_rec_count = register_count;
12088 while (register_count)
12090 record_buf_mem[(register_count * 2) - 1] = start_address;
12091 record_buf_mem[(register_count * 2) - 2] = 4;
12092 start_address = start_address + 4;
12095 record_buf[0] = ARM_SP_REGNUM;
12096 thumb_insn_r->reg_rec_count = 1;
12099 /* REV, REV16, REVSH */
12100 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12101 thumb_insn_r->reg_rec_count = 1;
12103 case 12: /* fall through */
12106 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12107 while (register_bits)
12109 if (register_bits & 0x00000001)
12110 record_buf[index++] = register_count;
12111 register_bits = register_bits >> 1;
12114 record_buf[index++] = ARM_PS_REGNUM;
12115 record_buf[index++] = ARM_SP_REGNUM;
12116 thumb_insn_r->reg_rec_count = index;
12120 /* Handle enhanced software breakpoint insn, BKPT. */
12121 /* The CPSR is changed so that execution continues in ARM state, with
12122 normal interrupts disabled and abort mode entered. */
12123 /* The PC is set according to the high vector configuration. */
12124 /* If the user hits the breakpoint and then runs in reverse, we need to go
12125 back to the previous CPSR and program counter. */
12126 record_buf[0] = ARM_PS_REGNUM;
12127 record_buf[1] = ARM_LR_REGNUM;
12128 thumb_insn_r->reg_rec_count = 2;
12129 /* We need to save SPSR value, which is not yet done. */
12130 printf_unfiltered (_("Process record does not support instruction "
12131 "0x%0x at address %s.\n"),
12132 thumb_insn_r->arm_insn,
12133 paddress (thumb_insn_r->gdbarch,
12134 thumb_insn_r->this_addr));
12138 /* If-Then, and hints */
12145 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12146 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12152 /* Handling opcode 110 insns. */
12155 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12157 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12158 struct regcache *reg_cache = thumb_insn_r->regcache;
12160 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12161 uint32_t reg_src1 = 0;
12162 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12163 uint32_t index = 0, start_address = 0;
12164 uint32_t record_buf[24], record_buf_mem[48];
12166 ULONGEST u_regval = 0;
12168 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12169 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12175 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12177 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12178 while (register_bits)
12180 if (register_bits & 0x00000001)
12181 record_buf[index++] = register_count;
12182 register_bits = register_bits >> 1;
12185 record_buf[index++] = reg_src1;
12186 thumb_insn_r->reg_rec_count = index;
12188 else if (0 == opcode2)
12190 /* Handle STMIA. */
12191 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12193 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12194 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12195 while (register_bits)
12197 if (register_bits & 0x00000001)
12199 register_bits = register_bits >> 1;
12201 start_address = u_regval;
12202 thumb_insn_r->mem_rec_count = register_count;
12203 while (register_count)
12205 record_buf_mem[(register_count * 2) - 1] = start_address;
12206 record_buf_mem[(register_count * 2) - 2] = 4;
12207 start_address = start_address + 4;
12211 else if (0x1F == opcode1)
12213 /* Handle arm syscall insn. */
12214 if (tdep->arm_syscall_record != NULL)
12216 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12217 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12221 printf_unfiltered (_("no syscall record support\n"));
12226 /* B (1), the conditional branch, is automatically taken care of in
12227 process_record, as the PC is saved there. */
12229 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12230 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12236 /* Handling opcode 111 insns. */
12239 thumb_record_branch (insn_decode_record *thumb_insn_r)
12241 uint32_t record_buf[8];
12242 uint32_t bits_h = 0;
12244 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
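/* The 16-bit BL/BLX sequence is split over two halfwords: H == 2 is the
   prefix that loads LR with the upper part of the offset, H == 3 the BL
   suffix and H == 1 the BLX suffix.  All three write LR, and the BLX suffix
   also switches to ARM state, so the CPSR is recorded for it as well. */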
12246 if (2 == bits_h || 3 == bits_h)
12249 record_buf[0] = ARM_LR_REGNUM;
12250 thumb_insn_r->reg_rec_count = 1;
12252 else if (1 == bits_h)
12255 record_buf[0] = ARM_PS_REGNUM;
12256 record_buf[1] = ARM_LR_REGNUM;
12257 thumb_insn_r->reg_rec_count = 2;
12260 /* B(2) is automatically taken care of in process_record, as the PC is
12261 saved there. */
12263 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12268 /* Handler for thumb2 load/store multiple instructions. */
12271 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12273 struct regcache *reg_cache = thumb2_insn_r->regcache;
12275 uint32_t reg_rn, op;
12276 uint32_t register_bits = 0, register_count = 0;
12277 uint32_t index = 0, start_address = 0;
12278 uint32_t record_buf[24], record_buf_mem[48];
12280 ULONGEST u_regval = 0;
12282 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12283 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12285 if (0 == op || 3 == op)
12287 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12289 /* Handle RFE instruction. */
12290 record_buf[0] = ARM_PS_REGNUM;
12291 thumb2_insn_r->reg_rec_count = 1;
12295 /* Handle SRS instruction after reading banked SP. */
12296 return arm_record_unsupported_insn (thumb2_insn_r);
12299 else if (1 == op || 2 == op)
12301 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12303 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12304 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12305 while (register_bits)
12307 if (register_bits & 0x00000001)
12308 record_buf[index++] = register_count;
12311 register_bits = register_bits >> 1;
12313 record_buf[index++] = reg_rn;
12314 record_buf[index++] = ARM_PS_REGNUM;
12315 thumb2_insn_r->reg_rec_count = index;
12319 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12320 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12321 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12322 while (register_bits)
12324 if (register_bits & 0x00000001)
12327 register_bits = register_bits >> 1;
12332 /* Start address calculation for STM/STMIA/STMEA. */
12333 start_address = u_regval;
12337 /* Start address calculation for STMDB/STMFD. */
12338 start_address = u_regval - register_count * 4;
12341 thumb2_insn_r->mem_rec_count = register_count;
12342 while (register_count)
12344 record_buf_mem[register_count * 2 - 1] = start_address;
12345 record_buf_mem[register_count * 2 - 2] = 4;
12346 start_address = start_address + 4;
12349 record_buf[0] = reg_rn;
12350 record_buf[1] = ARM_PS_REGNUM;
12351 thumb2_insn_r->reg_rec_count = 2;
12355 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12357 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12359 return ARM_RECORD_SUCCESS;
12362 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12363 instructions. */
12366 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12368 struct regcache *reg_cache = thumb2_insn_r->regcache;
12370 uint32_t reg_rd, reg_rn, offset_imm;
12371 uint32_t reg_dest1, reg_dest2;
12372 uint32_t address, offset_addr;
12373 uint32_t record_buf[8], record_buf_mem[8];
12374 uint32_t op1, op2, op3;
12376 ULONGEST u_regval[2];
12378 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12379 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12380 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12382 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12384 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12386 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12387 record_buf[0] = reg_dest1;
12388 record_buf[1] = ARM_PS_REGNUM;
12389 thumb2_insn_r->reg_rec_count = 2;
12392 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12394 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12395 record_buf[2] = reg_dest2;
12396 thumb2_insn_r->reg_rec_count = 3;
12401 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12402 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12404 if (0 == op1 && 0 == op2)
12406 /* Handle STREX. */
12407 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12408 address = u_regval[0] + (offset_imm * 4);
12409 record_buf_mem[0] = 4;
12410 record_buf_mem[1] = address;
12411 thumb2_insn_r->mem_rec_count = 1;
12412 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12413 record_buf[0] = reg_rd;
12414 thumb2_insn_r->reg_rec_count = 1;
12416 else if (1 == op1 && 0 == op2)
12418 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12419 record_buf[0] = reg_rd;
12420 thumb2_insn_r->reg_rec_count = 1;
12421 address = u_regval[0];
12422 record_buf_mem[1] = address;
12426 /* Handle STREXB. */
12427 record_buf_mem[0] = 1;
12428 thumb2_insn_r->mem_rec_count = 1;
12432 /* Handle STREXH. */
12433 record_buf_mem[0] = 2;
12434 thumb2_insn_r->mem_rec_count = 1;
12438 /* Handle STREXD. */
12439 address = u_regval[0];
12440 record_buf_mem[0] = 4;
12441 record_buf_mem[2] = 4;
12442 record_buf_mem[3] = address + 4;
12443 thumb2_insn_r->mem_rec_count = 2;
12448 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12450 if (bit (thumb2_insn_r->arm_insn, 24))
12452 if (bit (thumb2_insn_r->arm_insn, 23))
12453 offset_addr = u_regval[0] + (offset_imm * 4);
12455 offset_addr = u_regval[0] - (offset_imm * 4);
12457 address = offset_addr;
12460 address = u_regval[0];
12462 record_buf_mem[0] = 4;
12463 record_buf_mem[1] = address;
12464 record_buf_mem[2] = 4;
12465 record_buf_mem[3] = address + 4;
12466 thumb2_insn_r->mem_rec_count = 2;
12467 record_buf[0] = reg_rn;
12468 thumb2_insn_r->reg_rec_count = 1;
12472 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12474 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12476 return ARM_RECORD_SUCCESS;
12479 /* Handler for thumb2 data processing (shift register and modified immediate)
12480 instructions. */
12483 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12485 uint32_t reg_rd, op;
12486 uint32_t record_buf[8];
12488 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12489 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12491 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12493 record_buf[0] = ARM_PS_REGNUM;
12494 thumb2_insn_r->reg_rec_count = 1;
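/* With Rd == 15 these opcodes are the flag-setting compare/test forms
   TST, TEQ, CMN and CMP, which change only the CPSR. */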
12498 record_buf[0] = reg_rd;
12499 record_buf[1] = ARM_PS_REGNUM;
12500 thumb2_insn_r->reg_rec_count = 2;
12503 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12505 return ARM_RECORD_SUCCESS;
12508 /* Generic handler for thumb2 instructions which affect the destination and PS
12509 registers. */
12512 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12515 uint32_t record_buf[8];
12517 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12519 record_buf[0] = reg_rd;
12520 record_buf[1] = ARM_PS_REGNUM;
12521 thumb2_insn_r->reg_rec_count = 2;
12523 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12525 return ARM_RECORD_SUCCESS;
12528 /* Handler for thumb2 branch and miscellaneous control instructions. */
12531 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12533 uint32_t op, op1, op2;
12534 uint32_t record_buf[8];
12536 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12537 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12538 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12540 /* Handle MSR insn. */
12541 if (!(op1 & 0x2) && 0x38 == op)
12545 /* CPSR is going to be changed. */
12546 record_buf[0] = ARM_PS_REGNUM;
12547 thumb2_insn_r->reg_rec_count = 1;
12551 arm_record_unsupported_insn (thumb2_insn_r);
12555 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12558 record_buf[0] = ARM_PS_REGNUM;
12559 record_buf[1] = ARM_LR_REGNUM;
12560 thumb2_insn_r->reg_rec_count = 2;
12563 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12565 return ARM_RECORD_SUCCESS;
12568 /* Handler for thumb2 store single data item instructions. */
12571 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12573 struct regcache *reg_cache = thumb2_insn_r->regcache;
12575 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12576 uint32_t address, offset_addr;
12577 uint32_t record_buf[8], record_buf_mem[8];
12580 ULONGEST u_regval[2];
12582 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12583 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12584 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12585 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12587 if (bit (thumb2_insn_r->arm_insn, 23))
12590 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12591 offset_addr = u_regval[0] + offset_imm;
12592 address = offset_addr;
12597 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12599 /* Handle STRB (register). */
12600 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12601 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12602 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12603 offset_addr = u_regval[1] << shift_imm;
12604 address = u_regval[0] + offset_addr;
12608 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12609 if (bit (thumb2_insn_r->arm_insn, 10))
12611 if (bit (thumb2_insn_r->arm_insn, 9))
12612 offset_addr = u_regval[0] + offset_imm;
12614 offset_addr = u_regval[0] - offset_imm;
12616 address = offset_addr;
12619 address = u_regval[0];
12625 /* Store byte instructions. */
12628 record_buf_mem[0] = 1;
12630 /* Store half word instructions. */
12633 record_buf_mem[0] = 2;
12635 /* Store word instructions. */
12638 record_buf_mem[0] = 4;
12642 gdb_assert_not_reached ("no decoding pattern found");
12646 record_buf_mem[1] = address;
12647 thumb2_insn_r->mem_rec_count = 1;
12648 record_buf[0] = reg_rn;
12649 thumb2_insn_r->reg_rec_count = 1;
12651 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12653 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12655 return ARM_RECORD_SUCCESS;
12658 /* Handler for thumb2 load memory hints instructions. */
12661 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12663 uint32_t record_buf[8];
12664 uint32_t reg_rt, reg_rn;
12666 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12667 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12669 if (ARM_PC_REGNUM != reg_rt)
12671 record_buf[0] = reg_rt;
12672 record_buf[1] = reg_rn;
12673 record_buf[2] = ARM_PS_REGNUM;
12674 thumb2_insn_r->reg_rec_count = 3;
12676 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12678 return ARM_RECORD_SUCCESS;
12681 return ARM_RECORD_FAILURE;
12684 /* Handler for thumb2 load word instructions. */
12687 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12689 uint32_t record_buf[8];
12691 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12692 record_buf[1] = ARM_PS_REGNUM;
12693 thumb2_insn_r->reg_rec_count = 2;
12695 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12697 return ARM_RECORD_SUCCESS;
12700 /* Handler for thumb2 long multiply, long multiply accumulate, and
12701 divide instructions. */
12704 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12706 uint32_t opcode1 = 0, opcode2 = 0;
12707 uint32_t record_buf[8];
12709 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12710 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12712 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12714 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12716 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12717 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12718 record_buf[2] = ARM_PS_REGNUM;
12719 thumb2_insn_r->reg_rec_count = 3;
12721 else if (1 == opcode1 || 3 == opcode2)
12723 /* Handle SDIV and UDIV. */
12724 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12725 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12726 record_buf[2] = ARM_PS_REGNUM;
12727 thumb2_insn_r->reg_rec_count = 3;
12730 return ARM_RECORD_FAILURE;
12732 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12734 return ARM_RECORD_SUCCESS;
12737 /* Record handler for thumb32 coprocessor instructions. */
12740 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12742 if (bit (thumb2_insn_r->arm_insn, 25))
12743 return arm_record_coproc_data_proc (thumb2_insn_r);
12745 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12748 /* Record handler for Advanced SIMD structure load/store instructions. */
12751 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12753 struct regcache *reg_cache = thumb2_insn_r->regcache;
12754 uint32_t l_bit, a_bit, b_bits;
12755 uint32_t record_buf[128], record_buf_mem[128];
12756 uint32_t reg_rn, reg_vd, address, f_elem;
12757 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12760 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12761 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12762 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12763 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12764 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12765 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12766 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12767 f_elem = 8 / f_ebytes;
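/* f_ebytes is the element size in bytes (1, 2, 4 or 8, from the size field
   in bits 6-7) and f_elem is the number of such elements in one 64-bit D
   register. */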
12771 ULONGEST u_regval = 0;
12772 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12773 address = u_regval;
12778 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12780 if (b_bits == 0x07)
12782 else if (b_bits == 0x0a)
12784 else if (b_bits == 0x06)
12786 else if (b_bits == 0x02)
12791 for (index_r = 0; index_r < bf_regs; index_r++)
12793 for (index_e = 0; index_e < f_elem; index_e++)
12795 record_buf_mem[index_m++] = f_ebytes;
12796 record_buf_mem[index_m++] = address;
12797 address = address + f_ebytes;
12798 thumb2_insn_r->mem_rec_count += 1;
12803 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12805 if (b_bits == 0x09 || b_bits == 0x08)
12807 else if (b_bits == 0x03)
12812 for (index_r = 0; index_r < bf_regs; index_r++)
12813 for (index_e = 0; index_e < f_elem; index_e++)
12815 for (loop_t = 0; loop_t < 2; loop_t++)
12817 record_buf_mem[index_m++] = f_ebytes;
12818 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12819 thumb2_insn_r->mem_rec_count += 1;
12821 address = address + (2 * f_ebytes);
12825 else if ((b_bits & 0x0e) == 0x04)
12827 for (index_e = 0; index_e < f_elem; index_e++)
12829 for (loop_t = 0; loop_t < 3; loop_t++)
12831 record_buf_mem[index_m++] = f_ebytes;
12832 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12833 thumb2_insn_r->mem_rec_count += 1;
12835 address = address + (3 * f_ebytes);
12839 else if (!(b_bits & 0x0e))
12841 for (index_e = 0; index_e < f_elem; index_e++)
12843 for (loop_t = 0; loop_t < 4; loop_t++)
12845 record_buf_mem[index_m++] = f_ebytes;
12846 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12847 thumb2_insn_r->mem_rec_count += 1;
12849 address = address + (4 * f_ebytes);
12855 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12857 if (bft_size == 0x00)
12859 else if (bft_size == 0x01)
12861 else if (bft_size == 0x02)
12867 if (!(b_bits & 0x0b) || b_bits == 0x08)
12868 thumb2_insn_r->mem_rec_count = 1;
12870 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12871 thumb2_insn_r->mem_rec_count = 2;
12873 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12874 thumb2_insn_r->mem_rec_count = 3;
12876 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12877 thumb2_insn_r->mem_rec_count = 4;
12879 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12881 record_buf_mem[index_m] = f_ebytes;
12882 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12891 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12892 thumb2_insn_r->reg_rec_count = 1;
12894 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12895 thumb2_insn_r->reg_rec_count = 2;
12897 else if ((b_bits & 0x0e) == 0x04)
12898 thumb2_insn_r->reg_rec_count = 3;
12900 else if (!(b_bits & 0x0e))
12901 thumb2_insn_r->reg_rec_count = 4;
12906 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12907 thumb2_insn_r->reg_rec_count = 1;
12909 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12910 thumb2_insn_r->reg_rec_count = 2;
12912 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12913 thumb2_insn_r->reg_rec_count = 3;
12915 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12916 thumb2_insn_r->reg_rec_count = 4;
12918 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12919 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
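/* If Rm (bits 0-3) is not PC, the base register Rn is written back and must
   be recorded as well.  */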
12923 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12925 record_buf[index_r] = reg_rn;
12926 thumb2_insn_r->reg_rec_count += 1;
12929 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12931 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12936 /* Decodes thumb2 instruction type and invokes its record handler. */
12938 static unsigned int
12939 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12941 uint32_t op, op1, op2;
12943 op = bit (thumb2_insn_r->arm_insn, 15);
12944 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12945 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
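/* OP1 (bits 27-28) selects the major 32-bit Thumb encoding group; OP2
   (bits 20-26) selects the sub-group within it.  */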
if (op1 == 0x01)
12949 if (!(op2 & 0x64))
12951 /* Load/store multiple instruction. */
12952 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12954 else if ((op2 & 0x64) == 0x4)
12956 /* Load/store (dual/exclusive) and table branch instruction. */
12957 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12959 else if ((op2 & 0x60) == 0x20)
12961 /* Data-processing (shifted register). */
12962 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12964 else if (op2 & 0x40)
12966 /* Co-processor instructions. */
12967 return thumb2_record_coproc_insn (thumb2_insn_r);
12970 else if (op1 == 0x02)
12974 /* Branches and miscellaneous control instructions. */
12975 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12977 else if (op2 & 0x20)
12979 /* Data-processing (plain binary immediate) instruction. */
12980 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12984 /* Data-processing (modified immediate). */
12985 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12988 else if (op1 == 0x03)
12990 if (!(op2 & 0x71))
12992 /* Store single data item. */
12993 return thumb2_record_str_single_data (thumb2_insn_r);
12995 else if (!((op2 & 0x71) ^ 0x10))
12997 /* Advanced SIMD or structure load/store instructions. */
12998 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13000 else if (!((op2 & 0x67) ^ 0x01))
13002 /* Load byte, memory hints instruction. */
13003 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13005 else if (!((op2 & 0x67) ^ 0x03))
13007 /* Load halfword, memory hints instruction. */
13008 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13010 else if (!((op2 & 0x67) ^ 0x05))
13012 /* Load word instruction. */
13013 return thumb2_record_ld_word (thumb2_insn_r);
13015 else if (!((op2 & 0x70) ^ 0x20))
13017 /* Data-processing (register) instruction. */
13018 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13020 else if (!((op2 & 0x78) ^ 0x30))
13022 /* Multiply, multiply accumulate, abs diff instruction. */
13023 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13025 else if (!((op2 & 0x78) ^ 0x38))
13027 /* Long multiply, long multiply accumulate, and divide. */
13028 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13030 else if (op2 & 0x40)
13032 /* Co-processor instructions. */
13033 return thumb2_record_coproc_insn (thumb2_insn_r);
13041 /* Abstract memory reader. */
13043 class abstract_memory_reader
13046 /* Read LEN bytes of target memory at address MEMADDR, placing the
13047 results in GDB's memory at BUF. Return true on success. */
13049 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
13052 /* An instruction reader that reads from the real target.  */
13054 class instruction_reader : public abstract_memory_reader
13057 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13059 if (target_read_memory (memaddr, buf, len))
13068 /* Extract an ARM, Thumb or Thumb-2 instruction of INSN_SIZE bytes at
13069    INSN_RECORD->this_addr.  Return 0 on success and a positive value on failure.  */
13072 extract_arm_insn (abstract_memory_reader& reader,
13073 insn_decode_record *insn_record, uint32_t insn_size)
13075 gdb_byte buf[insn_size];
13077 memset (&buf[0], 0, insn_size);
13079 if (!reader.read (insn_record->this_addr, buf, insn_size))
13081 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13083 gdbarch_byte_order_for_code (insn_record->gdbarch));
13087 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13089 /* Decode an ARM/Thumb instruction depending on its condition codes and opcodes, and dispatch it to the matching record handler.  */
13093 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13094 record_type_t record_type, uint32_t insn_size)
13097 /* (Starting from numerical 0.)  Bits 25, 26, 27 decode the type of ARM instruction.  */
13099 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13101 arm_record_data_proc_misc_ld_str, /* 000. */
13102 arm_record_data_proc_imm, /* 001. */
13103 arm_record_ld_st_imm_offset, /* 010. */
13104 arm_record_ld_st_reg_offset, /* 011. */
13105 arm_record_ld_st_multiple, /* 100. */
13106 arm_record_b_bl, /* 101. */
13107 arm_record_asimd_vfp_coproc, /* 110. */
13108 arm_record_coproc_data_proc /* 111. */
13111 /* (Starting from numerical 0.)  Bits 13, 14, 15 decode the type of Thumb instruction.  */
13113 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13115 thumb_record_shift_add_sub, /* 000. */
13116 thumb_record_add_sub_cmp_mov, /* 001. */
13117 thumb_record_ld_st_reg_offset, /* 010. */
13118 thumb_record_ld_st_imm_offset, /* 011. */
13119 thumb_record_ld_st_stack, /* 100. */
13120 thumb_record_misc, /* 101. */
13121 thumb_record_ldm_stm_swi, /* 110. */
13122 thumb_record_branch /* 111. */
13125 uint32_t ret = 0; /* Return value: 0 on success, non-zero on failure.  */
13126 uint32_t insn_id = 0;
13128 if (extract_arm_insn (reader, arm_record, insn_size))
13132 printf_unfiltered (_("Process record: error reading memory at "
13133 "addr %s len = %d.\n"),
13134 paddress (arm_record->gdbarch,
13135 arm_record->this_addr), insn_size);
13139 else if (ARM_RECORD == record_type)
13141 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13142 insn_id = bits (arm_record->arm_insn, 25, 27);
13144 if (arm_record->cond == 0xf)
13145 ret = arm_record_extension_space (arm_record);
13148 /* This insn is not in the unconditional/extension space, so decode it
13149    with the normal ARM handler table.  */
13150 ret = arm_handle_insn[insn_id] (arm_record);
13152 if (ret != ARM_RECORD_SUCCESS)
13154 arm_record_unsupported_insn (arm_record);
13158 else if (THUMB_RECORD == record_type)
13160 /* Thumb instructions do not have condition codes, so set the condition to -1.  */
13161 arm_record->cond = -1;
13162 insn_id = bits (arm_record->arm_insn, 13, 15);
13163 ret = thumb_handle_insn[insn_id] (arm_record);
13164 if (ret != ARM_RECORD_SUCCESS)
13166 arm_record_unsupported_insn (arm_record);
13170 else if (THUMB2_RECORD == record_type)
13172 /* Thumb-2 instructions do not have condition codes either, so set the condition to -1.  */
13173 arm_record->cond = -1;
13175 /* Swap the two halfwords of the 32-bit Thumb-2 instruction so that the first halfword ends up in the most significant bits.  */
13176 arm_record->arm_insn
13177 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
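/* For example, on a little-endian target the code bytes 1d ee 70 7f
   (mrc p15, 0, r7, c13, c0, {3}) are read as 0x7f70ee1d and become
   0xee1d7f70 after the swap, with the first halfword in the most
   significant bits.  */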
13179 ret = thumb2_record_decode_insn_handler (arm_record);
13181 if (ret != ARM_RECORD_SUCCESS)
13183 arm_record_unsupported_insn (arm_record);
13189 /* Unknown record type -- this should be unreachable.  */
13190 gdb_assert_not_reached ("not a valid instruction, could not decode");
13197 namespace selftests {
13199 /* Provide both 16-bit and 32-bit thumb instructions. */
13201 class instruction_reader_thumb : public abstract_memory_reader
13204 template<size_t SIZE>
13205 instruction_reader_thumb (enum bfd_endian endian,
13206 const uint16_t (&insns)[SIZE])
13207 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13210 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) override
13212 SELF_CHECK (len == 4 || len == 2);
13213 SELF_CHECK (memaddr % 2 == 0);
13214 SELF_CHECK ((memaddr / 2) < m_insns_size);
13216 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13219 store_unsigned_integer (&buf[2], 2, m_endian,
13220 m_insns[memaddr / 2 + 1]);
13226 enum bfd_endian m_endian;
13227 const uint16_t *m_insns;
13228 size_t m_insns_size;
13232 arm_record_test (void)
13234 struct gdbarch_info info;
13235 gdbarch_info_init (&info);
13236 info.bfd_arch_info = bfd_scan_arch ("arm");
13238 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13240 SELF_CHECK (gdbarch != NULL);
13242 /* 16-bit Thumb instructions. */
13244 insn_decode_record arm_record;
13246 memset (&arm_record, 0, sizeof (insn_decode_record));
13247 arm_record.gdbarch = gdbarch;
13249 static const uint16_t insns[] = {
13250 /* db b2 uxtb r3, r3 */
13252 /* cd 58 ldr r5, [r1, r3] */
13256 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13257 instruction_reader_thumb reader (endian, insns);
13258 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13259 THUMB_INSN_SIZE_BYTES);
13261 SELF_CHECK (ret == 0);
13262 SELF_CHECK (arm_record.mem_rec_count == 0);
13263 SELF_CHECK (arm_record.reg_rec_count == 1);
13264 SELF_CHECK (arm_record.arm_regs[0] == 3);
13266 arm_record.this_addr += 2;
13267 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13268 THUMB_INSN_SIZE_BYTES);
13270 SELF_CHECK (ret == 0);
13271 SELF_CHECK (arm_record.mem_rec_count == 0);
13272 SELF_CHECK (arm_record.reg_rec_count == 1);
13273 SELF_CHECK (arm_record.arm_regs[0] == 5);
13276 /* 32-bit Thumb-2 instructions. */
13278 insn_decode_record arm_record;
13280 memset (&arm_record, 0, sizeof (insn_decode_record));
13281 arm_record.gdbarch = gdbarch;
13283 static const uint16_t insns[] = {
13284 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13288 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13289 instruction_reader_thumb reader (endian, insns);
13290 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13291 THUMB2_INSN_SIZE_BYTES);
13293 SELF_CHECK (ret == 0);
13294 SELF_CHECK (arm_record.mem_rec_count == 0);
13295 SELF_CHECK (arm_record.reg_rec_count == 1);
13296 SELF_CHECK (arm_record.arm_regs[0] == 7);
13300 /* Instruction reader from manually cooked instruction sequences. */
13302 class test_arm_instruction_reader : public arm_instruction_reader
13305 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
13309 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
13311 SELF_CHECK (memaddr % 4 == 0);
13312 SELF_CHECK (memaddr / 4 < m_insns.size ());
13314 return m_insns[memaddr / 4];
13318 const gdb::array_view<const uint32_t> m_insns;
13322 arm_analyze_prologue_test ()
13324 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
13326 struct gdbarch_info info;
13327 gdbarch_info_init (&info);
13328 info.byte_order = endianness;
13329 info.byte_order_for_code = endianness;
13330 info.bfd_arch_info = bfd_scan_arch ("arm");
13332 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13334 SELF_CHECK (gdbarch != NULL);
13336 /* The "sub" instruction contains an immediate value rotate count of 0,
13337    which resulted in a 32-bit shift of a 32-bit value, caught by UBSan.  */
13339 const uint32_t insns[] = {
13340 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
13341 0xe1a05000, /* mov r5, r0 */
13342 0xe5903020, /* ldr r3, [r0, #32] */
13343 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
13346 test_arm_instruction_reader mem_reader (insns);
13347 arm_prologue_cache cache;
13348 cache.saved_regs = trad_frame_alloc_saved_regs (gdbarch);
13350 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
13354 } // namespace selftests
13355 #endif /* GDB_SELF_TEST */
13357 /* Cleans up local record registers and memory allocations. */
13360 deallocate_reg_mem (insn_decode_record *record)
13362 xfree (record->arm_regs);
13363 xfree (record->arm_mems);
13367 /* Parse the current instruction and record the values of the registers and
13368    memory locations that the instruction will change onto "record_arch_list".
13369    Return -1 if something goes wrong.  */
13372 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13373 CORE_ADDR insn_addr)
13376 uint32_t no_of_rec = 0;
13377 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success.  */
13378 ULONGEST t_bit = 0, insn_id = 0;
13380 ULONGEST u_regval = 0;
13382 insn_decode_record arm_record;
13384 memset (&arm_record, 0, sizeof (insn_decode_record));
13385 arm_record.regcache = regcache;
13386 arm_record.this_addr = insn_addr;
13387 arm_record.gdbarch = gdbarch;
13390 if (record_debug > 1)
13392 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13394 paddress (gdbarch, arm_record.this_addr));
13397 instruction_reader reader;
13398 if (extract_arm_insn (reader, &arm_record, 2))
13402 printf_unfiltered (_("Process record: error reading memory at "
13403 "addr %s len = %d.\n"),
13404 paddress (arm_record.gdbarch,
13405 arm_record.this_addr), 2);
13410 /* Check whether the instruction is a Thumb one or an ARM one.  */
13412 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13413 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13416 if (!(u_regval & t_bit))
13418 /* We are decoding arm insn. */
13419 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13423 insn_id = bits (arm_record.arm_insn, 11, 15);
13424 /* Is it a 32-bit Thumb-2 instruction?  Bits 11-15 of the first halfword are 0x1D, 0x1E or 0x1F (0b11101, 0b11110, 0b11111) only for 32-bit encodings.  */
13425 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13427 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13428 THUMB2_INSN_SIZE_BYTES);
13432 /* We are decoding thumb insn. */
13433 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13434 THUMB_INSN_SIZE_BYTES);
13440 /* Record registers. */
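/* Every instruction changes the PC, so always record it.  */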
13441 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13442 if (arm_record.arm_regs)
13444 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13446 if (record_full_arch_list_add_reg
13447 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
13451 /* Record memories. */
13452 if (arm_record.arm_mems)
13454 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13456 if (record_full_arch_list_add_mem
13457 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13458 arm_record.arm_mems[no_of_rec].len))
13463 if (record_full_arch_list_add_end ())
13468 deallocate_reg_mem (&arm_record);
13473 /* See arm-tdep.h. */
13475 const target_desc *
13476 arm_read_description (arm_fp_type fp_type)
13478 struct target_desc *tdesc = tdesc_arm_list[fp_type];
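/* Target descriptions are created lazily, one per FP type, and cached in
   tdesc_arm_list for reuse.  */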
13480 if (tdesc == nullptr)
13482 tdesc = arm_create_target_description (fp_type);
13483 tdesc_arm_list[fp_type] = tdesc;
13489 /* See arm-tdep.h. */
13491 const target_desc *
13492 arm_read_mprofile_description (arm_m_profile_type m_type)
13494 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
13496 if (tdesc == nullptr)
13498 tdesc = arm_create_mprofile_target_description (m_type);
13499 tdesc_arm_mprofile_list[m_type] = tdesc;