1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2017 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as a Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
91 /* Per-objfile data used for mapping symbols. */
92 static const struct objfile_data *arm_objfile_data_key;
94 struct arm_mapping_symbol
99 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
100 DEF_VEC_O(arm_mapping_symbol_s);
102 struct arm_per_objfile
104 VEC(arm_mapping_symbol_s) **section_maps;
107 /* The list of available "set arm ..." and "show arm ..." commands. */
108 static struct cmd_list_element *setarmcmdlist = NULL;
109 static struct cmd_list_element *showarmcmdlist = NULL;
111 /* The type of floating-point to use. Keep this in sync with enum
112 arm_float_model, and the help string in _initialize_arm_tdep. */
113 static const char *const fp_model_strings[] =
123 /* A variable that can be configured by the user. */
124 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
125 static const char *current_fp_model = "auto";
127 /* The ABI to use. Keep this in sync with arm_abi_kind. */
128 static const char *const arm_abi_strings[] =
136 /* A variable that can be configured by the user. */
137 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
138 static const char *arm_abi_string = "auto";
140 /* The execution mode to assume. */
141 static const char *const arm_mode_strings[] =
149 static const char *arm_fallback_mode_string = "auto";
150 static const char *arm_force_mode_string = "auto";
152 /* The standard register names, and all the valid aliases for them. Note
153 that `fp', `sp' and `pc' are not added in this alias list, because they
154 have been added as builtin user registers in
155 std-regs.c:_initialize_frame_reg. */
160 } arm_register_aliases[] = {
161 /* Basic register numbers. */
178 /* Synonyms (argument and variable registers). */
191 /* Other platform-specific names for r9. */
197 /* Names used by GCC (not listed in the ARM EABI). */
199 /* A special name from the older ATPCS. */
203 static const char *const arm_register_names[] =
204 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
205 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
206 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
207 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
208 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
209 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
210 "fps", "cpsr" }; /* 24 25 */
212 /* Holds the current set of options to be passed to the disassembler. */
213 static char *arm_disassembler_options;
215 /* Valid register name styles. */
216 static const char **valid_disassembly_styles;
218 /* Disassembly style to use. Default to "std" register names. */
219 static const char *disassembly_style;
221 /* This is used to keep the bfd arch_info in sync with the disassembly style. */
223 static void set_disassembly_style_sfunc (char *, int,
224 struct cmd_list_element *);
225 static void show_disassembly_style_sfunc (struct ui_file *, int,
226 struct cmd_list_element *,
229 static void convert_from_extended (const struct floatformat *, const void *,
231 static void convert_to_extended (const struct floatformat *, void *,
234 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, gdb_byte *buf);
237 static void arm_neon_quad_write (struct gdbarch *gdbarch,
238 struct regcache *regcache,
239 int regnum, const gdb_byte *buf);
242 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
245 /* get_next_pcs operations. */
246 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
247 arm_get_next_pcs_read_memory_unsigned_integer,
248 arm_get_next_pcs_syscall_next_pc,
249 arm_get_next_pcs_addr_bits_remove,
250 arm_get_next_pcs_is_thumb,
254 struct arm_prologue_cache
256 /* The stack pointer at the time this frame was created; i.e. the
257 caller's stack pointer when this function was called. It is used
258 to identify this frame. */
261 /* The frame base for this frame is just prev_sp - frame size.
262 FRAMESIZE is the distance from the frame pointer to the
263 initial stack pointer. */
267 /* The register used to hold the frame pointer for this frame. */
270 /* Saved register offsets. */
271 struct trad_frame_saved_reg *saved_regs;
274 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
275 CORE_ADDR prologue_start,
276 CORE_ADDR prologue_end,
277 struct arm_prologue_cache *cache);
279 /* Architecture version for displaced stepping. This affects the behaviour of
280 certain instructions, and really should not be hard-wired. */
282 #define DISPLACED_STEPPING_ARCH_VERSION 5
284 /* Set to true if the 32-bit mode is in use. */
288 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
291 arm_psr_thumb_bit (struct gdbarch *gdbarch)
293 if (gdbarch_tdep (gdbarch)->is_m)
299 /* Determine if the processor is currently executing in Thumb mode. */
302 arm_is_thumb (struct regcache *regcache)
305 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
307 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
309 return (cpsr & t_bit) != 0;
312 /* Determine if FRAME is executing in Thumb mode. */
315 arm_frame_is_thumb (struct frame_info *frame)
318 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
320 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
321 directly (from a signal frame or dummy frame) or by interpreting
322 the saved LR (from a prologue or DWARF frame). So consult it and
323 trust the unwinders. */
324 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
326 return (cpsr & t_bit) != 0;
329 /* Callback for VEC_lower_bound. */
332 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
333 const struct arm_mapping_symbol *rhs)
335 return lhs->value < rhs->value;
338 /* Search for the mapping symbol covering MEMADDR. If one is found,
339 return its type. Otherwise, return 0. If START is non-NULL,
340 set *START to the location of the mapping symbol. */
343 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
345 struct obj_section *sec;
347 /* If there are mapping symbols, consult them. */
348 sec = find_pc_section (memaddr);
351 struct arm_per_objfile *data;
352 VEC(arm_mapping_symbol_s) *map;
353 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
357 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
358 arm_objfile_data_key);
361 map = data->section_maps[sec->the_bfd_section->index];
362 if (!VEC_empty (arm_mapping_symbol_s, map))
364 struct arm_mapping_symbol *map_sym;
366 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
367 arm_compare_mapping_symbols);
369 /* VEC_lower_bound finds the earliest ordered insertion
370 point. If the following symbol starts at this exact
371 address, we use that; otherwise, the preceding
372 mapping symbol covers this address. */
373 if (idx < VEC_length (arm_mapping_symbol_s, map))
375 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
376 if (map_sym->value == map_key.value)
379 *start = map_sym->value + obj_section_addr (sec);
380 return map_sym->type;
386 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
388 *start = map_sym->value + obj_section_addr (sec);
389 return map_sym->type;
398 /* Determine if the program counter specified in MEMADDR is in a Thumb
399 function. This function should be called for addresses unrelated to
400 any executing frame; otherwise, prefer arm_frame_is_thumb. */
403 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
405 struct bound_minimal_symbol sym;
407 struct displaced_step_closure* dsc
408 = get_displaced_step_closure_by_addr(memaddr);
410 /* If we are checking the mode of a displaced instruction in the copy
411 area, the mode should be determined from the instruction at the original address. */
415 fprintf_unfiltered (gdb_stdlog,
416 "displaced: check mode of %.8lx instead of %.8lx\n",
417 (unsigned long) dsc->insn_addr,
418 (unsigned long) memaddr);
419 memaddr = dsc->insn_addr;
422 /* If bit 0 of the address is set, assume this is a Thumb address. */
423 if (IS_THUMB_ADDR (memaddr))
426 /* If the user wants to override the symbol table, let them. */
427 if (strcmp (arm_force_mode_string, "arm") == 0)
429 if (strcmp (arm_force_mode_string, "thumb") == 0)
432 /* ARM v6-M and v7-M are always in Thumb mode. */
433 if (gdbarch_tdep (gdbarch)->is_m)
436 /* If there are mapping symbols, consult them. */
437 type = arm_find_mapping_symbol (memaddr, NULL);
441 /* Thumb functions have a "special" bit set in minimal symbols. */
442 sym = lookup_minimal_symbol_by_pc (memaddr);
444 return (MSYMBOL_IS_SPECIAL (sym.minsym));
446 /* If the user wants to override the fallback mode, let them. */
447 if (strcmp (arm_fallback_mode_string, "arm") == 0)
449 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
452 /* If we couldn't find any symbol, but we're talking to a running
453 target, then trust the current value of $cpsr. This lets
454 "display/i $pc" always show the correct mode (though if there is
455 a symbol table we will not reach here, so it still may not be
456 displayed in the mode in which it will be executed). */
457 if (target_has_registers)
458 return arm_frame_is_thumb (get_current_frame ());
460 /* Otherwise we're out of luck; we assume ARM. */
464 /* Determine if the address specified equals any of these magic return
465 values, called EXC_RETURN, defined by the ARM v6-M and v7-M architectures.
468 From ARMv6-M Reference Manual B1.5.8
469 Table B1-5 Exception return behavior
471 EXC_RETURN Return To Return Stack
472 0xFFFFFFF1 Handler mode Main
473 0xFFFFFFF9 Thread mode Main
474 0xFFFFFFFD Thread mode Process
476 From ARMv7-M Reference Manual B1.5.8
477 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
479 EXC_RETURN Return To Return Stack
480 0xFFFFFFF1 Handler mode Main
481 0xFFFFFFF9 Thread mode Main
482 0xFFFFFFFD Thread mode Process
484 Table B1-9 EXC_RETURN definition of exception return behavior, with FP
487 EXC_RETURN Return To Return Stack Frame Type
488 0xFFFFFFE1 Handler mode Main Extended
489 0xFFFFFFE9 Thread mode Main Extended
490 0xFFFFFFED Thread mode Process Extended
491 0xFFFFFFF1 Handler mode Main Basic
492 0xFFFFFFF9 Thread mode Main Basic
493 0xFFFFFFFD Thread mode Process Basic
495 For more details see "B1.5.8 Exception return behavior"
496 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
499 arm_m_addr_is_magic (CORE_ADDR addr)
503 /* Values from the tables in B1.5.8 giving the EXC_RETURN definitions
504 of the exception return behavior. */
511 /* Address is magic. */
515 /* Address is not magic. */
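/* The following is an illustrative, self-contained sketch (not used by
   GDB; arm_m_addr_is_magic above is the real implementation): it checks an
   address against the six EXC_RETURN values listed in the tables above.
   The function name is hypothetical.  */

static int
example_is_exc_return (unsigned int addr)
{
  switch (addr)
    {
    case 0xffffffe1:	/* Handler mode, main stack, extended frame.  */
    case 0xffffffe9:	/* Thread mode, main stack, extended frame.  */
    case 0xffffffed:	/* Thread mode, process stack, extended frame.  */
    case 0xfffffff1:	/* Handler mode, main stack, basic frame.  */
    case 0xfffffff9:	/* Thread mode, main stack, basic frame.  */
    case 0xfffffffd:	/* Thread mode, process stack, basic frame.  */
      return 1;
    default:
      return 0;
    }
}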
520 /* Remove useless bits from addresses in a running program. */
522 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
524 /* On M-profile devices, do not strip the low bit from EXC_RETURN
525 (the magic exception return address). */
526 if (gdbarch_tdep (gdbarch)->is_m
527 && arm_m_addr_is_magic (val))
531 return UNMAKE_THUMB_ADDR (val);
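  /* The mask below handles the legacy 26-bit address mode: there the
     program counter occupies bits 2..25 of r15, with the condition flags
     and mode bits stored in the remaining bits, so both the low two bits
     and the high six bits are stripped.  */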
533 return (val & 0x03fffffc);
536 /* Return 1 if PC is the start of a compiler helper function which
537 can be safely ignored during prologue skipping. IS_THUMB is true
538 if the function is known to be a Thumb function due to the way it is called. */
541 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
543 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
544 struct bound_minimal_symbol msym;
546 msym = lookup_minimal_symbol_by_pc (pc);
547 if (msym.minsym != NULL
548 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
549 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
551 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
553 /* The GNU linker's Thumb call stub to foo is named __foo_from_thumb. */
555 if (strstr (name, "_from_thumb") != NULL)
558 /* On soft-float targets, __truncdfsf2 is called to convert promoted
559 arguments to their argument types in non-prototyped functions. */
561 if (startswith (name, "__truncdfsf2"))
563 if (startswith (name, "__aeabi_d2f"))
566 /* Internal functions related to thread-local storage. */
567 if (startswith (name, "__tls_get_addr"))
569 if (startswith (name, "__aeabi_read_tp"))
574 /* If we run against a stripped glibc, we may be unable to identify
575 special functions by name. Check for one important case,
576 __aeabi_read_tp, by comparing the *code* against the default
577 implementation (this is hand-written ARM assembler in glibc). */
580 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
581 == 0xe3e00a0f /* mov r0, #0xffff0fff */
582 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
583 == 0xe240f01f) /* sub pc, r0, #31 */
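	  /* 0xffff0fff - 31 == 0xffff0fe0, the address of the Linux
	     __kuser_get_tls helper that glibc's hand-written
	     __aeabi_read_tp branches to.  */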
590 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1 is
591 the first 16 bits of the instruction, and INSN2 is the second 16 bits of the instruction. */
593 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
594 ((bits ((insn1), 0, 3) << 12) \
595 | (bits ((insn1), 10, 10) << 11) \
596 | (bits ((insn2), 12, 14) << 8) \
597 | bits ((insn2), 0, 7))
599 /* Extract the immediate from a movw/movt instruction of encoding A. INSN is
600 the 32-bit instruction. */
601 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
602 ((bits ((insn), 16, 19) << 12) \
603 | bits ((insn), 0, 11))
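/* Worked example (illustrative only; these helpers are not used elsewhere
   in this file, and the encodings were chosen purely for the example):
   decode the 16-bit immediate of "movw r0, #0x1234" in both encodings
   using the macros above.  */

static unsigned int
example_movw_imm_t (void)
{
  /* Thumb-2 halfwords for "movw r0, #0x1234": imm4=1, i=0, imm3=2, imm8=0x34.  */
  unsigned short insn1 = 0xf241, insn2 = 0x2034;

  return EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);	/* Yields 0x1234.  */
}

static unsigned int
example_movw_imm_a (void)
{
  /* ARM encoding for "movw r0, #0x1234": imm4=1, imm12=0x234.  */
  unsigned int insn = 0xe3010234;

  return EXTRACT_MOVW_MOVT_IMM_A (insn);	/* Yields 0x1234.  */
}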
605 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
608 thumb_expand_immediate (unsigned int imm)
610 unsigned int count = imm >> 7;
618 return (imm & 0xff) | ((imm & 0xff) << 16);
620 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
622 return (imm & 0xff) | ((imm & 0xff) << 8)
623 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
626 return (0x80 | (imm & 0x7f)) << (32 - count);
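/* Worked examples for thumb_expand_immediate (illustrative values): the
   12-bit input 0x155 has bits 11:8 equal to 0b0001, selecting the
   "00XY00XY" replication pattern, so it expands to 0x00550055; the input
   0x4ff has count == 9, so 0xff is rotated right by 9 bits, giving
   0x7f800000.  */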
629 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
630 epilogue, 0 otherwise. */
633 thumb_instruction_restores_sp (unsigned short insn)
635 return (insn == 0x46bd /* mov sp, r7 */
636 || (insn & 0xff80) == 0xb000 /* add sp, imm */
637 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
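/* For example (illustrative encodings): 0x46bd is "mov sp, r7", 0xb008 is
   "add sp, #32" (matched by the 0xb000 pattern), and 0xbd10 is
   "pop {r4, pc}" (matched by the 0xbc00 pattern).  */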
640 /* Analyze a Thumb prologue, looking for a recognizable stack frame
641 and frame pointer. Scan until we encounter a store that could
642 clobber the stack frame unexpectedly, or an unknown instruction.
643 Return the last address which is definitely safe to skip for an
644 initial breakpoint. */
647 thumb_analyze_prologue (struct gdbarch *gdbarch,
648 CORE_ADDR start, CORE_ADDR limit,
649 struct arm_prologue_cache *cache)
651 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
652 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
655 struct pv_area *stack;
656 struct cleanup *back_to;
658 CORE_ADDR unrecognized_pc = 0;
660 for (i = 0; i < 16; i++)
661 regs[i] = pv_register (i, 0);
662 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
663 back_to = make_cleanup_free_pv_area (stack);
665 while (start < limit)
669 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
671 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
676 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
679 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
680 whether to save LR (R14). */
681 mask = (insn & 0xff) | ((insn & 0x100) << 6);
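	  /* For example, "push {r4-r7, lr}" encodes as 0xb5f0, giving
	     mask == 0x40f0: bits 4-7 for r4-r7 plus bit 14 for LR.  */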
683 /* Calculate offsets of saved R0-R7 and LR. */
684 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
685 if (mask & (1 << regno))
687 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
689 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
692 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
694 offset = (insn & 0x7f) << 2; /* get scaled offset */
695 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
698 else if (thumb_instruction_restores_sp (insn))
700 /* Don't scan past the epilogue. */
703 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
704 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
706 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
707 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
708 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
710 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
711 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
712 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
714 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
715 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
716 && pv_is_constant (regs[bits (insn, 3, 5)]))
717 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
718 regs[bits (insn, 6, 8)]);
719 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
720 && pv_is_constant (regs[bits (insn, 3, 6)]))
722 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
723 int rm = bits (insn, 3, 6);
724 regs[rd] = pv_add (regs[rd], regs[rm]);
726 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
728 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
729 int src_reg = (insn & 0x78) >> 3;
730 regs[dst_reg] = regs[src_reg];
732 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
734 /* Handle stores to the stack. Normally pushes are used,
735 but with GCC -mtpcs-frame, there may be other stores
736 in the prologue to create the frame. */
737 int regno = (insn >> 8) & 0x7;
740 offset = (insn & 0xff) << 2;
741 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
743 if (pv_area_store_would_trash (stack, addr))
746 pv_area_store (stack, addr, 4, regs[regno]);
748 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
750 int rd = bits (insn, 0, 2);
751 int rn = bits (insn, 3, 5);
754 offset = bits (insn, 6, 10) << 2;
755 addr = pv_add_constant (regs[rn], offset);
757 if (pv_area_store_would_trash (stack, addr))
760 pv_area_store (stack, addr, 4, regs[rd]);
762 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
763 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 /* Ignore stores of argument registers to the stack. */
767 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 /* Ignore block loads from the stack, potentially copying
770 parameters from memory. */
772 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
773 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
774 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
775 /* Similarly ignore single loads from the stack. */
777 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
778 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
779 /* Skip register copies, i.e. saves to another register
780 instead of the stack. */
782 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
783 /* Recognize constant loads; even with small stacks these are necessary on Thumb. */
785 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
786 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
788 /* Constant pool loads, for the same reason. */
789 unsigned int constant;
792 loc = start + 4 + bits (insn, 0, 7) * 4;
793 constant = read_memory_unsigned_integer (loc, 4, byte_order);
794 regs[bits (insn, 8, 10)] = pv_constant (constant);
796 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
798 unsigned short inst2;
800 inst2 = read_code_unsigned_integer (start + 2, 2,
801 byte_order_for_code);
803 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
805 /* BL, BLX. Allow some special function calls when
806 skipping the prologue; GCC generates these before
807 storing arguments to the stack. */
809 int j1, j2, imm1, imm2;
811 imm1 = sbits (insn, 0, 10);
812 imm2 = bits (inst2, 0, 10);
813 j1 = bit (inst2, 13);
814 j2 = bit (inst2, 11);
816 offset = ((imm1 << 12) + (imm2 << 1));
817 offset ^= ((!j2) << 22) | ((!j1) << 23);
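	      /* IMM1 was sign-extended, so bits 22 and 23 of OFFSET currently
		 hold copies of the sign bit S; XORing them with !J2 and !J1
		 turns them into I2 = NOT(J2 EOR S) and I1 = NOT(J1 EOR S),
		 as the BL/BLX encoding specifies.  */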
819 nextpc = start + 4 + offset;
820 /* For BLX make sure to clear the low bits. */
821 if (bit (inst2, 12) == 0)
822 nextpc = nextpc & 0xfffffffc;
824 if (!skip_prologue_function (gdbarch, nextpc,
825 bit (inst2, 12) != 0))
829 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
831 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
833 pv_t addr = regs[bits (insn, 0, 3)];
836 if (pv_area_store_would_trash (stack, addr))
839 /* Calculate offsets of saved registers. */
840 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
841 if (inst2 & (1 << regno))
843 addr = pv_add_constant (addr, -4);
844 pv_area_store (stack, addr, 4, regs[regno]);
848 regs[bits (insn, 0, 3)] = addr;
851 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
853 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
855 int regno1 = bits (inst2, 12, 15);
856 int regno2 = bits (inst2, 8, 11);
857 pv_t addr = regs[bits (insn, 0, 3)];
859 offset = inst2 & 0xff;
861 addr = pv_add_constant (addr, offset);
863 addr = pv_add_constant (addr, -offset);
865 if (pv_area_store_would_trash (stack, addr))
868 pv_area_store (stack, addr, 4, regs[regno1]);
869 pv_area_store (stack, pv_add_constant (addr, 4),
873 regs[bits (insn, 0, 3)] = addr;
876 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
877 && (inst2 & 0x0c00) == 0x0c00
878 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
880 int regno = bits (inst2, 12, 15);
881 pv_t addr = regs[bits (insn, 0, 3)];
883 offset = inst2 & 0xff;
885 addr = pv_add_constant (addr, offset);
887 addr = pv_add_constant (addr, -offset);
889 if (pv_area_store_would_trash (stack, addr))
892 pv_area_store (stack, addr, 4, regs[regno]);
895 regs[bits (insn, 0, 3)] = addr;
898 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
899 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
901 int regno = bits (inst2, 12, 15);
904 offset = inst2 & 0xfff;
905 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
907 if (pv_area_store_would_trash (stack, addr))
910 pv_area_store (stack, addr, 4, regs[regno]);
913 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
914 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
915 /* Ignore stores of argument registers to the stack. */
918 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
919 && (inst2 & 0x0d00) == 0x0c00
920 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
921 /* Ignore stores of argument registers to the stack. */
924 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
926 && (inst2 & 0x8000) == 0x0000
927 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
928 /* Ignore block loads from the stack, potentially copying
929 parameters from memory. */
932 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
934 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
935 /* Similarly ignore dual loads from the stack. */
938 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
939 && (inst2 & 0x0d00) == 0x0c00
940 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
941 /* Similarly ignore single loads from the stack. */
944 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
945 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
946 /* Similarly ignore single loads from the stack. */
949 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
950 && (inst2 & 0x8000) == 0x0000)
952 unsigned int imm = ((bits (insn, 10, 10) << 11)
953 | (bits (inst2, 12, 14) << 8)
954 | bits (inst2, 0, 7));
956 regs[bits (inst2, 8, 11)]
957 = pv_add_constant (regs[bits (insn, 0, 3)],
958 thumb_expand_immediate (imm));
961 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
962 && (inst2 & 0x8000) == 0x0000)
964 unsigned int imm = ((bits (insn, 10, 10) << 11)
965 | (bits (inst2, 12, 14) << 8)
966 | bits (inst2, 0, 7));
968 regs[bits (inst2, 8, 11)]
969 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
972 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
973 && (inst2 & 0x8000) == 0x0000)
975 unsigned int imm = ((bits (insn, 10, 10) << 11)
976 | (bits (inst2, 12, 14) << 8)
977 | bits (inst2, 0, 7));
979 regs[bits (inst2, 8, 11)]
980 = pv_add_constant (regs[bits (insn, 0, 3)],
981 - (CORE_ADDR) thumb_expand_immediate (imm));
984 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
985 && (inst2 & 0x8000) == 0x0000)
987 unsigned int imm = ((bits (insn, 10, 10) << 11)
988 | (bits (inst2, 12, 14) << 8)
989 | bits (inst2, 0, 7));
991 regs[bits (inst2, 8, 11)]
992 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
995 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
997 unsigned int imm = ((bits (insn, 10, 10) << 11)
998 | (bits (inst2, 12, 14) << 8)
999 | bits (inst2, 0, 7));
1001 regs[bits (inst2, 8, 11)]
1002 = pv_constant (thumb_expand_immediate (imm));
1005 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1008 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1010 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1013 else if (insn == 0xea5f /* mov.w Rd,Rm */
1014 && (inst2 & 0xf0f0) == 0)
1016 int dst_reg = (inst2 & 0x0f00) >> 8;
1017 int src_reg = inst2 & 0xf;
1018 regs[dst_reg] = regs[src_reg];
1021 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1023 /* Constant pool loads. */
1024 unsigned int constant;
1027 offset = bits (inst2, 0, 11);
1029 loc = start + 4 + offset;
1031 loc = start + 4 - offset;
1033 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1034 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1037 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1039 /* Constant pool loads. */
1040 unsigned int constant;
1043 offset = bits (inst2, 0, 7) << 2;
1045 loc = start + 4 + offset;
1047 loc = start + 4 - offset;
1049 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1050 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1052 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1053 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1056 else if (thumb2_instruction_changes_pc (insn, inst2))
1058 /* Don't scan past anything that might change control flow. */
1063 /* The optimizer might shove anything into the prologue,
1064 so we just skip what we don't recognize. */
1065 unrecognized_pc = start;
1070 else if (thumb_instruction_changes_pc (insn))
1072 /* Don't scan past anything that might change control flow. */
1077 /* The optimizer might shove anything into the prologue,
1078 so we just skip what we don't recognize. */
1079 unrecognized_pc = start;
1086 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1087 paddress (gdbarch, start));
1089 if (unrecognized_pc == 0)
1090 unrecognized_pc = start;
1094 do_cleanups (back_to);
1095 return unrecognized_pc;
1098 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1100 /* Frame pointer is fp. Frame size is constant. */
1101 cache->framereg = ARM_FP_REGNUM;
1102 cache->framesize = -regs[ARM_FP_REGNUM].k;
1104 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1106 /* Frame pointer is r7. Frame size is constant. */
1107 cache->framereg = THUMB_FP_REGNUM;
1108 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1112 /* Try the stack pointer... this is a bit desperate. */
1113 cache->framereg = ARM_SP_REGNUM;
1114 cache->framesize = -regs[ARM_SP_REGNUM].k;
1117 for (i = 0; i < 16; i++)
1118 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1119 cache->saved_regs[i].addr = offset;
1121 do_cleanups (back_to);
1122 return unrecognized_pc;
1126 /* Try to analyze the instructions starting from PC, which load the symbol
1127 __stack_chk_guard. Return the address of the instruction after this
1128 symbol is loaded, set the destination register number in *DESTREG, and
1129 set the size in bytes of the loading instructions in *OFFSET. Return 0 if the instructions are not recognized. */
1133 arm_analyze_load_stack_chk_guard (CORE_ADDR pc, struct gdbarch *gdbarch,
1134 unsigned int *destreg, int *offset)
1136 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1137 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1138 unsigned int low, high, address;
1143 unsigned short insn1
1144 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1146 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1148 *destreg = bits (insn1, 8, 10);
1150 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1151 address = read_memory_unsigned_integer (address, 4,
1152 byte_order_for_code);
1154 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1156 unsigned short insn2
1157 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1159 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1162 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1164 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1166 /* movt Rd, #const */
1167 if ((insn1 & 0xfbc0) == 0xf2c0)
1169 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1170 *destreg = bits (insn2, 8, 11);
1172 address = (high << 16 | low);
1179 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1181 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1183 address = bits (insn, 0, 11) + pc + 8;
1184 address = read_memory_unsigned_integer (address, 4,
1185 byte_order_for_code);
1187 *destreg = bits (insn, 12, 15);
1190 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1192 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1195 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1197 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1199 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1200 *destreg = bits (insn, 12, 15);
1202 address = (high << 16 | low);
1210 /* Try to skip a sequence of instructions used for the stack protector. If PC
1211 points to the first instruction of this sequence, return the address of the
1212 first instruction after this sequence; otherwise, return the original PC.
1214 On ARM, this sequence of instructions is mainly composed of three steps:
1215 Step 1: load symbol __stack_chk_guard,
1216 Step 2: load from address of __stack_chk_guard,
1217 Step 3: store it to somewhere else.
1219 Usually, the instructions in steps 2 and 3 are the same across ARM
1220 architectures: step 2 is a single instruction 'ldr Rx, [Rn, #0]', and
1221 step 3 is a single instruction 'str Rx, [r7, #immd]'. However, the
1222 instructions in step 1 vary between ARM architectures. On ARMv7,
1225 movw Rn, #:lower16:__stack_chk_guard
1226 movt Rn, #:upper16:__stack_chk_guard
1233 .word __stack_chk_guard
1235 Since ldr/str are very common instructions, we can't use them on their own as the
1236 'fingerprint' or 'signature' of the stack protector sequence. Here we choose the
1237 sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1238 stripped, as the 'fingerprint' of a stack protector code sequence. */
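/* For illustration, a typical ARMv7 Thumb-2 instance of the sequence looks
   like this (the register choice and frame offset are arbitrary examples):

	movw	r3, #:lower16:__stack_chk_guard
	movt	r3, #:upper16:__stack_chk_guard
	ldr	r3, [r3, #0]
	str	r3, [r7, #12]  */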
1241 arm_skip_stack_protector (CORE_ADDR pc, struct gdbarch *gdbarch)
1243 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1244 unsigned int basereg;
1245 struct bound_minimal_symbol stack_chk_guard;
1247 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1250 /* Try to parse the instructions in Step 1. */
1251 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1256 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1257 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1258 Otherwise, this sequence cannot be for the stack protector. */
1259 if (stack_chk_guard.minsym == NULL
1260 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1265 unsigned int destreg;
1267 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1269 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1270 if ((insn & 0xf800) != 0x6800)
1272 if (bits (insn, 3, 5) != basereg)
1274 destreg = bits (insn, 0, 2);
1276 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1277 byte_order_for_code);
1278 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1279 if ((insn & 0xf800) != 0x6000)
1281 if (destreg != bits (insn, 0, 2))
1286 unsigned int destreg;
1288 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1290 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1291 if ((insn & 0x0e500000) != 0x04100000)
1293 if (bits (insn, 16, 19) != basereg)
1295 destreg = bits (insn, 12, 15);
1296 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1297 insn = read_code_unsigned_integer (pc + offset + 4,
1298 4, byte_order_for_code);
1299 if ((insn & 0x0e500000) != 0x04000000)
1301 if (bits (insn, 12, 15) != destreg)
1304 /* The total size of the two ldr/str instructions is 4 on Thumb-2, and 8 on ARM. */
if (is_thumb)
1307 return pc + offset + 4;
else
1309 return pc + offset + 8;
1312 /* Advance the PC across any function entry prologue instructions to
1313 reach some "real" code.
1315 The APCS (ARM Procedure Call Standard) defines the following
1319 [stmfd sp!, {a1,a2,a3,a4}]
1320 stmfd sp!, {...,fp,ip,lr,pc}
1321 [stfe f7, [sp, #-12]!]
1322 [stfe f6, [sp, #-12]!]
1323 [stfe f5, [sp, #-12]!]
1324 [stfe f4, [sp, #-12]!]
1325 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1328 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1330 CORE_ADDR func_addr, limit_pc;
1332 /* See if we can determine the end of the prologue via the symbol table.
1333 If so, then return either PC, or the PC after the prologue, whichever is greater. */
1335 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1337 CORE_ADDR post_prologue_pc
1338 = skip_prologue_using_sal (gdbarch, func_addr);
1339 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1341 if (post_prologue_pc)
1343 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1346 /* GCC always emits a line note before the prologue and another
1347 one after, even if the two are at the same address or on the
1348 same line. Take advantage of this so that we do not need to
1349 know every instruction that might appear in the prologue. We
1350 will have producer information for most binaries; if it is
1351 missing (e.g. for -gstabs), assume the GNU tools. */
1352 if (post_prologue_pc
1354 || COMPUNIT_PRODUCER (cust) == NULL
1355 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1356 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1357 return post_prologue_pc;
1359 if (post_prologue_pc != 0)
1361 CORE_ADDR analyzed_limit;
1363 /* For non-GCC compilers, make sure the entire line is an
1364 acceptable prologue; GDB will round this function's
1365 return value up to the end of the following line so we
1366 can not skip just part of a line (and we do not want to).
1368 RealView does not treat the prologue specially, but does
1369 associate prologue code with the opening brace; so this
1370 lets us skip the first line if we think it is the opening brace. */
1372 if (arm_pc_is_thumb (gdbarch, func_addr))
1373 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1374 post_prologue_pc, NULL);
1376 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1377 post_prologue_pc, NULL);
1379 if (analyzed_limit != post_prologue_pc)
1382 return post_prologue_pc;
1386 /* Can't determine prologue from the symbol table, need to examine instructions. */
1389 /* Find an upper limit on the function prologue using the debug
1390 information. If the debug information could not be used to provide
1391 that bound, then use an arbitrary large number as the upper bound. */
1392 /* Like arm_scan_prologue, stop no later than pc + 64. */
1393 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1395 limit_pc = pc + 64; /* Magic. */
1398 /* Check if this is Thumb code. */
1399 if (arm_pc_is_thumb (gdbarch, pc))
1400 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1402 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1406 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1407 This function decodes a Thumb function prologue to determine:
1408 1) the size of the stack frame
1409 2) which registers are saved on it
1410 3) the offsets of saved regs
1411 4) the offset from the stack pointer to the frame pointer
1413 A typical Thumb function prologue would create this stack frame
1414 (offsets relative to FP)
1415 old SP -> 24 stack parameters
1418 R7 -> 0 local variables (16 bytes)
1419 SP -> -12 additional stack space (12 bytes)
1420 The frame size would thus be 36 bytes, and the frame offset would be
1421 12 bytes. The frame register is R7.
1423 The comments for thumb_skip_prolog() describe the algorithm we use
1424 to detect the end of the prologue. */
1428 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1429 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1431 CORE_ADDR prologue_start;
1432 CORE_ADDR prologue_end;
1434 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1437 /* See comment in arm_scan_prologue for an explanation of this heuristic. */
1439 if (prologue_end > prologue_start + 64)
1441 prologue_end = prologue_start + 64;
1445 /* We're in the boondocks: we have no idea where the start of the function is. */
1449 prologue_end = std::min (prologue_end, prev_pc);
1451 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1454 /* Return 1 if the ARM instruction INSN restores SP in the epilogue, 0 otherwise. */
1458 arm_instruction_restores_sp (unsigned int insn)
1460 if (bits (insn, 28, 31) != INST_NV)
1462 if ((insn & 0x0df0f000) == 0x0080d000
1463 /* ADD SP (register or immediate). */
1464 || (insn & 0x0df0f000) == 0x0040d000
1465 /* SUB SP (register or immediate). */
1466 || (insn & 0x0ffffff0) == 0x01a0d000
1468 || (insn & 0x0fff0000) == 0x08bd0000
1470 || (insn & 0x0fff0000) == 0x049d0000)
1471 /* POP of a single register. */
1478 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1479 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1480 fill it in. Return the first address not recognized as a prologue instruction.
1483 We recognize all the instructions typically found in ARM prologues,
1484 plus harmless instructions which can be skipped (either for analysis
1485 purposes, or a more restrictive set that can be skipped when finding
1486 the end of the prologue). */
1489 arm_analyze_prologue (struct gdbarch *gdbarch,
1490 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1491 struct arm_prologue_cache *cache)
1493 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1495 CORE_ADDR offset, current_pc;
1496 pv_t regs[ARM_FPS_REGNUM];
1497 struct pv_area *stack;
1498 struct cleanup *back_to;
1499 CORE_ADDR unrecognized_pc = 0;
1501 /* Search the prologue looking for instructions that set up the
1502 frame pointer, adjust the stack pointer, and save registers.
1504 Be careful, however: if it doesn't look like a prologue,
1505 don't try to scan it. If, for instance, a frameless function
1506 begins with stmfd sp!, then we will tell ourselves there is
1507 a frame, which will confuse stack traceback, as well as "finish"
1508 and other operations that rely on a knowledge of the stack frame. */
1511 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1512 regs[regno] = pv_register (regno, 0);
1513 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1514 back_to = make_cleanup_free_pv_area (stack);
1516 for (current_pc = prologue_start;
1517 current_pc < prologue_end;
1521 = read_code_unsigned_integer (current_pc, 4, byte_order_for_code);
1523 if (insn == 0xe1a0c00d) /* mov ip, sp */
1525 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1528 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1529 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1531 unsigned imm = insn & 0xff; /* immediate value */
1532 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1533 int rd = bits (insn, 12, 15);
1534 imm = (imm >> rot) | (imm << (32 - rot));
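	  /* This is the standard ARM "modified immediate": an 8-bit value
	     rotated right by twice the 4-bit rotate field (hence the >> 7
	     above rather than >> 8).  For example, imm8 = 1 with rotate
	     field 11 yields 1 ror 22 = 0x400.  */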
1535 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1538 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1539 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1541 unsigned imm = insn & 0xff; /* immediate value */
1542 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1543 int rd = bits (insn, 12, 15);
1544 imm = (imm >> rot) | (imm << (32 - rot));
1545 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1548 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1551 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1553 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1554 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1555 regs[bits (insn, 12, 15)]);
1558 else if ((insn & 0xffff0000) == 0xe92d0000)
1559 /* stmfd sp!, {..., fp, ip, lr, pc}, or
1561 stmfd sp!, {a1, a2, a3, a4} */
1563 int mask = insn & 0xffff;
1565 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1568 /* Calculate offsets of saved registers. */
1569 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1570 if (mask & (1 << regno))
1573 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1574 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1577 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1578 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1579 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1581 /* No need to add this to saved_regs -- it's just an arg reg. */
1584 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1585 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1586 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1588 /* No need to add this to saved_regs -- it's just an arg reg. */
1591 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1593 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1595 /* No need to add this to saved_regs -- it's just arg regs. */
1598 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1600 unsigned imm = insn & 0xff; /* immediate value */
1601 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1602 imm = (imm >> rot) | (imm << (32 - rot));
1603 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1605 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1607 unsigned imm = insn & 0xff; /* immediate value */
1608 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1609 imm = (imm >> rot) | (imm << (32 - rot));
1610 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1612 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1614 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1616 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1619 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1620 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1621 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1623 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1625 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1627 int n_saved_fp_regs;
1628 unsigned int fp_start_reg, fp_bound_reg;
1630 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1633 if ((insn & 0x800) == 0x800) /* N0 is set */
1635 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1636 n_saved_fp_regs = 3;
1638 n_saved_fp_regs = 1;
1642 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1643 n_saved_fp_regs = 2;
1645 n_saved_fp_regs = 4;
1648 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1649 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1650 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1652 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1653 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1654 regs[fp_start_reg++]);
1657 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1659 /* Allow some special function calls when skipping the
1660 prologue; GCC generates these before storing arguments to
1662 CORE_ADDR dest = BranchDest (current_pc, insn);
1664 if (skip_prologue_function (gdbarch, dest, 0))
1669 else if ((insn & 0xf0000000) != 0xe0000000)
1670 break; /* Condition not true, exit early. */
1671 else if (arm_instruction_changes_pc (insn))
1672 /* Don't scan past anything that might change control flow. */
1674 else if (arm_instruction_restores_sp (insn))
1676 /* Don't scan past the epilogue. */
1679 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1680 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1681 /* Ignore block loads from the stack, potentially copying
1682 parameters from memory. */
1684 else if ((insn & 0xfc500000) == 0xe4100000
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1686 /* Similarly ignore single loads from the stack. */
1688 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1689 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1690 register instead of the stack. */
1694 /* The optimizer might shove anything into the prologue. If
1695 we build up the cache (cache != NULL) from scanning the prologue,
1696 we just skip what we don't recognize and scan further to
1697 make the cache as complete as possible. However, if we are skipping
1698 the prologue, we'll stop immediately on an unrecognized instruction. */
1700 unrecognized_pc = current_pc;
1708 if (unrecognized_pc == 0)
1709 unrecognized_pc = current_pc;
1713 int framereg, framesize;
1715 /* The frame size is just the distance from the frame register
1716 to the original stack pointer. */
1717 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1719 /* Frame pointer is fp. */
1720 framereg = ARM_FP_REGNUM;
1721 framesize = -regs[ARM_FP_REGNUM].k;
1725 /* Try the stack pointer... this is a bit desperate. */
1726 framereg = ARM_SP_REGNUM;
1727 framesize = -regs[ARM_SP_REGNUM].k;
1730 cache->framereg = framereg;
1731 cache->framesize = framesize;
1733 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1734 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1735 cache->saved_regs[regno].addr = offset;
1739 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1740 paddress (gdbarch, unrecognized_pc));
1742 do_cleanups (back_to);
1743 return unrecognized_pc;
1747 arm_scan_prologue (struct frame_info *this_frame,
1748 struct arm_prologue_cache *cache)
1750 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1751 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1752 CORE_ADDR prologue_start, prologue_end;
1753 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1754 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1756 /* Assume there is no frame until proven otherwise. */
1757 cache->framereg = ARM_SP_REGNUM;
1758 cache->framesize = 0;
1760 /* Check for Thumb prologue. */
1761 if (arm_frame_is_thumb (this_frame))
1763 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1767 /* Find the function prologue. If we can't find the function in
1768 the symbol table, peek in the stack frame to find the PC. */
1769 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1772 /* One way to find the end of the prologue (which works well
1773 for unoptimized code) is to do the following:
1775 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1778 prologue_end = prev_pc;
1779 else if (sal.end < prologue_end)
1780 prologue_end = sal.end;
1782 This mechanism is very accurate so long as the optimizer
1783 doesn't move any instructions from the function body into the
1784 prologue. If this happens, sal.end will be the last
1785 instruction in the first hunk of prologue code just before
1786 the first instruction that the scheduler has moved from
1787 the body to the prologue.
1789 In order to make sure that we scan all of the prologue
1790 instructions, we use a slightly less accurate mechanism which
1791 may scan more than necessary. To help compensate for this
1792 lack of accuracy, the prologue scanning loop below contains
1793 several clauses which'll cause the loop to terminate early if
1794 an implausible prologue instruction is encountered.
The expression prologue_start + 64
1800 is a suitable endpoint since it accounts for the largest
1801 possible prologue plus up to five instructions inserted by the scheduler. */
1804 if (prologue_end > prologue_start + 64)
1806 prologue_end = prologue_start + 64; /* See above. */
1811 /* We have no symbol information. Our only option is to assume this
1812 function has a standard stack frame and the normal frame register.
1813 Then, we can find the value of our frame pointer on entrance to
1814 the callee (or at the present moment if this is the innermost frame).
1815 The value stored there should be the address of the stmfd + 8. */
1816 CORE_ADDR frame_loc;
1817 ULONGEST return_value;
1819 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1820 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
1825 prologue_start = gdbarch_addr_bits_remove
1826 (gdbarch, return_value) - 8;
1827 prologue_end = prologue_start + 64; /* See above. */
1831 if (prev_pc < prologue_end)
1832 prologue_end = prev_pc;
1834 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1837 static struct arm_prologue_cache *
1838 arm_make_prologue_cache (struct frame_info *this_frame)
1841 struct arm_prologue_cache *cache;
1842 CORE_ADDR unwound_fp;
1844 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1845 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1847 arm_scan_prologue (this_frame, cache);
1849 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1850 if (unwound_fp == 0)
1853 cache->prev_sp = unwound_fp + cache->framesize;
1855 /* Calculate actual addresses of saved registers using offsets
1856 determined by arm_scan_prologue. */
1857 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1858 if (trad_frame_addr_p (cache->saved_regs, reg))
1859 cache->saved_regs[reg].addr += cache->prev_sp;
1864 /* Implementation of the stop_reason hook for arm_prologue frames. */
1866 static enum unwind_stop_reason
1867 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1870 struct arm_prologue_cache *cache;
1873 if (*this_cache == NULL)
1874 *this_cache = arm_make_prologue_cache (this_frame);
1875 cache = (struct arm_prologue_cache *) *this_cache;
1877 /* This is meant to halt the backtrace at "_start". */
1878 pc = get_frame_pc (this_frame);
1879 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1880 return UNWIND_OUTERMOST;
1882 /* If we've hit a wall, stop. */
1883 if (cache->prev_sp == 0)
1884 return UNWIND_OUTERMOST;
1886 return UNWIND_NO_REASON;
1889 /* Our frame ID for a normal frame is the current function's starting PC
1890 and the caller's SP when we were called. */
1893 arm_prologue_this_id (struct frame_info *this_frame,
1895 struct frame_id *this_id)
1897 struct arm_prologue_cache *cache;
1901 if (*this_cache == NULL)
1902 *this_cache = arm_make_prologue_cache (this_frame);
1903 cache = (struct arm_prologue_cache *) *this_cache;
1905 /* Use function start address as part of the frame ID. If we cannot
1906 identify the start address (due to missing symbol information),
1907 fall back to just using the current PC. */
1908 pc = get_frame_pc (this_frame);
1909 func = get_frame_func (this_frame);
1913 id = frame_id_build (cache->prev_sp, func);
1917 static struct value *
1918 arm_prologue_prev_register (struct frame_info *this_frame,
1922 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1923 struct arm_prologue_cache *cache;
1925 if (*this_cache == NULL)
1926 *this_cache = arm_make_prologue_cache (this_frame);
1927 cache = (struct arm_prologue_cache *) *this_cache;
1929 /* If we are asked to unwind the PC, then we need to return the LR
1930 instead. The prologue may save PC, but it will point into this
1931 frame's prologue, not the next frame's resume location. Also
1932 strip the saved T bit. A valid LR may have the low bit set, but
1933 a valid PC never does. */
1934 if (prev_regnum == ARM_PC_REGNUM)
1938 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1939 return frame_unwind_got_constant (this_frame, prev_regnum,
1940 arm_addr_bits_remove (gdbarch, lr));
1943 /* SP is generally not saved to the stack, but this frame is
1944 identified by the next frame's stack pointer at the time of the call.
1945 The value was already reconstructed into PREV_SP. */
1946 if (prev_regnum == ARM_SP_REGNUM)
1947 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1949 /* The CPSR may have been changed by the call instruction and by the
1950 called function. The only bit we can reconstruct is the T bit,
1951 by checking the low bit of LR as of the call. This is a reliable
1952 indicator of Thumb-ness except for some ARM v4T pre-interworking
1953 Thumb code, which could get away with a clear low bit as long as
1954 the called function did not use bx. Guess that all other
1955 bits are unchanged; the condition flags are presumably lost,
1956 but the processor status is likely valid. */
1957 if (prev_regnum == ARM_PS_REGNUM)
1960 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1962 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1963 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1964 if (IS_THUMB_ADDR (lr))
1968 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1971 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1975 struct frame_unwind arm_prologue_unwind = {
1977 arm_prologue_unwind_stop_reason,
1978 arm_prologue_this_id,
1979 arm_prologue_prev_register,
1981 default_frame_sniffer
1984 /* Maintain a list of ARM exception table entries per objfile, similar to the
1985 list of mapping symbols. We only cache entries for standard ARM-defined
1986 personality routines; the cache will contain only the frame unwinding
1987 instructions associated with the entry (not the descriptors). */
1989 static const struct objfile_data *arm_exidx_data_key;
1991 struct arm_exidx_entry
1996 typedef struct arm_exidx_entry arm_exidx_entry_s;
1997 DEF_VEC_O(arm_exidx_entry_s);
1999 struct arm_exidx_data
2001 VEC(arm_exidx_entry_s) **section_maps;
2005 arm_exidx_data_free (struct objfile *objfile, void *arg)
2007 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2010 for (i = 0; i < objfile->obfd->section_count; i++)
2011 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2015 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2016 const struct arm_exidx_entry *rhs)
2018 return lhs->addr < rhs->addr;
2021 static struct obj_section *
2022 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2024 struct obj_section *osect;
2026 ALL_OBJFILE_OSECTIONS (objfile, osect)
2027 if (bfd_get_section_flags (objfile->obfd,
2028 osect->the_bfd_section) & SEC_ALLOC)
2030 bfd_vma start, size;
2031 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2032 size = bfd_get_section_size (osect->the_bfd_section);
2034 if (start <= vma && vma < start + size)
2041 /* Parse contents of exception table and exception index sections
2042 of OBJFILE, and fill in the exception table entry cache.
2044 For each entry that refers to a standard ARM-defined personality
2045 routine, extract the frame unwinding instructions (from either
2046 the index or the table section). The unwinding instructions are normalized by:
2048 - extracting them from the rest of the table data
2049 - converting to host endianness
2050 - appending the implicit 0xb0 ("Finish") code
2052 The extracted and normalized instructions are stored for later
2053 retrieval by the arm_find_exidx_entry routine. */
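/* For illustration (the value is an example, not taken from any particular
   binary): a compact-model index entry whose second word is 0x80a8b0b0 has
   bit 31 set, personality routine index 0 in bits 24-27, and the unwind
   opcodes 0xa8 ("pop {r4, lr}"), 0xb0 ("finish"), 0xb0 packed into the
   remaining three bytes.  */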
2056 arm_exidx_new_objfile (struct objfile *objfile)
2058 struct cleanup *cleanups;
2059 struct arm_exidx_data *data;
2060 asection *exidx, *extab;
2061 bfd_vma exidx_vma = 0, extab_vma = 0;
2062 bfd_size_type exidx_size = 0, extab_size = 0;
2063 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2066 /* If we've already touched this file, do nothing. */
2067 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2069 cleanups = make_cleanup (null_cleanup, NULL);
2071 /* Read contents of exception table and index. */
2072 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2075 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2076 exidx_size = bfd_get_section_size (exidx);
2077 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2078 make_cleanup (xfree, exidx_data);
2080 if (!bfd_get_section_contents (objfile->obfd, exidx,
2081 exidx_data, 0, exidx_size))
2083 do_cleanups (cleanups);
2088 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2091 extab_vma = bfd_section_vma (objfile->obfd, extab);
2092 extab_size = bfd_get_section_size (extab);
2093 extab_data = (gdb_byte *) xmalloc (extab_size);
2094 make_cleanup (xfree, extab_data);
2096 if (!bfd_get_section_contents (objfile->obfd, extab,
2097 extab_data, 0, extab_size))
2099 do_cleanups (cleanups);
2104 /* Allocate exception table data structure. */
2105 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2106 set_objfile_data (objfile, arm_exidx_data_key, data);
2107 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2108 objfile->obfd->section_count,
2109 VEC(arm_exidx_entry_s) *);
2111 /* Fill in exception table. */
2112 for (i = 0; i < exidx_size / 8; i++)
2114 struct arm_exidx_entry new_exidx_entry;
2115 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2116 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2117 bfd_vma addr = 0, word = 0;
2118 int n_bytes = 0, n_words = 0;
2119 struct obj_section *sec;
2120 gdb_byte *entry = NULL;
2122 /* Extract address of start of function. */
2123 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2124 idx += exidx_vma + i * 8;
2126 /* Find section containing function and compute section offset. */
2127 sec = arm_obj_section_from_vma (objfile, idx);
2130 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2132 /* Determine address of exception table entry. */
2135 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2137 else if ((val & 0xff000000) == 0x80000000)
2139 /* Exception table entry embedded in .ARM.exidx
2140 -- must be short form. */
2144 else if (!(val & 0x80000000))
2146 /* Exception table entry in .ARM.extab. */
2147 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2148 addr += exidx_vma + i * 8 + 4;
2150 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2152 word = bfd_h_get_32 (objfile->obfd,
2153 extab_data + addr - extab_vma);
2156 if ((word & 0xff000000) == 0x80000000)
2161 else if ((word & 0xff000000) == 0x81000000
2162 || (word & 0xff000000) == 0x82000000)
2166 n_words = ((word >> 16) & 0xff);
2168 else if (!(word & 0x80000000))
2171 struct obj_section *pers_sec;
2172 int gnu_personality = 0;
2174 /* Custom personality routine. */
2175 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2176 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2178 /* Check whether we've got one of the variants of the
2179 GNU personality routines. */
2180 pers_sec = arm_obj_section_from_vma (objfile, pers);
2183 static const char *personality[] =
2185 "__gcc_personality_v0",
2186 "__gxx_personality_v0",
2187 "__gcj_personality_v0",
2188 "__gnu_objc_personality_v0",
2192 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2195 for (k = 0; personality[k]; k++)
2196 if (lookup_minimal_symbol_by_pc_name
2197 (pc, personality[k], objfile))
2199 gnu_personality = 1;
2204 /* If so, the next word contains a word count in the high
2205 byte, followed by the same unwind instructions as the
2206 pre-defined forms. */
2208 && addr + 4 <= extab_vma + extab_size)
2210 word = bfd_h_get_32 (objfile->obfd,
2211 extab_data + addr - extab_vma);
2214 n_words = ((word >> 24) & 0xff);
2220 /* Sanity check address. */
2222 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2223 n_words = n_bytes = 0;
2225 /* The unwind instructions reside in WORD (only the N_BYTES least
2226 significant bytes are valid), followed by N_WORDS words in the
2227 extab section starting at ADDR. */
2228 if (n_bytes || n_words)
2231 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2232 n_bytes + n_words * 4 + 1);
2235 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2239 word = bfd_h_get_32 (objfile->obfd,
2240 extab_data + addr - extab_vma);
2243 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2244 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2245 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2246 *p++ = (gdb_byte) (word & 0xff);
2249 /* Implied "Finish" to terminate the list. */
2253 /* Push the entry onto the vector.  Entries are guaranteed to
2254 appear in order of increasing addresses.  */

2255 new_exidx_entry.addr = idx;
2256 new_exidx_entry.entry = entry;
2257 VEC_safe_push (arm_exidx_entry_s,
2258 data->section_maps[sec->the_bfd_section->index],
2262 do_cleanups (cleanups);
2265 /* Search for the exception table entry covering MEMADDR. If one is found,
2266 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2267 set *START to the start of the region covered by this entry. */
2270 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2272 struct obj_section *sec;
2274 sec = find_pc_section (memaddr);
2277 struct arm_exidx_data *data;
2278 VEC(arm_exidx_entry_s) *map;
2279 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2282 data = ((struct arm_exidx_data *)
2283 objfile_data (sec->objfile, arm_exidx_data_key));
2286 map = data->section_maps[sec->the_bfd_section->index];
2287 if (!VEC_empty (arm_exidx_entry_s, map))
2289 struct arm_exidx_entry *map_sym;
2291 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2292 arm_compare_exidx_entries);
2294 /* VEC_lower_bound finds the earliest ordered insertion
2295 point. If the following symbol starts at this exact
2296 address, we use that; otherwise, the preceding
2297 exception table entry covers this address. */
2298 if (idx < VEC_length (arm_exidx_entry_s, map))
2300 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2301 if (map_sym->addr == map_key.addr)
2304 *start = map_sym->addr + obj_section_addr (sec);
2305 return map_sym->entry;
2311 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2313 *start = map_sym->addr + obj_section_addr (sec);
2314 return map_sym->entry;
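/* The per-section vectors searched above are sorted by address, so
   VEC_lower_bound amounts to a binary search for the first entry
   whose address is >= the section-relative MEMADDR; when that entry
   does not start exactly at MEMADDR, the preceding entry is the one
   covering it.  An illustrative standalone version of that search
   over a plain array (a sketch, not GDB code):  */

static int
example_exidx_lower_bound (const unsigned long *addrs, int len,
                           unsigned long key)
{
  /* Return the index of the first element that is >= KEY,
     or LEN if there is no such element.  */
  int lo = 0, hi = len;

  while (lo < hi)
    {
      int mid = lo + (hi - lo) / 2;

      if (addrs[mid] < key)
        lo = mid + 1;
      else
        hi = mid;
    }
  return lo;
}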
2323 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2324 instruction list from the ARM exception table entry ENTRY, allocate and
2325 return a prologue cache structure describing how to unwind this frame.
2327 Return NULL if the unwinding instruction list contains a "spare",
2328 "reserved" or "refuse to unwind" instruction as defined in section
2329 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2330 for the ARM Architecture" document. */
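/* As a concrete example of the byte code interpreted below: the
   sequence { 0x02, 0xb0 } means "vsp = vsp + (2 << 2) + 4" (add 12
   bytes) followed by "Finish".  A minimal standalone decoder for
   just the vsp-adjustment and Finish forms (an illustrative sketch;
   the interpreter below handles the full instruction set):  */

static unsigned long
example_decode_vsp_adjust (const unsigned char *insns, int len,
                           unsigned long vsp)
{
  int i;

  for (i = 0; i < len; i++)
    {
      unsigned char insn = insns[i];

      if ((insn & 0xc0) == 0x00)        /* vsp += (imm6 << 2) + 4.  */
        vsp += ((insn & 0x3f) << 2) + 4;
      else if ((insn & 0xc0) == 0x40)   /* vsp -= (imm6 << 2) + 4.  */
        vsp -= ((insn & 0x3f) << 2) + 4;
      else if (insn == 0xb0)            /* Finish.  */
        break;
      else
        break;                          /* Not handled in this sketch.  */
    }
  return vsp;
}

/* E.g. starting from vsp == 0x1000, the sequence { 0x02, 0xb0 }
   yields 0x100c.  */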
2332 static struct arm_prologue_cache *
2333 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2338 struct arm_prologue_cache *cache;
2339 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2340 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2346 /* Whenever we reload SP, we actually have to retrieve its
2347 actual value in the current frame. */
2350 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2352 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2353 vsp = get_frame_register_unsigned (this_frame, reg);
2357 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2358 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2364 /* Decode next unwind instruction. */
2367 if ((insn & 0xc0) == 0)
2369 int offset = insn & 0x3f;
2370 vsp += (offset << 2) + 4;
2372 else if ((insn & 0xc0) == 0x40)
2374 int offset = insn & 0x3f;
2375 vsp -= (offset << 2) + 4;
2377 else if ((insn & 0xf0) == 0x80)
2379 int mask = ((insn & 0xf) << 8) | *entry++;
2382 /* The special case of an all-zero mask identifies
2383 "Refuse to unwind". We return NULL to fall back
2384 to the prologue analyzer. */
2388 /* Pop registers r4..r15 under mask. */
2389 for (i = 0; i < 12; i++)
2390 if (mask & (1 << i))
2392 cache->saved_regs[4 + i].addr = vsp;
2396 /* Special-case popping SP -- we need to reload vsp. */
2397 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2400 else if ((insn & 0xf0) == 0x90)
2402 int reg = insn & 0xf;
2404 /* Reserved cases. */
2405 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2408 /* Set SP from another register and mark VSP for reload. */
2409 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2412 else if ((insn & 0xf0) == 0xa0)
2414 int count = insn & 0x7;
2415 int pop_lr = (insn & 0x8) != 0;
2418 /* Pop r4..r[4+count]. */
2419 for (i = 0; i <= count; i++)
2421 cache->saved_regs[4 + i].addr = vsp;
2425 /* If indicated by flag, pop LR as well. */
2428 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2432 else if (insn == 0xb0)
2434 /* We could only have updated PC by popping into it; if so, it
2435 will show up as an address.  Otherwise, copy LR into PC.  */
2436 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2437 cache->saved_regs[ARM_PC_REGNUM]
2438 = cache->saved_regs[ARM_LR_REGNUM];
2443 else if (insn == 0xb1)
2445 int mask = *entry++;
2448 /* All-zero mask and mask >= 16 is "spare". */
2449 if (mask == 0 || mask >= 16)
2452 /* Pop r0..r3 under mask. */
2453 for (i = 0; i < 4; i++)
2454 if (mask & (1 << i))
2456 cache->saved_regs[i].addr = vsp;
2460 else if (insn == 0xb2)
2462 ULONGEST offset = 0;
2467 offset |= (*entry & 0x7f) << shift;
2470 while (*entry++ & 0x80);
2472 vsp += 0x204 + (offset << 2);
2474 else if (insn == 0xb3)
2476 int start = *entry >> 4;
2477 int count = (*entry++) & 0xf;
2480 /* Only registers D0..D15 are valid here. */
2481 if (start + count >= 16)
2484 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2485 for (i = 0; i <= count; i++)
2487 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2491 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2494 else if ((insn & 0xf8) == 0xb8)
2496 int count = insn & 0x7;
2499 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2500 for (i = 0; i <= count; i++)
2502 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2506 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2509 else if (insn == 0xc6)
2511 int start = *entry >> 4;
2512 int count = (*entry++) & 0xf;
2515 /* Only registers WR0..WR15 are valid. */
2516 if (start + count >= 16)
2519 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2520 for (i = 0; i <= count; i++)
2522 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2526 else if (insn == 0xc7)
2528 int mask = *entry++;
2531 /* All-zero mask and mask >= 16 is "spare". */
2532 if (mask == 0 || mask >= 16)
2535 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2536 for (i = 0; i < 4; i++)
2537 if (mask & (1 << i))
2539 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2543 else if ((insn & 0xf8) == 0xc0)
2545 int count = insn & 0x7;
2548 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2549 for (i = 0; i <= count; i++)
2551 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2555 else if (insn == 0xc8)
2557 int start = *entry >> 4;
2558 int count = (*entry++) & 0xf;
2561 /* Only registers D0..D31 are valid. */
2562 if (start + count >= 16)
2565 /* Pop VFP double-precision registers
2566 D[16+start]..D[16+start+count]. */
2567 for (i = 0; i <= count; i++)
2569 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2573 else if (insn == 0xc9)
2575 int start = *entry >> 4;
2576 int count = (*entry++) & 0xf;
2579 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2580 for (i = 0; i <= count; i++)
2582 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2586 else if ((insn & 0xf8) == 0xd0)
2588 int count = insn & 0x7;
2591 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2592 for (i = 0; i <= count; i++)
2594 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2600 /* Everything else is "spare". */
2605 /* If we restore SP from a register, assume this was the frame register.
2606 Otherwise just fall back to SP as frame register. */
2607 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2608 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2610 cache->framereg = ARM_SP_REGNUM;
2612 /* Determine offset to previous frame. */
2614 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2616 /* We already got the previous SP. */
2617 cache->prev_sp = vsp;
2622 /* Unwinding via ARM exception table entries. Note that the sniffer
2623 already computes a filled-in prologue cache, which is then used
2624 with the same arm_prologue_this_id and arm_prologue_prev_register
2625 routines also used for prologue-parsing based unwinding. */
2628 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2629 struct frame_info *this_frame,
2630 void **this_prologue_cache)
2632 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2633 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2634 CORE_ADDR addr_in_block, exidx_region, func_start;
2635 struct arm_prologue_cache *cache;
2638 /* See if we have an ARM exception table entry covering this address. */
2639 addr_in_block = get_frame_address_in_block (this_frame);
2640 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2644 /* The ARM exception table does not describe unwind information
2645 for arbitrary PC values, but is guaranteed to be correct only
2646 at call sites. We have to decide here whether we want to use
2647 ARM exception table information for this frame, or fall back
2648 to using prologue parsing. (Note that if we have DWARF CFI,
2649 this sniffer isn't even called -- CFI is always preferred.)
2651 Before we make this decision, however, we check whether we
2652 actually have *symbol* information for the current frame.
2653 If not, prologue parsing would not work anyway, so we might
2654 as well use the exception table and hope for the best. */
2655 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2659 /* If the next frame is "normal", we are at a call site in this
2660 frame, so exception information is guaranteed to be valid. */
2661 if (get_next_frame (this_frame)
2662 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2665 /* We also assume exception information is valid if we're currently
2666 blocked in a system call. The system library is supposed to
2667 ensure this, so that e.g. pthread cancellation works. */
2668 if (arm_frame_is_thumb (this_frame))
2672 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 2,
2673 2, byte_order_for_code, &insn)
2674 && (insn & 0xff00) == 0xdf00 /* svc */)
2681 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame) - 4,
2682 4, byte_order_for_code, &insn)
2683 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2687 /* Bail out if we don't know that exception information is valid. */
2691 /* The ARM exception index does not mark the *end* of the region
2692 covered by the entry, and some functions will not have any entry.
2693 To correctly recognize the end of the covered region, the linker
2694 should have inserted dummy records with a CANTUNWIND marker.
2696 Unfortunately, current versions of GNU ld do not reliably do
2697 this, and thus we may have found an incorrect entry above.
2698 As a (temporary) sanity check, we only use the entry if it
2699 lies *within* the bounds of the function. Note that this check
2700 might reject perfectly valid entries that just happen to cover
2701 multiple functions; therefore this check ought to be removed
2702 once the linker is fixed. */
2703 if (func_start > exidx_region)
2707 /* Decode the list of unwinding instructions into a prologue cache.
2708 Note that this may fail due to e.g. a "refuse to unwind" code. */
2709 cache = arm_exidx_fill_cache (this_frame, entry);
2713 *this_prologue_cache = cache;
2717 struct frame_unwind arm_exidx_unwind = {
2719 default_frame_unwind_stop_reason,
2720 arm_prologue_this_id,
2721 arm_prologue_prev_register,
2723 arm_exidx_unwind_sniffer
2726 static struct arm_prologue_cache *
2727 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2729 struct arm_prologue_cache *cache;
2732 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2733 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2735 /* Still rely on the offset calculated from the prologue.  */
2736 arm_scan_prologue (this_frame, cache);
2738 /* Since we are in epilogue, the SP has been restored. */
2739 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2741 /* Calculate actual addresses of saved registers using offsets
2742 determined by arm_scan_prologue. */
2743 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2744 if (trad_frame_addr_p (cache->saved_regs, reg))
2745 cache->saved_regs[reg].addr += cache->prev_sp;
2750 /* Implementation of function hook 'this_id' in
2751 'struct frame_unwind' for epilogue unwinder. */
2754 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2756 struct frame_id *this_id)
2758 struct arm_prologue_cache *cache;
2761 if (*this_cache == NULL)
2762 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2763 cache = (struct arm_prologue_cache *) *this_cache;
2765 /* Use function start address as part of the frame ID. If we cannot
2766 identify the start address (due to missing symbol information),
2767 fall back to just using the current PC. */
2768 pc = get_frame_pc (this_frame);
2769 func = get_frame_func (this_frame);
2773 (*this_id) = frame_id_build (cache->prev_sp, pc);
2776 /* Implementation of function hook 'prev_register' in
2777 'struct frame_unwind' for epilogue unwinder. */
2779 static struct value *
2780 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2781 void **this_cache, int regnum)
2783 if (*this_cache == NULL)
2784 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2786 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2789 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2791 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2794 /* Implementation of function hook 'sniffer' in
2795 'struct frame_unwind' for epilogue unwinder. */
2798 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2799 struct frame_info *this_frame,
2800 void **this_prologue_cache)
2802 if (frame_relative_level (this_frame) == 0)
2804 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2805 CORE_ADDR pc = get_frame_pc (this_frame);
2807 if (arm_frame_is_thumb (this_frame))
2808 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2810 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2816 /* Frame unwinder from epilogue. */
2818 static const struct frame_unwind arm_epilogue_frame_unwind =
2821 default_frame_unwind_stop_reason,
2822 arm_epilogue_frame_this_id,
2823 arm_epilogue_frame_prev_register,
2825 arm_epilogue_frame_sniffer,
2828 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2829 trampoline, return the target PC. Otherwise return 0.
2831 void call0a (char c, short s, int i, long l) {}
2835 (*pointer_to_call0a) (c, s, i, l);
2838 Instead of calling a stub library function _call_via_xx (xx is
2839 the register name), GCC may inline the trampoline in the object
2840 file as below (register r2 has the address of call0a).
2843 .type main, %function
2852 The trampoline 'bx r2' doesn't belong to main. */
2855 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2857 /* The heuristic for recognizing such a trampoline is that FRAME is
2858 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2859 if (arm_frame_is_thumb (frame))
2863 if (target_read_memory (pc, buf, 2) == 0)
2865 struct gdbarch *gdbarch = get_frame_arch (frame);
2866 enum bfd_endian byte_order_for_code
2867 = gdbarch_byte_order_for_code (gdbarch);
2869 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2871 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2874 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2876 /* Clear the LSB so that gdb core sets step-resume
2877 breakpoint at the right address. */
2878 return UNMAKE_THUMB_ADDR (dest);
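/* For reference, the 16-bit Thumb encoding matched above is
   0100 0111 0rrr r000 -- "bx Rm" with Rm in bits 3..6 (bit 7 set
   would make it blx instead).  A standalone sketch of the same
   decoding (illustrative only, not used by GDB):  */

static int
example_thumb_bx_reg (unsigned short insn)
{
  /* Return the register number Rm if INSN encodes "bx Rm",
     otherwise -1.  E.g. 0x4710 is "bx r2" and yields 2.  */
  if ((insn & 0xff80) == 0x4700)
    return (insn >> 3) & 0xf;
  return -1;
}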
2886 static struct arm_prologue_cache *
2887 arm_make_stub_cache (struct frame_info *this_frame)
2889 struct arm_prologue_cache *cache;
2891 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2892 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2894 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2899 /* Our frame ID for a stub frame is the current SP and LR. */
2902 arm_stub_this_id (struct frame_info *this_frame,
2904 struct frame_id *this_id)
2906 struct arm_prologue_cache *cache;
2908 if (*this_cache == NULL)
2909 *this_cache = arm_make_stub_cache (this_frame);
2910 cache = (struct arm_prologue_cache *) *this_cache;
2912 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2916 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2917 struct frame_info *this_frame,
2918 void **this_prologue_cache)
2920 CORE_ADDR addr_in_block;
2922 CORE_ADDR pc, start_addr;
2925 addr_in_block = get_frame_address_in_block (this_frame);
2926 pc = get_frame_pc (this_frame);
2927 if (in_plt_section (addr_in_block)
2928 /* We also use the stub unwinder if the target memory is unreadable
2929 to avoid having the prologue unwinder try to read it. */
2930 || target_read_memory (pc, dummy, 4) != 0)
2933 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2934 && arm_skip_bx_reg (this_frame, pc) != 0)
2940 struct frame_unwind arm_stub_unwind = {
2942 default_frame_unwind_stop_reason,
2944 arm_prologue_prev_register,
2946 arm_stub_unwind_sniffer
2949 /* Store, into CACHE->saved_regs, the addresses of the saved
2950 registers of the frame described by THIS_FRAME.  CACHE is
2953 static struct arm_prologue_cache *
2954 arm_m_exception_cache (struct frame_info *this_frame)
2956 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2957 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2958 struct arm_prologue_cache *cache;
2959 CORE_ADDR unwound_sp;
2962 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2963 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2965 unwound_sp = get_frame_register_unsigned (this_frame,
2968 /* The hardware saves eight 32-bit words, comprising xPSR,
2969 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2970 "B1.5.6 Exception entry behavior" in
2971 "ARMv7-M Architecture Reference Manual". */
2972 cache->saved_regs[0].addr = unwound_sp;
2973 cache->saved_regs[1].addr = unwound_sp + 4;
2974 cache->saved_regs[2].addr = unwound_sp + 8;
2975 cache->saved_regs[3].addr = unwound_sp + 12;
2976 cache->saved_regs[12].addr = unwound_sp + 16;
2977 cache->saved_regs[14].addr = unwound_sp + 20;
2978 cache->saved_regs[15].addr = unwound_sp + 24;
2979 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2981 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2982 aligner between the top of the 32-byte stack frame and the
2983 previous context's stack pointer. */
2984 cache->prev_sp = unwound_sp + 32;
2985 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2986 && (xpsr & (1 << 9)) != 0)
2987 cache->prev_sp += 4;
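/* Illustrative sketch (not used by GDB) of the arithmetic above:
   the hardware-saved frame is eight words (R0-R3, R12, LR,
   ReturnAddress, xPSR) starting at UNWOUND_SP, and bit 9 of the
   saved xPSR records whether a four-byte aligner was inserted above
   it.  For example, with UNWOUND_SP == 0x20001000 the previous SP
   is 0x20001020, or 0x20001024 if bit 9 is set.  */

static unsigned long
example_m_profile_prev_sp (unsigned long unwound_sp, unsigned long xpsr)
{
  unsigned long prev_sp = unwound_sp + 32;      /* Eight saved words.  */

  if (xpsr & (1 << 9))
    prev_sp += 4;                               /* Stack aligner.  */
  return prev_sp;
}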
2992 /* Implementation of function hook 'this_id' in
2993 'struct frame_unwind'. */
2996 arm_m_exception_this_id (struct frame_info *this_frame,
2998 struct frame_id *this_id)
3000 struct arm_prologue_cache *cache;
3002 if (*this_cache == NULL)
3003 *this_cache = arm_m_exception_cache (this_frame);
3004 cache = (struct arm_prologue_cache *) *this_cache;
3006 /* Our frame ID for a stub frame is the current SP and LR. */
3007 *this_id = frame_id_build (cache->prev_sp,
3008 get_frame_pc (this_frame));
3011 /* Implementation of function hook 'prev_register' in
3012 'struct frame_unwind'. */
3014 static struct value *
3015 arm_m_exception_prev_register (struct frame_info *this_frame,
3019 struct arm_prologue_cache *cache;
3021 if (*this_cache == NULL)
3022 *this_cache = arm_m_exception_cache (this_frame);
3023 cache = (struct arm_prologue_cache *) *this_cache;
3025 /* The value was already reconstructed into PREV_SP. */
3026 if (prev_regnum == ARM_SP_REGNUM)
3027 return frame_unwind_got_constant (this_frame, prev_regnum,
3030 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3034 /* Implementation of function hook 'sniffer' in
3035 'struct frame_unwind'. */
3038 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3039 struct frame_info *this_frame,
3040 void **this_prologue_cache)
3042 CORE_ADDR this_pc = get_frame_pc (this_frame);
3044 /* No need to check is_m; this sniffer is only registered for
3045 M-profile architectures. */
3047 /* Check if exception frame returns to a magic PC value. */
3048 return arm_m_addr_is_magic (this_pc);
3051 /* Frame unwinder for M-profile exceptions. */
3053 struct frame_unwind arm_m_exception_unwind =
3056 default_frame_unwind_stop_reason,
3057 arm_m_exception_this_id,
3058 arm_m_exception_prev_register,
3060 arm_m_exception_unwind_sniffer
3064 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3066 struct arm_prologue_cache *cache;
3068 if (*this_cache == NULL)
3069 *this_cache = arm_make_prologue_cache (this_frame);
3070 cache = (struct arm_prologue_cache *) *this_cache;
3072 return cache->prev_sp - cache->framesize;
3075 struct frame_base arm_normal_base = {
3076 &arm_prologue_unwind,
3077 arm_normal_frame_base,
3078 arm_normal_frame_base,
3079 arm_normal_frame_base
3082 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3083 dummy frame. The frame ID's base needs to match the TOS value
3084 saved by save_dummy_frame_tos() and returned from
3085 arm_push_dummy_call, and the PC needs to match the dummy frame's
3088 static struct frame_id
3089 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3091 return frame_id_build (get_frame_register_unsigned (this_frame,
3093 get_frame_pc (this_frame));
3096 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3097 be used to construct the previous frame's ID, after looking up the
3098 containing function). */
3101 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3104 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3105 return arm_addr_bits_remove (gdbarch, pc);
3109 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3111 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3114 static struct value *
3115 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3118 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3120 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3125 /* The PC is normally copied from the return column, which
3126 describes saves of LR. However, that version may have an
3127 extra bit set to indicate Thumb state. The bit is not
3129 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3130 return frame_unwind_got_constant (this_frame, regnum,
3131 arm_addr_bits_remove (gdbarch, lr));
3134 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3135 cpsr = get_frame_register_unsigned (this_frame, regnum);
3136 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3137 if (IS_THUMB_ADDR (lr))
3141 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3144 internal_error (__FILE__, __LINE__,
3145 _("Unexpected register %d"), regnum);
3150 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3151 struct dwarf2_frame_state_reg *reg,
3152 struct frame_info *this_frame)
3158 reg->how = DWARF2_FRAME_REG_FN;
3159 reg->loc.fn = arm_dwarf2_prev_register;
3162 reg->how = DWARF2_FRAME_REG_CFA;
3167 /* Implement the stack_frame_destroyed_p gdbarch method. */
3170 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3172 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3173 unsigned int insn, insn2;
3174 int found_return = 0, found_stack_adjust = 0;
3175 CORE_ADDR func_start, func_end;
3179 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3182 /* The epilogue is a sequence of instructions along the following lines:
3184 - add stack frame size to SP or FP
3185 - [if frame pointer used] restore SP from FP
3186 - restore registers from SP [may include PC]
3187 - a return-type instruction [if PC wasn't already restored]
3189 In a first pass, we scan forward from the current PC and verify the
3190 instructions we find as compatible with this sequence, ending in a
3193 However, this is not sufficient to distinguish indirect function calls
3194 within a function from indirect tail calls in the epilogue in some cases.
3195 Therefore, if we didn't already find any SP-changing instruction during
3196 forward scan, we add a backward scanning heuristic to ensure we actually
3197 are in the epilogue. */
3200 while (scan_pc < func_end && !found_return)
3202 if (target_read_memory (scan_pc, buf, 2))
3206 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3208 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3210 else if (insn == 0x46f7) /* mov pc, lr */
3212 else if (thumb_instruction_restores_sp (insn))
3214 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3217 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3219 if (target_read_memory (scan_pc, buf, 2))
3223 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3225 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3227 if (insn2 & 0x8000) /* <registers> include PC. */
3230 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3231 && (insn2 & 0x0fff) == 0x0b04)
3233 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3236 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3237 && (insn2 & 0x0e00) == 0x0a00)
3249 /* Since any instruction in the epilogue sequence, with the possible
3250 exception of return itself, updates the stack pointer, we need to
3251 scan backwards for at most one instruction. Try either a 16-bit or
3252 a 32-bit instruction. This is just a heuristic, so we do not worry
3253 too much about false positives. */
3255 if (pc - 4 < func_start)
3257 if (target_read_memory (pc - 4, buf, 4))
3260 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3261 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3263 if (thumb_instruction_restores_sp (insn2))
3264 found_stack_adjust = 1;
3265 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3266 found_stack_adjust = 1;
3267 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3268 && (insn2 & 0x0fff) == 0x0b04)
3269 found_stack_adjust = 1;
3270 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3271 && (insn2 & 0x0e00) == 0x0a00)
3272 found_stack_adjust = 1;
3274 return found_stack_adjust;
3278 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3280 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3283 CORE_ADDR func_start, func_end;
3285 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3288 /* We are in the epilogue if the previous instruction was a stack
3289 adjustment and the next instruction is a possible return (bx, mov
3290 pc, or pop). We could have to scan backwards to find the stack
3291 adjustment, or forwards to find the return, but this is a decent
3292 approximation. First scan forwards. */
3295 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3296 if (bits (insn, 28, 31) != INST_NV)
3298 if ((insn & 0x0ffffff0) == 0x012fff10)
3301 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3304 else if ((insn & 0x0fff0000) == 0x08bd0000
3305 && (insn & 0x0000c000) != 0)
3306 /* POP (LDMIA), including PC or LR. */
3313 /* Scan backwards. This is just a heuristic, so do not worry about
3314 false positives from mode changes. */
3316 if (pc < func_start + 4)
3319 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3320 if (arm_instruction_restores_sp (insn))
3326 /* Implement the stack_frame_destroyed_p gdbarch method. */
3329 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3331 if (arm_pc_is_thumb (gdbarch, pc))
3332 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3334 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3337 /* When arguments must be pushed onto the stack, they go on in reverse
3338 order. The code below implements a FILO (stack) to do this. */
3343 struct stack_item *prev;
3347 static struct stack_item *
3348 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3350 struct stack_item *si;
3351 si = XNEW (struct stack_item);
3352 si->data = (gdb_byte *) xmalloc (len);
3355 memcpy (si->data, contents, len);
3359 static struct stack_item *
3360 pop_stack_item (struct stack_item *si)
3362 struct stack_item *dead = si;
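/* The push/pop pair above keeps a singly linked list whose head is
   the most recently pushed item, so items come back off in the
   reverse of the order they were pushed.  A self-contained sketch of
   the same idea using plain malloc/free/memcpy from <stdlib.h> and
   <string.h> (illustrative only; the real code uses GDB's
   allocators):  */

struct example_item
{
  struct example_item *prev;
  void *data;
  int len;
};

static struct example_item *
example_push (struct example_item *prev, const void *contents, int len)
{
  struct example_item *si = (struct example_item *) malloc (sizeof *si);

  si->data = malloc (len);
  si->len = len;
  si->prev = prev;
  memcpy (si->data, contents, len);
  return si;            /* SI is the new top of the stack.  */
}

static struct example_item *
example_pop (struct example_item *si)
{
  struct example_item *prev = si->prev;

  free (si->data);
  free (si);
  return prev;          /* The remaining stack.  */
}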
3370 /* Return the alignment (in bytes) of the given type. */
3373 arm_type_align (struct type *t)
3379 t = check_typedef (t);
3380 switch (TYPE_CODE (t))
3383 /* Should never happen. */
3384 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3388 case TYPE_CODE_ENUM:
3392 case TYPE_CODE_RANGE:
3394 case TYPE_CODE_RVALUE_REF:
3395 case TYPE_CODE_CHAR:
3396 case TYPE_CODE_BOOL:
3397 return TYPE_LENGTH (t);
3399 case TYPE_CODE_ARRAY:
3400 if (TYPE_VECTOR (t))
3402 /* Use the natural alignment for vector types (the same as for
3403 scalar types), but the maximum alignment is 64 bits. */
3404 if (TYPE_LENGTH (t) > 8)
3407 return TYPE_LENGTH (t);
3410 return arm_type_align (TYPE_TARGET_TYPE (t));
3411 case TYPE_CODE_COMPLEX:
3412 return arm_type_align (TYPE_TARGET_TYPE (t));
3414 case TYPE_CODE_STRUCT:
3415 case TYPE_CODE_UNION:
3417 for (n = 0; n < TYPE_NFIELDS (t); n++)
3419 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3427 /* Possible base types for a candidate for passing and returning in
3430 enum arm_vfp_cprc_base_type
3439 /* The length of one element of base type B. */
3442 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3446 case VFP_CPRC_SINGLE:
3448 case VFP_CPRC_DOUBLE:
3450 case VFP_CPRC_VEC64:
3452 case VFP_CPRC_VEC128:
3455 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3460 /* The character ('s', 'd' or 'q') for the type of VFP register used
3461 for passing base type B. */
3464 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3468 case VFP_CPRC_SINGLE:
3470 case VFP_CPRC_DOUBLE:
3472 case VFP_CPRC_VEC64:
3474 case VFP_CPRC_VEC128:
3477 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3482 /* Determine whether T may be part of a candidate for passing and
3483 returning in VFP registers, ignoring the limit on the total number
3484 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3485 classification of the first valid component found; if it is not
3486 VFP_CPRC_UNKNOWN, all components must have the same classification
3487 as *BASE_TYPE. If it is found that T contains a type not permitted
3488 for passing and returning in VFP registers, a type differently
3489 classified from *BASE_TYPE, or two types differently classified
3490 from each other, return -1, otherwise return the total number of
3491 base-type elements found (possibly 0 in an empty structure or
3492 array). Vector types are not currently supported, matching the
3493 generic AAPCS support. */
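/* For example, "struct { double x; double y; }" is a homogeneous
   aggregate of two VFP_CPRC_DOUBLE elements, while
   "struct { float f; double d; }" mixes classifications and is
   rejected.  A much-simplified standalone classifier covering only a
   flat list of float/double members (an illustrative sketch; the
   routine below also handles complex, vector, array, struct and
   union types via GDB's type machinery):  */

enum example_cprc_base { EX_UNKNOWN, EX_SINGLE, EX_DOUBLE };

static int
example_classify_hfa (const int *member_sizes, int n,
                      enum example_cprc_base *base)
{
  /* Return the element count if all N members (given by their sizes
     in bytes, 4 or 8) share one base type, otherwise -1.  */
  int i, count = 0;

  *base = EX_UNKNOWN;
  for (i = 0; i < n; i++)
    {
      enum example_cprc_base b
        = (member_sizes[i] == 4 ? EX_SINGLE
           : member_sizes[i] == 8 ? EX_DOUBLE : EX_UNKNOWN);

      if (b == EX_UNKNOWN)
        return -1;
      if (*base == EX_UNKNOWN)
        *base = b;
      else if (*base != b)
        return -1;
      count++;
    }
  return count;
}

/* A full candidate additionally requires 1 <= count <= 4; see
   arm_vfp_call_candidate below.  */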
3496 arm_vfp_cprc_sub_candidate (struct type *t,
3497 enum arm_vfp_cprc_base_type *base_type)
3499 t = check_typedef (t);
3500 switch (TYPE_CODE (t))
3503 switch (TYPE_LENGTH (t))
3506 if (*base_type == VFP_CPRC_UNKNOWN)
3507 *base_type = VFP_CPRC_SINGLE;
3508 else if (*base_type != VFP_CPRC_SINGLE)
3513 if (*base_type == VFP_CPRC_UNKNOWN)
3514 *base_type = VFP_CPRC_DOUBLE;
3515 else if (*base_type != VFP_CPRC_DOUBLE)
3524 case TYPE_CODE_COMPLEX:
3525 /* Arguments of complex T where T is one of the types float or
3526 double get treated as if they are implemented as:
3535 switch (TYPE_LENGTH (t))
3538 if (*base_type == VFP_CPRC_UNKNOWN)
3539 *base_type = VFP_CPRC_SINGLE;
3540 else if (*base_type != VFP_CPRC_SINGLE)
3545 if (*base_type == VFP_CPRC_UNKNOWN)
3546 *base_type = VFP_CPRC_DOUBLE;
3547 else if (*base_type != VFP_CPRC_DOUBLE)
3556 case TYPE_CODE_ARRAY:
3558 if (TYPE_VECTOR (t))
3560 /* 64-bit and 128-bit containerized vector types are VFP
3562 switch (TYPE_LENGTH (t))
3565 if (*base_type == VFP_CPRC_UNKNOWN)
3566 *base_type = VFP_CPRC_VEC64;
3569 if (*base_type == VFP_CPRC_UNKNOWN)
3570 *base_type = VFP_CPRC_VEC128;
3581 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3585 if (TYPE_LENGTH (t) == 0)
3587 gdb_assert (count == 0);
3590 else if (count == 0)
3592 unitlen = arm_vfp_cprc_unit_length (*base_type);
3593 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3594 return TYPE_LENGTH (t) / unitlen;
3599 case TYPE_CODE_STRUCT:
3604 for (i = 0; i < TYPE_NFIELDS (t); i++)
3608 if (!field_is_static (&TYPE_FIELD (t, i)))
3609 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3611 if (sub_count == -1)
3615 if (TYPE_LENGTH (t) == 0)
3617 gdb_assert (count == 0);
3620 else if (count == 0)
3622 unitlen = arm_vfp_cprc_unit_length (*base_type);
3623 if (TYPE_LENGTH (t) != unitlen * count)
3628 case TYPE_CODE_UNION:
3633 for (i = 0; i < TYPE_NFIELDS (t); i++)
3635 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3637 if (sub_count == -1)
3639 count = (count > sub_count ? count : sub_count);
3641 if (TYPE_LENGTH (t) == 0)
3643 gdb_assert (count == 0);
3646 else if (count == 0)
3648 unitlen = arm_vfp_cprc_unit_length (*base_type);
3649 if (TYPE_LENGTH (t) != unitlen * count)
3661 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3662 if passed to or returned from a non-variadic function with the VFP
3663 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3664 *BASE_TYPE to the base type for T and *COUNT to the number of
3665 elements of that base type before returning. */
3668 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3671 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3672 int c = arm_vfp_cprc_sub_candidate (t, &b);
3673 if (c <= 0 || c > 4)
3680 /* Return 1 if the VFP ABI should be used for passing arguments to and
3681 returning values from a function of type FUNC_TYPE, 0
3685 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3687 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3688 /* Variadic functions always use the base ABI. Assume that functions
3689 without debug info are not variadic. */
3690 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3692 /* The VFP ABI is only supported as a variant of AAPCS. */
3693 if (tdep->arm_abi != ARM_ABI_AAPCS)
3695 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3698 /* We currently only support passing parameters in integer registers, which
3699 conforms with GCC's default model, and VFP argument passing following
3700 the VFP variant of AAPCS. Several other variants exist and
3701 we should probably support some of them based on the selected ABI. */
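/* In the VFP case handled below, the sixteen single-precision
   argument registers s0-s15 are tracked as a 16-bit free mask; a
   single takes one bit, a double two consecutive bits, a quad four,
   and a homogeneous aggregate of COUNT elements needs COUNT such
   slots starting at an element-aligned position.  A standalone
   sketch of that allocation (illustrative only; the shift/mask
   arithmetic mirrors the loop in arm_push_dummy_call):  */

static int
example_alloc_vfp_regs (unsigned int *regs_free, int unit_length,
                        int count)
{
  /* UNIT_LENGTH is 4, 8 or 16 bytes.  Return the starting
     S-register number and clear the corresponding bits in
     *REGS_FREE, or return -1 if the aggregate does not fit.  */
  int shift = unit_length / 4;
  unsigned int mask = (1u << (shift * count)) - 1;
  int regno;

  for (regno = 0; regno < 16; regno += shift)
    if (((*regs_free >> regno) & mask) == mask)
      {
        *regs_free &= ~(mask << regno);
        return regno;
      }
  return -1;
}

/* E.g. with all registers free (0xffff), a structure of two doubles
   (unit_length 8, count 2) is allocated at s0 -- i.e. d0/d1 -- and
   the free mask becomes 0xfff0.  */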
3704 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3705 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3706 struct value **args, CORE_ADDR sp, int struct_return,
3707 CORE_ADDR struct_addr)
3709 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3713 struct stack_item *si = NULL;
3716 unsigned vfp_regs_free = (1 << 16) - 1;
3718 /* Determine the type of this function and whether the VFP ABI
3720 ftype = check_typedef (value_type (function));
3721 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3722 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3723 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3725 /* Set the return address. For the ARM, the return breakpoint is
3726 always at BP_ADDR. */
3727 if (arm_pc_is_thumb (gdbarch, bp_addr))
3729 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3731 /* Walk through the list of args and determine how large a temporary
3732 stack is required. Need to take care here as structs may be
3733 passed on the stack, and we have to push them. */
3736 argreg = ARM_A1_REGNUM;
3739 /* The struct_return pointer occupies the first parameter
3740 passing register. */
3744 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3745 gdbarch_register_name (gdbarch, argreg),
3746 paddress (gdbarch, struct_addr));
3747 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3751 for (argnum = 0; argnum < nargs; argnum++)
3754 struct type *arg_type;
3755 struct type *target_type;
3756 enum type_code typecode;
3757 const bfd_byte *val;
3759 enum arm_vfp_cprc_base_type vfp_base_type;
3761 int may_use_core_reg = 1;
3763 arg_type = check_typedef (value_type (args[argnum]));
3764 len = TYPE_LENGTH (arg_type);
3765 target_type = TYPE_TARGET_TYPE (arg_type);
3766 typecode = TYPE_CODE (arg_type);
3767 val = value_contents (args[argnum]);
3769 align = arm_type_align (arg_type);
3770 /* Round alignment up to a whole number of words. */
3771 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3772 /* Different ABIs have different maximum alignments. */
3773 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3775 /* The APCS ABI only requires word alignment. */
3776 align = INT_REGISTER_SIZE;
3780 /* The AAPCS requires at most doubleword alignment. */
3781 if (align > INT_REGISTER_SIZE * 2)
3782 align = INT_REGISTER_SIZE * 2;
3786 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3794 /* Because this is a CPRC it cannot go in a core register or
3795 cause a core register to be skipped for alignment.
3796 Either it goes in VFP registers and the rest of this loop
3797 iteration is skipped for this argument, or it goes on the
3798 stack (and the stack alignment code is correct for this
3800 may_use_core_reg = 0;
3802 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3803 shift = unit_length / 4;
3804 mask = (1 << (shift * vfp_base_count)) - 1;
3805 for (regno = 0; regno < 16; regno += shift)
3806 if (((vfp_regs_free >> regno) & mask) == mask)
3815 vfp_regs_free &= ~(mask << regno);
3816 reg_scaled = regno / shift;
3817 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3818 for (i = 0; i < vfp_base_count; i++)
3822 if (reg_char == 'q')
3823 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3824 val + i * unit_length);
3827 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3828 reg_char, reg_scaled + i);
3829 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3831 regcache_cooked_write (regcache, regnum,
3832 val + i * unit_length);
3839 /* This CPRC could not go in VFP registers, so all VFP
3840 registers are now marked as used. */
3845 /* Push stack padding for doubleword alignment. */
3846 if (nstack & (align - 1))
3848 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3849 nstack += INT_REGISTER_SIZE;
3852 /* Doubleword aligned quantities must go in even register pairs. */
3853 if (may_use_core_reg
3854 && argreg <= ARM_LAST_ARG_REGNUM
3855 && align > INT_REGISTER_SIZE
3859 /* If the argument is a pointer to a function, and it is a
3860 Thumb function, create a LOCAL copy of the value and set
3861 the THUMB bit in it. */
3862 if (TYPE_CODE_PTR == typecode
3863 && target_type != NULL
3864 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3866 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3867 if (arm_pc_is_thumb (gdbarch, regval))
3869 bfd_byte *copy = (bfd_byte *) alloca (len);
3870 store_unsigned_integer (copy, len, byte_order,
3871 MAKE_THUMB_ADDR (regval));
3876 /* Copy the argument to general registers or the stack in
3877 register-sized pieces. Large arguments are split between
3878 registers and stack. */
3881 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3883 = extract_unsigned_integer (val, partial_len, byte_order);
3885 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3887 /* The argument is being passed in a general purpose
3889 if (byte_order == BFD_ENDIAN_BIG)
3890 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3892 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3894 gdbarch_register_name
3896 phex (regval, INT_REGISTER_SIZE));
3897 regcache_cooked_write_unsigned (regcache, argreg, regval);
3902 gdb_byte buf[INT_REGISTER_SIZE];
3904 memset (buf, 0, sizeof (buf));
3905 store_unsigned_integer (buf, partial_len, byte_order, regval);
3907 /* Push the arguments onto the stack. */
3909 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3911 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3912 nstack += INT_REGISTER_SIZE;
3919 /* If we have an odd number of words to push, then decrement the stack
3920 by one word now, so the first stack argument will be dword aligned. */
3927 write_memory (sp, si->data, si->len);
3928 si = pop_stack_item (si);
3931 /* Finally, update the SP register. */
3932 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3938 /* Always align the frame to an 8-byte boundary. This is required on
3939 some platforms and harmless on the rest. */
3942 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3944 /* Align the stack to eight bytes. */
3945 return sp & ~ (CORE_ADDR) 7;
3949 print_fpu_flags (struct ui_file *file, int flags)
3951 if (flags & (1 << 0))
3952 fputs_filtered ("IVO ", file);
3953 if (flags & (1 << 1))
3954 fputs_filtered ("DVZ ", file);
3955 if (flags & (1 << 2))
3956 fputs_filtered ("OFL ", file);
3957 if (flags & (1 << 3))
3958 fputs_filtered ("UFL ", file);
3959 if (flags & (1 << 4))
3960 fputs_filtered ("INX ", file);
3961 fputc_filtered ('\n', file);
3964 /* Print interesting information about the floating point processor
3965 (if present) or emulator. */
3967 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3968 struct frame_info *frame, const char *args)
3970 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3973 type = (status >> 24) & 127;
3974 if (status & (1 << 31))
3975 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3977 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3978 /* i18n: [floating point unit] mask */
3979 fputs_filtered (_("mask: "), file);
3980 print_fpu_flags (file, status >> 16);
3981 /* i18n: [floating point unit] flags */
3982 fputs_filtered (_("flags: "), file);
3983 print_fpu_flags (file, status);
3986 /* Construct the ARM extended floating point type. */
3987 static struct type *
3988 arm_ext_type (struct gdbarch *gdbarch)
3990 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3992 if (!tdep->arm_ext_type)
3994 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3995 floatformats_arm_ext);
3997 return tdep->arm_ext_type;
4000 static struct type *
4001 arm_neon_double_type (struct gdbarch *gdbarch)
4003 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4005 if (tdep->neon_double_type == NULL)
4007 struct type *t, *elem;
4009 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4011 elem = builtin_type (gdbarch)->builtin_uint8;
4012 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4013 elem = builtin_type (gdbarch)->builtin_uint16;
4014 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4015 elem = builtin_type (gdbarch)->builtin_uint32;
4016 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4017 elem = builtin_type (gdbarch)->builtin_uint64;
4018 append_composite_type_field (t, "u64", elem);
4019 elem = builtin_type (gdbarch)->builtin_float;
4020 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4021 elem = builtin_type (gdbarch)->builtin_double;
4022 append_composite_type_field (t, "f64", elem);
4024 TYPE_VECTOR (t) = 1;
4025 TYPE_NAME (t) = "neon_d";
4026 tdep->neon_double_type = t;
4029 return tdep->neon_double_type;
4032 /* FIXME: The vector types are not correctly ordered on big-endian
4033 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4034 bits of d0 - regardless of what unit size is being held in d0. So
4035 the offset of the first uint8 in d0 is 7, but the offset of the
4036 first float is 4. This code works as-is for little-endian
4039 static struct type *
4040 arm_neon_quad_type (struct gdbarch *gdbarch)
4042 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4044 if (tdep->neon_quad_type == NULL)
4046 struct type *t, *elem;
4048 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4050 elem = builtin_type (gdbarch)->builtin_uint8;
4051 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4052 elem = builtin_type (gdbarch)->builtin_uint16;
4053 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4054 elem = builtin_type (gdbarch)->builtin_uint32;
4055 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4056 elem = builtin_type (gdbarch)->builtin_uint64;
4057 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4058 elem = builtin_type (gdbarch)->builtin_float;
4059 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4060 elem = builtin_type (gdbarch)->builtin_double;
4061 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4063 TYPE_VECTOR (t) = 1;
4064 TYPE_NAME (t) = "neon_q";
4065 tdep->neon_quad_type = t;
4068 return tdep->neon_quad_type;
4071 /* Return the GDB type object for the "standard" data type of data in
4074 static struct type *
4075 arm_register_type (struct gdbarch *gdbarch, int regnum)
4077 int num_regs = gdbarch_num_regs (gdbarch);
4079 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4080 && regnum >= num_regs && regnum < num_regs + 32)
4081 return builtin_type (gdbarch)->builtin_float;
4083 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4084 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4085 return arm_neon_quad_type (gdbarch);
4087 /* If the target description has register information, we are only
4088 in this function so that we can override the types of
4089 double-precision registers for NEON. */
4090 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4092 struct type *t = tdesc_register_type (gdbarch, regnum);
4094 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4095 && TYPE_CODE (t) == TYPE_CODE_FLT
4096 && gdbarch_tdep (gdbarch)->have_neon)
4097 return arm_neon_double_type (gdbarch);
4102 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4104 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4105 return builtin_type (gdbarch)->builtin_void;
4107 return arm_ext_type (gdbarch);
4109 else if (regnum == ARM_SP_REGNUM)
4110 return builtin_type (gdbarch)->builtin_data_ptr;
4111 else if (regnum == ARM_PC_REGNUM)
4112 return builtin_type (gdbarch)->builtin_func_ptr;
4113 else if (regnum >= ARRAY_SIZE (arm_register_names))
4114 /* These registers are only supported on targets which supply
4115 an XML description. */
4116 return builtin_type (gdbarch)->builtin_int0;
4118 return builtin_type (gdbarch)->builtin_uint32;
4121 /* Map a DWARF register REGNUM onto the appropriate GDB register
4125 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4127 /* Core integer regs. */
4128 if (reg >= 0 && reg <= 15)
4131 /* Legacy FPA encoding. These were once used in a way which
4132 overlapped with VFP register numbering, so their use is
4133 discouraged, but GDB doesn't support the ARM toolchain
4134 which used them for VFP. */
4135 if (reg >= 16 && reg <= 23)
4136 return ARM_F0_REGNUM + reg - 16;
4138 /* New assignments for the FPA registers. */
4139 if (reg >= 96 && reg <= 103)
4140 return ARM_F0_REGNUM + reg - 96;
4142 /* WMMX register assignments. */
4143 if (reg >= 104 && reg <= 111)
4144 return ARM_WCGR0_REGNUM + reg - 104;
4146 if (reg >= 112 && reg <= 127)
4147 return ARM_WR0_REGNUM + reg - 112;
4149 if (reg >= 192 && reg <= 199)
4150 return ARM_WC0_REGNUM + reg - 192;
4152 /* VFP v2 registers. A double precision value is actually
4153 in d1 rather than s2, but the ABI only defines numbering
4154 for the single precision registers. This will "just work"
4155 in GDB for little endian targets (we'll read eight bytes,
4156 starting in s0 and then progressing to s1), but will be
4157 reversed on big endian targets with VFP. This won't
4158 be a problem for the new Neon quad registers; you're supposed
4159 to use DW_OP_piece for those. */
4160 if (reg >= 64 && reg <= 95)
4164 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4165 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4169 /* VFP v3 / Neon registers. This range is also used for VFP v2
4170 registers, except that it now describes d0 instead of s0. */
4171 if (reg >= 256 && reg <= 287)
4175 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4176 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4183 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4185 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4188 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4190 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4191 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4193 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4194 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4196 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4197 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4199 if (reg < NUM_GREGS)
4200 return SIM_ARM_R0_REGNUM + reg;
4203 if (reg < NUM_FREGS)
4204 return SIM_ARM_FP0_REGNUM + reg;
4207 if (reg < NUM_SREGS)
4208 return SIM_ARM_FPS_REGNUM + reg;
4211 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4214 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4215 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4216 It is thought that this is the floating-point register format on
4217 little-endian systems. */
4220 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4221 void *dbl, int endianess)
4225 if (endianess == BFD_ENDIAN_BIG)
4226 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4228 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4230 floatformat_from_doublest (fmt, &d, dbl);
4234 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4239 floatformat_to_doublest (fmt, ptr, &d);
4240 if (endianess == BFD_ENDIAN_BIG)
4241 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4243 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4247 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4248 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4249 NULL if an error occurs. BUF is freed. */
4252 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4253 int old_len, int new_len)
4256 int bytes_to_read = new_len - old_len;
4258 new_buf = (gdb_byte *) xmalloc (new_len);
4259 memcpy (new_buf + bytes_to_read, buf, old_len);
4261 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
4269 /* An IT block is at most the 2-byte IT instruction followed by
4270 four 4-byte instructions. The furthest back we must search to
4271 find an IT block that affects the current instruction is thus
4272 2 + 3 * 4 == 14 bytes. */
4273 #define MAX_IT_BLOCK_PREFIX 14
4275 /* Use a quick scan if there are more than this many bytes of
4277 #define IT_SCAN_THRESHOLD 32
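/* A 16-bit Thumb IT instruction is 0xbfXY with a non-zero mask Y;
   the position of the least significant set bit of the mask gives
   the number of conditional instructions in the block (one to
   four).  A standalone sketch of the test used by the scans below
   (illustrative only, not part of GDB):  */

static int
example_it_block_length (unsigned short insn)
{
  /* Return how many instructions the IT instruction INSN covers, or
     0 if INSN is not an IT instruction (0xbf00 with a zero mask
     encodes NOP and other hints).  E.g. 0xbf18, "it ne", covers one
     instruction.  */
  if ((insn & 0xff00) != 0xbf00 || (insn & 0x000f) == 0)
    return 0;

  if (insn & 0x0001)
    return 4;
  if (insn & 0x0002)
    return 3;
  if (insn & 0x0004)
    return 2;
  return 1;
}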
4279 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4280 A breakpoint in an IT block may not be hit, depending on the
4283 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4287 CORE_ADDR boundary, func_start;
4289 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4290 int i, any, last_it, last_it_count;
4292 /* If we are using BKPT breakpoints, none of this is necessary. */
4293 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4296 /* ARM mode does not have this problem. */
4297 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4300 /* We are setting a breakpoint in Thumb code that could potentially
4301 contain an IT block. The first step is to find how much Thumb
4302 code there is; we do not need to read outside of known Thumb
4304 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4306 /* Thumb-2 code must have mapping symbols to have a chance. */
4309 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4311 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4312 && func_start > boundary)
4313 boundary = func_start;
4315 /* Search for a candidate IT instruction. We have to do some fancy
4316 footwork to distinguish a real IT instruction from the second
4317 half of a 32-bit instruction, but there is no need for that if
4318 there's no candidate. */
4319 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4321 /* No room for an IT instruction. */
4324 buf = (gdb_byte *) xmalloc (buf_len);
4325 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
4328 for (i = 0; i < buf_len; i += 2)
4330 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4331 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4344 /* OK, the code bytes before this instruction contain at least one
4345 halfword which resembles an IT instruction. We know that it's
4346 Thumb code, but there are still two possibilities. Either the
4347 halfword really is an IT instruction, or it is the second half of
4348 a 32-bit Thumb instruction. The only way we can tell is to
4349 scan forwards from a known instruction boundary. */
4350 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4354 /* There's a lot of code before this instruction. Start with an
4355 optimistic search; it's easy to recognize halfwords that can
4356 not be the start of a 32-bit instruction, and use that to
4357 lock on to the instruction boundaries. */
4358 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4361 buf_len = IT_SCAN_THRESHOLD;
4364 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4366 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4367 if (thumb_insn_size (inst1) == 2)
4374 /* At this point, if DEFINITE, BUF[I] is the first place we
4375 are sure that we know the instruction boundaries, and it is far
4376 enough from BPADDR that we could not miss an IT instruction
4377 affecting BPADDR. If ! DEFINITE, give up - start from a
4381 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4385 buf_len = bpaddr - boundary;
4391 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4394 buf_len = bpaddr - boundary;
4398 /* Scan forwards. Find the last IT instruction before BPADDR. */
4403 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4405 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4410 else if (inst1 & 0x0002)
4412 else if (inst1 & 0x0004)
4417 i += thumb_insn_size (inst1);
4423 /* There wasn't really an IT instruction after all. */
4426 if (last_it_count < 1)
4427 /* It was too far away. */
4430 /* This really is a trouble spot. Move the breakpoint to the IT
4432 return bpaddr - buf_len + last_it;
4435 /* ARM displaced stepping support.
4437 Generally ARM displaced stepping works as follows:
4439 1. When an instruction is to be single-stepped, it is first decoded by
4440 arm_process_displaced_insn. Depending on the type of instruction, it is
4441 then copied to a scratch location, possibly in a modified form. The
4442 copy_* set of functions performs such modification, as necessary. A
4443 breakpoint is placed after the modified instruction in the scratch space
4444 to return control to GDB. Note in particular that instructions which
4445 modify the PC will no longer do so after modification.
4447 2. The instruction is single-stepped, by setting the PC to the scratch
4448 location address, and resuming. Control returns to GDB when the
4451 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4452 function used for the current instruction. This function's job is to
4453 put the CPU/memory state back to what it would have been if the
4454 instruction had been executed unmodified in its original location. */
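/* Illustrative summary (added comment, not part of the original code) of
   what a typical copy_* routine below fills in, using fields that appear
   throughout the code that follows:

     dsc->tmp[i]     saved values of any scratch registers it borrows;
     dsc->modinsn[]  the (possibly rewritten) instruction(s) placed in the
                     scratch space and executed out of line;
     dsc->cleanup    the matching cleanup_* routine, which restores the
                     borrowed registers and emulates any PC-relative or
                     PC-writing effect at the original location.  */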
4456 /* NOP instruction (mov r0, r0). */
4457 #define ARM_NOP 0xe1a00000
4458 #define THUMB_NOP 0x4600
4460 /* Helper for register reads for displaced stepping. In particular, this
4461 returns the PC as it would be seen by the instruction at its original
4465 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4469 CORE_ADDR from = dsc->insn_addr;
4471 if (regno == ARM_PC_REGNUM)
4473 /* Compute pipeline offset:
4474 - When executing an ARM instruction, PC reads as the address of the
4475 current instruction plus 8.
4476 - When executing a Thumb instruction, PC reads as the address of the
4477 current instruction plus 4. */
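/* Illustrative example (added comment): for an ARM-state instruction
   originally at 0x8000 this returns 0x8008, and for a Thumb-state
   instruction at the same address it returns 0x8004; the value depends
   only on the original address, not on where the scratch copy runs.  */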
4484 if (debug_displaced)
4485 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4486 (unsigned long) from);
4487 return (ULONGEST) from;
4491 regcache_cooked_read_unsigned (regs, regno, &ret);
4492 if (debug_displaced)
4493 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4494 regno, (unsigned long) ret);
4500 displaced_in_arm_mode (struct regcache *regs)
4503 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4505 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4507 return (ps & t_bit) == 0;
4510 /* Write to the PC as from a branch instruction. */
4513 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4517 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4518 architecture versions < 6. */
4519 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4520 val & ~(ULONGEST) 0x3);
4522 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4523 val & ~(ULONGEST) 0x1);
4526 /* Write to the PC as from a branch-exchange instruction. */
4529 bx_write_pc (struct regcache *regs, ULONGEST val)
4532 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4534 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4538 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4539 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4541 else if ((val & 2) == 0)
4543 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4544 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4548 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4549 mode, align dest to 4 bytes). */
4550 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4551 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4552 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4556 /* Write to the PC as if from a load instruction. */
4559 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4562 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4563 bx_write_pc (regs, val);
4565 branch_write_pc (regs, dsc, val);
4568 /* Write to the PC as if from an ALU instruction. */
4571 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4574 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4575 bx_write_pc (regs, val);
4577 branch_write_pc (regs, dsc, val);
4580 /* Helper for writing to registers for displaced stepping. Writing to the PC
4581 has varying effects depending on the instruction which does the write:
4582 this is controlled by the WRITE_PC argument. */
4585 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4586 int regno, ULONGEST val, enum pc_write_style write_pc)
4588 if (regno == ARM_PC_REGNUM)
4590 if (debug_displaced)
4591 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4592 (unsigned long) val);
4595 case BRANCH_WRITE_PC:
4596 branch_write_pc (regs, dsc, val);
4600 bx_write_pc (regs, val);
4604 load_write_pc (regs, dsc, val);
4608 alu_write_pc (regs, dsc, val);
4611 case CANNOT_WRITE_PC:
4612 warning (_("Instruction wrote to PC in an unexpected way when "
4613 "single-stepping"));
4617 internal_error (__FILE__, __LINE__,
4618 _("Invalid argument to displaced_write_reg"));
4621 dsc->wrote_to_pc = 1;
4625 if (debug_displaced)
4626 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4627 regno, (unsigned long) val);
4628 regcache_cooked_write_unsigned (regs, regno, val);
4632 /* This function is used to concisely determine if an instruction INSN
4633 references PC. Register fields of interest in INSN should have the
4634 corresponding fields of BITMASK set to 0b1111. The function
4635 returns 1 if any of these fields in INSN reference the PC
4636 (also 0b1111, r15), else it returns 0. */
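/* For example (illustrative usage, mirroring the callers further below),
   to ask whether the Rn field in bits 16-19 of an ARM instruction is the
   PC:

     if (insn_references_pc (insn, 0x000f0000ul))
       ... INSN reads or writes r15 through Rn ...  */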
4639 insn_references_pc (uint32_t insn, uint32_t bitmask)
4641 uint32_t lowbit = 1;
4643 while (bitmask != 0)
4647 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4653 mask = lowbit * 0xf;
4655 if ((insn & mask) == mask)
4664 /* The simplest copy function. Many instructions have the same effect no
4665 matter what address they are executed at: in those cases, use this. */
4668 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4669 const char *iname, struct displaced_step_closure *dsc)
4671 if (debug_displaced)
4672 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4673 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4676 dsc->modinsn[0] = insn;
4682 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4683 uint16_t insn2, const char *iname,
4684 struct displaced_step_closure *dsc)
4686 if (debug_displaced)
4687 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4688 "opcode/class '%s' unmodified\n", insn1, insn2,
4691 dsc->modinsn[0] = insn1;
4692 dsc->modinsn[1] = insn2;
4698 /* Copy a 16-bit Thumb (Thumb or 16-bit Thumb-2) instruction without any
4701 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4703 struct displaced_step_closure *dsc)
4705 if (debug_displaced)
4706 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4707 "opcode/class '%s' unmodified\n", insn,
4710 dsc->modinsn[0] = insn;
4715 /* Preload instructions with immediate offset. */
4718 cleanup_preload (struct gdbarch *gdbarch,
4719 struct regcache *regs, struct displaced_step_closure *dsc)
4721 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4722 if (!dsc->u.preload.immed)
4723 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4727 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4728 struct displaced_step_closure *dsc, unsigned int rn)
4731 /* Preload instructions:
4733 {pli/pld} [rn, #+/-imm]
4735 {pli/pld} [r0, #+/-imm]. */
4737 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4738 rn_val = displaced_read_reg (regs, dsc, rn);
4739 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4740 dsc->u.preload.immed = 1;
4742 dsc->cleanup = &cleanup_preload;
4746 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4747 struct displaced_step_closure *dsc)
4749 unsigned int rn = bits (insn, 16, 19);
4751 if (!insn_references_pc (insn, 0x000f0000ul))
4752 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4754 if (debug_displaced)
4755 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4756 (unsigned long) insn);
4758 dsc->modinsn[0] = insn & 0xfff0ffff;
4760 install_preload (gdbarch, regs, dsc, rn);
4766 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4767 struct regcache *regs, struct displaced_step_closure *dsc)
4769 unsigned int rn = bits (insn1, 0, 3);
4770 unsigned int u_bit = bit (insn1, 7);
4771 int imm12 = bits (insn2, 0, 11);
4774 if (rn != ARM_PC_REGNUM)
4775 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4777 /* The PC is only allowed as the base register in PLI (immediate, literal)
4778 Encoding T3 and PLD (literal) Encoding T1. */
4779 if (debug_displaced)
4780 fprintf_unfiltered (gdb_stdlog,
4781 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4782 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4788 /* Rewrite instruction {pli/pld} PC imm12 into:
4789 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4793 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
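/* Illustrative sketch of the intended transformation (hypothetical
   address): a Thumb-2 "pld [pc, #40]" at 0x2000 is meant to execute out
   of line as "pld [r0, r1]", with r0 holding the PC value the original
   insn would have read (0x2004 here) and r1 holding the offset (+40), so
   the same address, 0x202c, is preloaded.  */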
4795 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4796 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4798 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4800 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4801 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4802 dsc->u.preload.immed = 0;
4804 /* {pli/pld} [r0, r1] */
4805 dsc->modinsn[0] = insn1 & 0xfff0;
4806 dsc->modinsn[1] = 0xf001;
4809 dsc->cleanup = &cleanup_preload;
4813 /* Preload instructions with register offset. */
4816 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4817 struct displaced_step_closure *dsc, unsigned int rn,
4820 ULONGEST rn_val, rm_val;
4822 /* Preload register-offset instructions:
4824 {pli/pld} [rn, rm {, shift}]
4826 {pli/pld} [r0, r1 {, shift}]. */
4828 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4829 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4830 rn_val = displaced_read_reg (regs, dsc, rn);
4831 rm_val = displaced_read_reg (regs, dsc, rm);
4832 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4833 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4834 dsc->u.preload.immed = 0;
4836 dsc->cleanup = &cleanup_preload;
4840 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4841 struct regcache *regs,
4842 struct displaced_step_closure *dsc)
4844 unsigned int rn = bits (insn, 16, 19);
4845 unsigned int rm = bits (insn, 0, 3);
4848 if (!insn_references_pc (insn, 0x000f000ful))
4849 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4851 if (debug_displaced)
4852 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4853 (unsigned long) insn);
4855 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4857 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4861 /* Copy/cleanup coprocessor load and store instructions. */
4864 cleanup_copro_load_store (struct gdbarch *gdbarch,
4865 struct regcache *regs,
4866 struct displaced_step_closure *dsc)
4868 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4870 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4872 if (dsc->u.ldst.writeback)
4873 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4877 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4878 struct displaced_step_closure *dsc,
4879 int writeback, unsigned int rn)
4883 /* Coprocessor load/store instructions:
4885 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4887 {stc/stc2} [r0, #+/-imm].
4889 ldc/ldc2 are handled identically. */
4891 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4892 rn_val = displaced_read_reg (regs, dsc, rn);
4893 /* PC should be 4-byte aligned. */
4894 rn_val = rn_val & 0xfffffffc;
4895 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4897 dsc->u.ldst.writeback = writeback;
4898 dsc->u.ldst.rn = rn;
4900 dsc->cleanup = &cleanup_copro_load_store;
4904 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4905 struct regcache *regs,
4906 struct displaced_step_closure *dsc)
4908 unsigned int rn = bits (insn, 16, 19);
4910 if (!insn_references_pc (insn, 0x000f0000ul))
4911 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4913 if (debug_displaced)
4914 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4915 "load/store insn %.8lx\n", (unsigned long) insn);
4917 dsc->modinsn[0] = insn & 0xfff0ffff;
4919 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4925 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4926 uint16_t insn2, struct regcache *regs,
4927 struct displaced_step_closure *dsc)
4929 unsigned int rn = bits (insn1, 0, 3);
4931 if (rn != ARM_PC_REGNUM)
4932 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4933 "copro load/store", dsc);
4935 if (debug_displaced)
4936 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4937 "load/store insn %.4x%.4x\n", insn1, insn2);
4939 dsc->modinsn[0] = insn1 & 0xfff0;
4940 dsc->modinsn[1] = insn2;
4943 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4944 do not support writeback, so pass 0 for the writeback argument. */
4945 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4950 /* Clean up branch instructions (actually perform the branch, by setting
4954 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4955 struct displaced_step_closure *dsc)
4957 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4958 int branch_taken = condition_true (dsc->u.branch.cond, status);
4959 enum pc_write_style write_pc = dsc->u.branch.exchange
4960 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4965 if (dsc->u.branch.link)
4967 /* The value of LR should be the address of the next insn after the current
4968 one. To avoid confusing the logic that later handles the `bx lr' insn,
4969 set bit 0 of the LR value to 1 if the current insn is in Thumb mode. */
4970 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4973 next_insn_addr |= 0x1;
4975 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4979 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4982 /* Copy B/BL/BLX instructions with immediate destinations. */
4985 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4986 struct displaced_step_closure *dsc,
4987 unsigned int cond, int exchange, int link, long offset)
4989 /* Implement "BL<cond> <label>" as:
4991 Preparation: cond <- instruction condition
4992 Insn: mov r0, r0 (nop)
4993 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4995 B<cond> similar, but don't set r14 in cleanup. */
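/* Worked example (illustrative, hypothetical address): for an ARM-state
   BL at 0x8000 whose encoded immediate yields OFFSET = 0x100, the
   destination recorded below is 0x8000 + 8 + 0x100 = 0x8108; if the
   condition passes, cleanup_branch sets r14 to 0x8004 (the following
   insn) and then writes 0x8108 to the PC.  */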
4997 dsc->u.branch.cond = cond;
4998 dsc->u.branch.link = link;
4999 dsc->u.branch.exchange = exchange;
5001 dsc->u.branch.dest = dsc->insn_addr;
5002 if (link && exchange)
5003 /* For BLX, the offset is computed from Align (PC, 4). */
5004 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
5007 dsc->u.branch.dest += 4 + offset;
5009 dsc->u.branch.dest += 8 + offset;
5011 dsc->cleanup = &cleanup_branch;
5014 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
5015 struct regcache *regs, struct displaced_step_closure *dsc)
5017 unsigned int cond = bits (insn, 28, 31);
5018 int exchange = (cond == 0xf);
5019 int link = exchange || bit (insn, 24);
5022 if (debug_displaced)
5023 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
5024 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
5025 (unsigned long) insn);
5027 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5028 then arrange the switch into Thumb mode. */
5029 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5031 offset = bits (insn, 0, 23) << 2;
5033 if (bit (offset, 25))
5034 offset = offset | ~0x3ffffff;
5036 dsc->modinsn[0] = ARM_NOP;
5038 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5043 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5044 uint16_t insn2, struct regcache *regs,
5045 struct displaced_step_closure *dsc)
5047 int link = bit (insn2, 14);
5048 int exchange = link && !bit (insn2, 12);
5051 int j1 = bit (insn2, 13);
5052 int j2 = bit (insn2, 11);
5053 int s = sbits (insn1, 10, 10);
5054 int i1 = !(j1 ^ bit (insn1, 10));
5055 int i2 = !(j2 ^ bit (insn1, 10));
5057 if (!link && !exchange) /* B */
5059 offset = (bits (insn2, 0, 10) << 1);
5060 if (bit (insn2, 12)) /* Encoding T4 */
5062 offset |= (bits (insn1, 0, 9) << 12)
5068 else /* Encoding T3 */
5070 offset |= (bits (insn1, 0, 5) << 12)
5074 cond = bits (insn1, 6, 9);
5079 offset = (bits (insn1, 0, 9) << 12);
5080 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5081 offset |= exchange ?
5082 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5085 if (debug_displaced)
5086 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5087 "%.4x %.4x with offset %.8lx\n",
5088 link ? (exchange) ? "blx" : "bl" : "b",
5089 insn1, insn2, offset);
5091 dsc->modinsn[0] = THUMB_NOP;
5093 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5097 /* Copy B Thumb instructions. */
5099 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5100 struct displaced_step_closure *dsc)
5102 unsigned int cond = 0;
5104 unsigned short bit_12_15 = bits (insn, 12, 15);
5105 CORE_ADDR from = dsc->insn_addr;
5107 if (bit_12_15 == 0xd)
5109 /* offset = SignExtend (imm8:0, 32) */
5110 offset = sbits ((insn << 1), 0, 8);
5111 cond = bits (insn, 8, 11);
5113 else if (bit_12_15 == 0xe) /* Encoding T2 */
5115 offset = sbits ((insn << 1), 0, 11);
5119 if (debug_displaced)
5120 fprintf_unfiltered (gdb_stdlog,
5121 "displaced: copying b immediate insn %.4x "
5122 "with offset %d\n", insn, offset);
5124 dsc->u.branch.cond = cond;
5125 dsc->u.branch.link = 0;
5126 dsc->u.branch.exchange = 0;
5127 dsc->u.branch.dest = from + 4 + offset;
5129 dsc->modinsn[0] = THUMB_NOP;
5131 dsc->cleanup = &cleanup_branch;
5136 /* Copy BX/BLX with register-specified destinations. */
5139 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5140 struct displaced_step_closure *dsc, int link,
5141 unsigned int cond, unsigned int rm)
5143 /* Implement "{BX,BLX}<cond> <reg>" as:
5145 Preparation: cond <- instruction condition
5146 Insn: mov r0, r0 (nop)
5147 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5149 Don't set r14 in cleanup for BX. */
5151 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5153 dsc->u.branch.cond = cond;
5154 dsc->u.branch.link = link;
5156 dsc->u.branch.exchange = 1;
5158 dsc->cleanup = &cleanup_branch;
5162 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5163 struct regcache *regs, struct displaced_step_closure *dsc)
5165 unsigned int cond = bits (insn, 28, 31);
5168 int link = bit (insn, 5);
5169 unsigned int rm = bits (insn, 0, 3);
5171 if (debug_displaced)
5172 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5173 (unsigned long) insn);
5175 dsc->modinsn[0] = ARM_NOP;
5177 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5182 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5183 struct regcache *regs,
5184 struct displaced_step_closure *dsc)
5186 int link = bit (insn, 7);
5187 unsigned int rm = bits (insn, 3, 6);
5189 if (debug_displaced)
5190 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5191 (unsigned short) insn);
5193 dsc->modinsn[0] = THUMB_NOP;
5195 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5201 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5204 cleanup_alu_imm (struct gdbarch *gdbarch,
5205 struct regcache *regs, struct displaced_step_closure *dsc)
5207 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5208 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5209 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5210 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5214 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5215 struct displaced_step_closure *dsc)
5217 unsigned int rn = bits (insn, 16, 19);
5218 unsigned int rd = bits (insn, 12, 15);
5219 unsigned int op = bits (insn, 21, 24);
5220 int is_mov = (op == 0xd);
5221 ULONGEST rd_val, rn_val;
5223 if (!insn_references_pc (insn, 0x000ff000ul))
5224 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5226 if (debug_displaced)
5227 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5228 "%.8lx\n", is_mov ? "move" : "ALU",
5229 (unsigned long) insn);
5231 /* Instruction is of form:
5233 <op><cond> rd, [rn,] #imm
5237 Preparation: tmp1, tmp2 <- r0, r1;
5239 Insn: <op><cond> r0, r1, #imm
5240 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5243 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5244 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5245 rn_val = displaced_read_reg (regs, dsc, rn);
5246 rd_val = displaced_read_reg (regs, dsc, rd);
5247 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5248 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5252 dsc->modinsn[0] = insn & 0xfff00fff;
5254 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5256 dsc->cleanup = &cleanup_alu_imm;
5262 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5263 uint16_t insn2, struct regcache *regs,
5264 struct displaced_step_closure *dsc)
5266 unsigned int op = bits (insn1, 5, 8);
5267 unsigned int rn, rm, rd;
5268 ULONGEST rd_val, rn_val;
5270 rn = bits (insn1, 0, 3); /* Rn */
5271 rm = bits (insn2, 0, 3); /* Rm */
5272 rd = bits (insn2, 8, 11); /* Rd */
5274 /* This routine is only called for the MOV instruction. */
5275 gdb_assert (op == 0x2 && rn == 0xf);
5277 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5278 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5280 if (debug_displaced)
5281 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5282 "ALU", insn1, insn2);
5284 /* Instruction is of form:
5286 <op><cond> rd, [rn,] #imm
5290 Preparation: tmp1, tmp2 <- r0, r1;
5292 Insn: <op><cond> r0, r1, #imm
5293 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5296 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5297 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5298 rn_val = displaced_read_reg (regs, dsc, rn);
5299 rd_val = displaced_read_reg (regs, dsc, rd);
5300 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5301 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5304 dsc->modinsn[0] = insn1;
5305 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5308 dsc->cleanup = &cleanup_alu_imm;
5313 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5316 cleanup_alu_reg (struct gdbarch *gdbarch,
5317 struct regcache *regs, struct displaced_step_closure *dsc)
5322 rd_val = displaced_read_reg (regs, dsc, 0);
5324 for (i = 0; i < 3; i++)
5325 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5327 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5331 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5332 struct displaced_step_closure *dsc,
5333 unsigned int rd, unsigned int rn, unsigned int rm)
5335 ULONGEST rd_val, rn_val, rm_val;
5337 /* Instruction is of form:
5339 <op><cond> rd, [rn,] rm [, <shift>]
5343 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5344 r0, r1, r2 <- rd, rn, rm
5345 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5346 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5349 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5350 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5351 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5352 rd_val = displaced_read_reg (regs, dsc, rd);
5353 rn_val = displaced_read_reg (regs, dsc, rn);
5354 rm_val = displaced_read_reg (regs, dsc, rm);
5355 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5356 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5357 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5360 dsc->cleanup = &cleanup_alu_reg;
5364 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5365 struct displaced_step_closure *dsc)
5367 unsigned int op = bits (insn, 21, 24);
5368 int is_mov = (op == 0xd);
5370 if (!insn_references_pc (insn, 0x000ff00ful))
5371 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5373 if (debug_displaced)
5374 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5375 is_mov ? "move" : "ALU", (unsigned long) insn);
5378 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5380 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5382 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5388 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5389 struct regcache *regs,
5390 struct displaced_step_closure *dsc)
5394 rm = bits (insn, 3, 6);
5395 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5397 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5398 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5400 if (debug_displaced)
5401 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5402 (unsigned short) insn);
5404 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5406 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5411 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5414 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5415 struct regcache *regs,
5416 struct displaced_step_closure *dsc)
5418 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5421 for (i = 0; i < 4; i++)
5422 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5424 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5428 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5429 struct displaced_step_closure *dsc,
5430 unsigned int rd, unsigned int rn, unsigned int rm,
5434 ULONGEST rd_val, rn_val, rm_val, rs_val;
5436 /* Instruction is of form:
5438 <op><cond> rd, [rn,] rm, <shift> rs
5442 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5443 r0, r1, r2, r3 <- rd, rn, rm, rs
5444 Insn: <op><cond> r0, r1, r2, <shift> r3
5446 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5450 for (i = 0; i < 4; i++)
5451 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5453 rd_val = displaced_read_reg (regs, dsc, rd);
5454 rn_val = displaced_read_reg (regs, dsc, rn);
5455 rm_val = displaced_read_reg (regs, dsc, rm);
5456 rs_val = displaced_read_reg (regs, dsc, rs);
5457 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5458 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5459 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5460 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5462 dsc->cleanup = &cleanup_alu_shifted_reg;
5466 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5467 struct regcache *regs,
5468 struct displaced_step_closure *dsc)
5470 unsigned int op = bits (insn, 21, 24);
5471 int is_mov = (op == 0xd);
5472 unsigned int rd, rn, rm, rs;
5474 if (!insn_references_pc (insn, 0x000fff0ful))
5475 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5477 if (debug_displaced)
5478 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5479 "%.8lx\n", is_mov ? "move" : "ALU",
5480 (unsigned long) insn);
5482 rn = bits (insn, 16, 19);
5483 rm = bits (insn, 0, 3);
5484 rs = bits (insn, 8, 11);
5485 rd = bits (insn, 12, 15);
5488 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5490 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5492 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5497 /* Clean up load instructions. */
5500 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5501 struct displaced_step_closure *dsc)
5503 ULONGEST rt_val, rt_val2 = 0, rn_val;
5505 rt_val = displaced_read_reg (regs, dsc, 0);
5506 if (dsc->u.ldst.xfersize == 8)
5507 rt_val2 = displaced_read_reg (regs, dsc, 1);
5508 rn_val = displaced_read_reg (regs, dsc, 2);
5510 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5511 if (dsc->u.ldst.xfersize > 4)
5512 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5513 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5514 if (!dsc->u.ldst.immed)
5515 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5517 /* Handle register writeback. */
5518 if (dsc->u.ldst.writeback)
5519 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5520 /* Put the result in the right place. */
5521 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5522 if (dsc->u.ldst.xfersize == 8)
5523 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5526 /* Clean up store instructions. */
5529 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5530 struct displaced_step_closure *dsc)
5532 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5534 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5535 if (dsc->u.ldst.xfersize > 4)
5536 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5537 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5538 if (!dsc->u.ldst.immed)
5539 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5540 if (!dsc->u.ldst.restore_r4)
5541 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5544 if (dsc->u.ldst.writeback)
5545 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5548 /* Copy "extra" load/store instructions. These are halfword/doubleword
5549 transfers, which have a different encoding to byte/word transfers. */
5552 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5553 struct regcache *regs, struct displaced_step_closure *dsc)
5555 unsigned int op1 = bits (insn, 20, 24);
5556 unsigned int op2 = bits (insn, 5, 6);
5557 unsigned int rt = bits (insn, 12, 15);
5558 unsigned int rn = bits (insn, 16, 19);
5559 unsigned int rm = bits (insn, 0, 3);
5560 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5561 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5562 int immed = (op1 & 0x4) != 0;
5564 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5566 if (!insn_references_pc (insn, 0x000ff00ful))
5567 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5569 if (debug_displaced)
5570 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5571 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5572 (unsigned long) insn);
5574 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5577 internal_error (__FILE__, __LINE__,
5578 _("copy_extra_ld_st: instruction decode error"));
5580 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5581 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5582 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5584 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5586 rt_val = displaced_read_reg (regs, dsc, rt);
5587 if (bytesize[opcode] == 8)
5588 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5589 rn_val = displaced_read_reg (regs, dsc, rn);
5591 rm_val = displaced_read_reg (regs, dsc, rm);
5593 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5594 if (bytesize[opcode] == 8)
5595 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5596 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5598 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5601 dsc->u.ldst.xfersize = bytesize[opcode];
5602 dsc->u.ldst.rn = rn;
5603 dsc->u.ldst.immed = immed;
5604 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5605 dsc->u.ldst.restore_r4 = 0;
5608 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5610 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5611 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5613 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5615 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5616 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5618 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5623 /* Copy byte/half word/word loads and stores. */
5626 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5627 struct displaced_step_closure *dsc, int load,
5628 int immed, int writeback, int size, int usermode,
5629 int rt, int rm, int rn)
5631 ULONGEST rt_val, rn_val, rm_val = 0;
5633 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5634 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5636 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5638 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5640 rt_val = displaced_read_reg (regs, dsc, rt);
5641 rn_val = displaced_read_reg (regs, dsc, rn);
5643 rm_val = displaced_read_reg (regs, dsc, rm);
5645 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5646 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5648 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5650 dsc->u.ldst.xfersize = size;
5651 dsc->u.ldst.rn = rn;
5652 dsc->u.ldst.immed = immed;
5653 dsc->u.ldst.writeback = writeback;
5655 /* To write PC we can do:
5657 Before this sequence of instructions:
5658 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5659 r2 is the Rn value obtained from displaced_read_reg.
5661 Insn1: push {pc} Write address of STR instruction + offset on stack
5662 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5663 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5664 = addr(Insn1) + offset - addr(Insn3) - 8
5666 Insn4: add r4, r4, #8 r4 = offset - 8
5667 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5669 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5671 Otherwise we don't know what value to write for PC, since the offset is
5672 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5673 of this can be found in Section "Saving from r15" in
5674 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
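/* Worked example (illustrative, hypothetical numbers): if the original
   store of the PC is at FROM = 0x9000 and this core stores PC+12
   (offset = 12), then r0 starts as 0x9008, the sequence computes
   r4 = offset - 8 = 4, and Insn5 leaves r0 = 0x9008 + 4 = 0x900c, which
   is exactly the value the unmodified store at 0x9000 would have
   written.  */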
5676 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5681 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5682 uint16_t insn2, struct regcache *regs,
5683 struct displaced_step_closure *dsc, int size)
5685 unsigned int u_bit = bit (insn1, 7);
5686 unsigned int rt = bits (insn2, 12, 15);
5687 int imm12 = bits (insn2, 0, 11);
5690 if (debug_displaced)
5691 fprintf_unfiltered (gdb_stdlog,
5692 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5693 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5699 /* Rewrite instruction LDR Rt imm12 into:
5701 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5705 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5708 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5709 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5710 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5712 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5714 pc_val = pc_val & 0xfffffffc;
5716 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5717 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5721 dsc->u.ldst.xfersize = size;
5722 dsc->u.ldst.immed = 0;
5723 dsc->u.ldst.writeback = 0;
5724 dsc->u.ldst.restore_r4 = 0;
5726 /* LDR R0, R2, R3 */
5727 dsc->modinsn[0] = 0xf852;
5728 dsc->modinsn[1] = 0x3;
5731 dsc->cleanup = &cleanup_load;
5737 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5738 uint16_t insn2, struct regcache *regs,
5739 struct displaced_step_closure *dsc,
5740 int writeback, int immed)
5742 unsigned int rt = bits (insn2, 12, 15);
5743 unsigned int rn = bits (insn1, 0, 3);
5744 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5745 /* In LDR (register), there is also a register Rm, which is not allowed to
5746 be PC, so we don't have to check it. */
5748 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5749 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5752 if (debug_displaced)
5753 fprintf_unfiltered (gdb_stdlog,
5754 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5755 rt, rn, insn1, insn2);
5757 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5760 dsc->u.ldst.restore_r4 = 0;
5763 /* ldr[b]<cond> rt, [rn, #imm], etc.
5765 ldr[b]<cond> r0, [r2, #imm]. */
5767 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5768 dsc->modinsn[1] = insn2 & 0x0fff;
5771 /* ldr[b]<cond> rt, [rn, rm], etc.
5773 ldr[b]<cond> r0, [r2, r3]. */
5775 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5776 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5786 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5787 struct regcache *regs,
5788 struct displaced_step_closure *dsc,
5789 int load, int size, int usermode)
5791 int immed = !bit (insn, 25);
5792 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5793 unsigned int rt = bits (insn, 12, 15);
5794 unsigned int rn = bits (insn, 16, 19);
5795 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5797 if (!insn_references_pc (insn, 0x000ff00ful))
5798 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5800 if (debug_displaced)
5801 fprintf_unfiltered (gdb_stdlog,
5802 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5803 load ? (size == 1 ? "ldrb" : "ldr")
5804 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5806 (unsigned long) insn);
5808 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5809 usermode, rt, rm, rn);
5811 if (load || rt != ARM_PC_REGNUM)
5813 dsc->u.ldst.restore_r4 = 0;
5816 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5818 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5819 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5821 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5823 {ldr,str}[b]<cond> r0, [r2, r3]. */
5824 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5828 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5829 dsc->u.ldst.restore_r4 = 1;
5830 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5831 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5832 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5833 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5834 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5838 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5840 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5845 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5850 /* Clean up LDM instructions with a fully-populated register list. This is an
5851 unfortunate corner case: it's impossible to implement correctly by modifying
5852 the instruction. The issue is as follows: we have an instruction,
5856 which we must rewrite to avoid loading PC. A possible solution would be to
5857 do the load in two halves, something like (with suitable cleanup
5861 ldm[id][ab] r8!, {r0-r7}
5863 ldm[id][ab] r8, {r7-r14}
5866 but at present there's no suitable place for <temp>, since the scratch space
5867 is overwritten before the cleanup routine is called. For now, we simply
5868 emulate the instruction. */
5871 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5872 struct displaced_step_closure *dsc)
5874 int inc = dsc->u.block.increment;
5875 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5876 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5877 uint32_t regmask = dsc->u.block.regmask;
5878 int regno = inc ? 0 : 15;
5879 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5880 int exception_return = dsc->u.block.load && dsc->u.block.user
5881 && (regmask & 0x8000) != 0;
5882 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5883 int do_transfer = condition_true (dsc->u.block.cond, status);
5884 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5889 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5890 sensible we can do here. Complain loudly. */
5891 if (exception_return)
5892 error (_("Cannot single-step exception return"));
5894 /* We don't handle any stores here for now. */
5895 gdb_assert (dsc->u.block.load != 0);
5897 if (debug_displaced)
5898 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5899 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5900 dsc->u.block.increment ? "inc" : "dec",
5901 dsc->u.block.before ? "before" : "after");
5908 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5911 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5914 xfer_addr += bump_before;
5916 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5917 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5919 xfer_addr += bump_after;
5921 regmask &= ~(1 << regno);
5924 if (dsc->u.block.writeback)
5925 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5929 /* Clean up an STM which included the PC in the register list. */
5932 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5933 struct displaced_step_closure *dsc)
5935 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5936 int store_executed = condition_true (dsc->u.block.cond, status);
5937 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5938 CORE_ADDR stm_insn_addr;
5941 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5943 /* If condition code fails, there's nothing else to do. */
5944 if (!store_executed)
5947 if (dsc->u.block.increment)
5949 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5951 if (dsc->u.block.before)
5956 pc_stored_at = dsc->u.block.xfer_addr;
5958 if (dsc->u.block.before)
5962 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5963 stm_insn_addr = dsc->scratch_base;
5964 offset = pc_val - stm_insn_addr;
5966 if (debug_displaced)
5967 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5968 "STM instruction\n", offset);
5970 /* Rewrite the stored PC to the proper value for the non-displaced original
5972 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5973 dsc->insn_addr + offset);
5976 /* Clean up an LDM which includes the PC in the register list. We clumped all
5977 the registers in the transferred list into a contiguous range r0...rX (to
5978 avoid loading PC directly and losing control of the debugged program), so we
5979 must undo that here. */
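/* Illustrative example (hypothetical instruction): if the original insn
   was "ldm r6, {r4, r5, pc}", the displaced copy loaded the three values
   into r0, r1 and r2; the cleanup below walks the destinations downwards
   from the PC, moving r2 -> pc, r1 -> r5 and r0 -> r4, and finally
   restores any of r0-r2 that served only as temporaries.  */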
5982 cleanup_block_load_pc (struct gdbarch *gdbarch,
5983 struct regcache *regs,
5984 struct displaced_step_closure *dsc)
5986 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5987 int load_executed = condition_true (dsc->u.block.cond, status);
5988 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5989 unsigned int regs_loaded = bitcount (mask);
5990 unsigned int num_to_shuffle = regs_loaded, clobbered;
5992 /* The method employed here will fail if the register list is fully populated
5993 (we need to avoid loading PC directly). */
5994 gdb_assert (num_to_shuffle < 16);
5999 clobbered = (1 << num_to_shuffle) - 1;
6001 while (num_to_shuffle > 0)
6003 if ((mask & (1 << write_reg)) != 0)
6005 unsigned int read_reg = num_to_shuffle - 1;
6007 if (read_reg != write_reg)
6009 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
6010 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
6011 if (debug_displaced)
6012 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
6013 "loaded register r%d to r%d\n"), read_reg,
6016 else if (debug_displaced)
6017 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
6018 "r%d already in the right place\n"),
6021 clobbered &= ~(1 << write_reg);
6029 /* Restore any registers we scribbled over. */
6030 for (write_reg = 0; clobbered != 0; write_reg++)
6032 if ((clobbered & (1 << write_reg)) != 0)
6034 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6036 if (debug_displaced)
6037 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6038 "clobbered register r%d\n"), write_reg);
6039 clobbered &= ~(1 << write_reg);
6043 /* Perform register writeback manually. */
6044 if (dsc->u.block.writeback)
6046 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6048 if (dsc->u.block.increment)
6049 new_rn_val += regs_loaded * 4;
6051 new_rn_val -= regs_loaded * 4;
6053 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6058 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6059 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6062 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6063 struct regcache *regs,
6064 struct displaced_step_closure *dsc)
6066 int load = bit (insn, 20);
6067 int user = bit (insn, 22);
6068 int increment = bit (insn, 23);
6069 int before = bit (insn, 24);
6070 int writeback = bit (insn, 21);
6071 int rn = bits (insn, 16, 19);
6073 /* Block transfers which don't mention PC can be run directly
6075 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6076 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6078 if (rn == ARM_PC_REGNUM)
6080 warning (_("displaced: Unpredictable LDM or STM with "
6081 "base register r15"));
6082 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6085 if (debug_displaced)
6086 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6087 "%.8lx\n", (unsigned long) insn);
6089 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6090 dsc->u.block.rn = rn;
6092 dsc->u.block.load = load;
6093 dsc->u.block.user = user;
6094 dsc->u.block.increment = increment;
6095 dsc->u.block.before = before;
6096 dsc->u.block.writeback = writeback;
6097 dsc->u.block.cond = bits (insn, 28, 31);
6099 dsc->u.block.regmask = insn & 0xffff;
6103 if ((insn & 0xffff) == 0xffff)
6105 /* LDM with a fully-populated register list. This case is
6106 particularly tricky. Implement for now by fully emulating the
6107 instruction (which might not behave perfectly in all cases, but
6108 these instructions should be rare enough for that not to matter
6110 dsc->modinsn[0] = ARM_NOP;
6112 dsc->cleanup = &cleanup_block_load_all;
6116 /* LDM of a list of registers which includes PC. Implement by
6117 rewriting the list of registers to be transferred into a
6118 contiguous chunk r0...rX before doing the transfer, then shuffling
6119 registers into the correct places in the cleanup routine. */
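/* Illustrative example (hypothetical instruction): "ldm r6!, {r1, r2, pc}"
   has a register mask of 0x8006 with three registers in the list, so the
   copy executed out of line becomes "ldm r6, {r0, r1, r2}" (new_regmask
   == 0x0007, writeback turned off); cleanup_block_load_pc then shuffles
   the loaded values into r1, r2 and the PC and performs the writeback
   manually.  */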
6120 unsigned int regmask = insn & 0xffff;
6121 unsigned int num_in_list = bitcount (regmask), new_regmask;
6124 for (i = 0; i < num_in_list; i++)
6125 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6127 /* Writeback makes things complicated. We need to avoid clobbering
6128 the base register with one of the registers in our modified
6129 register list, but just using a different register can't work in
6132 ldm r14!, {r0-r13,pc}
6134 which would need to be rewritten as:
6138 but that can't work, because there's no free register for N.
6140 Solve this by turning off the writeback bit, and emulating
6141 writeback manually in the cleanup routine. */
6146 new_regmask = (1 << num_in_list) - 1;
6148 if (debug_displaced)
6149 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6150 "{..., pc}: original reg list %.4x, modified "
6151 "list %.4x\n"), rn, writeback ? "!" : "",
6152 (int) insn & 0xffff, new_regmask);
6154 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6156 dsc->cleanup = &cleanup_block_load_pc;
6161 /* STM of a list of registers which includes PC. Run the instruction
6162 as-is, but out of line: this will store the wrong value for the PC,
6163 so we must manually fix up the memory in the cleanup routine.
6164 Doing things this way has the advantage that we can auto-detect
6165 the offset of the PC write (which is architecture-dependent) in
6166 the cleanup routine. */
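/* Illustrative example (hypothetical addresses): if the scratch copy of
   the STM runs at 0x2f000 and the word it stored for the PC later reads
   back as 0x2f00c, cleanup_block_store_pc computes an offset of 12 and
   rewrites that word to dsc->insn_addr + 12, i.e. the value the STM would
   have stored at its original location.  */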
6167 dsc->modinsn[0] = insn;
6169 dsc->cleanup = &cleanup_block_store_pc;
6176 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6177 struct regcache *regs,
6178 struct displaced_step_closure *dsc)
6180 int rn = bits (insn1, 0, 3);
6181 int load = bit (insn1, 4);
6182 int writeback = bit (insn1, 5);
6184 /* Block transfers which don't mention PC can be run directly
6186 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6187 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6189 if (rn == ARM_PC_REGNUM)
6191 warning (_("displaced: Unpredictable LDM or STM with "
6192 "base register r15"));
6193 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6194 "unpredictable ldm/stm", dsc);
6197 if (debug_displaced)
6198 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6199 "%.4x%.4x\n", insn1, insn2);
6201 /* Clear bit 13, since it should always be zero. */
6202 dsc->u.block.regmask = (insn2 & 0xdfff);
6203 dsc->u.block.rn = rn;
6205 dsc->u.block.load = load;
6206 dsc->u.block.user = 0;
6207 dsc->u.block.increment = bit (insn1, 7);
6208 dsc->u.block.before = bit (insn1, 8);
6209 dsc->u.block.writeback = writeback;
6210 dsc->u.block.cond = INST_AL;
6211 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6215 if (dsc->u.block.regmask == 0xffff)
6217 /* This cannot happen: bit 13 was cleared above, so the mask is never 0xffff. */
6222 unsigned int regmask = dsc->u.block.regmask;
6223 unsigned int num_in_list = bitcount (regmask), new_regmask;
6226 for (i = 0; i < num_in_list; i++)
6227 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6232 new_regmask = (1 << num_in_list) - 1;
6234 if (debug_displaced)
6235 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6236 "{..., pc}: original reg list %.4x, modified "
6237 "list %.4x\n"), rn, writeback ? "!" : "",
6238 (int) dsc->u.block.regmask, new_regmask);
6240 dsc->modinsn[0] = insn1;
6241 dsc->modinsn[1] = (new_regmask & 0xffff);
6244 dsc->cleanup = &cleanup_block_load_pc;
6249 dsc->modinsn[0] = insn1;
6250 dsc->modinsn[1] = insn2;
6252 dsc->cleanup = &cleanup_block_store_pc;
6257 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6258 This is used to avoid a dependency on BFD's bfd_endian enum. */
6261 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6264 return read_memory_unsigned_integer (memaddr, len,
6265 (enum bfd_endian) byte_order);
6268 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6271 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6274 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6277 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6280 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6285 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6288 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6290 return arm_is_thumb (self->regcache);
6293 /* single_step() is called just before we want to resume the inferior,
6294 if we want to single-step it but there is no hardware or kernel
6295 single-step support. We find the targets of the coming instructions
6296 and set breakpoints on them. */
6299 arm_software_single_step (struct regcache *regcache)
6301 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6302 struct arm_get_next_pcs next_pcs_ctx;
6305 VEC (CORE_ADDR) *next_pcs = NULL;
6306 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6308 arm_get_next_pcs_ctor (&next_pcs_ctx,
6309 &arm_get_next_pcs_ops,
6310 gdbarch_byte_order (gdbarch),
6311 gdbarch_byte_order_for_code (gdbarch),
6315 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6317 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6319 pc = gdbarch_addr_bits_remove (gdbarch, pc);
6320 VEC_replace (CORE_ADDR, next_pcs, i, pc);
6323 discard_cleanups (old_chain);
6328 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6329 for Linux, where some SVC instructions must be treated specially. */
6332 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6333 struct displaced_step_closure *dsc)
6335 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6337 if (debug_displaced)
6338 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6339 "%.8lx\n", (unsigned long) resume_addr);
6341 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6345 /* Common copy routine for SVC instructions. */
6348 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6349 struct displaced_step_closure *dsc)
6351 /* Preparation: none.
6352 Insn: unmodified svc.
6353 Cleanup: pc <- insn_addr + insn_size. */
6355 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6357 dsc->wrote_to_pc = 1;
6359 /* Allow OS-specific code to override SVC handling. */
6360 if (dsc->u.svc.copy_svc_os)
6361 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6364 dsc->cleanup = &cleanup_svc;
6370 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6371 struct regcache *regs, struct displaced_step_closure *dsc)
6374 if (debug_displaced)
6375 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6376 (unsigned long) insn);
6378 dsc->modinsn[0] = insn;
6380 return install_svc (gdbarch, regs, dsc);
6384 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6385 struct regcache *regs, struct displaced_step_closure *dsc)
6388 if (debug_displaced)
6389 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6392 dsc->modinsn[0] = insn;
6394 return install_svc (gdbarch, regs, dsc);
6397 /* Copy undefined instructions. */
6400 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6401 struct displaced_step_closure *dsc)
6403 if (debug_displaced)
6404 fprintf_unfiltered (gdb_stdlog,
6405 "displaced: copying undefined insn %.8lx\n",
6406 (unsigned long) insn);
6408 dsc->modinsn[0] = insn;
6414 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6415 struct displaced_step_closure *dsc)
6418 if (debug_displaced)
6419 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6420 "%.4x %.4x\n", (unsigned short) insn1,
6421 (unsigned short) insn2);
6423 dsc->modinsn[0] = insn1;
6424 dsc->modinsn[1] = insn2;
6430 /* Copy unpredictable instructions. */
6433 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6434 struct displaced_step_closure *dsc)
6436 if (debug_displaced)
6437 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6438 "%.8lx\n", (unsigned long) insn);
6440 dsc->modinsn[0] = insn;
6445 /* The decode_* functions are instruction decoding helpers. They mostly follow
6446 the presentation in the ARM ARM. */
6449 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6450 struct regcache *regs,
6451 struct displaced_step_closure *dsc)
6453 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6454 unsigned int rn = bits (insn, 16, 19);
6456 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6457 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6458 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6459 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6460 else if ((op1 & 0x60) == 0x20)
6461 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6462 else if ((op1 & 0x71) == 0x40)
6463 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6465 else if ((op1 & 0x77) == 0x41)
6466 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6467 else if ((op1 & 0x77) == 0x45)
6468 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6469 else if ((op1 & 0x77) == 0x51)
6472 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6474 return arm_copy_unpred (gdbarch, insn, dsc);
6476 else if ((op1 & 0x77) == 0x55)
6477 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6478 else if (op1 == 0x57)
6481 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6482 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6483 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6484 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6485 default: return arm_copy_unpred (gdbarch, insn, dsc);
6487 else if ((op1 & 0x63) == 0x43)
6488 return arm_copy_unpred (gdbarch, insn, dsc);
6489 else if ((op2 & 0x1) == 0x0)
6490 switch (op1 & ~0x80)
6493 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6495 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6496 case 0x71: case 0x75:
6498 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6499 case 0x63: case 0x67: case 0x73: case 0x77:
6500 return arm_copy_unpred (gdbarch, insn, dsc);
6502 return arm_copy_undef (gdbarch, insn, dsc);
6505 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6509 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6510 struct regcache *regs,
6511 struct displaced_step_closure *dsc)
6513 if (bit (insn, 27) == 0)
6514 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6515 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
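/* That is, the selector value is insn[26:24] in bits 3..1 and insn[20]
   in bit 0. */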
6516 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6519 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6522 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6524 case 0x4: case 0x5: case 0x6: case 0x7:
6525 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6528 switch ((insn & 0xe00000) >> 21)
6530 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6532 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6535 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6538 return arm_copy_undef (gdbarch, insn, dsc);
6543 int rn_f = (bits (insn, 16, 19) == 0xf);
6544 switch ((insn & 0xe00000) >> 21)
6547 /* ldc/ldc2 imm (undefined for rn == pc). */
6548 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6549 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6552 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6554 case 0x4: case 0x5: case 0x6: case 0x7:
6555 /* ldc/ldc2 lit (undefined for rn != pc). */
6556 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6557 : arm_copy_undef (gdbarch, insn, dsc);
6560 return arm_copy_undef (gdbarch, insn, dsc);
6565 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6568 if (bits (insn, 16, 19) == 0xf)
6570 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6572 return arm_copy_undef (gdbarch, insn, dsc);
6576 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6578 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6582 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6584 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6587 return arm_copy_undef (gdbarch, insn, dsc);
6591 /* Decode miscellaneous instructions in dp/misc encoding space. */
6594 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6595 struct regcache *regs,
6596 struct displaced_step_closure *dsc)
6598 unsigned int op2 = bits (insn, 4, 6);
6599 unsigned int op = bits (insn, 21, 22);
6604 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6607 if (op == 0x1) /* bx. */
6608 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6610 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6612 return arm_copy_undef (gdbarch, insn, dsc);
6616 /* Not really supported. */
6617 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6619 return arm_copy_undef (gdbarch, insn, dsc);
6623 return arm_copy_bx_blx_reg (gdbarch, insn,
6624 regs, dsc); /* blx register. */
6626 return arm_copy_undef (gdbarch, insn, dsc);
6629 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6633 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6635 /* Not really supported. */
6636 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6639 return arm_copy_undef (gdbarch, insn, dsc);
6644 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6645 struct regcache *regs,
6646 struct displaced_step_closure *dsc)
6649 switch (bits (insn, 20, 24))
6652 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6655 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6657 case 0x12: case 0x16:
6658 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6661 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6665 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6667 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6668 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6669 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6670 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6671 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6672 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6673 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6674 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6675 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6676 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6677 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6678 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6679 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6680 /* 2nd arg means "unprivileged". */
6681 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6685 /* Should be unreachable. */
6690 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6691 struct regcache *regs,
6692 struct displaced_step_closure *dsc)
6694 int a = bit (insn, 25), b = bit (insn, 4);
6695 uint32_t op1 = bits (insn, 20, 24);
6697 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6698 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6699 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6700 else if ((!a && (op1 & 0x17) == 0x02)
6701 || (a && (op1 & 0x17) == 0x02 && !b))
6702 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6703 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6704 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6705 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6706 else if ((!a && (op1 & 0x17) == 0x03)
6707 || (a && (op1 & 0x17) == 0x03 && !b))
6708 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6709 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6710 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6711 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6712 else if ((!a && (op1 & 0x17) == 0x06)
6713 || (a && (op1 & 0x17) == 0x06 && !b))
6714 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6715 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6716 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6717 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6718 else if ((!a && (op1 & 0x17) == 0x07)
6719 || (a && (op1 & 0x17) == 0x07 && !b))
6720 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6722 /* Should be unreachable. */
6727 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6728 struct displaced_step_closure *dsc)
6730 switch (bits (insn, 20, 24))
6732 case 0x00: case 0x01: case 0x02: case 0x03:
6733 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6735 case 0x04: case 0x05: case 0x06: case 0x07:
6736 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6738 case 0x08: case 0x09: case 0x0a: case 0x0b:
6739 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6740 return arm_copy_unmodified (gdbarch, insn,
6741 "decode/pack/unpack/saturate/reverse", dsc);
6744 if (bits (insn, 5, 7) == 0) /* op2. */
6746 if (bits (insn, 12, 15) == 0xf)
6747 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6749 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6752 return arm_copy_undef (gdbarch, insn, dsc);
6754 case 0x1a: case 0x1b:
6755 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6756 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6758 return arm_copy_undef (gdbarch, insn, dsc);
6760 case 0x1c: case 0x1d:
6761 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6763 if (bits (insn, 0, 3) == 0xf)
6764 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6766 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6769 return arm_copy_undef (gdbarch, insn, dsc);
6771 case 0x1e: case 0x1f:
6772 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6773 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6775 return arm_copy_undef (gdbarch, insn, dsc);
6778 /* Should be unreachable. */
6783 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6784 struct regcache *regs,
6785 struct displaced_step_closure *dsc)
6788 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6790 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6794 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6795 struct regcache *regs,
6796 struct displaced_step_closure *dsc)
6798 unsigned int opcode = bits (insn, 20, 24);
6802 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6803 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6805 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6806 case 0x12: case 0x16:
6807 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6809 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6810 case 0x13: case 0x17:
6811 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6813 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6814 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6815 /* Note: no writeback for these instructions. Bit 25 will always be
6816 zero though (via caller), so the following works OK. */
6817 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6820 /* Should be unreachable. */
6824 /* Decode shifted register instructions. */
6827 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6828 uint16_t insn2, struct regcache *regs,
6829 struct displaced_step_closure *dsc)
6831 /* PC is only allowed to be used in the MOV instruction. */
6833 unsigned int op = bits (insn1, 5, 8);
6834 unsigned int rn = bits (insn1, 0, 3);
6836 if (op == 0x2 && rn == 0xf) /* MOV */
6837 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6839 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6840 "dp (shift reg)", dsc);
6844 /* Decode extension register load/store. Exactly the same as
6845 arm_decode_ext_reg_ld_st. */
6848 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6849 uint16_t insn2, struct regcache *regs,
6850 struct displaced_step_closure *dsc)
6852 unsigned int opcode = bits (insn1, 4, 8);
6856 case 0x04: case 0x05:
6857 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6858 "vfp/neon vmov", dsc);
6860 case 0x08: case 0x0c: /* 01x00 */
6861 case 0x0a: case 0x0e: /* 01x10 */
6862 case 0x12: case 0x16: /* 10x10 */
6863 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6864 "vfp/neon vstm/vpush", dsc);
6866 case 0x09: case 0x0d: /* 01x01 */
6867 case 0x0b: case 0x0f: /* 01x11 */
6868 case 0x13: case 0x17: /* 10x11 */
6869 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6870 "vfp/neon vldm/vpop", dsc);
6872 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6873 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6875 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6876 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6879 /* Should be unreachable. */
6884 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6885 struct regcache *regs, struct displaced_step_closure *dsc)
6887 unsigned int op1 = bits (insn, 20, 25);
6888 int op = bit (insn, 4);
6889 unsigned int coproc = bits (insn, 8, 11);
6891 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6892 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6893 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6894 && (coproc & 0xe) != 0xa)
6896 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6897 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6898 && (coproc & 0xe) != 0xa)
6899 /* ldc/ldc2 imm/lit. */
6900 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6901 else if ((op1 & 0x3e) == 0x00)
6902 return arm_copy_undef (gdbarch, insn, dsc);
6903 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6904 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6905 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6906 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6907 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6908 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6909 else if ((op1 & 0x30) == 0x20 && !op)
6911 if ((coproc & 0xe) == 0xa)
6912 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6914 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6916 else if ((op1 & 0x30) == 0x20 && op)
6917 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6918 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6919 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6920 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6921 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6922 else if ((op1 & 0x30) == 0x30)
6923 return arm_copy_svc (gdbarch, insn, regs, dsc);
6925 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6929 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6930 uint16_t insn2, struct regcache *regs,
6931 struct displaced_step_closure *dsc)
6933 unsigned int coproc = bits (insn2, 8, 11);
6934 unsigned int bit_5_8 = bits (insn1, 5, 8);
6935 unsigned int bit_9 = bit (insn1, 9);
6936 unsigned int bit_4 = bit (insn1, 4);
6941 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6942 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6944 else if (bit_5_8 == 0) /* UNDEFINED. */
6945 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6948 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6949 if ((coproc & 0xe) == 0xa)
6950 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6952 else /* coproc is not 101x. */
6954 if (bit_4 == 0) /* STC/STC2. */
6955 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6957 else /* LDC/LDC2 {literal, immediate}. */
6958 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6964 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6970 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6971 struct displaced_step_closure *dsc, int rd)
6977 Preparation: Rd <- PC
6983 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6984 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6988 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6989 struct displaced_step_closure *dsc,
6990 int rd, unsigned int imm)
6993 /* Encoding T2: ADDS Rd, #imm */
6994 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6996 install_pc_relative (gdbarch, regs, dsc, rd);
7002 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7003 struct regcache *regs,
7004 struct displaced_step_closure *dsc)
7006 unsigned int rd = bits (insn, 8, 10);
7007 unsigned int imm8 = bits (insn, 0, 7);
7009 if (debug_displaced)
7010 fprintf_unfiltered (gdb_stdlog,
7011 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
7014 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7018 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7019 uint16_t insn2, struct regcache *regs,
7020 struct displaced_step_closure *dsc)
7022 unsigned int rd = bits (insn2, 8, 11);
7023 /* The immediate is encoded the same way in ADR, ADD and SUB, so we simply
7024 extract the raw immediate fields rather than computing the immediate's
7025 value. When generating the ADD or SUB instruction we can then OR those
7026 fields directly into the new encoding. */
7027 unsigned int imm_3_8 = insn2 & 0x70ff;
7028 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
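/* The 32-bit Thumb immediate here is i:imm3:imm8: 'i' is bit 10 of the
   first halfword, imm3 is bits 14..12 and imm8 bits 7..0 of the second.
   Both masks above keep those fields in place so they can be OR-ed
   directly into the generated encoding below. */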
7030 if (debug_displaced)
7031 fprintf_unfiltered (gdb_stdlog,
7032 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7033 rd, imm_i, imm_3_8, insn1, insn2);
7035 if (bit (insn1, 7)) /* Encoding T2 */
7037 /* Encoding T3: SUB Rd, Rd, #imm */
7038 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7039 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7041 else /* Encoding T3 */
7043 /* Encoding T3: ADD Rd, Rd, #imm */
7044 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7045 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7049 install_pc_relative (gdbarch, regs, dsc, rd);
7055 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7056 struct regcache *regs,
7057 struct displaced_step_closure *dsc)
7059 unsigned int rt = bits (insn1, 8, 10);
7061 int imm8 = (bits (insn1, 0, 7) << 2);
7067 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7069 Insn: LDR R0, [R2, R3];
7070 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7072 if (debug_displaced)
7073 fprintf_unfiltered (gdb_stdlog,
7074 "displaced: copying thumb ldr r%d [pc #%d]\n"
7077 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7078 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7079 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7080 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7081 /* The assembler calculates the required value of the offset from the
7082 Align(PC,4) value of this instruction to the label. */
7083 pc = pc & 0xfffffffc;
7085 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7086 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7089 dsc->u.ldst.xfersize = 4;
7091 dsc->u.ldst.immed = 0;
7092 dsc->u.ldst.writeback = 0;
7093 dsc->u.ldst.restore_r4 = 0;
7095 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3] */
7097 dsc->cleanup = &cleanup_load;
7102 /* Copy Thumb cbnz/cbz instruction. */
7105 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7106 struct regcache *regs,
7107 struct displaced_step_closure *dsc)
7109 int non_zero = bit (insn1, 11);
7110 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7111 CORE_ADDR from = dsc->insn_addr;
7112 int rn = bits (insn1, 0, 2);
7113 int rn_val = displaced_read_reg (regs, dsc, rn);
7115 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7116 /* CBNZ and CBZ do not affect the condition flags. If the condition is
7117 true, set cond to INST_AL so that cleanup_branch knows the branch is
7118 taken; otherwise leave it as-is and cleanup_branch will do nothing. */
7119 if (dsc->u.branch.cond)
7121 dsc->u.branch.cond = INST_AL;
7122 dsc->u.branch.dest = from + 4 + imm5;
7125 dsc->u.branch.dest = from + 2;
7127 dsc->u.branch.link = 0;
7128 dsc->u.branch.exchange = 0;
7130 if (debug_displaced)
7131 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7132 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7133 rn, rn_val, insn1, dsc->u.branch.dest);
7135 dsc->modinsn[0] = THUMB_NOP;
7137 dsc->cleanup = &cleanup_branch;
7141 /* Copy Table Branch Byte/Halfword */
7143 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7144 uint16_t insn2, struct regcache *regs,
7145 struct displaced_step_closure *dsc)
7147 ULONGEST rn_val, rm_val;
7148 int is_tbh = bit (insn2, 4);
7149 CORE_ADDR halfwords = 0;
7150 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7152 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7153 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7159 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7160 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7166 target_read_memory (rn_val + rm_val, buf, 1);
7167 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7170 if (debug_displaced)
7171 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7172 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7173 (unsigned int) rn_val, (unsigned int) rm_val,
7174 (unsigned int) halfwords);
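/* The branch target is the Thumb PC of the TBB/TBH (insn_addr + 4) plus
   twice the table entry; for TBB the byte-sized entry is an offset in
   halfwords as well, so it is doubled too. */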
7176 dsc->u.branch.cond = INST_AL;
7177 dsc->u.branch.link = 0;
7178 dsc->u.branch.exchange = 0;
7179 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7181 dsc->cleanup = &cleanup_branch;
7187 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7188 struct displaced_step_closure *dsc)
7191 int val = displaced_read_reg (regs, dsc, 7);
7192 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7195 val = displaced_read_reg (regs, dsc, 8);
7196 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7199 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7204 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7205 struct regcache *regs,
7206 struct displaced_step_closure *dsc)
7208 dsc->u.block.regmask = insn1 & 0x00ff;
7210 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7213 (1) register list is full, that is, r0-r7 are used.
7214 Prepare: tmp[0] <- r8
7216 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7217 MOV r8, r7; Move value of r7 to r8;
7218 POP {r7}; Store PC value into r7.
7220 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7222 (2) register list is not full, supposing there are N registers in
7223 register list (except PC, 0 <= N <= 7).
7224 Prepare: for each i, 0 - N, tmp[i] <- ri.
7226 POP {r0, r1, ...., rN};
7228 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7229 from tmp[] properly.
7231 if (debug_displaced)
7232 fprintf_unfiltered (gdb_stdlog,
7233 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7234 dsc->u.block.regmask, insn1);
7236 if (dsc->u.block.regmask == 0xff)
7238 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7240 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7241 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7242 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7245 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7249 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7251 unsigned int new_regmask;
7253 for (i = 0; i < num_in_list + 1; i++)
7254 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7256 new_regmask = (1 << (num_in_list + 1)) - 1;
7258 if (debug_displaced)
7259 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7260 "{..., pc}: original reg list %.4x,"
7261 " modified list %.4x\n"),
7262 (int) dsc->u.block.regmask, new_regmask);
7264 dsc->u.block.regmask |= 0x8000;
7265 dsc->u.block.writeback = 0;
7266 dsc->u.block.cond = INST_AL;
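/* Rewrite the POP encoding: clear the PC bit (bit 8) and the original
   register list, and substitute the contiguous list r0 .. rN, which
   receives the popped values including the word destined for the PC. */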
7268 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7270 dsc->cleanup = &cleanup_block_load_pc;
7277 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7278 struct regcache *regs,
7279 struct displaced_step_closure *dsc)
7281 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7282 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7285 /* 16-bit thumb instructions. */
7286 switch (op_bit_12_15)
7288 /* Shift (immediate), add, subtract, move and compare. */
7289 case 0: case 1: case 2: case 3:
7290 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7291 "shift/add/sub/mov/cmp",
7295 switch (op_bit_10_11)
7297 case 0: /* Data-processing */
7298 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7302 case 1: /* Special data instructions and branch and exchange. */
7304 unsigned short op = bits (insn1, 7, 9);
7305 if (op == 6 || op == 7) /* BX or BLX */
7306 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7307 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7308 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7310 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7314 default: /* LDR (literal) */
7315 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7318 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7319 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7322 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7323 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7324 else /* Generate SP-relative address */
7325 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7327 case 11: /* Misc 16-bit instructions */
7329 switch (bits (insn1, 8, 11))
7331 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7332 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7334 case 12: case 13: /* POP */
7335 if (bit (insn1, 8)) /* PC is in register list. */
7336 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7338 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7340 case 15: /* If-Then, and hints */
7341 if (bits (insn1, 0, 3))
7342 /* If-Then makes up to four following instructions conditional.
7343 The IT instruction itself is not conditional, so handle it as an
7344 ordinary unmodified instruction. */
7345 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7348 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7351 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7356 if (op_bit_10_11 < 2) /* Store multiple registers */
7357 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7358 else /* Load multiple registers */
7359 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7361 case 13: /* Conditional branch and supervisor call */
7362 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7363 err = thumb_copy_b (gdbarch, insn1, dsc);
7365 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7367 case 14: /* Unconditional branch */
7368 err = thumb_copy_b (gdbarch, insn1, dsc);
7375 internal_error (__FILE__, __LINE__,
7376 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7380 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7381 uint16_t insn1, uint16_t insn2,
7382 struct regcache *regs,
7383 struct displaced_step_closure *dsc)
7385 int rt = bits (insn2, 12, 15);
7386 int rn = bits (insn1, 0, 3);
7387 int op1 = bits (insn1, 7, 8);
7389 switch (bits (insn1, 5, 6))
7391 case 0: /* Load byte and memory hints */
7392 if (rt == 0xf) /* PLD/PLI */
7395 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7396 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7398 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7403 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7404 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7407 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7408 "ldrb{reg, immediate}/ldrbt",
7413 case 1: /* Load halfword and memory hints. */
7414 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7415 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7416 "pld/unalloc memhint", dsc);
7420 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7423 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7427 case 2: /* Load word */
7429 int insn2_bit_8_11 = bits (insn2, 8, 11);
7432 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7433 else if (op1 == 0x1) /* Encoding T3 */
7434 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7436 else /* op1 == 0x0 */
7438 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7439 /* LDR (immediate) */
7440 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7441 dsc, bit (insn2, 8), 1);
7442 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7443 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7446 /* LDR (register) */
7447 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7453 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7460 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7461 uint16_t insn2, struct regcache *regs,
7462 struct displaced_step_closure *dsc)
7465 unsigned short op = bit (insn2, 15);
7466 unsigned int op1 = bits (insn1, 11, 12);
7472 switch (bits (insn1, 9, 10))
7477 /* Load/store {dual, exclusive}, table branch. */
7478 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7479 && bits (insn2, 5, 7) == 0)
7480 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7483 /* PC is not allowed to be used in load/store {dual, exclusive}
7485 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7486 "load/store dual/ex", dsc);
7488 else /* load/store multiple */
7490 switch (bits (insn1, 7, 8))
7492 case 0: case 3: /* SRS, RFE */
7493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7496 case 1: case 2: /* LDM/STM/PUSH/POP */
7497 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7504 /* Data-processing (shift register). */
7505 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7508 default: /* Coprocessor instructions. */
7509 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7514 case 2: /* op1 = 2 */
7515 if (op) /* Branch and misc control. */
7517 if (bit (insn2, 14) /* BLX/BL */
7518 || bit (insn2, 12) /* Unconditional branch */
7519 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7520 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7522 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7527 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7529 int op = bits (insn1, 4, 8);
7530 int rn = bits (insn1, 0, 3);
7531 if ((op == 0 || op == 0xa) && rn == 0xf)
7532 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7535 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7538 else /* Data processing (modified immediate) */
7539 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7543 case 3: /* op1 = 3 */
7544 switch (bits (insn1, 9, 10))
7548 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7550 else /* NEON Load/Store and Store single data item */
7551 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7552 "neon elt/struct load/store",
7555 case 1: /* op1 = 3, bits (9, 10) == 1 */
7556 switch (bits (insn1, 7, 8))
7558 case 0: case 1: /* Data processing (register) */
7559 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7562 case 2: /* Multiply and absolute difference */
7563 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7564 "mul/mua/diff", dsc);
7566 case 3: /* Long multiply and divide */
7567 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7572 default: /* Coprocessor instructions */
7573 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7582 internal_error (__FILE__, __LINE__,
7583 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7588 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7589 struct regcache *regs,
7590 struct displaced_step_closure *dsc)
7592 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7594 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7596 if (debug_displaced)
7597 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7598 "at %.8lx\n", insn1, (unsigned long) from);
7601 dsc->insn_size = thumb_insn_size (insn1);
7602 if (thumb_insn_size (insn1) == 4)
7605 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7606 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7609 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7613 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7614 CORE_ADDR to, struct regcache *regs,
7615 struct displaced_step_closure *dsc)
7618 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7621 /* Most displaced instructions use a 1-instruction scratch space, so set this
7622 here and override below if/when necessary. */
7624 dsc->insn_addr = from;
7625 dsc->scratch_base = to;
7626 dsc->cleanup = NULL;
7627 dsc->wrote_to_pc = 0;
7629 if (!displaced_in_arm_mode (regs))
7630 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7634 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7635 if (debug_displaced)
7636 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7637 "at %.8lx\n", (unsigned long) insn,
7638 (unsigned long) from);
7640 if ((insn & 0xf0000000) == 0xf0000000)
7641 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
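/* Otherwise dispatch on a selector built from insn[27:25] (placed in
   bits 3..1) and insn[4] (placed in bit 0). */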
7642 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7644 case 0x0: case 0x1: case 0x2: case 0x3:
7645 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7648 case 0x4: case 0x5: case 0x6:
7649 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7653 err = arm_decode_media (gdbarch, insn, dsc);
7656 case 0x8: case 0x9: case 0xa: case 0xb:
7657 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7660 case 0xc: case 0xd: case 0xe: case 0xf:
7661 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7666 internal_error (__FILE__, __LINE__,
7667 _("arm_process_displaced_insn: Instruction decode error"));
7670 /* Actually set up the scratch space for a displaced instruction. */
7673 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7674 CORE_ADDR to, struct displaced_step_closure *dsc)
7676 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7677 unsigned int i, len, offset;
7678 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7679 int size = dsc->is_thumb? 2 : 4;
7680 const gdb_byte *bkp_insn;
7683 /* Poke modified instruction(s). */
7684 for (i = 0; i < dsc->numinsns; i++)
7686 if (debug_displaced)
7688 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7690 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7693 fprintf_unfiltered (gdb_stdlog, "%.4x",
7694 (unsigned short)dsc->modinsn[i]);
7696 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7697 (unsigned long) to + offset);
7700 write_memory_unsigned_integer (to + offset, size,
7701 byte_order_for_code,
7706 /* Choose the correct breakpoint instruction. */
7709 bkp_insn = tdep->thumb_breakpoint;
7710 len = tdep->thumb_breakpoint_size;
7714 bkp_insn = tdep->arm_breakpoint;
7715 len = tdep->arm_breakpoint_size;
7718 /* Put breakpoint afterwards. */
7719 write_memory (to + offset, bkp_insn, len);
7721 if (debug_displaced)
7722 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7723 paddress (gdbarch, from), paddress (gdbarch, to));
7726 /* Entry point for cleaning things up after a displaced instruction has been
7730 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7731 struct displaced_step_closure *dsc,
7732 CORE_ADDR from, CORE_ADDR to,
7733 struct regcache *regs)
7736 dsc->cleanup (gdbarch, regs, dsc);
7738 if (!dsc->wrote_to_pc)
7739 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7740 dsc->insn_addr + dsc->insn_size);
7744 #include "bfd-in2.h"
7745 #include "libcoff.h"
7748 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7750 gdb_disassembler *di
7751 = static_cast<gdb_disassembler *>(info->application_data);
7752 struct gdbarch *gdbarch = di->arch ();
7754 if (arm_pc_is_thumb (gdbarch, memaddr))
7756 static asymbol *asym;
7757 static combined_entry_type ce;
7758 static struct coff_symbol_struct csym;
7759 static struct bfd fake_bfd;
7760 static bfd_target fake_target;
7762 if (csym.native == NULL)
7764 /* Create a fake symbol vector containing a Thumb symbol.
7765 This is solely so that the code in print_insn_little_arm()
7766 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7767 the presence of a Thumb symbol and switch to decoding
7768 Thumb instructions. */
7770 fake_target.flavour = bfd_target_coff_flavour;
7771 fake_bfd.xvec = &fake_target;
7772 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7774 csym.symbol.the_bfd = &fake_bfd;
7775 csym.symbol.name = "fake";
7776 asym = (asymbol *) & csym;
7779 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7780 info->symbols = &asym;
7783 info->symbols = NULL;
7785 if (info->endian == BFD_ENDIAN_BIG)
7786 return print_insn_big_arm (memaddr, info);
7788 return print_insn_little_arm (memaddr, info);
7791 /* The following define instruction sequences that will cause ARM
7792 CPUs to take an undefined instruction trap. These are used to
7793 signal a breakpoint to GDB.
7795 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7796 modes. A different instruction is required for each mode. The ARM
7797 CPUs can also be big or little endian. Thus four different
7798 instructions are needed to support all cases.
7800 Note: ARMv4 defines several new instructions that will take the
7801 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7802 not in fact add the new instructions. The new undefined
7803 instructions in ARMv4 are all instructions that had no defined
7804 behaviour in earlier chips. There is no guarantee that they will
7805 raise an exception, but may be treated as NOPs. In practice, it
7806 may only be safe to rely on instructions matching:
7808 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7809 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7810 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7812 Even this may only be true if the condition predicate is true. The
7813 following use a condition predicate of ALWAYS so it is always TRUE.
7815 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7816 and NetBSD all use a software interrupt rather than an undefined
7817 instruction to force a trap. This can be handled by the
7818 abi-specific code during establishment of the gdbarch vector. */
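/* For instance, ARM_LE_BREAKPOINT below is the little-endian byte image
   of the word 0xe7ffdefe: condition 0xe (ALWAYS), bits 27..25 equal to
   011 and bit 4 set, so it falls inside the pattern shown above. */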
7820 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7821 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7822 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7823 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7825 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7826 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7827 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7828 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7830 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7833 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
7835 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7836 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7838 if (arm_pc_is_thumb (gdbarch, *pcptr))
7840 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7842 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7843 check whether we are replacing a 32-bit instruction. */
7844 if (tdep->thumb2_breakpoint != NULL)
7848 if (target_read_memory (*pcptr, buf, 2) == 0)
7850 unsigned short inst1;
7852 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7853 if (thumb_insn_size (inst1) == 4)
7854 return ARM_BP_KIND_THUMB2;
7858 return ARM_BP_KIND_THUMB;
7861 return ARM_BP_KIND_ARM;
7865 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7867 static const gdb_byte *
7868 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
7870 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7874 case ARM_BP_KIND_ARM:
7875 *size = tdep->arm_breakpoint_size;
7876 return tdep->arm_breakpoint;
7877 case ARM_BP_KIND_THUMB:
7878 *size = tdep->thumb_breakpoint_size;
7879 return tdep->thumb_breakpoint;
7880 case ARM_BP_KIND_THUMB2:
7881 *size = tdep->thumb2_breakpoint_size;
7882 return tdep->thumb2_breakpoint;
7884 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7888 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7891 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
7892 struct regcache *regcache,
7897 /* Check that the memory pointed to by PC is readable. */
7898 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
7900 struct arm_get_next_pcs next_pcs_ctx;
7903 VEC (CORE_ADDR) *next_pcs = NULL;
7904 struct cleanup *old_chain
7905 = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
7907 arm_get_next_pcs_ctor (&next_pcs_ctx,
7908 &arm_get_next_pcs_ops,
7909 gdbarch_byte_order (gdbarch),
7910 gdbarch_byte_order_for_code (gdbarch),
7914 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7916 /* If *PCPTR matches one of the next-instruction addresses computed
7917 by the software single-step logic above, use the Thumb bit of that
7918 destination address to choose the breakpoint kind. */
7919 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
7921 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
7923 do_cleanups (old_chain);
7925 if (IS_THUMB_ADDR (pc))
7927 *pcptr = MAKE_THUMB_ADDR (*pcptr);
7928 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7931 return ARM_BP_KIND_ARM;
7935 do_cleanups (old_chain);
7938 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
7941 /* Extract from an array REGBUF containing the (raw) register state a
7942 function return value of type TYPE, and copy that, in virtual
7943 format, into VALBUF. */
7946 arm_extract_return_value (struct type *type, struct regcache *regs,
7949 struct gdbarch *gdbarch = get_regcache_arch (regs);
7950 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7952 if (TYPE_CODE_FLT == TYPE_CODE (type))
7954 switch (gdbarch_tdep (gdbarch)->fp_model)
7958 /* The value is in register F0 in internal format. We need to
7959 extract the raw value and then convert it to the desired
7961 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7963 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7964 convert_from_extended (floatformat_from_type (type), tmpbuf,
7965 valbuf, gdbarch_byte_order (gdbarch));
7969 case ARM_FLOAT_SOFT_FPA:
7970 case ARM_FLOAT_SOFT_VFP:
7971 /* ARM_FLOAT_VFP can arise if this is a variadic function, in which
7972 case the VFP ABI code is not used. */
7974 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7975 if (TYPE_LENGTH (type) > 4)
7976 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7977 valbuf + INT_REGISTER_SIZE);
7981 internal_error (__FILE__, __LINE__,
7982 _("arm_extract_return_value: "
7983 "Floating point model not supported"));
7987 else if (TYPE_CODE (type) == TYPE_CODE_INT
7988 || TYPE_CODE (type) == TYPE_CODE_CHAR
7989 || TYPE_CODE (type) == TYPE_CODE_BOOL
7990 || TYPE_CODE (type) == TYPE_CODE_PTR
7991 || TYPE_IS_REFERENCE (type)
7992 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7994 /* If the type is a plain integer, then the access is
7995 straight-forward. Otherwise we have to play around a bit
7997 int len = TYPE_LENGTH (type);
7998 int regno = ARM_A1_REGNUM;
8003 /* By using store_unsigned_integer we avoid having to do
8004 anything special for small big-endian values. */
8005 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8006 store_unsigned_integer (valbuf,
8007 (len > INT_REGISTER_SIZE
8008 ? INT_REGISTER_SIZE : len),
8010 len -= INT_REGISTER_SIZE;
8011 valbuf += INT_REGISTER_SIZE;
8016 /* For a structure or union the behaviour is as if the value had
8017 been stored to word-aligned memory and then loaded into
8018 registers with 32-bit load instruction(s). */
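/* For example, a 6-byte structure returned in registers occupies all of
   r0 and the first two bytes of r1's register image; the loop below
   copies at most INT_REGISTER_SIZE bytes from each register in turn. */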
8019 int len = TYPE_LENGTH (type);
8020 int regno = ARM_A1_REGNUM;
8021 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8025 regcache_cooked_read (regs, regno++, tmpbuf);
8026 memcpy (valbuf, tmpbuf,
8027 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8028 len -= INT_REGISTER_SIZE;
8029 valbuf += INT_REGISTER_SIZE;
8035 /* Will a function return an aggregate type in memory or in a
8036 register? Return 0 if an aggregate type can be returned in a
8037 register, 1 if it must be returned in memory. */
8040 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8042 enum type_code code;
8044 type = check_typedef (type);
8046 /* Simple, non-aggregate types (i.e. not including vectors and
8047 complex) are always returned in a register (or registers). */
8048 code = TYPE_CODE (type);
8049 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
8050 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
8053 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
8055 /* Vector values should be returned using ARM registers if they
8056 are not over 16 bytes. */
8057 return (TYPE_LENGTH (type) > 16);
8060 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
8062 /* The AAPCS says all aggregates not larger than a word are returned
8064 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
8073 /* All aggregate types that won't fit in a register must be returned
8075 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
8078 /* In the ARM ABI, "integer" like aggregate types are returned in
8079 registers. For an aggregate type to be integer like, its size
8080 must be less than or equal to INT_REGISTER_SIZE and the
8081 offset of each addressable subfield must be zero. Note that bit
8082 fields are not addressable, and all addressable subfields of
8083 unions always start at offset zero.
8085 This function is based on the behaviour of GCC 2.95.1.
8086 See: gcc/arm.c: arm_return_in_memory() for details.
8088 Note: All versions of GCC before GCC 2.95.2 do not set up the
8089 parameters correctly for a function returning the following
8090 structure: struct { float f;}; This should be returned in memory,
8091 not a register. Richard Earnshaw sent me a patch, but I do not
8092 know of any way to detect if a function like the above has been
8093 compiled with the correct calling convention. */
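/* For instance, under the APCS a "union { char c; int i; }" is integer
   like (every addressable field is at offset zero) and comes back in r0,
   whereas a "struct { char c; short s; }" has an addressable field at a
   non-zero offset and is returned in memory. */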
8095 /* Assume all other aggregate types can be returned in a register.
8096 Run a check for structures, unions and arrays. */
8099 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8102 /* Need to check if this struct/union is "integer" like. For
8103 this to be true, its size must be less than or equal to
8104 INT_REGISTER_SIZE and the offset of each addressable
8105 subfield must be zero. Note that bit fields are not
8106 addressable, and unions always start at offset zero. If any
8107 of the subfields is a floating point type, the struct/union
8108 cannot be an integer type. */
8110 /* For each field in the object, check:
8111 1) Is it FP? --> yes, nRc = 1;
8112 2) Is it addressable (bitpos != 0) and
8113 not packed (bitsize == 0)?
8117 for (i = 0; i < TYPE_NFIELDS (type); i++)
8119 enum type_code field_type_code;
8122 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8125 /* Is it a floating point type field? */
8126 if (field_type_code == TYPE_CODE_FLT)
8132 /* If bitpos != 0, then we have to care about it. */
8133 if (TYPE_FIELD_BITPOS (type, i) != 0)
8135 /* Bitfields are not addressable. If the field bitsize is
8136 zero, then the field is not packed. Hence it cannot be
8137 a bitfield or any other packed type. */
8138 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8151 /* Write into appropriate registers a function return value of type
8152 TYPE, given in virtual format. */
8155 arm_store_return_value (struct type *type, struct regcache *regs,
8156 const gdb_byte *valbuf)
8158 struct gdbarch *gdbarch = get_regcache_arch (regs);
8159 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8161 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8163 gdb_byte buf[MAX_REGISTER_SIZE];
8165 switch (gdbarch_tdep (gdbarch)->fp_model)
8169 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8170 gdbarch_byte_order (gdbarch));
8171 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8174 case ARM_FLOAT_SOFT_FPA:
8175 case ARM_FLOAT_SOFT_VFP:
8176 /* ARM_FLOAT_VFP can arise if this is a variadic function, in which
8177 case the VFP ABI code is not used. */
8179 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8180 if (TYPE_LENGTH (type) > 4)
8181 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8182 valbuf + INT_REGISTER_SIZE);
8186 internal_error (__FILE__, __LINE__,
8187 _("arm_store_return_value: Floating "
8188 "point model not supported"));
8192 else if (TYPE_CODE (type) == TYPE_CODE_INT
8193 || TYPE_CODE (type) == TYPE_CODE_CHAR
8194 || TYPE_CODE (type) == TYPE_CODE_BOOL
8195 || TYPE_CODE (type) == TYPE_CODE_PTR
8196 || TYPE_IS_REFERENCE (type)
8197 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8199 if (TYPE_LENGTH (type) <= 4)
8201 /* Values of one word or less are zero/sign-extended and
8203 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8204 LONGEST val = unpack_long (type, valbuf);
8206 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8207 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8211 /* Integral values greater than one word are stored in consecutive
8212 registers starting with r0. This will always be a multiple of
8213 the register size. */
8214 int len = TYPE_LENGTH (type);
8215 int regno = ARM_A1_REGNUM;
8219 regcache_cooked_write (regs, regno++, valbuf);
8220 len -= INT_REGISTER_SIZE;
8221 valbuf += INT_REGISTER_SIZE;
8227 /* For a structure or union the behaviour is as if the value had
8228 been stored to word-aligned memory and then loaded into
8229 registers with 32-bit load instruction(s). */
8230 int len = TYPE_LENGTH (type);
8231 int regno = ARM_A1_REGNUM;
8232 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8236 memcpy (tmpbuf, valbuf,
8237 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8238 regcache_cooked_write (regs, regno++, tmpbuf);
8239 len -= INT_REGISTER_SIZE;
8240 valbuf += INT_REGISTER_SIZE;
8246 /* Handle function return values. */
8248 static enum return_value_convention
8249 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8250 struct type *valtype, struct regcache *regcache,
8251 gdb_byte *readbuf, const gdb_byte *writebuf)
8253 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8254 struct type *func_type = function ? value_type (function) : NULL;
8255 enum arm_vfp_cprc_base_type vfp_base_type;
8258 if (arm_vfp_abi_for_function (gdbarch, func_type)
8259 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8261 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8262 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8264 for (i = 0; i < vfp_base_count; i++)
8266 if (reg_char == 'q')
8269 arm_neon_quad_write (gdbarch, regcache, i,
8270 writebuf + i * unit_length);
8273 arm_neon_quad_read (gdbarch, regcache, i,
8274 readbuf + i * unit_length);
8281 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8282 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8285 regcache_cooked_write (regcache, regnum,
8286 writebuf + i * unit_length);
8288 regcache_cooked_read (regcache, regnum,
8289 readbuf + i * unit_length);
8292 return RETURN_VALUE_REGISTER_CONVENTION;
8295 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8296 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8297 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8299 if (tdep->struct_return == pcc_struct_return
8300 || arm_return_in_memory (gdbarch, valtype))
8301 return RETURN_VALUE_STRUCT_CONVENTION;
8303 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8305 if (arm_return_in_memory (gdbarch, valtype))
8306 return RETURN_VALUE_STRUCT_CONVENTION;
8310 arm_store_return_value (valtype, regcache, writebuf);
8313 arm_extract_return_value (valtype, regcache, readbuf);
8315 return RETURN_VALUE_REGISTER_CONVENTION;
8320 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8322 struct gdbarch *gdbarch = get_frame_arch (frame);
8323 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8324 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8326 gdb_byte buf[INT_REGISTER_SIZE];
8328 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8330 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8334 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8338 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8339 return the target PC. Otherwise return 0. */
8342 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8346 CORE_ADDR start_addr;
8348 /* Find the starting address and name of the function containing the PC. */
8349 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8351 /* Trampoline 'bx reg' doesn't belong to any function. Do the
8353 start_addr = arm_skip_bx_reg (frame, pc);
8354 if (start_addr != 0)
8360 /* If PC is in a Thumb call or return stub, return the address of the
8361 target PC, which is in a register. The thunk functions are called
8362 _call_via_xx, where xx is the register name. The possible names
8363 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8364 functions, named __ARM_call_via_r[0-7]. */
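/* For example, for a PC inside __ARM_call_via_r5 the two-character
   suffix "r5" selects the table entry below and the branch target is the
   current value of r5. */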
8365 if (startswith (name, "_call_via_")
8366 || startswith (name, "__ARM_call_via_"))
8368 /* Use the name suffix to determine which register contains the
8370 static char *table[15] =
8371 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8372 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8375 int offset = strlen (name) - 2;
8377 for (regno = 0; regno <= 14; regno++)
8378 if (strcmp (&name[offset], table[regno]) == 0)
8379 return get_frame_register_unsigned (frame, regno);
8382 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8383 non-interworking calls to foo. We could decode the stubs
8384 to find the target but it's easier to use the symbol table. */
8385 namelen = strlen (name);
8386 if (name[0] == '_' && name[1] == '_'
8387 && ((namelen > 2 + strlen ("_from_thumb")
8388 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8389 || (namelen > 2 + strlen ("_from_arm")
8390 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8393 int target_len = namelen - 2;
8394 struct bound_minimal_symbol minsym;
8395 struct objfile *objfile;
8396 struct obj_section *sec;
8398 if (name[namelen - 1] == 'b')
8399 target_len -= strlen ("_from_thumb");
8401 target_len -= strlen ("_from_arm");
8403 target_name = (char *) alloca (target_len + 1);
8404 memcpy (target_name, name + 2, target_len);
8405 target_name[target_len] = '\0';
8407 sec = find_pc_section (pc);
8408 objfile = (sec == NULL) ? NULL : sec->objfile;
8409 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8410 if (minsym.minsym != NULL)
8411 return BMSYMBOL_VALUE_ADDRESS (minsym);
8416 return 0; /* not a stub */
8420 set_arm_command (char *args, int from_tty)
8422 printf_unfiltered (_("\
8423 \"set arm\" must be followed by an appropriate subcommand.\n"));
8424 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8428 show_arm_command (char *args, int from_tty)
8430 cmd_show_list (showarmcmdlist, from_tty, "");
8434 arm_update_current_architecture (void)
8436 struct gdbarch_info info;
8438 /* If the current architecture is not ARM, we have nothing to do. */
8439 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8442 /* Update the architecture. */
8443 gdbarch_info_init (&info);
8445 if (!gdbarch_update_p (info))
8446 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8450 set_fp_model_sfunc (char *args, int from_tty,
8451 struct cmd_list_element *c)
8455 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8456 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8458 arm_fp_model = (enum arm_float_model) fp_model;
8462 if (fp_model == ARM_FLOAT_LAST)
8463 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8466 arm_update_current_architecture ();
8470 show_fp_model (struct ui_file *file, int from_tty,
8471 struct cmd_list_element *c, const char *value)
8473 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8475 if (arm_fp_model == ARM_FLOAT_AUTO
8476 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8477 fprintf_filtered (file, _("\
8478 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8479 fp_model_strings[tdep->fp_model]);
8481 fprintf_filtered (file, _("\
8482 The current ARM floating point model is \"%s\".\n"),
8483 fp_model_strings[arm_fp_model]);
8487 arm_set_abi (char *args, int from_tty,
8488 struct cmd_list_element *c)
8492 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8493 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8495 arm_abi_global = (enum arm_abi_kind) arm_abi;
8499 if (arm_abi == ARM_ABI_LAST)
8500 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8503 arm_update_current_architecture ();
8507 arm_show_abi (struct ui_file *file, int from_tty,
8508 struct cmd_list_element *c, const char *value)
8510 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8512 if (arm_abi_global == ARM_ABI_AUTO
8513 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8514 fprintf_filtered (file, _("\
8515 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8516 arm_abi_strings[tdep->arm_abi]);
8518 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8523 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8524 struct cmd_list_element *c, const char *value)
8526 fprintf_filtered (file,
8527 _("The current execution mode assumed "
8528 "(when symbols are unavailable) is \"%s\".\n"),
8529 arm_fallback_mode_string);
8533 arm_show_force_mode (struct ui_file *file, int from_tty,
8534 struct cmd_list_element *c, const char *value)
8536 fprintf_filtered (file,
8537 _("The current execution mode assumed "
8538 "(even when symbols are available) is \"%s\".\n"),
8539 arm_force_mode_string);
8542 /* If the user changes the register disassembly style used for info
8543 register and other commands, we have to also switch the style used
8544 in opcodes for disassembly output. This function is run in the "set
8545 arm disassembly" command, and does that. */
8548 set_disassembly_style_sfunc (char *args, int from_tty,
8549 struct cmd_list_element *c)
8551 /* Convert the short style name into the long style name (e.g., reg-names-*)
8552 before calling the generic set_disassembler_options() function. */
8553 std::string long_name = std::string ("reg-names-") + disassembly_style;
8554 set_disassembler_options (&long_name[0]);
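/* Minimal sketch (not compiled) of the conversion performed above,
   assuming the user typed "set arm disassembler apcs".  */
#if 0
  /* disassembly_style == "apcs" */
  std::string long_name = std::string ("reg-names-") + "apcs";
  /* long_name is now "reg-names-apcs", which is what the generic
     disassembler-options machinery expects.  */
#endif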
8558 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
8559 struct cmd_list_element *c, const char *value)
8561 struct gdbarch *gdbarch = get_current_arch ();
8562 char *options = get_disassembler_options (gdbarch);
8563 const char *style = "";
8567 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
8568 if (CONST_STRNEQ (opt, "reg-names-"))
8570 style = &opt[strlen ("reg-names-")];
8571 len = strcspn (style, ",");
8574 fprintf_unfiltered (file, "The disassembly style is \"%.*s\".\n", len, style);
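/* Illustrative sketch (not compiled) of the parsing done above; the
   options string is a made-up example.  */
#if 0
  const char *opts = "reg-names-apcs,force-thumb";
  const char *style = opts + strlen ("reg-names-");	/* "apcs,force-thumb" */
  int len = strcspn (style, ",");			/* 4, i.e. "apcs" */

  printf ("The disassembly style is \"%.*s\".\n", len, style);
#endif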
8577 /* Return the ARM register name corresponding to register I. */
8579 arm_register_name (struct gdbarch *gdbarch, int i)
8581 const int num_regs = gdbarch_num_regs (gdbarch);
8583 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8584 && i >= num_regs && i < num_regs + 32)
8586 static const char *const vfp_pseudo_names[] = {
8587 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8588 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8589 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8590 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8593 return vfp_pseudo_names[i - num_regs];
8596 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8597 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8599 static const char *const neon_pseudo_names[] = {
8600 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8601 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8604 return neon_pseudo_names[i - num_regs - 32];
8607 if (i >= ARRAY_SIZE (arm_register_names))
8608 /* These registers are only supported on targets which supply
8609 an XML description. */
8612 return arm_register_names[i];
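/* Pseudo-register numbering assumed above, as a sketch (not compiled):
   the raw registers occupy [0, num_regs), the 32 single-precision VFP
   pseudos follow, and the 16 NEON quad pseudos come after those.  */
#if 0
  /* With num_regs == N:
       N      .. N + 31  ->  "s0" .. "s31"
       N + 32 .. N + 47  ->  "q0" .. "q15"  */
  int s5_regnum = num_regs + 5;		/* named "s5" above */
  int q3_regnum = num_regs + 32 + 3;	/* named "q3" above */
#endif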
8615 /* Test whether the coff symbol specific value corresponds to a Thumb
8619 coff_sym_is_thumb (int val)
8621 return (val == C_THUMBEXT
8622 || val == C_THUMBSTAT
8623 || val == C_THUMBEXTFUNC
8624 || val == C_THUMBSTATFUNC
8625 || val == C_THUMBLABEL);
8628 /* arm_coff_make_msymbol_special()
8629 arm_elf_make_msymbol_special()
8631 These functions test whether the COFF or ELF symbol corresponds to
8632 an address in thumb code, and set a "special" bit in a minimal
8633 symbol to indicate that it does. */
8636 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8638 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8640 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8641 == ST_BRANCH_TO_THUMB)
8642 MSYMBOL_SET_SPECIAL (msym);
8646 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8648 if (coff_sym_is_thumb (val))
8649 MSYMBOL_SET_SPECIAL (msym);
8653 arm_objfile_data_free (struct objfile *objfile, void *arg)
8655 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8658 for (i = 0; i < objfile->obfd->section_count; i++)
8659 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8663 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8666 const char *name = bfd_asymbol_name (sym);
8667 struct arm_per_objfile *data;
8668 VEC(arm_mapping_symbol_s) **map_p;
8669 struct arm_mapping_symbol new_map_sym;
8671 gdb_assert (name[0] == '$');
8672 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8675 data = (struct arm_per_objfile *) objfile_data (objfile,
8676 arm_objfile_data_key);
8679 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8680 struct arm_per_objfile);
8681 set_objfile_data (objfile, arm_objfile_data_key, data);
8682 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8683 objfile->obfd->section_count,
8684 VEC(arm_mapping_symbol_s) *);
8686 map_p = &data->section_maps[bfd_get_section (sym)->index];
8688 new_map_sym.value = sym->value;
8689 new_map_sym.type = name[1];
8691 /* Assume that most mapping symbols appear in order of increasing
8692 value. If they were randomly distributed, it would be faster to
8693 always push here and then sort at first use. */
8694 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8696 struct arm_mapping_symbol *prev_map_sym;
8698 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8699 if (prev_map_sym->value >= sym->value)
8702 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8703 arm_compare_mapping_symbols);
8704 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8709 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
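/* Sketch (not compiled) of the insertion strategy used above: append when
   the new value keeps the vector sorted, otherwise find the insertion
   point with a binary search.  Plain arrays stand in for the VEC here.  */
#if 0
  /* values[0 .. count) is kept sorted; v is the incoming symbol value.  */
  if (count == 0 || values[count - 1] < v)
    values[count++] = v;		/* the common, cheap case */
  else
    {
      int lo = 0, hi = count;		/* lower-bound binary search */

      while (lo < hi)
	{
	  int mid = (lo + hi) / 2;

	  if (values[mid] < v)
	    lo = mid + 1;
	  else
	    hi = mid;
	}
      /* Shift values[lo .. count) up by one and store v at values[lo].  */
    }
#endif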
8713 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8715 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8716 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8718 /* If necessary, set the T bit. */
8721 ULONGEST val, t_bit;
8722 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8723 t_bit = arm_psr_thumb_bit (gdbarch);
8724 if (arm_pc_is_thumb (gdbarch, pc))
8725 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8728 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
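/* Compact restatement (not compiled) of the CPSR update above, where VAL
   and T_BIT are the values just read: the Thumb bit is ORed in for Thumb
   addresses and cleared otherwise.  */
#if 0
  ULONGEST new_psr = arm_pc_is_thumb (gdbarch, pc)
		     ? (val | t_bit)
		     : (val & ~t_bit);
  /* new_psr is what ends up in ARM_PS_REGNUM.  */
#endif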
8733 /* Read the contents of a NEON quad register, by reading from two
8734 double registers. This is used to implement the quad pseudo
8735 registers, and for argument passing in case the quad registers are
8736 missing; vectors are passed in quad registers when using the VFP
8737 ABI, even if a NEON unit is not present. REGNUM is the index of
8738 the quad register, in [0, 15]. */
8740 static enum register_status
8741 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8742 int regnum, gdb_byte *buf)
8745 gdb_byte reg_buf[8];
8746 int offset, double_regnum;
8747 enum register_status status;
8749 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8750 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8753 /* d0 is always the least significant half of q0. */
8754 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8759 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8760 if (status != REG_VALID)
8762 memcpy (buf + offset, reg_buf, 8);
8764 offset = 8 - offset;
8765 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8766 if (status != REG_VALID)
8768 memcpy (buf + offset, reg_buf, 8);
8773 static enum register_status
8774 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8775 int regnum, gdb_byte *buf)
8777 const int num_regs = gdbarch_num_regs (gdbarch);
8779 gdb_byte reg_buf[8];
8780 int offset, double_regnum;
8782 gdb_assert (regnum >= num_regs);
8785 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8786 /* Quad-precision register. */
8787 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8790 enum register_status status;
8792 /* Single-precision register. */
8793 gdb_assert (regnum < 32);
8795 /* s0 is always the least significant half of d0. */
8796 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8797 offset = (regnum & 1) ? 0 : 4;
8799 offset = (regnum & 1) ? 4 : 0;
8801 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8802 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8805 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8806 if (status == REG_VALID)
8807 memcpy (buf, reg_buf + offset, 4);
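/* Sketch (not compiled) of the register aliasing assumed by the pseudo
   read/write routines: each q register is a pair of d registers, and each
   d register holds two s registers; only the byte offsets depend on the
   target byte order.  */
#if 0
  /* q<n>  -> d<2n> (low half) and d<2n+1> (high half).
     s<2k> and s<2k+1> -> the two 4-byte halves of d<k>.  */
  int single = 7;			/* s7, for example */
  int dreg = single >> 1;		/* lives in d3 */
  int le_off = (single & 1) ? 4 : 0;	/* byte offset on little-endian */
  int be_off = (single & 1) ? 0 : 4;	/* the halves swap on big-endian */
#endif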
8812 /* Store the contents of BUF to a NEON quad register, by writing to
8813 two double registers. This is used to implement the quad pseudo
8814 registers, and for argument passing in case the quad registers are
8815 missing; vectors are passed in quad registers when using the VFP
8816 ABI, even if a NEON unit is not present. REGNUM is the index
8817 of the quad register, in [0, 15]. */
8820 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8821 int regnum, const gdb_byte *buf)
8824 int offset, double_regnum;
8826 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8827 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8830 /* d0 is always the least significant half of q0. */
8831 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8836 regcache_raw_write (regcache, double_regnum, buf + offset);
8837 offset = 8 - offset;
8838 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8842 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8843 int regnum, const gdb_byte *buf)
8845 const int num_regs = gdbarch_num_regs (gdbarch);
8847 gdb_byte reg_buf[8];
8848 int offset, double_regnum;
8850 gdb_assert (regnum >= num_regs);
8853 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8854 /* Quad-precision register. */
8855 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8858 /* Single-precision register. */
8859 gdb_assert (regnum < 32);
8861 /* s0 is always the least significant half of d0. */
8862 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8863 offset = (regnum & 1) ? 0 : 4;
8865 offset = (regnum & 1) ? 4 : 0;
8867 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8868 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8871 regcache_raw_read (regcache, double_regnum, reg_buf);
8872 memcpy (reg_buf + offset, buf, 4);
8873 regcache_raw_write (regcache, double_regnum, reg_buf);
8877 static struct value *
8878 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8880 const int *reg_p = (const int *) baton;
8881 return value_of_register (*reg_p, frame);
8884 static enum gdb_osabi
8885 arm_elf_osabi_sniffer (bfd *abfd)
8887 unsigned int elfosabi;
8888 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8890 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8892 if (elfosabi == ELFOSABI_ARM)
8893 /* GNU tools use this value. Check note sections in this case,
8895 bfd_map_over_sections (abfd,
8896 generic_elf_osabi_sniff_abi_tag_sections,
8899 /* Anything else will be handled by the generic ELF sniffer. */
8904 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8905 struct reggroup *group)
8907 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8908 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8909 all_reggroup, of course. */
8910 if (regnum == ARM_FPS_REGNUM)
8911 return (group == float_reggroup
8912 || group == save_reggroup
8913 || group == restore_reggroup
8914 || group == all_reggroup);
8916 return default_register_reggroup_p (gdbarch, regnum, group);
8920 /* For backward-compatibility we allow two 'g' packet lengths with
8921 the remote protocol depending on whether FPA registers are
8922 supplied. M-profile targets do not have FPA registers, but some
8923 stubs already exist in the wild that use a 'g' packet that
8924 supplies them, albeit with dummy values. The packet format which
8925 includes FPA registers should be considered deprecated for
8926 M-profile targets. */
8929 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8931 if (gdbarch_tdep (gdbarch)->is_m)
8933 /* If we know from the executable this is an M-profile target,
8934 cater for remote targets whose register set layout is the
8935 same as the FPA layout. */
8936 register_remote_g_packet_guess (gdbarch,
8937 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8938 (16 * INT_REGISTER_SIZE)
8939 + (8 * FP_REGISTER_SIZE)
8940 + (2 * INT_REGISTER_SIZE),
8941 tdesc_arm_with_m_fpa_layout);
8943 /* The regular M-profile layout. */
8944 register_remote_g_packet_guess (gdbarch,
8945 /* r0-r12,sp,lr,pc; xpsr */
8946 (16 * INT_REGISTER_SIZE)
8947 + INT_REGISTER_SIZE,
8950 /* M-profile plus M4F VFP. */
8951 register_remote_g_packet_guess (gdbarch,
8952 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8953 (16 * INT_REGISTER_SIZE)
8954 + (16 * VFP_REGISTER_SIZE)
8955 + (2 * INT_REGISTER_SIZE),
8956 tdesc_arm_with_m_vfp_d16);
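/* Worked example (not compiled) of the three packet lengths registered
   above, assuming the usual ARM sizes INT_REGISTER_SIZE == 4,
   FP_REGISTER_SIZE == 12 and VFP_REGISTER_SIZE == 8.  */
#if 0
  int fpa_layout_len = 16 * 4 + 8 * 12 + 2 * 4;	/* 168 bytes */
  int m_profile_len  = 16 * 4 + 4;		/*  68 bytes */
  int m4f_vfp_len    = 16 * 4 + 16 * 8 + 2 * 4;	/* 200 bytes */
#endif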
8959 /* Otherwise we don't have a useful guess. */
8962 /* Implement the code_of_frame_writable gdbarch method. */
8965 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8967 if (gdbarch_tdep (gdbarch)->is_m
8968 && get_frame_type (frame) == SIGTRAMP_FRAME)
8970 /* M-profile exception frames return to some magic PCs, which
8971 aren't writable at all. */
8979 /* Initialize the current architecture based on INFO. If possible,
8980 re-use an architecture from ARCHES, which is a list of
8981 architectures already created during this debugging session.
8983 Called e.g. at program startup, when reading a core file, and when
8984 reading a binary file. */
8986 static struct gdbarch *
8987 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8989 struct gdbarch_tdep *tdep;
8990 struct gdbarch *gdbarch;
8991 struct gdbarch_list *best_arch;
8992 enum arm_abi_kind arm_abi = arm_abi_global;
8993 enum arm_float_model fp_model = arm_fp_model;
8994 struct tdesc_arch_data *tdesc_data = NULL;
8996 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8997 int have_wmmx_registers = 0;
8999 int have_fpa_registers = 1;
9000 const struct target_desc *tdesc = info.target_desc;
9002 /* If we have an object to base this architecture on, try to determine
9005 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9007 int ei_osabi, e_flags;
9009 switch (bfd_get_flavour (info.abfd))
9011 case bfd_target_coff_flavour:
9012 /* Assume it's an old APCS-style ABI. */
9014 arm_abi = ARM_ABI_APCS;
9017 case bfd_target_elf_flavour:
9018 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9019 e_flags = elf_elfheader (info.abfd)->e_flags;
9021 if (ei_osabi == ELFOSABI_ARM)
9023 /* GNU tools used to use this value, but do not for EABI
9024 objects. There's nowhere to tag an EABI version
9025 anyway, so assume APCS. */
9026 arm_abi = ARM_ABI_APCS;
9028 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9030 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9031 int attr_arch, attr_profile;
9035 case EF_ARM_EABI_UNKNOWN:
9036 /* Assume GNU tools. */
9037 arm_abi = ARM_ABI_APCS;
9040 case EF_ARM_EABI_VER4:
9041 case EF_ARM_EABI_VER5:
9042 arm_abi = ARM_ABI_AAPCS;
9043 /* EABI binaries default to VFP float ordering.
9044 They may also contain build attributes that can
9045 be used to identify if the VFP argument-passing
9047 if (fp_model == ARM_FLOAT_AUTO)
9050 switch (bfd_elf_get_obj_attr_int (info.abfd,
9054 case AEABI_VFP_args_base:
9055 /* "The user intended FP parameter/result
9056 passing to conform to AAPCS, base
9058 fp_model = ARM_FLOAT_SOFT_VFP;
9060 case AEABI_VFP_args_vfp:
9061 /* "The user intended FP parameter/result
9062 passing to conform to AAPCS, VFP
9064 fp_model = ARM_FLOAT_VFP;
9066 case AEABI_VFP_args_toolchain:
9067 /* "The user intended FP parameter/result
9068 passing to conform to tool chain-specific
9069 conventions" - we don't know any such
9070 conventions, so leave it as "auto". */
9072 case AEABI_VFP_args_compatible:
9073 /* "Code is compatible with both the base
9074 and VFP variants; the user did not permit
9075 non-variadic functions to pass FP
9076 parameters/results" - leave it as
9080 /* Attribute value not mentioned in the
9081 November 2012 ABI, so leave it as
9086 fp_model = ARM_FLOAT_SOFT_VFP;
9092 /* Leave it as "auto". */
9093 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9098 /* Detect M-profile programs. This only works if the
9099 executable file includes build attributes; GCC does
9100 copy them to the executable, but e.g. RealView does
9102 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9104 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9106 Tag_CPU_arch_profile);
9107 /* GCC specifies the profile for v6-M; RealView only
9108 specifies the profile for architectures starting with
9109 V7 (as opposed to architectures with a tag
9110 numerically greater than TAG_CPU_ARCH_V7). */
9111 if (!tdesc_has_registers (tdesc)
9112 && (attr_arch == TAG_CPU_ARCH_V6_M
9113 || attr_arch == TAG_CPU_ARCH_V6S_M
9114 || attr_profile == 'M'))
9119 if (fp_model == ARM_FLOAT_AUTO)
9121 int e_flags = elf_elfheader (info.abfd)->e_flags;
9123 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9126 /* Leave it as "auto". Strictly speaking this case
9127 means FPA, but almost nobody uses that now, and
9128 many toolchains fail to set the appropriate bits
9129 for the floating-point model they use. */
9131 case EF_ARM_SOFT_FLOAT:
9132 fp_model = ARM_FLOAT_SOFT_FPA;
9134 case EF_ARM_VFP_FLOAT:
9135 fp_model = ARM_FLOAT_VFP;
9137 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9138 fp_model = ARM_FLOAT_SOFT_VFP;
9143 if (e_flags & EF_ARM_BE8)
9144 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9149 /* Leave it as "auto". */
9154 /* Check any target description for validity. */
9155 if (tdesc_has_registers (tdesc))
9157 /* For most registers we require GDB's default names; but also allow
9158 the numeric names for sp / lr / pc, as a convenience. */
9159 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9160 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9161 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9163 const struct tdesc_feature *feature;
9166 feature = tdesc_find_feature (tdesc,
9167 "org.gnu.gdb.arm.core");
9168 if (feature == NULL)
9170 feature = tdesc_find_feature (tdesc,
9171 "org.gnu.gdb.arm.m-profile");
9172 if (feature == NULL)
9178 tdesc_data = tdesc_data_alloc ();
9181 for (i = 0; i < ARM_SP_REGNUM; i++)
9182 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9183 arm_register_names[i]);
9184 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9187 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9190 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9194 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9195 ARM_PS_REGNUM, "xpsr");
9197 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9198 ARM_PS_REGNUM, "cpsr");
9202 tdesc_data_cleanup (tdesc_data);
9206 feature = tdesc_find_feature (tdesc,
9207 "org.gnu.gdb.arm.fpa");
9208 if (feature != NULL)
9211 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9212 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9213 arm_register_names[i]);
9216 tdesc_data_cleanup (tdesc_data);
9221 have_fpa_registers = 0;
9223 feature = tdesc_find_feature (tdesc,
9224 "org.gnu.gdb.xscale.iwmmxt");
9225 if (feature != NULL)
9227 static const char *const iwmmxt_names[] = {
9228 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9229 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9230 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9231 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9235 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9237 &= tdesc_numbered_register (feature, tdesc_data, i,
9238 iwmmxt_names[i - ARM_WR0_REGNUM]);
9240 /* Check for the control registers, but do not fail if they
9242 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9243 tdesc_numbered_register (feature, tdesc_data, i,
9244 iwmmxt_names[i - ARM_WR0_REGNUM]);
9246 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9248 &= tdesc_numbered_register (feature, tdesc_data, i,
9249 iwmmxt_names[i - ARM_WR0_REGNUM]);
9253 tdesc_data_cleanup (tdesc_data);
9257 have_wmmx_registers = 1;
9260 /* If we have a VFP unit, check whether the single precision registers
9261 are present. If not, then we will synthesize them as pseudo
9263 feature = tdesc_find_feature (tdesc,
9264 "org.gnu.gdb.arm.vfp");
9265 if (feature != NULL)
9267 static const char *const vfp_double_names[] = {
9268 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9269 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9270 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9271 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9274 /* Require the double precision registers. There must be either
9277 for (i = 0; i < 32; i++)
9279 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9281 vfp_double_names[i]);
9285 if (!valid_p && i == 16)
9288 /* Also require FPSCR. */
9289 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9290 ARM_FPSCR_REGNUM, "fpscr");
9293 tdesc_data_cleanup (tdesc_data);
9297 if (tdesc_unnumbered_register (feature, "s0") == 0)
9298 have_vfp_pseudos = 1;
9300 vfp_register_count = i;
9302 /* If we have VFP, also check for NEON. The architecture allows
9303 NEON without VFP (integer vector operations only), but GDB
9304 does not support that. */
9305 feature = tdesc_find_feature (tdesc,
9306 "org.gnu.gdb.arm.neon");
9307 if (feature != NULL)
9309 /* NEON requires 32 double-precision registers. */
9312 tdesc_data_cleanup (tdesc_data);
9316 /* If there are quad registers defined by the stub, use
9317 their type; otherwise (normally) provide them with
9318 the default type. */
9319 if (tdesc_unnumbered_register (feature, "q0") == 0)
9320 have_neon_pseudos = 1;
9327 /* If there is already a candidate, use it. */
9328 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9330 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9332 if (arm_abi != ARM_ABI_AUTO
9333 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9336 if (fp_model != ARM_FLOAT_AUTO
9337 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9340 /* There are various other properties in tdep that we do not
9341 need to check here: those derived from a target description,
9342 since gdbarches with a different target description are
9343 automatically disqualified. */
9345 /* Do check is_m, though, since it might come from the binary. */
9346 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9349 /* Found a match. */
9353 if (best_arch != NULL)
9355 if (tdesc_data != NULL)
9356 tdesc_data_cleanup (tdesc_data);
9357 return best_arch->gdbarch;
9360 tdep = XCNEW (struct gdbarch_tdep);
9361 gdbarch = gdbarch_alloc (&info, tdep);
9363 /* Record additional information about the architecture we are defining.
9364 These are gdbarch discriminators, like the OSABI. */
9365 tdep->arm_abi = arm_abi;
9366 tdep->fp_model = fp_model;
9368 tdep->have_fpa_registers = have_fpa_registers;
9369 tdep->have_wmmx_registers = have_wmmx_registers;
9370 gdb_assert (vfp_register_count == 0
9371 || vfp_register_count == 16
9372 || vfp_register_count == 32);
9373 tdep->vfp_register_count = vfp_register_count;
9374 tdep->have_vfp_pseudos = have_vfp_pseudos;
9375 tdep->have_neon_pseudos = have_neon_pseudos;
9376 tdep->have_neon = have_neon;
9378 arm_register_g_packet_guesses (gdbarch);
9381 switch (info.byte_order_for_code)
9383 case BFD_ENDIAN_BIG:
9384 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9385 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9386 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9387 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9391 case BFD_ENDIAN_LITTLE:
9392 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9393 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9394 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9395 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9400 internal_error (__FILE__, __LINE__,
9401 _("arm_gdbarch_init: bad byte order for float format"));
9404 /* On ARM targets char defaults to unsigned. */
9405 set_gdbarch_char_signed (gdbarch, 0);
9407 /* Note: for displaced stepping, this includes the breakpoint, and one word
9408 of additional scratch space. This setting isn't used for anything beside
9409 displaced stepping at present. */
9410 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9412 /* This should be low enough for everything. */
9413 tdep->lowest_pc = 0x20;
9414 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9416 /* The default, for both APCS and AAPCS, is to return small
9417 structures in registers. */
9418 tdep->struct_return = reg_struct_return;
9420 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9421 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9424 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9426 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9428 /* Frame handling. */
9429 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9430 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9431 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9433 frame_base_set_default (gdbarch, &arm_normal_base);
9435 /* Address manipulation. */
9436 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9438 /* Advance PC across function entry code. */
9439 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9441 /* Detect whether PC is at a point where the stack has been destroyed. */
9442 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9444 /* Skip trampolines. */
9445 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9447 /* The stack grows downward. */
9448 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9450 /* Breakpoint manipulation. */
9451 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
9452 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
9453 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
9454 arm_breakpoint_kind_from_current_state);
9456 /* Information about registers, etc. */
9457 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9458 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9459 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9460 set_gdbarch_register_type (gdbarch, arm_register_type);
9461 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9463 /* This "info float" is FPA-specific. Use the generic version if we
9465 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9466 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9468 /* Internal <-> external register number maps. */
9469 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9470 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9472 set_gdbarch_register_name (gdbarch, arm_register_name);
9474 /* Returning results. */
9475 set_gdbarch_return_value (gdbarch, arm_return_value);
9478 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9480 /* Minsymbol frobbing. */
9481 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9482 set_gdbarch_coff_make_msymbol_special (gdbarch,
9483 arm_coff_make_msymbol_special);
9484 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9486 /* Thumb-2 IT block support. */
9487 set_gdbarch_adjust_breakpoint_address (gdbarch,
9488 arm_adjust_breakpoint_address);
9490 /* Virtual tables. */
9491 set_gdbarch_vbit_in_delta (gdbarch, 1);
9493 /* Hook in the ABI-specific overrides, if they have been registered. */
9494 gdbarch_init_osabi (info, gdbarch);
9496 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9498 /* Add some default predicates. */
9500 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9501 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9502 dwarf2_append_unwinders (gdbarch);
9503 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9504 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9505 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9507 /* Now we have tuned the configuration, set a few final things,
9508 based on what the OS ABI has told us. */
9510 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9511 binaries are always marked. */
9512 if (tdep->arm_abi == ARM_ABI_AUTO)
9513 tdep->arm_abi = ARM_ABI_APCS;
9515 /* Watchpoints are not steppable. */
9516 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9518 /* We used to default to FPA for generic ARM, but almost nobody
9519 uses that now, and we now provide a way for the user to force
9520 the model. So default to the most useful variant. */
9521 if (tdep->fp_model == ARM_FLOAT_AUTO)
9522 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9524 if (tdep->jb_pc >= 0)
9525 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9527 /* Floating point sizes and format. */
9528 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9529 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9531 set_gdbarch_double_format
9532 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9533 set_gdbarch_long_double_format
9534 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9538 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9539 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9542 if (have_vfp_pseudos)
9544 /* NOTE: These are the only pseudo registers used by
9545 the ARM target at the moment. If more are added, a
9546 little more care in numbering will be needed. */
9548 int num_pseudos = 32;
9549 if (have_neon_pseudos)
9551 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9552 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9553 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9558 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9560 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9562 /* Override tdesc_register_type to adjust the types of VFP
9563 registers for NEON. */
9564 set_gdbarch_register_type (gdbarch, arm_register_type);
9567 /* Add standard register aliases. We add aliases even for those
9568 names which are used by the current architecture - it's simpler,
9569 and does no harm, since nothing ever lists user registers. */
9570 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9571 user_reg_add (gdbarch, arm_register_aliases[i].name,
9572 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9574 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
9575 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
9581 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9583 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9588 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9589 (unsigned long) tdep->lowest_pc);
9594 static void arm_record_test (void);
9597 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9600 _initialize_arm_tdep (void)
9603 const char *setname;
9604 const char *setdesc;
9606 char regdesc[1024], *rdptr = regdesc;
9607 size_t rest = sizeof (regdesc);
9609 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9611 arm_objfile_data_key
9612 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9614 /* Add ourselves to objfile event chain. */
9615 observer_attach_new_objfile (arm_exidx_new_objfile);
9617 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9619 /* Register an ELF OS ABI sniffer for ARM binaries. */
9620 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9621 bfd_target_elf_flavour,
9622 arm_elf_osabi_sniffer);
9624 /* Initialize the standard target descriptions. */
9625 initialize_tdesc_arm_with_m ();
9626 initialize_tdesc_arm_with_m_fpa_layout ();
9627 initialize_tdesc_arm_with_m_vfp_d16 ();
9628 initialize_tdesc_arm_with_iwmmxt ();
9629 initialize_tdesc_arm_with_vfpv2 ();
9630 initialize_tdesc_arm_with_vfpv3 ();
9631 initialize_tdesc_arm_with_neon ();
9633 /* Add root prefix command for all "set arm"/"show arm" commands. */
9634 add_prefix_cmd ("arm", no_class, set_arm_command,
9635 _("Various ARM-specific commands."),
9636 &setarmcmdlist, "set arm ", 0, &setlist);
9638 add_prefix_cmd ("arm", no_class, show_arm_command,
9639 _("Various ARM-specific commands."),
9640 &showarmcmdlist, "show arm ", 0, &showlist);
9643 arm_disassembler_options = xstrdup ("reg-names-std");
9644 const disasm_options_t *disasm_options = disassembler_options_arm ();
9645 int num_disassembly_styles = 0;
9646 for (i = 0; disasm_options->name[i] != NULL; i++)
9647 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9648 num_disassembly_styles++;
9650 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9651 valid_disassembly_styles = XNEWVEC (const char *,
9652 num_disassembly_styles + 1);
9653 for (i = j = 0; disasm_options->name[i] != NULL; i++)
9654 if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
9656 size_t offset = strlen ("reg-names-");
9657 const char *style = disasm_options->name[i];
9658 valid_disassembly_styles[j++] = &style[offset];
9659 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
9660 disasm_options->description[i]);
9664 /* Mark the end of valid options. */
9665 valid_disassembly_styles[num_disassembly_styles] = NULL;
9667 /* Create the help text. */
9668 std::string helptext = string_printf ("%s%s%s",
9669 _("The valid values are:\n"),
9671 _("The default is \"std\"."));
9673 add_setshow_enum_cmd("disassembler", no_class,
9674 valid_disassembly_styles, &disassembly_style,
9675 _("Set the disassembly style."),
9676 _("Show the disassembly style."),
9678 set_disassembly_style_sfunc,
9679 show_disassembly_style_sfunc,
9680 &setarmcmdlist, &showarmcmdlist);
9682 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9683 _("Set usage of ARM 32-bit mode."),
9684 _("Show usage of ARM 32-bit mode."),
9685 _("When off, a 26-bit PC will be used."),
9687 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9689 &setarmcmdlist, &showarmcmdlist);
9691 /* Add a command to allow the user to force the FPU model. */
9692 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, ¤t_fp_model,
9693 _("Set the floating point type."),
9694 _("Show the floating point type."),
9695 _("auto - Determine the FP typefrom the OS-ABI.\n\
9696 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9697 fpa - FPA co-processor (GCC compiled).\n\
9698 softvfp - Software FP with pure-endian doubles.\n\
9699 vfp - VFP co-processor."),
9700 set_fp_model_sfunc, show_fp_model,
9701 &setarmcmdlist, &showarmcmdlist);
9703 /* Add a command to allow the user to force the ABI. */
9704 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9707 NULL, arm_set_abi, arm_show_abi,
9708 &setarmcmdlist, &showarmcmdlist);
9710 /* Add two commands to allow the user to force the assumed
9712 add_setshow_enum_cmd ("fallback-mode", class_support,
9713 arm_mode_strings, &arm_fallback_mode_string,
9714 _("Set the mode assumed when symbols are unavailable."),
9715 _("Show the mode assumed when symbols are unavailable."),
9716 NULL, NULL, arm_show_fallback_mode,
9717 &setarmcmdlist, &showarmcmdlist);
9718 add_setshow_enum_cmd ("force-mode", class_support,
9719 arm_mode_strings, &arm_force_mode_string,
9720 _("Set the mode assumed even when symbols are available."),
9721 _("Show the mode assumed even when symbols are available."),
9722 NULL, NULL, arm_show_force_mode,
9723 &setarmcmdlist, &showarmcmdlist);
9725 /* Debugging flag. */
9726 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9727 _("Set ARM debugging."),
9728 _("Show ARM debugging."),
9729 _("When on, arm-specific debugging is enabled."),
9731 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9732 &setdebuglist, &showdebuglist);
9735 register_self_test (selftests::arm_record_test);
9740 /* ARM-reversible process record data structures. */
9742 #define ARM_INSN_SIZE_BYTES 4
9743 #define THUMB_INSN_SIZE_BYTES 2
9744 #define THUMB2_INSN_SIZE_BYTES 4
9747 /* Position of the bit within a 32-bit ARM instruction
9748 that defines whether the instruction is a load or store. */
9749 #define INSN_S_L_BIT_NUM 20
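/* Sketch (not compiled) of how the L bit named above is tested in a raw
   32-bit instruction word; the example encoding is LDR r2, [r3, #4].  */
#if 0
  uint32_t insn = 0xe5932004;
  int is_load = (insn >> INSN_S_L_BIT_NUM) & 1;	/* 1 for loads, 0 for stores */
#endif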
9751 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9754 unsigned int reg_len = LENGTH; \
9757 REGS = XNEWVEC (uint32_t, reg_len); \
9758 memcpy(®S[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9763 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9766 unsigned int mem_len = LENGTH; \
9769 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9770 memcpy(&MEMS->len, &RECORD_BUF[0], \
9771 sizeof(struct arm_mem_r) * LENGTH); \
9776 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
9777 #define INSN_RECORDED(ARM_RECORD) \
9778 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9780 /* ARM memory record structure. */
9783 uint32_t len; /* Record length. */
9784 uint32_t addr; /* Memory address. */
9787 /* ARM instruction record contains opcode of current insn
9788 and execution state (before entry to decode_insn()),
9789 contains list of to-be-modified registers and
9790 memory blocks (on return from decode_insn()). */
9792 typedef struct insn_decode_record_t
9794 struct gdbarch *gdbarch;
9795 struct regcache *regcache;
9796 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9797 uint32_t arm_insn; /* Should accommodate thumb. */
9798 uint32_t cond; /* Condition code. */
9799 uint32_t opcode; /* Insn opcode. */
9800 uint32_t decode; /* Insn decode bits. */
9801 uint32_t mem_rec_count; /* No of mem records. */
9802 uint32_t reg_rec_count; /* No of reg records. */
9803 uint32_t *arm_regs; /* Registers to be saved for this record. */
9804 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9805 } insn_decode_record;
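/* Sketch (not compiled) of how the record helpers above are used by the
   per-opcode decoders that follow.  ARM_INSN_R stands for the
   insn_decode_record being filled in and TGT_MEM_ADDR for a store address
   the decoder has already computed.  */
#if 0
  uint32_t record_buf[8], record_buf_mem[8];

  record_buf[0] = ARM_PS_REGNUM;	/* registers the insn will clobber */
  record_buf[1] = ARM_LR_REGNUM;
  arm_insn_r->reg_rec_count = 2;

  record_buf_mem[0] = 4;		/* length of the memory write */
  record_buf_mem[1] = tgt_mem_addr;	/* address of the memory write */
  arm_insn_r->mem_rec_count = 1;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  /* INSN_RECORDED (arm_insn_r) is now true.  */
#endif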
9808 /* Checks ARM SBZ and SBO mandatory fields. */
9811 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9813 uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));
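/* Sketch (not compiled) of the field extraction used throughout the
   record code: bits (insn, st, fn) yields the inclusive bit-field
   insn[fn:st], i.e. a shift followed by a mask.  The example encoding
   is MOV r1, r2.  */
#if 0
  uint32_t insn = 0xe1a01002;
  uint32_t cond = (insn >> 28) & 0xf;	/* bits (insn, 28, 31) == 0xe */
  uint32_t rd   = (insn >> 12) & 0xf;	/* bits (insn, 12, 15) == 1   */
  uint32_t rm   = insn & 0xf;		/* bits (insn, 0, 3)   == 2   */
#endif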
9832 enum arm_record_result
9834 ARM_RECORD_SUCCESS = 0,
9835 ARM_RECORD_FAILURE = 1
9842 } arm_record_strx_t;
9853 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9854 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9857 struct regcache *reg_cache = arm_insn_r->regcache;
9858 ULONGEST u_regval[2]= {0};
9860 uint32_t reg_src1 = 0, reg_src2 = 0;
9861 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9863 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9864 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9866 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9868 /* 1) Handle misc store, immediate offset. */
9869 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9870 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9871 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9872 regcache_raw_read_unsigned (reg_cache, reg_src1,
9874 if (ARM_PC_REGNUM == reg_src1)
9876 /* If R15 was used as Rn, the value read is the current PC+8. */
9877 u_regval[0] = u_regval[0] + 8;
9879 offset_8 = (immed_high << 4) | immed_low;
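/* Worked example (not compiled) of the split immediate reassembled above:
   the 8-bit offset of the misc store forms is encoded as two nibbles, in
   insn bits 8-11 (high) and 0-3 (low).  */
#if 0
  uint32_t hi_nibble = 0xa, lo_nibble = 0x4;
  uint32_t off = (hi_nibble << 4) | lo_nibble;	/* 0xa4 == 164 */
#endif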
9880 /* Calculate target store address. */
9881 if (14 == arm_insn_r->opcode)
9883 tgt_mem_addr = u_regval[0] + offset_8;
9887 tgt_mem_addr = u_regval[0] - offset_8;
9889 if (ARM_RECORD_STRH == str_type)
9891 record_buf_mem[0] = 2;
9892 record_buf_mem[1] = tgt_mem_addr;
9893 arm_insn_r->mem_rec_count = 1;
9895 else if (ARM_RECORD_STRD == str_type)
9897 record_buf_mem[0] = 4;
9898 record_buf_mem[1] = tgt_mem_addr;
9899 record_buf_mem[2] = 4;
9900 record_buf_mem[3] = tgt_mem_addr + 4;
9901 arm_insn_r->mem_rec_count = 2;
9904 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9906 /* 2) Store, register offset. */
9908 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9910 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9911 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9912 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9915 /* If R15 was used as Rn, the value read is the current PC+8. */
9916 u_regval[0] = u_regval[0] + 8;
9918 /* Calculate target store address, Rn +/- Rm, register offset. */
9919 if (12 == arm_insn_r->opcode)
9921 tgt_mem_addr = u_regval[0] + u_regval[1];
9925 tgt_mem_addr = u_regval[1] - u_regval[0];
9927 if (ARM_RECORD_STRH == str_type)
9929 record_buf_mem[0] = 2;
9930 record_buf_mem[1] = tgt_mem_addr;
9931 arm_insn_r->mem_rec_count = 1;
9933 else if (ARM_RECORD_STRD == str_type)
9935 record_buf_mem[0] = 4;
9936 record_buf_mem[1] = tgt_mem_addr;
9937 record_buf_mem[2] = 4;
9938 record_buf_mem[3] = tgt_mem_addr + 4;
9939 arm_insn_r->mem_rec_count = 2;
9942 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9943 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9945 /* 3) Store, immediate pre-indexed. */
9946 /* 5) Store, immediate post-indexed. */
9947 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9948 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9949 offset_8 = (immed_high << 4) | immed_low;
9950 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9951 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9952 /* Calculate target store address, Rn +/- Rm, register offset. */
9953 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9955 tgt_mem_addr = u_regval[0] + offset_8;
9959 tgt_mem_addr = u_regval[0] - offset_8;
9961 if (ARM_RECORD_STRH == str_type)
9963 record_buf_mem[0] = 2;
9964 record_buf_mem[1] = tgt_mem_addr;
9965 arm_insn_r->mem_rec_count = 1;
9967 else if (ARM_RECORD_STRD == str_type)
9969 record_buf_mem[0] = 4;
9970 record_buf_mem[1] = tgt_mem_addr;
9971 record_buf_mem[2] = 4;
9972 record_buf_mem[3] = tgt_mem_addr + 4;
9973 arm_insn_r->mem_rec_count = 2;
9975 /* Record Rn also as it changes. */
9976 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9977 arm_insn_r->reg_rec_count = 1;
9979 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9980 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9982 /* 4) Store, register pre-indexed. */
9983 /* 6) Store, register post-indexed. */
9984 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9985 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9986 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9987 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9988 /* Calculate target store address, Rn +/- Rm, register offset. */
9989 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9991 tgt_mem_addr = u_regval[0] + u_regval[1];
9995 tgt_mem_addr = u_regval[1] - u_regval[0];
9997 if (ARM_RECORD_STRH == str_type)
9999 record_buf_mem[0] = 2;
10000 record_buf_mem[1] = tgt_mem_addr;
10001 arm_insn_r->mem_rec_count = 1;
10003 else if (ARM_RECORD_STRD == str_type)
10005 record_buf_mem[0] = 4;
10006 record_buf_mem[1] = tgt_mem_addr;
10007 record_buf_mem[2] = 4;
10008 record_buf_mem[3] = tgt_mem_addr + 4;
10009 arm_insn_r->mem_rec_count = 2;
10011 /* Record Rn also as it changes. */
10012 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10013 arm_insn_r->reg_rec_count = 1;
10018 /* Handling ARM extension space insns. */
10021 arm_record_extension_space (insn_decode_record *arm_insn_r)
10023 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10024 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10025 uint32_t record_buf[8], record_buf_mem[8];
10026 uint32_t reg_src1 = 0;
10027 struct regcache *reg_cache = arm_insn_r->regcache;
10028 ULONGEST u_regval = 0;
10030 gdb_assert (!INSN_RECORDED(arm_insn_r));
10031 /* Handle unconditional insn extension space. */
10033 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10034 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10035 if (arm_insn_r->cond)
10037 /* PLD has no effect on architectural state; it just affects
10039 if (5 == ((opcode1 & 0xE0) >> 5))
10042 record_buf[0] = ARM_PS_REGNUM;
10043 record_buf[1] = ARM_LR_REGNUM;
10044 arm_insn_r->reg_rec_count = 2;
10046 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10050 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10051 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10054 /* Undefined instruction on ARM V5; need to handle if later
10055 versions define it. */
10058 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10059 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10060 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10062 /* Handle arithmetic insn extension space. */
10063 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10064 && !INSN_RECORDED(arm_insn_r))
10066 /* Handle MLA(S) and MUL(S). */
10067 if (0 <= insn_op1 && 3 >= insn_op1)
10069 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10070 record_buf[1] = ARM_PS_REGNUM;
10071 arm_insn_r->reg_rec_count = 2;
10073 else if (4 <= insn_op1 && 15 >= insn_op1)
10075 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10076 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10077 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10078 record_buf[2] = ARM_PS_REGNUM;
10079 arm_insn_r->reg_rec_count = 3;
10083 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
10084 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
10085 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
10087 /* Handle control insn extension space. */
10089 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
10090 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10092 if (!bit (arm_insn_r->arm_insn,25))
10094 if (!bits (arm_insn_r->arm_insn, 4, 7))
10096 if ((0 == insn_op1) || (2 == insn_op1))
10099 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10100 arm_insn_r->reg_rec_count = 1;
10102 else if (1 == insn_op1)
10104 /* CPSR is going to be changed. */
10105 record_buf[0] = ARM_PS_REGNUM;
10106 arm_insn_r->reg_rec_count = 1;
10108 else if (3 == insn_op1)
10110 /* SPSR is going to be changed. */
10111 /* We need to get SPSR value, which is yet to be done. */
10115 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10120 record_buf[0] = ARM_PS_REGNUM;
10121 arm_insn_r->reg_rec_count = 1;
10123 else if (3 == insn_op1)
10126 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10127 arm_insn_r->reg_rec_count = 1;
10130 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10133 record_buf[0] = ARM_PS_REGNUM;
10134 record_buf[1] = ARM_LR_REGNUM;
10135 arm_insn_r->reg_rec_count = 2;
10137 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10139 /* QADD, QSUB, QDADD, QDSUB */
10140 record_buf[0] = ARM_PS_REGNUM;
10141 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10142 arm_insn_r->reg_rec_count = 2;
10144 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10147 record_buf[0] = ARM_PS_REGNUM;
10148 record_buf[1] = ARM_LR_REGNUM;
10149 arm_insn_r->reg_rec_count = 2;
10151 /* Save SPSR also; how? */
10154 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10155 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10156 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10157 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10160 if (0 == insn_op1 || 1 == insn_op1)
10162 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10163 /* We don't do optimization for SMULW<y> where we
10165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10166 record_buf[1] = ARM_PS_REGNUM;
10167 arm_insn_r->reg_rec_count = 2;
10169 else if (2 == insn_op1)
10172 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10173 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10174 arm_insn_r->reg_rec_count = 2;
10176 else if (3 == insn_op1)
10179 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10180 arm_insn_r->reg_rec_count = 1;
10186 /* MSR : immediate form. */
10189 /* CPSR is going to be changed. */
10190 record_buf[0] = ARM_PS_REGNUM;
10191 arm_insn_r->reg_rec_count = 1;
10193 else if (3 == insn_op1)
10195 /* SPSR is going to be changed. */
10196 /* We need to get the SPSR value, which is yet to be done. */
10202 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10203 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10204 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10206 /* Handle load/store insn extension space. */
10208 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10209 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10210 && !INSN_RECORDED(arm_insn_r))
10215 /* These insns change registers and memory as well. */
10216 /* SWP or SWPB insn. */
10217 /* Get memory address given by Rn. */
10218 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10219 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10220 /* SWP insn swaps a word. */
10221 if (8 == arm_insn_r->opcode)
10223 record_buf_mem[0] = 4;
10227 /* SWPB insn swaps only a byte. */
10228 record_buf_mem[0] = 1;
10230 record_buf_mem[1] = u_regval;
10231 arm_insn_r->mem_rec_count = 1;
10232 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10233 arm_insn_r->reg_rec_count = 1;
10235 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10238 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10241 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10244 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10245 record_buf[1] = record_buf[0] + 1;
10246 arm_insn_r->reg_rec_count = 2;
10248 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10251 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10254 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10256 /* LDRH, LDRSB, LDRSH. */
10257 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10258 arm_insn_r->reg_rec_count = 1;
10263 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10264 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10265 && !INSN_RECORDED(arm_insn_r))
10268 /* Handle coprocessor insn extension space. */
10271 /* To be done for ARMv5 and later; as of now we return -1. */
10275 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10276 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10281 /* Handling opcode 000 insns. */
10284 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10286 struct regcache *reg_cache = arm_insn_r->regcache;
10287 uint32_t record_buf[8], record_buf_mem[8];
10288 ULONGEST u_regval[2] = {0};
10290 uint32_t reg_src1 = 0, reg_dest = 0;
10291 uint32_t opcode1 = 0;
10293 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10294 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10295 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10297 /* Data processing insn /multiply insn. */
10298 if (9 == arm_insn_r->decode
10299 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10300 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10302 /* Handle multiply instructions. */
10303 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10304 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10306 /* Handle MLA and MUL. */
10307 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10308 record_buf[1] = ARM_PS_REGNUM;
10309 arm_insn_r->reg_rec_count = 2;
10311 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10313 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10314 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10315 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10316 record_buf[2] = ARM_PS_REGNUM;
10317 arm_insn_r->reg_rec_count = 3;
10320 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10321 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10323 /* Handle misc load insns, as 20th bit (L = 1). */
10324 /* The LDR insn is capable of branching: if
10325 an LDR insn having Rn as R15 is preceded by MOV LR, PC,
10326 the pair emulates a branch and link insn, and hence we
10327 need to save CPSR and PC as well. I am not sure this is the right
10328 place; as the opcode = 010 LDR insn makes this happen, if R15 was
10330 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10331 if (15 != reg_dest)
10333 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10334 arm_insn_r->reg_rec_count = 1;
10338 record_buf[0] = reg_dest;
10339 record_buf[1] = ARM_PS_REGNUM;
10340 arm_insn_r->reg_rec_count = 2;
10343 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10344 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10345 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10346 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10348 /* Handle MSR insn. */
10349 if (9 == arm_insn_r->opcode)
10351 /* CPSR is going to be changed. */
10352 record_buf[0] = ARM_PS_REGNUM;
10353 arm_insn_r->reg_rec_count = 1;
10357 /* SPSR is going to be changed. */
10358 /* How to read SPSR value? */
10362 else if (9 == arm_insn_r->decode
10363 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10364 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10366 /* Handling SWP, SWPB. */
10367 /* These insns change registers and memory as well. */
10368 /* SWP or SWPB insn. */
10370 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10371 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10372 /* SWP insn swaps a word. */
10373 if (8 == arm_insn_r->opcode)
10375 record_buf_mem[0] = 4;
10379 /* SWPB insn swaps only a byte. */
10380 record_buf_mem[0] = 1;
10382 record_buf_mem[1] = u_regval[0];
10383 arm_insn_r->mem_rec_count = 1;
10384 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10385 arm_insn_r->reg_rec_count = 1;
10387 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10388 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10390 /* Handle BLX, branch and link/exchange. */
10391 if (9 == arm_insn_r->opcode)
10393 /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm,
10394 and R14 stores the return address. */
10395 record_buf[0] = ARM_PS_REGNUM;
10396 record_buf[1] = ARM_LR_REGNUM;
10397 arm_insn_r->reg_rec_count = 2;
10400 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10402 /* Handle enhanced software breakpoint insn, BKPT. */
10403 /* CPSR is changed so that execution continues in ARM state, with normal
10404 interrupts disabled, entering Abort mode. */
10405 /* The PC is set according to the high-vector configuration. */
10406 /* If the user hits the breakpoint and then reverses execution,
10407 we need to go back with the previous CPSR and
10408 Program Counter. */
10409 record_buf[0] = ARM_PS_REGNUM;
10410 record_buf[1] = ARM_LR_REGNUM;
10411 arm_insn_r->reg_rec_count = 2;
10413 /* Save SPSR also; how? */
10416 else if (11 == arm_insn_r->decode
10417 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10419 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10421 /* Handle str(x) insn. */
10422 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
10425 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10426 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10428 /* Handle BX, branch and exchange. */
10429 /* Branch is chosen by setting T bit of CPSR, bit[0] of Rm. */
10430 record_buf[0] = ARM_PS_REGNUM;
10431 arm_insn_r->reg_rec_count = 1;
10433 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10434 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10435 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10437 /* Count leading zeros: CLZ. */
10438 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10439 arm_insn_r->reg_rec_count = 1;
10441 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10442 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10443 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10444 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10447 /* Handle MRS insn. */
10448 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10449 arm_insn_r->reg_rec_count = 1;
10451 else if (arm_insn_r->opcode <= 15)
10453 /* Normal data processing insns. */
10454 /* In all 11 shifter-operand modes, the insn modifies the destination
10455 register, which is specified by the 13-16 decode field. */
10456 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10457 record_buf[1] = ARM_PS_REGNUM;
10458 arm_insn_r->reg_rec_count = 2;
10465 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10466 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10470 /* Handling opcode 001 insns. */
10473 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10475 uint32_t record_buf[8], record_buf_mem[8];
10477 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10478 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10480 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10481 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10482 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10485 /* Handle MSR insn. */
10486 if (9 == arm_insn_r->opcode)
10488 /* CPSR is going to be changed. */
10489 record_buf[0] = ARM_PS_REGNUM;
10490 arm_insn_r->reg_rec_count = 1;
10494 /* SPSR is going to be changed. */
10497 else if (arm_insn_r->opcode <= 15)
10499 /* Normal data processing insns. */
10500 /* In all of the 11 shifter operand modes, the insn modifies the
10501 destination register, which is specified by bits 12-15. */
10502 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10503 record_buf[1] = ARM_PS_REGNUM;
10504 arm_insn_r->reg_rec_count = 2;
10511 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10512 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10517 arm_record_media (insn_decode_record *arm_insn_r)
10519 uint32_t record_buf[8];
10521 switch (bits (arm_insn_r->arm_insn, 22, 24))
10524 /* Parallel addition and subtraction, signed */
10526 /* Parallel addition and subtraction, unsigned */
10529 /* Packing, unpacking, saturation and reversal */
10531 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10533 record_buf[arm_insn_r->reg_rec_count++] = rd;
10539 /* Signed multiplies */
10541 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10542 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10544 record_buf[arm_insn_r->reg_rec_count++] = rd;
10546 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10547 else if (op1 == 0x4)
10548 record_buf[arm_insn_r->reg_rec_count++]
10549 = bits (arm_insn_r->arm_insn, 12, 15);
10555 if (bit (arm_insn_r->arm_insn, 21)
10556 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10559 record_buf[arm_insn_r->reg_rec_count++]
10560 = bits (arm_insn_r->arm_insn, 12, 15);
10562 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10563 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10565 /* USAD8 and USADA8 */
10566 record_buf[arm_insn_r->reg_rec_count++]
10567 = bits (arm_insn_r->arm_insn, 16, 19);
10574 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10575 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10577 /* Permanently UNDEFINED */
10582 /* BFC, BFI and UBFX */
10583 record_buf[arm_insn_r->reg_rec_count++]
10584 = bits (arm_insn_r->arm_insn, 12, 15);
10593 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10598 /* Handle ARM mode instructions with opcode 010. */
10601 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10603 struct regcache *reg_cache = arm_insn_r->regcache;
10605 uint32_t reg_base, reg_dest;
10606 uint32_t offset_12, tgt_mem_addr;
10607 uint32_t record_buf[8], record_buf_mem[8];
10608 unsigned char wback;
10611 /* Calculate wback. */
10612 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10613 || (bit (arm_insn_r->arm_insn, 21) == 1);
10615 arm_insn_r->reg_rec_count = 0;
10616 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10618 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10620 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10623 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10624 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10626 /* The LDR instruction is capable of branching. If MOV LR, PC
10627 precedes an LDR instruction having R15 as reg_dest, it
10628 emulates a branch and link instruction, and hence we need to save
10629 CPSR and PC as well. */
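/* For example, the sequence
     mov lr, pc
     ldr pc, [r4]
   acts like a call through a function pointer; recording CPSR and PC
   here is what allows reverse execution to step back across it.  */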
10630 if (ARM_PC_REGNUM == reg_dest)
10631 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10633 /* If wback is true, also save the base register, which is going to be
10636 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10640 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10642 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10643 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10645 /* Handle bit U. */
10646 if (bit (arm_insn_r->arm_insn, 23))
10648 /* U == 1: Add the offset. */
10649 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10653 /* U == 0: subtract the offset. */
10654 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10657 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10659 if (bit (arm_insn_r->arm_insn, 22))
10661 /* STRB and STRBT: 1 byte. */
10662 record_buf_mem[0] = 1;
10666 /* STR and STRT: 4 bytes. */
10667 record_buf_mem[0] = 4;
10670 /* Handle bit P. */
10671 if (bit (arm_insn_r->arm_insn, 24))
10672 record_buf_mem[1] = tgt_mem_addr;
10674 record_buf_mem[1] = (uint32_t) u_regval;
10676 arm_insn_r->mem_rec_count = 1;
10678 /* If wback is true, also save the base register, which is going to be
10681 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10684 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10685 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10689 /* Handling opcode 011 insns. */
10692 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10694 struct regcache *reg_cache = arm_insn_r->regcache;
10696 uint32_t shift_imm = 0;
10697 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10698 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10699 uint32_t record_buf[8], record_buf_mem[8];
10702 ULONGEST u_regval[2];
10704 if (bit (arm_insn_r->arm_insn, 4))
10705 return arm_record_media (arm_insn_r);
10707 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10708 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10710 /* Handle enhanced store insns and LDRD DSP insn,
10711 order begins according to addressing modes for store insns
10715 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10717 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10718 /* The LDR insn is capable of branching: if
10719 MOV LR, PC precedes an LDR insn that has R15 as its destination,
10720 it emulates a branch and link insn, and hence we
10721 need to save CPSR and PC as well. */
10722 if (15 != reg_dest)
10724 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10725 arm_insn_r->reg_rec_count = 1;
10729 record_buf[0] = reg_dest;
10730 record_buf[1] = ARM_PS_REGNUM;
10731 arm_insn_r->reg_rec_count = 2;
10736 if (! bits (arm_insn_r->arm_insn, 4, 11))
10738 /* Store insn, register offset and register pre-indexed,
10739 register post-indexed. */
10741 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10743 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10744 regcache_raw_read_unsigned (reg_cache, reg_src1
10746 regcache_raw_read_unsigned (reg_cache, reg_src2
10748 if (15 == reg_src2)
10750 /* If R15 was used as Rn, the value read is the current PC+8. */
10751 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10752 u_regval[0] = u_regval[0] + 8;
10754 /* Calculate target store address, Rn +/- Rm, register offset. */
10756 if (bit (arm_insn_r->arm_insn, 23))
10758 tgt_mem_addr = u_regval[0] + u_regval[1];
10762 tgt_mem_addr = u_regval[1] - u_regval[0];
10765 switch (arm_insn_r->opcode)
10779 record_buf_mem[0] = 4;
10794 record_buf_mem[0] = 1;
10798 gdb_assert_not_reached ("no decoding pattern found");
10801 record_buf_mem[1] = tgt_mem_addr;
10802 arm_insn_r->mem_rec_count = 1;
10804 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10805 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10806 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10807 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10808 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10809 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10812 /* Rn is going to be changed in pre-indexed mode and
10813 post-indexed mode as well. */
10814 record_buf[0] = reg_src2;
10815 arm_insn_r->reg_rec_count = 1;
10820 /* Store insn, scaled register offset; scaled pre-indexed. */
10821 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10823 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10825 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10826 /* Get shift_imm. */
10827 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10828 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10829 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10830 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10831 /* Offset_12 used as shift. */
10835 /* Offset_12 used as index. */
10836 offset_12 = u_regval[0] << shift_imm;
10840 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10846 if (bit (u_regval[0], 31))
10848 offset_12 = 0xFFFFFFFF;
10857 /* This is an arithmetic shift. */
10858 offset_12 = s_word >> shift_imm;
10865 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10867 /* Get C flag value and shift it by 31. */
10868 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10869 | (u_regval[0]) >> 1);
10873 offset_12 = (u_regval[0] >> shift_imm) \
10875 (32 - shift_imm));
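/* For example, rotating u_regval[0] = 0x80000001 right by
   shift_imm = 4 gives (0x80000001 >> 4) | (0x80000001 << 28)
   = 0x08000000 | 0x10000000 = 0x18000000.  */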
10880 gdb_assert_not_reached ("no decoding pattern found");
10884 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10886 if (bit (arm_insn_r->arm_insn, 23))
10888 tgt_mem_addr = u_regval[1] + offset_12;
10892 tgt_mem_addr = u_regval[1] - offset_12;
10895 switch (arm_insn_r->opcode)
10909 record_buf_mem[0] = 4;
10924 record_buf_mem[0] = 1;
10928 gdb_assert_not_reached ("no decoding pattern found");
10931 record_buf_mem[1] = tgt_mem_addr;
10932 arm_insn_r->mem_rec_count = 1;
10934 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10935 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10936 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10937 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10938 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10939 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10942 /* Rn is going to be changed in register scaled pre-indexed
10943 mode, and in scaled post-indexed mode as well. */
10944 record_buf[0] = reg_src2;
10945 arm_insn_r->reg_rec_count = 1;
10950 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10951 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10955 /* Handle ARM mode instructions with opcode 100. */
10958 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10960 struct regcache *reg_cache = arm_insn_r->regcache;
10961 uint32_t register_count = 0, register_bits;
10962 uint32_t reg_base, addr_mode;
10963 uint32_t record_buf[24], record_buf_mem[48];
10967 /* Fetch the list of registers. */
10968 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10969 arm_insn_r->reg_rec_count = 0;
10971 /* Fetch the base register that contains the address we are loading data
10973 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10975 /* Calculate wback. */
10976 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10978 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10980 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10982 /* Find out which registers are going to be loaded from memory. */
10983 while (register_bits)
10985 if (register_bits & 0x00000001)
10986 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10987 register_bits = register_bits >> 1;
10992 /* If wback is true, also save the base register, which is going to be
10995 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10997 /* Save the CPSR register. */
10998 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11002 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11004 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11006 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11008 /* Find out how many registers are going to be stored to memory. */
11009 while (register_bits)
11011 if (register_bits & 0x00000001)
11013 register_bits = register_bits >> 1;
11018 /* STMDA (STMED): Decrement after. */
11020 record_buf_mem[1] = (uint32_t) u_regval
11021 - register_count * INT_REGISTER_SIZE + 4;
11023 /* STM (STMIA, STMEA): Increment after. */
11025 record_buf_mem[1] = (uint32_t) u_regval;
11027 /* STMDB (STMFD): Decrement before. */
11029 record_buf_mem[1] = (uint32_t) u_regval
11030 - register_count * INT_REGISTER_SIZE;
11032 /* STMIB (STMFA): Increment before. */
11034 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11037 gdb_assert_not_reached ("no decoding pattern found");
11041 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11042 arm_insn_r->mem_rec_count = 1;
11044 /* If wback is true, also save the base register, which is going to be
11047 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
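/* For example, STMDB r13!, {r4-r6, lr} stores four registers, so the
   16 bytes starting at the original SP - 16 are recorded, and the
   write-back means r13 itself is recorded as well.  */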
11050 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11051 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11055 /* Handling opcode 101 insns. */
11058 arm_record_b_bl (insn_decode_record *arm_insn_r)
11060 uint32_t record_buf[8];
11062 /* Handle B, BL, BLX(1) insns. */
11063 /* B simply branches so we do nothing here. */
11064 /* Note: BLX(1) doesn't fall here; instead it falls into the
11065 extension space. */
11066 if (bit (arm_insn_r->arm_insn, 24))
11068 record_buf[0] = ARM_LR_REGNUM;
11069 arm_insn_r->reg_rec_count = 1;
11072 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11078 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11080 printf_unfiltered (_("Process record does not support instruction "
11081 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11082 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11087 /* Record handler for vector data transfer instructions. */
11090 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11092 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11093 uint32_t record_buf[4];
11095 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11096 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11097 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11098 bit_l = bit (arm_insn_r->arm_insn, 20);
11099 bit_c = bit (arm_insn_r->arm_insn, 8);
11101 /* Handle VMOV instruction. */
11102 if (bit_l && bit_c)
11104 record_buf[0] = reg_t;
11105 arm_insn_r->reg_rec_count = 1;
11107 else if (bit_l && !bit_c)
11109 /* Handle VMOV instruction. */
11110 if (bits_a == 0x00)
11112 record_buf[0] = reg_t;
11113 arm_insn_r->reg_rec_count = 1;
11115 /* Handle VMRS instruction. */
11116 else if (bits_a == 0x07)
11119 reg_t = ARM_PS_REGNUM;
11121 record_buf[0] = reg_t;
11122 arm_insn_r->reg_rec_count = 1;
11125 else if (!bit_l && !bit_c)
11127 /* Handle VMOV instruction. */
11128 if (bits_a == 0x00)
11130 record_buf[0] = ARM_D0_REGNUM + reg_v;
11132 arm_insn_r->reg_rec_count = 1;
11134 /* Handle VMSR instruction. */
11135 else if (bits_a == 0x07)
11137 record_buf[0] = ARM_FPSCR_REGNUM;
11138 arm_insn_r->reg_rec_count = 1;
11141 else if (!bit_l && bit_c)
11143 /* Handle VMOV instruction. */
11144 if (!(bits_a & 0x04))
11146 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11148 arm_insn_r->reg_rec_count = 1;
11150 /* Handle VDUP instruction. */
11153 if (bit (arm_insn_r->arm_insn, 21))
11155 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11156 record_buf[0] = reg_v + ARM_D0_REGNUM;
11157 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11158 arm_insn_r->reg_rec_count = 2;
11162 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11163 record_buf[0] = reg_v + ARM_D0_REGNUM;
11164 arm_insn_r->reg_rec_count = 1;
11169 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11173 /* Record handler for extension register load/store instructions. */
11176 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11178 uint32_t opcode, single_reg;
11179 uint8_t op_vldm_vstm;
11180 uint32_t record_buf[8], record_buf_mem[128];
11181 ULONGEST u_regval = 0;
11183 struct regcache *reg_cache = arm_insn_r->regcache;
11185 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11186 single_reg = !bit (arm_insn_r->arm_insn, 8);
11187 op_vldm_vstm = opcode & 0x1b;
11189 /* Handle VMOV instructions. */
11190 if ((opcode & 0x1e) == 0x04)
11192 if (bit (arm_insn_r->arm_insn, 20)) /* Bit 20 set: transfer to ARM core registers. */
11194 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11195 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11196 arm_insn_r->reg_rec_count = 2;
11200 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11201 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11205 /* The first S register number m is REG_M:M (M is bit 5),
11206 the corresponding D register number is REG_M:M / 2, which
11208 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11209 /* The second S register number is REG_M:M + 1, the
11210 corresponding D register number is (REG_M:M + 1) / 2.
11211 IOW, if bit M is 1, the first and second S registers
11212 are mapped to different D registers, otherwise, they are
11213 in the same D register. */
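/* For example, with REG_M = 3 and bit M = 1 the two S registers are
   S7 and S8, which live in D3 and D4; with bit M = 0 they are S6 and
   S7, which both live in D3.  */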
11216 record_buf[arm_insn_r->reg_rec_count++]
11217 = ARM_D0_REGNUM + reg_m + 1;
11222 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11223 arm_insn_r->reg_rec_count = 1;
11227 /* Handle VSTM and VPUSH instructions. */
11228 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11229 || op_vldm_vstm == 0x12)
11231 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11232 uint32_t memory_index = 0;
11234 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11235 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11236 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11237 imm_off32 = imm_off8 << 2;
11238 memory_count = imm_off8;
11240 if (bit (arm_insn_r->arm_insn, 23))
11241 start_address = u_regval;
11243 start_address = u_regval - imm_off32;
11245 if (bit (arm_insn_r->arm_insn, 21))
11247 record_buf[0] = reg_rn;
11248 arm_insn_r->reg_rec_count = 1;
11251 while (memory_count > 0)
11255 record_buf_mem[memory_index] = 4;
11256 record_buf_mem[memory_index + 1] = start_address;
11257 start_address = start_address + 4;
11258 memory_index = memory_index + 2;
11262 record_buf_mem[memory_index] = 4;
11263 record_buf_mem[memory_index + 1] = start_address;
11264 record_buf_mem[memory_index + 2] = 4;
11265 record_buf_mem[memory_index + 3] = start_address + 4;
11266 start_address = start_address + 8;
11267 memory_index = memory_index + 4;
11271 arm_insn_r->mem_rec_count = (memory_index >> 1);
11273 /* Handle VLDM instructions. */
11274 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11275 || op_vldm_vstm == 0x13)
11277 uint32_t reg_count, reg_vd;
11278 uint32_t reg_index = 0;
11279 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11281 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11282 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11284 /* REG_VD is the first D register number. If the instruction
11285 loads memory to S registers (SINGLE_REG is TRUE), the register
11286 number is (REG_VD << 1 | bit D), so the corresponding D
11287 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11289 reg_vd = reg_vd | (bit_d << 4);
11291 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11292 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11294 /* If the instruction loads memory to D register, REG_COUNT should
11295 be divided by 2, according to the ARM Architecture Reference
11296 Manual. If the instruction loads memory to S register, divide by
11297 2 as well because two S registers are mapped to D register. */
11298 reg_count = reg_count / 2;
11299 if (single_reg && bit_d)
11301 /* Increase the register count if S register list starts from
11302 an odd number (bit d is one). */
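/* For example, a VLDM of S1-S4 has REG_COUNT = 4 and bit D set;
   4 / 2 = 2 plus one for the odd start gives 3 D registers, D0-D2,
   which is exactly the range those four S registers occupy.  */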
11306 while (reg_count > 0)
11308 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11311 arm_insn_r->reg_rec_count = reg_index;
11313 /* VSTR Vector store register. */
11314 else if ((opcode & 0x13) == 0x10)
11316 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11317 uint32_t memory_index = 0;
11319 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11320 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11321 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11322 imm_off32 = imm_off8 << 2;
11324 if (bit (arm_insn_r->arm_insn, 23))
11325 start_address = u_regval + imm_off32;
11327 start_address = u_regval - imm_off32;
11331 record_buf_mem[memory_index] = 4;
11332 record_buf_mem[memory_index + 1] = start_address;
11333 arm_insn_r->mem_rec_count = 1;
11337 record_buf_mem[memory_index] = 4;
11338 record_buf_mem[memory_index + 1] = start_address;
11339 record_buf_mem[memory_index + 2] = 4;
11340 record_buf_mem[memory_index + 3] = start_address + 4;
11341 arm_insn_r->mem_rec_count = 2;
11344 /* VLDR Vector load register. */
11345 else if ((opcode & 0x13) == 0x11)
11347 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11351 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11352 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11356 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11357 /* Record register D rather than pseudo register S. */
11358 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11360 arm_insn_r->reg_rec_count = 1;
11363 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11364 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11368 /* Record handler for arm/thumb mode VFP data processing instructions. */
11371 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11373 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11374 uint32_t record_buf[4];
11375 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11376 enum insn_types curr_insn_type = INSN_INV;
11378 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11379 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11380 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11381 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11382 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11383 bit_d = bit (arm_insn_r->arm_insn, 22);
11384 opc1 = opc1 & ~0x04; /* Mask out the D bit (bit 22) before decoding. */
11386 /* Handle VMLA, VMLS. */
11389 if (bit (arm_insn_r->arm_insn, 10))
11391 if (bit (arm_insn_r->arm_insn, 6))
11392 curr_insn_type = INSN_T0;
11394 curr_insn_type = INSN_T1;
11399 curr_insn_type = INSN_T1;
11401 curr_insn_type = INSN_T2;
11404 /* Handle VNMLA, VNMLS, VNMUL. */
11405 else if (opc1 == 0x01)
11408 curr_insn_type = INSN_T1;
11410 curr_insn_type = INSN_T2;
11413 else if (opc1 == 0x02 && !(opc3 & 0x01))
11415 if (bit (arm_insn_r->arm_insn, 10))
11417 if (bit (arm_insn_r->arm_insn, 6))
11418 curr_insn_type = INSN_T0;
11420 curr_insn_type = INSN_T1;
11425 curr_insn_type = INSN_T1;
11427 curr_insn_type = INSN_T2;
11430 /* Handle VADD, VSUB. */
11431 else if (opc1 == 0x03)
11433 if (!bit (arm_insn_r->arm_insn, 9))
11435 if (bit (arm_insn_r->arm_insn, 6))
11436 curr_insn_type = INSN_T0;
11438 curr_insn_type = INSN_T1;
11443 curr_insn_type = INSN_T1;
11445 curr_insn_type = INSN_T2;
11449 else if (opc1 == 0x08)
11452 curr_insn_type = INSN_T1;
11454 curr_insn_type = INSN_T2;
11456 /* Handle all other vfp data processing instructions. */
11457 else if (opc1 == 0x0b)
11460 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11462 if (bit (arm_insn_r->arm_insn, 4))
11464 if (bit (arm_insn_r->arm_insn, 6))
11465 curr_insn_type = INSN_T0;
11467 curr_insn_type = INSN_T1;
11472 curr_insn_type = INSN_T1;
11474 curr_insn_type = INSN_T2;
11477 /* Handle VNEG and VABS. */
11478 else if ((opc2 == 0x01 && opc3 == 0x01)
11479 || (opc2 == 0x00 && opc3 == 0x03))
11481 if (!bit (arm_insn_r->arm_insn, 11))
11483 if (bit (arm_insn_r->arm_insn, 6))
11484 curr_insn_type = INSN_T0;
11486 curr_insn_type = INSN_T1;
11491 curr_insn_type = INSN_T1;
11493 curr_insn_type = INSN_T2;
11496 /* Handle VSQRT. */
11497 else if (opc2 == 0x01 && opc3 == 0x03)
11500 curr_insn_type = INSN_T1;
11502 curr_insn_type = INSN_T2;
11505 else if (opc2 == 0x07 && opc3 == 0x03)
11508 curr_insn_type = INSN_T1;
11510 curr_insn_type = INSN_T2;
11512 else if (opc3 & 0x01)
11515 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11517 if (!bit (arm_insn_r->arm_insn, 18))
11518 curr_insn_type = INSN_T2;
11522 curr_insn_type = INSN_T1;
11524 curr_insn_type = INSN_T2;
11528 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11531 curr_insn_type = INSN_T1;
11533 curr_insn_type = INSN_T2;
11535 /* Handle VCVTB, VCVTT. */
11536 else if ((opc2 & 0x0e) == 0x02)
11537 curr_insn_type = INSN_T2;
11538 /* Handle VCMP, VCMPE. */
11539 else if ((opc2 & 0x0e) == 0x04)
11540 curr_insn_type = INSN_T3;
11544 switch (curr_insn_type)
11547 reg_vd = reg_vd | (bit_d << 4);
11548 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11549 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11550 arm_insn_r->reg_rec_count = 2;
11554 reg_vd = reg_vd | (bit_d << 4);
11555 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11556 arm_insn_r->reg_rec_count = 1;
11560 reg_vd = (reg_vd << 1) | bit_d;
11561 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11562 arm_insn_r->reg_rec_count = 1;
11566 record_buf[0] = ARM_FPSCR_REGNUM;
11567 arm_insn_r->reg_rec_count = 1;
11571 gdb_assert_not_reached ("no decoding pattern found");
11575 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11579 /* Handling opcode 110 insns. */
11582 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11584 uint32_t op1, op1_ebit, coproc;
11586 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11587 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11588 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11590 if ((coproc & 0x0e) == 0x0a)
11592 /* Handle extension register ld/st instructions. */
11594 return arm_record_exreg_ld_st_insn (arm_insn_r);
11596 /* 64-bit transfers between arm core and extension registers. */
11597 if ((op1 & 0x3e) == 0x04)
11598 return arm_record_exreg_ld_st_insn (arm_insn_r);
11602 /* Handle coprocessor ld/st instructions. */
11607 return arm_record_unsupported_insn (arm_insn_r);
11610 return arm_record_unsupported_insn (arm_insn_r);
11613 /* Move to coprocessor from two arm core registers. */
11615 return arm_record_unsupported_insn (arm_insn_r);
11617 /* Move to two arm core registers from coprocessor. */
11622 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11623 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11624 arm_insn_r->reg_rec_count = 2;
11626 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11630 return arm_record_unsupported_insn (arm_insn_r);
11633 /* Handling opcode 111 insns. */
11636 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11638 uint32_t op, op1_sbit, op1_ebit, coproc;
11639 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11640 struct regcache *reg_cache = arm_insn_r->regcache;
11642 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11643 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11644 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11645 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11646 op = bit (arm_insn_r->arm_insn, 4);
11648 /* Handle arm SWI/SVC system call instructions. */
11651 if (tdep->arm_syscall_record != NULL)
11653 ULONGEST svc_operand, svc_number;
11655 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11657 if (svc_operand) /* OABI. */
11658 svc_number = svc_operand - 0x900000;
11660 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
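/* For example, the OABI encoding "swi 0x900004" (write) yields
   svc_number 4 directly, while an EABI "svc 0" leaves the syscall
   number in r7.  */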
11662 return tdep->arm_syscall_record (reg_cache, svc_number);
11666 printf_unfiltered (_("no syscall record support\n"));
11671 if ((coproc & 0x0e) == 0x0a)
11673 /* VFP data-processing instructions. */
11674 if (!op1_sbit && !op)
11675 return arm_record_vfp_data_proc_insn (arm_insn_r);
11677 /* Advanced SIMD, VFP instructions. */
11678 if (!op1_sbit && op)
11679 return arm_record_vdata_transfer_insn (arm_insn_r);
11683 /* Coprocessor data operations. */
11684 if (!op1_sbit && !op)
11685 return arm_record_unsupported_insn (arm_insn_r);
11687 /* Move to Coprocessor from ARM core register. */
11688 if (!op1_sbit && !op1_ebit && op)
11689 return arm_record_unsupported_insn (arm_insn_r);
11691 /* Move to arm core register from coprocessor. */
11692 if (!op1_sbit && op1_ebit && op)
11694 uint32_t record_buf[1];
11696 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11697 if (record_buf[0] == 15)
11698 record_buf[0] = ARM_PS_REGNUM;
11700 arm_insn_r->reg_rec_count = 1;
11701 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11707 return arm_record_unsupported_insn (arm_insn_r);
11710 /* Handling opcode 000 insns. */
11713 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11715 uint32_t record_buf[8];
11716 uint32_t reg_src1 = 0;
11718 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11720 record_buf[0] = ARM_PS_REGNUM;
11721 record_buf[1] = reg_src1;
11722 thumb_insn_r->reg_rec_count = 2;
11724 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11730 /* Handling opcode 001 insns. */
11733 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11735 uint32_t record_buf[8];
11736 uint32_t reg_src1 = 0;
11738 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11740 record_buf[0] = ARM_PS_REGNUM;
11741 record_buf[1] = reg_src1;
11742 thumb_insn_r->reg_rec_count = 2;
11744 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11749 /* Handling opcode 010 insns. */
11752 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11754 struct regcache *reg_cache = thumb_insn_r->regcache;
11755 uint32_t record_buf[8], record_buf_mem[8];
11757 uint32_t reg_src1 = 0, reg_src2 = 0;
11758 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11760 ULONGEST u_regval[2] = {0};
11762 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11764 if (bit (thumb_insn_r->arm_insn, 12))
11766 /* Handle load/store register offset. */
11767 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
11769 if (opB >= 4 && opB <= 7)
11771 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11772 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11773 record_buf[0] = reg_src1;
11774 thumb_insn_r->reg_rec_count = 1;
11776 else if (opB >= 0 && opB <= 2)
11778 /* STR(2), STRB(2), STRH(2). */
11779 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11780 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11781 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11782 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11784 record_buf_mem[0] = 4; /* STR (2). */
11786 record_buf_mem[0] = 1; /* STRB (2). */
11788 record_buf_mem[0] = 2; /* STRH (2). */
11789 record_buf_mem[1] = u_regval[0] + u_regval[1];
11790 thumb_insn_r->mem_rec_count = 1;
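/* For example, STR r0, [r1, r2] with r1 = 0x1000 and r2 = 8 records
   the 4 bytes at 0x1008; the STRB and STRH forms record 1 and 2
   bytes at the same Rn + Rm address.  */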
11793 else if (bit (thumb_insn_r->arm_insn, 11))
11795 /* Handle load from literal pool. */
11797 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11798 record_buf[0] = reg_src1;
11799 thumb_insn_r->reg_rec_count = 1;
11803 /* Special data instructions and branch and exchange */
11804 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11805 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11806 if ((3 == opcode2) && (!opcode3))
11808 /* Branch with exchange. */
11809 record_buf[0] = ARM_PS_REGNUM;
11810 thumb_insn_r->reg_rec_count = 1;
11814 /* Format 8; special data processing insns. */
11815 record_buf[0] = ARM_PS_REGNUM;
11816 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11817 | bits (thumb_insn_r->arm_insn, 0, 2));
11818 thumb_insn_r->reg_rec_count = 2;
11823 /* Format 5; data processing insns. */
11824 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11825 if (bit (thumb_insn_r->arm_insn, 7))
11827 reg_src1 = reg_src1 + 8;
11829 record_buf[0] = ARM_PS_REGNUM;
11830 record_buf[1] = reg_src1;
11831 thumb_insn_r->reg_rec_count = 2;
11834 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11835 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11841 /* Handling opcode 001 insns. */
11844 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11846 struct regcache *reg_cache = thumb_insn_r->regcache;
11847 uint32_t record_buf[8], record_buf_mem[8];
11849 uint32_t reg_src1 = 0;
11850 uint32_t opcode = 0, immed_5 = 0;
11852 ULONGEST u_regval = 0;
11854 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11859 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11860 record_buf[0] = reg_src1;
11861 thumb_insn_r->reg_rec_count = 1;
11866 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11867 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11868 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11869 record_buf_mem[0] = 4;
11870 record_buf_mem[1] = u_regval + (immed_5 * 4);
11871 thumb_insn_r->mem_rec_count = 1;
11874 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11875 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11881 /* Handling opcode 100 insns. */
11884 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11886 struct regcache *reg_cache = thumb_insn_r->regcache;
11887 uint32_t record_buf[8], record_buf_mem[8];
11889 uint32_t reg_src1 = 0;
11890 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11892 ULONGEST u_regval = 0;
11894 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11899 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11900 record_buf[0] = reg_src1;
11901 thumb_insn_r->reg_rec_count = 1;
11903 else if (1 == opcode)
11906 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11907 record_buf[0] = reg_src1;
11908 thumb_insn_r->reg_rec_count = 1;
11910 else if (2 == opcode)
11913 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11914 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11915 record_buf_mem[0] = 4;
11916 record_buf_mem[1] = u_regval + (immed_8 * 4);
11917 thumb_insn_r->mem_rec_count = 1;
11919 else if (0 == opcode)
11922 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11923 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11924 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11925 record_buf_mem[0] = 2;
11926 record_buf_mem[1] = u_regval + (immed_5 * 2);
11927 thumb_insn_r->mem_rec_count = 1;
11930 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11931 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11937 /* Handling opcode 101 insns. */
11940 thumb_record_misc (insn_decode_record *thumb_insn_r)
11942 struct regcache *reg_cache = thumb_insn_r->regcache;
11944 uint32_t opcode = 0;
11945 uint32_t register_bits = 0, register_count = 0;
11946 uint32_t index = 0, start_address = 0;
11947 uint32_t record_buf[24], record_buf_mem[48];
11950 ULONGEST u_regval = 0;
11952 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11954 if (opcode == 0 || opcode == 1)
11956 /* ADR and ADD (SP plus immediate) */
11958 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11959 record_buf[0] = reg_src1;
11960 thumb_insn_r->reg_rec_count = 1;
11964 /* Miscellaneous 16-bit instructions */
11965 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
11970 /* SETEND and CPS */
11973 /* ADD/SUB (SP plus immediate) */
11974 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11975 record_buf[0] = ARM_SP_REGNUM;
11976 thumb_insn_r->reg_rec_count = 1;
11978 case 1: /* fall through */
11979 case 3: /* fall through */
11980 case 9: /* fall through */
11985 /* SXTH, SXTB, UXTH, UXTB */
11986 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
11987 thumb_insn_r->reg_rec_count = 1;
11989 case 4: /* fall through */
11992 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11993 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11994 while (register_bits)
11996 if (register_bits & 0x00000001)
11998 register_bits = register_bits >> 1;
12000 start_address = u_regval - \
12001 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
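/* For example, PUSH {r4, r5, lr} has two registers in the low mask
   plus the R bit, so the start address works out to SP - 12.  */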
12002 thumb_insn_r->mem_rec_count = register_count;
12003 while (register_count)
12005 record_buf_mem[(register_count * 2) - 1] = start_address;
12006 record_buf_mem[(register_count * 2) - 2] = 4;
12007 start_address = start_address + 4;
12010 record_buf[0] = ARM_SP_REGNUM;
12011 thumb_insn_r->reg_rec_count = 1;
12014 /* REV, REV16, REVSH */
12015 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
12016 thumb_insn_r->reg_rec_count = 1;
12018 case 12: /* fall through */
12021 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12022 while (register_bits)
12024 if (register_bits & 0x00000001)
12025 record_buf[index++] = register_count;
12026 register_bits = register_bits >> 1;
12029 record_buf[index++] = ARM_PS_REGNUM;
12030 record_buf[index++] = ARM_SP_REGNUM;
12031 thumb_insn_r->reg_rec_count = index;
12035 /* Handle enhanced software breakpoint insn, BKPT. */
12036 /* The CPSR is changed so that execution continues in ARM state,
12037 normal interrupts are disabled, and abort mode is entered. */
12038 /* The PC is set according to the high vector configuration. */
12039 /* If the user hits the breakpoint and then reverses, we need to go back with
12040 the previous CPSR and Program Counter. */
12041 record_buf[0] = ARM_PS_REGNUM;
12042 record_buf[1] = ARM_LR_REGNUM;
12043 thumb_insn_r->reg_rec_count = 2;
12044 /* We need to save SPSR value, which is not yet done. */
12045 printf_unfiltered (_("Process record does not support instruction "
12046 "0x%0x at address %s.\n"),
12047 thumb_insn_r->arm_insn,
12048 paddress (thumb_insn_r->gdbarch,
12049 thumb_insn_r->this_addr));
12053 /* If-Then, and hints */
12060 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12061 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12067 /* Handling opcode 110 insns. */
12070 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12072 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12073 struct regcache *reg_cache = thumb_insn_r->regcache;
12075 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
12076 uint32_t reg_src1 = 0;
12077 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12078 uint32_t index = 0, start_address = 0;
12079 uint32_t record_buf[24], record_buf_mem[48];
12081 ULONGEST u_regval = 0;
12083 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12084 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12090 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12092 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12093 while (register_bits)
12095 if (register_bits & 0x00000001)
12096 record_buf[index++] = register_count;
12097 register_bits = register_bits >> 1;
12100 record_buf[index++] = reg_src1;
12101 thumb_insn_r->reg_rec_count = index;
12103 else if (0 == opcode2)
12105 /* Handle STMIA. */
12106 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12108 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12109 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12110 while (register_bits)
12112 if (register_bits & 0x00000001)
12114 register_bits = register_bits >> 1;
12116 start_address = u_regval;
12117 thumb_insn_r->mem_rec_count = register_count;
12118 while (register_count)
12120 record_buf_mem[(register_count * 2) - 1] = start_address;
12121 record_buf_mem[(register_count * 2) - 2] = 4;
12122 start_address = start_address + 4;
12126 else if (0x1F == opcode1)
12128 /* Handle arm syscall insn. */
12129 if (tdep->arm_syscall_record != NULL)
12131 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12132 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12136 printf_unfiltered (_("no syscall record support\n"));
12141 /* B (1), conditional branch is automatically taken care of in
12142 process_record, as PC is saved there. */
12144 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12145 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12151 /* Handling opcode 111 insns. */
12154 thumb_record_branch (insn_decode_record *thumb_insn_r)
12156 uint32_t record_buf[8];
12157 uint32_t bits_h = 0;
12159 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12161 if (2 == bits_h || 3 == bits_h)
12164 record_buf[0] = ARM_LR_REGNUM;
12165 thumb_insn_r->reg_rec_count = 1;
12167 else if (1 == bits_h)
12170 record_buf[0] = ARM_PS_REGNUM;
12171 record_buf[1] = ARM_LR_REGNUM;
12172 thumb_insn_r->reg_rec_count = 2;
12175 /* B(2) is automatically taken care of in process_record, as PC is
12178 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12183 /* Handler for thumb2 load/store multiple instructions. */
12186 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12188 struct regcache *reg_cache = thumb2_insn_r->regcache;
12190 uint32_t reg_rn, op;
12191 uint32_t register_bits = 0, register_count = 0;
12192 uint32_t index = 0, start_address = 0;
12193 uint32_t record_buf[24], record_buf_mem[48];
12195 ULONGEST u_regval = 0;
12197 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12198 op = bits (thumb2_insn_r->arm_insn, 23, 24);
12200 if (0 == op || 3 == op)
12202 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12204 /* Handle RFE instruction. */
12205 record_buf[0] = ARM_PS_REGNUM;
12206 thumb2_insn_r->reg_rec_count = 1;
12210 /* Handle SRS instruction after reading banked SP. */
12211 return arm_record_unsupported_insn (thumb2_insn_r);
12214 else if (1 == op || 2 == op)
12216 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12218 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12219 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12220 while (register_bits)
12222 if (register_bits & 0x00000001)
12223 record_buf[index++] = register_count;
12226 register_bits = register_bits >> 1;
12228 record_buf[index++] = reg_rn;
12229 record_buf[index++] = ARM_PS_REGNUM;
12230 thumb2_insn_r->reg_rec_count = index;
12234 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12235 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12236 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12237 while (register_bits)
12239 if (register_bits & 0x00000001)
12242 register_bits = register_bits >> 1;
12247 /* Start address calculation for STM/STMIA/STMEA. */
12248 start_address = u_regval;
12252 /* Start address calculation for STMDB/STMFD. */
12253 start_address = u_regval - register_count * 4;
12256 thumb2_insn_r->mem_rec_count = register_count;
12257 while (register_count)
12259 record_buf_mem[register_count * 2 - 1] = start_address;
12260 record_buf_mem[register_count * 2 - 2] = 4;
12261 start_address = start_address + 4;
12264 record_buf[0] = reg_rn;
12265 record_buf[1] = ARM_PS_REGNUM;
12266 thumb2_insn_r->reg_rec_count = 2;
12270 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12272 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12274 return ARM_RECORD_SUCCESS;
12277 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12281 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12283 struct regcache *reg_cache = thumb2_insn_r->regcache;
12285 uint32_t reg_rd, reg_rn, offset_imm;
12286 uint32_t reg_dest1, reg_dest2;
12287 uint32_t address, offset_addr;
12288 uint32_t record_buf[8], record_buf_mem[8];
12289 uint32_t op1, op2, op3;
12291 ULONGEST u_regval[2];
12293 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12294 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12295 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12297 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12299 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12301 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12302 record_buf[0] = reg_dest1;
12303 record_buf[1] = ARM_PS_REGNUM;
12304 thumb2_insn_r->reg_rec_count = 2;
12307 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12309 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12310 record_buf[2] = reg_dest2;
12311 thumb2_insn_r->reg_rec_count = 3;
12316 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12317 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12319 if (0 == op1 && 0 == op2)
12321 /* Handle STREX. */
12322 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12323 address = u_regval[0] + (offset_imm * 4);
12324 record_buf_mem[0] = 4;
12325 record_buf_mem[1] = address;
12326 thumb2_insn_r->mem_rec_count = 1;
12327 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12328 record_buf[0] = reg_rd;
12329 thumb2_insn_r->reg_rec_count = 1;
12331 else if (1 == op1 && 0 == op2)
12333 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12334 record_buf[0] = reg_rd;
12335 thumb2_insn_r->reg_rec_count = 1;
12336 address = u_regval[0];
12337 record_buf_mem[1] = address;
12341 /* Handle STREXB. */
12342 record_buf_mem[0] = 1;
12343 thumb2_insn_r->mem_rec_count = 1;
12347 /* Handle STREXH. */
12348 record_buf_mem[0] = 2;
12349 thumb2_insn_r->mem_rec_count = 1;
12353 /* Handle STREXD. */
12354 address = u_regval[0];
12355 record_buf_mem[0] = 4;
12356 record_buf_mem[2] = 4;
12357 record_buf_mem[3] = address + 4;
12358 thumb2_insn_r->mem_rec_count = 2;
12363 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12365 if (bit (thumb2_insn_r->arm_insn, 24))
12367 if (bit (thumb2_insn_r->arm_insn, 23))
12368 offset_addr = u_regval[0] + (offset_imm * 4);
12370 offset_addr = u_regval[0] - (offset_imm * 4);
12372 address = offset_addr;
12375 address = u_regval[0];
12377 record_buf_mem[0] = 4;
12378 record_buf_mem[1] = address;
12379 record_buf_mem[2] = 4;
12380 record_buf_mem[3] = address + 4;
12381 thumb2_insn_r->mem_rec_count = 2;
12382 record_buf[0] = reg_rn;
12383 thumb2_insn_r->reg_rec_count = 1;
12387 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12389 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12391 return ARM_RECORD_SUCCESS;
12394 /* Handler for thumb2 data processing (shift register and modified immediate)
12398 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12400 uint32_t reg_rd, op;
12401 uint32_t record_buf[8];
12403 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12404 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12406 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12408 record_buf[0] = ARM_PS_REGNUM;
12409 thumb2_insn_r->reg_rec_count = 1;
12413 record_buf[0] = reg_rd;
12414 record_buf[1] = ARM_PS_REGNUM;
12415 thumb2_insn_r->reg_rec_count = 2;
12418 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12420 return ARM_RECORD_SUCCESS;
12423 /* Generic handler for thumb2 instructions which affect the destination and PS
12427 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12430 uint32_t record_buf[8];
12432 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12434 record_buf[0] = reg_rd;
12435 record_buf[1] = ARM_PS_REGNUM;
12436 thumb2_insn_r->reg_rec_count = 2;
12438 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12440 return ARM_RECORD_SUCCESS;
12443 /* Handler for thumb2 branch and miscellaneous control instructions. */
12446 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12448 uint32_t op, op1, op2;
12449 uint32_t record_buf[8];
12451 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12452 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12453 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12455 /* Handle MSR insn. */
12456 if (!(op1 & 0x2) && 0x38 == op)
12460 /* CPSR is going to be changed. */
12461 record_buf[0] = ARM_PS_REGNUM;
12462 thumb2_insn_r->reg_rec_count = 1;
12466 arm_record_unsupported_insn (thumb2_insn_r);
12470 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12473 record_buf[0] = ARM_PS_REGNUM;
12474 record_buf[1] = ARM_LR_REGNUM;
12475 thumb2_insn_r->reg_rec_count = 2;
12478 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12480 return ARM_RECORD_SUCCESS;
12483 /* Handler for thumb2 store single data item instructions. */
12486 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12488 struct regcache *reg_cache = thumb2_insn_r->regcache;
12490 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12491 uint32_t address, offset_addr;
12492 uint32_t record_buf[8], record_buf_mem[8];
12495 ULONGEST u_regval[2];
12497 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12498 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12499 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12500 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12502 if (bit (thumb2_insn_r->arm_insn, 23))
12505 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12506 offset_addr = u_regval[0] + offset_imm;
12507 address = offset_addr;
12512 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12514 /* Handle STRB (register). */
12515 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12516 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12517 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12518 offset_addr = u_regval[1] << shift_imm;
12519 address = u_regval[0] + offset_addr;
12523 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12524 if (bit (thumb2_insn_r->arm_insn, 10))
12526 if (bit (thumb2_insn_r->arm_insn, 9))
12527 offset_addr = u_regval[0] + offset_imm;
12529 offset_addr = u_regval[0] - offset_imm;
12531 address = offset_addr;
12534 address = u_regval[0];
12540 /* Store byte instructions. */
12543 record_buf_mem[0] = 1;
12545 /* Store half word instructions. */
12548 record_buf_mem[0] = 2;
12550 /* Store word instructions. */
12553 record_buf_mem[0] = 4;
12557 gdb_assert_not_reached ("no decoding pattern found");
12561 record_buf_mem[1] = address;
12562 thumb2_insn_r->mem_rec_count = 1;
12563 record_buf[0] = reg_rn;
12564 thumb2_insn_r->reg_rec_count = 1;
12566 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12568 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12570 return ARM_RECORD_SUCCESS;
12573 /* Handler for thumb2 load memory hints instructions. */
12576 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12578 uint32_t record_buf[8];
12579 uint32_t reg_rt, reg_rn;
12581 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12582 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12584 if (ARM_PC_REGNUM != reg_rt)
12586 record_buf[0] = reg_rt;
12587 record_buf[1] = reg_rn;
12588 record_buf[2] = ARM_PS_REGNUM;
12589 thumb2_insn_r->reg_rec_count = 3;
12591 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12593 return ARM_RECORD_SUCCESS;
12596 return ARM_RECORD_FAILURE;
12599 /* Handler for thumb2 load word instructions. */
12602 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12604 uint32_t record_buf[8];
12606 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12607 record_buf[1] = ARM_PS_REGNUM;
12608 thumb2_insn_r->reg_rec_count = 2;
12610 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12612 return ARM_RECORD_SUCCESS;
12615 /* Handler for thumb2 long multiply, long multiply accumulate, and
12616 divide instructions. */
12619 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12621 uint32_t opcode1 = 0, opcode2 = 0;
12622 uint32_t record_buf[8];
12624 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12625 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12627 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12629 /* Handle SMULL, UMULL, SMLAL. */
12630 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12631 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12632 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12633 record_buf[2] = ARM_PS_REGNUM;
12634 thumb2_insn_r->reg_rec_count = 3;
12636 else if (1 == opcode1 || 3 == opcode1)
12638 /* Handle SDIV and UDIV. */
12639 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12640 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12641 record_buf[2] = ARM_PS_REGNUM;
12642 thumb2_insn_r->reg_rec_count = 3;
12645 return ARM_RECORD_FAILURE;
12647 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12649 return ARM_RECORD_SUCCESS;
12652 /* Record handler for thumb32 coprocessor instructions. */
12655 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12657 if (bit (thumb2_insn_r->arm_insn, 25))
12658 return arm_record_coproc_data_proc (thumb2_insn_r);
12660 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12663 /* Record handler for advanced SIMD structure load/store instructions. */
12666 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12668 struct regcache *reg_cache = thumb2_insn_r->regcache;
12669 uint32_t l_bit, a_bit, b_bits;
12670 uint32_t record_buf[128], record_buf_mem[128];
12671 uint32_t reg_rn, reg_vd, address, f_elem;
12672 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12675 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12676 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12677 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12678 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12679 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12680 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12681 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12682 f_elem = 8 / f_ebytes;
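/* For example, a size field of 0b01 gives f_ebytes = 2 (halfword
   elements), so f_elem = 8 / 2 = 4 elements per 64-bit D register.  */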
12686 ULONGEST u_regval = 0;
12687 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12688 address = u_regval;
12693 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12695 if (b_bits == 0x07)
12697 else if (b_bits == 0x0a)
12699 else if (b_bits == 0x06)
12701 else if (b_bits == 0x02)
12706 for (index_r = 0; index_r < bf_regs; index_r++)
12708 for (index_e = 0; index_e < f_elem; index_e++)
12710 record_buf_mem[index_m++] = f_ebytes;
12711 record_buf_mem[index_m++] = address;
12712 address = address + f_ebytes;
12713 thumb2_insn_r->mem_rec_count += 1;
12718 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12720 if (b_bits == 0x09 || b_bits == 0x08)
12722 else if (b_bits == 0x03)
12727 for (index_r = 0; index_r < bf_regs; index_r++)
12728 for (index_e = 0; index_e < f_elem; index_e++)
12730 for (loop_t = 0; loop_t < 2; loop_t++)
12732 record_buf_mem[index_m++] = f_ebytes;
12733 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12734 thumb2_insn_r->mem_rec_count += 1;
12736 address = address + (2 * f_ebytes);
12740 else if ((b_bits & 0x0e) == 0x04)
12742 for (index_e = 0; index_e < f_elem; index_e++)
12744 for (loop_t = 0; loop_t < 3; loop_t++)
12746 record_buf_mem[index_m++] = f_ebytes;
12747 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12748 thumb2_insn_r->mem_rec_count += 1;
12750 address = address + (3 * f_ebytes);
12754 else if (!(b_bits & 0x0e))
12756 for (index_e = 0; index_e < f_elem; index_e++)
12758 for (loop_t = 0; loop_t < 4; loop_t++)
12760 record_buf_mem[index_m++] = f_ebytes;
12761 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12762 thumb2_insn_r->mem_rec_count += 1;
12764 address = address + (4 * f_ebytes);
12770 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12772 if (bft_size == 0x00)
12774 else if (bft_size == 0x01)
12776 else if (bft_size == 0x02)
12782 if (!(b_bits & 0x0b) || b_bits == 0x08)
12783 thumb2_insn_r->mem_rec_count = 1;
12785 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12786 thumb2_insn_r->mem_rec_count = 2;
12788 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12789 thumb2_insn_r->mem_rec_count = 3;
12791 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12792 thumb2_insn_r->mem_rec_count = 4;
12794 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12796 record_buf_mem[index_m * 2] = f_ebytes;
12797 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12806 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12807 thumb2_insn_r->reg_rec_count = 1;
12809 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12810 thumb2_insn_r->reg_rec_count = 2;
12812 else if ((b_bits & 0x0e) == 0x04)
12813 thumb2_insn_r->reg_rec_count = 3;
12815 else if (!(b_bits & 0x0e))
12816 thumb2_insn_r->reg_rec_count = 4;
12821 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12822 thumb2_insn_r->reg_rec_count = 1;
12824 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12825 thumb2_insn_r->reg_rec_count = 2;
12827 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12828 thumb2_insn_r->reg_rec_count = 3;
12830 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12831 thumb2_insn_r->reg_rec_count = 4;
12833 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12834 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12838 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12840 record_buf[index_r] = reg_rn;
12841 thumb2_insn_r->reg_rec_count += 1;
12844 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12846 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12851 /* Decodes thumb2 instruction type and invokes its record handler. */
12853 static unsigned int
12854 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12856 uint32_t op, op1, op2;
12858 op = bit (thumb2_insn_r->arm_insn, 15);
12859 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12860 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12864 if (!(op2 & 0x64))
12866 /* Load/store multiple instruction. */
12867 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12869 else if ((op2 & 0x64) == 0x4)
12871 /* Load/store (dual/exclusive) and table branch instruction. */
12872 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12874 else if ((op2 & 0x60) == 0x20)
12876 /* Data-processing (shifted register). */
12877 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12879 else if (op2 & 0x40)
12881 /* Co-processor instructions. */
12882 return thumb2_record_coproc_insn (thumb2_insn_r);
12885 else if (op1 == 0x02)
12889 /* Branches and miscellaneous control instructions. */
12890 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12892 else if (op2 & 0x20)
12894 /* Data-processing (plain binary immediate) instruction. */
12895 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12899 /* Data-processing (modified immediate). */
12900 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12903 else if (op1 == 0x03)
12905 if (!(op2 & 0x71))
12907 /* Store single data item. */
12908 return thumb2_record_str_single_data (thumb2_insn_r);
12910 else if (!((op2 & 0x71) ^ 0x10))
12912 /* Advanced SIMD or structure load/store instructions. */
12913 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12915 else if (!((op2 & 0x67) ^ 0x01))
12917 /* Load byte, memory hints instruction. */
12918 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12920 else if (!((op2 & 0x67) ^ 0x03))
12922 /* Load halfword, memory hints instruction. */
12923 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12925 else if (!((op2 & 0x67) ^ 0x05))
12927 /* Load word instruction. */
12928 return thumb2_record_ld_word (thumb2_insn_r);
12930 else if (!((op2 & 0x70) ^ 0x20))
12932 /* Data-processing (register) instruction. */
12933 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12935 else if (!((op2 & 0x78) ^ 0x30))
12937 /* Multiply, multiply accumulate, abs diff instruction. */
12938 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12940 else if (!((op2 & 0x78) ^ 0x38))
12942 /* Long multiply, long multiply accumulate, and divide. */
12943 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12945 else if (op2 & 0x40)
12947 /* Co-processor instructions. */
12948 return thumb2_record_coproc_insn (thumb2_insn_r);
12956 /* Abstract memory reader. */
12958 class abstract_memory_reader
12961 /* Read LEN bytes of target memory at address MEMADDR, placing the
12962 results in GDB's memory at BUF. Return true on success. */
12964 virtual bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len) = 0;
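/* This abstraction lets the record code read instructions either from the
   real target or, in the self tests below, from a canned buffer. */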
12967 /* Instruction reader from real target. */
12969 class instruction_reader : public abstract_memory_reader
12972 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
12974 if (target_read_memory (memaddr, buf, len))
12983 /* Extract an arm/thumb/thumb2 insn depending on INSN_SIZE. Return 0 on
12984 success and a positive value on failure. */
12987 extract_arm_insn (abstract_memory_reader& reader,
12988 insn_decode_record *insn_record, uint32_t insn_size)
12990 gdb_byte buf[insn_size];
12992 memset (&buf[0], 0, insn_size);
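/* Instructions are read and decoded using the code byte order, which can
   differ from the data byte order (e.g. ARM BE8). */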
12994 if (!reader.read (insn_record->this_addr, buf, insn_size))
12996 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0], insn_size,
12998 gdbarch_byte_order_for_code (insn_record->gdbarch));
13002 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13004 /* Decode an arm/thumb insn depending on its condition codes and opcodes, and dispatch it to the matching record handler. */
13008 decode_insn (abstract_memory_reader &reader, insn_decode_record *arm_record,
13009 record_type_t record_type, uint32_t insn_size)
13012 /* Counting from bit 0, bits 25, 26 and 27 decode the type of an arm instruction. */
13014 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13016 arm_record_data_proc_misc_ld_str, /* 000. */
13017 arm_record_data_proc_imm, /* 001. */
13018 arm_record_ld_st_imm_offset, /* 010. */
13019 arm_record_ld_st_reg_offset, /* 011. */
13020 arm_record_ld_st_multiple, /* 100. */
13021 arm_record_b_bl, /* 101. */
13022 arm_record_asimd_vfp_coproc, /* 110. */
13023 arm_record_coproc_data_proc /* 111. */
13026 /* Counting from bit 0, bits 13, 14 and 15 decode the type of a thumb instruction. */
13028 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13030 thumb_record_shift_add_sub, /* 000. */
13031 thumb_record_add_sub_cmp_mov, /* 001. */
13032 thumb_record_ld_st_reg_offset, /* 010. */
13033 thumb_record_ld_st_imm_offset, /* 011. */
13034 thumb_record_ld_st_stack, /* 100. */
13035 thumb_record_misc, /* 101. */
13036 thumb_record_ldm_stm_swi, /* 110. */
13037 thumb_record_branch /* 111. */
13040 uint32_t ret = 0; /* Return value: 0 on success, non-zero on failure. */
13041 uint32_t insn_id = 0;
13043 if (extract_arm_insn (reader, arm_record, insn_size))
13047 printf_unfiltered (_("Process record: error reading memory at "
13048 "addr %s len = %d.\n"),
13049 paddress (arm_record->gdbarch,
13050 arm_record->this_addr), insn_size);
13054 else if (ARM_RECORD == record_type)
13056 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13057 insn_id = bits (arm_record->arm_insn, 25, 27);
13059 if (arm_record->cond == 0xf)
13060 ret = arm_record_extension_space (arm_record);
13063 /* The insn did not fall into the extension space, so decode it
13064 with the normal arm handlers. */
13065 ret = arm_handle_insn[insn_id] (arm_record);
13067 if (ret != ARM_RECORD_SUCCESS)
13069 arm_record_unsupported_insn (arm_record);
13073 else if (THUMB_RECORD == record_type)
13075 /* Thumb instructions do not have condition codes, so set cond to -1. */
13076 arm_record->cond = -1;
13077 insn_id = bits (arm_record->arm_insn, 13, 15);
13078 ret = thumb_handle_insn[insn_id] (arm_record);
13079 if (ret != ARM_RECORD_SUCCESS)
13081 arm_record_unsupported_insn (arm_record);
13085 else if (THUMB2_RECORD == record_type)
13087 /* Thumb-2 instructions do not have a condition field either, so set cond to -1. */
13088 arm_record->cond = -1;
13090 /* Swap the first halfword of the 32-bit thumb instruction with the second. */
13091 arm_record->arm_insn
13092 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
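/* Thumb-2 instructions are stored as two consecutive 16-bit halfwords with
   the leading halfword at the lower address; the swap moves that halfword
   into bits [31:16], which is the layout the Thumb-2 record handlers expect. */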
13094 ret = thumb2_record_decode_insn_handler (arm_record);
13096 if (ret != ARM_RECORD_SUCCESS)
13098 arm_record_unsupported_insn (arm_record);
13104 /* Throw assertion. */
13105 gdb_assert_not_reached ("not a valid instruction, could not decode");
13112 namespace selftests {
13114 /* Provide canned 16-bit and 32-bit thumb instructions to the record self tests. */
13116 class instruction_reader_thumb : public abstract_memory_reader
13119 template<size_t SIZE>
13120 instruction_reader_thumb (enum bfd_endian endian,
13121 const uint16_t (&insns)[SIZE])
13122 : m_endian (endian), m_insns (insns), m_insns_size (SIZE)
13125 bool read (CORE_ADDR memaddr, gdb_byte *buf, const size_t len)
13127 SELF_CHECK (len == 4 || len == 2);
13128 SELF_CHECK (memaddr % 2 == 0);
13129 SELF_CHECK ((memaddr / 2) < m_insns_size);
13131 store_unsigned_integer (buf, 2, m_endian, m_insns[memaddr / 2]);
13134 store_unsigned_integer (&buf[2], 2, m_endian,
13135 m_insns[memaddr / 2 + 1]);
13141 enum bfd_endian m_endian;
13142 const uint16_t *m_insns;
13143 size_t m_insns_size;
13147 arm_record_test (void)
13149 struct gdbarch_info info;
13150 gdbarch_info_init (&info);
13151 info.bfd_arch_info = bfd_scan_arch ("arm");
13153 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
13155 SELF_CHECK (gdbarch != NULL);
13157 /* 16-bit Thumb instructions. */
13159 insn_decode_record arm_record;
13161 memset (&arm_record, 0, sizeof (insn_decode_record));
13162 arm_record.gdbarch = gdbarch;
13164 static const uint16_t insns[] = {
13165 /* db b2 uxtb r3, r3 */
13167 /* cd 58 ldr r5, [r1, r3] */
13171 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13172 instruction_reader_thumb reader (endian, insns);
13173 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13174 THUMB_INSN_SIZE_BYTES);
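/* "uxtb r3, r3" modifies only r3, so expect one register record and no
   memory records. */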
13176 SELF_CHECK (ret == 0);
13177 SELF_CHECK (arm_record.mem_rec_count == 0);
13178 SELF_CHECK (arm_record.reg_rec_count == 1);
13179 SELF_CHECK (arm_record.arm_regs[0] == 3);
13181 arm_record.this_addr += 2;
13182 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13183 THUMB_INSN_SIZE_BYTES);
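/* "ldr r5, [r1, r3]" loads into r5; a load changes no memory, so only a
   single register record is expected. */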
13185 SELF_CHECK (ret == 0);
13186 SELF_CHECK (arm_record.mem_rec_count == 0);
13187 SELF_CHECK (arm_record.reg_rec_count == 1);
13188 SELF_CHECK (arm_record.arm_regs[0] == 5);
13191 /* 32-bit Thumb-2 instructions. */
13193 insn_decode_record arm_record;
13195 memset (&arm_record, 0, sizeof (insn_decode_record));
13196 arm_record.gdbarch = gdbarch;
13198 static const uint16_t insns[] = {
13199 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13203 enum bfd_endian endian = gdbarch_byte_order_for_code (arm_record.gdbarch);
13204 instruction_reader_thumb reader (endian, insns);
13205 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13206 THUMB2_INSN_SIZE_BYTES);
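/* The mrc copies a coprocessor register into r7, so again a single register
   record and no memory records are expected. */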
13208 SELF_CHECK (ret == 0);
13209 SELF_CHECK (arm_record.mem_rec_count == 0);
13210 SELF_CHECK (arm_record.reg_rec_count == 1);
13211 SELF_CHECK (arm_record.arm_regs[0] == 7);
13214 } // namespace selftests
13215 #endif /* GDB_SELF_TEST */
13217 /* Free the register and memory lists allocated for the current record. */
13220 deallocate_reg_mem (insn_decode_record *record)
13222 xfree (record->arm_regs);
13223 xfree (record->arm_mems);
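/* arm_process_record is the ARM implementation of the gdbarch
   "process_record" method; it is called for each instruction that is about
   to be executed while recording. */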
13227 /* Parse the current instruction and record the values of the registers and
13228 memory that will be changed by it to record_arch_list.
13229 Return -1 if something goes wrong. */
13232 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13233 CORE_ADDR insn_addr)
13236 uint32_t no_of_rec = 0;
13237 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
13238 ULONGEST t_bit = 0, insn_id = 0;
13240 ULONGEST u_regval = 0;
13242 insn_decode_record arm_record;
13244 memset (&arm_record, 0, sizeof (insn_decode_record));
13245 arm_record.regcache = regcache;
13246 arm_record.this_addr = insn_addr;
13247 arm_record.gdbarch = gdbarch;
13250 if (record_debug > 1)
13252 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13254 paddress (gdbarch, arm_record.this_addr));
13257 instruction_reader reader;
13258 if (extract_arm_insn (reader, &arm_record, 2))
13262 printf_unfiltered (_("Process record: error reading memory at "
13263 "addr %s len = %d.\n"),
13264 paddress (arm_record.gdbarch,
13265 arm_record.this_addr), 2);
13270 /* Check whether the insn is a thumb or an arm one. */
13272 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13273 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13276 if (!(u_regval & t_bit))
13278 /* We are decoding an arm insn. */
13279 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13283 insn_id = bits (arm_record.arm_insn, 11, 15);
13284 /* Is it a 32-bit thumb2 insn? Bits 15:11 of its leading halfword are then 0x1D, 0x1E or 0x1F. */
13285 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13287 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
13288 THUMB2_INSN_SIZE_BYTES);
13292 /* We are decoding a thumb insn. */
13293 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
13294 THUMB_INSN_SIZE_BYTES);
13300 /* Record registers. */
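/* The PC is always recorded, since every instruction changes it. */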
13301 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13302 if (arm_record.arm_regs)
13304 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13306 if (record_full_arch_list_add_reg
13307 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13311 /* Record memories. */
13312 if (arm_record.arm_mems)
13314 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13316 if (record_full_arch_list_add_mem
13317 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13318 arm_record.arm_mems[no_of_rec].len))
13323 if (record_full_arch_list_add_end ())
13328 deallocate_reg_mem (&arm_record);