1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "arch/arm-get-next-pcs.h"
51 #include "gdb/sim-arm.h"
54 #include "coff/internal.h"
60 #include "record-full.h"
62 #include "features/arm-with-m.c"
63 #include "features/arm-with-m-fpa-layout.c"
64 #include "features/arm-with-m-vfp-d16.c"
65 #include "features/arm-with-iwmmxt.c"
66 #include "features/arm-with-vfpv2.c"
67 #include "features/arm-with-vfpv3.c"
68 #include "features/arm-with-neon.c"
72 /* Macros for setting and testing a bit in a minimal symbol that marks
   it as a Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.
76 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
77 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
79 #define MSYMBOL_SET_SPECIAL(msym) \
80 MSYMBOL_TARGET_FLAG_1 (msym) = 1
82 #define MSYMBOL_IS_SPECIAL(msym) \
83 MSYMBOL_TARGET_FLAG_1 (msym)
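
/* arm_pc_is_thumb below falls back to this "special" flag when mapping
   symbols do not cover an address.  */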
85 /* Per-objfile data used for mapping symbols. */
86 static const struct objfile_data *arm_objfile_data_key;
88 struct arm_mapping_symbol
93 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
94 DEF_VEC_O(arm_mapping_symbol_s);
96 struct arm_per_objfile
98 VEC(arm_mapping_symbol_s) **section_maps;
101 /* The list of available "set arm ..." and "show arm ..." commands. */
102 static struct cmd_list_element *setarmcmdlist = NULL;
103 static struct cmd_list_element *showarmcmdlist = NULL;
105 /* The type of floating-point to use. Keep this in sync with enum
106 arm_float_model, and the help string in _initialize_arm_tdep. */
107 static const char *const fp_model_strings[] =
117 /* A variable that can be configured by the user. */
118 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
119 static const char *current_fp_model = "auto";
121 /* The ABI to use. Keep this in sync with arm_abi_kind. */
122 static const char *const arm_abi_strings[] =
130 /* A variable that can be configured by the user. */
131 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
132 static const char *arm_abi_string = "auto";
134 /* The execution mode to assume. */
135 static const char *const arm_mode_strings[] =
143 static const char *arm_fallback_mode_string = "auto";
144 static const char *arm_force_mode_string = "auto";
146 /* Internal override of the execution mode. -1 means no override,
147 0 means override to ARM mode, 1 means override to Thumb mode.
148 The effect is the same as if arm_force_mode has been set by the
149 user (except the internal override has precedence over a user's
150 arm_force_mode override). */
151 static int arm_override_mode = -1;
153 /* Number of different reg name sets (options). */
154 static int num_disassembly_options;
156 /* The standard register names, and all the valid aliases for them. Note
157 that `fp', `sp' and `pc' are not added in this alias list, because they
158 have been added as builtin user registers in
159 std-regs.c:_initialize_frame_reg. */
164 } arm_register_aliases[] = {
165 /* Basic register numbers. */
182 /* Synonyms (argument and variable registers). */
195 /* Other platform-specific names for r9. */
201 /* Names used by GCC (not listed in the ARM EABI). */
203 /* A special name from the older ATPCS. */
207 static const char *const arm_register_names[] =
208 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
209 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
210 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
211 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
212 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
213 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
214 "fps", "cpsr" }; /* 24 25 */
216 /* Valid register name styles. */
217 static const char **valid_disassembly_styles;
219 /* Disassembly style to use. Default to "std" register names. */
220 static const char *disassembly_style;
/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
224 static void set_disassembly_style_sfunc(char *, int,
225 struct cmd_list_element *);
226 static void set_disassembly_style (void);
228 static void convert_from_extended (const struct floatformat *, const void *,
230 static void convert_to_extended (const struct floatformat *, void *,
233 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
234 struct regcache *regcache,
235 int regnum, gdb_byte *buf);
236 static void arm_neon_quad_write (struct gdbarch *gdbarch,
237 struct regcache *regcache,
238 int regnum, const gdb_byte *buf);
241 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self,
245 /* get_next_pcs operations. */
246 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
247 arm_get_next_pcs_read_memory_unsigned_integer,
248 arm_get_next_pcs_syscall_next_pc,
249 arm_get_next_pcs_addr_bits_remove,
250 arm_get_next_pcs_is_thumb
253 struct arm_prologue_cache
255 /* The stack pointer at the time this frame was created; i.e. the
256 caller's stack pointer when this function was called. It is used
257 to identify this frame. */
260 /* The frame base for this frame is just prev_sp - frame size.
261 FRAMESIZE is the distance from the frame pointer to the
262 initial stack pointer. */
266 /* The register used to hold the frame pointer for this frame. */
269 /* Saved register offsets. */
270 struct trad_frame_saved_reg *saved_regs;
273 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
274 CORE_ADDR prologue_start,
275 CORE_ADDR prologue_end,
276 struct arm_prologue_cache *cache);
/* Architecture version for displaced stepping.  This affects the behaviour of
279 certain instructions, and really should not be hard-wired. */
281 #define DISPLACED_STEPPING_ARCH_VERSION 5
283 /* Set to true if the 32-bit mode is in use. */
287 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
290 arm_psr_thumb_bit (struct gdbarch *gdbarch)
292 if (gdbarch_tdep (gdbarch)->is_m)
298 /* Determine if the processor is currently executing in Thumb mode. */
301 arm_is_thumb (struct regcache *regcache)
304 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
306 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
308 return (cpsr & t_bit) != 0;
311 /* Determine if FRAME is executing in Thumb mode. */
314 arm_frame_is_thumb (struct frame_info *frame)
317 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
319 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
320 directly (from a signal frame or dummy frame) or by interpreting
321 the saved LR (from a prologue or DWARF frame). So consult it and
322 trust the unwinders. */
323 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
325 return (cpsr & t_bit) != 0;
328 /* Callback for VEC_lower_bound. */
331 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
332 const struct arm_mapping_symbol *rhs)
334 return lhs->value < rhs->value;
337 /* Search for the mapping symbol covering MEMADDR. If one is found,
338 return its type. Otherwise, return 0. If START is non-NULL,
339 set *START to the location of the mapping symbol. */
342 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
344 struct obj_section *sec;
346 /* If there are mapping symbols, consult them. */
347 sec = find_pc_section (memaddr);
350 struct arm_per_objfile *data;
351 VEC(arm_mapping_symbol_s) *map;
352 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
356 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
357 arm_objfile_data_key);
360 map = data->section_maps[sec->the_bfd_section->index];
361 if (!VEC_empty (arm_mapping_symbol_s, map))
363 struct arm_mapping_symbol *map_sym;
365 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
366 arm_compare_mapping_symbols);
368 /* VEC_lower_bound finds the earliest ordered insertion
369 point. If the following symbol starts at this exact
370 address, we use that; otherwise, the preceding
371 mapping symbol covers this address. */
372 if (idx < VEC_length (arm_mapping_symbol_s, map))
374 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
375 if (map_sym->value == map_key.value)
378 *start = map_sym->value + obj_section_addr (sec);
379 return map_sym->type;
385 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
387 *start = map_sym->value + obj_section_addr (sec);
388 return map_sym->type;
397 /* Determine if the program counter specified in MEMADDR is in a Thumb
398 function. This function should be called for addresses unrelated to
399 any executing frame; otherwise, prefer arm_frame_is_thumb. */
402 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
404 struct bound_minimal_symbol sym;
406 struct displaced_step_closure* dsc
407 = get_displaced_step_closure_by_addr(memaddr);
  /* If we are checking the mode of a displaced instruction in the copy
     area, the mode should be determined by the instruction at the
     original address.  */
414 fprintf_unfiltered (gdb_stdlog,
415 "displaced: check mode of %.8lx instead of %.8lx\n",
416 (unsigned long) dsc->insn_addr,
417 (unsigned long) memaddr);
418 memaddr = dsc->insn_addr;
421 /* If bit 0 of the address is set, assume this is a Thumb address. */
422 if (IS_THUMB_ADDR (memaddr))
425 /* Respect internal mode override if active. */
426 if (arm_override_mode != -1)
427 return arm_override_mode;
429 /* If the user wants to override the symbol table, let him. */
430 if (strcmp (arm_force_mode_string, "arm") == 0)
432 if (strcmp (arm_force_mode_string, "thumb") == 0)
435 /* ARM v6-M and v7-M are always in Thumb mode. */
436 if (gdbarch_tdep (gdbarch)->is_m)
439 /* If there are mapping symbols, consult them. */
440 type = arm_find_mapping_symbol (memaddr, NULL);
444 /* Thumb functions have a "special" bit set in minimal symbols. */
445 sym = lookup_minimal_symbol_by_pc (memaddr);
447 return (MSYMBOL_IS_SPECIAL (sym.minsym));
449 /* If the user wants to override the fallback mode, let them. */
450 if (strcmp (arm_fallback_mode_string, "arm") == 0)
452 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
455 /* If we couldn't find any symbol, but we're talking to a running
456 target, then trust the current value of $cpsr. This lets
457 "display/i $pc" always show the correct mode (though if there is
458 a symbol table we will not reach here, so it still may not be
459 displayed in the mode it will be executed). */
460 if (target_has_registers)
461 return arm_frame_is_thumb (get_current_frame ());
463 /* Otherwise we're out of luck; we assume ARM. */
467 /* Remove useless bits from addresses in a running program. */
469 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
471 /* On M-profile devices, do not strip the low bit from EXC_RETURN
472 (the magic exception return address). */
473 if (gdbarch_tdep (gdbarch)->is_m
474 && (val & 0xfffffff0) == 0xfffffff0)
478 return UNMAKE_THUMB_ADDR (val);
480 return (val & 0x03fffffc);
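  /* In the legacy 26-bit addressing mode the PC shares its word with the
     PSR flags, so only bits 2-25 carry the address; hence the 0x03fffffc
     mask above.  */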
483 /* Return 1 if PC is the start of a compiler helper function which
484 can be safely ignored during prologue skipping. IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
488 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
490 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
491 struct bound_minimal_symbol msym;
493 msym = lookup_minimal_symbol_by_pc (pc);
494 if (msym.minsym != NULL
495 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
496 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
498 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  */
502 if (strstr (name, "_from_thumb") != NULL)
505 /* On soft-float targets, __truncdfsf2 is called to convert promoted
506 arguments to their argument types in non-prototyped
508 if (startswith (name, "__truncdfsf2"))
510 if (startswith (name, "__aeabi_d2f"))
513 /* Internal functions related to thread-local storage. */
514 if (startswith (name, "__tls_get_addr"))
516 if (startswith (name, "__aeabi_read_tp"))
521 /* If we run against a stripped glibc, we may be unable to identify
522 special functions by name. Check for one important case,
523 __aeabi_read_tp, by comparing the *code* against the default
524 implementation (this is hand-written ARM assembler in glibc). */
527 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
528 == 0xe3e00a0f /* mov r0, #0xffff0fff */
529 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
530 == 0xe240f01f) /* sub pc, r0, #31 */
/* Extract the immediate from a movw/movt instruction of encoding T.  INSN1 is
   the first 16 bits of the instruction, and INSN2 is the second 16 bits of
   the instruction.  */
540 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
541 ((bits ((insn1), 0, 3) << 12) \
542 | (bits ((insn1), 10, 10) << 11) \
543 | (bits ((insn2), 12, 14) << 8) \
544 | bits ((insn2), 0, 7))
/* Extract the immediate from a movw/movt instruction of encoding A.  INSN is
   the 32-bit instruction.  */
548 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
549 ((bits ((insn), 16, 19) << 12) \
550 | bits ((insn), 0, 11))
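
/* As a worked example: the Thumb-2 halfword pair 0xf64a, 0x33cd encodes
   "movw r3, #0xabcd", and EXTRACT_MOVW_MOVT_IMM_T reassembles
   imm4:i:imm3:imm8 into 0xabcd; the equivalent ARM encoding 0xe30a3bcd
   yields the same value via EXTRACT_MOVW_MOVT_IMM_A.  */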
552 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
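
/* For example, an encoded immediate of 0x1fe (count 3) expands to
   0x00fe00fe, while 0x4ff (count 9) expands to 0x7f800000, i.e. 0xff
   rotated right by 9 bits.  */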
555 thumb_expand_immediate (unsigned int imm)
557 unsigned int count = imm >> 7;
565 return (imm & 0xff) | ((imm & 0xff) << 16);
567 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
569 return (imm & 0xff) | ((imm & 0xff) << 8)
570 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
573 return (0x80 | (imm & 0x7f)) << (32 - count);
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in the
   epilogue, 0 otherwise.  */
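
/* For instance, 0xb008 ("add sp, #32") and 0xbd80 ("pop {r7, pc}") both
   satisfy this predicate.  */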
580 thumb_instruction_restores_sp (unsigned short insn)
582 return (insn == 0x46bd /* mov sp, r7 */
583 || (insn & 0xff80) == 0xb000 /* add sp, imm */
584 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
587 /* Analyze a Thumb prologue, looking for a recognizable stack frame
588 and frame pointer. Scan until we encounter a store that could
589 clobber the stack frame unexpectedly, or an unknown instruction.
590 Return the last address which is definitely safe to skip for an
591 initial breakpoint. */
594 thumb_analyze_prologue (struct gdbarch *gdbarch,
595 CORE_ADDR start, CORE_ADDR limit,
596 struct arm_prologue_cache *cache)
598 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
599 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
602 struct pv_area *stack;
603 struct cleanup *back_to;
605 CORE_ADDR unrecognized_pc = 0;
607 for (i = 0; i < 16; i++)
608 regs[i] = pv_register (i, 0);
609 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
610 back_to = make_cleanup_free_pv_area (stack);
612 while (start < limit)
616 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
618 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
623 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
626 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
627 whether to save LR (R14). */
628 mask = (insn & 0xff) | ((insn & 0x100) << 6);
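	  /* For example, "push {r4-r7, lr}" is encoded as 0xb5f0, which
	     yields a mask of 0x40f0 (r4-r7 plus LR).  */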
630 /* Calculate offsets of saved R0-R7 and LR. */
631 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
632 if (mask & (1 << regno))
634 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
636 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
639 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
641 offset = (insn & 0x7f) << 2; /* get scaled offset */
642 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
645 else if (thumb_instruction_restores_sp (insn))
647 /* Don't scan past the epilogue. */
650 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
651 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
653 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
654 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
655 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
657 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
658 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
659 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
661 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
662 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
663 && pv_is_constant (regs[bits (insn, 3, 5)]))
664 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
665 regs[bits (insn, 6, 8)]);
666 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
667 && pv_is_constant (regs[bits (insn, 3, 6)]))
669 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
670 int rm = bits (insn, 3, 6);
671 regs[rd] = pv_add (regs[rd], regs[rm]);
673 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
675 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
676 int src_reg = (insn & 0x78) >> 3;
677 regs[dst_reg] = regs[src_reg];
679 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
681 /* Handle stores to the stack. Normally pushes are used,
682 but with GCC -mtpcs-frame, there may be other stores
683 in the prologue to create the frame. */
684 int regno = (insn >> 8) & 0x7;
687 offset = (insn & 0xff) << 2;
688 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
690 if (pv_area_store_would_trash (stack, addr))
693 pv_area_store (stack, addr, 4, regs[regno]);
695 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
697 int rd = bits (insn, 0, 2);
698 int rn = bits (insn, 3, 5);
701 offset = bits (insn, 6, 10) << 2;
702 addr = pv_add_constant (regs[rn], offset);
704 if (pv_area_store_would_trash (stack, addr))
707 pv_area_store (stack, addr, 4, regs[rd]);
709 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
710 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
711 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
712 /* Ignore stores of argument registers to the stack. */
714 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
715 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
716 /* Ignore block loads from the stack, potentially copying
717 parameters from memory. */
719 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
720 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
721 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
722 /* Similarly ignore single loads from the stack. */
724 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
725 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
726 /* Skip register copies, i.e. saves to another register
727 instead of the stack. */
729 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
730 /* Recognize constant loads; even with small stacks these are necessary
732 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
733 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
735 /* Constant pool loads, for the same reason. */
736 unsigned int constant;
739 loc = start + 4 + bits (insn, 0, 7) * 4;
740 constant = read_memory_unsigned_integer (loc, 4, byte_order);
741 regs[bits (insn, 8, 10)] = pv_constant (constant);
743 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
745 unsigned short inst2;
747 inst2 = read_memory_unsigned_integer (start + 2, 2,
748 byte_order_for_code);
750 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
752 /* BL, BLX. Allow some special function calls when
753 skipping the prologue; GCC generates these before
754 storing arguments to the stack. */
756 int j1, j2, imm1, imm2;
758 imm1 = sbits (insn, 0, 10);
759 imm2 = bits (inst2, 0, 10);
760 j1 = bit (inst2, 13);
761 j2 = bit (inst2, 11);
763 offset = ((imm1 << 12) + (imm2 << 1));
764 offset ^= ((!j2) << 22) | ((!j1) << 23);
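	      /* The XOR reconstructs I1 = NOT(J1 EOR S) and
		 I2 = NOT(J2 EOR S) in bits 23 and 22 of the sign-extended
		 imm10:imm11:'0' offset, as the T1/T2 BL/BLX encodings
		 require.  */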
766 nextpc = start + 4 + offset;
767 /* For BLX make sure to clear the low bits. */
768 if (bit (inst2, 12) == 0)
769 nextpc = nextpc & 0xfffffffc;
771 if (!skip_prologue_function (gdbarch, nextpc,
772 bit (inst2, 12) != 0))
776 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
778 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
780 pv_t addr = regs[bits (insn, 0, 3)];
783 if (pv_area_store_would_trash (stack, addr))
786 /* Calculate offsets of saved registers. */
787 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
788 if (inst2 & (1 << regno))
790 addr = pv_add_constant (addr, -4);
791 pv_area_store (stack, addr, 4, regs[regno]);
795 regs[bits (insn, 0, 3)] = addr;
798 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
800 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
802 int regno1 = bits (inst2, 12, 15);
803 int regno2 = bits (inst2, 8, 11);
804 pv_t addr = regs[bits (insn, 0, 3)];
806 offset = inst2 & 0xff;
808 addr = pv_add_constant (addr, offset);
810 addr = pv_add_constant (addr, -offset);
812 if (pv_area_store_would_trash (stack, addr))
815 pv_area_store (stack, addr, 4, regs[regno1]);
816 pv_area_store (stack, pv_add_constant (addr, 4),
820 regs[bits (insn, 0, 3)] = addr;
823 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
824 && (inst2 & 0x0c00) == 0x0c00
825 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
827 int regno = bits (inst2, 12, 15);
828 pv_t addr = regs[bits (insn, 0, 3)];
830 offset = inst2 & 0xff;
832 addr = pv_add_constant (addr, offset);
834 addr = pv_add_constant (addr, -offset);
836 if (pv_area_store_would_trash (stack, addr))
839 pv_area_store (stack, addr, 4, regs[regno]);
842 regs[bits (insn, 0, 3)] = addr;
845 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
846 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
848 int regno = bits (inst2, 12, 15);
851 offset = inst2 & 0xfff;
852 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
854 if (pv_area_store_would_trash (stack, addr))
857 pv_area_store (stack, addr, 4, regs[regno]);
860 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
861 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
862 /* Ignore stores of argument registers to the stack. */
865 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
866 && (inst2 & 0x0d00) == 0x0c00
867 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
868 /* Ignore stores of argument registers to the stack. */
871 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
873 && (inst2 & 0x8000) == 0x0000
874 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
875 /* Ignore block loads from the stack, potentially copying
876 parameters from memory. */
879 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
881 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
882 /* Similarly ignore dual loads from the stack. */
885 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
886 && (inst2 & 0x0d00) == 0x0c00
887 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
888 /* Similarly ignore single loads from the stack. */
891 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
892 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
893 /* Similarly ignore single loads from the stack. */
896 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
897 && (inst2 & 0x8000) == 0x0000)
899 unsigned int imm = ((bits (insn, 10, 10) << 11)
900 | (bits (inst2, 12, 14) << 8)
901 | bits (inst2, 0, 7));
903 regs[bits (inst2, 8, 11)]
904 = pv_add_constant (regs[bits (insn, 0, 3)],
905 thumb_expand_immediate (imm));
908 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
909 && (inst2 & 0x8000) == 0x0000)
911 unsigned int imm = ((bits (insn, 10, 10) << 11)
912 | (bits (inst2, 12, 14) << 8)
913 | bits (inst2, 0, 7));
915 regs[bits (inst2, 8, 11)]
916 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
919 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
920 && (inst2 & 0x8000) == 0x0000)
922 unsigned int imm = ((bits (insn, 10, 10) << 11)
923 | (bits (inst2, 12, 14) << 8)
924 | bits (inst2, 0, 7));
926 regs[bits (inst2, 8, 11)]
927 = pv_add_constant (regs[bits (insn, 0, 3)],
928 - (CORE_ADDR) thumb_expand_immediate (imm));
931 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
932 && (inst2 & 0x8000) == 0x0000)
934 unsigned int imm = ((bits (insn, 10, 10) << 11)
935 | (bits (inst2, 12, 14) << 8)
936 | bits (inst2, 0, 7));
938 regs[bits (inst2, 8, 11)]
939 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
942 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
944 unsigned int imm = ((bits (insn, 10, 10) << 11)
945 | (bits (inst2, 12, 14) << 8)
946 | bits (inst2, 0, 7));
948 regs[bits (inst2, 8, 11)]
949 = pv_constant (thumb_expand_immediate (imm));
952 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
955 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
957 regs[bits (inst2, 8, 11)] = pv_constant (imm);
960 else if (insn == 0xea5f /* mov.w Rd,Rm */
961 && (inst2 & 0xf0f0) == 0)
963 int dst_reg = (inst2 & 0x0f00) >> 8;
964 int src_reg = inst2 & 0xf;
965 regs[dst_reg] = regs[src_reg];
968 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
970 /* Constant pool loads. */
971 unsigned int constant;
974 offset = bits (inst2, 0, 11);
976 loc = start + 4 + offset;
978 loc = start + 4 - offset;
980 constant = read_memory_unsigned_integer (loc, 4, byte_order);
981 regs[bits (inst2, 12, 15)] = pv_constant (constant);
984 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
986 /* Constant pool loads. */
987 unsigned int constant;
990 offset = bits (inst2, 0, 7) << 2;
992 loc = start + 4 + offset;
994 loc = start + 4 - offset;
996 constant = read_memory_unsigned_integer (loc, 4, byte_order);
997 regs[bits (inst2, 12, 15)] = pv_constant (constant);
999 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1000 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1003 else if (thumb2_instruction_changes_pc (insn, inst2))
1005 /* Don't scan past anything that might change control flow. */
1010 /* The optimizer might shove anything into the prologue,
1011 so we just skip what we don't recognize. */
1012 unrecognized_pc = start;
1017 else if (thumb_instruction_changes_pc (insn))
1019 /* Don't scan past anything that might change control flow. */
1024 /* The optimizer might shove anything into the prologue,
1025 so we just skip what we don't recognize. */
1026 unrecognized_pc = start;
1033 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1034 paddress (gdbarch, start));
1036 if (unrecognized_pc == 0)
1037 unrecognized_pc = start;
1041 do_cleanups (back_to);
1042 return unrecognized_pc;
1045 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1047 /* Frame pointer is fp. Frame size is constant. */
1048 cache->framereg = ARM_FP_REGNUM;
1049 cache->framesize = -regs[ARM_FP_REGNUM].k;
1051 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1053 /* Frame pointer is r7. Frame size is constant. */
1054 cache->framereg = THUMB_FP_REGNUM;
1055 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1059 /* Try the stack pointer... this is a bit desperate. */
1060 cache->framereg = ARM_SP_REGNUM;
1061 cache->framesize = -regs[ARM_SP_REGNUM].k;
1064 for (i = 0; i < 16; i++)
1065 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1066 cache->saved_regs[i].addr = offset;
1068 do_cleanups (back_to);
1069 return unrecognized_pc;
/* Try to analyze the instructions starting from PC, which load the symbol
   __stack_chk_guard.  Return the address of the instruction after the load,
   set the destination register number in *DESTREG, and set the size of the
   loading instructions in *OFFSET.  Return 0 if the instructions are not
   recognized.  */
1080 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1081 unsigned int *destreg, int *offset)
1083 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1084 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1085 unsigned int low, high, address;
1090 unsigned short insn1
1091 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1093 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1095 *destreg = bits (insn1, 8, 10);
1097 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1098 address = read_memory_unsigned_integer (address, 4,
1099 byte_order_for_code);
1101 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1103 unsigned short insn2
1104 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1106 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1109 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1111 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1113 /* movt Rd, #const */
1114 if ((insn1 & 0xfbc0) == 0xf2c0)
1116 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1117 *destreg = bits (insn2, 8, 11);
1119 address = (high << 16 | low);
1126 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1128 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1130 address = bits (insn, 0, 11) + pc + 8;
1131 address = read_memory_unsigned_integer (address, 4,
1132 byte_order_for_code);
1134 *destreg = bits (insn, 12, 15);
1137 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1139 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1142 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1144 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1146 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1147 *destreg = bits (insn, 12, 15);
1149 address = (high << 16 | low);
1157 /* Try to skip a sequence of instructions used for stack protector. If PC
1158 points to the first instruction of this sequence, return the address of
1159 first instruction after this sequence, otherwise, return original PC.
   On ARM, this sequence of instructions consists mainly of three steps:
1162 Step 1: load symbol __stack_chk_guard,
1163 Step 2: load from address of __stack_chk_guard,
1164 Step 3: store it to somewhere else.
   The instructions in steps 2 and 3 are usually the same across ARM
   architectures: step 2 is a single instruction 'ldr Rx, [Rn, #0]', and
   step 3 is a single instruction 'str Rx, [r7, #immd]'.  However, the
   instructions in step 1 vary between ARM architectures.  On ARMv7,
1172 movw Rn, #:lower16:__stack_chk_guard
1173 movt Rn, #:upper16:__stack_chk_guard
1180 .word __stack_chk_guard
   Since ldr/str are very common instructions, we can't use them alone as
   the 'fingerprint' or 'signature' of a stack protector sequence.  Instead
   we use the sequence {movw/movt, ldr}/ldr/str plus the symbol
   __stack_chk_guard, if not stripped, as the 'fingerprint' of a stack
   protector code sequence.  */
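
/* A typical ARMv7 instance of the whole sequence, with illustrative
   register numbers and store offset, would therefore be:

     movw r3, #:lower16:__stack_chk_guard
     movt r3, #:upper16:__stack_chk_guard
     ldr  r3, [r3, #0]
     str  r3, [r7, #8]

   arm_skip_stack_protector below returns the address just past such a
   sequence.  */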
1188 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1190 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1191 unsigned int basereg;
1192 struct bound_minimal_symbol stack_chk_guard;
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1197 /* Try to parse the instructions in Step 1. */
1198 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1203 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for the stack protector.  */
1206 if (stack_chk_guard.minsym == NULL
1207 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1212 unsigned int destreg;
1214 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1216 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1217 if ((insn & 0xf800) != 0x6800)
1219 if (bits (insn, 3, 5) != basereg)
1221 destreg = bits (insn, 0, 2);
1223 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1224 byte_order_for_code);
1225 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1226 if ((insn & 0xf800) != 0x6000)
1228 if (destreg != bits (insn, 0, 2))
1233 unsigned int destreg;
1235 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1237 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1238 if ((insn & 0x0e500000) != 0x04100000)
1240 if (bits (insn, 16, 19) != basereg)
1242 destreg = bits (insn, 12, 15);
1243 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1244 insn = read_memory_unsigned_integer (pc + offset + 4,
1245 4, byte_order_for_code);
1246 if ((insn & 0x0e500000) != 0x04000000)
1248 if (bits (insn, 12, 15) != destreg)
  /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
     and 8 bytes in ARM mode.  */
1254 return pc + offset + 4;
1256 return pc + offset + 8;
1259 /* Advance the PC across any function entry prologue instructions to
1260 reach some "real" code.
   The APCS (ARM Procedure Call Standard) defines the following prologue:
1266 [stmfd sp!, {a1,a2,a3,a4}]
1267 stmfd sp!, {...,fp,ip,lr,pc}
1268 [stfe f7, [sp, #-12]!]
1269 [stfe f6, [sp, #-12]!]
1270 [stfe f5, [sp, #-12]!]
1271 [stfe f4, [sp, #-12]!]
1272 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1275 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1277 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1279 CORE_ADDR func_addr, limit_pc;
1281 /* See if we can determine the end of the prologue via the symbol table.
1282 If so, then return either PC, or the PC after the prologue, whichever
1284 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1286 CORE_ADDR post_prologue_pc
1287 = skip_prologue_using_sal (gdbarch, func_addr);
1288 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1290 if (post_prologue_pc)
1292 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1295 /* GCC always emits a line note before the prologue and another
1296 one after, even if the two are at the same address or on the
1297 same line. Take advantage of this so that we do not need to
1298 know every instruction that might appear in the prologue. We
1299 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
1301 if (post_prologue_pc
1303 || COMPUNIT_PRODUCER (cust) == NULL
1304 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1305 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1306 return post_prologue_pc;
1308 if (post_prologue_pc != 0)
1310 CORE_ADDR analyzed_limit;
1312 /* For non-GCC compilers, make sure the entire line is an
1313 acceptable prologue; GDB will round this function's
1314 return value up to the end of the following line so we
1315 can not skip just part of a line (and we do not want to).
1317 RealView does not treat the prologue specially, but does
1318 associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
1321 if (arm_pc_is_thumb (gdbarch, func_addr))
1322 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1323 post_prologue_pc, NULL);
1325 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1326 post_prologue_pc, NULL);
1328 if (analyzed_limit != post_prologue_pc)
1331 return post_prologue_pc;
  /* Can't determine the prologue from the symbol table; we need to
     examine the instructions.  */
1338 /* Find an upper limit on the function prologue using the debug
1339 information. If the debug information could not be used to provide
     that bound, then use an arbitrarily large number as the upper bound.  */
1341 /* Like arm_scan_prologue, stop no later than pc + 64. */
1342 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1344 limit_pc = pc + 64; /* Magic. */
1347 /* Check if this is Thumb code. */
1348 if (arm_pc_is_thumb (gdbarch, pc))
1349 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1351 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1355 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1356 This function decodes a Thumb function prologue to determine:
1357 1) the size of the stack frame
1358 2) which registers are saved on it
1359 3) the offsets of saved regs
1360 4) the offset from the stack pointer to the frame pointer
1362 A typical Thumb function prologue would create this stack frame
1363 (offsets relative to FP)
1364 old SP -> 24 stack parameters
1367 R7 -> 0 local variables (16 bytes)
1368 SP -> -12 additional stack space (12 bytes)
1369 The frame size would thus be 36 bytes, and the frame offset would be
1370 12 bytes. The frame register is R7.
   The comments for thumb_skip_prolog() describe the algorithm we use
   to detect the end of the prologue.  */
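
/* As an illustration, a Thumb prologue of the general shape recognized
   by thumb_analyze_prologue might be (register choices and frame sizes
   are examples only):

     push  {r4-r7, lr}
     sub   sp, #16
     add   r7, sp, #0
     sub   sp, #12

   The scan tracks the effect of each of these on SP and R7 and records
   where the pushed registers were saved.  */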
1377 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1378 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1380 CORE_ADDR prologue_start;
1381 CORE_ADDR prologue_end;
1383 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1386 /* See comment in arm_scan_prologue for an explanation of
1388 if (prologue_end > prologue_start + 64)
1390 prologue_end = prologue_start + 64;
1394 /* We're in the boondocks: we have no idea where the start of the
1398 prologue_end = min (prologue_end, prev_pc);
1400 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
/* Return 1 if the ARM instruction INSN restores SP in the epilogue,
   0 otherwise.  */
1407 arm_instruction_restores_sp (unsigned int insn)
1409 if (bits (insn, 28, 31) != INST_NV)
1411 if ((insn & 0x0df0f000) == 0x0080d000
1412 /* ADD SP (register or immediate). */
1413 || (insn & 0x0df0f000) == 0x0040d000
1414 /* SUB SP (register or immediate). */
1415 || (insn & 0x0ffffff0) == 0x01a0d000
1417 || (insn & 0x0fff0000) == 0x08bd0000
1419 || (insn & 0x0fff0000) == 0x049d0000)
1420 /* POP of a single register. */
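      /* For example, 0xe8bd8010 ("ldmfd sp!, {r4, pc}", i.e. "pop {r4, pc}")
	 matches the 0x08bd0000 case above.  */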
1427 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1428 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
   fill it in.  Return the first address not recognized as a prologue
   instruction.
1432 We recognize all the instructions typically found in ARM prologues,
1433 plus harmless instructions which can be skipped (either for analysis
1434 purposes, or a more restrictive set that can be skipped when finding
1435 the end of the prologue). */
1438 arm_analyze_prologue (struct gdbarch *gdbarch,
1439 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1440 struct arm_prologue_cache *cache)
1442 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1443 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1445 CORE_ADDR offset, current_pc;
1446 pv_t regs[ARM_FPS_REGNUM];
1447 struct pv_area *stack;
1448 struct cleanup *back_to;
1449 CORE_ADDR unrecognized_pc = 0;
1451 /* Search the prologue looking for instructions that set up the
1452 frame pointer, adjust the stack pointer, and save registers.
1454 Be careful, however, and if it doesn't look like a prologue,
1455 don't try to scan it. If, for instance, a frameless function
1456 begins with stmfd sp!, then we will tell ourselves there is
1457 a frame, which will confuse stack traceback, as well as "finish"
1458 and other operations that rely on a knowledge of the stack
1461 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1462 regs[regno] = pv_register (regno, 0);
1463 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1464 back_to = make_cleanup_free_pv_area (stack);
1466 for (current_pc = prologue_start;
1467 current_pc < prologue_end;
1471 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1473 if (insn == 0xe1a0c00d) /* mov ip, sp */
1475 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1478 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1479 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1481 unsigned imm = insn & 0xff; /* immediate value */
1482 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1483 int rd = bits (insn, 12, 15);
1484 imm = (imm >> rot) | (imm << (32 - rot));
1485 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1488 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1489 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1491 unsigned imm = insn & 0xff; /* immediate value */
1492 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1493 int rd = bits (insn, 12, 15);
1494 imm = (imm >> rot) | (imm << (32 - rot));
1495 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1498 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1501 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1503 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1504 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1505 regs[bits (insn, 12, 15)]);
1508 else if ((insn & 0xffff0000) == 0xe92d0000)
1509 /* stmfd sp!, {..., fp, ip, lr, pc}
1511 stmfd sp!, {a1, a2, a3, a4} */
1513 int mask = insn & 0xffff;
1515 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1518 /* Calculate offsets of saved registers. */
1519 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1520 if (mask & (1 << regno))
1523 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1524 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1527 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1528 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1529 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1531 /* No need to add this to saved_regs -- it's just an arg reg. */
1534 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1535 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1536 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1538 /* No need to add this to saved_regs -- it's just an arg reg. */
1541 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1543 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1545 /* No need to add this to saved_regs -- it's just arg regs. */
1548 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1550 unsigned imm = insn & 0xff; /* immediate value */
1551 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1552 imm = (imm >> rot) | (imm << (32 - rot));
1553 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1555 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1557 unsigned imm = insn & 0xff; /* immediate value */
1558 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1559 imm = (imm >> rot) | (imm << (32 - rot));
1560 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1562 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1564 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1566 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1569 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1570 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1571 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1573 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1575 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1577 int n_saved_fp_regs;
1578 unsigned int fp_start_reg, fp_bound_reg;
1580 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1583 if ((insn & 0x800) == 0x800) /* N0 is set */
1585 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1586 n_saved_fp_regs = 3;
1588 n_saved_fp_regs = 1;
1592 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1593 n_saved_fp_regs = 2;
1595 n_saved_fp_regs = 4;
1598 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1599 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1600 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1602 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1603 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1604 regs[fp_start_reg++]);
1607 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1609 /* Allow some special function calls when skipping the
	     prologue; GCC generates these before storing arguments to
	     the stack.  */
1612 CORE_ADDR dest = BranchDest (current_pc, insn);
1614 if (skip_prologue_function (gdbarch, dest, 0))
1619 else if ((insn & 0xf0000000) != 0xe0000000)
1620 break; /* Condition not true, exit early. */
1621 else if (arm_instruction_changes_pc (insn))
1622 /* Don't scan past anything that might change control flow. */
1624 else if (arm_instruction_restores_sp (insn))
1626 /* Don't scan past the epilogue. */
1629 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1630 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1631 /* Ignore block loads from the stack, potentially copying
1632 parameters from memory. */
1634 else if ((insn & 0xfc500000) == 0xe4100000
1635 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1636 /* Similarly ignore single loads from the stack. */
1638 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1639 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1640 register instead of the stack. */
	  /* The optimizer might shove anything into the prologue.  If we
	     are building up the cache (CACHE != NULL) from scanning the
	     prologue, we just skip what we don't recognize and scan
	     further to make the cache as complete as possible.  However,
	     if we are skipping the prologue, we stop immediately on the
	     first unrecognized instruction.  */
1650 unrecognized_pc = current_pc;
1658 if (unrecognized_pc == 0)
1659 unrecognized_pc = current_pc;
1663 int framereg, framesize;
1665 /* The frame size is just the distance from the frame register
1666 to the original stack pointer. */
1667 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1669 /* Frame pointer is fp. */
1670 framereg = ARM_FP_REGNUM;
1671 framesize = -regs[ARM_FP_REGNUM].k;
1675 /* Try the stack pointer... this is a bit desperate. */
1676 framereg = ARM_SP_REGNUM;
1677 framesize = -regs[ARM_SP_REGNUM].k;
1680 cache->framereg = framereg;
1681 cache->framesize = framesize;
1683 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1684 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1685 cache->saved_regs[regno].addr = offset;
1689 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1690 paddress (gdbarch, unrecognized_pc));
1692 do_cleanups (back_to);
1693 return unrecognized_pc;
1697 arm_scan_prologue (struct frame_info *this_frame,
1698 struct arm_prologue_cache *cache)
1700 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1701 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1703 CORE_ADDR prologue_start, prologue_end, current_pc;
1704 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1705 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1706 pv_t regs[ARM_FPS_REGNUM];
1707 struct pv_area *stack;
1708 struct cleanup *back_to;
1711 /* Assume there is no frame until proven otherwise. */
1712 cache->framereg = ARM_SP_REGNUM;
1713 cache->framesize = 0;
1715 /* Check for Thumb prologue. */
1716 if (arm_frame_is_thumb (this_frame))
1718 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1722 /* Find the function prologue. If we can't find the function in
1723 the symbol table, peek in the stack frame to find the PC. */
1724 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1727 /* One way to find the end of the prologue (which works well
1728 for unoptimized code) is to do the following:
1730 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1733 prologue_end = prev_pc;
1734 else if (sal.end < prologue_end)
1735 prologue_end = sal.end;
1737 This mechanism is very accurate so long as the optimizer
1738 doesn't move any instructions from the function body into the
1739 prologue. If this happens, sal.end will be the last
1740 instruction in the first hunk of prologue code just before
1741 the first instruction that the scheduler has moved from
1742 the body to the prologue.
1744 In order to make sure that we scan all of the prologue
1745 instructions, we use a slightly less accurate mechanism which
1746 may scan more than necessary. To help compensate for this
1747 lack of accuracy, the prologue scanning loop below contains
1748 several clauses which'll cause the loop to terminate early if
1749 an implausible prologue instruction is encountered.
1755 is a suitable endpoint since it accounts for the largest
1756 possible prologue plus up to five instructions inserted by
1759 if (prologue_end > prologue_start + 64)
1761 prologue_end = prologue_start + 64; /* See above. */
1766 /* We have no symbol information. Our only option is to assume this
1767 function has a standard stack frame and the normal frame register.
1768 Then, we can find the value of our frame pointer on entrance to
1769 the callee (or at the present moment if this is the innermost frame).
1770 The value stored there should be the address of the stmfd + 8. */
1771 CORE_ADDR frame_loc;
1772 LONGEST return_value;
1774 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1775 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1779 prologue_start = gdbarch_addr_bits_remove
1780 (gdbarch, return_value) - 8;
1781 prologue_end = prologue_start + 64; /* See above. */
1785 if (prev_pc < prologue_end)
1786 prologue_end = prev_pc;
1788 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1791 static struct arm_prologue_cache *
1792 arm_make_prologue_cache (struct frame_info *this_frame)
1795 struct arm_prologue_cache *cache;
1796 CORE_ADDR unwound_fp;
1798 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1799 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1801 arm_scan_prologue (this_frame, cache);
1803 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1804 if (unwound_fp == 0)
1807 cache->prev_sp = unwound_fp + cache->framesize;
1809 /* Calculate actual addresses of saved registers using offsets
1810 determined by arm_scan_prologue. */
1811 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1812 if (trad_frame_addr_p (cache->saved_regs, reg))
1813 cache->saved_regs[reg].addr += cache->prev_sp;
1818 /* Implementation of the stop_reason hook for arm_prologue frames. */
1820 static enum unwind_stop_reason
1821 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1824 struct arm_prologue_cache *cache;
1827 if (*this_cache == NULL)
1828 *this_cache = arm_make_prologue_cache (this_frame);
1829 cache = (struct arm_prologue_cache *) *this_cache;
1831 /* This is meant to halt the backtrace at "_start". */
1832 pc = get_frame_pc (this_frame);
1833 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1834 return UNWIND_OUTERMOST;
1836 /* If we've hit a wall, stop. */
1837 if (cache->prev_sp == 0)
1838 return UNWIND_OUTERMOST;
1840 return UNWIND_NO_REASON;
1843 /* Our frame ID for a normal frame is the current function's starting PC
1844 and the caller's SP when we were called. */
1847 arm_prologue_this_id (struct frame_info *this_frame,
1849 struct frame_id *this_id)
1851 struct arm_prologue_cache *cache;
1855 if (*this_cache == NULL)
1856 *this_cache = arm_make_prologue_cache (this_frame);
1857 cache = (struct arm_prologue_cache *) *this_cache;
1859 /* Use function start address as part of the frame ID. If we cannot
1860 identify the start address (due to missing symbol information),
1861 fall back to just using the current PC. */
1862 pc = get_frame_pc (this_frame);
1863 func = get_frame_func (this_frame);
1867 id = frame_id_build (cache->prev_sp, func);
1871 static struct value *
1872 arm_prologue_prev_register (struct frame_info *this_frame,
1876 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1877 struct arm_prologue_cache *cache;
1879 if (*this_cache == NULL)
1880 *this_cache = arm_make_prologue_cache (this_frame);
1881 cache = (struct arm_prologue_cache *) *this_cache;
1883 /* If we are asked to unwind the PC, then we need to return the LR
1884 instead. The prologue may save PC, but it will point into this
1885 frame's prologue, not the next frame's resume location. Also
1886 strip the saved T bit. A valid LR may have the low bit set, but
1887 a valid PC never does. */
1888 if (prev_regnum == ARM_PC_REGNUM)
1892 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1893 return frame_unwind_got_constant (this_frame, prev_regnum,
1894 arm_addr_bits_remove (gdbarch, lr));
1897 /* SP is generally not saved to the stack, but this frame is
1898 identified by the next frame's stack pointer at the time of the call.
1899 The value was already reconstructed into PREV_SP. */
1900 if (prev_regnum == ARM_SP_REGNUM)
1901 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1903 /* The CPSR may have been changed by the call instruction and by the
1904 called function. The only bit we can reconstruct is the T bit,
1905 by checking the low bit of LR as of the call. This is a reliable
1906 indicator of Thumb-ness except for some ARM v4T pre-interworking
1907 Thumb code, which could get away with a clear low bit as long as
1908 the called function did not use bx. Guess that all other
1909 bits are unchanged; the condition flags are presumably lost,
1910 but the processor status is likely valid. */
1911 if (prev_regnum == ARM_PS_REGNUM)
1914 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1916 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1917 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1918 if (IS_THUMB_ADDR (lr))
1922 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1925 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1929 struct frame_unwind arm_prologue_unwind = {
1931 arm_prologue_unwind_stop_reason,
1932 arm_prologue_this_id,
1933 arm_prologue_prev_register,
1935 default_frame_sniffer
1938 /* Maintain a list of ARM exception table entries per objfile, similar to the
1939 list of mapping symbols. We only cache entries for standard ARM-defined
1940 personality routines; the cache will contain only the frame unwinding
1941 instructions associated with the entry (not the descriptors). */
1943 static const struct objfile_data *arm_exidx_data_key;
1945 struct arm_exidx_entry
1950 typedef struct arm_exidx_entry arm_exidx_entry_s;
1951 DEF_VEC_O(arm_exidx_entry_s);
1953 struct arm_exidx_data
1955 VEC(arm_exidx_entry_s) **section_maps;
1959 arm_exidx_data_free (struct objfile *objfile, void *arg)
1961 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1964 for (i = 0; i < objfile->obfd->section_count; i++)
1965 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1969 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1970 const struct arm_exidx_entry *rhs)
1972 return lhs->addr < rhs->addr;
1975 static struct obj_section *
1976 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1978 struct obj_section *osect;
1980 ALL_OBJFILE_OSECTIONS (objfile, osect)
1981 if (bfd_get_section_flags (objfile->obfd,
1982 osect->the_bfd_section) & SEC_ALLOC)
1984 bfd_vma start, size;
1985 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1986 size = bfd_get_section_size (osect->the_bfd_section);
1988 if (start <= vma && vma < start + size)
1995 /* Parse contents of exception table and exception index sections
1996 of OBJFILE, and fill in the exception table entry cache.
1998 For each entry that refers to a standard ARM-defined personality
1999 routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions are
   normalized by:
2002 - extracting them from the rest of the table data
2003 - converting to host endianness
2004 - appending the implicit 0xb0 ("Finish") code
2006 The extracted and normalized instructions are stored for later
2007 retrieval by the arm_find_exidx_entry routine. */
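
/* For reference, each .ARM.exidx entry is a pair of 32-bit words: a prel31
   offset to the function start, followed by either EXIDX_CANTUNWIND (1), an
   inline "short form" entry (bit 31 set) holding the unwind opcodes
   themselves, or a prel31 pointer into .ARM.extab.  The code below decodes
   exactly these cases.  */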
2010 arm_exidx_new_objfile (struct objfile *objfile)
2012 struct cleanup *cleanups;
2013 struct arm_exidx_data *data;
2014 asection *exidx, *extab;
2015 bfd_vma exidx_vma = 0, extab_vma = 0;
2016 bfd_size_type exidx_size = 0, extab_size = 0;
2017 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2020 /* If we've already touched this file, do nothing. */
2021 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2023 cleanups = make_cleanup (null_cleanup, NULL);
2025 /* Read contents of exception table and index. */
2026 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2029 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2030 exidx_size = bfd_get_section_size (exidx);
2031 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2032 make_cleanup (xfree, exidx_data);
2034 if (!bfd_get_section_contents (objfile->obfd, exidx,
2035 exidx_data, 0, exidx_size))
2037 do_cleanups (cleanups);
2042 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2045 extab_vma = bfd_section_vma (objfile->obfd, extab);
2046 extab_size = bfd_get_section_size (extab);
2047 extab_data = (gdb_byte *) xmalloc (extab_size);
2048 make_cleanup (xfree, extab_data);
2050 if (!bfd_get_section_contents (objfile->obfd, extab,
2051 extab_data, 0, extab_size))
2053 do_cleanups (cleanups);
2058 /* Allocate exception table data structure. */
2059 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2060 set_objfile_data (objfile, arm_exidx_data_key, data);
2061 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2062 objfile->obfd->section_count,
2063 VEC(arm_exidx_entry_s) *);
2065 /* Fill in exception table. */
2066 for (i = 0; i < exidx_size / 8; i++)
2068 struct arm_exidx_entry new_exidx_entry;
2069 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2070 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2071 bfd_vma addr = 0, word = 0;
2072 int n_bytes = 0, n_words = 0;
2073 struct obj_section *sec;
2074 gdb_byte *entry = NULL;
2076 /* Extract address of start of function. */
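/* The first word is a 31-bit place-relative ("prel31") offset from the
   entry's own address to the function it covers.  As an illustrative
   worked example (values assumed, not taken from any particular
   binary): a raw field of 0x7fffff00 sign-extends to -0x100, so the
   function starts 256 bytes before this entry, while a raw field of
   0x00000010 means it starts 16 bytes after it.  The two statements
   below perform that sign extension and rebasing.  */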
2077 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2078 idx += exidx_vma + i * 8;
2080 /* Find section containing function and compute section offset. */
2081 sec = arm_obj_section_from_vma (objfile, idx);
2084 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2086 /* Determine address of exception table entry. */
2089 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2091 else if ((val & 0xff000000) == 0x80000000)
2093 /* Exception table entry embedded in .ARM.exidx
2094 -- must be short form. */
2098 else if (!(val & 0x80000000))
2100 /* Exception table entry in .ARM.extab. */
2101 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2102 addr += exidx_vma + i * 8 + 4;
2104 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2106 word = bfd_h_get_32 (objfile->obfd,
2107 extab_data + addr - extab_vma);
2110 if ((word & 0xff000000) == 0x80000000)
2115 else if ((word & 0xff000000) == 0x81000000
2116 || (word & 0xff000000) == 0x82000000)
2120 n_words = ((word >> 16) & 0xff);
2122 else if (!(word & 0x80000000))
2125 struct obj_section *pers_sec;
2126 int gnu_personality = 0;
2128 /* Custom personality routine. */
2129 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2130 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2132 /* Check whether we've got one of the variants of the
2133 GNU personality routines. */
2134 pers_sec = arm_obj_section_from_vma (objfile, pers);
2137 static const char *personality[] =
2139 "__gcc_personality_v0",
2140 "__gxx_personality_v0",
2141 "__gcj_personality_v0",
2142 "__gnu_objc_personality_v0",
2146 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2149 for (k = 0; personality[k]; k++)
2150 if (lookup_minimal_symbol_by_pc_name
2151 (pc, personality[k], objfile))
2153 gnu_personality = 1;
2158 /* If so, the next word contains a word count in the high
2159 byte, followed by the same unwind instructions as the
2160 pre-defined forms. */
2162 && addr + 4 <= extab_vma + extab_size)
2164 word = bfd_h_get_32 (objfile->obfd,
2165 extab_data + addr - extab_vma);
2168 n_words = ((word >> 24) & 0xff);
2174 /* Sanity check address. */
2176 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2177 n_words = n_bytes = 0;
2179 /* The unwind instructions reside in WORD (only the N_BYTES least
2180 significant bytes are valid), followed by N_WORDS words in the
2181 extab section starting at ADDR. */
2182 if (n_bytes || n_words)
2185 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2186 n_bytes + n_words * 4 + 1);
2189 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2193 word = bfd_h_get_32 (objfile->obfd,
2194 extab_data + addr - extab_vma);
2197 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2198 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2199 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2200 *p++ = (gdb_byte) (word & 0xff);
2203 /* Implied "Finish" to terminate the list. */
2207 /* Push entry onto vector. They are guaranteed to always
2208 appear in order of increasing addresses. */
2209 new_exidx_entry.addr = idx;
2210 new_exidx_entry.entry = entry;
2211 VEC_safe_push (arm_exidx_entry_s,
2212 data->section_maps[sec->the_bfd_section->index],
2216 do_cleanups (cleanups);
2219 /* Search for the exception table entry covering MEMADDR. If one is found,
2220 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2221 set *START to the start of the region covered by this entry. */
2224 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2226 struct obj_section *sec;
2228 sec = find_pc_section (memaddr);
2231 struct arm_exidx_data *data;
2232 VEC(arm_exidx_entry_s) *map;
2233 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2236 data = ((struct arm_exidx_data *)
2237 objfile_data (sec->objfile, arm_exidx_data_key));
2240 map = data->section_maps[sec->the_bfd_section->index];
2241 if (!VEC_empty (arm_exidx_entry_s, map))
2243 struct arm_exidx_entry *map_sym;
2245 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2246 arm_compare_exidx_entries);
2248 /* VEC_lower_bound finds the earliest ordered insertion
2249 point. If the following symbol starts at this exact
2250 address, we use that; otherwise, the preceding
2251 exception table entry covers this address. */
2252 if (idx < VEC_length (arm_exidx_entry_s, map))
2254 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2255 if (map_sym->addr == map_key.addr)
2258 *start = map_sym->addr + obj_section_addr (sec);
2259 return map_sym->entry;
2265 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2267 *start = map_sym->addr + obj_section_addr (sec);
2268 return map_sym->entry;
2277 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2278 instruction list from the ARM exception table entry ENTRY, allocate and
2279 return a prologue cache structure describing how to unwind this frame.
2281 Return NULL if the unwinding instruction list contains a "spare",
2282 "reserved" or "refuse to unwind" instruction as defined in section
2283 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2284 for the ARM Architecture" document. */
2286 static struct arm_prologue_cache *
2287 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2292 struct arm_prologue_cache *cache;
2293 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2294 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2300 /* Whenever we reload SP, we have to retrieve its actual value
2301 in the current frame. */
2304 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2306 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2307 vsp = get_frame_register_unsigned (this_frame, reg);
2311 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2312 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2318 /* Decode next unwind instruction. */
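      /* As an illustrative worked example (byte values assumed): the
	 sequence 0x97 0x84 0x08, with the implicit 0xb0 appended by
	 the parser above, decodes to "vsp = r7", "pop {r7, r14} from
	 vsp", "finish" -- the typical unwind for a frame built with
	 "push {r7, lr}; mov r7, sp".  */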
2321 if ((insn & 0xc0) == 0)
2323 int offset = insn & 0x3f;
2324 vsp += (offset << 2) + 4;
2326 else if ((insn & 0xc0) == 0x40)
2328 int offset = insn & 0x3f;
2329 vsp -= (offset << 2) + 4;
2331 else if ((insn & 0xf0) == 0x80)
2333 int mask = ((insn & 0xf) << 8) | *entry++;
2336 /* The special case of an all-zero mask identifies
2337 "Refuse to unwind". We return NULL to fall back
2338 to the prologue analyzer. */
2342 /* Pop registers r4..r15 under mask. */
2343 for (i = 0; i < 12; i++)
2344 if (mask & (1 << i))
2346 cache->saved_regs[4 + i].addr = vsp;
2350 /* Special-case popping SP -- we need to reload vsp. */
2351 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2354 else if ((insn & 0xf0) == 0x90)
2356 int reg = insn & 0xf;
2358 /* Reserved cases. */
2359 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2362 /* Set SP from another register and mark VSP for reload. */
2363 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2366 else if ((insn & 0xf0) == 0xa0)
2368 int count = insn & 0x7;
2369 int pop_lr = (insn & 0x8) != 0;
2372 /* Pop r4..r[4+count]. */
2373 for (i = 0; i <= count; i++)
2375 cache->saved_regs[4 + i].addr = vsp;
2379 /* If indicated by flag, pop LR as well. */
2382 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2386 else if (insn == 0xb0)
2388 /* We could only have updated PC by popping into it; if so, it
2389 will show up as an address. Otherwise, copy LR into PC. */
2390 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2391 cache->saved_regs[ARM_PC_REGNUM]
2392 = cache->saved_regs[ARM_LR_REGNUM];
2397 else if (insn == 0xb1)
2399 int mask = *entry++;
2402 /* An all-zero mask or a mask >= 16 is "spare". */
2403 if (mask == 0 || mask >= 16)
2406 /* Pop r0..r3 under mask. */
2407 for (i = 0; i < 4; i++)
2408 if (mask & (1 << i))
2410 cache->saved_regs[i].addr = vsp;
2414 else if (insn == 0xb2)
2416 ULONGEST offset = 0;
2421 offset |= (*entry & 0x7f) << shift;
2424 while (*entry++ & 0x80);
2426 vsp += 0x204 + (offset << 2);
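	  /* Worked example (assumed operand bytes): "0xb2 0x00" adds
	     0x204 (516) bytes to vsp; "0xb2 0x81 0x01" encodes the
	     ULEB128 value 0x81, so vsp grows by 0x204 + 0x81 * 4
	     = 0x408 bytes.  */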
2428 else if (insn == 0xb3)
2430 int start = *entry >> 4;
2431 int count = (*entry++) & 0xf;
2434 /* Only registers D0..D15 are valid here. */
2435 if (start + count >= 16)
2438 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2439 for (i = 0; i <= count; i++)
2441 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2445 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2448 else if ((insn & 0xf8) == 0xb8)
2450 int count = insn & 0x7;
2453 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2454 for (i = 0; i <= count; i++)
2456 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2460 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2463 else if (insn == 0xc6)
2465 int start = *entry >> 4;
2466 int count = (*entry++) & 0xf;
2469 /* Only registers WR0..WR15 are valid. */
2470 if (start + count >= 16)
2473 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2474 for (i = 0; i <= count; i++)
2476 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2480 else if (insn == 0xc7)
2482 int mask = *entry++;
2485 /* An all-zero mask or a mask >= 16 is "spare". */
2486 if (mask == 0 || mask >= 16)
2489 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2490 for (i = 0; i < 4; i++)
2491 if (mask & (1 << i))
2493 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2497 else if ((insn & 0xf8) == 0xc0)
2499 int count = insn & 0x7;
2502 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2503 for (i = 0; i <= count; i++)
2505 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2509 else if (insn == 0xc8)
2511 int start = *entry >> 4;
2512 int count = (*entry++) & 0xf;
2515 /* Only registers D0..D31 are valid. */
2516 if (start + count >= 16)
2519 /* Pop VFP double-precision registers
2520 D[16+start]..D[16+start+count]. */
2521 for (i = 0; i <= count; i++)
2523 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2527 else if (insn == 0xc9)
2529 int start = *entry >> 4;
2530 int count = (*entry++) & 0xf;
2533 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2534 for (i = 0; i <= count; i++)
2536 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2540 else if ((insn & 0xf8) == 0xd0)
2542 int count = insn & 0x7;
2545 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2546 for (i = 0; i <= count; i++)
2548 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2554 /* Everything else is "spare". */
2559 /* If we restore SP from a register, assume this was the frame register.
2560 Otherwise just fall back to SP as frame register. */
2561 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2562 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2564 cache->framereg = ARM_SP_REGNUM;
2566 /* Determine offset to previous frame. */
2568 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2570 /* We already got the previous SP. */
2571 cache->prev_sp = vsp;
2576 /* Unwinding via ARM exception table entries. Note that the sniffer
2577 already computes a filled-in prologue cache, which is then used
2578 with the same arm_prologue_this_id and arm_prologue_prev_register
2579 routines also used for prologue-parsing based unwinding. */
2582 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2583 struct frame_info *this_frame,
2584 void **this_prologue_cache)
2586 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2587 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2588 CORE_ADDR addr_in_block, exidx_region, func_start;
2589 struct arm_prologue_cache *cache;
2592 /* See if we have an ARM exception table entry covering this address. */
2593 addr_in_block = get_frame_address_in_block (this_frame);
2594 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2598 /* The ARM exception table does not describe unwind information
2599 for arbitrary PC values, but is guaranteed to be correct only
2600 at call sites. We have to decide here whether we want to use
2601 ARM exception table information for this frame, or fall back
2602 to using prologue parsing. (Note that if we have DWARF CFI,
2603 this sniffer isn't even called -- CFI is always preferred.)
2605 Before we make this decision, however, we check whether we
2606 actually have *symbol* information for the current frame.
2607 If not, prologue parsing would not work anyway, so we might
2608 as well use the exception table and hope for the best. */
2609 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2613 /* If the next frame is "normal", we are at a call site in this
2614 frame, so exception information is guaranteed to be valid. */
2615 if (get_next_frame (this_frame)
2616 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2619 /* We also assume exception information is valid if we're currently
2620 blocked in a system call. The system library is supposed to
2621 ensure this, so that e.g. pthread cancellation works. */
2622 if (arm_frame_is_thumb (this_frame))
2626 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2627 byte_order_for_code, &insn)
2628 && (insn & 0xff00) == 0xdf00 /* svc */)
2635 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2636 byte_order_for_code, &insn)
2637 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2641 /* Bail out if we don't know that exception information is valid. */
2645 /* The ARM exception index does not mark the *end* of the region
2646 covered by the entry, and some functions will not have any entry.
2647 To correctly recognize the end of the covered region, the linker
2648 should have inserted dummy records with a CANTUNWIND marker.
2650 Unfortunately, current versions of GNU ld do not reliably do
2651 this, and thus we may have found an incorrect entry above.
2652 As a (temporary) sanity check, we only use the entry if it
2653 lies *within* the bounds of the function. Note that this check
2654 might reject perfectly valid entries that just happen to cover
2655 multiple functions; therefore this check ought to be removed
2656 once the linker is fixed. */
2657 if (func_start > exidx_region)
2661 /* Decode the list of unwinding instructions into a prologue cache.
2662 Note that this may fail due to e.g. a "refuse to unwind" code. */
2663 cache = arm_exidx_fill_cache (this_frame, entry);
2667 *this_prologue_cache = cache;
2671 struct frame_unwind arm_exidx_unwind = {
2673 default_frame_unwind_stop_reason,
2674 arm_prologue_this_id,
2675 arm_prologue_prev_register,
2677 arm_exidx_unwind_sniffer
2680 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2681 trampoline, return the target PC. Otherwise return 0.
2683 void call0a (char c, short s, int i, long l) {}
2687 (*pointer_to_call0a) (c, s, i, l);
2690 Instead of calling a stub library function _call_via_xx (xx is
2691 the register name), GCC may inline the trampoline in the object
2692 file as below (register r2 has the address of call0a).
2695 .type main, %function
2704 The trampoline 'bx r2' doesn't belong to main. */
2707 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2709 /* The heuristic for recognizing such a trampoline is that FRAME is
2710 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2711 if (arm_frame_is_thumb (frame))
2715 if (target_read_memory (pc, buf, 2) == 0)
2717 struct gdbarch *gdbarch = get_frame_arch (frame);
2718 enum bfd_endian byte_order_for_code
2719 = gdbarch_byte_order_for_code (gdbarch);
2721 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2723 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2726 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2728 /* Clear the LSB so that gdb core sets step-resume
2729 breakpoint at the right address. */
2730 return UNMAKE_THUMB_ADDR (dest);
2738 static struct arm_prologue_cache *
2739 arm_make_stub_cache (struct frame_info *this_frame)
2741 struct arm_prologue_cache *cache;
2743 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2744 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2746 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2751 /* Our frame ID for a stub frame is the current SP and LR. */
2754 arm_stub_this_id (struct frame_info *this_frame,
2756 struct frame_id *this_id)
2758 struct arm_prologue_cache *cache;
2760 if (*this_cache == NULL)
2761 *this_cache = arm_make_stub_cache (this_frame);
2762 cache = (struct arm_prologue_cache *) *this_cache;
2764 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2768 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2769 struct frame_info *this_frame,
2770 void **this_prologue_cache)
2772 CORE_ADDR addr_in_block;
2774 CORE_ADDR pc, start_addr;
2777 addr_in_block = get_frame_address_in_block (this_frame);
2778 pc = get_frame_pc (this_frame);
2779 if (in_plt_section (addr_in_block)
2780 /* We also use the stub unwinder if the target memory is unreadable
2781 to avoid having the prologue unwinder try to read it. */
2782 || target_read_memory (pc, dummy, 4) != 0)
2785 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2786 && arm_skip_bx_reg (this_frame, pc) != 0)
2792 struct frame_unwind arm_stub_unwind = {
2794 default_frame_unwind_stop_reason,
2796 arm_prologue_prev_register,
2798 arm_stub_unwind_sniffer
2801 /* Put here the code to store, into CACHE->saved_regs, the addresses
2802 of the saved registers of frame described by THIS_FRAME. CACHE is
2805 static struct arm_prologue_cache *
2806 arm_m_exception_cache (struct frame_info *this_frame)
2808 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2809 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2810 struct arm_prologue_cache *cache;
2811 CORE_ADDR unwound_sp;
2814 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2815 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2817 unwound_sp = get_frame_register_unsigned (this_frame,
2820 /* The hardware saves eight 32-bit words, comprising xPSR,
2821 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2822 "B1.5.6 Exception entry behavior" in
2823 "ARMv7-M Architecture Reference Manual". */
2824 cache->saved_regs[0].addr = unwound_sp;
2825 cache->saved_regs[1].addr = unwound_sp + 4;
2826 cache->saved_regs[2].addr = unwound_sp + 8;
2827 cache->saved_regs[3].addr = unwound_sp + 12;
2828 cache->saved_regs[12].addr = unwound_sp + 16;
2829 cache->saved_regs[14].addr = unwound_sp + 20;
2830 cache->saved_regs[15].addr = unwound_sp + 24;
2831 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2833 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2834 aligner between the top of the 32-byte stack frame and the
2835 previous context's stack pointer. */
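   /* Illustrative example (addresses assumed): with UNWOUND_SP at
      0x2000ffe0 the hardware-saved frame occupies 0x2000ffe0..0x2000ffff
      and the caller's SP is 0x20010000; if bit 9 of the xPSR saved at
      0x2000fffc is set, an aligner word was inserted and the caller's
      SP was 0x20010004 instead.  */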
2836 cache->prev_sp = unwound_sp + 32;
2837 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2838 && (xpsr & (1 << 9)) != 0)
2839 cache->prev_sp += 4;
2844 /* Implementation of function hook 'this_id' in
2845 'struct frame_unwind'. */
2848 arm_m_exception_this_id (struct frame_info *this_frame,
2850 struct frame_id *this_id)
2852 struct arm_prologue_cache *cache;
2854 if (*this_cache == NULL)
2855 *this_cache = arm_m_exception_cache (this_frame);
2856 cache = (struct arm_prologue_cache *) *this_cache;
2858 /* Our frame ID for an exception frame is built from the unwound SP and the frame's PC. */
2859 *this_id = frame_id_build (cache->prev_sp,
2860 get_frame_pc (this_frame));
2863 /* Implementation of function hook 'prev_register' in
2864 'struct frame_unwind'. */
2866 static struct value *
2867 arm_m_exception_prev_register (struct frame_info *this_frame,
2871 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2872 struct arm_prologue_cache *cache;
2874 if (*this_cache == NULL)
2875 *this_cache = arm_m_exception_cache (this_frame);
2876 cache = (struct arm_prologue_cache *) *this_cache;
2878 /* The value was already reconstructed into PREV_SP. */
2879 if (prev_regnum == ARM_SP_REGNUM)
2880 return frame_unwind_got_constant (this_frame, prev_regnum,
2883 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2887 /* Implementation of function hook 'sniffer' in
2888 'struct frame_unwind'. */
2891 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2892 struct frame_info *this_frame,
2893 void **this_prologue_cache)
2895 CORE_ADDR this_pc = get_frame_pc (this_frame);
2897 /* No need to check is_m; this sniffer is only registered for
2898 M-profile architectures. */
2900 /* Exception frames return to one of these magic PCs. Other values
2901 are not defined as of v7-M. See details in "B1.5.8 Exception
2902 return behavior" in "ARMv7-M Architecture Reference Manual". */
2903 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
2904 || this_pc == 0xfffffffd)
2910 /* Frame unwinder for M-profile exceptions. */
2912 struct frame_unwind arm_m_exception_unwind =
2915 default_frame_unwind_stop_reason,
2916 arm_m_exception_this_id,
2917 arm_m_exception_prev_register,
2919 arm_m_exception_unwind_sniffer
2923 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
2925 struct arm_prologue_cache *cache;
2927 if (*this_cache == NULL)
2928 *this_cache = arm_make_prologue_cache (this_frame);
2929 cache = (struct arm_prologue_cache *) *this_cache;
2931 return cache->prev_sp - cache->framesize;
2934 struct frame_base arm_normal_base = {
2935 &arm_prologue_unwind,
2936 arm_normal_frame_base,
2937 arm_normal_frame_base,
2938 arm_normal_frame_base
2941 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
2942 dummy frame. The frame ID's base needs to match the TOS value
2943 saved by save_dummy_frame_tos() and returned from
2944 arm_push_dummy_call, and the PC needs to match the dummy frame's
2947 static struct frame_id
2948 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
2950 return frame_id_build (get_frame_register_unsigned (this_frame,
2952 get_frame_pc (this_frame));
2955 /* Given THIS_FRAME, find the previous frame's resume PC (which will
2956 be used to construct the previous frame's ID, after looking up the
2957 containing function). */
2960 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
2963 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
2964 return arm_addr_bits_remove (gdbarch, pc);
2968 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
2970 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
2973 static struct value *
2974 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
2977 struct gdbarch * gdbarch = get_frame_arch (this_frame);
2979 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2984 /* The PC is normally copied from the return column, which
2985 describes saves of LR. However, that version may have an
2986 extra bit set to indicate Thumb state. The bit is not
2988 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2989 return frame_unwind_got_constant (this_frame, regnum,
2990 arm_addr_bits_remove (gdbarch, lr));
2993 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
2994 cpsr = get_frame_register_unsigned (this_frame, regnum);
2995 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2996 if (IS_THUMB_ADDR (lr))
3000 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3003 internal_error (__FILE__, __LINE__,
3004 _("Unexpected register %d"), regnum);
3009 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3010 struct dwarf2_frame_state_reg *reg,
3011 struct frame_info *this_frame)
3017 reg->how = DWARF2_FRAME_REG_FN;
3018 reg->loc.fn = arm_dwarf2_prev_register;
3021 reg->how = DWARF2_FRAME_REG_CFA;
3026 /* Implement the stack_frame_destroyed_p gdbarch method. */
3029 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3031 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3032 unsigned int insn, insn2;
3033 int found_return = 0, found_stack_adjust = 0;
3034 CORE_ADDR func_start, func_end;
3038 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3041 /* The epilogue is a sequence of instructions along the following lines:
3043 - add stack frame size to SP or FP
3044 - [if frame pointer used] restore SP from FP
3045 - restore registers from SP [may include PC]
3046 - a return-type instruction [if PC wasn't already restored]
3048 In a first pass, we scan forward from the current PC and verify the
3049 instructions we find as compatible with this sequence, ending in a
3052 However, this is not sufficient to distinguish indirect function calls
3053 within a function from indirect tail calls in the epilogue in some cases.
3054 Therefore, if we didn't already find any SP-changing instruction during
3055 forward scan, we add a backward scanning heuristic to ensure we actually
3056 are in the epilogue. */
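  /* For illustration only (not taken from any particular binary), a
     typical Thumb epilogue accepted by the forward scan looks like:

	add	sp, #16		; deallocate locals (restores SP)
	pop	{r4, r5, pc}	; restore callee-saved regs and return  */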
3059 while (scan_pc < func_end && !found_return)
3061 if (target_read_memory (scan_pc, buf, 2))
3065 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3067 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3069 else if (insn == 0x46f7) /* mov pc, lr */
3071 else if (thumb_instruction_restores_sp (insn))
3073 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3076 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3078 if (target_read_memory (scan_pc, buf, 2))
3082 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3084 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3086 if (insn2 & 0x8000) /* <registers> include PC. */
3089 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3090 && (insn2 & 0x0fff) == 0x0b04)
3092 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3095 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3096 && (insn2 & 0x0e00) == 0x0a00)
3108 /* Since any instruction in the epilogue sequence, with the possible
3109 exception of return itself, updates the stack pointer, we need to
3110 scan backwards for at most one instruction. Try either a 16-bit or
3111 a 32-bit instruction. This is just a heuristic, so we do not worry
3112 too much about false positives. */
3114 if (pc - 4 < func_start)
3116 if (target_read_memory (pc - 4, buf, 4))
3119 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3120 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3122 if (thumb_instruction_restores_sp (insn2))
3123 found_stack_adjust = 1;
3124 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3125 found_stack_adjust = 1;
3126 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3127 && (insn2 & 0x0fff) == 0x0b04)
3128 found_stack_adjust = 1;
3129 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3130 && (insn2 & 0x0e00) == 0x0a00)
3131 found_stack_adjust = 1;
3133 return found_stack_adjust;
3136 /* Implement the stack_frame_destroyed_p gdbarch method. */
3139 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3141 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3144 CORE_ADDR func_start, func_end;
3146 if (arm_pc_is_thumb (gdbarch, pc))
3147 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3149 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3152 /* We are in the epilogue if the previous instruction was a stack
3153 adjustment and the next instruction is a possible return (bx, mov
3154 pc, or pop). We could have to scan backwards to find the stack
3155 adjustment, or forwards to find the return, but this is a decent
3156 approximation. First scan forwards. */
3159 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3160 if (bits (insn, 28, 31) != INST_NV)
3162 if ((insn & 0x0ffffff0) == 0x012fff10)
3165 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3168 else if ((insn & 0x0fff0000) == 0x08bd0000
3169 && (insn & 0x0000c000) != 0)
3170 /* POP (LDMIA), including PC or LR. */
3177 /* Scan backwards. This is just a heuristic, so do not worry about
3178 false positives from mode changes. */
3180 if (pc < func_start + 4)
3183 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3184 if (arm_instruction_restores_sp (insn))
3191 /* When arguments must be pushed onto the stack, they go on in reverse
3192 order. The code below implements a FILO (stack) to do this. */
3197 struct stack_item *prev;
3201 static struct stack_item *
3202 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3204 struct stack_item *si;
3205 si = XNEW (struct stack_item);
3206 si->data = (gdb_byte *) xmalloc (len);
3209 memcpy (si->data, contents, len);
3213 static struct stack_item *
3214 pop_stack_item (struct stack_item *si)
3216 struct stack_item *dead = si;
3224 /* Return the alignment (in bytes) of the given type. */
3227 arm_type_align (struct type *t)
3233 t = check_typedef (t);
3234 switch (TYPE_CODE (t))
3237 /* Should never happen. */
3238 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3242 case TYPE_CODE_ENUM:
3246 case TYPE_CODE_RANGE:
3248 case TYPE_CODE_CHAR:
3249 case TYPE_CODE_BOOL:
3250 return TYPE_LENGTH (t);
3252 case TYPE_CODE_ARRAY:
3253 if (TYPE_VECTOR (t))
3255 /* Use the natural alignment for vector types (just as for scalar
3256 types), but cap the maximum alignment at 64 bits. */
3257 if (TYPE_LENGTH (t) > 8)
3260 return TYPE_LENGTH (t);
3263 return arm_type_align (TYPE_TARGET_TYPE (t));
3264 case TYPE_CODE_COMPLEX:
3265 return arm_type_align (TYPE_TARGET_TYPE (t));
3267 case TYPE_CODE_STRUCT:
3268 case TYPE_CODE_UNION:
3270 for (n = 0; n < TYPE_NFIELDS (t); n++)
3272 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3280 /* Possible base types for a candidate for passing and returning in
3283 enum arm_vfp_cprc_base_type
3292 /* The length of one element of base type B. */
3295 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3299 case VFP_CPRC_SINGLE:
3301 case VFP_CPRC_DOUBLE:
3303 case VFP_CPRC_VEC64:
3305 case VFP_CPRC_VEC128:
3308 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3313 /* The character ('s', 'd' or 'q') for the type of VFP register used
3314 for passing base type B. */
3317 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3321 case VFP_CPRC_SINGLE:
3323 case VFP_CPRC_DOUBLE:
3325 case VFP_CPRC_VEC64:
3327 case VFP_CPRC_VEC128:
3330 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3335 /* Determine whether T may be part of a candidate for passing and
3336 returning in VFP registers, ignoring the limit on the total number
3337 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3338 classification of the first valid component found; if it is not
3339 VFP_CPRC_UNKNOWN, all components must have the same classification
3340 as *BASE_TYPE. If it is found that T contains a type not permitted
3341 for passing and returning in VFP registers, a type differently
3342 classified from *BASE_TYPE, or two types differently classified
3343 from each other, return -1, otherwise return the total number of
3344 base-type elements found (possibly 0 in an empty structure or
3345 array). Vector types are not currently supported, matching the
3346 generic AAPCS support. */
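/* For example (illustrative only): "struct { double x, y; }" yields
   *BASE_TYPE == VFP_CPRC_DOUBLE and a return value of 2, while
   "struct { float f; double d; }" returns -1 because its members
   classify differently.  */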
3349 arm_vfp_cprc_sub_candidate (struct type *t,
3350 enum arm_vfp_cprc_base_type *base_type)
3352 t = check_typedef (t);
3353 switch (TYPE_CODE (t))
3356 switch (TYPE_LENGTH (t))
3359 if (*base_type == VFP_CPRC_UNKNOWN)
3360 *base_type = VFP_CPRC_SINGLE;
3361 else if (*base_type != VFP_CPRC_SINGLE)
3366 if (*base_type == VFP_CPRC_UNKNOWN)
3367 *base_type = VFP_CPRC_DOUBLE;
3368 else if (*base_type != VFP_CPRC_DOUBLE)
3377 case TYPE_CODE_COMPLEX:
3378 /* Arguments of complex T where T is one of the types float or
3379 double get treated as if they are implemented as:
3388 switch (TYPE_LENGTH (t))
3391 if (*base_type == VFP_CPRC_UNKNOWN)
3392 *base_type = VFP_CPRC_SINGLE;
3393 else if (*base_type != VFP_CPRC_SINGLE)
3398 if (*base_type == VFP_CPRC_UNKNOWN)
3399 *base_type = VFP_CPRC_DOUBLE;
3400 else if (*base_type != VFP_CPRC_DOUBLE)
3409 case TYPE_CODE_ARRAY:
3411 if (TYPE_VECTOR (t))
3413 /* 64-bit and 128-bit containerized vector types are VFP
3415 switch (TYPE_LENGTH (t))
3418 if (*base_type == VFP_CPRC_UNKNOWN)
3419 *base_type = VFP_CPRC_VEC64;
3422 if (*base_type == VFP_CPRC_UNKNOWN)
3423 *base_type = VFP_CPRC_VEC128;
3434 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3438 if (TYPE_LENGTH (t) == 0)
3440 gdb_assert (count == 0);
3443 else if (count == 0)
3445 unitlen = arm_vfp_cprc_unit_length (*base_type);
3446 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3447 return TYPE_LENGTH (t) / unitlen;
3452 case TYPE_CODE_STRUCT:
3457 for (i = 0; i < TYPE_NFIELDS (t); i++)
3459 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3461 if (sub_count == -1)
3465 if (TYPE_LENGTH (t) == 0)
3467 gdb_assert (count == 0);
3470 else if (count == 0)
3472 unitlen = arm_vfp_cprc_unit_length (*base_type);
3473 if (TYPE_LENGTH (t) != unitlen * count)
3478 case TYPE_CODE_UNION:
3483 for (i = 0; i < TYPE_NFIELDS (t); i++)
3485 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3487 if (sub_count == -1)
3489 count = (count > sub_count ? count : sub_count);
3491 if (TYPE_LENGTH (t) == 0)
3493 gdb_assert (count == 0);
3496 else if (count == 0)
3498 unitlen = arm_vfp_cprc_unit_length (*base_type);
3499 if (TYPE_LENGTH (t) != unitlen * count)
3511 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3512 if passed to or returned from a non-variadic function with the VFP
3513 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3514 *BASE_TYPE to the base type for T and *COUNT to the number of
3515 elements of that base type before returning. */
3518 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3521 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3522 int c = arm_vfp_cprc_sub_candidate (t, &b);
3523 if (c <= 0 || c > 4)
3530 /* Return 1 if the VFP ABI should be used for passing arguments to and
3531 returning values from a function of type FUNC_TYPE, 0
3535 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3537 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3538 /* Variadic functions always use the base ABI. Assume that functions
3539 without debug info are not variadic. */
3540 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3542 /* The VFP ABI is only supported as a variant of AAPCS. */
3543 if (tdep->arm_abi != ARM_ABI_AAPCS)
3545 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3548 /* We currently only support passing parameters in integer registers, which
3549 conforms with GCC's default model, and VFP argument passing following
3550 the VFP variant of AAPCS. Several other variants exist and
3551 we should probably support some of them based on the selected ABI. */
3554 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3555 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3556 struct value **args, CORE_ADDR sp, int struct_return,
3557 CORE_ADDR struct_addr)
3559 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3563 struct stack_item *si = NULL;
3566 unsigned vfp_regs_free = (1 << 16) - 1;
3568 /* Determine the type of this function and whether the VFP ABI
3570 ftype = check_typedef (value_type (function));
3571 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3572 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3573 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3575 /* Set the return address. For the ARM, the return breakpoint is
3576 always at BP_ADDR. */
3577 if (arm_pc_is_thumb (gdbarch, bp_addr))
3579 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3581 /* Walk through the list of args and determine how large a temporary
3582 stack is required. Need to take care here as structs may be
3583 passed on the stack, and we have to push them. */
3586 argreg = ARM_A1_REGNUM;
3589 /* The struct_return pointer occupies the first parameter
3590 passing register. */
3594 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3595 gdbarch_register_name (gdbarch, argreg),
3596 paddress (gdbarch, struct_addr));
3597 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3601 for (argnum = 0; argnum < nargs; argnum++)
3604 struct type *arg_type;
3605 struct type *target_type;
3606 enum type_code typecode;
3607 const bfd_byte *val;
3609 enum arm_vfp_cprc_base_type vfp_base_type;
3611 int may_use_core_reg = 1;
3613 arg_type = check_typedef (value_type (args[argnum]));
3614 len = TYPE_LENGTH (arg_type);
3615 target_type = TYPE_TARGET_TYPE (arg_type);
3616 typecode = TYPE_CODE (arg_type);
3617 val = value_contents (args[argnum]);
3619 align = arm_type_align (arg_type);
3620 /* Round alignment up to a whole number of words. */
3621 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3622 /* Different ABIs have different maximum alignments. */
3623 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3625 /* The APCS ABI only requires word alignment. */
3626 align = INT_REGISTER_SIZE;
3630 /* The AAPCS requires at most doubleword alignment. */
3631 if (align > INT_REGISTER_SIZE * 2)
3632 align = INT_REGISTER_SIZE * 2;
3636 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3644 /* Because this is a CPRC it cannot go in a core register or
3645 cause a core register to be skipped for alignment.
3646 Either it goes in VFP registers and the rest of this loop
3647 iteration is skipped for this argument, or it goes on the
3648 stack (and the stack alignment code is correct for this
3650 may_use_core_reg = 0;
3652 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3653 shift = unit_length / 4;
3654 mask = (1 << (shift * vfp_base_count)) - 1;
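	  /* Worked example (assumed argument): for a homogeneous
	     aggregate of two doubles, unit_length == 8, shift == 2 and
	     mask == 0xf, so the loop below looks for four consecutive
	     free single-precision slots starting on an even boundary,
	     i.e. a free D register pair.  */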
3655 for (regno = 0; regno < 16; regno += shift)
3656 if (((vfp_regs_free >> regno) & mask) == mask)
3665 vfp_regs_free &= ~(mask << regno);
3666 reg_scaled = regno / shift;
3667 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3668 for (i = 0; i < vfp_base_count; i++)
3672 if (reg_char == 'q')
3673 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3674 val + i * unit_length);
3677 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3678 reg_char, reg_scaled + i);
3679 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3681 regcache_cooked_write (regcache, regnum,
3682 val + i * unit_length);
3689 /* This CPRC could not go in VFP registers, so all VFP
3690 registers are now marked as used. */
3695 /* Push stack padding for doubleword alignment. */
3696 if (nstack & (align - 1))
3698 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3699 nstack += INT_REGISTER_SIZE;
3702 /* Doubleword aligned quantities must go in even register pairs. */
3703 if (may_use_core_reg
3704 && argreg <= ARM_LAST_ARG_REGNUM
3705 && align > INT_REGISTER_SIZE
3709 /* If the argument is a pointer to a function, and it is a
3710 Thumb function, create a LOCAL copy of the value and set
3711 the THUMB bit in it. */
3712 if (TYPE_CODE_PTR == typecode
3713 && target_type != NULL
3714 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3716 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3717 if (arm_pc_is_thumb (gdbarch, regval))
3719 bfd_byte *copy = (bfd_byte *) alloca (len);
3720 store_unsigned_integer (copy, len, byte_order,
3721 MAKE_THUMB_ADDR (regval));
3726 /* Copy the argument to general registers or the stack in
3727 register-sized pieces. Large arguments are split between
3728 registers and stack. */
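      /* For illustration (assumed case): an 8-byte argument arriving
	 when only r3 is still free is split -- its first word goes in
	 r3 and its second word is pushed onto the stack.  */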
3731 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3733 = extract_unsigned_integer (val, partial_len, byte_order);
3735 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3737 /* The argument is being passed in a general purpose
3739 if (byte_order == BFD_ENDIAN_BIG)
3740 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3742 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3744 gdbarch_register_name
3746 phex (regval, INT_REGISTER_SIZE));
3747 regcache_cooked_write_unsigned (regcache, argreg, regval);
3752 gdb_byte buf[INT_REGISTER_SIZE];
3754 memset (buf, 0, sizeof (buf));
3755 store_unsigned_integer (buf, partial_len, byte_order, regval);
3757 /* Push the arguments onto the stack. */
3759 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3761 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3762 nstack += INT_REGISTER_SIZE;
3769 /* If we have an odd number of words to push, then decrement the stack
3770 by one word now, so first stack argument will be dword aligned. */
3777 write_memory (sp, si->data, si->len);
3778 si = pop_stack_item (si);
3781 /* Finally, update the SP register. */
3782 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3788 /* Always align the frame to an 8-byte boundary. This is required on
3789 some platforms and harmless on the rest. */
3792 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3794 /* Align the stack to eight bytes. */
3795 return sp & ~ (CORE_ADDR) 7;
3799 print_fpu_flags (struct ui_file *file, int flags)
3801 if (flags & (1 << 0))
3802 fputs_filtered ("IVO ", file);
3803 if (flags & (1 << 1))
3804 fputs_filtered ("DVZ ", file);
3805 if (flags & (1 << 2))
3806 fputs_filtered ("OFL ", file);
3807 if (flags & (1 << 3))
3808 fputs_filtered ("UFL ", file);
3809 if (flags & (1 << 4))
3810 fputs_filtered ("INX ", file);
3811 fputc_filtered ('\n', file);
3814 /* Print interesting information about the floating point processor
3815 (if present) or emulator. */
3817 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3818 struct frame_info *frame, const char *args)
3820 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3823 type = (status >> 24) & 127;
3824 if (status & (1 << 31))
3825 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3827 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3828 /* i18n: [floating point unit] mask */
3829 fputs_filtered (_("mask: "), file);
3830 print_fpu_flags (file, status >> 16);
3831 /* i18n: [floating point unit] flags */
3832 fputs_filtered (_("flags: "), file);
3833 print_fpu_flags (file, status);
3836 /* Construct the ARM extended floating point type. */
3837 static struct type *
3838 arm_ext_type (struct gdbarch *gdbarch)
3840 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3842 if (!tdep->arm_ext_type)
3844 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3845 floatformats_arm_ext);
3847 return tdep->arm_ext_type;
3850 static struct type *
3851 arm_neon_double_type (struct gdbarch *gdbarch)
3853 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3855 if (tdep->neon_double_type == NULL)
3857 struct type *t, *elem;
3859 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3861 elem = builtin_type (gdbarch)->builtin_uint8;
3862 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3863 elem = builtin_type (gdbarch)->builtin_uint16;
3864 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3865 elem = builtin_type (gdbarch)->builtin_uint32;
3866 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3867 elem = builtin_type (gdbarch)->builtin_uint64;
3868 append_composite_type_field (t, "u64", elem);
3869 elem = builtin_type (gdbarch)->builtin_float;
3870 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3871 elem = builtin_type (gdbarch)->builtin_double;
3872 append_composite_type_field (t, "f64", elem);
3874 TYPE_VECTOR (t) = 1;
3875 TYPE_NAME (t) = "neon_d";
3876 tdep->neon_double_type = t;
3879 return tdep->neon_double_type;
3882 /* FIXME: The vector types are not correctly ordered on big-endian
3883 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3884 bits of d0 - regardless of what unit size is being held in d0. So
3885 the offset of the first uint8 in d0 is 7, but the offset of the
3886 first float is 4. This code works as-is for little-endian
3889 static struct type *
3890 arm_neon_quad_type (struct gdbarch *gdbarch)
3892 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3894 if (tdep->neon_quad_type == NULL)
3896 struct type *t, *elem;
3898 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
3900 elem = builtin_type (gdbarch)->builtin_uint8;
3901 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
3902 elem = builtin_type (gdbarch)->builtin_uint16;
3903 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
3904 elem = builtin_type (gdbarch)->builtin_uint32;
3905 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
3906 elem = builtin_type (gdbarch)->builtin_uint64;
3907 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
3908 elem = builtin_type (gdbarch)->builtin_float;
3909 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
3910 elem = builtin_type (gdbarch)->builtin_double;
3911 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
3913 TYPE_VECTOR (t) = 1;
3914 TYPE_NAME (t) = "neon_q";
3915 tdep->neon_quad_type = t;
3918 return tdep->neon_quad_type;
3921 /* Return the GDB type object for the "standard" data type of data in
3924 static struct type *
3925 arm_register_type (struct gdbarch *gdbarch, int regnum)
3927 int num_regs = gdbarch_num_regs (gdbarch);
3929 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
3930 && regnum >= num_regs && regnum < num_regs + 32)
3931 return builtin_type (gdbarch)->builtin_float;
3933 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
3934 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
3935 return arm_neon_quad_type (gdbarch);
3937 /* If the target description has register information, we are only
3938 in this function so that we can override the types of
3939 double-precision registers for NEON. */
3940 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
3942 struct type *t = tdesc_register_type (gdbarch, regnum);
3944 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
3945 && TYPE_CODE (t) == TYPE_CODE_FLT
3946 && gdbarch_tdep (gdbarch)->have_neon)
3947 return arm_neon_double_type (gdbarch);
3952 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
3954 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
3955 return builtin_type (gdbarch)->builtin_void;
3957 return arm_ext_type (gdbarch);
3959 else if (regnum == ARM_SP_REGNUM)
3960 return builtin_type (gdbarch)->builtin_data_ptr;
3961 else if (regnum == ARM_PC_REGNUM)
3962 return builtin_type (gdbarch)->builtin_func_ptr;
3963 else if (regnum >= ARRAY_SIZE (arm_register_names))
3964 /* These registers are only supported on targets which supply
3965 an XML description. */
3966 return builtin_type (gdbarch)->builtin_int0;
3968 return builtin_type (gdbarch)->builtin_uint32;
3971 /* Map a DWARF register REGNUM onto the appropriate GDB register
3975 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
3977 /* Core integer regs. */
3978 if (reg >= 0 && reg <= 15)
3981 /* Legacy FPA encoding. These were once used in a way which
3982 overlapped with VFP register numbering, so their use is
3983 discouraged, but GDB doesn't support the ARM toolchain
3984 which used them for VFP. */
3985 if (reg >= 16 && reg <= 23)
3986 return ARM_F0_REGNUM + reg - 16;
3988 /* New assignments for the FPA registers. */
3989 if (reg >= 96 && reg <= 103)
3990 return ARM_F0_REGNUM + reg - 96;
3992 /* WMMX register assignments. */
3993 if (reg >= 104 && reg <= 111)
3994 return ARM_WCGR0_REGNUM + reg - 104;
3996 if (reg >= 112 && reg <= 127)
3997 return ARM_WR0_REGNUM + reg - 112;
3999 if (reg >= 192 && reg <= 199)
4000 return ARM_WC0_REGNUM + reg - 192;
4002 /* VFP v2 registers. A double precision value is actually
4003 in d1 rather than s2, but the ABI only defines numbering
4004 for the single precision registers. This will "just work"
4005 in GDB for little endian targets (we'll read eight bytes,
4006 starting in s0 and then progressing to s1), but will be
4007 reversed on big endian targets with VFP. This won't
4008 be a problem for the new Neon quad registers; you're supposed
4009 to use DW_OP_piece for those. */
4010 if (reg >= 64 && reg <= 95)
4014 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4015 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4019 /* VFP v3 / Neon registers. This range is also used for VFP v2
4020 registers, except that it now describes d0 instead of s0. */
4021 if (reg >= 256 && reg <= 287)
4025 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4026 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4033 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4035 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4038 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4040 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4041 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4043 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4044 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4046 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4047 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4049 if (reg < NUM_GREGS)
4050 return SIM_ARM_R0_REGNUM + reg;
4053 if (reg < NUM_FREGS)
4054 return SIM_ARM_FP0_REGNUM + reg;
4057 if (reg < NUM_SREGS)
4058 return SIM_ARM_FPS_REGNUM + reg;
4061 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4064 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4065 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4066 It is thought that this is the floating-point register format on
4067 little-endian systems. */
4070 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4071 void *dbl, int endianess)
4075 if (endianess == BFD_ENDIAN_BIG)
4076 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4078 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4080 floatformat_from_doublest (fmt, &d, dbl);
4084 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4089 floatformat_to_doublest (fmt, ptr, &d);
4090 if (endianess == BFD_ENDIAN_BIG)
4091 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4093 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4097 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4098 of the appropriate mode (as encoded in the PC value), even if this
4099 differs from what would be expected according to the symbol tables. */
4102 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4103 struct address_space *aspace,
4106 struct cleanup *old_chain
4107 = make_cleanup_restore_integer (&arm_override_mode);
4109 arm_override_mode = IS_THUMB_ADDR (pc);
4110 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4112 insert_single_step_breakpoint (gdbarch, aspace, pc);
4114 do_cleanups (old_chain);
4117 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4118 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4119 NULL if an error occurs. BUF is freed. */
4122 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4123 int old_len, int new_len)
4126 int bytes_to_read = new_len - old_len;
4128 new_buf = (gdb_byte *) xmalloc (new_len);
4129 memcpy (new_buf + bytes_to_read, buf, old_len);
4131 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4139 /* An IT block is at most the 2-byte IT instruction followed by
4140 four 4-byte instructions. The furthest back we must search to
4141 find an IT block that affects the current instruction is thus
4142 2 + 3 * 4 == 14 bytes. */
4143 #define MAX_IT_BLOCK_PREFIX 14
4145 /* Use a quick scan if there are more than this many bytes of
4147 #define IT_SCAN_THRESHOLD 32
4149 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4150 A breakpoint in an IT block may not be hit, depending on the
4153 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4157 CORE_ADDR boundary, func_start;
4159 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4160 int i, any, last_it, last_it_count;
4162 /* If we are using BKPT breakpoints, none of this is necessary. */
4163 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4166 /* ARM mode does not have this problem. */
4167 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4170 /* We are setting a breakpoint in Thumb code that could potentially
4171 contain an IT block. The first step is to find how much Thumb
4172 code there is; we do not need to read outside of known Thumb
4174 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4176 /* Thumb-2 code must have mapping symbols to have a chance. */
4179 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4181 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4182 && func_start > boundary)
4183 boundary = func_start;
4185 /* Search for a candidate IT instruction. We have to do some fancy
4186 footwork to distinguish a real IT instruction from the second
4187 half of a 32-bit instruction, but there is no need for that if
4188 there's no candidate. */
4189 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
4191 /* No room for an IT instruction. */
4194 buf = (gdb_byte *) xmalloc (buf_len);
4195 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4198 for (i = 0; i < buf_len; i += 2)
4200 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4201 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4214 /* OK, the code bytes before this instruction contain at least one
4215 halfword which resembles an IT instruction. We know that it's
4216 Thumb code, but there are still two possibilities. Either the
4217 halfword really is an IT instruction, or it is the second half of
4218 a 32-bit Thumb instruction. The only way we can tell is to
4219 scan forwards from a known instruction boundary. */
4220 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4224 /* There's a lot of code before this instruction. Start with an
4225 optimistic search; it's easy to recognize halfwords that can
4226 not be the start of a 32-bit instruction, and use that to
4227 lock on to the instruction boundaries. */
4228 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4231 buf_len = IT_SCAN_THRESHOLD;
4234 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4236 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4237 if (thumb_insn_size (inst1) == 2)
4244 /* At this point, if DEFINITE, BUF[I] is the first place we
4245 are sure that we know the instruction boundaries, and it is far
4246 enough from BPADDR that we could not miss an IT instruction
4247 affecting BPADDR. If ! DEFINITE, give up - start from a
4251 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4255 buf_len = bpaddr - boundary;
4261 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4264 buf_len = bpaddr - boundary;
4268 /* Scan forwards. Find the last IT instruction before BPADDR. */
4273 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4275 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4280 else if (inst1 & 0x0002)
4282 else if (inst1 & 0x0004)
4287 i += thumb_insn_size (inst1);
4293 /* There wasn't really an IT instruction after all. */
4296 if (last_it_count < 1)
4297 /* It was too far away. */
4300 /* This really is a trouble spot. Move the breakpoint to the IT
4302 return bpaddr - buf_len + last_it;
4305 /* ARM displaced stepping support.
4307 Generally ARM displaced stepping works as follows:
4309 1. When an instruction is to be single-stepped, it is first decoded by
4310 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
4311 Depending on the type of instruction, it is then copied to a scratch
4312 location, possibly in a modified form. The copy_* set of functions
4313 performs such modification, as necessary. A breakpoint is placed after
4314 the modified instruction in the scratch space to return control to GDB.
4315 Note in particular that instructions which modify the PC will no longer
4316 do so after modification.
4318 2. The instruction is single-stepped, by setting the PC to the scratch
4319 location address, and resuming. Control returns to GDB when the
4322 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4323 function used for the current instruction. This function's job is to
4324 put the CPU/memory state back to what it would have been if the
4325 instruction had been executed unmodified in its original location. */
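/* As a rough illustration of the three phases above: to step an ARM
   "ldr pc, [r0]" at address FROM, the copy_* routine places a modified
   load that targets a scratch register rather than the PC into the
   scratch area, the inferior executes it there and hits the trailing
   breakpoint, and the cleanup_* routine then writes the loaded value
   into the real PC (see load_write_pc below) and restores whatever
   registers were borrowed as temporaries. */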
4327 /* NOP instruction (mov r0, r0). */
4328 #define ARM_NOP 0xe1a00000
4329 #define THUMB_NOP 0x4600
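/* 0x4600 is the 16-bit Thumb encoding of "mov r0, r0", used as a NOP in
   the same way as ARM_NOP above. */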
4331 /* Helper for register reads for displaced stepping. In particular, this
4332 returns the PC as it would be seen by the instruction at its original
4336 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4340 CORE_ADDR from = dsc->insn_addr;
4342 if (regno == ARM_PC_REGNUM)
4344 /* Compute pipeline offset:
4345 - When executing an ARM instruction, PC reads as the address of the
4346 current instruction plus 8.
4347 - When executing a Thumb instruction, PC reads as the address of the
4348 current instruction plus 4. */
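/* For example, for an ARM instruction whose original address is 0x8000,
   the value returned here is 0x8008, even though the copy actually
   executes from the scratch area. */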
4355 if (debug_displaced)
4356 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4357 (unsigned long) from);
4358 return (ULONGEST) from;
4362 regcache_cooked_read_unsigned (regs, regno, &ret);
4363 if (debug_displaced)
4364 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4365 regno, (unsigned long) ret);
4371 displaced_in_arm_mode (struct regcache *regs)
4374 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4376 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4378 return (ps & t_bit) == 0;
4381 /* Write to the PC as if from a branch instruction. */
4384 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4388 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4389 architecture versions < 6. */
4390 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4391 val & ~(ULONGEST) 0x3);
4393 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4394 val & ~(ULONGEST) 0x1);
4397 /* Write to the PC as if from a branch-exchange instruction. */
4400 bx_write_pc (struct regcache *regs, ULONGEST val)
4403 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4405 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4409 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4410 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4412 else if ((val & 2) == 0)
4414 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4415 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4419 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4420 mode, align dest to 4 bytes). */
4421 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4422 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4423 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4427 /* Write to the PC as if from a load instruction. */
4430 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4433 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4434 bx_write_pc (regs, val);
4436 branch_write_pc (regs, dsc, val);
4439 /* Write to the PC as if from an ALU instruction. */
4442 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4445 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4446 bx_write_pc (regs, val);
4448 branch_write_pc (regs, dsc, val);
4451 /* Helper for writing to registers for displaced stepping. Writing to the PC
4452 has varying effects depending on the instruction which does the write:
4453 this is controlled by the WRITE_PC argument. */
4456 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4457 int regno, ULONGEST val, enum pc_write_style write_pc)
4459 if (regno == ARM_PC_REGNUM)
4461 if (debug_displaced)
4462 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4463 (unsigned long) val);
4466 case BRANCH_WRITE_PC:
4467 branch_write_pc (regs, dsc, val);
4471 bx_write_pc (regs, val);
4475 load_write_pc (regs, dsc, val);
4479 alu_write_pc (regs, dsc, val);
4482 case CANNOT_WRITE_PC:
4483 warning (_("Instruction wrote to PC in an unexpected way when "
4484 "single-stepping"));
4488 internal_error (__FILE__, __LINE__,
4489 _("Invalid argument to displaced_write_reg"));
4492 dsc->wrote_to_pc = 1;
4496 if (debug_displaced)
4497 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4498 regno, (unsigned long) val);
4499 regcache_cooked_write_unsigned (regs, regno, val);
4503 /* This function is used to concisely determine if an instruction INSN
4504 references PC. Register fields of interest in INSN should have the
4505 corresponding fields of BITMASK set to 0b1111. The function
4506 returns 1 if any of these fields in INSN reference the PC
4507 (also 0b1111, r15), else it returns 0. */
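/* For example, an ARM preload instruction keeps its base register Rn in
   bits 16-19, so insn_references_pc (insn, 0x000f0000ul) is non-zero
   exactly when Rn is r15; see arm_copy_preload below. */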
4510 insn_references_pc (uint32_t insn, uint32_t bitmask)
4512 uint32_t lowbit = 1;
4514 while (bitmask != 0)
4518 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4524 mask = lowbit * 0xf;
4526 if ((insn & mask) == mask)
4535 /* The simplest copy function. Many instructions have the same effect no
4536 matter what address they are executed at: in those cases, use this. */
4539 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4540 const char *iname, struct displaced_step_closure *dsc)
4542 if (debug_displaced)
4543 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4544 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4547 dsc->modinsn[0] = insn;
4553 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4554 uint16_t insn2, const char *iname,
4555 struct displaced_step_closure *dsc)
4557 if (debug_displaced)
4558 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4559 "opcode/class '%s' unmodified\n", insn1, insn2,
4562 dsc->modinsn[0] = insn1;
4563 dsc->modinsn[1] = insn2;
4569 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4572 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
4574 struct displaced_step_closure *dsc)
4576 if (debug_displaced)
4577 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4578 "opcode/class '%s' unmodified\n", insn,
4581 dsc->modinsn[0] = insn;
4586 /* Preload instructions with immediate offset. */
4589 cleanup_preload (struct gdbarch *gdbarch,
4590 struct regcache *regs, struct displaced_step_closure *dsc)
4592 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4593 if (!dsc->u.preload.immed)
4594 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4598 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4599 struct displaced_step_closure *dsc, unsigned int rn)
4602 /* Preload instructions:
4604 {pli/pld} [rn, #+/-imm]
4606 {pli/pld} [r0, #+/-imm]. */
4608 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4609 rn_val = displaced_read_reg (regs, dsc, rn);
4610 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4611 dsc->u.preload.immed = 1;
4613 dsc->cleanup = &cleanup_preload;
4617 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4618 struct displaced_step_closure *dsc)
4620 unsigned int rn = bits (insn, 16, 19);
4622 if (!insn_references_pc (insn, 0x000f0000ul))
4623 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4625 if (debug_displaced)
4626 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4627 (unsigned long) insn);
4629 dsc->modinsn[0] = insn & 0xfff0ffff;
4631 install_preload (gdbarch, regs, dsc, rn);
4637 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4638 struct regcache *regs, struct displaced_step_closure *dsc)
4640 unsigned int rn = bits (insn1, 0, 3);
4641 unsigned int u_bit = bit (insn1, 7);
4642 int imm12 = bits (insn2, 0, 11);
4645 if (rn != ARM_PC_REGNUM)
4646 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4648 /* The PC is only allowed to be used in PLI (immediate, literal) Encoding T3
4649 and PLD (literal) Encoding T1. */
4650 if (debug_displaced)
4651 fprintf_unfiltered (gdb_stdlog,
4652 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4653 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4659 /* Rewrite instruction {pli/pld} PC imm12 into:
4660 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4664 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4666 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4667 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4669 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4671 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4672 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4673 dsc->u.preload.immed = 0;
4675 /* {pli/pld} [r0, r1] */
4676 dsc->modinsn[0] = insn1 & 0xfff0;
4677 dsc->modinsn[1] = 0xf001;
4680 dsc->cleanup = &cleanup_preload;
4684 /* Preload instructions with register offset. */
4687 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4688 struct displaced_step_closure *dsc, unsigned int rn,
4691 ULONGEST rn_val, rm_val;
4693 /* Preload register-offset instructions:
4695 {pli/pld} [rn, rm {, shift}]
4697 {pli/pld} [r0, r1 {, shift}]. */
4699 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4700 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4701 rn_val = displaced_read_reg (regs, dsc, rn);
4702 rm_val = displaced_read_reg (regs, dsc, rm);
4703 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4704 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4705 dsc->u.preload.immed = 0;
4707 dsc->cleanup = &cleanup_preload;
4711 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4712 struct regcache *regs,
4713 struct displaced_step_closure *dsc)
4715 unsigned int rn = bits (insn, 16, 19);
4716 unsigned int rm = bits (insn, 0, 3);
4719 if (!insn_references_pc (insn, 0x000f000ful))
4720 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4722 if (debug_displaced)
4723 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4724 (unsigned long) insn);
4726 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4728 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4732 /* Copy/cleanup coprocessor load and store instructions. */
4735 cleanup_copro_load_store (struct gdbarch *gdbarch,
4736 struct regcache *regs,
4737 struct displaced_step_closure *dsc)
4739 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4741 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4743 if (dsc->u.ldst.writeback)
4744 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4748 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4749 struct displaced_step_closure *dsc,
4750 int writeback, unsigned int rn)
4754 /* Coprocessor load/store instructions:
4756 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4758 {stc/stc2} [r0, #+/-imm].
4760 ldc/ldc2 are handled identically. */
4762 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4763 rn_val = displaced_read_reg (regs, dsc, rn);
4764 /* PC should be 4-byte aligned. */
4765 rn_val = rn_val & 0xfffffffc;
4766 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4768 dsc->u.ldst.writeback = writeback;
4769 dsc->u.ldst.rn = rn;
4771 dsc->cleanup = &cleanup_copro_load_store;
4775 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4776 struct regcache *regs,
4777 struct displaced_step_closure *dsc)
4779 unsigned int rn = bits (insn, 16, 19);
4781 if (!insn_references_pc (insn, 0x000f0000ul))
4782 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4784 if (debug_displaced)
4785 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4786 "load/store insn %.8lx\n", (unsigned long) insn);
4788 dsc->modinsn[0] = insn & 0xfff0ffff;
4790 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4796 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4797 uint16_t insn2, struct regcache *regs,
4798 struct displaced_step_closure *dsc)
4800 unsigned int rn = bits (insn1, 0, 3);
4802 if (rn != ARM_PC_REGNUM)
4803 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4804 "copro load/store", dsc);
4806 if (debug_displaced)
4807 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4808 "load/store insn %.4x%.4x\n", insn1, insn2);
4810 dsc->modinsn[0] = insn1 & 0xfff0;
4811 dsc->modinsn[1] = insn2;
4814 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4815 do not support writeback, so pass 0. */
4816 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4821 /* Clean up branch instructions (actually perform the branch, by setting
4825 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4826 struct displaced_step_closure *dsc)
4828 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4829 int branch_taken = condition_true (dsc->u.branch.cond, status);
4830 enum pc_write_style write_pc = dsc->u.branch.exchange
4831 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4836 if (dsc->u.branch.link)
4838 /* The value of LR should be the address of the next instruction. In
4839 order not to confuse the logic handling a later `bx lr' insn, if the
4840 current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
4841 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4844 next_insn_addr |= 0x1;
4846 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4850 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4853 /* Copy B/BL/BLX instructions with immediate destinations. */
4856 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4857 struct displaced_step_closure *dsc,
4858 unsigned int cond, int exchange, int link, long offset)
4860 /* Implement "BL<cond> <label>" as:
4862 Preparation: cond <- instruction condition
4863 Insn: mov r0, r0 (nop)
4864 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4866 B<cond> similar, but don't set r14 in cleanup. */
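/* For illustration: stepping "blne 0x8100" located at 0x8000 copies a
   NOP to the scratch area; the cleanup then tests the NE condition
   against the saved flags and, if it holds, sets r14 to 0x8004 (the
   address of the following instruction) and the PC to the branch
   target. */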
4868 dsc->u.branch.cond = cond;
4869 dsc->u.branch.link = link;
4870 dsc->u.branch.exchange = exchange;
4872 dsc->u.branch.dest = dsc->insn_addr;
4873 if (link && exchange)
4874 /* For BLX, offset is computed from the Align (PC, 4). */
4875 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4878 dsc->u.branch.dest += 4 + offset;
4880 dsc->u.branch.dest += 8 + offset;
4882 dsc->cleanup = &cleanup_branch;
4885 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4886 struct regcache *regs, struct displaced_step_closure *dsc)
4888 unsigned int cond = bits (insn, 28, 31);
4889 int exchange = (cond == 0xf);
4890 int link = exchange || bit (insn, 24);
4893 if (debug_displaced)
4894 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4895 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4896 (unsigned long) insn);
4898 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4899 then arrange the switch into Thumb mode. */
4900 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
4902 offset = bits (insn, 0, 23) << 2;
4904 if (bit (offset, 25))
4905 offset = offset | ~0x3ffffff;
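/* OFFSET is a 26-bit byte offset (imm24 shifted left by two), so bit 25
   is its sign bit; the test above sign-extends negative offsets. */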
4907 dsc->modinsn[0] = ARM_NOP;
4909 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4914 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
4915 uint16_t insn2, struct regcache *regs,
4916 struct displaced_step_closure *dsc)
4918 int link = bit (insn2, 14);
4919 int exchange = link && !bit (insn2, 12);
4922 int j1 = bit (insn2, 13);
4923 int j2 = bit (insn2, 11);
4924 int s = sbits (insn1, 10, 10);
4925 int i1 = !(j1 ^ bit (insn1, 10));
4926 int i2 = !(j2 ^ bit (insn1, 10));
4928 if (!link && !exchange) /* B */
4930 offset = (bits (insn2, 0, 10) << 1);
4931 if (bit (insn2, 12)) /* Encoding T4 */
4933 offset |= (bits (insn1, 0, 9) << 12)
4939 else /* Encoding T3 */
4941 offset |= (bits (insn1, 0, 5) << 12)
4945 cond = bits (insn1, 6, 9);
4950 offset = (bits (insn1, 0, 9) << 12);
4951 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
4952 offset |= exchange ?
4953 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
4956 if (debug_displaced)
4957 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
4958 "%.4x %.4x with offset %.8lx\n",
4959 link ? (exchange) ? "blx" : "bl" : "b",
4960 insn1, insn2, offset);
4962 dsc->modinsn[0] = THUMB_NOP;
4964 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
4968 /* Copy B Thumb instructions. */
4970 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
4971 struct displaced_step_closure *dsc)
4973 unsigned int cond = 0;
4975 unsigned short bit_12_15 = bits (insn, 12, 15);
4976 CORE_ADDR from = dsc->insn_addr;
4978 if (bit_12_15 == 0xd)
4980 /* offset = SignExtend (imm8:0, 32) */
4981 offset = sbits ((insn << 1), 0, 8);
4982 cond = bits (insn, 8, 11);
4984 else if (bit_12_15 == 0xe) /* Encoding T2 */
4986 offset = sbits ((insn << 1), 0, 11);
4990 if (debug_displaced)
4991 fprintf_unfiltered (gdb_stdlog,
4992 "displaced: copying b immediate insn %.4x "
4993 "with offset %d\n", insn, offset);
4995 dsc->u.branch.cond = cond;
4996 dsc->u.branch.link = 0;
4997 dsc->u.branch.exchange = 0;
4998 dsc->u.branch.dest = from + 4 + offset;
5000 dsc->modinsn[0] = THUMB_NOP;
5002 dsc->cleanup = &cleanup_branch;
5007 /* Copy BX/BLX with register-specified destinations. */
5010 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5011 struct displaced_step_closure *dsc, int link,
5012 unsigned int cond, unsigned int rm)
5014 /* Implement "{BX,BLX}<cond> <reg>" as:
5016 Preparation: cond <- instruction condition
5017 Insn: mov r0, r0 (nop)
5018 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5020 Don't set r14 in cleanup for BX. */
5022 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5024 dsc->u.branch.cond = cond;
5025 dsc->u.branch.link = link;
5027 dsc->u.branch.exchange = 1;
5029 dsc->cleanup = &cleanup_branch;
5033 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5034 struct regcache *regs, struct displaced_step_closure *dsc)
5036 unsigned int cond = bits (insn, 28, 31);
5039 int link = bit (insn, 5);
5040 unsigned int rm = bits (insn, 0, 3);
5042 if (debug_displaced)
5043 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5044 (unsigned long) insn);
5046 dsc->modinsn[0] = ARM_NOP;
5048 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5053 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5054 struct regcache *regs,
5055 struct displaced_step_closure *dsc)
5057 int link = bit (insn, 7);
5058 unsigned int rm = bits (insn, 3, 6);
5060 if (debug_displaced)
5061 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5062 (unsigned short) insn);
5064 dsc->modinsn[0] = THUMB_NOP;
5066 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5072 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5075 cleanup_alu_imm (struct gdbarch *gdbarch,
5076 struct regcache *regs, struct displaced_step_closure *dsc)
5078 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5079 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5080 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5081 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5085 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5086 struct displaced_step_closure *dsc)
5088 unsigned int rn = bits (insn, 16, 19);
5089 unsigned int rd = bits (insn, 12, 15);
5090 unsigned int op = bits (insn, 21, 24);
5091 int is_mov = (op == 0xd);
5092 ULONGEST rd_val, rn_val;
5094 if (!insn_references_pc (insn, 0x000ff000ul))
5095 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5097 if (debug_displaced)
5098 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5099 "%.8lx\n", is_mov ? "move" : "ALU",
5100 (unsigned long) insn);
5102 /* Instruction is of form:
5104 <op><cond> rd, [rn,] #imm
5108 Preparation: tmp1, tmp2 <- r0, r1;
5110 Insn: <op><cond> r0, r1, #imm
5111 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5114 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5115 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5116 rn_val = displaced_read_reg (regs, dsc, rn);
5117 rd_val = displaced_read_reg (regs, dsc, rd);
5118 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5119 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5123 dsc->modinsn[0] = insn & 0xfff00fff;
5125 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5127 dsc->cleanup = &cleanup_alu_imm;
5133 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5134 uint16_t insn2, struct regcache *regs,
5135 struct displaced_step_closure *dsc)
5137 unsigned int op = bits (insn1, 5, 8);
5138 unsigned int rn, rm, rd;
5139 ULONGEST rd_val, rn_val;
5141 rn = bits (insn1, 0, 3); /* Rn */
5142 rm = bits (insn2, 0, 3); /* Rm */
5143 rd = bits (insn2, 8, 11); /* Rd */
5145 /* This routine is only called for the MOV instruction. */
5146 gdb_assert (op == 0x2 && rn == 0xf);
5148 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5149 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5151 if (debug_displaced)
5152 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5153 "ALU", insn1, insn2);
5155 /* Instruction is of form:
5157 <op><cond> rd, [rn,] #imm
5161 Preparation: tmp1, tmp2 <- r0, r1;
5163 Insn: <op><cond> r0, r1, #imm
5164 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5167 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5168 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5169 rn_val = displaced_read_reg (regs, dsc, rn);
5170 rd_val = displaced_read_reg (regs, dsc, rd);
5171 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5172 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5175 dsc->modinsn[0] = insn1;
5176 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5179 dsc->cleanup = &cleanup_alu_imm;
5184 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5187 cleanup_alu_reg (struct gdbarch *gdbarch,
5188 struct regcache *regs, struct displaced_step_closure *dsc)
5193 rd_val = displaced_read_reg (regs, dsc, 0);
5195 for (i = 0; i < 3; i++)
5196 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5198 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5202 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5203 struct displaced_step_closure *dsc,
5204 unsigned int rd, unsigned int rn, unsigned int rm)
5206 ULONGEST rd_val, rn_val, rm_val;
5208 /* Instruction is of form:
5210 <op><cond> rd, [rn,] rm [, <shift>]
5214 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5215 r0, r1, r2 <- rd, rn, rm
5216 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5217 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5220 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5221 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5222 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5223 rd_val = displaced_read_reg (regs, dsc, rd);
5224 rn_val = displaced_read_reg (regs, dsc, rn);
5225 rm_val = displaced_read_reg (regs, dsc, rm);
5226 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5227 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5228 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5231 dsc->cleanup = &cleanup_alu_reg;
5235 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5236 struct displaced_step_closure *dsc)
5238 unsigned int op = bits (insn, 21, 24);
5239 int is_mov = (op == 0xd);
5241 if (!insn_references_pc (insn, 0x000ff00ful))
5242 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5244 if (debug_displaced)
5245 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5246 is_mov ? "move" : "ALU", (unsigned long) insn);
5249 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5251 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5253 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5259 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5260 struct regcache *regs,
5261 struct displaced_step_closure *dsc)
5265 rm = bits (insn, 3, 6);
5266 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5268 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5269 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5271 if (debug_displaced)
5272 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5273 (unsigned short) insn);
5275 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5277 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5282 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5285 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5286 struct regcache *regs,
5287 struct displaced_step_closure *dsc)
5289 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5292 for (i = 0; i < 4; i++)
5293 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5295 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5299 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5300 struct displaced_step_closure *dsc,
5301 unsigned int rd, unsigned int rn, unsigned int rm,
5305 ULONGEST rd_val, rn_val, rm_val, rs_val;
5307 /* Instruction is of form:
5309 <op><cond> rd, [rn,] rm, <shift> rs
5313 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5314 r0, r1, r2, r3 <- rd, rn, rm, rs
5315 Insn: <op><cond> r0, r1, r2, <shift> r3
5317 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5321 for (i = 0; i < 4; i++)
5322 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5324 rd_val = displaced_read_reg (regs, dsc, rd);
5325 rn_val = displaced_read_reg (regs, dsc, rn);
5326 rm_val = displaced_read_reg (regs, dsc, rm);
5327 rs_val = displaced_read_reg (regs, dsc, rs);
5328 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5329 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5330 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5331 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5333 dsc->cleanup = &cleanup_alu_shifted_reg;
5337 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5338 struct regcache *regs,
5339 struct displaced_step_closure *dsc)
5341 unsigned int op = bits (insn, 21, 24);
5342 int is_mov = (op == 0xd);
5343 unsigned int rd, rn, rm, rs;
5345 if (!insn_references_pc (insn, 0x000fff0ful))
5346 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5348 if (debug_displaced)
5349 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5350 "%.8lx\n", is_mov ? "move" : "ALU",
5351 (unsigned long) insn);
5353 rn = bits (insn, 16, 19);
5354 rm = bits (insn, 0, 3);
5355 rs = bits (insn, 8, 11);
5356 rd = bits (insn, 12, 15);
5359 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5361 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5363 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5368 /* Clean up load instructions. */
5371 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5372 struct displaced_step_closure *dsc)
5374 ULONGEST rt_val, rt_val2 = 0, rn_val;
5376 rt_val = displaced_read_reg (regs, dsc, 0);
5377 if (dsc->u.ldst.xfersize == 8)
5378 rt_val2 = displaced_read_reg (regs, dsc, 1);
5379 rn_val = displaced_read_reg (regs, dsc, 2);
5381 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5382 if (dsc->u.ldst.xfersize > 4)
5383 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5384 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5385 if (!dsc->u.ldst.immed)
5386 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5388 /* Handle register writeback. */
5389 if (dsc->u.ldst.writeback)
5390 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5391 /* Put result in right place. */
5392 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5393 if (dsc->u.ldst.xfersize == 8)
5394 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5397 /* Clean up store instructions. */
5400 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5401 struct displaced_step_closure *dsc)
5403 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5405 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5406 if (dsc->u.ldst.xfersize > 4)
5407 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5408 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5409 if (!dsc->u.ldst.immed)
5410 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5411 if (!dsc->u.ldst.restore_r4)
5412 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5415 if (dsc->u.ldst.writeback)
5416 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5419 /* Copy "extra" load/store instructions. These are halfword/doubleword
5420 transfers, which have a different encoding to byte/word transfers. */
5423 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5424 struct regcache *regs, struct displaced_step_closure *dsc)
5426 unsigned int op1 = bits (insn, 20, 24);
5427 unsigned int op2 = bits (insn, 5, 6);
5428 unsigned int rt = bits (insn, 12, 15);
5429 unsigned int rn = bits (insn, 16, 19);
5430 unsigned int rm = bits (insn, 0, 3);
5431 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5432 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5433 int immed = (op1 & 0x4) != 0;
5435 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5437 if (!insn_references_pc (insn, 0x000ff00ful))
5438 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5440 if (debug_displaced)
5441 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5442 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5443 (unsigned long) insn);
5445 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5448 internal_error (__FILE__, __LINE__,
5449 _("copy_extra_ld_st: instruction decode error"));
5451 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5452 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5453 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5455 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5457 rt_val = displaced_read_reg (regs, dsc, rt);
5458 if (bytesize[opcode] == 8)
5459 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5460 rn_val = displaced_read_reg (regs, dsc, rn);
5462 rm_val = displaced_read_reg (regs, dsc, rm);
5464 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5465 if (bytesize[opcode] == 8)
5466 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5467 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5469 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5472 dsc->u.ldst.xfersize = bytesize[opcode];
5473 dsc->u.ldst.rn = rn;
5474 dsc->u.ldst.immed = immed;
5475 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
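/* The assignment above reflects the ARM addressing modes: bit 24 is the
   P (pre-index) bit and bit 21 the W bit, so writeback occurs for
   post-indexed forms (P == 0) or when writeback is explicitly requested
   (W == 1). */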
5476 dsc->u.ldst.restore_r4 = 0;
5479 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5481 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5482 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5484 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5486 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5487 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5489 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5494 /* Copy byte/halfword/word loads and stores. */
5497 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5498 struct displaced_step_closure *dsc, int load,
5499 int immed, int writeback, int size, int usermode,
5500 int rt, int rm, int rn)
5502 ULONGEST rt_val, rn_val, rm_val = 0;
5504 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5505 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5507 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5509 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5511 rt_val = displaced_read_reg (regs, dsc, rt);
5512 rn_val = displaced_read_reg (regs, dsc, rn);
5514 rm_val = displaced_read_reg (regs, dsc, rm);
5516 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5517 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5519 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5521 dsc->u.ldst.xfersize = size;
5522 dsc->u.ldst.rn = rn;
5523 dsc->u.ldst.immed = immed;
5524 dsc->u.ldst.writeback = writeback;
5526 /* To write PC we can do:
5528 Before this sequence of instructions:
5529 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
5530 r2 is the Rn value obtained from displaced_read_reg.
5532 Insn1: push {pc} Write address of STR instruction + offset on stack
5533 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5534 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5535 = addr(Insn1) + offset - addr(Insn3) - 8
5537 Insn4: add r4, r4, #8 r4 = offset - 8
5538 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5540 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5542 Otherwise we don't know what value to write for PC, since the offset is
5543 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5544 of this can be found in Section "Saving from r15" in
5545 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5547 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5552 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5553 uint16_t insn2, struct regcache *regs,
5554 struct displaced_step_closure *dsc, int size)
5556 unsigned int u_bit = bit (insn1, 7);
5557 unsigned int rt = bits (insn2, 12, 15);
5558 int imm12 = bits (insn2, 0, 11);
5561 if (debug_displaced)
5562 fprintf_unfiltered (gdb_stdlog,
5563 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5564 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5570 /* Rewrite instruction LDR Rt imm12 into:
5572 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5576 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5579 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5580 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5581 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5583 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5585 pc_val = pc_val & 0xfffffffc;
5587 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5588 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5592 dsc->u.ldst.xfersize = size;
5593 dsc->u.ldst.immed = 0;
5594 dsc->u.ldst.writeback = 0;
5595 dsc->u.ldst.restore_r4 = 0;
5597 /* LDR R0, R2, R3 */
5598 dsc->modinsn[0] = 0xf852;
5599 dsc->modinsn[1] = 0x3;
5602 dsc->cleanup = &cleanup_load;
5608 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5609 uint16_t insn2, struct regcache *regs,
5610 struct displaced_step_closure *dsc,
5611 int writeback, int immed)
5613 unsigned int rt = bits (insn2, 12, 15);
5614 unsigned int rn = bits (insn1, 0, 3);
5615 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5616 /* In LDR (register), there is also a register Rm, which is not allowed to
5617 be PC, so we don't have to check it. */
5619 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5620 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5623 if (debug_displaced)
5624 fprintf_unfiltered (gdb_stdlog,
5625 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5626 rt, rn, insn1, insn2);
5628 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5631 dsc->u.ldst.restore_r4 = 0;
5634 /* ldr[b]<cond> rt, [rn, #imm], etc.
5636 ldr[b]<cond> r0, [r2, #imm]. */
5638 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5639 dsc->modinsn[1] = insn2 & 0x0fff;
5642 /* ldr[b]<cond> rt, [rn, rm], etc.
5644 ldr[b]<cond> r0, [r2, r3]. */
5646 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5647 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5657 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5658 struct regcache *regs,
5659 struct displaced_step_closure *dsc,
5660 int load, int size, int usermode)
5662 int immed = !bit (insn, 25);
5663 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5664 unsigned int rt = bits (insn, 12, 15);
5665 unsigned int rn = bits (insn, 16, 19);
5666 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5668 if (!insn_references_pc (insn, 0x000ff00ful))
5669 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5671 if (debug_displaced)
5672 fprintf_unfiltered (gdb_stdlog,
5673 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5674 load ? (size == 1 ? "ldrb" : "ldr")
5675 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5677 (unsigned long) insn);
5679 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5680 usermode, rt, rm, rn);
5682 if (load || rt != ARM_PC_REGNUM)
5684 dsc->u.ldst.restore_r4 = 0;
5687 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5689 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5690 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5692 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5694 {ldr,str}[b]<cond> r0, [r2, r3]. */
5695 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5699 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5700 dsc->u.ldst.restore_r4 = 1;
5701 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5702 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5703 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5704 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5705 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5709 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5711 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5716 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5721 /* Cleanup LDM instructions with fully-populated register list. This is an
5722 unfortunate corner case: it's impossible to implement correctly by modifying
5723 the instruction. The issue is as follows: we have an instruction,
5727 which we must rewrite to avoid loading PC. A possible solution would be to
5728 do the load in two halves, something like (with suitable cleanup
5732 ldm[id][ab] r8!, {r0-r7}
5734 ldm[id][ab] r8, {r7-r14}
5737 but at present there's no suitable place for <temp>, since the scratch space
5738 is overwritten before the cleanup routine is called. For now, we simply
5739 emulate the instruction. */
5742 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5743 struct displaced_step_closure *dsc)
5745 int inc = dsc->u.block.increment;
5746 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5747 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5748 uint32_t regmask = dsc->u.block.regmask;
5749 int regno = inc ? 0 : 15;
5750 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5751 int exception_return = dsc->u.block.load && dsc->u.block.user
5752 && (regmask & 0x8000) != 0;
5753 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5754 int do_transfer = condition_true (dsc->u.block.cond, status);
5755 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5760 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5761 sensible we can do here. Complain loudly. */
5762 if (exception_return)
5763 error (_("Cannot single-step exception return"));
5765 /* We don't handle any stores here for now. */
5766 gdb_assert (dsc->u.block.load != 0);
5768 if (debug_displaced)
5769 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5770 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5771 dsc->u.block.increment ? "inc" : "dec",
5772 dsc->u.block.before ? "before" : "after");
5779 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5782 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5785 xfer_addr += bump_before;
5787 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5788 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5790 xfer_addr += bump_after;
5792 regmask &= ~(1 << regno);
5795 if (dsc->u.block.writeback)
5796 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5800 /* Clean up an STM which included the PC in the register list. */
5803 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5804 struct displaced_step_closure *dsc)
5806 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5807 int store_executed = condition_true (dsc->u.block.cond, status);
5808 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5809 CORE_ADDR stm_insn_addr;
5812 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5814 /* If condition code fails, there's nothing else to do. */
5815 if (!store_executed)
5818 if (dsc->u.block.increment)
5820 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5822 if (dsc->u.block.before)
5827 pc_stored_at = dsc->u.block.xfer_addr;
5829 if (dsc->u.block.before)
5833 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5834 stm_insn_addr = dsc->scratch_base;
5835 offset = pc_val - stm_insn_addr;
5837 if (debug_displaced)
5838 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5839 "STM instruction\n", offset);
5841 /* Rewrite the stored PC to the proper value for the non-displaced original
5843 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5844 dsc->insn_addr + offset);
5847 /* Clean up an LDM which includes the PC in the register list. We clumped all
5848 the registers in the transferred list into a contiguous range r0...rX (to
5849 avoid loading PC directly and losing control of the debugged program), so we
5850 must undo that here. */
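/* For illustration: "ldm r0, {r4, r7, pc}" is rewritten by
   arm_copy_block_xfer to load into the contiguous set {r0, r1, r2};
   this cleanup then moves the value loaded into r2 to the PC, r1's
   value to r7 and r0's value to r4, and finally restores the clobbered
   low registers from DSC->tmp. */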
5853 cleanup_block_load_pc (struct gdbarch *gdbarch,
5854 struct regcache *regs,
5855 struct displaced_step_closure *dsc)
5857 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5858 int load_executed = condition_true (dsc->u.block.cond, status);
5859 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5860 unsigned int regs_loaded = bitcount (mask);
5861 unsigned int num_to_shuffle = regs_loaded, clobbered;
5863 /* The method employed here will fail if the register list is fully populated
5864 (we need to avoid loading PC directly). */
5865 gdb_assert (num_to_shuffle < 16);
5870 clobbered = (1 << num_to_shuffle) - 1;
5872 while (num_to_shuffle > 0)
5874 if ((mask & (1 << write_reg)) != 0)
5876 unsigned int read_reg = num_to_shuffle - 1;
5878 if (read_reg != write_reg)
5880 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5881 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5882 if (debug_displaced)
5883 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5884 "loaded register r%d to r%d\n"), read_reg,
5887 else if (debug_displaced)
5888 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5889 "r%d already in the right place\n"),
5892 clobbered &= ~(1 << write_reg);
5900 /* Restore any registers we scribbled over. */
5901 for (write_reg = 0; clobbered != 0; write_reg++)
5903 if ((clobbered & (1 << write_reg)) != 0)
5905 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
5907 if (debug_displaced)
5908 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
5909 "clobbered register r%d\n"), write_reg);
5910 clobbered &= ~(1 << write_reg);
5914 /* Perform register writeback manually. */
5915 if (dsc->u.block.writeback)
5917 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
5919 if (dsc->u.block.increment)
5920 new_rn_val += regs_loaded * 4;
5922 new_rn_val -= regs_loaded * 4;
5924 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
5929 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5930 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5933 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
5934 struct regcache *regs,
5935 struct displaced_step_closure *dsc)
5937 int load = bit (insn, 20);
5938 int user = bit (insn, 22);
5939 int increment = bit (insn, 23);
5940 int before = bit (insn, 24);
5941 int writeback = bit (insn, 21);
5942 int rn = bits (insn, 16, 19);
5944 /* Block transfers which don't mention PC can be run directly
5946 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
5947 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
5949 if (rn == ARM_PC_REGNUM)
5951 warning (_("displaced: Unpredictable LDM or STM with "
5952 "base register r15"));
5953 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
5956 if (debug_displaced)
5957 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
5958 "%.8lx\n", (unsigned long) insn);
5960 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
5961 dsc->u.block.rn = rn;
5963 dsc->u.block.load = load;
5964 dsc->u.block.user = user;
5965 dsc->u.block.increment = increment;
5966 dsc->u.block.before = before;
5967 dsc->u.block.writeback = writeback;
5968 dsc->u.block.cond = bits (insn, 28, 31);
5970 dsc->u.block.regmask = insn & 0xffff;
5974 if ((insn & 0xffff) == 0xffff)
5976 /* LDM with a fully-populated register list. This case is
5977 particularly tricky. Implement for now by fully emulating the
5978 instruction (which might not behave perfectly in all cases, but
5979 these instructions should be rare enough for that not to matter
5981 dsc->modinsn[0] = ARM_NOP;
5983 dsc->cleanup = &cleanup_block_load_all;
5987 /* LDM of a list of registers which includes PC. Implement by
5988 rewriting the list of registers to be transferred into a
5989 contiguous chunk r0...rX before doing the transfer, then shuffling
5990 registers into the correct places in the cleanup routine. */
5991 unsigned int regmask = insn & 0xffff;
5992 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
5993 unsigned int to = 0, from = 0, i, new_rn;
5995 for (i = 0; i < num_in_list; i++)
5996 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5998 /* Writeback makes things complicated. We need to avoid clobbering
5999 the base register with one of the registers in our modified
6000 register list, but just using a different register can't work in
6003 ldm r14!, {r0-r13,pc}
6005 which would need to be rewritten as:
6009 but that can't work, because there's no free register for N.
6011 Solve this by turning off the writeback bit, and emulating
6012 writeback manually in the cleanup routine. */
6017 new_regmask = (1 << num_in_list) - 1;
6019 if (debug_displaced)
6020 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6021 "{..., pc}: original reg list %.4x, modified "
6022 "list %.4x\n"), rn, writeback ? "!" : "",
6023 (int) insn & 0xffff, new_regmask);
6025 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6027 dsc->cleanup = &cleanup_block_load_pc;
6032 /* STM of a list of registers which includes PC. Run the instruction
6033 as-is, but out of line: this will store the wrong value for the PC,
6034 so we must manually fix up the memory in the cleanup routine.
6035 Doing things this way has the advantage that we can auto-detect
6036 the offset of the PC write (which is architecture-dependent) in
6037 the cleanup routine. */
6038 dsc->modinsn[0] = insn;
6040 dsc->cleanup = &cleanup_block_store_pc;
6047 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6048 struct regcache *regs,
6049 struct displaced_step_closure *dsc)
6051 int rn = bits (insn1, 0, 3);
6052 int load = bit (insn1, 4);
6053 int writeback = bit (insn1, 5);
6055 /* Block transfers which don't mention PC can be run directly
6057 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6058 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6060 if (rn == ARM_PC_REGNUM)
6062 warning (_("displaced: Unpredictable LDM or STM with "
6063 "base register r15"));
6064 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6065 "unpredictable ldm/stm", dsc);
6068 if (debug_displaced)
6069 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6070 "%.4x%.4x\n", insn1, insn2);
6072 /* Clear bit 13, since it should always be zero. */
6073 dsc->u.block.regmask = (insn2 & 0xdfff);
6074 dsc->u.block.rn = rn;
6076 dsc->u.block.load = load;
6077 dsc->u.block.user = 0;
6078 dsc->u.block.increment = bit (insn1, 7);
6079 dsc->u.block.before = bit (insn1, 8);
6080 dsc->u.block.writeback = writeback;
6081 dsc->u.block.cond = INST_AL;
6082 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6086 if (dsc->u.block.regmask == 0xffff)
6088 /* This branch can never be taken. */
6093 unsigned int regmask = dsc->u.block.regmask;
6094 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
6095 unsigned int to = 0, from = 0, i, new_rn;
6097 for (i = 0; i < num_in_list; i++)
6098 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6103 new_regmask = (1 << num_in_list) - 1;
6105 if (debug_displaced)
6106 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6107 "{..., pc}: original reg list %.4x, modified "
6108 "list %.4x\n"), rn, writeback ? "!" : "",
6109 (int) dsc->u.block.regmask, new_regmask);
6111 dsc->modinsn[0] = insn1;
6112 dsc->modinsn[1] = (new_regmask & 0xffff);
6115 dsc->cleanup = &cleanup_block_load_pc;
6120 dsc->modinsn[0] = insn1;
6121 dsc->modinsn[1] = insn2;
6123 dsc->cleanup = &cleanup_block_store_pc;
6128 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6129 This is used to avoid a dependency on BFD's bfd_endian enum. */
6132 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6135 return read_memory_unsigned_integer (memaddr, len,
6136 (enum bfd_endian) byte_order);
6139 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6142 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6145 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6148 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6151 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self,
6157 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6160 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6162 return arm_is_thumb (self->regcache);
6165 /* single_step() is called just before we want to resume the inferior,
6166 if we want to single-step it but there is no hardware or kernel
6167 single-step support. We find the targets of the upcoming instructions
6168 and set breakpoints on them. */
6171 arm_software_single_step (struct frame_info *frame)
6173 struct regcache *regcache = get_current_regcache ();
6174 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6175 struct address_space *aspace = get_regcache_aspace (regcache);
6176 struct arm_get_next_pcs next_pcs_ctx;
6179 VEC (CORE_ADDR) *next_pcs = NULL;
6180 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6182 arm_get_next_pcs_ctor (&next_pcs_ctx,
6183 &arm_get_next_pcs_ops,
6184 gdbarch_byte_order (gdbarch),
6185 gdbarch_byte_order_for_code (gdbarch),
6189 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6191 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6192 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6194 do_cleanups (old_chain);
6199 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6200 for Linux, where some SVC instructions must be treated specially. */
6203 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6204 struct displaced_step_closure *dsc)
6206 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6208 if (debug_displaced)
6209 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6210 "%.8lx\n", (unsigned long) resume_addr);
6212 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6216 /* Common copy routine for the svc instruction. */
6219 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6220 struct displaced_step_closure *dsc)
6222 /* Preparation: none.
6223 Insn: unmodified svc.
6224 Cleanup: pc <- insn_addr + insn_size. */
6226 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6228 dsc->wrote_to_pc = 1;
6230 /* Allow OS-specific code to override SVC handling. */
6231 if (dsc->u.svc.copy_svc_os)
6232 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6235 dsc->cleanup = &cleanup_svc;
6241 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6242 struct regcache *regs, struct displaced_step_closure *dsc)
6245 if (debug_displaced)
6246 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6247 (unsigned long) insn);
6249 dsc->modinsn[0] = insn;
6251 return install_svc (gdbarch, regs, dsc);
6255 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6256 struct regcache *regs, struct displaced_step_closure *dsc)
6259 if (debug_displaced)
6260 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6263 dsc->modinsn[0] = insn;
6265 return install_svc (gdbarch, regs, dsc);
6268 /* Copy undefined instructions. */
6271 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6272 struct displaced_step_closure *dsc)
6274 if (debug_displaced)
6275 fprintf_unfiltered (gdb_stdlog,
6276 "displaced: copying undefined insn %.8lx\n",
6277 (unsigned long) insn);
6279 dsc->modinsn[0] = insn;
6285 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6286 struct displaced_step_closure *dsc)
6289 if (debug_displaced)
6290 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6291 "%.4x %.4x\n", (unsigned short) insn1,
6292 (unsigned short) insn2);
6294 dsc->modinsn[0] = insn1;
6295 dsc->modinsn[1] = insn2;
6301 /* Copy unpredictable instructions. */
6304 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6305 struct displaced_step_closure *dsc)
6307 if (debug_displaced)
6308 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6309 "%.8lx\n", (unsigned long) insn);
6311 dsc->modinsn[0] = insn;
6316 /* The decode_* functions are instruction decoding helpers. They mostly follow
6317 the presentation in the ARM ARM. */
6320 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6321 struct regcache *regs,
6322 struct displaced_step_closure *dsc)
6324 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6325 unsigned int rn = bits (insn, 16, 19);
6327 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6328 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6329 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6330 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6331 else if ((op1 & 0x60) == 0x20)
6332 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6333 else if ((op1 & 0x71) == 0x40)
6334 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6336 else if ((op1 & 0x77) == 0x41)
6337 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6338 else if ((op1 & 0x77) == 0x45)
6339 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6340 else if ((op1 & 0x77) == 0x51)
6343 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6345 return arm_copy_unpred (gdbarch, insn, dsc);
6347 else if ((op1 & 0x77) == 0x55)
6348 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6349 else if (op1 == 0x57)
6352 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6353 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6354 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6355 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6356 default: return arm_copy_unpred (gdbarch, insn, dsc);
6358 else if ((op1 & 0x63) == 0x43)
6359 return arm_copy_unpred (gdbarch, insn, dsc);
6360 else if ((op2 & 0x1) == 0x0)
6361 switch (op1 & ~0x80)
6364 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6366 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6367 case 0x71: case 0x75:
6369 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6370 case 0x63: case 0x67: case 0x73: case 0x77:
6371 return arm_copy_unpred (gdbarch, insn, dsc);
6373 return arm_copy_undef (gdbarch, insn, dsc);
6376 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6380 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6381 struct regcache *regs,
6382 struct displaced_step_closure *dsc)
6384 if (bit (insn, 27) == 0)
6385 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6386 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6387 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6390 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6393 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6395 case 0x4: case 0x5: case 0x6: case 0x7:
6396 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6399 switch ((insn & 0xe00000) >> 21)
6401 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6403 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6406 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6409 return arm_copy_undef (gdbarch, insn, dsc);
6414 int rn_f = (bits (insn, 16, 19) == 0xf);
6415 switch ((insn & 0xe00000) >> 21)
6418 /* ldc/ldc2 imm (undefined for rn == pc). */
6419 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6420 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6423 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6425 case 0x4: case 0x5: case 0x6: case 0x7:
6426 /* ldc/ldc2 lit (undefined for rn != pc). */
6427 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6428 : arm_copy_undef (gdbarch, insn, dsc);
6431 return arm_copy_undef (gdbarch, insn, dsc);
6436 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6439 if (bits (insn, 16, 19) == 0xf)
6441 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6443 return arm_copy_undef (gdbarch, insn, dsc);
6447 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6449 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6453 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6455 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6458 return arm_copy_undef (gdbarch, insn, dsc);
6462 /* Decode miscellaneous instructions in dp/misc encoding space. */
6465 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6466 struct regcache *regs,
6467 struct displaced_step_closure *dsc)
6469 unsigned int op2 = bits (insn, 4, 6);
6470 unsigned int op = bits (insn, 21, 22);
6471 unsigned int op1 = bits (insn, 16, 19);
6476 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6479 if (op == 0x1) /* bx. */
6480 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6482 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6484 return arm_copy_undef (gdbarch, insn, dsc);
6488 /* Not really supported. */
6489 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6491 return arm_copy_undef (gdbarch, insn, dsc);
6495 return arm_copy_bx_blx_reg (gdbarch, insn,
6496 regs, dsc); /* blx register. */
6498 return arm_copy_undef (gdbarch, insn, dsc);
6501 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6505 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6507 /* Not really supported. */
6508 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6511 return arm_copy_undef (gdbarch, insn, dsc);
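/* Decode data-processing and miscellaneous instructions and copy them for displaced stepping. */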
6516 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6517 struct regcache *regs,
6518 struct displaced_step_closure *dsc)
6521 switch (bits (insn, 20, 24))
6524 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6527 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6529 case 0x12: case 0x16:
6530 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6533 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6537 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6539 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6540 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6541 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6542 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6543 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6544 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6545 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6546 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6547 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6548 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6549 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6550 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6551 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6552 /* 2nd arg means "unprivileged". */
6553 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6557 /* Should be unreachable. */
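/* Decode load/store word and unsigned byte instructions and copy them for displaced stepping. */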
6562 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6563 struct regcache *regs,
6564 struct displaced_step_closure *dsc)
6566 int a = bit (insn, 25), b = bit (insn, 4);
6567 uint32_t op1 = bits (insn, 20, 24);
6568 int rn_f = bits (insn, 16, 19) == 0xf;
6570 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6571 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6572 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6573 else if ((!a && (op1 & 0x17) == 0x02)
6574 || (a && (op1 & 0x17) == 0x02 && !b))
6575 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6576 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6577 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6578 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6579 else if ((!a && (op1 & 0x17) == 0x03)
6580 || (a && (op1 & 0x17) == 0x03 && !b))
6581 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6582 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6583 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6584 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6585 else if ((!a && (op1 & 0x17) == 0x06)
6586 || (a && (op1 & 0x17) == 0x06 && !b))
6587 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6588 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6589 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6590 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6591 else if ((!a && (op1 & 0x17) == 0x07)
6592 || (a && (op1 & 0x17) == 0x07 && !b))
6593 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6595 /* Should be unreachable. */
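/* Decode media instructions (parallel add/sub, pack/unpack/saturate/reverse, usad8/usada8 and bit-field operations) and copy them for displaced stepping. */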
6600 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6601 struct displaced_step_closure *dsc)
6603 switch (bits (insn, 20, 24))
6605 case 0x00: case 0x01: case 0x02: case 0x03:
6606 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6608 case 0x04: case 0x05: case 0x06: case 0x07:
6609 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6611 case 0x08: case 0x09: case 0x0a: case 0x0b:
6612 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6613 return arm_copy_unmodified (gdbarch, insn,
6614 "decode/pack/unpack/saturate/reverse", dsc);
6617 if (bits (insn, 5, 7) == 0) /* op2. */
6619 if (bits (insn, 12, 15) == 0xf)
6620 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6622 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6625 return arm_copy_undef (gdbarch, insn, dsc);
6627 case 0x1a: case 0x1b:
6628 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6629 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6631 return arm_copy_undef (gdbarch, insn, dsc);
6633 case 0x1c: case 0x1d:
6634 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6636 if (bits (insn, 0, 3) == 0xf)
6637 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6639 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6642 return arm_copy_undef (gdbarch, insn, dsc);
6644 case 0x1e: case 0x1f:
6645 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6646 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6648 return arm_copy_undef (gdbarch, insn, dsc);
6651 /* Should be unreachable. */
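/* Decode branch, branch-with-link and block data transfer (LDM/STM) instructions. */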
6656 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
6657 struct regcache *regs,
6658 struct displaced_step_closure *dsc)
6661 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6663 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
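/* Decode VFP/Neon extension register load/store instructions (ARM encoding). */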
6667 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6668 struct regcache *regs,
6669 struct displaced_step_closure *dsc)
6671 unsigned int opcode = bits (insn, 20, 24);
6675 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6676 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6678 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6679 case 0x12: case 0x16:
6680 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6682 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6683 case 0x13: case 0x17:
6684 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6686 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6687 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6688 /* Note: no writeback for these instructions. Bit 25 will always be
6689 zero though (via caller), so the following works OK. */
6690 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6693 /* Should be unreachable. */
6697 /* Decode shifted register instructions. */
6700 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6701 uint16_t insn2, struct regcache *regs,
6702 struct displaced_step_closure *dsc)
6704 /* The PC is only allowed to be used in the MOV instruction. */
6706 unsigned int op = bits (insn1, 5, 8);
6707 unsigned int rn = bits (insn1, 0, 3);
6709 if (op == 0x2 && rn == 0xf) /* MOV */
6710 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6712 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6713 "dp (shift reg)", dsc);
6717 /* Decode extension register load/store. Exactly the same as
6718 arm_decode_ext_reg_ld_st. */
6721 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6722 uint16_t insn2, struct regcache *regs,
6723 struct displaced_step_closure *dsc)
6725 unsigned int opcode = bits (insn1, 4, 8);
6729 case 0x04: case 0x05:
6730 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6731 "vfp/neon vmov", dsc);
6733 case 0x08: case 0x0c: /* 01x00 */
6734 case 0x0a: case 0x0e: /* 01x10 */
6735 case 0x12: case 0x16: /* 10x10 */
6736 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6737 "vfp/neon vstm/vpush", dsc);
6739 case 0x09: case 0x0d: /* 01x01 */
6740 case 0x0b: case 0x0f: /* 01x11 */
6741 case 0x13: case 0x17: /* 10x11 */
6742 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6743 "vfp/neon vldm/vpop", dsc);
6745 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6746 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6748 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6749 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6752 /* Should be unreachable. */
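/* Decode supervisor call and coprocessor instructions (ARM encoding), including the VFP/Neon forms. */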
6757 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
6758 struct regcache *regs, struct displaced_step_closure *dsc)
6760 unsigned int op1 = bits (insn, 20, 25);
6761 int op = bit (insn, 4);
6762 unsigned int coproc = bits (insn, 8, 11);
6763 unsigned int rn = bits (insn, 16, 19);
6765 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6766 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6767 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6768 && (coproc & 0xe) != 0xa)
6770 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6771 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6772 && (coproc & 0xe) != 0xa)
6773 /* ldc/ldc2 imm/lit. */
6774 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6775 else if ((op1 & 0x3e) == 0x00)
6776 return arm_copy_undef (gdbarch, insn, dsc);
6777 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6778 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6779 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6780 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6781 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6782 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6783 else if ((op1 & 0x30) == 0x20 && !op)
6785 if ((coproc & 0xe) == 0xa)
6786 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6788 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6790 else if ((op1 & 0x30) == 0x20 && op)
6791 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6792 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6793 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6794 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6795 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6796 else if ((op1 & 0x30) == 0x30)
6797 return arm_copy_svc (gdbarch, insn, regs, dsc);
6799 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
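/* Decode supervisor call and coprocessor instructions (Thumb-2 encoding). */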
6803 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6804 uint16_t insn2, struct regcache *regs,
6805 struct displaced_step_closure *dsc)
6807 unsigned int coproc = bits (insn2, 8, 11);
6808 unsigned int op1 = bits (insn1, 4, 9);
6809 unsigned int bit_5_8 = bits (insn1, 5, 8);
6810 unsigned int bit_9 = bit (insn1, 9);
6811 unsigned int bit_4 = bit (insn1, 4);
6812 unsigned int rn = bits (insn1, 0, 3);
6817 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6818 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6820 else if (bit_5_8 == 0) /* UNDEFINED. */
6821 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6824 /* coproc is 101x. SIMD/VFP, ext registers load/store. */
6825 if ((coproc & 0xe) == 0xa)
6826 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6828 else /* coproc is not 101x. */
6830 if (bit_4 == 0) /* STC/STC2. */
6831 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6833 else /* LDC/LDC2 {literal, immediate}. */
6834 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6840 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6846 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6847 struct displaced_step_closure *dsc, int rd)
6853 Preparation: Rd <- PC
6859 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6860 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6864 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6865 struct displaced_step_closure *dsc,
6866 int rd, unsigned int imm)
6869 /* Encoding T2: ADDS Rd, #imm */
6870 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6872 install_pc_relative (gdbarch, regs, dsc, rd);
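/* Decode a 16-bit Thumb ADR (generate PC-relative address) instruction and copy it for displaced stepping. */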
6878 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6879 struct regcache *regs,
6880 struct displaced_step_closure *dsc)
6882 unsigned int rd = bits (insn, 8, 10);
6883 unsigned int imm8 = bits (insn, 0, 7);
6885 if (debug_displaced)
6886 fprintf_unfiltered (gdb_stdlog,
6887 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6890 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6894 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6895 uint16_t insn2, struct regcache *regs,
6896 struct displaced_step_closure *dsc)
6898 unsigned int rd = bits (insn2, 8, 11);
6899 /* The immediate has the same encoding in ADR, ADD and SUB, so simply
6900 extract the raw immediate encoding rather than computing the immediate
6901 value. When generating the ADD or SUB instruction, the immediate can
6902 then be ORed directly into the encoding. */
6903 unsigned int imm_3_8 = insn2 & 0x70ff;
6904 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
6906 if (debug_displaced)
6907 fprintf_unfiltered (gdb_stdlog,
6908 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6909 rd, imm_i, imm_3_8, insn1, insn2);
6911 if (bit (insn1, 7)) /* ADR encoding T2 (subtract form). */
6913 /* Rewrite as SUB Rd, Rd, #imm (SUB immediate, encoding T3). */
6914 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
6915 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6917 else /* ADR encoding T3 (add form). */
6919 /* Rewrite as ADD Rd, Rd, #imm (ADD immediate, encoding T3). */
6920 dsc->modinsn[0] = (0xf100 | rd | imm_i);
6921 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
6925 install_pc_relative (gdbarch, regs, dsc, rd);
6931 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
6932 struct regcache *regs,
6933 struct displaced_step_closure *dsc)
6935 unsigned int rt = bits (insn1, 8, 10);
6937 int imm8 = (bits (insn1, 0, 7) << 2);
6938 CORE_ADDR from = dsc->insn_addr;
6944 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6946 Insn: LDR R0, [R2, R3];
6947 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6949 if (debug_displaced)
6950 fprintf_unfiltered (gdb_stdlog,
6951 "displaced: copying thumb ldr r%d [pc #%d]\n"
6954 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6955 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6956 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6957 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6958 /* The assembler calculates the required value of the offset from the
6959 Align(PC,4) value of this instruction to the label. */
6960 pc = pc & 0xfffffffc;
6962 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
6963 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
6966 dsc->u.ldst.xfersize = 4;
6968 dsc->u.ldst.immed = 0;
6969 dsc->u.ldst.writeback = 0;
6970 dsc->u.ldst.restore_r4 = 0;
6972 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6974 dsc->cleanup = &cleanup_load;
6979 /* Copy Thumb cbnz/cbz instruction. */
6982 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
6983 struct regcache *regs,
6984 struct displaced_step_closure *dsc)
6986 int non_zero = bit (insn1, 11);
6987 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
6988 CORE_ADDR from = dsc->insn_addr;
6989 int rn = bits (insn1, 0, 2);
6990 int rn_val = displaced_read_reg (regs, dsc, rn);
6992 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
6993 /* CBNZ and CBZ do not affect the condition flags. If the condition is
6994 true, set it to INST_AL so cleanup_branch knows the branch is taken;
6995 otherwise leave it false and cleanup_branch will do nothing. */
6996 if (dsc->u.branch.cond)
6998 dsc->u.branch.cond = INST_AL;
6999 dsc->u.branch.dest = from + 4 + imm5;
7002 dsc->u.branch.dest = from + 2;
7004 dsc->u.branch.link = 0;
7005 dsc->u.branch.exchange = 0;
7007 if (debug_displaced)
7008 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7009 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7010 rn, rn_val, insn1, dsc->u.branch.dest);
7012 dsc->modinsn[0] = THUMB_NOP;
7014 dsc->cleanup = &cleanup_branch;
7018 /* Copy Table Branch Byte/Halfword */
7020 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7021 uint16_t insn2, struct regcache *regs,
7022 struct displaced_step_closure *dsc)
7024 ULONGEST rn_val, rm_val;
7025 int is_tbh = bit (insn2, 4);
7026 CORE_ADDR halfwords = 0;
7027 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7029 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7030 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7036 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7037 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7043 target_read_memory (rn_val + rm_val, buf, 1);
7044 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7047 if (debug_displaced)
7048 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
7049 " table entry 0x%x\n", is_tbh ? "tbh" : "tbb",
7050 (unsigned int) rn_val, (unsigned int) rm_val,
7051 (unsigned int) halfwords);
7053 dsc->u.branch.cond = INST_AL;
7054 dsc->u.branch.link = 0;
7055 dsc->u.branch.exchange = 0;
7056 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7058 dsc->cleanup = &cleanup_branch;
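/* Cleanup for a 16-bit Thumb POP {..., pc} with a full register list: PC <- r7, r7 <- r8, r8 <- saved value. */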
7064 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7065 struct displaced_step_closure *dsc)
7068 int val = displaced_read_reg (regs, dsc, 7);
7069 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7072 val = displaced_read_reg (regs, dsc, 8);
7073 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7076 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7081 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
7082 struct regcache *regs,
7083 struct displaced_step_closure *dsc)
7085 dsc->u.block.regmask = insn1 & 0x00ff;
7087 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7090 (1) register list is full, that is, r0-r7 are used.
7091 Prepare: tmp[0] <- r8
7093 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7094 MOV r8, r7; Move value of r7 to r8;
7095 POP {r7}; Store PC value into r7.
7097 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7099 (2) register list is not full, supposing there are N registers in the
7100 register list (not counting PC, 0 <= N <= 7).
7101 Prepare: for each i in 0 .. N, tmp[i] <- ri.
7103 POP {r0, r1, ...., rN};
7105 Cleanup: Set the registers in the original reglist from r0 - rN, then
7106 restore r0 - rN from tmp[].
7108 if (debug_displaced)
7109 fprintf_unfiltered (gdb_stdlog,
7110 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7111 dsc->u.block.regmask, insn1);
7113 if (dsc->u.block.regmask == 0xff)
7115 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7117 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7118 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7119 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7122 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7126 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7127 unsigned int new_regmask, bit = 1;
7128 unsigned int to = 0, from = 0, i, new_rn;
7130 for (i = 0; i < num_in_list + 1; i++)
7131 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7133 new_regmask = (1 << (num_in_list + 1)) - 1;
7135 if (debug_displaced)
7136 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7137 "{..., pc}: original reg list %.4x,"
7138 " modified list %.4x\n"),
7139 (int) dsc->u.block.regmask, new_regmask);
7141 dsc->u.block.regmask |= 0x8000;
7142 dsc->u.block.writeback = 0;
7143 dsc->u.block.cond = INST_AL;
7145 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7147 dsc->cleanup = &cleanup_block_load_pc;
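/* Decode a 16-bit Thumb instruction and copy it for displaced stepping. */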
7154 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7155 struct regcache *regs,
7156 struct displaced_step_closure *dsc)
7158 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7159 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7162 /* 16-bit thumb instructions. */
7163 switch (op_bit_12_15)
7165 /* Shift (immediate), add, subtract, move and compare. */
7166 case 0: case 1: case 2: case 3:
7167 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7168 "shift/add/sub/mov/cmp",
7172 switch (op_bit_10_11)
7174 case 0: /* Data-processing */
7175 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7179 case 1: /* Special data instructions and branch and exchange. */
7181 unsigned short op = bits (insn1, 7, 9);
7182 if (op == 6 || op == 7) /* BX or BLX */
7183 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7184 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7185 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7187 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7191 default: /* LDR (literal) */
7192 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7195 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7196 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7199 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7200 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7201 else /* Generate SP-relative address */
7202 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7204 case 11: /* Misc 16-bit instructions */
7206 switch (bits (insn1, 8, 11))
7208 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7209 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7211 case 12: case 13: /* POP */
7212 if (bit (insn1, 8)) /* PC is in register list. */
7213 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7215 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7217 case 15: /* If-Then, and hints */
7218 if (bits (insn1, 0, 3))
7219 /* If-Then makes up to four following instructions conditional.
7220 The IT instruction itself is not conditional, so handle it as an
7221 ordinary unmodified instruction. */
7222 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7225 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7228 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7233 if (op_bit_10_11 < 2) /* Store multiple registers */
7234 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7235 else /* Load multiple registers */
7236 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7238 case 13: /* Conditional branch and supervisor call */
7239 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7240 err = thumb_copy_b (gdbarch, insn1, dsc);
7242 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7244 case 14: /* Unconditional branch */
7245 err = thumb_copy_b (gdbarch, insn1, dsc);
7252 internal_error (__FILE__, __LINE__,
7253 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7257 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7258 uint16_t insn1, uint16_t insn2,
7259 struct regcache *regs,
7260 struct displaced_step_closure *dsc)
7262 int rt = bits (insn2, 12, 15);
7263 int rn = bits (insn1, 0, 3);
7264 int op1 = bits (insn1, 7, 8);
7267 switch (bits (insn1, 5, 6))
7269 case 0: /* Load byte and memory hints */
7270 if (rt == 0xf) /* PLD/PLI */
7273 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
7274 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7276 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7281 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7282 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7285 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7286 "ldrb{reg, immediate}/ldrbt",
7291 case 1: /* Load halfword and memory hints. */
7292 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7293 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7294 "pld/unalloc memhint", dsc);
7298 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7301 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7305 case 2: /* Load word */
7307 int insn2_bit_8_11 = bits (insn2, 8, 11);
7310 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7311 else if (op1 == 0x1) /* Encoding T3 */
7312 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7314 else /* op1 == 0x0 */
7316 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7317 /* LDR (immediate) */
7318 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7319 dsc, bit (insn2, 8), 1);
7320 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7321 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7324 /* LDR (register) */
7325 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7331 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
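/* Decode a 32-bit Thumb instruction and copy it for displaced stepping. */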
7338 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7339 uint16_t insn2, struct regcache *regs,
7340 struct displaced_step_closure *dsc)
7343 unsigned short op = bit (insn2, 15);
7344 unsigned int op1 = bits (insn1, 11, 12);
7350 switch (bits (insn1, 9, 10))
7355 /* Load/store {dual, exclusive}, table branch. */
7356 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7357 && bits (insn2, 5, 7) == 0)
7358 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7361 /* The PC is not allowed to be used in load/store {dual, exclusive}
7363 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7364 "load/store dual/ex", dsc);
7366 else /* load/store multiple */
7368 switch (bits (insn1, 7, 8))
7370 case 0: case 3: /* SRS, RFE */
7371 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7374 case 1: case 2: /* LDM/STM/PUSH/POP */
7375 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7382 /* Data-processing (shift register). */
7383 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7386 default: /* Coprocessor instructions. */
7387 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7392 case 2: /* op1 = 2 */
7393 if (op) /* Branch and misc control. */
7395 if (bit (insn2, 14) /* BLX/BL */
7396 || bit (insn2, 12) /* Unconditional branch */
7397 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7398 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7400 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7405 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7407 int op = bits (insn1, 4, 8);
7408 int rn = bits (insn1, 0, 3);
7409 if ((op == 0 || op == 0xa) && rn == 0xf)
7410 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7413 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7416 else /* Data processing (modified immediate) */
7417 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7421 case 3: /* op1 = 3 */
7422 switch (bits (insn1, 9, 10))
7426 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7428 else /* NEON Load/Store and Store single data item */
7429 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7430 "neon elt/struct load/store",
7433 case 1: /* op1 = 3, bits (9, 10) == 1 */
7434 switch (bits (insn1, 7, 8))
7436 case 0: case 1: /* Data processing (register) */
7437 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7440 case 2: /* Multiply and absolute difference */
7441 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7442 "mul/mua/diff", dsc);
7444 case 3: /* Long multiply and divide */
7445 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7450 default: /* Coprocessor instructions */
7451 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7460 internal_error (__FILE__, __LINE__,
7461 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7466 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7467 CORE_ADDR to, struct regcache *regs,
7468 struct displaced_step_closure *dsc)
7470 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7472 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7474 if (debug_displaced)
7475 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7476 "at %.8lx\n", insn1, (unsigned long) from);
7479 dsc->insn_size = thumb_insn_size (insn1);
7480 if (thumb_insn_size (insn1) == 4)
7483 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7484 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7487 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
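/* Fill in DSC and decode the instruction at FROM (in ARM or Thumb state as appropriate) for displaced stepping into the scratch space at TO. */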
7491 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7492 CORE_ADDR to, struct regcache *regs,
7493 struct displaced_step_closure *dsc)
7496 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7499 /* Most displaced instructions use a 1-instruction scratch space, so set this
7500 here and override below if/when necessary. */
7502 dsc->insn_addr = from;
7503 dsc->scratch_base = to;
7504 dsc->cleanup = NULL;
7505 dsc->wrote_to_pc = 0;
7507 if (!displaced_in_arm_mode (regs))
7508 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
7512 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7513 if (debug_displaced)
7514 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7515 "at %.8lx\n", (unsigned long) insn,
7516 (unsigned long) from);
7518 if ((insn & 0xf0000000) == 0xf0000000)
7519 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7520 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7522 case 0x0: case 0x1: case 0x2: case 0x3:
7523 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7526 case 0x4: case 0x5: case 0x6:
7527 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7531 err = arm_decode_media (gdbarch, insn, dsc);
7534 case 0x8: case 0x9: case 0xa: case 0xb:
7535 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7538 case 0xc: case 0xd: case 0xe: case 0xf:
7539 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
7544 internal_error (__FILE__, __LINE__,
7545 _("arm_process_displaced_insn: Instruction decode error"));
7548 /* Actually set up the scratch space for a displaced instruction. */
7551 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7552 CORE_ADDR to, struct displaced_step_closure *dsc)
7554 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7555 unsigned int i, len, offset;
7556 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7557 int size = dsc->is_thumb? 2 : 4;
7558 const gdb_byte *bkp_insn;
7561 /* Poke modified instruction(s). */
7562 for (i = 0; i < dsc->numinsns; i++)
7564 if (debug_displaced)
7566 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7568 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7571 fprintf_unfiltered (gdb_stdlog, "%.4x",
7572 (unsigned short)dsc->modinsn[i]);
7574 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7575 (unsigned long) to + offset);
7578 write_memory_unsigned_integer (to + offset, size,
7579 byte_order_for_code,
7584 /* Choose the correct breakpoint instruction. */
7587 bkp_insn = tdep->thumb_breakpoint;
7588 len = tdep->thumb_breakpoint_size;
7592 bkp_insn = tdep->arm_breakpoint;
7593 len = tdep->arm_breakpoint_size;
7596 /* Put breakpoint afterwards. */
7597 write_memory (to + offset, bkp_insn, len);
7599 if (debug_displaced)
7600 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7601 paddress (gdbarch, from), paddress (gdbarch, to));
7604 /* Entry point for copying an instruction into scratch space for displaced
7607 struct displaced_step_closure *
7608 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
7609 CORE_ADDR from, CORE_ADDR to,
7610 struct regcache *regs)
7612 struct displaced_step_closure *dsc = XNEW (struct displaced_step_closure);
7614 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
7615 arm_displaced_init_closure (gdbarch, from, to, dsc);
7620 /* Entry point for cleaning things up after a displaced instruction has been
7624 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7625 struct displaced_step_closure *dsc,
7626 CORE_ADDR from, CORE_ADDR to,
7627 struct regcache *regs)
7630 dsc->cleanup (gdbarch, regs, dsc);
7632 if (!dsc->wrote_to_pc)
7633 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7634 dsc->insn_addr + dsc->insn_size);
7638 #include "bfd-in2.h"
7639 #include "libcoff.h"
7642 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7644 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
7646 if (arm_pc_is_thumb (gdbarch, memaddr))
7648 static asymbol *asym;
7649 static combined_entry_type ce;
7650 static struct coff_symbol_struct csym;
7651 static struct bfd fake_bfd;
7652 static bfd_target fake_target;
7654 if (csym.native == NULL)
7656 /* Create a fake symbol vector containing a Thumb symbol.
7657 This is solely so that the code in print_insn_little_arm()
7658 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7659 the presence of a Thumb symbol and switch to decoding
7660 Thumb instructions. */
7662 fake_target.flavour = bfd_target_coff_flavour;
7663 fake_bfd.xvec = &fake_target;
7664 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7666 csym.symbol.the_bfd = &fake_bfd;
7667 csym.symbol.name = "fake";
7668 asym = (asymbol *) & csym;
7671 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7672 info->symbols = &asym;
7675 info->symbols = NULL;
7677 if (info->endian == BFD_ENDIAN_BIG)
7678 return print_insn_big_arm (memaddr, info);
7680 return print_insn_little_arm (memaddr, info);
7683 /* The following define instruction sequences that will cause ARM
7684 CPUs to take an undefined instruction trap. These are used to
7685 signal a breakpoint to GDB.
7687 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7688 modes. A different instruction is required for each mode. The ARM
7689 CPUs can also be big or little endian. Thus four different
7690 instructions are needed to support all cases.
7692 Note: ARMv4 defines several new instructions that will take the
7693 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7694 not in fact add the new instructions. The new undefined
7695 instructions in ARMv4 are all instructions that had no defined
7696 behaviour in earlier chips. There is no guarantee that they will
7697 raise an exception, but they may be treated as NOPs. In practice, it
7698 may only be safe to rely on instructions matching:
7700 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7701 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7702 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7704 Even this may only be true if the condition predicate is true. The
7705 following use a condition predicate of ALWAYS so it is always TRUE.
7707 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7708 and NetBSD all use a software interrupt rather than an undefined
7709 instruction to force a trap. This can be handled by the
7710 abi-specific code during establishment of the gdbarch vector. */
7712 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7713 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7714 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7715 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7717 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7718 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7719 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7720 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7722 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7723 the program counter value to determine whether a 16-bit or 32-bit
7724 breakpoint should be used. It returns a pointer to a string of
7725 bytes that encode a breakpoint instruction, stores the length of
7726 the string to *lenptr, and adjusts the program counter (if
7727 necessary) to point to the actual memory location where the
7728 breakpoint should be inserted. */
7730 static const unsigned char *
7731 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7733 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7734 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7736 if (arm_pc_is_thumb (gdbarch, *pcptr))
7738 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7740 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7741 check whether we are replacing a 32-bit instruction. */
7742 if (tdep->thumb2_breakpoint != NULL)
7745 if (target_read_memory (*pcptr, buf, 2) == 0)
7747 unsigned short inst1;
7748 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7749 if (thumb_insn_size (inst1) == 4)
7751 *lenptr = tdep->thumb2_breakpoint_size;
7752 return tdep->thumb2_breakpoint;
7757 *lenptr = tdep->thumb_breakpoint_size;
7758 return tdep->thumb_breakpoint;
7762 *lenptr = tdep->arm_breakpoint_size;
7763 return tdep->arm_breakpoint;
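/* Like arm_breakpoint_from_pc, but for the remote protocol: additionally adjust the reported breakpoint kind so a 32-bit Thumb-2 breakpoint is not confused with a 32-bit ARM breakpoint. */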
7768 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7771 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7773 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7774 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7775 that this is not confused with a 32-bit ARM breakpoint. */
7779 /* Extract from an array REGBUF containing the (raw) register state a
7780 function return value of type TYPE, and copy that, in virtual
7781 format, into VALBUF. */
7784 arm_extract_return_value (struct type *type, struct regcache *regs,
7787 struct gdbarch *gdbarch = get_regcache_arch (regs);
7788 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7790 if (TYPE_CODE_FLT == TYPE_CODE (type))
7792 switch (gdbarch_tdep (gdbarch)->fp_model)
7796 /* The value is in register F0 in internal format. We need to
7797 extract the raw value and then convert it to the desired
7799 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7801 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7802 convert_from_extended (floatformat_from_type (type), tmpbuf,
7803 valbuf, gdbarch_byte_order (gdbarch));
7807 case ARM_FLOAT_SOFT_FPA:
7808 case ARM_FLOAT_SOFT_VFP:
7809 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7810 not using the VFP ABI code. */
7812 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7813 if (TYPE_LENGTH (type) > 4)
7814 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7815 valbuf + INT_REGISTER_SIZE);
7819 internal_error (__FILE__, __LINE__,
7820 _("arm_extract_return_value: "
7821 "Floating point model not supported"));
7825 else if (TYPE_CODE (type) == TYPE_CODE_INT
7826 || TYPE_CODE (type) == TYPE_CODE_CHAR
7827 || TYPE_CODE (type) == TYPE_CODE_BOOL
7828 || TYPE_CODE (type) == TYPE_CODE_PTR
7829 || TYPE_CODE (type) == TYPE_CODE_REF
7830 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7832 /* If the type is a plain integer, then the access is
7833 straight-forward. Otherwise we have to play around a bit
7835 int len = TYPE_LENGTH (type);
7836 int regno = ARM_A1_REGNUM;
7841 /* By using store_unsigned_integer we avoid having to do
7842 anything special for small big-endian values. */
7843 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7844 store_unsigned_integer (valbuf,
7845 (len > INT_REGISTER_SIZE
7846 ? INT_REGISTER_SIZE : len),
7848 len -= INT_REGISTER_SIZE;
7849 valbuf += INT_REGISTER_SIZE;
7854 /* For a structure or union the behaviour is as if the value had
7855 been stored to word-aligned memory and then loaded into
7856 registers with 32-bit load instruction(s). */
7857 int len = TYPE_LENGTH (type);
7858 int regno = ARM_A1_REGNUM;
7859 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7863 regcache_cooked_read (regs, regno++, tmpbuf);
7864 memcpy (valbuf, tmpbuf,
7865 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7866 len -= INT_REGISTER_SIZE;
7867 valbuf += INT_REGISTER_SIZE;
7873 /* Will a function return an aggregate type in memory or in a
7874 register? Return 0 if an aggregate type can be returned in a
7875 register, 1 if it must be returned in memory. */
7878 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7880 enum type_code code;
7882 type = check_typedef (type);
7884 /* Simple, non-aggregate types (i.e. not including vectors and
7885 complex) are always returned in a register (or registers). */
7886 code = TYPE_CODE (type);
7887 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7888 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7891 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7893 /* Vector values should be returned using ARM registers if they
7894 are not over 16 bytes. */
7895 return (TYPE_LENGTH (type) > 16);
7898 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7900 /* The AAPCS says all aggregates not larger than a word are returned
7902 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7911 /* All aggregate types that won't fit in a register must be returned
7913 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7916 /* In the ARM ABI, "integer" like aggregate types are returned in
7917 registers. For an aggregate type to be integer like, its size
7918 must be less than or equal to INT_REGISTER_SIZE and the
7919 offset of each addressable subfield must be zero. Note that bit
7920 fields are not addressable, and all addressable subfields of
7921 unions always start at offset zero.
7923 This function is based on the behaviour of GCC 2.95.1.
7924 See: gcc/arm.c: arm_return_in_memory() for details.
7926 Note: All versions of GCC before GCC 2.95.2 do not set up the
7927 parameters correctly for a function returning the following
7928 structure: struct { float f;}; This should be returned in memory,
7929 not a register. Richard Earnshaw sent me a patch, but I do not
7930 know of any way to detect if a function like the above has been
7931 compiled with the correct calling convention. */
7933 /* Assume all other aggregate types can be returned in a register.
7934 Run a check for structures, unions and arrays. */
7937 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
7940 /* Need to check if this struct/union is "integer" like. For
7941 this to be true, its size must be less than or equal to
7942 INT_REGISTER_SIZE and the offset of each addressable
7943 subfield must be zero. Note that bit fields are not
7944 addressable, and unions always start at offset zero. If any
7945 of the subfields is a floating point type, the struct/union
7946 cannot be an integer type. */
7948 /* For each field in the object, check:
7949 1) Is it FP? --> yes, nRc = 1;
7950 2) Is it addressable (bitpos != 0) and
7951 not packed (bitsize == 0)?
7955 for (i = 0; i < TYPE_NFIELDS (type); i++)
7957 enum type_code field_type_code;
7960 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
7963 /* Is it a floating point type field? */
7964 if (field_type_code == TYPE_CODE_FLT)
7970 /* If bitpos != 0, then we have to care about it. */
7971 if (TYPE_FIELD_BITPOS (type, i) != 0)
7973 /* Bitfields are not addressable. If the field bitsize is
7974 zero, then the field is not packed. Hence it cannot be
7975 a bitfield or any other packed type. */
7976 if (TYPE_FIELD_BITSIZE (type, i) == 0)
7989 /* Write into appropriate registers a function return value of type
7990 TYPE, given in virtual format. */
7993 arm_store_return_value (struct type *type, struct regcache *regs,
7994 const gdb_byte *valbuf)
7996 struct gdbarch *gdbarch = get_regcache_arch (regs);
7997 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7999 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8001 gdb_byte buf[MAX_REGISTER_SIZE];
8003 switch (gdbarch_tdep (gdbarch)->fp_model)
8007 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8008 gdbarch_byte_order (gdbarch));
8009 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8012 case ARM_FLOAT_SOFT_FPA:
8013 case ARM_FLOAT_SOFT_VFP:
8014 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8015 not using the VFP ABI code. */
8017 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8018 if (TYPE_LENGTH (type) > 4)
8019 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8020 valbuf + INT_REGISTER_SIZE);
8024 internal_error (__FILE__, __LINE__,
8025 _("arm_store_return_value: Floating "
8026 "point model not supported"));
8030 else if (TYPE_CODE (type) == TYPE_CODE_INT
8031 || TYPE_CODE (type) == TYPE_CODE_CHAR
8032 || TYPE_CODE (type) == TYPE_CODE_BOOL
8033 || TYPE_CODE (type) == TYPE_CODE_PTR
8034 || TYPE_CODE (type) == TYPE_CODE_REF
8035 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8037 if (TYPE_LENGTH (type) <= 4)
8039 /* Values of one word or less are zero/sign-extended and
8041 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8042 LONGEST val = unpack_long (type, valbuf);
8044 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8045 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8049 /* Integral values greater than one word are stored in consecutive
8050 registers starting with r0. This will always be a multiple of
8051 the register size. */
8052 int len = TYPE_LENGTH (type);
8053 int regno = ARM_A1_REGNUM;
8057 regcache_cooked_write (regs, regno++, valbuf);
8058 len -= INT_REGISTER_SIZE;
8059 valbuf += INT_REGISTER_SIZE;
8065 /* For a structure or union the behaviour is as if the value had
8066 been stored to word-aligned memory and then loaded into
8067 registers with 32-bit load instruction(s). */
8068 int len = TYPE_LENGTH (type);
8069 int regno = ARM_A1_REGNUM;
8070 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8074 memcpy (tmpbuf, valbuf,
8075 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8076 regcache_cooked_write (regs, regno++, tmpbuf);
8077 len -= INT_REGISTER_SIZE;
8078 valbuf += INT_REGISTER_SIZE;
8084 /* Handle function return values. */
8086 static enum return_value_convention
8087 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8088 struct type *valtype, struct regcache *regcache,
8089 gdb_byte *readbuf, const gdb_byte *writebuf)
8091 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8092 struct type *func_type = function ? value_type (function) : NULL;
8093 enum arm_vfp_cprc_base_type vfp_base_type;
8096 if (arm_vfp_abi_for_function (gdbarch, func_type)
8097 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8099 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8100 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8102 for (i = 0; i < vfp_base_count; i++)
8104 if (reg_char == 'q')
8107 arm_neon_quad_write (gdbarch, regcache, i,
8108 writebuf + i * unit_length);
8111 arm_neon_quad_read (gdbarch, regcache, i,
8112 readbuf + i * unit_length);
8119 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8120 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8123 regcache_cooked_write (regcache, regnum,
8124 writebuf + i * unit_length);
8126 regcache_cooked_read (regcache, regnum,
8127 readbuf + i * unit_length);
8130 return RETURN_VALUE_REGISTER_CONVENTION;
8133 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8134 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8135 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8137 if (tdep->struct_return == pcc_struct_return
8138 || arm_return_in_memory (gdbarch, valtype))
8139 return RETURN_VALUE_STRUCT_CONVENTION;
8141 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8143 if (arm_return_in_memory (gdbarch, valtype))
8144 return RETURN_VALUE_STRUCT_CONVENTION;
8148 arm_store_return_value (valtype, regcache, writebuf);
8151 arm_extract_return_value (valtype, regcache, readbuf);
8153 return RETURN_VALUE_REGISTER_CONVENTION;
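/* Read the longjmp target PC out of the jump buffer whose address is passed in r0, and store it in *PC. */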
8158 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8160 struct gdbarch *gdbarch = get_frame_arch (frame);
8161 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8162 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8164 gdb_byte buf[INT_REGISTER_SIZE];
8166 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8168 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8172 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8176 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8177 return the target PC. Otherwise return 0. */
8180 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8184 CORE_ADDR start_addr;
8186 /* Find the starting address and name of the function containing the PC. */
8187 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8189 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8191 start_addr = arm_skip_bx_reg (frame, pc);
8192 if (start_addr != 0)
8198 /* If PC is in a Thumb call or return stub, return the address of the
8199 target PC, which is in a register. The thunk functions are called
8200 _call_via_xx, where xx is the register name. The possible names
8201 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8202 functions, named __ARM_call_via_r[0-7]. */
8203 if (startswith (name, "_call_via_")
8204 || startswith (name, "__ARM_call_via_"))
8206 /* Use the name suffix to determine which register contains the
8208 static char *table[15] =
8209 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8210 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8213 int offset = strlen (name) - 2;
8215 for (regno = 0; regno <= 14; regno++)
8216 if (strcmp (&name[offset], table[regno]) == 0)
8217 return get_frame_register_unsigned (frame, regno);
8220 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8221 non-interworking calls to foo. We could decode the stubs
8222 to find the target but it's easier to use the symbol table. */
8223 namelen = strlen (name);
8224 if (name[0] == '_' && name[1] == '_'
8225 && ((namelen > 2 + strlen ("_from_thumb")
8226 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8227 || (namelen > 2 + strlen ("_from_arm")
8228 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8231 int target_len = namelen - 2;
8232 struct bound_minimal_symbol minsym;
8233 struct objfile *objfile;
8234 struct obj_section *sec;
8236 if (name[namelen - 1] == 'b')
8237 target_len -= strlen ("_from_thumb");
8239 target_len -= strlen ("_from_arm");
8241 target_name = (char *) alloca (target_len + 1);
8242 memcpy (target_name, name + 2, target_len);
8243 target_name[target_len] = '\0';
8245 sec = find_pc_section (pc);
8246 objfile = (sec == NULL) ? NULL : sec->objfile;
8247 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8248 if (minsym.minsym != NULL)
8249 return BMSYMBOL_VALUE_ADDRESS (minsym);
8254 return 0; /* not a stub */
8258 set_arm_command (char *args, int from_tty)
8260 printf_unfiltered (_("\
8261 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8262 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8266 show_arm_command (char *args, int from_tty)
8268 cmd_show_list (showarmcmdlist, from_tty, "");
8272 arm_update_current_architecture (void)
8274 struct gdbarch_info info;
8276 /* If the current architecture is not ARM, we have nothing to do. */
8277 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8280 /* Update the architecture. */
8281 gdbarch_info_init (&info);
8283 if (!gdbarch_update_p (info))
8284 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8288 set_fp_model_sfunc (char *args, int from_tty,
8289 struct cmd_list_element *c)
8293 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8294 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8296 arm_fp_model = (enum arm_float_model) fp_model;
8300 if (fp_model == ARM_FLOAT_LAST)
8301 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8304 arm_update_current_architecture ();
8308 show_fp_model (struct ui_file *file, int from_tty,
8309 struct cmd_list_element *c, const char *value)
8311 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8313 if (arm_fp_model == ARM_FLOAT_AUTO
8314 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8315 fprintf_filtered (file, _("\
8316 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8317 fp_model_strings[tdep->fp_model]);
8319 fprintf_filtered (file, _("\
8320 The current ARM floating point model is \"%s\".\n"),
8321 fp_model_strings[arm_fp_model]);
8325 arm_set_abi (char *args, int from_tty,
8326 struct cmd_list_element *c)
8330 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8331 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8333 arm_abi_global = (enum arm_abi_kind) arm_abi;
8337 if (arm_abi == ARM_ABI_LAST)
8338 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8341 arm_update_current_architecture ();
8345 arm_show_abi (struct ui_file *file, int from_tty,
8346 struct cmd_list_element *c, const char *value)
8348 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8350 if (arm_abi_global == ARM_ABI_AUTO
8351 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8352 fprintf_filtered (file, _("\
8353 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8354 arm_abi_strings[tdep->arm_abi]);
8356 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8361 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8362 struct cmd_list_element *c, const char *value)
8364 fprintf_filtered (file,
8365 _("The current execution mode assumed "
8366 "(when symbols are unavailable) is \"%s\".\n"),
8367 arm_fallback_mode_string);
8371 arm_show_force_mode (struct ui_file *file, int from_tty,
8372 struct cmd_list_element *c, const char *value)
8374 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8376 fprintf_filtered (file,
8377 _("The current execution mode assumed "
8378 "(even when symbols are available) is \"%s\".\n"),
8379 arm_force_mode_string);
8382 /* If the user changes the register disassembly style used for info
8383 register and other commands, we have to also switch the style used
8384 in opcodes for disassembly output. This function is run in the "set
8385 arm disassembly" command, and does that. */
8388 set_disassembly_style_sfunc (char *args, int from_tty,
8389 struct cmd_list_element *c)
8391 set_disassembly_style ();
8394 /* Return the ARM register name corresponding to register I. */
8396 arm_register_name (struct gdbarch *gdbarch, int i)
8398 const int num_regs = gdbarch_num_regs (gdbarch);
8400 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8401 && i >= num_regs && i < num_regs + 32)
8403 static const char *const vfp_pseudo_names[] = {
8404 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8405 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8406 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8407 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8410 return vfp_pseudo_names[i - num_regs];
8413 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8414 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8416 static const char *const neon_pseudo_names[] = {
8417 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8418 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8421 return neon_pseudo_names[i - num_regs - 32];
8424 if (i >= ARRAY_SIZE (arm_register_names))
8425 /* These registers are only supported on targets which supply
8426 an XML description. */
8429 return arm_register_names[i];
8433 set_disassembly_style (void)
8437 /* Find the style that the user wants. */
8438 for (current = 0; current < num_disassembly_options; current++)
8439 if (disassembly_style == valid_disassembly_styles[current])
8441 gdb_assert (current < num_disassembly_options);
8443 /* Synchronize the disassembler. */
8444 set_arm_regname_option (current);
8447 /* Test whether the COFF symbol-specific value corresponds to a Thumb function. */
8451 coff_sym_is_thumb (int val)
8453 return (val == C_THUMBEXT
8454 || val == C_THUMBSTAT
8455 || val == C_THUMBEXTFUNC
8456 || val == C_THUMBSTATFUNC
8457 || val == C_THUMBLABEL);
8460 /* arm_coff_make_msymbol_special()
8461 arm_elf_make_msymbol_special()
8463 These functions test whether the COFF or ELF symbol corresponds to
8464 an address in thumb code, and set a "special" bit in a minimal
8465 symbol to indicate that it does. */
8468 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8470 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
8471 == ST_BRANCH_TO_THUMB)
8472 MSYMBOL_SET_SPECIAL (msym);
8476 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8478 if (coff_sym_is_thumb (val))
8479 MSYMBOL_SET_SPECIAL (msym);
8483 arm_objfile_data_free (struct objfile *objfile, void *arg)
8485 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8488 for (i = 0; i < objfile->obfd->section_count; i++)
8489 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
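/* Record an ARM ELF mapping symbol in the per-objfile, per-section maps.
   Per the ARM ELF conventions, "$a" marks the start of a run of ARM
   instructions, "$t" the start of Thumb instructions, and "$d" the start
   of literal data; these maps are what later lets the debugger guess
   whether a given address holds ARM code, Thumb code or data when no
   other information is available.  */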
8493 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8496 const char *name = bfd_asymbol_name (sym);
8497 struct arm_per_objfile *data;
8498 VEC(arm_mapping_symbol_s) **map_p;
8499 struct arm_mapping_symbol new_map_sym;
8501 gdb_assert (name[0] == '$');
8502 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8505 data = (struct arm_per_objfile *) objfile_data (objfile,
8506 arm_objfile_data_key);
8509 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8510 struct arm_per_objfile);
8511 set_objfile_data (objfile, arm_objfile_data_key, data);
8512 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8513 objfile->obfd->section_count,
8514 VEC(arm_mapping_symbol_s) *);
8516 map_p = &data->section_maps[bfd_get_section (sym)->index];
8518 new_map_sym.value = sym->value;
8519 new_map_sym.type = name[1];
8521 /* Assume that most mapping symbols appear in order of increasing
8522 value. If they were randomly distributed, it would be faster to
8523 always push here and then sort at first use. */
8524 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8526 struct arm_mapping_symbol *prev_map_sym;
8528 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8529 if (prev_map_sym->value >= sym->value)
8532 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8533 arm_compare_mapping_symbols);
8534 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8539 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8543 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8545 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8546 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8548 /* If necessary, set the T bit. */
8551 ULONGEST val, t_bit;
8552 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8553 t_bit = arm_psr_thumb_bit (gdbarch);
8554 if (arm_pc_is_thumb (gdbarch, pc))
8555 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8558 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8563 /* Read the contents of a NEON quad register, by reading from two
8564 double registers. This is used to implement the quad pseudo
8565 registers, and for argument passing in case the quad registers are
8566 missing; vectors are passed in quad registers when using the VFP
8567 ABI, even if a NEON unit is not present. REGNUM is the index of
8568 the quad register, in [0, 15]. */
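/* For example, q1 occupies the same bytes as the pair d2/d3 (the quad
   register q<N> always maps to d<2N> and d<2N+1>, hence the "regnum << 1"
   below): on a little-endian target d2 supplies bytes 0-7 of the quad
   value and d3 bytes 8-15, while on a big-endian target the two halves
   are read in the opposite order, which is what the OFFSET handling in
   this function implements.  */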
8570 static enum register_status
8571 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8572 int regnum, gdb_byte *buf)
8575 gdb_byte reg_buf[8];
8576 int offset, double_regnum;
8577 enum register_status status;
8579 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8580 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8583 /* d0 is always the least significant half of q0. */
8584 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8589 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8590 if (status != REG_VALID)
8592 memcpy (buf + offset, reg_buf, 8);
8594 offset = 8 - offset;
8595 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8596 if (status != REG_VALID)
8598 memcpy (buf + offset, reg_buf, 8);
8603 static enum register_status
8604 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8605 int regnum, gdb_byte *buf)
8607 const int num_regs = gdbarch_num_regs (gdbarch);
8609 gdb_byte reg_buf[8];
8610 int offset, double_regnum;
8612 gdb_assert (regnum >= num_regs);
8615 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8616 /* Quad-precision register. */
8617 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8620 enum register_status status;
8622 /* Single-precision register. */
8623 gdb_assert (regnum < 32);
8625 /* s0 is always the least significant half of d0. */
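/* For example, s5 maps to the upper half of d2: 5 >> 1 == 2 selects d2,
   and the odd register number selects the upper four bytes (offset 4 on
   a little-endian target, offset 0 on a big-endian one).  */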
8626 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8627 offset = (regnum & 1) ? 0 : 4;
8629 offset = (regnum & 1) ? 4 : 0;
8631 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8632 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8635 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8636 if (status == REG_VALID)
8637 memcpy (buf, reg_buf + offset, 4);
8642 /* Store the contents of BUF to a NEON quad register, by writing to
8643 two double registers. This is used to implement the quad pseudo
8644 registers, and for argument passing in case the quad registers are
8645 missing; vectors are passed in quad registers when using the VFP
8646 ABI, even if a NEON unit is not present. REGNUM is the index
8647 of the quad register, in [0, 15]. */
8650 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8651 int regnum, const gdb_byte *buf)
8654 int offset, double_regnum;
8656 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8657 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8660 /* d0 is always the least significant half of q0. */
8661 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8666 regcache_raw_write (regcache, double_regnum, buf + offset);
8667 offset = 8 - offset;
8668 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8672 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8673 int regnum, const gdb_byte *buf)
8675 const int num_regs = gdbarch_num_regs (gdbarch);
8677 gdb_byte reg_buf[8];
8678 int offset, double_regnum;
8680 gdb_assert (regnum >= num_regs);
8683 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8684 /* Quad-precision register. */
8685 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8688 /* Single-precision register. */
8689 gdb_assert (regnum < 32);
8691 /* s0 is always the least significant half of d0. */
8692 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8693 offset = (regnum & 1) ? 0 : 4;
8695 offset = (regnum & 1) ? 4 : 0;
8697 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8698 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8701 regcache_raw_read (regcache, double_regnum, reg_buf);
8702 memcpy (reg_buf + offset, buf, 4);
8703 regcache_raw_write (regcache, double_regnum, reg_buf);
8707 static struct value *
8708 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8710 const int *reg_p = (const int *) baton;
8711 return value_of_register (*reg_p, frame);
8714 static enum gdb_osabi
8715 arm_elf_osabi_sniffer (bfd *abfd)
8717 unsigned int elfosabi;
8718 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8720 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8722 if (elfosabi == ELFOSABI_ARM)
8723 /* GNU tools use this value. Check note sections in this case, as well. */
8725 bfd_map_over_sections (abfd,
8726 generic_elf_osabi_sniff_abi_tag_sections,
8729 /* Anything else will be handled by the generic ELF sniffer. */
8734 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8735 struct reggroup *group)
8737 /* The FPS register's type is INT, but it belongs to float_reggroup. Besides
8738 this, the FPS register belongs to save_reggroup, restore_reggroup, and
8739 all_reggroup, of course. */
8740 if (regnum == ARM_FPS_REGNUM)
8741 return (group == float_reggroup
8742 || group == save_reggroup
8743 || group == restore_reggroup
8744 || group == all_reggroup);
8746 return default_register_reggroup_p (gdbarch, regnum, group);
8750 /* For backward-compatibility we allow two 'g' packet lengths with
8751 the remote protocol depending on whether FPA registers are
8752 supplied. M-profile targets do not have FPA registers, but some
8753 stubs already exist in the wild which use a 'g' packet which
8754 supplies them albeit with dummy values. The packet format which
8755 includes FPA registers should be considered deprecated for
8756 M-profile targets. */
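/* For reference, assuming the sizes this port uses elsewhere
   (INT_REGISTER_SIZE == 4, FP_REGISTER_SIZE == 12, VFP_REGISTER_SIZE == 8
   -- an assumption stated here, not derived from this function), the three
   guesses registered below come to 16*4 + 8*12 + 2*4 = 168 bytes for the
   FPA-style layout, 16*4 + 4 = 68 bytes for the plain M-profile layout,
   and 16*4 + 16*8 + 2*4 = 200 bytes for M-profile plus VFP.  */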
8759 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8761 if (gdbarch_tdep (gdbarch)->is_m)
8763 /* If we know from the executable this is an M-profile target,
8764 cater for remote targets whose register set layout is the
8765 same as the FPA layout. */
8766 register_remote_g_packet_guess (gdbarch,
8767 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8768 (16 * INT_REGISTER_SIZE)
8769 + (8 * FP_REGISTER_SIZE)
8770 + (2 * INT_REGISTER_SIZE),
8771 tdesc_arm_with_m_fpa_layout);
8773 /* The regular M-profile layout. */
8774 register_remote_g_packet_guess (gdbarch,
8775 /* r0-r12,sp,lr,pc; xpsr */
8776 (16 * INT_REGISTER_SIZE)
8777 + INT_REGISTER_SIZE,
8780 /* M-profile plus M4F VFP. */
8781 register_remote_g_packet_guess (gdbarch,
8782 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8783 (16 * INT_REGISTER_SIZE)
8784 + (16 * VFP_REGISTER_SIZE)
8785 + (2 * INT_REGISTER_SIZE),
8786 tdesc_arm_with_m_vfp_d16);
8789 /* Otherwise we don't have a useful guess. */
8793 /* Initialize the current architecture based on INFO. If possible,
8794 re-use an architecture from ARCHES, which is a list of
8795 architectures already created during this debugging session.
8797 Called e.g. at program startup, when reading a core file, and when
8798 reading a binary file. */
8800 static struct gdbarch *
8801 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8803 struct gdbarch_tdep *tdep;
8804 struct gdbarch *gdbarch;
8805 struct gdbarch_list *best_arch;
8806 enum arm_abi_kind arm_abi = arm_abi_global;
8807 enum arm_float_model fp_model = arm_fp_model;
8808 struct tdesc_arch_data *tdesc_data = NULL;
8810 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8811 int have_wmmx_registers = 0;
8813 int have_fpa_registers = 1;
8814 const struct target_desc *tdesc = info.target_desc;
8816 /* If we have an object to base this architecture on, try to determine its ABI. */
8819 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8821 int ei_osabi, e_flags;
8823 switch (bfd_get_flavour (info.abfd))
8825 case bfd_target_aout_flavour:
8826 /* Assume it's an old APCS-style ABI. */
8827 arm_abi = ARM_ABI_APCS;
8830 case bfd_target_coff_flavour:
8831 /* Assume it's an old APCS-style ABI. */
8833 arm_abi = ARM_ABI_APCS;
8836 case bfd_target_elf_flavour:
8837 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8838 e_flags = elf_elfheader (info.abfd)->e_flags;
8840 if (ei_osabi == ELFOSABI_ARM)
8842 /* GNU tools used to use this value, but do not for EABI
8843 objects. There's nowhere to tag an EABI version
8844 anyway, so assume APCS. */
8845 arm_abi = ARM_ABI_APCS;
8847 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8849 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8850 int attr_arch, attr_profile;
8854 case EF_ARM_EABI_UNKNOWN:
8855 /* Assume GNU tools. */
8856 arm_abi = ARM_ABI_APCS;
8859 case EF_ARM_EABI_VER4:
8860 case EF_ARM_EABI_VER5:
8861 arm_abi = ARM_ABI_AAPCS;
8862 /* EABI binaries default to VFP float ordering.
8863 They may also contain build attributes that can
8864 be used to identify if the VFP argument-passing ABI is in use. */
8866 if (fp_model == ARM_FLOAT_AUTO)
8869 switch (bfd_elf_get_obj_attr_int (info.abfd,
8873 case AEABI_VFP_args_base:
8874 /* "The user intended FP parameter/result
8875 passing to conform to AAPCS, base variant". */
8877 fp_model = ARM_FLOAT_SOFT_VFP;
8879 case AEABI_VFP_args_vfp:
8880 /* "The user intended FP parameter/result
8881 passing to conform to AAPCS, VFP variant". */
8883 fp_model = ARM_FLOAT_VFP;
8885 case AEABI_VFP_args_toolchain:
8886 /* "The user intended FP parameter/result
8887 passing to conform to tool chain-specific
8888 conventions" - we don't know any such
8889 conventions, so leave it as "auto". */
8891 case AEABI_VFP_args_compatible:
8892 /* "Code is compatible with both the base
8893 and VFP variants; the user did not permit
8894 non-variadic functions to pass FP
8895 parameters/results" - leave it as "auto". */
8899 /* Attribute value not mentioned in the
8900 November 2012 ABI, so leave it as "auto". */
8905 fp_model = ARM_FLOAT_SOFT_VFP;
8911 /* Leave it as "auto". */
8912 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
8917 /* Detect M-profile programs. This only works if the
8918 executable file includes build attributes; GCC does
8919 copy them to the executable, but e.g. RealView does not. */
8921 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
8923 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
8925 Tag_CPU_arch_profile);
8926 /* GCC specifies the profile for v6-M; RealView only
8927 specifies the profile for architectures starting with
8928 V7 (as opposed to architectures with a tag
8929 numerically greater than TAG_CPU_ARCH_V7). */
8930 if (!tdesc_has_registers (tdesc)
8931 && (attr_arch == TAG_CPU_ARCH_V6_M
8932 || attr_arch == TAG_CPU_ARCH_V6S_M
8933 || attr_profile == 'M'))
8938 if (fp_model == ARM_FLOAT_AUTO)
8940 int e_flags = elf_elfheader (info.abfd)->e_flags;
8942 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
8945 /* Leave it as "auto". Strictly speaking this case
8946 means FPA, but almost nobody uses that now, and
8947 many toolchains fail to set the appropriate bits
8948 for the floating-point model they use. */
8950 case EF_ARM_SOFT_FLOAT:
8951 fp_model = ARM_FLOAT_SOFT_FPA;
8953 case EF_ARM_VFP_FLOAT:
8954 fp_model = ARM_FLOAT_VFP;
8956 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
8957 fp_model = ARM_FLOAT_SOFT_VFP;
8962 if (e_flags & EF_ARM_BE8)
8963 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
8968 /* Leave it as "auto". */
8973 /* Check any target description for validity. */
8974 if (tdesc_has_registers (tdesc))
8976 /* For most registers we require GDB's default names; but also allow
8977 the numeric names for sp / lr / pc, as a convenience. */
8978 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
8979 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
8980 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
8982 const struct tdesc_feature *feature;
8985 feature = tdesc_find_feature (tdesc,
8986 "org.gnu.gdb.arm.core");
8987 if (feature == NULL)
8989 feature = tdesc_find_feature (tdesc,
8990 "org.gnu.gdb.arm.m-profile");
8991 if (feature == NULL)
8997 tdesc_data = tdesc_data_alloc ();
9000 for (i = 0; i < ARM_SP_REGNUM; i++)
9001 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9002 arm_register_names[i]);
9003 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9006 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9009 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9013 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9014 ARM_PS_REGNUM, "xpsr");
9016 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9017 ARM_PS_REGNUM, "cpsr");
9021 tdesc_data_cleanup (tdesc_data);
9025 feature = tdesc_find_feature (tdesc,
9026 "org.gnu.gdb.arm.fpa");
9027 if (feature != NULL)
9030 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9031 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9032 arm_register_names[i]);
9035 tdesc_data_cleanup (tdesc_data);
9040 have_fpa_registers = 0;
9042 feature = tdesc_find_feature (tdesc,
9043 "org.gnu.gdb.xscale.iwmmxt");
9044 if (feature != NULL)
9046 static const char *const iwmmxt_names[] = {
9047 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9048 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9049 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9050 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9054 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9056 &= tdesc_numbered_register (feature, tdesc_data, i,
9057 iwmmxt_names[i - ARM_WR0_REGNUM]);
9059 /* Check for the control registers, but do not fail if they are missing. */
9061 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9062 tdesc_numbered_register (feature, tdesc_data, i,
9063 iwmmxt_names[i - ARM_WR0_REGNUM]);
9065 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9067 &= tdesc_numbered_register (feature, tdesc_data, i,
9068 iwmmxt_names[i - ARM_WR0_REGNUM]);
9072 tdesc_data_cleanup (tdesc_data);
9076 have_wmmx_registers = 1;
9079 /* If we have a VFP unit, check whether the single precision registers
9080 are present. If not, then we will synthesize them as pseudo registers. */
9082 feature = tdesc_find_feature (tdesc,
9083 "org.gnu.gdb.arm.vfp");
9084 if (feature != NULL)
9086 static const char *const vfp_double_names[] = {
9087 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9088 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9089 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9090 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9093 /* Require the double precision registers. There must be either 16 or 32. */
9096 for (i = 0; i < 32; i++)
9098 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9100 vfp_double_names[i]);
9104 if (!valid_p && i == 16)
9107 /* Also require FPSCR. */
9108 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9109 ARM_FPSCR_REGNUM, "fpscr");
9112 tdesc_data_cleanup (tdesc_data);
9116 if (tdesc_unnumbered_register (feature, "s0") == 0)
9117 have_vfp_pseudos = 1;
9119 vfp_register_count = i;
9121 /* If we have VFP, also check for NEON. The architecture allows
9122 NEON without VFP (integer vector operations only), but GDB
9123 does not support that. */
9124 feature = tdesc_find_feature (tdesc,
9125 "org.gnu.gdb.arm.neon");
9126 if (feature != NULL)
9128 /* NEON requires 32 double-precision registers. */
9131 tdesc_data_cleanup (tdesc_data);
9135 /* If there are quad registers defined by the stub, use
9136 their type; otherwise (normally) provide them with
9137 the default type. */
9138 if (tdesc_unnumbered_register (feature, "q0") == 0)
9139 have_neon_pseudos = 1;
9146 /* If there is already a candidate, use it. */
9147 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9149 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9151 if (arm_abi != ARM_ABI_AUTO
9152 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9155 if (fp_model != ARM_FLOAT_AUTO
9156 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9159 /* There are various other properties in tdep that we do not
9160 need to check here: those derived from a target description,
9161 since gdbarches with a different target description are
9162 automatically disqualified. */
9164 /* Do check is_m, though, since it might come from the binary. */
9165 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9168 /* Found a match. */
9172 if (best_arch != NULL)
9174 if (tdesc_data != NULL)
9175 tdesc_data_cleanup (tdesc_data);
9176 return best_arch->gdbarch;
9179 tdep = XCNEW (struct gdbarch_tdep);
9180 gdbarch = gdbarch_alloc (&info, tdep);
9182 /* Record additional information about the architecture we are defining.
9183 These are gdbarch discriminators, like the OSABI. */
9184 tdep->arm_abi = arm_abi;
9185 tdep->fp_model = fp_model;
9187 tdep->have_fpa_registers = have_fpa_registers;
9188 tdep->have_wmmx_registers = have_wmmx_registers;
9189 gdb_assert (vfp_register_count == 0
9190 || vfp_register_count == 16
9191 || vfp_register_count == 32);
9192 tdep->vfp_register_count = vfp_register_count;
9193 tdep->have_vfp_pseudos = have_vfp_pseudos;
9194 tdep->have_neon_pseudos = have_neon_pseudos;
9195 tdep->have_neon = have_neon;
9197 arm_register_g_packet_guesses (gdbarch);
9200 switch (info.byte_order_for_code)
9202 case BFD_ENDIAN_BIG:
9203 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9204 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9205 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9206 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9210 case BFD_ENDIAN_LITTLE:
9211 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9212 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9213 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9214 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9219 internal_error (__FILE__, __LINE__,
9220 _("arm_gdbarch_init: bad byte order for float format"));
9223 /* On ARM targets char defaults to unsigned. */
9224 set_gdbarch_char_signed (gdbarch, 0);
9226 /* Note: for displaced stepping, this includes the breakpoint, and one word
9227 of additional scratch space. This setting isn't used for anything besides
9228 displaced stepping at present. */
9229 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9231 /* This should be low enough for everything. */
9232 tdep->lowest_pc = 0x20;
9233 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9235 /* The default, for both APCS and AAPCS, is to return small
9236 structures in registers. */
9237 tdep->struct_return = reg_struct_return;
9239 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9240 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9242 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9244 /* Frame handling. */
9245 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9246 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9247 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9249 frame_base_set_default (gdbarch, &arm_normal_base);
9251 /* Address manipulation. */
9252 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9254 /* Advance PC across function entry code. */
9255 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9257 /* Detect whether PC is at a point where the stack has been destroyed. */
9258 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9260 /* Skip trampolines. */
9261 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9263 /* The stack grows downward. */
9264 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9266 /* Breakpoint manipulation. */
9267 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9268 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9269 arm_remote_breakpoint_from_pc);
9271 /* Information about registers, etc. */
9272 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9273 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9274 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9275 set_gdbarch_register_type (gdbarch, arm_register_type);
9276 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9278 /* This "info float" is FPA-specific. Use the generic version if we do not have FPA registers. */
9280 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9281 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9283 /* Internal <-> external register number maps. */
9284 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9285 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9287 set_gdbarch_register_name (gdbarch, arm_register_name);
9289 /* Returning results. */
9290 set_gdbarch_return_value (gdbarch, arm_return_value);
9293 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9295 /* Minsymbol frobbing. */
9296 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9297 set_gdbarch_coff_make_msymbol_special (gdbarch,
9298 arm_coff_make_msymbol_special);
9299 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9301 /* Thumb-2 IT block support. */
9302 set_gdbarch_adjust_breakpoint_address (gdbarch,
9303 arm_adjust_breakpoint_address);
9305 /* Virtual tables. */
9306 set_gdbarch_vbit_in_delta (gdbarch, 1);
9308 /* Hook in the ABI-specific overrides, if they have been registered. */
9309 gdbarch_init_osabi (info, gdbarch);
9311 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9313 /* Add some default predicates. */
9315 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9316 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9317 dwarf2_append_unwinders (gdbarch);
9318 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9319 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9321 /* Now we have tuned the configuration, set a few final things,
9322 based on what the OS ABI has told us. */
9324 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9325 binaries are always marked. */
9326 if (tdep->arm_abi == ARM_ABI_AUTO)
9327 tdep->arm_abi = ARM_ABI_APCS;
9329 /* Watchpoints are not steppable. */
9330 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9332 /* We used to default to FPA for generic ARM, but almost nobody
9333 uses that now, and we now provide a way for the user to force
9334 the model. So default to the most useful variant. */
9335 if (tdep->fp_model == ARM_FLOAT_AUTO)
9336 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9338 if (tdep->jb_pc >= 0)
9339 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9341 /* Floating point sizes and format. */
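/* The FPA unit stored a double as two 4-byte little-endian words with the
   most significant word first, which is why the softfpa and fpa models
   below select the "littlebyte_bigword" double format rather than the
   plain IEEE layout.  */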
9342 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9343 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9345 set_gdbarch_double_format
9346 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9347 set_gdbarch_long_double_format
9348 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9352 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9353 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9356 if (have_vfp_pseudos)
9358 /* NOTE: These are the only pseudo registers used by
9359 the ARM target at the moment. If more are added, a
9360 little more care in numbering will be needed. */
9362 int num_pseudos = 32;
9363 if (have_neon_pseudos)
9365 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9366 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9367 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9372 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9374 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9376 /* Override tdesc_register_type to adjust the types of VFP
9377 registers for NEON. */
9378 set_gdbarch_register_type (gdbarch, arm_register_type);
9381 /* Add standard register aliases. We add aliases even for those
9382 names which are used by the current architecture - it's simpler,
9383 and does no harm, since nothing ever lists user registers. */
9384 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9385 user_reg_add (gdbarch, arm_register_aliases[i].name,
9386 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9392 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9394 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9399 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9400 (unsigned long) tdep->lowest_pc);
9403 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9406 _initialize_arm_tdep (void)
9408 struct ui_file *stb;
9410 struct cmd_list_element *new_set, *new_show;
9411 const char *setname;
9412 const char *setdesc;
9413 const char *const *regnames;
9415 static char *helptext;
9416 char regdesc[1024], *rdptr = regdesc;
9417 size_t rest = sizeof (regdesc);
9419 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9421 arm_objfile_data_key
9422 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9424 /* Add ourselves to objfile event chain. */
9425 observer_attach_new_objfile (arm_exidx_new_objfile);
9427 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9429 /* Register an ELF OS ABI sniffer for ARM binaries. */
9430 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9431 bfd_target_elf_flavour,
9432 arm_elf_osabi_sniffer);
9434 /* Initialize the standard target descriptions. */
9435 initialize_tdesc_arm_with_m ();
9436 initialize_tdesc_arm_with_m_fpa_layout ();
9437 initialize_tdesc_arm_with_m_vfp_d16 ();
9438 initialize_tdesc_arm_with_iwmmxt ();
9439 initialize_tdesc_arm_with_vfpv2 ();
9440 initialize_tdesc_arm_with_vfpv3 ();
9441 initialize_tdesc_arm_with_neon ();
9443 /* Get the number of possible sets of register names defined in opcodes. */
9444 num_disassembly_options = get_arm_regname_num_options ();
9446 /* Add root prefix command for all "set arm"/"show arm" commands. */
9447 add_prefix_cmd ("arm", no_class, set_arm_command,
9448 _("Various ARM-specific commands."),
9449 &setarmcmdlist, "set arm ", 0, &setlist);
9451 add_prefix_cmd ("arm", no_class, show_arm_command,
9452 _("Various ARM-specific commands."),
9453 &showarmcmdlist, "show arm ", 0, &showlist);
9455 /* Sync the opcode insn printer with our register viewer. */
9456 parse_arm_disassembler_option ("reg-names-std");
9458 /* Initialize the array that will be passed to
9459 add_setshow_enum_cmd(). */
9460 valid_disassembly_styles = XNEWVEC (const char *,
9461 num_disassembly_options + 1);
9462 for (i = 0; i < num_disassembly_options; i++)
9464 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
9465 valid_disassembly_styles[i] = setname;
9466 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9469 /* When we find the default names, tell the disassembler to use them. */
9471 if (!strcmp (setname, "std"))
9473 disassembly_style = setname;
9474 set_arm_regname_option (i);
9477 /* Mark the end of valid options. */
9478 valid_disassembly_styles[num_disassembly_options] = NULL;
9480 /* Create the help text. */
9481 stb = mem_fileopen ();
9482 fprintf_unfiltered (stb, "%s%s%s",
9483 _("The valid values are:\n"),
9485 _("The default is \"std\"."));
9486 helptext = ui_file_xstrdup (stb, NULL);
9487 ui_file_delete (stb);
9489 add_setshow_enum_cmd("disassembler", no_class,
9490 valid_disassembly_styles, &disassembly_style,
9491 _("Set the disassembly style."),
9492 _("Show the disassembly style."),
9494 set_disassembly_style_sfunc,
9495 NULL, /* FIXME: i18n: The disassembly style is
9497 &setarmcmdlist, &showarmcmdlist);
9499 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9500 _("Set usage of ARM 32-bit mode."),
9501 _("Show usage of ARM 32-bit mode."),
9502 _("When off, a 26-bit PC will be used."),
9504 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9506 &setarmcmdlist, &showarmcmdlist);
9508 /* Add a command to allow the user to force the FPU model. */
9509 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9510 _("Set the floating point type."),
9511 _("Show the floating point type."),
9512 _("auto - Determine the FP type from the OS-ABI.\n\
9513 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9514 fpa - FPA co-processor (GCC compiled).\n\
9515 softvfp - Software FP with pure-endian doubles.\n\
9516 vfp - VFP co-processor."),
9517 set_fp_model_sfunc, show_fp_model,
9518 &setarmcmdlist, &showarmcmdlist);
9520 /* Add a command to allow the user to force the ABI. */
9521 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9524 NULL, arm_set_abi, arm_show_abi,
9525 &setarmcmdlist, &showarmcmdlist);
9527 /* Add two commands to allow the user to force the assumed execution mode. */
9529 add_setshow_enum_cmd ("fallback-mode", class_support,
9530 arm_mode_strings, &arm_fallback_mode_string,
9531 _("Set the mode assumed when symbols are unavailable."),
9532 _("Show the mode assumed when symbols are unavailable."),
9533 NULL, NULL, arm_show_fallback_mode,
9534 &setarmcmdlist, &showarmcmdlist);
9535 add_setshow_enum_cmd ("force-mode", class_support,
9536 arm_mode_strings, &arm_force_mode_string,
9537 _("Set the mode assumed even when symbols are available."),
9538 _("Show the mode assumed even when symbols are available."),
9539 NULL, NULL, arm_show_force_mode,
9540 &setarmcmdlist, &showarmcmdlist);
9542 /* Debugging flag. */
9543 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9544 _("Set ARM debugging."),
9545 _("Show ARM debugging."),
9546 _("When on, arm-specific debugging is enabled."),
9548 NULL, /* FIXME: i18n: "ARM debugging is %s. */
9549 &setdebuglist, &showdebuglist);
9552 /* ARM-reversible process record data structures. */
9554 #define ARM_INSN_SIZE_BYTES 4
9555 #define THUMB_INSN_SIZE_BYTES 2
9556 #define THUMB2_INSN_SIZE_BYTES 4
9559 /* Position of the bit within a 32-bit ARM instruction
9560 that defines whether the instruction is a load or store. */
9561 #define INSN_S_L_BIT_NUM 20
9563 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9566 unsigned int reg_len = LENGTH; \
9569 REGS = XNEWVEC (uint32_t, reg_len); \
9570 memcpy (&REGS[0], &RECORD_BUF[0], sizeof(uint32_t) * LENGTH); \
9575 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9578 unsigned int mem_len = LENGTH; \
9581 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9582 memcpy(&MEMS->len, &RECORD_BUF[0], \
9583 sizeof(struct arm_mem_r) * LENGTH); \
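/* Typical usage, as seen in the decode routines below: a routine collects
   the numbers of the registers it will modify in a local record_buf[]
   array, and <length, address> pairs for the memory it will modify in
   record_buf_mem[], then calls
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
   to copy both lists into the heap-allocated instruction record.  */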
9588 /* Returns non-zero if the insn has already been recorded, i.e. if any register or memory records have been collected for it. */
9589 #define INSN_RECORDED(ARM_RECORD) \
9590 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9592 /* ARM memory record structure. */
9595 uint32_t len; /* Record length. */
9596 uint32_t addr; /* Memory address. */
9599 /* An ARM instruction record contains the opcode and execution state
9600 of the current insn (filled in before entry to decode_insn ()),
9601 and the list of to-be-modified registers and memory blocks
9602 (filled in on return from decode_insn ()). */
9604 typedef struct insn_decode_record_t
9606 struct gdbarch *gdbarch;
9607 struct regcache *regcache;
9608 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9609 uint32_t arm_insn; /* Should accommodate thumb. */
9610 uint32_t cond; /* Condition code. */
9611 uint32_t opcode; /* Insn opcode. */
9612 uint32_t decode; /* Insn decode bits. */
9613 uint32_t mem_rec_count; /* No of mem records. */
9614 uint32_t reg_rec_count; /* No of reg records. */
9615 uint32_t *arm_regs; /* Registers to be saved for this record. */
9616 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9617 } insn_decode_record;
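/* Rough flow (a summary, not lifted verbatim from the code): the decode
   routines below fill in reg_rec_count/arm_regs and mem_rec_count/arm_mems
   for the insn at this_addr, and the process-record machinery then saves
   the old contents of those registers and memory blocks before the insn
   executes, so that reverse execution can restore them later.  */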
9620 /* Checks ARM SBZ and SBO mandatory fields. */
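/* "SBO" stands for "should be one" and "SBZ" for "should be zero": some
   encodings require a LEN-bit field starting at BIT_NUM to be all ones or
   all zeros, and this helper checks that the field extracted from INSN
   matches the value selected by SBO.  */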
9623 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9625 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9644 enum arm_record_result
9646 ARM_RECORD_SUCCESS = 0,
9647 ARM_RECORD_FAILURE = 1
9654 } arm_record_strx_t;
9665 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9666 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9669 struct regcache *reg_cache = arm_insn_r->regcache;
9670 ULONGEST u_regval[2]= {0};
9672 uint32_t reg_src1 = 0, reg_src2 = 0;
9673 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9674 uint32_t opcode1 = 0;
9676 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9677 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
9678 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
9681 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9683 /* 1) Handle misc store, immediate offset. */
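/* The 8-bit offset of these misc load/store encodings is split across the
   instruction: bits 8-11 hold the high nibble (imm4H) and bits 0-3 the low
   nibble (imm4L), so an offset of e.g. 0x26 is encoded as imm4H = 2,
   imm4L = 6 and is reassembled below as (imm4H << 4) | imm4L.  */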
9684 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9685 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9686 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9687 regcache_raw_read_unsigned (reg_cache, reg_src1,
9689 if (ARM_PC_REGNUM == reg_src1)
9691 /* If R15 was used as Rn, it reads as the current PC + 8. */
9692 u_regval[0] = u_regval[0] + 8;
9694 offset_8 = (immed_high << 4) | immed_low;
9695 /* Calculate target store address. */
9696 if (14 == arm_insn_r->opcode)
9698 tgt_mem_addr = u_regval[0] + offset_8;
9702 tgt_mem_addr = u_regval[0] - offset_8;
9704 if (ARM_RECORD_STRH == str_type)
9706 record_buf_mem[0] = 2;
9707 record_buf_mem[1] = tgt_mem_addr;
9708 arm_insn_r->mem_rec_count = 1;
9710 else if (ARM_RECORD_STRD == str_type)
9712 record_buf_mem[0] = 4;
9713 record_buf_mem[1] = tgt_mem_addr;
9714 record_buf_mem[2] = 4;
9715 record_buf_mem[3] = tgt_mem_addr + 4;
9716 arm_insn_r->mem_rec_count = 2;
9719 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9721 /* 2) Store, register offset. */
9723 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9725 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9726 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9727 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9730 /* If R15 was used as Rn, it reads as the current PC + 8. */
9731 u_regval[0] = u_regval[0] + 8;
9733 /* Calculate target store address, Rn +/- Rm, register offset. */
9734 if (12 == arm_insn_r->opcode)
9736 tgt_mem_addr = u_regval[0] + u_regval[1];
9740 tgt_mem_addr = u_regval[1] - u_regval[0];
9742 if (ARM_RECORD_STRH == str_type)
9744 record_buf_mem[0] = 2;
9745 record_buf_mem[1] = tgt_mem_addr;
9746 arm_insn_r->mem_rec_count = 1;
9748 else if (ARM_RECORD_STRD == str_type)
9750 record_buf_mem[0] = 4;
9751 record_buf_mem[1] = tgt_mem_addr;
9752 record_buf_mem[2] = 4;
9753 record_buf_mem[3] = tgt_mem_addr + 4;
9754 arm_insn_r->mem_rec_count = 2;
9757 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9758 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9760 /* 3) Store, immediate pre-indexed. */
9761 /* 5) Store, immediate post-indexed. */
9762 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9763 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9764 offset_8 = (immed_high << 4) | immed_low;
9765 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9766 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9767 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
9768 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9770 tgt_mem_addr = u_regval[0] + offset_8;
9774 tgt_mem_addr = u_regval[0] - offset_8;
9776 if (ARM_RECORD_STRH == str_type)
9778 record_buf_mem[0] = 2;
9779 record_buf_mem[1] = tgt_mem_addr;
9780 arm_insn_r->mem_rec_count = 1;
9782 else if (ARM_RECORD_STRD == str_type)
9784 record_buf_mem[0] = 4;
9785 record_buf_mem[1] = tgt_mem_addr;
9786 record_buf_mem[2] = 4;
9787 record_buf_mem[3] = tgt_mem_addr + 4;
9788 arm_insn_r->mem_rec_count = 2;
9790 /* Record Rn also as it changes. */
9791 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9792 arm_insn_r->reg_rec_count = 1;
9794 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9795 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9797 /* 4) Store, register pre-indexed. */
9798 /* 6) Store, register post-indexed. */
9799 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9800 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9801 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9802 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9803 /* Calculate target store address, Rn +/- Rm, register offset. */
9804 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9806 tgt_mem_addr = u_regval[0] + u_regval[1];
9810 tgt_mem_addr = u_regval[1] - u_regval[0];
9812 if (ARM_RECORD_STRH == str_type)
9814 record_buf_mem[0] = 2;
9815 record_buf_mem[1] = tgt_mem_addr;
9816 arm_insn_r->mem_rec_count = 1;
9818 else if (ARM_RECORD_STRD == str_type)
9820 record_buf_mem[0] = 4;
9821 record_buf_mem[1] = tgt_mem_addr;
9822 record_buf_mem[2] = 4;
9823 record_buf_mem[3] = tgt_mem_addr + 4;
9824 arm_insn_r->mem_rec_count = 2;
9826 /* Record Rn also as it changes. */
9827 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9828 arm_insn_r->reg_rec_count = 1;
9833 /* Handling ARM extension space insns. */
9836 arm_record_extension_space (insn_decode_record *arm_insn_r)
9838 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
9839 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9840 uint32_t record_buf[8], record_buf_mem[8];
9841 uint32_t reg_src1 = 0;
9842 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
9843 struct regcache *reg_cache = arm_insn_r->regcache;
9844 ULONGEST u_regval = 0;
9846 gdb_assert (!INSN_RECORDED(arm_insn_r));
9847 /* Handle unconditional insn extension space. */
9849 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9850 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9851 if (arm_insn_r->cond)
9853 /* PLD has no effect on the architectural state; it just affects the caches. */
9855 if (5 == ((opcode1 & 0xE0) >> 5))
9858 record_buf[0] = ARM_PS_REGNUM;
9859 record_buf[1] = ARM_LR_REGNUM;
9860 arm_insn_r->reg_rec_count = 2;
9862 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9866 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9867 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9870 /* Undefined instruction on ARM V5; need to handle if later
9871 versions define it. */
9874 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9875 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9876 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9878 /* Handle arithmetic insn extension space. */
9879 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9880 && !INSN_RECORDED(arm_insn_r))
9882 /* Handle MLA(S) and MUL(S). */
9883 if (0 <= insn_op1 && 3 >= insn_op1)
9885 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9886 record_buf[1] = ARM_PS_REGNUM;
9887 arm_insn_r->reg_rec_count = 2;
9889 else if (4 <= insn_op1 && 15 >= insn_op1)
9891 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9892 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9893 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9894 record_buf[2] = ARM_PS_REGNUM;
9895 arm_insn_r->reg_rec_count = 3;
9899 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9900 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9901 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9903 /* Handle control insn extension space. */
9905 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9906 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
9908 if (!bit (arm_insn_r->arm_insn,25))
9910 if (!bits (arm_insn_r->arm_insn, 4, 7))
9912 if ((0 == insn_op1) || (2 == insn_op1))
9915 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9916 arm_insn_r->reg_rec_count = 1;
9918 else if (1 == insn_op1)
9920 /* CPSR is going to be changed. */
9921 record_buf[0] = ARM_PS_REGNUM;
9922 arm_insn_r->reg_rec_count = 1;
9924 else if (3 == insn_op1)
9926 /* SPSR is going to be changed. */
9927 /* We need to get SPSR value, which is yet to be done. */
9928 printf_unfiltered (_("Process record does not support "
9929 "instruction 0x%0x at address %s.\n"),
9930 arm_insn_r->arm_insn,
9931 paddress (arm_insn_r->gdbarch,
9932 arm_insn_r->this_addr));
9936 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
9941 record_buf[0] = ARM_PS_REGNUM;
9942 arm_insn_r->reg_rec_count = 1;
9944 else if (3 == insn_op1)
9947 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9948 arm_insn_r->reg_rec_count = 1;
9951 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
9954 record_buf[0] = ARM_PS_REGNUM;
9955 record_buf[1] = ARM_LR_REGNUM;
9956 arm_insn_r->reg_rec_count = 2;
9958 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
9960 /* QADD, QSUB, QDADD, QDSUB */
9961 record_buf[0] = ARM_PS_REGNUM;
9962 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9963 arm_insn_r->reg_rec_count = 2;
9965 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
9968 record_buf[0] = ARM_PS_REGNUM;
9969 record_buf[1] = ARM_LR_REGNUM;
9970 arm_insn_r->reg_rec_count = 2;
9972 /* Save SPSR also;how? */
9973 printf_unfiltered (_("Process record does not support "
9974 "instruction 0x%0x at address %s.\n"),
9975 arm_insn_r->arm_insn,
9976 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
9979 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
9980 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
9981 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
9982 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
9985 if (0 == insn_op1 || 1 == insn_op1)
9987 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
9988 /* We don't do the optimization for SMULW<y>, where only Rd would need to be saved. */
9990 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9991 record_buf[1] = ARM_PS_REGNUM;
9992 arm_insn_r->reg_rec_count = 2;
9994 else if (2 == insn_op1)
9997 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9998 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
9999 arm_insn_r->reg_rec_count = 2;
10001 else if (3 == insn_op1)
10004 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10005 arm_insn_r->reg_rec_count = 1;
10011 /* MSR: immediate form. */
10014 /* CPSR is going to be changed. */
10015 record_buf[0] = ARM_PS_REGNUM;
10016 arm_insn_r->reg_rec_count = 1;
10018 else if (3 == insn_op1)
10020 /* SPSR is going to be changed. */
10021 /* We need to get the SPSR value, which is yet to be done. */
10022 printf_unfiltered (_("Process record does not support "
10023 "instruction 0x%0x at address %s.\n"),
10024 arm_insn_r->arm_insn,
10025 paddress (arm_insn_r->gdbarch,
10026 arm_insn_r->this_addr));
10032 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10033 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10034 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10036 /* Handle load/store insn extension space. */
10038 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10039 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10040 && !INSN_RECORDED(arm_insn_r))
10045 /* These insns change both registers and memory. */
10046 /* SWP or SWPB insn. */
10047 /* Get memory address given by Rn. */
10048 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10049 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10050 /* SWP insn: swaps a word. */
10051 if (8 == arm_insn_r->opcode)
10053 record_buf_mem[0] = 4;
10057 /* SWPB insn: swaps only a byte. */
10058 record_buf_mem[0] = 1;
10060 record_buf_mem[1] = u_regval;
10061 arm_insn_r->mem_rec_count = 1;
10062 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10063 arm_insn_r->reg_rec_count = 1;
10065 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10068 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10071 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10074 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10075 record_buf[1] = record_buf[0] + 1;
10076 arm_insn_r->reg_rec_count = 2;
10078 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10081 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10084 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10086 /* LDRH, LDRSB, LDRSH. */
10087 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10088 arm_insn_r->reg_rec_count = 1;
10093 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10094 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10095 && !INSN_RECORDED(arm_insn_r))
10098 /* Handle coprocessor insn extension space. */
10101 /* To be done for ARMv5 and later; as of now we return -1. */
10103 printf_unfiltered (_("Process record does not support instruction 0x%0x "
10104 "at address %s.\n"), arm_insn_r->arm_insn,
10105 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10108 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10109 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10114 /* Handling opcode 000 insns. */
10117 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10119 struct regcache *reg_cache = arm_insn_r->regcache;
10120 uint32_t record_buf[8], record_buf_mem[8];
10121 ULONGEST u_regval[2] = {0};
10123 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10124 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
10125 uint32_t opcode1 = 0;
10127 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10128 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10129 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10131 /* Data processing insn /multiply insn. */
10132 if (9 == arm_insn_r->decode
10133 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10134 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10136 /* Handle multiply instructions. */
10137 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10138 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10140 /* Handle MLA and MUL. */
10141 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10142 record_buf[1] = ARM_PS_REGNUM;
10143 arm_insn_r->reg_rec_count = 2;
10145 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10147 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10148 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10149 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10150 record_buf[2] = ARM_PS_REGNUM;
10151 arm_insn_r->reg_rec_count = 3;
10154 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10155 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10157 /* Handle misc load insns; the 20th bit (L) is 1. */
10158 /* The LDR insn is capable of doing branching: if MOV LR, PC is
10159 preceded by an LDR insn having Rn as R15, it emulates a branch
10160 and link insn, and hence we need to save CPSR and PC as well.
10161 I am not sure this is the right place; an opcode = 010 LDR insn
10162 makes this happen if R15 was used as Rn. */
10164 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10165 if (15 != reg_dest)
10167 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10168 arm_insn_r->reg_rec_count = 1;
10172 record_buf[0] = reg_dest;
10173 record_buf[1] = ARM_PS_REGNUM;
10174 arm_insn_r->reg_rec_count = 2;
10177 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10178 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10179 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10180 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10182 /* Handle MSR insn. */
10183 if (9 == arm_insn_r->opcode)
10185 /* CPSR is going to be changed. */
10186 record_buf[0] = ARM_PS_REGNUM;
10187 arm_insn_r->reg_rec_count = 1;
10191 /* SPSR is going to be changed. */
10192 /* How to read SPSR value? */
10193 printf_unfiltered (_("Process record does not support instruction "
10194 "0x%0x at address %s.\n"),
10195 arm_insn_r->arm_insn,
10196 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10200 else if (9 == arm_insn_r->decode
10201 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10202 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10204 /* Handling SWP, SWPB. */
10205 /* These insns change both registers and memory. */
10206 /* SWP or SWPB insn. */
10208 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10209 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10210 /* SWP insn: swaps a word. */
10211 if (8 == arm_insn_r->opcode)
10213 record_buf_mem[0] = 4;
10217 /* SWPB insn: swaps only a byte. */
10218 record_buf_mem[0] = 1;
10220 record_buf_mem[1] = u_regval[0];
10221 arm_insn_r->mem_rec_count = 1;
10222 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10223 arm_insn_r->reg_rec_count = 1;
10225 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10226 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10228 /* Handle BLX, branch and link/exchange. */
10229 if (9 == arm_insn_r->opcode)
10231 /* The branch state is chosen by bit[0] of Rm, which sets the T bit of
10232 CPSR, and R14 stores the return address. */
10233 record_buf[0] = ARM_PS_REGNUM;
10234 record_buf[1] = ARM_LR_REGNUM;
10235 arm_insn_r->reg_rec_count = 2;
10238 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10240 /* Handle enhanced software breakpoint insn, BKPT. */
10241 /* CPSR is changed so that execution continues in ARM state, with
10242 normal interrupts disabled, entering abort mode. */
10243 /* The PC is set according to the high vector configuration. */
10244 /* If the user hit the breakpoint and then types reverse, we need to
10245 go back with the previous CPSR and Program Counter. */
10247 record_buf[0] = ARM_PS_REGNUM;
10248 record_buf[1] = ARM_LR_REGNUM;
10249 arm_insn_r->reg_rec_count = 2;
10251 /* Save SPSR also; how? */
10252 printf_unfiltered (_("Process record does not support instruction "
10253 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10254 paddress (arm_insn_r->gdbarch,
10255 arm_insn_r->this_addr));
10258 else if (11 == arm_insn_r->decode
10259 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10261 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10263 /* Handle str(x) insn */
10264 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10267 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10268 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10270 /* Handle BX, branch and exchange. */
10271 /* The branch state is chosen by bit[0] of Rm, which sets the T bit of CPSR. */
10272 record_buf[0] = ARM_PS_REGNUM;
10273 arm_insn_r->reg_rec_count = 1;
10275 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10276 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10277 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10279 /* Count leading zeros: CLZ. */
10280 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10281 arm_insn_r->reg_rec_count = 1;
10283 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10284 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10285 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10286 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10289 /* Handle MRS insn. */
10290 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10291 arm_insn_r->reg_rec_count = 1;
10293 else if (arm_insn_r->opcode <= 15)
10295 /* Normal data processing insns. */
10296 /* In each of the 11 shifter operand modes, the insn modifies the destination
10297 register, specified by bits 12-15; CPSR is saved too since the flags may change. */
10298 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10299 record_buf[1] = ARM_PS_REGNUM;
10300 arm_insn_r->reg_rec_count = 2;
10307 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10308 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10312 /* Handling opcode 001 insns. */
10315 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10317 uint32_t record_buf[8], record_buf_mem[8];
10319 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10320 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10322 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10323 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10324 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10327 /* Handle MSR insn. */
10328 if (9 == arm_insn_r->opcode)
10330 /* CPSR is going to be changed. */
10331 record_buf[0] = ARM_PS_REGNUM;
10332 arm_insn_r->reg_rec_count = 1;
10336 /* SPSR is going to be changed. */
10339 else if (arm_insn_r->opcode <= 15)
10341 /* Normal data processing insns. */
10342 /* In each of the 11 shifter operand modes, the insn modifies the destination
10343 register, specified by bits 12-15; CPSR is saved too since the flags may change. */
10344 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10345 record_buf[1] = ARM_PS_REGNUM;
10346 arm_insn_r->reg_rec_count = 2;
10353 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10354 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10358 /* Handle ARM mode instructions with opcode 010. */
10361 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10363 struct regcache *reg_cache = arm_insn_r->regcache;
10365 uint32_t reg_base , reg_dest;
10366 uint32_t offset_12, tgt_mem_addr;
10367 uint32_t record_buf[8], record_buf_mem[8];
10368 unsigned char wback;
10371 /* Calculate wback. */
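/* Bit 24 is the P (pre/post-indexing) bit and bit 21 the W (writeback) bit
   of the load/store encoding: post-indexed forms (P == 0) always write the
   updated address back to the base register, while pre-indexed forms do so
   only when W == 1, which is exactly what the expression below computes.  */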
10372 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10373 || (bit (arm_insn_r->arm_insn, 21) == 1);
10375 arm_insn_r->reg_rec_count = 0;
10376 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10378 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10380 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10383 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10384 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10386 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10387 precedes a LDR instruction having R15 as reg_base, it
10388 emulates a branch and link instruction, and hence we need to save
10389 CPSR and PC as well. */
10390 if (ARM_PC_REGNUM == reg_dest)
10391 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10393 /* If wback is true, also save the base register, which is going to be written back. */
10396 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10400 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10402 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10403 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10405 /* Handle bit U. */
10406 if (bit (arm_insn_r->arm_insn, 23))
10408 /* U == 1: Add the offset. */
10409 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10413 /* U == 0: subtract the offset. */
10414 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10417 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10419 if (bit (arm_insn_r->arm_insn, 22))
10421 /* STRB and STRBT: 1 byte. */
10422 record_buf_mem[0] = 1;
10426 /* STR and STRT: 4 bytes. */
10427 record_buf_mem[0] = 4;
10430 /* Handle bit P. */
10431 if (bit (arm_insn_r->arm_insn, 24))
10432 record_buf_mem[1] = tgt_mem_addr;
10434 record_buf_mem[1] = (uint32_t) u_regval;
10436 arm_insn_r->mem_rec_count = 1;
10438 /* If wback is true, also save the base register, which is going to be
10441 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10444 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10445 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10449 /* Handling opcode 011 insns. */
10452 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10454 struct regcache *reg_cache = arm_insn_r->regcache;
10456 uint32_t shift_imm = 0;
10457 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10458 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10459 uint32_t record_buf[8], record_buf_mem[8];
10462 ULONGEST u_regval[2];
10464 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10465 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10467 /* Handle enhanced store insns and LDRD DSP insn,
10468 order begins according to addressing modes for store insns
10472 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10474 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10475 /* The LDR insn is capable of doing branching. If
10476 MOV LR, PC precedes an LDR insn having R15 as its destination,
10477 the pair emulates a branch and link insn, and hence we
10478 need to save CPSR and PC as well. */
10479 if (15 != reg_dest)
10481 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10482 arm_insn_r->reg_rec_count = 1;
10486 record_buf[0] = reg_dest;
10487 record_buf[1] = ARM_PS_REGNUM;
10488 arm_insn_r->reg_rec_count = 2;
10493 if (! bits (arm_insn_r->arm_insn, 4, 11))
10495 /* Store insn, register offset and register pre-indexed,
10496 register post-indexed. */
10498 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10500 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10501 regcache_raw_read_unsigned (reg_cache, reg_src1
10503 regcache_raw_read_unsigned (reg_cache, reg_src2
10505 if (15 == reg_src2)
10507 /* If R15 was used as Rn, its value is the current PC + 8. */
10508 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
10509 u_regval[0] = u_regval[0] + 8;
10511 /* Calculate target store address, Rn +/- Rm, register offset. */
10513 if (bit (arm_insn_r->arm_insn, 23))
10515 tgt_mem_addr = u_regval[0] + u_regval[1];
10519 tgt_mem_addr = u_regval[1] - u_regval[0];
10522 switch (arm_insn_r->opcode)
10536 record_buf_mem[0] = 4;
10551 record_buf_mem[0] = 1;
10555 gdb_assert_not_reached ("no decoding pattern found");
10558 record_buf_mem[1] = tgt_mem_addr;
10559 arm_insn_r->mem_rec_count = 1;
10561 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10562 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10563 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10564 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10565 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10566 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10569 /* Rn is going to be changed in pre-indexed mode and
10570 post-indexed mode as well. */
10571 record_buf[0] = reg_src2;
10572 arm_insn_r->reg_rec_count = 1;
10577 /* Store insn, scaled register offset; scaled pre-indexed. */
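/* For the scaled register offset form, bits 5-6 hold the shift type
   (0 = LSL, 1 = LSR, 2 = ASR, 3 = ROR/RRX) and bits 7-11 the shift amount. */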
10578 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10580 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10582 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10583 /* Get shift_imm. */
10584 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10585 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10586 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10587 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10588 /* Offset_12 used as shift. */
10592 /* Offset_12 used as index. */
10593 offset_12 = u_regval[0] << shift_imm;
10597 offset_12 = (!shift_imm) ? 0 : (u_regval[0] >> shift_imm);
10603 if (bit (u_regval[0], 31))
10605 offset_12 = 0xFFFFFFFF;
10614 /* This is arithmetic shift. */
10615 offset_12 = s_word >> shift_imm;
10622 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10624 /* Get C flag value and shift it by 31. */
10625 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10626 | (u_regval[0]) >> 1);
10630 offset_12 = (u_regval[0] >> shift_imm) \
10632 (sizeof(uint32_t) - shift_imm));
10637 gdb_assert_not_reached ("no decoding pattern found");
10641 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10643 if (bit (arm_insn_r->arm_insn, 23))
10645 tgt_mem_addr = u_regval[1] + offset_12;
10649 tgt_mem_addr = u_regval[1] - offset_12;
10652 switch (arm_insn_r->opcode)
10666 record_buf_mem[0] = 4;
10681 record_buf_mem[0] = 1;
10685 gdb_assert_not_reached ("no decoding pattern found");
10688 record_buf_mem[1] = tgt_mem_addr;
10689 arm_insn_r->mem_rec_count = 1;
10691 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10692 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10693 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10694 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10695 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10696 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10699 /* Rn is going to be changed in register scaled pre-indexed
10700 mode, and in scaled post-indexed mode. */
10701 record_buf[0] = reg_src2;
10702 arm_insn_r->reg_rec_count = 1;
10707 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10708 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10712 /* Handle ARM mode instructions with opcode 100. */
10715 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10717 struct regcache *reg_cache = arm_insn_r->regcache;
10718 uint32_t register_count = 0, register_bits;
10719 uint32_t reg_base, addr_mode;
10720 uint32_t record_buf[24], record_buf_mem[48];
10724 /* Fetch the list of registers. */
10725 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10726 arm_insn_r->reg_rec_count = 0;
10728 /* Fetch the base register that contains the address we are loading data
10730 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10732 /* Calculate wback. */
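/* For LDM/STM, wback is the W bit (bit 21): when set, the base register is
   written back with the updated address. */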
10733 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10735 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10737 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10739 /* Find out which registers are going to be loaded from memory. */
10740 while (register_bits)
10742 if (register_bits & 0x00000001)
10743 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10744 register_bits = register_bits >> 1;
10749 /* If wback is true, also save the base register, which is going to be
10752 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10754 /* Save the CPSR register. */
10755 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10759 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10761 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10763 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10765 /* Find out how many registers are going to be stored to memory. */
10766 while (register_bits)
10768 if (register_bits & 0x00000001)
10770 register_bits = register_bits >> 1;
10775 /* STMDA (STMED): Decrement after. */
10777 record_buf_mem[1] = (uint32_t) u_regval
10778 - register_count * INT_REGISTER_SIZE + 4;
10780 /* STM (STMIA, STMEA): Increment after. */
10782 record_buf_mem[1] = (uint32_t) u_regval;
10784 /* STMDB (STMFD): Decrement before. */
10786 record_buf_mem[1] = (uint32_t) u_regval
10787 - register_count * INT_REGISTER_SIZE;
10789 /* STMIB (STMFA): Increment before. */
10791 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10794 gdb_assert_not_reached ("no decoding pattern found");
10798 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10799 arm_insn_r->mem_rec_count = 1;
10801 /* If wback is true, also save the base register, which is going to be
10804 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10807 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10808 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10812 /* Handling opcode 101 insns. */
10815 arm_record_b_bl (insn_decode_record *arm_insn_r)
10817 uint32_t record_buf[8];
10819 /* Handle B, BL, BLX(1) insns. */
10820 /* B simply branches so we do nothing here. */
10821 /* Note: BLX(1) doesn't fall here but instead it falls into
10822 extension space. */
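/* Bit 24 is the L bit: when set the insn is BL and LR will be clobbered;
   a plain B modifies only the PC. */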
10823 if (bit (arm_insn_r->arm_insn, 24))
10825 record_buf[0] = ARM_LR_REGNUM;
10826 arm_insn_r->reg_rec_count = 1;
10829 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10834 /* Record handler for unsupported instructions; reports the failure. */
10837 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10839 printf_unfiltered (_("Process record does not support instruction "
10840 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10841 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10846 /* Record handler for vector data transfer instructions. */
10849 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
10851 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
10852 uint32_t record_buf[4];
10854 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10855 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
10856 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
10857 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
10858 bit_l = bit (arm_insn_r->arm_insn, 20);
10859 bit_c = bit (arm_insn_r->arm_insn, 8);
10861 /* Handle VMOV instruction. */
10862 if (bit_l && bit_c)
10864 record_buf[0] = reg_t;
10865 arm_insn_r->reg_rec_count = 1;
10867 else if (bit_l && !bit_c)
10869 /* Handle VMOV instruction. */
10870 if (bits_a == 0x00)
10872 if (bit (arm_insn_r->arm_insn, 20))
10873 record_buf[0] = reg_t;
10875 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10878 arm_insn_r->reg_rec_count = 1;
10880 /* Handle VMRS instruction. */
10881 else if (bits_a == 0x07)
10884 reg_t = ARM_PS_REGNUM;
10886 record_buf[0] = reg_t;
10887 arm_insn_r->reg_rec_count = 1;
10890 else if (!bit_l && !bit_c)
10892 /* Handle VMOV instruction. */
10893 if (bits_a == 0x00)
10895 if (bit (arm_insn_r->arm_insn, 20))
10896 record_buf[0] = reg_t;
10898 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
10901 arm_insn_r->reg_rec_count = 1;
10903 /* Handle VMSR instruction. */
10904 else if (bits_a == 0x07)
10906 record_buf[0] = ARM_FPSCR_REGNUM;
10907 arm_insn_r->reg_rec_count = 1;
10910 else if (!bit_l && bit_c)
10912 /* Handle VMOV instruction. */
10913 if (!(bits_a & 0x04))
10915 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
10917 arm_insn_r->reg_rec_count = 1;
10919 /* Handle VDUP instruction. */
10922 if (bit (arm_insn_r->arm_insn, 21))
10924 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10925 record_buf[0] = reg_v + ARM_D0_REGNUM;
10926 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
10927 arm_insn_r->reg_rec_count = 2;
10931 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
10932 record_buf[0] = reg_v + ARM_D0_REGNUM;
10933 arm_insn_r->reg_rec_count = 1;
10938 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10942 /* Record handler for extension register load/store instructions. */
10945 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
10947 uint32_t opcode, single_reg;
10948 uint8_t op_vldm_vstm;
10949 uint32_t record_buf[8], record_buf_mem[128];
10950 ULONGEST u_regval = 0;
10952 struct regcache *reg_cache = arm_insn_r->regcache;
10953 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
10955 opcode = bits (arm_insn_r->arm_insn, 20, 24);
10956 single_reg = bit (arm_insn_r->arm_insn, 8);
10957 op_vldm_vstm = opcode & 0x1b;
10959 /* Handle VMOV instructions. */
10960 if ((opcode & 0x1e) == 0x04)
10962 if (bit (arm_insn_r->arm_insn, 4))
10964 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10965 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10966 arm_insn_r->reg_rec_count = 2;
10970 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
10971 | bit (arm_insn_r->arm_insn, 5);
10975 record_buf[0] = num_regs + reg_m;
10976 record_buf[1] = num_regs + reg_m + 1;
10977 arm_insn_r->reg_rec_count = 2;
10981 record_buf[0] = reg_m + ARM_D0_REGNUM;
10982 arm_insn_r->reg_rec_count = 1;
10986 /* Handle VSTM and VPUSH instructions. */
10987 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
10988 || op_vldm_vstm == 0x12)
10990 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
10991 uint32_t memory_index = 0;
10993 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
10994 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
10995 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
10996 imm_off32 = imm_off8 << 24;
10997 memory_count = imm_off8;
10999 if (bit (arm_insn_r->arm_insn, 23))
11000 start_address = u_regval;
11002 start_address = u_regval - imm_off32;
11004 if (bit (arm_insn_r->arm_insn, 21))
11006 record_buf[0] = reg_rn;
11007 arm_insn_r->reg_rec_count = 1;
11010 while (memory_count > 0)
11014 record_buf_mem[memory_index] = start_address;
11015 record_buf_mem[memory_index + 1] = 4;
11016 start_address = start_address + 4;
11017 memory_index = memory_index + 2;
11021 record_buf_mem[memory_index] = start_address;
11022 record_buf_mem[memory_index + 1] = 4;
11023 record_buf_mem[memory_index + 2] = start_address + 4;
11024 record_buf_mem[memory_index + 3] = 4;
11025 start_address = start_address + 8;
11026 memory_index = memory_index + 4;
11030 arm_insn_r->mem_rec_count = (memory_index >> 1);
11032 /* Handle VLDM instructions. */
11033 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11034 || op_vldm_vstm == 0x13)
11036 uint32_t reg_count, reg_vd;
11037 uint32_t reg_index = 0;
11039 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11040 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11043 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11045 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11047 if (bit (arm_insn_r->arm_insn, 21))
11048 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11050 while (reg_count > 0)
11053 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
11055 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11059 arm_insn_r->reg_rec_count = reg_index;
11061 /* VSTR Vector store register. */
11062 else if ((opcode & 0x13) == 0x10)
11064 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11065 uint32_t memory_index = 0;
11067 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11068 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11069 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11070 imm_off32 = imm_off8 << 24;
11071 memory_count = imm_off8;
11073 if (bit (arm_insn_r->arm_insn, 23))
11074 start_address = u_regval + imm_off32;
11076 start_address = u_regval - imm_off32;
11080 record_buf_mem[memory_index] = start_address;
11081 record_buf_mem[memory_index + 1] = 4;
11082 arm_insn_r->mem_rec_count = 1;
11086 record_buf_mem[memory_index] = start_address;
11087 record_buf_mem[memory_index + 1] = 4;
11088 record_buf_mem[memory_index + 2] = start_address + 4;
11089 record_buf_mem[memory_index + 3] = 4;
11090 arm_insn_r->mem_rec_count = 2;
11093 /* VLDR Vector load register. */
11094 else if ((opcode & 0x13) == 0x11)
11096 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11100 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11101 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11105 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11106 record_buf[0] = num_regs + reg_vd;
11108 arm_insn_r->reg_rec_count = 1;
11111 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11112 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11116 /* Record handler for arm/thumb mode VFP data processing instructions. */
11119 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11121 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11122 uint32_t record_buf[4];
11123 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11124 enum insn_types curr_insn_type = INSN_INV;
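/* The insn type selects what the switch below records: INSN_T0 records a
   pair of double registers starting at D(Vd), INSN_T1 records the single
   double register D(Vd), INSN_T2 records the destination in the
   single-precision Vd:D encoding, and INSN_T3 records only FPSCR. */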
11126 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11127 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11128 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11129 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11130 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11131 bit_d = bit (arm_insn_r->arm_insn, 22);
11132 opc1 = opc1 & 0x04;
11134 /* Handle VMLA, VMLS. */
11137 if (bit (arm_insn_r->arm_insn, 10))
11139 if (bit (arm_insn_r->arm_insn, 6))
11140 curr_insn_type = INSN_T0;
11142 curr_insn_type = INSN_T1;
11147 curr_insn_type = INSN_T1;
11149 curr_insn_type = INSN_T2;
11152 /* Handle VNMLA, VNMLS, VNMUL. */
11153 else if (opc1 == 0x01)
11156 curr_insn_type = INSN_T1;
11158 curr_insn_type = INSN_T2;
11161 else if (opc1 == 0x02 && !(opc3 & 0x01))
11163 if (bit (arm_insn_r->arm_insn, 10))
11165 if (bit (arm_insn_r->arm_insn, 6))
11166 curr_insn_type = INSN_T0;
11168 curr_insn_type = INSN_T1;
11173 curr_insn_type = INSN_T1;
11175 curr_insn_type = INSN_T2;
11178 /* Handle VADD, VSUB. */
11179 else if (opc1 == 0x03)
11181 if (!bit (arm_insn_r->arm_insn, 9))
11183 if (bit (arm_insn_r->arm_insn, 6))
11184 curr_insn_type = INSN_T0;
11186 curr_insn_type = INSN_T1;
11191 curr_insn_type = INSN_T1;
11193 curr_insn_type = INSN_T2;
11197 else if (opc1 == 0x0b)
11200 curr_insn_type = INSN_T1;
11202 curr_insn_type = INSN_T2;
11204 /* Handle all other vfp data processing instructions. */
11205 else if (opc1 == 0x0b)
11208 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11210 if (bit (arm_insn_r->arm_insn, 4))
11212 if (bit (arm_insn_r->arm_insn, 6))
11213 curr_insn_type = INSN_T0;
11215 curr_insn_type = INSN_T1;
11220 curr_insn_type = INSN_T1;
11222 curr_insn_type = INSN_T2;
11225 /* Handle VNEG and VABS. */
11226 else if ((opc2 == 0x01 && opc3 == 0x01)
11227 || (opc2 == 0x00 && opc3 == 0x03))
11229 if (!bit (arm_insn_r->arm_insn, 11))
11231 if (bit (arm_insn_r->arm_insn, 6))
11232 curr_insn_type = INSN_T0;
11234 curr_insn_type = INSN_T1;
11239 curr_insn_type = INSN_T1;
11241 curr_insn_type = INSN_T2;
11244 /* Handle VSQRT. */
11245 else if (opc2 == 0x01 && opc3 == 0x03)
11248 curr_insn_type = INSN_T1;
11250 curr_insn_type = INSN_T2;
11253 else if (opc2 == 0x07 && opc3 == 0x03)
11256 curr_insn_type = INSN_T1;
11258 curr_insn_type = INSN_T2;
11260 else if (opc3 & 0x01)
11263 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11265 if (!bit (arm_insn_r->arm_insn, 18))
11266 curr_insn_type = INSN_T2;
11270 curr_insn_type = INSN_T1;
11272 curr_insn_type = INSN_T2;
11276 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11279 curr_insn_type = INSN_T1;
11281 curr_insn_type = INSN_T2;
11283 /* Handle VCVTB, VCVTT. */
11284 else if ((opc2 & 0x0e) == 0x02)
11285 curr_insn_type = INSN_T2;
11286 /* Handle VCMP, VCMPE. */
11287 else if ((opc2 & 0x0e) == 0x04)
11288 curr_insn_type = INSN_T3;
11292 switch (curr_insn_type)
11295 reg_vd = reg_vd | (bit_d << 4);
11296 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11297 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11298 arm_insn_r->reg_rec_count = 2;
11302 reg_vd = reg_vd | (bit_d << 4);
11303 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11304 arm_insn_r->reg_rec_count = 1;
11308 reg_vd = (reg_vd << 1) | bit_d;
11309 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11310 arm_insn_r->reg_rec_count = 1;
11314 record_buf[0] = ARM_FPSCR_REGNUM;
11315 arm_insn_r->reg_rec_count = 1;
11319 gdb_assert_not_reached ("no decoding pattern found");
11323 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11327 /* Handling opcode 110 insns. */
11330 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11332 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
11334 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11335 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11336 op1_ebit = bit (arm_insn_r->arm_insn, 20);
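/* (coproc & 0x0e) == 0x0a matches coprocessors 10 and 11 (CP10/CP11),
   which are used for VFP and Advanced SIMD. */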
11338 if ((coproc & 0x0e) == 0x0a)
11340 /* Handle extension register ld/st instructions. */
11342 return arm_record_exreg_ld_st_insn (arm_insn_r);
11344 /* 64-bit transfers between arm core and extension registers. */
11345 if ((op1 & 0x3e) == 0x04)
11346 return arm_record_exreg_ld_st_insn (arm_insn_r);
11350 /* Handle coprocessor ld/st instructions. */
11355 return arm_record_unsupported_insn (arm_insn_r);
11358 return arm_record_unsupported_insn (arm_insn_r);
11361 /* Move to coprocessor from two arm core registers. */
11363 return arm_record_unsupported_insn (arm_insn_r);
11365 /* Move to two arm core registers from coprocessor. */
11370 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11371 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11372 arm_insn_r->reg_rec_count = 2;
11374 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11378 return arm_record_unsupported_insn (arm_insn_r);
11381 /* Handling opcode 111 insns. */
11384 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11386 uint32_t op, op1_sbit, op1_ebit, coproc;
11387 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11388 struct regcache *reg_cache = arm_insn_r->regcache;
11389 ULONGEST u_regval = 0;
11391 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11392 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11393 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11394 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11395 op = bit (arm_insn_r->arm_insn, 4);
11397 /* Handle arm SWI/SVC system call instructions. */
11400 if (tdep->arm_syscall_record != NULL)
11402 ULONGEST svc_operand, svc_number;
11404 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
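/* A non-zero SVC comment field means the old OABI, where the syscall number
   is encoded in the instruction itself (biased by 0x900000); under EABI the
   field is zero and the number is passed in r7. */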
11406 if (svc_operand) /* OABI. */
11407 svc_number = svc_operand - 0x900000;
11409 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
11411 return tdep->arm_syscall_record (reg_cache, svc_number);
11415 printf_unfiltered (_("no syscall record support\n"));
11420 if ((coproc & 0x0e) == 0x0a)
11422 /* VFP data-processing instructions. */
11423 if (!op1_sbit && !op)
11424 return arm_record_vfp_data_proc_insn (arm_insn_r);
11426 /* Advanced SIMD, VFP instructions. */
11427 if (!op1_sbit && op)
11428 return arm_record_vdata_transfer_insn (arm_insn_r);
11432 /* Coprocessor data operations. */
11433 if (!op1_sbit && !op)
11434 return arm_record_unsupported_insn (arm_insn_r);
11436 /* Move to Coprocessor from ARM core register. */
11437 if (!op1_sbit && !op1_ebit && op)
11438 return arm_record_unsupported_insn (arm_insn_r);
11440 /* Move to arm core register from coprocessor. */
11441 if (!op1_sbit && op1_ebit && op)
11443 uint32_t record_buf[1];
11445 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11446 if (record_buf[0] == 15)
11447 record_buf[0] = ARM_PS_REGNUM;
11449 arm_insn_r->reg_rec_count = 1;
11450 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11456 return arm_record_unsupported_insn (arm_insn_r);
11459 /* Handling opcode 000 insns. */
11462 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11464 uint32_t record_buf[8];
11465 uint32_t reg_src1 = 0;
11467 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11469 record_buf[0] = ARM_PS_REGNUM;
11470 record_buf[1] = reg_src1;
11471 thumb_insn_r->reg_rec_count = 2;
11473 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11479 /* Handling opcode 001 insns. */
11482 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11484 uint32_t record_buf[8];
11485 uint32_t reg_src1 = 0;
11487 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11489 record_buf[0] = ARM_PS_REGNUM;
11490 record_buf[1] = reg_src1;
11491 thumb_insn_r->reg_rec_count = 2;
11493 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11498 /* Handling opcode 010 insns. */
11501 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11503 struct regcache *reg_cache = thumb_insn_r->regcache;
11504 uint32_t record_buf[8], record_buf_mem[8];
11506 uint32_t reg_src1 = 0, reg_src2 = 0;
11507 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11509 ULONGEST u_regval[2] = {0};
11511 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11513 if (bit (thumb_insn_r->arm_insn, 12))
11515 /* Handle load/store register offset. */
11516 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
11517 if (opcode2 >= 12 && opcode2 <= 15)
11519 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11520 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11521 record_buf[0] = reg_src1;
11522 thumb_insn_r->reg_rec_count = 1;
11524 else if (opcode2 >= 8 && opcode2 <= 10)
11526 /* STR(2), STRB(2), STRH(2). */
11527 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11528 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11529 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11530 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11532 record_buf_mem[0] = 4; /* STR (2). */
11533 else if (10 == opcode2)
11534 record_buf_mem[0] = 1; /* STRB (2). */
11535 else if (9 == opcode2)
11536 record_buf_mem[0] = 2; /* STRH (2). */
11537 record_buf_mem[1] = u_regval[0] + u_regval[1];
11538 thumb_insn_r->mem_rec_count = 1;
11541 else if (bit (thumb_insn_r->arm_insn, 11))
11543 /* Handle load from literal pool. */
11545 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11546 record_buf[0] = reg_src1;
11547 thumb_insn_r->reg_rec_count = 1;
11551 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11552 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11553 if ((3 == opcode2) && (!opcode3))
11555 /* Branch with exchange. */
11556 record_buf[0] = ARM_PS_REGNUM;
11557 thumb_insn_r->reg_rec_count = 1;
11561 /* Format 8; special data processing insns. */
11562 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11563 record_buf[0] = ARM_PS_REGNUM;
11564 record_buf[1] = reg_src1;
11565 thumb_insn_r->reg_rec_count = 2;
11570 /* Format 5; data processing insns. */
11571 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11572 if (bit (thumb_insn_r->arm_insn, 7))
11574 reg_src1 = reg_src1 + 8;
11576 record_buf[0] = ARM_PS_REGNUM;
11577 record_buf[1] = reg_src1;
11578 thumb_insn_r->reg_rec_count = 2;
11581 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11582 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11588 /* Handling opcode 001 insns. */
11591 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11593 struct regcache *reg_cache = thumb_insn_r->regcache;
11594 uint32_t record_buf[8], record_buf_mem[8];
11596 uint32_t reg_src1 = 0;
11597 uint32_t opcode = 0, immed_5 = 0;
11599 ULONGEST u_regval = 0;
11601 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11606 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11607 record_buf[0] = reg_src1;
11608 thumb_insn_r->reg_rec_count = 1;
11613 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11614 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11615 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11616 record_buf_mem[0] = 4;
11617 record_buf_mem[1] = u_regval + (immed_5 * 4);
11618 thumb_insn_r->mem_rec_count = 1;
11621 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11622 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11628 /* Handling opcode 100 insns. */
11631 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11633 struct regcache *reg_cache = thumb_insn_r->regcache;
11634 uint32_t record_buf[8], record_buf_mem[8];
11636 uint32_t reg_src1 = 0;
11637 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11639 ULONGEST u_regval = 0;
11641 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11646 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11647 record_buf[0] = reg_src1;
11648 thumb_insn_r->reg_rec_count = 1;
11650 else if (1 == opcode)
11653 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11654 record_buf[0] = reg_src1;
11655 thumb_insn_r->reg_rec_count = 1;
11657 else if (2 == opcode)
11660 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11661 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11662 record_buf_mem[0] = 4;
11663 record_buf_mem[1] = u_regval + (immed_8 * 4);
11664 thumb_insn_r->mem_rec_count = 1;
11666 else if (0 == opcode)
11669 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11670 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11671 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11672 record_buf_mem[0] = 2;
11673 record_buf_mem[1] = u_regval + (immed_5 * 2);
11674 thumb_insn_r->mem_rec_count = 1;
11677 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11678 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11684 /* Handling opcode 101 insns. */
11687 thumb_record_misc (insn_decode_record *thumb_insn_r)
11689 struct regcache *reg_cache = thumb_insn_r->regcache;
11691 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11692 uint32_t register_bits = 0, register_count = 0;
11693 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
11694 uint32_t record_buf[24], record_buf_mem[48];
11697 ULONGEST u_regval = 0;
11699 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11700 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11701 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11706 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11707 while (register_bits)
11709 if (register_bits & 0x00000001)
11710 record_buf[index++] = register_count;
11711 register_bits = register_bits >> 1;
11714 record_buf[index++] = ARM_PS_REGNUM;
11715 record_buf[index++] = ARM_SP_REGNUM;
11716 thumb_insn_r->reg_rec_count = index;
11718 else if (10 == opcode2)
11721 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11722 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11723 while (register_bits)
11725 if (register_bits & 0x00000001)
11727 register_bits = register_bits >> 1;
11729 start_address = u_regval - \
11730 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
11731 thumb_insn_r->mem_rec_count = register_count;
11732 while (register_count)
11734 record_buf_mem[(register_count * 2) - 1] = start_address;
11735 record_buf_mem[(register_count * 2) - 2] = 4;
11736 start_address = start_address + 4;
11739 record_buf[0] = ARM_SP_REGNUM;
11740 thumb_insn_r->reg_rec_count = 1;
11742 else if (0x1E == opcode1)
11745 /* Handle enhanced software breakpoint insn, BKPT. */
11746 /* CPSR is changed so that execution continues in ARM state, with normal
11747 interrupts disabled, entering abort mode. */
11748 /* PC is set according to the high vector configuration. */
11749 /* When the user hits this breakpoint and types reverse, we need to go back
11750 with the previous CPSR and Program Counter. */
11751 record_buf[0] = ARM_PS_REGNUM;
11752 record_buf[1] = ARM_LR_REGNUM;
11753 thumb_insn_r->reg_rec_count = 2;
11754 /* We need to save SPSR value, which is not yet done. */
11755 printf_unfiltered (_("Process record does not support instruction "
11756 "0x%0x at address %s.\n"),
11757 thumb_insn_r->arm_insn,
11758 paddress (thumb_insn_r->gdbarch,
11759 thumb_insn_r->this_addr));
11762 else if ((0 == opcode) || (1 == opcode))
11764 /* ADD(5), ADD(6). */
11765 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11766 record_buf[0] = reg_src1;
11767 thumb_insn_r->reg_rec_count = 1;
11769 else if (2 == opcode)
11771 /* ADD(7), SUB(4). */
11772 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11773 record_buf[0] = ARM_SP_REGNUM;
11774 thumb_insn_r->reg_rec_count = 1;
11777 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11778 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11784 /* Handling opcode 110 insns. */
11787 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11789 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11790 struct regcache *reg_cache = thumb_insn_r->regcache;
11792 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
11793 uint32_t reg_src1 = 0;
11794 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
11795 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
11796 uint32_t record_buf[24], record_buf_mem[48];
11798 ULONGEST u_regval = 0;
11800 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11801 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
11807 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11809 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11810 while (register_bits)
11812 if (register_bits & 0x00000001)
11813 record_buf[index++] = register_count;
11814 register_bits = register_bits >> 1;
11817 record_buf[index++] = reg_src1;
11818 thumb_insn_r->reg_rec_count = index;
11820 else if (0 == opcode2)
11822 /* Handle STMIA. */
11823 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11825 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11826 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11827 while (register_bits)
11829 if (register_bits & 0x00000001)
11831 register_bits = register_bits >> 1;
11833 start_address = u_regval;
11834 thumb_insn_r->mem_rec_count = register_count;
11835 while (register_count)
11837 record_buf_mem[(register_count * 2) - 1] = start_address;
11838 record_buf_mem[(register_count * 2) - 2] = 4;
11839 start_address = start_address + 4;
11843 else if (0x1F == opcode1)
11845 /* Handle arm syscall insn. */
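/* For a Thumb SVC under EABI the syscall number is passed in r7. */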
11846 if (tdep->arm_syscall_record != NULL)
11848 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
11849 ret = tdep->arm_syscall_record (reg_cache, u_regval);
11853 printf_unfiltered (_("no syscall record support\n"));
11858 /* B(1), the conditional branch, is automatically taken care of in
11859 process_record, as PC is saved there. */
11861 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11862 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11868 /* Handling opcode 111 insns. */
11871 thumb_record_branch (insn_decode_record *thumb_insn_r)
11873 uint32_t record_buf[8];
11874 uint32_t bits_h = 0;
11876 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
11878 if (2 == bits_h || 3 == bits_h)
11881 record_buf[0] = ARM_LR_REGNUM;
11882 thumb_insn_r->reg_rec_count = 1;
11884 else if (1 == bits_h)
11887 record_buf[0] = ARM_PS_REGNUM;
11888 record_buf[1] = ARM_LR_REGNUM;
11889 thumb_insn_r->reg_rec_count = 2;
11892 /* B(2) is automatically taken care of in process_record, as PC is
11895 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11900 /* Handler for thumb2 load/store multiple instructions. */
11903 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
11905 struct regcache *reg_cache = thumb2_insn_r->regcache;
11907 uint32_t reg_rn, op;
11908 uint32_t register_bits = 0, register_count = 0;
11909 uint32_t index = 0, start_address = 0;
11910 uint32_t record_buf[24], record_buf_mem[48];
11912 ULONGEST u_regval = 0;
11914 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
11915 op = bits (thumb2_insn_r->arm_insn, 23, 24);
11917 if (0 == op || 3 == op)
11919 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11921 /* Handle RFE instruction. */
11922 record_buf[0] = ARM_PS_REGNUM;
11923 thumb2_insn_r->reg_rec_count = 1;
11927 /* Handle SRS instruction after reading banked SP. */
11928 return arm_record_unsupported_insn (thumb2_insn_r);
11931 else if (1 == op || 2 == op)
11933 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11935 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
11936 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11937 while (register_bits)
11939 if (register_bits & 0x00000001)
11940 record_buf[index++] = register_count;
11943 register_bits = register_bits >> 1;
11945 record_buf[index++] = reg_rn;
11946 record_buf[index++] = ARM_PS_REGNUM;
11947 thumb2_insn_r->reg_rec_count = index;
11951 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
11952 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
11953 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11954 while (register_bits)
11956 if (register_bits & 0x00000001)
11959 register_bits = register_bits >> 1;
11964 /* Start address calculation for LDMDB/LDMEA. */
11965 start_address = u_regval;
11969 /* Start address calculation for LDMDB/LDMEA. */
11970 start_address = u_regval - register_count * 4;
11973 thumb2_insn_r->mem_rec_count = register_count;
11974 while (register_count)
11976 record_buf_mem[register_count * 2 - 1] = start_address;
11977 record_buf_mem[register_count * 2 - 2] = 4;
11978 start_address = start_address + 4;
11981 record_buf[0] = reg_rn;
11982 record_buf[1] = ARM_PS_REGNUM;
11983 thumb2_insn_r->reg_rec_count = 2;
11987 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
11989 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
11991 return ARM_RECORD_SUCCESS;
11994 /* Handler for thumb2 load/store (dual/exclusive) and table branch
11998 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12000 struct regcache *reg_cache = thumb2_insn_r->regcache;
12002 uint32_t reg_rd, reg_rn, offset_imm;
12003 uint32_t reg_dest1, reg_dest2;
12004 uint32_t address, offset_addr;
12005 uint32_t record_buf[8], record_buf_mem[8];
12006 uint32_t op1, op2, op3;
12009 ULONGEST u_regval[2];
12011 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12012 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12013 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12015 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12017 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12019 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12020 record_buf[0] = reg_dest1;
12021 record_buf[1] = ARM_PS_REGNUM;
12022 thumb2_insn_r->reg_rec_count = 2;
12025 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12027 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12028 record_buf[2] = reg_dest2;
12029 thumb2_insn_r->reg_rec_count = 3;
12034 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12035 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12037 if (0 == op1 && 0 == op2)
12039 /* Handle STREX. */
12040 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12041 address = u_regval[0] + (offset_imm * 4);
12042 record_buf_mem[0] = 4;
12043 record_buf_mem[1] = address;
12044 thumb2_insn_r->mem_rec_count = 1;
12045 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12046 record_buf[0] = reg_rd;
12047 thumb2_insn_r->reg_rec_count = 1;
12049 else if (1 == op1 && 0 == op2)
12051 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12052 record_buf[0] = reg_rd;
12053 thumb2_insn_r->reg_rec_count = 1;
12054 address = u_regval[0];
12055 record_buf_mem[1] = address;
12059 /* Handle STREXB. */
12060 record_buf_mem[0] = 1;
12061 thumb2_insn_r->mem_rec_count = 1;
12065 /* Handle STREXH. */
12066 record_buf_mem[0] = 2;
12067 thumb2_insn_r->mem_rec_count = 1;
12071 /* Handle STREXD. */
12072 address = u_regval[0];
12073 record_buf_mem[0] = 4;
12074 record_buf_mem[2] = 4;
12075 record_buf_mem[3] = address + 4;
12076 thumb2_insn_r->mem_rec_count = 2;
12081 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12083 if (bit (thumb2_insn_r->arm_insn, 24))
12085 if (bit (thumb2_insn_r->arm_insn, 23))
12086 offset_addr = u_regval[0] + (offset_imm * 4);
12088 offset_addr = u_regval[0] - (offset_imm * 4);
12090 address = offset_addr;
12093 address = u_regval[0];
12095 record_buf_mem[0] = 4;
12096 record_buf_mem[1] = address;
12097 record_buf_mem[2] = 4;
12098 record_buf_mem[3] = address + 4;
12099 thumb2_insn_r->mem_rec_count = 2;
12100 record_buf[0] = reg_rn;
12101 thumb2_insn_r->reg_rec_count = 1;
12105 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12107 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12109 return ARM_RECORD_SUCCESS;
12112 /* Handler for thumb2 data processing (shift register and modified immediate)
12116 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12118 uint32_t reg_rd, op;
12119 uint32_t record_buf[8];
12121 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12122 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12124 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12126 record_buf[0] = ARM_PS_REGNUM;
12127 thumb2_insn_r->reg_rec_count = 1;
12131 record_buf[0] = reg_rd;
12132 record_buf[1] = ARM_PS_REGNUM;
12133 thumb2_insn_r->reg_rec_count = 2;
12136 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12138 return ARM_RECORD_SUCCESS;
12141 /* Generic handler for thumb2 instructions which effect destination and PS
12145 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12148 uint32_t record_buf[8];
12150 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12152 record_buf[0] = reg_rd;
12153 record_buf[1] = ARM_PS_REGNUM;
12154 thumb2_insn_r->reg_rec_count = 2;
12156 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12158 return ARM_RECORD_SUCCESS;
12161 /* Handler for thumb2 branch and miscellaneous control instructions. */
12164 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12166 uint32_t op, op1, op2;
12167 uint32_t record_buf[8];
12169 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12170 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12171 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12173 /* Handle MSR insn. */
12174 if (!(op1 & 0x2) && 0x38 == op)
12178 /* CPSR is going to be changed. */
12179 record_buf[0] = ARM_PS_REGNUM;
12180 thumb2_insn_r->reg_rec_count = 1;
12184 arm_record_unsupported_insn (thumb2_insn_r);
12188 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12191 record_buf[0] = ARM_PS_REGNUM;
12192 record_buf[1] = ARM_LR_REGNUM;
12193 thumb2_insn_r->reg_rec_count = 2;
12196 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12198 return ARM_RECORD_SUCCESS;
12201 /* Handler for thumb2 store single data item instructions. */
12204 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12206 struct regcache *reg_cache = thumb2_insn_r->regcache;
12208 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12209 uint32_t address, offset_addr;
12210 uint32_t record_buf[8], record_buf_mem[8];
12213 ULONGEST u_regval[2];
12215 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12216 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12217 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12218 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12220 if (bit (thumb2_insn_r->arm_insn, 23))
12223 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12224 offset_addr = u_regval[0] + offset_imm;
12225 address = offset_addr;
12230 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12232 /* Handle STRB (register). */
12233 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12234 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12235 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12236 offset_addr = u_regval[1] << shift_imm;
12237 address = u_regval[0] + offset_addr;
12241 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12242 if (bit (thumb2_insn_r->arm_insn, 10))
12244 if (bit (thumb2_insn_r->arm_insn, 9))
12245 offset_addr = u_regval[0] + offset_imm;
12247 offset_addr = u_regval[0] - offset_imm;
12249 address = offset_addr;
12252 address = u_regval[0];
12258 /* Store byte instructions. */
12261 record_buf_mem[0] = 1;
12263 /* Store half word instructions. */
12266 record_buf_mem[0] = 2;
12268 /* Store word instructions. */
12271 record_buf_mem[0] = 4;
12275 gdb_assert_not_reached ("no decoding pattern found");
12279 record_buf_mem[1] = address;
12280 thumb2_insn_r->mem_rec_count = 1;
12281 record_buf[0] = reg_rn;
12282 thumb2_insn_r->reg_rec_count = 1;
12284 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12286 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12288 return ARM_RECORD_SUCCESS;
12291 /* Handler for thumb2 load memory hints instructions. */
12294 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12296 uint32_t record_buf[8];
12297 uint32_t reg_rt, reg_rn;
12299 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12300 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12302 if (ARM_PC_REGNUM != reg_rt)
12304 record_buf[0] = reg_rt;
12305 record_buf[1] = reg_rn;
12306 record_buf[2] = ARM_PS_REGNUM;
12307 thumb2_insn_r->reg_rec_count = 3;
12309 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12311 return ARM_RECORD_SUCCESS;
12314 return ARM_RECORD_FAILURE;
12317 /* Handler for thumb2 load word instructions. */
12320 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12322 uint32_t opcode1 = 0, opcode2 = 0;
12323 uint32_t record_buf[8];
12325 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12326 record_buf[1] = ARM_PS_REGNUM;
12327 thumb2_insn_r->reg_rec_count = 2;
12329 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12331 return ARM_RECORD_SUCCESS;
12334 /* Handler for thumb2 long multiply, long multiply accumulate, and
12335 divide instructions. */
12338 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12340 uint32_t opcode1 = 0, opcode2 = 0;
12341 uint32_t record_buf[8];
12342 uint32_t reg_src1 = 0;
12344 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12345 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12347 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12349 /* Handle SMULL, UMULL, SMULAL. */
12350 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12351 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12352 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12353 record_buf[2] = ARM_PS_REGNUM;
12354 thumb2_insn_r->reg_rec_count = 3;
12356 else if (1 == opcode1 || 3 == opcode2)
12358 /* Handle SDIV and UDIV. */
12359 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12360 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12361 record_buf[2] = ARM_PS_REGNUM;
12362 thumb2_insn_r->reg_rec_count = 3;
12365 return ARM_RECORD_FAILURE;
12367 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12369 return ARM_RECORD_SUCCESS;
12372 /* Record handler for thumb32 coprocessor instructions. */
12375 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12377 if (bit (thumb2_insn_r->arm_insn, 25))
12378 return arm_record_coproc_data_proc (thumb2_insn_r);
12380 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12383 /* Record handler for advanced SIMD structure load/store instructions. */
12386 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12388 struct regcache *reg_cache = thumb2_insn_r->regcache;
12389 uint32_t l_bit, a_bit, b_bits;
12390 uint32_t record_buf[128], record_buf_mem[128];
12391 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
12392 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12395 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12396 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12397 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12398 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12399 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12400 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12401 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12402 f_esize = 8 * f_ebytes;
12403 f_elem = 8 / f_ebytes;
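/* f_ebytes is the element size in bytes (1, 2, 4 or 8, from the size field
   in bits 6-7), f_esize the element size in bits, and f_elem the number of
   elements per 8-byte doubleword register. */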
12407 ULONGEST u_regval = 0;
12408 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12409 address = u_regval;
12414 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12416 if (b_bits == 0x07)
12418 else if (b_bits == 0x0a)
12420 else if (b_bits == 0x06)
12422 else if (b_bits == 0x02)
12427 for (index_r = 0; index_r < bf_regs; index_r++)
12429 for (index_e = 0; index_e < f_elem; index_e++)
12431 record_buf_mem[index_m++] = f_ebytes;
12432 record_buf_mem[index_m++] = address;
12433 address = address + f_ebytes;
12434 thumb2_insn_r->mem_rec_count += 1;
12439 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12441 if (b_bits == 0x09 || b_bits == 0x08)
12443 else if (b_bits == 0x03)
12448 for (index_r = 0; index_r < bf_regs; index_r++)
12449 for (index_e = 0; index_e < f_elem; index_e++)
12451 for (loop_t = 0; loop_t < 2; loop_t++)
12453 record_buf_mem[index_m++] = f_ebytes;
12454 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12455 thumb2_insn_r->mem_rec_count += 1;
12457 address = address + (2 * f_ebytes);
12461 else if ((b_bits & 0x0e) == 0x04)
12463 for (index_e = 0; index_e < f_elem; index_e++)
12465 for (loop_t = 0; loop_t < 3; loop_t++)
12467 record_buf_mem[index_m++] = f_ebytes;
12468 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12469 thumb2_insn_r->mem_rec_count += 1;
12471 address = address + (3 * f_ebytes);
12475 else if (!(b_bits & 0x0e))
12477 for (index_e = 0; index_e < f_elem; index_e++)
12479 for (loop_t = 0; loop_t < 4; loop_t++)
12481 record_buf_mem[index_m++] = f_ebytes;
12482 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12483 thumb2_insn_r->mem_rec_count += 1;
12485 address = address + (4 * f_ebytes);
12491 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12493 if (bft_size == 0x00)
12495 else if (bft_size == 0x01)
12497 else if (bft_size == 0x02)
12503 if (!(b_bits & 0x0b) || b_bits == 0x08)
12504 thumb2_insn_r->mem_rec_count = 1;
12506 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12507 thumb2_insn_r->mem_rec_count = 2;
12509 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12510 thumb2_insn_r->mem_rec_count = 3;
12512 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12513 thumb2_insn_r->mem_rec_count = 4;
12515 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12517 record_buf_mem[index_m] = f_ebytes;
12518 record_buf_mem[index_m] = address + (index_m * f_ebytes);
12527 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12528 thumb2_insn_r->reg_rec_count = 1;
12530 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12531 thumb2_insn_r->reg_rec_count = 2;
12533 else if ((b_bits & 0x0e) == 0x04)
12534 thumb2_insn_r->reg_rec_count = 3;
12536 else if (!(b_bits & 0x0e))
12537 thumb2_insn_r->reg_rec_count = 4;
12542 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12543 thumb2_insn_r->reg_rec_count = 1;
12545 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12546 thumb2_insn_r->reg_rec_count = 2;
12548 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12549 thumb2_insn_r->reg_rec_count = 3;
12551 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12552 thumb2_insn_r->reg_rec_count = 4;
12554 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12555 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
12559 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12561 record_buf[index_r] = reg_rn;
12562 thumb2_insn_r->reg_rec_count += 1;
12565 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12567 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12572 /* Decodes thumb2 instruction type and invokes its record handler. */
12574 static unsigned int
12575 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12577 uint32_t op, op1, op2;
12579 op = bit (thumb2_insn_r->arm_insn, 15);
12580 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12581 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
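/* After the halfword swap, op1 (bits 27-28, i.e. bits 12-11 of the first
   halfword) selects the major Thumb-2 encoding group; op2 and op refine the
   choice in the dispatch below. */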
12585 if (!(op2 & 0x64))
12587 /* Load/store multiple instruction. */
12588 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12590 else if (!((op2 & 0x64) ^ 0x04))
12592 /* Load/store (dual/exclusive) and table branch instruction. */
12593 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12595 else if (!((op2 & 0x20) ^ 0x20))
12597 /* Data-processing (shifted register). */
12598 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12600 else if (op2 & 0x40)
12602 /* Co-processor instructions. */
12603 return thumb2_record_coproc_insn (thumb2_insn_r);
12606 else if (op1 == 0x02)
12610 /* Branches and miscellaneous control instructions. */
12611 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12613 else if (op2 & 0x20)
12615 /* Data-processing (plain binary immediate) instruction. */
12616 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12620 /* Data-processing (modified immediate). */
12621 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12624 else if (op1 == 0x03)
12626 if (!(op2 & 0x71))
12628 /* Store single data item. */
12629 return thumb2_record_str_single_data (thumb2_insn_r);
12631 else if (!((op2 & 0x71) ^ 0x10))
12633 /* Advanced SIMD or structure load/store instructions. */
12634 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12636 else if (!((op2 & 0x67) ^ 0x01))
12638 /* Load byte, memory hints instruction. */
12639 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12641 else if (!((op2 & 0x67) ^ 0x03))
12643 /* Load halfword, memory hints instruction. */
12644 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12646 else if (!((op2 & 0x67) ^ 0x05))
12648 /* Load word instruction. */
12649 return thumb2_record_ld_word (thumb2_insn_r);
12651 else if (!((op2 & 0x70) ^ 0x20))
12653 /* Data-processing (register) instruction. */
12654 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12656 else if (!((op2 & 0x78) ^ 0x30))
12658 /* Multiply, multiply accumulate, abs diff instruction. */
12659 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12661 else if (!((op2 & 0x78) ^ 0x38))
12663 /* Long multiply, long multiply accumulate, and divide. */
12664 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12666 else if (op2 & 0x40)
12668 /* Co-processor instructions. */
12669 return thumb2_record_coproc_insn (thumb2_insn_r);
12676 /* Extract an arm/thumb/thumb2 insn depending on the size, and return 0 on
12677 success and a positive value on failure. */
12680 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12682 gdb_byte buf[insn_size];
12684 memset (&buf[0], 0, insn_size);
12686 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12688 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12690 gdbarch_byte_order_for_code (insn_record->gdbarch));
12694 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12696 /* Decode arm/thumb insn depending on condition codes and opcodes; and
12700 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12701 uint32_t insn_size)
12704 /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm instruction. */
12705 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12707 arm_record_data_proc_misc_ld_str, /* 000. */
12708 arm_record_data_proc_imm, /* 001. */
12709 arm_record_ld_st_imm_offset, /* 010. */
12710 arm_record_ld_st_reg_offset, /* 011. */
12711 arm_record_ld_st_multiple, /* 100. */
12712 arm_record_b_bl, /* 101. */
12713 arm_record_asimd_vfp_coproc, /* 110. */
12714 arm_record_coproc_data_proc /* 111. */
12717 /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb instruction. */
12718 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12720 thumb_record_shift_add_sub, /* 000. */
12721 thumb_record_add_sub_cmp_mov, /* 001. */
12722 thumb_record_ld_st_reg_offset, /* 010. */
12723 thumb_record_ld_st_imm_offset, /* 011. */
12724 thumb_record_ld_st_stack, /* 100. */
12725 thumb_record_misc, /* 101. */
12726 thumb_record_ldm_stm_swi, /* 110. */
12727 thumb_record_branch /* 111. */
12730 uint32_t ret = 0; /* Return value: negative on failure, 0 on success. */
12731 uint32_t insn_id = 0;
12733 if (extract_arm_insn (arm_record, insn_size))
12737 printf_unfiltered (_("Process record: error reading memory at "
12738 "addr %s len = %d.\n"),
12739 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
12743 else if (ARM_RECORD == record_type)
12745 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12746 insn_id = bits (arm_record->arm_insn, 25, 27);
12747 ret = arm_record_extension_space (arm_record);
12748 /* If this insn has fallen into extension space
12749 then we need not decode it anymore. */
12750 if (ret != -1 && !INSN_RECORDED(arm_record))
12752 ret = arm_handle_insn[insn_id] (arm_record);
12755 else if (THUMB_RECORD == record_type)
12757 /* As thumb does not have condition codes, we set cond to -1. */
12758 arm_record->cond = -1;
12759 insn_id = bits (arm_record->arm_insn, 13, 15);
12760 ret = thumb_handle_insn[insn_id] (arm_record);
12762 else if (THUMB2_RECORD == record_type)
12764 /* As thumb does not have condition codes, we set cond to -1. */
12765 arm_record->cond = -1;
12767 /* Swap the first half of the 32-bit thumb instruction with the second half. */
12768 arm_record->arm_insn
12769 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
12771 insn_id = thumb2_record_decode_insn_handler (arm_record);
12773 if (insn_id != ARM_RECORD_SUCCESS)
12775 arm_record_unsupported_insn (arm_record);
12781 /* Throw assertion. */
12782 gdb_assert_not_reached ("not a valid instruction, could not decode");
12789 /* Cleans up local record registers and memory allocations. */
12792 deallocate_reg_mem (insn_decode_record *record)
12794 xfree (record->arm_regs);
12795 xfree (record->arm_mems);
12799 /* Parse the current instruction and record the values of the registers and
12800 memory that will be changed by the current instruction to "record_arch_list".
12801 Return -1 if something is wrong. */
12804 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12805 CORE_ADDR insn_addr)
12808 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
12809 uint32_t no_of_rec = 0;
12810 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
12811 ULONGEST t_bit = 0, insn_id = 0;
12813 ULONGEST u_regval = 0;
12815 insn_decode_record arm_record;
12817 memset (&arm_record, 0, sizeof (insn_decode_record));
12818 arm_record.regcache = regcache;
12819 arm_record.this_addr = insn_addr;
12820 arm_record.gdbarch = gdbarch;
12823 if (record_debug > 1)
12825 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12827 paddress (gdbarch, arm_record.this_addr));
12830 if (extract_arm_insn (&arm_record, 2))
12834 printf_unfiltered (_("Process record: error reading memory at "
12835 "addr %s len = %d.\n"),
12836 paddress (arm_record.gdbarch,
12837 arm_record.this_addr), 2);
12842 /* Check whether the insn is a thumb or an arm one. */
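/* The T bit of the CPSR gives the execution state: 0 for ARM, 1 for Thumb. */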
12844 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
12845 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
12848 if (!(u_regval & t_bit))
12850 /* We are decoding arm insn. */
12851 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
12855 insn_id = bits (arm_record.arm_insn, 11, 15);
12856 /* Is it a thumb2 insn? */
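/* A 32-bit Thumb-2 instruction begins with a halfword whose top five bits
   are 0b11101, 0b11110 or 0b11111 (0x1D, 0x1E, 0x1F); any other prefix is a
   16-bit Thumb instruction. */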
12857 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
12859 ret = decode_insn (&arm_record, THUMB2_RECORD,
12860 THUMB2_INSN_SIZE_BYTES);
12864 /* We are decoding thumb insn. */
12865 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
12871 /* Record registers. */
12872 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
12873 if (arm_record.arm_regs)
12875 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
12877 if (record_full_arch_list_add_reg
12878 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
12882 /* Record memories. */
12883 if (arm_record.arm_mems)
12885 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
12887 if (record_full_arch_list_add_mem
12888 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
12889 arm_record.arm_mems[no_of_rec].len))
12894 if (record_full_arch_list_add_end ())
12899 deallocate_reg_mem (&arm_record);